{ "architectures": [ "KBERTForMaskedLM" ], "intermediate_dim": 1536, "model_dim": 768, "num_attention_heads": 6, "num_layers": 12, "tokenizer_uri": "answerdotai/ModernBERT-base", "torch_dtype": "bfloat16", "transformers_version": "4.47.1" }