kbert_trial0 / config.json
Commit 8b26622 (verified): Upload KBERTForMaskedLM
{
"architectures": [
"KBERTForMaskedLM"
],
"intermediate_dim": 1536,
"model_dim": 768,
"num_attention_heads": 6,
"num_layers": 12,
"tokenizer_uri": "answerdotai/ModernBERT-base",
"torch_dtype": "bfloat16",
"transformers_version": "4.47.1"
}
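
For context, the fields above describe a 12-layer encoder with 768-dimensional hidden states, 6 attention heads, and a 1536-dimensional feed-forward block, stored in bfloat16 and tokenized with answerdotai/ModernBERT-base. Below is a minimal sketch of reading these fields, assuming the file has been downloaded locally as config.json; the KBERT modeling code itself is not part of this file (config.json lists no model_type or auto_map), so loading the model class is not shown.

```python
import json
import torch
from transformers import AutoTokenizer

# Read the config shown above (assumed to be saved locally as config.json).
with open("config.json") as f:
    cfg = json.load(f)

# The checkpoint points at ModernBERT-base's tokenizer instead of bundling one.
tokenizer = AutoTokenizer.from_pretrained(cfg["tokenizer_uri"])

# Sanity-check the geometry recorded in the config: model_dim must split evenly
# across the attention heads (768 / 6 = 128-dimensional heads here).
assert cfg["model_dim"] % cfg["num_attention_heads"] == 0
head_dim = cfg["model_dim"] // cfg["num_attention_heads"]

# Map the stored dtype string onto the corresponding torch dtype (bfloat16).
dtype = getattr(torch, cfg["torch_dtype"])

print(cfg["num_layers"], cfg["model_dim"], head_dim, cfg["intermediate_dim"], dtype)
```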