tiny-DbrxForCausalLM / config.json
qgallouedec · Upload DbrxForCausalLM · commit d974c5c (verified)
{
  "architectures": [
    "DbrxForCausalLM"
  ],
  "attn_config": {
    "model_type": ""
  },
  "d_model": 8,
  "emb_pdrop": 0.0,
  "ffn_config": {
    "model_type": ""
  },
  "initializer_range": 0.02,
  "intermediate_size": 32,
  "max_seq_len": 2048,
  "model_type": "dbrx",
  "n_heads": 4,
  "n_layers": 2,
  "num_key_value_heads": 2,
  "output_router_logits": false,
  "resid_pdrop": 0.0,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.47.0.dev0",
  "use_cache": true,
  "vocab_size": 100302
}
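
For context, the file above is the full configuration of a deliberately tiny DBRX test model (hidden size 8, 2 layers, 4 attention heads). Below is a minimal sketch, not part of the repository, of how such a checkpoint is typically loaded and exercised with the transformers library; the path "./tiny-DbrxForCausalLM" is a placeholder for wherever this config.json and its weights actually live (a local directory or a Hub repo id).

import torch
from transformers import DbrxForCausalLM

# "./tiny-DbrxForCausalLM" is a hypothetical path; substitute the real
# directory or Hub repo id that holds this config.json and its weights.
model = DbrxForCausalLM.from_pretrained("./tiny-DbrxForCausalLM")
model.eval()

# Dummy forward pass: batch of 1, sequence length 8, token ids below vocab_size (100302).
input_ids = torch.randint(0, model.config.vocab_size, (1, 8))
with torch.no_grad():
    logits = model(input_ids).logits
print(logits.shape)  # expected: torch.Size([1, 8, 100302])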