Adia_Llama3.1 / config.json
{
"architectures": ["LlamaForCausalLM"],
"attention_dropout": 0.0,
"hidden_size": 4096,
"intermediate_size": 14336,
"max_position_embeddings": 2048,
"num_attention_heads": 32,
"num_hidden_layers": 32,
"rope_scaling": {
"factor": 8.0,
"original_max_position_embeddings": 8192
},
"vocab_size": 128256
}
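
A minimal sketch of how this config could be consumed with the `transformers` library, either from the Hub or from the local JSON file. The repo id `Hawoly18/Adia_Llama3.1` is an assumption inferred from the page path; adjust it to the actual repository hosting this file.

```python
from transformers import AutoConfig, LlamaConfig

# Option 1: load from the Hub (assumed repo id; replace if different).
config = AutoConfig.from_pretrained("Hawoly18/Adia_Llama3.1")

# Option 2: load the local config.json directly.
local_config = LlamaConfig.from_json_file("config.json")

# Inspect the fields defined above.
print(config.hidden_size)        # 4096
print(config.num_hidden_layers)  # 32
print(config.rope_scaling)       # {"factor": 8.0, "original_max_position_embeddings": 8192}
```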