{
  "architectures": ["LlamaForCausalLM"],
  "attention_dropout": 0.0,
  "hidden_size": 4096,
  "intermediate_size": 14336,
"max_position_embeddings": 2048, | |
"num_attention_heads": 32, | |
"num_hidden_layers": 32, | |
"rope_scaling": { | |
"factor": 8.0, | |
"original_max_position_embeddings": 8192 | |
}, | |
"vocab_size": 128256 | |
} | |
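
A minimal sketch of loading and sanity-checking this config with the Hugging Face `transformers` library, assuming the JSON above is saved as `config.json` (the file path and the printed values are illustrative, not part of the snippet itself):

```python
from transformers import LlamaConfig

# Assumption: the JSON above has been saved locally as "config.json".
config = LlamaConfig.from_json_file("config.json")

# In the standard Llama attention layout, each head attends over
# hidden_size / num_attention_heads dimensions.
head_dim = config.hidden_size // config.num_attention_heads
print(head_dim)  # 4096 // 32 = 128

# rope_scaling extends the usable context window: the model was pretrained
# with original_max_position_embeddings positions, and RoPE scaling lets it
# accept sequences up to max_position_embeddings.
print(config.max_position_embeddings)                          # 131072
print(config.rope_scaling["original_max_position_embeddings"])  # 8192
```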