{
	"architectures": ["LlamaForCausalLM"],
	"bos_token_id": 0,
	"eos_token_id": 1,
	"hidden_act": "silu",
	"hidden_size": 4096,
	"intermediate_size": 11008,
	"initializer_range": 0.02,
	"max_sequence_length": 2048,
	"model_type": "llama",
	"num_attention_heads": 32,
	"num_hidden_layers": 32,
	"pad_token_id": -1,
	"rms_norm_eps": 1e-6,
	"torch_dtype": "float16",
	"transformers_version": "4.27.0.dev0",
	"use_cache": true,
	"vocab_size": 32000
}
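
A minimal sketch of using this config with Hugging Face transformers (assuming a recent version with LLaMA support, 4.28+). The local directory path "./llama-7b" is an assumption for illustration; point it at whatever directory holds this config.json.

# Load the config from the directory containing config.json and build an
# uninitialized LlamaForCausalLM skeleton from it (no weights are loaded here).
from transformers import LlamaConfig, LlamaForCausalLM

config = LlamaConfig.from_pretrained("./llama-7b")   # hypothetical local path
print(config.hidden_size, config.num_hidden_layers)  # 4096, 32

model = LlamaForCausalLM(config)  # randomly initialized model matching the config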