blast-llama-4B / config.json
{
  "architectures": [
    "BlastModelForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "modeling_blast.BlastLlamaConfig",
    "AutoModelForCausalLM": "modeling_blast.BlastModelForCausalLM"
  },
  "blast_num_blocks": [
    16
  ],
  "blast_rank": {
    "down_proj": 1488,
    "gate_proj": 1488,
    "k_proj": 1024,
    "o_proj": 1024,
    "q_proj": 1024,
    "up_proj": 1488,
    "v_proj": 1024
  },
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "indices": [
    0,
    1,
    2,
    3,
    4,
    5,
    6,
    7,
    8,
    9,
    10,
    11,
    12,
    13,
    14,
    15,
    16,
    17,
    18,
    19,
    20,
    21,
    22,
    23,
    24,
    25,
    26,
    27,
    28,
    29,
    30,
    31
  ],
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "max_position_embeddings": 2048,
  "max_sequence_length": 2048,
  "mlp_bias": false,
  "model_type": "blast_llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "pad_token_id": 0,
  "precompute_matrix": true,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 10000.0,
  "target_modules": [
    "q_proj",
    "k_proj",
    "v_proj",
    "o_proj",
    "gate_proj",
    "up_proj",
    "down_proj"
  ],
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": "4.44.2",
  "use_cache": true,
  "vocab_size": 32000
}
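
Because "auto_map" routes AutoConfig and AutoModelForCausalLM to custom classes in modeling_blast.py, loading this checkpoint through transformers requires trust_remote_code=True. A minimal loading sketch, assuming the repository id cwoolee/blast-llama-4B (inferred from the page title, not stated in the config) and that modeling_blast.py ships alongside the checkpoint:

import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

# Assumption: repo id taken from the page title; adjust if the model lives elsewhere.
repo_id = "cwoolee/blast-llama-4B"

# "auto_map" points to modeling_blast.BlastLlamaConfig / BlastModelForCausalLM,
# so trust_remote_code=True is needed to execute that custom model code.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.float16,  # matches "torch_dtype": "float16" in the config
    trust_remote_code=True,
)
tokenizer = AutoTokenizer.from_pretrained(repo_id)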