{
  "_commit_hash": null,
  "_name_or_path": "togethercomputer/evo-1-131k-base",
  "architectures": [
    "StripedHyenaModelForCausalLM"
  ],
  "attn_layer_idxs": [
    8,
    16,
    24
  ],
  "auto_map": {
    "AutoConfig": "configuration_hyena.StripedHyenaConfig",
    "AutoModelForCausalLM": "modeling_hyena.StripedHyenaModelForCausalLM",
    "AutoTokenizer": [
      "tokenizer.ByteTokenizer",
      null
    ]
  },
  "column_split": false,
  "column_split_hyena": true,
  "eps": 1e-06,
  "final_norm": true,
  "hidden_size": 4096,
  "hyena_filter_groups": 1,
  "hyena_layer_idxs": [
    0,
    1,
    2,
    3,
    4,
    5,
    6,
    7,
    9,
    10,
    11,
    12,
    13,
    14,
    15,
    17,
    18,
    19,
    20,
    21,
    22,
    23,
    25,
    26,
    27,
    28,
    29,
    30,
    31
  ],
  "inference_mode": false,
  "inner_mlp_size": 10928,
  "log_intermediate_values": false,
  "make_vocab_size_divisible_by": 8,
  "max_seqlen": 131072,
  "mha_out_proj_bias": true,
  "mlp_activation": "gelu",
  "model_parallel_size": 1,
  "model_type": "stripedhyena",
  "num_attention_heads": 32,
  "num_filters": 4096,
  "num_layers": 32,
  "pipe_parallel_size": 1,
  "prefill_style": "fft",
  "proj_groups": 1,
  "qkv_proj_bias": true,
  "rotary_emb_base": 10000,
  "rotary_emb_scaling_factor": 16,
  "short_filter_bias": true,
  "short_filter_length": 3,
  "smeared_gqa": false,
  "split_k0": true,
  "state_size": 8,
  "tie_embeddings": true,
  "torch_dtype": "bfloat16",
  "transformers_version": null,
  "use_cache": true,
  "use_flash_attn": true,
  "use_flash_depthwise": true,
  "use_flash_rmsnorm": false,
  "use_flashfft": false,
  "use_interpolated_rotary_pos_emb": true,
  "vocab_size": 512
}
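
A minimal sketch of loading the model that ships this config through the standard Hugging Face auto classes. The repo id comes from "_name_or_path" above; trust_remote_code=True is needed because "auto_map" routes to custom classes bundled with the checkpoint (configuration_hyena, modeling_hyena, tokenizer.ByteTokenizer). This assumes the bundled ByteTokenizer follows the usual PreTrainedTokenizer interface; the DNA prompt is purely illustrative.

import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo = "togethercomputer/evo-1-131k-base"

# Custom config attributes come straight from the JSON above.
config = AutoConfig.from_pretrained(repo, trust_remote_code=True)
print(config.max_seqlen, config.vocab_size)  # 131072, 512

tokenizer = AutoTokenizer.from_pretrained(repo, trust_remote_code=True)

model = AutoModelForCausalLM.from_pretrained(
    repo,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in the config
    trust_remote_code=True,
)

# Byte-level vocabulary (vocab_size 512): inputs are raw characters,
# so a nucleotide string can be fed directly (illustrative prompt).
inputs = tokenizer("ACGT", return_tensors="pt")
outputs = model.generate(inputs["input_ids"], max_new_tokens=32)
print(tokenizer.decode(outputs[0]))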