{
  "architectures": [
    "tnl1-385m-10b-token_no-act"
  ],
  "auto_map": {
    "AutoConfig": "configuration_transnormer.TransnormerConfig",
    "AutoModelForCausalLM": "modeling_transnormer.TransnormerForCausalLM"
  },
  "bos_token_id": 50260,
  "eos_token_id": 50260,
  "vocab_size": 50272,
  "use_cache": true,
  "init_std": 0.02,
  "decoder_embed_dim": 1024,
  "decoder_layers": 24,
  "decoder_attention_heads": 8,
  "no_scale_embedding": false,
  "add_bos_token": false,
  "norm_type": "simplermsnorm",
  "linear_use_lrpe_list": [
    1,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0,
    0
  ],
  "hidden_dim": 1024,
  "linear_act_fun": "relu",
  "glu_dim": 2816,
  "bias": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.38.2"
}