Phi-3-mini-4k-instruct / config.json
Duplicate from schmuell/phi3-int4 (commit 85c6f62)
{
"_name_or_path": "/scratch/azureml/cr/j/c114671e623f4fd783db31f5a24e9c46/cap/data-capability/wd/INPUT_model_path",
"architectures": [
"PhiLongRoPEForCausalLM"
],
"attention_dropout": 0.0,
"xauto_map": {
"AutoConfig": "configuration_phi_longrope.PhiLongRoPEConfig",
"AutoModelForCausalLM": "modeling_phi_longrope.PhiLongRoPEForCausalLM"
},
"bos_token_id": 1,
"eos_token_id": 2,
"hidden_act": "silu",
"hidden_size": 3072,
"initializer_range": 0.02,
"intermediate_size": 8192,
"max_position_embeddings": 131072,
"model_type": "mistral",
"xmodel_type": "phi_longrope",
"num_attention_heads": 32,
"num_hidden_layers": 32,
"num_key_value_heads": 32,
"original_max_position_embeddings": 4096,
"rms_norm_eps": 1e-05,
"rope_scaling": {
"long_factor": [
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
3.7391297817230225,
3.7699999999999756,
6.679999999999908,
11.956522941589355,
14.695652961730957,
16.649999999999697,
20.83000000000035,
22.91304588317871,
25.652175903320312,
28.391305923461914,
31.130435943603516,
33.869564056396484,
36.60869598388672,
39.34782409667969,
42.08695983886719,
44.826087951660156,
47.565216064453125,
50.30434799194336,
53.04347610473633,
55.78260803222656,
58.5217399597168,
61.26087188720703,
64.0,
64.0,
64.0,
64.0,
64.0,
64.0,
64.0,
64.0,
64.0,
64.0,
64.0,
64.0,
64.0,
64.0,
64.0,
64.0,
64.0
],
"short_factor": [
1.0,
1.0,
1.0,
1.0,
1.05,
1.05,
1.1500000000000001,
1.2000000000000002,
1.2500000000000002,
1.5000000000000004,
1.8000000000000007,
2.25,
2.3000000000000007,
2.3500000000000014,
2.5,
2.5,
2.55,
2.5999999999999988,
2.6999999999999993,
2.6999999999999993,
2.8499999999999988,
2.8999999999999986,
2.9999999999999982,
2.9999999999999982,
2.9999999999999982,
3.099999999999998,
3.299999999999997,
3.399999999999997,
3.599999999999996,
3.649999999999996,
3.7999999999999954,
3.899999999999995,
3.899999999999995,
3.899999999999995,
4.099999999999994,
4.099999999999994,
4.099999999999994,
4.099999999999994,
4.099999999999994,
4.149999999999994,
4.149999999999994,
4.149999999999994,
4.149999999999994,
4.149999999999994,
4.199999999999994,
4.249999999999994,
4.299999999999994,
4.399999999999993
],
"type": "longrope"
},
"rope_theta": 10000.0,
"sliding_window": 131072,
"tie_word_embeddings": false,
"torch_dtype": "bfloat16",
"transformers_version": "4.38.1",
"use_cache": true,
"vocab_size": 32038
}
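
This config is arranged to load with stock transformers classes: model_type is "mistral", and the custom-code hooks appear deliberately disabled by prefixing their keys ("xauto_map", "xmodel_type"), so AutoConfig never resolves configuration_phi_longrope / modeling_phi_longrope. A minimal loading sketch; the repo id is an assumption, and unrecognized keys such as original_max_position_embeddings are simply retained as extra config attributes:

```python
from transformers import AutoConfig

# model_type "mistral" routes AutoConfig to the built-in MistralConfig; the
# "xauto_map" entry is an unknown key, so no custom code is consulted.
# The repo id below is an assumption; a local clone of this repo works too.
config = AutoConfig.from_pretrained("Xenova/Phi-3-mini-4k-instruct")
print(config.model_type)                          # "mistral"
print(config.original_max_position_embeddings)    # 4096 (kept as extra attr)
```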
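The rope_scaling block is LongRoPE: head_dim = hidden_size / num_attention_heads = 3072 / 32 = 96, so there are 96 / 2 = 48 rotary frequency pairs, and each list supplies one extension factor per pair. short_factor applies at or below original_max_position_embeddings (4096 tokens) and long_factor beyond it, stretching the 4k-trained rotary embeddings toward the 131072-token max_position_embeddings. A sketch of how these factors rescale the rotary inverse frequencies, modeled on the LongRoPE ("su") rotary embedding in transformers' Phi-3 code; treat it as illustrative, not the exact shipped implementation:

```python
import json
import math

import torch

with open("config.json") as f:  # the file above; the path is an assumption
    cfg = json.load(f)

head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]       # 96
assert len(cfg["rope_scaling"]["long_factor"]) == head_dim // 2   # 48 pairs

def longrope_inv_freq(seq_len: int) -> torch.Tensor:
    """Per-dimension rotary inverse frequencies with LongRoPE rescaling."""
    if seq_len > cfg["original_max_position_embeddings"]:         # > 4096
        factors = torch.tensor(cfg["rope_scaling"]["long_factor"])
    else:
        factors = torch.tensor(cfg["rope_scaling"]["short_factor"])
    dims = torch.arange(0, head_dim, 2, dtype=torch.float32)
    # Standard RoPE frequencies theta^(-2i/d), each divided by its factor.
    return 1.0 / (factors * cfg["rope_theta"] ** (dims / head_dim))

# Phi-3's LongRoPE additionally scales the cos/sin tables by a constant
# magnitude correction sqrt(1 + ln(scale) / ln(original_max)),
# where scale = 131072 / 4096 = 32.
scale = cfg["max_position_embeddings"] / cfg["original_max_position_embeddings"]
attention_factor = math.sqrt(
    1 + math.log(scale) / math.log(cfg["original_max_position_embeddings"])
)
```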