{
  "_name_or_path": "microsoft/phi-1_5",
  "activation_function": "gelu_new",
  "architecture": {
    "block_cls": "parallel",
    "mixer": {},
    "mlp": {
      "mlp_cls": "mlp"
    }
  },
  "architectures": [
    "PhiForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "microsoft/phi-1_5--configuration_phi.PhiConfig",
    "AutoModelForCausalLM": "microsoft/phi-1_5--modeling_phi.PhiForCausalLM"
  },
  "embd_layer": "default",
  "embd_pdrop": 0.0,
  "flash_attn": false,
  "flash_rotary": false,
  "fused_dense": false,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "model_type": "phi",
  "n_embd": 2048,
  "n_head": 32,
  "n_head_kv": null,
  "n_inner": null,
  "n_layer": 24,
  "n_positions": 2048,
  "phyagi_version": "0.0.4.dev",
  "resid_pdrop": 0.0,
  "rotary_dim": 32,
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": "4.34.1",
  "use_cache": true,
  "vocab_size": 50304
}
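
A minimal sketch of loading a model from this config with the `transformers` library (assumes `transformers` and `torch` are installed). Because the `auto_map` entries above point to custom `configuration_phi.py`/`modeling_phi.py` code hosted on the Hub, this legacy config format requires `trust_remote_code=True`; recent `transformers` releases also ship a native `phi` implementation, in which case the flag may be unnecessary. The prompt string is illustrative only.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# torch_dtype matches the "torch_dtype": "float16" field in the config;
# trust_remote_code=True is needed because of the "auto_map" entries.
model = AutoModelForCausalLM.from_pretrained(
    "microsoft/phi-1_5",
    torch_dtype=torch.float16,
    trust_remote_code=True,
)
tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-1_5")

# Illustrative usage: generate a short completion.
inputs = tokenizer("def fibonacci(n):", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0]))
```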