{ "_name_or_path": "/covalent/.cache/models/tmpoel8etc5a80hfd_0", "architectures": [ "MiniCPM3ForCausalLM" ], "attention_bias": false, "attention_dropout": 0.0, "auto_map": { "AutoConfig": "configuration_minicpm.MiniCPM3Config", "AutoModel": "openbmb/MiniCPM3-4B--modeling_minicpm.MiniCPM3Model", "AutoModelForCausalLM": "openbmb/MiniCPM3-4B--modeling_minicpm.MiniCPM3ForCausalLM", "AutoModelForSeq2SeqLM": "openbmb/MiniCPM3-4B--modeling_minicpm.MiniCPM3ForCausalLM", "AutoModelForSequenceClassification": "openbmb/MiniCPM3-4B--modeling_minicpm.MiniCPM3ForSequenceClassification" }, "bos_token_id": 1, "dim_model_base": 256, "eos_token_id": [ 2, 73440 ], "head_dim": 96, "hidden_act": "silu", "hidden_size": 2560, "initializer_range": 0.1, "intermediate_size": 6400, "kv_lora_rank": 256, "max_position_embeddings": 32768, "model_type": "minicpm3", "num_attention_heads": 40, "num_hidden_layers": 62, "num_key_value_heads": 40, "pretraining_tp": 1, "q_lora_rank": 768, "qk_nope_head_dim": 64, "qk_rope_head_dim": 32, "quantization_config": { "_load_in_4bit": false, "_load_in_8bit": true, "bnb_4bit_compute_dtype": "bfloat16", "bnb_4bit_quant_storage": "uint8", "bnb_4bit_quant_type": "fp4", "bnb_4bit_use_double_quant": false, "llm_int8_enable_fp32_cpu_offload": false, "llm_int8_has_fp16_weight": false, "llm_int8_skip_modules": [ "lm_head" ], "llm_int8_threshold": 6.0, "load_in_4bit": false, "load_in_8bit": true, "quant_method": "bitsandbytes" }, "rms_norm_eps": 1e-05, "rope_scaling": { "long_factor": [ 1.0591234137867171, 1.1241891283591912, 1.2596935748670968, 1.5380380402321725, 2.093982484148734, 3.1446935121267696, 4.937952647693647, 7.524541999994549, 10.475458000005451, 13.062047352306353, 14.85530648787323, 15.906017515851266, 16.461961959767827, 16.740306425132907, 16.87581087164081, 16.940876586213285 ], "original_max_position_embeddings": 32768, "short_factor": [ 1.0591234137867171, 1.1241891283591912, 1.2596935748670968, 1.5380380402321725, 2.093982484148734, 3.1446935121267696, 4.937952647693647, 7.524541999994549, 10.475458000005451, 13.062047352306353, 14.85530648787323, 15.906017515851266, 16.461961959767827, 16.740306425132907, 16.87581087164081, 16.940876586213285 ], "type": "longrope" }, "rope_theta": 10000.0, "scale_depth": 1.4, "scale_emb": 12, "torch_dtype": "float16", "transformers_version": "4.46.2", "use_cache": true, "v_head_dim": 64, "vocab_size": 73448, "api_key": null }