{ "_name_or_path": "15b-50B", "add_bos_token": false, "architectures": [ "TransnormerForCausalLM" ], "auto_map": { "AutoConfig": "configuration_transnormer.TransnormerConfig", "AutoModelForCausalLM": "modeling_transnormer.TransnormerForCausalLM" }, "bias": false, "bos_token_id": 100261, "decoder_attention_heads": 40, "decoder_embed_dim": 5120, "decoder_layers": 42, "eos_token_id": 100257, "gate_dim": 16, "glu_dim": 15360, "hidden_dim": 5120, "init_std": 0.02, "linear_act_fun": "swish", "linear_use_lrpe": 0, "linear_use_lrpe_list": [ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ], "model_type": "transnormer", "no_scale_embedding": false, "norm_type": "simplermsnorm", "pad_token_id": 100262, "torch_dtype": "bfloat16", "transformers_version": "4.33.1", "use_cache": true, "vocab_size": 100280 }