npc0 committed on
Commit 47a1c22
1 Parent(s): fbdf575

Delete config.json

Files changed (1)
  1. config.json +0 -35
config.json DELETED
@@ -1,35 +0,0 @@
- {
-   "_name_or_path": "THUDM/chatglm3-6b",
-   "model_type": "chatglm",
-   "architectures": [
-     "ChatGLMModel"
-   ],
-   "add_bias_linear": false,
-   "add_qkv_bias": true,
-   "apply_query_key_layer_scaling": true,
-   "apply_residual_connection_post_layernorm": false,
-   "attention_dropout": 0.0,
-   "attention_softmax_in_fp32": true,
-   "bias_dropout_fusion": true,
-   "ffn_hidden_size": 13696,
-   "fp32_residual_connection": false,
-   "hidden_dropout": 0.0,
-   "hidden_size": 4096,
-   "kv_channels": 128,
-   "layernorm_epsilon": 1e-05,
-   "multi_query_attention": true,
-   "multi_query_group_num": 2,
-   "num_attention_heads": 32,
-   "num_layers": 28,
-   "original_rope": true,
-   "padded_vocab_size": 65024,
-   "post_layer_norm": true,
-   "rmsnorm": true,
-   "seq_length": 8192,
-   "use_cache": true,
-   "torch_dtype": "float16",
-   "transformers_version": "4.30.2",
-   "tie_word_embeddings": false,
-   "eos_token_id": 2,
-   "pad_token_id": 0
- }
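
For reference, the deleted file is the standard ChatGLM3-6B model configuration. Below is a minimal sketch of how such a config would normally be inspected with the Hugging Face transformers library; the model id THUDM/chatglm3-6b is taken from the file itself, and trust_remote_code=True is assumed to be required because "chatglm" is a custom (remote-code) architecture rather than a built-in model type.

```python
# Minimal sketch: loading the ChatGLM3-6B configuration via transformers.
# Assumes the upstream repo THUDM/chatglm3-6b is still available on the Hub.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("THUDM/chatglm3-6b", trust_remote_code=True)

# A few of the fields that were removed in this commit:
print(config.model_type)   # "chatglm"
print(config.hidden_size)  # 4096
print(config.num_layers)   # 28
print(config.seq_length)   # 8192
```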