phoebeklett committed
Commit 2e17c96
1 Parent(s): 2bd703d

Update config.json

Files changed (1):
  1. config.json +6 -6
config.json CHANGED
@@ -1,7 +1,7 @@
 {
-  "_name_or_path": "mosaicml/mpt-7b-chat",
+  "_name_or_path": "normalcomputing/extended-mind-mpt-7b-chat",
   "architectures": [
-    "ExtendedMPTForCausalLM"
+    "ExtendedMptForCausalLM"
   ],
   "attn_config": {
     "alibi": true,
@@ -21,8 +21,8 @@
     "use_active_externalism": true
   },
   "auto_map": {
-    "AutoConfig": "configuration.ExtendedMPTConfig",
-    "AutoModelForCausalLM": "modeling_mpt.ExtendedMPTForCausalLM"
+    "AutoConfig": "configuration.ExtendedMptConfig",
+    "AutoModelForCausalLM": "modeling.ExtendedMptForCausalLM"
   },
   "d_model": 4096,
   "emb_pdrop": 0,
@@ -52,7 +52,7 @@
   "resid_pdrop": 0,
   "tokenizer_name": "sam-mosaic/gpt-neox-20b-chatml",
   "torch_dtype": "float32",
-  "transformers_version": "4.33.3",
+  "transformers_version": "4.33.0",
   "use_active_externalism_by_layer": [
     true,
     true,
@@ -87,7 +87,7 @@
     true,
     true
   ],
-  "use_cache": false,
+  "use_cache": true,
   "verbose": 0,
   "vocab_size": 50432
 }
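
The updated auto_map entries point AutoConfig and AutoModelForCausalLM at the ExtendedMptConfig and ExtendedMptForCausalLM classes shipped with this repo (configuration.py and modeling.py), so the model is loaded through the standard transformers remote-code path. A minimal loading sketch, assuming the repo id from "_name_or_path" and the tokenizer named in "tokenizer_name" above; extended-mind-specific arguments (e.g. external memories) are not shown because they are not part of this config:

```python
# Minimal sketch: the auto_map above tells transformers to fetch
# configuration.ExtendedMptConfig and modeling.ExtendedMptForCausalLM from
# this repo, which requires trust_remote_code=True.
from transformers import AutoTokenizer, AutoModelForCausalLM

repo_id = "normalcomputing/extended-mind-mpt-7b-chat"  # from "_name_or_path"

tokenizer = AutoTokenizer.from_pretrained("sam-mosaic/gpt-neox-20b-chatml")  # "tokenizer_name" in the config
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

inputs = tokenizer("Hello", return_tensors="pt")
# "use_cache": true after this commit, so generation can reuse past key/values.
outputs = model.generate(**inputs, max_new_tokens=20, use_cache=True)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```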