quantumaikr committed
Commit 554a491
1 Parent(s): 7547df2

Update config.json

Files changed (1): config.json (+2 -2)
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_name_or_path": "result/checkpoint-3000",
   "architectures": [
-    "PlanktonForCausalLM"
+    "LlamaForCausalLM"
   ],
   "bos_token_id": 0,
   "eos_token_id": 1,
@@ -10,7 +10,7 @@
   "initializer_range": 0.02,
   "intermediate_size": 3600,
   "max_position_embeddings": 2000,
-  "model_type": "PLANKTON",
+  "model_type": "llama",
   "num_attention_heads": 15,
   "num_hidden_layers": 15,
   "num_key_value_heads": 15,