quantumaikr committed
Commit c18cdca · Parent: 9376fc9

Upload PlanktonForCausalLM

Files changed (2)
  1. config.json +6 -6
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -5,14 +5,14 @@
   "bos_token_id": 0,
   "eos_token_id": 1,
   "hidden_act": "silu",
-  "hidden_size": 500,
+  "hidden_size": 600,
   "initializer_range": 0.02,
-  "intermediate_size": 1500,
-  "max_position_embeddings": 2000,
+  "intermediate_size": 2000,
+  "max_position_embeddings": 4096,
   "model_type": "PLANKTON",
-  "num_attention_heads": 20,
-  "num_hidden_layers": 20,
-  "num_key_value_heads": 20,
+  "num_attention_heads": 10,
+  "num_hidden_layers": 10,
+  "num_key_value_heads": 10,
   "pad_token_id": 3,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-06,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5dec3624ac1edab8c78814101f3cbd7bb88bd99e5f5c6bfd70fbbe9ba0754a4b
-size 428140201
+oid sha256:941a3659732bea432bf5682350fe146dc693213bbc28d6238d911914bf69d410
+size 403279368
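
The pytorch_model.bin entries above are Git LFS pointer files, not the weights themselves: each records only the blob's SHA-256 (oid) and byte size. After fetching the actual file (e.g. with git lfs pull), both fields can be verified locally. A minimal sketch; the local path is a placeholder for wherever your clone puts the file:

import hashlib
import os

path = "pytorch_model.bin"  # local path after `git lfs pull` (adjust as needed)
expected_oid = "941a3659732bea432bf5682350fe146dc693213bbc28d6238d911914bf69d410"
expected_size = 403279368

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size does not match LFS pointer"
assert digest.hexdigest() == expected_oid, "sha256 does not match LFS pointer"
print("pytorch_model.bin matches its LFS pointer")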