anonymous-repository committed on
Commit
cac4627
·
verified ·
1 Parent(s): e6c3f4c

Upload FalconForDistill

Browse files
Files changed (3) hide show
  1. config.json +3 -3
  2. generation_config.json +1 -1
  3. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,9 +1,9 @@
1
  {
2
- "_name_or_path": "tiiuae/falcon-rw-1b",
3
  "alibi": true,
4
  "apply_residual_connection_post_layernorm": false,
5
  "architectures": [
6
- "FalconForCausalLM"
7
  ],
8
  "attention_dropout": 0.0,
9
  "auto_map": {
@@ -16,7 +16,7 @@
16
  },
17
  "bias": true,
18
  "bos_token_id": 1,
19
- "eos_token_id": 2,
20
  "hidden_dropout": 0.0,
21
  "hidden_size": 2048,
22
  "initializer_range": 0.02,
 
1
  {
2
+ "_name_or_path": "model/falcon/falcon-rw-1b-bf16-train_batch=4-train=rw_code_falcon_dataset",
3
  "alibi": true,
4
  "apply_residual_connection_post_layernorm": false,
5
  "architectures": [
6
+ "FalconForDistill"
7
  ],
8
  "attention_dropout": 0.0,
9
  "auto_map": {
 
16
  },
17
  "bias": true,
18
  "bos_token_id": 1,
19
+ "eos_token_id": null,
20
  "hidden_dropout": 0.0,
21
  "hidden_size": 2048,
22
  "initializer_range": 0.02,
generation_config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
- "eos_token_id": 2,
5
  "transformers_version": "4.34.1"
6
  }
 
1
  {
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
+ "pad_token_id": 50256,
5
  "transformers_version": "4.34.1"
6
  }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e16395599e03becc6fd63a0b7ba0ee891890dfb1d8018b61298bdd616e0225cb
3
- size 2623349334
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d432331081341fc4edf8f0bb76f72301441d0723a914cda9dd5f3da6f90367d9
3
+ size 5246704278