Chakita committed
Commit cceb9b4
1 parent: 2a1b492

Upload BloomForCausalLM

Files changed (2)
  1. config.json +3 -3
  2. pytorch_model.bin +1 -1
config.json CHANGED
@@ -10,14 +10,14 @@
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_dropout": 0.0,
+ "hidden_size": 1024,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "masked_softmax_fusion": true,
  "model_type": "bloom",
- "n_embed": 1024,
+ "n_head": 16,
  "n_inner": null,
  "n_layer": 24,
- "num_attention_heads": 16,
  "offset_alibi": 100,
  "pad_token_id": 3,
  "pretraining_tp": 1,
@@ -25,7 +25,7 @@
  "skip_bias_add_qkv": false,
  "slow_but_exact": false,
  "torch_dtype": "float32",
- "transformers_version": "4.20.1",
+ "transformers_version": "4.24.0",
  "unk_token_id": 0,
  "use_cache": true,
  "vocab_size": 250691
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:cee50f2c24972307a1ae871189e3c31f9d1ed29d7ff6e6bb3d649f70d78f65f4
+ oid sha256:ef5b17b61c17cfe6037471adccbbec9a60fc7efc548b538b1162f54bd90f9dab
  size 2236181047
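pytorch_model.bin is tracked with Git LFS, so the diff only touches the pointer file: the sha256 oid changes while the size stays at the same 2236181047 bytes, meaning the weights were re-uploaded as a same-sized but different blob. A minimal sketch (plain hashlib, no Hub-specific API assumed) to check a downloaded copy against the new pointer:

    import hashlib

    # Values copied from the LFS pointer in this commit.
    EXPECTED_SHA256 = "ef5b17b61c17cfe6037471adccbbec9a60fc7efc548b538b1162f54bd90f9dab"
    EXPECTED_SIZE = 2236181047

    def matches_pointer(path: str) -> bool:
        digest = hashlib.sha256()
        size = 0
        with open(path, "rb") as f:
            # Hash in 1 MiB chunks so the ~2.2 GB file never sits in memory at once.
            for chunk in iter(lambda: f.read(1 << 20), b""):
                digest.update(chunk)
                size += len(chunk)
        return digest.hexdigest() == EXPECTED_SHA256 and size == EXPECTED_SIZE

    print(matches_pointer("pytorch_model.bin"))

A match on both digest and size means the local file is exactly the blob this pointer references; Git LFS computes the oid the same way, as the sha256 of the file's raw bytes.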