mdouglas committed
Commit d10fe5b
1 Parent(s): 45f1ea0

Upload 7 files

Files changed (2):
  1. config.json +6 -6
  2. model.safetensors +2 -2
config.json CHANGED
@@ -3,20 +3,20 @@
   "architectures": [
     "GPT2LMHeadModel"
   ],
-  "attn_pdrop": 0.1,
+  "attn_pdrop": 0,
   "bos_token_id": 50256,
-  "embd_pdrop": 0.1,
+  "embd_pdrop": 0,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt2",
-  "n_embd": 768,
-  "n_head": 12,
+  "n_embd": 1280,
+  "n_head": 20,
   "n_inner": null,
-  "n_layer": 12,
+  "n_layer": 36,
   "n_positions": 1024,
   "reorder_and_upcast_attn": false,
-  "resid_pdrop": 0.1,
+  "resid_pdrop": 0,
   "scale_attn_by_inverse_layer_idx": false,
   "scale_attn_weights": true,
   "summary_activation": null,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
-  oid sha256:ae71963af980d55f25a61dadc921f1b80715075bae2f7dfa6d25dcce0773fb24
-  size 248892880
+  oid sha256:401e7218cf0b6dcfce3cffb3621b711fdc56e5a3824aefb3574606404676bf17
+  size 1548100624
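
The pointer change is consistent with that resize: about 774M parameters at 2 bytes each (fp16 or bf16) comes to roughly 1.55 GB, while the old 248,892,880-byte file matches the ~124M parameters of GPT-2 small. A hedged sketch, assuming the checkpoint has been pulled locally as model.safetensors, that totals the stored parameters:

```python
from safetensors import safe_open

# Sum element counts across all tensors in the checkpoint.
# Note: get_tensor loads each tensor into memory as it goes.
total = 0
with safe_open("model.safetensors", framework="pt") as f:
    for name in f.keys():
        total += f.get_tensor(name).numel()

print(f"{total:,} parameters")
# ~774M params * 2 bytes (fp16/bf16) is about 1,548,100,624 bytes, the new size;
# the old 248,892,880-byte file likewise matches ~124M params at 2 bytes each.
```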