Sayan01 committed
Commit 8b41cfe
1 Parent(s): aa47e53

Upload PhiForCausalLM

Files changed (2)
  1. config.json +2 -2
  2. model.safetensors +2 -2
config.json CHANGED
@@ -9,9 +9,9 @@
   "embd_pdrop": 0.0,
   "eos_token_id": 50256,
   "hidden_act": "gelu_new",
-  "hidden_size": 2048,
+  "hidden_size": 2560,
   "initializer_range": 0.02,
-  "intermediate_size": 4096,
+  "intermediate_size": 5120,
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 2048,
   "model_type": "phi",
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b07cdf036cf8559920fe47de4c24862ee5ed0d25a5a970f9d4cf9a490eb46e3d
-size 1493897696
+oid sha256:5af5af86655e784358f639b8f5efcecc06a734ecef63282143bdeb8572b34b48
+size 2202884688
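
The Git LFS pointer swap means the stored weights grew from 1,493,897,696 to 2,202,884,688 bytes, consistent with the wider layers above: at 2 bytes per parameter (assuming fp16/bf16 storage, which the diff does not state), that is roughly 0.75B parameters before and 1.10B after. One way to check, once model.safetensors is downloaded locally (e.g. via huggingface_hub), is to sum tensor shapes straight from the safetensors header:

    from safetensors import safe_open

    # Count parameters from the file header without loading any weights.
    # "model.safetensors" is assumed to be the locally downloaded file.
    total = 0
    with safe_open("model.safetensors", framework="pt") as f:
        for name in f.keys():
            n = 1
            for dim in f.get_slice(name).get_shape():
                n *= dim
            total += n
    print(f"{total:,} parameters")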