Sayan01 committed
Commit aa47e53 · verified · 1 Parent(s): 4f57ea2

Upload PhiForCausalLM

Files changed (2):
  1. config.json +1 -1
  2. model.safetensors +2 -2
config.json CHANGED
@@ -11,7 +11,7 @@
   "hidden_act": "gelu_new",
   "hidden_size": 2048,
   "initializer_range": 0.02,
-  "intermediate_size": 8192,
+  "intermediate_size": 4096,
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 2048,
   "model_type": "phi",
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:27d1084bd29d4479f4ef4642f033dc0ab9645dcc1f54548e3958b6333a649e70
-size 2030899760
+oid sha256:b07cdf036cf8559920fe47de4c24862ee5ed0d25a5a970f9d4cf9a490eb46e3d
+size 1493897696
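
A rough consistency check (not part of the commit): halving intermediate_size from 8192 to 4096 shrinks only the MLP (fc1/fc2) weights of each Phi decoder layer, which should account for the model.safetensors drop from 2,030,899,760 to 1,493,897,696 bytes. The sketch below assumes fp16 (2-byte) weights, the standard Phi MLP layout with bias terms, and num_hidden_layers = 16; none of these values appear in this diff, so treat them as assumptions.

# Minimal sketch estimating the expected change in checkpoint size
# from the intermediate_size edit shown in config.json above.

hidden_size = 2048          # from config.json (unchanged)
old_intermediate = 8192     # removed line
new_intermediate = 4096     # added line
num_layers = 16             # assumed; not shown in this diff
bytes_per_param = 2         # assumed fp16 storage

def mlp_params(intermediate: int) -> int:
    # Per layer: fc1 (hidden -> intermediate) and fc2 (intermediate -> hidden),
    # each with a bias vector.
    fc1 = hidden_size * intermediate + intermediate
    fc2 = intermediate * hidden_size + hidden_size
    return num_layers * (fc1 + fc2)

delta_bytes = (mlp_params(old_intermediate) - mlp_params(new_intermediate)) * bytes_per_param
print(f"expected shrink: {delta_bytes:,} bytes")              # 537,001,984
print(f"observed shrink: {2030899760 - 1493897696:,} bytes")  # 537,002,064

Under these assumptions the estimate is within about 80 bytes of the observed difference (the remainder plausibly coming from the safetensors header), which suggests the size change in this commit is fully explained by the smaller intermediate_size.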