Joetib committed on
Commit
20d811b
1 Parent(s): 1c8ec68

Upload GPTNeoXForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +2 -2
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "EleutherAI/pythia-410M",
3
  "architectures": [
4
  "GPTNeoXForCausalLM"
5
  ],
@@ -21,7 +21,7 @@
21
  "rotary_emb_base": 10000,
22
  "rotary_pct": 0.25,
23
  "tie_word_embeddings": false,
24
- "torch_dtype": "float32",
25
  "transformers_version": "4.35.0",
26
  "use_cache": true,
27
  "use_parallel_residual": true,
 
1
  {
2
+ "_name_or_path": "pythia-finetuned-5-steps",
3
  "architectures": [
4
  "GPTNeoXForCausalLM"
5
  ],
 
21
  "rotary_emb_base": 10000,
22
  "rotary_pct": 0.25,
23
  "tie_word_embeddings": false,
24
+ "torch_dtype": "float16",
25
  "transformers_version": "4.35.0",
26
  "use_cache": true,
27
  "use_parallel_residual": true,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:7ce4cafabb641128d81b7b1b5df9f135bd643b145c53199a6e90c6a39a44151e
3
- size 1621370224
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:297f1e680e970efd0a78c049ccc3256f43607db6b23a48389f8df3886094ec85
3
+ size 810701896