pszemraj committed
Commit 08a2b1b
1 Parent(s): b7b0a80
Files changed (2)
  1. config.json +2 -2
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "BEE-spoke-data/smol_llama-101M-GQA",
+  "_name_or_path": "pszemraj/smol_llama-101M-GQA-midjourney-messages-cleaned-1024-vN",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -21,7 +21,7 @@
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
+  "torch_dtype": "float32",
   "transformers_version": "4.36.0.dev0",
   "use_cache": true,
   "vocab_size": 32128
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5cffcb432a2884fda08bf8ce40fd712b1bc61259ec9ce6de00c1b27e0a1cb315
-size 202532568
+oid sha256:84d9d2ddd4a1a4371a2c4a73cf27b909cbaf8abbbe7a48c66acfba72ad0b9784
+size 405058720
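
The safetensors blob roughly doubles along with the dtype change, which is consistent with the same ~101M parameters now taking 4 bytes each (float32) instead of 2 (bfloat16). A quick back-of-the-envelope check in plain Python, ignoring the small safetensors header overhead:

```python
# Approximate parameter counts implied by the file sizes in the diff above.
old_size = 202_532_568   # bytes, bfloat16 checkpoint (2 bytes per parameter)
new_size = 405_058_720   # bytes, float32 checkpoint (4 bytes per parameter)

print(old_size / 2)  # ~101.3M parameters
print(new_size / 4)  # ~101.3M parameters, i.e. the same model in a wider dtype
```
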