Iheb-Chaabane committed
Commit 79863c9
Parent: 0753ac9

Upload LlamaForCausalLM

config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "/home/ec2-user/work/model/dpo_8b",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -25,5 +26,5 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.46.1",
   "use_cache": true,
-  "vocab_size": 131080
+  "vocab_size": 131072
 }
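The config change is small but meaningful: the re-export records its source path in _name_or_path, and vocab_size drops from 131080 to 131072 (exactly 2**17), so 8 rows were trimmed from the vocabulary-sized tensors, plausibly padding or unused added-token slots. A minimal consistency check, assuming a hypothetical local checkout at MODEL_DIR (the path below is illustrative; neither it nor a repo id appears in this commit):

# A minimal sketch, not part of the commit.
from transformers import AutoConfig, AutoTokenizer

MODEL_DIR = "./dpo_8b"  # hypothetical placeholder for a local checkout

config = AutoConfig.from_pretrained(MODEL_DIR)
tokenizer = AutoTokenizer.from_pretrained(MODEL_DIR)

# After this commit, config.vocab_size should be 131072 (= 2**17).
# len(tokenizer) counts base vocab plus added tokens; embedding rows
# beyond it are padding or unused slots, plausibly like the 8 removed here.
print(config.vocab_size, len(tokenizer))
assert config.vocab_size >= len(tokenizer)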
model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ada95243d59a5b8a5d60ea7bec7907ca20b92bb124d5054b51792fe059b72195
-size 4938949584
+oid sha256:07e279c8c6075600e5dc795364efff8897de0f0c22a1d2d8db79a70adf8edb3f
+size 4938900432
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c0f167f4dc5fb028251a03f67ce36bef07a163084fbd8f7d63ca043d770ab9ca
+oid sha256:c5d6600f34e9972eed3201425ba75c2d58f574655f373ea8b86ddfa37d391f2a
 size 4942085160
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:11a6edf04d6b4ab1044d88107eb8a4c71d6378c7d232c3c668870ceae1d2a80c
+oid sha256:a96480584a0b5bd09c556e53d952146008bb423e5e12ea9bbd0b60d62f9a2f72
 size 4224838512
model-00004-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4be476e54f6ce54be4690cb9b7241959fd2096ab9a4b97648679e1fce43c575b
-size 805355648
+oid sha256:0b84ea911989e21ebf4ac05018171f73016d8ae72b7904e89289be0b4672a403
+size 805306496
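Each shard is tracked through Git LFS, so the file in git is just a three-line pointer (version, oid, size). Shards 1 and 4 each shrink by 49,152 bytes, while shards 2 and 3 keep their exact byte size and only change their sha256: same serialized layout, different weight values. A minimal sketch of verifying a downloaded shard against its pointer; both paths are hypothetical placeholders for a local checkout:

# A minimal sketch, not part of the commit.
import hashlib
import os

def lfs_pointer_fields(pointer_path):
    # Parse "oid sha256:<hex>" and "size <bytes>" from a Git LFS pointer file.
    fields = {}
    with open(pointer_path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields["oid"].removeprefix("sha256:"), int(fields["size"])

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so multi-GB shards need not fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()

# Hypothetical paths: the pointer as stored in git, and the resolved blob.
oid, size = lfs_pointer_fields("model-00001-of-00004.safetensors.pointer")
blob = "model-00001-of-00004.safetensors"
assert os.path.getsize(blob) == size
assert sha256_of(blob) == oid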
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size": 14911199232
+    "total_size": 14911113216
   },
   "weight_map": {
     "lm_head.weight": "model-00004-of-00004.safetensors",