voidful committed
Commit aed5b35
1 Parent(s): 2418413

Upload MllamaForCausalLM

config.json CHANGED
@@ -38,5 +38,5 @@
   "torch_dtype": "float32",
   "transformers_version": "4.45.2",
   "use_cache": true,
-  "vocab_size": 51868
+  "vocab_size": 51871
 }
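
The only substantive change in config.json is the vocabulary growing from 51868 to 51871 entries, i.e. three added tokens. A change like this is usually produced by extending the tokenizer and resizing the model's token embeddings before re-saving; below is a minimal sketch of that workflow with the transformers API, where the repo id and the three token strings are hypothetical placeholders, not values taken from this commit.

from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical identifiers -- placeholders, not taken from this commit.
repo_id = "your-org/your-mllama-checkpoint"
new_tokens = ["<extra_0>", "<extra_1>", "<extra_2>"]   # 3 tokens: 51868 -> 51871

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id)

tokenizer.add_tokens(new_tokens)                # extend the tokenizer vocabulary
model.resize_token_embeddings(len(tokenizer))   # grow embedding/lm_head rows; config.vocab_size is updated to match

model.save_pretrained("mllama-resized")         # rewrites config.json, the shards, and the index
tokenizer.save_pretrained("mllama-resized")
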
model-00001-of-00008.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5f6a627fdd24cd25838cf054ae392a4734696bd582a63073e22580f2ae5e8f60
-size 4977137944
+oid sha256:89c5fd91ef01e3a7b0e72e4cc981ce874ca79064077438a31814d16cc88ede46
+size 4977187096
model-00008-of-00008.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:163864c5c1c8e804ceb8f0d86cc74a694ada89e2d5e511b1f2ef6937f3b8beeb
-size 1957168600
+oid sha256:91115035d2df5144e0b241dac2c57de077c2cc5d612045244a1b1b6305179677
+size 1957217752
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size": 36597555264
+    "total_size": 36597653568
   },
   "weight_map": {
     "lm_head.weight": "model-00008-of-00008.safetensors",