[WIP] Upload folder using huggingface_hub (multi-commit 0bb930c7d8029239c283b898a046127f24d8e8c88ee1fd7426528349cbbd9b30)
#2 opened by Goekdeniz-Guelmez

Files changed:
- README.md +4 -6
- model.safetensors +2 -2
- tokenizer_config.json +0 -1
README.md CHANGED
@@ -1,17 +1,15 @@
 ---
-license: apache-2.0
-extra_gated_description: If you want to learn more about how we process your personal
-data, please read our <a href="https://mistral.ai/terms/">Privacy Policy</a>.
 base_model: mistralai/Mamba-Codestral-7B-v0.1
+license: apache-2.0
 tags:
 - mlx
+extra_gated_description: If you want to learn more about how we process your personal
+data, please read our <a href="https://mistral.ai/terms/">Privacy Policy</a>.
 ---

 # mlx-community/Mamba-Codestral-7B-v0.1-4bit

-The Model [mlx-community/Mamba-Codestral-7B-v0.1-4bit](https://huggingface.co/mlx-community/Mamba-Codestral-7B-v0.1-4bit) was
-converted to MLX format from [mistralai/Mamba-Codestral-7B-v0.1](https://huggingface.co/mistralai/Mamba-Codestral-7B-v0.1)
-using mlx-lm version **0.20.2**.
+The Model [mlx-community/Mamba-Codestral-7B-v0.1-4bit](https://huggingface.co/mlx-community/Mamba-Codestral-7B-v0.1-4bit) was converted to MLX format from [mistralai/Mamba-Codestral-7B-v0.1](https://huggingface.co/mistralai/Mamba-Codestral-7B-v0.1) using mlx-lm version **0.18.2**.

 ## Use with mlx

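The body of the README's "Use with mlx" section falls outside this hunk. As a rough sketch of how a 4-bit MLX model like this one is typically loaded, assuming the standard `mlx_lm` `load`/`generate` API (the exact snippet in the card may differ, and the prompt below is purely illustrative):

```python
# Minimal sketch: load the 4-bit MLX weights and run a generation.
# Assumes `pip install mlx-lm` on Apple silicon.
from mlx_lm import load, generate

model, tokenizer = load("mlx-community/Mamba-Codestral-7B-v0.1-4bit")
response = generate(
    model,
    tokenizer,
    prompt="Write a Python function that checks whether a number is prime.",
    verbose=True,
)
```

Repositories of this kind are normally produced from the original Hugging Face weights with the same package's `mlx_lm.convert` entry point using its quantization flag.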
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:21a0f0ce8194f8ef5726f7da5ebfed3bc10217b1d23e8ee8d05179b52992cf22
+size 4104014183
tokenizer_config.json CHANGED
@@ -6175,7 +6175,6 @@
 "bos_token": "<s>",
 "clean_up_tokenization_spaces": false,
 "eos_token": "</s>",
-"extra_special_tokens": {},
 "legacy": false,
 "model_max_length": 1000000000000000019884624838656,
 "pad_token": null,