Fix model name in some examples
README.md CHANGED
@@ -40,7 +40,7 @@ response = generate(model, tokenizer, prompt="hello", verbose=True)
 ```python
 from mlx_lm import load, generate
 
-model, tokenizer = load("mlx-community/c4ai-command-r-
+model, tokenizer = load("mlx-community/c4ai-command-r-plus-4bit")
 
 # Format message with the command-r tool use template
 conversation = [
@@ -138,7 +138,7 @@ Action:```json
 ```python
 from mlx_lm import load, generate
 
-model, tokenizer = load("mlx-community/c4ai-command-r-
+model, tokenizer = load("mlx-community/c4ai-command-r-plus-4bit")
 
 # Format message with the command-r tool use template
 conversation = [
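For reference, a minimal sketch of how the corrected snippet fits together, built only from the context lines visible in the diff (the `load`/`generate` calls and the corrected model name). The conversation contents are illustrative, and `apply_chat_template` (the standard Hugging Face tokenizer method) stands in here for the README's command-r tool-use template formatting, which is not shown in this hunk.

```python
from mlx_lm import load, generate

# Load the quantized Command R+ checkpoint using the corrected model name.
model, tokenizer = load("mlx-community/c4ai-command-r-plus-4bit")

# Illustrative conversation; the actual README example formats this with
# Command R's tool-use template.
conversation = [
    {"role": "user", "content": "What is the biggest penguin in the world?"}
]

# Render the conversation into a prompt string via the underlying
# Hugging Face tokenizer's chat template.
prompt = tokenizer.apply_chat_template(
    conversation, tokenize=False, add_generation_prompt=True
)

# Same generate call as in the hunk header above.
response = generate(model, tokenizer, prompt=prompt, verbose=True)
```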