Update README.md
README.md (CHANGED)
@@ -21,8 +21,8 @@ In addition, Doge uses Inner Function Attention with Dynamic Mask as sequence tr
 ```python
 >>> from transformers import AutoTokenizer, AutoModelForCausalLM
 
->>> tokenizer = AutoTokenizer.from_pretrained("
->>> model = AutoModelForCausalLM.from_pretrained("
+>>> tokenizer = AutoTokenizer.from_pretrained("JingzeShi/Doge-76M")
+>>> model = AutoModelForCausalLM.from_pretrained("JingzeShi/Doge-76M", trust_remote_code=True)
 >>> inputs = tokenizer("Hey how are you doing?", return_tensors="pt")
 
 >>> out = model.generate(**inputs, max_new_tokens=100)
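As a minimal follow-up sketch to the snippet above (not part of the diff), the generated token ids in `out` can be turned back into text with the standard `tokenizer.batch_decode` call from `transformers`:

```python
>>> # Decode the generated token ids (prompt + continuation) back into text
>>> print(tokenizer.batch_decode(out, skip_special_tokens=True)[0])
```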