nicholasKluge committed
Commit b80ed60 • 1 Parent(s): 183333a
Update README.md
README.md CHANGED
````diff
@@ -110,7 +110,7 @@ Using the `pipeline`:
 ```python
 from transformers import pipeline
 
-generator = pipeline("text-generation", model="nicholasKluge/
+generator = pipeline("text-generation", model="nicholasKluge/TeenyTinyLlama-160m")
 
 completions = generator("Astronomia é a ciência", num_return_sequences=2, max_new_tokens=100)
 
@@ -125,8 +125,8 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 import torch
 
 # Load model and the tokenizer
-tokenizer = AutoTokenizer.from_pretrained("nicholasKluge/
-model = AutoModelForCausalLM.from_pretrained("nicholasKluge/
+tokenizer = AutoTokenizer.from_pretrained("nicholasKluge/TeenyTinyLlama-160m", revision='main')
+model = AutoModelForCausalLM.from_pretrained("nicholasKluge/TeenyTinyLlama-160m", revision='main')
 
 # Pass the model to your device
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
````
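For context, the second hunk stops at the device line. The sketch below is one plausible continuation showing how the model and tokenizer loaded in the updated README could be used to generate text; the prompt, sampling flags, and `max_new_tokens` value are illustrative assumptions, not part of this commit.

```python
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load model and the tokenizer, as in the updated README
tokenizer = AutoTokenizer.from_pretrained("nicholasKluge/TeenyTinyLlama-160m", revision='main')
model = AutoModelForCausalLM.from_pretrained("nicholasKluge/TeenyTinyLlama-160m", revision='main')

# Pass the model to your device
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)

# Tokenize an example prompt and move the tensors to the same device
inputs = tokenizer("Astronomia é a ciência", return_tensors="pt").to(device)

# Generate two sampled completions and decode them (settings are illustrative)
outputs = model.generate(**inputs, do_sample=True, num_return_sequences=2, max_new_tokens=100)
for sequence in outputs:
    print(tokenizer.decode(sequence, skip_special_tokens=True))
```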