Fix model device in code example
#4
by lewtun (HF staff) — opened
README.md
CHANGED
@@ -225,7 +225,7 @@ checkpoint = "bigcode/santacoder"
|
|
225 |
device = "cuda" # for GPU usage or "cpu" for CPU usage
|
226 |
|
227 |
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
|
228 |
-
model = AutoModelForCausalLM.from_pretrained(checkpoint, trust_remote_code=True).to()
|
229 |
|
230 |
inputs = tokenizer.encode("def print_hello_world():", return_tensors="pt").to(device)
|
231 |
outputs = model.generate(inputs)
|
|
|
225 |
device = "cuda" # for GPU usage or "cpu" for CPU usage
|
226 |
|
227 |
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
|
228 |
+
model = AutoModelForCausalLM.from_pretrained(checkpoint, trust_remote_code=True).to(device)
|
229 |
|
230 |
inputs = tokenizer.encode("def print_hello_world():", return_tensors="pt").to(device)
|
231 |
outputs = model.generate(inputs)
|