Update README.md
Browse files
README.md
CHANGED
@@ -35,8 +35,6 @@ Here is working code to inference, though it can be improved:
35 |   import torch
36 |   from transformers import AutoModelForCausalLM, AutoTokenizer
37 |
38 | - sysprompt = "The assistant gives helpful, detailed, and polite answers to the user's questions.\n"
39 | -
40 |   model = AutoModelForCausalLM.from_pretrained("teknium/Puffin-Phi-v2", trust_remote_code=True, torch_dtype=torch.bfloat16).to("cuda")
41 |   tokenizer = AutoTokenizer.from_pretrained("teknium/Puffin-Phi-v2", trust_remote_code=True, torch_dtype=torch.bfloat16)
42 |   inputs = tokenizer(f"### Instruction:\nWrite a negative review for the website, Twitter.\n### Response:\n", return_tensors="pt", return_attention_mask=False)
35 |   import torch
36 |   from transformers import AutoModelForCausalLM, AutoTokenizer
37 |
38 |   model = AutoModelForCausalLM.from_pretrained("teknium/Puffin-Phi-v2", trust_remote_code=True, torch_dtype=torch.bfloat16).to("cuda")
39 |   tokenizer = AutoTokenizer.from_pretrained("teknium/Puffin-Phi-v2", trust_remote_code=True, torch_dtype=torch.bfloat16)
40 |   inputs = tokenizer(f"### Instruction:\nWrite a negative review for the website, Twitter.\n### Response:\n", return_tensors="pt", return_attention_mask=False)