from transformers import AutoTokenizer
import transformers
import torch

model = "meta-llama/Llama-2-7b-chat-hf"

# Llama 2 is a gated model: accept the license on the Hub and authenticate
# (e.g. via `huggingface-cli login`) before the weights can be downloaded.
tokenizer = AutoTokenizer.from_pretrained(
    model,
    use_auth_token=True,
)

# Text-generation pipeline in half precision, letting Accelerate place the
# model weights across the available devices.
pipeline = transformers.pipeline(
    "text-generation",
    model=model,
    torch_dtype=torch.float16,
    device_map="auto",
)

def gen(x, max_length=200):
    """Generate a completion for prompt `x` and strip the prompt from the output."""
    sequences = pipeline(
        x,
        do_sample=True,
        top_k=10,
        num_return_sequences=1,
        eos_token_id=tokenizer.eos_token_id,
        max_length=max_length,
    )
    return sequences[0]["generated_text"].replace(x, "")

print(gen('I liked "Breaking Bad" and "Band of Brothers". Do you have any recommendations of other shows I might like?\n'))
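Because meta-llama/Llama-2-7b-chat-hf is an instruction-tuned chat model, it usually responds more reliably when the prompt follows Llama 2's [INST] ... [/INST] chat format, optionally with a <<SYS>> system prompt. Below is a minimal sketch built on the gen helper above; the chat_prompt helper and the system prompt text are illustrative assumptions, not part of the original example.

# Sketch: wrap a user message in Llama 2's chat template before calling gen().
# The Llama tokenizer adds the leading BOS token (<s>) automatically, so it is omitted here.
def chat_prompt(user_message, system_prompt="You are a helpful assistant."):
    return (
        "[INST] <<SYS>>\n"
        f"{system_prompt}\n"
        "<</SYS>>\n\n"
        f"{user_message} [/INST]"
    )

print(gen(chat_prompt('Suggest a few shows similar to "Breaking Bad" and "Band of Brothers".')))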