# bevelapi/models/blenderbot.py

from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
# https://www.youtube.com/watch?v=irjYqV6EebU
model_name = "facebook/blenderbot-1B-distill"
# Other BlenderBot checkpoints (https://huggingface.co/models?sort=trending&search=facebook%2Fblenderbot):
#   facebook/blenderbot-3B
#   facebook/blenderbot-1B-distill
#   facebook/blenderbot-400M-distill
#   facebook/blenderbot-90M
#   facebook/blenderbot_small-90M

def load():
    # Load the checkpoint into module-level globals so generate() can use them
    global model
    global tokenizer
    model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
    tokenizer = AutoTokenizer.from_pretrained(model_name)

def generate(input_text):
    # Tokenize the input text
    input_ids = tokenizer.encode(input_text, return_tensors="pt")
    # Generate output using the model
    output_ids = model.generate(
        input_ids,
        no_repeat_ngram_size=2,   # block repeated 2-grams in the output
        max_new_tokens=200,
        num_beams=2,              # beam search with 2 beams
        eos_token_id=tokenizer.eos_token_id,
    )
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)
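
# A minimal usage sketch, not part of the original file: load() must run
# before generate(), since generate() reads the module-level model/tokenizer
# globals that load() sets. The example prompt is illustrative.
if __name__ == "__main__":
    load()
    print(generate("Hello, how are you doing today?"))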