from transformers import GPT2LMHeadModel, GPT2Tokenizer
import gradio as gr

# Load the pretrained GPT-2 model and its tokenizer
model_name = "gpt2"
model = GPT2LMHeadModel.from_pretrained(model_name)
tokenizer = GPT2Tokenizer.from_pretrained(model_name)

def generate(text):
    # Encode the prompt and generate a continuation with beam search + sampling
    token_ids = tokenizer.encode(text, return_tensors="pt")
    gpt2_tensors = model.generate(token_ids,
                                  max_length=200,
                                  no_repeat_ngram_size=2,  # must be an int (not True): block repeated 2-grams
                                  num_beams=3,
                                  do_sample=True,
                                  temperature=1.5,
                                  pad_token_id=tokenizer.eos_token_id)  # GPT-2 has no pad token; avoids a warning
    # Decode each returned sequence and concatenate into one numbered string
    response = ""
    for i, x in enumerate(gpt2_tensors):
        response += f"{i}: {tokenizer.decode(x, skip_special_tokens=True)}"
    return response

# Simple Gradio UI: one text input box, one text output box
in_text = gr.Textbox(lines=1, label="English", placeholder="English text here")
out = gr.Textbox(lines=1, label="Generated text")
gr.Interface(generate, inputs=in_text, outputs=out).launch()
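
# Optional sanity check (a minimal sketch, not part of the original app): with the
# model and tokenizer loaded as above, calling generate() directly should return a
# single numbered continuation; run it before the launch() call, which blocks the
# script. The prompt string below is only an illustrative example.
#
#     print(generate("Once upon a time"))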