from transformers import pipeline, set_seed
from transformers import BioGptTokenizer, BioGptForCausalLM
import gradio as gr

# Load the BioGPT model and tokenizer, then build a text-generation pipeline
model = BioGptForCausalLM.from_pretrained("microsoft/biogpt")
tokenizer = BioGptTokenizer.from_pretrained("microsoft/biogpt")
generator = pipeline('text-generation', model=model, tokenizer=tokenizer)
set_seed(42)


def predict(text):
    # Sample five continuations and join them into one string for the output textbox
    outputs = generator(text, max_length=100, num_return_sequences=5, do_sample=True)
    return "\n\n".join(o["generated_text"] for o in outputs)


txt1 = gr.Textbox(
    label="Input",
    lines=5,
)
txt2 = gr.Textbox(
    label="Output",
    lines=20,
)

interface = gr.Interface(
    fn=predict,
    inputs=txt1,
    outputs=txt2,
    title="BioGPT",
    description="BioGPT",
)
interface.launch()