import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Run on GPU when available, otherwise fall back to CPU
device = 'cuda' if torch.cuda.is_available() else 'cpu'

# csebuetnlp/mT5_multilingual_XLSum is an mT5 seq2seq checkpoint, so it must be
# loaded with the Auto* classes rather than BertTokenizerFast/EncoderDecoderModel
ckpt = 'csebuetnlp/mT5_multilingual_XLSum'
tokenizer = AutoTokenizer.from_pretrained(ckpt)
model = AutoModelForSeq2SeqLM.from_pretrained(ckpt).to(device)
def generate_summary(text):
    # Tokenize the input, padding/truncating to the model's 512-token limit
    inputs = tokenizer([text], padding="max_length", truncation=True, max_length=512, return_tensors="pt")
    input_ids = inputs.input_ids.to(device)
    attention_mask = inputs.attention_mask.to(device)
    # Generate the summary token IDs and decode them back to plain text
    output = model.generate(input_ids, attention_mask=attention_mask)
    return tokenizer.decode(output[0], skip_special_tokens=True)
demo = gr.Interface(fn=generate_summary,
inputs=gr.Textbox(lines=10, placeholder="Insert the text here"),
outputs=gr.Textbox(lines=4)
)
demo.launch()