import gradio as gr
from transformers import PreTrainedTokenizerFast, BartForConditionalGeneration
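# ainize/kobart-news: a KoBART model fine-tuned for Korean news article summarization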
model_name = "ainize/kobart-news"
tokenizer = PreTrainedTokenizerFast.from_pretrained(model_name)
model = BartForConditionalGeneration.from_pretrained(model_name)
def summ(txt):
    input_ids = tokenizer.encode(txt, return_tensors="pt")
    summary_text_ids = model.generate(
        input_ids=input_ids,
        bos_token_id=model.config.bos_token_id,  # BOS: beginning-of-sequence token id
        eos_token_id=model.config.eos_token_id,  # EOS: end-of-sequence token id
        length_penalty=2.0,                      # length penalty applied during beam search
        max_length=142,
        min_length=56,
        num_beams=4)                             # beam search width
    return tokenizer.decode(summary_text_ids[0], skip_special_tokens=True)
interface = gr.Interface(summ,
                         [gr.Textbox(label="original text")],
                         [gr.Textbox(label="summary")])
interface.launch()
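A minimal sanity check, assuming it is placed before interface.launch() (which blocks until the server stops): call summ directly on a string. The sample sentence below is invented for illustration and is not part of the original Space.

# Quick local test of the summarizer without the Gradio UI.
# The input is a made-up Korean news-style sentence (illustrative only).
sample_text = "한국은행이 기준금리를 동결하면서 올해 경제 성장률 전망치를 소폭 낮췄다고 밝혔다."
print(summ(sample_text))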