achimoraites committed on
Commit
859ab27
1 Parent(s): e01b69c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -3
app.py CHANGED
# Post-change content of app.py reconstructed from the diff hunk
# (@@ -5,10 +5,8 @@): the removed lines reloaded the tokenizer/model on
# every call inside summarize(); the fixed version loads them once at
# module level. NOTE(review): the `AutoTokenizer`, `AutoModelForSeq2SeqLM`,
# and `gr` (gradio) imports live above this hunk (file lines 1-4, outside
# this view) — confirm against the full app.py.

# Load the fine-tuned FLAN-T5 summarization checkpoint once at import time,
# so repeated calls to summarize() reuse the same objects.
tokenizer = AutoTokenizer.from_pretrained("achimoraites/flan-t5-base-samsum")
model = AutoModelForSeq2SeqLM.from_pretrained("achimoraites/flan-t5-base-samsum")


def summarize(text, num_beams=48, max_length=2048):
    """Summarize *text* with the flan-t5-base-samsum model.

    Args:
        text: Input document to summarize.
        num_beams: Beam-search width passed to ``model.generate``.
        max_length: Maximum length of the generated summary (tokens).

    Returns:
        The decoded summary string (first and only batch element).
    """
    # Input is truncated to 1024 tokens to bound encoder cost.
    inputs = tokenizer([text], max_length=1024, return_tensors="pt", truncation=True)
    summary_ids = model.generate(
        inputs["input_ids"],
        num_beams=num_beams,
        min_length=0,
        max_length=max_length,
    )
    # skip_special_tokens drops <pad>/<eos>; clean_up_tokenization_spaces=False
    # preserves the tokenizer's raw spacing.
    return tokenizer.batch_decode(
        summary_ids,
        skip_special_tokens=True,
        clean_up_tokenization_spaces=False,
    )[0]


# live=False: only run the model when the user submits, not on every keystroke.
gr.Interface(fn=summarize, inputs="text", outputs="text", live=False).launch()