Soumen committed on
Commit 0d9c3fb
1 Parent(s): 0bef9f7
Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -193,7 +193,7 @@ def main():
     #st.text("Using Google T5 Transformer ..")
     inputs = tokenizer.encode("summarize: " + text,
                               return_tensors='pt',
-                              max_length= 1024,
+                              max_length= 512,
                               truncation=True)
     summary_ids = model.generate(inputs, max_length=150, min_length=80, length_penalty=5., num_beams=2)
     summary = tokenizer.decode(summary_ids[0])
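
For context, here is a minimal sketch of the summarization path this one-line change touches, assuming the app loads a T5 checkpoint through Hugging Face transformers. The checkpoint name is not visible in this diff, so "t5-base" below is only a placeholder. Capping the encoder input at 512 tokens matches the input length T5 checkpoints are commonly trained with; longer documents are simply truncated before generation.

from transformers import T5Tokenizer, T5ForConditionalGeneration

# Placeholder checkpoint: the model actually loaded by app.py is not shown in this diff.
tokenizer = T5Tokenizer.from_pretrained("t5-base")
model = T5ForConditionalGeneration.from_pretrained("t5-base")

text = "..."  # document text gathered elsewhere in main()

# Encode with the T5 summarization prefix; inputs beyond 512 tokens are truncated,
# reflecting the change made in this commit.
inputs = tokenizer.encode("summarize: " + text,
                          return_tensors='pt',
                          max_length=512,
                          truncation=True)

# Beam search with a length penalty, as in app.py, producing an 80-150 token summary.
summary_ids = model.generate(inputs, max_length=150, min_length=80,
                             length_penalty=5., num_beams=2)
summary = tokenizer.decode(summary_ids[0])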