mosidi committed on
Commit b2358d4
1 Parent(s): 0159e29

Update README.md

Files changed (1): README.md (+2, -2)
README.md CHANGED
@@ -20,8 +20,8 @@ tokenizer = PegasusTokenizer.from_pretrained(model_name)
  model = PegasusForConditionalGeneration.from_pretrained(model_name).to(torch_device)
 
  def get_response(input_text,num_return_sequences,num_beams):
- batch = tokenizer([input_text],truncation=True,padding='longest',max_length=60, return_tensors="pt").to(torch_device)
- translated = model.generate(**batch,max_length=60,num_beams=num_beams, num_return_sequences=num_return_sequences, temperature=1.5)
+ batch = tokenizer([input_text],truncation=True,padding='longest',max_length=2500, return_tensors="pt").to(torch_device)
+ translated = model.generate(**batch,max_length=2500,num_beams=num_beams, num_return_sequences=num_return_sequences, temperature=1.5)
  tgt_text = tokenizer.batch_decode(translated, skip_special_tokens=True)
  return tgt_text
  ```
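For context, below is a minimal, self-contained sketch of the snippet this commit modifies, with the raised `max_length=2500` applied. The hunk omits the earlier README lines that define `model_name` and `torch_device`, so the checkpoint name (`google/pegasus-xsum`) and device setup here are placeholders, not the README's actual values.

```python
# Sketch of the updated get_response snippet; checkpoint and device are assumptions,
# since the README defines model_name and torch_device outside this hunk.
import torch
from transformers import PegasusForConditionalGeneration, PegasusTokenizer

model_name = "google/pegasus-xsum"  # placeholder; substitute the README's checkpoint
torch_device = "cuda" if torch.cuda.is_available() else "cpu"

tokenizer = PegasusTokenizer.from_pretrained(model_name)
model = PegasusForConditionalGeneration.from_pretrained(model_name).to(torch_device)

def get_response(input_text, num_return_sequences, num_beams):
    # Tokenize with the raised max_length=2500 introduced by this commit
    batch = tokenizer([input_text], truncation=True, padding="longest",
                      max_length=2500, return_tensors="pt").to(torch_device)
    # Beam search; temperature only takes effect if do_sample=True is also passed
    translated = model.generate(**batch, max_length=2500, num_beams=num_beams,
                                num_return_sequences=num_return_sequences, temperature=1.5)
    return tokenizer.batch_decode(translated, skip_special_tokens=True)

# Example call: return 3 candidates from a 5-beam search
print(get_response("The quick brown fox jumps over the lazy dog.",
                   num_return_sequences=3, num_beams=5))
```

The change only relaxes the truncation and generation caps from 60 to 2500 tokens; beam settings and decoding are untouched.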