Tonic euclaise committed on
Commit 06a86ac (1 parent: 425b746)

Fix v3 (#3)


- Fix v3 (b8d7597743ebc5b04c0c0ba4160b45267f988be2)


Co-authored-by: Jade <euclaise@users.noreply.huggingface.co>

Files changed (1): app.py  +1 -1
app.py CHANGED
@@ -26,7 +26,7 @@ def generate_text(usertitle, content, temperature, max_length, N=3):
     # 'title': usertitle,
     # 'content': content
     # }
-    input_text = f"[[[title:]]] {usertitle}\n[[[content:]]]{content}\n\nThe following is an interaction between a user and an AI assistant that is related to the above text.\n\n[[[User]]]"
+    input_text = f"[[[Title]]] {usertitle}\n[[[Content]]] {content.strip()}\n\nThe following is an interaction between a user and an AI assistant that is related to the above text.\n\n[[[User]]] "
     inputs = tokenizer(input_text, return_tensors='pt').to('cuda')
     attention_mask = torch.ones(inputs['input_ids'].shape, dtype=torch.long, device='cuda')
     generated_sequences = model.generate(inputs['input_ids'], attention_mask=attention_mask, temperature=temperature, max_length=max_length, pad_token_id=tokenizer.eos_token_id, num_return_sequences=N, do_sample=True)
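For context, a minimal sketch (not part of this commit) of the prompt string the updated line now builds; the usertitle and content values below are hypothetical examples:

# Illustrative only: hypothetical inputs showing the prompt format after the fix.
usertitle = "Sample title"
content = "  Sample content with stray whitespace.  "

# Same f-string as the new line in app.py; content.strip() drops leading/trailing whitespace,
# and the prompt now ends with "[[[User]]] " (trailing space) instead of "[[[User]]]".
input_text = f"[[[Title]]] {usertitle}\n[[[Content]]] {content.strip()}\n\nThe following is an interaction between a user and an AI assistant that is related to the above text.\n\n[[[User]]] "

print(input_text)
# [[[Title]]] Sample title
# [[[Content]]] Sample content with stray whitespace.
#
# The following is an interaction between a user and an AI assistant that is related to the above text.
#
# [[[User]]] 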