tongxiaojun committed
Commit: 934805d
Parent(s): d7dbd92

Update app.py
app.py CHANGED
@@ -14,13 +14,13 @@ model.eval()
 
 
 def generate(text):
-    text = '<s>{}</s></s>'.format(text)
+    #text = '<s>{}</s></s>'.format(text)
     input_ids = tokenizer(text, return_tensors="pt").input_ids
     #input_ids = input_ids.to(device)
     outputs = model.generate(input_ids, max_new_tokens=200, do_sample=True, top_p=0.7, temperature=0.35,
                              repetition_penalty=1.2, eos_token_id=tokenizer.eos_token_id)
     rets = tokenizer.batch_decode(outputs)
-    output = rets[0].strip().replace(text, "").replace('</s>', "")
+    output = rets[0].strip().replace(text, "").replace('<|endoftext|>', "")
     return output
 
 with gr.Blocks() as demo:
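
For context, a minimal, self-contained sketch of how the updated generate() might sit inside the Space's app.py, assuming a standard transformers + Gradio setup. The checkpoint name (MODEL_ID), the device handling, and the Blocks UI body are assumptions for illustration only; the diff shows neither the model load nor the UI wiring.

import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "gpt2"  # assumption: placeholder checkpoint, the actual Space's model is not shown in the diff
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)
model.eval()


def generate(text):
    # The commit drops the '<s>{}</s></s>' prompt wrapping, so the raw user
    # text is tokenized directly.
    input_ids = tokenizer(text, return_tensors="pt").input_ids
    outputs = model.generate(
        input_ids,
        max_new_tokens=200,
        do_sample=True,
        top_p=0.7,
        temperature=0.35,
        repetition_penalty=1.2,
        eos_token_id=tokenizer.eos_token_id,
    )
    rets = tokenizer.batch_decode(outputs)
    # Strip the echoed prompt and the GPT-style end-of-text marker, matching
    # the new replace('<|endoftext|>', "") in the diff.
    output = rets[0].strip().replace(text, "").replace("<|endoftext|>", "")
    return output


with gr.Blocks() as demo:
    # Assumed UI: the diff does not show the Blocks body.
    inp = gr.Textbox(label="Prompt")
    out = gr.Textbox(label="Completion")
    btn = gr.Button("Generate")
    btn.click(fn=generate, inputs=inp, outputs=out)

if __name__ == "__main__":
    demo.launch()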