Update app.py
app.py CHANGED
@@ -133,7 +133,7 @@ def humanize_text(text):
     input_ids = st.session_state.tokenizer(
         prompt,
         return_tensors="pt",
-        max_length=
+        max_length=512,
         truncation=True
     ).input_ids
 
@@ -142,12 +142,12 @@ def humanize_text(text):
         input_ids,
         max_length=1024,
         min_length=len(text.split()),
-        do_sample=
+        do_sample=False,
         temperature=0.1,
         top_p=0.95,
-        num_beams=
-        repetition_penalty=1.
-        length_penalty=
+        num_beams=2,
+        repetition_penalty=1.1,
+        length_penalty=1.0
     )
     result = st.session_state.tokenizer.decode(outputs[0], skip_special_tokens=True)
     result = clean_generated_text(result)
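For context, a minimal sketch of how the patched humanize_text() reads once this commit is applied. Only the lines shown in the two hunks come from the diff; the prompt template, the st.session_state setup, the opening line of the generate() call (which falls just outside the hunk), and the clean_generated_text stub are assumptions for illustration.

import streamlit as st

def clean_generated_text(s: str) -> str:
    # Stand-in for the app's real cleanup helper, defined elsewhere in app.py.
    return s.strip()

def humanize_text(text: str) -> str:
    # Assumed prompt template; the real one sits above the diffed hunk.
    prompt = f"paraphrase: {text}"

    # The tokenizer and model are assumed to be cached in st.session_state
    # by the app's setup code, as the diff implies.
    input_ids = st.session_state.tokenizer(
        prompt,
        return_tensors="pt",
        max_length=512,   # added in this commit: truncate over-long prompts
        truncation=True,
    ).input_ids

    outputs = st.session_state.model.generate(
        input_ids,
        max_length=1024,
        min_length=len(text.split()),  # word count as a rough floor; min_length is measured in tokens
        do_sample=False,               # deterministic decoding, per this commit
        temperature=0.1,               # no effect while do_sample=False
        top_p=0.95,                    # no effect while do_sample=False
        num_beams=2,
        repetition_penalty=1.1,
        length_penalty=1.0,
    )
    result = st.session_state.tokenizer.decode(outputs[0], skip_special_tokens=True)
    result = clean_generated_text(result)
    return result  # the hunk ends at line 153; an explicit return is assumed

One design note: with do_sample=False, generate() runs deterministic beam search, so temperature and top_p are inert (recent transformers releases log a warning when sampling parameters are passed alongside do_sample=False); dropping those two arguments would make the deterministic intent explicit. Likewise, min_length counts tokens while len(text.split()) counts words, so the length floor is only approximate.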