Spaces:
Sleeping
Sleeping
Commit
•
aa16dec
1
Parent(s):
aa9fefe
truncation fixed
Browse files
- pages/Borgesian.py +3 -2
pages/Borgesian.py
CHANGED
@@ -19,14 +19,15 @@ def generate_response(text, temperature, length, top_p):
|
|
19 |
with torch.no_grad():
|
20 |
out = borgesian.generate(input_ids, do_sample=True, num_beams=2, temperature=float(temperature), top_p=float(top_p), max_length=length, truncate=".")
|
21 |
generated_text = list(map(tokenizer.decode, out))[0]
|
22 |
-
|
|
|
23 |
|
24 |
st.title('Borgesian')
|
25 |
st.image('borges.jpg')
|
26 |
st.write('Write a prompt in Russian, and the GPT-based model will follow up with a Borgesian text.')
|
27 |
st.write('Define the parameters of generation:')
|
28 |
temperature = st.slider('Temperature', value = 1.5, min_value = 1.0, max_value = 5.0, step = 0.1)
|
29 |
-
length = st.slider('Length', value = 50, min_value = 20, max_value =
|
30 |
top_p = st.slider('Top-p value', value = 0.9, min_value = 0.5, max_value = 1.0, step = 0.05)
|
31 |
|
32 |
user_input = st.text_area("Enter your text:")
|
|
|
19 |
with torch.no_grad():
|
20 |
out = borgesian.generate(input_ids, do_sample=True, num_beams=2, temperature=float(temperature), top_p=float(top_p), max_length=length, truncate=".")
|
21 |
generated_text = list(map(tokenizer.decode, out))[0]
|
22 |
+
last_full_stop_index = generated_text.rfind('.')
|
23 |
+
st.write(generated_text[:last_full_stop_index + 1])
|
24 |
|
25 |
st.title('Borgesian')
|
26 |
st.image('borges.jpg')
|
27 |
st.write('Write a prompt in Russian, and the GPT-based model will follow up with a Borgesian text.')
|
28 |
st.write('Define the parameters of generation:')
|
29 |
temperature = st.slider('Temperature', value = 1.5, min_value = 1.0, max_value = 5.0, step = 0.1)
|
30 |
+
length = st.slider('Length', value = 50, min_value = 20, max_value = 250, step = 1)
|
31 |
top_p = st.slider('Top-p value', value = 0.9, min_value = 0.5, max_value = 1.0, step = 0.05)
|
32 |
|
33 |
user_input = st.text_area("Enter your text:")
|