Update app.py
app.py CHANGED
@@ -52,8 +52,8 @@ def text2story(text):
         inputs = tokenizer(prompt, return_tensors="pt")
         outputs = model.generate(
             inputs.input_ids,
-            min_length=
-            max_new_tokens=
+            min_length=50,
+            max_new_tokens=100,
             temperature=0.7,
             top_p=0.9,
             top_k=40,
@@ -71,8 +71,8 @@ def text2story(text):
         inputs = tokenizer(fallback_prompt, return_tensors="pt")
         fallback_story = fallback_generator.generate(
             inputs.input_ids,
-            min_length=
-            max_new_tokens=
+            min_length=50,
+            max_new_tokens=100,
             temperature=0.7,
             top_p=0.9,
             top_k=40,
@@ -204,7 +204,7 @@ if uploaded_file is not None:
 
     # Stage 2: Text to Story
     with st.container():
-        st.markdown("<h3><span class='stage-icon'>📝</span>
+        st.markdown("<h3><span class='stage-icon'>📝</span> 以下係我哋嘅魔術師正根據你嘅畫作嘅故事: </h3>", unsafe_allow_html=True)
 
         with progress_placeholder.container():
            st.write("正在創作故事...")
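
For context, the first two hunks tune the same Hugging Face text-generation call; below is a minimal, self-contained sketch of that pattern. The model name ("gpt2"), the prompt text, and do_sample=True are placeholders/assumptions not shown in this diff. Note that in transformers, min_length counts prompt tokens plus generated tokens, while max_new_tokens caps only the newly generated tokens; if a minimum on generated tokens alone is intended, min_new_tokens would be the closer knob. The third hunk only changes UI text: the new header reads roughly "Here is the story our magician is creating based on your drawing", and "正在創作故事..." means "Creating the story...".

from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder model; the Space's actual checkpoint is not shown in this diff.
tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

prompt = "Once upon a time, a child drew a picture of"  # placeholder prompt
inputs = tokenizer(prompt, return_tensors="pt")

outputs = model.generate(
    inputs.input_ids,
    min_length=50,       # minimum total length (prompt + generated tokens) before EOS is allowed
    max_new_tokens=100,  # hard cap on newly generated tokens only
    temperature=0.7,
    top_p=0.9,
    top_k=40,
    do_sample=True,      # assumption: temperature/top_p/top_k only take effect when sampling is enabled
)
story = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(story)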