Spaces:
Running
Running
seawolf2357
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -68,20 +68,30 @@ def respond(
|
|
68 |
except json.JSONDecodeError:
|
69 |
continue # ์ ํจํ์ง ์์ JSON์ด ์์ ๊ฒฝ์ฐ ๋ฌด์ํ๊ณ ๋ค์ ์ฒญํฌ๋ก ๋์ด๊ฐ
|
70 |
|
71 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
72 |
|
73 |
-
|
74 |
-
|
75 |
-
|
76 |
-
|
77 |
-
|
78 |
-
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
-
|
83 |
-
|
84 |
-
)
|
85 |
|
86 |
if __name__ == "__main__":
|
87 |
demo.queue().launch(max_threads=20)
|
|
|
68 |
except json.JSONDecodeError:
|
69 |
continue # ์ ํจํ์ง ์์ JSON์ด ์์ ๊ฒฝ์ฐ ๋ฌด์ํ๊ณ ๋ค์ ์ฒญํฌ๋ก ๋์ด๊ฐ
|
70 |
|
71 |
+
# Gradio Blocks API ์ฌ์ฉ
|
72 |
+
with gr.Blocks() as demo:
|
73 |
+
with gr.Row():
|
74 |
+
chatbot = gr.Chatbot()
|
75 |
+
with gr.Column():
|
76 |
+
message = gr.Textbox(label="Your message:")
|
77 |
+
system_message = gr.Textbox(value="AI Assistant Role", label="System message")
|
78 |
+
max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
|
79 |
+
temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
|
80 |
+
top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
|
81 |
+
send_button = gr.Button("Send")
|
82 |
|
83 |
+
def handle_response(message, history, system_message, max_tokens, temperature, top_p):
|
84 |
+
bot_response = respond(message, history, system_message, max_tokens, temperature, top_p)
|
85 |
+
for response in bot_response:
|
86 |
+
history.append((message, response))
|
87 |
+
yield history, history
|
88 |
+
|
89 |
+
send_button.click(
|
90 |
+
handle_response,
|
91 |
+
inputs=[message, chatbot, system_message, max_tokens, temperature, top_p],
|
92 |
+
outputs=[chatbot, chatbot],
|
93 |
+
queue=True
|
94 |
+
)
|
95 |
|
96 |
if __name__ == "__main__":
|
97 |
demo.queue().launch(max_threads=20)
|