Spaces: Running

lihuayong committed
Commit cc3f8e3 • Parent(s): dc009cb
lhy: stop the input box from sending a request when Enter is pressed; enlarge the input box

Files changed: ChuanhuChatbot.py (+49, -23)

ChuanhuChatbot.py
CHANGED
@@ -20,14 +20,16 @@ gr.Chatbot.postprocess = postprocess
 with open("assets/custom.css", "r", encoding="utf-8") as f:
     customCSS = f.read()
 
+
 def create_new_model():
-    return get_model(model_name
+    return get_model(model_name=MODELS[DEFAULT_MODEL], access_key=my_api_key)[0]
+
 
 with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
     user_name = gr.State("")
     promptTemplates = gr.State(load_template(get_template_names(plain=True)[0], mode=2))
     user_question = gr.State("")
-    assert type(my_api_key)==str
+    assert type(my_api_key) == str
     user_api_key = gr.State(my_api_key)
     current_model = gr.State(create_new_model)
 
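The hunk above moves model construction into create_new_model() and hands the function itself to gr.State, so each browser session gets its own model object instead of sharing one. A minimal sketch of that pattern, assuming a Gradio 3.x install; make_counter and bump are illustrative names, not part of the app:

import gradio as gr

def make_counter():
    # Callable default: Gradio calls this to build the initial value,
    # and state is kept per session, so visitors do not share the dict.
    return {"count": 0}

def bump(state):
    state["count"] += 1
    return state, f"Clicked {state['count']} times"

with gr.Blocks() as demo:
    counter = gr.State(make_counter)  # same shape as gr.State(create_new_model) above
    label = gr.Markdown("Clicked 0 times")
    btn = gr.Button("Click")
    btn.click(bump, [counter], [counter, label])

demo.launch()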
@@ -46,6 +48,7 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
     with gr.Row():
         with gr.Column(min_width=225, scale=12):
             user_input = gr.Textbox(
+                lines=3,
                 elem_id="user_input_tb",
                 show_label=False, placeholder=i18n("在这里输入")
             ).style(container=False)
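Together with the user_input.submit(...) wiring that is commented out further down, lines=3 is what changes the Enter behaviour: the input becomes a multi-line textarea and a message is only sent when the send button fires. A minimal sketch of that arrangement, assuming Gradio 3.x; answer() is a stand-in for the app's real predict pipeline:

import gradio as gr

def answer(message):
    # Stand-in for the real predict() chain
    return f"You said: {message}"

with gr.Blocks() as demo:
    user_input = gr.Textbox(lines=3, show_label=False, placeholder="Type here")
    send_btn = gr.Button("Send")
    reply = gr.Textbox(label="Reply")

    # Only the button is wired up; with no user_input.submit(...) handler,
    # pressing Enter just edits the text instead of firing a request.
    send_btn.click(answer, [user_input], [reply])

demo.launch()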
@@ -77,11 +80,14 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
         label="API-Key",
     )
     if multi_api_key:
-        usageTxt = gr.Markdown(i18n("多账号模式已开启,无需输入key,可直接开始对话"),
+        usageTxt = gr.Markdown(i18n("多账号模式已开启,无需输入key,可直接开始对话"),
+            elem_id="usage_display", elem_classes="insert_block")
     else:
-        usageTxt = gr.Markdown(i18n("**发送消息** 或 **提交key** 以显示额度"), elem_id="usage_display",
+        usageTxt = gr.Markdown(i18n("**发送消息** 或 **提交key** 以显示额度"), elem_id="usage_display",
+            elem_classes="insert_block")
     model_select_dropdown = gr.Dropdown(
-        label=i18n("选择模型"), choices=MODELS, multiselect=False, value=MODELS[DEFAULT_MODEL],
+        label=i18n("选择模型"), choices=MODELS, multiselect=False, value=MODELS[DEFAULT_MODEL],
+        interactive=True
     )
     lora_select_dropdown = gr.Dropdown(
         label=i18n("选择LoRA模型"), choices=[], multiselect=False, interactive=True, visible=False
@@ -161,10 +167,11 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
 
     with gr.Tab(label=i18n("高级")):
         gr.Markdown(i18n("# ⚠️ 务必谨慎更改 ⚠️\n\n如果无法使用请恢复默认设置"))
-        gr.HTML(get_html("appearance_switcher.html").format(label=i18n("切换亮暗色主题")),
+        gr.HTML(get_html("appearance_switcher.html").format(label=i18n("切换亮暗色主题")),
+            elem_classes="insert_block")
         use_streaming_checkbox = gr.Checkbox(
-
-
+            label=i18n("实时传输回答"), value=True, visible=ENABLE_STREAMING_OPTION
+        )
         with gr.Accordion(i18n("参数"), open=False):
             temperature_slider = gr.Slider(
                 minimum=-0,
@@ -267,18 +274,24 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
     gr.Markdown(CHUANHU_DESCRIPTION, elem_id="description")
     gr.HTML(get_html("footer.html").format(versions=versions_html()), elem_id="footer")
 
+
     # https://github.com/gradio-app/gradio/pull/3296
     def create_greeting(request: gr.Request):
-        if hasattr(request, "username") and request.username:
+        if hasattr(request, "username") and request.username: # is not None or is not ""
             logging.info(f"Get User Name: {request.username}")
             user_info, user_name = gr.Markdown.update(value=f"User: {request.username}"), request.username
         else:
             user_info, user_name = gr.Markdown.update(value=f"", visible=False), ""
-        current_model = get_model(model_name
+        current_model = get_model(model_name=MODELS[DEFAULT_MODEL], access_key=my_api_key)[0]
         current_model.set_user_identifier(user_name)
         chatbot = gr.Chatbot.update(label=MODELS[DEFAULT_MODEL])
-        return user_info, user_name, current_model, toggle_like_btn_visibility(
-
+        return user_info, user_name, current_model, toggle_like_btn_visibility(
+            DEFAULT_MODEL), *current_model.auto_load(), get_history_names(False, user_name), chatbot
+
+
+    demo.load(create_greeting, inputs=None,
+        outputs=[user_info, user_name, current_model, like_dislike_area, systemPromptTxt, chatbot,
+        historyFileSelectDropdown, chatbot], api_name="load")
     chatgpt_predict_args = dict(
         fn=predict,
         inputs=[
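The hunk above registers create_greeting with demo.load, so it runs once per page load; the gr.Request type hint is how Gradio hands the handler the incoming request (and its username when the app runs behind auth). A minimal sketch of the same pattern, assuming Gradio 3.x; greet_on_load is an illustrative name only:

import gradio as gr

def greet_on_load(request: gr.Request):
    # Gradio injects the request because of the gr.Request type hint;
    # request.username is only populated when authentication is enabled.
    name = getattr(request, "username", None) or "guest"
    return f"Hello, {name}"

with gr.Blocks() as demo:
    greeting = gr.Markdown()
    demo.load(greet_on_load, inputs=None, outputs=[greeting])

demo.launch()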
@@ -310,7 +323,8 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
     )
 
     transfer_input_args = dict(
-        fn=transfer_input, inputs=[user_input], outputs=[user_question, user_input, submitBtn, cancelBtn],
+        fn=transfer_input, inputs=[user_input], outputs=[user_question, user_input, submitBtn, cancelBtn],
+        show_progress=True
     )
 
     get_usage_args = dict(
@@ -323,18 +337,20 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
         outputs=[saveFileName, systemPromptTxt, chatbot]
     )
 
-
     # Chatbot
     cancelBtn.click(interrupt, [current_model], [])
 
-
-    user_input.submit(**
+    # Sending on Enter is disabled to avoid accidental sends while a long answer is being generated
+    # user_input.submit(**transfer_input_args).then(**chatgpt_predict_args).then(**end_outputing_args)
+    # user_input.submit(**get_usage_args)
 
     submitBtn.click(**transfer_input_args).then(**chatgpt_predict_args, api_name="predict").then(**end_outputing_args)
     submitBtn.click(**get_usage_args)
 
-    index_files.change(handle_file_upload, [current_model, index_files, chatbot, language_select_dropdown],
-
+    index_files.change(handle_file_upload, [current_model, index_files, chatbot, language_select_dropdown],
+        [index_files, chatbot, status_display])
+    summarize_btn.click(handle_summarize_index, [current_model, index_files, chatbot, language_select_dropdown],
+        [chatbot, status_display])
 
     emptyBtn.click(
         reset,
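The hunk above keeps the send button as the only trigger and chains three steps on it: move the text out of the box, run the prediction, then restore the UI. The event kwargs live in dicts (transfer_input_args, chatgpt_predict_args, ...) and are unpacked into .click()/.then(). A minimal sketch of that chaining style, assuming Gradio 3.16+ for .then(); the handler names here are made up:

import gradio as gr
import time

def lock_input(text):
    # Step 1: stash the question and clear/disable the box while generating
    return text, gr.update(value="", interactive=False)

def slow_answer(question):
    time.sleep(1)  # stand-in for the streaming predict call
    return f"Answer to: {question}"

def unlock_input():
    # Step 3: re-enable the box once the answer is done
    return gr.update(interactive=True)

with gr.Blocks() as demo:
    question = gr.State("")
    box = gr.Textbox(lines=3, show_label=False)
    btn = gr.Button("Send")
    out = gr.Textbox(label="Output")

    transfer_args = dict(fn=lock_input, inputs=[box], outputs=[question, box])
    predict_args = dict(fn=slow_answer, inputs=[question], outputs=[out])
    finish_args = dict(fn=unlock_input, inputs=None, outputs=[box])

    # Same shape as submitBtn.click(**transfer_input_args).then(**chatgpt_predict_args)...
    btn.click(**transfer_args).then(**predict_args).then(**finish_args)

demo.launch()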
@@ -388,12 +404,21 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
     two_column.change(update_doc_config, [two_column], None)
 
     # LLM Models
-    keyTxt.change(set_key, [current_model, keyTxt], [user_api_key, status_display], api_name="set_key").then(
+    keyTxt.change(set_key, [current_model, keyTxt], [user_api_key, status_display], api_name="set_key").then(
+        **get_usage_args)
     keyTxt.submit(**get_usage_args)
     single_turn_checkbox.change(set_single_turn, [current_model, single_turn_checkbox], None)
-    model_select_dropdown.change(get_model,
-
-
+    model_select_dropdown.change(get_model,
+        [model_select_dropdown, lora_select_dropdown, user_api_key, temperature_slider,
+         top_p_slider, systemPromptTxt, user_name],
+        [current_model, status_display, chatbot, lora_select_dropdown], show_progress=True,
+        api_name="get_model")
+    model_select_dropdown.change(toggle_like_btn_visibility, [model_select_dropdown], [like_dislike_area],
+        show_progress=False)
+    lora_select_dropdown.change(get_model,
+        [model_select_dropdown, lora_select_dropdown, user_api_key, temperature_slider,
+         top_p_slider, systemPromptTxt, user_name], [current_model, status_display, chatbot],
+        show_progress=True)
 
     # Template
     systemPromptTxt.change(set_system_prompt, [current_model, systemPromptTxt], None)
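The hunk above rebinds model switching to model_select_dropdown.change with show_progress=True and an api_name, which also exposes the event as a named API endpoint. A minimal sketch of a .change handler with those options, assuming Gradio 3.x; the model names and switch_model function are placeholders:

import gradio as gr

CHOICES = ["model-a", "model-b"]  # placeholder names, not the app's MODELS list

def switch_model(choice):
    # Stand-in for get_model(): just report which model was selected
    return f"Loaded {choice}"

with gr.Blocks() as demo:
    dropdown = gr.Dropdown(choices=CHOICES, value=CHOICES[0], label="Model")
    status = gr.Markdown()
    # show_progress shows a loading indicator while the handler runs;
    # api_name makes the event callable as a named endpoint.
    dropdown.change(switch_model, [dropdown], [status], show_progress=True, api_name="get_model")

demo.launch()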
@@ -427,7 +452,8 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
     )
     historyRefreshBtn.click(get_history_names, [gr.State(False), user_name], [historyFileSelectDropdown])
     historyFileSelectDropdown.change(**load_history_from_file_args)
-    downloadFile.change(upload_chat_history, [current_model, downloadFile, user_name],
+    downloadFile.change(upload_chat_history, [current_model, downloadFile, user_name],
+        [saveFileName, systemPromptTxt, chatbot])
 
     # Advanced
     max_context_length_slider.change(set_token_upper_limit, [current_model, max_context_length_slider], None)