3v324v23 committed
Commit 3041858, 1 Parent(s): 9c2a6bc

Improve prompt message

Files changed (1)
  1. request_llm/bridge_newbing.py +2 -2
request_llm/bridge_newbing.py CHANGED
@@ -611,7 +611,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     单线程方法
     函数的说明请见 request_llm/bridge_all.py
     """
-    chatbot.append((inputs, "[Local Message]: 等待Bing响应 ..."))
+    chatbot.append((inputs, "[Local Message]: 等待NewBing响应中 ..."))
 
     global newbing_handle
     if (newbing_handle is None) or (not newbing_handle.success):
@@ -633,7 +633,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     for i in range(len(history)//2):
         history_feedin.append([history[2*i], history[2*i+1]] )
 
-    chatbot[-1] = (inputs, "[Local Message]: 等待Bing响应 ...")
+    chatbot[-1] = (inputs, "[Local Message]: 等待NewBing响应中 ...")
     yield from update_ui(chatbot=chatbot, history=history, msg="NewBing响应缓慢,尚未完成全部响应,请耐心完成后再提交新问题。")
     for response in newbing_handle.stream_chat(query=inputs, history=history_feedin, system_prompt=system_prompt, max_length=llm_kwargs['max_length'], top_p=llm_kwargs['top_p'], temperature=llm_kwargs['temperature']):
         chatbot[-1] = (inputs, preprocess_newbing_out(response))
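
For context on the change above: the predict() generator appends a placeholder tuple to chatbot (the string edited in this commit), folds the flat history list into user/reply pairs for stream_chat, and yields a UI refresh for each partial NewBing response. Below is a minimal, self-contained sketch of that flow with the handle and UI calls stubbed out. fold_history, FakeHandle, and fake_predict are illustrative assumptions made here for the sketch; the real update_ui, newbing_handle, and preprocess_newbing_out live elsewhere in the repository and are only referenced by the diff.

# Minimal sketch of the streaming flow in predict(); all names defined
# below are stand-ins for illustration, not code from the repository.

def fold_history(history):
    """Fold a flat [user, reply, user, reply, ...] list into pairs,
    mirroring the history_feedin loop shown in the diff."""
    return [[history[2 * i], history[2 * i + 1]] for i in range(len(history) // 2)]

class FakeHandle:
    """Stand-in for newbing_handle: yields a growing partial response."""
    success = True

    def stream_chat(self, query, history, **kwargs):
        for partial in ["thinking", "thinking ...", "done: " + query]:
            yield partial

def fake_predict(inputs, chatbot, history):
    # Placeholder message, as in the edited line of the commit.
    chatbot.append((inputs, "[Local Message]: 等待NewBing响应中 ..."))
    yield chatbot                      # stands in for update_ui(...)
    handle = FakeHandle()
    for response in handle.stream_chat(query=inputs, history=fold_history(history)):
        chatbot[-1] = (inputs, response)
        yield chatbot                  # one UI refresh per partial response

if __name__ == "__main__":
    chatbot, history = [], ["hi", "hello"]
    for state in fake_predict("what changed?", chatbot, history):
        print(state[-1])

Running the sketch prints the placeholder first and then each partial response, which is the same replace-the-last-chatbot-entry pattern the real function uses while streaming.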