starsaround committed on
Commit
f221667
1 Parent(s): eda129a

update using gr.ChatInterface

Files changed (1)
  1. app.py +45 -72
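
The update replaces the hand-rolled gr.Chatbot / gr.Textbox event wiring with gr.ChatInterface, which calls its callback as (message, history, *additional_inputs) and streams whatever the callback yields. A minimal standalone sketch of that pattern (assuming Gradio >= 3.39; echo_bot and its echoed reply are illustrative, not code from this repo):

# Minimal sketch of the streaming gr.ChatInterface pattern the new app.py uses.
# Assumes Gradio >= 3.39; echo_bot and its echoed reply are illustrative only.
import gradio as gr

def echo_bot(message, history, system_msg):
    # ChatInterface calls the callback with (message, history, *additional_inputs);
    # yielding successively longer strings streams the reply into the chat window.
    partial = ""
    for ch in f"[{system_msg}] {message}":
        partial += ch
        yield partial

demo = gr.ChatInterface(
    echo_bot,
    additional_inputs=[gr.Textbox(value="You are an assistant.", label="System prompt")],
)
demo.queue()   # queueing is required for generator (streaming) callbacks in Gradio 3.x
demo.launch()
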
app.py CHANGED
@@ -50,6 +50,43 @@ provider_dict = {
    'GetGpt': GetGpt
}

+def change_prompt_set(prompt_set_name):
+    return gr.Dropdown.update(choices=list(prompt_set_list[prompt_set_name].keys()))
+
+def change_prompt(prompt_set_name, prompt_name):
+    return gr.update(value=prompt_set_list[prompt_set_name][prompt_name])
+
+def user(user_message, history):
+    return gr.update(value="", interactive=False), history + [[user_message, None]]
+
+def bot(message, history, model_name, provider_name, system_msg):
+    response = ''
+
+    if len(system_msg)>3000:
+        system_msg = system_msg[:2000] + system_msg[-1000:]
+
+    global template, memory
+    llm.model_name = model_name
+    llm.provider_name = provider_name
+    prompt = PromptTemplate(
+        input_variables=["chat_history", "human_input"], template=template.format(system_instruction=system_msg)
+    )
+    llm_chain = LLMChain(
+        llm=llm,
+        prompt=prompt,
+        verbose=False,
+        memory=memory,
+    )
+    bot_msg = llm_chain.run(message)
+    for c in bot_msg:
+        response += c
+        yield response
+
+def empty_chat():
+    global memory
+    memory = ConversationBufferWindowMemory(k=10, memory_key="chat_history")
+    return None
+
prompt_set_list = {}
for prompt_file in os.listdir("prompt_set"):
    key = prompt_file
@@ -76,88 +113,24 @@ with gr.Blocks() as demo:
    Chatbot:"""

    memory = ConversationBufferWindowMemory(k=10, memory_key="chat_history")
-
-    chatbot = gr.Chatbot([[None, None]], label='AI')
-    msg = gr.Textbox(value="", label='请输入:')
    with gr.Row():
-        clear = gr.Button("清空对话", scale=2)
-        chat_mode = gr.Checkbox(value=True, label='聊天模式', interactive=True, scale=1)
+        model_name = gr.Dropdown(['gpt-3.5-turbo', 'gpt-4'], value='gpt-3.5-turbo', label='模型')
+        provider = gr.Dropdown(provider_dict.keys(), value='GetGpt', label='提供者', min_width=20)
    system_msg = gr.Textbox(value="你是一名助手,可以解答问题。", label='系统提示')
+    gr.ChatInterface(bot,
+                     additional_inputs=[
+                         model_name,
+                         provider,
+                         system_msg]
+                     )
    with gr.Row():
        default_prompt_set = "1 中文提示词.json"
        prompt_set_name = gr.Dropdown(prompt_set_list.keys(), value=default_prompt_set, label='提示词集合')
        prompt_name = gr.Dropdown(prompt_set_list[default_prompt_set].keys(), label='提示词', min_width=20)
-    with gr.Row():
-        model_name = gr.Dropdown(['gpt-3.5-turbo', 'gpt-4'], value='gpt-3.5-turbo', label='模型')
-        provider_name = gr.Dropdown(provider_dict.keys(), value='GetGpt', label='提供者', min_width=20)
-
-    def change_prompt_set(prompt_set_name):
-        return gr.Dropdown.update(choices=list(prompt_set_list[prompt_set_name].keys()))
-
-    def change_prompt(prompt_set_name, prompt_name):
-        return gr.update(value=prompt_set_list[prompt_set_name][prompt_name])

-    def user(user_message, history):
-        return gr.update(value="", interactive=False), history + [[user_message, None]]
-
-    def bot(history, model_name, provider_name, system_msg, chat_mode):
-        history[-1][1] = ''
-        if len(system_msg)>3000:
-            system_msg = system_msg[:2000] + system_msg[-1000:]
-
-        if chat_mode:
-            global template, memory
-            llm.model_name = model_name
-            llm.provider_name = provider_name
-            prompt = PromptTemplate(
-                input_variables=["chat_history", "human_input"], template=template.format(system_instruction=system_msg)
-            )
-            llm_chain = LLMChain(
-                llm=llm,
-                prompt=prompt,
-                verbose=False,
-                memory=memory,
-            )
-            bot_msg = llm_chain.run(history[-1][0])
-            for c in bot_msg:
-                history[-1][1] += c
-                yield history
-        else:
-            prompt = """
-            请你仔细阅读以下提示,然后针对用户的话进行回答。
-            提示:
-            ```
-            {}
-            ```
-            用户最新的话:
-            ```
-            {}
-            ```
-            请回答:
-            """
-            bot_msg = g4f.ChatCompletion.create(model=model_name,
-                                                provider=provider_dict[provider_name],
-                                                messages=[{"role": "user",
-                                                           "content": prompt.format(system_msg,
-                                                                                    history[-1][0])}],
-                                                stream=True)
-            for c in bot_msg:
-                history[-1][1] += c
-                yield history
-
-    def empty_chat():
-        global memory
-        memory = ConversationBufferWindowMemory(k=10, memory_key="chat_history")
-        return None
-    response = msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
-        bot, [chatbot, model_name, provider_name, system_msg, chat_mode], chatbot
-    )
    prompt_set_name.select(change_prompt_set, prompt_set_name, prompt_name)
    prompt_name.select(change_prompt, [prompt_set_name, prompt_name], system_msg)

-    response.then(lambda: gr.update(interactive=True), None, [msg], queue=False)
-    clear.click(empty_chat, None, [chatbot], queue=False)
-
    demo.title = "AI Chat"
    demo.queue()
    demo.launch()
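
For reference, the new bot() streams an LLMChain response backed by a 10-turn ConversationBufferWindowMemory. A minimal self-contained sketch of that LangChain wiring, assuming the classic langchain package layout (FakeListLLM stands in for the project's g4f-backed LLM wrapper, and the template text is illustrative):

# Sketch of the LLMChain + window-memory pattern used by bot() above.
# Assumes classic langchain; FakeListLLM is a hypothetical stand-in LLM.
from langchain.chains import LLMChain
from langchain.llms.fake import FakeListLLM
from langchain.memory import ConversationBufferWindowMemory
from langchain.prompts import PromptTemplate

template = """You are an assistant.

{chat_history}
Human: {human_input}
Chatbot:"""

prompt = PromptTemplate(input_variables=["chat_history", "human_input"], template=template)
memory = ConversationBufferWindowMemory(k=10, memory_key="chat_history")  # keep the last 10 turns
llm_chain = LLMChain(llm=FakeListLLM(responses=["Hello!"]), prompt=prompt, memory=memory)

print(llm_chain.run("Hi"))  # the exchange is written into memory for the next call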