winglian committed on
Commit
a24d070
1 Parent(s): 11957cb

Update tabbed.py

Browse files
Files changed (1) hide show
  1. tabbed.py +35 -1
tabbed.py CHANGED
@@ -60,6 +60,34 @@ def chat(history, system_message, max_tokens, temperature, top_p, top_k, repeat_
60
  yield history, history
61
 
62
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
63
  def clear_chat(chat_history_state, chat_message):
64
  chat_history_state = []
65
  chat_message = ''
@@ -114,6 +142,7 @@ with gr.Blocks() as demo:
114
  )
115
  with gr.Row():
116
  submit = gr.Button(value="Send message", variant="secondary").style(full_width=True)
 
117
  clear = gr.Button(value="New topic", variant="secondary").style(full_width=False)
118
  stop = gr.Button(value="Stop", variant="secondary").style(full_width=False)
119
  with gr.Row():
@@ -136,11 +165,16 @@ with gr.Blocks() as demo:
136
  ).then(
137
  fn=chat, inputs=[chat_history_state, system_msg, max_tokens, temperature, top_p, top_k, repeat_penalty], outputs=[chatbot, chat_history_state], queue=True
138
  )
 
 
 
 
 
139
  message_submit_event = message.submit(
140
  fn=user, inputs=[message, chat_history_state], outputs=[message, chat_history_state], queue=True
141
  ).then(
142
  fn=chat, inputs=[chat_history_state, system_msg, max_tokens, temperature, top_p, top_k, repeat_penalty], outputs=[chatbot, chat_history_state], queue=True
143
  )
144
- stop.click(fn=None, inputs=None, outputs=None, cancels=[submit_click_event, message_submit_event], queue=False)
145
 
146
  demo.queue(**config["queue"]).launch(debug=True, server_name="0.0.0.0", server_port=7860)
 
60
  yield history, history
61
 
62
 
63
def rp_chat(history, system_message, max_tokens, temperature, top_p, top_k, repeat_penalty):
    """Stream a roleplay-formatted completion from the model.

    Builds a prompt in the <|system|>/<|user|>/<|model|> tag format from the
    chat history, then streams tokens from the global ``llm``, accumulating
    the answer into the last history entry and yielding after each chunk.

    Args:
        history: list of [user_text, model_text] pairs; the last entry's
            model_text is overwritten with the streamed answer.
        system_message: text placed after the <|system|> tag.
        max_tokens, temperature, top_p, top_k, repeat_penalty: sampling
            parameters forwarded to ``llm``.

    Yields:
        (history, history) after each streamed chunk — Gradio expects the
        value twice: once for the chatbot widget, once for the state.
    """
    history = history or []
    # Fix: the original crashed with IndexError at history[-1] on an empty
    # history, despite the `history or []` guard above. With no turns there
    # is nothing to answer, so finish the generator without yielding.
    if not history:
        return

    messages = "<|system|>" + system_message + \
        "\n".join(["\n".join(["<|user|>" + item[0], "<|model|>" + item[1]])
                   for item in history])

    # remove last space from assistant, some models output a ZWSP if you leave a space
    # NOTE(review): this drops the final character unconditionally, not only a
    # trailing space — presumably the prompt always ends with one; confirm.
    messages = messages[:-1]

    history[-1][1] = ""
    for output in llm(
            messages,
            echo=False,
            stream=True,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p,
            top_k=top_k,
            repeat_penalty=repeat_penalty,
            **config['chat']
    ):
        answer = output['choices'][0]['text']
        history[-1][1] += answer
        # stream the partial response so the UI updates live
        yield history, history
89
+
90
+
91
  def clear_chat(chat_history_state, chat_message):
92
  chat_history_state = []
93
  chat_message = ''
 
142
  )
143
  with gr.Row():
144
  submit = gr.Button(value="Send message", variant="secondary").style(full_width=True)
145
+ roleplay = gr.Button(value="Roleplay", variant="secondary").style(full_width=True)
146
  clear = gr.Button(value="New topic", variant="secondary").style(full_width=False)
147
  stop = gr.Button(value="Stop", variant="secondary").style(full_width=False)
148
  with gr.Row():
 
165
  ).then(
166
  fn=chat, inputs=[chat_history_state, system_msg, max_tokens, temperature, top_p, top_k, repeat_penalty], outputs=[chatbot, chat_history_state], queue=True
167
  )
168
+ roleplay_click_event = roleplay.click(
169
+ fn=user, inputs=[message, chat_history_state], outputs=[message, chat_history_state], queue=True
170
+ ).then(
171
+ fn=rp_chat, inputs=[chat_history_state, system_msg, max_tokens, temperature, top_p, top_k, repeat_penalty], outputs=[chatbot, chat_history_state], queue=True
172
+ )
173
  message_submit_event = message.submit(
174
  fn=user, inputs=[message, chat_history_state], outputs=[message, chat_history_state], queue=True
175
  ).then(
176
  fn=chat, inputs=[chat_history_state, system_msg, max_tokens, temperature, top_p, top_k, repeat_penalty], outputs=[chatbot, chat_history_state], queue=True
177
  )
178
+ stop.click(fn=None, inputs=None, outputs=None, cancels=[submit_click_event, message_submit_event, roleplay_click_event], queue=False)
179
 
180
  demo.queue(**config["queue"]).launch(debug=True, server_name="0.0.0.0", server_port=7860)