Peter committed on
Commit 38ca40a
1 Parent(s): bc8f2a7

:art: format to black

Files changed (2)
  1. app.py +19 -11
  2. converse.py +10 -3
app.py CHANGED
@@ -57,8 +57,13 @@ def chat(trivia_query, temperature=0.7, top_p=0.95, top_k=50):
         [str]: the bot's response
     """
     history = []
-    response = ask_gpt(message=trivia_query, chat_pipe=my_chatbot,
-                       top_p=top_p, top_k=top_k, temperature=temperature)
+    response = ask_gpt(
+        message=trivia_query,
+        chat_pipe=my_chatbot,
+        top_p=top_p,
+        top_k=top_k,
+        temperature=temperature,
+    )
     history = [trivia_query, response]
     html = ""
     for item in history:
@@ -152,7 +157,7 @@ def get_parser():
     parser.add_argument(
         "--basic-sc",
         required=False,
-        default=True, # TODO: change this back to False once Neuspell issues are resolved.
+        default=True,  # TODO: change this back to False once Neuspell issues are resolved.
         action="store_true",
         help="turn on symspell (baseline) correction instead of the more advanced neural net models",
     )
@@ -191,17 +196,21 @@ if __name__ == "__main__":
     print(f"using model stored here: \n {model_loc} \n")
     iface = gr.Interface(
         chat,
-        inputs=["text",
-                Slider(minimum=0.0, maximum=1.0, step=0.01, default=0.6, label="temperature"),
-                Slider(minimum=0.0, maximum=1.0, step=0.01, default=0.95, label="top_p"),
-                Slider(minimum=0, maximum=250, step=1, default=50, label="top_k")],
+        inputs=[
+            "text",
+            Slider(
+                minimum=0.0, maximum=1.0, step=0.01, default=0.6, label="temperature"
+            ),
+            Slider(minimum=0.0, maximum=1.0, step=0.01, default=0.95, label="top_p"),
+            Slider(minimum=0, maximum=250, step=1, default=50, label="top_k"),
+        ],
         outputs="html",
         examples_per_page=8,
         examples=[
            ["How can you help me?", 0.6, 0.95, 50],
            ["what can you do?", 0.6, 0.95, 50],
            ["Hi, my name is……", 0.6, 0.95, 50],
-            ["Happy birthday!", 0.9, 0.95, 50],
+            ["Happy birthday!", 0.9, 0.95, 50],
            ["I have a question, can you help me?", 0.6, 0.95, 50],
            ["Do you know a joke?", 0.6, 0.85, 50],
            ["Will you marry me?", 0.6, 0.95, 138],
@@ -211,11 +220,10 @@ if __name__ == "__main__":
            ["Do you have a hobby?", 0.6, 0.95, 50],
            ["You’re clever", 0.6, 0.95, 50],
            ["Tell me about your personality", 0.6, 0.95, 50],
-            ["You’re annoying", 0.6, 0.95, 50],
+            ["You’re annoying", 0.6, 0.95, 50],
            ["I want to speak to a human now.", 0.6, 0.95, 50],
-            ["Don’t you speak English?!", 0.6, 0.95, 50],
+            ["Don’t you speak English?!", 0.6, 0.95, 50],
            ["Are you human?", 0.6, 0.95, 50],
-
        ],
        title=f"GPT Chatbot Demo: {default_model} Model",
        description=f"A Demo of a Chatbot trained for conversation with humans. Size XL= 1.5B parameters.\n\n"
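A commit like this is normally produced by running Black over the repo (for example, black app.py converse.py). As a reference, the same re-wrapping of the old ask_gpt call can be reproduced through Black's Python API. This is a minimal sketch, assuming a recent black release (format_str and Mode are its public helpers); the source string is reconstructed from the "-" lines of the first hunk above:

import black

# The hand-wrapped call roughly as it stood in app.py before this commit.
src = (
    "response = ask_gpt(message=trivia_query, chat_pipe=my_chatbot,\n"
    "                   top_p=top_p, top_k=top_k, temperature=temperature)\n"
)

# Black re-wraps the call to one keyword argument per line with a trailing
# comma, matching the "+" lines in the first hunk above.
print(black.format_str(src, mode=black.Mode()))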
converse.py CHANGED
@@ -213,8 +213,12 @@ def consolidate_texts(
     name_spk = "person alpha" if name_spk is None else name_spk
     if verbose:
         print("====" * 10)
-        print(f"\n[DEBUG] initial model_resp has {len(model_resp)} lines: \n\t{model_resp}")
-        print(f" the first element is \n\t{model_resp[0]} and it is {type(model_resp[0])}")
+        print(
+            f"\n[DEBUG] initial model_resp has {len(model_resp)} lines: \n\t{model_resp}"
+        )
+        print(
+            f" the first element is \n\t{model_resp[0]} and it is {type(model_resp[0])}"
+        )
     fn_resp = []

     name_counter = 0
@@ -228,7 +232,10 @@ def consolidate_texts(
            if print_debug:
                print(f"\nDEBUG: \n\t{resline}\ncaused the break")
            break  # the name of the speaker is in the line, so we're done
-        if any([": " in resline,":\n" in resline]) and name_resp.lower() not in resline.lower():
+        if (
+            any([": " in resline, ":\n" in resline])
+            and name_resp.lower() not in resline.lower()
+        ):
            if print_debug:
                print(f"\nDEBUG: \n\t{resline}\ncaused the break")
            break
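For context, the re-wrapped condition in the second hunk is the speaker-change check inside consolidate_texts: the loop stops collecting model output as soon as a line hands the turn to a different speaker. A minimal, self-contained sketch of that check follows; the sample model_resp and name_resp values are assumptions made up for illustration, not data from the repo:

# Illustrative only: sample values, not the repo's real data.
name_resp = "person beta"  # the responder's own speaker label (assumed)
model_resp = [
    "sure, here is a joke for you.",
    "person alpha: tell me another one!",  # a different speaker takes over
]

fn_resp = []
for resline in model_resp:
    # Stop consolidating once a line contains a "name:" marker that is not
    # the responder's own name, i.e. another speaker starts talking.
    if (
        any([": " in resline, ":\n" in resline])
        and name_resp.lower() not in resline.lower()
    ):
        break
    fn_resp.append(resline)

print(fn_resp)  # ['sure, here is a joke for you.']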