vpcom committed on
Commit
1eab9d7
1 Parent(s): f1273a4

fix: if len(message) is zero then randomly generate a prompt and show it in the correct place

Browse files
Files changed (1) hide show
  1. app.py +8 -10
app.py CHANGED
@@ -126,7 +126,7 @@ def format_prompt(message, history, system_prompt):
126
  prompt += f"{user_prompt}"
127
  prompt += f"{bot_response}"
128
  prompt += f"""{message}"""
129
- return prompt.replace('\n','؛').replace('\t','/')
130
 
131
  def generate(
132
  prompt, history, system_prompt,
@@ -136,13 +136,6 @@ def generate(
136
  global HISTORY
137
  HISTORY = history
138
  global PROMPT
139
-
140
- if len(PROMPT)==0:
141
- prompt = random.choice(["ا","ب","پ","ت","ث","ج","چ","ح","خ","ل","م","ن","و",
142
- "د","ذ","ر","ز","ژ","س","ش","ص","ض","ط","ظ","ع","غ",
143
- "ف","ق","ه","ی",
144
- ])
145
-
146
  PROMPT = prompt
147
 
148
  temperature = float(temperature)
@@ -404,7 +397,7 @@ class Chatbot(gr.Chatbot):
404
  processed_messages = []
405
  for message_pair in y:
406
  result = ""
407
- print('Message Pairs: ',message_pair[0],message_pair[1])
408
  if message_pair[0] is not None:
409
  result += message_pair[0]
410
  if message_pair[1] is not None:
@@ -419,7 +412,7 @@ class Chatbot(gr.Chatbot):
419
  )
420
  processed_messages.append(
421
  [
422
- None,self._postprocess_chat_messages((result).replace('؛','\n').replace('/','\t'))
423
  #self._postprocess_chat_messages(message_pair[1])),
424
  ]
425
  )
@@ -523,6 +516,11 @@ class ChatInterface(gr.ChatInterface):
523
  *args,
524
  ) -> tuple[list[list[str | None]], list[list[str | None]]]:
525
  history = history_with_input[:-1]
 
 
 
 
 
526
  inputs, _, _ = special_args(
527
  self.fn, inputs=[message, history, *args], request=request
528
  )
 
126
  prompt += f"{user_prompt}"
127
  prompt += f"{bot_response}"
128
  prompt += f"""{message}"""
129
+ return prompt.replace('\n','؛').replace('\t','/').strip()
130
 
131
  def generate(
132
  prompt, history, system_prompt,
 
136
  global HISTORY
137
  HISTORY = history
138
  global PROMPT
 
 
 
 
 
 
 
139
  PROMPT = prompt
140
 
141
  temperature = float(temperature)
 
397
  processed_messages = []
398
  for message_pair in y:
399
  result = ""
400
+ #print('Message Pairs: ',message_pair[0],message_pair[1])
401
  if message_pair[0] is not None:
402
  result += message_pair[0]
403
  if message_pair[1] is not None:
 
412
  )
413
  processed_messages.append(
414
  [
415
+ None,self._postprocess_chat_messages((result).replace('؛','\n').replace('/','\t').strip())
416
  #self._postprocess_chat_messages(message_pair[1])),
417
  ]
418
  )
 
516
  *args,
517
  ) -> tuple[list[list[str | None]], list[list[str | None]]]:
518
  history = history_with_input[:-1]
519
+ if len(message)==0:
520
+ message = random.choice(["ا","ب","پ","ت","ث","ج","چ","ح","خ","ل","م","ن","و",
521
+ "د","ذ","ر","ز","ژ","س","ش","ص","ض","ط","ظ","ع","غ",
522
+ "ف","ق","ه","ی",
523
+ ])
524
  inputs, _, _ = special_args(
525
  self.fn, inputs=[message, history, *args], request=request
526
  )