JPBianchi committed
Commit bc45e34 · 1 Parent(s): 87dd32d

Fixed error in question prompt

Files changed (1): app.py (+11, -18)
app.py CHANGED
@@ -565,20 +565,20 @@ def reword_query(query, guest, model_name='llama2-13b-chat', response_processing
 
     prompt_fields = {
         "you_are":f"You are an expert in linguistics and semantics, analyzing the question asked by a user to a vector search system, \
-            and making sure that the question is well formulated and that the system can understand it.",
+            and making sure that the question is well formulated and understandable by any average reader.",
 
-        "your_task":f"Your task is to detect if the name of the guest ({guest}) is mentioned in the user's question, \
-            and if that is not the case, rewrite the question using the guest name, \
+        "your_task":f"Your task is to detect if the name of the guest ({guest}) is mentioned in the question '{query}', \
+            If that is not the case, rewrite the question using the guest name, \
             without changing the meaning of the question. \
             Most of the time, the user will have used a pronoun to designate the guest, in which case, \
-            simply replace the pronoun with the guest name.",
-
-        "question":f"If the user mentions the guest name ({guest}) in the following question '{query}', just return his question as is. \
-            If the user does not mention the guest name, rewrite the question using the guest name.",
+            simply replace the pronoun with the guest name. \
+            If the guest name is already present in the question, return the original question as is.",
 
         "final_instruction":f"Only regenerate the requested rewritten question or the original, WITHOUT ANY COMMENT OR REPHRASING. \
             Your answer must be as close as possible to the original question, \
             and exactly identical, word for word, if the user mentions the guest name, i.e. {guest}.",
+
+        "question":f"{query}"
     }
 
     # prompt created by chatGPT :-)
@@ -694,27 +694,20 @@ def reword_query(query, guest, model_name='llama2-13b-chat', response_processing
     """
     prompt = openai_prompt.format(**prompt_fields)
 
-    openai_prompt2 = """
-        {your_task}\n
-        ```
-        \n\n
-        {final_instruction}
-    """
-    prompt2 = openai_prompt2.format(**{'your_task':prompt_fields['your_task'],
-                                       'final_instruction':prompt_fields['final_instruction']})
-
     try:
         # https://platform.openai.com/docs/guides/text-generation/chat-completions-api
         resp = GPTllm.get_chat_completion(prompt=prompt,
                                           system_message=prompt_fields['you_are'],
-                                          user_message = None, #prompt_fields['question'],
+                                          user_message = None,
                                           temperature=0.01,
                                           max_tokens=1500, # it's a long question...
                                           show_response=True,
                                           stream=False)
 
         if resp.choices[0].finish_reason == 'stop':
-            return {'rewritten_question': resp.choices[0].message.content,
+            if guest in resp.choices[0].message.content:
+                new_question = resp.choices[0].message.content
+                return {'rewritten_question': new_question,
                         'changed': True, 'status': 'success'}
         else:
             raise Exception("LLM did not stop") # to go to the except block
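For reference, a minimal runnable sketch of what the function does after this commit. It is not the code in app.py: openai_prompt and GPTllm.get_chat_completion are defined elsewhere in that file, so a plain format string and an injectable get_completion callable (both assumptions) stand in for them here, and the fallback when the guest name is missing from the model's answer is a guess, since that branch is not shown in the diff.

# Minimal sketch (not part of the commit) of the post-commit behavior of reword_query.
# The prompt fields and the guest-name guard are taken from the diff above.

def reword_query_sketch(query: str, guest: str, get_completion) -> dict:
    prompt_fields = {
        "you_are": f"You are an expert in linguistics and semantics, analyzing the question asked "
                   f"by a user to a vector search system, and making sure that the question is "
                   f"well formulated and understandable by any average reader.",
        "your_task": f"Your task is to detect if the name of the guest ({guest}) is mentioned in "
                     f"the question '{query}'. If that is not the case, rewrite the question using "
                     f"the guest name, without changing the meaning of the question. "
                     f"If the guest name is already present in the question, return the original "
                     f"question as is.",
        "final_instruction": f"Only regenerate the requested rewritten question or the original, "
                             f"WITHOUT ANY COMMENT OR REPHRASING.",
        "question": f"{query}",
    }

    # Stand-in for openai_prompt.format(**prompt_fields) used in app.py.
    prompt = "{your_task}\n\n{question}\n\n{final_instruction}".format(**prompt_fields)

    content = get_completion(prompt=prompt, system_message=prompt_fields["you_are"])

    # Guard added by this commit: only accept the answer if it contains the guest name.
    if guest in content:
        return {"rewritten_question": content, "changed": True, "status": "success"}
    # Fallback path is not shown in the diff; returning the original query is an assumption.
    return {"rewritten_question": query, "changed": False, "status": "fallback"}


# Example with a fake completion function standing in for GPTllm.get_chat_completion:
result = reword_query_sketch(
    "What did he say about transformers?",
    "Andrej Karpathy",
    get_completion=lambda prompt, system_message: "What did Andrej Karpathy say about transformers?",
)
print(result)  # {'rewritten_question': 'What did Andrej Karpathy say about transformers?', 'changed': True, 'status': 'success'}

The guard is what the commit message refers to: instead of returning whatever the model produced, the answer is only treated as a valid rewrite when it actually contains the guest name.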