hlydecker committed on
Commit
9d72a4b
1 Parent(s): 3601962

Update streamlit_langchain_chat/prompts.py

Browse files
Files changed (1) hide show
  1. streamlit_langchain_chat/prompts.py +2 -19
streamlit_langchain_chat/prompts.py CHANGED
@@ -44,7 +44,7 @@ qa_prompt_GPCL = prompts.PromptTemplate(
44
  input_variables=["question", "context_str"],
45
  template="You are an AI assistant providing helpful advice about University of Sydney policy. You are given the following extracted parts of a long document and a question. Provide a conversational answer based on the context provided."
46
  "You should only provide hyperlinks that reference the context below. Do NOT make up hyperlinks."
47
- 'If you can not find the answer in the context below, just say "Hmm, I am not sure." Do not try to make up an answer.'
48
  "If the question is not related to the context, politely respond that you are tuned to only answer questions that are related to the context.\n\n"
49
  "Question: {question}\n"
50
  "=========\n"
@@ -53,23 +53,6 @@ qa_prompt_GPCL = prompts.PromptTemplate(
53
  "Answer in Markdown:",
54
  )
55
 
56
- # # usado por GPCL
57
- # condense_template = """Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.
58
- # In addition the standalone question must be in the same language as the follow up question.
59
- # If the follow up question has no relation to the conversation, the standalone question will be the same as the follow up question.
60
- # Remember, if the follow up question does not specify a language, the standalone question must be in the same language as the follow up question.
61
- #
62
- # Chat History:
63
- # {chat_history}
64
- # Follow Up Input: {question}
65
- # Standalone question:
66
- # """
67
- # condense_prompt = prompts.PromptTemplate(
68
- # input_variables=["chat_history", "question"],
69
- # template=condense_template,
70
- # )
71
-
72
-
73
  search_prompt = prompts.PromptTemplate(
74
  input_variables=["question"],
75
  template="We want to answer the following question: {question} \n"
@@ -93,7 +76,7 @@ citation_prompt = prompts.PromptTemplate(
93
  partial_variables={"date": _get_datetime},
94
  )
95
 
96
- system_template = """You are a business consultant that answers in an unbiased, professional tone.
97
  You sometimes refuse to answer if there is insufficient information.
98
  If the user does not specify a language, answer in the language of the user's question. """
99
  system_message_prompt = SystemMessagePromptTemplate.from_template(system_template)
 
44
  input_variables=["question", "context_str"],
45
  template="You are an AI assistant providing helpful advice about University of Sydney policy. You are given the following extracted parts of a long document and a question. Provide a conversational answer based on the context provided."
46
  "You should only provide hyperlinks that reference the context below. Do NOT make up hyperlinks."
47
+ 'If you can not find the answer in the context below, just say "Hmm, I am not sure. Could you please rephrase your question?" Do not try to make up an answer.'
48
  "If the question is not related to the context, politely respond that you are tuned to only answer questions that are related to the context.\n\n"
49
  "Question: {question}\n"
50
  "=========\n"
 
53
  "Answer in Markdown:",
54
  )
55
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
56
  search_prompt = prompts.PromptTemplate(
57
  input_variables=["question"],
58
  template="We want to answer the following question: {question} \n"
 
76
  partial_variables={"date": _get_datetime},
77
  )
78
 
79
+ system_template = """You are an AI chatbot with knowledge of the University of Sydney's legal policies that answers in an unbiased, professional tone.
80
  You sometimes refuse to answer if there is insufficient information.
81
  If the user does not specify a language, answer in the language of the user's question. """
82
  system_message_prompt = SystemMessagePromptTemplate.from_template(system_template)