Sean-Case committed
Commit ed48a70
1 Parent(s): 71c040a

Disabled chat memory as I think it is not needed for most user queries (max_memory_length = 0)

Files changed: chatfuncs/chatfuncs.py (+11 -9)
chatfuncs/chatfuncs.py  CHANGED

@@ -54,6 +54,8 @@ PandasDataFrame = TypeVar('pd.core.frame.DataFrame')
 embeddings = None # global variable setup
 vectorstore = None # global variable setup
 
+max_memory_length = 0 # How long should the memory of the conversation last?
+
 full_text = "" # Define dummy source text (full text) just to enable highlight function to load
 
 ctrans_llm = [] # Define empty list to hold CTrans LLMs for functions to run
@@ -196,7 +198,7 @@ def create_prompt_templates():
 
 def adapt_q_from_chat_history(question, chat_history, extracted_memory, keyword_model=""):#keyword_model): # new_question_keywords,
 
-    chat_history_str, chat_history_first_q, chat_history_first_ans, max_chat_length = _get_chat_history(chat_history)
+    chat_history_str, chat_history_first_q, chat_history_first_ans, max_memory_length = _get_chat_history(chat_history)
 
     if chat_history_str:
         # Keyword extraction is now done in the add_inputs_to_history function
@@ -838,13 +840,13 @@ def clear_chat(chat_history_state, sources, chat_message, current_topic):
 
     return chat_history_state, sources, chat_message, current_topic
 
-def _get_chat_history(chat_history: List[Tuple[str, str]], max_chat_length:int =
+def _get_chat_history(chat_history: List[Tuple[str, str]], max_memory_length:int = max_memory_length): # Limit to last x interactions only
 
-    if not chat_history:
+    if (not chat_history) | (max_memory_length == 0):
         chat_history = []
 
-    if len(chat_history) > max_chat_length:
-        chat_history = chat_history[-max_chat_length:]
+    if len(chat_history) > max_memory_length:
+        chat_history = chat_history[-max_memory_length:]
 
     #print(chat_history)
 
@@ -863,17 +865,17 @@ def _get_chat_history(chat_history: List[Tuple[str, str]], max_chat_length:int =
         ai = f"Assistant: " + ai_s
         conversation += "\n" + "\n".join([human, ai])
 
-    return conversation, first_q, first_ans, max_chat_length
+    return conversation, first_q, first_ans, max_memory_length
 
 def add_inputs_answer_to_history(user_message, history, current_topic):
 
     #history.append((user_message, [-1]))
 
-    chat_history_str, chat_history_first_q, chat_history_first_ans, max_chat_length = _get_chat_history(history)
+    chat_history_str, chat_history_first_q, chat_history_first_ans, max_memory_length = _get_chat_history(history)
 
 
-    # Only get the keywords for the first question and response, or do it every time if over 'max_chat_length' responses in the conversation
-    if (len(history) == 1) | (len(history) > max_chat_length):
+    # Only get the keywords for the first question and response, or do it every time if over 'max_memory_length' responses in the conversation
+    if (len(history) == 1) | (len(history) > max_memory_length):
 
         #print("History after appending is:")
         #print(history)
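
The practical effect of the new default is easiest to see outside the diff. The sketch below reproduces the truncation logic of _get_chat_history as shown above; the conversation-building loop and the first question/answer bookkeeping are simplified stand-ins rather than the repository's exact code. With max_memory_length = 0, the history is emptied before it is formatted, so adapt_q_from_chat_history and add_inputs_answer_to_history receive an empty conversation string and no prior context.

# Minimal sketch of the memory-truncation behaviour, based on the diff above.
# The loop body is a simplified stand-in for the full chatfuncs.py function.
from typing import List, Tuple

max_memory_length = 0  # 0 = keep no previous turns, i.e. chat memory disabled

def _get_chat_history(chat_history: List[Tuple[str, str]], max_memory_length: int = max_memory_length):
    # With memory disabled (or no history at all), treat the conversation as empty
    if (not chat_history) | (max_memory_length == 0):
        chat_history = []

    # Otherwise keep only the last `max_memory_length` question/answer pairs
    if len(chat_history) > max_memory_length:
        chat_history = chat_history[-max_memory_length:]

    conversation, first_q, first_ans = "", "", ""
    for i, (human_s, ai_s) in enumerate(chat_history):
        if i == 0:
            first_q, first_ans = human_s, ai_s
        human = "Human: " + human_s
        ai = "Assistant: " + ai_s
        conversation += "\n" + "\n".join([human, ai])

    return conversation, first_q, first_ans, max_memory_length

# With the default of 0 the conversation string comes back empty, so the
# question-adaptation step sees no prior context:
print(_get_chat_history([("What is the leave policy?", "The policy states...")]))
# -> ('', '', '', 0)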