starsaround committed on
Commit
00628ec
1 Parent(s): 32f419d

Update app.py

Browse files

Deal with text length.

Files changed (1) hide show
  1. app.py +19 -18
app.py CHANGED
@@ -45,6 +45,8 @@ from langchain.agents.agent_types import AgentType
45
  from langchain.tools import WikipediaQueryRun
46
  from langchain.utilities import WikipediaAPIWrapper
47
  from langchain.tools import DuckDuckGoSearchRun
 
 
48
 
49
  provider_dict = {
50
  'Ails': Ails,
@@ -125,31 +127,31 @@ def bot(history, model_name, provider_name, system_msg, agent):
125
  return str(response)
126
  else:
127
  new_template = template.format(system_instruction=system_msg)
128
- prompt = PromptTemplate(
129
- input_variables=["chat_history", "human_input"], template=new_template
130
- )
131
 
132
- prev_memory = ''
133
  if len(history)>1 and history[-2][1]!=None:
134
- memory.chat_memory.add_user_message(history[-2][0])
135
  memory.chat_memory.add_ai_message(history[-2][1])
 
 
 
 
 
 
 
136
  prev_memory = memory.load_memory_variables({})['chat_history']
137
-
138
- prompt = new_template.format(
139
- chat_history = prev_memory,
140
- human_input = message
141
- )
142
-
143
- bot_msg = llm._call(prompt=prompt)
144
 
145
- for c in bot_msg:
146
- history[-1][1] += c
147
- yield history
148
 
149
  def empty_fn():
150
  global memory
151
- memory = ConversationBufferWindowMemory(k=10, memory_key="chat_history")
152
- return None
153
 
154
  def undo_fn(history):
155
  return history[:-1]
@@ -180,7 +182,6 @@ with gr.Blocks() as demo:
180
  ```
181
  The following is a conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
182
  {{chat_history}}
183
- Human: {{human_input}}
184
  AI:"""
185
 
186
  memory = ConversationBufferWindowMemory(k=6, memory_key="chat_history")
 
45
  from langchain.tools import WikipediaQueryRun
46
  from langchain.utilities import WikipediaAPIWrapper
47
  from langchain.tools import DuckDuckGoSearchRun
48
+ from models_for_langchain.memory_func import validate_memory_len
49
+
50
 
51
  provider_dict = {
52
  'Ails': Ails,
 
127
  return str(response)
128
  else:
129
  new_template = template.format(system_instruction=system_msg)
 
 
 
130
 
 
131
  if len(history)>1 and history[-2][1]!=None:
 
132
  memory.chat_memory.add_ai_message(history[-2][1])
133
+ memory.chat_memory.add_user_message(history[-1][0])
134
+ validate_memory_len(memory=memory, max_token_limit=1800)
135
+ if len(memory.chat_memory.messages)==0:
136
+ for c in '文本长度超过限制,请清空后再试':
137
+ history[-1][1] += c
138
+ yield history
139
+ else:
140
  prev_memory = memory.load_memory_variables({})['chat_history']
141
+ prompt = new_template.format(
142
+ chat_history = prev_memory,
143
+ )
144
+ # print(f'prompt = \n --------\n{prompt}\n --------')
145
+ bot_msg = llm._call(prompt=prompt)
 
 
146
 
147
+ for c in bot_msg:
148
+ history[-1][1] += c
149
+ yield history
150
 
151
  def empty_fn():
152
  global memory
153
+ memory = ConversationBufferWindowMemory(k=6, memory_key="chat_history")
154
+ return [[None, None]]
155
 
156
  def undo_fn(history):
157
  return history[:-1]
 
182
  ```
183
  The following is a conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
184
  {{chat_history}}
 
185
  AI:"""
186
 
187
  memory = ConversationBufferWindowMemory(k=6, memory_key="chat_history")