OuroborosM committed on
Commit
d3ca070
·
1 Parent(s): ac9ad2c

add memory

Browse files
Files changed (1) hide show
  1. app.py +5 -3
app.py CHANGED
@@ -17,6 +17,7 @@ from langchain.tools import DuckDuckGoSearchRun
17
  from langchain.utilities import WikipediaAPIWrapper
18
  from langchain.python import PythonREPL
19
  from langchain.chains import LLMMathChain
 
20
  import azure.cognitiveservices.speech as speechsdk
21
  import requests
22
 
@@ -47,7 +48,7 @@ from langchain.text_splitter import RecursiveCharacterTextSplitter
47
  from langchain.docstore.document import Document
48
 
49
 
50
-
51
  # Custom document loaders
52
  class MyElmLoader(UnstructuredEmailLoader):
53
  """Wrapper to fallback to text/plain when default does not work"""
@@ -384,6 +385,7 @@ Thought: I now know the final answer
384
  Final Answer: the final answer to the original input question"""
385
 
386
  SUFFIX = """Begin!
 
387
  Question: {input}
388
  Thought:{agent_scratchpad}"""
389
 
@@ -401,8 +403,8 @@ agent = initialize_agent(tools, llm,
401
  }
402
  )
403
 
404
- # print(agent.agent.llm_chain.prompt.template)
405
- print(agent.agent.llm_chain.prompt)
406
 
407
  global vectordb
408
  vectordb = Chroma(persist_directory='db', embedding_function=embeddings)
 
17
  from langchain.utilities import WikipediaAPIWrapper
18
  from langchain.python import PythonREPL
19
  from langchain.chains import LLMMathChain
20
+ from langchain.memory import ConversationBufferMemory
21
  import azure.cognitiveservices.speech as speechsdk
22
  import requests
23
 
 
48
  from langchain.docstore.document import Document
49
 
50
 
51
+ memory = ConversationBufferMemory(memory_key="chat_history")
52
  # Custom document loaders
53
  class MyElmLoader(UnstructuredEmailLoader):
54
  """Wrapper to fallback to text/plain when default does not work"""
 
385
  Final Answer: the final answer to the original input question"""
386
 
387
  SUFFIX = """Begin!
388
+ {chat_history}
389
  Question: {input}
390
  Thought:{agent_scratchpad}"""
391
 
 
403
  }
404
  )
405
 
406
+ print(agent.agent.llm_chain.prompt.template)
407
+ # print(agent.agent.llm_chain.prompt)
408
 
409
  global vectordb
410
  vectordb = Chroma(persist_directory='db', embedding_function=embeddings)