jonathanjordan21 committed
Commit 989b6d4
1 Parent(s): d5ef1c5

Update custom_llm.py

Files changed (1):
custom_llm.py +4 -3
custom_llm.py CHANGED
@@ -47,11 +47,11 @@ def custom_chain_with_history(llm, memory):
     <|you|>
     """)
 
-    def prompt_memory(_):
+    def prompt_memory(memory):
         t = ""
         # for x in memory.chat_memory.messages:
         for x in memory.messages:
-            t += f"<|assistant|>\n<s>{x.content}</s>\n" if type(x) is AIMessage else f"<|user|>\n{x.content}\n"
+            t += f"<|you|>\n<s>{x.content}</s>\n" if type(x) is AIMessage else f"<|user|>\n{x.content}\n"
         return "" if len(t) == 0 else t
 
     def format_docs(docs):
@@ -66,7 +66,8 @@ def custom_chain_with_history(llm, memory):
     # ]
     # )
 
-    return {"chat_history":prompt_memory, "context":create_vectorstore().as_retriever(search_type="similarity", search_kwargs={"k": 8}) | format_docs, "question": RunnablePassthrough()} | prompt | llm
+    # return {"chat_history":lambda x:, "context":create_vectorstore().as_retriever(search_type="similarity", search_kwargs={"k": 8}) | format_docs, "question": RunnablePassthrough()} | prompt | llm
+    return {"chat_history":lambda x:prompt_memory(x['memory']), "context":itemgetter("question") | create_vectorstore().as_retriever(search_type="similarity", search_kwargs={"k": 8}) | format_docs, "question": lambda x:x['question']} | prompt | llm
 
 class CustomLLM(LLM):
  repo_id : str
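
With this change the chain is invoked with a dict rather than a bare question string: "question" is routed by itemgetter("question") into the retriever and passed through to the prompt, while "memory" is rendered into <|you|>/<|user|> turns by prompt_memory. A minimal usage sketch follows, assuming this repo's custom_llm module and a ChatMessageHistory as the memory object (anything exposing a .messages list would do); the CustomLLM constructor arguments are hypothetical and none of this setup is part of the commit itself:

from langchain_community.chat_message_histories import ChatMessageHistory

from custom_llm import CustomLLM, custom_chain_with_history  # this repo's module

# Any object exposing a .messages list of AIMessage/HumanMessage works here,
# because prompt_memory(x["memory"]) only iterates over memory.messages.
memory = ChatMessageHistory()
memory.add_user_message("What does the starter plan include?")
memory.add_ai_message("It includes the core API with a monthly request quota.")

llm = CustomLLM(repo_id="mistralai/Mistral-7B-Instruct-v0.2")  # hypothetical repo id; the class may require more fields
chain = custom_chain_with_history(llm, memory)

# The chain now expects both keys in a single dict; before this commit the
# question was passed alone via RunnablePassthrough().
answer = chain.invoke({"question": "How much does the pro plan cost?", "memory": memory})
print(answer)

Keying the "context" branch off itemgetter("question") is what lets one input dict drive both the retriever and the history formatting, instead of the whole payload being passed straight to the retriever.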