Asaad Almutareb commited on
Commit
5604c54
1 Parent(s): 1105e95

Moved the db.add call for agent-memory storage into add_text so the stored record can include the sources

Browse files
app.py CHANGED
@@ -40,8 +40,6 @@ def initialize_chroma_db() -> Chroma:
40
 
41
  if __name__ == "__main__":
42
 
43
- current_id = generate_uuid()
44
-
45
  db = initialize_chroma_db()
46
 
47
  def add_text(history, text):
@@ -52,6 +50,17 @@ if __name__ == "__main__":
52
  response = infer(history[-1][0], history)
53
  sources = extract_urls(all_sources)
54
  src_list = '\n'.join(sources)
 
 
 
 
 
 
 
 
 
 
 
55
  response_w_sources = response['output']+"\n\n\n Sources: \n\n\n"+src_list
56
  history[-1][1] = response_w_sources
57
  return history
@@ -63,19 +72,7 @@ if __name__ == "__main__":
63
  "input": question,
64
  "chat_history": history
65
  }
66
- )
67
-
68
- db.add(
69
- ids=[current_id],
70
- documents=[result['output']],
71
- metadatas=[
72
- {
73
- "query":query,
74
- "sources":result['sources'].__str__()
75
- }
76
- ]
77
- )
78
-
79
  return result
80
 
81
  def vote(data: gr.LikeData):
 
40
 
41
  if __name__ == "__main__":
42
 
 
 
43
  db = initialize_chroma_db()
44
 
45
  def add_text(history, text):
 
50
  response = infer(history[-1][0], history)
51
  sources = extract_urls(all_sources)
52
  src_list = '\n'.join(sources)
53
+ current_id = generate_uuid()
54
+ db.add(
55
+ ids=[current_id],
56
+ documents=[response['output']],
57
+ metadatas=[
58
+ {
59
+ "query":history[-1][0],
60
+ "sources":src_list
61
+ }
62
+ ]
63
+ )
64
  response_w_sources = response['output']+"\n\n\n Sources: \n\n\n"+src_list
65
  history[-1][1] = response_w_sources
66
  return history
 
72
  "input": question,
73
  "chat_history": history
74
  }
75
+ )
 
 
 
 
 
 
 
 
 
 
 
 
76
  return result
77
 
78
  def vote(data: gr.LikeData):
hf_mixtral_agent.py CHANGED
@@ -71,7 +71,7 @@ agent_executor = AgentExecutor(
71
  agent=agent,
72
  tools=tools,
73
  verbose=True,
74
- max_iterations=6, # cap number of iterations
75
  #max_execution_time=60, # timout at 60 sec
76
  return_intermediate_steps=True,
77
  handle_parsing_errors=True,
 
71
  agent=agent,
72
  tools=tools,
73
  verbose=True,
74
+ max_iterations=10, # cap number of iterations
75
  #max_execution_time=60, # timout at 60 sec
76
  return_intermediate_steps=True,
77
  handle_parsing_errors=True,
innovation_pathfinder_ai/utils/utils.py CHANGED
@@ -32,7 +32,7 @@ def create_wikipedia_urls_from_text(text):
32
  # Construct the URL and add it to the list
33
  url = f"https://en.wikipedia.org/wiki/{url_title}"
34
  urls.append(url)
35
- print(urls)
36
 
37
  return urls
38
 
 
32
  # Construct the URL and add it to the list
33
  url = f"https://en.wikipedia.org/wiki/{url_title}"
34
  urls.append(url)
35
+ #print(urls)
36
 
37
  return urls
38