LordFarquaad42 committed
Commit 7665c32 · 1 Parent(s): 3f7983f

abstracted

Files changed (2)
  1. functions/gptResponse.py +2 -1
  2. pages/ragChat.py +38 -12
functions/gptResponse.py CHANGED
@@ -8,7 +8,8 @@ load_dotenv()
 openai_key = os.getenv(
     "OPENAI_API_KEY"
 )  # may wanna ask user for this or handle error when its not there
-
+# if not openai_key:
+#     raise ValueError("OpenAI API key not found in environment variables.")
 
 def get_response(user_query, chat_history, context):
     template = """
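The new guard is committed commented out. A minimal sketch of what it would look like enabled, assuming the module should simply fail fast when the variable is absent (prompting the user for a key, as the inline comment suggests, is not implemented here):

    import os
    from dotenv import load_dotenv

    load_dotenv()

    openai_key = os.getenv("OPENAI_API_KEY")
    # Fail fast so a missing key does not surface later as an opaque OpenAI API error.
    if not openai_key:
        raise ValueError("OpenAI API key not found in environment variables.")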
pages/ragChat.py CHANGED
@@ -6,22 +6,25 @@ from functions.web_chain import vectorize, loadUrlData, get_pdf_text
 import asyncio
 
 
-async def main():
-    sidebar()
+async def add_data():
     st.title("Upload Data")
 
     uploaded_files = st.file_uploader("Upload PDFs", accept_multiple_files=True)
-    st.warning("If you plan to add more files, after processing initial files, make sure the uploaded files you already processed are removed")
+    st.warning(
+        "If you plan to add more files, after processing initial files, make sure the uploaded files you already processed are removed"
+    )
     url = st.text_input("Enter a website link")
     if "button_pressed" not in st.session_state:
         st.session_state.button_pressed = False
-    if st.button('Process URL and Files'):
+    if st.button("Process URL and Files"):
         st.session_state.button_pressed = True
         with st.spinner("Vectorizing Data, wait times vary depending on size..."):
             if url:
                 try:
                     if "retriever" not in st.session_state:
-                        st.session_state.retriever = vectorize(loadUrlData(url), "document")
+                        st.session_state.retriever = vectorize(
+                            loadUrlData(url), "document"
+                        )
                 except Exception as e:
                     st.error(f"Failed to load URL: {e}")
             if uploaded_files:
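The 'if "retriever" not in st.session_state' check matters because Streamlit re-runs the whole script on every widget interaction; stashing the vector store in session state means the URL is vectorized only once per session. A rough sketch of that caching pattern, where build_retriever is an illustrative stand-in for the repo's vectorize(loadUrlData(url), "document") call:

    import streamlit as st

    def build_retriever(url: str):
        # Stand-in for the expensive vectorize(loadUrlData(url), "document") step.
        ...

    url = st.text_input("Enter a website link")
    if st.button("Process URL and Files") and url:
        # Later reruns skip this branch and reuse the cached object.
        if "retriever" not in st.session_state:
            st.session_state.retriever = build_retriever(url)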
@@ -37,10 +40,15 @@ async def main():
                 except Exception as e:
                     st.error(f"Failed to load PDF: {e}")
         st.success("Data is ready to be queried!")
-
+        st.session_state.data_hungry = False
+
+
+async def rag_chat():
     if st.session_state.button_pressed:
         if "chat_history" not in st.session_state:
-            st.session_state.chat_history = [AIMessage(content="Hello, I am a bot. How can I help you?")]
+            st.session_state.chat_history = [
+                AIMessage(content="Hello, I am a bot. How can I help you?")
+            ]
 
         st.title("RAG CHAT")
         for message in st.session_state.chat_history:
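chat_history is seeded with a single AIMessage greeting and then replayed on every rerun by the loop that closes this hunk. A short sketch of that replay, assuming the message classes come from langchain_core.messages (the actual import path in this repo may differ):

    import streamlit as st
    from langchain_core.messages import AIMessage, HumanMessage

    if "chat_history" not in st.session_state:
        st.session_state.chat_history = [
            AIMessage(content="Hello, I am a bot. How can I help you?")
        ]

    # Replay the stored conversation on each rerun, picking the avatar by message type.
    for message in st.session_state.chat_history:
        role = "AI" if isinstance(message, AIMessage) else "Human"
        with st.chat_message(role):
            st.write(message.content)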
@@ -57,23 +65,41 @@ async def main():
             with st.chat_message("Human"):
                 st.write(user_query)
 
-            if 'retriever' in st.session_state:
+            if "retriever" in st.session_state:
                 try:
-                    ragAnswer = await st.session_state.retriever.amax_marginal_relevance_search(user_query, k=4, fetch_k=10)
+                    ragAnswer = (
+                        await st.session_state.retriever.amax_marginal_relevance_search(
+                            user_query, k=4, fetch_k=10
+                        )
+                    )
                     context = []
                     for i, doc in enumerate(ragAnswer):
                         print(f"{i}: {doc.page_content}")
                         context.append(doc.page_content)
                     with st.spinner("Generating Response"):
-                        response = get_response(user_query, st.session_state.chat_history, context)
+                        response = get_response(
+                            user_query, st.session_state.chat_history, context
+                        )
                         if response:
-                            st.session_state.chat_history.append(AIMessage(content=response))
+                            st.session_state.chat_history.append(
+                                AIMessage(content=response)
+                            )
                             with st.chat_message("AI"):
                                 st.write(response)
                         else:
-                            st.write("No response received.")
+                            st.write("No response received.")
                 except Exception as e:
                     st.error(f"Error during retrieval or response generation: {e}")
 
+
+async def main():
+    st.session_state.data_hungry = st.toggle("Add Custom Data", False)
+
+    if(st.session_state.data_hungry):
+        await add_data()
+    else:
+        await rag_chat()
+
 if __name__ == "__main__":
+    sidebar()
     asyncio.run(main())
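In rag_chat() the cached object is queried with amax_marginal_relevance_search, which in LangChain is a method on the vector store itself (MMR search that trades pure relevance for diversity among the returned chunks), so the "retriever" name is best read loosely. A condensed sketch of the retrieve-then-generate step, where answer is a hypothetical helper and get_response is the function from functions/gptResponse.py:

    from functions.gptResponse import get_response

    async def answer(vector_store, chat_history, user_query: str) -> str:
        # MMR: pick k=4 chunks out of fetch_k=10 candidates, balancing relevance and diversity.
        docs = await vector_store.amax_marginal_relevance_search(user_query, k=4, fetch_k=10)
        context = [doc.page_content for doc in docs]
        return get_response(user_query, chat_history, context)

The new main() then only routes between add_data() and rag_chat() via the "Add Custom Data" toggle, which is the abstraction the commit message refers to.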
 