bhulston committed on
Commit 9f17ce8
1 Parent(s): fe4ae60

Update app.py

Files changed (1)
  1. app.py +14 -17
app.py CHANGED
@@ -52,7 +52,7 @@ days = st.multiselect("What days are you free?",
     options = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat"],
     default = None,
     placeholder = "Any day"
- )
+ )

 assistant = st.chat_message("assistant")
 initial_message = "Hello, I am your GPT-powered USC Class Helper! How can I assist you today?"
@@ -96,7 +96,9 @@ def get_rag_results(prompt):
     )

     response, additional_metadata = clean_pinecone(response)
+    print("Here is the pinecone response:", response)
     response = reranker(query, response) # BERT cross encoder for ranking
+    print("Here is the response from the reranker", response)

     return response, additional_metadata
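The reranker(query, response) helper is only referenced in this hunk; per the inline comment it is a BERT cross encoder. For orientation, a minimal cross-encoder reranking step with the sentence-transformers library looks like the sketch below. The function name, the model choice, and the assumption that candidates arrive as plain strings are illustrative, not taken from this repository.

from sentence_transformers import CrossEncoder

def rerank_sketch(query, candidates):
    # Score each (query, candidate) pair with a pretrained cross encoder,
    # then return the candidates ordered from most to least relevant.
    model = CrossEncoder("cross-encoder/ms-marco-MiniLM-L-6-v2")
    scores = model.predict([(query, text) for text in candidates])
    ranked = sorted(zip(candidates, scores), key=lambda pair: pair[1], reverse=True)
    return [text for text, _ in ranked]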
 
@@ -122,22 +124,17 @@ if prompt := st.chat_input("What kind of class are you looking for?"):
                 for m in st.session_state.messages]
     message_history = " ".join([message["content"] for message in messages])

-    route = routing_agent(prompt, OPENAI_API, message_history)
-
-    if route == "1":
-        ## Option for accessing Vector DB
-        rag_response, additional_metadata = get_rag_results(prompt)
-        result_query = 'Original Query:' + prompt + 'Query Results:' + str(rag_response) + '\n Additional Class Times:' + str(additional_metadata)
-        assistant_response = results_agent(result_query, OPENAI_API)
-    else:
-        ## Option if not accessing Database
-        assistant_response = openai.ChatCompletion.create(
-            model = "gpt-4",
-            messages = [
-                {"role": m["role"], "content": m["content"]}
-                for m in st.session_state.messages
-            ]
-        )["choices"][0]["message"]["content"]
+    rag_response, additional_metadata = get_rag_results(prompt)
+    result_query = 'Original Query:' + prompt + '\nQuery Results:' + str(rag_response) + '\nMessage History:' + str(message_history)
+    # '\n Additional Class Times:' + str(additional_metadata)
+    assistant_response = results_agent(result_query, OPENAI_API)
+    # assistant_response = openai.ChatCompletion.create(
+    #     model = "gpt-4",
+    #     messages = [
+    #         {"role": m["role"], "content": m["content"]}
+    #         for m in st.session_state.messages
+    #     ]
+    # )["choices"][0]["message"]["content"]

     ## Display response regardless of route
     for chunk in assistant_response.split():
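The block left commented out in this hunk calls the legacy openai.ChatCompletion.create interface from the pre-1.0 openai package. If that fallback were ever re-enabled against the current client (openai >= 1.0), the roughly equivalent call is sketched below; OPENAI_API and st.session_state.messages are taken from the surrounding app.py, and the client and variable names here are illustrative.

import streamlit as st
from openai import OpenAI

# Rough equivalent of the commented-out fallback using the openai >= 1.0 client.
# OPENAI_API is defined earlier in app.py and holds the API key.
client = OpenAI(api_key=OPENAI_API)
completion = client.chat.completions.create(
    model="gpt-4",
    messages=[
        {"role": m["role"], "content": m["content"]}
        for m in st.session_state.messages
    ],
)
assistant_response = completion.choices[0].message.content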
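The final context line cuts off at the start of the display loop, whose body is outside this hunk. In Streamlit chat apps, a word-by-word display of assistant_response is commonly written as in the sketch below; the st.empty() placeholder, the cursor character, and the 0.05 s delay are assumptions for illustration, not code from this repository.

import time
import streamlit as st

# Typical word-by-word rendering pattern; assistant_response is produced
# earlier in app.py, and the actual loop body is not shown in this diff.
message_placeholder = st.empty()
full_response = ""
for chunk in assistant_response.split():
    full_response += chunk + " "
    time.sleep(0.05)
    message_placeholder.markdown(full_response + "▌")
message_placeholder.markdown(full_response)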