feed back Changes in App.py
app.py
CHANGED
@@ -54,11 +54,12 @@ def setting_api_key(openai_api_key, serp_api_key):
 
 def setup_llm_embed():
     template = """<|system|>
-
+    you are Question answering system based AI, Machine Learning , Deep Learning , Generative AI, Data
+    science, Data Analytics and Mathematics.
+    Mention Clearly Before response "Answer From RAG"
     Please check if the following pieces of context has any mention of the keywords provided
     in the question.Response as much as you could with context you get.
-
-    science and Data Analytics.if the following pieces of Context does not relate to Question,
+    If the following pieces of Context does not relate to Question,
     You must not answer on your own,you don't know the answer.
     </s>
     <|user|>
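The rewritten system prompt names the allowed domains, asks the model to prefix replies with "Answer From RAG", and forbids answering when the retrieved context is unrelated. A minimal sketch of how such a <|system|>/<|user|> template is typically plugged into a LlamaIndex query path; the {context_str}/{query_str} placeholders and the synthesizer wiring are assumptions, not shown in this diff:

# Hedged sketch: wiring a chat-style QA template into LlamaIndex.
# Placeholder names and the exact prompt text are assumed, not taken from app.py.
from llama_index.core import PromptTemplate, get_response_synthesizer
from llama_index.core.query_engine import RetrieverQueryEngine

qa_template = PromptTemplate(
    "<|system|>\n"
    "you are Question answering system based AI, Machine Learning, Deep Learning, "
    "Generative AI, Data science, Data Analytics and Mathematics.\n"
    'Mention Clearly Before response "Answer From RAG".\n'
    "If the context does not relate to the question, you don't know the answer.</s>\n"
    "<|user|>\nContext: {context_str}\nQuestion: {query_str}</s>\n"
    "<|assistant|>\n"
)

def build_query_engine(retriever):
    # The synthesizer substitutes retrieved context into the template before the LLM call.
    synthesizer = get_response_synthesizer(text_qa_template=qa_template)
    return RetrieverQueryEngine(retriever=retriever, response_synthesizer=synthesizer)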
@@ -110,7 +111,7 @@ class CustomRetriever(BaseRetriever):
         self,
         vector_retriever: VectorIndexRetriever,
         keyword_retriever: KeywordTableSimpleRetriever,
-        mode: str = "
+        mode: str = "OR",
     ) -> None:
 
         self._vector_retriever = vector_retriever
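Changing the default mode to "OR" changes how the hybrid retriever merges vector and keyword hits. In the standard LlamaIndex custom-retriever pattern this class appears to follow, "AND" keeps only nodes returned by both retrievers while "OR" keeps the union; a sketch of that merge, mirroring the documented pattern rather than app.py's exact body:

# Hedged sketch of the usual AND/OR merge in a LlamaIndex hybrid retriever;
# app.py's real _retrieve implementation is not shown in this diff.
from llama_index.core import QueryBundle
from llama_index.core.retrievers import BaseRetriever
from llama_index.core.schema import NodeWithScore

class HybridRetrieverSketch(BaseRetriever):
    def __init__(self, vector_retriever, keyword_retriever, mode: str = "OR") -> None:
        if mode not in ("AND", "OR"):
            raise ValueError("mode must be 'AND' or 'OR'")
        self._vector_retriever = vector_retriever
        self._keyword_retriever = keyword_retriever
        self._mode = mode
        super().__init__()

    def _retrieve(self, query_bundle: QueryBundle) -> list[NodeWithScore]:
        vector_nodes = self._vector_retriever.retrieve(query_bundle)
        keyword_nodes = self._keyword_retriever.retrieve(query_bundle)
        vector_ids = {n.node.node_id for n in vector_nodes}
        keyword_ids = {n.node.node_id for n in keyword_nodes}
        combined = {n.node.node_id: n for n in vector_nodes}
        combined.update({n.node.node_id: n for n in keyword_nodes})
        # "AND" keeps only nodes found by both retrievers; "OR" (the new default)
        # keeps the union, trading precision for recall.
        keep = vector_ids & keyword_ids if self._mode == "AND" else vector_ids | keyword_ids
        return [combined[node_id] for node_id in keep]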
@@ -258,6 +259,7 @@ def file_nodes_vector():
         keyword_index = load_index_from_storage(storage_context_)
 
     except FileNotFoundError:
+        st.write("Generating New Embeddings ...")
         documents = SimpleDirectoryReader(input_dir="research_papers/").load_data()
         # LLM and Embedding Model Setup
 
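The added st.write("Generating New Embeddings ...") only runs in the except FileNotFoundError branch, i.e. when no persisted index exists and the papers in research_papers/ have to be re-ingested. A minimal sketch of that load-or-rebuild pattern with LlamaIndex persistence; the helper name and persist_dir value are illustrative, not from app.py:

# Illustrative load-or-rebuild flow; persist_dir is a placeholder.
from llama_index.core import SimpleDirectoryReader, StorageContext, load_index_from_storage

def load_or_ingest(persist_dir: str = "stored_index"):
    try:
        storage_context = StorageContext.from_defaults(persist_dir=persist_dir)
        return load_index_from_storage(storage_context), None
    except FileNotFoundError:
        # No persisted index yet: read the PDFs so the caller can embed,
        # split and index them (the "Generating New Embeddings ..." path).
        documents = SimpleDirectoryReader(input_dir="research_papers/").load_data()
        return None, documents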
@@ -266,10 +268,12 @@ def file_nodes_vector():
         Settings.embed_model = embed_model
 
         # Splitting Nodes
+        st.write("Splitting Content based on semantic !")
         nodes = semantic_split(embed_model, documents)
         ctx_vector = ctx_vector_func(llm, embed_model, nodes)
 
         # Creating Vector index and Keyword Index
+        st.write("Generating Vector Index ...")
 
     vector_index, keyword_index = create_vector_and_keyword_index(nodes, ctx_vector)
     return vector_index, keyword_index
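create_vector_and_keyword_index(nodes, ctx_vector) itself is not shown in this diff. A plausible body, assuming it builds one dense and one keyword index over the semantically split nodes and persists both; the directories and the handling of ctx_vector are assumptions:

# Hypothetical body for create_vector_and_keyword_index; the role of ctx_vector
# (returned by ctx_vector_func) is not visible in this diff, so it is ignored here.
from llama_index.core import SimpleKeywordTableIndex, VectorStoreIndex

def create_vector_and_keyword_index(nodes, ctx_vector=None):
    vector_index = VectorStoreIndex(nodes)           # dense, embedding-based lookup
    keyword_index = SimpleKeywordTableIndex(nodes)   # sparse, exact keyword lookup
    # Persisting both means the FileNotFoundError branch above is skipped next run.
    vector_index.storage_context.persist(persist_dir="stored_vector_index")
    keyword_index.storage_context.persist(persist_dir="stored_keyword_index")
    return vector_index, keyword_index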
@@ -326,11 +330,11 @@ def main():
     st.session_state.key_flag = False
 
     col_left, col_right = st.columns([1, 2])
-    with
+    with col_left:
         st.write("""<h1 style="font-size: 15px;">Enter your OpenAI API key </h1>""", unsafe_allow_html=True)
         openai_api_key = st.text_input(placeholder="OpenAI api key ", label=" ", type="password")
 
-        st.write("""<h1 style="font-size: 15px;">Enter your
+        st.write("""<h1 style="font-size: 15px;">Enter your SerpApi key </h1>""", unsafe_allow_html=True)
         serp_api_key = st.text_input(placeholder="Serp api key ", label=" ", type="password")
 
         set_keys_button = st.button("Set Keys ", type="primary")
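The left column now groups both key inputs under with col_left:. The "Set Keys" button presumably hands the values to setting_api_key (named in the first hunk header) and flips st.session_state.key_flag, which the generation branches below test. A hedged sketch of that wiring; the environment-variable names are a guess, not taken from the diff:

# Assumed wiring for the "Set Keys" button; app.py's setting_api_key body is not
# shown in this diff and the environment-variable names below are a guess.
import os
import streamlit as st

openai_api_key = st.text_input("OpenAI api key", type="password")
serp_api_key = st.text_input("Serp api key", type="password")

if st.button("Set Keys", type="primary") and openai_api_key and serp_api_key:
    os.environ["OPENAI_API_KEY"] = openai_api_key   # consumed by the OpenAI client
    os.environ["SERPAPI_API_KEY"] = serp_api_key    # consumed by the SerpApi search tool
    st.session_state.key_flag = True                # later branches gate generation on this flag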
@@ -356,34 +360,39 @@ def main():
         vector_index, keyword_index = file_nodes_vector()
         response = response_generation(query, st.session_state.cohere_api_key, vector_index, keyword_index)
         if str(response) in ["Empty Response", "RAG Output"] or not response:
-            with st.spinner("Getting Information from Articles
+            with st.spinner("Getting Information from Articles and Research papers ..."):
                 content_ = get_article_and_arxiv_content(query)
                 new_vector_index, new_keyword_index, new_nodes = func_add_new_article_content(content_)
                 response = response_generation(query, st.session_state.cohere_api_key, new_vector_index, new_keyword_index)
-            st.write_stream(str(response))
 
+            if str(response) not in ["Empty Response", "RAG Output"]:
+                st.write_stream(str(response))
+                col1, col2 = st.columns([1, 10])
+                thumps_up_button = col1.button("👍")
+                thumps_down_button = col2.button("👎")
+                if thumps_up_button:
+                    st.write("Thank you for your positive feedback!")
+                    updating_vector(new_nodes, vector_index, keyword_index)
+                elif thumps_down_button:
+                    st.write("""We're sorry , We will improve it.""")
+            else:
+                st.write(f"RAG system Couldn't get the information related to {query}")
+
+        elif response:
+            st.write_stream(str(response))
             col1, col2 = st.columns([1, 10])
             thumps_up_button = col1.button("👍")
             thumps_down_button = col2.button("👎")
             if thumps_up_button:
                 st.write("Thank you for your positive feedback!")
-
-            if thumps_down_button:
-                st.write("""We're sorry , We will improve it.""")
-
-            else:
-                st.write_stream(str(response))
-                col1, col2 = st.columns([1, 10])
-                if col1.button("👍"):
-                    st.write("Thank you for your positive feedback!")
-                if col2.button("👎"):
+            elif thumps_down_button:
                 st.write("We're sorry , We will improve it.")
 
     elif generate_response_button and not str(query) and not st.session_state.key_flag:
         st.warning("Please set the necessary API keys and Enter the query")
 
     elif generate_response_button and str(query) and not st.session_state.key_flag:
-        st.warning("Please set the necessary API keys
+        st.warning("Please set the necessary API keys")
 
     elif generate_response_button and st.session_state.key_flag and not str(query):
         st.warning("Please Enter the query !")
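The restructured branch streams the freshly generated answer, offers 👍/👎 feedback, and on a 👍 for newly fetched content calls updating_vector(new_nodes, vector_index, keyword_index) so the validated nodes are kept for later queries. updating_vector itself is not shown in this diff; a plausible sketch using LlamaIndex's insert_nodes plus re-persisting, with the helper body and directories being assumptions:

# Hypothetical body for updating_vector; insert_nodes and persist are standard
# LlamaIndex calls, but the real implementation and persist directories are assumed.
def updating_vector(new_nodes, vector_index, keyword_index):
    # Fold the freshly fetched article/arXiv nodes into both indexes in place.
    vector_index.insert_nodes(new_nodes)
    keyword_index.insert_nodes(new_nodes)
    # Re-persist so the next run's load_index_from_storage already sees them.
    vector_index.storage_context.persist(persist_dir="stored_vector_index")
    keyword_index.storage_context.persist(persist_dir="stored_keyword_index")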