Spaces: Runtime error
gabrielaltay committed
Commit • 385ebea
1 parent: 7333f5e

trace

Files changed:
- .gitignore  +2 -1
- app.py  +34 -4
.gitignore CHANGED

@@ -1 +1,2 @@
-.streamlit
+.streamlit
+test.py
app.py CHANGED

@@ -1,5 +1,6 @@
 from collections import defaultdict
 import json
+import os
 import re
 
 from langchain_core.documents import Document
@@ -17,8 +18,12 @@ import streamlit as st
 
 
 st.set_page_config(layout="wide", page_title="LegisQA")
-SS = st.session_state
 
+os.environ["LANGCHAIN_API_KEY"] = st.secrets["langchain_api_key"]
+os.environ["LANGCHAIN_TRACING_V2"] = "true"
+os.environ["LANGCHAIN_PROJECT"] = st.secrets["langchain_project"]
+
+SS = st.session_state
 SEED = 292764
 CONGRESS_NUMBERS = [113, 114, 115, 116, 117, 118]
 SPONSOR_PARTIES = ["D", "R", "L", "I"]
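
The three environment variables added above are the standard switches for LangSmith tracing in LangChain, which matches the commit message "trace". A minimal sketch of the same setup outside Streamlit, with placeholder values instead of the app's st.secrets entries (which live in the gitignored .streamlit/secrets.toml):

import os

os.environ["LANGCHAIN_TRACING_V2"] = "true"              # enable tracing for every chain run
os.environ["LANGCHAIN_API_KEY"] = "<langsmith-api-key>"  # placeholder; the app reads st.secrets["langchain_api_key"]
os.environ["LANGCHAIN_PROJECT"] = "<project-name>"       # placeholder; runs are grouped under this project

# Any chain invoked after this point is logged to the named LangSmith project.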
@@ -95,13 +100,12 @@ def load_bge_embeddings():
 
 def load_pinecone_vectorstore():
     emb_fn = load_bge_embeddings()
-    pc = Pinecone(api_key=st.secrets["pinecone_api_key"])
-    index = pc.Index(st.secrets["pinecone_index_name"])
     vectorstore = PineconeVectorStore(
-        index=index,
         embedding=emb_fn,
         text_key="text",
         distance_strategy=DistanceStrategy.COSINE,
+        pinecone_api_key=st.secrets["pinecone_api_key"],
+        index_name=st.secrets["pinecone_index_name"],
     )
     return vectorstore
 
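
The load_pinecone_vectorstore() change drops the hand-built Pinecone client and Index and instead passes pinecone_api_key and index_name straight to PineconeVectorStore, which opens the connection itself. A minimal sketch of the two styles, with placeholder credentials and an embedding function standing in for load_bge_embeddings():

from langchain_pinecone import PineconeVectorStore

emb_fn = ...  # in app.py this is the BGE embedding function from load_bge_embeddings()

# before this commit: build the client and Index explicitly, then wrap the Index
# pc = Pinecone(api_key="<pinecone-api-key>")
# index = pc.Index("<index-name>")
# vectorstore = PineconeVectorStore(index=index, embedding=emb_fn, text_key="text")

# after this commit: hand over the key and index name and let the wrapper connect
vectorstore = PineconeVectorStore(
    embedding=emb_fn,
    text_key="text",
    pinecone_api_key="<pinecone-api-key>",  # st.secrets["pinecone_api_key"] in the app
    index_name="<index-name>",              # st.secrets["pinecone_index_name"] in the app
)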
@@ -417,16 +421,42 @@ with query_tab:
         search_kwargs={"k": SS["n_ret_docs"], "filter": vs_filter},
     )
     prompt = PromptTemplate.from_template(SS["prompt_template"])
+
+    # # takes in a dict. adds context key with formatted docs
+    # rag_chain_from_docs = (
+    #     RunnablePassthrough.assign(context=(lambda x: format_docs(x["context"])))
+    #     | prompt
+    #     | llm
+    #     | StrOutputParser()
+    # )
+
+    # # takes in a query string.
+    # # passes to retriever and passthru
+    # # assign answer
+    # rag_chain_with_source = RunnableParallel(
+    #     {"context": retriever, "question": RunnablePassthrough()}
+    # ).assign(answer=rag_chain_from_docs)
+
+
+    # takes in a dict. adds context key with formatted docs
     rag_chain_from_docs = (
         RunnablePassthrough.assign(context=(lambda x: format_docs(x["context"])))
         | prompt
         | llm
         | StrOutputParser()
     )
+
+    # takes in a query string.
+    # passes to retriever and passthru
+    # assign answer
     rag_chain_with_source = RunnableParallel(
         {"context": retriever, "question": RunnablePassthrough()}
     ).assign(answer=rag_chain_from_docs)
 
+
+
+    print(rag_chain_with_source)
+
     with get_openai_callback() as cb:
         SS["out"] = rag_chain_with_source.invoke(SS["query"])
         SS["cb"] = cb
|