rajesh1729 committed
Commit • d0e6488
1 Parent(s): 226a55c
Update app.py

app.py CHANGED
@@ -16,20 +16,29 @@ if "chain" not in st.session_state:
 
 def create_sidebar():
     with st.sidebar:
-        st.title("PDF Chat")
-
-        api_key = st.text_input("OpenAI API Key:", type="password")
+        st.title("🤖 PDF Chat")
+
+        api_key = st.text_input("OpenAI API Key:", type="password", help="Get your API key from OpenAI website")
+
         st.markdown("""
-        ###
-
+        ### What is this?
+        A simple app that lets you chat with your PDF files using GPT and RAG.
+
+        ### How to use
+        1. Paste your OpenAI API key
+        2. Upload PDF file(s)
+        3. Click 'Process PDFs'
+        4. Start asking questions!
+
+        ### Built using
         - LangChain
+        - OpenAI
         - FAISS
+        - Streamlit
 
-
-        1. Add API key
-        2. Upload PDF
-        3. Chat!
+        Made with ❤️
         """)
+
         return api_key
 
 def process_pdfs(papers, api_key):
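For quick experimentation, here is a minimal standalone sketch of how the revised sidebar is typically wired up. The session-state bootstrap is inferred from the hunk's context line (if "chain" not in st.session_state:) and the gating on the returned key mirrors main(); it is an illustration, not code from this commit.

    import streamlit as st

    # Assumed bootstrap, based on the hunk's context line: the app keeps the
    # retrieval chain and the chat history in st.session_state across reruns.
    if "chain" not in st.session_state:
        st.session_state.chain = None
    if "messages" not in st.session_state:
        st.session_state.messages = []

    def create_sidebar():
        with st.sidebar:
            st.title("PDF Chat")
            api_key = st.text_input("OpenAI API Key:", type="password",
                                    help="Get your API key from OpenAI website")
            st.markdown("Paste your key, upload PDFs, click 'Process PDFs', then ask away.")
            return api_key

    # main() gates the rest of the app on the returned key.
    api_key = create_sidebar()
    if not api_key:
        st.warning("Please enter your OpenAI API key in the sidebar to start")
        st.stop()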
@@ -85,24 +94,42 @@ def main():
     st.set_page_config(page_title="PDF Chat")
 
     api_key = create_sidebar()
-
-    if not api_key:
-        st.warning("Please enter your OpenAI API key")
-        return
 
-    st.title("Chat with
+    st.title("💬 Chat with your PDFs")
+    st.markdown("""
+    ### 👋 Hey there!
+    This is a simple demo showing how to chat with your PDF documents using GPT and RAG (Retrieval Augmented Generation).
+
+    #### Try it out:
+    - Upload one or more PDFs
+    - Ask questions about their content
+    - The app will use RAG to find relevant info and answer your questions
+    """)
+
+    st.divider()
 
-
+    # File uploader with custom styling
+    st.markdown("### 📄 Upload your documents")
+    papers = st.file_uploader("Choose PDF files", type=["pdf"], accept_multiple_files=True)
 
     if papers:
+        st.markdown(f"*{len(papers)} files uploaded*")
         if st.button("Process PDFs"):
             process_pdfs(papers, api_key)
 
+    st.divider()
+
+    if not api_key:
+        st.warning("🔑 Please enter your OpenAI API key in the sidebar to start")
+        return
+
+    # Chat interface
+    st.markdown("### 💬 Chat")
     for message in st.session_state.messages:
         with st.chat_message(message["role"]):
             st.markdown(message["content"])
 
-    if prompt := st.chat_input("Ask about your PDFs"):
+    if prompt := st.chat_input("Ask about your PDFs..."):
         st.session_state.messages.append({"role": "user", "content": prompt})
 
         with st.chat_message("user"):
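main() hands the uploads and the key to process_pdfs(papers, api_key) and later calls st.session_state.chain({"question": prompt}), but process_pdfs itself is outside this diff. Below is a hedged sketch of a compatible implementation, assuming the classic LangChain + OpenAI + FAISS stack named in the sidebar; the loader, chunk sizes, and chain type are illustrative choices, not the repository's actual code.

    import tempfile

    import streamlit as st
    from langchain.chains import ConversationalRetrievalChain
    from langchain.chat_models import ChatOpenAI
    from langchain.document_loaders import PyPDFLoader
    from langchain.embeddings import OpenAIEmbeddings
    from langchain.memory import ConversationBufferMemory
    from langchain.text_splitter import RecursiveCharacterTextSplitter
    from langchain.vectorstores import FAISS

    def process_pdfs(papers, api_key):
        docs = []
        for paper in papers:
            # st.file_uploader gives in-memory files; PyPDFLoader wants a path,
            # so each upload is spooled to a temporary file first.
            with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as tmp:
                tmp.write(paper.getvalue())
                path = tmp.name
            docs.extend(PyPDFLoader(path).load())

        # Chunk the pages and index them in FAISS for retrieval.
        chunks = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100).split_documents(docs)
        vectorstore = FAISS.from_documents(chunks, OpenAIEmbeddings(openai_api_key=api_key))

        # A conversational chain with buffer memory matches the later call
        # st.session_state.chain({"question": prompt}) and its "answer" output.
        st.session_state.chain = ConversationalRetrievalChain.from_llm(
            llm=ChatOpenAI(openai_api_key=api_key, temperature=0),
            retriever=vectorstore.as_retriever(),
            memory=ConversationBufferMemory(memory_key="chat_history", return_messages=True),
        )
        st.success("PDFs processed. You can start chatting below.")

Attaching ConversationBufferMemory is what lets the chain be called with only {"question": ...} and return its reply under the "answer" key.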
@@ -110,7 +137,7 @@ def main():
 
         with st.chat_message("assistant"):
             if st.session_state.chain is None:
-                response = "Please upload and process a PDF first"
+                response = "Please upload and process a PDF first! 📄"
             else:
                 with st.spinner("Thinking..."):
                     result = st.session_state.chain({"question": prompt})
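The final hunk stops at the chain call, so the diff does not show how the answer is rendered or stored. A minimal sketch of how such an assistant turn is usually completed in a Streamlit chat loop, assuming the chain exposes its reply under the "answer" key; this is an illustration, not the lines that actually follow in app.py.

    import streamlit as st

    if "messages" not in st.session_state:
        st.session_state.messages = []
    if "chain" not in st.session_state:
        st.session_state.chain = None  # set by process_pdfs() once documents are indexed

    # Replay the stored history, then handle a new user turn.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    if prompt := st.chat_input("Ask about your PDFs..."):
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        with st.chat_message("assistant"):
            if st.session_state.chain is None:
                response = "Please upload and process a PDF first!"
            else:
                with st.spinner("Thinking..."):
                    result = st.session_state.chain({"question": prompt})
                    response = result["answer"]  # assumed output key of the chain
            st.markdown(response)

        # Persist the assistant turn so it survives the next Streamlit rerun.
        st.session_state.messages.append({"role": "assistant", "content": response})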