HemanthSai7 committed • Commit 931abd0
1 Parent(s): c035779

Frontend

Updated home page of the Streamlit app
- frontend/components/__init__.py +2 -1
- frontend/components/toaster.py +22 -0
- frontend/layouts/mainlayout.py +5 -2
- frontend/pages/2_🤖_bot.py +109 -3
- frontend/🏡_Home.py +1 -1
frontend/components/__init__.py
CHANGED
@@ -1,3 +1,4 @@
 from .authors import *
 from .user_greetings import *
-from .logo import add_logo
+from .logo import add_logo
+from .toaster import toaster_messages
frontend/components/toaster.py
ADDED
@@ -0,0 +1,22 @@
+import streamlit as st
+
+import time
+
+
+def toaster_messages(func: callable):
+    def wrapper():
+        msg = st.toast("Uploading PDF...")
+        time.sleep(8)
+        msg.toast("Converting PDF into small chunks...")
+        time.sleep(8)
+        msg.toast("Breaking down chunks into tokens...")
+        time.sleep(8)
+        msg.toast("Creating embedding vectors...")
+        time.sleep(8)
+        msg.toast("Creating vector store...")
+        time.sleep(8)
+        msg.toast("Vector store created successfully!")
+
+        func()
+
+    return wrapper
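For context, a minimal sketch of how this decorator could be applied. The upload_data function below is illustrative and not part of this commit; as written, wrapper() plays the staged toasts (roughly 40 seconds of sleeps) before calling the wrapped function, and it takes no arguments, so it only suits zero-argument callables.

import streamlit as st

from components import toaster_messages


# Hypothetical zero-argument callable wrapped by the decorator; the real
# upload logic lives in the bot page, not here.
@toaster_messages
def upload_data():
    st.write("PDF processed.")


upload_data()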
frontend/layouts/mainlayout.py
CHANGED
@@ -1,12 +1,15 @@
 import json
 import streamlit as st
+
+import requests
+
 from typing import Callable
 from components import authors, user_greetings, add_logo


 def mainlayout(func: Callable):
     def wrapper():
-        with open("
+        with open("layouts/st_page_layouts.json", "r", encoding="utf-8") as f:
             st_page_layouts = json.load(f)

         st.set_page_config(
@@ -16,7 +19,7 @@ def mainlayout(func: Callable):
                 else "home"
             ]
         )
-        add_logo("
+        add_logo("images/studybotlogo.svg", svg=True)
         st.markdown("# Studybot 📚")
         user_greetings()
         authors()
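For orientation, a minimal sketch of what the decorator appears to do with layouts/st_page_layouts.json, assuming each JSON entry maps to st.set_page_config keyword arguments. The key names in the comment are assumptions for illustration, not taken from this commit; the fallback to "home" is what the diff shows.

import json

import streamlit as st

# Hypothetical shape of layouts/st_page_layouts.json, e.g.
#   {"home": {"page_title": "Studybot", "layout": "wide"},
#    "bot":  {"page_title": "Studybot - Bot", "layout": "wide"}}
with open("layouts/st_page_layouts.json", "r", encoding="utf-8") as f:
    st_page_layouts = json.load(f)

# Pick the entry for the current page, falling back to "home", and pass it
# to st.set_page_config as keyword arguments.
page = "bot"
st.set_page_config(**st_page_layouts.get(page, st_page_layouts["home"]))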
frontend/pages/2_🤖_bot.py
CHANGED
@@ -1,9 +1,115 @@
+import requests
+
 import streamlit as st

 from layouts.mainlayout import mainlayout

 @mainlayout
-def 
-
+def upload_data():
+    # upload pdf
+    upload_pdf = st.file_uploader("Upload PDF", type="pdf")
+    if upload_pdf is not None:
+        files = {"file": upload_pdf}
+        with st.spinner("Uploading PDF..."):
+            response = requests.post(
+                "https://hemanthsai7-studybotapi.hf.space/api/upload", files=files
+            )
+
+        if response.status_code == 200:
+            st.success(
+                f'{response.json()["message"][0]}. Vector Store created successfully!'
+            )
+            st.session_state.uploaded_pdf = True
+        else:
+            st.error("Failed to upload PDF!")
+
+
+
+upload_data()
+
+with st.expander("What happens when I upload a PDF?", expanded=True):
+    st.info(
+        """
+        - The PDF is uploaded to the backend server. ☁️
+
+        - The PDF is converted into small chunks for faster processing.
+
+        - The chunks are broken down into tokens. A token is a single word or a group of words.
+
+        - The tokens are converted into embedding vectors.
+
+        - The embedding vectors are stored in a vector store.
+        """,
+        icon="ℹ️",
+    )
+
+st.divider()
+
+if "uploaded_pdf" in st.session_state.keys():
+    # chatbot
+    st.subheader("Ask Studybot a question! 🤖")
+
+
+    if "messages" not in st.session_state.keys():
+        st.session_state.messages = [
+            {
+                "role": "assistant",
+                "content": "What's troubling you? Ask me a question right away!",
+            }
+        ]
+
+    # Display or clear chat messages
+    for message in st.session_state.messages:
+        with st.chat_message(message["role"]):
+            st.write(message["content"])
+
+
+    def clear_chat_history():
+        st.session_state.messages = [
+            {
+                "role": "assistant",
+                "content": "What's troubling you? Ask me a question right away!",
+            }
+        ]
+
+
+    st.sidebar.button("Clear Chat History", on_click=clear_chat_history)
+
+
+    def generate_mistral_response(question: str):
+        for dict_message in st.session_state.messages:
+            if dict_message["role"] == "user":
+                question = dict_message["content"]
+
+        answer = requests.post(
+            "https://hemanthsai7-studybotapi.hf.space/api/inference",
+            json={"promptMessage": question},
+        ).json()
+
+        return answer
+
+
+    # User-provided prompt
+    if prompt := st.chat_input(
+        disabled=not st.session_state.messages[-1]["role"] == "assistant",
+        placeholder="Hello, please ask me a question! 🤖"):
+        st.session_state.messages.append({"role": "user", "content": prompt})
+        with st.chat_message("user"):
+            st.write(prompt)
+
+        # ask question
+        st.write(st.session_state)

-
+    # Generate a new response if last message is not from assistant
+    if st.session_state.messages[-1]["role"] != "assistant":
+        with st.chat_message("assistant"):
+            with st.spinner("Thinking..."):
+                response = generate_mistral_response(prompt)
+                placeholder = st.empty()
+                full_response = ""
+                for item in response:
+                    full_response += item
+                    placeholder.markdown(full_response)
+                placeholder.markdown(full_response)
+        message = {"role": "assistant", "content": full_response}
+        st.session_state.messages.append(message)
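This page talks to two endpoints of the companion Space: /api/upload for the PDF and /api/inference for questions. Below is a minimal sketch of that exchange outside Streamlit; the local file name and sample question are illustrative, and only the fields the page itself uses ("message" in the upload response, "promptMessage" in the inference request) come from the diff, so the rest of the response shape is an assumption.

import requests

API_BASE = "https://hemanthsai7-studybotapi.hf.space/api"

# Upload a PDF; on success the page reads response.json()["message"][0].
with open("notes.pdf", "rb") as f:  # hypothetical local file
    upload = requests.post(f"{API_BASE}/upload", files={"file": f})
print(upload.status_code, upload.json().get("message"))

# Ask a question; the page sends the prompt under "promptMessage" and then
# iterates over the decoded JSON to assemble the assistant message.
answer = requests.post(
    f"{API_BASE}/inference", json={"promptMessage": "What is a token?"}
)
print(answer.json())

Note that generate_mistral_response returns the parsed JSON object as-is, so the "for item in response" loop in the page streams whatever iterating that object yields (keys for a dict, characters for a string) into the chat placeholder.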
frontend/🏡_Home.py
CHANGED
@@ -63,7 +63,7 @@ def home():
         "<h2 style='text-align: center; color: black;'>Studybot Architecture</h1>",
         unsafe_allow_html=True,
     )
-    st.image("
+    st.image("images/architecture.png")


 home()