HemanthSai7 committed
Commit cdca970 β€’ 1 Parent(s): 8cccba3

frontend with chatbot completion

frontend/components/__init__.py CHANGED
@@ -1,4 +1,5 @@
 from .authors import *
 from .user_greetings import *
 from .logo import add_logo
-from .file_streaming import *
+from .file_streaming import *
+from .display import *
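Note: with the wildcard re-export added above, the new helper is also reachable from the package itself; a minimal, illustrative sketch (not part of this commit):

# Illustrative only: the re-export in components/__init__.py makes the helper
# importable straight from the package...
from components import display_source_document
# ...while the bot page in this commit imports the module directly instead:
# from components.display import *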
frontend/components/display.py ADDED
@@ -0,0 +1,11 @@
+import streamlit as st
+
+
+def display_source_document(source_document: list):
+    for i, source in enumerate(source_document):
+        st.markdown(f"""{i+1}. ##### Source content
+        - {source["page_content"]}
+
+        - Page number: {source["metadata"]["page"]}
+        """
+        )
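For reference, a minimal sketch of how the new helper is expected to be fed: each source document is assumed to be a dict with page_content and metadata["page"] keys, matching what the function reads above. The sample values are made up.

import streamlit as st
from components.display import display_source_document

# Hypothetical payload mirroring the shape display_source_document expects.
sample_sources = [
    {"page_content": "Transformers rely on self-attention ...", "metadata": {"page": 3}},
    {"page_content": "The encoder stacks several identical layers ...", "metadata": {"page": 4}},
]

# Rendered inside an expander, as the bot page does.
with st.expander("Source documents 🧐", expanded=True):
    display_source_document(sample_sources)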
frontend/pages/2_πŸ€–_bot.py CHANGED
@@ -6,6 +6,7 @@ from layouts.mainlayout import mainlayout
 from langchain.memory.chat_message_histories import StreamlitChatMessageHistory
 
 from components.file_streaming import *
+from components.display import *
 
 
 @mainlayout
@@ -31,6 +32,7 @@ def display():
 
 display()
 
+BASE_URL = "http://127.0.0.1:8000"
 uploaded_files = st.sidebar.file_uploader(label="Upload PDF files", type=["pdf"])
 
 if not uploaded_files:
@@ -38,23 +40,86 @@ if not uploaded_files:
     st.stop()
 upload_data(uploaded_files)
 
-msgs = StreamlitChatMessageHistory()
 
-if len(msgs.messages) == 0 or st.sidebar.button("Clear message history"):
-    msgs.clear()
-    msgs.add_ai_message("How can I help you?")
+if "messages" not in st.session_state.keys():
+    st.session_state.messages = [
+        {
+            "role": "assistant",
+            "content": "What's troubling you? Ask me a question right away!",
+        }
+    ]
 
-avatars = {"human": "user", "ai": "assistant"}
-for msg in msgs.messages:
-    st.chat_message(avatars[msg.type]).write(msg.content)
+# Display or clear chat messages
+for message in st.session_state.messages:
+    with st.chat_message(message["role"]):
+        st.write(message["content"])
 
-if user_query := st.chat_input(placeholder="Ask me anything!"):
-    st.chat_message("user").write(user_query)
 
-    with st.chat_message("assistant"):
-        retrieval_handler = PrintRetrievalHandler(st.container())
-        stream_handler = StreamHandler(st.empty())
+def clear_chat_history():
+    st.session_state.messages = [
+        {
+            "role": "assistant",
+            "content": "What's troubling you? Ask me a question right away!",
+        }
+    ]
+
+
+st.sidebar.button("Clear Chat History", on_click=clear_chat_history)
+
+
+def generate_mistral_response(question: str):
+    # Use the most recent user message as the question
+    for dict_message in st.session_state.messages:
+        if dict_message["role"] == "user":
+            question = dict_message["content"]
+
+    try:
         response = requests.post(
-            "http://127.0.0.1:8000/api/inference",
-            json={"promptMessage": user_query},
-        ).json()
+            f"{BASE_URL}/api/inference",
+            json={"promptMessage": question},
+        ).json()
+
+        if response["status"] == "error":
+            st.error("Please refresh the page and try uploading the file again.")
+            st.stop()
+
+        answer = response["result"]["answer"]
+
+    except Exception:
+        # `response` already holds the parsed JSON body at this point
+        if response == "exception.ModelDeployingException()":
+            st.error("The model is still deploying on the backend servers. Please try again after some time.")
+            st.stop()
+
+    with st.expander("Source documents 🧐", expanded=True):
+        # Reuse the payload from the answer call instead of issuing a second inference request
+        source_documents = response["result"]["source_documents"]
+        display_source_document(source_documents)
+
+    return answer
+
+
+# User-provided prompt
+if prompt := st.chat_input(
+    disabled=not st.session_state.messages[-1]["role"] == "assistant",
+    placeholder="Hello, please ask me a question! πŸ€–",
+):
+    st.session_state.messages.append({"role": "user", "content": prompt})
+    with st.chat_message("user"):
+        st.write(prompt)
+
+# Debug: inspect the current session state
+st.write(st.session_state)
+
+# Generate a new response if the last message is not from the assistant
+if st.session_state.messages[-1]["role"] != "assistant":
+    with st.chat_message("assistant"):
+        with st.spinner("Thinking..."):
+            response = generate_mistral_response(prompt)
+            placeholder = st.empty()
+            full_response = ""
+            for item in response:
+                full_response += item
+                placeholder.markdown(full_response)
+            placeholder.markdown(full_response)
+    message = {"role": "assistant", "content": full_response}
+    st.session_state.messages.append(message)
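The page above assumes the backend's /api/inference endpoint accepts a promptMessage field and returns JSON with status, result.answer, and result.source_documents. A minimal sketch of that contract, inferred from the frontend code (the helper name and timeout are illustrative, not part of the commit):

import requests

BASE_URL = "http://127.0.0.1:8000"  # same local backend the page targets


def fetch_answer(question: str) -> dict:
    # Expected response shape, inferred from how the page reads it:
    # {"status": "success" | "error",
    #  "result": {"answer": "...",
    #             "source_documents": [{"page_content": "...", "metadata": {"page": 0}}]}}
    response = requests.post(
        f"{BASE_URL}/api/inference",
        json={"promptMessage": question},
        timeout=60,
    )
    return response.json()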
test.py CHANGED
@@ -20,68 +20,4 @@
 # # print(b.bill)
 # a=A()
 # a.bill=3
-# print(a.bill)
-
-# if "uploaded_pdf" in st.session_state.keys():
-# # chatbot
-# st.subheader("Ask Studybot a question! πŸ€–")
-
-# if "messages" not in st.session_state.keys():
-# st.session_state.messages = [
-# {
-# "role": "assistant",
-# "content": "What's troubling you? Ask me a question right away!",
-# }
-# ]
-
-# # Display or clear chat messages
-# for message in st.session_state.messages:
-# with st.chat_message(message["role"]):
-# st.write(message["content"])
-
-# def clear_chat_history():
-# st.session_state.messages = [
-# {
-# "role": "assistant",
-# "content": "What's troubling you? Ask me a question right away!",
-# }
-# ]
-
-# st.sidebar.button("Clear Chat History", on_click=clear_chat_history)
-
-# def generate_mistral_response(question: str):
-# for dict_message in st.session_state.messages:
-# if dict_message["role"] == "user":
-# question = dict_message["content"]
-
-# answer = requests.post(
-# "https://hemanthsai7-studybotapi.hf.space/api/inference",
-# json={"promptMessage": question},
-# ).json()
-
-# return answer
-
-# User-provided prompt
-# if prompt := st.chat_input(
-# disabled=not st.session_state.messages[-1]["role"] == "assistant",
-# placeholder="Hello, please ask me a question! πŸ€–"):
-# st.session_state.messages.append({"role": "user", "content": prompt})
-# with st.chat_message("user"):
-# st.write(prompt)
-
-# # ask question
-# st.write(st.session_state)
-
-# # Generate a new response if last message is not from assistant
-# if st.session_state.messages[-1]["role"] != "assistant":
-# with st.chat_message("assistant"):
-# with st.spinner("Thinking..."):
-# response = generate_mistral_response(prompt)
-# placeholder = st.empty()
-# full_response = ""
-# for item in response:
-# full_response += item
-# placeholder.markdown(full_response)
-# placeholder.markdown(full_response)
-# message = {"role": "assistant", "content": full_response}
-# st.session_state.messages.append(message)
+# print(a.bill)