File size: 1,770 Bytes
931abd0
 
7a89bde
 
 
15d650d
7a89bde
15d650d
931abd0
 
15d650d
 
 
 
 
 
931abd0
15d650d
 
 
931abd0
15d650d
931abd0
15d650d
 
 
 
931abd0
15d650d
931abd0
 
15d650d
931abd0
15d650d
931abd0
15d650d
 
 
 
931abd0
15d650d
931abd0
15d650d
 
 
931abd0
15d650d
 
 
931abd0
15d650d
 
931abd0
15d650d
 
 
 
 
 
931abd0
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
import requests

import streamlit as st

from layouts.mainlayout import mainlayout
from langchain.memory.chat_message_histories import StreamlitChatMessageHistory

from components.file_streaming import *


@mainlayout
def display():
    """Render the static intro section of the page.

    Shows an expanded explainer describing the PDF ingestion pipeline,
    then a horizontal divider. Wrapped by ``mainlayout`` for page chrome.
    """
    # The explainer text is kept in a named variable so the widget call
    # below stays short; content is user-facing markdown.
    pipeline_explainer = """
            - The PDF is uploaded to the backend server. βš™οΈ

            - The PDF is converted into small chunks for  faster processing. πŸš€
            
            - The chunks are broken down into tokens. A token is a single word or a group of words. πŸ“

            - The tokens are converted into embedding vectors. πŸ“Š

            - The embedding vectors are stored in a vector store. πŸ—„οΈ
            """
    explainer_box = st.expander("What happens when I upload a PDF? πŸ“‘", expanded=True)
    with explainer_box:
        st.info(pipeline_explainer, icon="ℹ️")

    st.divider()


# Render the static page intro (expander + divider) before any interactive widgets.
display()

# Sidebar PDF picker.
# NOTE(review): the name is plural but `accept_multiple_files` is not set, so
# this is a single UploadedFile or None — confirm whether multi-file upload
# was intended (the info text below says "documents").
uploaded_files = st.sidebar.file_uploader(label="Upload PDF files", type=["pdf"])

if not uploaded_files:
    st.info("Please upload PDF documents to continue.")
    st.stop()  # halts this Streamlit script run until a file is provided
# Send the upload to the backend; defined in components.file_streaming
# (star-imported above).
upload_data(uploaded_files)

# Chat history persisted in Streamlit session state via LangChain's wrapper.
msgs = StreamlitChatMessageHistory()

# Seed the conversation on first run, or wipe it when the user asks.
# Short-circuit order matters: the "Clear" button is only rendered once
# at least one message already exists.
if not msgs.messages or st.sidebar.button("Clear message history"):
    msgs.clear()
    msgs.add_ai_message("How can I help you?")

# Map LangChain message types onto Streamlit chat roles, then replay the
# stored conversation so it survives reruns.
avatars = {"human": "user", "ai": "assistant"}
for past_msg in msgs.messages:
    role = avatars[past_msg.type]
    st.chat_message(role).write(past_msg.content)

# Handle a newly submitted user message (walrus: body runs only when the
# chat input actually produced text this rerun).
if user_query := st.chat_input(placeholder="Ask me anything!"):
    st.chat_message("user").write(user_query)

    with st.chat_message("assistant"):
        # Callback handlers from components.file_streaming for showing
        # retrieval context and streaming tokens into the UI.
        # NOTE(review): neither handler is passed to the request below —
        # presumably they are consumed further down; confirm they are used.
        retrieval_handler = PrintRetrievalHandler(st.container())
        stream_handler = StreamHandler(st.empty())
        # Ask the local backend's inference endpoint for an answer.
        # NOTE(review): no timeout= on this request — a hung backend blocks
        # the Streamlit run indefinitely; consider requests.post(..., timeout=...).
        response = requests.post(
            "http://127.0.0.1:8000/api/inference",
            json={"promptMessage": user_query},
        ).json()