subhankarhalder
committed on
Create app.py
app.py
ADDED
@@ -0,0 +1,49 @@
import faiss
from langchain_together.embeddings import TogetherEmbeddings
import numpy as np
import pickle
import os
import streamlit as st

# The Together API key is read from the Streamlit/Space secrets store.
os.environ["TOGETHER_API_KEY"] = st.secrets["together_api_key"]


# cache_resource rather than cache_data: cache_data pickles the return value,
# which does not suit a FAISS index object.
@st.cache_resource
def load_data():
    """Load the precomputed text chunks and their FAISS index from disk."""
    with open("list_of_texts.pkl", "rb") as f:
        list_of_texts = pickle.load(f)
    index = faiss.read_index("faiss.index")
    return list_of_texts, index


def response(sentence, embeddings, list_of_texts, index):
    """Embed the query and return the single nearest text chunk."""
    vector = embeddings.embed_query(sentence)
    vector = np.array([vector]).astype("float32")
    k = 5
    _distances, indices = index.search(vector, k)
    nearest_texts = [list_of_texts[i] for i in indices[0]]
    return nearest_texts[0]


embeddings = TogetherEmbeddings(model="togethercomputer/m2-bert-80M-8k-retrieval")
list_of_texts, index = load_data()
st.title("Ship Document Retriever")

# Initialize chat history
if "messages" not in st.session_state:
    st.session_state.messages = []

# Display chat messages from history on app rerun
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

if prompt := st.chat_input("What is up?"):
    # Display user message in chat message container
    with st.chat_message("user"):
        st.markdown(prompt)
    # Add user message to chat history
    st.session_state.messages.append({"role": "user", "content": prompt})
    # Retrieve the nearest document chunk and show it as the assistant reply
    get_response = response(prompt, embeddings, list_of_texts, index)
    with st.chat_message("assistant"):
        st.markdown(get_response)
    st.session_state.messages.append({"role": "assistant", "content": get_response})
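app.py expects two prebuilt artifacts alongside it: list_of_texts.pkl (the pickled text chunks) and faiss.index (their FAISS index). Neither is created in this commit. Below is a minimal offline sketch of how they might be produced with the same Together embedding model; the file name build_index.py, the placeholder docs list, and the choice of IndexFlatL2 are assumptions for illustration, not part of this Space.

# build_index.py -- hypothetical offline step; requires TOGETHER_API_KEY in the environment.
import pickle

import faiss
import numpy as np
from langchain_together.embeddings import TogetherEmbeddings

# Placeholder corpus; the real Space presumably uses its own ship-document chunks.
docs = [
    "Chunk one of a ship document.",
    "Chunk two of a ship document.",
]

embeddings = TogetherEmbeddings(model="togethercomputer/m2-bert-80M-8k-retrieval")
vectors = np.array(embeddings.embed_documents(docs)).astype("float32")

# Exact L2 index; app.py only calls index.search(), so other FAISS index types would also work.
index = faiss.IndexFlatL2(vectors.shape[1])
index.add(vectors)

faiss.write_index(index, "faiss.index")
with open("list_of_texts.pkl", "wb") as f:
    pickle.dump(docs, f)

The app also reads st.secrets["together_api_key"], so a secret with that exact name must be available to the running app (locally, typically via an entry in .streamlit/secrets.toml).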