Upload 4 files
- app.py +305 -0
- faiss_index/index.faiss +0 -0
- faiss_index/index.pkl +3 -0
- requirements.txt +62 -0
app.py
ADDED
@@ -0,0 +1,305 @@
import streamlit as st
import sqlite3
from hashlib import sha256
from langchain_community.embeddings import LlamaCppEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain_community.document_loaders import TextLoader
from langchain_community.vectorstores import FAISS
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.chains import LLMChain
from langchain_community.llms import LlamaCpp
from langchain_community.llms import OpenAI
from langchain_core.prompts import PromptTemplate, ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_core.documents import Document
from langchain.chains import create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
from datetime import date

# Create a SQLite database and table
conn = sqlite3.connect("user_credentials.db")
cursor = conn.cursor()
cursor.execute('''
    CREATE TABLE IF NOT EXISTS users (
        username TEXT PRIMARY KEY,
        password TEXT
    )
''')
conn.commit()

if 'embeddings' not in st.session_state:
    st.session_state.embeddings = HuggingFaceEmbeddings(
        model_name="sentence-transformers/all-MiniLM-L6-v2",
        model_kwargs={"device": "cpu"},
    )

def get_similar_docs(query):
    # Search the shared FAISS index for the passages most similar to the query
    db = FAISS.load_local('faiss_index', st.session_state.embeddings)
    docs = db.similarity_search_with_score(query, 100)
    return docs

def format_docs(docs):
    return " ".join(doc.page_content for doc in docs)

def get_advice_from_llm(query):
    # Retrieve from the user's personal index and answer with a local TinyLlama model
    db = FAISS.load_local(st.session_state.username, st.session_state.embeddings)
    retriever = db.as_retriever()
    llm = LlamaCpp(model_path="./tinyllama-1.1b-chat-v1.0.Q8_0.gguf", n_ctx=2048)
    chat_history_str = "\n".join(
        ["<|im_start|>" + entry[0] + entry[1] + "<|im_end|>\n" for entry in st.session_state['chat_history']]
    )

    template = """
<|im_start|>system
{context}""" + chat_history_str + "<|im_end|>" + """
<|im_start|>user{input}<|im_end|>

<|im_start|>assistant
"""

    prompt = PromptTemplate(input_variables=["input", "context"], template=template)
    llm_chain = LLMChain(llm=llm, prompt=prompt)

    rag_chain = {"context": retriever | format_docs, "input": RunnablePassthrough()} | llm_chain
    answer = rag_chain.invoke(query)
    return answer

def vectordb_entry():
    # Append the latest journal entry to the user's FAISS index
    loader = TextLoader(f"./{st.session_state.username}.txt")
    documents = loader.load()
    text_splitter = CharacterTextSplitter(chunk_size=100, chunk_overlap=10)
    docs = text_splitter.split_documents(documents)
    db = FAISS.load_local(st.session_state.username, st.session_state.embeddings)
    db.add_documents(docs)
    db.save_local(st.session_state.username)

def save_into_text_file(file_path, text):
    with open(file_path, 'w') as file:
        file.write(text)
    print(f"String saved to {file_path}")

def journal():
    messages = st.container(height=600)
    query = st.chat_input("Need some advice?")

    if 'input_key' not in st.session_state:
        st.session_state.input_key = 0

    if 'chat_history' not in st.session_state:
        st.session_state.chat_history = []

    if query:
        answer = get_advice_from_llm(query)
        st.session_state.chat_history.append(("user", query))
        st.session_state.chat_history.append(("assistant", answer['text']))
        st.session_state.input_key += 1

    if 'chat_history' in st.session_state and st.session_state.chat_history:
        for speaker, message in st.session_state.chat_history:
            if speaker == "user":
                who = "You"
            else:
                who = "JournaLLM"
            messages.chat_message(speaker).write(who + ': ' + str(message))

    if st.button('Reset Chat'):
        st.session_state.chat_history = []
        st.session_state.input_key += 1
        st.experimental_rerun()

# Function to hash passwords
def hash_password(password):
    return sha256(password.encode()).hexdigest()

# Function to check login credentials
def authenticate(username, password):
    hashed_password = hash_password(password)
    cursor.execute("SELECT * FROM users WHERE username=? AND password=?", (username, hashed_password))
    return cursor.fetchone() is not None

# Function to add a new user to the database
def add_user(username, password):
    hashed_password = hash_password(password)
    try:
        cursor.execute("INSERT INTO users (username, password) VALUES (?, ?)", (username, hashed_password))
        conn.commit()
        return True  # User added successfully
    except sqlite3.IntegrityError:
        return False  # Username already exists

# Streamlit Login Page
def login_page():
    st.title("Login Page")
    un = st.text_input("Username:")
    pw = st.text_input("Password:", type="password")
    if un and pw:
        st.session_state['username'] = un
        st.session_state['password'] = pw

    if st.button("Login"):
        if not st.session_state.get('username') or not st.session_state.get('password'):
            st.error("Both username and password are required.")
        elif authenticate(st.session_state['username'], st.session_state['password']):
            create_table()
            st.success("Login successful!")
        else:
            st.error("Invalid credentials. Please try again.")

# Streamlit Signup Page
def signup_page():
    st.title("Signup Page")
    new_username = st.text_input("New Username:")
    new_password = st.text_input("New Password:", type="password")

    if st.button("Signup"):
        if not new_username or not new_password:
            st.error("Both username and password are required.")
        else:
            result = add_user(new_username, new_password)
            if result:
                file_path = f"{new_username}.txt"
                text = "I've started writing my journal"
                # Open the file in write mode and write the string
                with open(file_path, 'w') as file:
                    file.write(text)

                print(f"String saved to {file_path}")
                loader = TextLoader(f"./{new_username}.txt")
                documents = loader.load()
                text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
                docs = text_splitter.split_documents(documents)
                embeddings = HuggingFaceEmbeddings(
                    model_name="sentence-transformers/all-MiniLM-L6-v2",
                    model_kwargs={"device": "cpu"},
                )
                db = FAISS.from_documents(docs, embeddings)
                db.save_local(new_username)
                st.success("Signup successful! You can now login.")
            else:
                st.error("Username already exists. Please choose a different username.")


+
def create_table():
|
186 |
+
conn = sqlite3.connect(f'{st.session_state.username}_entries.db')
|
187 |
+
cursor = conn.cursor()
|
188 |
+
|
189 |
+
cursor.execute('''
|
190 |
+
CREATE TABLE IF NOT EXISTS entries (
|
191 |
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
192 |
+
date TEXT,
|
193 |
+
notes TEXT
|
194 |
+
)
|
195 |
+
''')
|
196 |
+
|
197 |
+
conn.commit()
|
198 |
+
conn.close()
|
199 |
+
|
200 |
+
# Function to insert data into the SQLite database
|
201 |
+
def insert_data(date, notes):
|
202 |
+
conn = sqlite3.connect(f'{st.session_state.username}_entries.db')
|
203 |
+
cursor = conn.cursor()
|
204 |
+
|
205 |
+
cursor.execute('''
|
206 |
+
INSERT INTO entries (date, notes)
|
207 |
+
VALUES (?, ?)
|
208 |
+
''', (date, notes))
|
209 |
+
|
210 |
+
conn.commit()
|
211 |
+
conn.close()
|
212 |
+
|
213 |
+
# Function to retrieve data for a selected date
|
214 |
+
def retrieve_data(selected_date):
|
215 |
+
conn = sqlite3.connect(f'{st.session_state.username}_entries.db')
|
216 |
+
cursor = conn.cursor()
|
217 |
+
|
218 |
+
cursor.execute('''
|
219 |
+
SELECT date, notes FROM entries WHERE date = ?
|
220 |
+
''', (selected_date,))
|
221 |
+
|
222 |
+
data = cursor.fetchall()
|
223 |
+
|
224 |
+
conn.close()
|
225 |
+
return data
|
226 |
+
|
227 |
+
|
228 |
+
def entry():
|
229 |
+
st.title('JournaLLM')
|
230 |
+
st.write('Welcome to JournaLLM, \
|
231 |
+
your personal space for mindful \
|
232 |
+
reflection and goal tracking! This app is designed to help you \
|
233 |
+
seamlessly capture your daily thoughts, \
|
234 |
+
set meaningful goals, and track your progress.')
|
235 |
+
c1,c2 = st.columns(2)
|
236 |
+
if 'input_key' not in st.session_state:
|
237 |
+
st.session_state.input_key = 0
|
238 |
+
|
239 |
+
file_path = f"{st.session_state.username}.txt"
|
240 |
+
|
241 |
+
c1.write("Today's Entry")
|
242 |
+
text0 = c1.text_area("Enter text ")
|
243 |
+
|
244 |
+
# template = f'''Question: What happened on {date.today().strftime("%B %d, %Y")}?
|
245 |
+
# How did I feel on {date.today().strftime("%B %d, %Y")}?
|
246 |
+
# What were the events that happened on {date.today().strftime("%B %d, %Y")}?
|
247 |
+
# Describe your day, {date.today().strftime("%B %d, %Y")}. \n Answer: '''
|
248 |
+
text = f""" <|im_start|>system
|
249 |
+
What happened on {date.today().strftime("%B %d, %Y")}?
|
250 |
+
How did I feel on {date.today().strftime("%B %d, %Y")}?
|
251 |
+
What were the events that happened on {date.today().strftime("%B %d, %Y")}?
|
252 |
+
Describe your day, {date.today().strftime("%B %d, %Y")}.<|im_end|>
|
253 |
+
|
254 |
+
<|im_start|>user
|
255 |
+
{text0}<|im_end|>"""
|
256 |
+
|
257 |
+
|
258 |
+
if c1.button('Pen down') and text:
|
259 |
+
save_into_text_file(file_path,text)
|
260 |
+
vectordb_entry()
|
261 |
+
c1.write('Entry saved')
|
262 |
+
st.session_state.input_key += 1
|
263 |
+
#display previous entries
|
264 |
+
insert_data(date.today().strftime("%B %d, %Y"), text0)
|
265 |
+
|
266 |
+
#displaying
|
267 |
+
c2.write('View previous entries')
|
268 |
+
selected_date = c2.date_input('Select a date', date.today())
|
269 |
+
data = retrieve_data(selected_date.strftime("%B %d, %Y"))
|
270 |
+
if data:
|
271 |
+
en = c2.container(height=300)
|
272 |
+
for i in data:
|
273 |
+
en.write(i[1])
|
274 |
+
#[en.write(x[1]) for x in data]
|
275 |
+
else:
|
276 |
+
c2.info('No entries for the selected date.')
|
277 |
+
|
278 |
+
|
279 |
+
|
# Main Streamlit App
def main():
    st.set_page_config(layout="wide")
    st.sidebar.title("Navigation")
    page = st.sidebar.radio("Go to", ["Login", "Signup", "Journal", "Advice"])

    if page == "Login":
        login_page()
    elif page == "Signup":
        signup_page()
    elif page == "Journal":
        if st.session_state.get('username', '') == "":
            st.write('Please login to continue.')
        else:
            st.write(f"Logged in as {st.session_state.username}")
            entry()
    elif page == "Advice":
        if st.session_state.get('username', '') == "":
            st.write('Please login to continue.')
        else:
            st.write(f"Logged in as {st.session_state.username}")
            journal()

if __name__ == "__main__":
    main()
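
As a quick sanity check of the committed index, here is a minimal sketch (not part of the upload) that loads faiss_index/ with the same sentence-transformers/all-MiniLM-L6-v2 embeddings used in app.py and runs a scored similarity search, mirroring get_similar_docs(). It assumes the pinned langchain==0.1.5 / langchain-community==0.0.17 from requirements.txt, where FAISS.load_local still accepts just a path and an embedding object.

# sketch: query the bundled faiss_index/ outside the Streamlit app
from langchain.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS

embeddings = HuggingFaceEmbeddings(
    model_name="sentence-transformers/all-MiniLM-L6-v2",
    model_kwargs={"device": "cpu"},
)
db = FAISS.load_local("faiss_index", embeddings)          # directory committed in this upload
hits = db.similarity_search_with_score("How did I feel today?", k=3)
for doc, score in hits:
    print(score, doc.page_content[:80])                   # lower score = closer match (L2 distance)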
faiss_index/index.faiss
ADDED
Binary file (18.5 kB).
faiss_index/index.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d2e4a24d7834bc63e9ae618d90d89e0308c45c69e9db0e475a6582b4b66996e6
size 2572
requirements.txt
ADDED
@@ -0,0 +1,62 @@
aiohttp==3.9.3
aiosignal==1.3.1
altair==5.2.0
annotated-types==0.6.0
anyio==4.2.0
attrs==23.2.0
blinker==1.7.0
cachetools==5.3.2
certifi==2024.2.2
charset-normalizer==3.3.2
click==8.1.7
dataclasses-json==0.6.4
diskcache==5.6.3
faiss-cpu==1.7.4
filelock==3.13.1
frozenlist==1.4.1
fsspec==2023.12.2
gitdb==4.0.11
GitPython==3.1.41
greenlet==3.0.3
huggingface-hub==0.20.3
idna==3.6
importlib-metadata==7.0.1
Jinja2==3.1.3
joblib==1.3.2
jsonpatch==1.33
jsonpointer==2.4
jsonschema==4.21.1
jsonschema-specifications==2023.12.1
langchain==0.1.5
langchain-community==0.0.17
langchain-core==0.1.18
langsmith==0.0.86
llama_cpp_python==0.2.38
markdown-it-py==3.0.0
MarkupSafe==2.1.4
marshmallow==3.20.2
mdurl==0.1.2
mpmath==1.3.0
multidict==6.0.5
networkx==3.2.1
nltk==3.8.1
numpy==1.26.3
pandas==2.2.0
pillow==10.2.0
referencing==0.33.0
regex==2023.12.25
requests==2.31.0
rich==13.7.0
rpds-py==0.17.1
safetensors==0.4.2
scikit-learn==1.4.0
scipy==1.12.0
sentence-transformers==2.3.1
sentencepiece==0.1.99
smmap==5.0.1
sniffio==1.3.0
SQLAlchemy==2.0.25
streamlit==1.30.0
sympy==1.12
tokenizers==0.15.1
transformers==4.37.2