SinhNguyen committed on
Commit 139146c
1 Parent(s): 5b1a096

global streamlit config

Files changed (1)
  1. app.py +17 -13
app.py CHANGED
@@ -10,6 +10,21 @@ from langchain.llms import HuggingFaceHub
 import os
 from dotenv import load_dotenv
 
+# Set the Streamlit page configuration and CSS styles
+st.set_page_config(page_title="PDF Buddy", page_icon=":coffee:")
+st.markdown(
+    """
+    <style>
+    body {
+        background-color: #fce6ef;
+    }
+    </style>
+    """,
+    unsafe_allow_html=True
+)
+st.write(css, unsafe_allow_html=True)
+st.header("PDF Buddy :coffee:")
+
 
 def get_pdf_text(pdf_docs):
     text = ""
@@ -40,6 +55,7 @@ def load_embeddings():
 
 embeddings = load_embeddings()
 
+
 def get_vectorstore(text_chunks):
     vectorstore = FAISS.from_texts(texts=text_chunks, embedding=embeddings)
     return vectorstore
@@ -49,6 +65,7 @@ def load_llm():
     llm = HuggingFaceHub(repo_id="google/flan-t5-xxl", model_kwargs={"temperature":0.5, "max_length":218})
     return llm
 
+# Load the model and store it as a global variable
 llm = load_llm()
 
 def get_conversation_chain(vectorstore):
@@ -77,25 +94,12 @@ def handle_userinput(user_question):
 
 def main():
     load_dotenv()
-    st.set_page_config(page_title="PDF Buddy", page_icon=":coffee:")
-    st.markdown(
-        """
-        <style>
-        body {
-            background-color: #fce6ef;
-        }
-        </style>
-        """,
-        unsafe_allow_html=True
-    )
-    st.write(css, unsafe_allow_html=True)
 
     if "conversation" not in st.session_state:
         st.session_state.conversation = None
     if "chat_history" not in st.session_state:
         st.session_state.chat_history = None
 
-    st.header("PDF Buddy :coffee:")
     user_question = st.text_input("Ask a question about your documents:")
     if user_question:
         handle_userinput(user_question)
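
For context, below is a minimal, self-contained sketch (not the repository code) of the pattern this commit applies: page configuration and global CSS run once at module level, and main() keeps only the interactive part. The css value here is a placeholder for the styles app.py defines or imports elsewhere, which this diff does not show, and the echo inside main() stands in for handle_userinput(). Streamlit generally expects st.set_page_config to be the first Streamlit command executed in a script, which is the usual reason to move it out of main() and to module scope.

import streamlit as st

css = "<style>body { background-color: #fce6ef; }</style>"  # placeholder; the real styles live elsewhere in app.py

# set_page_config is the first Streamlit call in the script, so it can run at import time.
st.set_page_config(page_title="PDF Buddy", page_icon=":coffee:")

# Inject the global styles and header once per rerun.
st.markdown(css, unsafe_allow_html=True)
st.header("PDF Buddy :coffee:")


def main():
    # Only the per-interaction UI remains inside main().
    user_question = st.text_input("Ask a question about your documents:")
    if user_question:
        st.write(f"You asked: {user_question}")  # stand-in for handle_userinput(user_question)


if __name__ == "__main__":
    main()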