Update app.py
app.py CHANGED
@@ -11,6 +11,12 @@ from htmlTemplates import css, bot_template, user_template
 from langchain.llms import HuggingFaceHub, LlamaCpp
 from huggingface_hub import snapshot_download, hf_hub_download
 
+
+repo_name = "IlyaGusev/saiga2_7b_gguf"
+model_name = "model-q2_K.gguf"
+
+snapshot_download(repo_id=repo_name, local_dir=".", allow_patterns=model_name)
+
 def get_pdf_text(pdf_docs):
     text = ""
     for pdf in pdf_docs:
@@ -38,12 +44,7 @@ def get_vectorstore(text_chunks):
     return vectorstore
 
 
-def get_conversation_chain(vectorstore):
-
-    repo_name = "IlyaGusev/saiga2_7b_gguf"
-    model_name = "model-q2_K.gguf"
-
-    snapshot_download(repo_id=repo_name, local_dir=".", allow_patterns=model_name)
+def get_conversation_chain(vectorstore, model_name):
 
     llm = LlamaCpp(model_path=model_name, n_ctx=2048)
     #llm = ChatOpenAI()
@@ -103,7 +104,7 @@ with st.sidebar:
 
             # create conversation chain
             st.session_state.conversation = get_conversation_chain(
-                vectorstore)
+                vectorstore, model_name)
 
 
 
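For reference, the pieces touched by this commit fit together roughly as below: the model file is now downloaded once at import time and its file name is passed into get_conversation_chain. This is a minimal sketch assembled from the hunks above; the memory and retrieval wiring after the LlamaCpp call is not shown in the diff and is only assumed from the common LangChain ConversationalRetrievalChain pattern.

# Sketch of the result of this refactor (not the full app.py).
# The ConversationBufferMemory / ConversationalRetrievalChain part is an
# assumption: the diff only shows the function up to the LlamaCpp call.
from huggingface_hub import snapshot_download
from langchain.llms import LlamaCpp
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain

repo_name = "IlyaGusev/saiga2_7b_gguf"
model_name = "model-q2_K.gguf"

# Fetch only the quantized GGUF file into the working directory, once at startup.
snapshot_download(repo_id=repo_name, local_dir=".", allow_patterns=model_name)


def get_conversation_chain(vectorstore, model_name):
    # Load the local GGUF model with a 2048-token context window.
    llm = LlamaCpp(model_path=model_name, n_ctx=2048)

    # Assumed continuation: attach chat memory and the vector-store retriever.
    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
    return ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),
        memory=memory,
    )

Moving snapshot_download to module level presumably means the GGUF file is fetched once when the Space starts rather than on every press of the Process button.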