Update app.py

app.py CHANGED
@@ -195,14 +195,14 @@ if create_vectorstores:
         vectorstore_name="Vit_All_OpenAI_Embeddings",
     )
     print("vector_store_OpenAI:",vector_store_OpenAI._collection.count(),"chunks.")
-
+
     vector_store_google,new_vectorstore_name = create_vectorstore(
         embeddings=embeddings_google,
         documents = chunks,
         vectorstore_name="Vit_All_Google_Embeddings"
     )
     print("vector_store_google:",vector_store_google._collection.count(),"chunks.")
-
+    """

     vector_store_HF = create_vectorstore(
         embeddings=embeddings_HuggingFace,
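Note: create_vectorstore is a helper defined elsewhere in app.py; this hunk only shows its call sites, where one persisted Chroma collection is built per embedding model. The following is a minimal sketch of what such a helper typically looks like with LangChain's Chroma wrapper — the function name, arguments, and return values mirror the calls above, but the body and the Path-based persist directory are assumptions, not the app's actual code.

from pathlib import Path
from langchain_community.vectorstores import Chroma  # older releases: from langchain.vectorstores import Chroma

# Assumed location for persisted collections; app.py defines its own LOCAL_VECTOR_STORE_DIR.
LOCAL_VECTOR_STORE_DIR = Path("data/vector_stores")

def create_vectorstore(embeddings, documents, vectorstore_name):
    """Sketch: build and persist one Chroma collection for a given embedding model."""
    persist_directory = (LOCAL_VECTOR_STORE_DIR / vectorstore_name).as_posix()
    vector_store = Chroma.from_documents(
        documents=documents,          # the pre-split document chunks
        embedding=embeddings,         # e.g. OpenAI, Google, or HuggingFace embeddings
        persist_directory=persist_directory,
    )
    return vector_store, vectorstore_name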
@@ -217,12 +217,12 @@ vector_store_OpenAI = Chroma(
     persist_directory = LOCAL_VECTOR_STORE_DIR.as_posix() + "/Vit_All_OpenAI_Embeddings",
     embedding_function=embeddings_OpenAI)
 print("vector_store_OpenAI:",vector_store_OpenAI._collection.count(),"chunks.")
-
+
 vector_store_google = Chroma(
     persist_directory = current_dir + "/Vit_All_Google_Embeddings",
     embedding_function=embeddings_google)
 print("vector_store_google:",vector_store_google._collection.count(),"chunks.")
-
+"""

 vector_store_HF = Chroma(
     persist_directory = current_dir + "/Vit_All_HF_Embeddings",
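This hunk touches the reload path: a persisted Chroma collection is reopened by passing the same persist_directory and embedding function that were used to create it. A self-contained sketch of that pattern, assuming LangChain's Chroma constructor and HuggingFaceEmbeddings; the directory and model name here are placeholders, not the app's values.

from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma

# Assumed embedding model; app.py configures its own embeddings_HuggingFace.
embeddings_HuggingFace = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")

# Reopen the persisted collection created earlier (path is an assumed example;
# app.py builds it from current_dir + "/Vit_All_HF_Embeddings").
vector_store_HF = Chroma(
    persist_directory="data/vector_stores/Vit_All_HF_Embeddings",
    embedding_function=embeddings_HuggingFace,
)
print("vector_store_HF:", vector_store_HF._collection.count(), "chunks.")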
@@ -254,7 +254,7 @@ vectorstore,search_type="similarity",k=4,score_threshold=None

 # similarity search
 #base_retriever_OpenAI = Vectorstore_backed_retriever(vector_store_OpenAI,"similarity",k=10)
-base_retriever_google = Vectorstore_backed_retriever(vector_store_google,"similarity",k=10)
+#base_retriever_google = Vectorstore_backed_retriever(vector_store_google,"similarity",k=10)
 base_retriever_HF = Vectorstore_backed_retriever(vector_store_HF,"similarity",k=10)


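The hunk header shows the signature of the app's Vectorstore_backed_retriever helper (vectorstore, search_type="similarity", k=4, score_threshold=None). A sketch of how such a factory is usually implemented on top of LangChain's as_retriever(); the body below is an assumption, only the signature comes from the diff.

def Vectorstore_backed_retriever(vectorstore, search_type="similarity", k=4, score_threshold=None):
    """Sketch: wrap a vector store as a retriever with the given search settings."""
    search_kwargs = {"k": k}
    if score_threshold is not None:
        # score_threshold is honored when search_type="similarity_score_threshold"
        search_kwargs["score_threshold"] = score_threshold
    return vectorstore.as_retriever(search_type=search_type, search_kwargs=search_kwargs)

# Usage mirroring the change at line 257/258: only the HuggingFace-backed retriever stays active.
# base_retriever_google = Vectorstore_backed_retriever(vector_store_google, "similarity", k=10)
base_retriever_HF = Vectorstore_backed_retriever(vector_store_HF, "similarity", k=10)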
@@ -645,7 +645,7 @@ chain = ConversationalRetrievalChain.from_llm(
         LLM_provider="Google",api_key=google_api_key,temperature=0.1,
         model_name="gemini-pro"),
     memory=create_memory("gemini-pro"),
-    retriever =
+    retriever = base_retriever_HF, #base_retriever_HF
     llm=instantiate_LLM(
         LLM_provider="Google",api_key=google_api_key,temperature=0.5,
         model_name="gemini-pro"),
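The last hunk swaps the chain's retriever to the HuggingFace-backed one. Below is a hedged, self-contained sketch of how this wiring typically looks with LangChain's ConversationalRetrievalChain; instantiate_LLM and create_memory are the app's own helpers, so their bodies here, and the guess that the first LLM call feeds condense_question_llm, are assumptions rather than the app's actual implementation.

import os
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory
from langchain_google_genai import ChatGoogleGenerativeAI

google_api_key = os.environ.get("GOOGLE_API_KEY", "")  # assumed source of the key

# Hypothetical stand-ins for the app's helpers; only the Google branch is sketched.
def instantiate_LLM(LLM_provider, api_key, temperature, model_name):
    return ChatGoogleGenerativeAI(model=model_name, google_api_key=api_key, temperature=temperature)

def create_memory(model_name):
    return ConversationBufferMemory(memory_key="chat_history", return_messages=True, output_key="answer")

chain = ConversationalRetrievalChain.from_llm(
    condense_question_llm=instantiate_LLM(
        LLM_provider="Google", api_key=google_api_key, temperature=0.1, model_name="gemini-pro"),
    memory=create_memory("gemini-pro"),
    retriever=base_retriever_HF,  # the change in this hunk: use the HF-backed retriever
    llm=instantiate_LLM(
        LLM_provider="Google", api_key=google_api_key, temperature=0.5, model_name="gemini-pro"),
)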