bstraehle committed
Commit 0f74892 · 1 Parent(s): ff7b136

Update app.py

Files changed (1)
  app.py  +6 -4
app.py CHANGED
@@ -40,10 +40,12 @@ def invoke(openai_api_key, use_rag, prompt):
         text_splitter = RecursiveCharacterTextSplitter(chunk_size = 1500, chunk_overlap = 150)
         splits = text_splitter.split_documents(docs)
         vector_db = Chroma.from_documents(documents = splits, embedding = OpenAIEmbeddings(), persist_directory = CHROMA_DIR)
-    #else:
-    # vector_db = Chroma(persist_directory = CHROMA_DIR, embedding_function = OpenAIEmbeddings())
-    llm = ChatOpenAI(model_name = MODEL_NAME, openai_api_key = openai_api_key, temperature = 0)
-    qa_chain = RetrievalQA.from_chain_type(llm, retriever = vector_db.as_retriever(search_kwargs = {"k": 3}), return_source_documents = True, chain_type_kwargs = {"prompt": QA_CHAIN_PROMPT})
+        llm = ChatOpenAI(model_name = MODEL_NAME, openai_api_key = openai_api_key, temperature = 0)
+        qa_chain = RetrievalQA.from_chain_type(llm, retriever = vector_db.as_retriever(search_kwargs = {"k": 3}), return_source_documents = True, chain_type_kwargs = {"prompt": QA_CHAIN_PROMPT})
+    else:
+        #vector_db = Chroma(persist_directory = CHROMA_DIR, embedding_function = OpenAIEmbeddings())
+        llm = ChatOpenAI(model_name = MODEL_NAME, openai_api_key = openai_api_key, temperature = 0)
+        qa_chain = RetrievalQA.from_chain_type(llm, chain_type_kwargs = {"prompt": QA_CHAIN_PROMPT})
     result = qa_chain({"query": prompt})
     #print(result)
     return result["result"]
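
For context, below is a minimal sketch of how the invoke function reads after this commit, using the legacy LangChain APIs that appear in the diff. It is not the full app.py: MODEL_NAME, CHROMA_DIR, the WebBaseLoader URL, and the prompt template text are placeholders assumed for illustration, and the non-RAG branch falls back to a plain LLM call here (rather than the commit's retriever-less RetrievalQA) so the sketch stays self-contained and runnable.

import os

from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import WebBaseLoader
from langchain.embeddings import OpenAIEmbeddings
from langchain.prompts import PromptTemplate
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma

MODEL_NAME = "gpt-4"          # assumption: defined elsewhere in app.py
CHROMA_DIR = "/data/chroma"   # assumption: defined elsewhere in app.py

# Assumed prompt; the real QA_CHAIN_PROMPT in app.py may differ.
QA_CHAIN_PROMPT = PromptTemplate(
    input_variables = ["context", "question"],
    template = "Use the context to answer the question.\n\n{context}\n\nQuestion: {question}",
)

def invoke(openai_api_key, use_rag, prompt):
    # OpenAIEmbeddings reads the key from the environment.
    os.environ["OPENAI_API_KEY"] = openai_api_key
    llm = ChatOpenAI(model_name = MODEL_NAME, openai_api_key = openai_api_key, temperature = 0)
    if use_rag:
        # Load, split, and embed the source documents, then build a retrieval chain,
        # mirroring the RAG branch in the diff (loader and URL are placeholders).
        docs = WebBaseLoader("https://example.com/doc").load()
        splits = RecursiveCharacterTextSplitter(chunk_size = 1500, chunk_overlap = 150).split_documents(docs)
        vector_db = Chroma.from_documents(documents = splits, embedding = OpenAIEmbeddings(), persist_directory = CHROMA_DIR)
        qa_chain = RetrievalQA.from_chain_type(
            llm,
            retriever = vector_db.as_retriever(search_kwargs = {"k": 3}),
            return_source_documents = True,
            chain_type_kwargs = {"prompt": QA_CHAIN_PROMPT},
        )
        result = qa_chain({"query": prompt})
        return result["result"]
    # Non-RAG branch: substituted with a direct LLM call for this sketch;
    # the commit itself constructs a chain without a retriever here.
    return llm.predict(prompt)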