bstraehle committed
Commit 6553dbd
Parent: 4b7a531

Update app.py

Files changed (1)
  1. app.py +4 -2
app.py CHANGED
@@ -24,6 +24,8 @@ QA_CHAIN_PROMPT = PromptTemplate(input_variables = ["context", "question"], temp
 
 print(0)
 
+qa_chain = None
+
 def invoke(openai_api_key, youtube_url, prompt):
     openai.api_key = openai_api_key
     if (os.path.isdir("docs/chroma/") == False):
@@ -36,9 +38,9 @@ def invoke(openai_api_key, youtube_url, prompt):
     chroma_dir = "docs/chroma/"
     vectordb = Chroma.from_documents(documents = splits, embedding = OpenAIEmbeddings(), persist_directory = chroma_dir)
     llm = ChatOpenAI(model_name = "gpt-4", temperature = 0)
-    qa_chain = RetrievalQA.from_chain_type(llm, retriever = vectordb.as_retriever(), return_source_documents = True, chain_type_kwargs = {"prompt": QA_CHAIN_PROMPT})
+    global qa_chain = RetrievalQA.from_chain_type(llm, retriever = vectordb.as_retriever(), return_source_documents = True, chain_type_kwargs = {"prompt": QA_CHAIN_PROMPT})
     print(2)
-    result = qa_chain({"query": prompt})
+    result = global qa_chain({"query": prompt})
     shutil.rmtree(youtube_dir)
     #shutil.rmtree(chroma_dir)
     return result["result"]
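Note on the added lines: `global qa_chain = ...` and `result = global qa_chain(...)` are not valid Python, because `global` is a standalone declaration rather than a modifier on an assignment or a call, so app.py as committed raises a SyntaxError. A minimal sketch of the pattern the change appears to intend, assuming the surrounding names (llm, vectordb, QA_CHAIN_PROMPT) are defined as elsewhere in app.py and that the goal is to keep the chain in a module-level variable:

    qa_chain = None  # module-level slot for the chain

    def invoke(openai_api_key, youtube_url, prompt):
        global qa_chain  # declare once, before qa_chain is assigned in this function
        ...              # document loading, splitting, and vectordb setup as in app.py
        qa_chain = RetrievalQA.from_chain_type(llm, retriever = vectordb.as_retriever(), return_source_documents = True, chain_type_kwargs = {"prompt": QA_CHAIN_PROMPT})
        result = qa_chain({"query": prompt})  # plain name at the call site; `global` is not repeated
        return result["result"]

Without the `global` declaration, the assignment inside invoke would create a local variable and leave the module-level qa_chain as None.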