lekkalar committed on
Commit f47c9bf
1 Parent(s): 0c53820

Update app.py

Files changed (1)
app.py +14 -2
app.py CHANGED
@@ -21,10 +21,22 @@ def load_pdf_and_generate_embeddings(pdf_doc, open_ai_key, relevant_pages='all')
     #Create an instance of OpenAIEmbeddings, which is responsible for generating embeddings for text
     embeddings = OpenAIEmbeddings()
 
+    pages_to_be_loaded = []
+
     if relevant_pages == 'all':
-        pages = pages
+        pages_to_be_loaded = pages.copy()
+    else:
+        page_numbers = relevant_pages.split(",")
+        for page_number in page_numbers:
+            #Convert the 1-based page number string to a 0-based index
+            pageIndex = int(page_number) - 1
+            if pageIndex >= 0 and pageIndex < len(pages):
+                pages_to_be_loaded.append(pages[pageIndex])
+        #In the scenario where none of the page numbers supplied exist in the PDF, we revert to using the entire PDF
+        if len(pages_to_be_loaded) == 0:
+            pages_to_be_loaded = pages.copy()
+
     #To create a vector store, we use the Chroma class, which takes the documents (pages in our case) and the embeddings instance
-    vectordb = Chroma.from_documents(pages, embedding=embeddings)
+    vectordb = Chroma.from_documents(pages_to_be_loaded, embedding=embeddings)
 
     #Finally, we create the bot using the RetrievalQA class
     global pdf_qa
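For reference, a minimal usage sketch of the updated function, assuming the signature shown in the hunk header above; the PDF path, API key placeholder, and page selection string are hypothetical:

# Hypothetical call: file name and key are placeholders, not part of the commit.
# relevant_pages="2,5" selects pages 2 and 5 (1-based); out-of-range page numbers
# are skipped, and if none match, the whole PDF is indexed, per the added logic.
load_pdf_and_generate_embeddings("example.pdf", "OPENAI_API_KEY", relevant_pages="2,5")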