mazed committed on
Commit
12464ce
1 Parent(s): 35c27af

Upload 3 files

Files changed (3)
  1. .env +1 -0
  2. app.py +98 -0
  3. requirements.txt +105 -0
.env ADDED
@@ -0,0 +1 @@
+ GOOGLE_API_KEY = "AIzaSyBBrePLC0eqi2LTVio-a7fyFKDqnoB9HdM"
app.py ADDED
@@ -0,0 +1,98 @@
+ import streamlit as st
+ from PyPDF2 import PdfReader
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
+ import os
+ import io
+
+ from langchain_huggingface import HuggingFaceEmbeddings
+ import google.generativeai as genai
+ from langchain_community.vectorstores import FAISS
+ from langchain_google_genai import ChatGoogleGenerativeAI
+ from langchain.chains.question_answering import load_qa_chain
+ from langchain.prompts import PromptTemplate
+ from dotenv import load_dotenv
+
+ # Load the environment variables from .env
+ load_dotenv()
+
+ # Configure the Gemini API key
+ genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
+
+ # Extract the raw text from the uploaded PDFs
+ def get_pdf_text(pdf_docs):
+     text = ""
+     for pdf in pdf_docs:
+         pdf_reader = PdfReader(io.BytesIO(pdf.read()))
+         for page in pdf_reader.pages:
+             # extract_text() can return None for image-only pages
+             text += page.extract_text() or ""
+     return text
+
+ # Split the text into overlapping chunks for embedding
+ def get_text_chunks(text):
+     text_splitter = RecursiveCharacterTextSplitter(chunk_size=10000, chunk_overlap=1000)
+     chunks = text_splitter.split_text(text)
+     return chunks
+
+ # Embed the chunks and persist a FAISS index to disk
+ def get_vector_store(text_chunks):
+     embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
+     vector_store = FAISS.from_texts(text_chunks, embedding=embeddings)
+     vector_store.save_local("faiss_index")
+
+ # Build the question-answering chain around Gemini
+ def get_conversational_chain():
+     prompt_template = """
+     Answer the question as detailed as possible from the provided context; make sure to provide all the details. If the answer is not in the provided context, just say "answer is not available in the context"; don't provide any wrong answer.\n\n
+     Context:\n{context}\n
+     Question:\n{question}\n
+
+     Answer:
+     """
+
+     model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3)
+
+     prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
+     chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
+     return chain
+
+ # Retrieve the most relevant chunks and answer the question
+ def user_input(user_question):
+     embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
+     new_db = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
+     docs = new_db.similarity_search(user_question)
+
+     chain = get_conversational_chain()
+
+     response = chain(
+         {"input_documents": docs, "question": user_question},
+         return_only_outputs=True)
+
+     st.write(response["output_text"])
+
+ def main():
+     st.set_page_config("Chat With Multiple PDF")
+
+     # App name
+     st.markdown("<h5 style='text-align: center;'>ChatPDF</h5>", unsafe_allow_html=True)
+
+     user_question = st.text_input("Ask a Question from the PDF Files")
+
+     if user_question:
+         user_input(user_question)
+
+     with st.sidebar:
+         pdf_docs = st.file_uploader("Upload PDFs and Click on Submit & Process", accept_multiple_files=True)
+         if st.button("Submit & Process"):
+             if not pdf_docs:
+                 st.warning("Please upload at least one PDF first.")
+             else:
+                 with st.spinner("Processing..."):
+                     raw_text = get_pdf_text(pdf_docs)
+                     text_chunks = get_text_chunks(raw_text)
+                     get_vector_store(text_chunks)
+                     st.success("Done")
+
+ if __name__ == "__main__":
+     main()
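
For a quick sanity check of the pipeline app.py implements, here is a minimal sketch (not part of the commit) of the same embed, index, and retrieve steps that get_vector_store() and user_input() perform; the sample text and query are illustrative placeholders, and the pinned packages below must be installed:

from langchain_huggingface import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS

# Same embedding model app.py uses
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")

# Index a toy chunk and persist it, as get_vector_store() does
store = FAISS.from_texts(["ChatPDF indexes PDF text with FAISS."], embedding=embeddings)
store.save_local("faiss_index")

# Reload and query it, as user_input() does
reloaded = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
docs = reloaded.similarity_search("What does ChatPDF index?")
print(docs[0].page_content)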
requirements.txt ADDED
@@ -0,0 +1,105 @@
+ aiohappyeyeballs==2.3.4
+ aiohttp==3.10.0
+ aiosignal==1.3.1
+ altair==5.3.0
+ annotated-types==0.7.0
+ async-timeout==4.0.3
+ attrs==23.2.0
+ blinker==1.8.2
+ cachetools==5.4.0
+ certifi==2024.7.4
+ charset-normalizer==3.3.2
+ click==8.1.7
+ colorama==0.4.6
+ dataclasses-json==0.6.7
+ faiss-cpu==1.8.0.post1
+ filelock==3.15.4
+ frozenlist==1.4.1
+ fsspec==2024.6.1
+ gitdb==4.0.11
+ GitPython==3.1.43
+ google-ai-generativelanguage==0.6.6
+ google-api-core==2.19.1
+ google-api-python-client==2.139.0
+ google-auth==2.32.0
+ google-auth-httplib2==0.2.0
+ google-generativeai==0.7.2
+ googleapis-common-protos==1.63.2
+ greenlet==3.0.3
+ grpcio==1.65.2
+ grpcio-status==1.62.2
+ httplib2==0.22.0
+ huggingface-hub==0.24.5
+ idna==3.7
+ Jinja2==3.1.4
+ joblib==1.4.2
+ jsonpatch==1.33
+ jsonpointer==3.0.0
+ jsonschema==4.23.0
+ jsonschema-specifications==2023.12.1
+ langchain==0.2.12
+ langchain-community==0.2.10
+ langchain-core==0.2.27
+ langchain-google-genai==1.0.8
+ langchain-huggingface==0.0.3
+ langchain-text-splitters==0.2.2
+ langsmith==0.1.96
+ markdown-it-py==3.0.0
+ MarkupSafe==2.1.5
+ marshmallow==3.21.3
+ mdurl==0.1.2
+ mpmath==1.3.0
+ multidict==6.0.5
+ mypy-extensions==1.0.0
+ networkx==3.3
+ numpy==1.26.4
+ orjson==3.10.6
+ packaging==24.1
+ pandas==2.2.2
+ pillow==10.4.0
+ proto-plus==1.24.0
+ protobuf==4.25.4
+ pyarrow==17.0.0
+ pyasn1==0.6.0
+ pyasn1_modules==0.4.0
+ pydantic==2.8.2
+ pydantic_core==2.20.1
+ pydeck==0.9.1
+ Pygments==2.18.0
+ pyparsing==3.1.2
+ PyPDF2==3.0.1
+ python-dateutil==2.9.0.post0
+ python-dotenv==1.0.1
+ pytz==2024.1
+ PyYAML==6.0.1
+ referencing==0.35.1
+ regex==2024.7.24
+ requests==2.32.3
+ rich==13.7.1
+ rpds-py==0.19.1
+ rsa==4.9
+ safetensors==0.4.3
+ scikit-learn==1.5.1
+ scipy==1.14.0
+ sentence-transformers==3.0.1
+ six==1.16.0
+ smmap==5.0.1
+ SQLAlchemy==2.0.31
+ streamlit==1.37.0
+ sympy==1.13.1
+ tenacity==8.5.0
+ threadpoolctl==3.5.0
+ tokenizers==0.19.1
+ toml==0.10.2
+ toolz==0.12.1
+ torch==2.4.0
+ tornado==6.4.1
+ tqdm==4.66.4
+ transformers==4.43.3
+ typing-inspect==0.9.0
+ typing_extensions==4.12.2
+ tzdata==2024.1
+ uritemplate==4.1.1
+ urllib3==2.2.2
+ watchdog==4.0.1
+ yarl==1.9.4
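
These pins are installed with pip install -r requirements.txt, after which the app is launched with streamlit run app.py; both are standard pip and Streamlit CLI commands, not files in this commit.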