import gradio as gr
import os
import time

from langchain.document_loaders import OnlinePDFLoader   # for loading the pdf
from langchain.embeddings import OpenAIEmbeddings        # for creating embeddings
from langchain.vectorstores import Chroma                # for the vectorization part
from langchain.chains import RetrievalQA                 # for conversing with ChatGPT
from langchain.chat_models import ChatOpenAI             # the LLM we'll use (ChatGPT)


def load_pdf(pdf_doc, openai_key):
    if openai_key is not None:
        os.environ['OPENAI_API_KEY'] = openai_key

        # Load the pdf file
        loader = OnlinePDFLoader(pdf_doc.name)
        pages = loader.load_and_split()

        # Create an instance of OpenAIEmbeddings, which is responsible for generating embeddings for text
        embeddings = OpenAIEmbeddings()

        # To create a vector store, we use the Chroma class, which takes the documents
        # (pages in our case) and the embeddings instance
        vectordb = Chroma.from_documents(pages, embedding=embeddings)

        # Finally, we create the bot using the RetrievalQA chain
        global pdf_qa
        pdf_qa = RetrievalQA.from_chain_type(
            llm=ChatOpenAI(temperature=0, model_name="gpt-4"),
            retriever=vectordb.as_retriever(),
            return_source_documents=False,
        )
        return "Ready"
    else:
        return "Please provide an OpenAI API key"


def answer_query(query):
    question = query
    return pdf_qa.run(question)
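Before wiring up the interface, the two helpers above can be sanity-checked directly in a Python session. The snippet below is a minimal sketch rather than part of the app: FakeUpload, the sample.pdf path, and the placeholder API key are hypothetical stand-ins, used only because load_pdf reads the uploaded file's path from a .name attribute.

# Minimal sketch for testing the helpers without the Gradio UI.
# FakeUpload, "sample.pdf", and the placeholder key are hypothetical stand-ins.
class FakeUpload:
    """Mimics the object Gradio passes for an uploaded file: only .name is used."""
    def __init__(self, path):
        self.name = path

print(load_pdf(FakeUpload("sample.pdf"), "sk-..."))    # prints "Ready" once the index is built
print(answer_query("What is this document about?"))    # answers from the indexed PDF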

html = """
<div class="container">
    <h1>ChatPDF</h1>
    <p>Upload a PDF File, then click on Load PDF File</p>
    <p>Once the document has been loaded you can begin chatting with the PDF =)</p>
</div>
"""

css = """.container {max-width: 700px; margin-left: auto; margin-right: auto; padding: 20px}"""

with gr.Blocks(css=css, theme=gr.themes.Monochrome()) as demo:
    gr.HTML(html)
    with gr.Column():
        gr.Markdown('ChatPDF')
        openai_key = gr.Textbox(label="Your OpenAI API key", type="password")
        pdf_doc = gr.File(label="Load a pdf", file_types=['.pdf', '.docx'], type='file')
        with gr.Row():
            load_pdf_btn = gr.Button('Load pdf file')
            status = gr.Textbox(label="Status", placeholder='', interactive=False)
        with gr.Row():
            input = gr.Textbox(label="Type in your question")
            output = gr.Textbox(label="Output")
        submit_query = gr.Button("Submit")

    # Wire the UI events to the helper functions defined above
    load_pdf_btn.click(load_pdf, inputs=[pdf_doc, openai_key], outputs=status)
    submit_query.click(answer_query, inputs=input, outputs=output)

demo.launch()