import os
import shutil
from pathlib import Path
from tempfile import NamedTemporaryFile

import gradio as gr
from openai import OpenAI

# OpenRouter exposes an OpenAI-compatible API, so the standard OpenAI client
# is simply pointed at its base URL.
client = OpenAI(
    base_url="https://openrouter.ai/api/v1",
    api_key=os.getenv('OPENROUTER_API_KEY')
)

from util import pdf_to_text, text_to_chunks, SemanticSearch

recommender = SemanticSearch()
def load_recommender(path, start_page=1):
    """Extract text from the PDF at `path`, split it into chunks, and index them for search."""
    global recommender
    texts = pdf_to_text(path, start_page=start_page)
    chunks = text_to_chunks(texts, start_page=start_page)
    recommender.fit(chunks)
    return 'Corpus Loaded.'


def generate_text(prompt):
    """Send a single-turn prompt to the model via OpenRouter and return the reply text."""
    message = client.chat.completions.create(
        model="google/gemini-pro",
        messages=[
            {"role": "user", "content": prompt}
        ],
    ).choices[0].message.content
    return message

def question_answer(chat_history, file, question):
    """Index the uploaded PDF and answer the question from its contents."""
    # Copy the upload to a stable temporary path before indexing it.
    suffix = Path(file.name).suffix
    with NamedTemporaryFile(delete=False, suffix=suffix) as tmp:
        shutil.copyfile(file.name, tmp.name)
        tmp_path = Path(tmp.name)

    load_recommender(str(tmp_path))
    # Assumes SemanticSearch instances are callable and return the top matching
    # chunks for a query (as in the upstream pdfGPT implementation); those chunks
    # are passed to the model as context instead of sending the bare question.
    topn_chunks = recommender(question)
    prompt = 'Search results:\n\n' + '\n\n'.join(topn_chunks)
    prompt += f'\n\nAnswer the question using only the search results above.\n\nQuery: {question}\nAnswer:'
    answer = generate_text(prompt)
    chat_history.append([question, answer])
    return chat_history

title = 'PDF GPT'
description = """Upload a PDF and ask questions about its contents."""

with gr.Blocks(css="""#chatbot { font-size: 14px; min-height: 1200px; }""") as demo:

    gr.Markdown(f'<center><h3>{title}</h3></center>')
    gr.Markdown(description)

    with gr.Row():
        
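        # Left column: PDF upload and question input.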
        with gr.Group():
            with gr.Accordion("PDF file"):
                file = gr.File(label='Upload your PDF/ Research Paper / Book here', file_types=['.pdf'])
            question = gr.Textbox(label='Enter your question here')
            btn = gr.Button(value='Submit')

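        # Right column: running chat history.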
        with gr.Group():
            chatbot = gr.Chatbot(label="Chat History", elem_id="chatbot")

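    # Submitting sends the chat history, uploaded file and question to question_answer.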
    btn.click(
        question_answer,
        inputs=[chatbot, file, question],
        outputs=[chatbot],
        api_name="predict",
    )

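# Bind to all interfaces so the app is reachable from outside the host (e.g. in a container).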
demo.launch(server_name="0.0.0.0")