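# PDF question-answering demo: LlamaIndex + Gradio, with a choice between
# Mistral (open-mistral-7b) and Anthropic (claude-3-haiku) models.
# Approximate install (package names assumed from the imports below):
#   pip install gradio gradio_pdf llama-index-llms-anthropic llama-index-llms-mistralai \
#       llama-index-embeddings-mistralai llama-index-embeddings-huggingface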
from llama_index.llms.anthropic import Anthropic
from llama_index.llms.mistralai import MistralAI
from llama_index.embeddings.mistralai import MistralAIEmbedding
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.core.settings import Settings
from llama_index.core import SimpleDirectoryReader, VectorStoreIndex
import gradio as gr
from gradio_pdf import PDF
import os
choices = ['open-mistral-7b', 'claude-3-haiku']

def model_selection(choice):
    """Configure the global LLM and embedding model for the selected option."""
    if choice == "open-mistral-7b":
        # Read the key from the environment instead of hard-coding the secret in source.
        api_key = os.environ["MISTRAL_API_KEY"]
        llm = MistralAI(api_key=api_key, model="open-mistral-7b")
        embed_model = MistralAIEmbedding(model_name='mistral-embed', api_key=api_key)
    else:
        api_key = os.environ["ANTHROPIC_API_KEY"]
        llm = Anthropic(api_key=api_key, model='claude-3-haiku-20240307')
        # Anthropic has no embedding endpoint, so pair it with a local HuggingFace model.
        embed_model = HuggingFaceEmbedding(model_name='BAAI/bge-base-en-v1.5')
    Settings.llm = llm
    Settings.embed_model = embed_model
    return choice

def qa(question: str, doc: str) -> str:
    """Index the uploaded PDF and answer the question against it."""
    my_pdf = SimpleDirectoryReader(input_files=[doc]).load_data()
    my_pdf_index = VectorStoreIndex.from_documents(my_pdf)
    my_pdf_engine = my_pdf_index.as_query_engine()
    response = my_pdf_engine.query(question)
    # query() returns a Response object; return plain text for the output Textbox.
    return str(response)

with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            model_choice = gr.Radio(choices=choices, label='Choose a model')
            # Reconfigure the LLM and embedding model whenever the selection changes.
            model_choice.change(model_selection, inputs=model_choice)
            pdf_input = gr.File(label="Upload PDF")
        with gr.Column():
            question_input = gr.Textbox(label="Ask Question from PDF document")
            qa_button = gr.Button("Get Answer")
            answer_output = gr.Textbox(label="Answer")
    qa_button.click(fn=qa, inputs=[question_input, pdf_input], outputs=answer_output)

demo.launch(debug=True)
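
# To run (filename assumed to be app.py): export MISTRAL_API_KEY and ANTHROPIC_API_KEY,
# then `python app.py` and open the local URL that Gradio prints.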