import gradio as gr

from backend import handle_query

# Chat UI: gr.ChatInterface calls handle_query(message, history) for each user turn.
iface = gr.ChatInterface(
    fn=handle_query,
    title="PDF Information and Inference",
    description="Retrieval-Augmented Generation - Ask me anything about the content of the PDF.",
    # examples=["What is the main topic of the document?", "Can you summarize the key points?"],
    # cache_examples=True,
)

if __name__ == "__main__":
    iface.launch()