import gradio as gr
import logging, os, sys, threading

from custom_utils import connect_to_database, rag_ingestion, rag_retrieval, rag_inference

lock = threading.Lock()

# One-time flag: when True, the app ingests documents into the collection
# instead of answering prompts.
RAG_INGESTION = False

RAG_OFF      = "Off"
RAG_NAIVE    = "Naive RAG"
RAG_ADVANCED = "Advanced RAG"

logging.basicConfig(stream = sys.stdout, level = logging.INFO)
logging.getLogger().addHandler(logging.StreamHandler(stream = sys.stdout))

def invoke(openai_api_key, prompt, rag_option):
    if not openai_api_key:
        raise gr.Error("OpenAI API Key is required.")
    if not prompt:
        raise gr.Error("Prompt is required.")
    if not rag_option:
        raise gr.Error("Retrieval-Augmented Generation is required.")

    with lock:
        db, collection = connect_to_database()

        if RAG_INGESTION:
            return rag_ingestion(collection)
        else:
            ### Pre-retrieval processing: index filter
            ### Post-retrieval processing: result filter
            # Restrict results to documents with accommodates == 2 and bedrooms == 1.
            match_stage = {
                "$match": {
                    "accommodates": { "$eq": 2 },
                    "bedrooms": { "$eq": 1 }
                }
            }

            additional_stages = [match_stage]
            ###

            search_results = rag_retrieval(openai_api_key,
                                           prompt,
                                           db,
                                           collection,
                                           additional_stages)

            return rag_inference(openai_api_key,
                                 prompt,
                                 search_results)

gr.close_all()

PROMPT = "Recommend a place that's modern, spacious, and within walking distance from restaurants."

demo = gr.Interface(
    fn = invoke,
    inputs = [gr.Textbox(label = "OpenAI API Key", type = "password", lines = 1),
              gr.Textbox(label = "Prompt", value = PROMPT, lines = 1),
              gr.Radio([RAG_OFF, RAG_NAIVE, RAG_ADVANCED], label = "Retrieval-Augmented Generation", value = RAG_ADVANCED)],
    outputs = [gr.Markdown(label = "Completion")],
    title = "Context-Aware Reasoning Application",
    description = os.environ["DESCRIPTION"]
)

demo.launch()
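
# ---------------------------------------------------------------------------
# Reference sketch (illustrative only, not part of the app above): one
# possible shape for the rag_retrieval helper imported from custom_utils,
# assuming the collection lives in MongoDB Atlas with a vector search index
# and that queries are embedded with the OpenAI embeddings API. The function
# name, index name ("vector_index"), embedding field ("embedding"), embedding
# model, and candidate/limit values below are assumptions for illustration;
# the real custom_utils implementation may differ.
# ---------------------------------------------------------------------------
from openai import OpenAI

def rag_retrieval_sketch(openai_api_key, prompt, db, collection, additional_stages):
    # Embed the user prompt with the OpenAI embeddings API.
    client = OpenAI(api_key = openai_api_key)
    query_vector = client.embeddings.create(
        model = "text-embedding-3-small",  # assumed embedding model
        input = prompt
    ).data[0].embedding

    # Atlas Vector Search stage, followed by any pre-/post-retrieval stages
    # (such as the $match filter built in invoke above). The db parameter is
    # accepted only to mirror the assumed signature; it is not used here.
    vector_search_stage = {
        "$vectorSearch": {
            "index": "vector_index",   # assumed Atlas Search index name
            "path": "embedding",       # assumed embedding field in the documents
            "queryVector": query_vector,
            "numCandidates": 150,      # assumed candidate pool size
            "limit": 10                # assumed number of results returned
        }
    }

    pipeline = [vector_search_stage] + additional_stages
    return list(collection.aggregate(pipeline))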