import argparse
import logging

import datasets
import gradio as gr
import sentence_transformers

# Silence library logging so the app output stays clean.
logging.disable(logging.CRITICAL)

# Load the French sentence-embedding model used to encode queries.
model = sentence_transformers.SentenceTransformer(
    "dangvantuan/sentence-camembert-large", device="cpu")

# Load the transcript dataset and attach its precomputed FAISS index.
dataset = datasets.load_dataset("json", data_files=["./dataset.json"], split="train")
dataset.load_faiss_index("embeddings", "index.faiss")


def search(query, k):
    # Embed the query and retrieve the k nearest transcript segments.
    query_embedding = model.encode(query)
    _, retrieved_examples = dataset.get_nearest_examples(
        "embeddings",
        query_embedding,
        k=int(k),
    )

    results = []
    for text, start, end, title, url in zip(
        retrieved_examples["text"],
        retrieved_examples["start"],
        retrieved_examples["end"],
        retrieved_examples["title"],
        retrieved_examples["url"],
    ):
        result = {
            "title": title,
            "transcript": f"[{start} ====> {end}] {text}",
            "link": url,
        }
        results.append(result)

    # Format results as a single string for single textbox output
    results_text = ("