import os

import gradio as gr
import pinecone
from sentence_transformers import SentenceTransformer

# Embedding model used to encode user queries into dense vectors.
retriever = SentenceTransformer("sentence-transformers/all-MiniLM-L12-v2")

# Connect to the Pinecone index holding the pre-computed Quran embeddings.
pinecone_key = os.environ["PINECONE_SECRET"]
pinecone.init(api_key=pinecone_key, environment="eu-west1-gcp")
index_name = "quran-semantic-search"
index = pinecone.Index(index_name)


def query_pinecone(query, top_k):
    # Encode the query, then search the vector database for similar vectors.
    xq = retriever.encode([query]).tolist()
    xc = index.query(xq, top_k=top_k, include_metadata=True)
    return xc["matches"]


def format_search_result(result):
    # Turn one Pinecone match into a readable chat message using its metadata.
    data = result["metadata"]
    message = (
        f"Ayah no: {data['ayah']}\n"
        f"Surah no: {data['surah']}\n"
        f"Sentence: {data['arabic-text']}\n"
        f"Translation: {data['en-translation']}\n"
        f"Tafsir: {data['en-tafsir-mokhtasar']}\n"
        f"Relevant Tafsir: {data['vector-chunk']}"
    )
    return message


with gr.Blocks() as demo:
    gr.Markdown(
        """
        # Quran Semantic Search!
        Ask questions, seek advice, and see what the Holy Book has to say about it.
        """
    )
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def user(user_message, history):
        # Append the user's message to the chat history and clear the textbox.
        return "", history + [[user_message, None]]

    def bot(history):
        # Answer the latest message with the top-3 semantic search results.
        query = history[-1][0]
        results = query_pinecone(query, top_k=3)
        for match in results:
            if history[-1][1] is None:
                history[-1][1] = format_search_result(match)
            else:
                history.append([None, format_search_result(match)])
        return history

    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    clear.click(lambda: None, None, chatbot, queue=False)

demo.launch()
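
# Note: the app assumes the "quran-semantic-search" index was already populated
# with MiniLM embeddings and per-ayah metadata. A minimal, hypothetical sketch of
# that indexing step is shown below (field names mirror what format_search_result
# reads; the id and values are placeholders, not the actual dataset):
#
#     chunk = "..."  # one tafsir passage to embed
#     index.upsert(vectors=[(
#         "<surah>-<ayah>",                  # unique vector id
#         retriever.encode(chunk).tolist(),  # embedding of the chunk
#         {
#             "surah": 0,
#             "ayah": 0,
#             "arabic-text": "...",
#             "en-translation": "...",
#             "en-tafsir-mokhtasar": "...",
#             "vector-chunk": chunk,
#         },
#     )])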