from fastapi import FastAPI
import gradio as gr
from gradio.themes.base import Base
from hf_mixtral_agent import agent_executor
from innovation_pathfinder_ai.source_container.container import (
    all_sources
)
from innovation_pathfinder_ai.utils.utils import (
    extract_urls,
    generate_uuid,
)
from innovation_pathfinder_ai.utils import logger
from langchain_community.vectorstores import Chroma
import chromadb
from configparser import ConfigParser
import dotenv
import os
dotenv.load_dotenv()

config = ConfigParser()
config.read('innovation_pathfinder_ai/config.ini')
persist_directory = config.get('main', 'VECTOR_DATABASE_LOCATION')

logger = logger.get_console_logger("app")

app = FastAPI()
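

# Create (or reuse) the Chroma collection that stores each conversation turn.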
def initialize_chroma_db() -> chromadb.Collection:
    collection_name = config.get('main', 'CONVERSATION_COLLECTION_NAME')

    client = chromadb.PersistentClient(
        path=persist_directory
    )

    collection = client.get_or_create_collection(
        name=collection_name,
    )

    return collection
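

# Build and launch the Gradio chat UI when this file is run directly.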
if __name__ == "__main__":
    db = initialize_chroma_db()

    def add_text(history, text):
        # Append the user's message to the history with a placeholder for the bot reply.
        history = history + [(text, None)]
        return history, ""
    def bot(history):
        response = infer(history[-1][0], history)
        sources = extract_urls(all_sources)
        src_list = '\n'.join(sources)
        current_id = generate_uuid()
        db.add(
            ids=[current_id],
            documents=[response['output']],
            metadatas=[
                {
                    "human_message": history[-1][0],
                    "sources": 'Internal Knowledge Base From: \n\n' + src_list
                }
            ]
        )
        if not sources:
            response_w_sources = response['output'] + "\n\n\n Sources: \n\n\n Internal knowledge base"
        else:
            response_w_sources = response['output'] + "\n\n\n Sources: \n\n\n" + src_list
        history[-1][1] = response_w_sources
        all_sources.clear()
        return history
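
    # Invoke the Mixtral agent executor on the latest question, passing prior turns as chat history.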
    def infer(question, history):
        result = agent_executor.invoke(
            {
                "input": question,
                "chat_history": history
            }
        )
        return result
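
    # Print like/dislike feedback coming from the chatbot's voting buttons.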
    def vote(data: gr.LikeData):
        if data.liked:
            print("You upvoted this response: " + data.value)
        else:
            print("You downvoted this response: " + data.value)
css="""
#col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
"""
title = """
<div style="text-align:left;">
<p>Hello Human, I am your AI knowledge research assistant. I can explore topics across ArXiv, Wikipedia and use Google search.<br />
</div>
"""
    with gr.Blocks(theme=gr.themes.Soft(), title="AlfredAI - AI Knowledge Research Assistant") as demo:
        # with gr.Tab("Google|Wikipedia|Arxiv"):
        with gr.Column(elem_id="col-container"):
            gr.HTML(title)
            with gr.Row():
                question = gr.Textbox(
                    label="Question",
                    placeholder="Type your question and hit Enter",
                )
            chatbot = gr.Chatbot(
                [],
                elem_id="AI Assistant",
                bubble_full_width=False,
                avatar_images=(None, "./innovation_pathfinder_ai/assets/avatar.png"),
                height=480,
            )
            chatbot.like(vote, None, None)
            clear = gr.Button("Clear")
            question.submit(add_text, [chatbot, question], [chatbot, question], queue=False).then(
                bot, chatbot, chatbot
            )
            clear.click(lambda: None, None, chatbot, queue=False)
            with gr.Accordion("Open for More!", open=False):
                gr.Markdown("Nothing yet...")

    demo.queue()
    demo.launch(debug=True, favicon_path="innovation_pathfinder_ai/assets/favicon.ico", share=True)

    # Mount the Gradio app on the FastAPI instance at the root path.
    app = gr.mount_gradio_app(app, demo, path="/")