# Application file for Gradio App for OpenAI Model
import gradio as gr
import time
import datetime
import os

from lc_base.chain import openai_chain
from driveapi.drive import upload_chat_to_drive
from driveapi.drive_database import create_chroma_db

# global time_diff, model_name, search_type
time_diff = 0
model_name = "gpt-3.5-turbo-1106"
# model_name = "gpt-4-1106-preview"
# model_name = "gpt-4-0125-preview"
search_type = "stuff"
input_question = ""
model_response = ""
user_feedback = ""
dir = ""

title = """<h1 align="center">ResearchBuddy</h1>"""
description = """<br><br><h3 align="center">This is a GPT based Research Buddy to assist in navigating new research topics.</h3>"""

def save_api_key(api_key):
    os.environ['OPENAI_API_KEY'] = str(api_key)
    return f"API Key saved in the environment: {api_key}"

def save_drive_link(drive_link):
    os.environ['DRIVE_LINK'] = str(drive_link)
    print(f"Drive link saved in the environment: {drive_link}")
    return None
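
# Build the Chroma database of the shared documents and keep it in a
# module-level global (db) so the chat callback can query it.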
def create_data_from_drive():
    global db
    db = create_chroma_db()
    return "Processing Completed - You can start the chat now!"

def user(user_message, history):
    return "", history + [[user_message, None]]
def respond(message, chat_history):
    global time_diff, model_response, input_question

    print("Database is ...................")
    print(type(db))

    question = str(message)
    chain = openai_chain(inp_dir=dir)

    # prompt = '''You are an AI assistant equipped with advanced analytical capabilities.
    # You have been provided with a carefully curated set of documents relevant to a specific question.
    # Your task is to meticulously analyze these documents and provide a comprehensive answer to the following question.
    # Ensure that your response is detailed, accurate, and maintains a formal, academic tone.
    # The information required to answer this question is contained within the documents.
    # Please proceed with a thorough examination to deliver a well-informed response. Question: '''
    # query = prompt + question
    query = question

    start_time = time.time()
    output = chain.get_response_from_drive(query=query, database=db, k=10, model_name=model_name, type=search_type)
    print(output)

    # Update global variables to log
    time_diff = time.time() - start_time
    model_response = output
    input_question = question

    bot_message = output
    chat_history.append((message, bot_message))
    time.sleep(2)

    return " ", chat_history
def save_feedback(feedback):
    global user_feedback
    user_feedback = feedback

    curr_date = datetime.datetime.now()
    file_name = f"chat_{curr_date.day}_{curr_date.month}_{curr_date.hour}_{curr_date.minute}_{curr_date.second}.csv"
    log_data = [
        ["Question", "Response", "Model", "Time", "Feedback"],
        [input_question, model_response, model_name, time_diff, user_feedback]
    ]

    # Only upload when an actual rating was chosen (the radio defaults to "None").
    if user_feedback != "None":
        upload_chat_to_drive(log_data, file_name)

def default_feedback():
    return "None"

def default_text():
    return ""
def text_feedback(feedback):
    # Store the comment in a separately named global so this function's own
    # name is not overwritten by the string it receives.
    global user_text_feedback
    user_text_feedback = feedback

    curr_date = datetime.datetime.now()
    file_name = f"chat_{curr_date.day}_{curr_date.month}_{curr_date.hour}_{curr_date.minute}_{curr_date.second}.csv"
    log_data = [
        ["Question", "Response", "Model", "Time", "Feedback"],
        [input_question, model_response, model_name, time_diff, user_text_feedback]
    ]

    upload_chat_to_drive(log_data, file_name)
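
# Build the Gradio interface: credential inputs, file processing, the chat
# area and the feedback widgets.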
with gr.Blocks(theme=gr.themes.Soft(primary_hue="emerald", neutral_hue="slate")) as chat:
    gr.HTML(title)
    global db

    with gr.Row():
        with gr.Column():
            api_key_input = gr.Textbox(lines=1, label="Enter your OpenAI API Key, then press Enter...")
        with gr.Column():
            drive_link_input = gr.Textbox(lines=1, label="Enter your shared drive link, then press Enter...")

    with gr.Row():
        process_files_input = gr.Button(value="Process files")
    with gr.Row():
        status_message = gr.Text(label="Status", value="Click - Process Files")

    api_key_input.submit(save_api_key, [api_key_input])
    drive_link_input.submit(fn=save_drive_link, inputs=[drive_link_input])
    drive_link_check = os.environ.get("DRIVE_LINK")
    process_files_input.click(fn=create_data_from_drive, outputs=status_message)
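
    # Chat area: conversation history, message box and example prompts.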
    chatbot = gr.Chatbot(height=750)
    msg = gr.Textbox(label="Send a message", placeholder="Send a message",
                     show_label=False, container=False)

    with gr.Row():
        with gr.Column():
            gr.Examples([
                ["Explain these documents to me in simpler terms."],
                ["What do these documents talk about?"],
                ["Give the key topics covered in these documents in less than 10 words."],
                ["What are the key findings in these documents?"],
            ], inputs=msg, label="Click on any example to copy in the chatbox")
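
    # Feedback widgets: a rating radio and a free-text comment box.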
    with gr.Row():
        with gr.Column():
            feedback_radio = gr.Radio(
                choices=["1", "2", "3", "4", "5", "6", "None"],
                value="None",
                label="How would you rate the current response?",
                info="Choosing a number sends the following diagnostic data to the developer - Question, Response, Time Taken. Leave it as None to not send any data.",
            )
        with gr.Column():
            feedback_text = gr.Textbox(lines=1, label="Additional comments on the current response...")
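
    # Wire up events: sending a message runs respond() and resets the feedback
    # widgets; changing the radio or submitting a comment logs the feedback.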
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    msg.submit(default_feedback, outputs=[feedback_radio])
    msg.submit(default_text, outputs=[feedback_text])

    feedback_radio.change(
        fn=save_feedback,
        inputs=[feedback_radio]
    )
    feedback_text.submit(
        fn=text_feedback,
        inputs=[feedback_text]
    )

    gr.HTML(description)

chat.queue()
chat.launch()