import openai
import random
import time
import gradio as gr
import os
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import DeepLake
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain
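# NOTE: these import paths follow the pre-0.1 `langchain` package layout. Newer
# releases move these classes into `langchain_community` / `langchain_openai`, so you
# may need to pin an older langchain version or adjust the imports accordingly.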
def set_api_key(key):
    # Store the key in an environment variable for the current session only.
    os.environ["OPENAI_API_KEY"] = key
    return f"Your API Key has been set to: {key}"

def reset_api_key():
    os.environ["OPENAI_API_KEY"] = ""
    return "Your API Key has been reset"

def get_api_key():
    api_key = os.getenv("OPENAI_API_KEY")
    return api_key

def set_model(model):
    os.environ["OPENAI_MODEL"] = model
    return f"{model} selected"

def get_model():
    model = os.getenv("OPENAI_MODEL")
    return model
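
# Chat callback for the Gradio Chatbot: embeds the user's question, retrieves
# matching chunks from the local DeepLake documentation store, and answers with a
# ConversationalRetrievalChain built on the selected chat model.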
def respond(message, chat_history):
    # Get embeddings
    embeddings = OpenAIEmbeddings()

    # Connect to the existing vector store (read-only)
    db = DeepLake(dataset_path="./documentation_db", embedding_function=embeddings, read_only=True)

    # Set retriever settings: cosine distance with maximal marginal relevance
    retriever = db.as_retriever(search_kwargs={"distance_metric": "cos",
                                               "fetch_k": 10,
                                               "maximal_marginal_relevance": True,
                                               "k": 10})

    # The Chatbot state holds at most one prior exchange; convert it to the
    # (question, answer) tuple format expected by the chain.
    if len(chat_history) != 0:
        chat_history = [(chat_history[0][0], chat_history[0][1])]

    model = get_model()

    # Create ChatOpenAI and ConversationalRetrievalChain
    model = ChatOpenAI(model=model)
    qa = ConversationalRetrievalChain.from_llm(model, retriever)

    bot_message = qa({"question": message, "chat_history": chat_history})

    # Return the latest exchange and clear the input box
    chat_history = [(message, bot_message["answer"])]
    time.sleep(1)
    return "", chat_history
with gr.Blocks() as demo:
    gr.Markdown("# Langchain Coding Assistant")
gr.Markdown("## This Gradio app is powered by ChatGPT and LangChain. You can submit your OpenAI API key and use the chatbot to get assistance for your coding questions. This chatbot can also provide assistance coding with LangChain. \n ### 1. Enter your OpenAI API key in the 'OpenAI API Key Submission' tab. \n ### 2. Click 'Submit' to set your API key.\n ### 3. Go to the 'Coding Assistant' tab and provide your prompt. You can re-verify that your API Key was set correctly in this tab as well.")
gr.Markdown("#### Note: This application is able to query up-to-date documentation on LangChain and will be frequently updated to include the latest updates to the documentation. Support for other newer packges will be added soon. ")
with gr.Tab("OpenAI API Key Submission"):
api_input = gr.Textbox(label = "API Key",
placeholder = "Please provide your OpenAI API key here.")
api_key_status = gr.Textbox(label = "API Key Status",
placeholder = "Your API Key has not be set yet. Please enter your key.",
interactive = False)
api_submit_button = gr.Button("Submit")
api_reset_button = gr.Button("Clear API Key from session")
with gr.Tab("Coding Assistant"):
api_check_button = gr.Button("Get API Key")
api_print = gr.Textbox(label = "OpenAI API Key - Please ensure the API Key is set correctly")
model_selection = gr.Dropdown(
["gpt-3.5-turbo", "gpt-4"], label="Model Selection", info="Please ensure you provide the API Key that corresponds to the Model you select!"
)
model_submit_button = gr.Button("Submit Model Selection")
        model_status = gr.Textbox(label="Selected Model", interactive=False)
        chatbot = gr.Chatbot(label="ChatGPT Powered Coding Assistant")
        msg = gr.Textbox(label="User Prompt", placeholder="Your Query Here")
        clear = gr.Button("Clear")
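
    # Wire the UI events to their callbacks.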
    api_submit_button.click(set_api_key, inputs=api_input, outputs=api_key_status)
    api_reset_button.click(reset_api_key, outputs=api_key_status)
    api_check_button.click(get_api_key, outputs=api_print)
    model_submit_button.click(set_model, inputs=model_selection, outputs=model_status)
    msg.submit(respond, inputs=[msg, chatbot], outputs=[msg, chatbot])
    clear.click(lambda: None, None, chatbot, queue=False)
demo.launch()