import gradio as gr
from dotenv import load_dotenv
import os
import anthropic
import openai
from uuid import uuid4
from util import ClaudeCompletion, GPTCompletion
gr.close_all()
load_dotenv()

# API keys are read from the environment (populated from .env by load_dotenv).
CLAUDE_API_KEY = os.environ["CLAUDE_API_KEY"]
openai.api_key = os.environ["OPENAI_API_KEY"]
claudeClient = anthropic.Client(CLAUDE_API_KEY)
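
# NOTE: ClaudeCompletion and GPTCompletion come from the local util module, which is
# not part of this file. Judging from the call sites below, the wrappers are assumed
# to expose roughly the following interface (a sketch, not the actual util.py):
#
#   class ClaudeCompletion:
#       def __init__(self, prompt, max_tokens_to_sample=..., temperature=...): ...
#       def execute(self, client): ...                    # one-shot text completion
#       def chatComplete(self, client, chatHistory): ...  # completion over a chat log
#
#   class GPTCompletion:
#       def __init__(self, system=..., max_tokens=..., temperature=...): ...
#       def chatComplete(self, chatHistory, firstMessage=None): ...  # chat completion
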
def talkToClaude(prompt):
    # One-off Claude text completion; defined here but not wired into the UI below.
    completion = ClaudeCompletion(prompt, max_tokens_to_sample=1024)
    claudeResponse = completion.execute(claudeClient)
    return claudeResponse

def chatWithClaude(chatHistory):
    completion = ClaudeCompletion("", max_tokens_to_sample=2048, temperature=0.5)
    claudeResponse = completion.chatComplete(claudeClient, chatHistory)
    # Replace the last (message, "") pair so Claude's reply fills the empty slot.
    lastMessage = chatHistory[-1][0]
    chatHistory.pop()
    chatHistory.append((lastMessage, claudeResponse))
    return chatHistory

def startNewChat(system, chatHistory):
    # The user-supplied context only shapes GPT's opening line; it is not reused afterwards.
    systemMes = "You are a human chatting with an AI assistant. This is the context of your conversation: " + system
    completion = GPTCompletion(system=systemMes, max_tokens=2048, temperature=1.5)
    gptResponse = completion.chatComplete(chatHistory, firstMessage="Hi, I am an AI assistant. How can I help you?")
    chatHistory.append((gptResponse, ""))
    return chatHistory

def chatWithGPT(chatHistory):
    completion = GPTCompletion(system="You are a human chatting with an AI assistant.", max_tokens=2048, temperature=1.5)
    gptResponse = completion.chatComplete(chatHistory)
    chatHistory.append((gptResponse, ""))
    return chatHistory

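# Chat history convention (the list of (user, bot) tuples that gr.Chatbot displays):
# GPT plays the "human", so its messages go in the first slot of each pair and
# Claude's replies go in the second. startNewChat and chatWithGPT append
# (gptResponse, "") and chatWithClaude then fills in the empty second slot.
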
with gr.Blocks() as demo:
    chatState = gr.State([])
    gptsTurnState = gr.State(True)

    def startConversation(prompt, gptsTurn):
        # nextBtn.interactive = False
        chatHistory = []
        startNewChat(prompt, chatHistory)
        # nextBtn.interactive = True
        # GPT opens the conversation, so Claude is up next.
        gptsTurn = False
        return chatHistory, chatHistory, gptsTurn

    def nextResponse(chatHistory, gptsTurn):
        # nextBtn.interactive = False
        # The two models alternate: GPT speaks on its turn, Claude otherwise.
        if gptsTurn:
            chatWithGPT(chatHistory)
        else:
            chatWithClaude(chatHistory)
        gptsTurn = not gptsTurn
        # nextBtn.interactive = True
        return chatHistory, chatHistory, gptsTurn

context = gr.Textbox(label="Context",
placeholder="Set the context for two LLMs to chat with each other",
info="OpenAI's gpt-3.5-turbo model with chat completion and Anthropic's claude-v1.3 text completion model will talk to each other. Context will be integrated into GPT's system message for the first line of the dialoge and will be scrapped afterwards. The two LLM's are expected to keep the conversation going based on the conversation history. Clicking \"Start conversation\" will reset the conversation and bring the first message. Then, clicking \"Next response\" will bring the next line of the dialogue.")
start_button = gr.Button("Start conversation")
chatbot = gr.Chatbot().style(height=460)
nextBtn = gr.Button("Next response")
    examples = gr.Examples(
        ["A person is interested in Formula 1 and asks questions about the sport",
         "A little child wants to understand how planes fly",
         "You are a young indie game dev searching for ideas for your next strategy game",
         "You are an old Irish man interested in medieval European history"],
        context)

    # nextBtn.interactive = False
    start_button.click(startConversation, [context, gptsTurnState], [chatbot, chatState, gptsTurnState], scroll_to_output=True, show_progress=True)
    nextBtn.click(nextResponse, [chatState, gptsTurnState], [chatbot, chatState, gptsTurnState], scroll_to_output=True, show_progress=True)

try:
    demo.launch()
except KeyboardInterrupt:
    demo.close()
except Exception as e:
    print(e)
    demo.close()

gr.close_all()