import gradio as gr
import os
import json
import requests
# Streaming chat completions endpoint
API_URL = "https://api.openai.com/v1/chat/completions"  # os.getenv("API_URL") + "/generate_stream"
# OpenAI API key, read from the environment (e.g. a Hugging Face Space secret)
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

# Inference function
def predict(system_msg, inputs, top_p, temperature, chat_counter, chatbot=[], history=[]):
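    """
    Build the Chat Completions request from the current Gradio state, stream the
    response, and yield updated (chatbot pairs, history, chat_counter, response)
    tuples as new tokens arrive.
    """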
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {OPENAI_API_KEY}"
    }
print(f"system message is ^^ {system_msg}")
if system_msg.strip() == '':
initial_message = [{"role": "user", "content": f"{inputs}"},]
multi_turn_message = []
else:
initial_message= [{"role": "system", "content": system_msg},
{"role": "user", "content": f"{inputs}"},]
multi_turn_message = [{"role": "system", "content": system_msg},]
    if chat_counter == 0:
        # first turn: fixed sampling settings for the opening exchange
        payload = {
            "model": "gpt-3.5-turbo",
            "messages": initial_message,
            "temperature": 0.6,
            "top_p": 1.0,
            "n": 1,
            "stream": True,
            "presence_penalty": 0,
            "frequency_penalty": 0,
        }
        print(f"chat_counter - {chat_counter}")
    else:  # chat_counter != 0: rebuild the full conversation from the chatbot history
        messages = multi_turn_message  # either [] or [{"role": "system", "content": system_msg},]
        for data in chatbot:
            user = {}
            user["role"] = "user"
            user["content"] = data[0]
            assistant = {}
            assistant["role"] = "assistant"
            assistant["content"] = data[1]
            messages.append(user)
            messages.append(assistant)
        temp = {}
        temp["role"] = "user"
        temp["content"] = inputs
        messages.append(temp)
        payload = {
            "model": "gpt-3.5-turbo",
            "messages": messages,  # list of {"role": ..., "content": ...} dicts
            "temperature": temperature,
            "top_p": top_p,
            "n": 1,
            "stream": True,
            "presence_penalty": 0,
            "frequency_penalty": 0,
            "max_tokens": 500  # cap the completion at 500 tokens
        }
    chat_counter += 1

    history.append(inputs)
    print(f"Logging : payload is - {payload}")
    # make a POST request to the API endpoint using requests.post, passing in stream=True
    response = requests.post(API_URL, headers=headers, json=payload, stream=True)
    print(f"Logging : response code - {response}")
    token_counter = 0
    partial_words = ""
    counter = 0
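    # Each non-empty streamed line is a server-sent event of the form
    #   data: {"choices": [{"delta": {"content": "Hi"}, ...}], ...}
    # so the loop below strips the leading "data: " (chunk[6:]) before json.loads;
    # the stream ends with a final `data: [DONE]` line, which the "content" check filters out.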
    for chunk in response.iter_lines():
        # skip the first chunk
        if counter == 0:
            counter += 1
            continue
        # check whether each line is non-empty
        if chunk.decode():
            chunk = chunk.decode()
            # decode each line, as the response data arrives in bytes
            if len(chunk) > 12 and "content" in json.loads(chunk[6:])['choices'][0]['delta']:
                partial_words = partial_words + json.loads(chunk[6:])['choices'][0]["delta"]["content"]
                if token_counter == 0:
                    history.append(" " + partial_words)
                else:
                    history[-1] = partial_words
                chat = [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)]  # convert to a list of (user, assistant) tuples
                token_counter += 1
                yield chat, history, chat_counter, response  # resembles {chatbot: chat, state: history}

# Reset the input textbox to blank
def reset_textbox():
    return gr.update(value='')

# Set a component to visible=False
def set_visible_false():
    return gr.update(visible=False)

# Set a component to visible=True
def set_visible_true():
    return gr.update(visible=True)
title = """<h1 align="center">🔥Design Thinking Assistant for Primary 6 Students 🚀</h1>"""
#Using info to add additional information about System message in GPT4
system_msg_info = """The system message is used to set the context and behavior of the AI assistant at the beginning of a conversation."""
#Modifying existing Gradio Theme
theme = gr.themes.Soft(primary_hue="indigo", secondary_hue="blue", neutral_hue="blue",
text_size=gr.themes.sizes.text_lg)

with gr.Blocks(css="""#col_container { margin-left: auto; margin-right: auto;} #chatbot {height: 600px; overflow: auto; width: 800px; font-size: 12px;}""",
               theme=theme) as demo:
    gr.HTML(title)
    with gr.Column(elem_id="col_container"):
        # The OpenAI API key is read from the environment, so no key input is shown
        with gr.Accordion(label="System message:", open=False):
            system_msg = gr.Textbox(label="Instruct the AI Assistant to set its behaviour",
                                    info=system_msg_info,
                                    value="You are Design Thinking Coach, an AI assistant acting as a coach for Primary 6 students in Singapore working on an environmental awareness project using design thinking. Reply in a brief and concise manner. Guide students through the design thinking process to develop effective solutions using upcycled materials. Use Socratic questioning to encourage students to deeply empathize with sustainability issues through research. Stimulate creative ideas and provide constructive feedback without giving away answers directly. Make sure your explanations of design thinking are clear and suitable for Primary 6 students. If a student asks about something unrelated to design thinking or environmental awareness, politely redirect them back to the project scope.")
            accordion_msg = gr.HTML(value="🚧 To set the System message you will have to refresh the app", visible=False)
        chatbot = gr.Chatbot(label='Design Thinking Coach', elem_id="chatbot")
        inputs = gr.Textbox(placeholder="Hi there!", label="Type an input and press Enter")
        state = gr.State([])
        with gr.Row():
            with gr.Column(scale=7):
                b1 = gr.Button().style(full_width=True)
            with gr.Column(scale=3):
                server_status_code = gr.Textbox(label="Status code from OpenAI server")

        # top_p, temperature
        with gr.Accordion("Parameters", open=False):
            top_p = gr.Slider(minimum=0, maximum=1.0, value=1.0, step=0.05, interactive=True, label="Top-p (nucleus sampling)")
            temperature = gr.Slider(minimum=0, maximum=2.0, value=1.0, step=0.1, interactive=True, label="Temperature")  # the Chat Completions API accepts temperatures from 0 to 2
            chat_counter = gr.Number(value=0, visible=False, precision=0)
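
    # chat_counter is a hidden count of completed turns; predict() uses it to decide
    # whether to send the single opening message or to replay the whole chat history.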

    # Event handling: predict is a generator, so the chatbot streams tokens as they arrive
    inputs.submit(predict, [system_msg, inputs, top_p, temperature, chat_counter, chatbot, state], [chatbot, state, chat_counter, server_status_code],)
    b1.click(predict, [system_msg, inputs, top_p, temperature, chat_counter, chatbot, state], [chatbot, state, chat_counter, server_status_code],)

    # once the first message is sent, hide the system-message box and show the refresh notice
    inputs.submit(set_visible_false, [], [system_msg])
    b1.click(set_visible_false, [], [system_msg])
    inputs.submit(set_visible_true, [], [accordion_msg])
    b1.click(set_visible_true, [], [accordion_msg])
    b1.click(reset_textbox, [], [inputs])
    inputs.submit(reset_textbox, [], [inputs])

    with gr.Accordion(label="Examples for System message:", open=False):
        gr.Examples(
            examples=[["Take on the role of Coach Jamie, a friendly and encouraging design thinking coach. Coach Jamie will guide a group of Primary 6 students in Singapore through the design thinking process to raise environmental awareness about climate change for kids aged 4-12 years old and seniors aged 65 years old and above.\n\nCoach Jamie asks the students focused, concise questions to help them empathize with their target audiences and define the problem within 100 tokens. The coach then stimulates the students' creativity in generating innovative yet feasible ideas to raise awareness. Coach Jamie assists the students in reviewing their ideas constructively and choosing the most viable solutions to prototype and test. The coach guides the students in iterative improvement of their solutions based on user feedback from both target age groups. Throughout the process, Coach Jamie provides clear explanations of design thinking methodology and motivates the students positively while giving specific, constructive feedback to further their learning - all in a warm, friendly manner using 100 tokens or less per interaction."]],
            inputs=system_msg,)

demo.queue(max_size=99, concurrency_count=40).launch(debug=True)
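
# To run locally (a sketch; assumes the gradio and requests packages are installed):
#   export OPENAI_API_KEY=sk-...   # your own key
#   python app.py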