# Gradio Space: streaming OpenAI persona chatbot ("General Patton") with simple password auth.
import os
import os.path

import gradio as gr
from openai import OpenAI

################# Start PERSONA-SPECIFIC VALUES ######################
coach_code = "gp"
coach_name_short = "General Patton"
coach_name_upper = "GENERAL PATTON"
coach_name_long = "General George S. Patton"
# The persona system prompt is kept out of source control and injected
# through the PROMPT_NEW environment variable (None if unset).
sys_prompt_new = os.getenv("PROMPT_NEW")
################# End PERSONA-SPECIFIC VALUES ######################

################# Start OpenAI-SPECIFIC VALUES ######################
# Initialize OpenAI API client with the API key from the environment.
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
# OpenAI model used for all chat completions.
openai_model = "gpt-3.5-turbo-0125"
################# End OpenAI-SPECIFIC VALUES ######################

################# Log in ######################
# Shared secret appended to the username by the auth check (same_auth).
security = os.getenv("AUTH")
############### CHAT ###################
def predict(user_input, history):
    """Stream an assistant reply for user_input given the chat history.

    Args:
        user_input: The latest user message (max 500 characters).
        history: List of (human, assistant) message pairs from Gradio.

    Yields:
        The progressively accumulated assistant reply, so Gradio renders
        the response as a live stream.

    Raises:
        gr.Error: If the input exceeds the maximum allowed length.
    """
    max_length = 500
    if len(user_input) > max_length:
        raise gr.Error(f"Input is TOO LONG. Max length is {max_length} characters. Try again.")

    # Rebuild the conversation in OpenAI chat format, persona prompt first.
    history_openai_format = [
        {"role": "system", "content": "IDENTITY: " + sys_prompt_new}
    ]
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": user_input})

    completion = client.chat.completions.create(
        model=openai_model,
        messages=history_openai_format,
        temperature=1.2,
        frequency_penalty=0.4,
        presence_penalty=0.1,
        stream=True,
    )

    # Accumulate streamed deltas, yielding the running text after each chunk.
    output_stream = ""
    for chunk in completion:
        delta = chunk.choices[0].delta.content
        if delta is not None:
            output_stream += delta
            yield output_stream
def same_auth(username, password):
    """Gradio auth callback: accept iff password == username + AUTH secret.

    Uses a constant-time comparison so the check does not leak information
    about how much of the expected value matched via response timing.

    Args:
        username: Login name entered by the user.
        password: Password entered by the user.

    Returns:
        True if the credentials match, False otherwise.
    """
    import hmac

    # Guard against a missing AUTH env var (security is None) so a
    # misconfigured deployment fails the login instead of raising TypeError.
    expected = username + (security or "")
    return hmac.compare_digest(expected.encode("utf-8"), password.encode("utf-8"))
# GUI: minimal chat interface themed with the Gradio default theme.
theme = gr.themes.Default()
with gr.Blocks(theme) as demo:
    gr.ChatInterface(
        predict,
        submit_btn="Chat with " + coach_name_short,
        retry_btn=None,
        undo_btn=None,
        clear_btn=None,
        autofocus=True,
    )

# Hide the auto-generated API page and gate access behind the auth callback.
demo.launch(show_api=False, auth=same_auth)