import os
import base64

import openai
import gradio as gr
from dotenv import load_dotenv

# Load environment variables from a .env file when running locally.
load_dotenv()

# Logo shown at the top of the interface, embedded as a base64 string.
with open("Iso_Logotipo_Ceibal.png", "rb") as image_file:
    encoded_image = base64.b64encode(image_file.read()).decode()

openai.api_key = os.environ["OPENAI_API_KEY"]
access_pwd = os.environ["INTERFACE_PWD"]
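# Illustrative sketch of the .env file expected for local runs (the variable
# names come from the lookups above; the actual values are secrets and are
# not part of the repository):
#
#   OPENAI_API_KEY=sk-...
#   INTERFACE_PWD=<password asked by the Gradio login screen>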
def clear_chat(message, chat_history):
    """Empty the input textbox and reset the chat history."""
    return "", []


def add_new_message(message, chat_history):
    """Turn Gradio's (user, bot) history plus the new message into the
    list-of-dicts format expected by the OpenAI chat API."""
    new_chat = []
    for user, bot in chat_history:
        new_chat.append({"role": "user", "content": user})
        new_chat.append({"role": "assistant", "content": bot})
    new_chat.append({"role": "user", "content": message})
    return new_chat
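# Purely illustrative example of the structure add_new_message builds for a
# one-turn history:
#   add_new_message("¿Y mañana?", [("Hola", "¡Hola! ¿En qué puedo ayudarte?")])
#   -> [{"role": "user", "content": "Hola"},
#       {"role": "assistant", "content": "¡Hola! ¿En qué puedo ayudarte?"},
#       {"role": "user", "content": "¿Y mañana?"}]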
def respond(message, chat_history):
    """Stream the model's answer into the chatbot, updating it chunk by chunk."""
    prompt = add_new_message(message, chat_history)

    response = openai.ChatCompletion.create(
        model="gpt-4o",  # gpt-4o, gpt-4-turbo, gpt-4 or gpt-3.5-turbo (a pinned release such as gpt-4o-2024-05-13 also works)
        messages=prompt,
        temperature=0.5,
        max_tokens=1000,
        stream=True,
    )

    # Open a new (user, bot) turn and fill in the bot side as tokens arrive.
    chat_history.append([message, ""])
    partial_words = ""
    for chunk in response:
        delta = chunk["choices"][0]["delta"]
        content = delta.get("content")
        if not content:
            # The first chunk may only carry the role, and the final one is empty.
            continue
        partial_words += content
        chat_history[-1] = [message, partial_words]
        yield "", chat_history
with gr.Blocks() as demo:
    gr.Markdown("""
<center>
<img src='data:image/png;base64,{}' width=200px>
<h3>
En este espacio podrás interactuar con ChatGPT y su modelo GPT-4!
</h3>
</center>
""".format(encoded_image))
    with gr.Row():
        chatbot = gr.Chatbot(height=250)  # compact height so the chat fits on screen
    with gr.Row():
        with gr.Column(scale=4):
            msg = gr.Textbox(label="Texto de entrada")
        with gr.Column(scale=1):
            btn = gr.Button("Enviar")
            clear = gr.ClearButton(components=[msg, chatbot], value="Borrar chat")

    btn.click(respond, inputs=[msg, chatbot], outputs=[msg, chatbot])
    msg.submit(respond, inputs=[msg, chatbot], outputs=[msg, chatbot])  # pressing Enter also submits
    clear.click(clear_chat, inputs=[msg, chatbot], outputs=[msg, chatbot])

demo.queue()
demo.launch(auth=("Ceibal_IA", access_pwd))
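# To run the app locally, a sketch (exact versions should come from the
# Space's requirements.txt, which is not shown here; the pre-1.0 openai SDK
# is assumed because the code calls openai.ChatCompletion):
#   pip install "openai<1.0" gradio python-dotenv
#   python app.py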