# copywriter2a/app.py
import gradio as gr
import google.generativeai as genai
import os
from dotenv import load_dotenv

# Load environment variables
load_dotenv()

# Configure the Google Gemini API client with the key from the environment
genai.configure(api_key=os.getenv("GEMINI_API_KEY"))
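
# Optional fail-fast check (not in the original app): surface a clear error
# if the key is missing instead of letting the first API call fail opaquely.
if not os.getenv("GEMINI_API_KEY"):
    raise RuntimeError("GEMINI_API_KEY is not set; add it to .env or the Space secrets.")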


def respond(
    message,
    history,
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    # Convert the Gradio chat history ([user, assistant] pairs) into the
    # format expected by google.generativeai: roles "user"/"model" with the
    # text under "parts". The system prompt is passed separately below.
    gemini_history = []
    for user_msg, assistant_msg in history:
        if user_msg:
            gemini_history.append({"role": "user", "parts": [user_msg]})
        if assistant_msg:
            gemini_history.append({"role": "model", "parts": [assistant_msg]})

    # Configure the Gemini model
    model_name = "gemini-1.5-pro"  # Adjust the model as needed
    generation_config = {
        "temperature": temperature,
        "top_p": top_p,
        "max_output_tokens": max_tokens,
        "response_mime_type": "text/plain",
    }
    model = genai.GenerativeModel(
        model_name=model_name,
        generation_config=generation_config,
        system_instruction=system_message,
    )

    # Send the new user message on top of the reconstructed history
    chat_session = model.start_chat(history=gemini_history)
    response = chat_session.send_message(message)
    return response.text
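
# Sketch of calling respond() directly, e.g. as a quick smoke test without the UI.
# This is illustrative only; history uses the [user, assistant] pair format that
# gr.ChatInterface passes in:
#   reply = respond(
#       "Write a tagline for a coffee shop",
#       history=[],
#       system_message="You are a persuasive copywriter.",
#       max_tokens=256,
#       temperature=0.7,
#       top_p=0.95,
#   )
#   print(reply)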

# Create the Gradio chat interface
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a helpful assistant.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        # Gemini 1.5 accepts temperatures up to 2.0, so the slider is capped there
        gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
)
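
# How to run locally (a sketch, assuming a .env file next to app.py):
#   echo "GEMINI_API_KEY=your-key-here" > .env
#   python app.py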
if __name__ == "__main__":
    demo.launch()