import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the model and tokenizer
model_name = "bigscience/bloomz-560m"  # multilingual model
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
# Function that handles one chatbot turn
def chatbot(input_text, history=None):
    # Gradio passes None for the state on the first call
    history = history or []
    # Fold the conversation history back into the prompt
    conversation = " ".join(f"Usuario: {user} Chatbot: {bot}" for user, bot in history)
    new_input = f"{conversation} Usuario: {input_text} Chatbot:"
    inputs = tokenizer(new_input, return_tensors="pt", truncation=True)
    outputs = model.generate(inputs["input_ids"], max_new_tokens=200, pad_token_id=tokenizer.eos_token_id)
    # Keep only the text generated after the last "Chatbot:" marker
    response = tokenizer.decode(outputs[0], skip_special_tokens=True).split("Chatbot:")[-1].strip()
    # Update the history and return it for both the chatbot display and the state
    history.append((input_text, response))
    return history, history
# Gradio interface configuration
iface = gr.Interface(
    fn=chatbot,
    inputs=["text", "state"],      # text input plus state for the conversation history
    outputs=["chatbot", "state"],  # chatbot display plus updated state
    title="Neuronpyme Chatbot",
    description="Un chatbot en español para ayudarte a implementar IA en tu negocio.",
    theme="compact",
)

# Launch the app
iface.launch()
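
For quick sanity-checking outside the Gradio UI, the handler can also be called directly. A minimal sketch, assuming the model weights download on first run; the example question is made up and this snippet is illustrative, not part of the Space itself:

# Minimal local test sketch (assumption: model weights download on first run;
# the question below is only an example).
if __name__ == "__main__":
    demo_history, _ = chatbot("¿Cómo puedo usar IA en mi tienda?", None)
    print(demo_history[-1][1])  # print the chatbot's reply for the last turn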