import gradio as gr
from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration

# Load the Blenderbot model and tokenizer
MODEL_NAME = "facebook/blenderbot-3B"
tokenizer = BlenderbotTokenizer.from_pretrained(MODEL_NAME)
model = BlenderbotForConditionalGeneration.from_pretrained(MODEL_NAME)
def chatbot_response(user_input, chat_history):
    """Generates a response from Blenderbot, conditioning on the chat history."""
    # Flatten earlier turns so the model actually sees the conversation so far
    history_text = " ".join(f"User: {user} Assistant: {bot}" for user, bot in chat_history)
    formatted_input = f"{history_text} User: {user_input}".strip()
    # Truncate to the model's maximum input length so long chats don't raise errors
    inputs = tokenizer(formatted_input, return_tensors="pt", truncation=True)
    reply_ids = model.generate(**inputs, max_length=100)
    response = tokenizer.decode(reply_ids[0], skip_special_tokens=True)
    # Append the new exchange and return the history twice:
    # once for the Chatbot display and once for the State
    chat_history.append((user_input, response))
    return chat_history, chat_history
# Set up Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# 🤖 Blenderbot 3B Chatbot")
    chatbot = gr.Chatbot()
    user_input = gr.Textbox(label="Your message")
    submit_btn = gr.Button("Send")
    clear_btn = gr.Button("Clear Chat")
    chat_state = gr.State([])

    submit_btn.click(chatbot_response, inputs=[user_input, chat_state], outputs=[chatbot, chat_state])
    clear_btn.click(lambda: ([], []), inputs=[], outputs=[chatbot, chat_state])

# Launch the chatbot
demo.launch()
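# Optional sanity check (a sketch, not part of the original Space). Because
# demo.launch() blocks, these lines only run once the server is shut down;
# for a quick standalone test, run them with demo.launch() commented out.
# The example messages are placeholders.
history = []
history, _ = chatbot_response("Hi! What do you like to cook?", history)
history, _ = chatbot_response("What ingredients does that need?", history)
for user_msg, bot_msg in history:
    print(f"User: {user_msg}")
    print(f"Bot:  {bot_msg}")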