import gradio as gr
import pandas as pd
import spaces
from transformers import RagTokenizer, RagRetriever, RagTokenForGeneration
# Load the RAG tokenizer and retriever
tokenizer = RagTokenizer.from_pretrained("facebook/rag-token-nq")
# The dummy wiki_dpr index keeps the demo lightweight; swap in a real index for production use
retriever = RagRetriever.from_pretrained("facebook/rag-token-nq", index_name="exact", use_dummy_dataset=True)

# Load the model and attach the retriever
model = RagTokenForGeneration.from_pretrained("facebook/rag-token-nq", retriever=retriever)
# Load your dataset
df = pd.read_csv('10kstats.csv')

# Extract the abstracts
abstracts = df['Abstract'].dropna().tolist()

# Generate context-response pairs (abstract-question pairs)
# Here we use the abstracts as contexts and simulate questions
contexts = abstracts
responses = ["Can you tell me more about this research?" for _ in abstracts]

# Tokenize the contexts and responses (not used by generate_response below)
inputs = tokenizer(contexts, return_tensors='pt', padding=True, truncation=True)
labels = tokenizer(responses, return_tensors='pt', padding=True, truncation=True)
@spaces.GPU  # requests a GPU on ZeroGPU Spaces; has no effect on other hardware
def generate_response(input_text):
    input_ids = tokenizer([input_text], return_tensors='pt')['input_ids']
    outputs = model.generate(input_ids)
    response = tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]
    return response
# Create the Gradio interface
iface = gr.Interface(
    fn=generate_response,
    inputs="text",
    outputs="text",
    title="RAG Chatbot",
    description="A chatbot powered by a Retrieval-Augmented Generation (RAG) model."
)
# Launch the interface
iface.launch()
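
Once the app is running, the interface can also be queried programmatically with the gradio_client package. This is a minimal sketch; the Space name "your-username/rag-chatbot" and the sample question are placeholders, and for a local launch you would point the client at http://127.0.0.1:7860 instead.

from gradio_client import Client

# Connect to the running Space (placeholder name) or the local launch URL
client = Client("your-username/rag-chatbot")

# A single-function gr.Interface exposes its endpoint as "/predict"
result = client.predict("What does the research say about transformer models?", api_name="/predict")
print(result)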