import torch
import gradio as gr

# The fastai/blurr and transformers imports must be available so that
# load_learner can unpickle the exported blurr Learner below
from fastai.text.all import *
from blurr.data.all import *
from blurr.modeling.all import *
from transformers import BartForConditionalGeneration, BartTokenizer

# Load the pre-trained tokenizer (adjust the checkpoint for your BART variant if needed)
pretrained_model_name = "facebook/bart-large-cnn"  # or "facebook/bart-base"
hf_tokenizer = BartTokenizer.from_pretrained(pretrained_model_name)

# Load the exported Learner once at startup, not on every request
learn = load_learner('article_highlights.pkl')

def summarize(article):
    # Define your data transformation pipeline here, if applicable
    # ...
    # Generate the summary with blurr's text-generation helper
    summary = learn.blurr_generate(article)[0]
    return summary

# Create the Gradio interface
iface = gr.Interface(
    fn=summarize,
    inputs="text",
    outputs="text",
    title="Article Summarizer (Part 3)",
    description="Enter an article and get a summary.",
    examples=[["This is an example article..."]],
)

# Launch the Gradio interface
iface.launch()
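For reference, article_highlights.pkl is a fastai Learner exported at the end of the fine-tuning run. A minimal sketch of that export step, assuming a fine-tuned blurr summarization Learner named learn (the training setup itself is omitted here, as it is not shown in this part):

# Assumed export step from the training notebook: fastai's Learner.export
# writes the pickle that load_learner reads in the app above
learn.export('article_highlights.pkl')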