import gradio as gr
from fastai.text.all import *
from blurr.text.data.all import *
from blurr.text.modeling.all import *

# Load the exported learner. The pickle produced by blurr bundles the
# fine-tuned "facebook/bart-large-cnn" model and its Hugging Face tokenizer,
# so they do not need to be loaded separately from transformers.
learn = load_learner('article_highlights.pkl')

def summarize(article):
    # The learner handles tokenization internally, so the raw article text
    # can be passed straight to blurr's generation helper.
    prediction = learn.blurr_generate(article)[0]
    # Depending on the blurr version, each result is either a plain string
    # or a dict keyed by 'generated_texts'.
    return prediction['generated_texts'] if isinstance(prediction, dict) else prediction

# Create the Gradio interface
iface = gr.Interface(
    fn=summarize,
    inputs="text",
    outputs="text",
    title="Article Summarizer (Part 3)",
    description="Enter an article and get a summary.",
    examples=[["This is an example article..."]]
)

# Launch the Gradio interface
iface.launch()