import streamlit as st
from transformers import pipeline

# Load the summarization models via Hugging Face's pipeline API.
# Cache the pipelines so the models are not re-initialized on every rerun.
@st.cache_resource
def load_pipelines():
    bart_pipeline = pipeline("summarization", model="facebook/bart-large-cnn")
    t5_pipeline = pipeline("summarization", model="t5-large")
    pegasus_pipeline = pipeline("summarization", model="google/pegasus-cnn_dailymail")
    return {
        'BART': bart_pipeline,
        'T5': t5_pipeline,
        'Pegasus': pegasus_pipeline,
    }

prompt = """
Summarize the below paragraph
"""

# Streamlit app layout
st.title("Text Summarization with Pre-trained Models (BART, T5, Pegasus)")

text_input = st.text_area("Enter text to summarize:")

if st.button("Generate Summary"):
    if text_input:
        pipelines = load_pipelines()
        summaries = {}
        # Run each model on the same input and collect its summary.
        # (The loop variable is named "summarizer" to avoid shadowing the
        # imported "pipeline" function.)
        for model_name, summarizer in pipelines.items():
            summary = summarizer(
                f"{prompt}\n{text_input}",
                max_length=150,
                min_length=50,
                length_penalty=2.0,
                num_beams=4,
                early_stopping=True,
            )[0]['summary_text']
            summaries[model_name] = summary

        st.subheader("Summaries")
        for model_name, summary in summaries.items():
            st.write(f"**{model_name}**")
            # Pegasus emits "<n>" as a sentence separator; strip it for display.
            st.write(summary.replace('<n>', ' '))
            st.write("---")
    else:
        st.error("Please enter text to summarize.")
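Assuming the script above is saved locally, for example as `app.py` (an assumed file name, not specified in the listing), the interface can be started with Streamlit's standard CLI entry point, `streamlit run app.py`, and opened in the browser at the local URL it prints.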