from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline

# Load the fine-tuned BART tokenizer and model
tokenizer = AutoTokenizer.from_pretrained("EE21/BART-ToSSimplify")
model = AutoModelForSeq2SeqLM.from_pretrained("EE21/BART-ToSSimplify")

# Function to summarize using the fine-tuned BART model
def summarize_with_bart_ft(input_text):
    inputs = tokenizer.encode("summarize: " + input_text, return_tensors="pt", max_length=1024, truncation=True)
    summary_ids = model.generate(inputs, max_length=300, min_length=100, num_beams=1, early_stopping=False, length_penalty=1)
    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
    return summary

# Function to summarize using BART-large-cnn
def summarize_with_bart_cnn(input_text):
    pipe = pipeline("summarization", model="facebook/bart-large-cnn")
    summary = pipe(input_text, max_length=300, min_length=100, num_beams=1, early_stopping=False, length_penalty=1)
    return summary[0]['summary_text']

# Function to summarize using led-base-book-summary
def summarize_with_led(input_text):
    pipe_led = pipeline("summarization", model="pszemraj/led-base-book-summary")
    summary = pipe_led(input_text, max_length=300, min_length=100, num_beams=1, early_stopping=False, length_penalty=1)
    return summary[0]['summary_text']

# Function to summarize using long-t5-tglobal-base-sci-simplify
def summarize_with_t5(input_text):
    pipe_t5 = pipeline("summarization", model="pszemraj/long-t5-tglobal-base-sci-simplify")
    summary = pipe_t5(input_text, max_length=300, min_length=100, num_beams=1, early_stopping=False, length_penalty=1)
    return summary[0]['summary_text']
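
# Example usage: a minimal sketch showing how the summarizers above can be called.
# The sample clause below is illustrative only and is not part of the original file;
# with min_length=100, very short inputs may yield padded or repetitive summaries.
if __name__ == "__main__":
    sample_clause = (
        "We may suspend or terminate your account at any time, with or without notice, "
        "for any conduct that we believe violates these Terms of Service."
    )
    print(summarize_with_bart_ft(sample_clause))
    print(summarize_with_bart_cnn(sample_clause))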