from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline

# Function to summarize using the fine-tuned BART model
def summarize_with_bart_ft(input_text):
    pipe_bart_ft = pipeline("summarization", model="EE21/BART-ToSSimplify")
    summary = pipe_bart_ft(input_text, max_length=300, min_length=100, num_beams=1, early_stopping=False, length_penalty=1)
    return summary[0]['summary_text']

# Function to summarize using BART-large-cnn
def summarize_with_bart_cnn(input_text):
    pipe = pipeline("summarization", model="facebook/bart-large-cnn")
    summary = pipe(input_text, max_length=300, min_length=100, num_beams=1, early_stopping=False, length_penalty=1)
    return summary[0]['summary_text']

# Function to summarize using led-base-book-summary
def summarize_with_led(input_text):
    pipe_led = pipeline("summarization", model="pszemraj/led-base-book-summary")
    summary = pipe_led(input_text, max_length=300, min_length=100, num_beams=1, early_stopping=False, length_penalty=1)
    return summary[0]['summary_text']

# Function to summarize using long-t5-tglobal-base-sci-simplify
def summarize_with_t5(input_text):
    pipe_t5 = pipeline("summarization", model="pszemraj/long-t5-tglobal-base-sci-simplify")
    summary = pipe_t5(input_text, max_length=300, min_length=100, num_beams=1, early_stopping=False, length_penalty=1)
    return summary[0]['summary_text']
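
# Illustrative only: a minimal sketch of how the four summarization functions above
# could be exposed through a Gradio interface for side-by-side comparison in a Space.
# The model names and functions are from the code above; the MODEL_CHOICES dict,
# the widget labels, and the interface layout are assumptions, not the Space's
# actual app code.
import gradio as gr

MODEL_CHOICES = {
    "BART fine-tuned (EE21/BART-ToSSimplify)": summarize_with_bart_ft,
    "BART-large-cnn": summarize_with_bart_cnn,
    "LED base (book summary)": summarize_with_led,
    "Long-T5 (sci-simplify)": summarize_with_t5,
}

def summarize(input_text, model_name):
    # Dispatch to the summarization function selected in the UI.
    return MODEL_CHOICES[model_name](input_text)

demo = gr.Interface(
    fn=summarize,
    inputs=[
        gr.Textbox(lines=12, label="Input text"),
        gr.Radio(choices=list(MODEL_CHOICES.keys()), label="Model"),
    ],
    outputs=gr.Textbox(label="Summary"),
    title="Summarization model comparison (sketch)",
)

if __name__ == "__main__":
    demo.launch()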