# Hugging Face Space — Streamlit summarization demo (page-scrape residue removed)
import streamlit as st
from transformers import PegasusForConditionalGeneration, PegasusTokenizer


@st.cache_resource
def load_model_and_tokenizer():
    """Load the fine-tuned PEGASUS model and its tokenizer exactly once.

    Streamlit re-executes the whole script on every widget interaction;
    @st.cache_resource keeps the multi-hundred-MB weights in memory across
    reruns instead of reloading them each time.

    Returns:
        tuple[PegasusForConditionalGeneration, PegasusTokenizer]
    """
    model = PegasusForConditionalGeneration.from_pretrained(
        "fatihfauzan26/PEGASUS_liputan6"
    )
    # NOTE(review): tokenizer is taken from a different checkpoint than the
    # model — presumably the fine-tune kept the base vocabulary; confirm.
    tokenizer = PegasusTokenizer.from_pretrained("google/pegasus-cnn_dailymail")
    return model, tokenizer


model, tokenizer = load_model_and_tokenizer()

# Streamlit interface
st.title("Summarization App using PEGASUS")

# Input article for summarization
sample_article = st.text_area('Enter the article you want to summarize', height=300)

if sample_article:
    # Truncate overly long articles to the encoder's context window so
    # generation does not fail on out-of-range position embeddings.
    input_ids = tokenizer.encode(
        sample_article,
        return_tensors='pt',
        truncation=True,
        max_length=512,
    )
    # Beam search combined with sampling (do_sample + temperature/top_k/top_p)
    # makes the summary non-deterministic across runs; kept as configured.
    summary_ids = model.generate(
        input_ids,
        min_length=30,
        max_length=128,
        num_beams=8,
        repetition_penalty=2.0,
        length_penalty=0.8,
        early_stopping=True,
        no_repeat_ngram_size=2,
        use_cache=True,
        do_sample=True,
        temperature=1.2,
        top_k=50,
        top_p=0.95,
    )
    # Decode the best sequence, dropping <pad>/<s> etc.
    summary_text = tokenizer.decode(summary_ids[0], skip_special_tokens=True)

    # Display results
    st.subheader("Summary")
    st.write(summary_text)