import streamlit as st
from transformers import PegasusForConditionalGeneration, PegasusTokenizer, pipeline
# Load the model and tokenizer
model = PegasusForConditionalGeneration.from_pretrained("fatihfauzan26/PEGASUS_liputan6")
tokenizer = PegasusTokenizer.from_pretrained("google/pegasus-cnn_dailymail")
# Initialize the summarization pipeline (kept for convenience; the app below calls model.generate directly)
summarizer = pipeline("summarization", model=model, tokenizer=tokenizer)
# Streamlit interface
st.title("Summarization App using PEGASUS")
# Input article for summarization
sample_article = st.text_area('Enter the article you want to summarize', height=300)
if sample_article:
    # Generate summary (beam search combined with sampling)
    input_ids = tokenizer.encode(sample_article, return_tensors='pt')
    summary_ids = model.generate(input_ids,
                                 min_length=30,
                                 max_length=128,
                                 num_beams=8,
                                 repetition_penalty=2.0,
                                 length_penalty=0.8,
                                 early_stopping=True,
                                 no_repeat_ngram_size=2,
                                 use_cache=True,
                                 do_sample=True,
                                 temperature=1.2,
                                 top_k=50,
                                 top_p=0.95)

    # Decode the generated token IDs back into text
    summary_text = tokenizer.decode(summary_ids[0], skip_special_tokens=True)

    # Display results
    st.subheader("Summary")
    st.write(summary_text)
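
# Optional: the `summarizer` pipeline initialized above can produce the same kind of
# summary in a single call. The helper below is a minimal sketch and is not invoked
# by the app; it reuses the generation parameters from model.generate() above and
# assumes the pipeline forwards them to generate() (standard transformers behaviour).
def summarize_with_pipeline(text: str) -> str:
    # The summarization pipeline returns a list of dicts with a 'summary_text' key.
    result = summarizer(text,
                        min_length=30,
                        max_length=128,
                        num_beams=8,
                        repetition_penalty=2.0,
                        length_penalty=0.8,
                        no_repeat_ngram_size=2)
    return result[0]["summary_text"]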