import streamlit as st
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
import torch
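
# Note: assuming this file is saved as app.py, the app can be run locally with
#   streamlit run app.py
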
def generate_summary(model, tokenizer, dialogue):
    # Tokenize the input dialogue
    inputs = tokenizer(dialogue, return_tensors="pt", max_length=1024, truncation=True)

    # Generate the summary token IDs with beam search
    with torch.no_grad():
        summary_ids = model.generate(inputs["input_ids"], max_length=150, length_penalty=0.8, num_beams=4)

    # Decode and return the summary text
    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True, clean_up_tokenization_spaces=True)
    return summary
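
# Standalone usage sketch (for reference only; assumes the model below can be downloaded
# from the Hugging Face Hub):
#   tokenizer = AutoTokenizer.from_pretrained("ale-dp/pegasus-finetuned-dialog-summarizer")
#   model = AutoModelForSeq2SeqLM.from_pretrained("ale-dp/pegasus-finetuned-dialog-summarizer")
#   print(generate_summary(model, tokenizer, "Anna: Lunch tomorrow?\nBen: Sure, 12:30 works."))
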
st.set_page_config(
    page_title="Dialogue Summarizer App",
    page_icon="ale.png",
)
# Display the app title
st.title("Dialogue Summarizer App")
st.info("\n🖥️ Note: This application is running on CPU. Please be patient ⏳.")
st.markdown("This app summarizes dialogues. Enter a short dialogue in the text area. For best results, keep the dialogue at least a few sentences long. You can also use the examples provided at the bottom of the page.")
# Create a two-column layout using st.columns
col1, col2 = st.columns(2)
# User input on the left side with increased height
user_input = col1.text_area("Enter the dialog:", height=300)
# Add "Summarize" and "Clear" buttons
summarize_button = col1.button("Summarize")
clear_button = col1.button("Clear")
# If the "Clear" button is clicked, discard the current input for this run
if clear_button:
    user_input = ""
# If the "Summarize" button is clicked and there is user input,
# generate and display the summary on the right side
if summarize_button and user_input:
    # Load the fine-tuned Pegasus model and tokenizer
    model_name = "ale-dp/pegasus-finetuned-dialog-summarizer"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

    # Generate the summary
    summary = generate_summary(model, tokenizer, user_input)

    # Display the generated summary on the right side
    col2.subheader("Generated Summary:")
    col2.write(summary)
# Placeholder example dialogues (the original list is not defined in this file; replace with your own)
dialogue_examples = [
    "Anna: Are we still on for lunch tomorrow?\nBen: Yes, 12:30 at the usual place.",
    "Tom: Did you finish the report?\nSara: Almost, just adding the charts now.",
]

st.markdown("**Dialogue examples:**")
for idx, example in enumerate(dialogue_examples, 1):
    st.write(f"Example {idx}:\n{example}")