# Demo2 / app.py
from transformers import pipeline
import gradio as gr
# Initialize the summarization pipeline with DistilBART fine-tuned on CNN/DailyMail
model = pipeline("summarization", model="sshleifer/distilbart-cnn-12-6")
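# Note: on the first run, the model weights are downloaded from the Hugging Face Hub
# and cached locally; later runs load directly from the cache.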
def predict(prompt):
    """Return a summary of the given text."""
    summary = model(prompt)[0]['summary_text']
    return summary
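# The summarization pipeline also accepts optional generation-length controls,
# e.g. model(prompt, max_length=130, min_length=30, do_sample=False);
# the values shown are illustrative, and the defaults are used above.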
# Build the Gradio interface
with gr.Blocks() as demo:
    textbox = gr.Textbox(placeholder="Enter text block to summarize", lines=4)
    summary_box = gr.Textbox(label="Summary")
    summarize_btn = gr.Button("Summarize")
    # Wire the button to the prediction function
    summarize_btn.click(fn=predict, inputs=textbox, outputs=summary_box)
# Launch the demo
demo.launch()
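# When running outside Hugging Face Spaces, demo.launch(share=True) can be used
# to expose a temporary public URL for the app.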