Jaehan's picture
Update app.py
565a330
raw
history blame contribute delete
918 Bytes
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import gradio as gr

# WikiHow-fine-tuned T5 checkpoint (encoder-decoder) used for summarization.
model_name = "deep-learning-analytics/wikihow-t5-small"
text2text_token = AutoTokenizer.from_pretrained(model_name)
# NOTE: AutoModelWithLMHead is deprecated (removed in transformers v5);
# AutoModelForSeq2SeqLM is its replacement for encoder-decoder models like T5.
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
def text2text_summary(para):
    """Summarize a paragraph with the WikiHow-fine-tuned T5 model.

    Args:
        para: Free-form input text to summarize.

    Returns:
        The generated summary string, with special tokens stripped.
    """
    # Replace newlines with a space (not an empty string) so words on
    # adjacent lines are not glued together: "end\nStart" -> "end Start".
    initial_text = para.strip().replace("\n", " ")
    token_text = text2text_token.encode(initial_text, return_tensors="pt")
    # Beam search plus a repetition penalty reduces repetitive output;
    # early_stopping ends beams as soon as they emit EOS.
    token_ids = model.generate(
        token_text,
        max_length=250,
        num_beams=5,
        repetition_penalty=2.5,
        early_stopping=True,
    )
    return text2text_token.decode(token_ids[0], skip_special_tokens=True)
# --- UI: a single textbox in, a single summary line out ---
demo = gr.Interface(
    fn=text2text_summary,
    inputs=gr.Textbox(
        lines=10,
        label="Input paragraph",
        placeholder="Place your paragraph to summarize here...",
    ),
    outputs=gr.Textbox(lines=1, label="Summary"),
)
demo.launch()