import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
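
# Both the checkpoint and its tokenizer are downloaded from the Hugging Face Hub
# on first run and cached locally for later runs.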
model_name = "facebook/bart-large-cnn"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)


def summarize_text(input_text):
    # Tokenize the input, truncating to the model's 1024-token limit.
    input_ids = tokenizer.encode(input_text, return_tensors="pt", max_length=1024, truncation=True)
    # Beam search (4 beams), with the summary length kept between 5 and 20 tokens.
    summary_ids = model.generate(input_ids, max_length=20, min_length=5, length_penalty=2.0, num_beams=4, early_stopping=True)
    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
    return summary
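
# Quick sanity check without the web UI (the input string below is only an
# illustrative placeholder, not part of the original app):
#   print(summarize_text("Gradio is an open-source Python library that makes it "
#                        "easy to build and share web demos for machine learning models."))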


iface = gr.Interface(
    fn=summarize_text,
    inputs="text",
    outputs="text",
    title="Text Summarizer",
    description="Summarize your text with the facebook/bart-large-cnn model.",
)
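
# launch() serves the demo locally (by default at http://127.0.0.1:7860);
# passing share=True to launch() would additionally create a temporary public link.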

if __name__ == "__main__":
    iface.launch()
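
# Assumed setup (not specified in the original file): install the dependencies
# and run this script directly, e.g.
#   pip install gradio transformers torch
#   python app.py   # "app.py" is a placeholder for whatever this file is named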