from transformers import T5Tokenizer, T5ForConditionalGeneration
import gradio as gr

# Load the Indonesian T5 summarization model and its tokenizer
tokenizer_t5 = T5Tokenizer.from_pretrained("panggi/t5-base-indonesian-summarization-cased")
model_t5 = T5ForConditionalGeneration.from_pretrained("panggi/t5-base-indonesian-summarization-cased")
def summ_t5(text):
    # Encode the input article and generate a beam-searched summary
    input_ids = tokenizer_t5.encode(text, return_tensors='pt')
    summary_ids = model_t5.generate(input_ids,
                                    max_length=100,
                                    num_beams=2,
                                    repetition_penalty=2.5,
                                    length_penalty=1.0,
                                    early_stopping=True,
                                    no_repeat_ngram_size=2,
                                    use_cache=True)
    summary_text = tokenizer_t5.decode(summary_ids[0], skip_special_tokens=True)
    return summary_text
# Gradio demo: paste an Indonesian article and get its summary
summ_demo = gr.Interface(
    fn=summ_t5,
    inputs=gr.Textbox(lines=10, label="Input Text", placeholder="Enter article here..."),
    outputs="text",
    title="Summary of Summarizer - Indonesia")

if __name__ == "__main__":
    summ_demo.launch()