import gradio as gr
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
article='''
# Team members
- Emilio Alejandro Morales [(milmor)](https://huggingface.co/milmor)
- Rodrigo Martínez Arzate [(rockdrigoma)](https://huggingface.co/rockdrigoma)
- Luis Armando Mercado [(luisarmando)](https://huggingface.co/luisarmando)
- Jacobo del Valle [(jjdv)](https://huggingface.co/jjdv)
'''
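# Load the fine-tuned Spanish-to-Nahuatl T5 model and its tokenizer from the Hugging Face Hub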
model = AutoModelForSeq2SeqLM.from_pretrained('hackathon-pln-es/t5-small-spanish-nahuatl')
tokenizer = AutoTokenizer.from_pretrained('hackathon-pln-es/t5-small-spanish-nahuatl')
def predict(input):
    # Prepend the task prefix expected by the fine-tuned T5 model and tokenize the input
    input_ids = tokenizer('translate Spanish to Nahuatl: ' + input, return_tensors='pt').input_ids
    # Generate the Nahuatl translation with the default generation settings
    outputs = model.generate(input_ids)
    # Decode the generated token ids back to text, dropping special tokens
    outputs = tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]
    return outputs
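# Build and launch the Gradio demo. Note: gr.inputs/gr.outputs and enable_queue
# belong to the legacy Gradio API, so this assumes an older Gradio release (roughly 2.x/3.x).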
gr.Interface(
    fn=predict,
    inputs=gr.inputs.Textbox(lines=1, label="Input Text in Spanish"),
    outputs=[
        gr.outputs.Textbox(label="Translated text in Nahuatl"),
    ],
    theme="peach",
    title='🌽 Spanish to Nahuatl Automatic Translation',
    description='This model is a T5 Transformer (t5-small) fine-tuned on Spanish and Nahuatl sentences collected from the web. The dataset is normalized using the "sep" normalization from py-elotl. For more details visit https://huggingface.co/hackathon-pln-es/t5-small-spanish-nahuatl',
    examples=[
        'conejo',
        'estrella',
        'Muchos perros son blancos',
        'te amo',
        'quiero comer',
        'esto se llama agua',
        'Mi hermano es un ajolote',
        'mi abuelo se llama Juan',
        'El pueblo del ajolote',
        'te amo con todo mi corazón'],
    article=article,
    allow_flagging="manual",
    flagging_options=["right translation", "wrong translation", "error", "other"],
    flagging_dir="logs"
).launch(enable_queue=True)