import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
import torch

# NLLB language codes available in the demo
LANGS = ["kin_Latn", "eng_Latn"]
TASK = "translation"

# Run on the first GPU if available, otherwise fall back to the CPU
device = 0 if torch.cuda.is_available() else -1

# Tourism-domain NLLB model fine-tuned for English-Kinyarwanda translation
tourism_model = AutoModelForSeq2SeqLM.from_pretrained("mbazaNLP/Nllb_finetuned_tourism_en_kin")

# Tokenizer loaded from the general-domain checkpoint and reused with the tourism model
tokenizer = AutoTokenizer.from_pretrained("mbazaNLP/Nllb_finetuned_general_en_kin")


def translate(text, source_lang, target_lang, max_length=400):
    """
    Translate text from the source language to the target language.
    """
    # Build a translation pipeline for the requested language pair
    translation_pipeline = pipeline(
        TASK,
        model=tourism_model,
        tokenizer=tokenizer,
        src_lang=source_lang,
        tgt_lang=target_lang,
        max_length=max_length,
        device=device,
    )
    result = translation_pipeline(text)
    return result[0]['translation_text']
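
# Example call (illustrative only; the exact output depends on the model):
#   translate("Hello, welcome to Rwanda.", "eng_Latn", "kin_Latn")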


gradio_ui = gr.Interface(
    fn=translate,
    title="NLLB-Tourism EN-KIN Translation Demo",
    inputs=[
        gr.components.Textbox(label="Text"),
        gr.components.Dropdown(label="Source Language", choices=LANGS),
        gr.components.Dropdown(label="Target Language", choices=LANGS),
    ],
    # gr.outputs is deprecated; use gr.components for the output as well
    outputs=gr.components.Textbox(label="Translated text"),
)

gradio_ui.launch()