"""Gradio demo translating between Kinyarwanda and English with NLLB checkpoints."""
import gradio as gr
import torch
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer, pipeline

# FLORES-200 language codes offered in the UI.
LANGS = ["kin_Latn", "eng_Latn"]
TASK = "translation"
# Checkpoints the user can choose from.
models = ["facebook/nllb-200-distilled-600M", "DigitalUmuganda/Finetuned-NLLB"]

# HF pipeline convention: device 0 = first GPU, -1 = CPU.
device = 0 if torch.cuda.is_available() else -1


def translate(model, text, src_lang, tgt_lang, max_length=400):
    """Translate ``text`` from ``src_lang`` to ``tgt_lang`` with the chosen checkpoint.

    Args:
        model: Hugging Face checkpoint id selected in the model dropdown.
        text: Source text to translate.
        src_lang: FLORES-200 code of the source language.
        tgt_lang: FLORES-200 code of the target language.
        max_length: Maximum length of the generated translation.

    Returns:
        The translated text (str).
    """
    # BUG FIX: the original rebound `model` to the loaded model object and then
    # passed that object to AutoTokenizer.from_pretrained, which expects the
    # checkpoint id string. Use a separate name so the tokenizer loads from the
    # same checkpoint id the user selected.
    seq2seq_model = AutoModelForSeq2SeqLM.from_pretrained(model)
    tokenizer = AutoTokenizer.from_pretrained(model)
    translation_pipeline = pipeline(
        TASK,
        model=seq2seq_model,
        tokenizer=tokenizer,
        src_lang=src_lang,
        tgt_lang=tgt_lang,
        max_length=max_length,
        device=device,
    )
    result = translation_pipeline(text)
    return result[0]["translation_text"]


gr.Interface(
    translate,
    [
        # BUG FIX: the original inputs list was missing the comma after this
        # Dropdown, which made the whole script a SyntaxError.
        gr.components.Dropdown(label="choose a model", choices=models),
        gr.components.Textbox(label="Text"),
        gr.components.Dropdown(label="Source Language", choices=LANGS),
        gr.components.Dropdown(label="Target Language", choices=LANGS),
    ],
    ["text"],
    cache_examples=False,
    title="Finetuned-NLLB",
).launch()