# app.py — bilalfaye: Wolof/French/English translator (commit ba8613b, verified)
import gradio as gr
from transformers import pipeline
import torch
# Load the model and tokenizer
# NLLB-200 distilled 600M checkpoint fine-tuned for Wolof <-> French <-> English.
# NOTE: this downloads the model weights on first run (import-time side effect).
model_name = 'bilalfaye/nllb-200-distilled-600M-wo-fr-en'
# Prefer GPU when available; transformers' pipeline accepts "cuda"/"cpu" strings.
device = "cuda" if torch.cuda.is_available() else "cpu"
# Define the translation pipeline
# src_lang/tgt_lang are supplied per call in translate_chat, not fixed here.
translator = pipeline(
"translation",
model=model_name,
device=device
)
# Define the translation function (Gradio ChatInterface callback).
def translate_chat(message, history, source_language, target_language):
    """Translate *message* between Wolof, French, and English.

    Args:
        message: Text entered by the user in the chat box.
        history: Chat history supplied by ChatInterface — intentionally
            unused; each message is translated independently.
        source_language: One of "Wolof", "French", "English".
        target_language: One of "Wolof", "French", "English".

    Returns:
        The translated text, or a human-readable error message when the
        language selection or the input is invalid.
    """
    # Mapping of UI language names to NLLB-200 FLORES language codes.
    lang_map = {
        "Wolof": "wol_Latn",
        "English": "eng_Latn",
        "French": "fra_Latn",
    }
    if source_language not in lang_map or target_language not in lang_map:
        return "Invalid language selection."
    src_lang = lang_map[source_language]
    tgt_lang = lang_map[target_language]
    if src_lang == tgt_lang:
        return "Source and target languages must be different."
    # Guard against empty/whitespace-only input, which would waste a model call.
    if not message or not message.strip():
        return "Please enter text to translate."
    # Perform the translation; pipeline returns [{"translation_text": ...}].
    translation = translator(message, src_lang=src_lang, tgt_lang=tgt_lang)
    return translation[0]["translation_text"]
# Build the Gradio chat UI: a chat box plus two dropdowns that select the
# translation direction between Wolof, French, and English.
_language_choices = ["Wolof", "French", "English"]

_source_dropdown = gr.Dropdown(
    choices=_language_choices,
    label="Source Language",
    value="Wolof",  # default direction: Wolof -> English
)
_target_dropdown = gr.Dropdown(
    choices=_language_choices,
    label="Target Language",
    value="English",
)

interface = gr.ChatInterface(
    fn=translate_chat,
    additional_inputs=[_source_dropdown, _target_dropdown],
    title="Wolof ↔ French ↔ English Translator",
    description="Select the source and target languages (in the bottom) to translate between Wolof, French, and English.",
)

# Launch the app (verbose debug logging; share=True requests a public link).
interface.launch(debug=True, share=True)