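# Gradio demo: translate input text into a chosen target language with SMALL-100
# (alirezamsh/small100), a compact many-to-many multilingual translation model.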
import gradio as gr
import os

# Install the model dependencies at runtime (alternatively, list them in requirements.txt).
os.system("pip install transformers sentencepiece torch")

from transformers import M2M100ForConditionalGeneration
from tokenization_small100 import SMALL100Tokenizer  # local module providing the SMALL-100 tokenizer

# Load the SMALL-100 checkpoint and its tokenizer from the Hugging Face Hub
model = M2M100ForConditionalGeneration.from_pretrained("alirezamsh/small100")
tokenizer = SMALL100Tokenizer.from_pretrained("alirezamsh/small100")

def translate(text, lang):
    # SMALL-100 encodes the target language during tokenization, so set it before encoding
    tokenizer.tgt_lang = lang
    encoded = tokenizer(text, return_tensors="pt")
    generated_tokens = model.generate(**encoded)
    # batch_decode returns a list of strings; return the single translation
    return tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)[0]

demo = gr.Interface(
    fn=translate,
    inputs=[gr.Textbox(label="Text"), gr.Textbox(label="Target language code (e.g. 'fr')")],
    outputs=gr.Textbox(label="Translation"),
)
demo.launch()
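
# Example usage outside the UI (hypothetical input; any valid SMALL-100 language code works):
#   print(translate("Life is like a box of chocolates.", "fr"))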