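# Gradio demo: tokenize the input text, translate it, and detokenize the result,
# returning outputs from both tokenizer/detokenizer variants plus the tokenized source.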
import gradio as gr |

from tokenizer import tokenize, tokenize2
from translater import translate
from detokenizer import detokenize, detokenize2


def run(source_text, mode):
    # Tokenize the input with both tokenizer variants.
    source_tokens = tokenize(source_text, mode)
    source_tokens2 = tokenize2(source_text, mode)
    source_tokenized_text = ' '.join(source_tokens)
    # Translate both token sequences, then detokenize each result.
    target_tokens, target_tokens2 = translate(source_tokens, source_tokens2, mode)
    target_text = detokenize(target_tokens, mode)
    target_text2 = detokenize2(target_tokens2, mode)
    return target_text, target_text2, source_tokenized_text


# The dropdown selects the translation direction:
# "汉译英" = Chinese-to-English, "英译汉" = English-to-Chinese.
demo = gr.Interface(
    fn=run,
    inputs=["text", gr.Dropdown(["汉译英", "英译汉"])],
    outputs=["text", "text", "text"],
)

demo.launch()