# (Hugging Face Spaces page residue removed: "Spaces: Sleeping Sleeping")
from pythainlp.tokenize import word_tokenize
import gradio as gr
def cut_word(name: str) -> str:
    """Tokenize Thai text and return the tokens joined by single spaces.

    Args:
        name: The input string to tokenize (Gradio passes the textbox value).

    Returns:
        The space-separated word tokens produced by pythainlp's
        ``word_tokenize`` (default engine).
    """
    # Bug fix: the original body referenced an undefined global `text`
    # instead of the `name` parameter, raising NameError on every call.
    return ' '.join(word_tokenize(name))
# Wire the tokenizer into a minimal Gradio UI: one textbox in, plain text out.
iface = gr.Interface(
    fn=cut_word,
    inputs="textbox",
    outputs="text",
)
# Start the local Gradio server (blocks until the app is stopped).
iface.launch()