clarkchan's picture
123
92610d2
raw
history blame
412 Bytes
import gradio as gr
from transformers import AutoTokenizer
# Load the pretrained tokenizer for the Chinese RoBERTa sentiment model
# (fetched from the Hugging Face hub; downloads on first run).
tokenizer = AutoTokenizer.from_pretrained("liam168/c2-roberta-base-finetuned-dianping-chinese")
def greet(name: str) -> str:
    """Tokenize *name* and return its token ids as a comma-separated string.

    The original joined ``map(str, result.input_ids.numpy())``, which
    stringifies the single (1, seq_len) *row array* — producing a numpy
    repr like ``"[ 101 2769  102]"`` — rather than the individual ids.
    """
    result = tokenizer([name], padding=True, truncation=True, return_tensors="pt")
    # input_ids has shape (1, seq_len); take the one row as plain Python ints.
    token_ids = result.input_ids[0].tolist()
    return ','.join(map(str, token_ids))
# Expose greet as a simple text-in/text-out Gradio demo and start the local server.
iface = gr.Interface(fn=greet, inputs="text", outputs="text")
iface.launch()