zeroMN_SHMT / app.py
import gradio as gr
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

def load_model():
    # Load the Hugging Face checkpoint and its tokenizer via the transformers library
    model_name = "zeroMN/zeroMN_SHMT"
    model = AutoModelForSequenceClassification.from_pretrained(model_name)
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    return model, tokenizer
def infer(text):
    model, tokenizer = load_model()
    inputs = tokenizer(text, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    # Turn the model output into a response: pick the highest-scoring class
    predicted_class = outputs.logits.argmax(dim=-1).item()
    return f"Predicted class for '{text}': {predicted_class}"
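# A minimal caching sketch (an assumption, not part of the original Space):
# wrapping load_model in functools.lru_cache keeps the checkpoint in memory
# after the first request instead of reloading it on every call, and the
# checkpoint config's id2label gives a human-readable label when the model
# defines one. The Interface below still uses infer; pass fn=infer_cached
# to try this variant.
from functools import lru_cache

@lru_cache(maxsize=1)
def load_model_cached():
    # Same "zeroMN/zeroMN_SHMT" checkpoint as above, loaded once and reused.
    return load_model()

def infer_cached(text):
    model, tokenizer = load_model_cached()
    inputs = tokenizer(text, return_tensors="pt")
    with torch.no_grad():
        logits = model(**inputs).logits
    label_id = logits.argmax(dim=-1).item()
    # Fall back to the raw index if the config has no label mapping for it.
    label = model.config.id2label.get(label_id, str(label_id))
    return f"Predicted label for '{text}': {label}"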
# Build the Gradio interface
gr.Interface(fn=infer, inputs="text", outputs="text").launch()