Update app.py
app.py
CHANGED
@@ -6,7 +6,7 @@ def load_model():
     # Load the Hugging Face model here
     # For example, use the transformers library to load the model
     from transformers import AutoModelForSequenceClassification, AutoTokenizer
-    model_name = "zeroMN/
+    model_name = "https://huggingface.co/zeroMN/SHMT"
     model = AutoModelForSequenceClassification.from_pretrained(model_name)
     tokenizer = AutoTokenizer.from_pretrained(model_name)
     return model, tokenizer
@@ -14,7 +14,7 @@ def load_model():
 def infer(text):
     model, tokenizer = load_model()
     inputs = tokenizer(text, return_tensors="pt")
-    outputs = model("zeroMN/
+    outputs = model("https://huggingface.co/zeroMN/SHMT")
     # Generate a response based on the model output
     return f"Predicted response for: {text}"
 
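A note on the updated lines: both pass the full model-page URL, but in the transformers API, from_pretrained expects a repo id (here that would be "zeroMN/SHMT") or a local path rather than a URL, and the model itself is called with the tokenized inputs, not a string. Below is a minimal sketch of how load_model and infer are typically wired up, assuming the zeroMN/SHMT checkpoint is compatible with AutoModelForSequenceClassification as app.py already assumes; the class-id formatting of the response is illustrative only.

import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Assumption: from_pretrained takes the repo id, not the full
# https://huggingface.co/... URL used in the diff above.
MODEL_NAME = "zeroMN/SHMT"

def load_model():
    # Assumption: the checkpoint works with a sequence-classification head,
    # as the original app.py assumes.
    model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME)
    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
    return model, tokenizer

def infer(text):
    model, tokenizer = load_model()
    inputs = tokenizer(text, return_tensors="pt")
    with torch.no_grad():
        # The model is called with the tokenized inputs, not a URL string.
        outputs = model(**inputs)
    # Pick the highest-scoring class as an illustrative "response".
    predicted_class = outputs.logits.argmax(dim=-1).item()
    return f"Predicted class {predicted_class} for: {text}"

Re-loading the model on every infer call is slow; in a real Space the load would usually be done once at module level or cached (for example with functools.lru_cache) so repeated requests reuse the same model and tokenizer.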