Update app.py
app.py CHANGED
@@ -9,8 +9,10 @@ import gradio as gr
 from huggingface_hub import InferenceClient
 
 HF_TOKEN = os.environ.get("HF_TOKEN")
+API_BASE_URL = os.environ.get("API_BASE_URL")
 client = InferenceClient(
     api_key=HF_TOKEN,
+    base_url=API_BASE_URL
 )
 
 # Load the path of the question-and-answer database
@@ -128,7 +130,7 @@ def generate_math_questions(grade, term, qtype="Unspecified", num_questions=10):
     # Use the InferenceClient to call the API model and generate new questions
     completion = client.chat.completions.create(
         # model="mistralai/Mistral-7B-Instruct-v0.3",
-        model="
+        model="nvjob/DeepSeek-R1-32B-Cline:latest",
         # model="mistralai/Mistral-Nemo-Instruct-2407",
         messages=messages,
         max_tokens=2048
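For context, here is a minimal sketch of how the updated client configuration would be exercised end to end, assuming HF_TOKEN and API_BASE_URL are set as secrets/variables in the Space's environment. The messages payload and the print at the end are illustrative only; the real app builds messages from the grade, term, and question-type inputs inside generate_math_questions.

import os

from huggingface_hub import InferenceClient

# Read the token and endpoint from the environment, as app.py does after this change.
HF_TOKEN = os.environ.get("HF_TOKEN")
API_BASE_URL = os.environ.get("API_BASE_URL")  # e.g. an OpenAI-compatible endpoint serving the model

client = InferenceClient(
    api_key=HF_TOKEN,
    base_url=API_BASE_URL,
)

# Illustrative prompt; the Space assembles its own prompt from user selections.
messages = [
    {"role": "user", "content": "Generate 10 math questions for grade 5, term 1."}
]

completion = client.chat.completions.create(
    model="nvjob/DeepSeek-R1-32B-Cline:latest",  # model name taken from the diff above
    messages=messages,
    max_tokens=2048,
)

print(completion.choices[0].message.content)

Passing base_url points the InferenceClient at a custom OpenAI-compatible endpoint instead of the default Hugging Face Inference API, so the backend and token can be swapped per deployment through environment variables without touching the code.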