aicodingfun committed (verified)
Commit 5b9ab12 · Parent(s): a55e991

use deepseek-r1 for inference

Files changed (1): app.py (+3 −2)
app.py CHANGED

@@ -9,7 +9,7 @@ import gradio as gr
 from huggingface_hub import InferenceClient
 
 HF_TOKEN = os.environ.get("HF_TOKEN")
-client = InferenceClient(api_key=HF_TOKEN)
+client = InferenceClient(provider="sambanova", api_key=HF_TOKEN)
 
 # Path to the question bank directory
 output_dir = "./question_bank"
@@ -125,7 +125,8 @@ def generate_math_questions(grade, term, qtype="Unspecified", num_questions=10):
 
     # Use InferenceClient to call the API model and generate new questions
     completion = client.chat.completions.create(
-        model="mistralai/Mistral-7B-Instruct-v0.3",
+        # model="mistralai/Mistral-7B-Instruct-v0.3",
+        model="deepseek-ai/DeepSeek-R1",
         messages=messages,
         max_tokens=num_questions * 200
     )
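
For reference, a minimal sketch of the inference path this commit switches to. It assumes a huggingface_hub version recent enough for InferenceClient to accept the provider argument and that HF_TOKEN is valid for routing to the SambaNova provider; the sample prompt and the strip_reasoning helper are illustrative placeholders, not code from app.py (DeepSeek-R1 typically wraps its chain of thought in <think>...</think> tags, which a caller may want to strip before display).

import os
import re

from huggingface_hub import InferenceClient

# Assumes a huggingface_hub release new enough for InferenceClient to accept `provider`.
HF_TOKEN = os.environ.get("HF_TOKEN")
client = InferenceClient(provider="sambanova", api_key=HF_TOKEN)


def strip_reasoning(text: str) -> str:
    # Illustrative helper: DeepSeek-R1 usually prefixes its answer with a
    # <think>...</think> reasoning block; drop it before showing the questions.
    return re.sub(r"<think>.*?</think>", "", text, flags=re.DOTALL).strip()


# Hypothetical prompt standing in for the messages the app builds from its question bank.
messages = [{"role": "user", "content": "Generate 2 multiplication word problems for grade 3."}]

completion = client.chat.completions.create(
    model="deepseek-ai/DeepSeek-R1",
    messages=messages,
    max_tokens=2 * 200,  # mirrors the app's num_questions * 200 budget
)

print(strip_reasoning(completion.choices[0].message.content))

Keeping the old Mistral model ID as a comment in the diff leaves an obvious rollback path; note also that R1's reasoning tokens count toward max_tokens, so the num_questions * 200 budget may need to grow if responses come back truncated.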