mgbam committed
Commit 1b02051 · verified · 1 Parent(s): 26dcc45

Update app.py

Files changed (1): app.py (+34 -6)
app.py CHANGED
@@ -118,12 +118,12 @@ AVAILABLE_MODELS = [
     {
         "name": "DeepSeek V3",
         "id": "deepseek-ai/DeepSeek-V3-0324",
-        "description": "DeepSeek V3 model for code generation", "provider": "huggingface"
+        "description": "DeepSeek V3 model for code generation", "provider": "deepseek"
     },
     {
         "name": "DeepSeek R1",
         "id": "deepseek-ai/DeepSeek-R1-0528",
-        "description": "DeepSeek R1 model for code generation", "provider": "huggingface"
+        "description": "DeepSeek R1 model for code generation", "provider": "deepseek"
     },
     {
         # "name": "GPT-4o-mini",
@@ -216,6 +216,7 @@ DEMO_LIST = [
 HF_TOKEN = os.getenv("HF_TOKEN")
 OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
 GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
+DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY")
 
 def get_inference_client(model_id):
     """Return an appropriate client based on model_id and configured provider."""
@@ -223,7 +224,13 @@ def get_inference_client(model_id):
     if not model_info:
         raise ValueError(f"Model with id '{model_id}' not found.")
 
-    provider = model_info.get("provider", "huggingface") # Default to HF
+    provider = model_info.get("provider")
+
+    if provider == "deepseek":
+        if not DEEPSEEK_API_KEY:
+            raise ValueError("DEEPSEEK_API_KEY environment variable not set.")
+        from deepseek.client import DeepSeekClient
+        return DeepSeekClient(api_key=DEEPSEEK_API_KEY)
 
     if provider == "groq":
         return InferenceClient(
@@ -277,7 +284,7 @@ def history_to_messages(history: History, system: str) -> Messages:
             text_content = ""
             for item in user_content:
                 if isinstance(item, dict) and item.get("type") == "text":
-                    text_content += item.get("text", "")
+                    text_content += str(item.get("text", ""))
             user_content = text_content if text_content else str(user_content)
 
         messages.append({'role': 'user', 'content': user_content})
@@ -294,7 +301,7 @@ def messages_to_history(messages: Messages) -> Tuple[str, History]:
             text_content = ""
             for item in user_content:
                 if isinstance(item, dict) and item.get("type") == "text":
-                    text_content += item.get("text", "")
+                    text_content += str(item.get("text", ""))
             user_content = text_content if text_content else str(user_content)
 
         history.append([user_content, r['content']])
@@ -309,7 +316,7 @@ def history_to_chatbot_messages(history: History) -> List[Dict[str, str]]:
             text_content = ""
             for item in user_msg:
                 if isinstance(item, dict) and item.get("type") == "text":
-                    text_content += item.get("text", "")
+                    text_content += str(item.get("text", ""))
             user_msg = text_content if text_content else str(user_msg)
 
         messages.append({"role": "user", "content": user_msg})
@@ -1079,6 +1086,27 @@ This will help me create a better design for you."""
                     history_output: history_to_chatbot_messages(_history),
                     sandbox: send_to_sandbox(clean_code) if language == "html" else "<div style='padding:1em;color:#888;text-align:center;'>Preview is only available for HTML. Please download your code using the download button above.</div>",
                 }
+    elif _current_model["provider"] == "deepseek":
+        # Use Deepseek client
+        stream = client.chat.completions.create(
+            model=_current_model["id"],
+            messages=[{"role": m["role"], "content": m["content"]} for m in messages],
+            stream=True,
+            max_tokens=5000,
+        )
+
+        content = ""
+        for chunk in stream:
+            if chunk.choices[0].delta.content:
+                content += chunk.choices[0].delta.content
+                clean_code = remove_code_block(content)
+                # The logic here is the same as for OpenAI, can be refactored
+                yield {
+                    code_output: gr.update(value=clean_code, language=get_gradio_language(language)),
+                    history_output: history_to_chatbot_messages(_history),
+                    sandbox: send_to_sandbox(clean_code) if language == "html" else "<div style='padding:1em;color:#888;text-align:center;'>Preview is only available for HTML. Please download your code using the download button above.</div>",
+                }
+
     else:
         # Use Hugging Face Inference Client (or Groq) for other models
         completion = client.chat.completions.create(
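
For anyone wiring up the new "deepseek" provider locally, here is a minimal, self-contained sketch of the streaming call the added branch performs. It is not part of the commit: it assumes DeepSeek's OpenAI-compatible chat endpoint reached through the standard openai package (rather than the deepseek.client.DeepSeekClient import used in the patch), and the base_url and the "deepseek-chat" model id are assumptions for illustration, not values taken from app.py. Set DEEPSEEK_API_KEY before running.

# Minimal sketch (assumptions: DeepSeek exposes an OpenAI-compatible API at
# https://api.deepseek.com and accepts the illustrative model id "deepseek-chat").
import os
from openai import OpenAI

client = OpenAI(
    api_key=os.environ["DEEPSEEK_API_KEY"],
    base_url="https://api.deepseek.com",
)

stream = client.chat.completions.create(
    model="deepseek-chat",
    messages=[{"role": "user", "content": "Generate a minimal HTML page."}],
    stream=True,
    max_tokens=5000,
)

content = ""
for chunk in stream:
    delta = chunk.choices[0].delta.content
    if delta:  # delta.content can be None on some chunks
        content += delta
print(content)

Because the wire format matches OpenAI's, the chunk-accumulation loop mirrors the one added in the patch (new lines 1098-1101).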