ciyidogan commited on
Commit
e213720
·
verified ·
1 Parent(s): 8057dbd

Update admin_routes.py

Browse files
Files changed (1) hide show
  1. admin_routes.py +151 -14
admin_routes.py CHANGED
@@ -236,14 +236,80 @@ def add_activity_log(config: dict, username: str, action: str,
236
  if len(config["activity_log"]) > 1000:
237
  config["activity_log"] = config["activity_log"][-1000:]
238
 
239
- async def notify_spark(project: dict, version: dict, global_config: dict):
240
- """Notify Spark about published version"""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
241
  import httpx
242
 
243
  spark_endpoint = global_config.get("spark_endpoint", "").rstrip("/")
 
 
244
  if not spark_endpoint:
245
- log("⚠️ Spark endpoint not configured")
246
- return
 
 
247
 
248
  work_mode = global_config.get("work_mode", "hfcloud")
249
  cloud_token = global_config.get("cloud_token", "")
@@ -264,17 +330,19 @@ async def notify_spark(project: dict, version: dict, global_config: dict):
264
  "fine_tune_zip": version["llm"]["fine_tune_zip"] if version["llm"]["use_fine_tune"] else None
265
  }
266
 
267
- log(f"πŸš€ Notifying Spark about {project['name']} v{version['id']}")
 
 
 
 
 
268
 
269
  async with httpx.AsyncClient(timeout=30) as client:
270
- try:
271
- response = await client.post(spark_endpoint + "/startup", json=payload)
272
- response.raise_for_status()
273
- result = response.json()
274
- log(f"βœ… Spark notification successful: {result.get('message', 'OK')}")
275
- except Exception as e:
276
- log(f"❌ Spark notification failed: {e}")
277
- raise
278
 
279
  # ===================== Auth Endpoints =====================
280
  @router.post("/login", response_model=LoginResponse)
@@ -791,7 +859,7 @@ async def publish_version(
791
  # Notify Spark if project is enabled
792
  if project.get("enabled", False):
793
  try:
794
- await notify_spark(project, version, config.get("config", {}))
795
  except Exception as e:
796
  log(f"⚠️ Failed to notify Spark: {e}")
797
  # Don't fail the publish
@@ -959,6 +1027,75 @@ async def delete_api(api_name: str, username: str = Depends(verify_token)):
959
  log(f"βœ… API '{api_name}' deleted by {username}")
960
  return {"success": True}
961
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
962
  # ===================== Test Endpoints =====================
963
  @router.post("/apis/test")
964
  async def test_api(api: APICreate, username: str = Depends(verify_token)):
 
236
  if len(config["activity_log"]) > 1000:
237
  config["activity_log"] = config["activity_log"][-1000:]
238
 
239
async def _spark_project_control(action: str, project_name: str, username: str):
    """Proxy a project-control action ("enable", "disable" or "delete") to Spark.

    Validates configuration, attaches the Spark bearer token, and forwards
    the action to the Spark endpoint. Upstream HTTP errors are re-raised as
    HTTPException carrying Spark's own status code; anything else becomes 500.

    Raises:
        HTTPException(400): missing project_name, endpoint, or token.
        HTTPException(<upstream>): Spark rejected the request.
        HTTPException(500): transport or other unexpected failure.
    """
    if not project_name:
        raise HTTPException(status_code=400, detail="project_name is required")

    config = load_config()
    spark_endpoint = config.get("config", {}).get("spark_endpoint", "").rstrip("/")
    spark_token = _get_spark_token()

    if not spark_endpoint:
        raise HTTPException(status_code=400, detail="Spark endpoint not configured")

    if not spark_token:
        raise HTTPException(status_code=400, detail="Spark token not configured")

    headers = {
        "Authorization": f"Bearer {spark_token}",
        "Content-Type": "application/json"
    }

    try:
        async with httpx.AsyncClient(timeout=30) as client:
            if action == "delete":
                # BUG FIX: httpx's client.delete() takes no json= body parameter
                # (it raises TypeError); DELETE-with-body needs the generic
                # request() API.
                response = await client.request(
                    "DELETE",
                    f"{spark_endpoint}/project/delete",
                    json={"project_name": project_name},
                    headers=headers
                )
            else:
                response = await client.post(
                    f"{spark_endpoint}/project/{action}",
                    json={"project_name": project_name},
                    headers=headers
                )

            response.raise_for_status()
            return response.json()

    except httpx.HTTPStatusError as e:
        # Forward Spark's status code; fall back to raw text when the error
        # body is not valid JSON (previously this could raise a second error).
        try:
            error_detail = e.response.json() if e.response.text else {"error": str(e)}
        except ValueError:
            error_detail = {"error": e.response.text}
        raise HTTPException(status_code=e.response.status_code, detail=error_detail)
    except Exception as e:
        log(f"❌ Spark {action} failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
283
+
284
def _get_spark_token() -> Optional[str]:
    """Resolve the Spark auth token according to the configured work_mode.

    Cloud modes read SPARK_TOKEN from the environment (HuggingFace Secrets);
    on-premise mode loads it from a local .env file first.
    """
    cfg = load_config()
    mode = cfg.get("config", {}).get("work_mode", "on-premise")

    if mode not in ("hfcloud", "cloud"):
        # On-premise: the token lives in a .env file next to the app.
        from dotenv import load_dotenv
        load_dotenv()
        return os.getenv("SPARK_TOKEN")

    # Cloud: the token must be injected via HuggingFace Secrets.
    token = os.getenv("SPARK_TOKEN")
    if not token:
        log("❌ SPARK_TOKEN not found in HuggingFace Secrets!")
    return token
300
+
301
+ async def notify_spark_manual(project: dict, version: dict, global_config: dict):
302
+ """Manual Spark notification (similar to notify_spark but returns response)"""
303
  import httpx
304
 
305
  spark_endpoint = global_config.get("spark_endpoint", "").rstrip("/")
306
+ spark_token = _get_spark_token()
307
+
308
  if not spark_endpoint:
309
+ raise ValueError("Spark endpoint not configured")
310
+
311
+ if not spark_token:
312
+ raise ValueError("Spark token not configured")
313
 
314
  work_mode = global_config.get("work_mode", "hfcloud")
315
  cloud_token = global_config.get("cloud_token", "")
 
330
  "fine_tune_zip": version["llm"]["fine_tune_zip"] if version["llm"]["use_fine_tune"] else None
331
  }
332
 
333
+ headers = {
334
+ "Authorization": f"Bearer {spark_token}",
335
+ "Content-Type": "application/json"
336
+ }
337
+
338
+ log(f"πŸš€ Manually notifying Spark about {project['name']} v{version['id']}")
339
 
340
  async with httpx.AsyncClient(timeout=30) as client:
341
+ response = await client.post(spark_endpoint + "/startup", json=payload, headers=headers)
342
+ response.raise_for_status()
343
+ result = response.json()
344
+ log(f"βœ… Spark manual notification successful: {result.get('message', 'OK')}")
345
+ return result
 
 
 
346
 
347
  # ===================== Auth Endpoints =====================
348
  @router.post("/login", response_model=LoginResponse)
 
859
  # Notify Spark if project is enabled
860
  if project.get("enabled", False):
861
  try:
862
+ await notify_spark_manual(project, version, config.get("config", {}))
863
  except Exception as e:
864
  log(f"⚠️ Failed to notify Spark: {e}")
865
  # Don't fail the publish
 
1027
  log(f"βœ… API '{api_name}' deleted by {username}")
1028
  return {"success": True}
1029
 
1030
+ # ===================== Spark Integration Endpoints =====================
1031
@router.post("/spark/startup")
async def spark_startup(request: dict = Body(...), username: str = Depends(verify_token)):
    """Trigger Spark startup for the published version of a project."""
    project_name = request.get("project_name")
    if not project_name:
        raise HTTPException(status_code=400, detail="project_name is required")

    config = load_config()

    # Locate the project by name.
    project = None
    for candidate in config.get("projects", []):
        if candidate["name"] == project_name:
            project = candidate
            break
    if project is None:
        raise HTTPException(status_code=404, detail=f"Project not found: {project_name}")

    # Locate its published version.
    version = None
    for candidate in project.get("versions", []):
        if candidate.get("published", False):
            version = candidate
            break
    if version is None:
        raise HTTPException(status_code=400, detail=f"No published version found for project: {project_name}")

    # Hand off to Spark; any failure surfaces as a 500.
    try:
        result = await notify_spark_manual(project, version, config.get("config", {}))
        return {"message": result.get("message", "Spark startup initiated")}
    except Exception as e:
        log(f"❌ Spark startup failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
1057
+
1058
@router.get("/spark/projects")
async def spark_get_projects(username: str = Depends(verify_token)):
    """Fetch the project list from Spark and return it as-is.

    Raises:
        HTTPException(400): Spark endpoint or token not configured.
        HTTPException(<upstream>): Spark answered with an error status
            (forwarded, consistent with _spark_project_control).
        HTTPException(500): transport or other unexpected failure.
    """
    config = load_config()
    spark_endpoint = config.get("config", {}).get("spark_endpoint", "").rstrip("/")
    spark_token = _get_spark_token()

    if not spark_endpoint:
        raise HTTPException(status_code=400, detail="Spark endpoint not configured")

    if not spark_token:
        raise HTTPException(status_code=400, detail="Spark token not configured")

    headers = {
        "Authorization": f"Bearer {spark_token}"
    }

    try:
        async with httpx.AsyncClient(timeout=30) as client:
            response = await client.get(spark_endpoint + "/project/list", headers=headers)
            response.raise_for_status()
            return response.json()
    except httpx.HTTPStatusError as e:
        # CONSISTENCY FIX: forward Spark's own status code instead of
        # collapsing every upstream error into a 500, matching
        # _spark_project_control's behavior.
        log(f"❌ Failed to get Spark projects: {e}")
        raise HTTPException(status_code=e.response.status_code, detail=str(e))
    except Exception as e:
        log(f"❌ Failed to get Spark projects: {e}")
        raise HTTPException(status_code=500, detail=str(e))
1083
+
1084
@router.post("/spark/project/enable")
async def spark_enable_project(request: dict = Body(...), username: str = Depends(verify_token)):
    """Enable a project in Spark (thin proxy over _spark_project_control)."""
    project_name = request.get("project_name")
    return await _spark_project_control("enable", project_name, username)
1088
+
1089
@router.post("/spark/project/disable")
async def spark_disable_project(request: dict = Body(...), username: str = Depends(verify_token)):
    """Disable a project in Spark (thin proxy over _spark_project_control)."""
    project_name = request.get("project_name")
    return await _spark_project_control("disable", project_name, username)
1093
+
1094
@router.delete("/spark/project/{project_name}")
async def spark_delete_project(project_name: str, username: str = Depends(verify_token)):
    """Remove a project from Spark (thin proxy over _spark_project_control)."""
    result = await _spark_project_control("delete", project_name, username)
    return result
1098
+
1099
  # ===================== Test Endpoints =====================
1100
  @router.post("/apis/test")
1101
  async def test_api(api: APICreate, username: str = Depends(verify_token)):