burtenshaw committed on
Commit df45759 · 1 Parent(s): 061fdd4

improve wandb integration

Files changed (1):
  1. app.py +81 -35
app.py CHANGED

@@ -17,6 +17,7 @@ import socket
 
 import gradio as gr
 import pandas as pd
+import wandb
 from autotrain.project import AutoTrainProject
 from autotrain.params import (
     LLMTrainingParams,
@@ -198,10 +199,25 @@ def run_training_background(run_id: str, params: Any, backend: str):
     # Create AutoTrain project - this will handle W&B internally
     project = AutoTrainProject(params=params, backend=backend, process=True)
 
-    # Generate approximate W&B URL
+    # Actually run the training - this blocks until completion
+    print(f"Executing training job for run {run_id}...")
+    result = project.create()
+
+    print(f"Training completed successfully for run {run_id}")
+    print(f"Result: {result}")
+
+    # Get the actual W&B run URL after training starts
     wandb_url = f"https://wandb.ai/{WANDB_PROJECT}"
+    try:
+        if wandb.run is not None:
+            wandb_url = wandb.run.url
+            print(f"Got actual W&B URL: {wandb_url}")
+        else:
+            print("No active W&B run found, using default URL")
+    except Exception as e:
+        print(f"Could not get W&B URL: {e}")
 
-    # Update with W&B URL
+    # Update with actual W&B URL
     runs = load_runs()
     for run in runs:
         if run["run_id"] == run_id:
@@ -209,13 +225,6 @@ def run_training_background(run_id: str, params: Any, backend: str):
             break
     save_runs(runs)
 
-    # Actually run the training - this blocks until completion
-    print(f"Executing training job for run {run_id}...")
-    result = project.create()
-
-    print(f"Training completed successfully for run {run_id}")
-    print(f"Result: {result}")
-
     # Update status to completed
     runs = load_runs()
     for run in runs:
@@ -604,6 +613,25 @@ def get_system_status(random_string: str = "") -> str:
         "spaces-l4x4",
     ]
 
+    # Extract nested expressions to avoid f-string nesting
+    tasks_list = chr(10).join(f" • {task}" for task in available_tasks)
+    backends_list = chr(10).join(
+        f" • {backend}" for backend in available_backends[:10]
+    )
+    backends_more = (
+        f" ... and {len(available_backends) - 10} more"
+        if len(available_backends) > 10
+        else ""
+    )
+    wandb_api_status = (
+        "✅ Configured" if os.environ.get("WANDB_API_KEY") else "❌ Missing"
+    )
+    wandb_metrics_status = (
+        "✅ Enabled"
+        if os.environ.get("WANDB_API_KEY")
+        else "❌ System metrics only"
+    )
+
     status_text = f"""🚀 AutoTrain Gradio MCP Server - System Status
 
 **Server Status:** Running
@@ -613,15 +641,11 @@ def get_system_status(random_string: str = "") -> str:
 **Failed Runs:** {failed_runs}
 
 **Available Tasks:** {len(available_tasks)}
-{chr(10).join(f" • {task}" for task in available_tasks)}
+{tasks_list}
 
 **Available Backends:** {len(available_backends)}
-{chr(10).join(f" • {backend}" for backend in available_backends[:10])}
-{
-    f" ... and {len(available_backends) - 10} more"
-    if len(available_backends) > 10
-    else ""
-}
+{backends_list}
+{backends_more}
 
 💡 **Access Points:**
  • Gradio UI: http://SPACE_URL
@@ -630,12 +654,8 @@ def get_system_status(random_string: str = "") -> str:
 
 🛠️ **W&B Integration:**
  • Project: {WANDB_PROJECT}
- • API Key: {"✅ Configured" if os.environ.get("WANDB_API_KEY") else "❌ Missing"}
- • Training Metrics: {
-    "✅ Enabled"
-    if os.environ.get("WANDB_API_KEY")
-    else "❌ System metrics only"
-}
+ • API Key: {wandb_api_status}
+ • Training Metrics: {wandb_metrics_status}
  • Set WANDB_API_KEY for complete training metrics logging"""
 
     return status_text
@@ -742,7 +762,7 @@ with gr.Blocks(
     """,
 ) as app:
     gr.Markdown("""
-    # 🚀 AutoTrain Gradio MCP Server
+    # 🚀 AutoTrain MCP Server
 
     Get your AI models to train your AI models!
 
@@ -856,6 +876,17 @@
 
     # MCP Info Tab
     with gr.Tab("🔗 MCP Integration"):
+        # Extract nested expressions to avoid f-string nesting in Gradio markdown
+        total_runs = len(load_runs())
+        wandb_auth = (
+            "✅ Configured"
+            if os.environ.get("WANDB_API_KEY")
+            else "❌ Missing WANDB_API_KEY"
+        )
+        hub_auth = (
+            "✅ Configured" if os.environ.get("HF_TOKEN") else "❌ Missing HF_TOKEN"
+        )
+
         gr.Markdown(f"""
         ## MCP Server Information
 
@@ -905,24 +936,39 @@
         - `push_to_hub="true"` - Push to Hub using project name as repo
         - `hub_repo_id="my-org/my-model"` - Push to custom repository
 
-        ### Claude Desktop Configuration:
+        ### Connection to the MCP Server
 
-        ```json
-        {{
-            "mcpServers": {{
-                "autotrain": {{
-                    "url": "http://localhost:7860/gradio_api/mcp/sse"
-                }}
-            }}
-        }}
+        Connect to it like this:
+
+        ```javascript
+        {"mcpServers": {"autotrain": {"url": "http://SPACE_URL/gradio_api/mcp/sse",
+          "headers": {"Authorization": "Bearer <YOUR-HUGGING-FACE-TOKEN>"}
+          }
+         }
+        }
+        ```
+
+        Or like this for Claude Desktop:
+
+        ```javascript
+        {"mcpServers": {"autotrain": {"command": "npx",
+          "args": [
+            "mcp-remote",
+            "http://SPACE_URL/gradio_api/mcp/sse",
+            "--header",
+            "Authorization: Bearer <YOUR-HUGGING-FACE-TOKEN>"
+          ]
+          }
+         }
+        }
         ```
 
        ### Current Stats:
 
-        Total Runs: {len(load_runs())}
+        Total Runs: {total_runs}
        W&B Project: {WANDB_PROJECT}
-        W&B Auth: {"✅ Configured" if os.environ.get("WANDB_API_KEY") else "❌ Missing WANDB_API_KEY"}
-        Hub Auth: {"✅ Configured" if os.environ.get("HF_TOKEN") else "❌ Missing HF_TOKEN"}
+        W&B Auth: {wandb_auth}
+        Hub Auth: {hub_auth}
        """)
 
     # MCP Tools Tab
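The new W&B lookup in `run_training_background` leans on `wandb.run`, the module-level handle that `wandb.init()` sets for the active run; if no run was initialised in this process, the code falls back to the project URL. Below is a minimal standalone sketch of the same fallback pattern. The project name and the offline mode are illustrative, not taken from this commit.

```python
import os

import wandb

WANDB_PROJECT = "autotrain-mcp"  # placeholder project name for illustration


def resolve_wandb_url() -> str:
    """Return the active run's URL, or the project page if no run is live."""
    fallback = f"https://wandb.ai/{WANDB_PROJECT}"
    try:
        # wandb.run stays None until wandb.init() runs in this process,
        # and run.url can be empty when the run is not synced to the cloud.
        if wandb.run is not None and wandb.run.url:
            return wandb.run.url
    except Exception as exc:
        print(f"Could not get W&B URL: {exc}")
    return fallback


if __name__ == "__main__":
    # Offline mode keeps the example runnable without a WANDB_API_KEY.
    os.environ.setdefault("WANDB_MODE", "offline")
    run = wandb.init(project=WANDB_PROJECT)
    print(resolve_wandb_url())
    run.finish()
```

Because the commit also reorders `project.create()` ahead of the lookup, the URL can reflect a run opened during training; when training runs in a separate process or Space, `wandb.run` stays `None` and the project URL is used instead, which is exactly the else branch in the diff.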
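Both `get_system_status` and the MCP Integration tab get the same refactor: any expression that nests an f-string, a conditional, or multi-line logic inside a replacement field is evaluated into a plain variable first, so the template only interpolates names. (`chr(10)` is a newline spelled without a backslash; f-string expressions could not contain backslashes before Python 3.12, which is why the original avoids writing `"\n"` inside the template.) A small sketch of the before/after shape, with made-up backend names:

```python
import os

available_backends = ["spaces-a10g-large", "spaces-l4x4", "local"]  # illustrative values

# Before: the join and the conditional live inside the template's replacement
# fields, nesting an f-string inside an f-string.
status_before = f"""Backends: {len(available_backends)}
{chr(10).join(f" • {b}" for b in available_backends[:2])}
{f" ... and {len(available_backends) - 2} more" if len(available_backends) > 2 else ""}
W&B API Key: {"Configured" if os.environ.get("WANDB_API_KEY") else "Missing"}"""

# After: compute each piece first, then keep the template flat.
backends_list = "\n".join(f" • {b}" for b in available_backends[:2])
backends_more = (
    f" ... and {len(available_backends) - 2} more"
    if len(available_backends) > 2
    else ""
)
wandb_api_status = "Configured" if os.environ.get("WANDB_API_KEY") else "Missing"

status_after = f"""Backends: {len(available_backends)}
{backends_list}
{backends_more}
W&B API Key: {wandb_api_status}"""

# The refactor changes readability, not output.
assert status_before == status_after
print(status_after)
```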
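The new connection snippets in the MCP Integration tab point clients at Gradio's MCP endpoint under `/gradio_api/mcp/sse` and pass a Hugging Face token as a bearer header. As a rough reachability check from Python, assuming a deployed Space URL (placeholder below) and an `HF_TOKEN` in the environment; this only confirms that the server answers with that header, it does not speak the MCP protocol:

```python
import os

import requests

# Placeholder: substitute the real Space URL for your deployment.
MCP_SSE_URL = "https://SPACE_URL/gradio_api/mcp/sse"

headers = {"Authorization": f"Bearer {os.environ.get('HF_TOKEN', '')}"}

# SSE endpoints hold the connection open, so stream the response and look
# only at the status line and headers instead of reading the whole body.
with requests.get(MCP_SSE_URL, headers=headers, stream=True, timeout=10) as resp:
    print(resp.status_code, resp.headers.get("content-type"))
```

The Claude Desktop variant wraps the same URL and header in `mcp-remote`, which proxies the remote SSE server over the local stdio transport that Claude Desktop expects.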