#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Script to upload GPU diagnostics to Hugging Face Spaces with direct token input
"""
import os
import sys
from huggingface_hub import HfApi, create_repo, upload_file
# Default Space repository ("user/name"); used when the prompt is left blank.
DEFAULT_REPO = "mknolan/gpu-diagnostic-test"
# Token should be entered at runtime, not hardcoded; leave empty to prompt.
DEFAULT_TOKEN = ""
def main():
    """Upload the GPU diagnostic files to a Hugging Face Space.

    Prompts for a WRITE-access token and a Space name, creates the Space
    if it does not already exist, then uploads the Dockerfile, the
    diagnostic script (as app.py) and a README.

    Returns:
        int: 0 on success, 1 on any failure (suitable for sys.exit).
    """
    # Get Hugging Face token with WRITE access - direct input instead of getpass
    if DEFAULT_TOKEN:
        token = DEFAULT_TOKEN
    else:
        token = input("Enter your Hugging Face token (with WRITE access): ")
    # Get repository name; empty input falls back to the default.
    repo_name = input("Enter repository name (default: {}): ".format(DEFAULT_REPO)) or DEFAULT_REPO
    print("Uploading to Space: {}".format(repo_name))
    # Initialize Hugging Face API
    api = HfApi(token=token)
    try:
        # Ensure the Space exists: probe it, and create it on lookup failure.
        try:
            api.repo_info(repo_id=repo_name, repo_type="space")
            print("Repo {} ready".format(repo_name))
        except Exception:
            print("Creating new Space: {}".format(repo_name))
            create_repo(
                repo_id=repo_name,
                token=token,
                repo_type="space",
                space_sdk="gradio",
                private=False
            )
        print("Uploading diagnostic files to Hugging Face Space...")
        # Upload Dockerfile as "Dockerfile" (HF requires this name)
        api.upload_file(
            path_or_fileobj="Dockerfile.gpu_test",
            path_in_repo="Dockerfile",
            repo_id=repo_name,
            repo_type="space",
            token=token,
            commit_message="Add Docker configuration for GPU diagnostics"
        )
        print("Uploaded Dockerfile")
        # Upload the Python script
        api.upload_file(
            path_or_fileobj="gpu_test.py",
            path_in_repo="app.py",  # HF Spaces often looks for app.py as the main file
            repo_id=repo_name,
            repo_type="space",
            token=token,
            commit_message="Add GPU diagnostic script"
        )
        print("Uploaded gpu_test.py as app.py")
        # README content for the Space landing page.
        readme_content = """# GPU Diagnostics Tool
This Space provides diagnostics for GPU detection and functionality in Hugging Face Spaces.
## Purpose
- Tests if CUDA is available through PyTorch
- Tests if nvidia-smi can detect the GPU
- Tests tensor operations on GPU
- Provides detailed diagnostic information about the GPU environment
## How to Use
1. Click the "Submit" button
2. View the JSON results to diagnose GPU issues
"""
        # Upload the README straight from memory: path_or_fileobj accepts
        # bytes, so no temporary file is created (and none can be leaked
        # if this upload raises, unlike a write-then-remove approach).
        api.upload_file(
            path_or_fileobj=readme_content.encode("utf-8"),
            path_in_repo="README.md",
            repo_id=repo_name,
            repo_type="space",
            token=token,
            commit_message="Add README"
        )
        print("Uploaded README.md")
        print("Upload completed!")
        print("Check your Space at: https://huggingface.co/spaces/{}".format(repo_name))
    except Exception as e:
        # Broad catch is deliberate: this is a top-level CLI boundary and
        # any failure should be reported, not traced back.
        print("Error: {}".format(e))
        return 1
    return 0
if __name__ == "__main__":
    # Script entry point: propagate main()'s status code to the shell.
    # (Removed a stray trailing "|" artifact that broke the line.)
    sys.exit(main())