cursor_slides_internvl2 / upload_gpu_test_direct.py
mknolan's picture
Upload InternVL2 implementation
02532a9 verified
raw
history blame
3.6 kB
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Script to upload GPU diagnostics to Hugging Face Spaces with direct token input
"""
import os
import sys
from huggingface_hub import HfApi, create_repo, upload_file
# Default Space repo used when the user presses Enter at the repo prompt.
DEFAULT_REPO = "mknolan/gpu-diagnostic-test"
# Deliberately empty: the write-access token must be typed at runtime,
# never hardcoded or committed to source control.
DEFAULT_TOKEN = ""
def main():
    """Upload GPU diagnostic files to a Hugging Face Space.

    Prompts for a write-access token and a repository name, reuses the
    Space if it already exists (creating a public Gradio Space otherwise),
    then uploads three files: the Dockerfile, the diagnostic script (as
    app.py), and a generated README.md.

    Returns:
        int: 0 on success, 1 on any failure (suitable for sys.exit()).
    """
    # Token is read with plain input() rather than getpass on purpose
    # (original design: "direct input instead of getpass"); note it will
    # echo to the terminal.
    if DEFAULT_TOKEN:
        token = DEFAULT_TOKEN
    else:
        token = input("Enter your Hugging Face token (with WRITE access): ")
    # Empty input falls back to the default repo name.
    repo_name = input("Enter repository name (default: {}): ".format(DEFAULT_REPO)) or DEFAULT_REPO
    print("Uploading to Space: {}".format(repo_name))
    # Initialize Hugging Face API client with the provided token.
    api = HfApi(token=token)
    try:
        # Probe for the repository; any failure is treated as "does not
        # exist" and we create it (best-effort, matches original behavior).
        try:
            api.repo_info(repo_id=repo_name, repo_type="space")
            print("Repo {} ready".format(repo_name))
        except Exception:
            print("Creating new Space: {}".format(repo_name))
            create_repo(
                repo_id=repo_name,
                token=token,
                repo_type="space",
                space_sdk="gradio",
                private=False
            )
        print("Uploading diagnostic files to Hugging Face Space...")
        # Upload Dockerfile as "Dockerfile" (HF requires this exact name).
        api.upload_file(
            path_or_fileobj="Dockerfile.gpu_test",
            path_in_repo="Dockerfile",
            repo_id=repo_name,
            repo_type="space",
            token=token,
            commit_message="Add Docker configuration for GPU diagnostics"
        )
        print("Uploaded Dockerfile")
        # Upload the Python script as app.py — HF Spaces looks for app.py
        # as the main entry point.
        api.upload_file(
            path_or_fileobj="gpu_test.py",
            path_in_repo="app.py",
            repo_id=repo_name,
            repo_type="space",
            token=token,
            commit_message="Add GPU diagnostic script"
        )
        print("Uploaded gpu_test.py as app.py")
        # README content is generated inline so the script is self-contained.
        readme_content = """# GPU Diagnostics Tool
This Space provides diagnostics for GPU detection and functionality in Hugging Face Spaces.
## Purpose
- Tests if CUDA is available through PyTorch
- Tests if nvidia-smi can detect the GPU
- Tests tensor operations on GPU
- Provides detailed diagnostic information about the GPU environment
## How to Use
1. Click the "Submit" button
2. View the JSON results to diagnose GPU issues
"""
        # Write README to a temporary file; explicit UTF-8 so the content
        # round-trips identically regardless of the platform default.
        with open("temp_readme.md", "w", encoding="utf-8") as f:
            f.write(readme_content)
        try:
            api.upload_file(
                path_or_fileobj="temp_readme.md",
                path_in_repo="README.md",
                repo_id=repo_name,
                repo_type="space",
                token=token,
                commit_message="Add README"
            )
            print("Uploaded README.md")
        finally:
            # Remove the temp file even if the upload above fails, so a
            # failed run does not leave temp_readme.md behind.
            os.remove("temp_readme.md")
        print("Upload completed!")
        print("Check your Space at: https://huggingface.co/spaces/{}".format(repo_name))
    except Exception as e:
        # Top-level boundary: report and signal failure via exit code.
        print("Error: {}".format(e))
        return 1
    return 0
if __name__ == "__main__":
    # Propagate main()'s status (0 = success, 1 = failure) to the shell.
    sys.exit(main())