# utils/storage.py
import os | |
import urllib.parse | |
import tempfile | |
import shutil | |
from huggingface_hub import login, upload_folder | |
from utils.constants import HF_API_TOKEN, upload_file_types, model_extensions, image_extensions | |
def generate_permalink(valid_files, base_url_external, permalink_viewer_url="surn-3d-viewer.hf.space"):
    """
    Build a viewer permalink from a list of uploaded files.

    Scans ``valid_files`` and classifies each by extension (using the
    module-level ``model_extensions`` / ``image_extensions`` sets). When the
    list contains exactly one model file and two image files, returns a
    permalink URL to the viewer with ``3d``, ``hm`` and ``image`` query
    parameters; otherwise returns None.

    Parameters:
        valid_files (list[str]): Local paths of the uploaded files.
        base_url_external (str): Public base URL under which the files are served.
        permalink_viewer_url (str): Host name of the viewer app.

    Returns:
        str | None: The permalink URL, or None if the criteria are not met.
    """
    model_link = None
    images_links = []
    for f in valid_files:
        filename = os.path.basename(f)
        ext = os.path.splitext(filename)[1].lower()
        if ext in model_extensions:
            # Keep only the first model file encountered.
            if model_link is None:
                model_link = f"{base_url_external}/{filename}"
        elif ext in image_extensions:
            images_links.append(f"{base_url_external}/{filename}")
    if model_link and len(images_links) == 2:
        # Order matters: the first image is treated as the height map ("hm"),
        # the second as the main texture image ("image").
        permalink_viewer_url = f"https://{permalink_viewer_url}/"
        params = {"3d": model_link, "hm": images_links[0], "image": images_links[1]}
        query_str = urllib.parse.urlencode(params)
        return f"{permalink_viewer_url}?{query_str}"
    return None
def generate_permalink_from_urls(model_url, hm_url, img_url, permalink_viewer_url="surn-3d-viewer.hf.space"):
    """
    Build a viewer permalink from three already-processed URLs.

    Each parameter is passed separately so that the image positions remain
    consistent (``hm`` is always the height map, ``image`` the main image).

    Parameters:
        model_url (str): Processed URL for the 3D model.
        hm_url (str): Processed URL for the height map image.
        img_url (str): Processed URL for the main image.
        permalink_viewer_url (str): The base viewer host name.

    Returns:
        str: The generated permalink URL with URL-encoded query parameters.
    """
    # urllib.parse is imported at module level; the previous function-local
    # re-import was redundant and has been removed.
    params = {"3d": model_url, "hm": hm_url, "image": img_url}
    query_str = urllib.parse.urlencode(params)
    return f"https://{permalink_viewer_url}/?{query_str}"
def upload_files_to_repo(files, repo_id, folder_name, create_permalink=False, repo_type="dataset", permalink_viewer_url="surn-3d-viewer.hf.space"):
    """
    Upload multiple files to a Hugging Face repository in one batch via upload_folder.

    Parameters:
        files (list): File paths (str) or file-like objects with a ``.name``
            attribute to upload. Entries with extensions outside
            ``upload_file_types`` are silently skipped.
        repo_id (str): The repository ID on Hugging Face, e.g. "Surn/Storage".
        folder_name (str): Subfolder within the repository where files are saved.
        create_permalink (bool): If True and the valid files are exactly
            1 model + 2 images (classified by extension), return a single
            project permalink instead of per-file links.
        repo_type (str): Repository type ("dataset", "space", "model", ...).
            Default "dataset"; determines the public URL prefix.
        permalink_viewer_url (str): Host name of the viewer app.

    Returns:
        If create_permalink is True and the files match the criteria:
            tuple: (response, permalink) — the batch-upload response and the
            permalink URL with fully qualified file paths.
        Otherwise:
            list: (response, link) tuples, one per uploaded file.
            An empty list if no valid files were supplied.
    """
    # Log in using the HF API token.
    login(token=HF_API_TOKEN)

    # Ensure folder_name does not have a trailing slash.
    folder_name = folder_name.rstrip("/")

    # Filter for valid files based on allowed extensions, resolving file-like
    # objects to their path via .name.
    valid_files = []
    for f in files:
        file_name = f if isinstance(f, str) else getattr(f, "name", None)
        if file_name is None:
            continue
        if os.path.splitext(file_name)[1].lower() in upload_file_types:
            valid_files.append(file_name)
    if not valid_files:
        return []  # nothing to upload

    # Copy valid files into a temporary directory and batch-upload it.
    # Files land in the repo under path_in_repo=folder_name.
    with tempfile.TemporaryDirectory() as temp_dir:
        for file_path in valid_files:
            shutil.copy(file_path, os.path.join(temp_dir, os.path.basename(file_path)))
        response = upload_folder(
            folder_path=temp_dir,
            repo_id=repo_id,
            repo_type=repo_type,
            path_in_repo=folder_name,
            commit_message="Batch upload files"
        )

    # Public URL prefix depends on repo_type:
    #   dataset -> https://huggingface.co/datasets/<repo_id>/resolve/main/...
    #   space   -> https://huggingface.co/spaces/<repo_id>/resolve/main/...
    #   model   -> https://huggingface.co/<repo_id>/resolve/main/...
    if repo_type == "dataset":
        base_url_external = f"https://huggingface.co/datasets/{repo_id}/resolve/main/{folder_name}"
    elif repo_type == "space":
        base_url_external = f"https://huggingface.co/spaces/{repo_id}/resolve/main/{folder_name}"
    else:
        base_url_external = f"https://huggingface.co/{repo_id}/resolve/main/{folder_name}"

    filenames = [os.path.basename(p) for p in valid_files]
    individual_links = [f"{base_url_external}/{filename}" for filename in filenames]

    # If a permalink is requested, classify the three files by extension
    # (1 model + 2 images) instead of trusting the caller's ordering.
    if create_permalink and len(valid_files) == 3:
        model_link = None
        image_links = []
        for link, filename in zip(individual_links, filenames):
            ext = os.path.splitext(filename)[1].lower()
            if ext in model_extensions:
                if model_link is None:
                    model_link = link
            elif ext in image_extensions:
                image_links.append(link)
        if model_link and len(image_links) == 2:
            permalink = generate_permalink_from_urls(
                model_link, image_links[0], image_links[1], permalink_viewer_url
            )
            return response, permalink
        # Criteria not met: fall through to per-file links.

    # Otherwise, return individual (response, link) tuples for each file.
    return [(response, link) for link in individual_links]