Spaces:
Running
Running
File size: 1,642 Bytes
7c52128 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 |
import torch
import logging
logger = logging.getLogger(__name__)
def get_available_gpu_count():
    """Return the number of CUDA GPUs visible to this process.

    Returns:
        int: Number of available GPUs, or 0 if no GPUs are available
             (including when detection itself fails).
    """
    try:
        # device_count() is only meaningful when CUDA is actually available.
        return torch.cuda.device_count() if torch.cuda.is_available() else 0
    except Exception as e:
        # Best-effort probe: GPU detection must never crash the caller.
        logger.warning(f"Error detecting GPUs: {e}")
        return 0
def get_gpu_info():
    """Collect basic details about each visible CUDA GPU.

    Returns:
        list: One dict per GPU with keys 'index', 'name', and
              'memory_total' (bytes); empty when no GPUs are
              available or detection fails.
    """
    devices = []
    try:
        if not torch.cuda.is_available():
            return devices
        for idx in range(torch.cuda.device_count()):
            props = torch.cuda.get_device_properties(idx)
            devices.append({
                'index': idx,
                'name': torch.cuda.get_device_name(idx),
                'memory_total': props.total_memory,
            })
    except Exception as e:
        # Partial results are fine; never let a driver hiccup propagate.
        logger.warning(f"Error getting GPU details: {e}")
    return devices
def get_recommended_precomputation_items(num_videos, num_gpus):
    """Calculate recommended precomputation items.

    Args:
        num_videos (int): Number of videos in dataset
        num_gpus (int): Number of GPUs to use

    Returns:
        int: Recommended precomputation items value, clamped to [1, 512].
    """
    # Treat a missing/invalid GPU count as a single worker.
    effective_gpus = num_gpus if num_gpus > 0 else 1
    # Floor-divide the work across GPUs, guaranteeing at least one item.
    per_gpu = num_videos // effective_gpus
    if per_gpu < 1:
        per_gpu = 1
    # Cap the recommendation at 512.
    return per_gpu if per_gpu < 512 else 512