# utils/version_info.py
import subprocess
import os
import sys
import gc

import gradio as gr

git = os.environ.get('GIT', "git")


def commit_hash():
    # Return the current git commit hash, or "" if git / the repository is unavailable.
    try:
        return subprocess.check_output([git, "rev-parse", "HEAD"], shell=False, encoding='utf8').strip()
    except Exception:
        return ""


def get_xformers_version():
    try:
        import xformers
        return xformers.__version__
    except Exception:
        return ""


def get_transformers_version():
    try:
        import transformers
        return transformers.__version__
    except Exception:
        return ""


def get_accelerate_version():
    try:
        import accelerate
        return accelerate.__version__
    except Exception:
        return ""


def get_safetensors_version():
    try:
        import safetensors
        return safetensors.__version__
    except Exception:
        return ""


def get_diffusers_version():
    try:
        import diffusers
        return diffusers.__version__
    except Exception:
        return ""


def get_torch_info():
    # Collect torch/CUDA details for display; returns "" if the device queries fail
    # (e.g. when no CUDA device is present).
    from torch import __version__ as torch_version_, version, cuda, backends
    initialize_cuda()
    try:
        info = [
            torch_version_,
            f"CUDA Version: {version.cuda}",
            f"Available: {cuda.is_available()}",
            f"flash attention enabled: {backends.cuda.flash_sdp_enabled()}",
            f"Capabilities: {cuda.get_device_capability(0)}",
            f"Device Name: {cuda.get_device_name(0)}",
            f"Device Count: {cuda.device_count()}",
            f"Devices: {os.environ.get('CUDA_VISIBLE_DEVICES', '')}",
            f"Zero: {os.environ.get('CUDA_MODULE_LOADING', '')}",
        ]
        del torch_version_, version, cuda, backends
        return info
    except Exception:
        del torch_version_, version, cuda, backends
        return ""


def release_torch_resources():
    from torch import cuda
    # Clear the CUDA cache
    cuda.empty_cache()
    cuda.ipc_collect()
    # Delete any objects that are using GPU memory
    #for obj in gc.get_objects():
    #    if is_tensor(obj) or (hasattr(obj, 'data') and is_tensor(obj.data)):
    #        del obj
    # Run garbage collection
    del cuda
    gc.collect()


def initialize_cuda():
    # Print which device will be used and return "cuda" or "cpu".
    from torch import cuda, version, device as torch_device
    if cuda.is_available():
        device = torch_device("cuda")
        print(f"CUDA is available. Using device: {cuda.get_device_name(0)} with CUDA version: {version.cuda}")
        result = "cuda"
    else:
        device = torch_device("cpu")
        print("CUDA is not available. Using CPU.")
        result = "cpu"
    return result


def versions_html():
    # Build the HTML footer that lists environment and library versions.
    from torch import __version__ as torch_version_
    python_version = ".".join([str(x) for x in sys.version_info[0:3]])
    commit = commit_hash()

    # Define the Toggle Dark Mode link with JavaScript
    toggle_dark_link = '''
        <a href="#" onclick="document.body.classList.toggle('dark'); return false;" style="cursor: pointer; text-decoration: underline;">
            Toggle Dark Mode
        </a>
    '''

    v_html = f"""
        version: <a href="{"huggingface" if commit == "" else commit}" target="_blank">{"huggingface" if commit == "" else commit}</a>
        &nbsp;•&nbsp;
        python: {python_version}
        &nbsp;•&nbsp;
        torch: {torch_version_}
        &nbsp;•&nbsp;
        diffusers: {get_diffusers_version()}
        &nbsp;•&nbsp;
        transformers: {get_transformers_version()}
        &nbsp;•&nbsp;
        safetensors: {get_safetensors_version()}
        &nbsp;•&nbsp;
        gradio: {gr.__version__}
        &nbsp;•&nbsp;
        {toggle_dark_link}
        <br>
        Full GPU Info: {get_torch_info()}
        """
    del torch_version_
    return v_html
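

# Usage sketch (illustrative, not part of the original module): one way these helpers
# might be wired into a Gradio Blocks app. gr.HTML renders the version footer and a
# button frees GPU memory via release_torch_resources(). The layout, button label,
# and elem_id below are assumptions for demonstration only.
if __name__ == "__main__":
    with gr.Blocks() as demo:
        gr.HTML(versions_html(), elem_id="version_footer")  # hypothetical elem_id
        free_button = gr.Button("Release GPU resources")    # hypothetical label
        free_button.click(fn=release_torch_resources)       # no inputs/outputs needed
    demo.launch()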