import subprocess
import os
import sys
import gc

import gradio as gr

git = os.environ.get('GIT', "git")


def commit_hash():
    try:
        return subprocess.check_output([git, "rev-parse", "HEAD"], shell=False, encoding='utf8').strip()
    except Exception:
        return "<none>"


def get_xformers_version():
    try:
        import xformers
        return xformers.__version__
    except Exception:
        return "<none>"


def get_transformers_version():
    try:
        import transformers
        return transformers.__version__
    except Exception:
        return "<none>"


def get_accelerate_version():
    try:
        import accelerate
        return accelerate.__version__
    except Exception:
        return "<none>"


def get_safetensors_version():
    try:
        import safetensors
        return safetensors.__version__
    except Exception:
        return "<none>"


def get_diffusers_version():
    try:
        import diffusers
        return diffusers.__version__
    except Exception:
        return "<none>"


def get_open3d_version():
    try:
        import open3d
        return f"{open3d.__version__} cuda:{open3d.core.cuda.is_available()}"
    except Exception:
        return "<none>"


def get_torch_info():
    """Collect torch/CUDA runtime details, or "<none>" if anything fails."""
    from torch import __version__ as torch_version_, version, cuda, backends
    initialize_cuda()
    try:
        info = [
            torch_version_,
            f"CUDA Version:{version.cuda}",
            f"Available:{cuda.is_available()}",
            f"flash attention enabled: {backends.cuda.flash_sdp_enabled()}",
            f"Capabilities: {cuda.get_device_capability(0)}",
            f"Device Name: {cuda.get_device_name(0)}",
            f"Device Count: {cuda.device_count()}",
            f"Devices: {os.environ.get('CUDA_VISIBLE_DEVICES', '<unset>')}",
            f"Zero: {os.environ.get('CUDA_MODULE_LOADING', '<unset>')}",
        ]
        return info
    except Exception:
        return "<none>"
    finally:
        del torch_version_, version, cuda, backends


def release_torch_resources():
    """Release cached CUDA memory and run Python garbage collection."""
    from torch import cuda
    if cuda.is_available():
        cuda.empty_cache()
        cuda.ipc_collect()
    del cuda
    gc.collect()
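

# Usage sketch (assumption, not part of the original app): calling
# release_torch_resources() between heavy generation passes returns cached
# CUDA memory before the next run. The helper name below is hypothetical.
def _demo_cleanup_between_runs():
    release_torch_resources()
    return get_torch_info()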


def initialize_cuda():
    """Return "cuda" when a GPU is usable, otherwise "cpu"."""
    from torch import cuda, version
    if cuda.is_available():
        print(f"CUDA is available. Using device: {cuda.get_device_name(0)} with CUDA version: {version.cuda}")
        result = "cuda"
    else:
        print("CUDA is not available. Using CPU.")
        result = "cpu"
    return result
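

# Usage sketch (assumption, not part of the original app): initialize_cuda()
# only logs and returns the device string, so callers pass it to torch APIs
# themselves. The helper name below is hypothetical.
def _demo_device_selection():
    from torch import zeros
    device = initialize_cuda()        # "cuda" or "cpu"
    sample = zeros(1, device=device)  # tensor is placed on the selected device
    return sample.device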


def versions_html():
    from torch import __version__ as torch_version_
    python_version = ".".join([str(x) for x in sys.version_info[0:3]])
    commit = commit_hash()

    toggle_dark_link = '''
    <a href="#" onclick="document.body.classList.toggle('dark'); return false;" style="cursor: pointer; text-decoration: underline;">
        Toggle Dark Mode
    </a>
    '''

    v_html = f"""
    version: <a href="https://huggingface.co/spaces/Surn/HexaGrid/commit/{"huggingface" if commit == "<none>" else commit}" target="_blank">{"huggingface" if commit == "<none>" else commit}</a>
     • 
    python: <span title="{sys.version}">{python_version}</span>
     • 
    torch: {torch_version_}
     • 
    diffusers: {get_diffusers_version()}
     • 
    transformers: {get_transformers_version()}
     • 
    safetensors: {get_safetensors_version()}
     • 
    open3d: {get_open3d_version()}
     • 
    gradio: {gr.__version__}
     • 
    {toggle_dark_link}
    <br>
    Full GPU Info:{get_torch_info()}
    """
    del torch_version_
    return v_html
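

# Usage sketch (assumption: how versions_html() might be mounted in a Gradio
# UI; the real HexaGrid layout lives elsewhere in the Space).
if __name__ == "__main__":
    with gr.Blocks() as demo:
        gr.HTML(versions_html())
    demo.launch()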