# modules/version_info.py
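"""Version and environment diagnostics for the Surn/3D-Viewer Gradio space.

Gathers git, Python, torch, transformers and gradio version details and
renders them as the HTML footer shown in the UI.
"""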
import subprocess
import os
import sys
# import gc
import gradio as gr
git = os.environ.get('GIT', "git")
def commit_hash():
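    """Return the current git HEAD commit hash, or "<none>" if git is unavailable."""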
    try:
        return subprocess.check_output([git, "rev-parse", "HEAD"], shell=False, encoding='utf8').strip()
    except Exception:
        return "<none>"

# def get_xformers_version():
#     try:
#         import xformers
#         return xformers.__version__
#     except Exception:
#         return "<none>"

def get_transformers_version():
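    """Return the installed transformers version, or "<none>" if it cannot be imported."""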
    try:
        import transformers
        return transformers.__version__
    except Exception:
        return "<none>"

# def get_accelerate_version():
#     try:
#         import accelerate
#         return accelerate.__version__
#     except Exception:
#         return "<none>"

# def get_safetensors_version():
#     try:
#         import safetensors
#         return safetensors.__version__
#     except Exception:
#         return "<none>"

# def get_diffusers_version():
#     try:
#         import diffusers
#         return diffusers.__version__
#     except Exception:
#         return "<none>"

# def get_open3d_version():
#     try:
#         import open3d
#         return f"{open3d.__version__} cuda:{open3d.core.cuda.is_available()}"
#     except Exception:
#         return "<none>"

def get_torch_info():
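    """Return a list of torch/CUDA diagnostic strings, or "<none>" if collection fails."""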
    from torch import __version__ as torch_version_, version, cuda, backends
    initialize_cuda()
    try:
        info = [
            torch_version_,
            f"CUDA Version:{version.cuda}",
            f"Available:{cuda.is_available()}",
            f"flash attention enabled: {backends.cuda.flash_sdp_enabled()}",
            f"Capabilities: {cuda.get_device_capability(0)}",
            f"Device Name: {cuda.get_device_name(0)}",
            f"Device Count: {cuda.device_count()}",
            f"Devices: {os.environ['CUDA_VISIBLE_DEVICES']}",
            f"Zero :{os.environ['CUDA_MODULE_LOADING']}",
        ]
        del torch_version_, version, cuda, backends
        return info
    except Exception:
        del torch_version_, version, cuda, backends
        return "<none>"

# def release_torch_resources():
#     from torch import cuda
#     if cuda.is_available():
#         # Clear the CUDA cache
#         cuda.empty_cache()
#         cuda.ipc_collect()
#     # Delete any objects that are using GPU memory
#     # for obj in gc.get_objects():
#     #     if is_tensor(obj) or (hasattr(obj, 'data') and is_tensor(obj.data)):
#     #         del obj
#     # Run garbage collection
#     del cuda
#     gc.collect()

def initialize_cuda():
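    """Print whether CUDA is available and return "cuda" or "cpu" accordingly."""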
    from torch import cuda, version
    if cuda.is_available():
        device = cuda.device("cuda")
        print(f"CUDA is available. Using device: {cuda.get_device_name(0)} with CUDA version: {version.cuda}")
        result = "cuda"
    else:
        # device = cuda.device("cpu")
        print("CUDA is not available. Using CPU.")
        result = "cpu"
    return result

def versions_html():
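    """Build the HTML footer with version info, a dark-mode toggle and the shortener link."""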
    from torch import __version__ as torch_version_
    python_version = ".".join([str(x) for x in sys.version_info[0:3]])
    commit = commit_hash()

    # Define the Toggle Dark Mode link with JavaScript
    toggle_dark_link = '''
    <a href="#" onclick="document.body.classList.toggle('dark'); return false;" style="cursor: pointer; text-decoration: underline;">
        Toggle Dark Mode
    </a>
    '''

    # Add a link to the shortener JSON file in the Hugging Face repo
    from modules.constants import HF_REPO_ID, SHORTENER_JSON_FILE  # Import constants
    shortener_url = f"https://huggingface.co/datasets/{HF_REPO_ID}/resolve/main/{SHORTENER_JSON_FILE}"
    shortener_link = f'''
    <a href="{shortener_url}" target="_blank" style="cursor: pointer; text-decoration: underline;">
        View Shortener JSON
    </a>
    '''

    v_html = f"""
version: <a href="https://huggingface.co/spaces/Surn/3D-Viewer/commit/{"huggingface" if commit == "<none>" else commit}" target="_blank">{"huggingface" if commit == "<none>" else commit}</a>
 • 
python: <span title="{sys.version}">{python_version}</span>
 • 
torch: {torch_version_}
 • 
transformers: {get_transformers_version()}
 • 
gradio: {gr.__version__}
 • 
{toggle_dark_link}
 • 
{shortener_link}
<br>
Full GPU Info:
"""
    del torch_version_
    return v_html
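
# Minimal usage sketch: running the file directly prints the same diagnostics
# the Gradio footer embeds. It assumes torch is installed; versions_html()
# additionally needs modules.constants (HF_REPO_ID, SHORTENER_JSON_FILE) to be
# importable, so it is omitted here.
if __name__ == "__main__":
    print("Selected device:", initialize_cuda())
    print("Commit:", commit_hash())
    info = get_torch_info()
    print(info if isinstance(info, str) else "\n".join(info))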