Spaces:
Sleeping
Sleeping
Update utils.py
Browse files
utils.py
CHANGED
@@ -1,100 +1,40 @@
|
|
1 |
-
#
|
2 |
|
3 |
import os
|
4 |
-
|
5 |
-
from
|
6 |
-
import json
|
7 |
|
8 |
-
MDX_DOWNLOAD_LINK = "https://github.com/TRvlvr/model_repo/releases/download/all_public_uvr_models/"
|
9 |
-
UVR_MODELS = [
|
10 |
-
"UVR-MDX-NET-Voc_FT.onnx",
|
11 |
-
"UVR_MDXNET_KARA_2.onnx",
|
12 |
-
"Reverb_HQ_By_FoxJoy.onnx",
|
13 |
-
"UVR-MDX-NET-Inst_HQ_4.onnx",
|
14 |
-
]
|
15 |
|
16 |
-
|
17 |
-
|
18 |
|
19 |
-
# Ensure mdx_models is a directory
|
20 |
-
if mdxnet_models_dir.exists() and not mdxnet_models_dir.is_dir():
|
21 |
-
mdxnet_models_dir.unlink()
|
22 |
-
mdxnet_models_dir.mkdir(parents=True, exist_ok=True)
|
23 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
24 |
|
25 |
-
# 🔧 Auto-generate fallback data.json if needed
|
26 |
-
def generate_data_json(mdxnet_models_dir):
|
27 |
-
presets = {
|
28 |
-
"UVR-MDX-NET-Voc_FT.onnx": {
|
29 |
-
"mdx_dim_f_set": 2048,
|
30 |
-
"mdx_dim_t_set": 3,
|
31 |
-
"mdx_n_fft_scale_set": 6144,
|
32 |
-
"primary_stem": "Vocals",
|
33 |
-
"compensate": 1.035
|
34 |
-
},
|
35 |
-
"UVR_MDXNET_KARA_2.onnx": {
|
36 |
-
"mdx_dim_f_set": 1024,
|
37 |
-
"mdx_dim_t_set": 3,
|
38 |
-
"mdx_n_fft_scale_set": 4096,
|
39 |
-
"primary_stem": "Main",
|
40 |
-
"compensate": 1.035
|
41 |
-
},
|
42 |
-
"UVR-MDX-NET-Inst_HQ_4.onnx": {
|
43 |
-
"mdx_dim_f_set": 2048,
|
44 |
-
"mdx_dim_t_set": 3,
|
45 |
-
"mdx_n_fft_scale_set": 6144,
|
46 |
-
"primary_stem": "Instrumental",
|
47 |
-
"compensate": 1.0
|
48 |
-
},
|
49 |
-
"Reverb_HQ_By_FoxJoy.onnx": {
|
50 |
-
"mdx_dim_f_set": 2048,
|
51 |
-
"mdx_dim_t_set": 3,
|
52 |
-
"mdx_n_fft_scale_set": 6144,
|
53 |
-
"primary_stem": "Vocals",
|
54 |
-
"compensate": 1.035
|
55 |
-
},
|
56 |
-
}
|
57 |
|
58 |
-
|
59 |
-
|
60 |
-
|
61 |
-
|
62 |
-
|
63 |
-
data[model_hash] = params
|
64 |
|
65 |
-
|
66 |
-
|
67 |
-
print("✅ Auto-generated data.json with", len(data), "entries.")
|
68 |
|
|
|
|
|
|
|
|
|
|
|
69 |
|
70 |
-
|
71 |
-
|
72 |
-
print("🔍 Checking for data.json at:", json_path)
|
73 |
-
print("📁 mdx_models contents:", list(mdxnet_models_dir.iterdir()))
|
74 |
-
print("📂 Current working directory:", os.getcwd())
|
75 |
-
|
76 |
-
if not json_path.exists():
|
77 |
-
print("⚠️ data.json missing, generating fallback.")
|
78 |
-
generate_data_json(mdxnet_models_dir)
|
79 |
-
else:
|
80 |
-
print("✅ data.json found.")
|
81 |
-
|
82 |
-
|
83 |
-
def download_models():
    """Download every UVR model listed in UVR_MODELS into mdxnet_models_dir.

    download_manager skips files that already exist, so this is safe to
    call repeatedly.
    """
    for model in UVR_MODELS:
        # Build the URL by concatenation: os.path.join would insert a
        # backslash separator on Windows and corrupt the URL.
        # MDX_DOWNLOAD_LINK already ends with "/".
        url = MDX_DOWNLOAD_LINK + model
        download_manager(url, str(mdxnet_models_dir))
|
87 |
-
|
88 |
-
|
89 |
-
if __name__ == "__main__":
|
90 |
-
download_models()
|
91 |
-
theme = "NoCrypt/miku"
|
92 |
-
app = get_gui(theme)
|
93 |
-
app.queue(default_concurrency_limit=40)
|
94 |
-
app.launch(
|
95 |
-
max_threads=40,
|
96 |
-
share=False,
|
97 |
-
show_error=True,
|
98 |
-
quiet=False,
|
99 |
-
debug=False,
|
100 |
-
)
|
|
|
1 |
+
# utils.py
|
2 |
|
3 |
import os
|
4 |
+
import requests
|
5 |
+
from log import logger
|
|
|
6 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
7 |
|
8 |
+
def create_directories(path):
|
9 |
+
os.makedirs(path, exist_ok=True)
|
10 |
|
|
|
|
|
|
|
|
|
11 |
|
12 |
+
def remove_directory_contents(path):
|
13 |
+
if not os.path.exists(path):
|
14 |
+
return
|
15 |
+
for filename in os.listdir(path):
|
16 |
+
file_path = os.path.join(path, filename)
|
17 |
+
if os.path.isfile(file_path):
|
18 |
+
os.remove(file_path)
|
19 |
+
elif os.path.isdir(file_path):
|
20 |
+
import shutil
|
21 |
+
shutil.rmtree(file_path)
|
22 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
23 |
|
24 |
+
def download_manager(url, output_dir):
    """Download *url* into *output_dir* and return the local file path.

    The file name is taken from the last path component of *url*. If the
    target file already exists the download is skipped and the existing
    path is returned.

    Raises requests.HTTPError (via raise_for_status) on a non-2xx response.
    """
    filename = os.path.basename(url)
    output_path = os.path.join(output_dir, filename)

    # Cached copy already on disk: nothing to do.
    if os.path.exists(output_path):
        return output_path

    # Lazy %-style args instead of the original placeholder-less f-string
    # that logged the literal text "(unknown)".
    logger.info("Downloading: %s", url)
    os.makedirs(output_dir, exist_ok=True)  # ensure target directory exists

    # Stream to disk in chunks so large files are never held fully in memory.
    with requests.get(url, stream=True) as response:
        response.raise_for_status()
        with open(output_path, 'wb') as f:
            for chunk in response.iter_content(chunk_size=8192):
                f.write(chunk)

    logger.info("Downloaded: %s", output_path)
    return output_path
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|