CCockrum committed on
Commit
7477320
·
verified ·
1 Parent(s): 98872fa

Update utils.py

Browse files
Files changed (1) hide show
  1. utils.py +29 -89
utils.py CHANGED
@@ -1,100 +1,40 @@
1
- # main.py
2
 
3
  import os
4
- from pathlib import Path
5
- from mdx_core import MDX
6
- import json
7
 
8
# Local directory that holds the downloaded MDX-Net model files.
BASE_DIR = Path(__file__).resolve().parent
mdxnet_models_dir = BASE_DIR / "mdx_models"

# A leftover regular file at the models path would shadow the directory,
# so clear it before ensuring the directory exists.
if mdxnet_models_dir.exists() and not mdxnet_models_dir.is_dir():
    mdxnet_models_dir.unlink()
mdxnet_models_dir.mkdir(parents=True, exist_ok=True)

# Release bucket that publishes the public UVR separation models.
MDX_DOWNLOAD_LINK = "https://github.com/TRvlvr/model_repo/releases/download/all_public_uvr_models/"

# Model files fetched on demand by download_models().
UVR_MODELS = [
    "UVR-MDX-NET-Voc_FT.onnx",
    "UVR_MDXNET_KARA_2.onnx",
    "Reverb_HQ_By_FoxJoy.onnx",
    "UVR-MDX-NET-Inst_HQ_4.onnx",
]
23
 
 
 
 
 
 
 
 
 
 
 
24
 
25
def generate_data_json(mdxnet_models_dir):
    """Auto-generate a fallback data.json for the bundled MDX-Net models.

    Hashes each preset model that actually exists under *mdxnet_models_dir*
    and writes a hash -> parameter mapping to data.json in that directory.
    """
    model_presets = {
        "UVR-MDX-NET-Voc_FT.onnx": {
            "mdx_dim_f_set": 2048,
            "mdx_dim_t_set": 3,
            "mdx_n_fft_scale_set": 6144,
            "primary_stem": "Vocals",
            "compensate": 1.035
        },
        "UVR_MDXNET_KARA_2.onnx": {
            "mdx_dim_f_set": 1024,
            "mdx_dim_t_set": 3,
            "mdx_n_fft_scale_set": 4096,
            "primary_stem": "Main",
            "compensate": 1.035
        },
        "UVR-MDX-NET-Inst_HQ_4.onnx": {
            "mdx_dim_f_set": 2048,
            "mdx_dim_t_set": 3,
            "mdx_n_fft_scale_set": 6144,
            "primary_stem": "Instrumental",
            "compensate": 1.0
        },
        "Reverb_HQ_By_FoxJoy.onnx": {
            "mdx_dim_f_set": 2048,
            "mdx_dim_t_set": 3,
            "mdx_n_fft_scale_set": 6144,
            "primary_stem": "Vocals",
            "compensate": 1.035
        },
    }

    # Only models that are present on disk can be hashed and registered.
    data = {
        MDX.get_hash(str(mdxnet_models_dir / filename)): params
        for filename, params in model_presets.items()
        if (mdxnet_models_dir / filename).exists()
    }

    with open(mdxnet_models_dir / "data.json", "w") as f:
        json.dump(data, f, indent=2)
    print("✅ Auto-generated data.json with", len(data), "entries.")
68
 
 
 
 
 
 
69
 
70
# Ensure data.json exists next to the models; build the fallback when absent.
json_path = mdxnet_models_dir / "data.json"
print("🔍 Checking for data.json at:", json_path)
print("📁 mdx_models contents:", list(mdxnet_models_dir.iterdir()))
print("📂 Current working directory:", os.getcwd())

if json_path.exists():
    print("✅ data.json found.")
else:
    print("⚠️ data.json missing, generating fallback.")
    generate_data_json(mdxnet_models_dir)
81
-
82
-
83
def download_models():
    """Fetch every model listed in UVR_MODELS into mdxnet_models_dir."""
    for model in UVR_MODELS:
        # Compose the URL textually: os.path.join is for filesystem paths
        # and would insert "\\" on Windows, producing a broken URL.
        # MDX_DOWNLOAD_LINK already ends with "/".
        url = MDX_DOWNLOAD_LINK + model
        download_manager(url, str(mdxnet_models_dir))
87
-
88
-
89
if __name__ == "__main__":
    # Make sure the separation models are on disk before serving the UI.
    download_models()

    # NOTE(review): get_gui is not imported anywhere in this module —
    # confirm where it is expected to come from before running this entry.
    app = get_gui("NoCrypt/miku")
    app.queue(default_concurrency_limit=40)
    app.launch(
        max_threads=40,
        share=False,
        show_error=True,
        quiet=False,
        debug=False,
    )
 
1
+ # utils.py
2
 
3
  import os
4
+ import requests
5
+ from log import logger
 
6
 
 
 
 
 
 
 
 
7
 
8
def create_directories(path):
    """Create *path* (and any missing parents); an existing directory is fine."""
    if not os.path.isdir(path):
        os.makedirs(path, exist_ok=True)
10
 
 
 
 
 
11
 
12
def remove_directory_contents(path):
    """Delete every file and subdirectory inside *path*, keeping *path* itself.

    A missing *path* is a no-op. Entries that are neither regular files nor
    directories (e.g. broken symlinks) are left in place, matching the
    isfile/isdir checks below.
    """
    # Hoisted out of the loop: the original re-ran `import shutil` on every
    # directory entry. Kept function-scoped to match the file's import style.
    import shutil

    if not os.path.exists(path):
        return
    for entry in os.listdir(path):
        entry_path = os.path.join(path, entry)
        if os.path.isfile(entry_path):
            os.remove(entry_path)
        elif os.path.isdir(entry_path):
            shutil.rmtree(entry_path)
22
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
 
24
def download_manager(url, output_dir):
    """Download *url* into *output_dir* and return the local file path.

    The download is skipped when the target file already exists. The payload
    is streamed to a temporary ".part" file and atomically renamed into place
    on success, so a download interrupted mid-stream cannot leave a truncated
    file that the exists-check above would later mistake for a finished one.

    Raises requests.HTTPError (via raise_for_status) on a non-2xx response.
    """
    filename = os.path.basename(url)
    output_path = os.path.join(output_dir, filename)
    if os.path.exists(output_path):
        return output_path

    # Lazy %-style args instead of the original placeholder-less f-strings,
    # which logged the literal text "(unknown)" and carried no information.
    logger.info("Downloading: %s", url)
    os.makedirs(output_dir, exist_ok=True)  # Ensure destination exists

    part_path = output_path + ".part"
    with requests.get(url, stream=True) as r:
        r.raise_for_status()
        with open(part_path, "wb") as f:
            for chunk in r.iter_content(chunk_size=8192):
                f.write(chunk)
    os.replace(part_path, output_path)  # atomic publish of the finished file

    logger.info("Downloaded: %s", output_path)
    return output_path