Upload 10 files
- Extras/ffff_assets/arcface_converter_w600k_r50_ghost.onnx +3 -0
- Extras/ffff_assets/arcface_converter_w600k_r50_simswap.onnx +3 -0
- Extras/ffff_assets/blendswap_256.onnx +3 -0
- Extras/ffff_assets/codeformer.onnx +3 -0
- Extras/ffff_assets/ddcolor.onnx +3 -0
- Extras/ffff_assets/ddcolor_artistic.onnx +3 -0
- Extras/ffff_assets/deoldify.onnx +3 -0
- Extras/ffff_assets/deoldify_artistic.onnx +3 -0
- Extras/ffff_assets/deoldify_stable.onnx +3 -0
- Extras/ffff_assets/download.py +81 -0
Extras/ffff_assets/arcface_converter_w600k_r50_ghost.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8100e7d224ae062085df187267b6e5fafeaa1544033e4b7796eec164e5ec013a
+size 12600855
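Each .onnx entry in this commit is a Git LFS pointer rather than the model weights themselves; the oid and size fields above identify the actual payload. As a minimal sketch of how that payload could be verified (assuming the LFS content has already been pulled so the file at this path holds the real weights; matches_lfs_pointer is a hypothetical helper, not part of this repository):

import hashlib
import os

def matches_lfs_pointer(file_path : str, expected_oid : str, expected_size : int) -> bool:
	# Hypothetical helper: compare a local file against the oid/size fields of an LFS pointer.
	if os.path.getsize(file_path) != expected_size:
		return False
	digest = hashlib.sha256()
	with open(file_path, 'rb') as model_file:
		for chunk in iter(lambda: model_file.read(1024 * 1024), b''):
			digest.update(chunk)
	return digest.hexdigest() == expected_oid

# Values copied from the pointer above.
print(matches_lfs_pointer('Extras/ffff_assets/arcface_converter_w600k_r50_ghost.onnx', '8100e7d224ae062085df187267b6e5fafeaa1544033e4b7796eec164e5ec013a', 12600855))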
Extras/ffff_assets/arcface_converter_w600k_r50_simswap.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b5625aa577e62e9094ba4796557e90cb394c956b490079e16495db9145ba433
+size 12600855
Extras/ffff_assets/blendswap_256.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:20ac5e3c002288e7abd4ea6b1af74103892a6628382d1fa905d13c0270c2667b
+size 1661432957
Extras/ffff_assets/codeformer.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:21710e7ab61c82683576c428e9c1b6fe1ed419586b7b39e394c3449c294b550f
+size 376951650
Extras/ffff_assets/ddcolor.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dd6156f056d0690891ef35f9ee87176d4d976ab19bbd36e57d18e841e064d797
+size 980103562
Extras/ffff_assets/ddcolor_artistic.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:adad4b897990d627e139e858e8a6552c1f99e39e49e811d21e8f591803c877f1
+size 980103562
Extras/ffff_assets/deoldify.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a00de09afacbed28b30ea63d174e1ade855b497c0fc61372217953c429696a9a
+size 873387235
Extras/ffff_assets/deoldify_artistic.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9ac296cf05fecbdb604f50211f632b722402bc1f9a96ee5a8987b01c0c3c688f
+size 255044725
Extras/ffff_assets/deoldify_stable.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b1623cecd1bace5966d0364b86920e9a1421a7fd0f55d63604e359e9ab9c4824
+size 873387235
Extras/ffff_assets/download.py
ADDED
@@ -0,0 +1,81 @@
+import os
+import shutil
+import ssl
+import subprocess
+import urllib.request
+from functools import lru_cache
+from typing import List, Tuple
+from urllib.parse import urlparse
+
+from tqdm import tqdm
+
+from ffff import logger, process_manager, state_manager, wording
+from ffff.common_helper import is_macos
+from ffff.filesystem import get_file_size, is_file, remove_file
+from ffff.hash_helper import validate_hash
+from ffff.typing import DownloadSet
+
+if is_macos():
+	ssl._create_default_https_context = ssl._create_unverified_context
+
+def conditional_download(download_directory_path : str, urls : List[str]) -> None:
+	for url in urls:
+		download_file_name = os.path.basename(urlparse(url).path)
+		download_file_path = os.path.join(download_directory_path, download_file_name)
+		initial_size = get_file_size(download_file_path)
+		download_size = get_download_size(url)
+
+		if initial_size < download_size:
+			with tqdm(total = download_size, initial = initial_size, desc = wording.get('downloading'), unit = 'B', unit_scale = True, unit_divisor = 1024, ascii = ' =', disable = state_manager.get_item('log_level') in [ 'warn', 'error' ]) as progress:
+				subprocess.Popen([ shutil.which('curl'), '--create-dirs', '--silent', '--insecure', '--location', '--continue-at', '-', '--output', download_file_path, url ])
+				current_size = initial_size
+
+				progress.set_postfix(file = download_file_name)
+				while current_size < download_size:
+					if is_file(download_file_path):
+						current_size = get_file_size(download_file_path)
+						progress.update(current_size - progress.n)
+
+@lru_cache(maxsize = None)
+def get_download_size(url : str) -> int:
+	try:
+		response = urllib.request.urlopen(url, timeout = 10)
+		content_length = response.headers.get('Content-Length')
+		return int(content_length)
+	except (OSError, TypeError, ValueError):
+		return 0
+
+def is_download_done(url : str, file_path : str) -> bool:
+	if is_file(file_path):
+		return get_download_size(url) == get_file_size(file_path)
+	return False
+
+def conditional_download_hashes(download_directory_path : str, hashes : DownloadSet) -> bool:
+	process_manager.check()
+	if not state_manager.get_item('skip_download'):
+		for index in hashes:
+			hash_path = hashes.get(index).get('path')
+			if not is_file(hash_path):
+				hash_url = hashes.get(index).get('url')
+				conditional_download(download_directory_path, [hash_url])
+
+	process_manager.end()
+	return True
+
+def conditional_download_sources(download_directory_path : str, sources : DownloadSet) -> bool:
+	process_manager.check()
+	if not state_manager.get_item('skip_download'):
+		for index in sources:
+			source_path = sources.get(index).get('path')
+			if not is_file(source_path):
+				source_url = sources.get(index).get('url')
+				conditional_download(download_directory_path, [source_url])
+
+	process_manager.end()
+	return True
+
+def validate_hash_paths(hash_paths : List[str]) -> Tuple[List[str], List[str]]:
+	return hash_paths, [] # Assumes all paths are valid
+
+def validate_source_paths(source_paths : List[str]) -> Tuple[List[str], List[str]]:
+	return source_paths, [] # Assumes all paths are valid
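For reference, a minimal usage sketch of the module above follows. The DownloadSet shape (a key mapping to 'url' and 'path' entries) is inferred from how conditional_download_sources reads it; the URL below is a placeholder, and the sketch assumes download.py is importable from the working directory and that the ffff runtime (state_manager, process_manager, wording) has already been initialised by the application.

# Minimal usage sketch, not part of this commit; the URL and setup are assumptions.
from download import conditional_download_sources

MODEL_SOURCES = {
	'blendswap_256': {
		'url': 'https://example.com/ffff_assets/blendswap_256.onnx',  # placeholder URL
		'path': 'Extras/ffff_assets/blendswap_256.onnx'
	}
}

if __name__ == '__main__':
	# Skips the download when the file already exists at 'path'; otherwise streams it via curl with resume support.
	conditional_download_sources('Extras/ffff_assets', MODEL_SOURCES)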