ttttdiva commited on
Commit
a890d5f
·
verified ·
1 Parent(s): 4de47e3

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +133 -468
main.py CHANGED
@@ -1,477 +1,142 @@
1
- import asyncio
2
- import datetime
3
- import json
4
- import logging
5
  import os
6
- import re
7
- import shutil
8
  import subprocess
9
- import time
10
  import uuid
11
- from typing import Optional
12
-
13
  import requests
14
- from bs4 import BeautifulSoup
15
- from fake_useragent import UserAgent
16
- from fastapi import FastAPI
17
- from huggingface_hub import HfApi, hf_hub_download, login
18
 
 
 
 
 
19
  logging.basicConfig(level=logging.INFO)
20
  logger = logging.getLogger(__name__)
21
 
22
- class Config:
23
- HUGGINGFACE_API_KEY = os.environ["HUGGINGFACE_API_KEY"]
24
- CIVITAI_API_TOKEN = os.environ["CIVITAI_API_TOKEN"]
25
- LOG_FILE = "civitai_backup.log"
26
- LIST_FILE = "model_list.log"
27
- REPO_IDS = {
28
- "log": "ttttdiva/CivitAI_log_test",
29
- "model_list": "ttttdiva/CivitAI_model_info_test",
30
- "current": ""
31
- }
32
- URLS = {
33
- "latest": "https://civitai.com/api/v1/models?sort=Newest",
34
- "modelPage": "https://civitai.com/models/",
35
- "modelId": "https://civitai.com/api/v1/models/",
36
- "modelVersionId": "https://civitai.com/api/v1/model-versions/",
37
- "hash": "https://civitai.com/api/v1/model-versions/by-hash/"
38
- }
39
- JST = datetime.timezone(datetime.timedelta(hours=9))
40
- UA = UserAgent()
41
- HEADERS = {
42
- 'Authorization': f'Bearer {CIVITAI_API_TOKEN}',
43
- 'User-Agent': UA.random,
44
- "Content-Type": "application/json"
45
- }
46
-
47
- class CivitAICrawler:
48
- def __init__(self, config: Config):
49
- import base64
50
-
51
- rclone_conf_base64 = os.environ.get("RCLONE_CONF_BASE64")
52
- if rclone_conf_base64:
53
- config_dir = os.path.join(os.getcwd(), ".rclone_config")
54
- os.makedirs(config_dir, exist_ok=True)
55
-
56
- conf_path = os.path.join(config_dir, "rclone.conf")
57
- with open(conf_path, "wb") as f:
58
- f.write(base64.b64decode(rclone_conf_base64))
59
-
60
- os.environ["RCLONE_CONFIG"] = conf_path
61
- logger.info(f"[INFO] Created rclone.conf at {conf_path}")
62
- else:
63
- logger.warning("[WARN] RCLONE_CONF_BASE64 not found; rclone may fail.")
64
-
65
- self.config = config
66
- self.api = HfApi()
67
- self.app = FastAPI()
68
- self.repo_ids = self.config.REPO_IDS.copy()
69
- self.jst = self.config.JST
70
- self.setup_routes()
71
-
72
- def setup_routes(self):
73
- @self.app.get("/")
74
- def read_root():
75
- now = str(datetime.datetime.now(self.jst))
76
- description = f"""
77
- CivitAIを定期的に周回し新規モデルを {self.repo_ids['current']} にバックアップするSpaceです。
78
- モデル一覧は https://huggingface.co/{self.repo_ids['model_list']}/blob/main/model_list.log 参照。
79
- Status: {now} + currently running.
80
- """
81
- return description
82
-
83
- @self.app.on_event("startup")
84
- async def startup_event():
85
- asyncio.create_task(self.crawl())
86
-
87
- @staticmethod
88
- def get_filename_from_cd(content_disposition: Optional[str], default_name: str) -> str:
89
- if content_disposition:
90
- parts = content_disposition.split(';')
91
- for part in parts:
92
- if "filename=" in part:
93
- return part.split("=")[1].strip().strip('"')
94
- return default_name
95
-
96
- def download_file(self, url: str, destination_folder: str, default_name: str) -> Optional[str]:
97
- """URLからファイルをダウンロードし、destination_folder に保存する。"""
98
- os.makedirs(destination_folder, exist_ok=True) # 念のためフォルダ作成
99
- try:
100
- resp = requests.get(url, headers=self.config.HEADERS, stream=True)
101
- resp.raise_for_status()
102
- except requests.RequestException as e:
103
- logger.error(f"Failed to download file from {url}: {e}")
104
- return None
105
-
106
- filename = self.get_filename_from_cd(resp.headers.get('content-disposition'), default_name)
107
- file_path = os.path.join(destination_folder, filename)
108
- with open(file_path, 'wb') as f:
109
- for chunk in resp.iter_content(chunk_size=8192):
110
  f.write(chunk)
111
- logger.info(f"Downloaded: {file_path}")
112
- return file_path
113
-
114
- def get_model_info(self, model_id: str) -> dict:
115
- try:
116
- resp = requests.get(self.config.URLS["modelId"] + str(model_id), headers=self.config.HEADERS)
117
- resp.raise_for_status()
118
- return resp.json()
119
- except requests.RequestException as e:
120
- logger.error(f"Failed to retrieve model info for ID {model_id}: {e}")
121
- return {}
122
-
123
- def download_images(self, model_versions: list, folder: str):
124
- """モデル画像を folder/images にダウンロード"""
125
- images_folder = os.path.join(folder, "images")
126
- os.makedirs(images_folder, exist_ok=True)
127
- images = []
128
- for ver in model_versions:
129
- for img in ver.get("images", []):
130
- images.append(img["url"])
131
-
132
- for image_url in images:
133
- image_name = os.path.basename(image_url)
134
- self.download_file(image_url, images_folder, image_name)
135
-
136
- def save_html_content(self, model_page_url: str, folder: str):
137
- """model_page_url のHTMLを {folder}/{folder名}.html に保存"""
138
- try:
139
- resp = requests.get(model_page_url)
140
- resp.raise_for_status()
141
- html_name = os.path.basename(folder) + ".html"
142
- html_path = os.path.join(folder, html_name)
143
- with open(html_path, 'w', encoding='utf-8') as f:
144
- f.write(resp.text)
145
- logger.info(f"Saved HTML: {html_path}")
146
- except Exception as e:
147
- logger.error(f"Error saving HTML from {model_page_url}: {e}")
148
-
149
- def save_model_info_json(self, model_info: dict, folder: str):
150
- """model_info.json folder に保存"""
151
- info_path = os.path.join(folder, "model_info.json")
152
- try:
153
- with open(info_path, 'w', encoding='utf-8') as f:
154
- json.dump(model_info, f, indent=2)
155
- logger.info(f"Saved model_info.json: {info_path}")
156
- except Exception as e:
157
- logger.error(f"Failed to save model_info.json: {e}")
158
-
159
- def download_and_process_versions(self, model_versions: list, folder: str):
160
- """
161
- 最新バージョンは folder/ に、
162
- 古いバージョンは folder/old_versions/ にまとめて保存。
163
- """
164
- if not model_versions:
165
- return
166
-
167
- # 最新バージョン => folder
168
- latest_ver = model_versions[0]
169
- for file_info in latest_ver.get("files", []):
170
- download_url = file_info["downloadUrl"]
171
- file_name = file_info["name"]
172
- self.download_file(download_url, folder, file_name)
173
-
174
- # 古いバージョン => folder/old_versions
175
- if len(model_versions) > 1:
176
- oldv_folder = os.path.join(folder, "old_versions")
177
- os.makedirs(oldv_folder, exist_ok=True)
178
- for v in model_versions[1:]:
179
- for f_info in v.get("files", []):
180
- dl_url = f_info["downloadUrl"]
181
- f_name = f_info["name"]
182
- self.download_file(dl_url, oldv_folder, f_name)
183
-
184
- def encrypt_and_upload_folder(self, local_folder: str) -> Optional[str]:
185
- """
186
- rclone copy local_folder => cryptLocal:
187
- => 差分検知で "encrypted/xxxxxx" を発見 -> upload_folder -> 削除
188
- => 戻り値は暗号フォルダ名
189
- """
190
- if not os.path.isdir(local_folder):
191
- logger.error(f"encrypt_and_upload_folder: {local_folder} is not a directory.")
192
- return None
193
-
194
- encrypted_base_dir = os.path.join(os.getcwd(), "encrypted")
195
- os.makedirs(encrypted_base_dir, exist_ok=True)
196
-
197
- before_set = set(os.listdir(encrypted_base_dir))
198
- # cleanup old stuff
199
- for itm in before_set:
200
- itm_path = os.path.join(encrypted_base_dir, itm)
201
- try:
202
- if os.path.isfile(itm_path):
203
- os.remove(itm_path)
204
- else:
205
- shutil.rmtree(itm_path)
206
- logger.info(f"[CLEANUP] Removed {itm_path}")
207
- except Exception as e:
208
- logger.warning(f"[CLEANUP] Failed to remove {itm_path}: {e}")
209
-
210
- # rclone copy local_folder => cryptLocal:
211
- try:
212
- subprocess.run(
213
- ["rclone", "copy", local_folder, "cryptLocal:", "--create-empty-src-dirs"],
214
- check=True
215
- )
216
- logger.info(f"[OK] rclone copy {local_folder} => cryptLocal:")
217
- except subprocess.CalledProcessError as e:
218
- logger.error(f"rclone copy failed: {e}")
219
- return None
220
-
221
- after_set = set(os.listdir(encrypted_base_dir))
222
- diff = after_set - before_set
223
- if not diff:
224
- logger.error("[ERROR] No new directory in ./encrypted after rclone copy.")
225
- return None
226
- if len(diff) > 1:
227
- logger.warning(f"[WARN] multiple new dirs? {diff}")
228
-
229
- enc_folder_name = diff.pop()
230
- enc_folder_path = os.path.join(encrypted_base_dir, enc_folder_name)
231
- if not os.path.isdir(enc_folder_path):
232
- logger.error(f"[ERROR] {enc_folder_path} is not a directory.")
233
- return None
234
-
235
- # upload_folder
236
- try:
237
- self.upload_folder(enc_folder_path, path_in_repo=enc_folder_name)
238
- logger.info(f"[OK] Uploaded {enc_folder_path}")
239
- except Exception as e:
240
- logger.error(f"Failed to upload {enc_folder_path}: {e}")
241
-
242
- # cleanup local
243
- try:
244
- shutil.rmtree(local_folder)
245
- shutil.rmtree(enc_folder_path)
246
- logger.info(f"[CLEANUP] Removed {local_folder} & {enc_folder_path}")
247
- except Exception as e:
248
- logger.warning(f"[CLEANUP] Could not remove local folders: {e}")
249
-
250
- return enc_folder_name
251
-
252
- def upload_file(self, file_path: str, repo_id: Optional[str] = None, path_in_repo: Optional[str] = None):
253
- if repo_id is None:
254
- repo_id = self.repo_ids['current']
255
- if path_in_repo is None:
256
- path_in_repo = os.path.basename(file_path)
257
-
258
- max_retries = 5
259
- attempt = 0
260
- while attempt < max_retries:
261
- try:
262
- self.api.upload_file(
263
- path_or_fileobj=file_path,
264
- repo_id=repo_id,
265
- path_in_repo=path_in_repo
266
- )
267
- logger.info(f"Uploaded file: {file_path} to {repo_id} at {path_in_repo}")
268
- return
269
- except Exception as e:
270
- attempt += 1
271
- error_message = str(e)
272
- if "over the limit of 100000 files" in error_message:
273
- logger.warning("File limit exceeded, creating a new repo.")
274
- self.repo_ids['current'] = self.increment_repo_name(self.repo_ids['current'])
275
- self.api.create_repo(repo_id=self.repo_ids['current'], private=True)
276
- attempt = 0
277
- continue
278
- elif "you can retry this action in about 1 hour" in error_message:
279
- logger.warning("Rate limit. Wait 1hr.")
280
- time.sleep(3600)
281
- attempt -= 1
282
- else:
283
- if attempt < max_retries:
284
- logger.warning(f"Failed to upload {file_path}, retry {attempt}/{max_retries}")
285
- else:
286
- logger.error(f"Failed after {max_retries} attempts: {e}")
287
- raise
288
-
289
- def upload_folder(self, folder_path: str, path_in_repo: Optional[str] = None):
290
- if path_in_repo is None:
291
- path_in_repo = os.path.basename(folder_path)
292
-
293
- max_retries = 5
294
- attempt = 0
295
- while attempt < max_retries:
296
- try:
297
- self.api.upload_folder(
298
- folder_path=folder_path,
299
- repo_id=self.repo_ids['current'],
300
- path_in_repo=path_in_repo
301
- )
302
- logger.info(f"Uploaded folder: {folder_path} => {self.repo_ids['current']}:{path_in_repo}")
303
- return
304
- except Exception as e:
305
- attempt += 1
306
- error_message = str(e)
307
- if "over the limit of 100000 files" in error_message:
308
- logger.warning("File limit exceeded, creating new repo.")
309
- self.repo_ids['current'] = self.increment_repo_name(self.repo_ids['current'])
310
- self.api.create_repo(repo_id=self.repo_ids['current'], private=True)
311
- attempt = 0
312
- continue
313
- elif "you can retry this action in about 1 hour" in error_message:
314
- logger.warning("Rate limit. Waiting 1hr.")
315
- time.sleep(3600)
316
- attempt -= 1
317
- else:
318
- if attempt < max_retries:
319
- logger.warning(f"Failed to upload folder {folder_path}, attempt {attempt}/{max_retries}")
320
- else:
321
- logger.error(f"Failed after {max_retries} attempts: {e}")
322
- raise
323
-
324
- @staticmethod
325
- def increment_repo_name(repo_id: str) -> str:
326
- match = re.search(r'(\d+)$', repo_id)
327
- if match:
328
- number = int(match.group(1)) + 1
329
- new_repo_id = re.sub(r'\d+$', str(number), repo_id)
330
- else:
331
- new_repo_id = f"{repo_id}1"
332
- return new_repo_id
333
-
334
- def read_model_list(self) -> dict:
335
- model_list = {}
336
- try:
337
- with open(self.config.LIST_FILE, "r", encoding="utf-8") as f:
338
- for line in f:
339
- line = line.strip()
340
- if line:
341
- parts = line.split(": ", 1)
342
- if len(parts) == 2:
343
- modelpage_name, model_hf_url = parts
344
- model_list[model_hf_url] = modelpage_name
345
- except Exception as e:
346
- logger.error(f"Failed to read model list: {e}")
347
- return model_list
348
-
349
- def get_repo_info(self, repo_id):
350
- try:
351
- repo_info = self.api.repo_info(repo_id=repo_id, files_metadata=True)
352
- file_paths = [sibling.rfilename for sibling in repo_info.siblings]
353
- return file_paths
354
- except Exception as e:
355
- logger.error(f"Failed to get repo info for {repo_id}: {e}")
356
- return []
357
-
358
- def process_model(self, model_url: str):
359
- try:
360
- model_id = model_url.rstrip("/").split("/")[-1]
361
- model_info = self.get_model_info(model_id)
362
- if not model_info or "modelVersions" not in model_info:
363
- logger.error(f"No valid model info for ID {model_id}. Skipping.")
364
- return
365
-
366
- versions = model_info["modelVersions"]
367
- if not versions:
368
- logger.warning(f"No modelVersions found for ID {model_id}.")
369
- return
370
-
371
- folder_name = model_info.get("name", "UnnamedModel")
372
- folder_name = re.sub(r'[\\/*?:"<>|]', '_', folder_name)
373
- folder_name += "_" + str(uuid.uuid4())[:8]
374
- os.makedirs(folder_name, exist_ok=True)
375
-
376
- # ダウンロード(最新+古い)
377
- self.download_and_process_versions(versions, folder_name)
378
- # 画像
379
- self.download_images(versions, folder_name)
380
- # HTML
381
- model_page_url = f"{self.config.URLS['modelPage']}{model_id}"
382
- self.save_html_content(model_page_url, folder_name)
383
- # model_info.json
384
- self.save_model_info_json(model_info, folder_name)
385
-
386
- # 最後にフォルダごとアップ
387
- enc_folder = self.encrypt_and_upload_folder(folder_name)
388
- if enc_folder is None:
389
- enc_folder = "[ENCRYPT_FAILED]"
390
-
391
- hf_enc_url = f"https://huggingface.co/{self.repo_ids['current']}/tree/main/{enc_folder}"
392
- with open(self.config.LIST_FILE, "a", encoding="utf-8") as f:
393
- f.write(f"{model_info.get('name','Unknown')} (ID:{model_id}): {hf_enc_url}\n")
394
-
395
- except Exception as e:
396
- logger.error(f"Error in process_model({model_url}): {e}")
397
-
398
- async def crawl(self):
399
- while True:
400
- try:
401
- login(token=self.config.HUGGINGFACE_API_KEY, add_to_git_credential=True)
402
-
403
- model_list_path = hf_hub_download(
404
- repo_id=self.repo_ids['model_list'],
405
- filename=self.config.LIST_FILE
406
- )
407
- shutil.copyfile(model_list_path, f"./{self.config.LIST_FILE}")
408
-
409
- local_file_path = hf_hub_download(
410
- repo_id=self.repo_ids["log"],
411
- filename=self.config.LOG_FILE
412
- )
413
- shutil.copyfile(local_file_path, f"./{self.config.LOG_FILE}")
414
-
415
- with open(self.config.LOG_FILE, "r", encoding="utf-8") as file:
416
- lines = file.read().splitlines()
417
- old_models = json.loads(lines[0]) if len(lines) > 0 else []
418
- self.repo_ids["current"] = lines[1] if len(lines) > 1 else ""
419
-
420
- r = requests.get(self.config.URLS["latest"], headers=self.config.HEADERS)
421
- r.raise_for_status()
422
- latest_models = r.json().get("items", [])
423
- latest_ids = [m["id"] for m in latest_models if "id" in m]
424
-
425
- new_ids = list(set(latest_ids) - set(old_models))
426
- if new_ids:
427
- logger.info(f"New model IDs found: {new_ids}")
428
- mid = new_ids[0]
429
-
430
- for attempt in range(1,6):
431
- try:
432
- self.process_model(f"{self.config.URLS['modelId']}{mid}")
433
- break
434
- except Exception as e:
435
- logger.error(f"Failed model {mid} (attempt {attempt}/5): {e}")
436
- if attempt == 5:
437
- logger.error(f"Skipping model {mid}")
438
- else:
439
- await asyncio.sleep(2)
440
-
441
- old_models.append(mid)
442
- with open(self.config.LOG_FILE, "w", encoding="utf-8") as f:
443
- f.write(json.dumps(old_models)+"\n")
444
- f.write(f"{self.repo_ids['current']}\n")
445
- logger.info(f"Updated log with new model ID: {mid}")
446
-
447
- self.upload_file(
448
- file_path=self.config.LOG_FILE,
449
- repo_id=self.repo_ids["log"],
450
- path_in_repo=self.config.LOG_FILE
451
- )
452
- self.upload_file(
453
- file_path=self.config.LIST_FILE,
454
- repo_id=self.repo_ids["model_list"],
455
- path_in_repo=self.config.LIST_FILE
456
- )
457
- else:
458
- with open(self.config.LOG_FILE, "w", encoding="utf-8") as f:
459
- f.write(json.dumps(latest_ids)+"\n")
460
- f.write(f"{self.repo_ids['current']}\n")
461
- logger.info("No new models found. Updated log.")
462
- self.upload_file(
463
- file_path=self.config.LOG_FILE,
464
- repo_id=self.repo_ids["log"],
465
- path_in_repo=self.config.LOG_FILE
466
- )
467
- logger.info("Uploaded log file.")
468
- await asyncio.sleep(60)
469
- continue
470
- except Exception as e:
471
- logger.error(f"Error in crawl loop: {e}")
472
- await asyncio.sleep(300)
473
-
474
- # FastAPI
475
- config = Config()
476
- crawler = CivitAICrawler(config)
477
- app = crawler.app
 
 
 
 
 
1
  import os
 
 
2
  import subprocess
3
+ import shutil
4
  import uuid
5
+ import base64
 
6
  import requests
 
 
 
 
7
 
8
+ from huggingface_hub import HfApi, login
9
+
10
+ # ログ出力用
11
+ import logging
12
  logging.basicConfig(level=logging.INFO)
13
  logger = logging.getLogger(__name__)
14
 
15
# === Environment variables ===
# HF write token used for `login()` and uploads; empty string when unset.
HUGGINGFACE_API_KEY = os.environ.get("HUGGINGFACE_API_KEY", "")
# Base64-encoded rclone.conf content, restored to disk by setup_rclone_conf().
RCLONE_CONF_BASE64 = os.environ.get("RCLONE_CONF_BASE64", "")
REPO_ID = os.environ.get("REPO_ID", "username/testrepo")
# Your upload-target repository (e.g. "ttttdiva/CivitAI_Auto12")
21
def setup_rclone_conf():
    """Restore rclone.conf from the RCLONE_CONF_BASE64 environment variable.

    Writes the decoded config to .rclone_config/rclone.conf and points the
    RCLONE_CONFIG environment variable at it so every later rclone
    invocation picks it up. Logs a warning and does nothing when the
    variable is unset.
    """
    if not RCLONE_CONF_BASE64:
        logger.warning("[WARN] RCLONE_CONF_BASE64 not set. rclone may fail.")
        return

    target_dir = ".rclone_config"
    target_path = os.path.join(target_dir, "rclone.conf")
    os.makedirs(target_dir, exist_ok=True)

    decoded = base64.b64decode(RCLONE_CONF_BASE64)
    with open(target_path, "wb") as fh:
        fh.write(decoded)

    # Tell rclone where the restored config lives.
    os.environ["RCLONE_CONFIG"] = target_path
    logger.info(f"[INFO] rclone.conf created => {target_path}")
33
+
34
def download_file(url: str, dest_folder: str, filename: str, timeout: float = 30.0):
    """Download *url* and save it as ``dest_folder/filename``.

    Parameters:
        url: source URL to fetch.
        dest_folder: destination directory; created if it does not exist.
        filename: name of the file to write inside ``dest_folder``.
        timeout: per-request timeout in seconds (new keyword with a default,
            so existing callers are unaffected).

    Returns:
        The saved file path on success, or ``None`` on any failure.
    """
    os.makedirs(dest_folder, exist_ok=True)
    try:
        # stream=True avoids loading the whole body into memory; the context
        # manager guarantees the connection is released, and the timeout
        # prevents a dead server from hanging the run forever (requests has
        # no default timeout).
        with requests.get(url, stream=True, timeout=timeout) as r:
            r.raise_for_status()
            filepath = os.path.join(dest_folder, filename)
            with open(filepath, 'wb') as f:
                for chunk in r.iter_content(chunk_size=8192):
                    f.write(chunk)
        logger.info(f"[OK] Downloaded: {filepath}")
        return filepath
    except Exception as e:
        # Best-effort semantics: log and return None so one bad URL does not
        # abort the whole run (callers already treat None as "skip").
        logger.error(f"[ERR] download_file failed: {e}")
        return None
49
+
50
def encrypt_and_upload_folder(local_folder: str):
    """Encrypt a local folder via rclone and upload the result to Hugging Face.

    Steps:
        1) ``rclone copy local_folder => cryptLocal:`` which materializes an
           encrypted copy under ``./encrypted/<cipher-name>`` (detected by
           diffing the directory listing before/after the copy).
        2) Upload that encrypted folder to ``REPO_ID``.
        3) Remove both local copies — but ONLY when the upload succeeded, so
           a failed upload no longer destroys the only remaining data
           (the original code deleted unconditionally).

    Returns:
        The encrypted folder name on success, ``None`` on any failure.
        (Original implicitly returned ``None`` in all paths, so this is
        backward-compatible.)
    """
    if not os.path.isdir(local_folder):
        logger.error(f"[ERR] {local_folder} is not a directory.")
        return None

    encrypted_dir = os.path.join(os.getcwd(), "encrypted")
    os.makedirs(encrypted_dir, exist_ok=True)

    # Snapshot the listing so the new cipher-named folder can be detected.
    before = set(os.listdir(encrypted_dir))

    try:
        subprocess.run(["rclone", "copy", local_folder, "cryptLocal:", "--create-empty-src-dirs"], check=True)
        logger.info(f"[OK] rclone copy {local_folder} => cryptLocal:")
    except subprocess.CalledProcessError as e:
        logger.error(f"[ERR] rclone copy failed: {e}")
        return None

    after = set(os.listdir(encrypted_dir))
    diff = after - before
    if not diff:
        logger.error("[ERR] No new folder in ./encrypted after rclone copy.")
        return None
    if len(diff) > 1:
        # More than one new entry is unexpected; proceed with an arbitrary one
        # but leave a trace for debugging.
        logger.warning(f"[WARN] multiple new folders? {diff}")
    enc_folder_name = diff.pop()
    enc_folder_path = os.path.join(encrypted_dir, enc_folder_name)
    logger.info(f"[DEBUG] enc_folder_path => {enc_folder_path}")

    if not os.path.isdir(enc_folder_path):
        logger.error(f"[ERR] {enc_folder_path} is not a directory.")
        return None

    # Upload the encrypted folder to Hugging Face.
    upload_ok = False
    try:
        api = HfApi()
        subfolder_label = enc_folder_name  # keep the cipher name as repo path
        api.upload_folder(
            folder_path=enc_folder_path,
            repo_id=REPO_ID,
            path_in_repo=subfolder_label
        )
        logger.info(f"[OK] uploaded folder => {enc_folder_path} to {REPO_ID}:{subfolder_label}")
        upload_ok = True
    except Exception as e:
        logger.error(f"[ERR] upload_folder failed: {e}")

    if not upload_ok:
        # Keep both local copies so the data can be re-uploaded later.
        return None

    # cleanup — safe now that the data lives in the remote repo
    shutil.rmtree(local_folder, ignore_errors=True)
    shutil.rmtree(enc_folder_path, ignore_errors=True)
    logger.info(f"[CLEANUP] removed {local_folder} & {enc_folder_path}")
    return enc_folder_name
109
+
110
def main():
    """End-to-end smoke test: restore the rclone config, log in to
    Hugging Face, build a small sample folder, then encrypt and upload it."""
    logger.info("===== Starting minimal test =====")

    # 1) Restore rclone.conf before anything touches rclone.
    setup_rclone_conf()

    # 2) Hugging Face login — abort early when no token is configured.
    if not HUGGINGFACE_API_KEY:
        logger.error("[ERR] HUGGINGFACE_API_KEY not set.")
        return
    login(token=HUGGINGFACE_API_KEY, add_to_git_credential=True)
    logger.info("[OK] HF login success")

    # 3) Build a fresh local sample folder with a few files in it.
    sample_dir = "MyTestModel"
    if os.path.exists(sample_dir):
        shutil.rmtree(sample_dir)
    os.makedirs(sample_dir, exist_ok=True)

    # Two sample images plus a tiny metadata file.
    download_file("https://picsum.photos/200/300", sample_dir, "image1.jpg")
    download_file("https://picsum.photos/300/300", sample_dir, "image2.jpg")

    meta_path = os.path.join(sample_dir, "model_info.json")
    with open(meta_path, 'w', encoding='utf-8') as fh:
        fh.write('{"model":"test","desc":"some description"}')
    logger.info(f"[OK] Created {meta_path}")

    # 4) Encrypt via rclone and push the result to the HF repo.
    encrypt_and_upload_folder(sample_dir)


if __name__ == "__main__":
    main()