luigi12345 committed on
Commit
7ece4f3
•
1 Parent(s): 9bbc5d1

Create app.py

Files changed (1)
  1. app.py +630 -0
app.py ADDED
@@ -0,0 +1,630 @@
import os
import streamlit as st
import pandas as pd
from huggingface_hub import (
    HfApi,
    upload_folder,
    upload_file,
    create_repo,
    login,
    hf_hub_download,
    list_repo_files,
)
import logging
import time
import json
import keyring  # Secure token storage
import socket  # Offline detection
import hashlib  # Data integrity
from pathlib import Path
from threading import Thread
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import schedule
import datetime
from streamlit_option_menu import option_menu

# Set page configuration
st.set_page_config(page_title="InfiniteStorageFace", layout="wide")

# Initialize logging
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(levelname)s] %(message)s",
    handlers=[logging.StreamHandler()]
)

# Initialize session state variables
session_defaults = {
    'logs': [],
    'uploading': False,
    'cancel': False,
    'observer': None,
    'selected_files': [],
    'sync_paused': False,
    'token': "",
    'settings_loaded': False,
    'remote_files': {},
    'queued_files': [],
    'scheduled_sync': False,
    'repo_id': "",
    'repo_exists': False,
    'folder_path': "",
    'private': True,  # Default to private repositories
    'ignore_patterns_selected': [],
    'process_individually': False,
    'subfolder': "",
    'total_files_synced': 0,
    'total_files': 0,
    'sync_option': 'Sync',  # Default option is Sync
}
for key, default_value in session_defaults.items():
    if key not in st.session_state:
        st.session_state[key] = default_value

# Centralized ignore patterns mapping
IGNORE_PATTERNS_MAP = {
    "Ignore __pycache__": "**/__pycache__/**",
    "Ignore .git": ".git/**",
    "Ignore .venv": ".venv/**",
    "Ignore *.pyc": "*.pyc",
    "Ignore *.log": "*.log",
    "Ignore *.tmp": "*.tmp",
    "Ignore *.DS_Store": "*.DS_Store"
}

# Default values
DEFAULT_REPO = "your_username/your_private_vault"
DEFAULT_LOCAL_PATH = str(Path.home())
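
# For illustration only (a hypothetical selection, not a value the app stores):
# choosing "Ignore __pycache__" and "Ignore *.log" in the UI would resolve to
#   [IGNORE_PATTERNS_MAP[p] for p in ["Ignore __pycache__", "Ignore *.log"]]
#   == ["**/__pycache__/**", "*.log"]
# which is the ignore_patterns list later handed to upload_folder.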

# Helper Functions
def is_connected():
    """Check for internet connectivity."""
    try:
        socket.create_connection(("1.1.1.1", 53), timeout=3)
        return True
    except OSError:
        return False

def log(message, level="INFO"):
    """Log messages with timestamp."""
    timestamp = time.strftime("[%Y-%m-%d %H:%M:%S]")
    full_message = f"{timestamp} {message}"
    st.session_state.logs.append(full_message)
    if level == "INFO":
        logging.info(message)
    elif level == "WARNING":
        logging.warning(message)
    elif level == "ERROR":
        logging.error(message)
    elif level == "DEBUG":
        logging.debug(message)

def authenticate(token):
    """Authenticate user with Hugging Face token."""
    if not token:
        log("❌ Hugging Face Token is required.", level="ERROR")
        return False
    try:
        login(token)
        keyring.set_password("huggingface", "token", token)
        log("✅ Authenticated successfully!")
        return True
    except Exception as e:
        log(f"❌ Authentication failed: {e}", level="ERROR")
        return False

def create_repo_if_not_exists(repo_id, token, private):
    """Create a repository if it doesn't exist."""
    api = HfApi()
    try:
        api.list_repo_files(repo_id=repo_id, repo_type="dataset", token=token)
        log(f"✅ Repository '{repo_id}' exists.")
        st.session_state.repo_exists = True
        return True
    except Exception:
        log(f"⚠️ Repository '{repo_id}' does not exist. Creating it...", level="WARNING")
        try:
            create_repo(
                repo_id=repo_id,
                token=token,
                private=private,
                repo_type="dataset",
                exist_ok=True,
            )
            log(f"✅ Created new repository: '{repo_id}'.")
            st.session_state.repo_exists = True
            return True
        except Exception as create_err:
            log(f"❌ Failed to create repository '{repo_id}': {create_err}", level="ERROR")
            return False

def compute_checksum(file_path):
    """Compute the checksum of a file for data integrity."""
    sha256 = hashlib.sha256()
    try:
        with open(file_path, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                sha256.update(chunk)
        return sha256.hexdigest()
    except Exception as e:
        log(f"❌ Failed to compute checksum for '{file_path}': {e}")
        return None

def upload_folder_structure(folder_path, repo_id, token, target_path, ignore_patterns, retry=3):
    """Upload a folder structure with error handling and retries."""
    upload_params = {
        "folder_path": folder_path,
        "repo_id": repo_id,
        "repo_type": "dataset",
        "token": token,
        "path_in_repo": target_path,
        "ignore_patterns": ignore_patterns,
    }
    log(f"🚀 Uploading folder '{folder_path}' to '{target_path}' in repository '{repo_id}'...")
    for attempt in range(1, retry + 1):
        try:
            upload_folder(**upload_params)
            log(f"✅ Upload completed for '{folder_path}'!")
            return True
        except Exception as upload_err:
            log(f"❌ Upload failed for '{folder_path}' on attempt {attempt}: {upload_err}", level="ERROR")
            if attempt < retry:
                log(f"🔄 Retrying upload ({attempt}/{retry})...", level="WARNING")
                time.sleep(2 ** attempt)  # Exponential backoff
            else:
                log(f"❌ All retry attempts failed for '{folder_path}'.", level="ERROR")
                return False
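
# Illustrative sketch only (not executed anywhere in the app): how the retrying
# helper above is meant to be called. The folder, repo ID, token variable, and
# patterns below are hypothetical placeholders, not values this app defines.
#
#   upload_folder_structure(
#       folder_path="/home/me/photos",
#       repo_id="your_username/your_private_vault",
#       token=hf_token,
#       target_path="backups/photos",
#       ignore_patterns=["**/__pycache__/**", "*.tmp"],
#   )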

# Function to handle uploads
def upload_files():
    st.session_state.uploading = True
    token = st.session_state.token
    repo_id = st.session_state.repo_id
    private = st.session_state.private
    folder_path = st.session_state.folder_path
    subfolder = st.session_state.subfolder
    process_individually = st.session_state.process_individually
    ignore_patterns_selected = st.session_state.ignore_patterns_selected
    ignore_patterns = [IGNORE_PATTERNS_MAP[pattern] for pattern in ignore_patterns_selected]
    target_path = subfolder.replace("\\", "/") if subfolder else ""

    if not is_connected():
        log("❌ No internet connection. Sync will resume when the connection is restored.", level="ERROR")
        st.session_state.uploading = False
        return

    if not authenticate(token):
        st.session_state.uploading = False
        return

    if not create_repo_if_not_exists(repo_id, token, private):
        st.session_state.uploading = False
        return

    if not os.path.isdir(folder_path):
        log(f"❌ The folder path '{folder_path}' does not exist.", level="ERROR")
        st.session_state.uploading = False
        return

    # Count the total files in the folder
    st.session_state.total_files = sum(len(files) for _, _, files in os.walk(folder_path))
    st.session_state.total_files_synced = 0

    # Upload only folders (no individual files except in the root)
    for item in os.listdir(folder_path):
        item_path = os.path.join(folder_path, item)
        item_target = f"{target_path}/{item}" if target_path else item
        if os.path.isdir(item_path):
            # Upload each first-level folder
            success = upload_folder_structure(item_path, repo_id, token, item_target, ignore_patterns)
            if success:
                for root, _, files in os.walk(item_path):
                    for file in files:
                        local_file = os.path.join(root, file)
                        relative_path = os.path.relpath(local_file, folder_path).replace("\\", "/")
                        remote_file = f"{target_path}/{relative_path}" if target_path else relative_path
                        st.session_state.total_files_synced += 1
                        local_checksum = compute_checksum(local_file)
                        remote_checksum = get_remote_file_checksum(repo_id, token, remote_file)
                        if remote_checksum is None:
                            log(f"ℹ️ Integrity check skipped for '{relative_path}' (no remote checksum available).")
                        elif local_checksum == remote_checksum:
                            log(f"✅ Data integrity verified for '{relative_path}'.")
                        else:
                            log(f"❌ Data integrity verification failed for '{relative_path}'.", level="ERROR")
        elif os.path.isfile(item_path):
            # Upload files only if they are in the root directory
            relative_path = os.path.relpath(item_path, folder_path).replace("\\", "/")
            if "/" not in relative_path:  # The file sits directly in the root
                # upload_folder cannot take a single file path, so use upload_file here
                try:
                    upload_file(
                        path_or_fileobj=item_path,
                        path_in_repo=item_target,
                        repo_id=repo_id,
                        repo_type="dataset",
                        token=token,
                    )
                    success = True
                except Exception as upload_err:
                    log(f"❌ Upload failed for '{item}': {upload_err}", level="ERROR")
                    success = False
                if success:
                    st.session_state.total_files_synced += 1
                    local_checksum = compute_checksum(item_path)
                    remote_checksum = get_remote_file_checksum(repo_id, token, item_target)
                    if remote_checksum is None:
                        log(f"ℹ️ Integrity check skipped for '{relative_path}' (no remote checksum available).")
                    elif local_checksum == remote_checksum:
                        log(f"✅ Data integrity verified for '{relative_path}'.")
                    else:
                        log(f"❌ Data integrity verification failed for '{relative_path}'.", level="ERROR")

    st.session_state.uploading = False
    log("🚀 Upload process completed.")

def get_remote_file_checksum(repo_id, token, file_path):
    # Placeholder: the Hugging Face Hub does not expose plain file checksums directly
    return None
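
# One possible implementation sketch (left unwired on purpose): for files stored
# via Git LFS, HfApi.get_paths_info() reports the sha256 of the underlying LFS
# blob, which can be compared against compute_checksum(). Small non-LFS files
# carry no such hash, so this is an assumption-laden sketch rather than a
# drop-in replacement for the placeholder above.
def get_remote_file_checksum_lfs(repo_id, token, file_path):
    api = HfApi()
    try:
        info = api.get_paths_info(repo_id=repo_id, paths=[file_path], repo_type="dataset", token=token)
        if info:
            lfs = getattr(info[0], "lfs", None)
            if lfs:
                # The LFS metadata carries the sha256 of the uploaded blob
                return lfs["sha256"] if isinstance(lfs, dict) else lfs.sha256
    except Exception as e:
        log(f"❌ Could not fetch remote metadata for '{file_path}': {e}", level="ERROR")
    return None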

# Function to monitor folder changes with real-time sync and offline queueing
class ChangeHandler(FileSystemEventHandler):
    def on_modified(self, event):
        if not st.session_state.uploading and not st.session_state.sync_paused:
            if is_connected():
                log("🔄 Changes detected. Starting upload...")
                upload_thread = Thread(target=upload_files)
                upload_thread.start()
            else:
                log("❌ No internet connection. Queuing changes for later upload.", level="WARNING")
                queue_changes(event.src_path)

# Queue changes when offline
def queue_changes(file_path):
    queued_files = st.session_state.get("queued_files", [])
    queued_files.append(file_path)
    st.session_state["queued_files"] = queued_files
    log(f"🕒 Queued file for upload: {file_path}")

# Check and upload queued changes when back online
def check_queued_uploads():
    if is_connected() and st.session_state.get("queued_files"):
        log("🔄 Uploading queued files...")
        for file in st.session_state["queued_files"]:
            upload_files_specific(file)
        st.session_state["queued_files"] = []

# Upload a specific file (for queued uploads)
def upload_files_specific(file_path):
    token = st.session_state.token
    repo_id = st.session_state.repo_id
    private = st.session_state.private
    folder_path = st.session_state.folder_path
    subfolder = st.session_state.subfolder
    ignore_patterns_selected = st.session_state.ignore_patterns_selected
    ignore_patterns = [IGNORE_PATTERNS_MAP[pattern] for pattern in ignore_patterns_selected]
    target_path = subfolder.replace("\\", "/") if subfolder else ""

    if not authenticate(token):
        return

    if not create_repo_if_not_exists(repo_id, token, private):
        return

    if not os.path.isfile(file_path):
        log(f"❌ The file path '{file_path}' does not exist.", level="ERROR")
        return

    relative_path = os.path.relpath(file_path, folder_path).replace("\\", "/")
    remote_path = f"{target_path}/{relative_path}" if target_path else relative_path

    # A single file cannot be uploaded with upload_folder, so upload_file is used here
    try:
        upload_file(
            path_or_fileobj=file_path,
            path_in_repo=remote_path,
            repo_id=repo_id,
            repo_type="dataset",
            token=token,
        )
        log(f"✅ Uploaded queued file '{relative_path}'.")
    except Exception as e:
        log(f"❌ Failed to upload queued file '{relative_path}': {e}", level="ERROR")

# Function to get version history
def get_version_history():
    api = HfApi()
    token = st.session_state.token
    repo_id = st.session_state.repo_id
    try:
        commits = api.list_repo_commits(repo_id=repo_id, repo_type="dataset", token=token)
        history = []
        for commit in commits:
            date_str = commit.created_at.strftime('%Y-%m-%d %H:%M:%S')
            authors = ", ".join(commit.authors)
            history.append(f"Commit {commit.commit_id[:7]} by {authors} on {date_str}: {commit.title}")
        return "\n".join(history)
    except Exception as e:
        log(f"❌ Error fetching version history: {e}", level="ERROR")
        return "Error fetching version history."

# Function to download from remote
def download_from_remote():
    token = st.session_state.token
    repo_id = st.session_state.repo_id
    folder_path = st.session_state.folder_path
    subfolder = st.session_state.subfolder
    target_path = subfolder.replace("\\", "/") if subfolder else ""

    api = HfApi()
    try:
        remote_files = api.list_repo_files(repo_id=repo_id, repo_type="dataset", token=token)
        for file in remote_files:
            local_file_path = os.path.join(folder_path, file)
            os.makedirs(os.path.dirname(local_file_path), exist_ok=True)
            # Download relative to the sync folder so the repo structure is mirrored locally
            hf_hub_download(
                repo_id,
                file,
                repo_type="dataset",
                token=token,
                local_dir=folder_path,
                local_dir_use_symlinks=False,
            )
            log(f"✅ Downloaded '{file}' to '{local_file_path}'")
    except Exception as e:
        log(f"❌ Error downloading from remote: {e}", level="ERROR")

def pause_sync():
    st.session_state.sync_paused = True
    log("⏸️ Sync paused.", level="WARNING")

def resume_sync():
    st.session_state.sync_paused = False
    log("▶️ Sync resumed.", level="INFO")
    upload_thread = Thread(target=upload_files)
    upload_thread.start()

def save_settings():
    settings = {
        "repo_id": st.session_state.repo_id,
        "private": st.session_state.private,
        "folder_path": st.session_state.folder_path,
        "subfolder": st.session_state.subfolder,
        "process_individually": st.session_state.process_individually,
        "ignore_patterns_selected": st.session_state.ignore_patterns_selected,
        "selected_files": st.session_state.selected_files
    }
    with open("settings.json", "w") as f:
        json.dump(settings, f)
    log("💾 Settings saved.", level="INFO")

def load_settings():
    if os.path.exists("settings.json"):
        with open("settings.json", "r") as f:
            settings = json.load(f)
        st.session_state.repo_id = settings.get("repo_id", DEFAULT_REPO)
        st.session_state.private = settings.get("private", True)
        st.session_state.folder_path = settings.get("folder_path", DEFAULT_LOCAL_PATH)
        st.session_state.subfolder = settings.get("subfolder", "")
        st.session_state.process_individually = settings.get("process_individually", False)
        st.session_state.ignore_patterns_selected = settings.get("ignore_patterns_selected", [])
        st.session_state.selected_files = settings.get("selected_files", [])
        log("🔄 Settings loaded.", level="INFO")
    else:
        log("❌ No saved settings found.", level="ERROR")

def get_local_files(folder_path):
    files = []
    for root, dirs, filenames in os.walk(folder_path):
        for filename in filenames:
            relative_path = os.path.relpath(os.path.join(root, filename), folder_path)
            files.append(relative_path.replace("\\", "/"))
    return files

def schedule_sync():
    def scheduled_upload():
        if is_connected() and not st.session_state.uploading and not st.session_state.sync_paused:
            log("⏰ Scheduled sync triggered.", level="INFO")
            upload_files()

    schedule.every().day.at("02:00").do(scheduled_upload)
    st.session_state.scheduled_sync = True
    log("⏰ Scheduled daily sync at 02:00.", level="INFO")

def run_scheduler():
    while True:
        schedule.run_pending()
        time.sleep(1)

# --------------------------- Main Interface ---------------------------

st.title("🚀 InfiniteStorageFace")
st.write("Effortlessly sync your local folders to your private Hugging Face repository!")

# Create tabs for navigation
tabs = st.tabs(["Home", "Vault", "Settings", "Logs", "Help"])

with tabs[0]:
    st.header("Welcome to InfiniteStorageFace")
    st.write("Use the tabs to navigate through the application.")

    st.subheader("Vault Overview")
    st.write(f"**Repository ID:** {st.session_state.repo_id or 'Not Set'}")
    st.write(f"**Private Repository:** {'Yes' if st.session_state.private else 'No'}")
    st.write(f"**Total Files Synced:** {st.session_state.total_files_synced}")
    st.write(f"**Total Files in Folder:** {st.session_state.total_files}")

    # Display repository contents
    st.subheader("Repository Contents")
    if st.session_state.repo_exists:
        # The vault is created as a dataset repo, so list it as one
        repo_files = list_repo_files(repo_id=st.session_state.repo_id, repo_type="dataset", token=st.session_state.token)
        if repo_files:
            for file in repo_files:
                st.write(f"📄 {file}")
        else:
            st.write("Repository is empty.")
    else:
        st.write("Repository not found or not authenticated.")

with tabs[1]:
    st.header("Vault Sync and Upload")

    # Select Sync or Upload
    st.session_state.sync_option = st.radio("Choose an option:", ["Sync", "Upload"], index=0)

    # Folder selection using file browser
    st.subheader("Select Folder to Sync/Upload")
    st.session_state.folder_path = st.text_input("Folder Path", value=st.session_state.folder_path or DEFAULT_LOCAL_PATH)
    # Alternatively, use a file browser component
    # st.session_state.folder_path = file_browser()

    if os.path.isdir(st.session_state.folder_path):
        # Display folders only
        folders = [f for f in os.listdir(st.session_state.folder_path) if os.path.isdir(os.path.join(st.session_state.folder_path, f))]
        st.session_state.selected_files = st.multiselect(
            "Select Folders to Sync/Upload (leave empty to include all):",
            folders,
            default=st.session_state.get('selected_files', []),
            help="Select specific folders to include."
        )
    else:
        st.error("❌ Invalid folder path.")

    # Sync Controls
    col_start, col_stop = st.columns(2)
    with col_start:
        if st.session_state.sync_option == 'Sync':
            start_sync = st.button("Start Sync", key="start_sync")
        else:
            start_upload = st.button("Start Upload", key="start_upload")
    with col_stop:
        stop_sync = st.button("Stop", key="stop_sync")

    # Handle buttons
    if st.session_state.sync_option == 'Sync':
        if start_sync:
            st.session_state.cancel = False
            if not st.session_state.observer:
                event_handler = ChangeHandler()
                st.session_state.observer = Observer()
                st.session_state.observer.schedule(event_handler, st.session_state.folder_path, recursive=True)
                st.session_state.observer.start()
                log("👀 Started monitoring for changes.", level="INFO")
            log("🔄 Sync started.", level="INFO")
            upload_thread = Thread(target=upload_files)
            upload_thread.start()

        if stop_sync:
            st.session_state.cancel = True
            if st.session_state.observer:
                st.session_state.observer.stop()
                st.session_state.observer.join()
                st.session_state.observer = None
            log("🛑 Sync stopped.", level="INFO")
    else:
        if start_upload:
            st.session_state.cancel = False
            log("🔄 Upload started.", level="INFO")
            upload_thread = Thread(target=upload_files)
            upload_thread.start()

        if stop_sync:
            st.session_state.cancel = True
            log("🛑 Upload stopped.", level="INFO")

    # Display sync status and statistics
    st.subheader("Status")
    if st.session_state.uploading:
        st.info("🚀 Uploading...")
    elif st.session_state.sync_paused:
        st.warning("⏸️ Sync Paused.")
    else:
        st.success("✅ Idle.")

    st.write(f"**Total Files Synced:** {st.session_state.total_files_synced}")
    st.write(f"**Total Files in Folder:** {st.session_state.total_files}")

with tabs[2]:
    st.header("Settings")

    # Securely retrieve token
    if not st.session_state.token:
        stored_token = keyring.get_password("huggingface", "token")
        if stored_token:
            st.session_state.token = stored_token

    st.session_state.token = st.text_input(
        "Hugging Face Token",
        type="password",
        value=st.session_state.token,
        help="Enter your Hugging Face API token. It will be securely stored."
    )

    st.session_state.repo_id = st.text_input(
        "Vault ID (Repository ID)",
        value=st.session_state.get('repo_id', DEFAULT_REPO),
        help="Format: username/repo-name"
    )

    st.session_state.private = st.checkbox(
        "Make Vault Private",
        value=st.session_state.get('private', True),
        help="Private vaults are not publicly accessible."
    )

    st.session_state.subfolder = st.text_input(
        "Subfolder in Vault (Optional)",
        value=st.session_state.get('subfolder', ""),
        help="Specify a subdirectory within the vault."
    )

    st.session_state.process_individually = st.checkbox(
        "Process First-Level Folders Individually",
        value=st.session_state.get('process_individually', False),
        help="Upload each first-level folder individually."
    )

    st.session_state.ignore_patterns_selected = st.multiselect(
        "Select Patterns to Ignore",
        options=list(IGNORE_PATTERNS_MAP.keys()),
        default=st.session_state.get('ignore_patterns_selected', ["Ignore __pycache__", "Ignore .git", "Ignore *.pyc"]),
        help="Select file patterns to exclude."
    )

    save_settings_button = st.button("Save Settings", key="save_settings")
    load_settings_button = st.button("Load Settings", key="load_settings")

    if save_settings_button:
        save_settings()

    if load_settings_button:
        load_settings()

with tabs[3]:
    st.header("Logs")
    # Integrated terminal-like logs (using hypothetical package)
    # st_terminal(st.session_state.logs)
    logs_text = "\n".join(st.session_state.logs[-100:])
    st.text_area("Logs", value=logs_text, height=300)

with tabs[4]:
    st.header("Help and Documentation")
    st.markdown("""
### InfiniteStorageFace Documentation

**Getting Started:**

- **Vault ID**: This is your repository ID in the format `username/repo-name`. Treat this as your personal storage vault.
- **Hugging Face Token**: Obtain your API token from your [Hugging Face account settings](https://huggingface.co/settings/tokens).
- **Folder Selection**: Use the file browser or enter the path to the folder you want to sync or upload.

**Sync vs Upload:**

- **Sync**: Continuously monitors the selected folder for changes and syncs them to your vault.
- **Upload**: Performs a one-time upload of the selected folder or files to your vault.

**Settings:**

- **Private Vault**: By default, your vault is private. Only you can access it.
- **Ignore Patterns**: Select file patterns that you want to exclude from syncing or uploading.

**Logs and Status:**

- View real-time logs in the **Logs** tab.
- Check the sync status and statistics in the **Vault** tab.

**Support:**

- For any issues or questions, please refer to the official documentation or contact support.
""")

# Check queued uploads
check_queued_uploads()

# Cleanup on exit
def cleanup():
    """Cleanup observers and threads on exit."""
    if st.session_state.observer is not None:
        st.session_state.observer.stop()
        st.session_state.observer.join()

# Run scheduled sync if enabled
if st.session_state.scheduled_sync:
    schedule_sync()
    if 'scheduler_thread' not in st.session_state:
        scheduler_thread = Thread(target=run_scheduler, daemon=True)
        scheduler_thread.start()
        st.session_state['scheduler_thread'] = scheduler_thread
        log("🕒 Scheduler started.", level="INFO")

# Handle session end
# st.on_session_end(cleanup)
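
# Launching the app (an assumption based on this being a Streamlit script; the
# commit itself ships no run command or requirements file):
#
#   pip install streamlit pandas huggingface_hub watchdog schedule keyring streamlit-option-menu
#   streamlit run app.py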