HuggingFace0920 committed (verified)
Commit f6927a4 · Parent(s): 970d4e4

Update sync_data.sh

Files changed (1): sync_data.sh (+65 −118)
sync_data.sh CHANGED
@@ -1,156 +1,103 @@
  #!/bin/sh
  
  # Check environment variables
  if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
-     echo "HF_TOKEN or DATASET_ID not set; backups are disabled"
      exit 1
  fi
  
- # Activate the virtual environment
- . $HOME/venv/bin/activate
-
- # Generate the sync script
- cat > hf_sync.py << 'EOL'
- # HuggingFace sync script
- from huggingface_hub import HfApi
- import sys
- import os
- import tarfile
- import tempfile
-
- # Cap the number of backup files; delete the oldest once the limit is exceeded
- def manage_backups(api, repo_id, max_files=50):
-     files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
-     backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
-     backup_files.sort()
-     if len(backup_files) >= max_files:
-         files_to_delete = backup_files[:(len(backup_files) - max_files + 1)]
-         for file_to_delete in files_to_delete:
-             try:
-                 api.delete_file(path_in_repo=file_to_delete, repo_id=repo_id, repo_type="dataset")
-                 print(f'Deleted old backup: {file_to_delete}')
-             except Exception as e:
-                 print(f'Error deleting {file_to_delete}: {str(e)}')
-
- # Upload a backup archive to HuggingFace
- def upload_backup(file_path, file_name, token, repo_id):
-     api = HfApi(token=token)
-     try:
-         api.upload_file(
-             path_or_fileobj=file_path,
-             path_in_repo=file_name,
-             repo_id=repo_id,
-             repo_type="dataset"
-         )
-         print(f"Uploaded {file_name}")
-         manage_backups(api, repo_id)
-     except Exception as e:
-         print(f"Upload failed: {str(e)}")
-
-
- # Download and restore the latest backup
- def download_latest_backup(token, repo_id, extract_path):
-     try:
-         api = HfApi(token=token)
-         files = api.list_repo_files(repo_id=repo_id, repo_type="dataset")
-         backup_files = [f for f in files if f.startswith('backup_') and f.endswith('.tar.gz')]
-         if not backup_files:
-             print("No backup files found")
-             return
-         latest_backup = sorted(backup_files)[-1]
-         with tempfile.TemporaryDirectory() as temp_dir:
-             filepath = api.hf_hub_download(
-                 repo_id=repo_id,
-                 filename=latest_backup,
-                 repo_type="dataset",
-                 local_dir=temp_dir
-             )
-             if filepath and os.path.exists(filepath):
-                 with tarfile.open(filepath, 'r:gz') as tar:
-                     tar.extractall(extract_path)
-                 print(f"Restored backup: {latest_backup}")
-     except Exception as e:
-         print(f"Download failed: {str(e)}")
  
- # Squash the commit history
- def super_squash_history(token, repo_id):
-     try:
-         api = HfApi(token=token)
-         api.super_squash_history(repo_id=repo_id, repo_type="dataset")
-         print("History squash complete.")
-     except Exception as e:
-         print(f"Squash failed: {str(e)}")
  
- # Entry point
- if __name__ == "__main__":
-     action = sys.argv[1]
-     token = sys.argv[2]
-     repo_id = sys.argv[3]
-     if action == "upload":
-         file_path = sys.argv[4]
-         file_name = sys.argv[5]
-         upload_backup(file_path, file_name, token, repo_id)
-     elif action == "download":
-         extract_path = sys.argv[4] if len(sys.argv) > 4 else '.'
-         download_latest_backup(token, repo_id, extract_path)
-     elif action == "super_squash":
-         super_squash_history(token, repo_id)
  EOL
- # On first start, restore the latest backup from HuggingFace (extract into the app directory)
- echo "Downloading the latest backup from HuggingFace..."
- python hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}" "$HOME/app"
  
  # Sync function
  sync_data() {
      while true; do
-         echo "Sync run started at $(date)"
  
-         # Make sure the data directory exists (adjust the path to your setup)
-         STORAGE_PATH="$HOME/app/data"
          if [ -d "${STORAGE_PATH}" ]; then
              # Create a backup
              timestamp=$(date +%Y%m%d_%H%M%S)
              backup_file="backup_${timestamp}.tar.gz"
  
              # Compress the directory (-C keeps the parent path out of the archive)
-             tar -czf "/tmp/${backup_file}" -C "$(dirname "${STORAGE_PATH}")" "$(basename "${STORAGE_PATH}")"
  
              # Upload to HuggingFace
-             echo "Uploading backup to HuggingFace..."
-             python hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "/tmp/${backup_file}" "${backup_file}"
-
              # Squash commit history
-             SQUASH_FLAG_FILE="/tmp/last_squash_time"
-             NOW=$(date +%s)
-             SEVEN_DAYS=$((7*24*60*60))
-             if [ ! -f "$SQUASH_FLAG_FILE" ]; then
-                 echo $NOW > "$SQUASH_FLAG_FILE"
-                 echo "First history squash..."
-                 python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
-             else
-                 LAST=$(cat "$SQUASH_FLAG_FILE")
-                 DIFF=$((NOW - LAST))
-                 if [ $DIFF -ge $SEVEN_DAYS ]; then
                      echo $NOW > "$SQUASH_FLAG_FILE"
-                     echo "More than 7 days since the last squash; squashing history..."
-                     python hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
                  else
-                     echo "Less than 7 days since the last squash; skipping this run."
                  fi
-             fi
  
              # Clean up the temporary file
-             rm -f "/tmp/${backup_file}"
          else
-             echo "Storage directory ${STORAGE_PATH} does not exist; waiting..."
          fi
  
          # Sync interval
-         SYNC_INTERVAL=${SYNC_INTERVAL:-7200}
-         echo "Next sync in ${SYNC_INTERVAL} seconds..."
          sleep $SYNC_INTERVAL
      done
  }
  
  # Start the sync process
- sync_data &
  #!/bin/sh
  
+ # Logging helper
+ log() {
+     echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1"
+ }
+
  # Check environment variables
  if [ -z "$HF_TOKEN" ] || [ -z "$DATASET_ID" ]; then
+     log "Error: HF_TOKEN or DATASET_ID not set; backups are disabled"
      exit 1
  fi
  
+ # Check for a virtual environment
+ if [ -f "$HOME/venv/bin/activate" ]; then
+     log "Activating virtual environment..."
+     . $HOME/venv/bin/activate
+ else
+     log "Warning: no virtual environment found; using the system Python"
+ fi
  
+ # Default settings
+ SYNC_INTERVAL=${SYNC_INTERVAL:-7200}    # default: 2 hours
+ MAX_BACKUPS=${MAX_BACKUPS:-50}          # default: keep 50 backups
+ MAX_BACKUP_SIZE=${MAX_BACKUP_SIZE:-100} # default: 100 MB backup size limit
+ STORAGE_PATH="$HOME/app/data"
  
+ # Write the sync script to the correct path
+ cat > /app/hf_sync.py << 'EOL'
+ ... (the earlier hf_sync.py content goes here, unchanged)
  EOL
+
+ # On first start, restore the latest backup from HuggingFace
+ log "Downloading the latest backup from HuggingFace..."
+ if python /app/hf_sync.py download "${HF_TOKEN}" "${DATASET_ID}" "$HOME/app"; then
+     log "Backup restored"
+ else
+     log "Backup restore failed; continuing app startup"
+ fi
  
  # Sync function
  sync_data() {
      while true; do
+         log "Sync run started"
  
+         # Make sure the data directory exists
          if [ -d "${STORAGE_PATH}" ]; then
              # Create a backup
              timestamp=$(date +%Y%m%d_%H%M%S)
              backup_file="backup_${timestamp}.tar.gz"
+             temp_backup="/tmp/${backup_file}"
  
              # Compress the directory (-C keeps the parent path out of the archive)
+             log "Creating backup..."
+             tar -czf "${temp_backup}" -C "$(dirname "${STORAGE_PATH}")" "$(basename "${STORAGE_PATH}")"
  
              # Upload to HuggingFace
+             log "Uploading backup to HuggingFace..."
+             if python /app/hf_sync.py upload "${HF_TOKEN}" "${DATASET_ID}" "${temp_backup}" "${backup_file}" "${MAX_BACKUP_SIZE}"; then
+                 log "Backup uploaded"
+             else
+                 log "Backup upload failed"
+             fi
+
              # Squash commit history
+             SQUASH_FLAG_FILE="/tmp/last_squash_time"
+             NOW=$(date +%s)
+             SEVEN_DAYS=$((7*24*60*60))
+             if [ ! -f "$SQUASH_FLAG_FILE" ]; then
                  echo $NOW > "$SQUASH_FLAG_FILE"
+                 log "First history squash..."
+                 python /app/hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
              else
+                 LAST=$(cat "$SQUASH_FLAG_FILE")
+                 DIFF=$((NOW - LAST))
+                 if [ $DIFF -ge $SEVEN_DAYS ]; then
+                     echo $NOW > "$SQUASH_FLAG_FILE"
+                     log "More than 7 days since the last squash; squashing history..."
+                     python /app/hf_sync.py super_squash "${HF_TOKEN}" "${DATASET_ID}"
+                 else
+                     log "Less than 7 days since the last squash; skipping this run."
+                 fi
              fi
  
              # Clean up the temporary file
+             rm -f "${temp_backup}"
          else
+             log "Storage directory ${STORAGE_PATH} does not exist; waiting..."
          fi
  
          # Sync interval
+         log "Next sync in ${SYNC_INTERVAL} seconds..."
          sleep $SYNC_INTERVAL
      done
  }
  
  # Start the sync process
+ log "Starting the background data sync process..."
+ sync_data &
+
+ # Record the sync process PID
+ echo $! > /tmp/sync_process.pid
+ log "Sync process started, PID: $(cat /tmp/sync_process.pid)"