Upload app.py with huggingface_hub
app.py
ADDED
@@ -0,0 +1,174 @@
import os
import base64
import time
import shutil
import logging
import uuid
import zipfile
from flask import Flask, request, render_template, send_file, jsonify
from flask_socketio import SocketIO
from huggingface_hub import HfApi, hf_hub_download
from flask_apscheduler import APScheduler
import subprocess

# Set up logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app)
scheduler = APScheduler()
scheduler.init_app(app)
scheduler.start()

# Directory to store temporary files
TEMP_DIR = '/tmp/piper_onnx'
os.makedirs(TEMP_DIR, exist_ok=True)

# Dictionary to store task information
tasks = {}
def cleanup_old_files():
    # Remove temporary artifacts older than 30 minutes. Per-task directories
    # are removed as well (this is what the otherwise-unused shutil import
    # is for); the original only handled plain files, so the zips were
    # cleaned up but the per-task working directories accumulated.
    current_time = time.time()
    for filename in os.listdir(TEMP_DIR):
        file_path = os.path.join(TEMP_DIR, filename)
        if current_time - os.path.getmtime(file_path) > 30 * 60:  # 30 minutes
            if os.path.isfile(file_path):
                os.remove(file_path)
            elif os.path.isdir(file_path):
                shutil.rmtree(file_path, ignore_errors=True)

scheduler.add_job(id='cleanup_job', func=cleanup_old_files, trigger="interval", minutes=5)

@app.route('/', methods=['GET', 'POST'])
def index():
    if request.method == 'POST':
        repo_id = request.form['repo_id']
        token = request.form['token']
        model_name = request.form['model_name']

        task_id = str(uuid.uuid4())
        tasks[task_id] = {
            'status': 'processing',
            'log': [],
            'download_url': None
        }
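        # add_job() with no trigger defaults to a one-shot "run now" date
        # trigger, so process_model executes once on a scheduler thread
        # while this request returns immediately.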
        scheduler.add_job(
            func=process_model,
            args=[task_id, repo_id, token, model_name],
            id=task_id
        )

        return jsonify({'task_id': task_id})

    return render_template('index.html')

@app.route('/status/<task_id>')
def task_status(task_id):
    task = tasks.get(task_id)
    if task:
        return jsonify(task)
    return jsonify({'error': 'Task not found'}), 404
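# Note: the <filename> URL segment is informational only; send_file() serves
# the path stored on the task and names the attachment after its basename.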
@app.route('/download/<task_id>/<filename>')
def download_file(task_id, filename):
    task = tasks.get(task_id)
    if task and task['status'] == 'completed':
        return send_file(task['download_url'], as_attachment=True)
    return jsonify({'error': 'File not found or task not completed'}), 404

def process_model(task_id, repo_id, token, model_name):
    try:
        update_task(task_id, "Starting model processing...")

        unique_dir = os.path.join(TEMP_DIR, f"{task_id}_{model_name}")
        os.makedirs(unique_dir, exist_ok=True)
        update_task(task_id, f"Created unique directory: {unique_dir}")

        download_model(task_id, repo_id, token, unique_dir)
        convert_to_onnx(task_id, model_name, unique_dir)
        compressed_file = compress_files(task_id, model_name, unique_dir)

        download_url = f"/download/{task_id}/{os.path.basename(compressed_file)}"
        tasks[task_id]['status'] = 'completed'
        # Despite its name, 'download_url' stores the server-side file path
        # (served directly by the route above); the logged download_url is
        # the path clients should request.
        tasks[task_id]['download_url'] = compressed_file
        update_task(task_id, f"Processing completed. Download URL: {download_url}")

    except Exception as e:
        logger.exception("An error occurred during processing")
        tasks[task_id]['status'] = 'error'
        update_task(task_id, f"An error occurred: {str(e)}")

def update_task(task_id, message):
    logger.info(message)
    tasks[task_id]['log'].append(message)
    socketio.emit('task_update', {'task_id': task_id, 'message': message})

def download_model(task_id, repo_id, token, directory):
    update_task(task_id, f"Downloading model from repo: {repo_id}")
    api = HfApi()
    files = api.list_repo_files(repo_id=repo_id, token=token)

    ckpt_files = [f for f in files if f.endswith('.ckpt')]
    if not ckpt_files:
        raise Exception("No .ckpt files found in the repository.")
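    # Assumes Lightning-style checkpoint names such as
    # "epoch=2164-step=1355540.ckpt" and picks the highest epoch number.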
    latest_ckpt = max(ckpt_files, key=lambda f: int(f.split('-')[0].split('=')[1]))
    update_task(task_id, f"Latest checkpoint file: {latest_ckpt}")

    ckpt_path = hf_hub_download(repo_id=repo_id, filename=latest_ckpt, token=token, local_dir=directory)
    os.rename(ckpt_path, os.path.join(directory, "model.ckpt"))
    update_task(task_id, f"Downloaded and renamed checkpoint to: {os.path.join(directory, 'model.ckpt')}")

    config_path = hf_hub_download(repo_id=repo_id, filename="config.json", token=token, local_dir=directory)
    update_task(task_id, f"Downloaded config.json to: {config_path}")

def convert_to_onnx(task_id, model_name, directory):
    update_task(task_id, f"Converting model to ONNX format: {model_name}")
    ckpt_path = os.path.join(directory, "model.ckpt")
    onnx_path = os.path.join(directory, f"{model_name}.onnx")

    update_task(task_id, f"Checkpoint path: {ckpt_path}")
    update_task(task_id, f"ONNX output path: {onnx_path}")
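    # The export is run from the Piper source tree, presumably because
    # piper_train is importable only from there rather than installed as a
    # package; hence the temporary working-directory switch.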
    original_dir = os.getcwd()
    os.chdir('/root/piper/src/python')
    update_task(task_id, f"Changed working directory to: {os.getcwd()}")

    command = [
        "python3", "-m", "piper_train.export_onnx",
        ckpt_path,
        onnx_path
    ]
    update_task(task_id, f"Running command: {' '.join(command)}")

    try:
        result = subprocess.run(command, check=True, capture_output=True, text=True)
        update_task(task_id, f"Command output: {result.stdout}")
    except subprocess.CalledProcessError as e:
        update_task(task_id, f"Command failed with exit code {e.returncode}")
        update_task(task_id, f"Error output: {e.stderr}")
        raise Exception(f"ONNX conversion failed: {e.stderr}")
    finally:
        os.chdir(original_dir)
        update_task(task_id, f"Changed back to original directory: {original_dir}")

    os.rename(
        os.path.join(directory, "config.json"),
        os.path.join(directory, f"{model_name}.onnx.json")
    )
    update_task(task_id, f"Renamed config.json to {model_name}.onnx.json")
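# Only the .onnx model and its .onnx.json config are zipped: the two files a
# Piper voice needs at inference time.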
def compress_files(task_id, model_name, directory):
    update_task(task_id, f"Compressing files for model: {model_name}")
    output_file = os.path.join(TEMP_DIR, f"{model_name}_onnx.zip")
    files_to_zip = [f for f in os.listdir(directory) if f.endswith('.onnx') or f.endswith('.onnx.json')]
    with zipfile.ZipFile(output_file, 'w') as zipf:
        for file in files_to_zip:
            zipf.write(os.path.join(directory, file), file)
    update_task(task_id, f"Created compressed file: {output_file}")
    return output_file

if __name__ == '__main__':
    logger.info("Starting Flask application")
    socketio.run(app, host='0.0.0.0', port=7860, debug=True, allow_unsafe_werkzeug=True)
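For reference, a minimal client-side sketch of the flow these routes expose.
It is an illustration only: it assumes the Space is reachable at
http://localhost:7860, that the requests package is installed, and that the
repo_id and token values below are placeholders.

import time
import requests

BASE = "http://localhost:7860"

# Start a conversion task; the POST handler above returns {'task_id': ...}.
resp = requests.post(f"{BASE}/", data={
    "repo_id": "user/piper-checkpoint-repo",  # hypothetical repository
    "token": "hf_xxx",                        # your Hugging Face token
    "model_name": "my_voice",
})
task_id = resp.json()["task_id"]

# Poll /status/<task_id> until the background job finishes.
while True:
    task = requests.get(f"{BASE}/status/{task_id}").json()
    if task["status"] in ("completed", "error"):
        break
    time.sleep(5)

# On success, fetch the zip through the /download route; 'download_url'
# holds the server-side path, so only its basename is used here.
if task["status"] == "completed":
    filename = task["download_url"].rsplit("/", 1)[-1]
    zip_bytes = requests.get(f"{BASE}/download/{task_id}/{filename}").content
    with open(filename, "wb") as f:
        f.write(zip_bytes)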