import os
import time
import logging
import uuid
import zipfile
import subprocess

from flask import Flask, request, render_template, send_file, jsonify
from flask_socketio import SocketIO
from huggingface_hub import HfApi, hf_hub_download
from flask_apscheduler import APScheduler

# Set up logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app)

scheduler = APScheduler()
scheduler.init_app(app)
scheduler.start()

# Directory to store temporary files
TEMP_DIR = '/tmp/piper_onnx'
os.makedirs(TEMP_DIR, exist_ok=True)

# Dictionary to store task information, keyed by task ID
tasks = {}


def cleanup_old_files():
    """Remove files in TEMP_DIR that are older than 30 minutes."""
    current_time = time.time()
    for filename in os.listdir(TEMP_DIR):
        file_path = os.path.join(TEMP_DIR, filename)
        if os.path.isfile(file_path):
            if current_time - os.path.getmtime(file_path) > 30 * 60:  # 30 minutes
                os.remove(file_path)


scheduler.add_job(id='cleanup_job', func=cleanup_old_files, trigger="interval", minutes=5)


@app.route('/', methods=['GET', 'POST'])
def index():
    if request.method == 'POST':
        repo_id = request.form['repo_id']
        token = request.form['token']
        model_name = request.form['model_name']

        task_id = str(uuid.uuid4())
        tasks[task_id] = {
            'status': 'processing',
            'log': [],
            'download_url': None
        }

        # With no trigger, APScheduler runs the job once, immediately,
        # so the conversion happens in the background.
        scheduler.add_job(
            func=process_model,
            args=[task_id, repo_id, token, model_name],
            id=task_id
        )

        return jsonify({'task_id': task_id})
    return render_template('index.html')


@app.route('/status/<task_id>')
def task_status(task_id):
    task = tasks.get(task_id)
    if task:
        return jsonify(task)
    return jsonify({'error': 'Task not found'}), 404


@app.route('/download/<task_id>/<filename>')
def download_file(task_id, filename):
    task = tasks.get(task_id)
    if task and task['status'] == 'completed':
        return send_file(task['file_path'], as_attachment=True)
    return jsonify({'error': 'File not found or task not completed'}), 404


def process_model(task_id, repo_id, token, model_name):
    try:
        update_task(task_id, "Starting model processing...")
        unique_dir = os.path.join(TEMP_DIR, f"{task_id}_{model_name}")
        os.makedirs(unique_dir, exist_ok=True)
        update_task(task_id, f"Created unique directory: {unique_dir}")

        download_model(task_id, repo_id, token, unique_dir)
        convert_to_onnx(task_id, model_name, unique_dir)
        compressed_file = compress_files(task_id, model_name, unique_dir)

        download_url = f"/download/{task_id}/{os.path.basename(compressed_file)}"
        tasks[task_id]['status'] = 'completed'
        tasks[task_id]['download_url'] = download_url
        tasks[task_id]['file_path'] = compressed_file  # local path used by send_file
        update_task(task_id, f"Processing completed. Download URL: {download_url}")
Download URL: {download_url}") except Exception as e: logger.exception("An error occurred during processing") tasks[task_id]['status'] = 'error' update_task(task_id, f"An error occurred: {str(e)}") def update_task(task_id, message): logger.info(message) tasks[task_id]['log'].append(message) socketio.emit('task_update', {'task_id': task_id, 'message': message}) def download_model(task_id, repo_id, token, directory): update_task(task_id, f"Downloading model from repo: {repo_id}") api = HfApi() files = api.list_repo_files(repo_id=repo_id, token=token) ckpt_files = [f for f in files if f.endswith('.ckpt')] if not ckpt_files: raise Exception("No .ckpt files found in the repository.") latest_ckpt = max(ckpt_files, key=lambda f: int(f.split('-')[0].split('=')[1])) update_task(task_id, f"Latest checkpoint file: {latest_ckpt}") ckpt_path = hf_hub_download(repo_id=repo_id, filename=latest_ckpt, token=token, local_dir=directory) os.rename(ckpt_path, os.path.join(directory, "model.ckpt")) update_task(task_id, f"Downloaded and renamed checkpoint to: {os.path.join(directory, 'model.ckpt')}") config_path = hf_hub_download(repo_id=repo_id, filename="config.json", token=token, local_dir=directory) update_task(task_id, f"Downloaded config.json to: {config_path}") def convert_to_onnx(task_id, model_name, directory): update_task(task_id, f"Converting model to ONNX format: {model_name}") ckpt_path = os.path.join(directory, "model.ckpt") onnx_path = os.path.join(directory, f"{model_name}.onnx") update_task(task_id, f"Checkpoint path: {ckpt_path}") update_task(task_id, f"ONNX output path: {onnx_path}") original_dir = os.getcwd() os.chdir('/home/app/piper/src/python') update_task(task_id, f"Changed working directory to: {os.getcwd()}") command = [ "python3", "-m", "piper_train.export_onnx", ckpt_path, onnx_path ] update_task(task_id, f"Running command: {' '.join(command)}") try: result = subprocess.run(command, check=True, capture_output=True, text=True) update_task(task_id, f"Command output: {result.stdout}") except subprocess.CalledProcessError as e: update_task(task_id, f"Command failed with exit code {e.returncode}") update_task(task_id, f"Error output: {e.stderr}") raise Exception(f"ONNX conversion failed: {e.stderr}") finally: os.chdir(original_dir) update_task(task_id, f"Changed back to original directory: {original_dir}") os.rename( os.path.join(directory, "config.json"), os.path.join(directory, f"{model_name}.onnx.json") ) update_task(task_id, f"Renamed config.json to {model_name}.onnx.json") def compress_files(task_id, model_name, directory): update_task(task_id, f"Compressing files for model: {model_name}") output_file = os.path.join(TEMP_DIR, f"{model_name}_onnx.zip") files_to_zip = [f for f in os.listdir(directory) if f.endswith('.onnx') or f.endswith('.onnx.json')] with zipfile.ZipFile(output_file, 'w') as zipf: for file in files_to_zip: zipf.write(os.path.join(directory, file), file) update_task(task_id, f"Created compressed file: {output_file}") return output_file if __name__ == '__main__': logger.info("Starting Flask application") socketio.run(app, host='0.0.0.0', port=7860, debug=True, allow_unsafe_werkzeug=True)