# Streamlit app: converts an uploaded video into a BVH motion-capture file
# via a three-stage pipeline (2D/3D pose estimation -> OBJ -> BVH).
import streamlit as st | |
import subprocess | |
import sys | |
import os | |
import time | |
import shutil | |
import tempfile | |
import threading | |
import queue | |
from datetime import datetime | |
from pathlib import Path | |
def run_command(command, working_dir, progress_bar, progress_text, step_start_progress, step_weight, show_progress=True):
    """Run one pipeline script as a subprocess and stream its progress to the UI.

    The child process is expected to print lines of the form ``PROGRESS:<pct>%``
    on stdout.  For ``gen_skes.py`` the reported percentage runs 0-200 (two
    sub-phases: 2D keypoints then 3D pose); for any other script it runs 0-100.
    Either way it is mapped into the slice
    ``[step_start_progress, step_start_progress + step_weight]`` of the overall bar.

    Args:
        command: argv list; ``command[1]`` must be the script path (its file
            name is used in error messages and to pick the progress mapping).
        working_dir: working directory for the subprocess.
        progress_bar: Streamlit progress widget (exposes ``.progress(float)``).
        progress_text: Streamlit placeholder (exposes ``.text(str)``).
        step_start_progress: overall fraction completed before this step.
        step_weight: fraction of the overall bar this step contributes.
        show_progress: when False, PROGRESS lines are ignored and the bar is
            not advanced for this step.

    Returns:
        True if the process exited with code 0, False otherwise (failure
        details are reported via ``st.error``).
    """
    try:
        env = os.environ.copy()
        env["PYTHONUNBUFFERED"] = "1"  # make the child flush PROGRESS lines promptly
        process = subprocess.Popen(
            command,
            cwd=working_dir,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            bufsize=1,  # line-buffered so progress lines arrive one at a time
            universal_newlines=True,
            env=env
        )
        stdout_queue = queue.Queue()
        stderr_queue = queue.Queue()

        def read_output(pipe, q, source):
            # Reader thread: forward each line, tagged with its stream name.
            for line in iter(pipe.readline, ''):
                q.put((source, line))
            pipe.close()

        stdout_thread = threading.Thread(target=read_output, args=(process.stdout, stdout_queue, 'stdout'))
        stderr_thread = threading.Thread(target=read_output, args=(process.stderr, stderr_queue, 'stderr'))
        stdout_thread.daemon = True
        stderr_thread.daemon = True
        stdout_thread.start()
        stderr_thread.start()

        total_progress = step_start_progress
        stderr_lines = []

        def handle(source, line):
            # Process one line from either stream, updating progress state.
            nonlocal total_progress
            if not line:
                return
            if source == 'stdout':
                if show_progress and line.startswith("PROGRESS:"):
                    try:
                        progress_str = line.strip().split("PROGRESS:")[1].replace("%", "")  # Remove '%'
                        progress = float(progress_str)
                        if Path(command[1]).name == 'gen_skes.py':
                            if progress <= 100.0:
                                # 2D keypoint phase: child's 0-100% fills the first 60% of this step.
                                adjusted_progress = step_start_progress + (progress / 100.0 * 0.6)
                            else:
                                # 3D pose phase: child's 100-200% fills the next 20%.
                                adjusted_progress = step_start_progress + 0.6 + ((progress - 100.0) / 100.0 * 0.2)
                            total_progress = min(adjusted_progress, step_start_progress + step_weight)
                        else:
                            # Plain 0-100% scripts (e.g. conver_bvh.py) map linearly onto this step's weight.
                            adjusted_progress = step_start_progress + (progress / 100.0 * step_weight)
                            total_progress = min(adjusted_progress, step_start_progress + step_weight)
                        progress_bar.progress(total_progress)
                        progress_text.text(f"Progress: {int(total_progress * 100)}%")
                    except ValueError as e:
                        print(f"DEBUG: Error parsing progress: {e}")
            elif source == 'stderr':
                stderr_lines.append(line.strip())

        # Drain both queues until the process has exited and nothing is pending.
        while process.poll() is None or not (stdout_queue.empty() and stderr_queue.empty()):
            got_line = False
            for q in (stdout_queue, stderr_queue):
                try:
                    source, line = q.get_nowait()
                except queue.Empty:
                    continue
                got_line = True
                handle(source, line)
            if not got_line:
                # BUGFIX: the original only slept on a rarely-raised queue.Empty
                # (its next(...) call returned a (None, None) default instead),
                # busy-spinning a CPU core while the child produced no output.
                time.sleep(0.01)

        stdout_thread.join()
        stderr_thread.join()
        # BUGFIX: the reader threads may enqueue lines after the loop above
        # observes process exit during a momentarily-empty queue; drain any
        # leftovers so no progress/stderr lines are lost.
        for q in (stdout_queue, stderr_queue):
            while not q.empty():
                source, line = q.get_nowait()
                handle(source, line)

        if process.returncode != 0:
            stderr_output = '\n'.join(stderr_lines)
            st.error(f"Error in {Path(command[1]).name}:\n{stderr_output}")
            return False
        if show_progress:
            # Snap to the step's full weight so rounding never leaves the bar short.
            progress_bar.progress(step_start_progress + step_weight)
            progress_text.text(f"Progress: {int((step_start_progress + step_weight) * 100)}%")
        return True
    except Exception as e:
        st.error(f"Exception in {Path(command[1]).name}: {str(e)}")
        return False
def cleanup_output_folder(output_dir, delay=1800):
    """Sleep for ``delay`` seconds (default 30 min), then delete ``output_dir``.

    Intended to run on a daemon thread so generated results are reaped
    even if the user never triggers an explicit cleanup.
    """
    time.sleep(delay)
    if not os.path.exists(output_dir):
        return
    shutil.rmtree(output_dir, ignore_errors=True)
    print(f"Deleted temporary output folder after timeout: {output_dir}")
def process_video(video_file):
    """Run the three-stage video -> BVH pipeline on an uploaded video.

    Stages (with overall-progress weights): gen_skes.py 0.8 (video -> NPZ
    keypoints), conver_obj.py 0.0 (NPZ -> OBJ, no visible progress), and
    conver_bvh.py 0.2 (OBJ -> BVH).

    Args:
        video_file: file-like object with ``.read()`` (Streamlit upload).

    Returns:
        dict with 'bvh_file' (Path to output.bvh) and 'output_dir' (Path to
        the per-run folder) on success, or None on any failure (errors are
        shown via ``st.error``).  On success a daemon thread deletes
        output_dir after 30 minutes.
    """
    base_dir = Path(__file__).parent.resolve()
    gen_skes_path = base_dir / "VideoToNPZ" / "gen_skes.py"
    convert_obj_path = base_dir / "convertNPZtoBVH" / "conver_obj.py"
    convert_bvh_path = base_dir / "convertNPZtoBVH" / "conver_bvh.py"
    # Fail fast if any pipeline script is missing from the deployment.
    for script_path in [gen_skes_path, convert_obj_path, convert_bvh_path]:
        if not script_path.exists():
            st.error(f"Required script not found: {script_path}")
            return None

    def _cleanup_failed_run(run_dir, default_dir):
        # Remove both the scripts' fixed dump folder and this run's
        # timestamped folder.  BUGFIX: the original only removed the
        # default "outputs" folder, leaking an outputs_<timestamp>
        # directory on every failure path.
        if default_dir.exists():
            shutil.rmtree(default_dir, ignore_errors=True)
        if run_dir.exists():
            shutil.rmtree(run_dir, ignore_errors=True)

    with tempfile.TemporaryDirectory() as tmp_dir:
        # Persist the upload to disk where the pipeline scripts can read it.
        video_path = Path(tmp_dir) / "input_video.mp4"
        with open(video_path, "wb") as f:
            f.write(video_file.read())
        if not video_path.exists():
            st.error(f"Video file not found at: {video_path}")
            return None
        # Per-run output folder so concurrent sessions don't collide.
        timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
        output_dir = base_dir / f"outputs_{timestamp}"
        output_dir.mkdir(exist_ok=True)
        if not os.access(output_dir, os.W_OK):
            st.error(f"Cannot write to output directory: {output_dir}")
            return None
        # gen_skes.py writes into this fixed folder; its results are moved
        # into output_dir after step 0 completes.
        default_output_dir = base_dir / "outputs"
        pipeline_steps = [
            {"command": [sys.executable, str(gen_skes_path), "-v", str(video_path)], "working_dir": gen_skes_path.parent, "weight": 0.8, "show_progress": True},
            {"command": [sys.executable, str(convert_obj_path), "--output-dir", str(output_dir)], "working_dir": convert_obj_path.parent, "weight": 0.0, "show_progress": False},
            {"command": [sys.executable, str(convert_bvh_path), "--output-dir", str(output_dir)], "working_dir": convert_bvh_path.parent, "weight": 0.2, "show_progress": True}
        ]
        progress_bar = st.progress(0.0)
        progress_text = st.empty()
        total_progress = 0.0
        for i, step in enumerate(pipeline_steps):
            success = run_command(
                step["command"],
                step["working_dir"],
                progress_bar,
                progress_text,
                total_progress,
                step["weight"],
                show_progress=step["show_progress"]
            )
            if not success:
                st.error(f"Failed at step: {' '.join(map(str, step['command']))}")
                _cleanup_failed_run(output_dir, default_output_dir)
                return None
            if i == 0 and default_output_dir.exists():
                # Relocate gen_skes.py's NPZ output into this run's folder.
                npz_dir = default_output_dir / "npz"
                if npz_dir.exists():
                    target_npz_dir = output_dir / "npz"
                    shutil.move(str(npz_dir), str(target_npz_dir))
                if default_output_dir.exists():
                    shutil.rmtree(default_output_dir, ignore_errors=True)
            total_progress += step["weight"]
            if step["show_progress"]:
                progress_bar.progress(min(total_progress, 1.0))
                progress_text.text(f"Progress: {int(total_progress * 100)}%")
        bvh_output_dir = output_dir / "bvh"
        bvh_file = bvh_output_dir / "output.bvh"
        if bvh_file.exists():
            # Reap the results automatically after 30 minutes (default delay).
            cleanup_thread = threading.Thread(target=cleanup_output_folder, args=(output_dir,))
            cleanup_thread.daemon = True
            cleanup_thread.start()
            return {
                'bvh_file': bvh_file,
                'output_dir': output_dir
            }
        else:
            st.error(f"Failed to generate BVH file at: {bvh_file}")
            _cleanup_failed_run(output_dir, default_output_dir)
            return None
def cleanup_immediate(output_dir):
    """Delete the run's output folder right away and report the outcome in the UI.

    Used as the download button's on_click callback so results are removed
    as soon as the user has fetched the BVH file.
    """
    folder_present = bool(output_dir) and os.path.exists(output_dir)
    if not folder_present:
        st.warning("No output folder to clean up.")
        return
    shutil.rmtree(output_dir, ignore_errors=True)
    st.success("Output folder cleaned up successfully.")
def main():
    """Render the Streamlit UI: upload, preview, conversion controls, pipeline info, footer."""
    # Page chrome must be configured before any other st.* call.
    st.set_page_config(
        page_title="Motion Capture Studio | Video to BVH Converter",
        page_icon="π¬",
        layout="wide",
        initial_sidebar_state="collapsed"
    )
    # Global dark-theme CSS injected once; styles every widget class used below.
    st.markdown("""
        <style>
        :root {
            --bg-color: #1a1a1a;
            --card-bg: #252525;
            --primary-color: #bb86fc;
            --secondary-color: #03dac6;
            --error-color: #cf6679;
            --text-color: #e0e0e0;
            --text-secondary: #a0a0a0;
            --border-color: #404040;
            --shadow-color: rgba(0, 0, 0, 0.5);
        }
        .stApp {
            background-color: var(--bg-color);
            font-family: 'Arial', sans-serif;
        }
        h1, h2, h3, h4, h5, h6, p, li, div {
            color: var(--text-color) !important;
        }
        .card {
            background-color: var(--card-bg);
            border-radius: 20px;
            padding: 2rem;
            margin: 1rem auto;
            border: 1px solid var(--border-color);
            box-shadow: 0 8px 30px var(--shadow-color);
            max-width: 1200px;
        }
        .main-title {
            font-size: 3.5rem;
            font-weight: 900;
            background: linear-gradient(135deg, var(--primary-color), var(--secondary-color));
            -webkit-background-clip: text;
            -webkit-text-fill-color: transparent;
            text-align: center;
            margin: 1.5rem 0 0.5rem;
            text-shadow: 0 2px 10px rgba(187, 134, 252, 0.3);
        }
        .subtitle {
            font-size: 1.3rem;
            color: var(--text-secondary);
            text-align: center;
            margin-bottom: 2.5rem;
            font-weight: 300;
            letter-spacing: 0.5px;
        }
        .section-title {
            font-size: 1.5rem;
            font-weight: 700;
            color: var(--primary-color) !important;
            margin-bottom: 1.2rem;
            text-transform: uppercase;
            letter-spacing: 1px;
        }
        .stButton > button {
            background: linear-gradient(135deg, var(--primary-color), #9b59f5);
            color: #fff !important;
            border-radius: 12px;
            padding: 0.8rem 2.5rem;
            font-weight: 600;
            font-size: 1.2rem;
            border: none;
            transition: all 0.3s ease;
            width: 100%;
            box-shadow: 0 4px 15px rgba(187, 134, 252, 0.3);
        }
        .stButton > button:hover {
            transform: translateY(-3px);
            box-shadow: 0 6px 20px rgba(187, 134, 252, 0.5);
            background: linear-gradient(135deg, #9b59f5, var(--primary-color));
        }
        .stDownloadButton > button {
            background: linear-gradient(135deg, var(--secondary-color), #02b3a3);
            color: #fff !important;
            border-radius: 12px;
            padding: 0.8rem 2.5rem;
            font-weight: 600;
            font-size: 1.2rem;
            border: none;
            transition: all 0.3s ease;
            width: 100%;
            box-shadow: 0 4px 15px rgba(3, 218, 198, 0.3);
        }
        .stDownloadButton > button:hover {
            transform: translateY(-3px);
            box-shadow: 0 6px 20px rgba(3, 218, 198, 0.5);
            background: linear-gradient(135deg, #02b3a3, var(--secondary-color));
        }
        .upload-container {
            border: 2px dashed var(--border-color);
            border-radius: 15px;
            padding: 2.5rem;
            text-align: center;
            background-color: rgba(255, 255, 255, 0.05);
            transition: all 0.3s ease;
        }
        .upload-container:hover {
            border-color: var(--primary-color);
            box-shadow: 0 0 20px rgba(187, 134, 252, 0.2);
        }
        .video-container {
            border-radius: 15px;
            overflow: hidden;
            box-shadow: 0 6px 25px var(--shadow-color);
            margin: 1rem 0;
            border: 1px solid var(--border-color);
            background-color: #000;
        }
        .status-indicator {
            padding: 1rem;
            border-radius: 12px;
            margin: 0.8rem 0;
            display: flex;
            align-items: center;
            gap: 0.5rem;
            font-size: 1.1rem;
        }
        .status-indicator.info {
            background-color: rgba(187, 134, 252, 0.1);
            border-left: 5px solid var(--primary-color);
        }
        .status-indicator.success {
            background-color: rgba(3, 218, 198, 0.1);
            border-left: 5px solid var(--secondary-color);
        }
        .control-section {
            text-align: center;
            margin: 2rem 0;
            padding: 1rem;
            background-color: rgba(255, 255, 255, 0.03);
            border-radius: 12px;
            border: 1px solid var(--border-color);
        }
        .stProgress {
            width: 100%;
            height: 12px;
            border-radius: 6px;
            background-color: #333;
            margin: 1rem 0;
        }
        .stProgress > div > div {
            background: linear-gradient(135deg, var(--primary-color), var(--secondary-color));
            border-radius: 6px;
        }
        .progress-text {
            font-weight: 600;
            padding: 0.5rem 1rem;
            background: linear-gradient(135deg, var(--primary-color), var(--secondary-color));
            color: #fff !important;
            border-radius: 20px;
            box-shadow: 0 2px 10px rgba(187, 134, 252, 0.3);
            display: inline-block;
        }
        .separator {
            height: 2px;
            background: linear-gradient(90deg, transparent, var(--border-color), transparent);
            margin: 2.5rem 0;
        }
        .footer {
            text-align: center;
            padding: 2.5rem 0 1.5rem;
            font-size: 0.95rem;
            color: var(--text-secondary);
            border-top: 1px solid var(--border-color);
            letter-spacing: 0.5px;
        }
        .pipeline-step {
            display: flex;
            align-items: center;
            background-color: rgba(255, 255, 255, 0.05);
            padding: 1rem;
            border-radius: 12px;
            margin-bottom: 1rem;
            border: 1px solid var(--border-color);
            transition: all 0.3s ease;
        }
        .pipeline-step:hover {
            background-color: rgba(255, 255, 255, 0.08);
            border-color: var(--primary-color);
            box-shadow: 0 4px 15px rgba(187, 134, 252, 0.1);
        }
        .step-number {
            background: linear-gradient(135deg, var(--primary-color), var(--secondary-color));
            color: #fff;
            border-radius: 50%;
            width: 40px;
            height: 40px;
            display: flex;
            align-items: center;
            justify-content: center;
            margin-right: 1.2rem;
            font-weight: 600;
            font-size: 1.2rem;
            box-shadow: 0 2px 10px rgba(187, 134, 252, 0.3);
        }
        .step-title {
            font-weight: 600;
            font-size: 1.1rem;
            margin-bottom: 0.3rem;
        }
        .step-description {
            color: var(--text-secondary);
            font-size: 0.95rem;
        }
        @keyframes pulse {
            0% { transform: scale(1); }
            50% { transform: scale(1.03); }
            100% { transform: scale(1); }
        }
        .animate-pulse {
            animation: pulse 2s infinite;
        }
        </style>
    """, unsafe_allow_html=True)
    # Header
    st.markdown('<h1 class="main-title animate-pulse">Motion Capture Studio</h1>', unsafe_allow_html=True)
    st.markdown('<p class="subtitle">Transform your videos into professional BVH motion files with cutting-edge AI</p>', unsafe_allow_html=True)
    # Main content card: two-column layout (upload | preview).
    st.markdown('<div class="card">', unsafe_allow_html=True)
    col1, col2 = st.columns([1, 1], gap="medium")
    with col1:
        st.markdown('<h3 class="section-title">Upload Your Video</h3>', unsafe_allow_html=True)
        st.markdown('<div class="upload-container">', unsafe_allow_html=True)
        uploaded_file = st.file_uploader(
            "Drop your video here or click to browse",
            type=['mp4', 'avi', 'mov'],
            help="For best results, use clear full-body motion videos with good lighting",
            key="file_uploader"
        )
        if uploaded_file:
            # Persist across reruns so the preview/convert sections survive widget refreshes.
            st.session_state['uploaded_file'] = uploaded_file
        st.markdown('</div>', unsafe_allow_html=True)
        if not st.session_state.get('uploaded_file'):
            st.markdown("""
                <div class="status-indicator info">
                    π‘ <strong>Pro Tip:</strong> Use MP4, AVI, or MOV files with clear, well-lit full-body motion
                </div>
            """, unsafe_allow_html=True)
    with col2:
        st.markdown('<h3 class="section-title">Video Preview</h3>', unsafe_allow_html=True)
        # Walrus: rebind uploaded_file from session state (covers reruns where the uploader returned None).
        if uploaded_file := st.session_state.get('uploaded_file', None):
            st.markdown('<div class="video-container">', unsafe_allow_html=True)
            st.video(uploaded_file)
            st.markdown('</div>', unsafe_allow_html=True)
        else:
            st.markdown("""
                <div style="height: 250px; display: flex; align-items: center; justify-content: center;
                    border: 2px dashed var(--border-color); border-radius: 15px; background-color: rgba(255, 255, 255, 0.05);">
                    <span style="color: var(--text-secondary); font-size: 1.2rem;">Your video preview will appear here</span>
                </div>
            """, unsafe_allow_html=True)
    # Control Section (Button and Progress Bar)
    if st.session_state.get('uploaded_file'):
        st.markdown('<div class="control-section">', unsafe_allow_html=True)
        if st.button("β‘ Start Motion Capture", key="convert_btn"):
            with st.spinner("Processing your video..."):
                progress_bar = st.progress(0.0)
                progress_text = st.empty()
                st.markdown('<div class="status-indicator info">π Analyzing motion patterns...</div>', unsafe_allow_html=True)
                # Runs the full pipeline; returns None on failure (errors already shown).
                result = process_video(st.session_state['uploaded_file'])
                if result:
                    st.markdown('<div class="status-indicator success">β Motion capture complete!</div>', unsafe_allow_html=True)
                    with open(result['bvh_file'], "rb") as f:
                        # on_click deletes the output folder as soon as the download starts.
                        st.download_button(
                            label="π₯ Download BVH File",
                            data=f,
                            file_name="motion_capture.bvh",
                            mime="application/octet-stream",
                            on_click=cleanup_immediate,
                            args=(result['output_dir'],),
                            key="download_btn"
                        )
        st.markdown('</div>', unsafe_allow_html=True)
    # Pipeline Info
    st.markdown('<div class="separator"></div>', unsafe_allow_html=True)
    st.markdown('<h3 class="section-title">Processing Pipeline</h3>', unsafe_allow_html=True)
    st.markdown("""
        <div class="pipeline-step">
            <div class="step-number">1</div>
            <div>
                <div class="step-title">Pose Estimation</div>
                <div class="step-description">AI detects and tracks human movements frame-by-frame</div>
            </div>
        </div>
        <div class="pipeline-step">
            <div class="step-number">2</div>
            <div>
                <div class="step-title">3D Conversion</div>
                <div class="step-description">Converts 2D poses into 3D spatial data</div>
            </div>
        </div>
        <div class="pipeline-step">
            <div class="step-number">3</div>
            <div>
                <div class="step-title">BVH Generation</div>
                <div class="step-description">Formats motion data into industry-standard BVH files</div>
            </div>
        </div>
    """, unsafe_allow_html=True)
    st.markdown('</div>', unsafe_allow_html=True)
    # Footer
    st.markdown("""
        <div class="footer">
            Β© 2025 Motion Capture Studio | Powered by Streamlit & Advanced AI Technology
        </div>
    """, unsafe_allow_html=True)
# Entry point when executed directly (e.g. via `streamlit run`).
if __name__ == "__main__":
    main()