import os
import sys
import argparse
from collections import deque

import numpy as np
from scipy.spatial.transform import Rotation


# Custom print function to show only progress messages
def log_progress(message):
    if message.startswith("PROGRESS:"):
        print(message)
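
# parse_obj below expects joints as 'v' records and bones as 'l' (line
# element) records. A minimal input sketch, assuming that layout:
#   v 0.0 1.0 0.0
#   v 0.0 0.5 0.0
#   l 1 2
# yields two joints connected by one bone (OBJ indices are 1-based).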

def parse_obj(filename):
    """Read joint positions ('v' records) and bone segments ('l' records) from an OBJ file."""
    vertices = []
    lines = []
    try:
        with open(filename, 'r') as f:
            for line in f:
                if line.startswith('v '):
                    parts = line.split()
                    vertices.append([float(parts[1]), float(parts[2]), float(parts[3])])
                elif line.startswith('l '):
                    parts = line.split()
                    # OBJ indices are 1-based; convert to 0-based
                    lines.append([int(parts[1]) - 1, int(parts[2]) - 1])
        return np.array(vertices), lines
    except Exception as e:
        raise ValueError(f"Error parsing OBJ file {filename}: {str(e)}")

def build_hierarchy(lines, root=0):
    """Derive parent/children arrays from bone segments via BFS from the root joint."""
    num_joints = max(max(line) for line in lines) + 1
    # Undirected adjacency list of the skeleton graph
    adj = [[] for _ in range(num_joints)]
    for a, b in lines:
        adj[a].append(b)
        adj[b].append(a)
    parent = [-1] * num_joints
    queue = deque([root])
    visited = [False] * num_joints
    visited[root] = True
    while queue:
        p = queue.popleft()
        for c in adj[p]:
            if not visited[c]:
                parent[c] = p
                queue.append(c)
                visited[c] = True
    if not all(visited):
        raise ValueError("The skeleton has disconnected components.")
    children = [[] for _ in range(num_joints)]
    for c in range(num_joints):
        if parent[c] != -1:
            children[parent[c]].append(c)
    return parent, children
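
# Worked example: lines = [[0, 1], [1, 2]] with root=0 gives
#   parent   = [-1, 0, 1]
#   children = [[1], [2], []]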

def compute_offsets(vertices_ref, parent):
    """Compute each joint's offset from its parent in the reference pose."""
    num_joints = len(vertices_ref)
    # Note: this scale is applied to bone offsets only; root translations are written unscaled
    scale_factor = 0.05
    offsets = np.zeros((num_joints, 3))
    for j in range(num_joints):
        if parent[j] != -1:
            offsets[j] = (vertices_ref[j] - vertices_ref[parent[j]]) * scale_factor
    return offsets
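
# compute_R_world estimates a joint's world-space rotation from its children.
# With one child it builds the axis-angle rotation mapping the reference bone
# direction onto the current one. With several children it solves the
# orthogonal Procrustes problem (Kabsch): stacking reference and current bone
# vectors as columns of A and B, the R = U @ Vh from the SVD of B @ A.T
# minimizes ||R @ A - B||_F over rotations, and flipping the last row of Vh
# when det(R) < 0 rules out reflections.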

def compute_R_world(joint, vertices_ref, vertices_cur, children):
    if not children[joint]:
        # Leaf joint: no child bone to infer an orientation from
        return np.eye(3)
    elif len(children[joint]) == 1:
        # Single child: axis-angle rotation aligning the reference bone with the current one
        c = children[joint][0]
        V_ref = vertices_ref[c] - vertices_ref[joint]
        V_cur = vertices_cur[c] - vertices_cur[joint]
        norm_ref = np.linalg.norm(V_ref)
        norm_cur = np.linalg.norm(V_cur)
        if norm_ref < 1e-6 or norm_cur < 1e-6:
            return np.eye(3)
        V_ref_norm = V_ref / norm_ref
        V_cur_norm = V_cur / norm_cur
        cos_theta = np.clip(np.dot(V_ref_norm, V_cur_norm), -1.0, 1.0)
        if cos_theta > 0.99999:
            return np.eye(3)
        axis = np.cross(V_ref_norm, V_cur_norm)
        axis_norm = np.linalg.norm(axis)
        if axis_norm < 1e-6:
            return np.eye(3)
        axis = axis / axis_norm
        angle = np.arccos(cos_theta)
        return Rotation.from_rotvec(axis * angle).as_matrix()
    else:
        # Multiple children: orthogonal Procrustes / Kabsch (see note above)
        A = np.column_stack([vertices_ref[c] - vertices_ref[joint] for c in children[joint]])
        B = np.column_stack([vertices_cur[c] - vertices_cur[joint] for c in children[joint]])
        M = B @ A.T
        U, _, Vh = np.linalg.svd(M)
        R = U @ Vh
        if np.linalg.det(R) < 0:
            # Flip the last singular direction to avoid a reflection
            Vh[-1, :] *= -1
            R = U @ Vh
        return R
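
# The per-frame "velocity" below is the geodesic distance between consecutive
# rotations: the magnitude of the rotation vector of q_{i-1}^{-1} * q_i, i.e.
# the angle (in radians) needed to turn frame i-1 into frame i.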

def calculate_motion_velocity(rotations):
    n_frames = len(rotations)
    if n_frames <= 1:
        return np.zeros(n_frames)
    velocities = np.zeros(n_frames)
    for i in range(1, n_frames):
        prev_quat = rotations[i - 1].as_quat()
        curr_quat = rotations[i].as_quat()
        # Keep quaternions in the same hemisphere (q and -q encode the same rotation)
        if np.dot(prev_quat, curr_quat) < 0:
            curr_quat = -curr_quat
        diff = Rotation.from_quat(prev_quat).inv() * Rotation.from_quat(curr_quat)
        velocities[i] = np.linalg.norm(diff.as_rotvec())
    velocities[0] = velocities[1]
    return velocities
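
# Adaptive smoothing: Gaussian weights exp(-sigma * d^2) with sigma growing
# from 0.5 (slow motion) to 2.0 (fast motion). A larger sigma concentrates
# weight on the center frame, so fast gestures are smoothed less while jitter
# in near-static segments is still suppressed.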

def adaptive_smooth_rotations(rotations, window_size=7, velocity_threshold=0.03):
    n_frames = len(rotations)
    if n_frames <= 1:
        return rotations
    velocities = calculate_motion_velocity(rotations)
    if np.max(velocities) > 0:
        velocities = velocities / np.max(velocities)
    smoothed = []
    half_window = window_size // 2
    for i in range(n_frames):
        start_idx = max(0, i - half_window)
        end_idx = min(n_frames - 1, i + half_window)
        window_rots = rotations[start_idx:end_idx + 1]
        # Velocity-adaptive kernel width (see note above)
        velocity_factor = min(1.0, velocities[i] / velocity_threshold)
        sigma = 0.5 + 1.5 * velocity_factor
        dist = np.linspace(-1, 1, len(window_rots))
        weights = np.exp(-sigma * np.square(dist))
        weights = weights / np.sum(weights)
        # Sign-align all quaternions in the window to the first one before averaging
        quats = [r.as_quat() for r in window_rots]
        for j in range(1, len(quats)):
            if np.dot(quats[0], quats[j]) < 0:
                quats[j] = -quats[j]
        result_quat = np.zeros(4)
        for j in range(len(quats)):
            result_quat += weights[j] * quats[j]
        # Renormalize: a weighted sum of unit quaternions is not unit length
        result_quat = result_quat / np.linalg.norm(result_quat)
        smoothed.append(Rotation.from_quat(result_quat))
    return smoothed
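
# The same velocity-adaptive window is applied to position tracks, with speed
# measured as the Euclidean distance between consecutive positions.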

def adaptive_smooth_positions(positions, window_size=7, velocity_threshold=0.03):
    n_frames = len(positions)
    if n_frames <= 1:
        return positions
    positions = np.array(positions)
    smoothed = np.zeros_like(positions)
    half_window = window_size // 2
    velocities = np.zeros(n_frames)
    for i in range(1, n_frames):
        velocities[i] = np.linalg.norm(positions[i] - positions[i - 1])
    velocities[0] = velocities[1]
    if np.max(velocities) > 0:
        velocities = velocities / np.max(velocities)
    for i in range(n_frames):
        start_idx = max(0, i - half_window)
        end_idx = min(n_frames - 1, i + half_window)
        window_pos = positions[start_idx:end_idx + 1]
        velocity_factor = min(1.0, velocities[i] / velocity_threshold)
        sigma = 0.5 + 1.5 * velocity_factor
        dist = np.linspace(-1, 1, len(window_pos))
        weights = np.exp(-sigma * np.square(dist))
        weights = weights / np.sum(weights)
        smoothed[i] = np.sum(window_pos * weights[:, np.newaxis], axis=0)
    return smoothed

def detect_arm_joints(children, num_joints):
    # Heuristic: treat every joint with exactly one child as a limb (chain) joint
    return [j for j in range(num_joints) if len(children[j]) == 1]
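
# Pipeline: read the OBJ sequence (frame 0 is the reference pose), estimate
# per-joint world rotations, convert them to parent-relative rotations,
# smooth rotations and positions adaptively, re-enforce bone lengths, apply
# optional joint-angle constraints, and write a 30 fps BVH file.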

def main(output_dir, smoothing_window=7, velocity_threshold=0.03, joint_constraint=True):
    folder = os.path.join(output_dir, 'obj_sequence')
    try:
        obj_files = sorted([f for f in os.listdir(folder) if f.endswith('.obj')])
    except Exception as e:
        sys.exit(f"Error accessing folder {folder}: {e}")
    if not obj_files:
        sys.exit("No OBJ files found.")
    # The first frame defines the reference pose and the skeleton topology
    vertices_ref, lines = parse_obj(os.path.join(folder, obj_files[0]))
    num_joints = len(vertices_ref)
    parent, children = build_hierarchy(lines)
    offsets = compute_offsets(vertices_ref, parent)
    root = 0
    # Depth-first order guarantees parents precede their children
    hierarchy_order = []

    def dfs(joint):
        hierarchy_order.append(joint)
        for child in children[joint]:
            dfs(child)

    dfs(root)
    arm_joints = detect_arm_joints(children, num_joints)
    all_root_positions = []
    all_positions = [[] for _ in range(num_joints)]
    all_rotations = [[] for _ in range(num_joints)]
    total_files = len(obj_files)
    for i in range(total_files):
        obj_file = obj_files[i]
        vertices_cur = parse_obj(os.path.join(folder, obj_file))[0]
        # World rotation per joint, then local rotation relative to the parent
        R_world = [compute_R_world(j, vertices_ref, vertices_cur, children) for j in range(num_joints)]
        R_local = [R_world[j] if parent[j] == -1 else R_world[parent[j]].T @ R_world[j] for j in range(num_joints)]
        rotations = [Rotation.from_matrix(R) for R in R_local]
        all_root_positions.append(vertices_cur[root])
        for j in range(num_joints):
            all_positions[j].append(vertices_cur[j])
            all_rotations[j].append(rotations[j])
        # First half of progress (0-50%)
        progress = ((i + 1) / total_files) * 50
        log_progress(f"PROGRESS:{progress:.2f}")

    smoothed_root_positions = adaptive_smooth_positions(all_root_positions, smoothing_window, velocity_threshold)
    smoothed_positions = [adaptive_smooth_positions(np.array(pos), smoothing_window, velocity_threshold) for pos in all_positions]
    smoothed_rotations = [adaptive_smooth_rotations(rot, smoothing_window, velocity_threshold) for rot in all_rotations]
    # Enforce bone lengths (no lengthening restraint). Joints are walked in
    # hierarchy order so every parent is corrected before its children.
    for i in range(total_files):
        # Start with the root position
        smoothed_positions[root][i] = smoothed_root_positions[i]
        # Adjust each child joint to maintain bone length
        for j in hierarchy_order[1:]:  # skips the root; parents come first
            parent_pos = smoothed_positions[parent[j]][i]
            child_pos = smoothed_positions[j][i]
            ref_offset = offsets[j]  # reference bone vector
            bone_length = np.linalg.norm(ref_offset)
            if bone_length < 1e-6:
                continue  # skip degenerate bones
            current_vec = child_pos - parent_pos
            current_length = np.linalg.norm(current_vec)
            if current_length < 1e-6:
                # Current length is near zero: fall back to the reference direction
                smoothed_positions[j][i] = parent_pos + ref_offset
            else:
                # Rescale the current vector to the reference bone length
                corrected_vec = (current_vec / current_length) * bone_length
                smoothed_positions[j][i] = parent_pos + corrected_vec

    motion_data = []
    joints_to_remove = {10, 13, 16, 6, 3}  # NOTE: currently unused
    for i in range(total_files):
        root_pos = smoothed_root_positions[i]
        if joint_constraint:
            for j in range(num_joints):
                euler = smoothed_rotations[j][i].as_euler('ZYX', degrees=True)
                if j in arm_joints:
                    # Effectively unconstrained: scipy already returns angles within [-180, 180]
                    euler = np.clip(euler, -180, 180)
                else:
                    euler = np.clip(euler, -150, 150)
                smoothed_rotations[j][i] = Rotation.from_euler('ZYX', euler, degrees=True)
        euler_angles = [smoothed_rotations[j][i].as_euler('ZYX', degrees=True) for j in range(num_joints)]
        # Root channels are translation + rotation; the rest follow in hierarchy order
        motion_line = list(root_pos) + list(euler_angles[root])
        for j in hierarchy_order[1:]:
            motion_line.extend(euler_angles[j])
        motion_data.append(motion_line)
        # Second half of progress (50-100%)
        progress = 50 + ((i + 1) / total_files) * 50
        log_progress(f"PROGRESS:{progress:.2f}")

    bvh_dir = os.path.join(output_dir, 'bvh')
    os.makedirs(bvh_dir, exist_ok=True)
    bvh_file = os.path.join(bvh_dir, 'output.bvh')
    with open(bvh_file, 'w') as f:
        f.write("HIERARCHY\n")

        def write_hierarchy(joint, parent_joint, f, indent=0):
            # ROOT for the root joint, JOINT for everything else
            if parent_joint == -1:
                f.write("ROOT Joint{}\n".format(joint))
            else:
                f.write(" " * indent + "JOINT Joint{}\n".format(joint))
            f.write(" " * indent + "{\n")
            f.write(" " * (indent + 1) + "OFFSET {:.6f} {:.6f} {:.6f}\n".format(*offsets[joint]))
            if parent_joint == -1:
                f.write(" " * (indent + 1) + "CHANNELS 6 Xposition Yposition Zposition Zrotation Yrotation Xrotation\n")
            else:
                f.write(" " * (indent + 1) + "CHANNELS 3 Zrotation Yrotation Xrotation\n")
            for child in children[joint]:
                write_hierarchy(child, joint, f, indent + 1)
            if not children[joint]:
                # Leaf joints need an End Site block for valid BVH
                f.write(" " * (indent + 1) + "End Site\n")
                f.write(" " * (indent + 1) + "{\n")
                f.write(" " * (indent + 2) + "OFFSET 0.000000 0.000000 0.000000\n")
                f.write(" " * (indent + 1) + "}\n")
            f.write(" " * indent + "}\n")

        write_hierarchy(root, -1, f)
        f.write("MOTION\n")
        f.write("Frames: {}\n".format(len(motion_data)))
        f.write("Frame Time: 0.033333\n")  # 30 frames per second
        for motion_line in motion_data:
            f.write(" ".join("{:.6f}".format(x) for x in motion_line) + "\n")

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Convert OBJ sequence to BVH with improved adaptive smoothing.')
    parser.add_argument('--output-dir', type=str, default='../outputs/', help='Output directory containing obj_sequence')
    parser.add_argument('--smoothing-window', type=int, default=7, help='Size of smoothing window')
    parser.add_argument('--velocity-threshold', type=float, default=0.03, help='Velocity threshold for adaptive smoothing')
    parser.add_argument('--disable-joint-constraints', action='store_false', dest='joint_constraint',
                        help='Disable joint constraints that prevent extreme rotations')
    args = parser.parse_args()
    main(args.output_dir, args.smoothing_window, args.velocity_threshold, args.joint_constraint)
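
# Example invocation (hypothetical file name; the script reads
# <output-dir>/obj_sequence/*.obj and writes <output-dir>/bvh/output.bvh):
#   python obj_to_bvh.py --output-dir ../outputs/ --smoothing-window 7 --velocity-threshold 0.03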