import math

import torch
import torch.nn as nn
import torch.nn.functional as F

from flow3d.transforms import cont_6d_to_rmat


class GaussianParams(nn.Module):
    """Per-Gaussian parameters (means, quaternions, log-scales, color and opacity
    logits, and optional motion coefficients), stored as raw values and exposed
    through their activation functions via the ``get_*`` accessors."""

    def __init__(
        self,
        means: torch.Tensor,
        quats: torch.Tensor,
        scales: torch.Tensor,
        colors: torch.Tensor,
        opacities: torch.Tensor,
        motion_coefs: torch.Tensor | None = None,
        scene_center: torch.Tensor | None = None,
        scene_scale: torch.Tensor | float = 1.0,
    ):
        super().__init__()
        if not check_gaussian_sizes(
            means, quats, scales, colors, opacities, motion_coefs
        ):
            import ipdb

            ipdb.set_trace()
        params_dict = {
            "means": nn.Parameter(means),
            "quats": nn.Parameter(quats),
            "scales": nn.Parameter(scales),
            "colors": nn.Parameter(colors),
            "opacities": nn.Parameter(opacities),
        }
        if motion_coefs is not None:
            params_dict["motion_coefs"] = nn.Parameter(motion_coefs)
        self.params = nn.ParameterDict(params_dict)

        self.quat_activation = lambda x: F.normalize(x, dim=-1, p=2)
        self.color_activation = torch.sigmoid
        self.scale_activation = torch.exp
        self.opacity_activation = torch.sigmoid
        self.motion_coef_activation = lambda x: F.softmax(x, dim=-1)

        if scene_center is None:
            scene_center = torch.zeros(3, device=means.device)
        self.register_buffer("scene_center", scene_center)
        self.register_buffer("scene_scale", torch.as_tensor(scene_scale))
    @staticmethod
    def init_from_state_dict(state_dict, prefix="params."):
        """Rebuild a GaussianParams instance from a checkpoint state dict."""
        req_keys = ["means", "quats", "scales", "colors", "opacities"]
        assert all(f"{prefix}{k}" in state_dict for k in req_keys)
        args = {
            "motion_coefs": None,
            "scene_center": torch.zeros(3),
            "scene_scale": torch.tensor(1.0),
        }
        for k in req_keys + list(args.keys()):
            if f"{prefix}{k}" in state_dict:
                args[k] = state_dict[f"{prefix}{k}"]
        return GaussianParams(**args)
    @property
    def num_gaussians(self) -> int:
        return self.params["means"].shape[0]

    def get_colors(self) -> torch.Tensor:
        return self.color_activation(self.params["colors"])

    def get_scales(self) -> torch.Tensor:
        return self.scale_activation(self.params["scales"])

    def get_opacities(self) -> torch.Tensor:
        return self.opacity_activation(self.params["opacities"])

    def get_quats(self) -> torch.Tensor:
        return self.quat_activation(self.params["quats"])

    def get_coefs(self) -> torch.Tensor:
        assert "motion_coefs" in self.params
        return self.motion_coef_activation(self.params["motion_coefs"])
    def densify_params(self, should_split, should_dup):
        """
        Densify the Gaussians: duplicate those flagged in ``should_dup`` and
        replace those flagged in ``should_split`` with two copies whose scales
        are shrunk by a factor of 1.6 (in log space). Returns the updated
        parameter tensors.
        """
        updated_params = {}
        for name, x in self.params.items():
            x_dup = x[should_dup]
            x_split = x[should_split].repeat([2] + [1] * (x.ndim - 1))
            if name == "scales":
                x_split -= math.log(1.6)
            x_new = nn.Parameter(torch.cat([x[~should_split], x_dup, x_split], dim=0))
            updated_params[name] = x_new
            self.params[name] = x_new
        return updated_params

    def cull_params(self, should_cull):
        """
        Remove the Gaussians flagged in ``should_cull`` and return the updated
        parameter tensors.
        """
        updated_params = {}
        for name, x in self.params.items():
            x_new = nn.Parameter(x[~should_cull])
            updated_params[name] = x_new
            self.params[name] = x_new
        return updated_params

    def reset_opacities(self, new_val):
        """
        Reset all opacity logits (pre-sigmoid values) to ``new_val`` in place.
        """
        self.params["opacities"].data.fill_(new_val)
        updated_params = {"opacities": self.params["opacities"]}
        return updated_params
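

# Example helper (illustrative sketch; the function name and default sizes are
# assumptions for demonstration only): construct a randomly initialized
# GaussianParams with the tensor shapes expected by __init__ above.
def _example_init_gaussians(num_gaussians: int = 100, num_bases: int = 8) -> GaussianParams:
    means = torch.randn(num_gaussians, 3)  # (G, 3) positions
    quats = torch.randn(num_gaussians, 4)  # (G, 4) raw quaternions, normalized by get_quats()
    scales = torch.zeros(num_gaussians, 3)  # (G, 3) log-scales, exponentiated by get_scales()
    colors = torch.rand(num_gaussians, 3)  # (G, 3) color logits, sigmoided by get_colors()
    opacities = torch.zeros(num_gaussians)  # (G,) opacity logits, sigmoided by get_opacities()
    motion_coefs = torch.zeros(num_gaussians, num_bases)  # (G, K) softmaxed by get_coefs()
    return GaussianParams(means, quats, scales, colors, opacities, motion_coefs)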


class MotionBases(nn.Module):
    def __init__(self, rots, transls):
        super().__init__()
        self.num_frames = rots.shape[1]
        self.num_bases = rots.shape[0]
        assert check_bases_sizes(rots, transls)
        self.params = nn.ParameterDict(
            {
                "rots": nn.Parameter(rots),
                "transls": nn.Parameter(transls),
            }
        )

    @staticmethod
    def init_from_state_dict(state_dict, prefix="params."):
        param_keys = ["rots", "transls"]
        assert all(f"{prefix}{k}" in state_dict for k in param_keys)
        args = {k: state_dict[f"{prefix}{k}"] for k in param_keys}
        return MotionBases(**args)

    def compute_transforms(self, ts: torch.Tensor, coefs: torch.Tensor) -> torch.Tensor:
        """
        :param ts (B) frame indices to query
        :param coefs (G, K) per-Gaussian motion-basis coefficients
        returns transforms (G, B, 3, 4)
        """
        transls = self.params["transls"][:, ts]  # (K, B, 3)
        rots = self.params["rots"][:, ts]  # (K, B, 6)
        transls = torch.einsum("pk,kni->pni", coefs, transls)  # (G, B, 3)
        rots = torch.einsum("pk,kni->pni", coefs, rots)  # (G, B, 6)
        rotmats = cont_6d_to_rmat(rots)  # (G, B, 3, 3)
        return torch.cat([rotmats, transls[..., None]], dim=-1)
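

# Example helper (illustrative sketch; the function name and default sizes are
# assumptions for demonstration only): build a MotionBases instance whose bases
# start at the identity transform. The 6D rotation representation of the
# identity is the first two columns of the 3x3 identity matrix, i.e.
# (1, 0, 0, 0, 1, 0).
def _example_init_motion_bases(num_bases: int = 8, num_frames: int = 50) -> MotionBases:
    identity_6d = torch.tensor([1.0, 0.0, 0.0, 0.0, 1.0, 0.0])
    rots = identity_6d.repeat(num_bases, num_frames, 1)  # (K, T, 6)
    transls = torch.zeros(num_bases, num_frames, 3)  # (K, T, 3)
    return MotionBases(rots, transls)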


def check_gaussian_sizes(
    means: torch.Tensor,
    quats: torch.Tensor,
    scales: torch.Tensor,
    colors: torch.Tensor,
    opacities: torch.Tensor,
    motion_coefs: torch.Tensor | None = None,
) -> bool:
    """Check that all per-Gaussian tensors share the same leading dimensions and
    have the expected trailing channel sizes."""
    dims = means.shape[:-1]
    leading_dims_match = (
        quats.shape[:-1] == dims
        and scales.shape[:-1] == dims
        and colors.shape[:-1] == dims
        and opacities.shape == dims
    )
    if motion_coefs is not None and motion_coefs.numel() > 0:
        leading_dims_match &= motion_coefs.shape[:-1] == dims
    dims_correct = (
        means.shape[-1] == 3
        and (quats.shape[-1] == 4)
        and (scales.shape[-1] == 3)
        and (colors.shape[-1] == 3)
    )
    return leading_dims_match and dims_correct


def check_bases_sizes(motion_rots: torch.Tensor, motion_transls: torch.Tensor) -> bool:
    """Check that motion bases use 6D rotations, 3D translations, and matching
    leading (basis) dimensions."""
    return (
        motion_rots.shape[-1] == 6
        and motion_transls.shape[-1] == 3
        and motion_rots.shape[:-2] == motion_transls.shape[:-2]
    )
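

# Illustrative smoke test (a sketch; the sizes and entry point are assumptions
# for demonstration only): exercise the example constructors above and check the
# shape of the per-Gaussian transforms returned by compute_transforms.
if __name__ == "__main__":
    fg = _example_init_gaussians(num_gaussians=100, num_bases=8)
    bases = _example_init_motion_bases(num_bases=8, num_frames=50)
    ts = torch.arange(4)  # query 4 frames
    transforms = bases.compute_transforms(ts, fg.get_coefs())  # (G, B, 3, 4)
    assert transforms.shape == (fg.num_gaussians, ts.shape[0], 3, 4)
    print("transforms:", tuple(transforms.shape))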