import torch


def lengths_to_mask(lengths):
    # Boolean mask of shape [len(lengths), max_len]: True where the frame
    # index is within the sequence length, False in the padded region.
    max_len = max(lengths)
    mask = torch.arange(max_len, device=lengths.device).expand(
        len(lengths), max_len) < lengths.unsqueeze(1)
    return mask

def collate_tensors(batch):
    # Zero-pad a list of tensors (all with the same number of dimensions)
    # to the maximum size along every dimension, stacked into one tensor.
    dims = batch[0].dim()
    max_size = [max([b.size(i) for b in batch]) for i in range(dims)]
    size = (len(batch),) + tuple(max_size)
    canvas = batch[0].new_zeros(size=size)
    for i, b in enumerate(batch):
        # Narrow the canvas slice to the shape of b, then copy b into it.
        sub_tensor = canvas[i]
        for d in range(dims):
            sub_tensor = sub_tensor.narrow(d, 0, b.size(d))
        sub_tensor.add_(b)
    return canvas

def collate(batch):
    # Each sample is a (data, label) pair, with data shaped
    # [njoints, nfeats, lengths].
    databatch = [b[0] for b in batch]
    labelbatch = [b[1] for b in batch]
    lenbatch = [len(b[0][0][0]) for b in batch]

    databatchTensor = collate_tensors(databatch)
    labelbatchTensor = torch.as_tensor(labelbatch)
    lenbatchTensor = torch.as_tensor(lenbatch)
    maskbatchTensor = lengths_to_mask(lenbatchTensor)
    # x       - [bs, njoints, nfeats, lengths]
    #           (nfeats is the representation of a joint)
    # y       - [bs]
    # mask    - [bs, lengths]
    # lengths - [bs]
    batch = {"x": databatchTensor, "y": labelbatchTensor,
             "mask": maskbatchTensor, "lengths": lenbatchTensor}
    return batch

# slow version with padding
def collate_data3d_slow(batch):
    batchTensor = {}
    for key in batch[0].keys():
        databatch = [b[key] for b in batch]
        batchTensor[key] = collate_tensors(databatch)
    batch = batchTensor
    # theta  - [bs, lengths, 85], one theta is the concatenation of
    #          (np.array([1., 0., 0.]), pose (72), shape (10)) along axis 0
    # kp_2d  - [bs, lengths, njoints, nfeats], nfeats = (x, y, weight)
    # kp_3d  - [bs, lengths, njoints, nfeats], nfeats = (x, y, z)
    # w_smpl - [bs, lengths] zeros
    # w_3d   - [bs, lengths] zeros
    return batch

def collate_data3d(batch):
    batchTensor = {}
    for key in batch[0].keys():
        databatch = [b[key] for b in batch]
        if key == "paths":
            # Paths are strings: keep them as a plain list.
            batchTensor[key] = databatch
        else:
            batchTensor[key] = torch.stack(databatch, dim=0)
    batch = batchTensor
    # theta  - [bs, lengths, 85], one theta is the concatenation of
    #          (np.array([1., 0., 0.]), pose (72), shape (10)) along axis 0
    # kp_2d  - [bs, lengths, njoints, nfeats], nfeats = (x, y, weight)
    # kp_3d  - [bs, lengths, njoints, nfeats], nfeats = (x, y, z)
    # w_smpl - [bs, lengths] zeros
    # w_3d   - [bs, lengths] zeros
    return batch
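

# Illustrative sketch (not part of the original module): a minimal check of
# `collate` with dummy samples. The joint/feature sizes below are assumed
# purely for demonstration; real datasets define their own shapes.
if __name__ == "__main__":
    njoints, nfeats = 25, 6  # assumed example values
    # Two (data, label) samples with different sequence lengths (40 and 60).
    samples = [(torch.randn(njoints, nfeats, 40), 0),
               (torch.randn(njoints, nfeats, 60), 1)]
    out = collate(samples)
    print(out["x"].shape)     # torch.Size([2, 25, 6, 60]), zero-padded in time
    print(out["y"])           # tensor([0, 1])
    print(out["lengths"])     # tensor([40, 60])
    print(out["mask"].shape)  # torch.Size([2, 60])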