|
| 1 | +from typing import Dict, List, Tuple, Union |
| 2 | + |
| 3 | +import numpy as np |
| 4 | +import torch |
| 5 | +from pose_format.torch.masked import MaskedTensor, MaskedTorch |
| 6 | + |
| 7 | + |
def pad_tensors(batch: List[Union[torch.Tensor, MaskedTensor]], pad_value=0):
    """Pad each tensor in *batch* along dim 0 to a common length, then stack.

    All tensors must share trailing dimensions; only dim 0 may differ.
    Shorter tensors are extended with ``pad_value`` entries, and the result
    is stacked along a new leading batch dimension.

    Args:
        batch: tensors (plain or masked) to collate. Assumes non-empty —
            TODO(review): confirm callers never pass an empty list.
        pad_value: fill value for the padding entries (default 0).

    Returns:
        A stacked tensor of shape ``(len(batch), max_len, *trailing_dims)``,
        of the same kind (masked or plain) as the inputs.
    """
    first = batch[0]
    # MaskedTorch mirrors the torch API for MaskedTensor inputs.
    stack_lib = MaskedTorch if isinstance(first, MaskedTensor) else torch

    longest = max(len(t) for t in batch)
    if longest == 1:
        # Every tensor already has length 1 along dim 0 — nothing to pad.
        return stack_lib.stack(batch, dim=0)

    padded = []
    for tensor in batch:
        deficit = longest - tensor.shape[0]
        if deficit > 0:
            pad_shape = [deficit, *tensor.shape[1:]]
            filler = torch.full(pad_shape, fill_value=pad_value, dtype=tensor.dtype, device=tensor.device)
            tensor = stack_lib.cat([tensor, filler], dim=0)
        padded.append(tensor)

    return stack_lib.stack(padded, dim=0)
| 28 | + |
| 29 | + |
| 30 | +def collate_tensors(batch: List, pad_value=0) -> Union[torch.Tensor, List]: |
| 31 | + datum = batch[0] |
| 32 | + |
| 33 | + if isinstance(datum, dict): # Recurse over dictionaries |
| 34 | + return zero_pad_collator(batch) |
| 35 | + |
| 36 | + if isinstance(datum, (int, np.int32)): |
| 37 | + return torch.tensor(batch, dtype=torch.long) |
| 38 | + |
| 39 | + if isinstance(datum, (MaskedTensor, torch.Tensor)): |
| 40 | + return pad_tensors(batch, pad_value=pad_value) |
| 41 | + |
| 42 | + return batch |
| 43 | + |
| 44 | + |
def zero_pad_collator(batch) -> Union[Dict[str, torch.Tensor], Tuple[torch.Tensor, ...]]:
    """DataLoader ``collate_fn`` that zero-pads variable-length tensors.

    Dispatches on the type of the first element: strings pass through,
    tuples are collated position-wise, tensors are padded/stacked, and
    dicts are collated per-key.

    Args:
        batch: non-empty list of same-typed samples.

    Returns:
        The collated batch (list of str, tuple, tensor, or dict).
    """
    datum = batch[0]

    # For strings: nothing to pad or stack — pass through unchanged.
    if isinstance(datum, str):
        return batch

    # For tuples: collate each position independently.
    if isinstance(datum, tuple):
        return tuple(collate_tensors([b[i] for b in batch]) for i in range(len(datum)))

    # For tensors. Previously only MaskedTensor was accepted here, so a batch
    # of plain torch.Tensor samples fell through to datum.keys() below and
    # raised AttributeError, even though collate_tensors supports both kinds.
    if isinstance(datum, (MaskedTensor, torch.Tensor)):
        return collate_tensors(batch)

    # For dictionaries: collate each key's values across the batch.
    keys = datum.keys()
    return {k: collate_tensors([b[k] for b in batch]) for k in keys}
| 63 | + |
| 64 | + |
0 commit comments