presets.py
import torch
from torchvision.transforms import transforms
from transforms import ConvertBCHWtoCBHW


class VideoClassificationPresetTrain:
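    """Transform preset for training video classification models.

    Applies, in order: dtype conversion to float32, resize, optional random
    horizontal flip, normalization, random crop, and a final permutation that
    moves channels in front of the temporal dimension (BCHW -> CBHW).
    """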
    def __init__(
        self,
        *,
        crop_size,
        resize_size,
        mean=(0.43216, 0.394666, 0.37645),
        std=(0.22803, 0.22145, 0.216989),
        hflip_prob=0.5,
    ):
        trans = [
            transforms.ConvertImageDtype(torch.float32),
            # We hard-code antialias=False to preserve results after we changed
            # its default from None to True (see
            # https://github.com/pytorch/vision/pull/7160)
            # TODO: we could re-train the video models with antialias=True?
            transforms.Resize(resize_size, antialias=False),
        ]
        if hflip_prob > 0:
            trans.append(transforms.RandomHorizontalFlip(hflip_prob))
        trans.extend([transforms.Normalize(mean=mean, std=std), transforms.RandomCrop(crop_size), ConvertBCHWtoCBHW()])
        self.transforms = transforms.Compose(trans)

    def __call__(self, x):
        return self.transforms(x)


class VideoClassificationPresetEval:
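    """Transform preset for evaluating video classification models.

    Applies, in order: dtype conversion to float32, resize, normalization,
    center crop, and the BCHW -> CBHW permutation.
    """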
    def __init__(self, *, crop_size, resize_size, mean=(0.43216, 0.394666, 0.37645), std=(0.22803, 0.22145, 0.216989)):
        self.transforms = transforms.Compose(
            [
                transforms.ConvertImageDtype(torch.float32),
                # We hard-code antialias=False to preserve results after we changed
                # its default from None to True (see
                # https://github.com/pytorch/vision/pull/7160)
                # TODO: we could re-train the video models with antialias=True?
                transforms.Resize(resize_size, antialias=False),
                transforms.Normalize(mean=mean, std=std),
                transforms.CenterCrop(crop_size),
                ConvertBCHWtoCBHW(),
            ]
        )

    def __call__(self, x):
        return self.transforms(x)
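

# Minimal usage sketch (not part of the original file). The crop/resize sizes are
# illustrative assumptions, and the input is assumed to be a uint8 clip laid out
# as (T, C, H, W), which is what the final ConvertBCHWtoCBHW permutation expects.
if __name__ == "__main__":
    clip = torch.randint(0, 256, (16, 3, 128, 171), dtype=torch.uint8)  # 16 RGB frames
    train_preset = VideoClassificationPresetTrain(crop_size=(112, 112), resize_size=(128, 171))
    eval_preset = VideoClassificationPresetEval(crop_size=(112, 112), resize_size=(128, 171))
    # Both presets return a float32 tensor with channels first and time second.
    print(train_preset(clip).shape)  # torch.Size([3, 16, 112, 112])
    print(eval_preset(clip).shape)  # torch.Size([3, 16, 112, 112])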