# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.

import os
import unittest
from pathlib import Path
from typing import Callable, Optional, Union

import numpy as np
import torch
from PIL import Image


def get_tests_dir() -> Path:
    """
    Returns Path for the directory containing this file.
    """
    return Path(__file__).resolve().parent


def get_pytorch3d_dir() -> Path:
    """
    Returns Path for the root PyTorch3D directory.

    Facebook internal systems need a special case here.
    """
    if os.environ.get("INSIDE_RE_WORKER") is not None:
        return Path(__file__).resolve().parent
    else:
        return Path(__file__).resolve().parent.parent
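
# Illustrative usage sketch (not part of the original module): the two helpers
# above locate data relative to the test directory or the repository root.
# The subdirectory names are hypothetical examples.
#
#   test_data_dir = get_tests_dir() / "data"
#   repo_root = get_pytorch3d_dir()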


def load_rgb_image(filename: str, data_dir: Union[str, Path]):
    """
    Load an image file and return its first three (RGB) channels as a
    float32 tensor with values in [0, 1].
    """
    filepath = Path(data_dir) / filename
    with Image.open(filepath) as raw_image:
        image = torch.from_numpy(np.array(raw_image) / 255.0)
    image = image.to(dtype=torch.float32)
    return image[..., :3]
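
# Illustrative usage sketch (not part of the original module): comparing a
# rendered image against a stored reference. The filename is a hypothetical
# example.
#
#   expected = load_rgb_image("test_render_sphere.png", get_tests_dir() / "data")
#   self.assertClose(rendered_rgb, expected, atol=0.05)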


TensorOrArray = Union[torch.Tensor, np.ndarray]


def get_random_cuda_device() -> str:
    """
    Function to get a random GPU device from the
    available devices. This is useful for testing
    that custom cuda kernels can support inputs on
    any device without having to set the device explicitly.
    """
    num_devices = torch.cuda.device_count()
    device_id = (
        torch.randint(high=num_devices, size=(1,)).item() if num_devices > 1 else 0
    )
    return "cuda:%d" % device_id


class TestCaseMixin(unittest.TestCase):
    def assertSeparate(self, tensor1, tensor2) -> None:
        """
        Verify that tensor1 and tensor2 have their data in distinct locations.
        """
        self.assertNotEqual(tensor1.storage().data_ptr(), tensor2.storage().data_ptr())

    def assertNotSeparate(self, tensor1, tensor2) -> None:
        """
        Verify that tensor1 and tensor2 have their data in the same locations.
        """
        self.assertEqual(tensor1.storage().data_ptr(), tensor2.storage().data_ptr())

    def assertAllSeparate(self, tensor_list) -> None:
        """
        Verify that all tensors in tensor_list have their data in
        distinct locations.
        """
        ptrs = [i.storage().data_ptr() for i in tensor_list]
        self.assertCountEqual(ptrs, set(ptrs))
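
    # Illustrative usage sketch (not part of the original module): the three
    # asserts above compare storage pointers, so they distinguish copies from
    # views of the same tensor.
    #
    #   a = torch.rand(4)
    #   self.assertNotSeparate(a, a.view(2, 2))   # a view shares storage
    #   self.assertSeparate(a, a.clone())         # a clone has its own storage
    #   self.assertAllSeparate([a, a.clone(), torch.rand(4)])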

    def assertNormsClose(
        self,
        input: TensorOrArray,
        other: TensorOrArray,
        norm_fn: Callable[[TensorOrArray], TensorOrArray],
        *,
        rtol: float = 1e-05,
        atol: float = 1e-08,
        equal_nan: bool = False,
        msg: Optional[str] = None,
    ) -> None:
        """
        Verifies that two tensors or arrays have the same shape and are close
            given absolute and relative tolerance; raises AssertionError otherwise.
            A custom norm function is computed before comparison. If no such pre-
            processing needed, pass `torch.abs` or, equivalently, call `assertClose`.
        Args:
            input, other: two tensors or two arrays.
            norm_fn: The function evaluates
                `all(norm_fn(input - other) <= atol + rtol * norm_fn(other))`.
                norm_fn is a tensor -> tensor function; the output has:
                    * all entries non-negative,
                    * shape defined by the input shape only.
            rtol, atol, equal_nan: as for torch.allclose.
            msg: message in case the assertion is violated.
        Note:
            Optional arguments here are all keyword-only, to avoid confusion
            with msg arguments on other assert functions.
        """

        self.assertEqual(np.shape(input), np.shape(other))

        diff = norm_fn(input - other)
        other_ = norm_fn(other)

        # We want to generalise allclose(input, other), which is essentially
        #  all(diff <= atol + rtol * other)
        # but with more sophisticated handling of non-finite values.
        # We work around that by calling allclose() with the following arguments:
        # allclose(diff + other_, other_). This computes what we want because
        #  all(|diff + other_ - other_| <= atol + rtol * |other_|) ==
        #    all(|norm_fn(input - other)| <= atol + rtol * |norm_fn(other)|) ==
        #    all(norm_fn(input - other) <= atol + rtol * norm_fn(other)).

        self.assertClose(
            diff + other_, other_, rtol=rtol, atol=atol, equal_nan=equal_nan, msg=msg
        )
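
    # Illustrative usage sketch (not part of the original module): comparing
    # two batches of 3D points up to a per-point Euclidean norm rather than
    # elementwise.
    #
    #   norm_fn = lambda t: t.norm(dim=-1)
    #   self.assertNormsClose(pred_points, gt_points, norm_fn, atol=1e-4)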

    def assertClose(
        self,
        input: TensorOrArray,
        other: TensorOrArray,
        *,
        rtol: float = 1e-05,
        atol: float = 1e-08,
        equal_nan: bool = False,
        msg: Optional[str] = None,
    ) -> None:
        """
        Verifies that two tensors or arrays have the same shape and are close
            given absolute and relative tolerance, i.e. checks
            `all(|input - other| <= atol + rtol * |other|)`;
            raises AssertionError otherwise.
        Args:
            input, other: two tensors or two arrays.
            rtol, atol, equal_nan: as for torch.allclose.
            msg: message in case the assertion is violated.
        Note:
            Optional arguments here are all keyword-only, to avoid confusion
            with msg arguments on other assert functions.
        """

        self.assertEqual(np.shape(input), np.shape(other))

        backend = torch if torch.is_tensor(input) else np
        close = backend.allclose(
            input, other, rtol=rtol, atol=atol, equal_nan=equal_nan
        )

        if not close and msg is None:
            # The "+ 0.0" promotes integer inputs to floating point so that
            # the diff and the relative ratio below are well defined.
            diff = backend.abs(input + 0.0 - other)
            ratio = diff / backend.abs(other)
            try_relative = (diff <= atol) | (backend.isfinite(ratio) & (ratio > 0))
            if try_relative.all():
                if backend == np:
                    # Avoid a weirdness with zero dimensional arrays.
                    ratio = np.array(ratio)
                ratio[diff <= atol] = 0
                extra = f" Max relative diff {ratio.max()}"
            else:
                extra = ""
            shape = tuple(input.shape)
            loc = np.unravel_index(diff.argmax(), shape)
            max_diff = diff.max()
            msg = f"Not close. Max diff {max_diff}.{extra} Shape {shape}. At {loc}."
            self.fail(msg)

        self.assertTrue(close, msg)
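

# Illustrative usage sketch (not part of the original module): a minimal test
# case built on TestCaseMixin. The class name and tolerances are hypothetical.
#
#   class TestFoo(TestCaseMixin, unittest.TestCase):
#       def test_close(self) -> None:
#           a = torch.rand(2, 3)
#           b = a + 1e-7
#           self.assertClose(a, b, atol=1e-5)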