# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import os
import unittest
from pathlib import Path
from typing import Callable, Optional, Union

import numpy as np
import torch
from PIL import Image


def get_tests_dir() -> Path:
    """
    Returns Path for the directory containing this file.
    """
    return Path(__file__).resolve().parent


def get_pytorch3d_dir() -> Path:
    """
    Returns Path for the root PyTorch3D directory.

    Facebook internal systems need a special case here.
    """
    if os.environ.get("INSIDE_RE_WORKER") is not None:
        return Path(__file__).resolve().parent
    else:
        return Path(__file__).resolve().parent.parent
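
# Illustrative usage sketch (not part of the original helpers): test code can
# build fixture paths from these directories. The "data" subdirectory below is
# an assumption for the example only.
#
#     DATA_DIR = get_tests_dir() / "data"
#     repo_root = get_pytorch3d_dir()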


def load_rgb_image(filename: str, data_dir: Union[str, Path]):
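    """
    Load the image `data_dir/filename`, scale its values by 1/255 and convert
    to a float32 tensor, returning only the first three channels along the
    last dimension (dropping any alpha channel).
    """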
    filepath = os.path.join(data_dir, filename)
    with Image.open(filepath) as raw_image:
        image = torch.from_numpy(np.array(raw_image) / 255.0)
    image = image.to(dtype=torch.float32)
    return image[..., :3]
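
# Illustrative usage sketch: loading a stored reference image for comparison in
# a test. The file name and "data" directory are hypothetical placeholders.
#
#     expected = load_rgb_image("test_render.png", get_tests_dir() / "data")
#     assert expected.dtype == torch.float32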


TensorOrArray = Union[torch.Tensor, np.ndarray]


def get_random_cuda_device() -> str:
    """
    Function to get a random GPU device from the
    available devices. This is useful for testing
    that custom cuda kernels can support inputs on
    any device without having to set the device explicitly.
    """
    num_devices = torch.cuda.device_count()
    device_id = (
        torch.randint(high=num_devices, size=(1,)).item() if num_devices > 1 else 0
    )
    return "cuda:%d" % device_id
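
# Illustrative usage sketch: placing test inputs on a randomly chosen GPU so
# that custom kernels are also exercised on devices other than cuda:0.
#
#     device = get_random_cuda_device()
#     points = torch.rand(8, 3, device=device)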


class TestCaseMixin(unittest.TestCase):
    def assertSeparate(self, tensor1, tensor2) -> None:
        """
        Verify that tensor1 and tensor2 have their data in distinct locations.
        """
        self.assertNotEqual(tensor1.storage().data_ptr(), tensor2.storage().data_ptr())

    def assertNotSeparate(self, tensor1, tensor2) -> None:
        """
        Verify that tensor1 and tensor2 have their data in the same locations.
        """
        self.assertEqual(tensor1.storage().data_ptr(), tensor2.storage().data_ptr())

    def assertAllSeparate(self, tensor_list) -> None:
        """
        Verify that all tensors in tensor_list have their data in
        distinct locations.
        """
        ptrs = [i.storage().data_ptr() for i in tensor_list]
        self.assertCountEqual(ptrs, set(ptrs))
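
    # Illustrative usage sketch: clone() copies storage, while slicing/viewing
    # shares it. The tensors below are hypothetical test values.
    #
    #     a = torch.rand(3, 4)
    #     self.assertSeparate(a, a.clone())       # distinct storage
    #     self.assertNotSeparate(a, a[:2])        # a view shares storage
    #     self.assertAllSeparate([a, a.clone(), torch.rand(3, 4)])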

    def assertNormsClose(
        self,
        input: TensorOrArray,
        other: TensorOrArray,
        norm_fn: Callable[[TensorOrArray], TensorOrArray],
        *,
        rtol: float = 1e-05,
        atol: float = 1e-08,
        equal_nan: bool = False,
        msg: Optional[str] = None,
    ) -> None:
        """
        Verifies that two tensors or arrays have the same shape and are close
            given absolute and relative tolerance; raises AssertionError otherwise.
            A custom norm function is applied before the comparison. If no such
            preprocessing is needed, pass `torch.abs` or, equivalently, call
            `assertClose`.
        Args:
            input, other: two tensors or two arrays.
            norm_fn: a tensor -> tensor function applied before comparison;
                the assertion checks
                `all(norm_fn(input - other) <= atol + rtol * norm_fn(other))`.
                Its output must have:
                    * all entries non-negative,
                    * shape determined by the input shape only.
            rtol, atol, equal_nan: as for torch.allclose.
            msg: message in case the assertion is violated.
        Note:
            Optional arguments here are all keyword-only, to avoid confusion
            with msg arguments on other assert functions.
        """

        self.assertEqual(np.shape(input), np.shape(other))

        diff = norm_fn(input - other)
        other_ = norm_fn(other)

        # We want to generalize allclose(input, other), which is essentially
        #  all(diff <= atol + rtol * other)
        # but with more careful handling of non-finite values.
        # We work around that by calling allclose() with the following arguments:
        # allclose(diff + other_, other_). This computes what we want because
        #  all(|diff + other_ - other_| <= atol + rtol * |other_|) ==
        #    all(|norm_fn(input - other)| <= atol + rtol * |norm_fn(other)|) ==
        #    all(norm_fn(input - other) <= atol + rtol * norm_fn(other)).

        self.assertClose(
            diff + other_, other_, rtol=rtol, atol=atol, equal_nan=equal_nan, msg=msg
        )
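
    # Illustrative usage sketch: comparing batches of 3D vectors via the
    # per-row Euclidean norm of their difference instead of elementwise
    # absolute differences. The tensors and tolerance are hypothetical.
    #
    #     pred = torch.rand(10, 3)
    #     self.assertNormsClose(
    #         pred, pred + 1e-7, norm_fn=lambda t: t.norm(dim=-1), atol=1e-6
    #     )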

    def assertClose(
        self,
        input: TensorOrArray,
        other: TensorOrArray,
        *,
        rtol: float = 1e-05,
        atol: float = 1e-08,
        equal_nan: bool = False,
        msg: Optional[str] = None,
    ) -> None:
        """
        Verifies that two tensors or arrays have the same shape and are close
            given absolute and relative tolerance, i.e. checks
            `all(|input - other| <= atol + rtol * |other|)`;
            raises AssertionError otherwise.
        Args:
            input, other: two tensors or two arrays.
            rtol, atol, equal_nan: as for torch.allclose.
            msg: message in case the assertion is violated.
        Note:
            Optional arguments here are all keyword-only, to avoid confusion
            with msg arguments on other assert functions.
        """

        self.assertEqual(np.shape(input), np.shape(other))

        backend = torch if torch.is_tensor(input) else np
        close = backend.allclose(
            input, other, rtol=rtol, atol=atol, equal_nan=equal_nan
        )

        if close:
            return

        # "+ 0.0" promotes integer and boolean inputs to floating point before
        # computing the elementwise difference.
        diff = backend.abs(input + 0.0 - other)
        ratio = diff / backend.abs(other)
        try_relative = (diff <= atol) | (backend.isfinite(ratio) & (ratio > 0))
        if try_relative.all():
            if backend == np:
                # Convert to an ndarray: with zero-dimensional inputs, the
                # division above can return a numpy scalar, which does not
                # support the item assignment below.
                ratio = np.array(ratio)
            ratio[diff <= atol] = 0
            extra = f" Max relative diff {ratio.max()}"
        else:
            extra = ""
        shape = tuple(input.shape)
        loc = np.unravel_index(int(diff.argmax()), shape)
        max_diff = diff.max()
        err = f"Not close. Max diff {max_diff}.{extra} Shape {shape}. At {loc}."
        if msg is not None:
            self.fail(f"{msg} {err}")
        self.fail(err)
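

# Illustrative usage sketch: a test module can mix TestCaseMixin into its test
# case to get the tensor-aware assertions above. The class name and values are
# hypothetical.
#
#     class TestExample(TestCaseMixin, unittest.TestCase):
#         def test_close(self) -> None:
#             a = torch.tensor([1.0, 2.0, 3.0])
#             self.assertClose(a, a + 1e-7, atol=1e-6)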