"tools/dataset_converters/update_infos_to_v2.py" did not exist on "583c4accbbf8e37b15638820b7b781f4475c6bde"
test_fused_bias_leakyrelu.py 1.35 KB
Newer Older
limm's avatar
limm committed
1
# Copyright (c) OpenMMLab. All rights reserved.
import pytest
import torch

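# Prefer Parrots' gradcheck when it is available and fall back to
# torch.autograd's gradcheck/gradgradcheck otherwise; the flag records which
# backend is active, since the two take different keyword arguments.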
_USING_PARROTS = True
try:
    from parrots.autograd import gradcheck
except ImportError:
    from torch.autograd import gradcheck, gradgradcheck
    _USING_PARROTS = False


class TestFusedBiasLeakyReLU:

    @classmethod
    def setup_class(cls):
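        # The checks below exercise the CUDA implementation only, so there is
        # nothing to set up on CPU-only machines.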
        if not torch.cuda.is_available():
            return
        cls.input_tensor = torch.randn((2, 2, 2, 2), requires_grad=True).cuda()
        cls.bias = torch.zeros(2, requires_grad=True).cuda()
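        # A zero bias with one entry per channel; the gradient checks below
        # only pass the input tensor explicitly.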

    @pytest.mark.skipif(not torch.cuda.is_available(), reason='requires cuda')
    def test_gradient(self):

        from mmcv.ops import FusedBiasLeakyReLU
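        # The op is constructed with 2 channels to match the 2-channel input
        # tensor created in setup_class.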
        if _USING_PARROTS:
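            # Parrots' gradcheck takes `delta`/`pt_atol`, whereas
            # torch.autograd.gradcheck in the else-branch uses `eps`/`atol`.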
            gradcheck(
                FusedBiasLeakyReLU(2).cuda(),
                self.input_tensor,
                delta=1e-4,
                pt_atol=1e-3)
        else:
            gradcheck(
                FusedBiasLeakyReLU(2).cuda(),
                self.input_tensor,
                eps=1e-4,
                atol=1e-3)

    @pytest.mark.skipif(
        not torch.cuda.is_available() or _USING_PARROTS,
        reason='requires cuda')
    def test_gradgradient(self):

        from mmcv.ops import FusedBiasLeakyReLU
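        # gradgradcheck is only imported from torch.autograd (see the top of
        # the file), so this second-order check is skipped under Parrots.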
        gradgradcheck(
            FusedBiasLeakyReLU(2).cuda(),
            self.input_tensor,
            eps=1e-4,
            atol=1e-3)