Commit 9ea57e71 authored by rusty1s's avatar rusty1s
Browse files

added no cover pragmas

parent 70465ff9
......@@ -69,7 +69,7 @@ def test_spline_basis_backward_cpu(degree):
@pytest.mark.skipif(not torch.cuda.is_available(), reason='no CUDA')
@pytest.mark.parametrize('degree', implemented_degrees.keys())
def test_spline_basis_backward_gpu(degree):
def test_spline_basis_backward_gpu(degree): # pragma: no cover
kernel_size = torch.cuda.LongTensor([5, 5, 5])
is_open_spline = torch.cuda.ByteTensor([1, 0, 1])
pseudo = torch.cuda.DoubleTensor(4, 3).uniform_(0, 1)
......
......@@ -60,7 +60,7 @@ def test_spline_conv_forward_cpu(tensor, i):
@pytest.mark.skipif(not torch.cuda.is_available(), reason='no CUDA')
@pytest.mark.parametrize('tensor,i', product(tensors, range(len(tests))))
def test_spline_conv_forward_gpu(tensor, i):
def test_spline_conv_forward_gpu(tensor, i): # pragma: no cover
data = tests[i]
src = getattr(torch.cuda, tensor)(data['src'])
......@@ -104,7 +104,7 @@ def test_spline_basis_backward_cpu(degree):
@pytest.mark.skipif(not torch.cuda.is_available(), reason='no CUDA')
@pytest.mark.parametrize('degree', [2])
def test_spline_basis_backward_gpu(degree):
def test_spline_basis_backward_gpu(degree): # pragma: no cover
src = torch.cuda.DoubleTensor(3, 2).uniform_(-1, 1)
edge_index = torch.cuda.LongTensor([[0, 1, 1, 2], [1, 0, 2, 1]])
pseudo = torch.cuda.DoubleTensor(4, 3).uniform_(0, 1)
......
......@@ -35,7 +35,7 @@ def test_spline_weighting_forward_cpu(tensor, i):
@pytest.mark.skipif(not torch.cuda.is_available(), reason='no CUDA')
@pytest.mark.parametrize('tensor,i', product(tensors, range(len(tests))))
def test_spline_weighting_forward_gpu(tensor, i):
def test_spline_weighting_forward_gpu(tensor, i): # pragma: no cover
data = tests[i]
src = getattr(torch.cuda, tensor)(data['src'])
......@@ -65,7 +65,7 @@ def test_spline_basis_backward_cpu():
@pytest.mark.skipif(not torch.cuda.is_available(), reason='no CUDA')
def test_spline_basis_backward_gpu():
def test_spline_basis_backward_gpu(): # pragma: no cover
src = torch.cuda.DoubleTensor(4, 2).uniform_(0, 1)
weight = torch.cuda.DoubleTensor(25, 2, 4).uniform_(0, 1)
kernel_size = torch.cuda.LongTensor([5, 5])
......
......@@ -13,7 +13,8 @@ def basis_forward(degree, pseudo, kernel_size, is_open_spline):
return basis, weight_index
def basis_backward(degree, grad_basis, pseudo, kernel_size, is_open_spline):
def basis_backward(degree, grad_basis, pseudo, kernel_size,
is_open_spline): # pragma: no cover
grad_pseudo = pseudo.new(pseudo.size())
basis_bw(degree, grad_pseudo, grad_basis, pseudo, kernel_size,
is_open_spline)
......@@ -32,7 +33,7 @@ class SplineBasis(Function):
return basis_forward(self.degree, pseudo, self.kernel_size,
self.is_open_spline)
def backward(self, grad_basis, grad_weight_index):
def backward(self, grad_basis, grad_weight_index): # pragma: no cover
grad_pseudo = None
pseudo, = self.saved_tensors
......
......@@ -4,7 +4,7 @@ from .new import new
def node_degree(index, n, out=None):
if out is None:
if out is None: # pragma: no cover
zero = torch.zeros(n)
else:
out.resize_(n) if torch.is_tensor(out) else out.data.resize_(n)
......
......@@ -24,7 +24,7 @@ def basis_forward(degree, basis, weight_index, pseudo, kernel_size,
def basis_backward(degree, self, grad_basis, pseudo, kernel_size,
is_open_spline):
is_open_spline): # pragma: no cover
name = '{}BasisBackward'.format(get_degree_str(degree))
func = get_func(name, self.is_cuda, self)
func(self, grad_basis, pseudo, kernel_size, is_open_spline)
......@@ -35,16 +35,19 @@ def weighting_forward(self, src, weight, basis, weight_index):
func(self, src, weight, basis, weight_index)
def weighting_backward_src(self, grad_output, weight, basis, weight_index):
def weighting_backward_src(self, grad_output, weight, basis,
weight_index): # pragma: no cover
func = get_func('weightingBackwardSrc', self.is_cuda, self)
func(self, grad_output, weight, basis, weight_index)
def weighting_backward_weight(self, grad_output, src, basis, weight_index):
def weighting_backward_weight(self, grad_output, src, basis,
weight_index): # pragma: no cover
func = get_func('weightingBackwardWeight', self.is_cuda, self)
func(self, grad_output, src, basis, weight_index)
def weighting_backward_basis(self, grad_output, src, weight, weight_index):
def weighting_backward_basis(self, grad_output, src, weight,
weight_index): # pragma: no cover
func = get_func('weightingBackwardBasis', self.is_cuda, self)
func(self, grad_output, src, weight, weight_index)
......@@ -13,19 +13,22 @@ def weighting_forward(src, weight, basis, weight_index):
return output
def weighting_backward_src(grad_output, weight, basis, weight_index):
def weighting_backward_src(grad_output, weight, basis,
weight_index): # pragma: no cover
grad_src = grad_output.new(grad_output.size(0), weight.size(1))
weighting_bw_src(grad_src, grad_output, weight, basis, weight_index)
return grad_src
def weighting_backward_weight(grad_output, src, basis, weight_index, K):
def weighting_backward_weight(grad_output, src, basis, weight_index,
K): # pragma: no cover
grad_weight = src.new(K, src.size(1), grad_output.size(1))
weighting_bw_weight(grad_weight, grad_output, src, basis, weight_index)
return grad_weight
def weighting_backward_basis(grad_output, src, weight, weight_index):
def weighting_backward_basis(grad_output, src, weight,
weight_index): # pragma: no cover
grad_basis = src.new(weight_index.size())
weighting_bw_basis(grad_basis, grad_output, src, weight, weight_index)
return grad_basis
......@@ -36,7 +39,7 @@ class SplineWeighting(Function):
self.save_for_backward(src, weight, basis, weight_index)
return weighting_forward(src, weight, basis, weight_index)
def backward(self, grad_output):
def backward(self, grad_output): # pragma: no cover
grad_src = grad_weight = grad_basis = None
src, weight, basis, weight_index = self.saved_tensors
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment