Commit 0b71aadc authored by rusty1s

scatter mean

parent f25c0e74
@@ -7,7 +7,7 @@ import torch_scatter
from .utils import devices
funcs = ['add', 'sub']
funcs = ['add', 'sub', 'mean']
indices = [2, 0, 1, 1, 0]
@@ -34,6 +34,20 @@ tests = [{
'dim': 0,
'fill_value': 9,
'expected': [[3, 4], [3, 5]]
}, {
'name': 'mean',
'src': [[2, 0, 1, 4, 3], [0, 2, 1, 3, 4]],
'index': [[4, 5, 4, 2, 3], [0, 0, 2, 2, 1]],
'dim': 1,
'fill_value': 0,
'expected': [[0, 0, 4, 3, 1.5, 0], [1, 4, 2, 0, 0, 0]]
}, {
'name': 'mean',
'src': [[5, 2], [2, 5], [4, 3], [1, 3]],
'index': [[0, 0], [1, 1], [1, 1], [0, 0]],
'dim': 0,
'fill_value': 0,
'expected': [[3, 2.5], [3, 4]]
}]
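As a quick sanity check on the second 'mean' case above: rows 0 and 3 of src scatter to output row 0, and rows 1 and 2 scatter to output row 1, so the expected means can be reproduced with plain PyTorch (this snippet is illustrative only and not part of the commit):

import torch

src = torch.tensor([[5., 2.], [2., 5.], [4., 3.], [1., 3.]])
index = torch.tensor([[0, 0], [1, 1], [1, 1], [0, 0]])

out = torch.zeros(2, 2).scatter_add_(0, index, src)                    # per-slot sums
count = torch.zeros(2, 2).scatter_add_(0, index, torch.ones_like(src)) # per-slot counts
print(out / count)  # tensor([[3.0000, 2.5000], [3.0000, 4.0000]])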
@@ -41,8 +55,9 @@ tests = [{
def test_forward(test, dtype, device):
src = tensor(test['src'], dtype, device)
index = tensor(test['index'], torch.long, device)
expected = tensor(test['expected'], dtype, device)
op = getattr(torch_scatter, 'scatter_{}'.format(test['name']))
output = op(src, index, test['dim'], fill_value=test['fill_value'])
assert output.tolist() == test['expected']
assert output.tolist() == expected.tolist()
from .add import scatter_add
from .sub import scatter_sub
from .mean import scatter_mean
__version__ = '1.0.0'
__all__ = ['scatter_add', 'scatter_sub', '__version__']
__all__ = ['scatter_add', 'scatter_sub', 'scatter_mean', '__version__']
@@ -5,7 +5,7 @@ from .utils.gen import gen
class ScatterAdd(Function):
@staticmethod
def forward(ctx, out, src, index, dim=-1):
def forward(ctx, out, src, index, dim):
ctx.mark_dirty(out)
ctx.save_for_backward(index)
return out.scatter_add_(dim, index, src)
@@ -86,5 +86,5 @@ def scatter_add(src, index, dim=-1, out=None, dim_size=None, fill_value=0):
2 4 4 0 0 0
[torch.FloatTensor of size 2x6]
"""
out, index = gen(src, index, dim, out, dim_size, fill_value)
src, out, index, dim = gen(src, index, dim, out, dim_size, fill_value)
return ScatterAdd.apply(out, src, index, dim)
from torch.autograd import Function
from .utils.ffi import get_func
from .utils.gen import gen
class ScatterMean(Function):
@staticmethod
def forward(ctx, out, src, index, dim):
ctx.mark_dirty(out)
count = src.new_zeros(out.size())
func = get_func('scatter_mean', src)
func(dim, out, index, src, count)
count[count == 0] = 1
out /= count
ctx.save_for_backward(index, count)
return out
@staticmethod
def backward(ctx, grad_out):
index, count = ctx.saved_variables
grad_src = None
if ctx.needs_input_grad[1]:
grad_src = grad_out[index] / count[index]
return None, grad_src, None, None
def scatter_mean(src, index, dim=-1, out=None, dim_size=None, fill_value=0):
r"""
|
.. image:: https://raw.githubusercontent.com/rusty1s/pytorch_scatter/
master/docs/source/_figures/mean.svg?sanitize=true
:align: center
:width: 400px
|
Averages all values from the :attr:`src` tensor into :attr:`out` at the
indices specified in the :attr:`index` tensor along a given axis
:attr:`dim`. If multiple indices reference the same location, their
**contributions average** (`cf.` :meth:`~torch_scatter.scatter_add`).
For one-dimensional tensors, the operation computes
.. math::
\mathrm{out}_i = \mathrm{out}_i + \frac{1}{N_i} \cdot
\sum_j \mathrm{src}_j
where the sum is over :math:`j` such that :math:`\mathrm{index}_j = i` and
:math:`N_i` indicates the number of indices referencing :math:`i`.
Args:
src (Tensor): The source tensor.
index (LongTensor): The indices of elements to scatter.
dim (int, optional): The axis along which to index.
(default: :obj:`-1`)
out (Tensor, optional): The destination tensor. (default: :obj:`None`)
dim_size (int, optional): If :attr:`out` is not given, automatically
create output with size :attr:`dim_size` at dimension :attr:`dim`.
If :attr:`dim_size` is not given, a minimal sized output tensor is
returned. (default: :obj:`None`)
fill_value (int, optional): If :attr:`out` is not given, automatically
fill output tensor with :attr:`fill_value`. (default: :obj:`0`)
:rtype: :class:`Tensor`
.. testsetup::
import torch
.. testcode::
from torch_scatter import scatter_mean
src = torch.tensor([[2, 0, 1, 4, 3], [0, 2, 1, 3, 4]])
index = torch.tensor([[4, 5, 4, 2, 3], [0, 0, 2, 2, 1]])
out = src.new_zeros((2, 6))
out = scatter_mean(src, index, out=out)
print(out)
.. testoutput::
0.0000 0.0000 4.0000 3.0000 1.5000 0.0000
1.0000 4.0000 2.0000 0.0000 0.0000 0.0000
[torch.FloatTensor of size 2x6]
"""
src, out, index, dim = gen(src, index, dim, out, dim_size, fill_value)
return ScatterMean.apply(out, src, index, dim)
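For readers without the compiled extension: the kernel call in ScatterMean.forward (a sum plus an occurrence count, followed by a division) and the backward rule (gather each slot's gradient and scale by 1/N_i) can be approximated in plain PyTorch. This is a rough sketch assuming scatter_add/gather semantics match the FFI kernel; it is not the code this commit adds:

import torch

def scatter_mean_reference(out, src, index, dim):
    # Forward: accumulate sums and per-slot counts, then divide.
    out = out.clone()
    count = torch.zeros_like(out)
    out.scatter_add_(dim, index, src)
    count.scatter_add_(dim, index, torch.ones_like(src))
    count.clamp_(min=1)  # untouched slots keep their fill value
    return out / count, count

def scatter_mean_reference_backward(grad_out, index, count, dim):
    # Backward: each src element receives the gradient of the slot it was
    # averaged into, scaled by 1 / N_i (the slot's occurrence count).
    return grad_out.gather(dim, index) / count.gather(dim, index)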
from .._ext import ffi
def get_func(name, tensor):
name += '_'
name += 'cuda_' if tensor.is_cuda else ''
name += tensor.type().split('.')[-1][:-6]
return getattr(ffi, name)
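The helper above derives the FFI symbol name from the op name plus the tensor's backend and scalar type, e.g. 'scatter_mean' on a CPU float tensor resolves to something like 'scatter_mean_Float' and on a CUDA float tensor to 'scatter_mean_cuda_Float' (assuming the extension exports symbols built exactly this way). A standalone illustration of the string construction:

import torch

def resolve(name, tensor):
    # Mirrors get_func's name building, without the ffi lookup:
    # 'torch.cuda.FloatTensor' -> 'FloatTensor' -> 'Float'
    suffix = tensor.type().split('.')[-1][:-6]
    return name + '_' + ('cuda_' if tensor.is_cuda else '') + suffix

print(resolve('scatter_mean', torch.zeros(1)))   # scatter_mean_Float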
@@ -2,6 +2,8 @@ from itertools import repeat
def gen(src, index, dim=-1, out=None, dim_size=None, fill_value=0):
dim = range(src.dim())[dim] # Get real dim value.
# Automatically expand index tensor to the right dimensions.
if index.dim() == 1:
index_size = [*repeat(1, src.dim())]
@@ -15,4 +17,4 @@ def gen(src, index, dim=-1, out=None, dim_size=None, fill_value=0):
out_size[dim] = dim_size
out = src.new_full(out_size, fill_value)
return out, index
return src, out, index, dim
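For context on the lines collapsed in this hunk: gen normalizes dim, broadcasts a one-dimensional index to the shape of src, and sizes out from dim_size (or a minimal size inferred from index) before returning the tuple unpacked above. A rough illustration of the 1-d index broadcast, with hypothetical values; the exact expansion code is not shown in this diff:

import torch

src = torch.tensor([[2., 0., 1., 4., 3.],
                    [0., 2., 1., 3., 4.]])
index = torch.tensor([0, 1, 0, 1, 2])       # 1-d index along dim=-1
index_size = [1] * src.dim()                # [1, 1]
index_size[-1] = src.size(-1)               # [1, 5]
index = index.view(index_size).expand_as(src)
print(index.size())                         # torch.Size([2, 5])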