OpenDAS / torch-scatter / Commits / 3f610bcf

Commit 3f610bcf: "own file for every function"
Authored Dec 21, 2017 by rusty1s
Parent commit: 0b0d099e

Showing 7 changed files with 92 additions and 79 deletions (+92, -79)
torch_scatter/functions/__init__.py    +6  -79
torch_scatter/functions/div.py        +13   -0
torch_scatter/functions/max.py        +17   -0
torch_scatter/functions/mean.py       +17   -0
torch_scatter/functions/min.py        +14   -0
torch_scatter/functions/mul.py        +13   -0
torch_scatter/functions/sub.py        +12   -0
torch_scatter/functions/__init__.py (view file @ 3f610bcf)

 from .scatter import scatter
 from .utils import gen_filled_tensor, gen_output
 from .add import scatter_add_, scatter_add
+from .sub import scatter_sub_, scatter_sub
+from .mul import scatter_mul_, scatter_mul
+from .div import scatter_div_, scatter_div
+from .mean import scatter_mean_, scatter_mean
+from .max import scatter_max_, scatter_max
+from .min import scatter_min_, scatter_min
-
-
-def scatter_sub_(output, index, input, dim=0):
-    """If multiple indices reference the same location, their negated
-    contributions add."""
-    return output.scatter_add_(dim, index, -input)
-
-
-def scatter_sub(index, input, dim=0, max_index=None, fill_value=0):
-    output = gen_output(index, input, dim, max_index, fill_value)
-    return scatter_sub_(output, index, input, dim)
-
-
-def scatter_mul_(output, index, input, dim=0):
-    """If multiple indices reference the same location, their
-    contributions multiply."""
-    return scatter('mul', dim, output, index, input)
-
-
-def scatter_mul(index, input, dim=0, max_index=None, fill_value=1):
-    output = gen_output(index, input, dim, max_index, fill_value)
-    return scatter_mul_(output, index, input, dim)
-
-
-def scatter_div_(output, index, input, dim=0):
-    """If multiple indices reference the same location, their
-    contributions divide."""
-    return scatter('div', dim, output, index, input)
-
-
-def scatter_div(index, input, dim=0, max_index=None, fill_value=1):
-    output = gen_output(index, input, dim, max_index, fill_value)
-    scatter_div_(output, index, input, dim)
-
-
-def scatter_mean_(output, index, input, dim=0):
-    """If multiple indices reference the same location, their
-    contributions average."""
-    num_output = gen_filled_tensor(output, output.size(), fill_value=0)
-    scatter('mean', dim, output, index, input, num_output)
-    num_output[num_output == 0] = 1
-    output /= num_output
-    return output
-
-
-def scatter_mean(index, input, dim=0, max_index=None, fill_value=0):
-    output = gen_output(index, input, dim, max_index, fill_value)
-    return scatter_mean_(output, index, input, dim)
-
-
-def scatter_max_(output, index, input, dim=0):
-    """If multiple indices reference the same location, the maximal
-    contribution gets taken.
-
-    :rtype: (:class:`Tensor`, :class:`LongTensor`)
-    """
-    arg_output = gen_filled_tensor(index, output.size(), fill_value=-1)
-    return scatter('max', dim, output, index, input, arg_output)
-
-
-def scatter_max(index, input, dim=0, max_index=None, fill_value=0):
-    output = gen_output(index, input, dim, max_index, fill_value)
-    return scatter_max_(output, index, input, dim)
-
-
-def scatter_min_(output, index, input, dim=0):
-    """If multiple indices reference the same location, the minimal
-    contribution gets taken."""
-    arg_output = gen_filled_tensor(index, output.size(), fill_value=-1)
-    return scatter('min', dim, output, index, input, arg_output)
-
-
-def scatter_min(index, input, dim=0, max_index=None, fill_value=0):
-    output = gen_output(index, input, dim, max_index, fill_value)
-    return scatter_min_(output, index, input, dim)
-
-
 __all__ = [
     'scatter_add_', 'scatter_add', 'scatter_sub_', 'scatter_sub',
 ...
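With the definitions moved out, the package surface is unchanged: everything in __all__ is still importable from torch_scatter.functions. A minimal usage sketch, assuming the package's C scatter backend is built and that scatter_add (from the untouched .add module) follows the same (index, input, dim=0, max_index=None, fill_value=0) signature as its siblings in this commit; the tensor values are illustrative:

import torch
from torch_scatter.functions import scatter_add

index = torch.LongTensor([0, 1, 0, 1])
input = torch.Tensor([1.0, 2.0, 3.0, 4.0])

# Sum the values that share an index: slot 0 gets 1 + 3, slot 1 gets 2 + 4.
out = scatter_add(index, input)
print(out)  # expected: [4.0, 6.0]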
torch_scatter/functions/div.py (new file, mode 100644; view file @ 3f610bcf)

from .scatter import scatter
from .utils import gen_output


def scatter_div_(output, index, input, dim=0):
    """If multiple indices reference the same location, their
    contributions divide."""
    return scatter('div', dim, output, index, input)


def scatter_div(index, input, dim=0, max_index=None, fill_value=1):
    output = gen_output(index, input, dim, max_index, fill_value)
    return scatter_div_(output, index, input, dim)
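What "contributions divide" means concretely lives in the C kernel; a plausible reading is that each scattered value divides the running output, which starts at the multiplicative identity (fill_value=1). A hedged sketch under that assumption, with illustrative values:

import torch
from torch_scatter.functions import scatter_div

index = torch.LongTensor([0, 0, 1])
input = torch.Tensor([2.0, 4.0, 8.0])

out = scatter_div(index, input)
# Under the successive-division reading:
# out[0] == 1 / 2 / 4 == 0.125 and out[1] == 1 / 8 == 0.125.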
torch_scatter/functions/max.py (new file, mode 100644; view file @ 3f610bcf)

from .scatter import scatter
from .utils import gen_filled_tensor, gen_output


def scatter_max_(output, index, input, dim=0):
    """If multiple indices reference the same location, the maximal
    contribution gets taken.

    :rtype: (:class:`Tensor`, :class:`LongTensor`)
    """
    arg_output = gen_filled_tensor(index, output.size(), fill_value=-1)
    return scatter('max', dim, output, index, input, arg_output)


def scatter_max(index, input, dim=0, max_index=None, fill_value=0):
    output = gen_output(index, input, dim, max_index, fill_value)
    return scatter_max_(output, index, input, dim)
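scatter_max_ threads an arg_output tensor (pre-filled with -1) through the kernel and documents a (Tensor, LongTensor) return, so the wrapper presumably yields both the maxima and the winning input positions. A hedged sketch; the exact argmax semantics belong to the C kernel:

import torch
from torch_scatter.functions import scatter_max

index = torch.LongTensor([0, 0, 1])
input = torch.Tensor([2.0, 5.0, 3.0])

out, argmax = scatter_max(index, input)
# out[0] == max(0, 2, 5) == 5.0 and out[1] == max(0, 3) == 3.0; the
# default fill_value of 0 participates in the maximum. argmax presumably
# holds each winner's position in `input`, with -1 in untouched slots.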
torch_scatter/functions/mean.py (new file, mode 100644; view file @ 3f610bcf)

from .scatter import scatter
from .utils import gen_filled_tensor, gen_output


def scatter_mean_(output, index, input, dim=0):
    """If multiple indices reference the same location, their
    contributions average."""
    num_output = gen_filled_tensor(output, output.size(), fill_value=0)
    scatter('mean', dim, output, index, input, num_output)
    num_output[num_output == 0] = 1
    output /= num_output
    return output


def scatter_mean(index, input, dim=0, max_index=None, fill_value=0):
    output = gen_output(index, input, dim, max_index, fill_value)
    return scatter_mean_(output, index, input, dim)
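The mean is computed in two steps visible in the Python above: the kernel accumulates sums into output and per-slot counts into num_output, then the division turns sums into averages. Clamping zero counts to 1 keeps untouched slots at their fill value instead of producing 0/0. A sketch with illustrative values, assuming gen_output sizes the result as index.max() + 1 when max_index is omitted:

import torch
from torch_scatter.functions import scatter_mean

index = torch.LongTensor([0, 0, 2])  # nothing maps to slot 1
input = torch.Tensor([2.0, 4.0, 8.0])

out = scatter_mean(index, input)
# out[0] == (2 + 4) / 2 == 3.0 and out[2] == 8.0; slot 1 keeps the
# fill_value of 0 because its zero count is clamped to 1 before dividing.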
torch_scatter/functions/min.py (new file, mode 100644; view file @ 3f610bcf)

from .scatter import scatter
from .utils import gen_filled_tensor, gen_output


def scatter_min_(output, index, input, dim=0):
    """If multiple indices reference the same location, the minimal
    contribution gets taken."""
    arg_output = gen_filled_tensor(index, output.size(), fill_value=-1)
    return scatter('min', dim, output, index, input, arg_output)


def scatter_min(index, input, dim=0, max_index=None, fill_value=0):
    output = gen_output(index, input, dim, max_index, fill_value)
    return scatter_min_(output, index, input, dim)
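scatter_min mirrors scatter_max, presumably including the (values, positions) pair threaded through arg_output. Note the interaction with the default fill_value=0: the initial zeros take part in the minimum, so strictly positive inputs leave them in place unless a larger fill value is passed. A hedged sketch:

import torch
from torch_scatter.functions import scatter_min

index = torch.LongTensor([0, 0, 1])
input = torch.Tensor([2.0, 5.0, 3.0])

out, argmin = scatter_min(index, input, fill_value=float('inf'))
# out[0] == min(2, 5) == 2.0 and out[1] == 3.0; with the default
# fill_value of 0, both slots would instead have stayed at 0.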
torch_scatter/functions/mul.py (new file, mode 100644; view file @ 3f610bcf)

from .scatter import scatter
from .utils import gen_output


def scatter_mul_(output, index, input, dim=0):
    """If multiple indices reference the same location, their
    contributions multiply."""
    return scatter('mul', dim, output, index, input)


def scatter_mul(index, input, dim=0, max_index=None, fill_value=1):
    output = gen_output(index, input, dim, max_index, fill_value)
    return scatter_mul_(output, index, input, dim)
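With fill_value=1 as the multiplicative identity, untouched slots stay at 1 and values sharing an index form a product, assuming the C 'mul' kernel multiplies the running output by each contribution. Illustrative values:

import torch
from torch_scatter.functions import scatter_mul

index = torch.LongTensor([0, 0, 1])
input = torch.Tensor([2.0, 4.0, 8.0])

out = scatter_mul(index, input)
# out[0] == 1 * 2 * 4 == 8.0 and out[1] == 1 * 8 == 8.0.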
torch_scatter/functions/sub.py (new file, mode 100644; view file @ 3f610bcf)

from .utils import gen_output


def scatter_sub_(output, index, input, dim=0):
    """If multiple indices reference the same location, their negated
    contributions add."""
    return output.scatter_add_(dim, index, -input)


def scatter_sub(index, input, dim=0, max_index=None, fill_value=0):
    output = gen_output(index, input, dim, max_index, fill_value)
    return scatter_sub_(output, index, input, dim)
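sub is the one operation here that needs no C kernel: it delegates to PyTorch's built-in Tensor.scatter_add_ on the negated input, so the in-place form can be checked against plain torch. A minimal sketch with illustrative values (only the torch_scatter.functions import path is an assumption):

import torch
from torch_scatter.functions import scatter_sub_

index = torch.LongTensor([0, 1, 0, 1])
input = torch.Tensor([1.0, 2.0, 3.0, 4.0])
output = torch.zeros(2)

scatter_sub_(output, index, input)
# Equivalent to output.scatter_add_(0, index, -input):
# output[0] == -(1 + 3) == -4.0 and output[1] == -(2 + 4) == -6.0.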