Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
torch-scatter
Commits
cf8cf0c0
Commit
cf8cf0c0
authored
Feb 21, 2020
by
rusty1s
Browse files
fix backward pass
parent
5b1737ab
Changes
5
Show whitespace changes
Inline
Side-by-side
Showing
5 changed files
with
17 additions
and
5 deletions
+17
-5
test/composite/test_logsumexp.py
test/composite/test_logsumexp.py
+3
-0
test/composite/test_softmax.py
test/composite/test_softmax.py
+6
-0
test/composite/test_std.py
test/composite/test_std.py
+3
-0
torch_scatter/composite/softmax.py
torch_scatter/composite/softmax.py
+2
-2
torch_scatter/composite/std.py
torch_scatter/composite/std.py
+3
-3
No files found.
test/composite/test_logsumexp.py
View file @
cf8cf0c0
...
...
@@ -4,6 +4,7 @@ from torch_scatter import scatter_logsumexp
def
test_logsumexp
():
src
=
torch
.
tensor
([
0.5
,
0
,
0.5
,
-
2.1
,
3.2
,
7
,
-
1
,
-
100
])
src
.
requires_grad_
()
index
=
torch
.
tensor
([
0
,
1
,
0
,
1
,
1
,
2
,
4
,
4
])
out
=
scatter_logsumexp
(
src
,
index
)
...
...
@@ -16,3 +17,5 @@ def test_logsumexp():
expected
=
torch
.
stack
([
out0
,
out1
,
out2
,
out3
,
out4
],
dim
=
0
)
assert
torch
.
allclose
(
out
,
expected
)
out
.
backward
(
torch
.
randn_like
(
out
))
test/composite/test_softmax.py
View file @
cf8cf0c0
...
...
@@ -4,6 +4,7 @@ from torch_scatter import scatter_log_softmax, scatter_softmax
def
test_softmax
():
src
=
torch
.
tensor
([
0.2
,
0
,
0.2
,
-
2.1
,
3.2
,
7
,
-
1
,
float
(
'-inf'
)])
src
.
requires_grad_
()
index
=
torch
.
tensor
([
0
,
1
,
0
,
1
,
1
,
2
,
4
,
4
])
out
=
scatter_softmax
(
src
,
index
)
...
...
@@ -19,9 +20,12 @@ def test_softmax():
assert
torch
.
allclose
(
out
,
expected
)
out
.
backward
(
torch
.
randn_like
(
out
))
def
test_log_softmax
():
src
=
torch
.
tensor
([
0.2
,
0
,
0.2
,
-
2.1
,
3.2
,
7
,
-
1
,
float
(
'-inf'
)])
src
.
requires_grad_
()
index
=
torch
.
tensor
([
0
,
1
,
0
,
1
,
1
,
2
,
4
,
4
])
out
=
scatter_log_softmax
(
src
,
index
)
...
...
@@ -36,3 +40,5 @@ def test_log_softmax():
],
dim
=
0
)
assert
torch
.
allclose
(
out
,
expected
)
out
.
backward
(
torch
.
randn_like
(
out
))
test/composite/test_std.py
View file @
cf8cf0c0
...
...
@@ -4,9 +4,12 @@ from torch_scatter import scatter_std
def test_std():
    """Scatter-std of two rows routed to disjoint buckets, followed by a backward pass.

    Regression-style check that ``scatter_std`` both produces the same result
    as ``torch.Tensor.std`` on each bucket and supports autograd (the commit
    this test belongs to fixes the backward pass).
    """
    values = torch.tensor([[2, 0, 1, 4, 3], [0, 2, 1, 3, 4]], dtype=torch.float)
    values.requires_grad_()
    # Row 0 goes entirely to bucket 0, row 1 entirely to bucket 1.
    buckets = torch.tensor([[0, 0, 0, 0, 0], [1, 1, 1, 1, 1]], dtype=torch.long)

    result = scatter_std(values, buckets, dim=-1, unbiased=True)

    # Both rows are permutations of 0..4, so a single reference std (taken
    # from row 0) is valid for either row's populated bucket.
    reference_std = values.std(dim=-1, unbiased=True)[0]
    expected = torch.tensor([[reference_std, 0], [0, reference_std]])
    assert torch.allclose(result, expected)

    # Gradients must flow through the composite op without raising.
    result.backward(torch.randn_like(result))
torch_scatter/composite/softmax.py
View file @
cf8cf0c0
...
...
@@ -17,12 +17,12 @@ def scatter_softmax(src: torch.Tensor, index: torch.Tensor, dim: int = -1,
max_per_src_element
=
max_value_per_index
.
gather
(
dim
,
index
)
recentered_scores
=
src
-
max_per_src_element
recentered_scores_exp
=
recentered_scores
.
exp
_
()
recentered_scores_exp
=
recentered_scores
.
exp
()
sum_per_index
=
scatter_sum
(
recentered_scores_exp
,
index
,
dim
)
normalizing_constants
=
sum_per_index
.
add_
(
eps
).
gather
(
dim
,
index
)
return
recentered_scores_exp
.
div
_
(
normalizing_constants
)
return
recentered_scores_exp
.
div
(
normalizing_constants
)
@
torch
.
jit
.
script
...
...
torch_scatter/composite/std.py
View file @
cf8cf0c0
...
...
@@ -27,14 +27,14 @@ def scatter_std(src: torch.Tensor, index: torch.Tensor, dim: int = -1,
index
=
broadcast
(
index
,
src
,
dim
)
tmp
=
scatter_sum
(
src
,
index
,
dim
,
dim_size
=
dim_size
)
count
=
broadcast
(
count
,
tmp
,
dim
).
clamp_
(
1
)
mean
=
tmp
.
div
_
(
count
)
mean
=
tmp
.
div
(
count
)
var
=
(
src
-
mean
.
gather
(
dim
,
index
))
var
=
var
*
var
out
=
scatter_sum
(
var
,
index
,
dim
,
out
,
dim_size
)
if
unbiased
:
count
.
sub
_
(
1
).
clamp_
(
1
)
out
.
div
_
(
count
).
sqrt
_
()
count
=
count
.
sub
(
1
).
clamp_
(
1
)
out
=
out
.
div
(
count
).
sqrt
()
return
out
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment