OpenDAS / dgl · Commit d19887cd (Unverified)

Authored Aug 09, 2023 by Andrei Ivanov; committed by GitHub on Aug 10, 2023
Parent: d5ae1ea0

Improving transform tests. (#6123)

Showing 2 changed files with 15 additions and 15 deletions (+15 -15):
  python/dgl/transforms/functional.py                +2   -2
  tests/python/common/transforms/test_transform.py   +13  -13
python/dgl/transforms/functional.py

@@ -3600,11 +3600,11 @@ def random_walk_pe(g, k, eweight_name=None):
     RW = (A / (A.sum(1) + 1e-30)).toarray()
     # Iterate for k steps
-    PE = [F.astype(F.tensor(RW.diagonal()), F.float32)]
+    PE = [F.astype(F.tensor(np.array(RW.diagonal())), F.float32)]
     RW_power = RW
     for _ in range(k - 1):
         RW_power = RW_power @ RW
-        PE.append(F.astype(F.tensor(RW_power.diagonal()), F.float32))
+        PE.append(F.astype(F.tensor(np.array(RW_power.diagonal())), F.float32))
     PE = F.stack(PE, dim=-1)
     return PE
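The only change in functional.py is that RW.diagonal() and RW_power.diagonal() are now wrapped in np.array(...) before being passed to F.tensor. The commit does not state the motivation, but a plausible reason is that NumPy's diagonal() returns a read-only view, and converting a non-writable array into a backend tensor (e.g. via torch.as_tensor with the PyTorch backend) emits a warning; copying first avoids that. A minimal sketch, assuming a plain ndarray and the PyTorch backend (not code from this commit):

    import numpy as np
    import torch

    RW = np.eye(3)
    view = RW.diagonal()            # read-only view since NumPy 1.9
    print(view.flags.writeable)     # False
    # torch.as_tensor(view) may warn that the array is not writable;
    # copying first, as the commit does, sidesteps this.
    diag = np.array(RW.diagonal())  # writable, contiguous copy
    t = torch.as_tensor(diag, dtype=torch.float32)
    print(t)                        # tensor([1., 1., 1.])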
tests/python/common/transforms/test_transform.py

@@ -129,7 +129,7 @@ def create_test_heterograph5(idtype):
 def test_line_graph1():
     N = 5
-    G = dgl.DGLGraph(nx.star_graph(N)).to(F.ctx())
+    G = dgl.from_networkx(nx.star_graph(N)).to(F.ctx())
     G.edata["h"] = F.randn((2 * N, D))
     L = G.line_graph(shared=True)
     assert L.num_nodes() == 2 * N
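Most of the test updates replace the deprecated dgl.DGLGraph(nx_graph) constructor with the explicit dgl.from_networkx conversion. A minimal usage sketch (the star-graph size is illustrative, not taken from the diff):

    import dgl
    import networkx as nx

    # Each undirected NetworkX edge becomes a pair of directed edges in DGL.
    G = dgl.from_networkx(nx.star_graph(5))
    print(G.num_nodes(), G.num_edges())  # 6 10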
@@ -185,7 +185,7 @@ def test_line_graph2(idtype):
 def test_no_backtracking():
     N = 5
-    G = dgl.DGLGraph(nx.star_graph(N))
+    G = dgl.from_networkx(nx.star_graph(N))
     L = G.line_graph(backtracking=False)
     assert L.num_nodes() == 2 * N
     for i in range(1, N):
@@ -198,7 +198,7 @@ def test_no_backtracking():
 # reverse graph related
 @parametrize_idtype
 def test_reverse(idtype):
-    g = dgl.DGLGraph()
+    g = dgl.graph([])
     g = g.astype(idtype).to(F.ctx())
     g.add_nodes(5)
     # The graph need not to be completely connected.
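Here (and in test_reverse_shared_frames below) the empty dgl.DGLGraph() constructor becomes dgl.graph([]), which builds a graph from an edge list that starts out empty; nodes and edges are then added explicitly. A small sketch mirroring the test setup (not part of the diff):

    import dgl

    g = dgl.graph([])                     # no edges yet, 0 nodes
    g.add_nodes(5)                        # grow the node set explicitly
    g.add_edges([0, 1], [1, 2])           # add edges 0->1 and 1->2
    print(g.num_nodes(), g.num_edges())   # 5 2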
@@ -360,14 +360,14 @@ def test_reverse(idtype):
 @parametrize_idtype
 def test_reverse_shared_frames(idtype):
-    g = dgl.DGLGraph()
+    g = dgl.graph([])
     g = g.astype(idtype).to(F.ctx())
     g.add_nodes(3)
     g.add_edges([0, 1, 2], [1, 2, 1])
     g.ndata["h"] = F.tensor([[0.0], [1.0], [2.0]])
     g.edata["h"] = F.tensor([[3.0], [4.0], [5.0]])
-    rg = g.reverse(share_ndata=True, share_edata=True)
+    rg = g.reverse(copy_ndata=True, copy_edata=True)
     assert F.allclose(g.ndata["h"], rg.ndata["h"])
     assert F.allclose(g.edata["h"], rg.edata["h"])
     assert F.allclose(
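Besides the constructor change, this hunk renames the reverse() keywords from share_ndata/share_edata to the current copy_ndata/copy_edata, which carry the node and edge features over to the reversed graph. A short sketch of the updated call, assuming the PyTorch backend (values mirror the test):

    import dgl
    import torch

    g = dgl.graph([])
    g.add_nodes(3)
    g.add_edges([0, 1, 2], [1, 2, 1])
    g.ndata["h"] = torch.tensor([[0.0], [1.0], [2.0]])
    g.edata["h"] = torch.tensor([[3.0], [4.0], [5.0]])

    # Reverse edge directions while keeping the feature tensors.
    rg = g.reverse(copy_ndata=True, copy_edata=True)
    print(rg.edges())  # (tensor([1, 2, 1]), tensor([0, 1, 2]))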
@@ -588,9 +588,9 @@ def test_add_reverse_edges():
 @unittest.skipIf(F._default_context_str == "gpu", reason="GPU not implemented")
 def test_simple_graph():
     elist = [(0, 1), (0, 2), (1, 2), (0, 1)]
-    g = dgl.DGLGraph(elist, readonly=True)
+    g = dgl.graph(elist)
     assert g.is_multigraph
-    sg = dgl.to_simple_graph(g)
+    sg = dgl.to_simple(g)
     assert not sg.is_multigraph
     assert sg.num_edges() == 3
     src, dst = sg.edges()
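Two renames here: the removed readonly flag is dropped from the constructor, and dgl.to_simple_graph becomes dgl.to_simple, which collapses parallel edges into a simple graph. A minimal sketch of the behaviour the test asserts (edge list copied from the test):

    import dgl

    elist = [(0, 1), (0, 2), (1, 2), (0, 1)]       # (0, 1) appears twice
    g = dgl.graph(elist)
    sg = dgl.to_simple(g)                          # deduplicate parallel edges
    print(g.num_edges(), sg.num_edges())           # 4 3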
@@ -603,7 +603,7 @@ def _test_bidirected_graph():
     def _test(in_readonly, out_readonly):
         elist = [(0, 0), (0, 1), (1, 0), (1, 1), (2, 1), (2, 2)]
         num_edges = 7
-        g = dgl.DGLGraph(elist, readonly=in_readonly)
+        g = dgl.graph(elist)
         elist.append((1, 2))
         elist = set(elist)
         big = dgl.to_bidirected_stale(g, out_readonly)
@@ -638,10 +638,10 @@ def test_khop_graph():
         assert F.allclose(h_0, h_1, rtol=1e-3, atol=1e-3)
     # Test for random undirected graphs
-    g = dgl.DGLGraph(nx.erdos_renyi_graph(N, 0.3))
+    g = dgl.from_networkx(nx.erdos_renyi_graph(N, 0.3))
     _test(g)
     # Test for random directed graphs
-    g = dgl.DGLGraph(nx.erdos_renyi_graph(N, 0.3, directed=True))
+    g = dgl.from_networkx(nx.erdos_renyi_graph(N, 0.3, directed=True))
     _test(g)
@@ -649,7 +649,7 @@ def test_khop_graph():
 def test_khop_adj():
     N = 20
     feat = F.randn((N, 5))
-    g = dgl.DGLGraph(nx.erdos_renyi_graph(N, 0.3, directed=True))
+    g = dgl.from_networkx(nx.erdos_renyi_graph(N, 0.3, directed=True))
     for k in range(3):
         adj = F.tensor(F.swapaxes(dgl.khop_adj(g, k), 0, 1))
         # use original graph to do message passing for k times.
@@ -667,14 +667,14 @@ def test_laplacian_lambda_max():
     N = 20
     eps = 1e-6
     # test DGLGraph
-    g = dgl.DGLGraph(nx.erdos_renyi_graph(N, 0.3))
+    g = dgl.from_networkx(nx.erdos_renyi_graph(N, 0.3))
     l_max = dgl.laplacian_lambda_max(g)
     assert l_max[0] < 2 + eps
     # test batched DGLGraph
     """
     N_arr = [20, 30, 10, 12]
     bg = dgl.batch([
-        dgl.DGLGraph(nx.erdos_renyi_graph(N, 0.3))
+        dgl.from_networkx(nx.erdos_renyi_graph(N, 0.3))
         for N in N_arr
     ])
     l_max_arr = dgl.laplacian_lambda_max(bg)