OpenDAS / pyg_autoscale

Commit 44405d11, authored Jun 09, 2021 by rusty1s

update

Parent: db97023e
Showing 1 changed file with 0 additions and 3 deletions.

README.md (+0, -3)
...
@@ -18,7 +18,6 @@ from torch_geometric.nn import SAGEConv
 from torch_geometric_autoscale import ScalableGNN
 from torch_geometric_autoscale import metis, permute, SubgraphLoader
 class GNN(ScalableGNN):
     def __init__(self, num_nodes, in_channels, hidden_channels, out_channels, num_layers):
         # pool_size determines the number of pinned CPU buffers
...
@@ -41,8 +40,6 @@ class GNN(ScalableGNN):
         x = self.push_and_pull(history, x, *args)
         return self.convs[-1](x, adj_t)
 perm, ptr = metis(data.adj_t, num_parts=40, log=True)
 data = permute(data, perm, log=True)
 loader = SubgraphLoader(data, ptr, batch_size=10, shuffle=True)
...
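For context, the sketch below assembles the context lines of this diff into a self-contained example. Only the imports, the GNN(ScalableGNN) skeleton with push_and_pull, and the metis / permute / SubgraphLoader calls come from the README shown above; the layer construction, the pool_size / buffer_size values, the histories attribute usage, and the Cora dataset are illustrative assumptions and are not part of this commit.

import torch
from torch.nn import ModuleList
import torch_geometric.transforms as T
from torch_geometric.datasets import Planetoid
from torch_geometric.nn import SAGEConv
from torch_geometric_autoscale import ScalableGNN
from torch_geometric_autoscale import metis, permute, SubgraphLoader


class GNN(ScalableGNN):
    def __init__(self, num_nodes, in_channels, hidden_channels, out_channels, num_layers):
        # pool_size determines the number of pinned CPU buffers;
        # the concrete values here are assumptions, not taken from the diff.
        super().__init__(num_nodes, hidden_channels, num_layers,
                         pool_size=2, buffer_size=5000)

        # Plain GraphSAGE stack (illustrative choice).
        self.convs = ModuleList()
        self.convs.append(SAGEConv(in_channels, hidden_channels))
        for _ in range(num_layers - 2):
            self.convs.append(SAGEConv(hidden_channels, hidden_channels))
        self.convs.append(SAGEConv(hidden_channels, out_channels))

    def forward(self, x, adj_t, *args):
        # self.histories is assumed to be the per-layer history storage
        # created by ScalableGNN.
        for conv, history in zip(self.convs[:-1], self.histories):
            x = conv(x, adj_t).relu_()
            # Push updated in-batch embeddings to the history buffer and
            # pull (possibly stale) embeddings for out-of-batch neighbors.
            x = self.push_and_pull(history, x, *args)
        return self.convs[-1](x, adj_t)


# Pre-processing: partition the graph with METIS, permute nodes so that each
# partition is contiguous, and build a mini-batch loader over partitions.
dataset = Planetoid('/tmp/Cora', name='Cora', transform=T.ToSparseTensor())
data = dataset[0]

perm, ptr = metis(data.adj_t, num_parts=40, log=True)
data = permute(data, perm, log=True)
loader = SubgraphLoader(data, ptr, batch_size=10, shuffle=True)

model = GNN(num_nodes=data.num_nodes, in_channels=dataset.num_features,
            hidden_channels=64, out_channels=dataset.num_classes, num_layers=3)

# Each mini-batch produced by `loader` is then fed through `model`; the extra
# loader metadata is forwarded as the *args consumed by push_and_pull.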