Unverified Commit aa06e457 authored by Zihao Ye, committed by GitHub

[Doc] Improve the docstring of several nn modules. (#1187)



* upd

* upd

* upd

* upd

* lint

* upd

* upd

* upd

* upd

* upd
Co-authored-by: VoVAllen <VoVAllen@users.noreply.github.com>
parent 3ef757db
@@ -11,8 +11,7 @@ class DenseChebConv(nn.Block):
    Neural Networks on Graphs with Fast Localized Spectral Filtering
    <https://arxiv.org/pdf/1606.09375.pdf>`__.
-    We recommend to use this module when inducing ChebConv operations on dense
-    graphs / k-hop graphs.
+    We recommend using this module when applying ChebConv on dense graphs.
    Parameters
    ----------
......
@@ -9,8 +9,8 @@ from mxnet.gluon import nn
class DenseGraphConv(nn.Block):
    """Graph Convolutional Network layer where the graph structure
    is given by an adjacency matrix.
-    We recommend user to use this module when inducing graph convolution
-    on dense graphs / k-hop graphs.
+    We recommend using this module when applying graph convolution on
+    dense graphs.
    Parameters
    ----------
......
@@ -9,8 +9,7 @@ from mxnet.gluon import nn
class DenseSAGEConv(nn.Block):
    """GraphSAGE layer where the graph structure is given by an
    adjacency matrix.
-    We recommend to use this module when inducing GraphSAGE operations
-    on dense graphs / k-hop graphs.
+    We recommend using this module when applying GraphSAGE on dense graphs.
    Note that we only support gcn aggregator in DenseSAGEConv.
......
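For readers skimming the diff, a minimal usage sketch of these Gluon dense modules follows. The `dgl.nn.mxnet.conv` import path, the `(in_feats, out_feats)` constructors, and the `forward(adj, feat)` call are assumptions about the API rather than something shown in this commit, and all shapes are arbitrary.

```python
# Hedged usage sketch for the Gluon dense conv modules; signatures are assumed.
import mxnet as mx
from dgl.nn.mxnet.conv import DenseGraphConv, DenseSAGEConv

adj = mx.nd.array([[0, 1, 1],
                   [1, 0, 1],
                   [1, 1, 0]])          # dense adjacency of a 3-node graph
feat = mx.nd.random.randn(3, 4)         # 3 nodes, 4 input features

conv = DenseGraphConv(4, 2)             # assumed signature: (in_feats, out_feats)
conv.initialize()
out = conv(adj, feat)                   # assumed forward(adj, feat) -> shape (3, 2)

sage = DenseSAGEConv(4, 2)              # gcn aggregator only, per the note above
sage.initialize()
out = sage(adj, feat)
```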
@@ -108,7 +108,7 @@ def normalize(x, p=2, axis=1, eps=1e-12):
    return x / denom
class Sequential(gluon.nn.Sequential):
-    """A sequential container for stacking graph neural network blocks.
+    r"""A sequential container for stacking graph neural network blocks.
    We support two modes: sequentially apply GNN blocks on the same graph or
    a list of given graphs. In the second case, the number of graphs equals the
@@ -147,7 +147,6 @@ class Sequential(gluon.nn.Sequential):
    >>> n_feat = nd.random.randn(3, 4)
    >>> e_feat = nd.random.randn(9, 4)
    >>> net(g, n_feat, e_feat)
-
    (
    [[ 12.412863 99.61184 21.472883 -57.625923 ]
    [ 10.08097 100.68611 20.627377 -60.13458 ]
@@ -192,7 +191,6 @@ class Sequential(gluon.nn.Sequential):
    >>> net.initialize()
    >>> n_feat = nd.random.randn(32, 4)
    >>> net([g1, g2, g3], n_feat)
-
    [[-101.289566 -22.584694 -89.25348 -151.6447 ]
    [-130.74239 -49.494812 -120.250854 -199.81546 ]
    [-112.32089 -50.036713 -116.13266 -190.38638 ]
@@ -203,15 +201,17 @@ class Sequential(gluon.nn.Sequential):
        super(Sequential, self).__init__(prefix=prefix, params=params)
    def forward(self, graph, *feats):
-        """Sequentially apply modules to the input.
+        r"""Sequentially apply modules to the input.
        Parameters
        ----------
-        graph: a DGLGraph or a list of DGLGraphs.
+        graph : DGLGraph or list of DGLGraphs
+            The graph(s) to apply modules on.
-        *feats: input features.
-            The output of i-th block should match that of the input
-            of (i+1)-th block.
+        *feats :
+            Input features.
+            The output of the :math:`i`-th block should match the input
+            of the :math:`(i+1)`-th block.
        """
        if isinstance(graph, list):
            for graph_i, module in zip(graph, self):
......
@@ -10,8 +10,7 @@ class DenseChebConv(nn.Module):
    Neural Networks on Graphs with Fast Localized Spectral Filtering
    <https://arxiv.org/pdf/1606.09375.pdf>`__.
-    We recommend to use this module when inducing ChebConv operations on dense
-    graphs / k-hop graphs.
+    We recommend using this module when applying ChebConv on dense graphs.
    Parameters
    ----------
......
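A similarly hedged sketch for the PyTorch DenseChebConv: the `(in_feats, out_feats, k)` constructor and the `forward(adj, feat)` call are assumptions about the module's API, and the sizes are made up for illustration.

```python
# Sketch only: applying DenseChebConv to a small dense graph.
import torch
from dgl.nn.pytorch.conv import DenseChebConv

adj = torch.tensor([[0., 1., 1.],
                    [1., 0., 1.],
                    [1., 1., 0.]])      # dense adjacency, shape (N, N)
feat = torch.rand(3, 4)                 # N=3 nodes, 4 input features

conv = DenseChebConv(4, 2, k=2)         # assumed: in_feats=4, out_feats=2, Chebyshev order k
out = conv(adj, feat)                   # assumed forward(adj, feat) -> shape (3, 2)
```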
@@ -8,8 +8,8 @@ from torch.nn import init
class DenseGraphConv(nn.Module):
    """Graph Convolutional Network layer where the graph structure
    is given by an adjacency matrix.
-    We recommend user to use this module when inducing graph convolution
-    on dense graphs / k-hop graphs.
+    We recommend using this module when applying graph convolution on
+    dense graphs.
    Parameters
    ----------
......
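Because the old wording mentioned k-hop graphs, the sketch below also builds a 2-hop adjacency by squaring the adjacency matrix; the `(in_feats, out_feats)` constructor and `forward(adj, feat)` signature are again assumptions, not part of this diff.

```python
# Sketch only: graph convolution over a dense (or k-hop) adjacency matrix.
import torch
from dgl.nn.pytorch.conv import DenseGraphConv

adj = torch.tensor([[0., 1., 0.],
                    [1., 0., 1.],
                    [0., 1., 0.]])      # dense adjacency of a 3-node path graph
adj_2hop = ((adj @ adj) > 0).float()    # illustrative 2-hop connectivity
feat = torch.rand(3, 4)

conv = DenseGraphConv(4, 2)             # assumed: (in_feats, out_feats)
out = conv(adj_2hop, feat)              # assumed forward(adj, feat)
```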
@@ -6,8 +6,7 @@ from torch import nn
class DenseSAGEConv(nn.Module):
    """GraphSAGE layer where the graph structure is given by an
    adjacency matrix.
-    We recommend to use this module when inducing GraphSAGE operations
-    on dense graphs / k-hop graphs.
+    We recommend using this module when applying GraphSAGE on dense graphs.
    Note that we only support gcn aggregator in DenseSAGEConv.
......
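And a hedged sketch for DenseSAGEConv, which per the note above supports only the gcn aggregator; the constructor arguments and the forward call are assumptions about the API.

```python
# Sketch only: GraphSAGE (gcn aggregator) over a dense adjacency matrix.
import torch
from dgl.nn.pytorch.conv import DenseSAGEConv

adj = torch.ones(3, 3) - torch.eye(3)   # fully connected 3-node graph, no self-loops
feat = torch.rand(3, 4)

sage = DenseSAGEConv(4, 2)               # assumed: (in_feats, out_feats)
out = sage(adj, feat)                    # assumed forward(adj, feat) -> shape (3, 2)
```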
@@ -104,7 +104,7 @@ class Identity(nn.Module):
        return x
class Sequential(nn.Sequential):
-    """A sequential container for stacking graph neural network modules.
+    r"""A sequential container for stacking graph neural network modules.
    We support two modes: sequentially apply GNN modules on the same graph or
    a list of given graphs. In the second case, the number of graphs equals the
@@ -112,7 +112,8 @@ class Sequential(nn.Sequential):
    Parameters
    ----------
-    *args : sub-modules of type torch.nn.Module, will be added to the container in
+    *args :
+        Sub-modules of type torch.nn.Module, which will be added to the container in
        the order they are passed in the constructor.
    Examples
@@ -144,7 +145,6 @@ class Sequential(nn.Sequential):
    >>> n_feat = torch.rand(3, 4)
    >>> e_feat = torch.rand(9, 4)
    >>> net(g, n_feat, e_feat)
-
    (tensor([[39.8597, 45.4542, 25.1877, 30.8086],
    [40.7095, 45.3985, 25.4590, 30.0134],
    [40.7894, 45.2556, 25.5221, 30.4220]]), tensor([[80.3772, 89.7752, 50.7762, 60.5520],
@@ -181,7 +181,6 @@ class Sequential(nn.Sequential):
    >>> net = Sequential(ExampleLayer(), ExampleLayer(), ExampleLayer())
    >>> n_feat = torch.rand(32, 4)
    >>> net([g1, g2, g3], n_feat)
-
    tensor([[209.6221, 225.5312, 193.8920, 220.1002],
    [250.0169, 271.9156, 240.2467, 267.7766],
    [220.4007, 239.7365, 213.8648, 234.9637],
@@ -191,15 +190,17 @@ class Sequential(nn.Sequential):
        super(Sequential, self).__init__(*args)
    def forward(self, graph, *feats):
-        """Sequentially apply modules to the input.
+        r"""Sequentially apply modules to the input.
        Parameters
        ----------
-        graph: a DGLGraph or a list of DGLGraphs.
+        graph : DGLGraph or list of DGLGraphs
+            The graph(s) to apply modules on.
-        *feats: input features.
-            The output of i-th block should match that of the input
-            of (i+1)-th block.
+        *feats :
+            Input features.
+            The output of the :math:`i`-th block should match the input
+            of the :math:`(i+1)`-th block.
        """
        if isinstance(graph, list):
            for graph_i, module in zip(graph, self):
......
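To make the `*feats` contract concrete, here is a rough re-implementation sketch of the chaining behaviour the forward docstring describes: each module receives the previous module's outputs as its feature arguments, and in list mode the i-th graph is paired with the i-th module. The tuple handling is illustrative only, not the library's actual code.

```python
# Illustrative re-implementation of the chaining described in the docstring.
import torch.nn as nn

class ChainSketch(nn.Sequential):
    def forward(self, graph, *feats):
        if isinstance(graph, list):
            # list mode: the i-th graph goes to the i-th module
            for g, module in zip(graph, self):
                feats = module(g, *feats)
                if not isinstance(feats, tuple):
                    feats = (feats,)
        else:
            # single-graph mode: every module sees the same graph
            for module in self:
                feats = module(graph, *feats)
                if not isinstance(feats, tuple):
                    feats = (feats,)
        # the output of module i becomes the input of module i+1
        return feats[0] if len(feats) == 1 else feats
```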