Unverified commit 6d9433b0 authored by Mufei Li, committed by GitHub

[Transform] [Doc] Rename transform to transforms and update doc (#3765)

* Update

* Update

* Update

* Fix

* Update

* Update

* Update

* Fix
parent ccaa0bf2
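
For context, the user-visible effect of this commit is that the functional transform module is imported as dgl.transforms instead of dgl.transform; the function names themselves are unchanged. A minimal before/after sketch (assumes a DGL build that includes this change and a PyTorch backend; the tiny example graph is illustrative only):

    import dgl
    import torch

    # Before this change:
    #     from dgl.transform import add_self_loop
    # After this change:
    from dgl.transforms import add_self_loop

    # Illustrative graph: 3 nodes, 2 edges.
    g = dgl.graph((torch.tensor([0, 1]), torch.tensor([1, 2])))
    g = add_self_loop(g)      # appends one self-loop per node
    print(g.num_edges())      # 2 original edges + 3 self-loops = 5
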
@@ -3,7 +3,7 @@ import os
 import pickle
 import numpy as np
-from ..transform import metis_partition_assignment
+from ..transforms import metis_partition_assignment
 from .. import backend as F
 from .dataloader import SubgraphIterator
...
@@ -4,7 +4,7 @@ from collections.abc import Mapping, Sequence
 from abc import ABC, abstractproperty, abstractmethod
 import re
 import numpy as np
-from .. import transform
+from .. import transforms
 from ..base import NID, EID
 from .. import backend as F
 from .. import utils
@@ -83,7 +83,7 @@ class _EidExcluder():
             # to the mapping from the new graph to the old frontier.
             # So we need to test if located_eids is empty, and do the remapping ourselves.
             if len(located_eids) > 0:
-                frontier = transform.remove_edges(
+                frontier = transforms.remove_edges(
                     frontier, located_eids, store_ids=True)
                 frontier.edata[EID] = F.gather_row(parent_eids, frontier.edata[EID])
         else:
@@ -92,7 +92,7 @@ class _EidExcluder():
             new_eids = parent_eids.copy()
             for k, v in located_eids.items():
                 if len(v) > 0:
-                    frontier = transform.remove_edges(
+                    frontier = transforms.remove_edges(
                         frontier, v, etype=k, store_ids=True)
                     new_eids[k] = F.gather_row(parent_eids[k], frontier.edges[k].data[EID])
             frontier.edata[EID] = new_eids
@@ -455,7 +455,7 @@ class BlockSampler(Sampler):
             if not self.exclude_edges_in_frontier(g):
                 frontier = exclude_edges(frontier, exclude_eids, self.output_device)
-            block = transform.to_block(frontier, seed_nodes_out)
+            block = transforms.to_block(frontier, seed_nodes_out)
             if self.return_eids:
                 self.assign_block_eids(block, frontier)
@@ -838,7 +838,7 @@ class EdgeCollator(Collator):
         neg_pair_graph = heterograph(
             neg_edges, {ntype: self.g.number_of_nodes(ntype) for ntype in self.g.ntypes})
-        pair_graph, neg_pair_graph = transform.compact_graphs([pair_graph, neg_pair_graph])
+        pair_graph, neg_pair_graph = transforms.compact_graphs([pair_graph, neg_pair_graph])
         pair_graph.edata[EID] = induced_edges
         seed_nodes = pair_graph.ndata[NID]
...
"""ShaDow-GNN subgraph samplers.""" """ShaDow-GNN subgraph samplers."""
from ..utils import prepare_tensor_or_dict from ..utils import prepare_tensor_or_dict
from ..base import NID from ..base import NID
from .. import transform from .. import transforms
from ..sampling import sample_neighbors from ..sampling import sample_neighbors
from .neighbor import NeighborSamplingMixin from .neighbor import NeighborSamplingMixin
from .dataloader import exclude_edges, Sampler from .dataloader import exclude_edges, Sampler
...@@ -89,7 +89,7 @@ class ShaDowKHopSampler(NeighborSamplingMixin, Sampler): ...@@ -89,7 +89,7 @@ class ShaDowKHopSampler(NeighborSamplingMixin, Sampler):
fanout = self.fanouts[i] fanout = self.fanouts[i]
frontier = sample_neighbors( frontier = sample_neighbors(
g, seed_nodes, fanout, replace=self.replace, prob=self.prob_arrays) g, seed_nodes, fanout, replace=self.replace, prob=self.prob_arrays)
block = transform.to_block(frontier, seed_nodes) block = transforms.to_block(frontier, seed_nodes)
seed_nodes = block.srcdata[NID] seed_nodes = block.srcdata[NID]
subg = g.subgraph(seed_nodes, relabel_nodes=True) subg = g.subgraph(seed_nodes, relabel_nodes=True)
......
@@ -3692,14 +3692,14 @@ class DGLGraph(DGLBaseGraph):
     def line_graph(self, backtracking=True, shared=False):
         """Return the line graph of this graph.
-        See :func:`~dgl.transform.line_graph`.
+        See :func:`~dgl.transforms.line_graph`.
         """
         return dgl.line_graph(self, backtracking, shared)

     def reverse(self, share_ndata=False, share_edata=False):
         """Return the reverse of this graph.
-        See :func:`~dgl.transform.reverse`.
+        See :func:`~dgl.transforms.reverse`.
         """
         return dgl.reverse(self, share_ndata, share_edata)
...
@@ -502,14 +502,14 @@ class BaseGraphStore(DGLGraph):
     def line_graph(self, backtracking=True, shared=False):
         """Return the line graph of this graph.
-        See :func:`~dgl.transform.line_graph`.
+        See :func:`~dgl.transforms.line_graph`.
         """
         raise Exception("Graph store doesn't support creating an line matrix.")

     def reverse(self, share_ndata=False, share_edata=False):
         """Return the reverse of this graph.
-        See :func:`~dgl.transform.reverse`.
+        See :func:`~dgl.transforms.reverse`.
         """
         raise Exception("Graph store doesn't support reversing a matrix.")
...
@@ -20,7 +20,7 @@ from .. import batch
 from .. import backend as F
 from ..convert import graph as dgl_graph
 from ..convert import from_networkx, to_networkx
-from ..transform import reorder_graph
+from ..transforms import reorder_graph

 backend = os.environ.get('DGLBACKEND', 'pytorch')
...
@@ -7,7 +7,7 @@ from .dgl_dataset import DGLBuiltinDataset
 from .utils import save_graphs, load_graphs, _get_dgl_url, deprecate_property, deprecate_class
 from ..convert import graph as dgl_graph
 from .. import backend as F
-from .. import transform
+from .. import transforms

 __all__ = ["AmazonCoBuyComputerDataset", "AmazonCoBuyPhotoDataset", "CoauthorPhysicsDataset", "CoauthorCSDataset",
            "CoraFullDataset", "AmazonCoBuy", "Coauthor", "CoraFull"]
@@ -39,7 +39,7 @@ class GNNBenchmarkDataset(DGLBuiltinDataset):
     def process(self):
         npz_path = os.path.join(self.raw_path, self.name + '.npz')
         g = self._load_npz(npz_path)
-        g = transform.reorder_graph(
+        g = transforms.reorder_graph(
             g, node_permute_algo='rcmk', edge_permute_algo='dst', store_ids=False)
         self._graph = g
         self._data = [g]
@@ -96,7 +96,7 @@ class GNNBenchmarkDataset(DGLBuiltinDataset):
         else:
             labels = None
         g = dgl_graph((adj_matrix.row, adj_matrix.col))
-        g = transform.to_bidirected(g)
+        g = transforms.to_bidirected(g)
         g.ndata['feat'] = F.tensor(attr_matrix, F.data_type_dict['float32'])
         g.ndata['label'] = F.tensor(labels, F.data_type_dict['int64'])
         return g
...
@@ -7,7 +7,7 @@ from .dgl_dataset import DGLDataset
 from .utils import save_graphs, load_graphs, makedirs
 from .. import backend as F
 from ..convert import from_networkx
-from ..transform import add_self_loop
+from ..transforms import add_self_loop

 __all__ = ['MiniGCDataset']
...
@@ -6,7 +6,7 @@ import scipy.sparse as sp
 from .dgl_dataset import DGLDataset
 from .utils import download, _get_dgl_url
 from ..convert import graph as dgl_graph
-from ..transform import to_bidirected
+from ..transforms import to_bidirected
 from .. import backend as F

 class QM9Dataset(DGLDataset):
...
@@ -9,7 +9,7 @@ from .dgl_dataset import DGLBuiltinDataset
 from .utils import _get_dgl_url, generate_mask_tensor, load_graphs, save_graphs, deprecate_property
 from .. import backend as F
 from ..convert import from_scipy
-from ..transform import reorder_graph
+from ..transforms import reorder_graph

 class RedditDataset(DGLBuiltinDataset):
...
@@ -3,7 +3,7 @@ from collections import Mapping
 from ..base import NID, EID
 from ..convert import heterograph
 from .. import backend as F
-from ..transform import compact_graphs
+from ..transforms import compact_graphs
 from ..frame import LazyFeature
 from ..utils import recursive_apply
...
"""Data loading components for neighbor sampling""" """Data loading components for neighbor sampling"""
from ..base import NID, EID from ..base import NID, EID
from ..transform import to_block from ..transforms import to_block
from .base import BlockSampler from .base import BlockSampler
class NeighborSampler(BlockSampler): class NeighborSampler(BlockSampler):
......
"""ShaDow-GNN subgraph samplers.""" """ShaDow-GNN subgraph samplers."""
from ..sampling.utils import EidExcluder from ..sampling.utils import EidExcluder
from .. import transform from .. import transforms
from ..base import NID from ..base import NID
from .base import set_node_lazy_features, set_edge_lazy_features from .base import set_node_lazy_features, set_edge_lazy_features
...@@ -86,7 +86,7 @@ class ShaDowKHopSampler(object): ...@@ -86,7 +86,7 @@ class ShaDowKHopSampler(object):
frontier = g.sample_neighbors( frontier = g.sample_neighbors(
seed_nodes, fanout, output_device=self.output_device, seed_nodes, fanout, output_device=self.output_device,
replace=self.replace, prob=self.prob, exclude_edges=exclude_edges) replace=self.replace, prob=self.prob, exclude_edges=exclude_edges)
block = transform.to_block(frontier, seed_nodes) block = transforms.to_block(frontier, seed_nodes)
seed_nodes = block.srcdata[NID] seed_nodes = block.srcdata[NID]
subg = g.subgraph(seed_nodes, relabel_nodes=True, output_device=self.output_device) subg = g.subgraph(seed_nodes, relabel_nodes=True, output_device=self.output_device)
......
@@ -9,7 +9,7 @@ import numpy as np
 from ..heterograph import DGLHeteroGraph
 from ..convert import heterograph as dgl_heterograph
 from ..convert import graph as dgl_graph
-from ..transform import compact_graphs
+from ..transforms import compact_graphs
 from .. import heterograph_index
 from .. import backend as F
 from ..base import NID, EID, NTYPE, ETYPE, ALL, is_all
...
@@ -11,13 +11,8 @@ from ....utils import expand_as_pair
 class AGNNConv(nn.Block):
-    r"""
-
-    Description
-    -----------
-    Attention-based Graph Neural Network layer from paper `Attention-based
-    Graph Neural Network for Semi-Supervised Learning
-    <https://arxiv.org/abs/1803.03735>`__.
+    r"""Attention-based Graph Neural Network layer from `Attention-based Graph Neural Network for
+    Semi-Supervised Learning <https://arxiv.org/abs/1803.03735>`__

     .. math::
         H^{l+1} = P H^{l}
...
@@ -7,13 +7,9 @@ from mxnet.gluon import nn
 from .... import function as fn

 class APPNPConv(nn.Block):
-    r"""
-
-    Description
-    -----------
-    Approximate Personalized Propagation of Neural Predictions
-    layer from paper `Predict then Propagate: Graph Neural Networks
-    meet Personalized PageRank <https://arxiv.org/pdf/1810.05997.pdf>`__.
+    r"""Approximate Personalized Propagation of Neural Predictions layer from `Predict then
+    Propagate: Graph Neural Networks meet Personalized PageRank
+    <https://arxiv.org/pdf/1810.05997.pdf>`__

     .. math::
         H^{0} &= X
...
@@ -10,13 +10,8 @@ from .... import broadcast_nodes, function as fn
 class ChebConv(nn.Block):
-    r"""
-
-    Description
-    -----------
-    Chebyshev Spectral Graph Convolution layer from paper `Convolutional
-    Neural Networks on Graphs with Fast Localized Spectral Filtering
-    <https://arxiv.org/pdf/1606.09375.pdf>`__.
+    r"""Chebyshev Spectral Graph Convolution layer from `Convolutional Neural Networks on Graphs
+    with Fast Localized Spectral Filtering <https://arxiv.org/pdf/1606.09375.pdf>`__

     .. math::
         h_i^{l+1} &= \sum_{k=0}^{K-1} W^{k, l}z_i^{k, l}
...
@@ -7,13 +7,8 @@ from mxnet.gluon import nn
 class DenseChebConv(nn.Block):
-    r"""
-
-    Description
-    -----------
-    Chebyshev Spectral Graph Convolution layer from paper `Convolutional
-    Neural Networks on Graphs with Fast Localized Spectral Filtering
-    <https://arxiv.org/pdf/1606.09375.pdf>`__.
+    r"""Chebyshev Spectral Graph Convolution layer from `Convolutional Neural Networks on Graphs
+    with Fast Localized Spectral Filtering <https://arxiv.org/pdf/1606.09375.pdf>`__

     We recommend to use this module when applying ChebConv on dense graphs.
...
@@ -7,12 +7,9 @@ from mxnet.gluon import nn
 class DenseGraphConv(nn.Block):
-    """
-
-    Description
-    -----------
-    Graph Convolutional Network layer where the graph structure
-    is given by an adjacency matrix.
+    """Graph Convolutional layer from `Semi-Supervised Classification with Graph
+    Convolutional Networks <https://arxiv.org/abs/1609.02907>`__

     We recommend user to use this module when applying graph convolution on
     dense graphs.
...
@@ -8,12 +8,9 @@ from ....utils import check_eq_shape
 class DenseSAGEConv(nn.Block):
-    """
-
-    Description
-    -----------
-    GraphSAGE layer where the graph structure is given by an
-    adjacency matrix.
+    """GraphSAGE layer from `Inductive Representation Learning on Large Graphs
+    <https://arxiv.org/abs/1706.02216>`__

     We recommend to use this module when appying GraphSAGE on dense graphs.
     Note that we only support gcn aggregator in DenseSAGEConv.
...