Unverified Commit 89b321b8 authored by Hongzhi (Steve), Chen, committed by GitHub

[Misc] Rename number_of_edges and number_of_nodes to num_edges and num_nodes. (#5488)



* nn-only

* data-only

---------
Co-authored-by: Ubuntu <ubuntu@ip-172-31-28-63.ap-northeast-1.compute.internal>
parent 7f5da697
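A quick, hedged illustration of the rename (not part of the diff below; the toy graph is invented for this note, assuming a DGL build whose DGLGraph exposes both spellings):

    import dgl
    import torch as th

    # toy graph with 3 nodes and 2 edges, purely for illustration
    g = dgl.graph((th.tensor([0, 1]), th.tensor([1, 2])))

    g.number_of_nodes()  # longer spelling used before this commit
    g.num_nodes()        # shorter spelling the touched modules now use
    g.number_of_edges()  # longer spelling
    g.num_edges()        # shorter spelling

Both spellings return the same counts; the commit only switches which name the nn and data code paths and their docstrings reference.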
@@ -62,7 +62,7 @@ class HeteroGraphConv(nn.Module):
     ``'user'`` and ``'game'`` nodes.
     >>> import torch as th
-    >>> h1 = {'user' : th.randn((g.number_of_nodes('user'), 5))}
+    >>> h1 = {'user' : th.randn((g.num_nodes('user'), 5))}
     >>> h2 = conv(g, h1)
     >>> print(h2.keys())
     dict_keys(['user', 'game'])
...
@@ -57,7 +57,7 @@ class NodeEmbedding:  # NodeEmbedding
     In each training process
-    >>> emb = dgl.nn.NodeEmbedding(g.number_of_nodes(), 10, 'emb', init_func=initializer)
+    >>> emb = dgl.nn.NodeEmbedding(g.num_nodes(), 10, 'emb', init_func=initializer)
     >>> optimizer = dgl.optim.SparseAdam([emb], lr=0.001)
     >>> for blocks in dataloader:
     ...     ...
...
@@ -182,7 +182,7 @@ class Sequential(nn.Sequential):
     >>> graph.ndata['h'] = n_feat
     >>> graph.update_all(fn.copy_u('h', 'm'), fn.sum('m', 'h'))
     >>> n_feat += graph.ndata['h']
-    >>> return n_feat.view(graph.number_of_nodes() // 2, 2, -1).sum(1)
+    >>> return n_feat.view(graph.num_nodes() // 2, 2, -1).sum(1)
     >>>
     >>> g1 = dgl.DGLGraph(nx.erdos_renyi_graph(32, 0.05))
     >>> g2 = dgl.DGLGraph(nx.erdos_renyi_graph(16, 0.2))
...
@@ -66,9 +66,7 @@ class APPNPConv(layers.Layer):
             # normalization by src node
             feat = feat * norm
             graph.ndata["h"] = feat
-            graph.edata["w"] = self.edge_drop(
-                tf.ones(graph.number_of_edges(), 1)
-            )
+            graph.edata["w"] = self.edge_drop(tf.ones(graph.num_edges(), 1))
             graph.update_all(fn.u_mul_e("h", "w", "m"), fn.sum("m", "h"))
             feat = graph.ndata.pop("h")
             # normalization by dst node
...
@@ -165,7 +165,7 @@ class SAGEConv(layers.Layer):
         h_self = feat_dst
         # Handle the case of graphs without edges
-        if graph.number_of_edges() == 0:
+        if graph.num_edges() == 0:
             graph.dstdata["neigh"] = tf.cast(
                 tf.zeros((graph.number_of_dst_nodes(), self._in_src_feats)),
                 tf.float32,
...
@@ -58,7 +58,7 @@ class HeteroGraphConv(layers.Layer):
     ``'user'`` and ``'game'`` nodes.
     >>> import tensorflow as tf
-    >>> h1 = {'user' : tf.random.normal((g.number_of_nodes('user'), 5))}
+    >>> h1 = {'user' : tf.random.normal((g.num_nodes('user'), 5))}
     >>> h2 = conv(g, h1)
     >>> print(h2.keys())
     dict_keys(['user', 'game'])
...