Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
dgl
Commits
905db292
Commit
905db292
authored
Oct 07, 2018
by
GaiYu0
Browse files
cpp lg
parent
596ca471
Changes
6
Hide whitespace changes
Inline
Side-by-side
Showing
6 changed files
with
117 additions
and
14 deletions
+117
-14
python/dgl/graph.py
python/dgl/graph.py
+12
-2
python/dgl/graph_index.py
python/dgl/graph_index.py
+26
-11
src/graph/graph_apis.cc
src/graph/graph_apis.cc
+11
-0
src/graph/graph_op.cc
src/graph/graph_op.cc
+43
-0
tests/test_adj_and_inc.py
tests/test_adj_and_inc.py
+4
-1
tests/test_lg.py
tests/test_lg.py
+21
-0
No files found.
python/dgl/graph.py
View file @
905db292
...
...
@@ -1247,7 +1247,7 @@ class DGLGraph(object):
"""
return
self
.
_graph
.
adjacency_matrix
()
def
incidence_matrix
(
self
):
def
incidence_matrix
(
self
,
oriented
=
False
):
"""Return the incidence matrix representation of this graph.
Returns
...
...
@@ -1255,7 +1255,7 @@ class DGLGraph(object):
utils.CtxCachedObject
An object that returns tensor given context.
"""
return
self
.
_graph
.
incidence_matrix
()
return
self
.
_graph
.
incidence_matrix
(
oriented
)
def
line_graph
(
self
):
"""Return the line graph of this graph.
...
...
@@ -1267,6 +1267,16 @@ class DGLGraph(object):
"""
return
DGLGraph
(
self
.
_graph
.
line_graph
())
def
_line_graph
(
self
,
backtracking
=
False
):
"""Return the line graph of this graph.
Returns
-------
DGLGraph
The line graph of this graph.
"""
return
DGLGraph
(
self
.
_graph
.
_line_graph
(
backtracking
))
def
_get_repr
(
attr_dict
):
if
len
(
attr_dict
)
==
1
and
__REPR__
in
attr_dict
:
return
attr_dict
[
__REPR__
]
...
...
python/dgl/graph_index.py
View file @
905db292
...
...
@@ -3,7 +3,7 @@ from __future__ import absolute_import
import
ctypes
import
numpy
as
np
import
networkx
as
nx
import
scipy
as
sp
import
scipy
.sparse
as
sp
from
._ffi.base
import
c_array
from
._ffi.function
import
_init_api
...
...
@@ -408,7 +408,7 @@ class GraphIndex(object):
self
.
_cache
[
'adj'
]
=
utils
.
CtxCachedObject
(
lambda
ctx
:
F
.
to_context
(
mat
,
ctx
))
return
self
.
_cache
[
'adj'
]
def
incidence_matrix
(
self
):
def
incidence_matrix
(
self
,
oriented
=
False
):
"""Return the incidence matrix representation of this graph.
Returns
...
...
@@ -416,8 +416,8 @@ class GraphIndex(object):
utils.CtxCachedObject
An object that returns tensor given context.
"""
# TODO(gaiyu): DiGraph
if
not
'inc'
in
self
.
_cache
:
key
=
(
'oriented '
if
oriented
else
''
)
+
'incidence matrix'
if
not
key
in
self
.
_cache
:
src
,
dst
,
_
=
self
.
edges
(
sorted
=
True
)
src
=
src
.
tousertensor
()
dst
=
dst
.
tousertensor
()
...
...
@@ -427,14 +427,22 @@ class GraphIndex(object):
col
=
F
.
pack
([
eid
,
eid
])
idx
=
F
.
stack
([
row
,
col
])
x
=
F
.
ones
((
m
,))
x
[
src
==
dst
]
=
0
dat
=
F
.
pack
([
x
,
x
])
diagonal
=
(
src
==
dst
)
if
oriented
:
x
=
-
F
.
ones
((
m
,))
y
=
F
.
ones
((
m
,))
x
[
diagonal
]
=
0
y
[
diagonal
]
=
0
dat
=
F
.
pack
([
x
,
y
])
else
:
x
=
F
.
ones
((
m
,))
x
[
diagonal
]
=
0
dat
=
F
.
pack
([
x
,
x
])
n
=
self
.
number_of_nodes
()
mat
=
F
.
sparse_tensor
(
idx
,
dat
,
[
n
,
m
])
self
.
_cache
[
'inc'
]
=
utils
.
CtxCachedObject
(
lambda
ctx
:
F
.
to_context
(
mat
,
ctx
))
self
.
_cache
[
key
]
=
utils
.
CtxCachedObject
(
lambda
ctx
:
F
.
to_context
(
mat
,
ctx
))
return
self
.
_cache
[
'inc'
]
return
self
.
_cache
[
key
]
def
to_networkx
(
self
):
"""Convert to networkx graph.
...
...
@@ -512,12 +520,19 @@ class GraphIndex(object):
"""
m
=
self
.
number_of_edges
()
ctx
=
F
.
get_context
(
F
.
ones
(
1
))
# TODO(gaiyu):
inc
=
F
.
to_scipy_sparse
(
self
.
incidence_matrix
().
get
(
ctx
))
adj
=
inc
.
transpose
().
dot
(
inc
)
-
2
*
sp
.
sparse
.
eye
(
m
)
inc
=
F
.
to_scipy_sparse
(
self
.
incidence_matrix
(
oriented
=
True
).
get
(
ctx
))
adj
=
inc
.
transpose
().
dot
(
inc
)
adj
.
data
[
adj
.
data
!=
-
1
]
=
0
adj
.
eliminate_zeros
()
adj
=
sp
.
triu
(
adj
)
lg
=
create_graph_index
()
lg
.
from_scipy_sparse_matrix
(
adj
)
return
lg
def
_line_graph
(
self
,
backtracking
):
handle
=
_CAPI_DGLGraphLineGraph
(
self
.
_handle
,
backtracking
)
return
GraphIndex
(
handle
)
def
disjoint_union
(
graphs
):
"""Return a disjoint union of the input graphs.
...
...
src/graph/graph_apis.cc
View file @
905db292
...
...
@@ -325,4 +325,15 @@ TVM_REGISTER_GLOBAL("graph_index._CAPI_DGLDisjointPartitionBySizes")
}
*
rv
=
ptr_array
;
});
TVM_REGISTER_GLOBAL
(
"graph_index._CAPI_DGLGraphLineGraph"
)
.
set_body
([]
(
TVMArgs
args
,
TVMRetValue
*
rv
)
{
GraphHandle
ghandle
=
args
[
0
];
bool
backtracking
=
args
[
1
];
const
Graph
*
gptr
=
static_cast
<
Graph
*>
(
ghandle
);
Graph
*
lgptr
=
new
Graph
();
*
lgptr
=
GraphOp
::
LineGraph
(
gptr
,
backtracking
);
GraphHandle
lghandle
=
lgptr
;
*
rv
=
lghandle
;
});
}
// namespace dgl
src/graph/graph_op.cc
View file @
905db292
// Graph operation implementation
#include <dgl/graph_op.h>
#include <algorithm>
#include <stdio.h>
namespace
dgl
{
Graph
GraphOp
::
LineGraph
(
const
Graph
*
g
,
bool
backtracking
){
typedef
std
::
pair
<
dgl_id_t
,
dgl_id_t
>
entry
;
typedef
std
::
map
<
dgl_id_t
,
std
::
vector
<
entry
>>
csm
;
// Compressed Sparse Matrix
csm
adj
;
std
::
vector
<
entry
>
vec
;
for
(
size_t
i
=
0
;
i
!=
g
->
all_edges_src_
.
size
();
++
i
)
{
auto
u
=
g
->
all_edges_src_
[
i
];
auto
v
=
g
->
all_edges_dst_
[
i
];
auto
ret
=
adj
.
insert
(
csm
::
value_type
(
u
,
vec
));
(
ret
.
first
)
->
second
.
push_back
(
std
::
make_pair
(
v
,
i
));
}
std
::
vector
<
dgl_id_t
>
lg_src
,
lg_dst
;
for
(
size_t
i
=
0
;
i
!=
g
->
all_edges_src_
.
size
();
++
i
)
{
auto
u
=
g
->
all_edges_src_
[
i
];
auto
v
=
g
->
all_edges_dst_
[
i
];
auto
j
=
adj
.
find
(
v
);
if
(
j
!=
adj
.
end
())
{
for
(
size_t
k
=
0
;
k
!=
j
->
second
.
size
();
++
k
)
{
if
(
j
->
second
[
k
].
first
!=
u
)
{
lg_src
.
push_back
(
i
);
lg_dst
.
push_back
(
j
->
second
[
k
].
second
);
}
}
}
}
const
int64_t
len
=
lg_src
.
size
();
IdArray
src
=
IdArray
::
Empty
({
len
},
DLDataType
{
kDLInt
,
64
,
1
},
DLContext
{
kDLCPU
,
0
});
IdArray
dst
=
IdArray
::
Empty
({
len
},
DLDataType
{
kDLInt
,
64
,
1
},
DLContext
{
kDLCPU
,
0
});
int64_t
*
src_ptr
=
static_cast
<
int64_t
*>
(
src
->
data
);
int64_t
*
dst_ptr
=
static_cast
<
int64_t
*>
(
dst
->
data
);
std
::
copy
(
lg_src
.
begin
(),
lg_src
.
end
(),
src_ptr
);
std
::
copy
(
lg_dst
.
begin
(),
lg_dst
.
end
(),
dst_ptr
);
Graph
lg
;
lg
.
AddVertices
(
g
->
NumEdges
());
lg
.
AddEdges
(
src
,
dst
);
return
lg
;
}
Graph
GraphOp
::
DisjointUnion
(
std
::
vector
<
const
Graph
*>
graphs
)
{
Graph
rst
;
uint64_t
cumsum
=
0
;
...
...
tests/test_adj_and_inc.py
View file @
905db292
...
...
@@ -4,7 +4,7 @@ import networkx as nx
import
numpy
as
np
import
scipy
as
sp
N
=
5
N
=
1000
a
=
sp
.
sparse
.
random
(
N
,
N
,
1
/
N
,
data_rvs
=
lambda
n
:
np
.
ones
(
n
))
b
=
sp
.
sparse
.
triu
(
a
)
+
sp
.
sparse
.
triu
(
a
,
1
).
transpose
()
g_nx
=
nx
.
from_scipy_sparse_matrix
(
b
,
create_using
=
nx
.
DiGraph
())
...
...
@@ -21,9 +21,12 @@ assert g_nx.edges == h_nx.edges
nx_adj
=
nx
.
adjacency_matrix
(
g_nx
)
nx_inc
=
nx
.
incidence_matrix
(
g_nx
,
edgelist
=
sorted
(
g_nx
.
edges
()))
nx_oriented
=
nx
.
incidence_matrix
(
g_nx
,
edgelist
=
sorted
(
g_nx
.
edges
()),
oriented
=
True
)
ctx
=
F
.
get_context
(
F
.
ones
((
1
,)))
dgl_adj
=
F
.
to_scipy_sparse
(
g_dgl
.
adjacency_matrix
().
get
(
ctx
)).
transpose
()
dgl_inc
=
F
.
to_scipy_sparse
(
g_dgl
.
incidence_matrix
().
get
(
ctx
))
dgl_oriented
=
F
.
to_scipy_sparse
(
g_dgl
.
incidence_matrix
(
oriented
=
True
).
get
(
ctx
))
assert
abs
(
nx_adj
-
dgl_adj
).
max
()
==
0
assert
abs
(
nx_inc
-
dgl_inc
).
max
()
==
0
assert
abs
(
nx_oriented
-
dgl_oriented
).
max
()
==
0
tests/test_lg.py
0 → 100644
View file @
905db292
import
dgl
import
dgl.backend
as
F
import
networkx
as
nx
import
numpy
as
np
import
scipy
as
sp
N
=
10000
a
=
sp
.
sparse
.
random
(
N
,
N
,
1
/
N
,
data_rvs
=
lambda
n
:
np
.
ones
(
n
))
b
=
sp
.
sparse
.
triu
(
a
,
1
)
+
sp
.
sparse
.
triu
(
a
,
1
).
transpose
()
g
=
dgl
.
DGLGraph
()
g
.
from_scipy_sparse_matrix
(
b
)
lg_sparse
=
g
.
line_graph
()
lg_cpp
=
g
.
_line_graph
()
assert
lg_sparse
.
number_of_nodes
()
==
lg_cpp
.
number_of_nodes
()
assert
lg_sparse
.
number_of_edges
()
==
lg_cpp
.
number_of_edges
()
src_sparse
,
dst_sparse
,
_
=
lg_sparse
.
edges
(
sorted
=
True
)
src_cpp
,
dst_cpp
,
_
=
lg_cpp
.
edges
(
sorted
=
True
)
# assert (src_sparse == src_cpp).all()
# assert (dst_sparse == dst_cpp).all()
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment