Unverified Commit 84f90644 authored by yxy235's avatar yxy235 Committed by GitHub
Browse files

[GraphBolt] Rewrite tests from random to hard code. (#6731)


Co-authored-by: default avatarUbuntu <ubuntu@ip-172-31-0-133.us-west-2.compute.internal>
parent e9d5156e
...@@ -71,75 +71,102 @@ def test_find_reverse_edges_circual_reverse_types(): ...@@ -71,75 +71,102 @@ def test_find_reverse_edges_circual_reverse_types():
def test_unique_and_compact_hetero():
    """Check gb.unique_and_compact on a heterogeneous dict of node-ID chunks.

    Uses hard-coded inputs so the expected unique IDs and compacted
    (relabelled) IDs can be asserted exactly, instead of relying on
    torch.randint and only checking sorted-uniqueness.
    """
    # Raw node IDs per node type, with deliberate duplicates.
    N1 = torch.tensor([0, 5, 2, 7, 12, 7, 9, 5, 6, 2, 3, 4, 1, 0, 9])
    N2 = torch.tensor([0, 3, 3, 5, 2, 7, 2, 8, 4, 9, 2, 3])
    N3 = torch.tensor([1, 2, 6, 6, 1, 8, 3, 6, 3, 2])
    # Unique IDs in first-occurrence order (the order the op is expected
    # to produce, not sorted order).
    expected_unique = {
        "n1": torch.tensor([0, 5, 2, 7, 12, 9, 6, 3, 4, 1]),
        "n2": torch.tensor([0, 3, 5, 2, 7, 8, 4, 9]),
        "n3": torch.tensor([1, 2, 6, 8, 3]),
    }
    # Feed the IDs as lists of chunks per type.
    nodes_dict = {
        "n1": N1.split(5),
        "n2": N2.split(4),
        "n3": N3.split(2),
    }
    # Each chunk compacted: every original ID replaced by its index into
    # the corresponding expected_unique tensor.
    expected_nodes_dict = {
        "n1": [
            torch.tensor([0, 1, 2, 3, 4]),
            torch.tensor([3, 5, 1, 6, 2]),
            torch.tensor([7, 8, 9, 0, 5]),
        ],
        "n2": [
            torch.tensor([0, 1, 1, 2]),
            torch.tensor([3, 4, 3, 5]),
            torch.tensor([6, 7, 3, 1]),
        ],
        "n3": [
            torch.tensor([0, 1]),
            torch.tensor([2, 2]),
            torch.tensor([0, 3]),
            torch.tensor([4, 2]),
            torch.tensor([4, 1]),
        ],
    }

    unique, compacted = gb.unique_and_compact(nodes_dict)
    for ntype, nodes in unique.items():
        expected_nodes = expected_unique[ntype]
        assert torch.equal(nodes, expected_nodes)
    for ntype, nodes in compacted.items():
        expected_nodes = expected_nodes_dict[ntype]
        assert isinstance(nodes, list)
        for expected_node, node in zip(expected_nodes, nodes):
            assert torch.equal(expected_node, node)
def test_unique_and_compact_homo():
    """Check gb.unique_and_compact on a homogeneous list of node-ID chunks.

    Hard-coded inputs let us assert the exact unique ordering
    (first-occurrence order) and the exact compacted relabelling.
    """
    N = torch.tensor([0, 5, 2, 7, 12, 7, 9, 5, 6, 2, 3, 4, 1, 0, 9])
    # Unique IDs in first-occurrence order.
    expected_unique_N = torch.tensor([0, 5, 2, 7, 12, 9, 6, 3, 4, 1])
    nodes_list = N.split(5)
    # Each chunk with IDs replaced by their index in expected_unique_N.
    expected_nodes_list = [
        torch.tensor([0, 1, 2, 3, 4]),
        torch.tensor([3, 5, 1, 6, 2]),
        torch.tensor([7, 8, 9, 0, 5]),
    ]

    unique, compacted = gb.unique_and_compact(nodes_list)
    assert torch.equal(unique, expected_unique_N)
    assert isinstance(compacted, list)
    for expected_node, node in zip(expected_nodes_list, compacted):
        assert torch.equal(expected_node, node)
def test_unique_and_compact_node_pairs_hetero():
    """Check gb.unique_and_compact_node_pairs on heterogeneous edge pairs.

    Edge types are "src:etype:dst" strings; each maps to a (src, dst) pair
    of node-ID tensors. Expected uniques and compacted pairs are hard-coded.
    """
    node_pairs = {
        "n1:e1:n2": (
            torch.tensor([1, 3, 4, 6, 2, 7, 9, 4, 2, 6]),
            torch.tensor([2, 2, 2, 4, 1, 1, 1, 3, 3, 3]),
        ),
        "n1:e2:n3": (
            torch.tensor([5, 2, 6, 4, 7, 2, 8, 1, 3, 0]),
            torch.tensor([1, 3, 3, 3, 2, 2, 2, 7, 7, 7]),
        ),
        "n2:e3:n3": (
            torch.tensor([2, 5, 4, 1, 4, 3, 6, 0]),
            torch.tensor([1, 1, 3, 3, 2, 2, 7, 7]),
        ),
    }
    # Unique IDs per node type, in the order the op is expected to emit
    # (destination IDs first, then newly seen source IDs).
    expected_unique_nodes = {
        "n1": torch.tensor([1, 3, 4, 6, 2, 7, 9, 5, 8, 0]),
        "n2": torch.tensor([1, 2, 3, 4, 5, 6, 0]),
        "n3": torch.tensor([1, 2, 3, 7]),
    }
    # The same pairs with every ID replaced by its index into the
    # matching expected_unique_nodes tensor.
    expected_node_pairs = {
        "n1:e1:n2": (
            torch.tensor([0, 1, 2, 3, 4, 5, 6, 2, 4, 3]),
            torch.tensor([1, 1, 1, 3, 0, 0, 0, 2, 2, 2]),
        ),
        "n1:e2:n3": (
            torch.tensor([7, 4, 3, 2, 5, 4, 8, 0, 1, 9]),
            torch.tensor([0, 2, 2, 2, 1, 1, 1, 3, 3, 3]),
        ),
        "n2:e3:n3": (
            torch.tensor([1, 4, 3, 0, 3, 2, 5, 6]),
            torch.tensor([0, 0, 2, 2, 1, 1, 3, 3]),
        ),
    }

    # NOTE(review): the call line was elided in the source diff hunk;
    # reconstructed from the result names used below — confirm upstream.
    unique_nodes, compacted_node_pairs = gb.unique_and_compact_node_pairs(
        node_pairs
    )
    for ntype, nodes in unique_nodes.items():
        expected_nodes = expected_unique_nodes[ntype]
        assert torch.equal(nodes, expected_nodes)
    for etype, pair in compacted_node_pairs.items():
        u, v = pair
        expected_u, expected_v = expected_node_pairs[etype]
        assert torch.equal(u, expected_u)
        assert torch.equal(v, expected_v)
def test_unique_and_compact_node_pairs_homo():
    """Check gb.unique_and_compact_node_pairs on a homogeneous (src, dst) pair.

    Also fixes the `src_ndoes` local-variable typo from the original.
    """
    dst_nodes = torch.tensor([1, 1, 3, 3, 5, 5, 2, 6, 6, 6, 6])
    src_nodes = torch.tensor([2, 3, 1, 4, 5, 2, 5, 1, 4, 4, 6])
    node_pairs = (src_nodes, dst_nodes)
    # Destination IDs come first in the unique ordering, then any source
    # ID not already seen as a destination (here: 4).
    expected_unique_nodes = torch.tensor([1, 2, 3, 5, 6, 4])
    expected_dst_nodes = torch.tensor([0, 0, 2, 2, 3, 3, 1, 4, 4, 4, 4])
    expected_src_nodes = torch.tensor([1, 2, 0, 5, 3, 1, 3, 0, 5, 5, 4])

    unique_nodes, compacted_node_pairs = gb.unique_and_compact_node_pairs(
        node_pairs
    )
    assert torch.equal(unique_nodes, expected_unique_nodes)
    u, v = compacted_node_pairs
    assert torch.equal(u, expected_src_nodes)
    assert torch.equal(v, expected_dst_nodes)
    # The leading entries of the unique tensor are exactly the unique
    # destination IDs.
    assert torch.equal(unique_nodes[:5], torch.tensor([1, 2, 3, 5, 6]))
def test_incomplete_unique_dst_nodes_():
    """Destinations absent from unique_dst_nodes must raise IndexError.

    All dst IDs (100..149) are outside the provided unique_dst_nodes
    (150..199), so the compaction lookup cannot resolve them.
    """
    node_pairs = (torch.arange(0, 50), torch.arange(100, 150))
    unique_dst_nodes = torch.arange(150, 200)
    with pytest.raises(IndexError):
        gb.unique_and_compact_node_pairs(node_pairs, unique_dst_nodes)
...@@ -262,30 +287,46 @@ def test_unique_and_compact_csc_formats_homo(): ...@@ -262,30 +287,46 @@ def test_unique_and_compact_csc_formats_homo():
def test_compact_csc_format_hetero():
    """Check gb.compact_csc_format on heterogeneous CSC sampled formats.

    compact_csc_format concatenates seed (dst) IDs with the sampled row
    (src) IDs per node type and relabels indices to point into that
    concatenation.
    """
    dst_nodes = {
        "n2": torch.tensor([2, 4, 1, 3]),
        "n3": torch.tensor([1, 3, 2, 7]),
    }
    csc_formats = {
        "n1:e1:n2": gb.CSCFormatBase(
            indptr=torch.tensor([0, 3, 4, 7, 10]),
            indices=torch.tensor([1, 3, 4, 6, 2, 7, 9, 4, 2, 6]),
        ),
        "n1:e2:n3": gb.CSCFormatBase(
            indptr=torch.tensor([0, 1, 4, 7, 10]),
            indices=torch.tensor([5, 2, 6, 4, 7, 2, 8, 1, 3, 0]),
        ),
        "n2:e3:n3": gb.CSCFormatBase(
            indptr=torch.tensor([0, 2, 4, 6, 8]),
            indices=torch.tensor([2, 5, 4, 1, 4, 3, 6, 0]),
        ),
    }
    # Per type: the seeds (if that type is a destination) followed by the
    # raw indices of every etype whose source is that type, in order.
    expected_original_row_ids = {
        "n1": torch.tensor(
            [1, 3, 4, 6, 2, 7, 9, 4, 2, 6, 5, 2, 6, 4, 7, 2, 8, 1, 3, 0]
        ),
        "n2": torch.tensor([2, 4, 1, 3, 2, 5, 4, 1, 4, 3, 6, 0]),
        "n3": torch.tensor([1, 3, 2, 7]),
    }
    # Compacted indices are consecutive positions into the per-type
    # original_row_ids tensor; indptr is unchanged.
    expected_csc_formats = {
        "n1:e1:n2": gb.CSCFormatBase(
            indptr=torch.tensor([0, 3, 4, 7, 10]),
            indices=torch.arange(0, 10),
        ),
        "n1:e2:n3": gb.CSCFormatBase(
            indptr=torch.tensor([0, 1, 4, 7, 10]),
            indices=torch.arange(0, 10) + 10,
        ),
        "n2:e3:n3": gb.CSCFormatBase(
            indptr=torch.tensor([0, 2, 4, 6, 8]),
            indices=torch.arange(0, 8) + 4,
        ),
    }

    original_row_ids, compacted_csc_formats = gb.compact_csc_format(
        csc_formats, dst_nodes
    )
    # NOTE(review): a few lines were elided in the source diff hunk here;
    # reconstructed as the per-type row-ID check — confirm upstream.
    for ntype, ids in original_row_ids.items():
        assert torch.equal(ids, expected_original_row_ids[ntype])
    for etype, csc_format in compacted_csc_formats.items():
        indptr = csc_format.indptr
        indices = csc_format.indices
        expected_indptr = expected_csc_formats[etype].indptr
        expected_indices = expected_csc_formats[etype].indices
        assert torch.equal(indptr, expected_indptr)
        assert torch.equal(indices, expected_indices)
def test_compact_csc_format_homo():
    """Check gb.compact_csc_format on a homogeneous CSC sampled format.

    The original row IDs are the seeds followed by the raw indices;
    compacted indices become consecutive positions after the seeds.
    """
    seeds = torch.tensor([1, 3, 5, 2, 6])
    indptr = torch.tensor([0, 2, 4, 6, 7, 11])
    indices = torch.tensor([2, 3, 1, 4, 5, 2, 5, 1, 4, 4, 6])
    csc_formats = gb.CSCFormatBase(indptr=indptr, indices=indices)
    # Seeds first, then the raw indices, concatenated.
    expected_original_row_ids = torch.tensor(
        [1, 3, 5, 2, 6, 2, 3, 1, 4, 5, 2, 5, 1, 4, 4, 6]
    )
    expected_indptr = indptr
    # Indices relabelled to positions just past the 5 seeds.
    expected_indices = torch.arange(0, len(indices)) + 5

    original_row_ids, compacted_csc_formats = gb.compact_csc_format(
        csc_formats, seeds
    )
    indptr = compacted_csc_formats.indptr
    indices = compacted_csc_formats.indices
    assert torch.equal(indptr, expected_indptr)
    assert torch.equal(indices, expected_indices)
    assert torch.equal(original_row_ids, expected_original_row_ids)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment