Unverified Commit f7dae453 authored by Minjie Wang, committed by GitHub

[CI] Reduce CI workload (#4196)

* try optimize CI

* fix go test; adjust timing report

* disable certain tests for mx/tf backends

* fix ut

* add pydantic
parent 7735473b
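The "disable certain tests for mx/tf backends" item is implemented by stacking an extra unittest.skipIf decorator keyed on dgl.backend.backend_name on top of the existing GPU guard, as the test_data.py hunks further below show. A minimal sketch of the pattern, assuming the usual DGL test harness where the backend shim is imported as F; the test name and the tiny dataset size are illustrative only, not from the repo:

import unittest

import backend as F   # assumed: DGL's test backend shim, as used under tests/
import dgl
from dgl import data

@unittest.skipIf(F._default_context_str == 'gpu',
                 reason="Datasets don't need to be tested on GPU.")
@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_minigc_smoke():
    # Hypothetical small-scale check; the real test in the diff uses 16 graphs.
    ds = data.MiniGCDataset(4, 10, 20)
    assert len(ds) == 4

The skip condition is evaluated once when the module is imported, so the test is still collected but reported as skipped whenever the active backend is MXNet.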
@@ -97,7 +97,7 @@ def tutorial_test_linux(backend) {
def go_test_linux() {
  init_git()
  unpack_lib('dgl-cpu-linux', dgl_linux_libs)
-  timeout(time: 30, unit: 'MINUTES') {
+  timeout(time: 20, unit: 'MINUTES') {
    sh "bash tests/scripts/task_go_test.sh"
  }
}
@@ -371,11 +371,6 @@ pipeline {
        tutorial_test_linux('pytorch')
      }
    }
-    stage('DGL-Go CPU test') {
-      steps {
-        go_test_linux()
-      }
-    }
  }
  post {
    always {
@@ -445,11 +440,6 @@ pipeline {
        unit_test_linux('mxnet', 'cpu')
      }
    }
-    //stage("Tutorial test") {
-    //  steps {
-    //    tutorial_test_linux("mxnet")
-    //  }
-    //}
  }
  post {
    always {
@@ -480,6 +470,27 @@ pipeline {
      }
    }
  }
+  stage('DGL-Go') {
+    agent {
+      docker {
+        label "linux-cpu-node"
+        image "dgllib/dgl-ci-cpu:cu101_v220629"
+        alwaysPull true
+      }
+    }
+    stages {
+      stage('DGL-Go CPU test') {
+        steps {
+          go_test_linux()
+        }
+      }
+    }
+    post {
+      always {
+        cleanWs disableDeferredWipeout: true, deleteDirs: true
+      }
+    }
+  }
      }
    }
  }
......
@@ -13,6 +13,7 @@ from dgl import DGLError
import dgl
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_minigc():
    ds = data.MiniGCDataset(16, 10, 20)
    g, l = list(zip(*ds))
@@ -24,6 +25,7 @@ def test_minigc():
    assert g2.num_edges() - g1.num_edges() == g1.num_nodes()
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_gin():
    ds_n_graphs = {
        'MUTAG': 188,
@@ -43,6 +45,7 @@ def test_gin():
    assert ds.num_classes == ds.gclasses
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_fraud():
    transform = dgl.AddSelfLoop(allow_duplicate=True)
@@ -66,6 +69,7 @@ def test_fraud():
    assert g2.num_edges() - g.num_edges() == g.num_nodes() * 3
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_fakenews():
    transform = dgl.AddSelfLoop(allow_duplicate=True)
@@ -82,6 +86,7 @@ def test_fakenews():
    assert g2.num_edges() - g.num_edges() == g.num_nodes()
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_tudataset_regression():
    ds = data.TUDataset('ZINC_test', force_reload=True)
    assert ds.num_classes == ds.num_labels
@@ -94,6 +99,7 @@ def test_tudataset_regression():
    assert g2.num_edges() - g.num_edges() == g.num_nodes()
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_data_hash():
    class HashTestDataset(data.DGLDataset):
        def __init__(self, hash_key=()):
@@ -111,6 +117,7 @@ def test_data_hash():
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_citation_graph():
    transform = dgl.AddSelfLoop(allow_duplicate=True)
@@ -143,6 +150,7 @@ def test_citation_graph():
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_gnn_benchmark():
    transform = dgl.AddSelfLoop(allow_duplicate=True)
@@ -193,6 +201,7 @@ def test_gnn_benchmark():
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_reddit():
    # RedditDataset
    g = data.RedditDataset()[0]
@@ -206,6 +215,7 @@ def test_reddit():
    assert g2.num_edges() - g.num_edges() == g.num_nodes()
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_explain_syn():
    dataset = data.BAShapeDataset()
    assert dataset.num_classes == 4
@@ -265,6 +275,7 @@ def test_explain_syn():
    assert 'feat' in g.ndata
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_wiki_cs():
    g = data.WikiCSDataset()[0]
    assert g.num_nodes() == 11701
@@ -277,6 +288,7 @@ def test_wiki_cs():
    assert g2.num_edges() - g.num_edges() == g.num_nodes()
@unittest.skip(reason="Dataset too large to download for the latest CI.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_yelp():
    g = data.YelpDataset(reorder=True)[0]
    assert g.num_nodes() == 716847
@@ -289,6 +301,7 @@ def test_yelp():
    assert g2.num_edges() - g.num_edges() == g.num_nodes()
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_flickr():
    g = data.FlickrDataset(reorder=True)[0]
    assert g.num_nodes() == 89250
@@ -301,6 +314,7 @@ def test_flickr():
    assert g2.num_edges() - g.num_edges() == g.num_nodes()
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_extract_archive():
    # gzip
    with tempfile.TemporaryDirectory() as src_dir:
@@ -1256,6 +1270,7 @@ def _test_NodeEdgeGraphData():
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_csvdataset():
    _test_NodeEdgeGraphData()
    _test_construct_graphs_node_ids()
@@ -1272,6 +1287,7 @@ def test_csvdataset():
    _test_CSVDataset_customized_data_parser()
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_add_nodepred_split():
    dataset = data.AmazonCoBuyComputerDataset()
    print('train_mask' in dataset[0].ndata)
@@ -1284,6 +1300,7 @@ def test_add_nodepred_split():
    assert 'train_mask' in dataset[0].nodes['Publikationen'].data
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_as_nodepred1():
    ds = data.AmazonCoBuyComputerDataset()
    print('train_mask' in ds[0].ndata)
@@ -1314,6 +1331,7 @@ def test_as_nodepred1():
                        new_ds[0].nodes['Personen'].data['test_mask']))
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_as_nodepred2():
    # test proper reprocessing
@@ -1344,6 +1362,7 @@ def test_as_nodepred2():
    assert len(ds.train_idx) == int(ds[0].num_nodes('Personen') * 0.1)
@unittest.skipIf(dgl.backend.backend_name != 'pytorch', reason="ogb only supports pytorch")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_as_nodepred_ogb():
    from ogb.nodeproppred import DglNodePropPredDataset
    ds = data.AsNodePredDataset(DglNodePropPredDataset("ogbn-arxiv"), split_ratio=None, verbose=True)
@@ -1356,6 +1375,7 @@ def test_as_nodepred_ogb():
    ds = data.AsNodePredDataset(DglNodePropPredDataset("ogbn-arxiv"), split_ratio=[0.7, 0.2, 0.1], verbose=True)
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_as_linkpred():
    # create
    ds = data.AsLinkPredDataset(data.CoraGraphDataset(), split_ratio=[0.8, 0.1, 0.1], neg_ratio=1, verbose=True)
@@ -1381,6 +1401,7 @@ def test_as_linkpred_ogb():
    assert ds.test_edges[0][0].shape[0] == 235812
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_as_nodepred_csvdataset():
    with tempfile.TemporaryDirectory() as test_dir:
        # generate YAML/CSVs
@@ -1423,6 +1444,7 @@ def test_as_nodepred_csvdataset():
    assert 'train_mask' in new_ds[0].ndata
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_as_graphpred():
    ds = data.GINDataset(name='MUTAG', self_loop=True)
    new_ds = data.AsGraphPredDataset(ds, [0.8, 0.1, 0.1], verbose=True)
@@ -1473,6 +1495,7 @@ def test_as_graphpred():
    assert new_ds.num_classes == 2
@unittest.skipIf(F._default_context_str == 'gpu', reason="Datasets don't need to be tested on GPU.")
+@unittest.skipIf(dgl.backend.backend_name == "mxnet", reason="Skip MXNet")
def test_as_graphpred_reprocess():
    ds = data.AsGraphPredDataset(data.GINDataset(name='MUTAG', self_loop=True), [0.8, 0.1, 0.1])
    assert len(ds.train_idx) == int(len(ds) * 0.8)
......
@@ -3,62 +3,122 @@ import pytest
@pytest.mark.parametrize('data', ['cora', 'citeseer', 'pubmed', 'csv', 'reddit',
                                  'co-buy-computer', 'ogbn-arxiv', 'ogbn-products'])
+def test_nodepred_data(data):
+    os.system(f'dgl configure nodepred --data {data} --model gcn')
+    assert os.path.exists(f'nodepred_{data}_gcn.yaml')
+    custom_cfg = f'custom_{data}_gcn.yaml'
+    os.system(f'dgl configure nodepred --data {data} --model gcn --cfg {custom_cfg}')
+    assert os.path.exists(custom_cfg)
+    custom_script = f'{data}_gcn.py'
+    os.system(f'dgl export --cfg {custom_cfg} --output {custom_script}')
+    assert os.path.exists(custom_script)
@pytest.mark.parametrize('model', ['gcn', 'gat', 'sage', 'sgc', 'gin'])
-def test_nodepred(data, model):
-    os.system('dgl configure nodepred --data {} --model {}'.format(data, model))
-    assert os.path.exists('nodepred_{}_{}.yaml'.format(data, model))
-    custom_config_file = 'custom_{}_{}.yaml'.format(data, model)
-    os.system('dgl configure nodepred --data {} --model {} --cfg {}'.format(data, model,
-                                                                            custom_config_file))
-    assert os.path.exists(custom_config_file)
-    custom_script = '_'.join([data, model]) + '.py'
-    os.system('dgl export --cfg {} --output {}'.format(custom_config_file, custom_script))
+def test_nodepred_model(model):
+    os.system(f'dgl configure nodepred --data cora --model {model}')
+    assert os.path.exists(f'nodepred_cora_{model}.yaml')
+    custom_cfg = f'custom_cora_{model}.yaml'
+    os.system(f'dgl configure nodepred --data cora --model {model} --cfg {custom_cfg}')
+    assert os.path.exists(custom_cfg)
+    custom_script = f'cora_{model}.py'
+    os.system(f'dgl export --cfg {custom_cfg} --output {custom_script}')
    assert os.path.exists(custom_script)
@pytest.mark.parametrize('data', ['cora', 'citeseer', 'pubmed', 'csv', 'reddit',
                                  'co-buy-computer', 'ogbn-arxiv', 'ogbn-products'])
+def test_nodepred_ns_data(data):
+    os.system(f'dgl configure nodepred-ns --data {data} --model gcn')
+    assert os.path.exists(f'nodepred-ns_{data}_gcn.yaml')
+    custom_cfg = f'ns-custom_{data}_gcn.yaml'
+    os.system(f'dgl configure nodepred-ns --data {data} --model gcn --cfg {custom_cfg}')
+    assert os.path.exists(custom_cfg)
+    custom_script = f'ns-{data}_gcn.py'
+    os.system(f'dgl export --cfg {custom_cfg} --output {custom_script}')
+    assert os.path.exists(custom_script)
@pytest.mark.parametrize('model', ['gcn', 'gat', 'sage'])
-def test_nodepred_ns(data, model):
-    os.system('dgl configure nodepred-ns --data {} --model {}'.format(data, model))
-    assert os.path.exists('nodepred-ns_{}_{}.yaml'.format(data, model))
-    custom_config_file = 'custom_{}_{}.yaml'.format(data, model)
-    os.system('dgl configure nodepred-ns --data {} --model {} --cfg {}'.format(data, model,
-                                                                               custom_config_file))
-    assert os.path.exists(custom_config_file)
-    custom_script = '_'.join([data, model]) + '.py'
-    os.system('dgl export --cfg {} --output {}'.format(custom_config_file, custom_script))
+def test_nodepred_ns_model(model):
+    os.system(f'dgl configure nodepred-ns --data cora --model {model}')
+    assert os.path.exists(f'nodepred-ns_cora_{model}.yaml')
+    custom_cfg = f'ns-custom_cora_{model}.yaml'
+    os.system(f'dgl configure nodepred-ns --data cora --model {model} --cfg {custom_cfg}')
+    assert os.path.exists(custom_cfg)
+    custom_script = f'ns-cora_{model}.py'
+    os.system(f'dgl export --cfg {custom_cfg} --output {custom_script}')
    assert os.path.exists(custom_script)
@pytest.mark.parametrize('data', ['cora', 'citeseer', 'pubmed', 'csv', 'reddit',
                                  'co-buy-computer', 'ogbn-arxiv', 'ogbn-products', 'ogbl-collab',
                                  'ogbl-citation2'])
-@pytest.mark.parametrize('node_model', ['gcn' ,'gat', 'sage', 'sgc', 'gin'])
-@pytest.mark.parametrize('edge_model', ['ele', 'bilinear'])
-@pytest.mark.parametrize('neg_sampler', ['global', 'persource'])
-def test_linkpred(data, node_model, edge_model, neg_sampler):
-    custom_config_file = '_'.join([data, node_model, edge_model, neg_sampler]) + '.yaml'
+def test_linkpred_data(data):
+    node_model = 'gcn'
+    edge_model = 'ele'
+    neg_sampler = 'global'
+    custom_cfg = '_'.join([data, node_model, edge_model, neg_sampler]) + '.yaml'
    os.system('dgl configure linkpred --data {} --node-model {} --edge-model {} --neg-sampler {} --cfg {}'.format(
-        data, node_model, edge_model, neg_sampler, custom_config_file))
-    assert os.path.exists(custom_config_file)
+        data, node_model, edge_model, neg_sampler, custom_cfg))
+    assert os.path.exists(custom_cfg)
    custom_script = '_'.join([data, node_model, edge_model, neg_sampler]) + '.py'
-    os.system('dgl export --cfg {} --output {}'.format(custom_config_file, custom_script))
+    os.system('dgl export --cfg {} --output {}'.format(custom_cfg, custom_script))
    assert os.path.exists(custom_script)
-@pytest.mark.parametrize('data', ['cora', 'citeseer', 'pubmed', 'csv', 'reddit',
-                                  'co-buy-computer', 'ogbn-arxiv', 'ogbn-products', 'ogbl-collab',
-                                  'ogbl-citation2'])
@pytest.mark.parametrize('node_model', ['gcn' ,'gat', 'sage', 'sgc', 'gin'])
+def test_linkpred_node_model(node_model):
+    data = 'cora'
+    edge_model = 'ele'
+    neg_sampler = 'global'
+    custom_cfg = '_'.join([data, node_model, edge_model, neg_sampler]) + '.yaml'
+    os.system('dgl configure linkpred --data {} --node-model {} --edge-model {} --neg-sampler {} --cfg {}'.format(
+        data, node_model, edge_model, neg_sampler, custom_cfg))
+    assert os.path.exists(custom_cfg)
+    custom_script = '_'.join([data, node_model, edge_model, neg_sampler]) + '.py'
+    os.system('dgl export --cfg {} --output {}'.format(custom_cfg, custom_script))
+    assert os.path.exists(custom_script)
@pytest.mark.parametrize('edge_model', ['ele', 'bilinear'])
-def test_linkpred_default_neg_sampler(data, node_model, edge_model):
-    custom_config_file = '_'.join([data, node_model, edge_model]) + '.yaml'
-    os.system('dgl configure linkpred --data {} --node-model {} --edge-model {} --cfg {}'.format(
-        data, node_model, edge_model, custom_config_file))
-    assert os.path.exists(custom_config_file)
+def test_linkpred_edge_model(edge_model):
+    data = 'cora'
+    node_model = 'gcn'
+    neg_sampler = 'global'
+    custom_cfg = '_'.join([data, node_model, edge_model, neg_sampler]) + '.yaml'
+    os.system('dgl configure linkpred --data {} --node-model {} --edge-model {} --neg-sampler {} --cfg {}'.format(
+        data, node_model, edge_model, neg_sampler, custom_cfg))
+    assert os.path.exists(custom_cfg)
+    custom_script = '_'.join([data, node_model, edge_model, neg_sampler]) + '.py'
+    os.system('dgl export --cfg {} --output {}'.format(custom_cfg, custom_script))
+    assert os.path.exists(custom_script)
+@pytest.mark.parametrize('neg_sampler', ['global', 'persource', ''])
+def test_linkpred_neg_sampler(neg_sampler):
+    data = 'cora'
+    node_model = 'gcn'
+    edge_model = 'ele'
+    custom_cfg = f'{data}_{node_model}_{edge_model}_{neg_sampler}.yaml'
+    if neg_sampler == '':
+        os.system('dgl configure linkpred --data {} --node-model {} --edge-model {} --cfg {}'.format(
+            data, node_model, edge_model, custom_cfg))
+    else:
+        os.system('dgl configure linkpred --data {} --node-model {} --edge-model {} --neg-sampler {} --cfg {}'.format(
+            data, node_model, edge_model, neg_sampler, custom_cfg))
+    assert os.path.exists(custom_cfg)
+    custom_script = f'{data}_{node_model}_{edge_model}_{neg_sampler}.py'
+    os.system('dgl export --cfg {} --output {}'.format(custom_cfg, custom_script))
+    assert os.path.exists(custom_script)
@pytest.mark.parametrize('data', ['csv', 'ogbg-molhiv', 'ogbg-molpcba'])
@pytest.mark.parametrize('model', ['gin', 'pna'])
@@ -66,13 +126,13 @@ def test_graphpred(data, model):
    os.system('dgl configure graphpred --data {} --model {}'.format(data, model))
    assert os.path.exists('graphpred_{}_{}.yaml'.format(data, model))
-    custom_config_file = 'custom_{}_{}.yaml'.format(data, model)
+    custom_cfg = 'custom_{}_{}.yaml'.format(data, model)
    os.system('dgl configure graphpred --data {} --model {} --cfg {}'.format(data, model,
-                                                                            custom_config_file))
-    assert os.path.exists(custom_config_file)
+                                                                            custom_cfg))
+    assert os.path.exists(custom_cfg)
    custom_script = '_'.join([data, model]) + '.py'
-    os.system('dgl export --cfg {} --output {}'.format(custom_config_file, custom_script))
+    os.system('dgl export --cfg {} --output {}'.format(custom_cfg, custom_script))
    assert os.path.exists(custom_script)
@pytest.mark.parametrize('recipe',
......
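The dgl-go test changes above replace one fully crossed parametrization (data x node_model x edge_model x neg_sampler) with one test per option axis, each pinning the other axes to a default, so the number of CI cases grows additively instead of multiplicatively. A rough sketch of the idea; the option lists and test names here are illustrative, not from the repo:

import pytest

DATASETS = ['cora', 'citeseer', 'pubmed']
MODELS = ['gcn', 'gat']

# Full cross product: len(DATASETS) * len(MODELS) = 6 test cases.
@pytest.mark.parametrize('data', DATASETS)
@pytest.mark.parametrize('model', MODELS)
def test_every_combination(data, model):
    assert data and model

# One axis at a time: len(DATASETS) + len(MODELS) = 5 test cases.
@pytest.mark.parametrize('data', DATASETS)
def test_data_axis(data):
    model = 'gcn'   # fixed default for the other axis
    assert data and model

@pytest.mark.parametrize('model', MODELS)
def test_model_axis(model):
    data = 'cora'   # fixed default for the other axis
    assert data and model

The trade-off is the same as in the diff: per-axis coverage is kept while combinations of non-default options are no longer exercised.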
@@ -34,4 +34,4 @@ do
    done
done < ${DIST_DGL_TEST_IP_CONFIG}
-python3 -m pytest -v --capture=tee-sys --junitxml=pytest_dist.xml tests/dist/test_*.py || fail "dist across machines"
+python3 -m pytest -v --capture=tee-sys --junitxml=pytest_dist.xml --durations=100 tests/dist/test_*.py || fail "dist across machines"
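The --durations=100 flag added to the pytest invocations above and below is pytest's built-in timing report: after the run it prints the 100 slowest test durations, which is what the "adjust timing report" item in the commit message refers to. The same report can be requested programmatically; a small illustrative invocation (the test path is only an example):

import pytest

# Roughly equivalent to: python3 -m pytest -v --durations=100 tests/dist
# prints the 100 slowest setup/call/teardown durations after the run.
exit_code = pytest.main(["-v", "--durations=100", "tests/dist"])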
@@ -23,8 +23,5 @@ popd
export LC_ALL=C.UTF-8
export LANG=C.UTF-8
-python3 -m pytest -v --junitxml=pytest_go.xml tests/go || fail "go"
-export PYTHONUNBUFFERED=1
-export OMP_NUM_THREADS=1
-export DMLC_LOG_DEBUG=1
+python -m pip install psutil || fail "pip install"
+python3 -m pytest -v --junitxml=pytest_go.xml --durations=100 tests/go || fail "go"
@@ -14,8 +14,8 @@ SET DGLBACKEND=!BACKEND!
SET DGL_LIBRARY_PATH=!CD!\build
SET DGL_DOWNLOAD_DIR=!CD!
-python -m pip install pytest psutil pyyaml pandas pydantic rdflib || EXIT /B 1
-python -m pytest -v --junitxml=pytest_backend.xml tests\!DGLBACKEND! || EXIT /B 1
-python -m pytest -v --junitxml=pytest_compute.xml tests\compute || EXIT /B 1
+python -m pip install pytest psutil pandas pyyaml pydantic rdflib || EXIT /B 1
+python -m pytest -v --junitxml=pytest_backend.xml --durations=100 tests\!DGLBACKEND! || EXIT /B 1
+python -m pytest -v --junitxml=pytest_compute.xml --durations=100 tests\compute || EXIT /B 1
ENDLOCAL
EXIT /B
@@ -32,18 +32,16 @@ fi
conda activate ${DGLBACKEND}-ci
-python3 -m pip install pytest psutil pyyaml pandas pydantic rdflib ogb || fail "pip install"
-python3 -m pytest -v --junitxml=pytest_compute.xml tests/compute || fail "compute"
-python3 -m pytest -v --junitxml=pytest_backend.xml tests/$DGLBACKEND || fail "backend-specific"
+python3 -m pip install pytest psutil pyyaml pydantic pandas rdflib ogb || fail "pip install"
+python3 -m pytest -v --junitxml=pytest_compute.xml --durations=100 tests/compute || fail "compute"
+python3 -m pytest -v --junitxml=pytest_backend.xml --durations=100 tests/$DGLBACKEND || fail "backend-specific"
export PYTHONUNBUFFERED=1
export OMP_NUM_THREADS=1
export DMLC_LOG_DEBUG=1
-if [ $2 != "gpu" ]; then
-    python3 -m pytest -v --capture=tee-sys --junitxml=pytest_distributed.xml tests/distributed/*.py || fail "distributed"
-    if [ $DGLBACKEND == "pytorch" ]; then
-        python3 -m pip install filelock
-        PYTHONPATH=tools:$PYTHONPATH python3 -m pytest -v --capture=tee-sys --junitxml=pytest_tools.xml tests/tools/*.py || fail "tools"
-    fi
+if [ $2 != "gpu" && $DGLBACKEND == "pytorch" ]; then
+    python3 -m pip install filelock
+    python3 -m pytest -v --capture=tee-sys --junitxml=pytest_distributed.xml --durations=100 tests/distributed/*.py || fail "distributed"
+    PYTHONPATH=tools:$PYTHONPATH python3 -m pytest -v --capture=tee-sys --junitxml=pytest_tools.xml --durations=100 tests/tools/*.py || fail "tools"
fi