Commit c6067cc4 authored by lisj

Adapt to dtk23.04-km

parent a99e1077
@@ -77,3 +77,5 @@ Adaptation of HIP struct hipPointerAttribute_t
 src\array\cuda\gather_mm.cu:103
 src\array\cuda\gather_mm.cu:164
 ```
+3. Unit tests and Python-related changes
+Replaced `np.int`/`np.float` with `int`/`float` and `np.asscalar` with `np.ndarray.item`, raised the `numpy` dependency in `setup.py` to `1.20.0`, and unified the dtype-related code to avoid problems in the unit tests and in normal use.
\ No newline at end of file
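
The diffs below apply this migration mechanically across the tree. As a minimal sketch (illustration only, not part of the commit), the before/after pattern looks like this, assuming NumPy >= 1.20:

```python
# Sketch of the migration pattern. np.int and np.float were deprecated aliases
# of the Python builtins as of NumPy 1.20 and were removed in NumPy 1.24;
# np.asscalar was removed in NumPy 1.23.
import numpy as np

# Before (fails on current NumPy, where the deprecated names are gone):
#   ids = np.array([1, 2, 3], dtype=np.int)
#   val = np.asscalar(np.array([0.5]))

# After: builtin types as dtypes, ndarray.item() instead of np.asscalar.
ids = np.array([1, 2, 3], dtype=int)
val = np.array([0.5]).item()
assert isinstance(val, float)
```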
@@ -287,9 +287,9 @@ class BGNNPredictor:
         # initialize for early stopping and metrics
         if metric_name in ['r2', 'accuracy']:
-            best_metric = [np.float('-inf')] * 3  # for train/val/test
+            best_metric = [float('-inf')] * 3  # for train/val/test
         else:
-            best_metric = [np.float('inf')] * 3  # for train/val/test
+            best_metric = [float('inf')] * 3  # for train/val/test
         best_val_epoch = 0
         epochs_since_last_best_metric = 0
...
@@ -56,7 +56,7 @@ def main(args):
             labels = labels.data.numpy().tolist()
             dev_preds += preds
             dev_labels += labels
-        acc = np.equal(dev_labels, dev_preds).astype(np.float).tolist()
+        acc = np.equal(dev_labels, dev_preds).astype(float).tolist()
         acc = sum(acc) / len(acc)
         print(f"Epoch {epoch}, Dev acc {acc}")
@@ -76,7 +76,7 @@ def main(args):
             labels = labels.data.numpy().tolist()
             test_preds += preds
             test_labels += labels
-        acc = np.equal(test_labels, test_preds).astype(np.float).tolist()
+        acc = np.equal(test_labels, test_preds).astype(float).tolist()
         acc = sum(acc) / len(acc)
         test_acc_list.append(acc)
...
@@ -53,7 +53,7 @@ def main(args):
             labels = labels.data.numpy().tolist()
             dev_preds += preds
             dev_labels += labels
-        acc = np.equal(dev_labels, dev_preds).astype(np.float).tolist()
+        acc = np.equal(dev_labels, dev_preds).astype(float).tolist()
         acc = sum(acc) / len(acc)
         print(f"Epoch {epoch}, Dev acc {acc}")
@@ -73,7 +73,7 @@ def main(args):
             labels = labels.data.numpy().tolist()
             test_preds += preds
             test_labels += labels
-        acc = np.equal(test_labels, test_preds).astype(np.float).tolist()
+        acc = np.equal(test_labels, test_preds).astype(float).tolist()
         acc = sum(acc) / len(acc)
         test_acc_list.append(acc)
...
@@ -36,7 +36,7 @@ def evaluate(gt_labels, pred_labels, metric='pairwise'):
     with Timer('evaluate with {}{}{}'.format(TextColors.FATAL, metric,
                                              TextColors.ENDC)):
         result = metric_func(gt_labels, pred_labels)
-        if isinstance(result, np.float):
+        if isinstance(result, float):
             print('{}{}: {:.4f}{}'.format(TextColors.OKGREEN, metric, result,
                                           TextColors.ENDC))
         else:
...
@@ -53,7 +53,7 @@ def process(dataset):
         with open('{0}_node_attributes.txt'.format(prefix), 'r') as f:
             for line in f:
                 node_attrs.append(
-                    np.array([float(attr) for attr in re.split("[,\s]+", line.strip("\s\n")) if attr], dtype=np.float)
+                    np.array([float(attr) for attr in re.split("[,\s]+", line.strip("\s\n")) if attr], dtype=float)
                 )
     else:
         print('No node attributes')
@@ -113,7 +113,7 @@ def process(dataset):
         f = np.zeros(max_deg + 1)
         f[graph.degree[u[0]]] = 1.0
         if 'label' in u[1]:
-            f = np.concatenate((np.array(u[1]['label'], dtype=np.float), f))
+            f = np.concatenate((np.array(u[1]['label'], dtype=float), f))
         graph.nodes[u[0]]['feat'] = f
     return graphs, pprs
...
@@ -188,7 +188,7 @@ class DeepwalkDataset:
         node_degree = self.G.out_degrees(self.valid_seeds).numpy()
         node_degree = np.power(node_degree, 0.75)
         node_degree /= np.sum(node_degree)
-        node_degree = np.array(node_degree * 1e8, dtype=np.int)
+        node_degree = np.array(node_degree * 1e8, dtype=int)
         self.neg_table = []
         for idx, node in enumerate(self.valid_seeds):
...
@@ -184,7 +184,7 @@ class LineDataset:
         node_degree = self.G.out_degrees(self.valid_nodes).numpy()
         node_degree = np.power(node_degree, 0.75)
         node_degree /= np.sum(node_degree)
-        node_degree = np.array(node_degree * 1e8, dtype=np.int)
+        node_degree = np.array(node_degree * 1e8, dtype=int)
         self.neg_table = []
         for idx, node in enumerate(self.valid_nodes):
...
@@ -98,9 +98,9 @@ class ShapeNetDataset(Dataset):
         print('Loading data from split ' + self.mode)
         for fn in tqdm.tqdm(self.file_list, ascii=True):
             with open(fn) as f:
-                data = np.array([t.split('\n')[0].split(' ') for t in f.readlines()]).astype(np.float)
+                data = np.array([t.split('\n')[0].split(' ') for t in f.readlines()]).astype(float)
             data_list.append(data[:, 0:self.dim])
-            label_list.append(data[:, 6].astype(np.int))
+            label_list.append(data[:, 6].astype(int))
             category_list.append(shapenet.synset_dict[fn.split('/')[-2]])
         self.data = data_list
         self.label = label_list
@@ -122,6 +122,6 @@ class ShapeNetDataset(Dataset):
         cat = self.category[i]
         if self.mode == 'train':
             x = self.translate(x, size=self.dim)
-        x = x.astype(np.float)
-        y = y.astype(np.int)
+        x = x.astype(float)
+        y = y.astype(int)
         return x, y, cat
@@ -98,9 +98,9 @@ class ShapeNetDataset(Dataset):
         print('Loading data from split ' + self.mode)
         for fn in tqdm.tqdm(self.file_list, ascii=True):
             with open(fn) as f:
-                data = np.array([t.split('\n')[0].split(' ') for t in f.readlines()]).astype(np.float)
+                data = np.array([t.split('\n')[0].split(' ') for t in f.readlines()]).astype(float)
             data_list.append(data[:, 0:self.dim])
-            label_list.append(data[:, 6].astype(np.int))
+            label_list.append(data[:, 6].astype(int))
             category_list.append(shapenet.synset_dict[fn.split('/')[-2]])
         self.data = data_list
         self.label = label_list
@@ -122,6 +122,6 @@ class ShapeNetDataset(Dataset):
         cat = self.category[i]
         if self.mode == 'train':
             x = self.translate(x, size=self.dim)
-        x = x.astype(np.float)
-        y = y.astype(np.int)
+        x = x.astype(float)
+        y = y.astype(int)
         return x, y, cat
@@ -98,9 +98,9 @@ class ShapeNetDataset(Dataset):
         print('Loading data from split ' + self.mode)
         for fn in tqdm.tqdm(self.file_list, ascii=True):
             with open(fn) as f:
-                data = np.array([t.split('\n')[0].split(' ') for t in f.readlines()]).astype(np.float)
+                data = np.array([t.split('\n')[0].split(' ') for t in f.readlines()]).astype(float)
             data_list.append(data[:, 0:self.dim])
-            label_list.append(data[:, 6].astype(np.int))
+            label_list.append(data[:, 6].astype(int))
             category_list.append(shapenet.synset_dict[fn.split('/')[-2]])
         self.data = data_list
         self.label = label_list
@@ -122,6 +122,6 @@ class ShapeNetDataset(Dataset):
         cat = self.category[i]
         if self.mode == 'train':
             x = self.translate(x, size=self.dim)
-        x = x.astype(np.float)
-        y = y.astype(np.int)
+        x = x.astype(float)
+        y = y.astype(int)
        return x, y, cat
@@ -417,7 +417,7 @@ def _load_data(dataset_str='aifb', dataset_path=None):
         # sort indices by destination
         edge_list = sorted(edge_list, key=lambda x: (x[1], x[0], x[2]))
-        edge_list = np.asarray(edge_list, dtype=np.int)
+        edge_list = np.asarray(edge_list, dtype=int)
         print('Number of edges: ', len(edge_list))
         np.savez(edge_file, edges=edge_list, n=np.asarray(num_node), nrel=np.asarray(num_rel))
...
@@ -156,7 +156,7 @@ class MiniGCDataset(DGLDataset):
         for i in range(self.num_graphs):
             # convert to DGLGraph, and add self loops
             self.graphs[i] = add_self_loop(from_networkx(self.graphs[i]))
-        self.labels = F.tensor(np.array(self.labels).astype(np.int))
+        self.labels = F.tensor(np.array(self.labels).astype(int))

     def _gen_cycle(self, n):
         for _ in range(n):
...
@@ -168,7 +168,7 @@ setup(
     maintainer_email='wmjlyjemaine@gmail.com',
     packages=find_packages(),
     install_requires=[
-        'numpy>=1.14.0',
+        'numpy>=1.20.0',
         'scipy>=1.1.0',
         'networkx>=2.1',
         'requests>=2.19.0',
...
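
For context, NumPy 1.20.0 is the release that deprecated the `np.int`/`np.float` aliases, so this floor keeps every supported NumPy on one consistent set of dtype spellings. A runtime guard along these lines is one option (a sketch only; the `packaging` dependency is an assumption, not something the commit adds):

```python
# Illustration only (not in the commit): fail fast if the numpy floor is unmet.
# Assumes the `packaging` package is installed.
from packaging import version
import numpy as np

if version.parse(np.__version__) < version.parse("1.20.0"):
    raise RuntimeError(
        "numpy>=1.20.0 is required: np.int/np.float were deprecated in 1.20 "
        "and removed in 1.24, and np.asscalar was removed in 1.23")
```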
@@ -510,12 +510,12 @@ def _test_construct_graphs_multiple():
     num_edges = 1000
     num_graphs = 10
     num_dims = 3
-    node_ids = np.array([], dtype=np.int)
-    src_ids = np.array([], dtype=np.int)
-    dst_ids = np.array([], dtype=np.int)
-    ngraph_ids = np.array([], dtype=np.int)
-    egraph_ids = np.array([], dtype=np.int)
-    u_indices = np.array([], dtype=np.int)
+    node_ids = np.array([], dtype=int)
+    src_ids = np.array([], dtype=int)
+    dst_ids = np.array([], dtype=int)
+    ngraph_ids = np.array([], dtype=int)
+    egraph_ids = np.array([], dtype=int)
+    u_indices = np.array([], dtype=int)
     for i in range(num_graphs):
         l_node_ids = np.random.choice(
             np.arange(num_nodes*2), size=num_nodes, replace=False)
@@ -1191,7 +1191,7 @@ def _test_NodeEdgeGraphData():
     from dgl.data.csv_dataset_base import NodeData, EdgeData, GraphData
     # NodeData basics
     num_nodes = 100
-    node_ids = np.arange(num_nodes, dtype=np.float)
+    node_ids = np.arange(num_nodes, dtype=float)
     ndata = NodeData(node_ids, {})
     assert np.array_equal(ndata.id, node_ids)
     assert len(ndata.data) == 0
@@ -1228,8 +1228,8 @@ def _test_NodeEdgeGraphData():
     assert len(edata.data) == 0
     assert np.array_equal(edata.graph_id, np.full(num_edges, 0))
     # EdageData more
-    src_ids = np.random.randint(num_nodes, size=num_edges).astype(np.float)
-    dst_ids = np.random.randint(num_nodes, size=num_edges).astype(np.float)
+    src_ids = np.random.randint(num_nodes, size=num_edges).astype(float)
+    dst_ids = np.random.randint(num_nodes, size=num_edges).astype(float)
     data = {'feat': np.random.rand(num_edges, 3)}
     etype = ('user', 'like', 'item')
     graph_ids = np.arange(num_edges)
@@ -1259,7 +1259,7 @@ def _test_NodeEdgeGraphData():
     assert np.array_equal(gdata.graph_id, graph_ids)
     assert len(gdata.data) == 0
     # GraphData more
-    graph_ids = np.arange(num_graphs).astype(np.float)
+    graph_ids = np.arange(num_graphs).astype(float)
     data = {'feat': np.random.rand(num_graphs, 3)}
     gdata = GraphData(graph_ids, data)
     assert np.array_equal(gdata.graph_id, graph_ids)
...
@@ -1125,8 +1125,8 @@ def test_convert(idtype):
         dsttype = hg.ntypes[ntype_id[dst[i]]]
         etype = hg.etypes[etype_id[i]]
         src_i, dst_i = hg.find_edges([eid[i]], (srctype, etype, dsttype))
-        assert np.asscalar(F.asnumpy(src_i)) == nid[src[i]]
-        assert np.asscalar(F.asnumpy(dst_i)) == nid[dst[i]]
+        assert np.ndarray.item(F.asnumpy(src_i)) == nid[src[i]]
+        assert np.ndarray.item(F.asnumpy(dst_i)) == nid[dst[i]]
     mg = nx.MultiDiGraph([
         ('user', 'user', 'follows'),
...
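
The test change above replaces `np.asscalar` with the unbound `np.ndarray.item`. As a small sketch (not part of the commit), the two spellings agree on any one-element array:

```python
# Illustration only: np.asscalar(x) was equivalent to x.item() and was removed
# in NumPy 1.23; the diff uses the unbound form np.ndarray.item(x).
import numpy as np

x = np.array([7])
# Old spelling, gone on NumPy >= 1.23:
#   np.asscalar(x)
assert np.ndarray.item(x) == 7  # unbound-method form, as used in the diff
assert x.item() == 7            # equivalent bound-method form
```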