Commit b0fbdca2 authored by Ignacio Pickering's avatar Ignacio Pickering Committed by Farhad Ramezanghorbani
Browse files

Fix codefactor warnings (#273)

* Fix pyanitools docstrings

* Make offending ignite docstring raw

* Make flake8 happy
parent a41cc8b7
......@@ -6,22 +6,15 @@ import os
class datapacker:
def __init__(self, store_file, mode='w-', complib='gzip', complevel=6):
    """Wrapper to store arrays within an HDF5 file.

    Args:
        store_file: path of the HDF5 file to open/create.
        mode: h5py file mode; default 'w-' creates the file and fails
            if it already exists.
        complib: compression library name, later passed as
            ``compression=`` when datasets are created.
        complevel: compression level, later passed as
            ``compression_opts=`` when datasets are created.
    """
    # opening file
    self.store = h5py.File(store_file, mode=mode)
    self.clib = complib
    self.clev = complevel
def store_data(self, store_loc, **kwargs):
"""Put arrays to store
"""
# print(store_loc)
"""Put arrays to store"""
g = self.store.create_group(store_loc)
for k, v, in kwargs.items():
# print(type(v[0]))
# print(k)
if isinstance(v, list):
if len(v) != 0:
if isinstance(v[0], np.str_) or isinstance(v[0], str):
......@@ -31,24 +24,22 @@ class datapacker:
compression_opts=self.clev)
def cleanup(self):
    """Wrapper to close the HDF5 file."""
    self.store.close()
class anidataloader:
def __init__(self, store_file):
    """Constructor: open an existing HDF5 store in read-only mode.

    Args:
        store_file: path of the HDF5 file to read.
    """
    if not os.path.exists(store_file):
        # NOTE(review): exit() in a library is harsh; raising
        # FileNotFoundError would be cleaner, but callers may rely on
        # SystemExit here, so the original behavior is kept.
        exit('Error: file not found - ' + store_file)
    self.store = h5py.File(store_file, 'r')
''' Group recursive iterator (iterate through all groups
in all branches and return datasets in dicts) '''
def h5py_dataset_iterator(self, g, prefix=''):
"""Group recursive iterator
Iterate through all groups in all branches and return datasets in dicts)
"""
for key in g.keys():
item = g[key]
path = '{}/{}'.format(prefix, key)
......@@ -64,38 +55,31 @@ class anidataloader:
if isinstance(dataset[0], np.bytes_):
dataset = [a.decode('ascii')
for a in dataset]
data.update({k: dataset})
yield data
else: # test for group (go down)
yield from self.h5py_dataset_iterator(item, path)
def __iter__(self):
    """Default class iterator (iterate through all data)."""
    # Delegate directly instead of a manual for/yield loop; the
    # recursive iterator already uses `yield from` elsewhere.
    yield from self.h5py_dataset_iterator(self.store)
def get_group_list(self):
    """Return a list of all top-level groups in the file."""
    # list(...) replaces the redundant identity comprehension.
    return list(self.store.values())
def iter_group(self, g):
    """Allow iteration through the datasets in a given group.

    Args:
        g: an h5py Group to walk recursively.
    """
    yield from self.h5py_dataset_iterator(g)
''' Returns the requested dataset '''
def get_data(self, path, prefix=''):
"""Returns the requested dataset"""
item = self.store[path]
path = '{}/{}'.format(prefix, path)
keys = [i for i in item.keys()]
data = {'path': path}
# print(path)
for k in keys:
if not isinstance(item[k], h5py.Group):
dataset = np.array(item[k][()])
......@@ -108,9 +92,8 @@ class anidataloader:
data.update({k: dataset})
return data
def group_size(self):
    """Return the number of top-level groups in the file."""
    return len(self.get_group_list())
def size(self):
......@@ -119,7 +102,6 @@ class anidataloader:
count = count + len(g.items())
return count
def cleanup(self):
    """Close the HDF5 file."""
    self.store.close()
......@@ -9,7 +9,7 @@ from ignite.contrib.metrics.regression import MaximumAbsoluteError
class Container(torch.nn.ModuleDict):
"""Each minibatch is splitted into chunks, as explained in the docstring of
r"""Each minibatch is splitted into chunks, as explained in the docstring of
:method:`torchani.data.load_ani_dataset`, as a result, it is impossible to
use :class:`torchani.AEVComputer`, :class:`torchani.ANIModel` directly with
ignite. This class is designed to solve this issue.
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment