Unverified Commit bc5f4312 authored by Gao, Xiang, committed by GitHub

Config flake8 and fix many linter errors (#198)

parent f149f6e1
@@ -28,9 +28,9 @@ html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
htmlhelp_basename = 'TorchANIdoc'

sphinx_gallery_conf = {
    'examples_dirs': '../examples',
    'gallery_dirs': 'examples',
    'filename_pattern': r'.*\.py'
}

intersphinx_mapping = {
...
@@ -52,8 +52,8 @@ log = 'runs'
###############################################################################
# Here, there is no need to manually construct aev computer and energy shifter,
# but we do need to generate a disk cache for datasets
-const_file = os.path.join(path, '../torchani/resources/ani-1x_8x/rHCNO-5.2R_16-3.5A_a4-8.params')  # noqa: E501
-sae_file = os.path.join(path, '../torchani/resources/ani-1x_8x/sae_linfit.dat')  # noqa: E501
+const_file = os.path.join(path, '../torchani/resources/ani-1x_8x/rHCNO-5.2R_16-3.5A_a4-8.params')
+sae_file = os.path.join(path, '../torchani/resources/ani-1x_8x/sae_linfit.dat')
training_cache = './training_cache'
validation_cache = './validation_cache'
@@ -113,7 +113,9 @@ container = torchani.ignite.Container({'energies': model})
optimizer = torch.optim.Adam(model.parameters())
trainer = ignite.engine.create_supervised_trainer(
    container, optimizer, torchani.ignite.MSELoss('energies'))
-evaluator = ignite.engine.create_supervised_evaluator(container, metrics={
-    'RMSE': torchani.ignite.RMSEMetric('energies')
-})
+evaluator = ignite.engine.create_supervised_evaluator(
+    container,
+    metrics={
+        'RMSE': torchani.ignite.RMSEMetric('energies')
+    })
...
@@ -33,11 +33,11 @@ model = torchani.models.ANI1ccx()
# preceding ``1`` in the shape is here to support batch processing like in
# training. If you have ``N`` different structures to compute, then make it
# ``N``.
coordinates = torch.tensor([[[0.03192167, 0.00638559, 0.01301679],
                             [-0.83140486, 0.39370209, -0.26395324],
                             [-0.66518241, -0.84461308, 0.20759389],
                             [0.45554739, 0.54289633, 0.81170881],
                             [0.66091919, -0.16799635, -0.91037834]]],
                            requires_grad=True, device=device)
species = model.species_to_tensor('CHHHH').to(device).unsqueeze(0)
...
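Note (illustration, not part of this commit): the leading ``1`` in the coordinates shape is just a batch dimension, as the comment above explains. A tiny stand-alone sketch of the same convention:

import torch

single = torch.zeros(5, 3)       # one structure: (atoms, 3)
batched = single.unsqueeze(0)    # add the batch axis: (1, atoms, 3)
print(batched.shape)             # torch.Size([1, 5, 3])

# N structures with the same number of atoms would simply be (N, atoms, 3).
batch_of_8 = torch.zeros(8, 5, 3)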
@@ -64,11 +64,11 @@ print(calculator1)
###############################################################################
# Now let's define a methane molecule
coordinates = torch.tensor([[[0.03192167, 0.00638559, 0.01301679],
                             [-0.83140486, 0.39370209, -0.26395324],
                             [-0.66518241, -0.84461308, 0.20759389],
                             [0.45554739, 0.54289633, 0.81170881],
                             [0.66091919, -0.16799635, -0.91037834]]],
                            requires_grad=True)
species = consts.species_to_tensor('CHHHH').unsqueeze(0)
methane = ase.Atoms('CHHHH', positions=coordinates.squeeze().detach().numpy())
...
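Note (illustration, not part of this commit): the methane ``Atoms`` object built above would typically be paired with a calculator through ASE's standard interface. A minimal self-contained sketch, with the calculator lines left commented because they depend on objects defined earlier in that example:

import ase

methane = ase.Atoms('CHHHH', positions=[[0.03192167, 0.00638559, 0.01301679],
                                        [-0.83140486, 0.39370209, -0.26395324],
                                        [-0.66518241, -0.84461308, 0.20759389],
                                        [0.45554739, 0.54289633, 0.81170881],
                                        [0.66091919, -0.16799635, -0.91037834]])
# methane.set_calculator(calculator1)      # e.g. the TorchANI calculator printed above
# energy = methane.get_potential_energy()  # eV, via ASE's generic calculator API
# forces = methane.get_forces()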
@@ -148,7 +148,9 @@ container = torchani.ignite.Container({'energies': model})
optimizer = torch.optim.Adam(model.parameters())
trainer = ignite.engine.create_supervised_trainer(
    container, optimizer, torchani.ignite.MSELoss('energies'))
-evaluator = ignite.engine.create_supervised_evaluator(container, metrics={
-    'RMSE': torchani.ignite.RMSEMetric('energies')
-})
+evaluator = ignite.engine.create_supervised_evaluator(
+    container,
+    metrics={
+        'RMSE': torchani.ignite.RMSEMetric('energies')
+    })
...
@@ -2,4 +2,13 @@
verbosity=3
detailed-errors=1
with-coverage=1
-cover-package=torchani
\ No newline at end of file
+cover-package=torchani
+
+[flake8]
+ignore = E501
+exclude =
+    .git,
+    __pycache__,
+    build,
+    .eggs,
+    docs/examples
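Note (illustration, not part of this commit): with ``ignore = E501`` in setup.cfg, flake8 no longer flags lines over 79 characters anywhere in the project, which is why the per-line ``# noqa: E501`` suppressions are dropped above. A minimal sketch of the effect; the ``path`` definition is hypothetical, mirroring how the example scripts locate resources:

import os

# Previously this long line carried "# noqa: E501" so flake8 would not report it;
# with E501 ignored project-wide, the same line passes without the suppression comment.
path = os.path.dirname(os.path.abspath(__file__))
sae_file = os.path.join(path, '../torchani/resources/ani-1x_8x/sae_linfit.dat')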
@@ -74,7 +74,7 @@ class TestAEV(unittest.TestCase):
        for expected_radial, expected_angular in radial_angular:
            conformations = expected_radial.shape[0]
            atoms = expected_radial.shape[1]
-            aev_ = aev[start:start+conformations, 0:atoms]
+            aev_ = aev[start:(start + conformations), 0:atoms]
            start += conformations
            self._assertAEVEqual(expected_radial, expected_angular, aev_)
...
@@ -21,7 +21,7 @@ class TestData(unittest.TestCase):
                                        batch_size)

    def _assertTensorEqual(self, t1, t2):
-        self.assertEqual((t1-t2).abs().max().item(), 0)
+        self.assertEqual((t1 - t2).abs().max().item(), 0)

    def testSplitBatch(self):
        species1 = torch.randint(4, (5, 4), dtype=torch.long)

@@ -45,8 +45,8 @@ class TestData(unittest.TestCase):
            self.assertNotEqual(last[-1], n[0])
            conformations = s.shape[0]
            self.assertGreater(conformations, 0)
-            s_ = species[start:start+conformations, ...]
-            c_ = coordinates[start:start+conformations, ...]
+            s_ = species[start:(start + conformations), ...]
+            c_ = coordinates[start:(start + conformations), ...]
            s_, c_ = torchani.utils.strip_redundant_padding(s_, c_)
            self._assertTensorEqual(s, s_)
            self._assertTensorEqual(c, c_)
...
@@ -77,8 +77,7 @@ def _angular_subaev_terms(Rca, ShfZ, EtaA, Zeta, ShfA, vectors1, vectors2):
    fcj1 = _cutoff_cosine(distances1, Rca)
    fcj2 = _cutoff_cosine(distances2, Rca)
    factor1 = ((1 + torch.cos(angles - ShfZ)) / 2) ** Zeta
-    factor2 = torch.exp(-EtaA *
-                        ((distances1 + distances2) / 2 - ShfA) ** 2)
+    factor2 = torch.exp(-EtaA * ((distances1 + distances2) / 2 - ShfA) ** 2)
    ret = 2 * factor1 * factor2 * fcj1 * fcj2
    # At this point, ret now have shape
    # (conformations, atoms, N, ?, ?, ?, ?) where ? depend on constants.
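Note (illustration, not part of this commit): the reflowed ``factor2`` line is the Gaussian part of the angular term. Writing ShfZ as $\theta_s$, EtaA as $\eta$, ShfA as $R_s$, Zeta as $\zeta$, and $f_C$ for the cosine cutoff, the quantity accumulated in ``ret`` for a pair of distances $R_1, R_2$ and angle $\theta$ is

$$ \mathrm{ret} = 2\left(\frac{1+\cos(\theta-\theta_s)}{2}\right)^{\zeta} \exp\!\left(-\eta\left(\frac{R_1+R_2}{2}-R_s\right)^{2}\right) f_C(R_1)\, f_C(R_2), $$

i.e. the per-pair contribution to the ANI angular symmetry function before summation over neighbor pairs.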
@@ -157,9 +156,7 @@ def default_neighborlist(species, coordinates, cutoff):

def _compute_mask_r(species_r, num_species):
    # type: (Tensor, int) -> Tensor
    """Get mask of radial terms for each supported species from indices"""
-    mask_r = (species_r.unsqueeze(-1) ==
-              torch.arange(num_species, dtype=torch.long,
-                           device=species_r.device))
+    mask_r = (species_r.unsqueeze(-1) == torch.arange(num_species, dtype=torch.long, device=species_r.device))
    return mask_r
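Note (illustration, not part of this commit): the comparison against ``torch.arange`` broadcasts the species indices into a one-hot boolean mask over species. A tiny self-contained example with hypothetical values:

import torch

species_r = torch.tensor([[0, 2, 1]])   # (1, 3) species indices
num_species = 3
mask_r = (species_r.unsqueeze(-1) == torch.arange(num_species))
print(mask_r.shape)   # torch.Size([1, 3, 3]); one-hot along the last dimension
print(mask_r[0])
# tensor([[ True, False, False],
#         [False, False,  True],
#         [False,  True, False]])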
@@ -199,10 +196,7 @@ def _assemble(radial_terms, angular_terms, present_species,
    atoms = radial_terms.shape[1]

    # assemble radial subaev
-    present_radial_aevs = (
-        radial_terms.unsqueeze(-2) *
-        mask_r.unsqueeze(-1).to(radial_terms.dtype)
-    ).sum(-3)
+    present_radial_aevs = (radial_terms.unsqueeze(-2) * mask_r.unsqueeze(-1).to(radial_terms.dtype)).sum(-3)
    # present_radial_aevs has shape
    # (conformations, atoms, present species, radial_length)
    radial_aevs = present_radial_aevs.flatten(start_dim=2)
...
@@ -53,7 +53,7 @@ def split_batch(natoms, species, coordinates):
        improved = False
        cycle_split = split
        cycle_cost = cost
-        for i in range(len(counts)-1):
+        for i in range(len(counts) - 1):
            if i not in split:
                s = sorted(split + [i])
                c = split_cost(counts, s)
...
@@ -42,7 +42,7 @@ class anidataloader:

    def __init__(self, store_file):
        if not os.path.exists(store_file):
-            exit('Error: file not found - '+store_file)
+            exit('Error: file not found - ' + store_file)
        self.store = h5py.File(store_file, 'r')

    ''' Group recursive iterator (iterate through all groups
...
@@ -61,4 +61,4 @@ class Ensemble(torch.nn.ModuleList):
class Gaussian(torch.nn.Module):
    """Gaussian activation"""
    def forward(self, x):
-        return torch.exp(-x*x)
+        return torch.exp(- x * x)
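Note (illustration, not part of this commit): the module applies exp(-x^2) elementwise, so it can be used like any other activation. A minimal self-contained sketch:

import torch

class Gaussian(torch.nn.Module):
    """Gaussian activation"""
    def forward(self, x):
        return torch.exp(- x * x)

act = Gaussian()
x = torch.tensor([0.0, 1.0, 2.0])
print(act(x))  # tensor([1.0000, 0.3679, 0.0183])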