Commit cecaf992 authored by Ignacio Pickering, committed by Gao, Xiang

Reformat the BuiltinModels and Builtins API to make them simpler and clearer (#252)

parent ae4cbe0a
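The change below replaces direct use of `torchani.neurochem.Builtins()` with the new `torchani.models.ANI1x()` class throughout the tests, tools, and data pipeline. A minimal migration sketch, assuming this version of torchani is installed, assembled from the substitutions visible in the diff:

```python
import torch
import torchani

# Old API: deprecated by this commit, now emits a DeprecationWarning
builtins = torchani.neurochem.Builtins()
old_model = torch.nn.Sequential(builtins.aev_computer,
                                builtins.models[0],
                                builtins.energy_shifter)

# New API: a builtin model object exposes the same pieces directly
ani1x = torchani.models.ANI1x()
new_model = torch.nn.Sequential(ani1x.aev_computer,
                                ani1x.neural_networks[0],
                                ani1x.energy_shifter)
```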
......@@ -25,4 +25,6 @@ benchmark_xyz
datacache
dist
*.pckl
# No vim swap files
*.swp
*.swo
......@@ -19,8 +19,8 @@ tolerance = 1e-5
class TestAEV(unittest.TestCase):
def setUp(self):
builtins = torchani.neurochem.Builtins()
self.aev_computer = builtins.aev_computer
ani1x = torchani.models.ANI1x()
self.aev_computer = ani1x.aev_computer
self.radial_length = self.aev_computer.radial_length
self.debug = False
......@@ -177,10 +177,9 @@ class TestAEV(unittest.TestCase):
class TestPBCSeeEachOther(unittest.TestCase):
def setUp(self):
self.builtin = torchani.neurochem.Builtins()
self.aev_computer = self.builtin.aev_computer.to(torch.double)
self.ani1x = torchani.models.ANI1x()
self.aev_computer = self.ani1x.aev_computer.to(torch.double)
def testTranslationalInvariancePBC(self):
coordinates = torch.tensor(
......@@ -293,8 +292,8 @@ class TestAEVOnBoundary(unittest.TestCase):
self.pbc = torch.ones(3, dtype=torch.uint8)
self.v1, self.v2, self.v3 = self.cell
self.center_coordinates = self.coordinates + 0.5 * (self.v1 + self.v2 + self.v3)
builtin = torchani.neurochem.Builtins()
self.aev_computer = builtin.aev_computer.to(torch.double)
ani1x = torchani.models.ANI1x()
self.aev_computer = ani1x.aev_computer.to(torch.double)
_, self.aev = self.aev_computer((self.species, self.center_coordinates, self.cell, self.pbc))
def assertInCell(self, coordinates):
......@@ -325,8 +324,8 @@ class TestAEVOnBoundary(unittest.TestCase):
class TestAEVOnBenzenePBC(unittest.TestCase):
def setUp(self):
builtins = torchani.neurochem.Builtins()
self.aev_computer = builtins.aev_computer
ani1x = torchani.models.ANI1x()
self.aev_computer = ani1x.aev_computer
filename = os.path.join(path, '../tools/generate-unit-test-expect/others/Benzene.cif')
benzene = ase.io.read(filename)
self.cell = torch.tensor(benzene.get_cell(complete=True)).float()
......
......@@ -8,9 +8,9 @@ path = os.path.dirname(os.path.realpath(__file__))
dataset_path = os.path.join(path, '../dataset/ani1-up_to_gdb4')
dataset_path2 = os.path.join(path, '../dataset/ani1-up_to_gdb4/ani_gdb_s01.h5')
batch_size = 256
builtins = torchani.neurochem.Builtins()
consts = builtins.consts
aev_computer = builtins.aev_computer
ani1x = torchani.models.ANI1x()
consts = ani1x.consts
aev_computer = ani1x.aev_computer
class TestData(unittest.TestCase):
......
......@@ -14,11 +14,11 @@ class TestEnergies(unittest.TestCase):
def setUp(self):
self.tolerance = 5e-5
builtins = torchani.neurochem.Builtins()
self.aev_computer = builtins.aev_computer
nnp = builtins.models[0]
shift_energy = builtins.energy_shifter
self.model = torch.nn.Sequential(self.aev_computer, nnp, shift_energy)
ani1x = torchani.models.ANI1x()
aev_computer = ani1x.aev_computer
nnp = ani1x.neural_networks[0]
shift_energy = ani1x.energy_shifter
self.model = torch.nn.Sequential(aev_computer, nnp, shift_energy)
def random_skip(self):
return False
......
......@@ -15,17 +15,17 @@ class TestEnsemble(unittest.TestCase):
self.conformations = 20
def _test_molecule(self, coordinates, species):
builtins = torchani.neurochem.Builtins()
ani1x = torchani.models.ANI1x()
coordinates.requires_grad_(True)
aev = builtins.aev_computer
ensemble = builtins.models
models = [torch.nn.Sequential(aev, m) for m in ensemble]
ensemble = torch.nn.Sequential(aev, ensemble)
aev = ani1x.aev_computer
model_iterator = ani1x.neural_networks
model_list = [torch.nn.Sequential(aev, m) for m in model_iterator]
ensemble = torch.nn.Sequential(aev, model_iterator)
_, energy1 = ensemble((species, coordinates))
force1 = torch.autograd.grad(energy1.sum(), coordinates)[0]
energy2 = [m((species, coordinates))[1] for m in models]
energy2 = sum(energy2) / len(models)
energy2 = [m((species, coordinates))[1] for m in model_list]
energy2 = sum(energy2) / len(model_list)
force2 = torch.autograd.grad(energy2.sum(), coordinates)[0]
energy_diff = (energy1 - energy2).abs().max().item()
force_diff = (force1 - force2).abs().max().item()
......
......@@ -12,9 +12,9 @@ class TestForce(unittest.TestCase):
def setUp(self):
self.tolerance = 1e-5
builtins = torchani.neurochem.Builtins()
self.aev_computer = builtins.aev_computer
nnp = builtins.models[0]
ani1x = torchani.models.ANI1x()
self.aev_computer = ani1x.aev_computer
nnp = ani1x.neural_networks[0]
self.model = torch.nn.Sequential(self.aev_computer, nnp)
def random_skip(self):
......
......@@ -16,12 +16,12 @@ threshold = 1e-5
class TestIgnite(unittest.TestCase):
def testIgnite(self):
builtins = torchani.neurochem.Builtins()
aev_computer = builtins.aev_computer
nnp = copy.deepcopy(builtins.models[0])
shift_energy = builtins.energy_shifter
ani1x = torchani.models.ANI1x()
aev_computer = ani1x.aev_computer
nnp = copy.deepcopy(ani1x.neural_networks[0])
shift_energy = ani1x.energy_shifter
ds = torchani.data.BatchedANIDataset(
path, builtins.consts.species_to_tensor, batchsize,
path, ani1x.consts.species_to_tensor, batchsize,
transform=[shift_energy.subtract_from_dataset],
device=aev_computer.EtaR.device)
ds = torch.utils.data.Subset(ds, [0])
......
......@@ -14,10 +14,10 @@ class TestStructureOptimization(unittest.TestCase):
def setUp(self):
self.tolerance = 1e-6
self.builtin = torchani.neurochem.Builtins()
self.ani1x = torchani.models.ANI1x()
self.calculator = torchani.ase.Calculator(
self.builtin.species, self.builtin.aev_computer,
self.builtin.models[0], self.builtin.energy_shifter)
self.ani1x.species, self.ani1x.aev_computer,
self.ani1x.neural_networks[0], self.ani1x.energy_shifter)
def testRMSE(self):
datafile = os.path.join(path, 'test_data/NeuroChemOptimized/all')
......
......@@ -18,11 +18,11 @@ parser = parser.parse_args()
# set up benchmark
device = torch.device(parser.device)
builtins = torchani.neurochem.Builtins()
ani1x = torchani.models.ANI1x()
nnp = torch.nn.Sequential(
builtins.aev_computer,
builtins.models[0],
builtins.energy_shifter
ani1x.aev_computer,
ani1x.neural_networks[0],
ani1x.energy_shifter
).to(device)
......@@ -54,8 +54,8 @@ class XYZ:
atom_count -= 1
if atom_count == 0:
state = 'ready'
species = builtins.consts.species_to_tensor(species) \
.to(device)
species = ani1x.consts.species_to_tensor(species) \
.to(device)
coordinates = torch.tensor(coordinates, device=device)
self.mols.append((species, coordinates))
coordinates = []
......
......@@ -6,7 +6,7 @@ import pickle
import argparse
builtins = torchani.neurochem.Builtins()
ani1x = torchani.models.ANI1x()
# parse command line arguments
parser = argparse.ArgumentParser()
......@@ -22,13 +22,13 @@ parser.add_argument('--batch_size',
default=1024, type=int)
parser.add_argument('--const_file',
help='File storing constants',
default=builtins.const_file)
default=ani1x.const_file)
parser.add_argument('--sae_file',
help='File storing self atomic energies',
default=builtins.sae_file)
default=ani1x.sae_file)
parser.add_argument('--network_dir',
help='Directory or prefix of directories storing networks',
default=builtins.ensemble_prefix + '0/networks')
default=ani1x.ensemble_prefix + '0/networks')
parser.add_argument('--compare_with',
help='The TorchANI model to compare with', default=None)
parser = parser.parse_args()
......
......@@ -16,10 +16,10 @@ parser = parser.parse_args()
# set up benchmark
device = torch.device(parser.device)
builtins = torchani.neurochem.Builtins()
consts = builtins.consts
aev_computer = builtins.aev_computer
shift_energy = builtins.energy_shifter
ani1x = torchani.models.ANI1x()
consts = ani1x.consts
aev_computer = ani1x.aev_computer
shift_energy = ani1x.energy_shifter
def atomic():
......
......@@ -20,10 +20,10 @@ parser = parser.parse_args()
# set up benchmark
device = torch.device(parser.device)
builtins = torchani.neurochem.Builtins()
consts = builtins.consts
aev_computer = builtins.aev_computer
shift_energy = builtins.energy_shifter
ani1x = torchani.models.ANI1x()
consts = ani1x.consts
aev_computer = ani1x.aev_computer
shift_energy = ani1x.energy_shifter
def atomic():
......
......@@ -6,7 +6,7 @@ from os.path import join, isfile, isdir
import os
from ._pyanitools import anidataloader
import torch
from .. import utils, neurochem, aev
from .. import utils, neurochem, aev, models
import pickle
import numpy as np
from scipy.sparse import bsr_matrix
......@@ -414,7 +414,7 @@ class SparseAEVCacheLoader(AEVCacheLoader):
return encoded_species, encoded_aev
builtin = neurochem.Builtins()
ani1x = models.ANI1x()
def create_aev_cache(dataset, aev_computer, output, progress_bar=True, encoder=lambda *x: x):
......@@ -471,16 +471,16 @@ def _cache_aev(output, dataset_path, batchsize, device, constfile,
def cache_aev(output, dataset_path, batchsize, device=default_device,
constfile=builtin.const_file, subtract_sae=False,
sae_file=builtin.sae_file, enable_tqdm=True, **kwargs):
constfile=ani1x.const_file, subtract_sae=False,
sae_file=ani1x.sae_file, enable_tqdm=True, **kwargs):
_cache_aev(output, dataset_path, batchsize, device, constfile,
subtract_sae, sae_file, enable_tqdm, AEVCacheLoader.encode_aev,
**kwargs)
def cache_sparse_aev(output, dataset_path, batchsize, device=default_device,
constfile=builtin.const_file, subtract_sae=False,
sae_file=builtin.sae_file, enable_tqdm=True, **kwargs):
constfile=ani1x.const_file, subtract_sae=False,
sae_file=ani1x.sae_file, enable_tqdm=True, **kwargs):
_cache_aev(output, dataset_path, batchsize, device, constfile,
subtract_sae, sae_file, enable_tqdm,
SparseAEVCacheLoader.encode_aev, **kwargs)
......
......@@ -5,7 +5,7 @@ computed aevs. Use the ``-h`` option for help.
"""
import torch
from . import cache_aev, cache_sparse_aev, builtin, default_device
from . import cache_aev, cache_sparse_aev, ani1x, default_device
if __name__ == '__main__':
......@@ -19,7 +19,7 @@ if __name__ == '__main__':
parser.add_argument('batchsize', help='batch size', type=int)
parser.add_argument('--constfile',
help='Path of the constant file `.params`',
default=builtin.const_file)
default=ani1x.const_file)
parser.add_argument('--properties', nargs='+',
help='Output properties to load.',
default=['energies'])
......@@ -35,7 +35,7 @@ if __name__ == '__main__':
help='Whether to subtract self atomic energies',
default=None, action='store_true')
parser.add_argument('--sae-file', help='Path to SAE file',
default=builtin.sae_file)
default=ani1x.sae_file)
parser = parser.parse_args()
cache_aev(parser.output, parser.dataset, parser.batchsize, parser.device,
......
# -*- coding: utf-8 -*-
"""The ANI model zoo that stores public ANI models.
Currently the model zoo has two models: ANI-1x and ANI-1ccx. The corresponding
classes of these two models are :class:`ANI1x` and :class:`ANI1ccx`. These
classes share the same API. To use the builtin models, you simply need to
create an object of its corresponding class. These classes are subclasses of
:class:`torch.nn.Module`, and could be used directly. Below is an example of
how to use these models:
Currently the model zoo has two models: ANI-1x and ANI-1ccx. The classes
of these two models are :class:`ANI1x` and :class:`ANI1ccx`; both are
subclasses of :class:`torch.nn.Module`. To use the models, just instantiate
them and either directly calculate energies or get an ASE calculator. For example:
.. code:: python
model = torchani.models.ANI1x()
ani1x = torchani.models.ANI1x()
# compute energy using ANI-1x model ensemble
_, energies = model((species, coordinates))
model.ase() # get ASE Calculator using this ensemble
_, energies = ani1x((species, coordinates))
ani1x.ase() # get ASE Calculator using this ensemble
# convert atom species from string to long tensor
model.species_to_tensor('CHHHH')
ani1x.species_to_tensor('CHHHH')
model0 = model[0] # get the first model in the ensemble
model0 = ani1x[0] # get the first model in the ensemble
# compute energy using the first model in the ANI-1x model ensemble
_, energies = model0((species, coordinates))
model0.ase() # get ASE Calculator using this model
# convert atom species from string to long tensor
model0.species_to_tensor('CHHHH')
Note that the class BuiltinModels can be accessed but it is deprecated and
shouldn't be used anymore.
"""
import torch
import warnings
from pkg_resources import resource_filename
from . import neurochem
from .aev import AEVComputer
# Future: Delete BuiltinModels in a future release, it is DEPRECATED
class BuiltinModels(torch.nn.Module):
"""BuiltinModels class.
.. warning::
This class is part of an old API. It is DEPRECATED and may be deleted in a
future version. It shouldn't be used.
"""
def __init__(self, builtin_class):
warnings.warn(
"BuiltinModels is deprecated and will be deleted in"
" the future; use torchani.models.BuiltinNet()", DeprecationWarning)
super(BuiltinModels, self).__init__()
self.builtins = builtin_class()
self.aev_computer = self.builtins.aev_computer
......@@ -44,6 +58,110 @@ class BuiltinModels(torch.nn.Module):
return self.energy_shifter(species_energies)
def __getitem__(self, index):
ret = torch.nn.Sequential(self.aev_computer,
self.neural_networks[index],
self.energy_shifter)
def ase(**kwargs):
from . import ase
return ase.Calculator(self.builtins.species, self.aev_computer,
self.neural_networks[index],
self.energy_shifter, **kwargs)
ret.ase = ase
ret.species_to_tensor = self.builtins.consts.species_to_tensor
return ret
def __len__(self):
return len(self.neural_networks)
def ase(self, **kwargs):
"""Get an ASE Calculator using this model"""
from . import ase
return ase.Calculator(self.builtins.species, self.aev_computer,
self.neural_networks, self.energy_shifter,
**kwargs)
def species_to_tensor(self, *args, **kwargs):
"""Convert species from strings to tensor.
See also :method:`torchani.neurochem.Constant.species_to_tensor`"""
return self.builtins.consts.species_to_tensor(*args, **kwargs) \
.to(self.aev_computer.ShfR.device)
class BuiltinNet(torch.nn.Module):
"""Private template for the builtin ANI ensemble models.
All ANI ensemble models from the ANI model zoo should inherit from this class.
This class is a torch module that sequentially computes
AEVs, then energies from a torchani.Ensemble, and then uses an EnergyShifter
to shift those energies. It is essentially the sequential pipeline
'AEVComputer -> Ensemble -> EnergyShifter'.
.. note::
This class is for internal use only; avoid using it and use ANI1x, ANI1ccx,
etc. instead. Don't confuse this class with torchani.Ensemble, which
is only a container for many ANIModel instances and shouldn't be used
directly for calculations.
Attributes:
const_file (:class:`str`): Path to the file with the builtin constants.
sae_file (:class:`str`): Path to the file with the Self Atomic Energies.
ensemble_prefix (:class:`str`): Prefix of directories.
ensemble_size (:class:`int`): Number of models in the ensemble.
energy_shifter (:class:`torchani.EnergyShifter`): Energy shifter with
builtin Self Atomic Energies.
aev_computer (:class:`torchani.AEVComputer`): AEV computer with
builtin constants
neural_networks (:class:`torchani.Ensemble`): Ensemble of ANIModel networks
"""
def __init__(self, parent_name, const_file_path, sae_file_path,
ensemble_size, ensemble_prefix_path):
super(BuiltinNet, self).__init__()
self.const_file = resource_filename(parent_name, const_file_path)
self.sae_file = resource_filename(parent_name, sae_file_path)
self.ensemble_prefix = resource_filename(parent_name,
ensemble_prefix_path)
self.ensemble_size = ensemble_size
self.consts = neurochem.Constants(self.const_file)
self.species = self.consts.species
self.aev_computer = AEVComputer(**self.consts)
self.energy_shifter = neurochem.load_sae(self.sae_file)
self.neural_networks = neurochem.load_model_ensemble(
self.species, self.ensemble_prefix, self.ensemble_size)
def forward(self, species_coordinates):
"""Calculates predicted properties for a minibatch of configurations
Args:
species_coordinates: minibatch of configurations
Returns:
species_energies: energies for the given configurations
"""
species_aevs = self.aev_computer(species_coordinates)
species_energies = self.neural_networks(species_aevs)
return self.energy_shifter(species_energies)
def __getitem__(self, index):
"""Get a single 'AEVComputer -> ANIModel -> EnergyShifter' sequential model
Indexing gives access to a single model inside the ensemble that can be
used directly for calculations. The returned model is the sequence
AEVComputer -> ANIModel -> EnergyShifter and also exposes an ase()
calculator getter and species_to_tensor().
Args:
index (:class:`int`): Index of the model
Returns:
ret: (:class:`torch.nn.Sequential`): Sequential model ready for
calculations
"""
ret = torch.nn.Sequential(
self.aev_computer,
self.neural_networks[index],
......@@ -51,38 +169,62 @@ class BuiltinModels(torch.nn.Module):
)
def ase(**kwargs):
"""Get an ASE Calculator using this model"""
from . import ase
return ase.Calculator(self.builtins.species,
return ase.Calculator(self.species,
self.aev_computer,
self.neural_networks[index],
self.energy_shifter,
**kwargs)
ret.ase = ase
ret.species_to_tensor = self.builtins.consts.species_to_tensor
ret.species_to_tensor = self.consts.species_to_tensor
return ret
def __len__(self):
"""Get the number of networks in the ensemble
Returns:
length (:class:`int`): Number of networks in the ensemble
"""
return len(self.neural_networks)
def ase(self, **kwargs):
"""Get an ASE Calculator using this model"""
"""Get an ASE Calculator using this ANI model ensemble
Arguments:
kwargs: ase.Calculator kwargs
Returns:
calculator (:class:`torchani.ase.Calculator`): A calculator to be used with ASE
"""
from . import ase
return ase.Calculator(self.builtins.species, self.aev_computer,
return ase.Calculator(self.species, self.aev_computer,
self.neural_networks, self.energy_shifter,
**kwargs)
def species_to_tensor(self, *args, **kwargs):
"""Convert species from strings to tensor.
See also :method:`torchani.neurochem.Constant.species_to_tensor`"""
return self.builtins.consts.species_to_tensor(*args, **kwargs) \
See also :meth:`torchani.neurochem.Constants.species_to_tensor`
Arguments:
species (:class:`str`): A string of chemical symbols
Returns:
tensor (:class:`torch.Tensor`): A 1D tensor of integers
"""
return self.consts.species_to_tensor(*args, **kwargs) \
.to(self.aev_computer.ShfR.device)
class ANI1x(BuiltinModels):
"""The ANI-1x model as in `ani-1x_8x on GitHub`_ and
`Active Learning Paper`_.
class ANI1x(BuiltinNet):
"""The ANI-1x model as in `ani-1x_8x on GitHub`_ and `Active Learning Paper`_.
The ANI-1x model is an ensemble of 8 networks that was trained using
active learning on the ANI-1x dataset; the target level of theory is
wB97X/6-31G(d). It predicts energies for HCNO elements exclusively and
shouldn't be used with other atom types.
.. _ani-1x_8x on GitHub:
https://github.com/isayev/ASE_ANI/tree/master/ani_models/ani-1x_8x
......@@ -92,12 +234,23 @@ class ANI1x(BuiltinModels):
"""
def __init__(self):
super(ANI1x, self).__init__(neurochem.Builtins)
super(ANI1x, self).__init__(
parent_name='.'.join(__name__.split('.')[:-1]),
const_file_path='resources/ani-1x_8x'
'/rHCNO-5.2R_16-3.5A_a4-8.params',
sae_file_path='resources/ani-1x_8x/sae_linfit.dat',
ensemble_size=8,
ensemble_prefix_path='resources/ani-1x_8x/train')
class ANI1ccx(BuiltinNet):
"""The ANI-1ccx model as in `ani-1ccx_8x on GitHub`_ and `Transfer Learning Paper`_.
class ANI1ccx(BuiltinModels):
"""The ANI-1x model as in `ani-1ccx_8x on GitHub`_ and
`Transfer Learning Paper`_.
The ANI-1ccx model is an ensemble of 8 networks that was trained
on the ANI-1ccx dataset using transfer learning. The target accuracy
is CCSD(T)*/CBS (CCSD(T) computed with the DLPNO-CCSD(T) method). It predicts
energies for HCNO elements exclusively and shouldn't be used with other
atom types.
.. _ani-1ccx_8x on GitHub:
https://github.com/isayev/ASE_ANI/tree/master/ani_models/ani-1ccx_8x
......@@ -107,4 +260,10 @@ class ANI1ccx(BuiltinModels):
"""
def __init__(self):
super(ANI1ccx, self).__init__(neurochem.BuiltinsANI1CCX)
super(ANI1ccx, self).__init__(
parent_name='.'.join(__name__.split('.')[:-1]),
const_file_path='resources/ani-1ccx_8x'
'/rHCNO-5.2R_16-3.5A_a4-8.params',
sae_file_path='resources/ani-1ccx_8x/sae_linfit.dat',
ensemble_size=8,
ensemble_prefix_path='resources/ani-1ccx_8x/train')
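The BuiltinNet docstring above describes the new classes as the sequential pipeline 'AEVComputer -> Ensemble -> EnergyShifter', with indexing returning a single-network pipeline plus ase() and species_to_tensor() helpers. A short usage sketch of that API, assuming this version of torchani is installed; the geometry below is a made-up placeholder:

```python
import torch
import torchani

ani1x = torchani.models.ANI1x()                          # ensemble of 8 networks

species = ani1x.species_to_tensor('CHHHH').unsqueeze(0)  # shape (1, 5)
coordinates = torch.randn(1, 5, 3)                       # hypothetical methane-like geometry

# forward(): AEVComputer -> Ensemble -> EnergyShifter
_, ensemble_energy = ani1x((species, coordinates))

# __getitem__: single 'AEVComputer -> ANIModel -> EnergyShifter' model
model0 = ani1x[0]
_, single_energy = model0((species, coordinates))

# ASE calculators from the ensemble or from a single member
calc_ensemble = ani1x.ase()
calc_single = model0.ase()
```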
......@@ -262,9 +262,14 @@ def load_model_ensemble(species, prefix, count):
return Ensemble(models)
# Future: Delete BuiltinsAbstract in a future release, it is DEPRECATED
class BuiltinsAbstract(object):
"""Base class for loading ANI neural networks from configuration files.
.. warning::
This class is part of an old API. It is DEPRECATED and may be deleted in a
future version. It shouldn't be used.
Arguments:
parent_name (:class:`str`): Base path that other paths are relative to.
const_file_path (:class:`str`): Path to constant file for ANI model(s).
......@@ -296,6 +301,9 @@ class BuiltinsAbstract(object):
self.const_file = pkg_resources.resource_filename(
parent_name,
const_file_path)
warnings.warn(
"BuiltinsAbstract is deprecated and will be deleted in"
" the future; use torchani.models.BuiltinNet()", DeprecationWarning)
self.consts = Constants(self.const_file)
self.species = self.consts.species
self.aev_computer = AEVComputer(**self.consts)
......@@ -312,9 +320,14 @@ class BuiltinsAbstract(object):
self.ensemble_size)
# Future: Delete Builtins in a future release, it is DEPRECATED
class Builtins(BuiltinsAbstract):
"""Container for the builtin ANI-1x model.
.. warning::
This class is part of an old API. It is DEPRECATED and may be deleted in a
future version. It shouldn't be used.
Attributes:
const_file (:class:`str`): Path to the builtin constant file.
consts (:class:`Constants`): Constants loaded from builtin constant
......@@ -329,6 +342,9 @@ class Builtins(BuiltinsAbstract):
models (:class:`torchani.Ensemble`): Ensemble of models.
"""
def __init__(self):
warnings.warn(
"Builtins is deprecated and will be deleted in"
" the future; use torchani.models.ANI1x()", DeprecationWarning)
parent_name = '.'.join(__name__.split('.')[:-1])
const_file_path = 'resources/ani-1x_8x'\
'/rHCNO-5.2R_16-3.5A_a4-8.params'
......@@ -344,9 +360,14 @@ class Builtins(BuiltinsAbstract):
)
# Future: Delete BuiltinsANI1CCX in a future release, it is DEPRECATED
class BuiltinsANI1CCX(BuiltinsAbstract):
"""Container for the builtin ANI-1ccx model.
.. warning::
This class is part of an old API. It is DEPRECATED and may be deleted in a
future version. It shouldn't be used.
Attributes:
const_file (:class:`str`): Path to the builtin constant file.
consts (:class:`Constants`): Constants loaded from builtin constant
......@@ -361,6 +382,9 @@ class BuiltinsANI1CCX(BuiltinsAbstract):
models (:class:`torchani.Ensemble`): Ensemble of models.
"""
def __init__(self):
warnings.warn(
"BuiltinsANI1CCX is deprecated and will be deleted in"
" the future; use torchani.models.ANI1ccx()", DeprecationWarning)
parent_name = '.'.join(__name__.split('.')[:-1])
const_file_path = 'resources/ani-1ccx_8x'\
'/rHCNO-5.2R_16-3.5A_a4-8.params'
......
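With this commit the old neurochem containers (BuiltinsAbstract, Builtins, BuiltinsANI1CCX) stay importable but emit a DeprecationWarning pointing at the models API. A small sketch of how a downstream project might surface that warning while migrating, assuming this version of torchani is installed:

```python
import warnings
import torchani

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always", DeprecationWarning)
    builtins = torchani.neurochem.Builtins()      # deprecated entry point

assert any(issubclass(w.category, DeprecationWarning) for w in caught)

# preferred replacement going forward
ani1x = torchani.models.ANI1x()
```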