OpenDAS / torchani · commit f149f6e1 (unverified)

Tools for COMP6 benchmark (#196)

Authored Mar 23, 2019 by Gao, Xiang; committed by GitHub on Mar 23, 2019.
Parent: 5bac472d
Changes: 25 · Showing 20 changed files with 28 additions and 15 deletions (+28 / -15)
.gitmodules                               +3 -0
azure/runnable_submodules.yml             +3 -3
azure/tests.yml                           +3 -0
azure/tools.yml                           +9 -3
dataset/COMP6                             +1 -0
dataset/ani1-up_to_gdb4/ani_gdb_s01.h5    +0 -0
dataset/ani1-up_to_gdb4/ani_gdb_s02.h5    +0 -0
dataset/ani1-up_to_gdb4/ani_gdb_s03.h5    +0 -0
dataset/ani1-up_to_gdb4/ani_gdb_s04.h5    +0 -0
dataset/xyz_files/13.xyz                  +0 -0
dataset/xyz_files/28.xyz                  +0 -0
dataset/xyz_files/304.xyz                 +0 -0
dataset/xyz_files/50.xyz                  +0 -0
dataset/xyz_files/98.xyz                  +0 -0
dataset/xyz_files/CH4-5.xyz               +0 -0
examples/cache_aev.py                     +2 -2
examples/neurochem_trainer.py             +2 -2
examples/nnp_training.py                  +2 -2
tests/test_data.py                        +2 -2
tests/test_ignite.py                      +1 -1
.gitmodules (new file, mode 100644)

+[submodule "dataset/COMP6"]
+	path = dataset/COMP6
+	url = https://github.com/isayev/COMP6.git
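With dataset/COMP6 registered as a submodule, a fresh clone needs it fetched before the COMP6 benchmark can run; the CI changes below do this with 'git submodule update --init'. A minimal local sketch of the same step, wrapped in Python only for illustration (the subprocess wrapper is not part of this commit):

import subprocess

# Fetch the COMP6 submodule into dataset/COMP6, equivalent to the
# 'Fetch submodules' step added to the Azure pipelines below.
subprocess.run(['git', 'submodule', 'update', '--init', 'dataset/COMP6'], check=True)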
azure/runnable_submodules.yml

@@ -21,11 +21,11 @@ steps:
 - script: 'azure/install_dependencies.sh && pip install .'
   displayName: 'Install dependencies'
-- script: 'python -m torchani.neurochem.trainer --tqdm tests/test_data/inputtrain.ipt dataset/ani_gdb_s01.h5 dataset/ani_gdb_s01.h5'
+- script: 'python -m torchani.neurochem.trainer --tqdm tests/test_data/inputtrain.ipt dataset/ani1-up_to_gdb4/ani_gdb_s01.h5 dataset/ani1-up_to_gdb4/ani_gdb_s01.h5'
   displayName: NeuroChem Trainer
-- script: 'python -m torchani.neurochem.trainer --tqdm tests/test_data/inputtrain.yaml dataset/ani_gdb_s01.h5 dataset/ani_gdb_s01.h5'
+- script: 'python -m torchani.neurochem.trainer --tqdm tests/test_data/inputtrain.yaml dataset/ani1-up_to_gdb4/ani_gdb_s01.h5 dataset/ani1-up_to_gdb4/ani_gdb_s01.h5'
   displayName: NeuroChem Trainer YAML config
-- script: 'python -m torchani.data.cache_aev tmp dataset/ani_gdb_s01.h5 256'
+- script: 'python -m torchani.data.cache_aev tmp dataset/ani1-up_to_gdb4/ani_gdb_s01.h5 256'
   displayName: Cache AEV
azure/tests.yml

@@ -18,6 +18,9 @@ steps:
   inputs:
     versionSpec: '$(python.version)'
+- script: 'git submodule update --init'
+  displayName: 'Fetch submodules'
 - script: 'azure/install_dependencies.sh && pip install nose coverage codecov'
   displayName: 'Install dependencies'
azure/tools.yml

@@ -19,14 +19,20 @@ steps:
   inputs:
     versionSpec: '$(python.version)'
+- script: 'git submodule update --init'
+  displayName: 'Fetch submodules'
 - script: 'azure/install_dependencies.sh && pip install .'
   displayName: 'Install dependencies'
-- script: 'python tools/training-benchmark.py ./dataset/ani_gdb_s01.h5'
+- script: 'python tools/training-benchmark.py ./dataset/ani1-up_to_gdb4/ani_gdb_s01.h5'
   displayName: Training Benchmark
-- script: 'python tools/neurochem-test.py ./dataset/ani_gdb_s01.h5'
+- script: 'python tools/neurochem-test.py ./dataset/ani1-up_to_gdb4/ani_gdb_s01.h5'
   displayName: NeuroChem Test
-- script: 'python tools/inference-benchmark.py --tqdm ./xyz_files/CH4-5.xyz'
+- script: 'python tools/inference-benchmark.py --tqdm ./dataset/xyz_files/CH4-5.xyz'
   displayName: Inference Benchmark
+- script: 'python tools/comp6.py ./dataset/COMP6/COMP6v1/s66x8'
+  displayName: COMP6 Benchmark
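The new COMP6 Benchmark step runs tools/comp6.py against the COMP6v1/s66x8 subset pulled in via the submodule. A minimal sketch of reproducing that CI step locally from the repository root, assuming the submodule has already been fetched; the subprocess wrapper is illustrative only:

import subprocess

# Same command the 'COMP6 Benchmark' CI step invokes.
subprocess.run(
    ['python', 'tools/comp6.py', './dataset/COMP6/COMP6v1/s66x8'],
    check=True,
)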
dataset/COMP6 (new submodule: COMP6 @ 79f41c15)

+Subproject commit 79f41c156f8e19506fe951b5513b98e7c534a503
Files moved (no content changes):

dataset/ani_gdb_s01.h5 → dataset/ani1-up_to_gdb4/ani_gdb_s01.h5 (LFS)
dataset/ani_gdb_s02.h5 → dataset/ani1-up_to_gdb4/ani_gdb_s02.h5 (LFS)
dataset/ani_gdb_s03.h5 → dataset/ani1-up_to_gdb4/ani_gdb_s03.h5 (LFS)
dataset/ani_gdb_s04.h5 → dataset/ani1-up_to_gdb4/ani_gdb_s04.h5 (LFS)
xyz_files/13.xyz → dataset/xyz_files/13.xyz
xyz_files/28.xyz → dataset/xyz_files/28.xyz
xyz_files/304.xyz → dataset/xyz_files/304.xyz
xyz_files/50.xyz → dataset/xyz_files/50.xyz
xyz_files/98.xyz → dataset/xyz_files/98.xyz
xyz_files/CH4-5.xyz → dataset/xyz_files/CH4-5.xyz
examples/cache_aev.py

@@ -26,8 +26,8 @@ try:
     path = os.path.dirname(os.path.realpath(__file__))
 except NameError:
     path = os.getcwd()
-training_path = os.path.join(path, '../dataset/ani_gdb_s01.h5')
-validation_path = os.path.join(path, '../dataset/ani_gdb_s01.h5')
+training_path = os.path.join(path, '../dataset/ani1-up_to_gdb4/ani_gdb_s01.h5')
+validation_path = os.path.join(path, '../dataset/ani1-up_to_gdb4/ani_gdb_s01.h5')  # noqa: E501
 # checkpoint file to save model when validation RMSE improves
 model_checkpoint = 'model.pt'
examples/neurochem_trainer.py

@@ -30,8 +30,8 @@ try:
 except NameError:
     path = os.getcwd()
 cfg_path = os.path.join(path, '../tests/test_data/inputtrain.ipt')
-training_path = os.path.join(path, '../dataset/ani_gdb_s01.h5')
-validation_path = os.path.join(path, '../dataset/ani_gdb_s01.h5')
+training_path = os.path.join(path, '../dataset/ani1-up_to_gdb4/ani_gdb_s01.h5')  # noqa: E501
+validation_path = os.path.join(path, '../dataset/ani1-up_to_gdb4/ani_gdb_s01.h5')  # noqa: E501
 ###############################################################################
 # We also need to set the device to run the training:
examples/nnp_training.py

@@ -34,8 +34,8 @@ try:
     path = os.path.dirname(os.path.realpath(__file__))
 except NameError:
     path = os.getcwd()
-training_path = os.path.join(path, '../dataset/ani_gdb_s01.h5')
-validation_path = os.path.join(path, '../dataset/ani_gdb_s01.h5')
+training_path = os.path.join(path, '../dataset/ani1-up_to_gdb4/ani_gdb_s01.h5')
+validation_path = os.path.join(path, '../dataset/ani1-up_to_gdb4/ani_gdb_s01.h5')  # noqa: E501
 # checkpoint file to save model when validation RMSE improves
 model_checkpoint = 'model.pt'
tests/test_data.py

@@ -5,8 +5,8 @@ import unittest
 from torchani.data.cache_aev import cache_aev
 path = os.path.dirname(os.path.realpath(__file__))
-dataset_path = os.path.join(path, '../dataset')
-dataset_path2 = os.path.join(path, '../dataset/ani_gdb_s01.h5')
+dataset_path = os.path.join(path, '../dataset/ani1-up_to_gdb4')
+dataset_path2 = os.path.join(path, '../dataset/ani1-up_to_gdb4/ani_gdb_s01.h5')
 batch_size = 256
 builtins = torchani.neurochem.Builtins()
 consts = builtins.consts
tests/test_ignite.py

@@ -8,7 +8,7 @@ import torchani
 import torchani.ignite
 path = os.path.dirname(os.path.realpath(__file__))
-path = os.path.join(path, '../dataset/ani_gdb_s01.h5')
+path = os.path.join(path, '../dataset/ani1-up_to_gdb4/ani_gdb_s01.h5')
 batchsize = 4
 threshold = 1e-5