Commit 2359a387 authored by Xiang Gao's avatar Xiang Gao Committed by Gao, Xiang
Browse files

torchani 0.1

parents
RV淮7[wa7Ao9u^pGj9hY*j׉8♹f3Λ8us9I#.,7=?89zŹQ9Z<tIAU9X׷N:쿷λB9f=P/ Ta!/8K938$2;6,8 .%9Yd޴%%EQث,8 8m?6 wt7'ҹ_W9a2 eKR"9˹%yc8FS̟1X|us^9ulK,ʹ}Y<8eø-wF1w9I9J 9$F7JXTA橍9ȸ0v"ϣ 9wQ8~]5@T8bmA9Z%!̹8u
\ No newline at end of file
$R \L8I6돒8 j9)9AT99c99kZ:
k[GOƺ/Z9ַw9C꜋*R9t99:Q8I87ki'nK9n9Y9MHX9B|8 Pz=08G" A8"(9S;M99ȹp'nx9h-P~8^j"w9c2w߶(@nytP9Y9lv 7<*v8'%Tg鹿%7ޥ9q/,9]f 9&O9,@9eո+<91m 9FǠu
\ No newline at end of file
This source diff could not be displayed because it is too large. You can view the blob instead.
TM = 1
Rcr = 5.2000e+00
Rca = 3.5000e+00
EtaR = [1.6000000e+01]
ShfR = [9.0000000e-01,1.1687500e+00,1.4375000e+00,1.7062500e+00,1.9750000e+00,2.2437500e+00,2.5125000e+00,2.7812500e+00,3.0500000e+00,3.3187500e+00,3.5875000e+00,3.8562500e+00,4.1250000e+00,4.3937500e+00,4.6625000e+00,4.9312500e+00]
Zeta = [3.2000000e+01]
ShfZ = [1.9634954e-01,5.8904862e-01,9.8174770e-01,1.3744468e+00,1.7671459e+00,2.1598449e+00,2.5525440e+00,2.9452431e+00]
EtaA = [8.0000000e+00]
ShfA = [9.0000000e-01,1.5500000e+00,2.2000000e+00,2.8500000e+00]
Atyp = [H,C,N,O]
H,0=-0.600952980000
C,1=-38.08316124000
N,2=-54.70775770000
O,3=-75.19446356000
1 100
2 99
3 100
4 100
5 100
6 100
7 99
8 98
9 97
10 95
11 95
12 100
13 99
14 98
15 100
16 100
17 100
18 100
19 100
20 99
21 98
22 97
23 95
24 95
25 100
26 99
27 100
28 99
29 98
30 97
31 95
32 95
33 93
34 93
35 100
36 100
37 99
38 100
39 100
40 100
41 99
42 98
43 100
44 99
45 100
46 99
47 100
48 99
49 100
50 99
51 98
52 97
53 100
54 99
55 98
56 97
57 100
58 99
59 98
60 97
61 100
62 99
63 98
64 97
65 95
66 100
67 100
68 99
69 100
70 99
71 98
72 97
73 95
74 100
75 99
76 98
77 97
78 95
79 95
80 93
81 93
82 100
83 99
84 98
85 100
86 99
87 98
88 100
89 99
90 98
91 97
92 95
93 95
94 100
95 100
96 100
97 99
98 98
99 97
100 100
101 100
102 99
103 98
104 97
105 95
106 100
107 100
108 99
109 98
110 97
111 95
112 95
113 100
114 99
115 98
116 100
117 100
118 100
119 99
120 98
121 97
122 95
123 95
124 93
125 93
126 91
127 90
128 90
129 89
130 88
131 86
132 85
133 85
134 83
135 82
136 100
137 99
138 98
139 97
140 95
141 95
142 93
143 93
144 91
145 90
146 90
147 89
148 88
149 86
150 85
151 85
152 83
153 82
154 82
155 80
156 100
157 99
158 98
159 97
160 95
161 95
162 93
163 93
164 91
165 90
166 90
167 89
168 88
169 86
170 85
171 85
172 83
173 82
174 82
175 80
176 80
177 79
178 77
179 77
180 75
181 75
182 73
183 100
184 99
185 98
186 97
187 100
188 99
189 98
190 97
191 95
192 95
193 93
194 93
195 91
196 90
197 90
198 89
199 88
200 86
201 85
202 85
203 83
204 82
205 82
206 80
207 100
208 99
209 100
210 99
211 98
212 97
213 95
214 95
215 93
216 93
217 91
218 90
219 90
220 89
221 100
222 99
223 98
224 97
225 95
226 95
227 100
228 99
229 98
230 97
231 95
232 95
233 93
234 93
235 91
236 90
237 90
238 89
239 88
240 86
241 85
242 85
243 83
244 82
245 82
246 80
247 80
248 79
249 77
250 77
251 75
252 75
253 73
254 72
255 72
256 70
257 69
258 69
259 67
260 66
261 66
262 64
263 63
264 63
265 62
266 60
267 100
268 99
269 98
270 97
271 95
272 95
273 93
274 93
275 91
276 90
277 100
278 99
279 98
280 97
281 95
282 95
283 93
284 93
285 91
286 90
287 90
288 89
289 88
290 86
291 85
292 85
293 83
294 82
295 82
296 80
297 80
298 79
299 100
300 99
301 98
302 97
303 95
304 95
305 93
306 93
307 91
308 90
309 90
310 89
311 88
312 86
313 85
314 85
315 83
316 82
317 82
318 80
319 80
320 79
321 77
322 77
323 75
324 75
325 73
326 100
327 99
328 98
329 97
330 95
331 95
332 93
333 93
334 91
335 90
336 90
337 89
338 88
339 86
340 85
341 85
342 83
343 82
344 82
345 80
346 80
347 79
348 77
349 77
350 75
351 75
352 73
353 72
354 72
355 70
356 69
357 69
358 67
359 66
360 66
361 64
362 63
363 63
364 62
365 60
366 60
367 59
368 57
369 100
370 99
371 98
372 97
373 95
374 95
375 93
376 93
377 91
378 90
379 90
380 89
381 88
382 86
383 85
384 85
385 100
386 99
387 98
388 97
389 95
390 95
391 93
392 93
393 91
394 90
395 90
396 89
397 88
398 86
399 85
400 85
401 83
402 82
403 82
404 80
405 80
406 79
407 77
408 77
409 75
410 75
411 73
412 72
413 72
414 70
415 69
416 69
417 67
418 100
419 99
420 98
421 97
422 95
423 95
424 93
425 93
426 100
427 99
428 98
429 97
430 95
431 95
432 93
433 93
434 91
435 90
436 90
437 89
438 88
439 86
440 85
441 85
442 83
443 82
444 82
445 80
446 80
447 79
448 77
449 77
450 75
451 75
452 73
453 72
454 72
455 70
456 69
457 69
458 67
459 66
460 66
461 64
462 63
463 63
464 62
465 60
466 60
467 59
468 57
469 100
470 99
471 98
472 97
473 95
474 95
475 93
476 93
477 91
478 90
479 90
480 89
481 88
482 86
483 85
484 85
485 83
486 82
487 82
488 80
489 80
490 79
491 100
492 99
493 98
494 97
495 95
496 95
497 93
498 93
499 91
500 90
501 90
502 89
503 88
504 86
505 85
506 85
507 83
508 82
509 82
510 80
511 80
512 100
513 99
514 98
515 97
516 95
517 95
518 93
519 93
520 91
521 90
522 90
523 89
524 88
525 86
526 85
527 85
528 83
529 82
530 82
531 80
532 80
533 79
534 77
535 77
536 75
537 75
538 73
539 72
540 72
541 70
542 69
543 69
544 67
545 66
546 66
547 64
548 63
549 63
550 62
551 60
552 60
553 59
554 57
555 100
556 99
557 98
558 97
559 95
560 95
561 93
562 93
563 91
564 90
565 90
566 89
567 88
568 86
569 85
570 85
571 83
572 82
573 82
574 80
575 80
576 79
577 77
578 77
579 75
580 75
581 73
582 72
583 72
584 70
585 69
586 69
587 67
588 66
589 66
590 64
591 63
592 63
593 62
594 60
595 60
596 59
597 57
598 57
599 56
600 54
601 54
602 53
603 51
604 50
605 50
606 49
607 48
608 100
609 99
610 98
611 97
612 95
613 95
614 93
615 93
616 91
617 90
618 90
619 89
620 88
621 86
622 85
623 85
624 83
625 82
626 82
627 80
628 80
629 79
630 77
631 77
632 75
633 75
634 73
635 72
636 72
637 70
638 69
639 69
640 67
641 66
642 66
643 64
644 63
645 63
646 62
647 60
648 60
649 59
650 57
651 57
652 56
653 54
654 54
655 53
656 51
657 50
658 50
659 49
660 48
661 46
662 45
663 44
664 44
665 43
666 42
667 40
668 39
669 38
670 38
671 37
672 36
673 34
674 33
675 32
676 32
677 31
678 30
679 28
680 27
681 26
682 25
683 25
684 24
685 23
686 21
687 20
688 19
689 19
690 18
691 17
692 15
693 14
694 13
695 13
696 12
697 11
698 9
699 8
700 7
701 7
702 6
703 5
704 3
705 2
706 1
707 0
708 100
709 99
710 100
711 99
712 98
713 100
714 99
715 98
716 97
717 95
718 95
719 93
720 93
721 91
722 100
723 99
724 98
725 97
726 95
727 95
728 93
729 93
730 91
731 90
732 90
733 89
734 88
735 86
736 85
737 85
738 83
739 82
740 82
741 80
742 80
743 79
744 77
745 77
746 75
747 75
748 73
749 72
750 72
751 70
752 69
753 69
754 67
755 66
756 100
757 99
758 98
759 97
760 95
761 95
762 93
763 93
764 91
765 90
766 90
767 89
768 88
769 86
770 85
771 85
772 83
773 82
774 82
775 80
776 80
777 79
778 77
779 77
780 75
781 75
782 73
783 72
784 72
785 70
786 69
787 69
788 67
789 66
790 66
791 64
792 63
793 63
794 62
795 60
796 60
797 59
798 57
799 57
800 56
801 54
802 54
803 53
804 51
805 50
806 50
807 100
808 99
809 98
810 97
811 95
812 95
813 93
814 93
815 91
816 90
817 90
818 89
819 88
820 86
821 85
822 85
823 83
824 82
825 82
826 80
827 80
828 79
829 77
830 77
831 75
832 75
833 73
834 72
835 72
836 70
837 69
838 69
839 67
840 66
841 66
842 64
843 63
844 63
845 62
846 60
847 60
848 59
849 57
850 57
851 100
852 99
853 98
854 97
855 95
856 95
857 93
858 93
859 91
860 90
861 90
862 89
863 88
864 86
865 85
866 85
867 83
868 82
869 82
870 80
871 80
872 79
873 100
874 99
875 98
876 97
877 95
878 95
879 93
880 93
881 91
882 90
883 90
884 89
885 88
886 86
887 85
888 85
889 83
890 82
891 82
892 80
893 80
894 79
895 77
896 77
897 75
898 75
899 73
900 72
901 72
902 70
903 69
904 69
905 67
906 66
907 66
908 64
909 63
910 63
911 62
912 60
913 60
914 59
915 57
916 57
917 56
918 54
919 54
920 53
921 51
922 50
923 50
924 49
925 48
926 46
927 45
928 44
929 44
930 43
931 42
932 40
933 39
934 38
935 38
936 37
937 36
938 34
939 33
940 32
941 32
942 31
943 30
944 28
945 27
946 26
947 25
948 25
949 24
950 23
951 21
952 20
953 19
954 19
955 18
956 17
957 15
958 14
959 13
960 13
961 12
962 11
963 9
964 8
965 7
966 7
967 6
968 5
969 3
970 2
971 1
972 0
973 100
974 99
975 98
976 97
977 95
978 95
979 93
980 93
981 91
982 90
983 90
984 89
985 88
986 86
987 85
988 85
989 83
990 82
991 82
992 80
993 80
994 79
995 77
996 77
997 75
998 75
999 73
1000 100
1001 99
1002 98
1003 97
1004 95
1005 95
1006 93
1007 93
1008 91
1009 90
1010 90
1011 89
1012 88
1013 86
1014 85
1015 85
1016 83
1017 82
1018 82
1019 80
1020 80
1021 79
1022 77
1023 77
1024 75
1025 75
1026 73
1027 72
1028 72
1029 70
1030 69
1031 100
1032 99
1033 98
1034 97
1035 95
1036 95
1037 93
1038 93
1039 91
1040 90
1041 90
1042 89
1043 88
1044 86
1045 85
1046 85
1047 83
1048 82
1049 82
1050 80
1051 80
1052 79
1053 77
1054 77
1055 75
1056 75
1057 73
1058 72
1059 72
1060 70
1061 69
1062 69
1063 67
1064 66
1065 66
1066 64
1067 63
1068 63
1069 62
1070 60
1071 60
1072 59
1073 57
1074 57
1075 56
1076 54
1077 54
1078 53
1079 51
1080 50
1081 50
1082 49
1083 48
1084 46
1085 45
1086 44
1087 44
1088 43
1089 42
1090 40
1091 39
1092 38
1093 38
1094 37
1095 36
1096 34
1097 33
1098 32
1099 32
1100 31
1101 30
1102 28
1103 27
1104 26
1105 25
1106 25
1107 24
1108 23
1109 21
1110 20
1111 19
1112 19
1113 18
1114 17
1115 15
1116 14
1117 13
1118 13
1119 12
1120 11
1121 9
1122 8
1123 7
1124 7
1125 6
1126 5
1127 3
1128 2
1129 1
1130 0
1131 100
1132 99
1133 98
1134 97
1135 95
1136 95
1137 93
1138 93
1139 91
1140 90
1141 90
1142 89
1143 88
1144 86
1145 85
1146 85
1147 83
1148 82
1149 82
1150 80
1151 80
1152 79
1153 77
1154 77
1155 75
1156 75
1157 73
1158 72
1159 72
1160 70
1161 69
1162 69
1163 67
1164 66
1165 66
1166 64
1167 63
1168 63
1169 62
1170 60
1171 60
1172 59
1173 57
1174 57
1175 56
1176 54
1177 54
1178 53
1179 51
1180 50
1181 50
1182 49
1183 48
1184 46
1185 45
1186 44
1187 44
1188 43
1189 42
1190 40
1191 39
1192 38
1193 38
1194 37
1195 36
1196 34
1197 33
1198 32
1199 32
1200 31
1201 30
1202 28
1203 27
1204 26
1205 25
1206 25
1207 24
1208 23
1209 21
1210 20
1211 19
1212 19
1213 18
1214 17
1215 15
1216 14
1217 13
1218 13
1219 12
1220 11
1221 9
1222 8
1223 7
1224 7
1225 6
1226 5
1227 3
1228 2
1229 1
1230 0
1231 100
1232 99
1233 98
1234 97
1235 95
1236 95
1237 93
1238 93
1239 91
1240 90
1241 90
1242 89
1243 88
1244 86
1245 85
1246 85
1247 83
1248 82
1249 82
1250 80
1251 80
1252 79
1253 77
1254 77
1255 75
1256 75
1257 73
1258 72
1259 72
1260 70
1261 69
1262 69
1263 67
1264 66
1265 66
1266 64
1267 63
1268 63
1269 62
1270 60
1271 60
1272 59
1273 57
1274 57
1275 56
1276 54
1277 54
1278 53
1279 51
1280 50
1281 50
1282 49
1283 48
1284 46
1285 45
1286 44
1287 44
1288 43
1289 42
1290 40
1291 39
1292 38
1293 38
1294 37
1295 36
1296 34
1297 33
1298 32
1299 32
1300 31
1301 30
1302 28
1303 27
1304 26
1305 25
1306 25
1307 24
1308 23
1309 21
1310 20
1311 19
1312 19
1313 18
1314 17
1315 15
1316 14
1317 13
1318 13
1319 12
1320 11
1321 9
1322 8
1323 7
1324 7
1325 6
1326 5
1327 3
1328 2
1329 1
1330 0
1331 100
1332 99
1333 98
1334 97
1335 95
1336 95
1337 93
1338 93
1339 91
1340 90
1341 90
1342 89
1343 88
1344 86
1345 85
1346 85
1347 83
1348 82
1349 82
1350 80
1351 80
1352 79
1353 77
1354 77
1355 75
1356 75
1357 73
1358 72
1359 72
1360 70
1361 69
1362 69
1363 67
1364 66
1365 66
1366 64
1367 63
1368 63
1369 62
1370 60
1371 60
1372 59
1373 57
1374 57
1375 56
1376 54
1377 54
1378 53
1379 51
1380 50
1381 50
1382 49
1383 48
1384 46
1385 45
1386 44
1387 44
1388 43
1389 42
1390 40
1391 39
1392 38
1393 38
1394 37
1395 36
1396 34
1397 33
1398 32
1399 32
1400 31
1401 30
1402 28
1403 27
1404 26
1405 25
1406 25
1407 24
1408 23
1409 21
1410 20
1411 19
1412 19
1413 18
1414 17
1415 15
1416 14
1417 13
1418 13
1419 12
1420 11
1421 9
1422 8
1423 7
1424 7
1425 6
1426 5
1427 3
1428 2
1429 1
1430 0
1431 100
1432 99
1433 98
1434 97
1435 95
1436 95
1437 93
1438 93
1439 91
1440 90
1441 90
1442 89
1443 88
1444 86
1445 85
1446 85
1447 83
1448 82
1449 82
1450 80
1451 80
1452 79
1453 77
1454 77
1455 75
1456 75
1457 73
1458 72
1459 72
1460 70
1461 69
1462 69
1463 67
1464 66
1465 66
1466 64
1467 63
1468 63
1469 62
1470 60
1471 60
1472 59
1473 57
1474 57
1475 56
1476 54
1477 54
1478 53
1479 51
1480 50
1481 50
1482 49
1483 48
1484 46
1485 45
1486 44
1487 44
1488 43
1489 42
1490 40
1491 39
1492 38
1493 38
1494 37
1495 36
1496 34
1497 33
1498 32
1499 32
1500 31
1501 30
1502 28
1503 27
1504 26
1505 25
1506 25
1507 24
1508 23
1509 21
1510 20
1511 19
1512 19
1513 18
1514 17
1515 15
1516 14
1517 13
1518 13
1519 12
1520 11
1521 9
1522 8
1523 7
1524 7
1525 6
1526 5
1527 3
1528 2
1529 1
1530 0
import torch
import numpy
import torchani
import unittest
import pyanitools
import os
import logging
class TestAEV(unittest.TestCase):
    """Compare torchani's SortedAEV against the NeuroChem reference AEV.

    For every molecule tested, both radial and angular atomic environment
    vectors must agree within ``self.tolerance``.
    """

    def setUp(self, dtype=torchani.default_dtype, device=torchani.default_device):
        self.aev = torchani.SortedAEV(dtype=dtype, device=device)
        self.ncaev = torchani.NeuroChemAEV(dtype=dtype, device=device)
        self.tolerance = 1e-5
        self.logger = logging.getLogger('smiles')

    def _dump_mismatch(self, label, species, reference, candidate):
        """Print the first atom whose AEV differs beyond tolerance.

        Debug helper: ``reference`` and ``candidate`` are
        (conformations, atoms, aev_length) tensors; only conformation 0 is
        inspected.  ``label`` is 'radial' or 'angular'.
        """
        print(label, 'aev for', species)
        for i in range(len(species)):
            r1 = reference[0, i, :]
            r2 = candidate[0, i, :]
            max_err = torch.max(torch.abs(r1 - r2))
            if max_err > self.tolerance:
                print('atom', i, 'species', species[i], label + ' aevs:')
                print(torch.stack([r1, r2, r1 - r2], dim=1))
                break

    def _test_molecule(self, coordinates, species):
        """Assert radial and angular AEVs agree with NeuroChem within tolerance.

        ``coordinates`` is a numpy array of shape (conformations, atoms, 3);
        ``species`` is a list of element symbols, one per atom.
        """
        coordinates = torch.from_numpy(coordinates).type(
            self.aev.dtype).to(self.aev.device)
        # SortedAEV requires atoms grouped by species.
        coordinates, species = self.aev.sort_by_species(coordinates, species)
        radial_neurochem, angular_neurochem = self.ncaev(coordinates, species)
        radial_torchani, angular_torchani = self.aev(coordinates, species)
        radial_max_error = torch.max(
            torch.abs(radial_neurochem - radial_torchani))
        angular_max_error = torch.max(
            torch.abs(angular_neurochem - angular_torchani))
        # On mismatch, dump the first offending atom before failing the assert.
        if radial_max_error > self.tolerance:
            self._dump_mismatch('radial', species,
                                radial_neurochem, radial_torchani)
        if angular_max_error > self.tolerance:
            self._dump_mismatch('angular', species,
                                angular_neurochem, angular_torchani)
        self.assertLess(radial_max_error, self.tolerance)
        self.assertLess(angular_max_error, self.tolerance)

    def _test_datafile(self, number):
        """Run the AEV comparison on the first 10 conformations of every
        molecule in built-in dataset file number ``number``."""
        data_file = os.path.join(
            torchani.buildin_dataset_dir, 'ani_gdb_s0{}.h5'.format(number))
        adl = pyanitools.anidataloader(data_file)
        for data in adl:
            coordinates = data['coordinates'][:10, :]
            species = data['species']
            smiles = ''.join(data['smiles'])
            self._test_molecule(coordinates, species)
            self.logger.info('Test pass: ' + smiles)

    def testGDB01(self):
        self._test_datafile(1)

    def testGDB02(self):
        self._test_datafile(2)

    def testGDB03(self):
        self._test_datafile(3)

    def testGDB04(self):
        self._test_datafile(4)

    def testCH4(self):
        """Single hand-written methane geometry as a smoke test."""
        coordinates = numpy.array([[[0.03192167, 0.00638559, 0.01301679],
                                    [-0.83140486, 0.39370209, -0.26395324],
                                    [-0.66518241, -0.84461308, 0.20759389],
                                    [0.45554739, 0.54289633, 0.81170881],
                                    [0.66091919, -0.16799635, -0.91037834]]],
                                  numpy.float32)
        species = ['C', 'H', 'H', 'H', 'H']
        self._test_molecule(coordinates, species)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
import torch
import torchani
import unittest
import copy
class TestBenchmark(unittest.TestCase):
    """Validate the timer bookkeeping of benchmark-enabled torchani modules.

    Each entry of ``asserts`` passed to :meth:`_testModule` is either a bare
    timer name, or a string ``'<key0><op><key1>'`` where ``<op>`` is one of
    ``>=``, ``<=``, ``>``, ``<``, ``=``, asserting the corresponding relation
    between the two timers after running the module ``self.count`` times.
    """

    # Comparison tokens, longest first so '>=' is never mis-split as '>'.
    _OPS = ('>=', '<=', '>', '<', '=')

    def setUp(self, dtype=torchani.default_dtype, device=torchani.default_device):
        self.dtype = dtype
        self.device = device
        self.conformations = 100
        self.species = list('HHCCNNOO')
        self.coordinates = torch.randn(
            self.conformations, 8, 3, dtype=dtype, device=device)
        self.count = 100

    @classmethod
    def _parse_assert(cls, assertion):
        """Split an assert string into ``(op, key0, key1)``.

        A bare timer name yields ``(None, name, None)``.  Keys are stripped
        of surrounding whitespace.
        """
        for op in cls._OPS:
            if op in assertion:
                parts = assertion.split(op)
                return op, parts[0].strip(), parts[1].strip()
        return None, assertion.strip(), None

    def _testModule(self, module, asserts):
        """Run ``module`` repeatedly and check its timers obey ``asserts``."""
        # Collect every timer key mentioned in the assert strings.
        keys = []
        for a in asserts:
            _, key0, key1 = self._parse_assert(a)
            keys.append(key0)
            if key1 is not None:
                keys.append(key1)
        # Exactly these timers must exist, and all must start at zero.
        self.assertEqual(set(module.timers.keys()), set(keys))
        for k in keys:
            self.assertEqual(module.timers[k], 0)
        old_timers = copy.copy(module.timers)
        for _ in range(self.count):
            module(self.coordinates, self.species)
        # Every timer must have advanced during the runs.
        for k in keys:
            self.assertLess(old_timers[k], module.timers[k])
        # Check the requested relations between timers.
        compare = {
            '>=': self.assertGreaterEqual,
            '<=': self.assertLessEqual,
            '>': self.assertGreater,
            '<': self.assertLess,
            '=': self.assertEqual,
        }
        for a in asserts:
            op, key0, key1 = self._parse_assert(a)
            if op is not None:
                compare[op](module.timers[key0], module.timers[key1])
        # Resetting must restore all timers to zero.
        module.reset_timers()
        self.assertEqual(set(module.timers.keys()), set(keys))
        for k in keys:
            self.assertEqual(module.timers[k], 0)

    def testAEV(self):
        aev_computer = torchani.SortedAEV(
            benchmark=True, dtype=self.dtype, device=self.device)
        self._testModule(aev_computer, [
            'terms and indices>radial terms',
            'terms and indices>angular terms',
            'total>terms and indices',
            'total>partition', 'total>assemble'])

    def testModelOnAEV(self):
        aev_computer = torchani.SortedAEV(
            dtype=self.dtype, device=self.device)
        model = torchani.ModelOnAEV(
            aev_computer, benchmark=True, from_nc=None)
        self._testModule(model, ['forward>aev', 'forward>nn'])
        model = torchani.ModelOnAEV(
            aev_computer, benchmark=True, derivative=True, from_nc=None)
        self._testModule(
            model, ['forward>aev', 'forward>nn', 'forward>derivative'])
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
import torchani
import unittest
import copy
import tempfile
import os
import torch
import torchani.pyanitools as pyanitools
import torchani.data
from math import ceil
from bisect import bisect
from pickle import dump, load
class TestDataset(unittest.TestCase):
    """Cross-check torchani.data dataset loading and batching.

    Lengths and chunk counts computed through torchani.data are verified
    against independent computations via pyanitools and simple arithmetic.
    """

    def setUp(self, data_path=torchani.buildin_dataset_dir):
        self.data_path = data_path
        self.ds = torchani.data.load_dataset(data_path)

    def testLen(self):
        """Dataset length must match pyanitools' count and the iterator's."""
        # compute data length using Dataset
        l1 = len(self.ds)
        # compute data length using pyanitools
        l2 = 0
        for f in os.listdir(self.data_path):
            f = os.path.join(self.data_path, f)
            if os.path.isfile(f) and (f.endswith('.h5') or f.endswith('.hdf5')):
                for j in pyanitools.anidataloader(f):
                    l2 += j['energies'].shape[0]
        # compute data length using iterator
        l3 = len(list(self.ds))
        # these lengths should match
        self.assertEqual(l1, l2)
        self.assertEqual(l1, l3)

    def testNumChunks(self):
        """Chunk count from BatchSampler must match a per-molecule ceil sum."""
        chunksize = 64
        # compute number of chunks using batch sampler
        bs = torchani.data.BatchSampler(self.ds, chunksize, 1)
        l1 = len(bs)
        # compute number of chunks using pyanitools
        l2 = 0
        for f in os.listdir(self.data_path):
            f = os.path.join(self.data_path, f)
            if os.path.isfile(f) and (f.endswith('.h5') or f.endswith('.hdf5')):
                for j in pyanitools.anidataloader(f):
                    conformations = j['energies'].shape[0]
                    # chunks never span molecules, so each molecule
                    # contributes ceil(conformations / chunksize) chunks
                    l2 += ceil(conformations / chunksize)
        # compute number of chunks using iterator
        l3 = len(list(bs))
        # these lengths should match
        self.assertEqual(l1, l2)
        self.assertEqual(l1, l3)

    def testNumBatches(self):
        """Batch count must equal ceil(total_chunks / batch_chunks)."""
        chunksize = 64
        batch_chunks = 4
        # compute number of batches using batch sampler
        bs = torchani.data.BatchSampler(self.ds, chunksize, batch_chunks)
        l1 = len(bs)
        # compute number of batches by simple math
        bs2 = torchani.data.BatchSampler(self.ds, chunksize, 1)
        l2 = ceil(len(bs2) / batch_chunks)
        # compute number of batches using iterator
        l3 = len(list(bs))
        # these lengths should match
        self.assertEqual(l1, l2)
        self.assertEqual(l1, l3)

    def testBatchSize1(self):
        """With chunksize 1 and 1 chunk per batch, #batches == #samples."""
        bs = torchani.data.BatchSampler(self.ds, 1, 1)
        self.assertEqual(len(bs), len(self.ds))

    def testSplitSize(self):
        """random_split must produce subsets of the requested chunk counts."""
        chunksize = 64
        bs = torchani.data.BatchSampler(self.ds, chunksize, 1)
        chunks = len(bs)
        ds1, ds2 = torchani.data.random_split(
            self.ds, [200, chunks-200], chunksize)
        bs1 = torchani.data.BatchSampler(ds1, chunksize, 1)
        bs2 = torchani.data.BatchSampler(ds2, chunksize, 1)
        self.assertEqual(len(bs1), 200)
        self.assertEqual(len(bs2), chunks-200)

    def testSplitNoOverlap(self):
        """The two subsets must partition the dataset with no shared index."""
        chunksize = 64
        bs = torchani.data.BatchSampler(self.ds, chunksize, 1)
        chunks = len(bs)
        ds1, ds2 = torchani.data.random_split(
            self.ds, [200, chunks-200], chunksize)
        indices1 = ds1.dataset.indices
        indices2 = ds2.dataset.indices
        self.assertEqual(len(indices1), len(ds1))
        self.assertEqual(len(indices2), len(ds2))
        # no duplicates within either subset
        self.assertEqual(len(indices1), len(set(indices1)))
        self.assertEqual(len(indices2), len(set(indices2)))
        # together they cover the whole dataset exactly once
        self.assertEqual(len(self.ds), len(set(indices1+indices2)))

    def _testMolSizes(self, ds):
        """Two samples share a molecule id iff they fall in the same
        sub-dataset (located via cumulative_sizes).

        NOTE(review): O(n^2) pairwise scan — fine for the 50-chunk subset
        used below.  Presumably ds[i][0] is the molecule id; confirm against
        ANIDataset's __getitem__.
        """
        for i in range(len(ds)):
            l = bisect(ds.cumulative_sizes, i)
            moli = ds[i][0].item()
            for j in range(len(ds)):
                l2 = bisect(ds.cumulative_sizes, j)
                molj = ds[j][0].item()
                if l == l2:
                    self.assertEqual(moli, molj)
                else:
                    if moli == molj:
                        # debug aid: report the offending pair before failing
                        print(i, j)
                    self.assertNotEqual(moli, molj)

    def testMolSizes(self):
        chunksize = 8
        bs = torchani.data.BatchSampler(self.ds, chunksize, 1)
        chunks = len(bs)
        ds1, ds2 = torchani.data.random_split(
            self.ds, [50, chunks-50], chunksize)
        self._testMolSizes(ds1)

    def testSaveLoad(self):
        """A pickled-then-unpickled split must be item-for-item identical."""
        chunksize = 8
        bs = torchani.data.BatchSampler(self.ds, chunksize, 1)
        chunks = len(bs)
        ds1, ds2 = torchani.data.random_split(
            self.ds, [50, chunks-50], chunksize)
        tmpdir = tempfile.TemporaryDirectory()
        tmpdirname = tmpdir.name
        filename = os.path.join(tmpdirname, 'test.obj')
        with open(filename, 'wb') as f:
            dump(ds1, f)
        with open(filename, 'rb') as f:
            ds1_loaded = load(f)
        self.assertEqual(len(ds1), len(ds1_loaded))
        self.assertListEqual(ds1.sizes, ds1_loaded.sizes)
        self.assertIsInstance(ds1_loaded, torchani.data.ANIDataset)
        for i in range(len(ds1)):
            i1 = ds1[i]
            i2 = ds1_loaded[i]
            # item layout: (molecule id, coordinates, energy)
            molid1 = i1[0].item()
            molid2 = i2[0].item()
            self.assertEqual(molid1, molid2)
            xyz1 = i1[1]
            xyz2 = i2[1]
            maxdiff = torch.max(torch.abs(xyz1-xyz2)).item()
            self.assertEqual(maxdiff, 0)
            e1 = i1[2].item()
            e2 = i2[2].item()
            self.assertEqual(e1, e2)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
import torch
import numpy
import torchani
import unittest
import ase
import pyNeuroChem
import ase_interface
import pyanitools
import os
import logging
class TestForceNeuroChem(unittest.TestCase):
    """Compare forces from torchani ModelOnAEV against NeuroChem via ASE."""

    def setUp(self, dtype=torchani.default_dtype, device=torchani.default_device):
        self.tolerance = 1e-5
        self.logger = logging.getLogger('smiles')
        self.ncaev = torchani.NeuroChemAEV(dtype=dtype, device=device)
        self.aev_computer = torchani.SortedAEV(dtype=dtype, device=device)
        # model1: full ensemble; model2: single network (ensemble=1).
        self.model1 = torchani.ModelOnAEV(
            self.aev_computer, derivative=True, device=device, from_nc=None)
        self.model2 = torchani.ModelOnAEV(
            self.aev_computer, derivative=True, device=device, from_nc=None,
            ensemble=1)

    def _test_molecule(self, coordinates, species):
        """Check both models' forces against NeuroChem, per conformation.

        ``coordinates`` is a (conformations, atoms, 3) tensor already on the
        model's device; ``species`` is a list of element symbols.
        """
        _, force1 = self.model1(coordinates, species)
        # BUG FIX: this originally evaluated self.model1 a second time, so
        # the ensemble=1 model built in setUp was never actually tested.
        _, force2 = self.model2(coordinates, species)
        conformations = coordinates.shape[0]
        for i in range(conformations):
            c = coordinates[i]
            mol = ase.Atoms(''.join(species), positions=c)
            mol.set_calculator(ase_interface.ANI(False))
            mol.calc.setnc(self.ncaev.nc)
            # energy evaluation populates the force buffer in NeuroChem
            _ = mol.get_potential_energy()
            force_nc = self.ncaev.nc.force()
            force_nc = torch.from_numpy(force_nc).type(
                self.aev_computer.dtype).to(self.aev_computer.device)
            # The models return the energy derivative (negative force), so
            # force_nc + force should vanish.  NOTE(review): this takes the
            # max of the *signed* difference, not its absolute value —
            # confirm that is intended.
            max_diff1 = torch.max(force_nc + force1[i])
            max_diff2 = torch.max(force_nc + force2[i])
            max_diff = max(max_diff1, max_diff2)
            self.assertLess(max_diff, self.tolerance)

    def testCH4(self):
        """Two hand-written methane conformations as a smoke test."""
        coordinates = torch.tensor([[[0.03192167, 0.00638559, 0.01301679],
                                     [-0.83140486, 0.39370209, -0.26395324],
                                     [-0.66518241, -0.84461308, 0.20759389],
                                     [0.45554739, 0.54289633, 0.81170881],
                                     [0.66091919, -0.16799635, -0.91037834]],
                                    [[0, 0, 0],
                                     [0, 0, 1],
                                     [1, 0, 0],
                                     [0, 1, 0],
                                     [-1, -1, -1]],
                                    ], dtype=self.aev_computer.dtype,
                                   device=self.aev_computer.device)
        species = ['C', 'H', 'H', 'H', 'H']
        self._test_molecule(coordinates, species)

    def _test_by_file(self, number):
        """Run the force comparison on the first 10 conformations of every
        molecule in built-in dataset file number ``number``."""
        data_file = os.path.join(
            torchani.buildin_dataset_dir, 'ani_gdb_s0{}.h5'.format(number))
        adl = pyanitools.anidataloader(data_file)
        for data in adl:
            coordinates = data['coordinates'][:10, :]
            coordinates = torch.from_numpy(coordinates).type(
                self.aev_computer.dtype).to(self.aev_computer.device)
            species = data['species']
            smiles = ''.join(data['smiles'])
            self._test_molecule(coordinates, species)
            self.logger.info('Test pass: ' + smiles)

    def testGDB01(self):
        self._test_by_file(1)

    def testGDB02(self):
        self._test_by_file(2)

    def testGDB03(self):
        self._test_by_file(3)

    def testGDB04(self):
        self._test_by_file(4)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
import torch
import numpy
import torchani
import unittest
import os
import logging
import pyanitools
import ase
import pyNeuroChem
import ase_interface
class TestInference(unittest.TestCase):
    """Compare energies and per-layer activations of torchani models
    against NeuroChem (driven through the ASE calculator interface)."""

    def setUp(self, dtype=torchani.default_dtype, device=torchani.default_device):
        self.tolerance = 1e-5
        self.ncaev = torchani.NeuroChemAEV(dtype=dtype, device=device)
        # nn: loaded from NeuroChem's network dir (used for activations);
        # nn1: default build; nn2: single-network ensemble from the
        # built-in model prefix (both used for energy comparison)
        self.nn = torchani.ModelOnAEV(
            self.ncaev, from_nc=self.ncaev.network_dir)
        self.nn1 = torchani.ModelOnAEV(self.ncaev, from_nc=None)
        self.nn2 = torchani.ModelOnAEV(
            self.ncaev, from_nc=torchani.buildin_model_prefix, ensemble=1)
        self.logger = logging.getLogger('smiles')
        self.shift_energy = torchani.EnergyShifter(self.ncaev.sae_file)

    def _get_neurochem_energies(self, coordinates, species):
        """Return NeuroChem total energies, one per conformation, as a
        tensor on the AEV's dtype/device."""
        conformations = coordinates.shape[0]
        nc_energies = []
        for i in range(conformations):
            c = coordinates[i]
            mol = ase.Atoms(''.join(species), positions=c)
            mol.set_calculator(ase_interface.ANI(False))
            mol.calc.setnc(self.ncaev.nc)
            # energy evaluation updates the shared nc state
            _ = mol.get_potential_energy()
            e = self.ncaev.nc.energy()[0]
            nc_energies.append(e)
        nc_energies = torch.DoubleTensor(nc_energies)
        return nc_energies.type(self.ncaev.dtype).to(self.ncaev.device)

    def _test_molecule_energy(self, coordinates, species):
        """Assert both models reproduce NeuroChem's (SAE-shifted) energies
        within tolerance per atom.  ``coordinates`` is a numpy array."""
        energies = self._get_neurochem_energies(coordinates, species)
        # remove self atomic energies so the comparison is on the NN output
        energies = self.shift_energy.subtract_sae(energies, species)
        coordinates = torch.from_numpy(coordinates).type(
            self.ncaev.dtype).to(self.ncaev.device)
        pred_energies1 = self.nn1(coordinates, species).squeeze()
        pred_energies2 = self.nn2(coordinates, species).squeeze()
        maxdiff1 = torch.max(torch.abs(pred_energies1 - energies)).item()
        maxdiff2 = torch.max(torch.abs(pred_energies2 - energies)).item()
        maxdiff = max(maxdiff1, maxdiff2)
        maxdiff_per_atom = maxdiff / len(species)
        self.assertLess(maxdiff_per_atom, self.tolerance)

    def _test_activations(self, coordinates, species):
        """Assert every layer activation of every atom's network matches
        NeuroChem's reported activations within tolerance."""
        conformations = coordinates.shape[0]
        atoms = coordinates.shape[1]
        radial_aev, angular_aev = self.nn.aev_computer(coordinates, species)
        aev = torch.cat([radial_aev, angular_aev], dim=2)
        for i in range(conformations):
            for j in range(atoms):
                # per-species sub-network for atom j
                model_X = getattr(self.nn, 'model_' + species[j])
                layers = model_X.layers
                for layer in range(layers):
                    # get activation from NeuroChem
                    # NOTE(review): the energy is recomputed for every
                    # (atom, layer) pair; presumably required to refresh
                    # nc's activation buffers — confirm before hoisting.
                    c = coordinates[i]
                    mol = ase.Atoms(''.join(species), positions=c)
                    mol.set_calculator(ase_interface.ANI(False))
                    mol.calc.setnc(self.ncaev.nc)
                    _ = mol.get_potential_energy()
                    nca = self.ncaev.nc.activations(j, layer, 0)
                    nca = torch.from_numpy(nca).type(
                        self.ncaev.dtype).to(self.ncaev.device)
                    # get activation from ModelOnAEV
                    atom_aev = aev[:, j, :]
                    a = model_X.get_activations(atom_aev, layer)
                    a = a[i].view(-1)
                    # compute diff
                    maxdiff = torch.max(torch.abs(nca - a)).item()
                    self.assertLess(maxdiff, self.tolerance)

    def _test_by_file(self, number):
        """Run activation and energy checks on the first 10 conformations of
        every molecule in built-in dataset file number ``number``."""
        data_file = os.path.join(
            torchani.buildin_dataset_dir, 'ani_gdb_s0{}.h5'.format(number))
        adl = pyanitools.anidataloader(data_file)
        for data in adl:
            coordinates = data['coordinates'][:10, :]
            species = data['species']
            smiles = ''.join(data['smiles'])
            self._test_activations(coordinates, species)
            self._test_molecule_energy(coordinates, species)
            self.logger.info('Test pass: ' + smiles)

    def testGDB01(self):
        self._test_by_file(1)

    def testGDB02(self):
        self._test_by_file(2)

    def testGDB03(self):
        self._test_by_file(3)

    def testGDB04(self):
        self._test_by_file(4)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
import torch
import numpy
import torchani
import unittest
import logging
class TestNeuroChemLoader(unittest.TestCase):
    """Verify that network parameters loaded from NeuroChem files are
    reproduced bit-for-bit by the torchani model's linear layers."""

    def setUp(self, dtype=torchani.default_dtype, device=torchani.default_device):
        self.tolerance = 1e-5
        self.ncaev = torchani.NeuroChemAEV(dtype=dtype, device=device)
        self.logger = logging.getLogger('species')

    def testLoader(self):
        """Every layer of every per-species network must match exactly."""
        nn = torchani.ModelOnAEV(
            self.ncaev, from_nc=self.ncaev.network_dir)
        for species_index, symbol in enumerate(self.ncaev.species):
            per_species_model = getattr(nn, 'model_' + symbol)
            self.logger.info(symbol)
            for layer_index in range(per_species_model.layers):
                linear = getattr(
                    per_species_model, 'layer{}'.format(layer_index))
                reference = self.ncaev.nc.getntwkparams(
                    species_index, layer_index)
                # weights come out in torch layout already ...
                ref_weight = torch.from_numpy(reference['weights']).type(
                    self.ncaev.dtype).to(self.ncaev.device)
                # ... but biases need a transpose first
                ref_bias = torch.from_numpy(
                    numpy.transpose(reference['biases'])).type(
                    self.ncaev.dtype).to(self.ncaev.device)
                weight_error = torch.max(
                    torch.abs(ref_weight - linear.weight.data)).item()
                bias_error = torch.max(
                    torch.abs(ref_bias - linear.bias.data)).item()
                self.assertEqual(bias_error, 0.0)
                self.assertEqual(weight_error, 0.0)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment