"""Utilities for using pretrained models."""
# pylint: disable= no-member, arguments-differ, invalid-name
import os
import torch
import torch.nn.functional as F

from dgl.data.utils import _get_dgl_url, download, get_download_dir, extract_archive
from rdkit import Chem

from ..model import GCNPredictor, GATPredictor, AttentiveFPPredictor, DGMG, DGLJTNNVAE, \
    WLNReactionCenter

__all__ = ['load_pretrained']

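# Mapping from model names to the paths of their pretrained checkpoints,
# resolved into full download URLs with _get_dgl_url at download time.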
URL = {
    'GCN_Tox21': 'dgllife/pre_trained/gcn_tox21.pth',
    'GAT_Tox21': 'dgllife/pre_trained/gat_tox21.pth',
    'AttentiveFP_Aromaticity': 'dgllife/pre_trained/attentivefp_aromaticity.pth',
    'DGMG_ChEMBL_canonical': 'pre_trained/dgmg_ChEMBL_canonical.pth',
    'DGMG_ChEMBL_random': 'pre_trained/dgmg_ChEMBL_random.pth',
    'DGMG_ZINC_canonical': 'pre_trained/dgmg_ZINC_canonical.pth',
    'DGMG_ZINC_random': 'pre_trained/dgmg_ZINC_random.pth',
    'JTNN_ZINC': 'pre_trained/JTNN_ZINC.pth',
    'wln_center_uspto': 'dgllife/pre_trained/wln_center_uspto.pth'
}

def download_and_load_checkpoint(model_name, model, model_postfix,
                                 local_pretrained_path='pre_trained.pth', log=True):
    """Download pretrained model checkpoint

    The model will be loaded to CPU.

    Parameters
    ----------
    model_name : str
        Name of the model
    model : nn.Module
        Instantiated model to load the checkpoint into
    model_postfix : str
        URL postfix of the pretrained model checkpoint on the DGL server
    local_pretrained_path : str
        Local filename for the downloaded checkpoint; it will be prefixed
        with the model name. Default to 'pre_trained.pth'.
    log : bool
        Whether to print progress for model loading

    Returns
    -------
    model : nn.Module
        Pretrained model
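
    Examples
    --------
    A minimal illustrative sketch; downloading the checkpoint requires network
    access. The GCN configuration mirrors the ``'GCN_Tox21'`` setup used by
    ``load_pretrained`` below.

    >>> model = GCNPredictor(in_feats=74, hidden_feats=[64, 64],
    ...                      classifier_hidden_feats=64, n_tasks=12)
    >>> model = download_and_load_checkpoint('GCN_Tox21', model,
    ...                                      URL['GCN_Tox21'], log=False)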
    """
    url_to_pretrained = _get_dgl_url(model_postfix)
    local_pretrained_path = '_'.join([model_name, local_pretrained_path])
    download(url_to_pretrained, path=local_pretrained_path, log=log)
    checkpoint = torch.load(local_pretrained_path, map_location='cpu')
    model.load_state_dict(checkpoint['model_state_dict'])

    if log:
        print('Pretrained model loaded')

    return model

# pylint: disable=I1101
def load_pretrained(model_name, log=True):
    """Load a pretrained model

    Parameters
    ----------
    model_name : str
        Currently supported options include

        * ``'GCN_Tox21'``
        * ``'GAT_Tox21'``
        * ``'AttentiveFP_Aromaticity'``
        * ``'DGMG_ChEMBL_canonical'``
        * ``'DGMG_ChEMBL_random'``
        * ``'DGMG_ZINC_canonical'``
        * ``'DGMG_ZINC_random'``
        * ``'JTNN_ZINC'``
        * ``'wln_center_uspto'``

    log : bool
        Whether to print progress for model loading

    Returns
    -------
    model : nn.Module
        Pretrained model
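
    Examples
    --------
    A minimal illustrative sketch; downloading the checkpoint requires network
    access.

    >>> model = load_pretrained('GCN_Tox21', log=False)
    >>> model = model.eval()  # switch to evaluation mode for inference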
    """
    if model_name not in URL:
        raise RuntimeError("Cannot find a pretrained model with name {}".format(model_name))

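    # Instantiate the architecture used during pretraining; its parameters are
    # filled in from the downloaded checkpoint below.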
    if model_name == 'GCN_Tox21':
        model = GCNPredictor(in_feats=74,
                             hidden_feats=[64, 64],
                             classifier_hidden_feats=64,
                             n_tasks=12)

    elif model_name == 'GAT_Tox21':
        model = GATPredictor(in_feats=74,
                             hidden_feats=[32, 32],
                             num_heads=[4, 4],
                             agg_modes=['flatten', 'mean'],
                             activations=[F.elu, None],
                             classifier_hidden_feats=64,
                             n_tasks=12)

    elif model_name == 'AttentiveFP_Aromaticity':
        model = AttentiveFPPredictor(node_feat_size=39,
                                     edge_feat_size=10,
                                     num_layers=2,
                                     num_timesteps=2,
                                     graph_feat_size=200,
                                     n_tasks=1,
                                     dropout=0.2)

    elif model_name.startswith('DGMG'):
        if model_name.startswith('DGMG_ChEMBL'):
            atom_types = ['O', 'Cl', 'C', 'S', 'F', 'Br', 'N']
        elif model_name.startswith('DGMG_ZINC'):
            atom_types = ['Br', 'S', 'C', 'P', 'N', 'O', 'F', 'Cl', 'I']
        bond_types = [Chem.rdchem.BondType.SINGLE,
                      Chem.rdchem.BondType.DOUBLE,
                      Chem.rdchem.BondType.TRIPLE]

        model = DGMG(atom_types=atom_types,
                     bond_types=bond_types,
                     node_hidden_size=128,
                     num_prop_rounds=2,
                     dropout=0.2)

    elif model_name == "JTNN_ZINC":
        default_dir = get_download_dir()
        vocab_file = '{}/jtnn/{}.txt'.format(default_dir, 'vocab')
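        # Download and extract the junction tree vocabulary file if it is not
        # already cached locally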
        if not os.path.exists(vocab_file):
            zip_file_path = '{}/jtnn.zip'.format(default_dir)
            download(_get_dgl_url('dgllife/jtnn.zip'), path=zip_file_path)
            extract_archive(zip_file_path, '{}/jtnn'.format(default_dir))
        model = DGLJTNNVAE(vocab_file=vocab_file,
                           depth=3,
                           hidden_size=450,
                           latent_size=56)

    elif model_name == 'wln_center_uspto':
        model = WLNReactionCenter(node_in_feats=82,
                                  edge_in_feats=6,
                                  node_pair_in_feats=10,
                                  node_out_feats=300,
                                  n_layers=3,
                                  n_tasks=5)

    return download_and_load_checkpoint(model_name, model, URL[model_name], log=log)