import importlib
import importlib.util
import os
import pkgutil
import sys
import unittest

import numpy as np

from openfold.config import model_config
from openfold.model.model import AlphaFold
from openfold.utils.import_weights import import_jax_weights_
from tests.config import consts

# Give JAX some GPU memory discipline.
# By default JAX grabs ~90% of GPU memory up front; the "platform" allocator
# instead allocates on demand and proactively frees what it no longer needs.
os.environ.update(
    {
        "XLA_PYTHON_CLIENT_ALLOCATOR": "platform",
        "JAX_PLATFORM_NAME": "gpu",
    }
)


def alphafold_is_installed():
    """Return True if the DeepMind ``alphafold`` package can be imported."""
    spec = importlib.util.find_spec("alphafold")
    return spec is not None


def skip_unless_alphafold_installed():
    """Build a unittest decorator that skips the test unless AlphaFold is installed."""
    installed = alphafold_is_installed()
    return unittest.skipUnless(installed, "Requires AlphaFold")


def import_alphafold():
    """
    Import AlphaFold together with every one of its submodules.

    If AlphaFold is installed using the provided setuptools script, this
    is necessary to expose all of AlphaFold's precious insides
    """
    # Already imported once — reuse the cached module.
    if "alphafold" in sys.modules:
        return sys.modules["alphafold"]

    module = importlib.import_module("alphafold")
    # Eagerly import every submodule so that dotted attribute access
    # (e.g. alphafold.model.config) works without further imports.
    for submodule_info in pkgutil.walk_packages(module.__path__, prefix="alphafold."):
        importlib.import_module(submodule_info.name)
    sys.modules["alphafold"] = module
    # Also expose the package as a module-level name in this file.
    globals()["alphafold"] = module

    return module


def get_alphafold_config():
    """Return the reference (DeepMind) AlphaFold config for consts.model,
    with determinism forced so outputs are comparable across runs."""
    cfg = alphafold.model.config.model_config(consts.model)  # noqa
    cfg.model.global_config.deterministic = True
    return cfg


# Directory containing this file, with symlinks resolved.
dir_path = os.path.dirname(os.path.realpath(__file__))
# Pretrained JAX parameter archive for the model under test.
_param_path = os.path.join(
    dir_path, "..", f"openfold/resources/params/params_{consts.model}.npz"
)
# Lazily-built global OpenFold model; see get_global_pretrained_openfold().
_model = None
def get_global_pretrained_openfold():
    """
    Return a process-wide singleton OpenFold model loaded with the
    pretrained JAX parameters for ``consts.model`` and moved to GPU.

    Returns:
        The cached, fully initialized ``AlphaFold`` model in eval mode.

    Raises:
        FileNotFoundError: if the pretrained parameter file is missing.
    """
    global _model
    if _model is None:
        # Check for the parameter file *before* building anything so that a
        # failure leaves _model unset. (Previously the half-built model was
        # assigned to _model first, so after a raise subsequent calls would
        # silently return an un-weighted model.)
        if not os.path.exists(_param_path):
            raise FileNotFoundError(
                """Cannot load pretrained parameters. Make sure to run the 
                installation script before running tests."""
            )
        model = AlphaFold(model_config(consts.model))
        model = model.eval()
        import_jax_weights_(model, _param_path, version=consts.model)
        # Publish to the global only once fully initialized.
        _model = model.cuda()

    return _model


# Lazily-loaded cache of the raw pretrained parameter arrays;
# populated on first use by _get_orig_weights().
_orig_weights = None
def _get_orig_weights():
    """Load the pretrained parameter archive once and return the cached handle."""
    global _orig_weights
    if _orig_weights is not None:
        return _orig_weights
    _orig_weights = np.load(_param_path)
    return _orig_weights


def _remove_key_prefix(d, prefix):
    """Strip ``prefix`` from every matching key of ``d``, mutating it in place."""
    plen = len(prefix)
    # Snapshot the matching keys first: we mutate d while rewriting them.
    matching = [key for key in list(d) if key.startswith(prefix)]
    for key in matching:
        d[key[plen:]] = d.pop(key)


def fetch_alphafold_module_weights(weight_path):
    """
    Fetch the pretrained AlphaFold parameters whose flat names contain
    ``weight_path``, re-keyed relative to that module and converted into a
    nested haiku-style parameter dict.

    Args:
        weight_path: Slash-delimited module path used both to filter the
            flat parameter names and to strip their common prefix.

    Returns:
        A nested haiku parameter dict for the selected module.

    Raises:
        ImportError: if import_alphafold() has not been called first.
    """
    orig_weights = _get_orig_weights()
    params = {k: v for k, v in orig_weights.items() if weight_path in k}
    if "/" in weight_path:
        spl = weight_path.split("/")
        # Drop the trailing empty segment produced by a trailing slash.
        spl = spl if len(spl[-1]) != 0 else spl[:-1]
        prefix = "/".join(spl[:-1]) + "/"
        _remove_key_prefix(params, prefix)

    try:
        params = alphafold.model.utils.flat_params_to_haiku(params)  # noqa
    except (NameError, AttributeError) as e:
        # Was a bare `except:` that masked *any* failure (including real
        # errors inside flat_params_to_haiku) as an ImportError and lost
        # the underlying cause. Only the "alphafold not imported" case
        # should be translated, with the original exception chained.
        raise ImportError(
            "Make sure to call import_alphafold before running this function"
        ) from e
    return params