import os
import importlib
import importlib.util
import pkgutil
import sys
import unittest

import numpy as np

from openfold.config import model_config
from openfold.model.model import AlphaFold
from openfold.utils.import_weights import import_jax_weights_

# Give JAX some GPU memory discipline
# (by default it hogs 90% of GPU memory. This disables that behavior and also
# forces it to proactively free memory that it allocates)
os.environ["XLA_PYTHON_CLIENT_ALLOCATOR"] = "platform"
os.environ["JAX_PLATFORM_NAME"] = "gpu"


def skip_unless_ds4s_installed():
    """Return a unittest skip decorator requiring DeepSpeed4Science ops."""
    has_deepspeed = importlib.util.find_spec("deepspeed") is not None
    # Only probe the submodule if the parent package exists at all
    has_ds4s = has_deepspeed and (
        importlib.util.find_spec("deepspeed.ops.deepspeed4science") is not None
    )
    return unittest.skipUnless(has_ds4s, "Requires DeepSpeed with version ≥ 0.10.4")


def skip_unless_flash_attn_installed():
    """Return a unittest skip decorator requiring the flash_attn package."""
    return unittest.skipUnless(
        importlib.util.find_spec("flash_attn") is not None,
        "Requires Flash Attention",
    )


def alphafold_is_installed():
    """Return True if the `alphafold` package can be located for import."""
    return importlib.util.find_spec("alphafold") is not None


def skip_unless_alphafold_installed():
    """Return a unittest skip decorator requiring AlphaFold."""
    return unittest.skipUnless(
        alphafold_is_installed(), "Requires AlphaFold"
    )


def import_alphafold():
    """
    Import AlphaFold together with all of its submodules and return it.

    If AlphaFold is installed using the provided setuptools script, this
    is necessary to expose all of AlphaFold's precious insides
    """
    # Idempotent: reuse an already-imported module
    if "alphafold" in sys.modules:
        return sys.modules["alphafold"]

    module = importlib.import_module("alphafold")
    # Forcefully import alphafold's submodules
    submodules = pkgutil.walk_packages(module.__path__, prefix="alphafold.")
    for submodule_info in submodules:
        importlib.import_module(submodule_info.name)
    sys.modules["alphafold"] = module
    # Expose `alphafold` as a module-level global so helpers in this file
    # (get_alphafold_config, fetch_alphafold_module_weights) can reference it
    globals()["alphafold"] = module

    return module


def get_alphafold_config():
    """
    Build the original AlphaFold model config for the model under test.

    NOTE(review): `consts` is not imported in this view — presumably supplied
    by the test configuration module; confirm against the full file. The
    `alphafold` global is injected by import_alphafold().
    """
    config = alphafold.model.config.model_config(consts.model)  # noqa
    # Deterministic mode so outputs can be compared against OpenFold's
    config.model.global_config.deterministic = True
    return config


# Location of the pretrained AlphaFold parameter archive used by the tests.
# NOTE(review): `consts` is not imported in this view — presumably supplied by
# the test configuration module; confirm against the full file.
dir_path = os.path.dirname(os.path.realpath(__file__))
_param_path = os.path.join(
    dir_path, "..", f"openfold/resources/params/params_{consts.model}.npz"
)

# Lazily-initialized module-global OpenFold model
# (see get_global_pretrained_openfold)
_model = None
71
def get_global_pretrained_openfold():
    """
    Return a module-global OpenFold model loaded with pretrained AlphaFold
    weights, in eval mode, on the GPU. Built lazily on first call and cached
    in the `_model` global thereafter.

    Raises:
        FileNotFoundError: if the parameter file at `_param_path` is missing.
    """
    global _model
    if _model is None:
        # Fail fast on missing weights, before paying for model construction
        if not os.path.exists(_param_path):
            raise FileNotFoundError(
                "Cannot load pretrained parameters. Make sure to run the "
                "installation script before running tests."
            )
        _model = AlphaFold(model_config(consts.model))
        _model = _model.eval()
        import_jax_weights_(_model, _param_path, version=consts.model)
        _model = _model.cuda()

    return _model


# Lazily-loaded cache of the original AlphaFold parameter archive
_orig_weights = None


def _get_orig_weights():
    """
    Load and cache the original AlphaFold parameter archive at `_param_path`.

    Returns the mapping produced by `np.load` on the .npz file (an NpzFile
    of flat, slash-separated parameter names to arrays).
    """
    global _orig_weights
    if _orig_weights is None:
        _orig_weights = np.load(_param_path)

    return _orig_weights


def _remove_key_prefix(d, prefix):
    for k, v in list(d.items()):
Gustaf Ahdritz's avatar
Gustaf Ahdritz committed
99
        if k.startswith(prefix):
100
            d.pop(k)
Gustaf Ahdritz's avatar
Gustaf Ahdritz committed
101
            d[k[len(prefix) :]] = v
102
103
104
105


def fetch_alphafold_module_weights(weight_path):
    """
    Fetch the original AlphaFold parameters whose flat names contain
    `weight_path`, re-keyed relative to the addressed module, converted to a
    haiku-style nested parameter dict.

    Requires import_alphafold() to have been called first, so the `alphafold`
    global is available.

    Raises:
        ImportError: if import_alphafold() has not been called.
    """
    orig_weights = _get_orig_weights()
    params = {k: v for k, v in orig_weights.items() if weight_path in k}
    if "/" in weight_path:
        spl = weight_path.split("/")
        # Tolerate a trailing slash in weight_path
        spl = spl if len(spl[-1]) != 0 else spl[:-1]
        prefix = "/".join(spl[:-1]) + "/"
        _remove_key_prefix(params, prefix)

    try:
        params = alphafold.model.utils.flat_params_to_haiku(params)  # noqa
    except NameError as e:
        # `alphafold` is only defined after import_alphafold() has injected it
        # into globals(). Any other error from flat_params_to_haiku propagates.
        raise ImportError(
            "Make sure to call import_alphafold before running this function"
        ) from e
    return params