# compare_utils.py
import os
import importlib
import pkgutil
import sys
import unittest

import numpy as np

from openfold.config import model_config
from openfold.model.model import AlphaFold
from openfold.utils.import_weights import import_jax_weights_

# Give JAX some GPU memory discipline
# (by default it hogs 90% of GPU memory. This disables that behavior and also
# forces it to proactively free memory that it allocates)
# NOTE(review): these env vars only take effect if set before JAX initializes —
# confirm this module is imported before any `import jax`.
os.environ["XLA_PYTHON_CLIENT_ALLOCATOR"] = "platform"
os.environ["JAX_PLATFORM_NAME"] = "gpu"


def skip_unless_ds4s_installed():
    """Decorator factory: skip the test unless DeepSpeed4Science ops are available."""
    have_ds4s = importlib.util.find_spec("deepspeed") is not None
    if have_ds4s:
        # Only probe the submodule once the parent package is known to exist
        have_ds4s = importlib.util.find_spec("deepspeed.ops.deepspeed4science") is not None
    return unittest.skipUnless(have_ds4s, "Requires DeepSpeed with version ≥ 0.10.4")


def skip_unless_flash_attn_installed():
    """Decorator factory: skip the test unless the flash_attn package is importable."""
    spec = importlib.util.find_spec("flash_attn")
    return unittest.skipUnless(spec is not None, "Requires Flash Attention")


def alphafold_is_installed():
    """Return True when DeepMind's AlphaFold package can be located for import."""
    spec = importlib.util.find_spec("alphafold")
    return spec is not None


def skip_unless_alphafold_installed():
    """Decorator factory: skip the test when AlphaFold is not installed."""
    return unittest.skipUnless(
        alphafold_is_installed(), "Requires AlphaFold"
    )


def import_alphafold():
    """
    Import AlphaFold and all of its submodules, and expose the package as a
    module-level global named ``alphafold``.

    If AlphaFold is installed using the provided setuptools script, this
    is necessary to expose all of AlphaFold's precious insides.

    Returns:
        The imported ``alphafold`` package.
    """
    if "alphafold" in sys.modules:
        module = sys.modules["alphafold"]
        # BUGFIX: the cached path previously returned without installing the
        # `alphafold` global that get_alphafold_config() and
        # fetch_alphafold_module_weights() reference by bare name.
        globals()["alphafold"] = module
        return module

    module = importlib.import_module("alphafold")
    # Forcefully import alphafold's submodules so attribute access like
    # alphafold.model.config works without further explicit imports.
    # (import_module already registers the package in sys.modules.)
    for submodule_info in pkgutil.walk_packages(module.__path__, prefix="alphafold."):
        importlib.import_module(submodule_info.name)
    globals()["alphafold"] = module

    return module


def get_alphafold_config():
    """Return AlphaFold's 'model_1_ptm' config with deterministic mode enabled.

    Requires import_alphafold() to have been called first.
    """
    cfg = alphafold.model.config.model_config("model_1_ptm")  # noqa
    cfg.model.global_config.deterministic = True
    return cfg


# Location of the pretrained JAX parameters (populated by the install script)
_param_path = "openfold/resources/params/params_model_1_ptm.npz"
# Lazily constructed singleton model; see get_global_pretrained_openfold()
_model = None
def get_global_pretrained_openfold():
    """
    Return a process-wide singleton OpenFold model loaded with the pretrained
    'model_1_ptm' parameters, in eval mode, moved to GPU.

    Returns:
        The cached AlphaFold model instance.

    Raises:
        FileNotFoundError: if the pretrained parameter file is missing.
    """
    global _model
    if _model is None:
        # Fail fast: check for the parameter file BEFORE paying for model
        # construction (the original built the model first, wasting work).
        if not os.path.exists(_param_path):
            raise FileNotFoundError(
                "Cannot load pretrained parameters. Make sure to run the "
                "installation script before running tests."
            )
        _model = AlphaFold(model_config("model_1_ptm"))
        _model = _model.eval()
        import_jax_weights_(_model, _param_path, version="model_1_ptm")
        _model = _model.cuda()

    return _model


_orig_weights = None
def _get_orig_weights():
    """Lazily load and cache the raw pretrained parameter arrays from disk."""
    global _orig_weights
    if _orig_weights is not None:
        return _orig_weights
    _orig_weights = np.load(_param_path)
    return _orig_weights


def _remove_key_prefix(d, prefix):
    for k, v in list(d.items()):
Gustaf Ahdritz's avatar
Gustaf Ahdritz committed
98
        if k.startswith(prefix):
99
            d.pop(k)
Gustaf Ahdritz's avatar
Gustaf Ahdritz committed
100
            d[k[len(prefix) :]] = v
101
102
103
104


def fetch_alphafold_module_weights(weight_path):
    """
    Select the pretrained parameters whose flat names contain `weight_path`
    and convert them to AlphaFold's nested haiku format.

    Args:
        weight_path:
            Slash-separated module path used as a substring filter, e.g.
            "alphafold/alphafold_iteration/evoformer". A trailing slash is
            tolerated.

    Returns:
        The selected parameters in haiku's nested mapping format.

    Raises:
        ImportError: if import_alphafold() has not been called first.
    """
    orig_weights = _get_orig_weights()
    params = {k: v for k, v in orig_weights.items() if weight_path in k}
    if "/" in weight_path:
        spl = weight_path.split("/")
        # Drop the empty component produced by a trailing slash
        spl = spl if len(spl[-1]) != 0 else spl[:-1]
        # Re-key params relative to the selected module
        prefix = "/".join(spl[:-1]) + "/"
        _remove_key_prefix(params, prefix)

    try:
        params = alphafold.model.utils.flat_params_to_haiku(params)  # noqa
    except NameError as e:
        # BUGFIX: was a bare `except:` that swallowed *every* exception
        # (including KeyboardInterrupt) and masked genuine conversion errors.
        # Only a missing `alphafold` global means the caller forgot to import.
        raise ImportError(
            "Make sure to call import_alphafold before running this function"
        ) from e

    return params