import importlib
import importlib.util
import os
import pkgutil
import sys
import unittest

import numpy as np

from openfold.config import model_config
from openfold.model.model import AlphaFold
from openfold.utils.import_weights import import_jax_weights_
from tests.config import consts

# Give JAX some GPU memory discipline
# (by default it hogs 90% of GPU memory. This disables that behavior and also
# forces it to proactively free memory that it allocates)
os.environ["XLA_PYTHON_CLIENT_ALLOCATOR"] = "platform"
os.environ["JAX_PLATFORM_NAME"] = "gpu"


def alphafold_is_installed():
    """Return True when the ``alphafold`` package can be located.

    Uses ``find_spec`` so AlphaFold is never actually imported just to
    check for its presence.
    """
    spec = importlib.util.find_spec("alphafold")
    return spec is not None


def skip_unless_alphafold_installed():
    """Decorator factory that skips a test unless AlphaFold is importable."""
    installed = alphafold_is_installed()
    return unittest.skipUnless(installed, "Requires AlphaFold")


def import_alphafold():
    """
    If AlphaFold is installed using the provided setuptools script, this
    is necessary to expose all of AlphaFold's precious insides
    """
    # Reuse the module if it has been imported before.
    if "alphafold" in sys.modules:
        return sys.modules["alphafold"]

    module = importlib.import_module("alphafold")
    # Forcefully import alphafold's submodules so that nested attribute
    # access (e.g. alphafold.model.config) works without further imports.
    for info in pkgutil.walk_packages(module.__path__, prefix="alphafold."):
        importlib.import_module(info.name)

    sys.modules["alphafold"] = module
    # Also publish the package as a global of this module, for the helpers
    # below that reference `alphafold` directly.
    globals()["alphafold"] = module
    return module


def get_alphafold_config():
    """Return the AlphaFold config for ``consts.model``, forced deterministic.

    Requires import_alphafold() to have been called first so the
    ``alphafold`` global is available.
    """
    cfg = alphafold.model.config.model_config(consts.model)  # noqa
    cfg.model.global_config.deterministic = True
    return cfg


# Location of the pretrained parameter file for the model under test.
_param_path = f"openfold/resources/params/params_{consts.model}.npz"

# Process-wide cache for the pretrained OpenFold model; populated lazily by
# get_global_pretrained_openfold().
_model = None


def get_global_pretrained_openfold():
    """
    Lazily construct, load, and cache the pretrained OpenFold model.

    The model is built at most once per process: it is created from the
    test config, loaded with the pretrained JAX weights at ``_param_path``,
    moved to CUDA, and cached in the module-level ``_model``.

    Returns:
        The pretrained ``AlphaFold`` model, in eval mode, on CUDA.

    Raises:
        FileNotFoundError: if the pretrained parameter file is missing.
    """
    global _model
    if _model is None:
        # Check for the weights before building anything, so a failure here
        # does not leave a half-initialized (weightless) model cached in
        # `_model` for subsequent calls to silently return.
        if not os.path.exists(_param_path):
            raise FileNotFoundError(
                """Cannot load pretrained parameters. Make sure to run the 
                installation script before running tests."""
            )
        model = AlphaFold(model_config(consts.model))
        model = model.eval()
        import_jax_weights_(model, _param_path, version=consts.model)
        # Only publish to the global cache once fully initialized.
        _model = model.cuda()

    return _model


# Lazily-loaded raw AlphaFold parameter archive (see _get_orig_weights).
_orig_weights = None


def _get_orig_weights():
    """Return the raw AlphaFold .npz weights, loading them on first use."""
    global _orig_weights
    if _orig_weights is not None:
        return _orig_weights

    _orig_weights = np.load(_param_path)
    return _orig_weights


def _remove_key_prefix(d, prefix):
    """Strip ``prefix`` from every key of ``d`` that starts with it, in place."""
    prefixed = [key for key in list(d) if key.startswith(prefix)]
    for key in prefixed:
        # Re-insert under the shortened key, removing the original entry.
        d[key[len(prefix) :]] = d.pop(key)


def fetch_alphafold_module_weights(weight_path):
    """
    Fetch the pretrained AlphaFold weights for a single (sub)module.

    Args:
        weight_path: Slash-separated flat-parameter path fragment selecting
            the module of interest; every original weight whose name
            contains this fragment is included.

    Returns:
        The selected parameters, converted to nested haiku format.

    Raises:
        ImportError: if import_alphafold() has not been called first, so
            the ``alphafold`` global (or its utils) is unavailable.
    """
    orig_weights = _get_orig_weights()
    params = {k: v for k, v in orig_weights.items() if weight_path in k}
    if "/" in weight_path:
        spl = weight_path.split("/")
        # A trailing slash yields an empty last segment; drop it.
        spl = spl if len(spl[-1]) != 0 else spl[:-1]
        # Strip the parent path so keys are relative to the selected module.
        prefix = "/".join(spl[:-1]) + "/"
        _remove_key_prefix(params, prefix)

    try:
        params = alphafold.model.utils.flat_params_to_haiku(params)  # noqa
    except (NameError, AttributeError) as e:
        # `alphafold` is only defined (and fully populated) after
        # import_alphafold() runs; anything else should propagate as-is
        # rather than be masked by a misleading ImportError.
        raise ImportError(
            "Make sure to call import_alphafold before running this function"
        ) from e
    return params