# Copyright 2021 AlQuraishi Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import torch
import numpy as np
import unittest
from openfold.model.triangular_attention import TriangleAttention
from openfold.utils.tensor_utils import tree_map

import tests.compare_utils as compare_utils
from tests.config import consts

# AlphaFold (plus jax/haiku) is an optional comparison target: import it only
# when present so the shape test still runs without it.  Tests that need it
# are skipped via @compare_utils.skip_unless_alphafold_installed() below.
if compare_utils.alphafold_is_installed():
    alphafold = compare_utils.import_alphafold()
    import jax
    import haiku as hk


class TestTriangularAttention(unittest.TestCase):
    """Tests for openfold.model.triangular_attention.TriangleAttention."""

    def test_shape(self):
        """TriangleAttention must map a pair tensor to one of the same shape."""
        c_z = consts.c_z
        c = 12
        no_heads = 4
        starting = True

        tan = TriangleAttention(c_z, c, no_heads, starting)

        batch_size = consts.batch_size
        n_res = consts.n_res

        # Pair representation: [batch, n_res, n_res, c_z]
        x = torch.rand((batch_size, n_res, n_res, c_z))
        shape_before = x.shape
        x = tan(x, chunk_size=None)
        shape_after = x.shape

        self.assertTrue(shape_before == shape_after)

    def _tri_att_compare(self, starting=False):
        """Compare OpenFold's TriangleAttention output against AlphaFold's.

        Args:
            starting: If True, compare the "starting node" variant;
                otherwise the "ending node" variant.
        """
        name = (
            "triangle_attention_"
            + ("starting" if starting else "ending")
            + "_node"
        )

        def run_tri_att(pair_act, pair_mask):
            # Build the reference AlphaFold module from its own config so
            # hyperparameters match the pretrained weights fetched below.
            config = compare_utils.get_alphafold_config()
            c_e = config.model.embeddings_and_evoformer.evoformer
            tri_att = alphafold.model.modules.TriangleAttention(
                c_e.triangle_attention_starting_node
                if starting
                else c_e.triangle_attention_ending_node,
                config.model.global_config,
                name=name,
            )
            act = tri_att(pair_act=pair_act, pair_mask=pair_mask)
            return act

        f = hk.transform(run_tri_att)

        n_res = consts.n_res

        pair_act = np.random.rand(n_res, n_res, consts.c_z)
        pair_mask = np.random.randint(low=0, high=2, size=(n_res, n_res))

        # Fetch pretrained parameters (but only from one block)
        params = compare_utils.fetch_alphafold_module_weights(
            "alphafold/alphafold_iteration/evoformer/evoformer_iteration/"
            + name
        )
        # Weights are stacked across evoformer blocks; keep block 0 only.
        params = tree_map(lambda n: n[0], params, jax.numpy.DeviceArray)

        out_gt = f.apply(params, None, pair_act, pair_mask).block_until_ready()
        out_gt = torch.as_tensor(np.array(out_gt))

        model = compare_utils.get_global_pretrained_openfold()
        module = (
            model.evoformer.blocks[0].tri_att_start
            if starting
            else model.evoformer.blocks[0].tri_att_end
        )
        out_repro = module(
            torch.as_tensor(pair_act, dtype=torch.float32).cuda(),
            mask=torch.as_tensor(pair_mask, dtype=torch.float32).cuda(),
            chunk_size=None,
        ).cpu()

        # BUG FIX: the original asserted torch.max(|gt - repro| < eps), i.e.
        # the max of a *boolean* tensor, which is truthy whenever ANY element
        # is within eps.  The max absolute error itself must be below eps.
        self.assertTrue(torch.max(torch.abs(out_gt - out_repro)) < consts.eps)

    @compare_utils.skip_unless_alphafold_installed()
    def test_tri_att_end_compare(self):
        self._tri_att_compare()

    @compare_utils.skip_unless_alphafold_installed()
    def test_tri_att_start_compare(self):
        self._tri_att_compare(starting=True)


# Standard script entry point: run all tests in this module.
if __name__ == "__main__":
    unittest.main()