"tests/vscode:/vscode.git/clone" did not exist on "3e4fb0b9d924951b17ce70b847b597995895cfc6"
primitives.py 3.96 KB
Newer Older
Boris Bonev's avatar
Boris Bonev committed
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
# coding=utf-8

# SPDX-FileCopyrightText: Copyright (c) 2022 The torch-harmonics Authors. All rights reserved.
# SPDX-License-Identifier: BSD-3-Clause
# 
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

import torch
import torch.distributed as dist

from .utils import polar_group, azimuth_group, is_initialized

# general helpers
def get_memory_format(tensor):
    if tensor.is_contiguous(memory_format=torch.channels_last):
        return torch.channels_last
    else:
        return torch.contiguous_format
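# Added note (illustration, not original code): this helper lets the collectives
# below preserve a channels-last layout, e.g. for
#   t = torch.randn(2, 4, 8, 8).to(memory_format=torch.channels_last)
# get_memory_format(t) returns torch.channels_last, and the send/recv buffers in
# _transpose are made contiguous in that same layout.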

def split_tensor_along_dim(tensor, dim, num_chunks):
    assert dim < tensor.dim(), f"Error, tensor dimension is {tensor.dim()} which cannot be split along {dim}"
    assert (tensor.shape[dim] % num_chunks == 0), f"Error, cannot split dim {dim} evenly. Dim size is \
                                                   {tensor.shape[dim]} and requested number of splits is {num_chunks}"
    chunk_size = tensor.shape[dim] // num_chunks
    tensor_list = torch.split(tensor, chunk_size, dim=dim)
    
    return tensor_list
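# Added usage sketch (illustration, not original code), assuming the split
# dimension is evenly divisible by the number of chunks:
#   x = torch.randn(8, 16)
#   chunks = split_tensor_along_dim(x, dim=1, num_chunks=4)
#   # -> tuple of 4 tensors, each of shape (8, 4)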

def _transpose(tensor, dim0, dim1, group=None, async_op=False):
    # get input format
    input_format = get_memory_format(tensor)
    
    # get comm params
    comm_size = dist.get_world_size(group=group)

    # split and local transposition
    split_size = tensor.shape[dim0] // comm_size
    x_send = [y.contiguous(memory_format=input_format) for y in torch.split(tensor, split_size, dim=dim0)]
    x_recv = [torch.empty_like(x_send[0]).contiguous(memory_format=input_format) for _ in range(comm_size)]
    
    # global transposition
    req = dist.all_to_all(x_recv, x_send, group=group, async_op=async_op)
    
    return x_recv, req 
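# Added data-movement sketch (illustration, not original code): with
# comm_size = 4, each rank splits its local tensor into 4 equal chunks along
# dim0; dist.all_to_all sends chunk i to rank i and fills x_recv with the 4
# chunks received from the peer ranks. The callers below concatenate x_recv
# along dim1, so the tensor ends up partitioned along dim0 instead of dim1
# (dim1 itself is not used inside _transpose).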


class distributed_transpose_azimuth(torch.autograd.Function):

    @staticmethod
    def forward(ctx, x, dim):
        xlist, _ = _transpose(x, dim[0], dim[1], group=azimuth_group())
        x = torch.cat(xlist, dim=dim[1])
        ctx.dim = dim
        return x

    @staticmethod
    def backward(ctx, go):
        dim = ctx.dim
        gilist, _ = _transpose(go, dim[1], dim[0], group=azimuth_group())
        gi = torch.cat(gilist, dim=dim[0])
        return gi, None

    
class distributed_transpose_polar(torch.autograd.Function):

    @staticmethod
    def forward(ctx, x, dim):
        xlist, _ = _transpose(x, dim[0], dim[1], group=polar_group())
        x = torch.cat(xlist, dim=dim[1])
        ctx.dim = dim
        return x

    @staticmethod
    def backward(ctx, go):
        dim = ctx.dim
        gilist, _ = _transpose(go, dim[1], dim[0], group=polar_group())
        gi = torch.cat(gilist, dim=dim[0])
        return gi, None
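# Added usage sketch (illustration, not original code); assumes torch.distributed
# is initialized, the polar/azimuth groups from .utils are set up, and the
# transposed dimensions are evenly divisible by the respective group size:
#
#   # x is locally split along dim -1; redistribute it so it is split along -2
#   y = distributed_transpose_polar.apply(x, (-2, -1))
#   # ... compute on y, which now holds the full dim -1 locally ...
#   # swapping the dims performs the inverse redistribution
#   x_again = distributed_transpose_polar.apply(y, (-1, -2))
#
# distributed_transpose_azimuth is used the same way, but communicates over the
# azimuth group. Since both are torch.autograd.Function subclasses, the backward
# pass automatically performs the inverse all_to_all for gradients.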