ops.py
import torch
from contextlib import contextmanager

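# The layer subclasses below override reset_parameters() to do nothing, which skips
# PyTorch's default random weight initialization. A likely rationale (not stated in
# the file): these modules get their weights loaded from a checkpoint right after
# construction, so the default initialization would be wasted work.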
class Linear(torch.nn.Linear):
    def reset_parameters(self):
        return None

class Conv2d(torch.nn.Conv2d):
    def reset_parameters(self):
        return None

class Conv3d(torch.nn.Conv3d):
    def reset_parameters(self):
        return None

class GroupNorm(torch.nn.GroupNorm):
    def reset_parameters(self):
        return None

class LayerNorm(torch.nn.LayerNorm):
    def reset_parameters(self):
        return None

def conv_nd(dims, *args, **kwargs):
    if dims == 2:
        return Conv2d(*args, **kwargs)
    elif dims == 3:
        return Conv3d(*args, **kwargs)
    else:
        raise ValueError(f"unsupported dimensions: {dims}")
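
# Hypothetical usage sketch (not part of the original file): conv_nd lets shared
# model code pick the conv class by dimensionality, e.g.
#   conv = conv_nd(2, 4, 8, kernel_size=3, padding=1)   # returns an ops.Conv2d
#   conv = conv_nd(3, 4, 8, kernel_size=3, padding=1)   # returns an ops.Conv3d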

def cast_bias_weight(s, input):
    # Return the module's weight and bias cast to the input tensor's device and dtype.
    bias = None
    if s.bias is not None:
        bias = s.bias.to(device=input.device, dtype=input.dtype)
    weight = s.weight.to(device=input.device, dtype=input.dtype)
    return weight, bias

# Variants of the layers above that keep their parameters stored as-is, but cast the
# weight and bias to the input's device and dtype on every forward pass.
class manual_cast:
    class Linear(Linear):
        def forward(self, input):
            weight, bias = cast_bias_weight(self, input)
            return torch.nn.functional.linear(input, weight, bias)

    class Conv2d(Conv2d):
        def forward(self, input):
            weight, bias = cast_bias_weight(self, input)
            return self._conv_forward(input, weight, bias)

    class Conv3d(Conv3d):
        def forward(self, input):
            weight, bias = cast_bias_weight(self, input)
            return self._conv_forward(input, weight, bias)

    class GroupNorm(GroupNorm):
        def forward(self, input):
            weight, bias = cast_bias_weight(self, input)
            return torch.nn.functional.group_norm(input, self.num_groups, weight, bias, self.eps)

    class LayerNorm(LayerNorm):
        def forward(self, input):
            weight, bias = cast_bias_weight(self, input)
            return torch.nn.functional.layer_norm(input, self.normalized_shape, weight, bias, self.eps)
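
# Hypothetical usage sketch (not part of the original file): a manual_cast layer keeps
# its parameters in whatever dtype they were created or loaded in and casts them to the
# input's device/dtype in forward(), so e.g. an fp32 layer can serve fp16 inputs:
#   lin = manual_cast.Linear(320, 640)                    # parameters stay fp32 (uninitialized,
#                                                          # since reset_parameters is disabled)
#   y = lin(torch.randn(1, 320, dtype=torch.float16))     # weight/bias cast to fp16 for the matmul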

@contextmanager
def use_comfy_ops(device=None, dtype=None): # Kind of an ugly hack but I can't think of a better way
    old_torch_nn_linear = torch.nn.Linear
    force_device = device
    force_dtype = dtype
    def linear_with_dtype(in_features: int, out_features: int, bias: bool = True, device=None, dtype=None):
        if force_device is not None:
            device = force_device
        if force_dtype is not None:
            dtype = force_dtype
        return Linear(in_features, out_features, bias=bias, device=device, dtype=dtype)

    torch.nn.Linear = linear_with_dtype
    try:
        yield
    finally:
        torch.nn.Linear = old_torch_nn_linear
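
# Hypothetical usage sketch (not part of the original file): while the context manager
# is active, every torch.nn.Linear created anywhere resolves to ops.Linear (no weight
# init) on the forced device/dtype; the stock class is restored on exit.
#   with use_comfy_ops(device="cpu", dtype=torch.float16):
#       layer = torch.nn.Linear(320, 640)    # actually ops.Linear, fp16, uninitialized weights
#   # torch.nn.Linear is the original PyTorch class again here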