apex_amp.py
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

import torch.nn as nn
try:
    import apex.amp as apex_amp
except ImportError:
    # apex is an optional dependency; an import failure only surfaces later,
    # when apex-specific features are actually used
    pass

from torch import Tensor

from colossalai.nn.optimizer import ColossalaiOptimizer
from colossalai.utils import clip_grad_norm_fp32


class ApexAMPOptimizer(ColossalaiOptimizer):
    """ A wrapper class for APEX optimizer and it implements apex-specific backward and clip_grad_norm
18
    methods
HELSON's avatar
HELSON committed
19
    """

    def backward(self, loss: Tensor):
        """Backward pass to get all gradients

        Args:
            loss (torch.Tensor): Loss computed by a loss function
        """
        # apex scales the loss to prevent fp16 gradient underflow; the gradients
        # are unscaled again when the context manager exits
        with apex_amp.scale_loss(loss, self.optim) as scaled_loss:
            scaled_loss.backward()

    def clip_grad_norm(self, model: nn.Module, max_norm: float):
        """Clip gradients by norm

        Args:
            model (torch.nn.Module): Your model object (unused here; clipping acts on
                the optimizer's fp32 master parameters)
            max_norm (float): The max norm value for gradient clipping
        """
        if max_norm > 0:
            # apex maintains fp32 master copies of the parameters; clip their grads
            clip_grad_norm_fp32(apex_amp.master_params(self.optim), max_norm)
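

if __name__ == '__main__':
    # A minimal usage sketch, not part of the library. It assumes apex is
    # installed, a CUDA device is available, and that ColossalaiOptimizer
    # accepts the bare optimizer in its constructor; the model, optimizer,
    # and hyperparameters below are illustrative only.
    import torch

    model = nn.Linear(16, 4).cuda()
    optim = torch.optim.SGD(model.parameters(), lr=1e-3)

    # apex patches the model/optimizer pair for mixed-precision training
    model, optim = apex_amp.initialize(model, optim, opt_level='O1')
    optim = ApexAMPOptimizer(optim)

    data = torch.randn(8, 16).cuda()
    loss = model(data).sum()

    optim.backward(loss)                       # scale the loss, then backward
    optim.clip_grad_norm(model, max_norm=1.0)  # clip the fp32 master gradients
    optim.step()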