Commit 5da03c93 authored by Ziyue Jiang, committed by binmakeswell
Browse files

[NFC] polish colossalai/amp/torch_amp/_grad_scaler.py code style (#1823)


Co-authored-by: Ziyue Jiang <ziyue.jiang@gmail.com>
parent 90833b45
......@@ -3,16 +3,18 @@
# modified from https://github.com/pytorch/pytorch/blob/master/torch/cuda/amp/grad_scaler.py
# to support tensor parallel
import torch
from collections import defaultdict, abc
import warnings
from collections import abc, defaultdict
from enum import Enum
from typing import Any, Dict, List, Optional, Tuple
from colossalai.context import ParallelMode
import torch
import torch.distributed as dist
from colossalai.core import global_context as gpc
from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors
from packaging import version
from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors
from colossalai.context import ParallelMode
from colossalai.core import global_context as gpc
class _MultiDeviceReplicator(object):
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment