"""Top-level package initializer.

Importing this package eagerly imports its public submodules so that
``import <pkg>`` exposes ``<pkg>.amp``, ``<pkg>.optimizers``, etc.
"""

# May help avoid undefined symbol errors
# https://pytorch.org/cppdocs/notes/faq.html#undefined-symbol-errors-from-pytorch-aten
import torch

# torch.distributed is not compiled into every PyTorch build (e.g. some
# Windows/macOS wheels), so only expose the distributed helpers when present.
if torch.distributed.is_available():
    from . import parallel

from . import amp
from . import fp16_utils

# For optimizers and normalization there is no Python fallback.
# Absence of cuda backend is a hard error.
# I would like the errors from importing fused_adam_cuda or fused_layer_norm_cuda
# to be triggered lazily, because if someone has installed with --cpp_ext and --cuda_ext
# so they expect those backends to be available, but for some reason they actually aren't
# available (for example because they built improperly in a way that isn't revealed until
# load time) the error message is timely and visible.
from . import optimizers
from . import normalization
from . import pyprof
from . import transformer