..
    Copyright (c) 2022-2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved.

    See LICENSE for license information.

pyTorch
=======

.. autoapiclass:: transformer_engine.pytorch.Linear(in_features, out_features, bias=True, **kwargs)
  :members: forward

.. autoapiclass:: transformer_engine.pytorch.LayerNorm(hidden_size, eps=1e-5, **kwargs)

.. autoapiclass:: transformer_engine.pytorch.RMSNorm(hidden_size, eps=1e-5, **kwargs)

.. autoapiclass:: transformer_engine.pytorch.LayerNormLinear(in_features, out_features, eps=1e-5, bias=True, **kwargs)
  :members: forward

.. autoapiclass:: transformer_engine.pytorch.LayerNormMLP(hidden_size, ffn_hidden_size, eps=1e-5, bias=True, **kwargs)
  :members: forward

.. autoapiclass:: transformer_engine.pytorch.DotProductAttention(num_attention_heads, kv_channels, **kwargs)
  :members: forward

.. autoapiclass:: transformer_engine.pytorch.MultiheadAttention(hidden_size, num_attention_heads, **kwargs)
  :members: forward

.. autoapiclass:: transformer_engine.pytorch.TransformerLayer(hidden_size, ffn_hidden_size, num_attention_heads, **kwargs)
  :members: forward

.. autoapiclass:: transformer_engine.pytorch.InferenceParams(max_batch_size, max_sequence_length)
  :members: swap_key_value_dict

.. autoapifunction:: transformer_engine.pytorch.fp8_autocast

.. autoapifunction:: transformer_engine.pytorch.checkpoint

.. autoapifunction:: transformer_engine.pytorch.onnx_export