..
  autogenerated from source/_templates/autosummary/model_class.rst

.. currentmodule:: torchaudio.models

..

{%- set methods=["forward"] %}
{%- set helpers={
        "torchaudio.models.RNNTBeamSearch": [
            "Hypothesis",
        ],
    }
-%}
{%- set factory={
        "torchaudio.models.ConvTasNet": [
            "conv_tasnet_base",
        ],
        "torchaudio.models.Wav2Vec2Model": [
            "wav2vec2_model",
            "wav2vec2_base",
            "wav2vec2_large",
            "wav2vec2_large_lv60k",
            "wav2vec2_xlsr_300m",
            "wav2vec2_xlsr_1b",
            "wav2vec2_xlsr_2b",
            "hubert_base",
            "hubert_large",
            "hubert_xlarge",
            "wavlm_model",
            "wavlm_base",
            "wavlm_large",
        ],
        "torchaudio.models.HuBERTPretrainModel": [
            "hubert_pretrain_model",
            "hubert_pretrain_base",
            "hubert_pretrain_large",
            "hubert_pretrain_xlarge",
        ],
        "torchaudio.models.RNNT": [
            "emformer_rnnt_model",
            "emformer_rnnt_base",
        ],
        "torchaudio.models.HDemucs": [
            "hdemucs_low",
            "hdemucs_medium",
            "hdemucs_high",
        ],
        "torchaudio.models.SquimObjective": [
            "squim_objective_model",
            "squim_objective_base",
        ],
        "torchaudio.models.SquimSubjective": [
            "squim_subjective_model",
            "squim_subjective_base",
        ],
    }
-%}
{%- set utils={
        "torchaudio.models.Wav2Vec2Model": [
            "~torchaudio.models.wav2vec2.utils.import_fairseq_model",
            "~torchaudio.models.wav2vec2.utils.import_huggingface_model",
        ]
    }
-%}
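
..
  Sketch of how these tables are used, assuming the standard
  sphinx.ext.autosummary template context: ``fullname`` is the
  fully-qualified class name (e.g. ``"torchaudio.models.RNNT"``), so
  ``factory[fullname]`` resolves to
  ``["emformer_rnnt_model", "emformer_rnnt_base"]`` and populates the
  "Factory Functions" autosummary table further down.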

{%- if name in ["Wav2Vec2Model"] %}
  {{ methods.extend(["extract_features"]) }}
{%- elif name in ["Emformer", "RNNTBeamSearch", "WaveRNN", "Tacotron2", ] %}
  {{ methods.extend(["infer"]) }}
{%- elif name == "RNNT" %}
  {{ methods.extend(["transcribe_streaming", "transcribe", "predict", "join"]) }}
{%- endif %}
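
..
  Illustrative expansion only (RST comments are not rendered): when
  ``name == "RNNT"``, the branch above leaves
  ``methods == ["forward", "transcribe_streaming", "transcribe", "predict", "join"]``,
  and the Methods loop below emits one ``automethod`` entry per item.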

.. TITLE

{{ name | underline }}

.. CLASS DEFINITIONS

.. autoclass:: {{ fullname }}

Methods
=======

{% for item in methods %}

{{item | underline("-") }}

.. container:: py attribute

   .. automethod:: {{[fullname, item] | join('.')}}

{%- endfor %}
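
..
  Sketch of a single iteration of the loop above, assuming
  ``fullname == "torchaudio.models.RNNT"`` and ``item == "forward"``::

    forward
    -------

    .. container:: py attribute

       .. automethod:: torchaudio.models.RNNT.forward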

.. HELPER STRUCTURES

{%- if helpers[fullname] %}

Support Structures
==================

{%- for item in helpers[fullname] %}

{{item | underline("-") }}

.. container:: py attribute

   .. autodata:: {{["torchaudio.models", item] | join('.')}}
      :no-value:

{%- endfor %}

{%- endif %}
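
..
  Sketch of what this section expands to for the only key currently in
  ``helpers``, i.e. ``fullname == "torchaudio.models.RNNTBeamSearch"``::

    Support Structures
    ==================

    Hypothesis
    ----------

    .. container:: py attribute

       .. autodata:: torchaudio.models.Hypothesis
          :no-value: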

.. FACTORY FUNCTIONS

{%- if factory[fullname] %}

Factory Functions
=================

.. autosummary::
   :toctree: ../generated
   :nosignatures:

{% for item in factory[fullname] %}
   {{["~torchaudio.models", item] | join('.')}}
{%- endfor %}

{%- endif %}

.. UTILITY FUNCTIONS

{%- if utils[fullname] %}

Utility Functions
=================

.. currentmodule:: torchaudio.models

.. autosummary::
   :toctree: ../generated
   :nosignatures:

{% for item in utils[fullname] %}
   {{ item }}
{%- endfor %}

{%- endif %}