OpenDAS / Torchaudio

Commit f18d01a0, authored Oct 11, 2021 by moto
Avoid concatenation in loop (#1850)
Parent: 6321adcf
Showing 1 changed file with 10 additions and 15 deletions.

torchaudio/models/tacotron2.py (+10, -15)
@@ -925,12 +925,9 @@ class _Decoder(nn.Module):
             [memory.size(0)], dtype=torch.int32, device=memory.device
         )
-        mel_specgrams, gate_outputs, alignments = (
-            torch.zeros(1, dtype=memory.dtype),
-            torch.zeros(1, dtype=memory.dtype),
-            torch.zeros(1, dtype=memory.dtype),
-        )
-        first_iter = True
+        mel_specgrams: List[Tensor] = []
+        gate_outputs: List[Tensor] = []
+        alignments: List[Tensor] = []
         while True:
             decoder_input = self.prenet(decoder_input)
             (
@@ -957,15 +954,9 @@ class _Decoder(nn.Module):
                 mask,
             )
-            if first_iter:
-                mel_specgrams = mel_specgram.unsqueeze(0)
-                gate_outputs = gate_output.transpose(0, 1)
-                alignments = attention_weights
-                first_iter = False
-            else:
-                mel_specgrams = torch.cat((mel_specgrams, mel_specgram.unsqueeze(0)), dim=0)
-                gate_outputs = torch.cat((gate_outputs, gate_output.transpose(0, 1)), dim=0)
-                alignments = torch.cat((alignments, attention_weights), dim=0)
+            mel_specgrams.append(mel_specgram.unsqueeze(0))
+            gate_outputs.append(gate_output.transpose(0, 1))
+            alignments.append(attention_weights)
             dec = torch.le(torch.sigmoid(gate_output), self.gate_threshold).to(torch.int32).squeeze(1)
@@ -980,6 +971,10 @@ class _Decoder(nn.Module):
             mel_specgram_lengths += not_finished
             decoder_input = mel_specgram
+        mel_specgrams = torch.cat(mel_specgrams, dim=0)
+        gate_outputs = torch.cat(gate_outputs, dim=0)
+        alignments = torch.cat(alignments, dim=0)
         mel_specgrams, gate_outputs, alignments = self._parse_decoder_outputs(
             mel_specgrams, gate_outputs, alignments
         )
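For context, the change replaces repeated `torch.cat` calls inside the decoder loop with plain Python lists that are concatenated once after the loop, so each iteration copies only its own output instead of re-copying the whole accumulated tensor. The sketch below illustrates the same pattern in isolation; it is a simplified illustration, and the helper names (`make_frame`, `concat_in_loop`, `append_then_concat`) are hypothetical, not part of torchaudio.

```python
# Minimal sketch of the pattern applied by this commit, outside of Tacotron2.
from typing import List

import torch
from torch import Tensor


def make_frame(step: int) -> Tensor:
    # Stand-in for one decoder iteration's output (e.g. one mel frame).
    return torch.full((1, 4), float(step))


def concat_in_loop(steps: int) -> Tensor:
    # Before: torch.cat inside the loop re-allocates and copies the whole
    # accumulated tensor every iteration, so total copying grows
    # quadratically with the number of steps.
    out = make_frame(0)
    for i in range(1, steps):
        out = torch.cat((out, make_frame(i)), dim=0)
    return out


def append_then_concat(steps: int) -> Tensor:
    # After: collect chunks in a Python list and concatenate once at the end.
    # The List[Tensor] annotation mirrors the typing used in the patch.
    chunks: List[Tensor] = []
    for i in range(steps):
        chunks.append(make_frame(i))
    return torch.cat(chunks, dim=0)


if __name__ == "__main__":
    # Both variants produce the same tensor; only the copying cost differs.
    assert torch.equal(concat_in_loop(10), append_then_concat(10))
```

In the patch itself, the three `torch.cat(..., dim=0)` calls added in the last hunk perform that one-time concatenation before the results are passed to `_parse_decoder_outputs`.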