Commit 003a7cc6 (unverified)
Authored Mar 03, 2023 by bofeng huang; committed by GitHub on Mar 03, 2023
[Whisper] Fix feature normalization in `WhisperFeatureExtractor` (#21938)
Parent: 718e9d77
Showing 1 changed file with 5 additions and 7 deletions:

src/transformers/models/whisper/feature_extraction_whisper.py (+5, -7)
src/transformers/models/whisper/feature_extraction_whisper.py @ 003a7cc6

@@ -334,14 +334,8 @@ class WhisperFeatureExtractor(SequenceFeatureExtractor):
             max_length=max_length if max_length else self.n_samples,
             truncation=truncation,
             pad_to_multiple_of=pad_to_multiple_of,
-            return_attention_mask=return_attention_mask,
+            return_attention_mask=return_attention_mask or do_normalize,
         )
 
-        # make sure list is in array format
-        input_features = padded_inputs.get("input_features").transpose(2, 0, 1)
-
-        if return_attention_mask:
-            # rescale from sample (48000) to feature (3000)
-            padded_inputs["attention_mask"] = padded_inputs["attention_mask"][:, :: self.hop_length]
         # zero-mean and unit-variance normalization
         if do_normalize:
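
The first hunk makes self.pad produce an attention mask whenever do_normalize is set, even if the caller did not request one: the zero-mean and unit-variance step computes its statistics per waveform and must know which samples are padding, and in the old code do_normalize=True without return_attention_mask would reach for a padded_inputs["attention_mask"] entry that was never created. Below is a minimal sketch of such masked normalization, assuming NumPy float arrays; the helper name is illustrative, while the recipe follows the zero_mean_unit_var_norm method the diff calls.

import numpy as np

def masked_zero_mean_unit_var(waveforms, attention_mask, padding_value=0.0):
    # Normalize each padded waveform with statistics computed over its
    # real (non-padded) samples only, then restore the padding value.
    normed = []
    for vector, mask in zip(waveforms, attention_mask):
        length = int(mask.sum())
        mean = vector[:length].mean()
        var = vector[:length].var()
        out = (vector - mean) / np.sqrt(var + 1e-7)
        out[length:] = padding_value
        normed.append(out)
    return normed

Without the mask, the long run of padding values in a short clip would drag the mean and variance toward the padding value and distort the normalized signal.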
@@ -350,6 +344,10 @@ class WhisperFeatureExtractor(SequenceFeatureExtractor):
                 attention_mask=padded_inputs["attention_mask"],
                 padding_value=self.padding_value,
             )
+            padded_inputs["input_features"] = np.stack(padded_inputs["input_features"], axis=0)
 
+        # make sure list is in array format
+        input_features = padded_inputs.get("input_features").transpose(2, 0, 1)
+
         input_features = [self._np_extract_fbank_features(waveform) for waveform in input_features[0]]
 
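
The second hunk reorders the pipeline: the list returned by the normalizer is repacked with np.stack, and the transpose plus _np_extract_fbank_features calls now run after the normalization block, so the log-mel features are computed from the already-normalized raw waveforms. A hedged end-to-end sketch of the fixed call path follows; openai/whisper-tiny is just a convenient public checkpoint, and the printed shape assumes its default 80 mel bins over 30-second (3000-frame) windows.

import numpy as np
from transformers import WhisperFeatureExtractor

extractor = WhisperFeatureExtractor.from_pretrained("openai/whisper-tiny")

# Two clips of different lengths; both are padded to 30 s internally,
# so the per-sample normalization must ignore the padded region.
audio = [
    np.random.randn(16000).astype(np.float32),  # 1 s at 16 kHz
    np.random.randn(8000).astype(np.float32),   # 0.5 s at 16 kHz
]

# With this fix, do_normalize=True alone is enough: the attention mask is
# created during padding even though return_attention_mask was not passed.
inputs = extractor(audio, sampling_rate=16000, do_normalize=True, return_tensors="np")
print(inputs.input_features.shape)  # expected: (2, 80, 3000)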