Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
transformers
Commits
aa43a765
Unverified
Commit
aa43a765
authored
Apr 20, 2023
by
Quentin Ambard
Committed by
GitHub
Apr 20, 2023
Browse files
fix warning function call creating logger error (max_length and max_new_tokens) (#22889)
parent
aa431675
Changes
3
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
6 additions
and
9 deletions
+6
-9
src/transformers/generation/flax_utils.py
src/transformers/generation/flax_utils.py
+2
-3
src/transformers/generation/tf_utils.py
src/transformers/generation/tf_utils.py
+2
-3
src/transformers/generation/utils.py
src/transformers/generation/utils.py
+2
-3
No files found.
src/transformers/generation/flax_utils.py
View file @
aa43a765
...
...
@@ -387,12 +387,11 @@ class FlaxGenerationMixin:
         elif generation_config.max_new_tokens is not None:
             generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
             if not has_default_max_length:
-                logger.warn(
+                logger.warning(
                     f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
                     f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "
                     "Please refer to the documentation for more information. "
-                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)",
-                    UserWarning,
+                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)"
                 )
         if generation_config.min_length is not None and generation_config.min_length > generation_config.max_length:
...
...
src/transformers/generation/tf_utils.py
View file @
aa43a765
...
...
@@ -860,12 +860,11 @@ class TFGenerationMixin:
         elif generation_config.max_new_tokens is not None:
             generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
             if not has_default_max_length:
-                logger.warn(
+                logger.warning(
                     f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
                     f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "
                     "Please refer to the documentation for more information. "
-                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)",
-                    UserWarning,
+                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)"
                 )
         # If the input length is a tensor (i.e. dynamic length), skip length checks
...
...
src/transformers/generation/utils.py
View file @
aa43a765
...
...
@@ -1347,12 +1347,11 @@ class GenerationMixin:
         elif generation_config.max_new_tokens is not None:
             generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
             if not has_default_max_length:
-                logger.warn(
+                logger.warning(
                     f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
                     f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "
                     "Please refer to the documentation for more information. "
-                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)",
-                    UserWarning,
+                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)"
                 )
         if generation_config.min_length is not None and generation_config.min_length > generation_config.max_length:
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment