Unverified Commit aa43a765 authored by Quentin Ambard, committed by GitHub

fix warning function call creating logger error (max_length and max_new_tokens) (#22889)

parent aa431675
@@ -387,12 +387,11 @@ class FlaxGenerationMixin:
         elif generation_config.max_new_tokens is not None:
             generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
             if not has_default_max_length:
-                logger.warn(
+                logger.warning(
                     f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
                     f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "
                     "Please refer to the documentation for more information. "
-                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)",
-                    UserWarning,
+                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)"
                 )
         if generation_config.min_length is not None and generation_config.min_length > generation_config.max_length:
...
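For context, a minimal stand-alone sketch of the failure mode this commit fixes, using Python's standard logging module directly rather than the transformers logging wrapper: the old code passed `UserWarning` to `logger.warn` as if it were `warnings.warn`, and logging interprets that extra argument as a %-formatting argument, which fails and surfaces as a "--- Logging error ---" traceback.

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("demo")

message = "Both `max_new_tokens` and `max_length` seem to have been set."

# Old call: logging treats UserWarning as a %-formatting argument; the
# message has no placeholders, so formatting the record raises
# "not all arguments converted during string formatting" and logging
# prints a "--- Logging error ---" traceback to stderr.
logger.warn(message, UserWarning)

# Fixed call: plain message, no extra argument, logged cleanly
# (logger.warning is also the non-deprecated spelling of logger.warn).
logger.warning(message)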
@@ -860,12 +860,11 @@ class TFGenerationMixin:
         elif generation_config.max_new_tokens is not None:
             generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
             if not has_default_max_length:
-                logger.warn(
+                logger.warning(
                     f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
                     f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "
                     "Please refer to the documentation for more information. "
-                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)",
-                    UserWarning,
+                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)"
                 )
         # If the input length is a tensor (i.e. dynamic length), skip length checks
...
@@ -1347,12 +1347,11 @@ class GenerationMixin:
         elif generation_config.max_new_tokens is not None:
             generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
             if not has_default_max_length:
-                logger.warn(
+                logger.warning(
                     f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
                     f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "
                     "Please refer to the documentation for more information. "
-                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)",
-                    UserWarning,
+                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)"
                 )
         if generation_config.min_length is not None and generation_config.min_length > generation_config.max_length:
...
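A short usage sketch of the code path that emits this warning (the checkpoint name is only illustrative): calling `generate()` with both `max_length` and `max_new_tokens` set, in which case `max_new_tokens` takes precedence.

from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

inputs = tokenizer("Hello, world", return_tensors="pt")

# Both limits set: the warning above is logged (now without triggering a
# logging error) and max_new_tokens takes precedence over max_length.
output = model.generate(**inputs, max_length=20, max_new_tokens=5)
print(tokenizer.decode(output[0], skip_special_tokens=True))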