Unverified Commit 0eb02871 authored by Matt, committed by GitHub

Removed console spam from misfiring warnings (#13625)

* Removed misfiring warnings

* Revert "Removed misfiring warnings"

This reverts commit cea90de325056b9c1cbcda2bd2613a785c1639ce.

* Retain the warning, but only when the user actually overrides things

* Fix accidentally breaking just about every model on the hub simultaneously

* Style pass
parent da8beaaf
@@ -294,9 +294,10 @@ def booleans_processing(config, **kwargs):
             final_booleans["use_cache"] = kwargs["use_cache"] if kwargs["use_cache"] is not None else config.use_cache
     else:
         if (
-            kwargs["output_attentions"] is not None
-            or kwargs["output_hidden_states"] is not None
-            or ("use_cache" in kwargs and kwargs["use_cache"] is not None)
+            kwargs["output_attentions"] not in (None, config.output_attentions)
+            or kwargs["output_hidden_states"] not in (None, config.output_hidden_states)
+            or "use_cache" in kwargs
+            and kwargs["use_cache"] not in (None, config.use_cache)
         ):
             tf_logger.warning(
                 "The parameters `output_attentions`, `output_hidden_states` and `use_cache` cannot be updated when calling a model."
@@ -306,7 +307,7 @@ def booleans_processing(config, **kwargs):
         final_booleans["output_attentions"] = config.output_attentions
         final_booleans["output_hidden_states"] = config.output_hidden_states
 
-        if kwargs["return_dict"] is not None:
+        if kwargs.get("return_dict", None) not in (None, True):
             tf_logger.warning(
                 "The parameter `return_dict` cannot be set in graph mode and will always be set to `True`."
             )
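For context, the effect of the new condition can be seen in a small standalone sketch (not the actual transformers code; `SimpleConfig` and `should_warn` are made-up names, and the `tf_logger` call is replaced with a plain return value): in graph mode the warning now fires only when a caller passes a value that actually differs from the config, instead of whenever any value is passed at all.

```python
# Minimal sketch of the new graph-mode check, assuming a bare config object.
# `SimpleConfig` and `should_warn` are illustrative names, not transformers APIs.


class SimpleConfig:
    output_attentions = False
    output_hidden_states = False
    use_cache = True


def should_warn(config, **kwargs):
    # Mirrors the updated condition: only a genuine override (a non-None value
    # that differs from the config) should trigger the warning.
    return (
        kwargs.get("output_attentions") not in (None, config.output_attentions)
        or kwargs.get("output_hidden_states") not in (None, config.output_hidden_states)
        or "use_cache" in kwargs
        and kwargs["use_cache"] not in (None, config.use_cache)
    )


config = SimpleConfig()

# Passing values that match the config no longer warns (this was the console spam).
print(should_warn(config, output_attentions=False, use_cache=True))  # False

# A real override still warns, so the safety net is kept.
print(should_warn(config, output_hidden_states=True))  # True
```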