"tests/vscode:/vscode.git/clone" did not exist on "9932ee4b4bca9045d941af6687ef69eedcf68483"
Unverified Commit 167a0d8f authored by Matt, committed by GitHub

Add an option to reduce compile() console spam (#23938)

* Add an option to reduce compile() console spam

* Add annotations to the example scripts

* Add notes to the quicktour docs as well

* minor fix
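
For users, the effect of this change is that compile() without a loss argument now logs a single info-level message instead of a warning, and passing loss="auto" selects the model's internal loss silently. A minimal sketch of both call patterns (the checkpoint name is illustrative, not part of this commit):

from transformers import TFAutoModelForSequenceClassification

model = TFAutoModelForSequenceClassification.from_pretrained("bert-base-cased")

# No loss argument: the model's internal task loss is used, and a single
# info-level message is logged (previously a warning).
model.compile(optimizer="adam")

# loss="auto": the same internal loss, but without the info message.
model.compile(optimizer="adam", loss="auto")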
parent c9cf3377
@@ -454,7 +454,8 @@ def main():
         weight_decay_rate=training_args.weight_decay,
         adam_global_clipnorm=training_args.max_grad_norm,
     )
+    # Transformers models compute the right loss for their task by default when labels are passed, and will
+    # use this for training unless you specify your own loss function in compile().
     model.compile(optimizer=optimizer, jit_compile=training_args.xla)
     # endregion
...
@@ -643,6 +643,8 @@ def main():
     # region Training
     eval_metrics = None
+    # Transformers models compute the right loss for their task by default when labels are passed, and will
+    # use this for training unless you specify your own loss function in compile().
     model.compile(optimizer=optimizer, jit_compile=training_args.xla)
     if training_args.do_train:
...
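
The annotation added in both hunks above describes behaviour that can be exercised directly: pass labels to fit() and skip the loss argument, and the model computes its own task loss. A hedged sketch under that assumption (checkpoint and data are illustrative, and the freshly initialized classification head will train from scratch):

import numpy as np
from transformers import AutoTokenizer, TFAutoModelForSequenceClassification

tokenizer = AutoTokenizer.from_pretrained("bert-base-cased")
model = TFAutoModelForSequenceClassification.from_pretrained("bert-base-cased")

# Tokenize a toy batch; supplying labels lets the model compute its own loss.
batch = tokenizer(["a great movie", "a terrible movie"], padding=True, return_tensors="np")
labels = np.array([1, 0])

# No user-specified loss: the internal loss computation is used for training.
model.compile(optimizer="adam")
model.fit(dict(batch), labels, epochs=1)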
@@ -1498,7 +1498,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
     def compile(
         self,
         optimizer="rmsprop",
-        loss="passthrough",
+        loss="auto_with_warning",
         metrics=None,
         loss_weights=None,
         weighted_metrics=None,
@@ -1510,13 +1510,16 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
         This is a thin wrapper that sets the model's loss output head as the loss if the user does not specify a loss
         function themselves.
         """
-        if loss == "passthrough":
-            logger.warning(
+        if loss in ("auto_with_warning", "passthrough"):  # "passthrough" for workflow backward compatibility
+            logger.info(
                 "No loss specified in compile() - the model's internal loss computation will be used as the "
                 "loss. Don't panic - this is a common way to train TensorFlow models in Transformers! "
                 "To disable this behaviour please pass a loss argument, or explicitly pass "
-                "`loss=None` if you do not want your model to compute a loss."
+                "`loss=None` if you do not want your model to compute a loss. You can also specify `loss='auto'` to "
+                "get the internal loss without printing this info string."
             )
+            loss = "auto"
+        if loss == "auto":
             loss = dummy_loss
             self._using_dummy_loss = True
         else:
...
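
Read in isolation, the dispatch above resolves the loss argument in two steps: the defaults that should announce themselves log once and fall through to "auto", and "auto" then selects the internal loss. A minimal standalone sketch of that control flow (resolve_loss and the string standing in for dummy_loss are illustrative names, not from the commit):

import logging

logger = logging.getLogger(__name__)

def resolve_loss(loss="auto_with_warning"):
    # "auto_with_warning" (the new default) and "passthrough" (kept for
    # backward compatibility) both log once, then fall through to "auto".
    if loss in ("auto_with_warning", "passthrough"):
        logger.info("No loss specified in compile() - using the model's internal loss.")
        loss = "auto"
    # "auto" selects the internal loss silently, whether passed directly or
    # set by the branch above; any other value is handed to Keras unchanged.
    if loss == "auto":
        return "internal-loss"  # stands in for dummy_loss in the real code
    return loss

assert resolve_loss() == "internal-loss"        # default: logs, then internal loss
assert resolve_loss("auto") == "internal-loss"  # silent internal loss
assert resolve_loss("mse") == "mse"             # user losses pass through untouched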