Unverified commit ce5ae5a4, authored by Tom Aarsen and committed by GitHub

Prevent crash in `WandbCallback` with third-party models (#30477)

* Use EAFP principle to prevent crash with third parties

* Remove leftover debugging code

* Add info-level logger message
parent aca4a103
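
To make the failure mode concrete, here is a minimal sketch of the crash being fixed (the `HubMixin` and `ThirdPartyModel` classes below are hypothetical stand-ins, not transformers classes): a third-party model can satisfy one of the `isinstance` checks in the old guard while still not implementing `num_parameters()`, so the look-before-you-leap test passes and the attribute access that follows raises `AttributeError`.

class HubMixin:
    """Stand-in for a base class that the old isinstance() whitelist accepts."""


class ThirdPartyModel(HubMixin):
    """Hypothetical third-party model that never defines num_parameters()."""


model = ThirdPartyModel()
wandb_config = {}

try:
    # Old, look-before-you-leap style: the type check passes...
    if isinstance(model, HubMixin):
        # ...but the method is missing, so this line crashes the callback.
        wandb_config["model/num_parameters"] = model.num_parameters()
except AttributeError as err:
    print(f"Old guard let the call through and it still failed: {err}")
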
@@ -786,15 +786,10 @@ class WandbCallback(TrainerCallback):
             self._wandb.run._label(code="transformers_trainer")
             # add number of model parameters to wandb config
-            if any(
-                (
-                    isinstance(model, PreTrainedModel),
-                    isinstance(model, PushToHubMixin),
-                    (is_tf_available() and isinstance(model, TFPreTrainedModel)),
-                    (is_torch_available() and isinstance(model, torch.nn.Module)),
-                )
-            ):
+            try:
                 self._wandb.config["model/num_parameters"] = model.num_parameters()
+            except AttributeError:
+                logger.info("Could not log the number of model parameters in Weights & Biases.")
             # log the initial model and architecture to an artifact
             with tempfile.TemporaryDirectory() as temp_dir:
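
A corresponding sketch of the easier-to-ask-forgiveness-than-permission pattern the diff switches to, reusing the hypothetical `ThirdPartyModel` stand-in from above (the logging setup here is illustrative, not the trainer's own): the call is simply attempted, and a model without `num_parameters()` now produces an info-level log line instead of an exception, so training continues.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class ThirdPartyModel:
    """Hypothetical third-party model that never defines num_parameters()."""


model = ThirdPartyModel()
wandb_config = {}

# EAFP: attempt the call and recover if the model lacks the method.
try:
    wandb_config["model/num_parameters"] = model.num_parameters()
except AttributeError:
    logger.info("Could not log the number of model parameters in Weights & Biases.")

print(wandb_config)  # {} -- nothing was logged, but nothing crashed either

The trade-off of EAFP here is that any object exposing a `num_parameters()` method gets logged, whatever its base class, while everything else degrades gracefully instead of aborting the callback setup.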