Unverified Commit bb154ac5 authored by Saurabh Misra's avatar Saurabh Misra Committed by GitHub
Browse files

Fixing TPU training by disabling wandb.watch gradients logging for TPU (#4926)

parent fb6cccb8
@@ -335,8 +335,8 @@ class Trainer:
                 'Automatic Weights & Biases logging enabled, to disable set os.environ["WANDB_DISABLED"] = "true"'
             )
             wandb.init(project=os.getenv("WANDB_PROJECT", "huggingface"), config=vars(self.args))
-            # keep track of model topology and gradients
-            if os.getenv("WANDB_WATCH") != "false":
+            # keep track of model topology and gradients, unsupported on TPU
+            if not is_torch_tpu_available() and os.getenv("WANDB_WATCH") != "false":
                 wandb.watch(
                     self.model, log=os.getenv("WANDB_WATCH", "gradients"), log_freq=max(100, self.args.logging_steps)
                 )
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment