Unverified Commit cedc547e authored by Jay Mody, committed by GitHub

Adds train_batch_size, eval_batch_size, and n_gpu to to_sanitized_dict output for logging. (#5331)

* Adds train_batch_size, eval_batch_size, and n_gpu to to_sanitized_dict() output

* Update wandb config logging to use to_sanitized_dict

* removed n_gpu from sanitized dict

* fix quality check errors
parent 9996f697
@@ -383,7 +383,7 @@ class Trainer:
             logger.info(
                 'Automatic Weights & Biases logging enabled, to disable set os.environ["WANDB_DISABLED"] = "true"'
             )
-            wandb.init(project=os.getenv("WANDB_PROJECT", "huggingface"), config=vars(self.args))
+            wandb.init(project=os.getenv("WANDB_PROJECT", "huggingface"), config=self.args.to_sanitized_dict())
             # keep track of model topology and gradients, unsupported on TPU
             if not is_torch_tpu_available() and os.getenv("WANDB_WATCH") != "false":
                 wandb.watch(
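In the Trainer hunk above, wandb.init now receives self.args.to_sanitized_dict() instead of vars(self.args), so the W&B run config is a flat dict whose values are all bools, ints, floats, or strings (anything else is stringified) and which now includes the derived batch sizes. A minimal sketch of the same call pattern, assuming only that args exposes a to_sanitized_dict() method; setup_wandb here is a hypothetical helper, not the actual Trainer method:

import os

import wandb


def setup_wandb(args):
    # Same pattern as the diff: project name comes from the environment,
    # config comes from the sanitized dict so every value is W&B-friendly.
    wandb.init(
        project=os.getenv("WANDB_PROJECT", "huggingface"),
        config=args.to_sanitized_dict(),
    )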
@@ -310,7 +310,10 @@ class TrainingArguments:
         Sanitized serialization to use with TensorBoard’s hparams
         """
         d = dataclasses.asdict(self)
+        d = {**d, **{"train_batch_size": self.train_batch_size, "eval_batch_size": self.eval_batch_size}}
         valid_types = [bool, int, float, str]
         if is_torch_available():
             valid_types.append(torch.Tensor)
         return {k: v if type(v) in valid_types else str(v) for k, v in d.items()}
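train_batch_size and eval_batch_size are computed properties on TrainingArguments, so dataclasses.asdict() does not pick them up on its own; the added line merges them into the sanitized dict explicitly. A self-contained sketch of that pattern, using a hypothetical Args dataclass rather than the real TrainingArguments:

import dataclasses


@dataclasses.dataclass
class Args:
    output_dir: str = "out"
    per_device_train_batch_size: int = 8
    n_gpu: int = 2

    @property
    def train_batch_size(self) -> int:
        # Derived value: properties are invisible to dataclasses.asdict().
        return self.per_device_train_batch_size * max(1, self.n_gpu)

    def to_sanitized_dict(self):
        d = dataclasses.asdict(self)
        # Merge in derived properties by hand, exactly like the diff does.
        d = {**d, **{"train_batch_size": self.train_batch_size}}
        valid_types = [bool, int, float, str]
        return {k: v if type(v) in valid_types else str(v) for k, v in d.items()}


print(Args().to_sanitized_dict())
# {'output_dir': 'out', 'per_device_train_batch_size': 8, 'n_gpu': 2, 'train_batch_size': 16}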