Unverified commit d128f2ff, authored by Matt, committed by GitHub

Stop requiring Torch for our TF examples! (#21997)

* Stop requiring Torch for our TF examples!

* Slight tweak to logging in the example itself
parent 7c393181
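
Why did the inherited property require Torch? Here is a minimal sketch of the mechanism (illustrative class names, not the verbatim transformers source): the base TrainingArguments.should_log gates logging on the current process rank, and that rank is resolved through torch-backed distributed state, so even TF-only example scripts ended up importing torch. The override at the end shows the shape of this commit's fix.

```python
# Hedged sketch with illustrative names, not the verbatim transformers code.
class TrainingArgumentsSketch:
    @property
    def local_process_index(self) -> int:
        # Stand-in for the rank lookup that goes through torch.distributed
        # in the real class; this is what dragged torch into TF examples.
        return 0

    @property
    def should_log(self) -> bool:
        # Log only from the main process.
        return self.local_process_index == 0


class TFTrainingArgumentsSketch(TrainingArgumentsSketch):
    @property
    def should_log(self) -> bool:
        # Shape of this commit's fix: the TF examples delegate logging to
        # Keras, so the Trainer-side gate is simply always off.
        return False
```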
@@ -273,9 +273,8 @@ def main():
         handlers=[logging.StreamHandler(sys.stdout)],
     )
-    if training_args.should_log:
-        # The default of training_args.log_level is passive, so we set log level at info here to have that default.
-        transformers.utils.logging.set_verbosity_info()
+    # The default of training_args.log_level is passive, so we set log level at info here to have that default.
+    transformers.utils.logging.set_verbosity_info()
     log_level = training_args.get_process_log_level()
     logger.setLevel(log_level)
@@ -249,6 +249,13 @@ class TFTrainingArguments(TrainingArguments):
         requires_backends(self, ["tf"])
         return self._setup_strategy.num_replicas_in_sync
 
+    @property
+    def should_log(self):
+        """
+        Whether or not the current process should produce log.
+        """
+        return False  # TF Logging is handled by Keras not the Trainer
+
     @property
     def train_batch_size(self) -> int:
         """
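
With should_log now hard-wired to False on the TF side, the guard removed in the first hunk could never fire, which is why the example sets verbosity unconditionally. A quick usage sketch (assumes a TensorFlow install for TFTrainingArguments; output_dir is its only required argument):

```python
import transformers
from transformers import TFTrainingArguments

args = TFTrainingArguments(output_dir="out")
print(args.should_log)  # False: Keras callbacks handle progress logging

# The TF examples therefore configure verbosity directly rather than
# branching on args.should_log:
transformers.utils.logging.set_verbosity_info()
```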