"...git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "5e31a98ab70607c820cc2ad358d81916adad0313"
Unverified Commit d128f2ff authored by Matt, committed by GitHub
Browse files

Stop requiring Torch for our TF examples! (#21997)

* Stop requiring Torch for our TF examples!

* Slight tweak to logging in the example itself
parent 7c393181
...@@ -273,7 +273,6 @@ def main(): ...@@ -273,7 +273,6 @@ def main():
handlers=[logging.StreamHandler(sys.stdout)], handlers=[logging.StreamHandler(sys.stdout)],
) )
if training_args.should_log:
# The default of training_args.log_level is passive, so we set log level at info here to have that default. # The default of training_args.log_level is passive, so we set log level at info here to have that default.
transformers.utils.logging.set_verbosity_info() transformers.utils.logging.set_verbosity_info()
......
...@@ -249,6 +249,13 @@ class TFTrainingArguments(TrainingArguments): ...@@ -249,6 +249,13 @@ class TFTrainingArguments(TrainingArguments):
requires_backends(self, ["tf"]) requires_backends(self, ["tf"])
return self._setup_strategy.num_replicas_in_sync return self._setup_strategy.num_replicas_in_sync
@property
def should_log(self):
    """
    Whether or not the current process should produce log.
    """
    # Always disabled: for TF, logging is managed by Keras rather than
    # by the Trainer, so this process never logs through this path.
    keras_owns_logging = True
    return not keras_owns_logging
@property @property
def train_batch_size(self) -> int: def train_batch_size(self) -> int:
""" """
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment