"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "e983da0e7d91c100e6e35efcb8a69c8cd41d6e09"
Unverified Commit 122d7dc3 authored by Ibraheem Moosa's avatar Ibraheem Moosa Committed by GitHub
Browse files

Remove logging of GPU count etc. (#12569)

Logging this successfully requires PyTorch. For the purposes of this script we are not using PyTorch.
parent d7e156bd
...@@ -430,10 +430,6 @@ if __name__ == "__main__":
    # Log on each process the small summary:
    logger = logging.getLogger(__name__)
logger.warning(
f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}"
+ f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}"
)
    # Set the verbosity to info of the Transformers logger (on main process only):
    logger.info(f"Training/evaluation parameters {training_args}")
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment