Unverified Commit caf37466 authored by Patrick von Platen's avatar Patrick von Platen Committed by GitHub
Browse files

fix indentation issue (#4941)

parent 6293eb04
...@@ -104,8 +104,8 @@ class PyTorchBenchmark(Benchmark): ...@@ -104,8 +104,8 @@ class PyTorchBenchmark(Benchmark):
) )
torch.cuda.reset_max_memory_cached() torch.cuda.reset_max_memory_cached()
# calculate loss and do backpropagation # calculate loss and do backpropagation
_train() _train()
elif not self.args.no_tpu and is_torch_tpu_available(): elif not self.args.no_tpu and is_torch_tpu_available():
# tpu # tpu
raise NotImplementedError( raise NotImplementedError(
...@@ -129,8 +129,7 @@ class PyTorchBenchmark(Benchmark): ...@@ -129,8 +129,7 @@ class PyTorchBenchmark(Benchmark):
logger.info( logger.info(
"Please consider updating PyTorch to version 1.4 to get more accuracy on GPU memory usage" "Please consider updating PyTorch to version 1.4 to get more accuracy on GPU memory usage"
) )
memory = Memory(torch.cuda.max_memory_cached()) memory = Memory(torch.cuda.max_memory_reserved())
memory = Memory(torch.cuda.max_memory_reserved())
return memory, summary return memory, summary
else: else:
...@@ -215,8 +214,8 @@ class PyTorchBenchmark(Benchmark): ...@@ -215,8 +214,8 @@ class PyTorchBenchmark(Benchmark):
) )
torch.cuda.reset_max_memory_cached() torch.cuda.reset_max_memory_cached()
# run forward # run forward
_forward() _forward()
elif not self.args.no_tpu and is_torch_tpu_available(): elif not self.args.no_tpu and is_torch_tpu_available():
# tpu # tpu
raise NotImplementedError( raise NotImplementedError(
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment