Commit 2a8022ca authored by Michael Carilli

Adding deterministic option to main_fp16_optimizer.py

parent b436213e
@@ -68,6 +68,7 @@ parser.add_argument('--dynamic-loss-scale', action='store_true',
                    '--static-loss-scale.')
parser.add_argument('--prof', dest='prof', action='store_true',
                    help='Only run 10 iterations for profiling.')
parser.add_argument('--deterministic', action='store_true')
parser.add_argument("--local_rank", default=0, type=int)
parser.add_argument('--sync_bn', action='store_true',
@@ -94,6 +95,12 @@ def fast_collate(batch):
best_prec1 = 0
args = parser.parse_args()

if args.deterministic:
    cudnn.benchmark = False
    cudnn.deterministic = True
    torch.manual_seed(args.local_rank)

def main():
    global best_prec1, args
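
The hunk above is the core of the change. For context, here is a minimal, self-contained sketch of the same pattern (the flag names and the rank-based seed mirror the diff; the surrounding training script is stripped away for illustration and is not part of this commit):

# Standalone sketch of what --deterministic does.
import argparse

import torch
import torch.backends.cudnn as cudnn

parser = argparse.ArgumentParser()
parser.add_argument('--deterministic', action='store_true')
parser.add_argument('--local_rank', default=0, type=int)
args = parser.parse_args()

if args.deterministic:
    # Disable cuDNN autotuning, which can pick different kernels from run to run.
    cudnn.benchmark = False
    # Restrict cuDNN to deterministic algorithms where available.
    cudnn.deterministic = True
    # Seed this process's RNG with its distributed rank, giving each worker a
    # fixed, per-rank seed across repeated runs.
    torch.manual_seed(args.local_rank)
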
@@ -125,6 +132,7 @@ def main():
    else:
        print("=> creating model '{}'".format(args.arch))
        model = models.__dict__[args.arch]()
    if args.sync_bn:
        import apex
        print("using apex synced BN")
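Design note: seeding with args.local_rank gives each distributed worker a distinct but fixed seed, so repeated runs reproduce the same random streams per rank; cudnn.benchmark = False gives up cuDNN autotuning and cudnn.deterministic = True restricts cuDNN to deterministic algorithms, both trading some speed for run-to-run consistency.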