Commit eae14cab authored by Manan Goel

Fixed bug to check if log_writer is not None before calling log_model

parent f1610456
@@ -380,7 +380,8 @@ def train(config,
                 epoch=epoch,
                 global_step=global_step)
-            log_writer.log_model(is_best=False, prefix="latest")
+            if log_writer is not None:
+                log_writer.log_model(is_best=False, prefix="latest")
         if dist.get_rank() == 0 and epoch > 0 and epoch % save_epoch_step == 0:
             save_model(
@@ -394,8 +395,8 @@ def train(config,
                 best_model_dict=best_model_dict,
                 epoch=epoch,
                 global_step=global_step)
-            log_writer.log_model(is_best=False, prefix='iter_epoch_{}'.format(epoch))
+            if log_writer is not None:
+                log_writer.log_model(is_best=False, prefix='iter_epoch_{}'.format(epoch))
         best_str = 'best metric, {}'.format(', '.join(
             ['{}: {}'.format(k, v) for k, v in best_model_dict.items()]))
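For context, here is a minimal, self-contained sketch of the failure mode this commit guards against. The LoggerStub class and save_and_log helper are hypothetical names used only for illustration; the diff itself suggests that log_writer is None when no experiment logger is configured for the training run.

# Hypothetical stand-ins for illustration; names are not from the commit.
class LoggerStub:
    def log_model(self, is_best, prefix):
        # Stand-in for whatever the real logger does with a checkpoint.
        print("logged checkpoint: is_best={}, prefix={}".format(is_best, prefix))

def save_and_log(log_writer, epoch):
    # Before this commit the call ran unconditionally, so a run without a
    # configured logger (log_writer is None) crashed with:
    #   AttributeError: 'NoneType' object has no attribute 'log_model'
    if log_writer is not None:
        log_writer.log_model(is_best=False, prefix='iter_epoch_{}'.format(epoch))

save_and_log(None, epoch=3)          # safe no-op after the fix
save_and_log(LoggerStub(), epoch=3)  # logs the checkpoint as before

An alternative design would be a no-op logger object, which removes the None check at every call site; the commit keeps the simpler explicit guard at both places log_model is called.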