提交 eae14cab 编写于 作者: M Manan Goel

Fixed bug to check if log_writer is not None before calling log_model

上级 f1610456
...@@ -380,7 +380,8 @@ def train(config, ...@@ -380,7 +380,8 @@ def train(config,
epoch=epoch, epoch=epoch,
global_step=global_step) global_step=global_step)
log_writer.log_model(is_best=False, prefix="latest") if log_writer is not None:
log_writer.log_model(is_best=False, prefix="latest")
if dist.get_rank() == 0 and epoch > 0 and epoch % save_epoch_step == 0: if dist.get_rank() == 0 and epoch > 0 and epoch % save_epoch_step == 0:
save_model( save_model(
...@@ -394,8 +395,8 @@ def train(config, ...@@ -394,8 +395,8 @@ def train(config,
best_model_dict=best_model_dict, best_model_dict=best_model_dict,
epoch=epoch, epoch=epoch,
global_step=global_step) global_step=global_step)
if log_writer is not None:
log_writer.log_model(is_best=False, prefix='iter_epoch_{}'.format(epoch)) log_writer.log_model(is_best=False, prefix='iter_epoch_{}'.format(epoch))
best_str = 'best metric, {}'.format(', '.join( best_str = 'best metric, {}'.format(', '.join(
['{}: {}'.format(k, v) for k, v in best_model_dict.items()])) ['{}: {}'.format(k, v) for k, v in best_model_dict.items()]))
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册