diff --git a/ppcls/engine/engine.py b/ppcls/engine/engine.py
index 1dcf653578da6c0c63d7c03f4c43019ef30bdb9b..019cf165054a080e6d3883e5b01e3281ebd1d19e 100644
--- a/ppcls/engine/engine.py
+++ b/ppcls/engine/engine.py
@@ -243,11 +243,6 @@ class Engine(object):
                 level=amp_level,
                 save_dtype='float32')
 
-        # TODO(gaotingquan): convert_sync_batchnorm is not effective
-        # eval loss in training is inconsistent with the eval only if bn is used,
-        # because the running_mean and running_var of bn are not synced in dist.
-        self.model = nn.SyncBatchNorm.convert_sync_batchnorm(self.model)
-
         # for distributed
         world_size = dist.get_world_size()
         self.config["Global"]["distributed"] = world_size != 1
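
Note (reviewer context, not part of the patch): the removed line relied on Paddle's
nn.SyncBatchNorm.convert_sync_batchnorm, which replaces BatchNorm sublayers with
SyncBatchNorm so that batch statistics are computed across ranks during distributed
training. A minimal sketch of that call in isolation; the toy Sequential model below
is purely illustrative and not taken from ppcls:

# Illustrative only; the model here stands in for self.model in Engine.__init__.
import paddle.nn as nn

model = nn.Sequential(
    nn.Conv2D(3, 8, kernel_size=3),
    nn.BatchNorm2D(8),
    nn.ReLU())

# convert_sync_batchnorm returns the model with every BatchNorm*D sublayer
# swapped for nn.SyncBatchNorm.
sync_model = nn.SyncBatchNorm.convert_sync_batchnorm(model)
print([type(layer).__name__ for layer in sync_model.sublayers()])
# expected to list 'SyncBatchNorm' where 'BatchNorm2D' used to be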