Unverified commit 7d873006, authored by Kaipeng Deng, committed by GitHub

move train_batch_size sync from load_config to merge_config (#1510)

* move train_batch_size sync from load_config to merge_config
Parent 23d3ea61
@@ -98,14 +98,6 @@ def load_config(file_path):
     merge_config(cfg)
 
-    # NOTE: the training batch size is defined only in TrainReader; synchronize
-    # it to the global config so models can read the batch size from the
-    # global config when building the model.
-    # Batch size for evaluation or inference can also be added here.
-    if 'TrainReader' in global_config:
-        global_config['train_batch_size'] = global_config['TrainReader'][
-            'batch_size']
-
     return global_config
 
@@ -141,7 +133,16 @@ def merge_config(config, another_cfg=None):
     """
     global global_config
     dct = another_cfg if another_cfg is not None else global_config
-    return dict_merge(dct, config)
+    dct = dict_merge(dct, config)
+    # NOTE: the training batch size is defined only in TrainReader; synchronize
+    # it to the global config so models can read the batch size from the
+    # global config when building the model.
+    # Batch size for evaluation or inference can also be added here.
+    if 'TrainReader' in dct:
+        dct['train_batch_size'] = dct['TrainReader']['batch_size']
+    return dct
 
 
 def get_registered_modules():
 ...
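With the sync moved into merge_config, the train_batch_size key is refreshed every time configs are merged, so a batch size changed by a later merge (for example, a command-line style override dict) is picked up as well, rather than only the value seen when the YAML file was first loaded. A minimal runnable sketch of that behavior, using a plain dict in place of the real global config and a simplified one-level merge standing in for dict_merge (both are assumptions for illustration, not code from the commit):

# Sketch of the post-commit behavior (assumed config values, simplified merge;
# not taken from the repository).
global_config = {
    'TrainReader': {'batch_size': 8},
    'train_batch_size': 8,  # synced when the YAML config was first merged
}

def merge_config(config, another_cfg=None):
    dct = another_cfg if another_cfg is not None else global_config
    # naive one-level merge standing in for the real dict_merge helper
    for k, v in config.items():
        if isinstance(v, dict) and isinstance(dct.get(k), dict):
            dct[k].update(v)
        else:
            dct[k] = v
    # re-sync the training batch size to the global config after every merge
    if 'TrainReader' in dct:
        dct['train_batch_size'] = dct['TrainReader']['batch_size']
    return dct

# a command-line style override of the training batch size
merge_config({'TrainReader': {'batch_size': 16}})
print(global_config['train_batch_size'])  # 16 with the new behavior

Under the old placement, the print above would still show 8, because the sync only ran once inside load_config.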