Commit 8b97a3a4 authored by dzhwinter

rerun ci. test=develop

Parent a52be7c0
@@ -1735,7 +1735,7 @@ class Program(object):
         return self.__is_optimized
 
     @_is_optimized.setter
-    def set__is_optimized(self, target):
+    def _is_optimized(self, target):
         self.__is_optimized = target
 
     @property
@@ -1756,7 +1756,7 @@ class Program(object):
         return self._current_role
 
     @op_role.setter
-    def set_op_role(self, role):
+    def op_role(self, role):
         self._current_role = role
 
     @property
......
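The two setter renames above are more than cosmetic: with `@_is_optimized.setter` (or `@op_role.setter`), Python only wires the setter to the property when the decorated function reuses the property's name, so under the old names `set__is_optimized` / `set_op_role` an assignment such as `program._is_optimized = True` would never reach the setter. A minimal standalone sketch of the corrected pattern (simplified from the `Program` class, not the full PaddlePaddle source):

```python
class Program(object):
    def __init__(self):
        # backing field, name-mangled to _Program__is_optimized
        self.__is_optimized = False

    @property
    def _is_optimized(self):
        # True once memory_optimize / release_memory has rewritten this program
        return self.__is_optimized

    @_is_optimized.setter
    def _is_optimized(self, target):
        # The setter must carry the same name as the property; otherwise the
        # assignment below would bypass it (or fail on a read-only property).
        self.__is_optimized = target


prog = Program()
prog._is_optimized = True    # routed through the setter above
print(prog._is_optimized)    # True
```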
@@ -931,7 +931,7 @@ def save_inference_model(dirname,
     if main_program is None:
         main_program = default_main_program()
 
-    if main_program.is_optimized:
+    if main_program._is_optimized:
         warnings.warn(
             "save_inference_model must put before you call memory_optimize. \
             the memory_optimize will modify the original program, \
......
@@ -135,6 +135,7 @@ class ParallelExecutor(object):
         # step3: init build_strategy
         if build_strategy is None:
             build_strategy = BuildStrategy()
+        build_strategy.enable_inplace = False if main._is_optimized else True
         build_strategy.num_trainers = num_trainers
         build_strategy.trainer_id = trainer_id
 
         # FIXME(zcd): is_distribution_ is a temporary field, because in pserver mode,
......
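The `ParallelExecutor` hunk disables the inplace build pass whenever the program has already been rewritten by `memory_optimize`, since applying both rewrites to the same program can conflict. The ternary is just a spelled-out boolean negation; a hedged sketch of the equivalent user-side configuration, assuming the fluid 1.x API of this era and a Paddle build that already contains this change:

```python
import paddle.fluid as fluid

main_program = fluid.default_main_program()

build_strategy = fluid.BuildStrategy()
# Equivalent to the diff's `False if main._is_optimized else True`:
# skip the inplace pass if memory_optimize already rewrote the program.
build_strategy.enable_inplace = not main_program._is_optimized
```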
@@ -108,9 +108,9 @@ class TestSaveInferenceModel(unittest.TestCase):
         exe.run(init_program, feed={}, fetch_list=[])
 
         memory_optimize(program, print_log=True)
-        self.assertRaises(RuntimeError,
-                          save_inference_model(MODEL_DIR, ["x", "y"],
-                                               [avg_cost], exe, program))
+        self.assertEqual(program._is_optimized, True)
+        # will print warning message
+        save_inference_model(MODEL_DIR, ["x", "y"], [avg_cost], exe, program)
 
 
 if __name__ == '__main__':
......
@@ -540,7 +540,7 @@ def memory_optimize(input_program,
     if skip_opt_set is not None:
         skip_opt_set = set(map(to_name_str, skip_opt_set))
 
     cfgs = _get_cfgs(input_program)
-    input_program.is_optimized = True
+    input_program._is_optimized = True
     for cfg in cfgs:
         cfg.memory_optimize(skip_opt_set=skip_opt_set, level=level)
@@ -560,6 +560,6 @@ def release_memory(input_program, skip_opt_set=None):
         None
     """
     cfgs = _get_cfgs(input_program)
-    input_program.is_optimized = True
+    input_program._is_optimized = True
     for cfg in cfgs:
         cfg.release_memory(skip_opt_set=skip_opt_set)
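Taken together, the hunks establish a soft contract: `memory_optimize` and `release_memory` mark the program via `_is_optimized`, and a later `save_inference_model` call now warns (instead of raising, as the old test expected) because the pass has already rewritten the program. A hedged end-to-end sketch of that ordering, mirroring the updated test; the helper name and the warning capture are illustrative, not part of the commit:

```python
import warnings

import paddle.fluid as fluid


def save_after_memory_optimize(program, feed_names, fetch_vars, exe, model_dir):
    """Illustrative only: shows the flag set by memory_optimize and the
    resulting warning from save_inference_model."""
    fluid.memory_optimize(program, print_log=True)  # sets program._is_optimized = True
    assert program._is_optimized

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        # Warns that it should have been called before memory_optimize,
        # because that pass modifies the original program.
        fluid.io.save_inference_model(model_dir, feed_names, fetch_vars, exe, program)
    return caught
```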