未验证的提交 561232c2,作者:Zeng Jinle,提交者:GitHub

remove is_mem_optimized in Program, test=develop (#19307)

上级 97d1db18
......@@ -2854,10 +2854,6 @@ class Program(object):
self._use_hierarchical_allreduce = False
self._hierarchical_allreduce_inter_nranks = 0
# @deprecated(the python memory optimize transpiler is deprecated)
# whether the program is optimized by memory_optimize_transpiler
self.__is_mem_optimized = False
# if this program has been optimized by distributed optimizer
# fleet_opt will be given a value
self._fleet_opt = None
......@@ -2869,16 +2865,6 @@ class Program(object):
# appending gradients times
self._appending_grad_times = 0
@property
def _is_mem_optimized(self):
    """Deprecated: whether this Program has been rewritten by the Python
    memory_optimize transpiler (the transpiler itself is deprecated; see
    the initializer comment where ``__is_mem_optimized`` defaults to False).

    Returns:
        The stored flag; ``save_inference_model`` checks it to warn that an
        optimized program may alias operator inputs/outputs and is unsafe
        to save as an inference model.
    """
    # if the program is optimized, operator input/outputs
    # maybe same, which conflict with save_inference_model.
    return self.__is_mem_optimized

@_is_mem_optimized.setter
def _is_mem_optimized(self, target):
    # NOTE(review): `target` is presumably a bool (flag initialized to
    # False elsewhere) but nothing here enforces that — confirm at callers.
    self.__is_mem_optimized = target
@property
def _op_role(self):
"""
......
......@@ -1028,13 +1028,6 @@ def save_inference_model(dirname,
if main_program is None:
main_program = default_main_program()
if main_program._is_mem_optimized:
warnings.warn(
"save_inference_model must put before you call memory_optimize. \
the memory_optimize will modify the original program, \
is not suitable for saving inference model \
we save the original program as inference model.",
RuntimeWarning)
elif not isinstance(main_program, Program):
raise TypeError("program should be as Program type or None")
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论,请先注册。