diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/partial_program.py b/python/paddle/fluid/dygraph/dygraph_to_static/partial_program.py
index 43a05cbb2f9fd9b55b028b9339bf4f6ef6bf6967..318585972f0e699e2703a023d9240866be72dece 100644
--- a/python/paddle/fluid/dygraph/dygraph_to_static/partial_program.py
+++ b/python/paddle/fluid/dygraph/dygraph_to_static/partial_program.py
@@ -152,7 +152,6 @@ class PartialProgramLayer:
         assert isinstance(self._build_strategy, BuildStrategy)
 
         self._origin_main_program = self._verify_program(main_program)
-        self._tmp_scope_vec = self._create_scope_vec()
         self._cuda_graph_vec = self._create_cuda_graph_vec()
         self._cuda_graph_capture_mode = ""
         self._cuda_graph_pool_id = 0
@@ -363,9 +362,8 @@ class PartialProgramLayer:
 
         _C_ops.run_program(self._valid_vars(in_vars),
                            self._valid_vars(self._params),
-                           self._valid_vars(out_vars), self._tmp_scope_vec,
+                           self._valid_vars(out_vars), self._create_scope_vec(),
                            self._double_grads, self._cuda_graph_vec, *attrs)
-        self.drop_scope_if_no_grad()
         restored_nest_out = self._restore_out(out_vars)
         return self._remove_no_value(restored_nest_out)
 
@@ -379,13 +377,6 @@ class PartialProgramLayer:
                 in_vars[i] = var.astype('float16')
                 in_vars[i].name = name
 
-    def drop_scope_if_no_grad(self):
-        tracer = framework._dygraph_tracer()
-        scope = self._tmp_scope_vec.value().get_scope() if isinstance(
-            self._tmp_scope_vec, (core.VarBase)) else self._tmp_scope_vec[0]
-        if self.training and not tracer._has_grad:
-            scope.drop_kids()
-
     @property
     def program(self):
         if self.training: