未验证 提交 3901b2cf 编写于 作者: X xiongkun 提交者: GitHub

fix dygraph2static bugs (#43688)

因此将 N 次前向共用一个 vec scope 换为了 N 次前向分别使用各自的 vec scope
上级 fc5a85b0
......@@ -152,7 +152,6 @@ class PartialProgramLayer:
assert isinstance(self._build_strategy, BuildStrategy)
self._origin_main_program = self._verify_program(main_program)
self._tmp_scope_vec = self._create_scope_vec()
self._cuda_graph_vec = self._create_cuda_graph_vec()
self._cuda_graph_capture_mode = ""
self._cuda_graph_pool_id = 0
......@@ -363,9 +362,8 @@ class PartialProgramLayer:
_C_ops.run_program(self._valid_vars(in_vars),
self._valid_vars(self._params),
self._valid_vars(out_vars), self._tmp_scope_vec,
self._valid_vars(out_vars), self._create_scope_vec(),
self._double_grads, self._cuda_graph_vec, *attrs)
self.drop_scope_if_no_grad()
restored_nest_out = self._restore_out(out_vars)
return self._remove_no_value(restored_nest_out)
......@@ -379,13 +377,6 @@ class PartialProgramLayer:
in_vars[i] = var.astype('float16')
in_vars[i].name = name
def drop_scope_if_no_grad(self):
    """Free the child scopes of the temporary scope vector when gradients
    are not being recorded, so memory from the forward pass is released.

    NOTE(review): assumes ``self._tmp_scope_vec`` is either a
    ``core.VarBase`` wrapping a scope or an indexable container whose
    first element is a scope — confirm against ``_create_scope_vec``.
    """
    tracer = framework._dygraph_tracer()
    # The scope vector has two possible representations; unwrap either way.
    if isinstance(self._tmp_scope_vec, core.VarBase):
        scope = self._tmp_scope_vec.value().get_scope()
    else:
        scope = self._tmp_scope_vec[0]
    # Dropping kids is only safe when training without gradient recording.
    should_drop = self.training and not tracer._has_grad
    if should_drop:
        scope.drop_kids()
@property
def program(self):
if self.training:
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册