Unverified    Commit 7f1c6269    Authored by lujun    Committed by GitHub

Fix potential bugs: use numpy assignment to restore parameters at build-once, test=develop (#17720)

 use numpy assignment to restore parameters at build-once
Parent d7c5c2bd
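For context, the code path touched here runs when a state dict has already been loaded into a dygraph Layer before its parameters exist: parameters are only created on the first forward pass ("build-once"), and at that point the previously loaded values have to be restored into them. A rough usage sketch follows; MyLayer, state_dict, and inputs are placeholders, and it assumes the load_dict API of this Paddle release, so treat it as illustrative rather than exact:

    import paddle.fluid as fluid

    with fluid.dygraph.guard():
        model = MyLayer("my_layer")      # no parameters exist yet
        model.load_dict(state_dict)      # fills Layer._loaddict_holder with saved values
        out = model(inputs)              # build-once: parameters are created during this
                                         # first forward pass and, with this commit, their
                                         # tensors are overwritten in place with the loaded
                                         # numpy values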
@@ -197,10 +197,14 @@ class Layer(core.Layer):
             the parameter passed in.
         """
         assert isinstance(parameter, framework.Parameter)
-        self._parameters[name] = parameter
+
         if parameter.name in self._loaddict_holder:
-            self._parameters[name] = self._loaddict_holder[parameter.name]
-            parameter = self._loaddict_holder[parameter.name]
+            var = parameter._ivar.value()
+            tensor = var.get_tensor()
+            tensor.set(self._loaddict_holder[parameter.name].numpy(),
+                       framework._current_expected_place())
+
+        self._parameters[name] = parameter
         return parameter
 
     def __getattr__(self, name):
@@ -216,8 +220,10 @@ class Layer(core.Layer):
                 raise ValueError(
                     "super(YourLayer, self).__init__() should be called first")
             if value.name in self._loaddict_holder:
-                params[name] = self._loaddict_holder[value.name]
-            else:
-                params[name] = value
+                var = value._ivar.value()
+                tensor = var.get_tensor()
+                tensor.set(self._loaddict_holder[value.name].numpy(),
+                           framework._current_expected_place())
+            params[name] = value
         elif isinstance(value, core.Layer):
             layers = self.__dict__.get('_sub_layers', None)
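In both hunks the change is the same: instead of rebinding the stored parameter to the object held in _loaddict_holder, the loaded values are copied into the newly created parameter's tensor in place. The commit message only says "potential bugs", but one plausible risk of rebinding is that other objects may already hold a reference to the parameter the layer just created, and swapping the dict entry leaves those references pointing at an un-restored value. A minimal, framework-free sketch of that difference, using plain numpy arrays to stand in for parameter storage (restore_by_rebinding and restore_in_place are hypothetical helpers written only for this illustration):

    import numpy as np

    def restore_by_rebinding(param_dict, name, loaded):
        # Old approach: replace the stored object. Anything that already holds a
        # reference to the original parameter keeps seeing the un-restored values.
        param_dict[name] = loaded

    def restore_in_place(param_dict, name, loaded):
        # New approach: keep the original object and overwrite its contents,
        # analogous to tensor.set(loaded.numpy(), place) in this commit.
        param_dict[name][...] = loaded

    params = {"w": np.zeros((2, 3), dtype="float32")}
    optimizer_view = params["w"]                 # a second holder of the same storage
    loaded = np.ones((2, 3), dtype="float32")

    restore_by_rebinding(params, "w", loaded)
    print(optimizer_view.sum())                  # 0.0 -- the old reference was not updated

    params = {"w": np.zeros((2, 3), dtype="float32")}
    optimizer_view = params["w"]
    restore_in_place(params, "w", loaded)
    print(optimizer_view.sum())                  # 6.0 -- every holder sees the restored values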