Commit 0492158d, authored by Xin Pan

polish

test=develop
Parent b4db31ba
@@ -293,7 +293,6 @@ class Variable(core.VarBase):
         if is_new_var:
             self.desc.set_type(type)
         elif self.desc.type() != type:
-            # sys.stderr.write('%s vs %s\n' % (self.desc.type(), type))
             raise ValueError("Variable {0} has been created before. The "
                              "previous type is {1}; the new type is {2}. They"
                              " are not matched".format(self.name,
@@ -358,16 +357,16 @@ class Variable(core.VarBase):
         self.stop_gradient = stop_gradient
         self.is_data = is_data
 
-    def numpy(self):
+    def _numpy(self):
         scope = _imperative_tracer().get_scope(self.block.desc)
         tensor = core.get_variable_tensor(scope, self.desc.name())
         return np.array(tensor)
 
-    def backward(self):
+    def _backward(self):
         scope = _imperative_tracer().get_scope(self.block.desc)
         self._run_backward(scope)
 
-    def grad(self):
+    def _gradient(self):
         return np.array(self._grad())
 
     def __str__(self):
......
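
The renamed accessors are exercised by the imperative unit test at the end of this diff; as a quick reference, the old-to-new call pattern looks roughly like the sketch below, where `var` stands for an imperative Variable returned by a traced layer call (the name `var` is illustrative, not part of this change):

    value = var._numpy()      # was var.numpy(): copies the underlying tensor out as a numpy array
    var._backward()           # was var.backward(): runs the backward pass starting from this variable
    grad = var._gradient()    # was var.grad(): returns the accumulated gradient as a numpy array
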
@@ -35,13 +35,8 @@ class PyLayer(core.Layer):
         var_inputs = []
         for x in inputs:
-            if isinstance(x, np.ndarray):
-                py_var = base.to_variable(x)
-                var_inputs.append(py_var)
-            elif isinstance(x, framework.Variable):
-                var_inputs.append(x)
-            else:
-                raise ValueError("not var or ndarray %s" % type(x))
+            py_var = base.to_variable(x)
+            var_inputs.append(py_var)
         outputs = self.forward(var_inputs)
         return outputs
......
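
With the isinstance branching gone, every element of inputs is passed to base.to_variable unconditionally, so conversion and validation are expected to live in one place. An annotated sketch of the resulting loop (the comments are explanatory; `inputs`, `base`, and `self.forward` come from the surrounding method, which is not shown in this diff):

    var_inputs = []
    for x in inputs:
        # base.to_variable is now the single conversion point, e.g. for the
        # raw numpy arrays fed in by the imperative test further down
        py_var = base.to_variable(x)
        var_inputs.append(py_var)
    outputs = self.forward(var_inputs)
    return outputs
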
@@ -49,23 +49,8 @@ class LayerHelper(object):
     def startup_program(self):
         return default_startup_program()
 
-    def _np_to_variable(self, x):
-        tensor = core.LoDTensor()
-        tensor.set(x, core.CPUPlace())
-        return Variable(
-            self.main_program.current_block(),
-            type=core.VarDesc.VarType.LOD_TENSOR,
-            name=None,
-            shape=x.shape,
-            dtype=x.dtype)
-
     def to_variable(self, x):
-        if isinstance(x, Variable):
-            return x
-        elif isinstance(x, np.ndarray):
-            return base.to_variable(x, self.main_program.current_block())
-        else:
-            raise ValueError("inputs wrong type %s\n" % x)
+        return base.to_variable(x, self.main_program.current_block())
 
     def append_op(self, *args, **kwargs):
         return self.main_program.current_block().append_op(*args, **kwargs)
......
@@ -43,9 +43,9 @@ class TestImperative(unittest.TestCase):
         l = MyLayer()
         x = l(np.array([1.0, 2.0, -1.0], dtype=np.float32))[0]
         self.assertIsNotNone(x)
-        sys.stderr.write("%s output: %s\n" % (x, x.numpy()))
-        x.backward()
-        sys.stderr.write("grad %s\n" % l._x_for_debug.grad())
+        sys.stderr.write("%s output: %s\n" % (x, x._numpy()))
+        x._backward()
+        sys.stderr.write("grad %s\n" % l._x_for_debug._gradient())
 
 
 if __name__ == '__main__':
......