diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index e31b09fc7cc25d5d70e9447c2e5b875552807495..caf31ccf5c971e7d1aa2fed929b12f2b6dba4634 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -293,7 +293,6 @@ class Variable(core.VarBase):
         if is_new_var:
             self.desc.set_type(type)
         elif self.desc.type() != type:
-            # sys.stderr.write('%s vs %s\n' % (self.desc.type(), type))
             raise ValueError("Variable {0} has been created before. The "
                              "previous type is {1}; the new type is {2}. They"
                              " are not matched".format(self.name,
@@ -358,16 +357,16 @@ class Variable(core.VarBase):
         self.stop_gradient = stop_gradient
         self.is_data = is_data
 
-    def numpy(self):
+    def _numpy(self):
         scope = _imperative_tracer().get_scope(self.block.desc)
         tensor = core.get_variable_tensor(scope, self.desc.name())
         return np.array(tensor)
 
-    def backward(self):
+    def _backward(self):
         scope = _imperative_tracer().get_scope(self.block.desc)
         self._run_backward(scope)
 
-    def grad(self):
+    def _gradient(self):
         return np.array(self._grad())
 
     def __str__(self):
diff --git a/python/paddle/fluid/imperative/layers.py b/python/paddle/fluid/imperative/layers.py
index cb54a36a5e86ff53a56bc187ddabcfade2808b71..1a28f7f4ae35295394b560d79e3dc0cdd5f2beab 100644
--- a/python/paddle/fluid/imperative/layers.py
+++ b/python/paddle/fluid/imperative/layers.py
@@ -35,13 +35,8 @@ class PyLayer(core.Layer):
 
         var_inputs = []
         for x in inputs:
-            if isinstance(x, np.ndarray):
-                py_var = base.to_variable(x)
-                var_inputs.append(py_var)
-            elif isinstance(x, framework.Variable):
-                var_inputs.append(x)
-            else:
-                raise ValueError("not var or ndarray %s" % type(x))
+            py_var = base.to_variable(x)
+            var_inputs.append(py_var)
         outputs = self.forward(var_inputs)
         return outputs
 
diff --git a/python/paddle/fluid/layer_helper.py b/python/paddle/fluid/layer_helper.py
index 25fc843bf58b1ea6989a0fa2959d76db651695b1..74b4a977db6b69d4d256e1f7b36eb53524269bb1 100644
--- a/python/paddle/fluid/layer_helper.py
+++ b/python/paddle/fluid/layer_helper.py
@@ -49,23 +49,8 @@ class LayerHelper(object):
     def startup_program(self):
         return default_startup_program()
 
-    def _np_to_variable(self, x):
-        tensor = core.LoDTensor()
-        tensor.set(x, core.CPUPlace())
-        return Variable(
-            self.main_program.current_block(),
-            type=core.VarDesc.VarType.LOD_TENSOR,
-            name=None,
-            shape=x.shape,
-            dtype=x.dtype)
-
     def to_variable(self, x):
-        if isinstance(x, Variable):
-            return x
-        elif isinstance(x, np.ndarray):
-            return base.to_variable(x, self.main_program.current_block())
-        else:
-            raise ValueError("inputs wrong type %s\n" % x)
+        return base.to_variable(x, self.main_program.current_block())
 
     def append_op(self, *args, **kwargs):
         return self.main_program.current_block().append_op(*args, **kwargs)
diff --git a/python/paddle/fluid/tests/unittests/test_imperative.py b/python/paddle/fluid/tests/unittests/test_imperative.py
index 5413bdc24ef8ab2c9aac88b99384266d0f55c9bf..b5b6305155d1ef3dcf6ce590c221664754c5bdc8 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative.py
@@ -43,9 +43,9 @@ class TestImperative(unittest.TestCase):
         l = MyLayer()
         x = l(np.array([1.0, 2.0, -1.0], dtype=np.float32))[0]
         self.assertIsNotNone(x)
-        sys.stderr.write("%s output: %s\n" % (x, x.numpy()))
-        x.backward()
-        sys.stderr.write("grad %s\n" % l._x_for_debug.grad())
+        sys.stderr.write("%s output: %s\n" % (x, x._numpy()))
+        x._backward()
+        sys.stderr.write("grad %s\n" % l._x_for_debug._gradient())
 
 
 if __name__ == '__main__':