diff --git a/paddle/fluid/API.spec b/paddle/fluid/API.spec
index 69d665b80fde22c10d9d57687b0e45dae7291969..08147fdccd51f6899765cae3b5109e68ed27e936 100644
--- a/paddle/fluid/API.spec
+++ b/paddle/fluid/API.spec
@@ -1,8 +1,3 @@
-paddle.fluid.Variable.__init__ ArgSpec(args=['self', 'block', 'type', 'name', 'shape', 'dtype', 'lod_level', 'capacity', 'persistable', 'error_clip', 'stop_gradient', 'is_data'], varargs=None, keywords='kwargs', defaults=(VarType.LOD_TENSOR, None, None, None, None, None, None, None, False, False))
-paddle.fluid.Variable.astype ArgSpec(args=['self', 'dtype'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Variable.set_desc ArgSpec(args=['self', 'input'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Variable.set_error_clip ArgSpec(args=['self', 'error_clip'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Variable.to_string ArgSpec(args=['self', 'throw_on_error', 'with_details'], varargs=None, keywords=None, defaults=(False,))
 paddle.fluid.Program.__init__ ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.block ArgSpec(args=['self', 'index'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.clone ArgSpec(args=['self', 'for_test'], varargs=None, keywords=None, defaults=(False,))
@@ -33,8 +28,6 @@ paddle.fluid.Operator.set_attr ArgSpec(args=['self', 'name', 'val'], varargs=Non
 paddle.fluid.Operator.to_string ArgSpec(args=['self', 'throw_on_error'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Parameter.__init__ ArgSpec(args=['self', 'block', 'shape', 'dtype'], varargs=None, keywords='kwargs', defaults=None)
 paddle.fluid.Parameter.astype ArgSpec(args=['self', 'dtype'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Parameter.set_desc ArgSpec(args=['self', 'input'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Parameter.set_error_clip ArgSpec(args=['self', 'error_clip'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Parameter.to_string ArgSpec(args=['self', 'throw_on_error', 'with_details'], varargs=None, keywords=None, defaults=(False,))
 paddle.fluid.default_startup_program ArgSpec(args=[], varargs=None, keywords=None, defaults=None)
 paddle.fluid.default_main_program ArgSpec(args=[], varargs=None, keywords=None, defaults=None)
diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index 03e0ac757586150610aee275620d9eee77323c99..db550eccf98033a9b7dc1e68a58fca91d72ebaf7 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -32,7 +32,6 @@ except Exception, e:
 import unique_name
 
 __all__ = [
-    'Variable',
     'Program',
     'Operator',
     'Parameter',
@@ -302,7 +301,7 @@ class Variable(object):
 
     __repr__ = __str__
 
-    def set_desc(self, input):
+    def _set_desc(self, input):
         """
         Set the variable description.
 
@@ -347,7 +346,7 @@ class Variable(object):
     def type(self):
         return self.desc.type()
 
-    def set_error_clip(self, error_clip):
+    def _set_error_clip(self, error_clip):
         """
         Set the error_clip.
 
diff --git a/python/paddle/fluid/tests/test_error_clip.py b/python/paddle/fluid/tests/test_error_clip.py
index 89f4c64975802dc1827ec17ed3626b91e36d6971..3dc858971c584cca947cd958680dbdcf25df9e99 100644
--- a/python/paddle/fluid/tests/test_error_clip.py
+++ b/python/paddle/fluid/tests/test_error_clip.py
@@ -36,7 +36,7 @@ with fluid.program_guard(main_program=prog):
     avg_cost = fluid.layers.mean(cost)
 
 prog_clip = prog.clone()
-prog_clip.block(0).var(hidden1.name).set_error_clip(
+prog_clip.block(0).var(hidden1.name)._set_error_clip(
     fluid.clip.ErrorClipByValue(
         max=CLIP_MAX, min=CLIP_MIN))
 
diff --git a/python/paddle/fluid/tests/unittests/op_test.py b/python/paddle/fluid/tests/unittests/op_test.py
index 6824ede82b74c4e9783682149db870a471c35079..82b5e7cf0b3633eb04ab97c5300b1926b9d47cb6 100644
--- a/python/paddle/fluid/tests/unittests/op_test.py
+++ b/python/paddle/fluid/tests/unittests/op_test.py
@@ -251,7 +251,7 @@ class OpTest(unittest.TestCase):
         for out_name, out_dup in Operator.get_op_outputs(self.op_type):
             fetch_list.append(str(out_name))
         # fetch_list = map(block.var, fetch_list)
-        if not isinstance(fetch_list[0], Variable):
+        if not isinstance(fetch_list[0], fluid.framework.Variable):
             fetch_list = map(block.var, fetch_list)
         outs = executor.run(program,
                             feed=feed_map,
diff --git a/python/paddle/fluid/tests/unittests/test_parallel_op.py b/python/paddle/fluid/tests/unittests/test_parallel_op.py
index 9ec05e02973138e3ec233ef07f98afd598ec86b1..18309f457704f522457daefdb8464ae5df2ffcfb 100644
--- a/python/paddle/fluid/tests/unittests/test_parallel_op.py
+++ b/python/paddle/fluid/tests/unittests/test_parallel_op.py
@@ -120,7 +120,7 @@ class BaseParallelForTest(unittest.TestCase):
             pd = fluid.layers.ParallelDo(places, use_nccl=use_nccl)
             data = next(generator)
 
-            if isinstance(data, fluid.Variable):
+            if isinstance(data, fluid.framework.Variable):
                 data = [data]
 
             with pd.do():
diff --git a/python/paddle/fluid/transpiler/distribute_transpiler.py b/python/paddle/fluid/transpiler/distribute_transpiler.py
index c2044bf03135dd9c5256021d87866cfbbc598dad..fc58703eca73addca109506aa60c0099ff31e1b5 100644
--- a/python/paddle/fluid/transpiler/distribute_transpiler.py
+++ b/python/paddle/fluid/transpiler/distribute_transpiler.py
@@ -38,7 +38,7 @@ from ps_dispatcher import RoundRobin, HashName, PSDispatcher
 from .. import core, framework
 from ..framework import Program, default_main_program, \
     default_startup_program, Block, \
-    Variable, Parameter, grad_var_name
+    Parameter, grad_var_name
 from details import *
 
 LOOKUP_TABLE_TYPE = "lookup_table"
@@ -1044,7 +1044,6 @@ class DistributeTranspiler(object):
         ]
 
     def _clone_var(self, block, var, persistable=True):
-        assert isinstance(var, Variable)
         return block.create_var(
             name=var.name,
             shape=var.shape,
diff --git a/python/paddle/fluid/transpiler/memory_optimization_transpiler.py b/python/paddle/fluid/transpiler/memory_optimization_transpiler.py
index 353c82f71632c0fa398bcfcf836cc382e7e501f7..0ca5cf813b51e200da5edd5830767ad9457acec2 100644
--- a/python/paddle/fluid/transpiler/memory_optimization_transpiler.py
+++ b/python/paddle/fluid/transpiler/memory_optimization_transpiler.py
@@ -14,7 +14,7 @@
 
 from collections import defaultdict
 from .. import core
-from ..framework import Program, default_main_program, Parameter, Variable
+from ..framework import Program, default_main_program, Parameter
 from ..backward import _rename_arg_
 
 dtype_to_size = {
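
For reference, a minimal usage sketch of what this patch means for calling code; it is not part of the diff, and the small network below (the 'x' data layer and 128-unit fc layer) is purely illustrative. With 'Variable' dropped from paddle.fluid.__all__, isinstance checks spell out fluid.framework.Variable, and the error-clip setter is reached through the now-private _set_error_clip, as the updated tests do.

# Hypothetical sketch (not part of the patch): referencing Variable after it is
# removed from the paddle.fluid namespace.
import paddle.fluid as fluid

prog = fluid.Program()
with fluid.program_guard(main_program=prog):
    # Illustrative network; any Variable produced by a layer would do.
    image = fluid.layers.data(name='x', shape=[784], dtype='float32')
    hidden = fluid.layers.fc(input=image, size=128, act='relu')

    # Variable is no longer re-exported at the package level, so the
    # isinstance check goes through the framework module explicitly.
    assert isinstance(hidden, fluid.framework.Variable)

# set_error_clip is private after the rename; callers that still need it
# (e.g. the updated test_error_clip.py) use the underscored name.
prog_clip = prog.clone()
prog_clip.block(0).var(hidden.name)._set_error_clip(
    fluid.clip.ErrorClipByValue(max=10.0, min=-10.0))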