From 1e897f66ee769a27a0f58b643050f938f5f89ef6 Mon Sep 17 00:00:00 2001
From: Leo Chen
Date: Wed, 8 Jul 2020 11:30:49 +0800
Subject: [PATCH] Refine __str__ of VarBase and ParamBase, test=develop (#25345)

* clean __str__ of VarBase and ParamBase, test=develop

* clean to_string, test=develop

* update unittest, test=develop
---
 .../fluid/dygraph/varbase_patch_methods.py |  57 ++++++++-----------
 python/paddle/fluid/framework.py           |  35 +++++-------
 .../unittests/test_imperative_framework.py |   4 +-
 .../fluid/tests/unittests/test_var_base.py |   2 +-
 4 files changed, 40 insertions(+), 58 deletions(-)

diff --git a/python/paddle/fluid/dygraph/varbase_patch_methods.py b/python/paddle/fluid/dygraph/varbase_patch_methods.py
index 013aa23d94f..2e41a8ff417 100644
--- a/python/paddle/fluid/dygraph/varbase_patch_methods.py
+++ b/python/paddle/fluid/dygraph/varbase_patch_methods.py
@@ -212,46 +212,35 @@ def monkey_patch_varbase():
         return np.array(new_ivar.value().get_tensor())
 
     def __str__(self):
-        return self.to_string(True)
-
-    @property
-    def block(self):
-        return framework.default_main_program().global_block()
-
-    def to_string(self, throw_on_error, with_details=False):
         """
-        Get debug string.
+        Convert a VarBase object to a readable string.
 
-        Args:
-
-            throw_on_error (bool): True if raise an exception when self is not initialized.
-
-            with_details (bool): more details about variables and parameters (e.g. trainable, optimize_attr, ...) will be printed when with_details is True. Default value is False;
-
-        Returns:
-            str: The debug string.
+        Returns(str): A readable string.
 
         Examples:
             .. code-block:: python
 
-                import paddle.fluid as fluid
-
-                cur_program = fluid.Program()
-                cur_block = cur_program.current_block()
-                new_variable = cur_block.create_var(name="X",
-                                                    shape=[-1, 23, 48],
-                                                    dtype='float32')
-                print(new_variable.to_string(True))
-                print("=============with detail===============")
-                print(new_variable.to_string(True, True))
+                import paddle
+                paddle.enable_imperative()
+                x = paddle.rand([1, 5])
+                print(x)
+                # Variable: eager_tmp_0
+                # - place: CUDAPlace(0)
+                # - shape: [1, 5]
+                # - layout: NCHW
+                # - dtype: float
+                # - data: [0.645307 0.597973 0.732793 0.646921 0.540328]
+                paddle.disable_imperative()
         """
-        if framework.in_dygraph_mode():
-            # TODO(panyx0718): add more dygraph debug info.
-            tensor = self.value().get_tensor()
-            if tensor._is_initialized():
-                return 'Variable: %s\n%s' % (self.name, str(tensor))
-            else:
-                return 'Variable: %s, not initialized' % (self.name)
+        tensor = self.value().get_tensor()
+        if tensor._is_initialized():
+            return 'Variable: %s\n%s' % (self.name, str(tensor))
+        else:
+            return 'Variable: %s, not initialized' % (self.name)
+
+    @property
+    def block(self):
+        return framework.default_main_program().global_block()
 
     def __nonzero__(self):
         numel = np.prod(self.shape)
@@ -267,7 +256,7 @@ def monkey_patch_varbase():
             ("__bool__", __bool__), ("__nonzero__", __nonzero__),
             ("_to_static_var", _to_static_var), ("set_value", set_value),
             ("block", block), ("backward", backward), ("gradient", gradient),
-            ("__str__", __str__), ("to_string", to_string)):
+            ("__str__", __str__)):
         setattr(core.VarBase, method_name, method)
 
     # patch math methods for varbase
diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index a8b4e55a1b0..cf4f47d13fc 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -5121,12 +5121,8 @@ class ParamBase(core.VarBase):
         self.do_model_average = kwargs.get('do_model_average', None)
 
         self.is_distributed = False
 
-        # self.block = default_main_program().global_block()
-
-    def __str__(self):
-        return self.to_string(True)
 
     @property
     def trainable(self):
         return not self.stop_gradient
@@ -5140,30 +5136,27 @@ class ParamBase(core.VarBase):
                 "The type of trainable MUST be bool, but the type is ",
                 type(trainable))
 
-    def to_string(self, throw_on_error, with_details=False):
+    def __str__(self):
         """
-        To debug string.
+        Convert a ParamBase object to a readable string.
 
-        Args:
-            throw_on_error(bool): raise exception when self is not initialized
-                when throw_on_error is True
-            with_details(bool): more details about variables and parameters
-                (e.g. trainable, optimize_attr, ...) will be printed when with_details is True
-
-        Returns(str): The debug string.
+        Returns(str): A readable string.
 
         Examples:
             .. code-block:: python
 
-                import paddle.fluid as fluid
-
-                prog = fluid.default_main_program()
-                rlt = fluid.layers.data("fake_data", shape=[1,1], dtype='float32')
-                debug_str = prog.to_string(throw_on_error=True, with_details=False)
-                print(debug_str)
+                import paddle
+                paddle.enable_imperative()
+                conv = paddle.nn.Conv2D(3, 3, 5)
+                print(conv.weight)
+                # Parameter: conv2d_0.w_0
+                # - place: CUDAPlace(0)
+                # - shape: [3, 3, 5, 5]
+                # - layout: NCHW
+                # - dtype: float
+                # - data: [...]
+                paddle.disable_imperative()
         """
-        assert isinstance(throw_on_error, bool) and isinstance(with_details,
-                                                               bool)
         tensor = self.value().get_tensor()
         if tensor._is_initialized():
             return 'Parameter: %s\n%s' % (self.name, str(tensor))
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_framework.py b/python/paddle/fluid/tests/unittests/test_imperative_framework.py
index 78ad00fb9a7..68628918391 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_framework.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_framework.py
@@ -62,5 +62,5 @@ class TestDygraphFramework(unittest.TestCase):
     def test_dygraph_to_string(self):
         np_inp = np.array([[1.0, 2.0], [3.0, 4.0]], dtype=np.float32)
         with fluid.dygraph.guard():
-            var_inp = fluid.dygraph.base.to_variable(np_inp)
-            var_inp.to_string(throw_on_error=True)
+            var_inp = fluid.dygraph.to_variable(np_inp)
+            print(str(var_inp))
diff --git a/python/paddle/fluid/tests/unittests/test_var_base.py b/python/paddle/fluid/tests/unittests/test_var_base.py
index ea81fcb17c2..7e565ca31b2 100644
--- a/python/paddle/fluid/tests/unittests/test_var_base.py
+++ b/python/paddle/fluid/tests/unittests/test_var_base.py
@@ -102,7 +102,7 @@ class TestVarBase(unittest.TestCase):
     def test_to_string(self):
         with fluid.dygraph.guard():
             var = fluid.dygraph.to_variable(self.array)
-            self.assertTrue(isinstance(str(var.to_string(True)), str))
+            self.assertTrue(isinstance(str(var), str))
 
     def test_backward(self):
         with fluid.dygraph.guard():
-- 
GitLab
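
Reviewer note (not part of the patch): the snippet below is a minimal, hedged
sketch for trying the new behavior locally. It mirrors the updated
test_dygraph_to_string and test_to_string unit tests and assumes a PaddlePaddle
build that already contains this patch, where printing a dygraph VarBase goes
through the refined __str__ and to_string() is no longer monkey-patched onto
core.VarBase. The exact header text printed depends on the installed version.

    import numpy as np
    import paddle.fluid as fluid

    np_inp = np.array([[1.0, 2.0], [3.0, 4.0]], dtype=np.float32)

    with fluid.dygraph.guard():
        var_inp = fluid.dygraph.to_variable(np_inp)

        # With this patch, plain str()/print() is the supported way to inspect
        # a dygraph VarBase; it shows the name and, if initialized, the tensor.
        print(str(var_inp))
        assert isinstance(str(var_inp), str)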