Unverified commit 1e897f66, authored by Leo Chen, committed by GitHub

Refine __str__ of VarBase and ParamBase, test=develop (#25345)

* clean __str__ of VarBase and ParamBase, test=develop

* clean to_string, test=develop

* update unittest, test=develop
Parent 1c7215ac
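
For context, a minimal sketch of the user-facing effect of this commit, assembled only from the examples in the diff below: the explicit var.to_string(True) call goes away, and a plain print(x) / str(x) is enough once __str__ is patched onto VarBase. The paddle.enable_imperative() / paddle.rand() calls are taken from the diff and not verified against a particular release.

    import paddle

    paddle.enable_imperative()
    x = paddle.rand([1, 5])

    # Before this commit: print(x.to_string(True))
    # After this commit: __str__ is monkey-patched onto core.VarBase, so print is enough.
    print(x)

    paddle.disable_imperative()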
@@ -212,46 +212,35 @@ def monkey_patch_varbase():
         return np.array(new_ivar.value().get_tensor())
 
     def __str__(self):
-        return self.to_string(True)
-
-    @property
-    def block(self):
-        return framework.default_main_program().global_block()
-
-    def to_string(self, throw_on_error, with_details=False):
         """
-        Get debug string.
-
-        Args:
-            throw_on_error (bool): True if raise an exception when self is not initialized.
-            with_details (bool): more details about variables and parameters (e.g. trainable, optimize_attr, ...) will be printed when with_details is True. Default value is False;
+        Convert a VarBase object to a readable string.
 
-        Returns:
-            str: The debug string.
+        Returns(str): A readable string.
 
         Examples:
             .. code-block:: python
 
-                import paddle.fluid as fluid
-
-                cur_program = fluid.Program()
-                cur_block = cur_program.current_block()
-                new_variable = cur_block.create_var(name="X",
-                                                    shape=[-1, 23, 48],
-                                                    dtype='float32')
-                print(new_variable.to_string(True))
-                print("=============with detail===============")
-                print(new_variable.to_string(True, True))
+                import paddle
+                paddle.enable_imperative()
+                x = paddle.rand([1, 5])
+                print(x)
+                # Variable: eager_tmp_0
+                #   - place: CUDAPlace(0)
+                #   - shape: [1, 5]
+                #   - layout: NCHW
+                #   - dtype: float
+                #   - data: [0.645307 0.597973 0.732793 0.646921 0.540328]
+                paddle.disable_imperative()
         """
-        if framework.in_dygraph_mode():
-            # TODO(panyx0718): add more dygraph debug info.
-            tensor = self.value().get_tensor()
-            if tensor._is_initialized():
-                return 'Variable: %s\n%s' % (self.name, str(tensor))
-            else:
-                return 'Variable: %s, not initialized' % (self.name)
+        tensor = self.value().get_tensor()
+        if tensor._is_initialized():
+            return 'Variable: %s\n%s' % (self.name, str(tensor))
+        else:
+            return 'Variable: %s, not initialized' % (self.name)
+
+    @property
+    def block(self):
+        return framework.default_main_program().global_block()
 
     def __nonzero__(self):
         numel = np.prod(self.shape)

@@ -267,7 +256,7 @@ def monkey_patch_varbase():
             ("__bool__", __bool__), ("__nonzero__", __nonzero__),
             ("_to_static_var", _to_static_var), ("set_value", set_value),
             ("block", block), ("backward", backward), ("gradient", gradient),
-            ("__str__", __str__), ("to_string", to_string)):
+            ("__str__", __str__)):
         setattr(core.VarBase, method_name, method)
 
     # patch math methods for varbase
...
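
As the second hunk above shows, __str__ is installed on core.VarBase through the existing setattr registration loop rather than by subclassing. A minimal, self-contained sketch of that monkey-patching idiom, using a toy class (not Paddle code):

    class FakeVarBase(object):
        """Toy stand-in for the C++-backed core.VarBase."""
        def __init__(self, name):
            self.name = name

    def __str__(self):
        # Same shape as the patched method: report the variable by name.
        return 'Variable: %s' % self.name

    # Attach the plain function as a method, mirroring the registration loop above.
    for method_name, method in (("__str__", __str__),):
        setattr(FakeVarBase, method_name, method)

    v = FakeVarBase("eager_tmp_0")
    print(v)  # Variable: eager_tmp_0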
@@ -5121,12 +5121,8 @@ class ParamBase(core.VarBase):
         self.do_model_average = kwargs.get('do_model_average', None)
 
         self.is_distributed = False
 
         # self.block = default_main_program().global_block()
 
-    def __str__(self):
-        return self.to_string(True)
-
     @property
     def trainable(self):
         return not self.stop_gradient

@@ -5140,30 +5136,27 @@ class ParamBase(core.VarBase):
                 "The type of trainable MUST be bool, but the type is ",
                 type(trainable))
 
-    def to_string(self, throw_on_error, with_details=False):
+    def __str__(self):
         """
-        To debug string.
-
-        Args:
-            throw_on_error(bool): raise exception when self is not initialized
-                when throw_on_error is True
-            with_details(bool): more details about variables and parameters
-                (e.g. trainable, optimize_attr, ...) will be printed when
-                with_details is True
+        Convert a ParamBase object to a readable string.
 
-        Returns(str): The debug string.
+        Returns(str): A readable string.
 
         Examples:
             .. code-block:: python
 
-                import paddle.fluid as fluid
-
-                prog = fluid.default_main_program()
-                rlt = fluid.layers.data("fake_data", shape=[1,1], dtype='float32')
-                debug_str = prog.to_string(throw_on_error=True, with_details=False)
-                print(debug_str)
+                import paddle
+                paddle.enable_imperative()
+                conv = paddle.nn.Conv2D(3, 3, 5)
+                print(conv.weight)
+                # Parameter: conv2d_0.w_0
+                #   - place: CUDAPlace(0)
+                #   - shape: [3, 3, 5, 5]
+                #   - layout: NCHW
+                #   - dtype: float
+                #   - data: [...]
+                paddle.disable_imperative()
         """
-        assert isinstance(throw_on_error, bool) and isinstance(with_details,
-                                                               bool)
         tensor = self.value().get_tensor()
         if tensor._is_initialized():
             return 'Parameter: %s\n%s' % (self.name, str(tensor))
...
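
Both new __str__ implementations share the same structure: fetch the underlying tensor and fall back to a "not initialized" message when it has no storage yet. A self-contained toy sketch of that branch, using hypothetical _FakeTensor / _FakeParam stand-ins rather than Paddle APIs:

    class _FakeTensor(object):
        def __init__(self, data=None):
            self._data = data

        def _is_initialized(self):
            # Stand-in for the real tensor's storage check.
            return self._data is not None

        def __str__(self):
            return '  - data: %s' % self._data

    class _FakeParam(object):
        def __init__(self, name, tensor):
            self.name = name
            self._tensor = tensor

        def __str__(self):
            tensor = self._tensor
            if tensor._is_initialized():
                return 'Parameter: %s\n%s' % (self.name, str(tensor))
            return 'Parameter: %s, not initialized' % self.name

    print(_FakeParam('conv2d_0.w_0', _FakeTensor([0.1, 0.2])))  # initialized branch
    print(_FakeParam('fc_0.w_0', _FakeTensor()))                # uninitialized branch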
@@ -62,5 +62,5 @@ class TestDygraphFramework(unittest.TestCase):
     def test_dygraph_to_string(self):
         np_inp = np.array([[1.0, 2.0], [3.0, 4.0]], dtype=np.float32)
         with fluid.dygraph.guard():
-            var_inp = fluid.dygraph.base.to_variable(np_inp)
-            var_inp.to_string(throw_on_error=True)
+            var_inp = fluid.dygraph.to_variable(np_inp)
+            print(str(var_inp))
@@ -102,7 +102,7 @@ class TestVarBase(unittest.TestCase):
     def test_to_string(self):
         with fluid.dygraph.guard():
             var = fluid.dygraph.to_variable(self.array)
-            self.assertTrue(isinstance(str(var.to_string(True)), str))
+            self.assertTrue(isinstance(str(var), str))
 
     def test_backward(self):
         with fluid.dygraph.guard():
...