未验证 提交 25a233e4 编写于 作者: C Chen Weihang 提交者: GitHub

Simplify Program printing code to improve debugging efficiency (#23918)

* add to_readable_code method, test=develop

* polish doc details, test=develop

* polish doc note, test=develop

* fix unittest error, test=develop

* fix coverage, test=develop

* add print test, test=develop

* add print param, test=develop

* hidden to_readable_code api, test=develop

* remove original tool methods, test=develop

* remove old api using code, test=develop
上级 bfb60efb
...@@ -1187,7 +1187,50 @@ class Variable(object): ...@@ -1187,7 +1187,50 @@ class Variable(object):
pass pass
def __str__(self): def __str__(self):
return self.to_string(True) return self._to_readable_code()
def _to_readable_code(self):
    """
    Get readable debug string of Variable.

    .. note::
        If you want to get the debug string in protobuf format,
        please use :code:`to_string` method.

    Returns:
        string: The formatted Variable string.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            cur_program = fluid.Program()
            cur_block = cur_program.current_block()
            new_variable = cur_block.create_var(name="X",
                                                shape=[-1, 23, 48],
                                                dtype='float32')
            print(new_variable._to_readable_code())
    """
    # Tensor-like variable types carry shape/dtype info; everything else
    # is rendered with its type tag only.
    if self.type in (core.VarDesc.VarType.SELECTED_ROWS,
                     core.VarDesc.VarType.LOD_TENSOR):
        var_str = "{name} : fluid.{type}.shape{shape}.astype({dtype})".format(
            name=self.name, type=self.type, shape=self.shape, dtype=self.dtype)
    else:
        # NOTE: the original appended an unbalanced ')' here and passed
        # unused i="{"/e="}" keyword arguments; both removed.
        var_str = "{name} : fluid.{type}".format(name=self.name, type=self.type)

    # Prefix marks the kind of variable so params are easy to spot.
    if type(self) == Parameter:
        if self.trainable:
            var_str = "trainable param " + var_str
        else:
            var_str = "param " + var_str
    else:
        var_str = "var " + var_str

    if self.persistable:
        var_str = "persist " + var_str

    return var_str
def to_string(self, throw_on_error, with_details=False): def to_string(self, throw_on_error, with_details=False):
""" """
...@@ -1990,8 +2033,101 @@ class Operator(object): ...@@ -1990,8 +2033,101 @@ class Operator(object):
proto = framework_pb2.OpDesc.FromString(six.binary_type(protostr)) proto = framework_pb2.OpDesc.FromString(six.binary_type(protostr))
return _debug_string_(proto, throw_on_error) return _debug_string_(proto, throw_on_error)
def _to_readable_code(self, skip_op_callstack=True):
"""
Get readable debug string of Operator.
.. note::
If you want to get the debug string in protobuf format,
please use :code:`to_string` method.
Args:
skip_op_callstack(bool): whether to skip parsing Operator's attribute
op_callstack, default value is True
Returns:
string: The formatted Operator string.
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
var = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
new_op = cur_block.append_op(type="abs",
inputs={"X": [var]},
outputs={"Out": [var]})
print(new_op._to_readable_code())
"""
assert isinstance(
skip_op_callstack, bool
), "skip_op_callstack parameter's type is error, expect bool, received %s".format(
type(skip_op_callstack))
outputs_str = "{"
for i in range(0, len(self.output_names)):
outputs_str += "{name}=".format(name=self.output_names[i])
o = self.output(self.output_names[i])
outputs_str += "{value}".format(value=o)
if i != len(self.output_names) - 1:
outputs_str += ", "
outputs_str += "}"
inputs_str = "{"
for i in range(0, len(self.input_names)):
inputs_str += "{name}=".format(name=self.input_names[i])
o = self.input(self.input_names[i])
inputs_str += "{value}".format(value=o)
if i != len(self.input_names) - 1:
inputs_str += ", "
inputs_str += "}"
attr_names = sorted(self.attr_names)
attrs_str = ""
for i in range(0, len(attr_names)):
name = attr_names[i]
if skip_op_callstack and name == "op_callstack":
continue
attr_type = self.desc.attr_type(name)
if attr_type == core.AttrType.BLOCK:
a = "{name} = block[{value}]".format(
name=name, type=attr_type, value=self._block_attr_id(name))
attrs_str += a
if i != len(attr_names) - 1:
attrs_str += ", "
continue
if attr_type == core.AttrType.BLOCKS:
a = "{name} = blocks{value}".format(
name=name,
type=attr_type,
value=self._blocks_attr_ids(name))
attrs_str += a
if i != len(attr_names) - 1:
attrs_str += ", "
continue
a = "{name} = {value}".format(
name=name, type=attr_type, value=self.desc.attr(name))
attrs_str += a
if i != len(attr_names) - 1:
attrs_str += ", "
if outputs_str != "{}":
op_str = "{outputs} = {op_type}(inputs={inputs}, {attrs})".\
format(outputs = outputs_str, op_type=self.type, inputs=inputs_str, attrs=attrs_str)
else:
op_str = "{op_type}(inputs={inputs}, {attrs})".\
format(op_type=self.type, inputs=inputs_str, attrs=attrs_str)
return op_str
def __str__(self): def __str__(self):
return self.to_string(True) return self._to_readable_code()
__repr__ = __str__ __repr__ = __str__
...@@ -2285,7 +2421,52 @@ class Block(object): ...@@ -2285,7 +2421,52 @@ class Block(object):
self.removed_vars = collections.OrderedDict() self.removed_vars = collections.OrderedDict()
def __str__(self): def __str__(self):
return self.to_string(True) return self._to_readable_code()
def _to_readable_code(self, skip_op_callstack=True):
"""
Get readable debug string of Block.
.. note::
If you want to get the debug string in protobuf format,
please use :code:`to_string` method.
Args:
skip_op_callstack(bool): whether to skip parsing Operator's attribute
op_callstack, default value is True
Returns:
string: The formatted Block string.
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_var = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
new_op = cur_block.append_op(type="abs",
inputs={"X": [new_var]},
outputs={"Out": [new_var]})
print(cur_block._to_readable_code())
"""
assert isinstance(
skip_op_callstack, bool
), "skip_op_callstack parameter's type is error, expect bool, received %s".format(
type(skip_op_callstack))
block_str = "{ // block "
block_str += "{}\n".format(self.idx)
for var in list(self.vars.values()):
block_str += " {}\n".format(var._to_readable_code())
block_str += "\n"
for op in self.ops:
block_str += " {}\n".format(
op._to_readable_code(skip_op_callstack))
block_str += "}"
return block_str
def to_string(self, throw_on_error, with_details=False): def to_string(self, throw_on_error, with_details=False):
""" """
...@@ -3889,7 +4070,46 @@ class Program(object): ...@@ -3889,7 +4070,46 @@ class Program(object):
Raises: Raises:
ValueError: If any of required fields is not set. ValueError: If any of required fields is not set.
""" """
return self.to_string(True) return self._to_readable_code()
def _to_readable_code(self, skip_op_callstack=True):
"""
Get readable debug string of Program.
.. note::
If you want to get the debug string in protobuf format,
please use :code:`to_string` method.
Args:
skip_op_callstack(bool): whether to skip parsing Operator's attribute
op_callstack, default value is True
Returns:
string: The formatted Program string.
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_var = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
new_op = cur_block.append_op(type="abs",
inputs={"X": [new_var]},
outputs={"Out": [new_var]})
print(cur_program._to_readable_code())
"""
assert isinstance(
skip_op_callstack, bool
), "skip_op_callstack parameter's type is error, expect bool, received %s".format(
type(skip_op_callstack))
program_str = ""
for block in self.blocks:
program_str += block._to_readable_code(skip_op_callstack)
return program_str
def to_string(self, throw_on_error, with_details=False): def to_string(self, throw_on_error, with_details=False):
""" """
...@@ -4783,7 +5003,7 @@ class Parameter(Variable): ...@@ -4783,7 +5003,7 @@ class Parameter(Variable):
self.is_distributed = False self.is_distributed = False
def __str__(self): def __str__(self):
return self.to_string(True) return self._to_readable_code()
def to_string(self, throw_on_error, with_details=False): def to_string(self, throw_on_error, with_details=False):
""" """
......
...@@ -41,8 +41,6 @@ from paddle.compat import long_type ...@@ -41,8 +41,6 @@ from paddle.compat import long_type
import hashlib import hashlib
from paddle.fluid.transpiler.details import program_to_code
const_para_attr = fluid.ParamAttr(initializer=fluid.initializer.Constant(0.001)) const_para_attr = fluid.ParamAttr(initializer=fluid.initializer.Constant(0.001))
const_bias_attr = const_para_attr const_bias_attr = const_para_attr
......
...@@ -109,11 +109,8 @@ def get_transpiler(trainer_id, main_program, pserver_endpoints, trainers): ...@@ -109,11 +109,8 @@ def get_transpiler(trainer_id, main_program, pserver_endpoints, trainers):
return t return t
from paddle.fluid.transpiler.details import op_to_code
def operator_equal(a, b): def operator_equal(a, b):
if op_to_code(a) != op_to_code(b): if a.__str__() != b.__str__():
raise ValueError("In operator_equal not equal\n") raise ValueError("In operator_equal not equal\n")
for k, v in six.iteritems(a.__dict__): for k, v in six.iteritems(a.__dict__):
......
...@@ -22,7 +22,6 @@ import paddle.fluid.regularizer as regularizer ...@@ -22,7 +22,6 @@ import paddle.fluid.regularizer as regularizer
import paddle.fluid.clip as clip import paddle.fluid.clip as clip
import paddle.compat as cpt import paddle.compat as cpt
from paddle.fluid.backward import append_backward from paddle.fluid.backward import append_backward
from paddle.fluid.transpiler.details import program_to_code
class TestDGCMomentumOptimizer(unittest.TestCase): class TestDGCMomentumOptimizer(unittest.TestCase):
...@@ -121,10 +120,6 @@ class TestDGCMomentumOptimizer(unittest.TestCase): ...@@ -121,10 +120,6 @@ class TestDGCMomentumOptimizer(unittest.TestCase):
self.assertAlmostEqual(op.attr('regular_coeff'), coeff) self.assertAlmostEqual(op.attr('regular_coeff'), coeff)
print("dgc regular_coeff=" + str(coeff)) print("dgc regular_coeff=" + str(coeff))
# for local test debug
#with open("test_dgc_optimizer_" + name + str(use_recompute) + ".log", "w") as f:
# program_to_code(program, fout=f)
def test_tpyeError(self): def test_tpyeError(self):
# the type of DGCMomentumOptimizer(grad_clip=) must be 'GradientClipByNorm' # the type of DGCMomentumOptimizer(grad_clip=) must be 'GradientClipByNorm'
with self.assertRaises(TypeError): with self.assertRaises(TypeError):
......
...@@ -22,7 +22,7 @@ class TestDebugStringFramework(unittest.TestCase): ...@@ -22,7 +22,7 @@ class TestDebugStringFramework(unittest.TestCase):
def test_debug_str(self): def test_debug_str(self):
p = Program() p = Program()
p.current_block().create_var(name='t', shape=[0, 1]) p.current_block().create_var(name='t', shape=[0, 1])
self.assertRaises(ValueError, callableObj=p.__str__) self.assertRaises(ValueError, p.to_string, True)
if __name__ == '__main__': if __name__ == '__main__':
......
...@@ -83,8 +83,6 @@ class TestImperativeStaticModelRunnerWhile(unittest.TestCase): ...@@ -83,8 +83,6 @@ class TestImperativeStaticModelRunnerWhile(unittest.TestCase):
optimizer = fluid.optimizer.SGD(learning_rate=0.001) optimizer = fluid.optimizer.SGD(learning_rate=0.001)
optimizer.minimize(avg_loss) optimizer.minimize(avg_loss)
# pu.program_to_code(main_program, skip_op_callstack=True)
place = fluid.CUDAPlace(0) if core.is_compiled_with_cuda( place = fluid.CUDAPlace(0) if core.is_compiled_with_cuda(
) else fluid.CPUPlace() ) else fluid.CPUPlace()
......
...@@ -27,8 +27,6 @@ from paddle.fluid.layers.io import Recv ...@@ -27,8 +27,6 @@ from paddle.fluid.layers.io import Recv
from paddle.fluid.layers.io import Send from paddle.fluid.layers.io import Send
import paddle.fluid.layers.ops as ops import paddle.fluid.layers.ops as ops
from paddle.fluid.transpiler.details import program_to_code
class TestProgram2Code(unittest.TestCase): class TestProgram2Code(unittest.TestCase):
def test_print(self): def test_print(self):
...@@ -55,7 +53,7 @@ class TestProgram2Code(unittest.TestCase): ...@@ -55,7 +53,7 @@ class TestProgram2Code(unittest.TestCase):
fluid.initializer.Constant(value=1.0)(x, main.global_block()) fluid.initializer.Constant(value=1.0)(x, main.global_block())
ops._scale(x=x, scale=10.0, out=out_var) ops._scale(x=x, scale=10.0, out=out_var)
program_to_code(main) print(main)
def init_client(self, place, port): def init_client(self, place, port):
main = fluid.Program() main = fluid.Program()
...@@ -75,7 +73,48 @@ class TestProgram2Code(unittest.TestCase): ...@@ -75,7 +73,48 @@ class TestProgram2Code(unittest.TestCase):
Send("127.0.0.1:%d" % port, [x]) Send("127.0.0.1:%d" % port, [x])
o = Recv("127.0.0.1:%d" % port, [get_var]) o = Recv("127.0.0.1:%d" % port, [get_var])
program_to_code(main) print(main)
class TestProgramToReadableCode(unittest.TestCase):
    def setUp(self):
        # Build a small program containing a plain variable, a parameter,
        # an operator, and a conditional op (which adds sub-blocks).
        self.program = fluid.Program()
        self.block = self.program.current_block()
        self.var = self.block.create_var(
            name="X", shape=[-1, 23, 48], dtype='float32')
        self.param = self.block.create_parameter(
            name="W", shape=[23, 48], dtype='float32', trainable=True)
        self.op = self.block.append_op(
            type="abs", inputs={"X": [self.var]}, outputs={"Out": [self.var]})
        self.append_cond_op(self.program)

    def append_cond_op(self, program):
        # Append a cond op so the program owns more than one block.
        def true_func():
            return layers.fill_constant(shape=[2, 3], dtype='int32', value=2)

        def false_func():
            return layers.fill_constant(shape=[3, 2], dtype='int32', value=-1)

        with fluid.program_guard(program):
            x = layers.fill_constant(shape=[1], dtype='float32', value=0.1)
            y = layers.fill_constant(shape=[1], dtype='float32', value=0.23)
            pred = layers.less_than(y, x)
            layers.cond(pred, true_func, false_func)

    def test_program_code(self):
        # _to_readable_code must succeed for every framework entity kind.
        for entity in (self.var, self.param, self.op, self.block,
                       self.program):
            entity._to_readable_code()

    def test_program_print(self):
        # __str__ delegates to _to_readable_code; printing must not raise.
        for entity in (self.var, self.param, self.op, self.block,
                       self.program):
            print(entity)
if __name__ == "__main__": if __name__ == "__main__":
......
...@@ -49,155 +49,3 @@ def find_op_by_output_arg(block, arg_name, reverse=False): ...@@ -49,155 +49,3 @@ def find_op_by_output_arg(block, arg_name, reverse=False):
if arg_name in op.output_arg_names: if arg_name in op.output_arg_names:
return index return index
return -1 return -1
def get_indent_space(indent, space_num=4):
    """Return the indentation string for ``indent`` levels of
    ``space_num`` spaces each."""
    return " " * (indent * space_num)
def variable_to_code(var):
    """
    Get readable codes of fluid variable.

    Args:
        var: A fluid variable.

    Returns:
        string: The formatted string.
    """
    # Tensor-like variable types carry shape/dtype info; everything else
    # is rendered with its type tag only.
    if var.type == core.VarDesc.VarType.SELECTED_ROWS or var.type == core.VarDesc.VarType.LOD_TENSOR:
        var_str = "{name} : fluid.{type}.shape{shape}.astype({dtype})".format(
            name=var.name, type=var.type, shape=var.shape, dtype=var.dtype)
    else:
        # NOTE: the original appended an unbalanced ')' here and passed
        # unused i="{"/e="}" keyword arguments; both removed.
        var_str = "{name} : fluid.{type}".format(name=var.name, type=var.type)

    # Prefix marks the kind of variable so parameters are easy to spot.
    if type(var) == paddle.fluid.framework.Parameter:
        if var.trainable:
            var_str = "trainable parameter " + var_str
        else:
            var_str = "parameter " + var_str
    else:
        var_str = "var " + var_str

    if var.persistable:
        var_str = "persist " + var_str

    return var_str
def op_to_code(op, skip_op_callstack=True):
    """
    Get readable codes of fluid operator.

    Args:
        op: A fluid operator.
        skip_op_callstack(bool): whether to omit the ``op_callstack``
            attribute from the rendered string, default True.

    Returns:
        string: The formatted string.
    """

    def _render_args(names, getter):
        # Render "name=value" pairs for inputs/outputs inside braces.
        pairs = ["{}={}".format(n, getter(n)) for n in names]
        return "{" + ", ".join(pairs) + "}"

    outputs_str = _render_args(op.output_names, op.output)
    inputs_str = _render_args(op.input_names, op.input)

    attr_names = sorted(op.attr_names)
    last_idx = len(attr_names) - 1
    pieces = []
    for idx, name in enumerate(attr_names):
        if skip_op_callstack and name == "op_callstack":
            continue

        attr_type = op.desc.attr_type(name)
        if attr_type == core.AttrType.BLOCK:
            rendered = "{} = block[{}]".format(name, op._block_attr_id(name))
        elif attr_type == core.AttrType.BLOCKS:
            rendered = "{} = blocks{}".format(name, op._blocks_attr_ids(name))
        else:
            rendered = "{} = {}".format(name, op.desc.attr(name))

        # Separator placement mirrors the index-based original: every
        # entry except the last of the *unfiltered* list gets ", ".
        pieces.append(rendered + (", " if idx != last_idx else ""))
    attrs_str = "".join(pieces)

    # Ops without outputs omit the "outs =" prefix.
    if outputs_str != "{}":
        return "{} = {}(inputs={}, {})".format(outputs_str, op.type,
                                               inputs_str, attrs_str)
    return "{}(inputs={}, {})".format(op.type, inputs_str, attrs_str)
def block_to_code(block, block_idx, fout=None, skip_op_callstack=False):
    """
    Print readable codes of one fluid block.

    Args:
        block: A fluid block.
        block_idx: index of the block inside its program.
        fout: output stream; stdout when None.
        skip_op_callstack(bool): whether to omit op_callstack attributes.
    """
    outer = get_indent_space(0)
    inner = get_indent_space(1)

    print("{}{} // block {}".format(outer, '{', block_idx), file=fout)

    # Variables are listed first, sorted by name for deterministic output.
    sorted_vars = sorted(six.iteritems(block.vars), key=lambda kv: kv[0])
    for _, var in sorted_vars:
        print("{}{}".format(inner, variable_to_code(var)), file=fout)

    # Blank separator line between variables and operators.
    if sorted_vars:
        print("", file=fout)

    for op in block.ops:
        print("{}{}".format(inner, op_to_code(op, skip_op_callstack)),
              file=fout)

    print("{}{}".format(outer, '}'), file=fout)
def program_to_code(prog, fout=None, skip_op_callstack=True):
    """
    Print readable codes of fluid program.

    Args:
        prog : A fluid program.
        fout : output stream; stdout when None.
        skip_op_callstack(bool): whether to omit op_callstack attributes.

    An example result like bellow:
    https://github.com/PaddlePaddle/Paddle/pull/12673
    """
    # Emit every block in order, numbering them from zero.
    for block_idx, block in enumerate(prog.blocks):
        block_to_code(block, block_idx, fout, skip_op_callstack)
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册