Unverified commit 16e73e0d, authored by Wu Yi, committed by GitHub

hide operator API (#12543)

* hide operator API

* update

* update api.spec

* fix merge

* fix test
Parent 7f9dab10
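Every hunk below applies one pattern: Python-facing methods of `Operator`/`OpDesc` that are implementation details gain a leading underscore (`rename_input` becomes `_rename_input`, `set_attr` becomes `_set_attr`, and so on), and their entries leave the public API spec. A minimal before/after sketch of what this means for calling code, assuming paddle.fluid from after this commit; the toy program here is illustrative only, not part of the commit:

```python
# a minimal sketch, assuming paddle.fluid from after this commit;
# the toy program below is illustrative only.
import paddle.fluid as fluid

prog = fluid.Program()
with fluid.program_guard(prog):
    x = fluid.layers.data(name='x', shape=[1], dtype='float32')
    y = fluid.layers.scale(x, scale=2.0)

op = prog.global_block().ops[-1]      # the scale op
# before this commit: op.set_attr('scale', 3.0)   # public, in API.spec
op._set_attr('scale', 3.0)            # after: private by convention
```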
@@ -102,7 +102,7 @@ class Float16Transpiler:
                 continue
             for input_arg in current_op.input_arg_names:
                 if input_arg in self.input_map:
-                    current_op.rename_input(input_arg,
+                    current_op._rename_input(input_arg,
                                             self.input_map[input_arg])

     def _remove_unused_var(self):
@@ -187,7 +187,7 @@ class Float16Transpiler:
                     shape=var.shape,
                     persistable=var.persistable)
                 find_op(var)
-                var.op.rename_output(var_name, tmp_var_name)
+                var.op._rename_output(var_name, tmp_var_name)
                 self.block._insert_op(
                     i,
                     type="cast",
......
@@ -6,26 +6,9 @@
 paddle.fluid.Program.global_block ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.list_vars ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.parse_from_string ArgSpec(args=['binary_str'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.to_string ArgSpec(args=['self', 'throw_on_error', 'with_details'], varargs=None, keywords=None, defaults=(False,))
-paddle.fluid.Operator.__init__ ArgSpec(args=['self', 'block', 'desc', 'type', 'inputs', 'outputs', 'attrs'], varargs=None, keywords=None, defaults=(None, None, None, None))
-paddle.fluid.Operator.all_attrs ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.attr ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.attr_type ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.block_attr ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.block_attr_id ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.blocks_attr ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.blocks_attr_ids ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.has_attr ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.has_kernel ArgSpec(args=['self', 'op_type'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.input ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.output ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.rename_input ArgSpec(args=['self', 'old_name', 'new_name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.rename_output ArgSpec(args=['self', 'old_name', 'new_name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.set_attr ArgSpec(args=['self', 'name', 'val'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Operator.to_string ArgSpec(args=['self', 'throw_on_error'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.default_startup_program ArgSpec(args=[], varargs=None, keywords=None, defaults=None)
 paddle.fluid.default_main_program ArgSpec(args=[], varargs=None, keywords=None, defaults=None)
 paddle.fluid.program_guard ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
-paddle.fluid.get_var ArgSpec(args=['name', 'program'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.name_scope ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
 paddle.fluid.Executor.__init__ ArgSpec(args=['self', 'place'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Executor.close ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
......
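Each `API.spec` line is essentially a Python `inspect` argspec keyed by the public name, which is why hiding a method simply deletes its line. A rough sketch of how such a line can be derived with the standard library; the actual spec generator in the repo may differ:

```python
# a rough sketch using the Python 2-era inspect.getargspec (removed in
# Python 3.11), which matches the ArgSpec(...) rendering above;
# Paddle's real generator may differ.
import inspect

def argspec_line(qualname, fn):
    spec = inspect.getargspec(fn)
    return '%s ArgSpec(args=%r, varargs=%r, keywords=%r, defaults=%r)' % (
        qualname, spec.args, spec.varargs, spec.keywords, spec.defaults)

class Demo(object):
    def to_string(self, throw_on_error, with_details=False):
        pass

print(argspec_line('paddle.fluid.Demo.to_string', Demo.to_string))
# paddle.fluid.Demo.to_string ArgSpec(args=['self', 'throw_on_error',
#   'with_details'], varargs=None, keywords=None, defaults=(False,))
```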
@@ -14,6 +14,8 @@
 #include "paddle/fluid/framework/ir/graph_traits.h"
+#include <vector>

 namespace paddle {
 namespace framework {
 namespace ir {
......
@@ -285,12 +285,12 @@ void BindOpDesc(pybind11::module *m) {
       .def("set_output", &pd::OpDesc::SetOutput)
       .def("input_arg_names", &pd::OpDesc::InputArgumentNames)
       .def("output_arg_names", &pd::OpDesc::OutputArgumentNames)
-      .def("rename_input", &pd::OpDesc::RenameInput)
-      .def("rename_output", &pd::OpDesc::RenameOutput)
+      .def("_rename_input", &pd::OpDesc::RenameInput)
+      .def("_rename_output", &pd::OpDesc::RenameOutput)
       .def("has_attr", &pd::OpDesc::HasAttr)
       .def("attr_type", &pd::OpDesc::GetAttrType)
       .def("attr_names", &pd::OpDesc::AttrNames)
-      .def("set_attr", &pd::OpDesc::SetAttr)
+      .def("_set_attr", &pd::OpDesc::SetAttr)
       .def("attr", &pd::OpDesc::GetAttr)
       .def("set_block_attr", &pd::OpDesc::SetBlockAttr)
       .def("set_blocks_attr", &pd::OpDesc::SetBlocksAttr)
@@ -300,8 +300,8 @@ void BindOpDesc(pybind11::module *m) {
              std::string ser(seriralized);
              self.SetAttr(name, ser);
            })
-      .def("block_attr_id", &pd::OpDesc::GetBlockAttrId)
-      .def("blocks_attr_ids", &pd::OpDesc::GetBlocksAttrIds)
+      .def("_block_attr_id", &pd::OpDesc::GetBlockAttrId)
+      .def("_blocks_attr_ids", &pd::OpDesc::GetBlocksAttrIds)
       .def("check_attrs", &pd::OpDesc::CheckAttrs)
       .def("infer_shape", &pd::OpDesc::InferShape)
       .def("infer_var_type", &pd::OpDesc::InferVarType)
......
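In pybind11, the first argument of `.def` is only the Python-visible name, so the C++ methods (`pd::OpDesc::RenameInput` and friends) are untouched; rebinding them under underscore names is the whole change. A minimal check from the Python side, assuming a `paddle.fluid.core` built after this commit; the `ProgramDesc`/`append_op` usage mirrors the tests further down:

```python
# a minimal sketch; assumes a compiled paddle.fluid.core from after
# this commit is importable.
from paddle.fluid import core

program_desc = core.ProgramDesc()
block = program_desc.block(0)
op = block.append_op()

assert hasattr(op, '_rename_input')      # new private binding exists
assert not hasattr(op, 'rename_input')   # old public name is gone
```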
@@ -38,8 +38,8 @@ def _rename_arg_(op_descs, old_name, new_name, begin_idx=None, end_idx=None):
         op_desc = op_descs[i]
         if isinstance(op_desc, tuple):
             op_desc = op_desc[0]
-        op_desc.rename_input(old_name, new_name)
-        op_desc.rename_output(old_name, new_name)
+        op_desc._rename_input(old_name, new_name)
+        op_desc._rename_output(old_name, new_name)


 def _create_op_desc_(op_type, inputs, outputs, attrs):
@@ -70,7 +70,7 @@ def _create_op_desc_(op_type, inputs, outputs, attrs):
         if isinstance(val, framework.Block):
             op_desc.set_block_attr(name, val.desc)
         else:
-            op_desc.set_attr(name, val)
+            op_desc._set_attr(name, val)
     return op_desc
@@ -346,7 +346,7 @@ def _append_backward_ops_(block,
         grad_sub_block_list = []
         # If the op has its own sub-block, deal with the sub-block first
         if op.has_attr("sub_block"):
-            sub_block = program.block(op.block_attr_id("sub_block"))
+            sub_block = program.block(op._block_attr_id("sub_block"))
             grad_sub_block = program._create_block()
             grad_sub_block._set_forward_block_idx(sub_block.idx)
             cb = _callback_lookup_(op)
@@ -382,7 +382,7 @@
     for op_desc in grad_op_descs:
         new_op_desc = target_block.desc.append_op()
         new_op_desc.copy_from(op_desc)
-        new_op_desc.set_attr(op_role_attr_name, backward)
+        new_op_desc._set_attr(op_role_attr_name, backward)
         grad_to_var["__current_op_desc__"] = new_op_desc
         if callbacks is not None:
             assert (isinstance(callbacks, list))
@@ -408,7 +408,7 @@ def _append_backward_vars_(block, start_op_idx, grad_to_var, grad_info_map):
     for op_idx in range(start_op_idx, block.desc.op_size()):
         op_desc = block.desc.op(op_idx)
         if op_desc.has_attr("sub_block"):
-            sub_block = block.program.block(op_desc.block_attr_id("sub_block"))
+            sub_block = block.program.block(op_desc._block_attr_id("sub_block"))
             _append_backward_vars_(sub_block, 0, grad_to_var, grad_info_map)
     new_vars = set()
     # create new gradient variables
@@ -438,12 +438,12 @@ def _rename_grad_(block, start_op_idx, grad_to_var, target_grad_map):
         op_desc = block.desc.op(op_idx)
         for name in op_desc.input_arg_names():
             if name in var_map:
-                op_desc.rename_input(name, var_map[name])
+                op_desc._rename_input(name, var_map[name])

         for name in op_desc.output_arg_names():
             if block.desc.find_var(name.encode("ascii")):
                 new_name = unique_name.generate(name)
-                op_desc.rename_output(name, new_name)
+                op_desc._rename_output(name, new_name)
                 var_map[name] = new_name

     for g, ng in six.iteritems(var_map):
@@ -542,7 +542,7 @@ def append_backward(loss, parameter_list=None, no_grad_set=None,
     if loss.op is None:
         raise ValueError("loss.op is None. Should not happend")
-    loss.op.set_attr(core.op_proto_and_checker_maker.kOpRoleAttrName(),
+    loss.op._set_attr(core.op_proto_and_checker_maker.kOpRoleAttrName(),
                      int(core.op_proto_and_checker_maker.OpRole.Forward) |
                      int(core.op_proto_and_checker_maker.OpRole.Loss))
@@ -631,7 +631,7 @@
             attr_val = [p.name, g.name]
             if g.op.has_attr(op_role_var_attr_name):
                 attr_val.extend(g.op.attr(op_role_var_attr_name))
-            g.op.set_attr(op_role_var_attr_name, attr_val)
+            g.op._set_attr(op_role_var_attr_name, attr_val)

     return params_and_grads
......
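The `_rename_grad_` hunk above maintains `var_map`, a dict from stale gradient names to fresh ones: each op first remaps any input already renamed upstream, then any output that collides with an existing variable gets a `unique_name` and is recorded for downstream ops. A standalone sketch of that bookkeeping over plain dicts, simplified and not using the real `OpDesc` objects:

```python
# a simplified, standalone sketch of the renaming pass in _rename_grad_;
# ops are dicts of name lists instead of C++ OpDesc objects.
import itertools

_uid = itertools.count()

def unique_name(base):
    # stand-in for fluid.unique_name.generate
    return '%s@RENAME@%d' % (base, next(_uid))

def rename_grads(ops, existing_vars):
    var_map = {}
    for op in ops:
        # inputs: follow renames made by earlier ops
        op['inputs'] = [var_map.get(n, n) for n in op['inputs']]
        # outputs: avoid clobbering an existing variable
        renamed = []
        for n in op['outputs']:
            if n in existing_vars:
                new = unique_name(n)
                var_map[n] = new
                renamed.append(new)
            else:
                renamed.append(n)
        op['outputs'] = renamed
    return var_map

ops = [{'inputs': [], 'outputs': ['x@GRAD']},
       {'inputs': ['x@GRAD'], 'outputs': ['y']}]
print(rename_grads(ops, existing_vars={'x@GRAD'}))
# {'x@GRAD': 'x@GRAD@RENAME@0'}; the second op's input follows the rename
```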
@@ -75,8 +75,8 @@ class ErrorClipByValue(BaseErrorClipAttr):
         clip_op_desc.set_type("clip")
         clip_op_desc.set_input("X", [grad_name])
         clip_op_desc.set_output("Out", [grad_name])
-        clip_op_desc.set_attr("min", self.min)
-        clip_op_desc.set_attr("max", self.max)
+        clip_op_desc._set_attr("min", self.min)
+        clip_op_desc._set_attr("max", self.max)


 def error_clip_callback(block, context):
......
@@ -40,11 +40,9 @@ PADDLE_ON_MODEL_CE = os.environ.get('PADDLE_ON_MODEL_CE', None) is not None

 __all__ = [
     'Program',
-    'Operator',
     'default_startup_program',
     'default_main_program',
     'program_guard',
-    'get_var',
     'name_scope',
 ]
@@ -663,11 +661,11 @@ class Operator(object):
                 self._update_desc_attr(attr_name, attr_val)

         self.desc.check_attrs()
-        if self.has_kernel(type):
+        if self._has_kernel(type):
             self.desc.infer_var_type(self.block.desc)
             self.desc.infer_shape(self.block.desc)

-    def has_kernel(self, op_type):
+    def _has_kernel(self, op_type):
         return op_type not in self.OP_WITHOUT_KERNEL_SET

     def to_string(self, throw_on_error):
@@ -708,7 +706,7 @@
         """
         return self.desc.input(name)

-    def rename_input(self, old_name, new_name):
+    def _rename_input(self, old_name, new_name):
         """
         Rename the `old_name` to `new_name`.
@@ -719,9 +717,9 @@
         Returns:
             None
         """
-        self.desc.rename_input(old_name, new_name)
+        self.desc._rename_input(old_name, new_name)

-    def rename_output(self, old_name, new_name):
+    def _rename_output(self, old_name, new_name):
         """
         Rename the `old_name` to `new_name`.
@@ -732,7 +730,7 @@
         Returns:
             None
         """
-        self.desc.rename_output(old_name, new_name)
+        self.desc._rename_output(old_name, new_name)

     @property
     def input_names(self):
@@ -796,7 +794,7 @@
         """
         return self.desc.attr_type(name)

-    def set_attr(self, name, val):
+    def _set_attr(self, name, val):
         """
         Set the value of attribute by attribute's name.
@@ -829,7 +827,7 @@
                 isinstance(val, core.ProgramDesc):
             self.desc.set_serialized_attr(name, val.serialize_to_string())
         else:
-            self.desc.set_attr(name, val)
+            self.desc._set_attr(name, val)

     @property
     def attr_names(self):
@@ -848,7 +846,7 @@
         """
         return self.desc.attr(name)

-    def block_attr_id(self, name):
+    def _block_attr_id(self, name):
         """
         Get the block attribute's id by name.
@@ -858,9 +856,9 @@
         Returns:
             int: the block index.
         """
-        return self.desc.block_attr_id(name)
+        return self.desc._block_attr_id(name)

-    def block_attr(self, name):
+    def _block_attr(self, name):
         """
         Get the block attribute by name.
@@ -871,11 +869,11 @@
             block: the block attribute.
         """
-        id = self.block_attr_id(name)
+        id = self._block_attr_id(name)
         assert (id >= 0 and id < len(self.block.program.blocks))
         return self.block.program.blocks[id]

-    def blocks_attr(self, name):
+    def _blocks_attr(self, name):
         """
         Get the blocks attribute by name.
@@ -886,13 +884,13 @@
             list: list of the blocks attribute.
         """
         attrs = []
-        for i in self.blocks_attr_ids(name):
+        for i in self._blocks_attr_ids(name):
             assert (i >= 0 and i < len(self.block.program.blocks))
             attrs.append(self.block.program.blocks[i])

         return attrs

-    def blocks_attr_ids(self, name):
+    def _blocks_attr_ids(self, name):
         """
         Get the blocks attribute's ids by name.
@@ -903,7 +901,7 @@
             list: list of the blocks ids.
         """
-        return self.desc.blocks_attr_ids(name)
+        return self.desc._blocks_attr_ids(name)

     def all_attrs(self):
         """
@@ -917,11 +915,11 @@
         for n in attr_names:
             attr_type = self.desc.attr_type(n)
             if attr_type == core.AttrType.BLOCK:
-                attr_map[n] = self.block_attr(n)
+                attr_map[n] = self._block_attr(n)
                 continue

             if attr_type == core.AttrType.BLOCKS:
-                attr_map[n] = self.blocks_attr(n)
+                attr_map[n] = self._blocks_attr(n)
                 continue

             attr_map[n] = self.attr(n)
@@ -1795,7 +1793,7 @@ class Program(object):
             for j in six.moves.range(block.op_size()):
                 op = block.op(j)
                 if op.has_attr('is_test'):
-                    op.set_attr('is_test', True)
+                    op._set_attr('is_test', True)
         res.blocks = [
             Block(res, i) for i in six.moves.range(res.desc.num_blocks())
         ]
@@ -2169,7 +2167,7 @@ def program_guard(main_program, startup_program=None):
         switch_startup_program(startup_program)


-def get_var(name, program=None):
+def _get_var(name, program=None):
     """
     Get a variable by name from the global block of a program.
......
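In the `framework.py` hunks above, a block-valued attribute is stored as an integer block index, so the now-private `_block_attr_id`/`_blocks_attr_ids` return indices and resolution is a plain lookup into `program.blocks`. A condensed restatement of `_block_attr` as it appears in the diff, simplified to a free function:

```python
# a condensed restatement of Operator._block_attr from the diff above;
# op.desc and op.block.program.blocks are as in framework.py.
def block_attr(op, name):
    idx = op.desc._block_attr_id(name)              # stored block index
    assert 0 <= idx < len(op.block.program.blocks)
    return op.block.program.blocks[idx]             # the referenced Block
```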
@@ -1488,7 +1488,7 @@ def wrap_decoder(trg_vocab_size,
     if weight_sharing:
         predict = layers.matmul(
             x=dec_output,
-            y=fluid.get_var(word_emb_param_names[0]),
+            y=fluid.framework._get_var(word_emb_param_names[0]),
             transpose_y=True)
     else:
         predict = layers.fc(input=dec_output,
......
@@ -76,8 +76,8 @@ class TestInferShape(unittest.TestCase):
         mul_op_desc.set_input("X", ["x"])
         mul_op_desc.set_input("Y", ["y"])
         mul_op_desc.set_output("Out", ["out"])
-        mul_op_desc.set_attr("x_num_col_dims", 1)
-        mul_op_desc.set_attr("y_num_col_dims", 1)
+        mul_op_desc._set_attr("x_num_col_dims", 1)
+        mul_op_desc._set_attr("y_num_col_dims", 1)

         mul_op_desc.check_attrs()
         mul_op_desc.infer_shape(block)
......
@@ -38,40 +38,40 @@ class TestOpDesc(unittest.TestCase):
         self.assertEqual(['z'], op.output("Out"))
         self.assertEqual(["Out"], op.output_names())

-        op.set_attr("int_attr", 1)
+        op._set_attr("int_attr", 1)
         self.assertEqual(1, op.attr("int_attr"))
         self.assertTrue(op.has_attr("int_attr"))
         self.assertEqual(core.AttrType.INT, op.attr_type("int_attr"))

-        op.set_attr("float_attr", -1.32)
+        op._set_attr("float_attr", -1.32)
         self.assertAlmostEqual(-1.32, op.attr("float_attr"), delta=1e-4)
         self.assertTrue(op.has_attr("float_attr"))

-        op.set_attr("bool_attr", False)
+        op._set_attr("bool_attr", False)
         self.assertFalse(op.attr("bool_attr"))

-        op.set_attr("string_attr", "abc")
+        op._set_attr("string_attr", "abc")
         self.assertEqual("abc", op.attr("string_attr"))
         self.assertTrue(op.has_attr("string_attr"))

-        op.set_attr("ints_attr", [1, 2, 3])
+        op._set_attr("ints_attr", [1, 2, 3])
         self.assertEqual([1, 2, 3], op.attr("ints_attr"))

         expected = [1.2, 2.3, 3.4]
-        op.set_attr("floats_attr", expected)
+        op._set_attr("floats_attr", expected)
         for e, a in zip(expected, op.attr("floats_attr")):
             self.assertAlmostEqual(e, a, delta=1e-4)

-        op.set_attr("strings_attr", ["a", "b", "c"])
+        op._set_attr("strings_attr", ["a", "b", "c"])
         self.assertEqual(["a", "b", "c"], op.attr("strings_attr"))

-        op.set_attr("bools_attr", [True, False, True])
+        op._set_attr("bools_attr", [True, False, True])
         self.assertEqual([True, False, True], op.attr("bools_attr"))

         self.assertEqual(8, len(op.attr_names()))

-        op.set_block_attr("block_attr", program_desc.block(0))
-        self.assertEqual(0, op.block_attr_id("block_attr"))
+        op.set_block_attr("_block_attr", program_desc.block(0))
+        self.assertEqual(0, op._block_attr_id("_block_attr"))

         mul_op = block.append_op()
         mul_op.set_type("mul")
......
@@ -128,7 +128,7 @@ def op_to_code(op):
         attr_type = op.desc.attr_type(name)
         if attr_type == core.AttrType.BLOCK:
             a = "{name} = block[{value}]".format(
-                name=name, type=attr_type, value=op.block_attr_id(name))
+                name=name, type=attr_type, value=op._block_attr_id(name))
             attrs_str += a
             if i != len(attr_names) - 1:
                 attrs_str += ", "
@@ -136,7 +136,7 @@
         if attr_type == core.AttrType.BLOCKS:
             a = "{name} = blocks{value}".format(
-                name=name, type=attr_type, value=op.blocks_attr_ids(name))
+                name=name, type=attr_type, value=op._blocks_attr_ids(name))
             attrs_str += a
             if i != len(attr_names) - 1:
                 attrs_str += ", "
......
@@ -668,7 +668,7 @@ in a single call.")
                 __clone_lr_op_sub_block__(cloned_op, program, new_sub_block)

             # reset the block of op
-            op.set_attr('sub_block', new_sub_block)
+            op._set_attr('sub_block', new_sub_block)

         # append lr decay ops to the child block if exists
         lr_ops = self._get_lr_ops()
@@ -864,7 +864,7 @@ to transpile() call.")
                     if op.type in [
                             "gaussian_random", "fill_constant", "uniform_random"
                     ]:
-                        op.set_attr("shape", list(new_outputs["Out"].shape))
+                        op._set_attr("shape", list(new_outputs["Out"].shape))
                     s_prog.global_block().append_op(
                         type=op.type,
                         inputs=new_inputs,
......
@@ -163,7 +163,7 @@ class InferenceTranspiler(object):
                 next_op = self.block.ops[i + 1]
                 if next_op.type == 'relu':
                     # modify bnorm OP to include relu
-                    current_op.set_attr("fuse_with_relu", True)
+                    current_op._set_attr("fuse_with_relu", True)
                     # remove relu OP
                     self.block._remove_op(i + 1)
             i = i + 1
@@ -377,7 +377,7 @@
             type=old_var.type,
             dtype=old_var.dtype,
             shape=old_var.shape)
-        op.rename_input(old_param_name, new_param_name)
+        op._rename_input(old_param_name, new_param_name)
         self.scope.var(new_param_name)

         tensor = self.scope.find_var(new_param_name).get_tensor()
@@ -463,7 +463,7 @@
             current_op = self.block.ops[i]
             for input_arg in current_op.input_arg_names:
                 if input_arg in self.input_map:
-                    current_op.rename_input(input_arg,
+                    current_op._rename_input(input_arg,
                                             self.input_map[input_arg])

     def _remove_unused_var(self):
......
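Both transpiler hunks end with the same remap loop: after fusion rewrites variables, every op's `input_arg_names` is walked and anything recorded in `self.input_map` is routed through the now-private `_rename_input`. A standalone sketch of the loop's effect, with dict-based ops standing in for the real `OpDesc` mutation:

```python
# a standalone sketch of the transpilers' final remap loop; input_map
# records old -> new variable names produced by earlier fusion steps.
def adjust_inputs(ops, input_map):
    for op in ops:
        # the real code calls current_op._rename_input(arg, input_map[arg])
        op['inputs'] = [input_map.get(arg, arg) for arg in op['inputs']]

ops = [{'type': 'relu', 'inputs': ['batch_norm_0.tmp_2']}]
adjust_inputs(ops, {'batch_norm_0.tmp_2': 'conv2d_0.tmp_1'})
print(ops)   # [{'type': 'relu', 'inputs': ['conv2d_0.tmp_1']}]
```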