Unverified commit db67d60e, authored by Wu Yi, committed by GitHub

Remove block api (#12107)

* remove block api

* remove clone_variable

* hide block inner apis

* update

* fix tests
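
The change is mechanical: Block and Program helpers intended to be framework-internal gain a leading underscore, and every call site is updated. A minimal sketch of the affected surface, compiled from the diffs below (`prog` and `block` are hypothetical placeholders, not part of this commit):

```
import paddle.fluid as fluid

prog = fluid.Program()        # hypothetical Program, for illustration only
block = prog.global_block()   # global_block() stays public

# Methods hidden by this commit (old public name -> new internal name):
#   block.prepend_op(...)        -> block._prepend_op(...)
#   block.insert_op(i, ...)      -> block._insert_op(i, ...)
#   block.remove_op(i)           -> block._remove_op(i)
#   block.remove_var(name)       -> block._remove_var(name)
#   block.rename_var(old, new)   -> block._rename_var(old, new)
#   block.var_recursive(name)    -> block._var_recursive(name)
#   block.clone_variable(v)      -> block._clone_variable(v)
#   block.slice_ops(a, b)        -> block._slice_ops(a, b)
#   prog.sync_with_cpp()         -> prog._sync_with_cpp()
#   prog.copy_param_info_from(p) -> prog._copy_param_info_from(p)
```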
Parent 866fcb0c
......@@ -210,7 +210,7 @@ def train_parallel(avg_loss, infer_prog, optimizer, train_reader, test_reader,
# generate fake:
if args.use_fake_data:
for var in feed_var_list:
v = startup_prog.global_block().clone_variable(var)
v = startup_prog.global_block()._clone_variable(var)
var.persistable = True
v.persistable = True
......
......@@ -98,13 +98,13 @@ class Block(objects):
def append_operator(self, ...):
self.ops.append(Operator(self, ...))
def prepend_operator(self, ...): # Parameter's ctor prepends initialize operators.
def _prepend_operator(self, ...): # Parameter's ctor prepends initialize operators.
self.ops.prepend(Operator(self, ...))
```
`create_parameter` is necessary because parameters are global variables, defined in the global block, but can be created in some sub-blocks. For example, an FC layer in the step block of an RNN operator.
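A minimal sketch of that behavior, continuing the pseudocode above (not the exact Fluid implementation): even when `create_parameter` is called on a sub-block, the resulting `Parameter` is registered on the global block.
```
def create_parameter(self, *args, **kwargs):
    # Parameters always live in the global block, regardless of which
    # sub-block the creating layer (e.g. an FC inside an RNN step
    # block) belongs to.
    global_block = self.program.global_block()
    return Parameter(global_block, *args, **kwargs)
```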
`prepend_operator` is necessary because the constructor of `Parameter` needs to create the initialize (or load) operator of the parameter, and would like to put it in the *preamble* of the global block.
`_prepend_operator` is necessary because the constructor of `Parameter` needs to create the initialize (or load) operator of the parameter, and would like to put it in the *preamble* of the global block.
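For example, the initializers touched later in this commit prepend their fill ops so a parameter is written before any operator reads it; a rough sketch modeled on the `ConstantInitializer` hunk below (attribute values abbreviated, `block` and `var` assumed from that context):
```
op = block._prepend_op(
    type="fill_constant",
    outputs={"Out": var},
    attrs={"shape": var.shape,
           "dtype": int(var.dtype),
           "value": 0.0})  # placeholder constant value
```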
### Operator
......
......@@ -78,7 +78,7 @@ def error_clip_callback(block, context):
op_desc = block.desc.op(block.desc.op_size() - 1)
for grad_n in filter(lambda n: grad_to_var.has_key(n),
op_desc.output_arg_names()):
fwd_var = block.var_recursive(grad_to_var[grad_n])
fwd_var = block._var_recursive(grad_to_var[grad_n])
error_clip = getattr(fwd_var, "error_clip", None)
if not (error_clip is None or isinstance(error_clip,
BaseErrorClipAttr)):
......
......@@ -118,7 +118,7 @@ class Float16Transpiler:
for var in self.block.vars.keys():
if var not in args:
self.block.remove_var(var)
self.block._remove_var(var)
def _modify_feed_fetch(self):
'''
......@@ -165,7 +165,7 @@ class Float16Transpiler:
dtype=core.VarDesc.VarType.FP16,
shape=var.shape,
persistable=var.persistable)
self.block.insert_op(
self.block._insert_op(
i + 1,
type="cast",
inputs={"X": var},
......@@ -188,7 +188,7 @@ class Float16Transpiler:
persistable=var.persistable)
find_op(var)
var.op.rename_output(var_name, tmp_var_name)
self.block.insert_op(
self.block._insert_op(
i,
type="cast",
inputs={"X": tmp_var},
......@@ -253,4 +253,4 @@ class Float16Transpiler:
# old var will be replaced by the fp16 var in program desc
self.input_map[var.name] = fp16_var_name
self.block.remove_var(var.name)
self.block._remove_var(var.name)
......@@ -145,14 +145,14 @@ void BindBlockDesc(pybind11::module *m) {
.def_property_readonly("id", &pd::BlockDesc::ID)
.def_property_readonly("parent", &pd::BlockDesc::Parent)
.def("get_forward_block_idx", &pd::BlockDesc::ForwardBlockID)
.def("set_forward_block_idx", &pd::BlockDesc::SetForwardBlockID)
.def("_set_forward_block_idx", &pd::BlockDesc::SetForwardBlockID)
.def("append_op", &pd::BlockDesc::AppendOp,
pybind11::return_value_policy::reference)
.def("prepend_op", &pd::BlockDesc::PrependOp,
.def("_prepend_op", &pd::BlockDesc::PrependOp,
pybind11::return_value_policy::reference)
.def("insert_op", &pd::BlockDesc::InsertOp,
.def("_insert_op", &pd::BlockDesc::InsertOp,
pybind11::return_value_policy::reference)
.def("remove_op", &pd::BlockDesc::RemoveOp)
.def("_remove_op", &pd::BlockDesc::RemoveOp)
.def("var",
[](pd::BlockDesc &self, pybind11::bytes byte_name) {
std::string name = byte_name;
......@@ -165,7 +165,7 @@ void BindBlockDesc(pybind11::module *m) {
return self.HasVar(name);
},
pybind11::return_value_policy::reference)
.def("rename_var",
.def("_rename_var",
[](pd::BlockDesc &self, const pybind11::bytes &byte_name,
const pybind11::bytes &byte_name_new) {
std::string name = byte_name;
......@@ -189,7 +189,7 @@ void BindBlockDesc(pybind11::module *m) {
return self.FindVarRecursive(name);
},
pybind11::return_value_policy::reference)
.def("remove_var",
.def("_remove_var",
[](pd::BlockDesc &self, pybind11::bytes byte_name) {
std::string name = byte_name;
return self.RemoveVar(name);
......
......@@ -328,7 +328,7 @@ def _append_backward_ops_(block,
if op.has_attr("sub_block"):
sub_block = program.block(op.block_attr("sub_block"))
grad_sub_block = program.create_block()
grad_sub_block.set_forward_block_idx(sub_block.idx)
grad_sub_block._set_forward_block_idx(sub_block.idx)
cb = _callback_lookup_(op)
if cb is not None:
if callbacks is None:
......@@ -571,7 +571,7 @@ def append_backward(loss, parameter_list=None, no_grad_set=None,
_append_backward_vars_(root_block, fwd_op_num, grad_to_var, grad_info_map)
program.current_block_idx = current_block_idx
program.sync_with_cpp()
program._sync_with_cpp()
# FIXME(zcd): prevent loss.grad optimized by mem_opt.
loss.block.var(_append_grad_suffix_(loss.name)).persistable = True
......@@ -744,7 +744,7 @@ def calc_gradient(targets, inputs, target_gradients=None, no_grad_set=None):
_rename_grad_(block, fwd_op_num, grad_to_var, target_grad_map)
_append_backward_vars_(block, fwd_op_num, grad_to_var, grad_info_map)
prog.sync_with_cpp()
prog._sync_with_cpp()
grad_vars = []
for input_var in inputs:
......
......@@ -82,7 +82,7 @@ def error_clip_callback(block, context):
op_desc = block.desc.op(block.desc.op_size() - 1)
for grad_n in filter(lambda n: grad_to_var.has_key(n),
op_desc.output_arg_names()):
fwd_var = block.var_recursive(grad_to_var[grad_n])
fwd_var = block._var_recursive(grad_to_var[grad_n])
error_clip = getattr(fwd_var, "error_clip", None)
if not (error_clip is None or isinstance(error_clip,
BaseErrorClipAttr)):
......
......@@ -69,8 +69,10 @@ class Go(BlockGuard):
parent_block.append_op(
type='go',
inputs={
'X':
[parent_block.var_recursive(x_name) for x_name in x_name_list]
'X': [
parent_block._var_recursive(x_name)
for x_name in x_name_list
]
},
outputs={},
attrs={'sub_block': go_block})
......@@ -259,7 +261,7 @@ class Select(BlockGuard):
if var_name in intermediate
]
X = [select_block.var_recursive(x_name) for x_name in params]
X = [select_block._var_recursive(x_name) for x_name in params]
# Needs to be used by `equal` inside the cases block.
X.append(self.case_to_execute)
......
......@@ -309,7 +309,7 @@ class Executor(object):
if not has_feed_operators(global_block, feed, feed_var_name):
for i, name in enumerate(feed):
out = global_block.var(name)
global_block.prepend_op(
global_block._prepend_op(
type='feed',
inputs={'X': [feed_var]},
outputs={'Out': [out]},
......
......@@ -32,7 +32,6 @@ except Exception, e:
import unique_name
__all__ = [
'Block',
'Variable',
'Program',
'Operator',
......@@ -447,7 +446,7 @@ class Operator(object):
Notes:
The constructor of operator should not be invoked directly. Use
Block.append_op or Block.prepend_op instead.
Block.append_op or Block._prepend_op instead.
Examples:
.. code-block:: python
......@@ -870,7 +869,7 @@ class Block(object):
def forward_block_idx(self):
return self.desc.get_forward_block_idx()
def set_forward_block_idx(self, idx):
def _set_forward_block_idx(self, idx):
"""
Set the forward block Idx.
......@@ -880,7 +879,7 @@ class Block(object):
Returns:
None
"""
self.desc.set_forward_block_idx(idx)
self.desc._set_forward_block_idx(idx)
@property
def idx(self):
......@@ -909,7 +908,7 @@ class Block(object):
raise ValueError("var %s not in this block" % name)
return v
def var_recursive(self, name):
def _var_recursive(self, name):
"""
Get a Variable by name from this block recursively.
......@@ -951,9 +950,9 @@ class Block(object):
raise ValueError("Var {0} is not found recursively".format(name))
def all_parameters(self):
return list(self.iter_parameters())
return list(self._iter_parameters())
def iter_parameters(self):
def _iter_parameters(self):
return (item[1] for item in self.vars.iteritems()
if isinstance(item[1], Parameter))
......@@ -966,7 +965,7 @@ class Block(object):
def has_var(self, name):
return name in self.vars
def rename_var(self, name, new_name):
def _rename_var(self, name, new_name):
"""
Rename variable in vars and ops' inputs and outputs
......@@ -1000,8 +999,8 @@ class Block(object):
else:
raise ValueError("unsupported var type: %s", type(v))
orig_var_type = v.type
self.desc.rename_var(name, new_name)
# NOTE: v is destroyed by C++ after calling rename_var.
self.desc._rename_var(name, new_name)
# NOTE: v is destroyed by C++ after calling _rename_var.
d = self.desc.find_var(new_name)
if var_type == "Parameter":
var = Parameter(
......@@ -1024,16 +1023,16 @@ class Block(object):
error_clip=error_clip,
stop_gradient=stop_gradient)
# rename the python side, sync_with_cpp will only add
# rename the python side, _sync_with_cpp will only add
# new vars/ops to python side.
self.vars[new_name] = var
del self.vars[name]
self.sync_with_cpp()
self._sync_with_cpp()
return var
def remove_var(self, name):
self.sync_with_cpp()
self.desc.remove_var(name)
def _remove_var(self, name):
self._sync_with_cpp()
self.desc._remove_var(name)
del self.vars[name]
def create_parameter(self, *args, **kwargs):
......@@ -1055,7 +1054,7 @@ class Block(object):
self.ops.append(op)
return op
def insert_op(self, index, *args, **kwargs):
def _insert_op(self, index, *args, **kwargs):
"""
Insert an Operator according to the given arguments.
......@@ -1065,13 +1064,13 @@ class Block(object):
Returns:
Operator: the inserted Operator.
"""
self.sync_with_cpp()
op_desc = self.desc.insert_op(index)
self._sync_with_cpp()
op_desc = self.desc._insert_op(index)
op = Operator(block=self, desc=op_desc, *args, **kwargs)
self.ops.insert(index, op)
return op
def remove_op(self, index):
def _remove_op(self, index):
"""
Remove the operator at the given position.
......@@ -1081,11 +1080,11 @@ class Block(object):
Returns:
None
"""
self.sync_with_cpp()
self.desc.remove_op(index, index + 1)
self._sync_with_cpp()
self.desc._remove_op(index, index + 1)
del self.ops[index]
def slice_ops(self, start, end):
def _slice_ops(self, start, end):
"""
Return the Operators between start and end.
......@@ -1098,13 +1097,13 @@ class Block(object):
"""
return self.ops[start:end]
def prepend_op(self, *args, **kwargs):
op_desc = self.desc.prepend_op()
def _prepend_op(self, *args, **kwargs):
op_desc = self.desc._prepend_op()
op = Operator(self, op_desc, *args, **kwargs)
self.ops.insert(0, op)
return op
def sync_with_cpp(self):
def _sync_with_cpp(self):
"""
Sync from the desc on the c++ end. This method is used to synchronize
the c++ desc instance generated by backward.
......@@ -1170,7 +1169,7 @@ class Block(object):
for index in range(len(self.ops)):
assert self.ops[index].desc == ops_in_cpp[index]
def copy_param_info_from(self, other):
def _copy_param_info_from(self, other):
"""
Copy the information of parameters from the other block.
......@@ -1185,12 +1184,13 @@ class Block(object):
None
"""
if not isinstance(other, Block):
raise TypeError("copy_param_info_from should be invoked with Block")
for p in other.iter_parameters():
raise TypeError(
"_copy_param_info_from should be invoked with Block")
for p in other._iter_parameters():
assert isinstance(p, Parameter)
v = self.vars.get(p.name, None)
if v is None:
raise ValueError("copy_param_info_from should be invoked with "
raise ValueError("_copy_param_info_from should be invoked with "
"same topology")
assert isinstance(v, Variable)
new_p = Parameter(
......@@ -1208,7 +1208,7 @@ class Block(object):
name=v.name)
self.vars[new_p.name] = new_p
def clone_variable(self, var):
def _clone_variable(self, var):
"""
Clone a variable into current block.
......@@ -1484,9 +1484,9 @@ class Program(object):
p = Program()
p.desc = core.ProgramDesc(self.desc)
p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
p.sync_with_cpp()
p._sync_with_cpp()
p.copy_param_info_from(self)
p._copy_param_info_from(self)
p.copy_data_info_from(self)
return p
......@@ -1536,7 +1536,7 @@ class Program(object):
res = Program()
res.desc = core.prune(self.desc, targets_idx)
res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
res.sync_with_cpp()
res._sync_with_cpp()
return res
def inference_optimize(self):
......@@ -1562,7 +1562,7 @@ class Program(object):
if op.has_attr('is_test'):
op.set_attr('is_test', True)
res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
res.sync_with_cpp()
res._sync_with_cpp()
return res
@staticmethod
......@@ -1582,7 +1582,7 @@ class Program(object):
p = Program()
p.desc = core.ProgramDesc(binary_str)
p.blocks = [Block(p, i) for i in xrange(p.desc.num_blocks())]
p.sync_with_cpp()
p._sync_with_cpp()
return p
@property
......@@ -1662,7 +1662,7 @@ class Program(object):
"""
self.current_block_idx = self.current_block().parent_idx
def sync_with_cpp(self):
def _sync_with_cpp(self):
"""
Synchronize Python instance to its binding C++ object instance.
If the program is modified in C++ space, this method should be invoked.
......@@ -1676,9 +1676,9 @@ class Program(object):
for block_idx in range(len(self.blocks), self.desc.num_blocks()):
self.blocks.append(Block(self, block_idx))
for block in self.blocks:
block.sync_with_cpp()
block._sync_with_cpp()
def copy_param_info_from(self, other):
def _copy_param_info_from(self, other):
"""
Copy the information of parameters from other program.
......@@ -1692,13 +1692,13 @@ class Program(object):
None
"""
if not isinstance(other, Program):
raise TypeError("copy_param_info_from should be invoked with "
raise TypeError("_copy_param_info_from should be invoked with "
"Program")
if len(self.blocks) != len(other.blocks):
raise ValueError("copy_param_info_from should be invoked with two "
raise ValueError("_copy_param_info_from should be invoked with two "
"program, with represent the same topology")
self.global_block().copy_param_info_from(other.global_block())
self.global_block()._copy_param_info_from(other.global_block())
def copy_data_info_from(self, other):
"""
......@@ -1714,11 +1714,11 @@ class Program(object):
None
"""
if not isinstance(other, Program):
raise TypeError("copy_param_info_from should be invoked with "
raise TypeError("_copy_param_info_from should be invoked with "
"Program")
if len(self.blocks) != len(other.blocks):
raise ValueError("copy_param_info_from should be invoked with two "
raise ValueError("_copy_param_info_from should be invoked with two "
"program, with represent the same topology")
for var in other.global_block().vars.itervalues():
if var.is_data:
......
......@@ -148,7 +148,7 @@ class ConstantInitializer(Initializer):
assert isinstance(var, framework.Variable)
assert isinstance(block, framework.Block)
# Initialization Ops should be prepended and not appended
op = block.prepend_op(
op = block._prepend_op(
type="fill_constant",
outputs={"Out": var},
attrs={
......@@ -202,7 +202,7 @@ class UniformInitializer(Initializer):
# Initialization Ops should be prepended and not appended
if self._seed == 0:
self._seed = block.program.random_seed
op = block.prepend_op(
op = block._prepend_op(
type="uniform_random",
outputs={"Out": var},
attrs={
......@@ -256,7 +256,7 @@ class NormalInitializer(Initializer):
# Initialization Ops should be prepended and not appended
if self._seed == 0:
self._seed = block.program.random_seed
op = block.prepend_op(
op = block._prepend_op(
type="gaussian_random",
outputs={"Out": var},
attrs={
......@@ -346,7 +346,7 @@ class XavierInitializer(Initializer):
if self._uniform:
limit = np.sqrt(6.0 / float(fan_in + fan_out))
op = block.prepend_op(
op = block._prepend_op(
type="uniform_random",
outputs={"Out": var},
attrs={
......@@ -359,7 +359,7 @@ class XavierInitializer(Initializer):
else:
std = np.sqrt(2.0 / float(fan_in + fan_out))
op = block.prepend_op(
op = block._prepend_op(
type="gaussian_random",
outputs={"Out": var},
attrs={
......@@ -444,7 +444,7 @@ class MSRAInitializer(Initializer):
if self._uniform:
limit = np.sqrt(6.0 / float(fan_in))
op = block.prepend_op(
op = block._prepend_op(
type="uniform_random",
outputs={"Out": var},
attrs={
......@@ -457,7 +457,7 @@ class MSRAInitializer(Initializer):
else:
std = np.sqrt(2.0 / float(fan_in))
op = block.prepend_op(
op = block._prepend_op(
type="gaussian_random",
outputs={"Out": var},
attrs={
......
......@@ -523,7 +523,7 @@ def prepend_feed_ops(inference_program,
for i, name in enumerate(feed_target_names):
out = global_block.var(name)
global_block.prepend_op(
global_block._prepend_op(
type='feed',
inputs={'X': [feed_var]},
outputs={'Out': [out]},
......@@ -625,7 +625,7 @@ def save_inference_model(dirname,
for i, op in enumerate(global_block.ops):
op.desc.set_is_target(False)
if op.type == "feed" or op.type == "fetch":
global_block.remove_op(i)
global_block._remove_op(i)
copy_program.desc.flush()
pruned_program = copy_program.prune(targets=target_vars)
......@@ -874,7 +874,7 @@ def get_test_program(filelist, program=None, startup_program=None):
main_block = program.global_block()
for var in main_block.vars.values():
if var.type == core.VarDesc.VarType.READER:
main_block.rename_var(
main_block._rename_var(
str(var.name), str(_get_test_reader_name(var.name)))
for op in main_block.ops:
......@@ -883,7 +883,7 @@ def get_test_program(filelist, program=None, startup_program=None):
if op.type == "create_multi_pass_reader":
test_op.set_attr("pass_num", 1)
startup_program.sync_with_cpp()
program.sync_with_cpp()
startup_program._sync_with_cpp()
program._sync_with_cpp()
return program
......@@ -730,8 +730,10 @@ class While(object):
parent_block.append_op(
type='while',
inputs={
'X':
[parent_block.var_recursive(x_name) for x_name in x_name_list],
'X': [
parent_block._var_recursive(x_name)
for x_name in x_name_list
],
'Condition': [self.cond_var]
},
outputs={'Out': out_vars,
......@@ -1259,7 +1261,7 @@ class ConditionalBlock(object):
input_set = set([ipt.name for ipt in self.inputs])
param_list = [
parent_block.var_recursive(each_name) for each_name in params
parent_block._var_recursive(each_name) for each_name in params
if each_name not in input_set
]
......
......@@ -4367,7 +4367,7 @@ def autoincreased_step_counter(counter_name=None, begin=1, step=1):
helper.set_variable_initializer(
counter, initializer=Constant(
value=begin - 1, force_cpu=True))
helper.main_program.global_block().prepend_op(
helper.main_program.global_block()._prepend_op(
type='increment',
inputs={'X': [counter]},
outputs={'Out': [counter]},
......
......@@ -240,7 +240,7 @@ class Optimizer(object):
self._finish_update(loss.block, parameters_and_grads)
end = len(global_block.ops)
return global_block.slice_ops(start, end)
return global_block._slice_ops(start, end)
def minimize(self,
loss,
......
......@@ -152,7 +152,7 @@ class ParallelExecutor(object):
self.executor = core.ParallelExecutor(
self._places,
set([
p.name for p in main.global_block().iter_parameters()
p.name for p in main.global_block()._iter_parameters()
if not p.stop_gradient
]),
set(self.persistable_vars), main.desc, loss_name
......
......@@ -181,13 +181,13 @@ class TestBlockDesc(unittest.TestCase):
self.assertIsNotNone(block)
op1 = block.append_op()
op2 = block.append_op()
op0 = block.prepend_op()
op0 = block._prepend_op()
all_ops = []
for idx in xrange(0, block.op_size()):
all_ops.append(block.op(idx))
self.assertEqual(all_ops, [op0, op1, op2])
def test_remove_op(self):
def test__remove_op(self):
program = Program()
program_desc = program.desc
self.assertIsNotNone(program_desc)
......@@ -201,8 +201,8 @@ class TestBlockDesc(unittest.TestCase):
op1.set_type("test")
op2.set_type("test")
block.remove_op(1, 2)
program.sync_with_cpp()
block._remove_op(1, 2)
program._sync_with_cpp()
all_ops = []
for idx in xrange(0, block.op_size()):
......
......@@ -17,10 +17,10 @@ def delete_ops(block, ops):
try:
start = list(block.ops).index(ops[0])
end = list(block.ops).index(ops[-1])
[block.remove_op(start) for _ in xrange(end - start + 1)]
[block._remove_op(start) for _ in xrange(end - start + 1)]
except Exception, e:
raise e
block.program.sync_with_cpp()
block.program._sync_with_cpp()
def find_op_by_input_arg(block, arg_name):
......
......@@ -243,7 +243,7 @@ class DistributeTranspiler(object):
AssertionError("Can not insert the send op by original "
"variable name :", orig_varname)
program.global_block().insert_op(
program.global_block()._insert_op(
index=index + 1,
type="send",
inputs={"X": splited_vars},
......@@ -429,7 +429,7 @@ class DistributeTranspiler(object):
# clone vars
for var in origin_block.vars:
new_sub_block.clone_variable(var)
new_sub_block._clone_variable(var)
# clone ops
for origin_op in origin_block.ops:
......@@ -525,7 +525,7 @@ class DistributeTranspiler(object):
outputs={},
attrs=attrs)
pserver_program.sync_with_cpp()
pserver_program._sync_with_cpp()
return pserver_program
def get_startup_program(self, endpoint, pserver_program):
......@@ -557,7 +557,7 @@ class DistributeTranspiler(object):
pserver_vars = pserver_program.global_block().vars
created_var_map = dict()
for _, var in pserver_vars.iteritems():
tmpvar = s_prog.global_block().clone_variable(var)
tmpvar = s_prog.global_block()._clone_variable(var)
created_var_map[var.name] = tmpvar
# 2. rename op outputs
......@@ -760,7 +760,7 @@ class DistributeTranspiler(object):
self.all_prefetch_output_vars.append(prefetch_output_vars)
# insert split_ids_op
program.global_block().insert_op(
program.global_block()._insert_op(
index=lookup_table_op_index,
type="split_ids",
inputs={
......@@ -772,7 +772,7 @@ class DistributeTranspiler(object):
outputs={"Out": prefetch_input_vars})
# insert prefetch_op
program.global_block().insert_op(
program.global_block()._insert_op(
index=lookup_table_op_index + 1,
type="prefetch",
inputs={'X': prefetch_input_vars},
......@@ -783,7 +783,7 @@ class DistributeTranspiler(object):
})
# insert concat_op
program.global_block().insert_op(
program.global_block()._insert_op(
index=lookup_table_op_index + 2,
type="merge_ids",
inputs={
......@@ -814,14 +814,14 @@ class DistributeTranspiler(object):
if table_grad_name in op.output_arg_names:
op_index = list(all_ops).index(op)
# insert split_ids_op
program.global_block().insert_op(
program.global_block()._insert_op(
index=op_index + 1,
type="split_ids",
inputs={
'Ids': [program.global_block().vars[table_grad_name]]
},
outputs={"Out": self.trainer_side_table_grad_list})
program.global_block().insert_op(
program.global_block()._insert_op(
index=op_index + 2,
type="send",
inputs={'X': self.trainer_side_table_grad_list},
......@@ -880,7 +880,7 @@ class DistributeTranspiler(object):
persistable=True)
# parameter must be selected rows
param_var.desc.set_type(core.VarDesc.VarType.SELECTED_ROWS)
grad_var = pserver_program.global_block().clone_variable(
grad_var = pserver_program.global_block()._clone_variable(
self.origin_program.global_block().vars[grad_var_name(
self.table_name)])
......@@ -920,7 +920,7 @@ class DistributeTranspiler(object):
if not splited_grad_name.startswith(origin_grad_name):
raise ValueError("origin_grad_var: " + splited_grad_name +
" grad_var:" + grad_var.name)
grad_var = pserver_program.global_block().rename_var(
grad_var = pserver_program.global_block()._rename_var(
origin_grad_name, splited_grad_name)
lr_var = pserver_program.global_block().vars[table_opt_op.input(
......@@ -996,7 +996,7 @@ class DistributeTranspiler(object):
if self.sync_mode and add_trainer_suffix:
new_var_name = "%s.trainer_%d" % \
(orig_var.name, self.trainer_id)
program.global_block().rename_var(varname, new_var_name)
program.global_block()._rename_var(varname, new_var_name)
var_mapping[varname] = \
[program.global_block().var(new_var_name)]
else:
......@@ -1030,8 +1030,7 @@ class DistributeTranspiler(object):
type=orig_var.type,
shape=splited_shape) # flattened splited var
var_mapping[varname].append(var)
program.global_block().sync_with_cpp()
program.global_block()._sync_with_cpp()
return var_mapping
def create_splited_vars(self, source_var, block, tag):
......@@ -1059,7 +1058,7 @@ class DistributeTranspiler(object):
height_sections = []
for v in splited_vars:
height_sections.append(v.shape[0])
program.global_block().insert_op(
program.global_block()._insert_op(
index=index + 1,
type="split_selected_rows",
inputs={"X": orig_var},
......@@ -1069,7 +1068,7 @@ class DistributeTranspiler(object):
sections = []
for v in splited_vars:
sections.append(v.shape[0])
program.global_block().insert_op(
program.global_block()._insert_op(
index=index + 1,
type="split_byref",
inputs={"X": orig_var},
......@@ -1258,7 +1257,7 @@ class DistributeTranspiler(object):
varlist = [varlist]
for var in varlist:
if var not in program.global_block().vars:
block.clone_variable(var)
block._clone_variable(var)
outputs = self._get_output_map_from_op(
self.origin_program.global_block().vars, op)
......@@ -1267,7 +1266,7 @@ class DistributeTranspiler(object):
varlist = [varlist]
for var in varlist:
if var not in program.global_block().vars:
block.clone_variable(var)
block._clone_variable(var)
return block.append_op(
type=op.type, inputs=inputs, outputs=outputs, attrs=op.attrs)
......@@ -1305,7 +1304,7 @@ class DistributeTranspiler(object):
if grad_block:
outputs[key] = grad_block
elif not program.global_block().vars.has_key(var.name):
program.global_block().clone_variable(var)
program.global_block()._clone_variable(var)
return optimize_block.append_op(
type=opt_op.type,
......
......@@ -95,7 +95,7 @@ class InferenceTranspiler(object):
# modify bnorm OP to include relu
current_op.set_attr("fuse_with_relu", True)
# remove relu OP
self.block.remove_op(i + 1)
self.block._remove_op(i + 1)
i = i + 1
self._remove_unused_var()
......@@ -171,7 +171,7 @@ class InferenceTranspiler(object):
# fuse batch_norm
self._fuse_param(current_op, next_op, bias_op, 0)
# remove batch_norm_op
self.block.remove_op(i + 2)
self.block._remove_op(i + 2)
i = i + 1
# conv2d with bias, the next_op.type is elementwise_add
elif (next_op.type == 'elementwise_add'):
......@@ -180,7 +180,7 @@ class InferenceTranspiler(object):
# fuse batch_norm
self._fuse_param(current_op, next_next_op, next_op, 1)
# remove batch_norm_op
self.block.remove_op(i + 2)
self.block._remove_op(i + 2)
i = i + 1
i = i + 1
......@@ -212,7 +212,7 @@ class InferenceTranspiler(object):
y_var = self.block.var(bn_op.input("Bias")[0])
out_var = self.block.var(bn_op.output("Y")[0])
bias_op = self.block.insert_op(
bias_op = self.block._insert_op(
index,
type="elementwise_add",
inputs={"X": x_var,
......@@ -307,4 +307,4 @@ class InferenceTranspiler(object):
for var in self.block.vars.keys():
if var not in args:
self.block.remove_var(var)
self.block._remove_var(var)
......@@ -177,7 +177,7 @@ class ControlFlowGraph(object):
in_diff)
if can_optimize:
index = i + fwd_id + 1 if is_forward else i - self._forward_num + bwd_id + 1
delete_op = block_desc.insert_op(index)
delete_op = block_desc._insert_op(index)
delete_op.set_type("delete_var")
delete_op.set_input("X", can_optimize)
if is_forward:
......