import collections

import numpy as np
from . import core
import proto.framework_pb2 as framework_pb2

__all__ = [
    'Block', 'Variable', 'Program', 'Operator', 'default_startup_program',
    'default_main_program', 'g_startup_program', 'g_main_program'
]


def unique_name(prefix):
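    """Return a process-wide unique name built from ``prefix`` and a
    counter kept on the C++ side, e.g. (counter values illustrative):

        unique_name("fc")  # -> "fc_0"
        unique_name("fc")  # -> "fc_1"
    """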
    uid = core.unique_integer(prefix)  # unique during whole process.
    return "_".join([prefix, str(uid)])


def convert_np_dtype_to_dtype_(np_dtype):
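    """Convert a numpy dtype (or anything ``np.dtype`` accepts, such as the
    string "float32") into the corresponding ``core.DataType`` value:

        convert_np_dtype_to_dtype_(np.float32)  # -> core.DataType.FP32
        convert_np_dtype_to_dtype_("int64")     # -> core.DataType.INT64
    """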
    dtype = np.dtype(np_dtype)
    if dtype == np.float32:
        return core.DataType.FP32
    elif dtype == np.float64:
        return core.DataType.FP64
    elif dtype == np.float16:
        return core.DataType.FP16
    elif dtype == np.int32:
        return core.DataType.INT32
    elif dtype == np.int16:
        return core.DataType.INT16
    elif dtype == np.int64:
        return core.DataType.INT64
    elif dtype == np.bool:
        return core.DataType.BOOL
    else:
        raise ValueError("Not supported numpy dtype " + str(dtype))


def dtype_is_floating(dtype):
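    """Return True if ``dtype`` (a ``core.DataType`` or anything accepted by
    ``convert_np_dtype_to_dtype_``) is FP16, FP32 or FP64.
    """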
    if not isinstance(dtype, core.DataType):
        dtype = convert_np_dtype_to_dtype_(dtype)

    return dtype in [
        core.DataType.FP16, core.DataType.FP32, core.DataType.FP64
    ]


def _debug_string_(proto, throw_on_error=True):
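    """Return the readable protobuf text of ``proto``. If required fields
    are missing and ``throw_on_error`` is set, raise ValueError listing the
    uninitialized fields instead.
    """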
    error_fields = list()
    if not proto.IsInitialized(error_fields) and throw_on_error:
        raise ValueError("{0} are not initialized\nThe message is {1}".format(
            error_fields, proto))
    return proto.__str__()


class Variable(object):
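    """Python wrapper of a variable (``VarDesc``) inside a Block.

    Construction either creates a new ``VarDesc`` or attaches to an existing
    one with the same name; in the latter case the given type, shape, dtype
    and lod_level must match the existing description. A minimal sketch
    (name and shape are illustrative):

        w = Variable(block, name="w", shape=[784, 100], dtype="float32")
    """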
    def __init__(self,
                 block,
                 type=core.VarDesc.VarType.LOD_TENSOR,
Y
Yu Yang 已提交
61 62 63 64
                 name=None,
                 shape=None,
                 dtype=None,
                 lod_level=None,
                 persistable=None,
                 stop_gradient=False,
                 **kwargs):
        self.block = block

        if name is None:
            name = Variable._unique_var_name_()
        is_new_var = False
        self.desc = self.block.desc.find_var(name)

        if self.desc is None:
            self.desc = self.block.desc.var(name)
            is_new_var = True

        if is_new_var:
            self.desc.set_type(type)
        elif self.desc.type() != type:
            raise ValueError("Variable {0} has been created before. The "
                             "previous type is {1}; the new type is {2}. "
                             "They do not match.".format(self.name,
                                                         self.desc.type(), type))

        if shape is not None:
            if is_new_var:
                self.desc.set_shape(shape)
            else:
                old_shape = self.shape
                shape = tuple(shape)
                if shape != old_shape:
                    raise ValueError(
                        "Variable {0} has been created before. The previous "
                        "shape is {1}; the new shape is {2}. They do not "
                        "match.".format(self.name, old_shape, shape))
        if dtype is not None:
            if not isinstance(dtype, core.DataType):
                dtype = convert_np_dtype_to_dtype_(dtype)
            if is_new_var:
                self.desc.set_dtype(dtype)
            else:
                old_dtype = self.dtype
                if dtype != old_dtype:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous data type is {1}; the new "
                                     "data type is {2}. They are not "
                                     "matched.".format(self.name, old_dtype,
                                                       dtype))

        if lod_level is not None:
            if is_new_var:
                self.desc.set_lod_level(lod_level)
            else:
                if lod_level != self.lod_level:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous lod_level is {1}; the new "
                                     "lod_level is {2}. They do not "
                                     "match.".format(self.name, self.lod_level,
                                                     lod_level))
        if persistable is not None:
            if is_new_var:
                self.desc.set_persistable(persistable)
            else:
                if persistable != self.persistable:
                    raise ValueError(
                        "Variable {0} has been created before. "
                        "The previous persistable is {1}; the new "
                        "persistable is {2}. They do not match.".format(
                            self.name, self.persistable, persistable))

        self.block.vars[name] = self
        self.op = None
        self.stop_gradient = stop_gradient

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.VarDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    __repr__ = __str__

    @property
    def persistable(self):
        return self.desc.persistable()

    @persistable.setter
    def persistable(self, p):
        self.desc.set_persistable(p)

    @property
    def name(self):
        return self.desc.name()

    @property
    def shape(self):
        # Convert to a tuple, matching the numpy API.
        return tuple(self.desc.shape())

    @property
    def dtype(self):
        return self.desc.dtype()

    @property
    def lod_level(self):
        return self.desc.lod_level()

    @property
    def type(self):
        return self.desc.type()

    @staticmethod
    def _unique_var_name_():
        prefix = "_generated_var"
        uid = core.unique_integer(prefix)  # unique during whole process.
        return "_".join([prefix, str(uid)])


def get_all_op_protos():
    """
    Get all registered op proto from PaddlePaddle C++ end.
    :return: A list of registered OpProto.
    """
    protostrs = core.get_all_op_protos()
    ret_values = []
    for pbstr in protostrs:
        op_proto = framework_pb2.OpProto.FromString(str(pbstr))
        ret_values.append(op_proto)
    return ret_values


class OpProtoHolder(object):
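    """Process-level singleton that caches every registered OpProto, keyed
    by operator type name. Use it through ``instance()``, e.g. ("mul" is
    just an example of a registered operator):

        proto = OpProtoHolder.instance().get_op_proto("mul")
    """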
    @classmethod
    def instance(cls):
        if not hasattr(cls, '_instance'):
            cls._instance = cls()
        return cls._instance

    def __init__(self):
        assert not hasattr(
            self.__class__,
            '_instance'), 'Please use `instance()` to get OpProtoHolder object!'
        op_protos = get_all_op_protos()
        self.op_proto_map = {}
        for proto in op_protos:
            self.op_proto_map[proto.type] = proto

    def get_op_proto(self, type):
        if type not in self.op_proto_map:
            raise ValueError("Operator \"%s\" has not been registered." % type)
        return self.op_proto_map[type]


class Operator(object):
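    """Python wrapper of an operator (``OpDesc``) inside a Block.

    The constructor checks the given inputs, outputs and attrs against the
    registered OpProto for ``type``, writes them into ``desc``, and for ops
    that have kernels runs var-type and shape inference.
    """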
    def __init__(self,
                 block,
                 desc,
                 type=None,
                 inputs=None,
                 outputs=None,
                 attrs=None):
        self.block = block
        self.desc = desc
        if len(self.desc.type()) != 0:
            return
        if type is None:
            raise ValueError(
                "`type` to initialize an Operator cannot be None.")
        self.desc.set_type(type)
        proto = OpProtoHolder.instance().get_op_proto(type)

        def find_name(var_list, name):
            for var_name in var_list:
                if var_name == name:
                    return True
            return False

        if inputs is not None:
            for in_proto in proto.inputs:
                found = find_name(inputs, in_proto.name)
                assert found or in_proto.dispensable, "Input {} not found".format(
                    in_proto.name)

                if found:
                    in_args = inputs[in_proto.name]
                    if not isinstance(in_args, list):
                        in_args = [in_args]
                    if not in_proto.duplicable and len(in_args) > 1:
                        raise ValueError(
                            "Input %s expects only one input, but %d are given."
                            % (in_proto.name, len(in_args)))
                    in_arg_names = []
                    for arg in in_args:
                        in_arg_names.append(arg.name)
                    self.desc.set_input(in_proto.name, in_arg_names)
                else:
                    self.desc.set_input(in_proto.name, [])

        if outputs is not None:
            given = set(outputs)
            need = set(m.name for m in proto.outputs)
            if given != need:
                raise ValueError(
                    "Incorrect setting for output(s) of operator \"%s\". Need: [%s] Given: [%s]"
                    % (type, ", ".join(str(e) for e in need), ", ".join(
                        str(e) for e in given)))

            for out_proto in proto.outputs:
                out_args = outputs[out_proto.name]
                if not isinstance(out_args, list):
                    out_args = [out_args]
                if not out_proto.duplicable and len(out_args) > 1:
                    raise ValueError(
                        "Output %s expects only one output, but %d are given." %
                        (out_proto.name, len(out_args)))
                out_arg_names = []
                for arg in out_args:
                    out_arg_names.append(arg.name)
                    arg.op = self
                self.desc.set_output(out_proto.name, out_arg_names)

        if attrs is not None:
            if not isinstance(attrs, dict):
                raise TypeError("'attrs' should be a dict.")
            for attr in proto.attrs:
                attr_name = attr.name
                if (attr_name not in attrs) or (attrs[attr_name] is None):
                    continue
                if isinstance(attrs[attr_name], Block):
                    self.desc.set_block_attr(attr_name, attrs[attr_name].desc)
                else:
                    self.desc.set_attr(attr_name, attrs[attr_name])

        self.desc.check_attrs()
        no_kernel_op_set = {
            'feed', 'fetch', 'save', 'load', 'recurrent',
            'rnn_memory_helper_grad', 'conditional_block', 'while'
        }
        if type not in no_kernel_op_set:
            self.desc.infer_var_type(self.block.desc)
            self.desc.infer_shape(self.block.desc)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.OpDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def __str__(self):
        return self.to_string(True)

    __repr__ = __str__

    @property
    def type(self):
        return self.desc.type()

    def input(self, name):
        return self.desc.input(name)

    @property
    def input_names(self):
        return self.desc.input_names()

    def output(self, name):
        return self.desc.output(name)

    @property
    def output_names(self):
        return self.desc.output_names()

    @property
    def idx(self):
        for i, op in enumerate(self.block.ops):
            if op == self:
                return i
        raise ValueError(
            "Can't find op itself in its block. This may be a bug in Paddle.")

    def has_attr(self, name):
        return self.desc.has_attr(name)

    def attr_type(self, name):
        return self.desc.attr_type(name)

    @property
    def attr_names(self):
        return self.desc.attr_names()

    def attr(self, name):
        return self.desc.attr(name)

    def block_attr(self, name):
        return self.desc.block_attr(name)


class Block(object):
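    """Python wrapper of a ``BlockDesc``: holds the block's variables in
    ``self.vars`` and its operators, in execution order, in ``self.ops``.
    """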
    def __init__(self, program, idx):
        self.desc = program.desc.block(idx)
        self.vars = dict()  # var_name --> var
        self.ops = collections.deque()  # operator list
        self.program = program

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.BlockDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    __repr__ = __str__

    @property
    def parent_idx(self):
        return self.desc.parent

    @property
    def idx(self):
        return self.desc.id

    def var(self, name):
        if not isinstance(name, basestring):
            raise TypeError("var name should be a string, but got %s" %
                            type(name))
        v = self.vars.get(name, None)
        if v is None:
            raise ValueError("var %s not in this block" % name)
        return v

    def all_parameters(self):
        return list(self.iter_parameters())

    def iter_parameters(self):
        return (item[1] for item in self.vars.iteritems()
                if isinstance(item[1], Parameter))

    def create_var(self, *args, **kwargs):
        var = Variable(self, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](var, self)
        return var

    def has_var(self, name):
        return name in self.vars

    def create_parameter(self, *args, **kwargs):
        global_block = self.program.global_block()
        param = Parameter(global_block, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](param, self)
        return param

    def append_op(self, *args, **kwargs):
        op_desc = self.desc.append_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.append(op)
        return op

    def prepend_op(self, *args, **kwargs):
        op_desc = self.desc.prepend_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.appendleft(op)
        return op

    def sync_with_cpp(self):
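        """Refresh this block from its C++ ``BlockDesc``: wrap variables and
        operators that were created on the C++ side (e.g. by
        ``Program.append_backward``) and are not yet known to Python.
        """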
        # sync variables from cpp
        for var in self.desc.all_vars():
            if not self.has_var(var.name()):
                self.create_var(name=var.name(), desc=var, type=var.type())

        # sync operators from cpp
        ops_in_cpp = []
        for op_idx in range(0, self.desc.op_size()):
            ops_in_cpp.append(self.desc.op(op_idx))

        if len(self.ops) != 0:
            first_op_in_python = self.ops[0].desc
            last_op_in_python = self.ops[len(self.ops) - 1].desc
            start_index = None
            end_index = None
            for index in range(len(ops_in_cpp)):
                if first_op_in_python == ops_in_cpp[index]:
                    start_index = index
                if last_op_in_python == ops_in_cpp[index]:
                    end_index = index
            assert start_index is not None
            assert end_index is not None
            assert start_index <= end_index
        else:
            start_index = 0
            end_index = -1

        # sync ops prepended to the head of cpp_ops
        for index in range(start_index - 1, -1, -1):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.appendleft(op)

        # sync ops appended to the end of cpp_ops
        for index in range((end_index + 1), len(ops_in_cpp)):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.append(op)

        assert len(self.ops) == len(ops_in_cpp)
        for index in range(len(self.ops)):
            assert self.ops[index].desc == ops_in_cpp[index]

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from the other block.
        Args:
            other(Block): the other block

        Returns:
            None
        """
        if not isinstance(other, Block):
            raise TypeError("copy_param_info_from should be invoked with Block")
        for p in other.iter_parameters():
            assert isinstance(p, Parameter)
            v = self.vars.get(p.name, None)
            if v is None:
                raise ValueError("copy_param_info_from should be invoked with "
                                 "same topology")
            assert isinstance(v, Variable)
            new_p = Parameter(
                block=self,
                shape=v.shape,
                dtype=v.dtype,
                type=v.type,
                lod_level=v.lod_level,
                stop_gradient=p.stop_gradient,
                trainable=p.trainable,
                optimize_attr=p.optimize_attr,
                regularizer=p.regularizer,
                name=v.name)
            self.vars[new_p.name] = new_p


class Program(object):
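    """Python wrapper of a ``ProgramDesc``: a list of Blocks whose first
    element (index 0) is the global block.
    """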
    def __init__(self):
        self.desc = core.ProgramDesc()
        self.blocks = [Block(self, 0)]
        self.current_block_idx = 0

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.ProgramDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def clone(self):
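        """Return a deep copy of this program, including the parameter
        information of the global block.
        """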
        p = Program()
        p.desc = core.ProgramDesc(self.desc)
        p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
        p.sync_with_cpp()
        p.copy_param_info_from(self)
        return p

    def prune(self, targets):
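        """Return a new Program that keeps only the ops needed to compute
        ``targets`` (a Variable/Operator or a list of them); the actual
        pruning is done by ``core.prune`` on the C++ side.
        """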
        if not isinstance(targets, list):
            targets = [targets]
        targets_idx = []
        for t in targets:
            if not isinstance(t, Operator):
                if isinstance(t, Variable):
                    t = t.op
                else:
                    raise ValueError(
                        "Each target of prune() must be a Variable or an "
                        "Operator.")

            targets_idx.append([t.block.idx, t.idx])
        res = Program()
        res.desc = core.prune(self.desc, targets_idx)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    def inference_optimize(self):
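        """Return a new Program transformed for inference by the C++ side
        (``core.inference_optimize``).
        """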
        res = Program()
        res.desc = core.inference_optimize(self.desc)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    @staticmethod
    def parse_from_string(binary_str):
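        """Deserialize a Program from a binary ``ProgramDesc`` string. A
        round-trip sketch (``prog`` is illustrative):

            binary = prog.desc.serialize_to_string()
            prog_copy = Program.parse_from_string(binary)
        """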
        p = Program()
        p.desc = core.ProgramDesc(binary_str)
        p.blocks = [Block(p, i) for i in xrange(p.desc.num_blocks())]
        p.sync_with_cpp()
        return p

    def __repr__(self):
        return str(self)

    def global_block(self):
        return self.blocks[0]

    def block(self, index):
        return self.blocks[index]

    def current_block(self):
        return self.blocks[self.current_block_idx]

    def append_backward(self, target, no_grad_set=None):
        """
        return map(param_name -> (grad_name, block_index, op_index))
        """
        assert isinstance(target, Variable)
        if no_grad_set is None:
            no_grad_set = set()
        try:
            param_to_grad_info = self.desc.append_backward(target.desc,
                                                           no_grad_set)
        except Exception as e:
            raise core.EnforceNotMet(
                str(e) + "\nCurrent protobuf is\n{0}".format(
                    self.to_string(False)))

        self.sync_with_cpp()
        return param_to_grad_info

    def create_block(self):
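        """Append a new block whose parent is the current block, and make
        the new block current.
        """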
        new_block_idx = len(self.blocks)
        self.desc.append_block(self.current_block().desc)
Y
Yu Yang 已提交
598 599 600 601 602 603 604
        self.current_block_idx = new_block_idx
        self.blocks.append(Block(self, self.current_block_idx))
        return self.current_block()

    def rollback(self):
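        """Make the parent of the current block current again."""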
        self.current_block_idx = self.current_block().parent_idx

    def sync_with_cpp(self):
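        """Wrap any blocks created on the C++ side that Python does not know
        about yet, then sync every block with its ``BlockDesc``.
        """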
        for block_idx in range(len(self.blocks), self.desc.num_blocks()):
            self.blocks.append(Block(self, block_idx))
        for block in self.blocks:
            block.sync_with_cpp()

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from another program.
        Args:
            other(Program): the other program

        Returns:
            None
        """
        if not isinstance(other, Program):
            raise TypeError("copy_param_info_from should be invoked with "
                            "Program")

        if len(self.blocks) != len(other.blocks):
            raise ValueError("copy_param_info_from should be invoked with two "
                             "programs which represent the same topology")
        self.global_block().copy_param_info_from(other.global_block())

    def list_vars(self):
        for each_block in self.blocks:
            for each_var in each_block.vars.itervalues():
                yield each_var


class Parameter(Variable):
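    """A trainable Variable: always persistable, with a required shape and
    dtype, plus ``trainable``, ``optimize_attr`` and ``regularizer``
    settings taken from ``kwargs``.
    """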
    def __init__(self, block, shape, dtype, **kwargs):
        if shape is None or dtype is None:
            raise ValueError("Parameter must set shape and dtype")
        if len(shape) == 0:
            raise ValueError("Parameter shape cannot be empty")

        for each in shape:
            if each < 0:
                raise ValueError("Parameter shape should not be related with "
                                 "batch-size")

        Variable.__init__(
            self, block, persistable=True, shape=shape, dtype=dtype, **kwargs)
        self.trainable = kwargs.get('trainable', True)

        self.optimize_attr = kwargs.get('optimize_attr', {'learning_rate': 1.0})

        self.regularizer = kwargs.get('regularizer', None)


# The default main and startup programs are global instances.
g_main_program = Program()
g_startup_program = Program()


def default_startup_program():
    return g_startup_program


def default_main_program():
    return g_main_program