import collections

import numpy as np
from . import core
import proto.framework_pb2 as framework_pb2
import contextlib

__all__ = [
    'Block', 'Variable', 'Program', 'Operator', 'default_startup_program',
    'default_main_program', 'program_guard', 'switch_startup_program',
    'switch_main_program'
]


def unique_name(prefix):
    uid = core.unique_integer(prefix)  # unique during whole process.
    return "_".join([prefix, str(uid)])
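
# Illustrative sketch: assuming core.unique_integer hands out consecutive
# integers per prefix starting at 0, generated names would look like:
#
#   unique_name("fc")  # -> "fc_0"
#   unique_name("fc")  # -> "fc_1"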


def convert_np_dtype_to_dtype_(np_dtype):
    dtype = np.dtype(np_dtype)
    if dtype == np.float32:
        return core.DataType.FP32
    elif dtype == np.float64:
        return core.DataType.FP64
    elif dtype == np.float16:
        return core.DataType.FP16
    elif dtype == np.int32:
        return core.DataType.INT32
    elif dtype == np.int16:
        return core.DataType.INT16
    elif dtype == np.int64:
        return core.DataType.INT64
    elif dtype == np.bool:
        return core.DataType.BOOL
    else:
        raise ValueError("Not supported numpy dtype " + str(dtype))
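
# Illustrative sketch: the helper accepts anything np.dtype() understands,
# e.g. a string or a numpy scalar type:
#
#   convert_np_dtype_to_dtype_('float32')  # -> core.DataType.FP32
#   convert_np_dtype_to_dtype_(np.int64)   # -> core.DataType.INT64
#   convert_np_dtype_to_dtype_(np.uint8)   # raises ValueError (unsupported)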


def dtype_is_floating(dtype):
    if not isinstance(dtype, core.DataType):
        dtype = convert_np_dtype_to_dtype_(dtype)

    return dtype in [
        core.DataType.FP16, core.DataType.FP32, core.DataType.FP64
    ]


def _debug_string_(proto, throw_on_error=True):
    error_fields = list()
    if not proto.IsInitialized(error_fields) and throw_on_error:
        raise ValueError("{0} are not initialized\nThe message is {1}".format(
            error_fields, proto))
    return proto.__str__()


class Variable(object):
    def __init__(self,
                 block,
                 type=core.VarDesc.VarType.LOD_TENSOR,
                 name=None,
                 shape=None,
                 dtype=None,
                 lod_level=None,
                 persistable=None,
                 stop_gradient=False,
                 **kwargs):
        self.block = block

        if name is None:
            name = Variable._unique_var_name_()
        is_new_var = False
        self.desc = self.block.desc.find_var(name)

        if self.desc is None:
            self.desc = self.block.desc.var(name)
            is_new_var = True

        if is_new_var:
            self.desc.set_type(type)
        elif self.desc.type() != type:
            raise ValueError("Variable {0} has been created before. The "
                             "previous type is {1}; the new type is {2}. They"
                             " are not matched".format(self.name,
                                                       self.desc.type(), type))

        if shape is not None:
            if is_new_var:
                self.desc.set_shape(shape)
            else:
                old_shape = self.shape
                shape = tuple(shape)
                if shape != old_shape:
                    raise ValueError(
                        "Variable {0} has been created before. the previous "
                        "shape is {1}; the new shape is {2}. They are not "
                        "matched.".format(self.name, old_shape, shape))
        if dtype is not None:
            if not isinstance(dtype, core.DataType):
                dtype = convert_np_dtype_to_dtype_(dtype)
            if is_new_var:
                self.desc.set_dtype(dtype)
            else:
                old_dtype = self.dtype
                if dtype != old_dtype:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous data type is {1}; the new "
                                     "data type is {2}. They are not "
                                     "matched.".format(self.name, old_dtype,
                                                       dtype))

        if lod_level is not None:
            if is_new_var:
                self.desc.set_lod_level(lod_level)
            else:
                if lod_level != self.lod_level:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous lod_level is {1}; the new "
                                     "lod_level is {2}. They are not "
                                     "matched".format(self.name, self.lod_level,
                                                      lod_level))
        if persistable is not None:
            if is_new_var:
                self.desc.set_persistable(persistable)
            else:
                if persistable != self.persistable:
                    raise ValueError(
                        "Variable {0} has been created before. "
                        "The previous persistable is {1}; the new "
                        "persistable is {2}. They are not matched".format(
                            self.name, self.persistable, persistable))

        self.block.vars[name] = self
        self.op = None
        self.stop_gradient = stop_gradient

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.VarDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    __repr__ = __str__

    @property
    def persistable(self):
        return self.desc.persistable()

    @persistable.setter
    def persistable(self, p):
        self.desc.set_persistable(p)

    @property
    def name(self):
        return self.desc.name()

    @property
    def shape(self):
        # convert to tuple, make it as same as numpy API.
        return tuple(self.desc.shape())

    @property
    def dtype(self):
        return self.desc.dtype()

    @property
    def lod_level(self):
        return self.desc.lod_level()

    @property
    def type(self):
        return self.desc.type()

    @staticmethod
    def _unique_var_name_():
        prefix = "_generated_var"
        uid = core.unique_integer(prefix)  # unique during whole process.
        return "_".join([prefix, str(uid)])
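
# Illustrative sketch of Variable's get-or-create behaviour above (the names
# and shapes are hypothetical):
#
#   block = Program().global_block()
#   x = Variable(block, name="x", shape=[2, 3], dtype="float32")
#   x_again = Variable(block, name="x", dtype="float32")  # reuses the desc
#   Variable(block, name="x", shape=[3, 3])  # ValueError: shape mismatch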


def get_all_op_protos():
    """
    Get all registered op proto from PaddlePaddle C++ end.
    :return: A list of registered OpProto.
    """
    protostrs = core.get_all_op_protos()
    ret_values = []
    for pbstr in protostrs:
        op_proto = framework_pb2.OpProto.FromString(str(pbstr))
        ret_values.append(op_proto)
    return ret_values


class OpProtoHolder(object):
    @classmethod
    def instance(cls):
        if not hasattr(cls, '_instance'):
            cls._instance = cls()
        return cls._instance

    def __init__(self):
        assert not hasattr(
            self.__class__,
            '_instance'), 'Please use `instance()` to get OpProtoHolder object!'
        op_protos = get_all_op_protos()
        self.op_proto_map = {}
        for proto in op_protos:
            self.op_proto_map[proto.type] = proto

    def get_op_proto(self, type):
        if type not in self.op_proto_map:
            raise ValueError("Operator \"%s\" has not been registered." % type)
        return self.op_proto_map[type]
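
    # Illustrative sketch: look up the proto of a registered operator
    # (assuming an op such as "mul" was registered on the C++ side):
    #
    #   proto = OpProtoHolder.instance().get_op_proto("mul")
    #   input_names = [ipt.name for ipt in proto.inputs]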


class Operator(object):
    def __init__(self,
                 block,
                 desc,
                 type=None,
                 inputs=None,
                 outputs=None,
                 attrs=None):
        self.block = block
        self.desc = desc
        if len(self.desc.type()) != 0:
            return
        if type is None:
            raise ValueError(
                "`type` to initialize an Operator cannot be None.")
        self.desc.set_type(type)
        proto = OpProtoHolder.instance().get_op_proto(type)

        def find_name(var_list, name):
            for var_name in var_list:
                if var_name == name:
                    return True
            return False

        if inputs is not None:
            for in_proto in proto.inputs:
                found = find_name(inputs, in_proto.name)
                assert found or in_proto.dispensable, "Input {} not found".format(
                    in_proto.name)

                if found:
                    in_args = inputs[in_proto.name]
                    if not isinstance(in_args, list):
                        in_args = [in_args]
                    if not in_proto.duplicable and len(in_args) > 1:
                        raise ValueError(
                            "Input %s expects only one input, but %d are given."
                            % (in_proto.name, len(in_args)))
                    in_arg_names = []
                    for arg in in_args:
                        in_arg_names.append(arg.name)
                    self.desc.set_input(in_proto.name, in_arg_names)
                else:
                    self.desc.set_input(in_proto.name, [])

        if outputs is not None:
            given = set()
            need = set()
            for n in outputs:
                given.add(n)
            for m in proto.outputs:
                need.add(m.name)
            if not given == need:
                raise ValueError(
                    "Incorrect setting for output(s) of operator \"%s\". Need: [%s] Given: [%s]"
                    % (type, ", ".join(str(e) for e in need), ", ".join(
                        str(e) for e in given)))

            for out_proto in proto.outputs:
                out_args = outputs[out_proto.name]
                if not isinstance(out_args, list):
                    out_args = [out_args]
                if not out_proto.duplicable and len(out_args) > 1:
                    raise ValueError(
                        "Output %s expects only one output, but %d are given." %
                        (out_proto.name, len(out_args)))
                out_arg_names = []
                for arg in out_args:
                    out_arg_names.append(arg.name)
                    arg.op = self
                self.desc.set_output(out_proto.name, out_arg_names)

        if attrs is not None:
            if not isinstance(attrs, dict):
                raise TypeError("'attrs' should be a dict.")
            for attr in proto.attrs:
                attr_name = attr.name
                if (attr_name not in attrs) or (attrs[attr_name] is None):
                    continue
                if isinstance(attrs[attr_name], Block):
                    self.desc.set_block_attr(attr_name, attrs[attr_name].desc)
                else:
                    self.desc.set_attr(attr_name, attrs[attr_name])

        self.desc.check_attrs()
        no_kernel_op_set = {
            'feed', 'fetch', 'save', 'load', 'recurrent',
            'rnn_memory_helper_grad', 'conditional_block', 'while', 'send',
            'recv'
        }
        if type not in no_kernel_op_set:
            self.desc.infer_var_type(self.block.desc)
            self.desc.infer_shape(self.block.desc)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.OpDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def __str__(self):
        return self.to_string(True)

    __repr__ = __str__

    @property
    def type(self):
        return self.desc.type()

    def input(self, name):
        return self.desc.input(name)

    @property
    def input_names(self):
        return self.desc.input_names()

    def output(self, name):
        return self.desc.output(name)

    @property
    def output_names(self):
        return self.desc.output_names()

    @property
    def idx(self):
        for i, op in enumerate(self.block.ops):
            if op == self:
                return i
        raise ValueError(
            "Can't find op itself in its block. It could be a bug of Paddle.")

    def has_attr(self, name):
        return self.desc.has_attr(name)

    def attr_type(self, name):
        return self.desc.attr_type(name)

    @property
    def attr_names(self):
        return self.desc.attr_names()

    def attr(self, name):
        return self.desc.attr(name)

    def block_attr(self, name):
        return self.desc.block_attr(name)
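
    # Illustrative sketch: operators are normally constructed through
    # Block.append_op rather than directly; once built they can be inspected:
    #
    #   op.type          # e.g. "mul"
    #   op.input_names   # declared input parameter names
    #   op.attr_names    # attribute names, read back via op.attr(name)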


class Block(object):
    def __init__(self, program, idx):
        self.desc = program.desc.block(idx)
        self.vars = dict()  # var_name --> var
        self.ops = collections.deque()  # operator list
        self.program = program

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.BlockDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    __repr__ = __str__

    @property
    def parent_idx(self):
        return self.desc.parent

    @property
    def idx(self):
        return self.desc.id

    def var(self, name):
        if not isinstance(name, basestring):
            raise TypeError()
        v = self.vars.get(name, None)
        if v is None:
            raise ValueError("var %s not in this block" % name)
        return v

    def all_parameters(self):
        return list(self.iter_parameters())

    def iter_parameters(self):
        return (item[1] for item in self.vars.iteritems()
                if isinstance(item[1], Parameter))
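
    # Illustrative sketch: listing the trainable parameters held by a block
    # (assumes Parameter entries have already been created in self.vars):
    #
    #   trainable = [p for p in block.iter_parameters() if p.trainable]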

    def create_var(self, *args, **kwargs):
        var = Variable(self, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](var, self)
        return var

    def has_var(self, name):
        return name in self.vars

    def create_parameter(self, *args, **kwargs):
        global_block = self.program.global_block()
        param = Parameter(global_block, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](param, self)
        return param

    def append_op(self, *args, **kwargs):
        op_desc = self.desc.append_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.append(op)
        return op

    def delete_op(self, op):
        self.ops.remove(op)

    def prepend_op(self, *args, **kwargs):
        op_desc = self.desc.prepend_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.appendleft(op)
        return op
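
    # Illustrative sketch: append_op adds an operator at the tail of the block
    # while prepend_op adds one at the head, mirroring the underlying C++
    # BlockDesc (the op type and arguments below are hypothetical):
    #
    #   block.append_op(type="mul", inputs={"X": x, "Y": w}, outputs={"Out": out})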

    def sync_with_cpp(self):
        # sync variables from cpp
        for var in self.desc.all_vars():
            if not self.has_var(var.name()):
                self.create_var(name=var.name(), desc=var, type=var.type())

        # sync operators from cpp
        ops_in_cpp = []
        for op_idx in range(0, self.desc.op_size()):
            ops_in_cpp.append(self.desc.op(op_idx))

        if len(self.ops) != 0:
            first_op_in_python = self.ops[0].desc
            last_op_in_python = self.ops[-1].desc
            start_index = None
            end_index = None
            for index in range(len(ops_in_cpp)):
                if first_op_in_python == ops_in_cpp[index]:
                    start_index = index
                if last_op_in_python == ops_in_cpp[index]:
                    end_index = index
            assert start_index is not None
            assert end_index is not None
            assert start_index <= end_index
        else:
            start_index = 0
            end_index = -1

        # sync ops that were prepended on the C++ side into the head of self.ops
        for index in range(start_index - 1, -1, -1):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.appendleft(op)

        # sync ops that were appended on the C++ side onto the tail of self.ops
        for index in range((end_index + 1), len(ops_in_cpp)):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.append(op)

        assert len(self.ops) == len(ops_in_cpp)
        for index in range(len(self.ops)):
            assert self.ops[index].desc == ops_in_cpp[index]

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from the other block.

        Args:
            other(Block): the other block

        Returns:
            None
        """
        if not isinstance(other, Block):
            raise TypeError("copy_param_info_from should be invoked with Block")
        for p in other.iter_parameters():
            assert isinstance(p, Parameter)
            v = self.vars.get(p.name, None)
            if v is None:
                raise ValueError("copy_param_info_from should be invoked with "
                                 "a block of the same topology")
            assert isinstance(v, Variable)
            new_p = Parameter(
                block=self,
                shape=v.shape,
                dtype=v.dtype,
                type=v.type,
                lod_level=v.lod_level,
                stop_gradient=p.stop_gradient,
                trainable=p.trainable,
                optimize_attr=p.optimize_attr,
                regularizer=p.regularizer,
                name=v.name)
            self.vars[new_p.name] = new_p


class Program(object):
    def __init__(self):
        self.desc = core.ProgramDesc()
        self.blocks = [Block(self, 0)]
        self.current_block_idx = 0

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.ProgramDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def clone(self):
        p = Program()
        p.desc = core.ProgramDesc(self.desc)
        p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
        p.sync_with_cpp()
        p.copy_param_info_from(self)
        return p
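
    # Illustrative sketch: clone() deep-copies the underlying ProgramDesc and
    # rebuilds the Python-side blocks, so the copy can be modified (e.g. for
    # testing) without touching the original:
    #
    #   test_program = main_program.clone()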

    def prune(self, targets):
        if not isinstance(targets, list):
            targets = [targets]
        targets_idx = []
        for t in targets:
            if not isinstance(t, Operator):
                if isinstance(t, Variable):
                    t = t.op
                else:
                    raise ValueError(
                        "All targets of prune() can only be Variable or Operator."
                    )

            targets_idx.append([t.block.idx, t.idx])
        res = Program()
        res.desc = core.prune(self.desc, targets_idx)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res
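
    # Illustrative sketch: prune() keeps only the operators needed to compute
    # the given targets (assuming `loss` is a Variable of this program):
    #
    #   pruned_program = main_program.prune(loss)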

    def inference_optimize(self):
        res = Program()
        res.desc = core.inference_optimize(self.desc)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    @staticmethod
    def parse_from_string(binary_str):
        p = Program()
        p.desc = core.ProgramDesc(binary_str)
        p.blocks = [Block(p, i) for i in xrange(p.desc.num_blocks())]
        p.sync_with_cpp()
        return p
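
    # Illustrative sketch: round-trip a program through its serialized form:
    #
    #   binary_str = main_program.desc.serialize_to_string()
    #   restored = Program.parse_from_string(binary_str)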

    def __repr__(self):
        return str(self)

    def global_block(self):
        return self.blocks[0]

    def block(self, index):
        return self.blocks[index]

    def current_block(self):
        return self.blocks[self.current_block_idx]

    def append_backward(self, target, no_grad_set=None):
        """
        Return a map of param_name -> (grad_name, block_index, op_index).
        """
        assert isinstance(target, Variable)
        if no_grad_set is None:
            no_grad_set = set()
        try:
            param_to_grad_info = self.desc.append_backward(target.desc,
                                                           no_grad_set)
        except Exception as e:
            raise core.EnforceNotMet(
                str(e) + "\nCurrent protobuf is\n{0}".format(
                    self.to_string(False)))

        self.sync_with_cpp()
        return param_to_grad_info
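
    # Illustrative sketch (assuming `loss` is a scalar Variable of this
    # program): the returned map gives, per parameter name, where its gradient
    # lives:
    #
    #   param_grad_map = main_program.append_backward(loss)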

    def create_block(self):
        new_block_idx = len(self.blocks)
        self.desc.append_block(self.current_block().desc)
        self.current_block_idx = new_block_idx
        self.blocks.append(Block(self, self.current_block_idx))
        return self.current_block()

    def rollback(self):
        self.current_block_idx = self.current_block().parent_idx
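
    # Illustrative sketch: nested blocks pair create_block() with rollback():
    #
    #   parent = program.current_block()
    #   sub = program.create_block()   # sub.parent_idx == parent.idx
    #   # ... append ops into the sub-block ...
    #   program.rollback()             # current block is `parent` again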

    def sync_with_cpp(self):
        for block_idx in range(len(self.blocks), self.desc.num_blocks()):
            self.blocks.append(Block(self, block_idx))
        for block in self.blocks:
            block.sync_with_cpp()

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from the other program.

        Args:
            other(Program): the other program

        Returns:
            None
        """
        if not isinstance(other, Program):
            raise TypeError("copy_param_info_from should be invoked with "
                            "Program")

        if len(self.blocks) != len(other.blocks):
            raise ValueError("copy_param_info_from should be invoked with two "
                             "programs that represent the same topology")
        self.global_block().copy_param_info_from(other.global_block())

    def list_vars(self):
        for each_block in self.blocks:
            for each_var in each_block.vars.itervalues():
                yield each_var


class Parameter(Variable):
    def __init__(self, block, shape, dtype, **kwargs):
        if shape is None or dtype is None:
            raise ValueError("Parameter must set shape and dtype")
        if len(shape) == 0:
            raise ValueError("Parameter shape cannot be empty")

        for each in shape:
            if each < 0:
                raise ValueError("Parameter shape should not be related with "
                                 "batch-size")

        Variable.__init__(
            self, block, persistable=True, shape=shape, dtype=dtype, **kwargs)
        self.trainable = kwargs.get('trainable', True)

        self.optimize_attr = kwargs.get('optimize_attr', {'learning_rate': 1.0})

        self.regularizer = kwargs.get('regularizer', None)
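
# Illustrative sketch: parameters are normally created via
# Block.create_parameter, which routes them into the global block:
#
#   w = block.create_parameter(name="w", shape=[784, 100], dtype="float32")
#   w.persistable   # -> True
#   w.trainable     # -> True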


# program is a global instance.
_main_program_ = Program()
_startup_program_ = Program()


def default_startup_program():
    """
    Get default startup program. In startup program, Paddle will initialize
    parameters, initialize nccl handle, etc.
    
    Returns:
        Program: startup program
    """
    return _startup_program_


def default_main_program():
    """
    Get default main program. The main program is used for training or testing.
    
    Returns:
        Program: main program
    """
    return _main_program_


def switch_main_program(program):
    """
    Switch the main program to a new program.
    
    Args:
        program(Program): The new main program

    Returns:
        Program: The previous main program
    """
    global _main_program_
    prev_program = _main_program_
    _main_program_ = program
    return prev_program


def switch_startup_program(program):
    """
    Switch the startup program to a new program.

    Args:
        program(Program): The new startup program

    Returns:
        Program: The previous startup program
    """
    global _startup_program_
    prev_program = _startup_program_
    _startup_program_ = program
    return prev_program


@contextlib.contextmanager
def program_guard(main_program, startup_program=None):
    """
    Switch program with `with` statement
    
    Examples:
        >>> with program_guard(Program()):
        >>>   data = fluid.layers.data(...)
        >>>   hidden = fluid.layers.fc(...)
        
    Args:
        main_program(Program): New main program inside `with` statement
        startup_program(Program): New startup program inside `with` statement. 
            None means do not change startup program.

    Returns:
        None
    """
    if not isinstance(main_program, Program):
        raise TypeError("main_program should be Program")
    main_program = switch_main_program(main_program)
    if startup_program is not None:
        if not isinstance(startup_program, Program):
            raise TypeError("startup_program should be Program")
        startup_program = switch_startup_program(startup_program)
    yield
    switch_main_program(main_program)
    if startup_program is not None:
        switch_startup_program(startup_program)