import collections

import numpy as np
from . import core
import proto.framework_pb2 as framework_pb2
import google.protobuf.message
import contextlib

__all__ = [
    'Block', 'Variable', 'Program', 'Operator', 'default_startup_program',
    'default_main_program', 'program_guard', 'switch_startup_program',
    'switch_main_program'
]


def unique_name(prefix):
    """
    Generate a unique name with the given prefix.

    Args:
        prefix(str): The prefix of the returned string

    Returns(str): A unique string with the prefix
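
    Examples:
        >>> # Illustrative only: the numeric suffix depends on how many
        >>> # names with this prefix were generated earlier in this process.
        >>> unique_name("fc")  # e.g. 'fc_0'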

    """
    uid = core.unique_integer(prefix)  # unique during whole process.
    return "_".join([prefix, str(uid)])


def convert_np_dtype_to_dtype_(np_dtype):
    """
    Convert a numpy data type into the corresponding Paddle data type.

    Args:
        np_dtype(np.dtype): the data type in numpy

    Returns(core.DataType): the data type in Paddle
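
    Examples:
        >>> convert_np_dtype_to_dtype_(np.float32)  # core.DataType.FP32
        >>> convert_np_dtype_to_dtype_('int64')     # core.DataType.INT64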

    """
    dtype = np.dtype(np_dtype)
    if dtype == np.float32:
        return core.DataType.FP32
    elif dtype == np.float64:
        return core.DataType.FP64
    elif dtype == np.float16:
        return core.DataType.FP16
    elif dtype == np.int32:
        return core.DataType.INT32
    elif dtype == np.int16:
        return core.DataType.INT16
    elif dtype == np.int64:
        return core.DataType.INT64
    elif dtype == np.bool_:
        return core.DataType.BOOL
    else:
        raise ValueError("Not supported numpy dtype " + str(dtype))


def dtype_is_floating(dtype):
    """
    Check whether the data type is a floating-point type.

    Args:
        dtype(np.dtype|core.DataType): data type.
            Could be numpy format or Paddle format

    Returns(bool): True if data type is a float value
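
    Examples:
        >>> dtype_is_floating(np.float32)           # True
        >>> dtype_is_floating(core.DataType.INT32)  # False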

    """
    if not isinstance(dtype, core.DataType):
        dtype = convert_np_dtype_to_dtype_(dtype)

    return dtype in [core.DataType.FP16, core.DataType.FP32, core.DataType.FP64]


def _debug_string_(proto, throw_on_error=True):
    """
    Get the debug string of a protobuf message. The message may not be
    initialized.

    Args:
        proto(google.protobuf.message.Message): The protobuf message
        throw_on_error(bool): True to raise an error when the protobuf message
            is not initialized.

    Returns(str): The debug string of the protobuf message
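
    Examples:
        >>> # Sketch: `serialized` stands for a serialized VarDesc string.
        >>> proto = framework_pb2.VarDesc.FromString(serialized)
        >>> print _debug_string_(proto, throw_on_error=False)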

    """
    error_fields = list()
    if not proto.IsInitialized(error_fields) and throw_on_error:
        raise ValueError("{0} are not initialized\nThe message is {1}".format(
            error_fields, proto))
    return proto.__str__()


class Variable(object):
    """
    Python Variable. Every input and output of an operator is a variable.
    Every variable belongs to a block. A variable has a name, and two
    variables in different blocks may have the same name.

    There are many kinds of variables. Please reference the framework.proto for
    details.

    Notes: The constructor of Variable should not be invoked directly. Please
    use `Block.create_var` to create a variable.

    >>> cur_program = Program()
    >>> cur_block = cur_program.current_block()
    >>> new_variable = cur_block.create_var(
    >>>                    name="X", shape=[-1, 23, 48], dtype='float32')

    Args:
        block(Block): The associated block. It will be passed by
            `Block.create_var` automatically.
        type(core.VarDesc.VarType): Variable type. Please reference the
            framework.proto for details.
        shape(tuple|list|None): The shape of variable. -1 means the batch size.
            Some kinds of variable do not contain shape, just set it to None.
        dtype(np.dtype|core.DataType|str): The data type of variable.
        lod_level(int): The level of the LoD tensor. 0 means there is no
            time-series data.
        persistable(bool): True if the variable should be saved as a
            checkpoint. Defaults to False.
        stop_gradient(bool): True if the variable stops computing gradients
            in the backward pass. Defaults to False.
    """

    def __init__(self,
                 block,
                 type=core.VarDesc.VarType.LOD_TENSOR,
                 name=None,
                 shape=None,
                 dtype=None,
                 lod_level=None,
                 persistable=None,
                 stop_gradient=False,
                 **kwargs):
        self.block = block

        if name is None:
            name = Variable._unique_var_name_()
        is_new_var = False
        self.desc = self.block.desc.find_var(name)

        if self.desc is None:
            self.desc = self.block.desc.var(name)
            is_new_var = True

        if is_new_var:
            self.desc.set_type(type)
        elif self.desc.type() != type:
            raise ValueError("Variable {0} has been created before. The "
                             "previous type is {1}; the new type is {2}. They"
                             " are not matched".format(self.name,
                                                       self.desc.type(), type))

        if shape is not None:
            if is_new_var:
                self.desc.set_shape(shape)
            else:
                old_shape = self.shape
                shape = tuple(shape)
                if shape != old_shape:
                    raise ValueError(
                        "Variable {0} has been created before. The previous "
                        "shape is {1}; the new shape is {2}. They are not "
                        "matched.".format(self.name, old_shape, shape))
        if dtype is not None:
            if not isinstance(dtype, core.DataType):
                dtype = convert_np_dtype_to_dtype_(dtype)
            if is_new_var:
                self.desc.set_dtype(dtype)
            else:
                old_dtype = self.dtype
                if dtype != old_dtype:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous data type is {1}; the new "
                                     "data type is {2}. They are not "
                                     "matched.".format(self.name, old_dtype,
                                                       dtype))

        if lod_level is not None:
            if is_new_var:
                self.desc.set_lod_level(lod_level)
            else:
                if lod_level != self.lod_level:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous lod_level is {1}; the new "
                                     "lod_level is {2}. They are not "
                                     "matched".format(self.name, self.lod_level,
                                                      lod_level))
        if persistable is not None:
            if is_new_var:
                self.desc.set_persistable(persistable)
            else:
                if persistable != self.persistable:
                    raise ValueError(
                        "Variable {0} has been created before."
                        "The previous persistable is {1}; the new "
                        "persistable is {2}. They are not matched".format(
                            self.name, self.persistable, persistable))

        self.block.vars[name] = self
        self.op = None
        self.stop_gradient = stop_gradient

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        """
        Get debug string.

        Args:
            throw_on_error(bool): True to raise an exception when self is not
                initialized.

        Returns(str): The debug string.
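
        Examples:
            >>> # e.g. with `new_variable` from the class docstring above
            >>> print new_variable.to_string(throw_on_error=False)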

        """
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.VarDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    __repr__ = __str__

    @property
    def persistable(self):
        return self.desc.persistable()

    @persistable.setter
    def persistable(self, p):
        self.desc.set_persistable(p)

    @property
    def name(self):
        return self.desc.name()

    @property
    def shape(self):
        # convert to tuple, make it as same as numpy API.
        return tuple(self.desc.shape())

    @property
    def dtype(self):
        return self.desc.dtype()

    @property
    def lod_level(self):
        return self.desc.lod_level()

    @property
    def type(self):
        return self.desc.type()

    @staticmethod
    def _unique_var_name_():
        prefix = "_generated_var"
        uid = core.unique_integer(prefix)  # unique during whole process.
        return "_".join([prefix, str(uid)])


def get_all_op_protos():
    """
    Get all registered op protos from the PaddlePaddle C++ end.

    Returns(list): list of OpProto
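
    Examples:
        >>> # The exact operator set depends on how Paddle was built.
        >>> op_types = [p.type for p in get_all_op_protos()]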

    """
    protostrs = core.get_all_op_protos()
    ret_values = []
    for pbstr in protostrs:
        op_proto = framework_pb2.OpProto.FromString(str(pbstr))
        ret_values.append(op_proto)
    return ret_values


class OpProtoHolder(object):
    """
    A global singleton holding all OpProtos from C++, keyed by operator type.
    """

    @classmethod
    def instance(cls):
        if not hasattr(cls, '_instance'):
            cls._instance = cls()
        return cls._instance

    def __init__(self):
        assert not hasattr(
            self.__class__,
            '_instance'), 'Please use `instance()` to get OpProtoHolder object!'
        op_protos = get_all_op_protos()
        self.op_proto_map = {}
        for proto in op_protos:
            self.op_proto_map[proto.type] = proto

    def get_op_proto(self, type):
        """
        Get OpProto by a type string.
        Args:
            type(str): The type that operator registered in C++ side.

        Returns(framework_pb2.OpProto): The OpProto
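
        Examples:
            >>> # assuming the "sum" op is registered, as in standard builds
            >>> proto = OpProtoHolder.instance().get_op_proto("sum")
            >>> proto.type  # 'sum'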

        """
        if type not in self.op_proto_map:
            raise ValueError("Operator \"%s\" has not been registered." % type)
        return self.op_proto_map[type]


class Operator(object):
    """
    Python Operator class. An Operator represents one of the built-in
    instructions in a Block. Users compose these built-in instructions to
    describe their neural network.
    """

    def __init__(self,
                 block,
                 desc,
                 type=None,
                 inputs=None,
                 outputs=None,
                 attrs=None):
        """
        Constructor.

        Notes: The constructor of operator should not be invoked directly. Use
        Block.append_op or Block.prepend_op instead.

        >>> cur_program = Program()
        >>> cur_block = cur_program.current_block()
        >>> # var1 += var2 + var3
        >>> cur_block.append_op(type="sum",
        >>>                     inputs={"X": [var1, var2, var3]},
        >>>                     outputs={"Out": [var1]})

        Args:
            block(Block): The block has the current operator
            desc(core.OpDesc): The protobuf description
            type(str): The type of operator.
            inputs(dict): The input dictionary. Key is the input parameter name.
                Value is a list of variables.
            outputs(dict): The output dictionary. Has the same format as
                inputs.
            attrs(dict): The attributes dictionary. Key is the attribute name.
                Value is the attribute value. The attribute type must match
                the type registered in C++.
        """
        self.block = block
        self.desc = desc
        if len(self.desc.type()) != 0:
            return
        if type is None:
            raise ValueError(
                "`type` to initialize an Operator can not be None.")
        self.desc.set_type(type)
        proto = OpProtoHolder.instance().get_op_proto(type)

        def find_name(var_list, name):
            for var_name in var_list:
                if var_list[var_name] is not None and var_name == name:
                    return True
            return False

        if inputs is not None:
            for in_proto in proto.inputs:
                found = find_name(inputs, in_proto.name)
                assert found or in_proto.dispensable, "Input {} not found".format(
                    in_proto.name)

                if found:
                    in_args = inputs[in_proto.name]
                    if not isinstance(in_args, list):
                        in_args = [in_args]
                    if not in_proto.duplicable and len(in_args) > 1:
                        raise ValueError(
                            "Input %s expects only one input, but %d are given."
                            % (in_proto.name, len(in_args)))
                    in_arg_names = []
                    for arg in in_args:
                        in_arg_names.append(arg.name)
                    self.desc.set_input(in_proto.name, in_arg_names)
                else:
                    self.desc.set_input(in_proto.name, [])

        if outputs is not None:
            given = set()
            need = set()
            for n in outputs:
                given.add(n)
            for m in proto.outputs:
                need.add(m.name)
            if not given == need:
                raise ValueError(
                    "Incorrect setting for output(s) of operator \"%s\". Need: [%s] Given: [%s]"
                    % (type, ", ".join(str(e) for e in need), ", ".join(
                        str(e) for e in given)))

            for out_proto in proto.outputs:
                out_args = outputs[out_proto.name]
                if not isinstance(out_args, list):
                    out_args = [out_args]
                if not out_proto.duplicable and len(out_args) > 1:
                    raise ValueError(
                        "Output %s expects only one output, but %d are given." %
                        (out_proto.name, len(out_args)))
                out_arg_names = []
                for arg in out_args:
                    out_arg_names.append(arg.name)
                    arg.op = self
                self.desc.set_output(out_proto.name, out_arg_names)

        if attrs is not None:
            if not isinstance(attrs, dict):
                raise TypeError("'attrs' should be a dict.")
            for attr in proto.attrs:
                attr_name = attr.name
                if (attr_name not in attrs) or (attrs[attr_name] is None):
                    continue
                if isinstance(attrs[attr_name], Block):
                    self.desc.set_block_attr(attr_name, attrs[attr_name].desc)
                else:
                    self.desc.set_attr(attr_name, attrs[attr_name])

        self.desc.check_attrs()
        no_kernel_op_set = {
            'feed', 'fetch', 'save', 'load', 'recurrent',
            'rnn_memory_helper_grad', 'conditional_block', 'while'
        }
        if type not in no_kernel_op_set:
            self.desc.infer_var_type(self.block.desc)
            self.desc.infer_shape(self.block.desc)

    def to_string(self, throw_on_error):
        """
        Get the debug string.

        Args:
            throw_on_error(bool): True to raise an exception when self is not
                initialized.

        Returns(str): The debug string.

        """
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.OpDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def __str__(self):
        return self.to_string(True)

    __repr__ = __str__

    @property
    def type(self):
        return self.desc.type()

    def input(self, name):
        """
        Get input arguments by the input parameter name
        Args:
            name(str): The input parameter name

        Returns(list): The list of argument names associated with the given
            parameter name.
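
        Examples:
            >>> # assuming `op` is the sum operator from the constructor example
            >>> op.input("X")  # e.g. [u'var1', u'var2', u'var3']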

        """
        return self.desc.input(name)

    @property
    def input_names(self):
        """
        Get all input parameter names
        Returns(list): A list of input parameter names

        """
        return self.desc.input_names()

    def output(self, name):
        """
        Get output arguments by the output parameter name
        Args:
            name(str): The output parameter name

        Returns(list): The list of argument names associated with the given
            parameter name.

        """
        return self.desc.output(name)

    @property
    def output_names(self):
        """
        Get all output parameter names
        Returns(list): A list of output parameter names

        """
        return self.desc.output_names()

    @property
    def idx(self):
        """
        Return the array index of the current operator.
        Returns(int): The array index in block.ops array
        Raises:
            ValueError: when the operator is not found.
        """
        for i, op in enumerate(self.block.ops):
            if op == self:
                return i
        raise ValueError(
            "Can't find op itself in its block. It could be a bug of Paddle.")

    def has_attr(self, name):
        """
        Whether the operator has the attribute with the given name.

        Args:
            name(str): the attribute name

        Returns(bool): True if the operator has this attribute.
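
        Examples:
            >>> # assuming `op` is an existing Operator instance
            >>> op.has_attr("data_format")  # True only if the op defines it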

        """
        return self.desc.has_attr(name)

    def attr_type(self, name):
        """
        Get the type of attribute by attribute name
        Args:
            name(str): the attribute name

        Returns(core.AttrType): the attribute type

        """
        return self.desc.attr_type(name)

    @property
    def attr_names(self):
        """
        Get all attribute names
        Returns(list): The list of attribute names

        """
        return self.desc.attr_names()

    def attr(self, name):
        """
        Get attribute by name
        Args:
            name(str): the attribute name

        Returns(bool|int|str|float|list): The attribute value. The return value
            can be any valid attribute type.
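
        Examples:
            >>> # assuming `op` has an integer attribute named "axis"
            >>> axis = op.attr("axis")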

        """
        return self.desc.attr(name)

    def block_attr(self, name):
        """
        Get the block attribute by name
        Args:
            name(str): the attribute name

        Returns(int): the block index

        """
        return self.desc.block_attr(name)


class Block(object):
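    """
    A Block holds the variables and the ordered operators of one scope of a
    Program. Blocks are created through Program (e.g. `Program.create_block`),
    not constructed directly. A minimal sketch:

    >>> prog = Program()
    >>> block = prog.current_block()
    >>> var = block.create_var(name="X", shape=[-1, 8], dtype='float32')
    """
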
    def __init__(self, program, idx):
        self.desc = program.desc.block(idx)
        self.vars = dict()  # var_name --> var
        self.ops = collections.deque()  # operator list
        self.program = program

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.BlockDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    __repr__ = __str__

    @property
    def parent_idx(self):
        return self.desc.parent

    @property
    def idx(self):
        return self.desc.id

    def var(self, name):
        if not isinstance(name, basestring):
            raise TypeError()
        v = self.vars.get(name, None)
        if v is None:
            raise ValueError("var %s not in this block" % name)
        return v

    def all_parameters(self):
        return list(self.iter_parameters())

    def iter_parameters(self):
        return (item[1] for item in self.vars.iteritems()
                if isinstance(item[1], Parameter))

    def create_var(self, *args, **kwargs):
        var = Variable(self, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](var, self)
        return var

    def has_var(self, name):
        return name in self.vars

    def create_parameter(self, *args, **kwargs):
        global_block = self.program.global_block()
        param = Parameter(global_block, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](param, self)
        return param

    def append_op(self, *args, **kwargs):
        op_desc = self.desc.append_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.append(op)
        return op

    def prepend_op(self, *args, **kwargs):
        op_desc = self.desc.prepend_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.appendleft(op)
        return op

    def sync_with_cpp(self):
        # sync variables from cpp
        for var in self.desc.all_vars():
            if not self.has_var(var.name()):
                self.create_var(name=var.name(), desc=var, type=var.type())

        # sync operators from cpp
        ops_in_cpp = []
        for op_idx in range(0, self.desc.op_size()):
            ops_in_cpp.append(self.desc.op(op_idx))

        if len(self.ops) != 0:
            first_op_in_python = self.ops[0].desc
            last_op_in_python = self.ops[len(self.ops) - 1].desc
            start_index = None
            end_index = None
            for index in range(len(ops_in_cpp)):
                if first_op_in_python == ops_in_cpp[index]:
                    start_index = index
                if last_op_in_python == ops_in_cpp[index]:
                    end_index = index
            assert start_index is not None
            assert end_index is not None
            assert start_index <= end_index
        else:
            start_index = 0
            end_index = -1

        # sync ops prepended to the head of cpp_ops; iterate from
        # start_index - 1 down to 0 so every head op is covered
        for index in range(start_index - 1, -1, -1):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.appendleft(op)

        # sync ops appended to the end of cpp_ops
        for index in range((end_index + 1), len(ops_in_cpp)):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.append(op)

        assert len(self.ops) == len(ops_in_cpp)
        for index in range(len(self.ops)):
            assert self.ops[index].desc == ops_in_cpp[index]

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from another block.
        Args:
            other(Block): other block

        Returns:
            None
        """
        if not isinstance(other, Block):
            raise TypeError("copy_param_info_from should be invoked with Block")
        for p in other.iter_parameters():
            assert isinstance(p, Parameter)
            v = self.vars.get(p.name, None)
            if v is None:
                raise ValueError("copy_param_info_from should be invoked with "
                                 "same topology")
            assert isinstance(v, Variable)
            new_p = Parameter(
                block=self,
                shape=v.shape,
                dtype=v.dtype,
                type=v.type,
                lod_level=v.lod_level,
                stop_gradient=p.stop_gradient,
                trainable=p.trainable,
                optimize_attr=p.optimize_attr,
                regularizer=p.regularizer,
                clip_attr=p.clip_attr,
                name=v.name)
            self.vars[new_p.name] = new_p


class Program(object):
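    """
    A Program holds the list of Blocks that describes the whole computation;
    block 0 is the global block and `current_block()` is where new operators
    are appended. A minimal sketch:

    >>> prog = Program()
    >>> block = prog.current_block()
    >>> # build variables and operators through `block`
    """
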
    def __init__(self):
        self.desc = core.ProgramDesc()
        self.blocks = [Block(self, 0)]
        self.current_block_idx = 0
        self._seed = 0

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.ProgramDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def clone(self):
        p = Program()
        p.desc = core.ProgramDesc(self.desc)
        p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
        p.sync_with_cpp()
        p.copy_param_info_from(self)
        return p

    def prune(self, targets):
        if not isinstance(targets, list):
            targets = [targets]
        targets_idx = []
        for t in targets:
            if not isinstance(t, Operator):
                if isinstance(t, Variable):
                    t = t.op
                else:
                    raise ValueError(
                        "All targets of prune() can only be Variable or Operator."
                    )

            targets_idx.append([t.block.idx, t.idx])
        res = Program()
        res.desc = core.prune(self.desc, targets_idx)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    def inference_optimize(self):
        res = Program()
        res.desc = core.inference_optimize(self.desc)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    @staticmethod
    def parse_from_string(binary_str):
        p = Program()
        p.desc = core.ProgramDesc(binary_str)
        p.blocks = [Block(p, i) for i in xrange(p.desc.num_blocks())]
        p.sync_with_cpp()
        return p

    @property
    def random_seed(self):
        return self._seed

    @random_seed.setter
    def random_seed(self, seed):
        if not isinstance(seed, int):
            raise ValueError("Seed must be an integer.")
        self._seed = seed

    def __repr__(self):
        return str(self)

    def global_block(self):
        return self.blocks[0]

    def block(self, index):
        return self.blocks[index]

    def current_block(self):
        return self.blocks[self.current_block_idx]

    def append_backward(self, target, no_grad_set=None):
        """
        return map(param_name -> (grad_name, block_index, op_index))
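
        Examples:
            >>> # sketch: `loss` is the scalar Variable to differentiate
            >>> param_grad_map = program.append_backward(loss)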
        """
        assert isinstance(target, Variable)
        if no_grad_set is None:
            no_grad_set = set()
        try:
            param_to_grad_info = self.desc.append_backward(target.desc,
                                                           no_grad_set)
        except Exception as e:
            raise core.EnforceNotMet(
                str(e) + "\nCurrent protobuf is\n{0}".format(
                    self.to_string(False)))

        self.sync_with_cpp()
        return param_to_grad_info

    def create_block(self):
        new_block_idx = len(self.blocks)
        self.desc.append_block(self.current_block().desc)
        self.current_block_idx = new_block_idx
        self.blocks.append(Block(self, self.current_block_idx))
        return self.current_block()

    def rollback(self):
        self.current_block_idx = self.current_block().parent_idx

    def sync_with_cpp(self):
        for block_idx in range(len(self.blocks), self.desc.num_blocks()):
            self.blocks.append(Block(self, block_idx))
        for block in self.blocks:
            block.sync_with_cpp()

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from another program.
        Args:
            other(Program): Other program

        Returns:
            None
        """
        if not isinstance(other, Program):
            raise TypeError("copy_param_info_from should be invoked with "
                            "Program")

        if len(self.blocks) != len(other.blocks):
            raise ValueError("copy_param_info_from should be invoked with two "
                             "programs which represent the same topology")
        self.global_block().copy_param_info_from(other.global_block())

    def list_vars(self):
        for each_block in self.blocks:
            for each_var in each_block.vars.itervalues():
                yield each_var


class Parameter(Variable):
    def __init__(self, block, shape, dtype, **kwargs):
        if shape is None or dtype is None:
            raise ValueError("Parameter must set shape and dtype")
        if len(shape) == 0:
            raise ValueError("Parameter shape cannot be empty")

        for each in shape:
            if each < 0:
                raise ValueError("Parameter shape should not be related with "
                                 "batch-size")

        Variable.__init__(
            self, block, persistable=True, shape=shape, dtype=dtype, **kwargs)
        self.trainable = kwargs.get('trainable', True)

        self.optimize_attr = kwargs.get('optimize_attr', {'learning_rate': 1.0})

        self.regularizer = kwargs.get('regularizer', None)

        self.clip_attr = kwargs.get('clip_attr', None)


# The default main and startup programs are global instances.
_main_program_ = Program()
_startup_program_ = Program()


def default_startup_program():
    """
    Get the default startup program. In the startup program, Paddle
    initializes parameters, the NCCL handle, etc.

    Returns:
        Program: startup program
    """
    return _startup_program_


def default_main_program():
    """
    Get the default main program. The main program is used for training or
    testing.

    Returns:
        Program: main program
    """
    return _main_program_


def switch_main_program(program):
    """
    Switch the main program to a new program.

    Args:
        program(Program): The new main program

    Returns:
        Program: The previous main program
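
    Examples:
        >>> # sketch: temporarily make a fresh program the default
        >>> prev = switch_main_program(Program())
        >>> # ... operators built now go into the new main program ...
        >>> switch_main_program(prev)  # restore the previous program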
    """
    global _main_program_
    prev_program = _main_program_
    _main_program_ = program
    return prev_program


def switch_startup_program(program):
    """
    Switch the startup program to a new program.

    Args:
        program(Program): The new startup program

    Returns:
        Program: The previous startup program
    """
    global _startup_program_
    prev_program = _startup_program_
    _startup_program_ = program
    return prev_program


@contextlib.contextmanager
def program_guard(main_program, startup_program=None):
    """
    Switch the main and startup programs with a `with` statement.

    Examples:
        >>> with program_guard(Program()):
        >>>   data = fluid.layers.data(...)
        >>>   hidden = fluid.layers.fc(...)

    Args:
        main_program(Program): New main program inside `with` statement
        startup_program(Program): New startup program inside `with` statement.
            None means do not change startup program.

    Returns:
        None
    """
    if not isinstance(main_program, Program):
        raise TypeError("main_program should be Program")
    main_program = switch_main_program(main_program)
    if startup_program is not None:
        if not isinstance(startup_program, Program):
            raise TypeError("startup_program should be Program")
        startup_program = switch_startup_program(startup_program)
    yield
    switch_main_program(main_program)
    if startup_program is not None:
        switch_startup_program(startup_program)