import collections
import contextlib

import numpy as np

import proto.framework_pb2 as framework_pb2
from . import core

__all__ = [
    'Block', 'Variable', 'Program', 'Operator', 'default_startup_program',
    'default_main_program', 'program_guard', 'switch_startup_program',
    'switch_main_program'
]

EMPTY_VAR_NAME = core.kEmptyVarName()
TEMP_VAR_NAME = core.kTempVarName()
GRAD_VAR_SUFFIX = core.kGradVarSuffix()
ZERO_VAR_SUFFIX = core.kZeroVarSuffix()

USE_CPU = core.kUseCPU()
USE_CUDNN = core.kUseCUDNN()
USE_MKLDNN = core.kUseMKLDNN()


def grad_var_name(var_name):
    """
    Return the gradient variable name for a given variable name.
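
    A sketch of the expected result (the C++ suffix is typically "@GRAD"):

    >>> grad_var_name("fc_0.w")  # doctest: +SKIP
    'fc_0.w@GRAD'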
    """
    return var_name + GRAD_VAR_SUFFIX

def unique_name(prefix):
    """
    Generate a unique name with the given prefix.

    Args:
        prefix(str): The prefix of the returned string.

    Returns(str): A unique string with the prefix.
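
    A sketch of typical output (the counter is process-global, so the exact
    integer depends on call order):

    >>> unique_name("fc")  # doctest: +SKIP
    'fc_0'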

    """
    uid = core.unique_integer(prefix)  # unique during whole process.
    return "_".join([prefix, str(uid)])


def convert_np_dtype_to_dtype_(np_dtype):
    """
    Convert the data type in numpy to the data type in Paddle
    Args:
        np_dtype(np.dtype): the data type in numpy

    Returns(core.DataType): the data type in Paddle
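
    A sketch:

    >>> convert_np_dtype_to_dtype_('float32') == core.DataType.FP32  # doctest: +SKIP
    True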

    """
    dtype = np.dtype(np_dtype)
    if dtype == np.float32:
        return core.DataType.FP32
    elif dtype == np.float64:
        return core.DataType.FP64
    elif dtype == np.float16:
        return core.DataType.FP16
    elif dtype == np.int32:
        return core.DataType.INT32
    elif dtype == np.int16:
        return core.DataType.INT16
    elif dtype == np.int64:
        return core.DataType.INT64
    elif dtype == np.bool:
        return core.DataType.BOOL
    else:
        raise ValueError("Not supported numpy dtype " + str(dtype))


def dtype_is_floating(dtype):
    """
    Check whether the data type is a floating-point type.
    Args:
        dtype(np.dtype|core.DataType): data type.
            Could be numpy format or Paddle format

    Returns(bool): True if the data type is a floating-point type
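
    A sketch:

    >>> dtype_is_floating('float32')  # doctest: +SKIP
    True
    >>> dtype_is_floating(np.int32)  # doctest: +SKIP
    False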

    """
    if not isinstance(dtype, core.DataType):
        dtype = convert_np_dtype_to_dtype_(dtype)

    return dtype in [core.DataType.FP16, core.DataType.FP32, core.DataType.FP64]


def _debug_string_(proto, throw_on_error=True):
    """
    Get the debug string of a protobuf message. The message may not be
    initialized.
    Args:
        proto(google.protobuf.message.Message): The protobuf message
        throw_on_error(bool): True to raise an error when the protobuf
            message is not initialized.

    Returns(str): The debug string of the protobuf message

    """
    error_fields = list()
    if not proto.IsInitialized(error_fields) and throw_on_error:
        raise ValueError("{0} are not initialized\nThe message is {1}".format(
            error_fields, proto))
    return proto.__str__()


class Variable(object):
    """
    Python variable. Every input and output of an operator is a variable. Every
    variable belongs to a block. The variable has a name and two variables in
    different blocks could have the same name.

    There are many kinds of variables. Please reference the framework.proto for
    details.

    Notes: The constructor of Variable should not be invoked directly. Please
    use `Block.create_var` to create a variable.

    >>> cur_program = Program()
    >>> cur_block = cur_program.current_block()
    >>> new_variable = cur_block.create_var(
    >>>                    name="X", shape=[-1, 23, 48], dtype='float32')

    Args:
        block(Block): The associated block. It will be passed by
            `Block.create_var` automatically.
        type(core.VarDesc.VarType): Variable type. Please reference the
            framework.proto for details.
        shape(tuple|list|None): The shape of variable. -1 means the batch size.
            Some kinds of variable do not contain shape, just set it to None.
        dtype(np.dtype|core.DataType|str): The data type of variable.
        lod_level(int): The level of lod tensor. 0 means there is not a time
            series data.
        persistable(bool): True if the variable should be saved as check point.
            Defaults to False.
        stop_gradient(bool): True if the variable will stop to calculate
            gradients when backward. Defaults to False.
    """

    def __init__(self,
                 block,
                 type=core.VarDesc.VarType.LOD_TENSOR,
                 name=None,
                 shape=None,
                 dtype=None,
                 lod_level=None,
                 persistable=None,
                 error_clip=None,
                 stop_gradient=False,
                 **kwargs):
        self.block = block
        self.error_clip = error_clip

        if name is None:
            name = Variable._unique_var_name_()
        is_new_var = False
        self.desc = self.block.desc.find_var(name)

        if self.desc is None:
            self.desc = self.block.desc.var(name)
            is_new_var = True

        if is_new_var:
            self.desc.set_type(type)
        elif self.desc.type() != type:
            raise ValueError("Variable {0} has been created before. The "
                             "previous type is {1}; the new type is {2}. They"
                             " are not matched".format(self.name,
                                                       self.desc.type(), type))

        if shape is not None:
            if is_new_var:
                self.desc.set_shape(shape)
            else:
                old_shape = self.shape
                shape = tuple(shape)
                if shape != old_shape:
                    raise ValueError(
                        "Variable {0} has been created before. The previous "
                        "shape is {1}; the new shape is {2}. They are not "
                        "matched.".format(self.name, old_shape, shape))
        if dtype is not None:
            if not isinstance(dtype, core.DataType):
                dtype = convert_np_dtype_to_dtype_(dtype)
            if is_new_var:
                self.desc.set_dtype(dtype)
            else:
                old_dtype = self.dtype
                if dtype != old_dtype:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous data type is {1}; the new "
                                     "data type is {2}. They are not "
                                     "matched.".format(self.name, old_dtype,
                                                       dtype))

        if lod_level is not None:
            if is_new_var:
                self.desc.set_lod_level(lod_level)
            else:
                if lod_level != self.lod_level:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous lod_level is {1}; the new "
                                     "lod_level is {2}. They are not "
                                     "matched".format(self.name, self.lod_level,
                                                      lod_level))
        if persistable is not None:
            if is_new_var:
                self.desc.set_persistable(persistable)
            else:
                if persistable != self.persistable:
                    raise ValueError(
                        "Variable {0} has been created before. "
                        "The previous persistable is {1}; the new "
                        "persistable is {2}. They are not matched".format(
                            self.name, self.persistable, persistable))

        self.block.vars[name] = self
        self.op = None
        self.stop_gradient = stop_gradient

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        """
        Get debug string.

        Args:
            throw_on_error(bool): True to raise an exception when self is not
                initialized.

        Returns(str): The debug string.

        """
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.VarDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    __repr__ = __str__

    @property
    def persistable(self):
        return self.desc.persistable()

    @persistable.setter
    def persistable(self, p):
        self.desc.set_persistable(p)

    @property
    def name(self):
        return self.desc.name()

    @property
    def shape(self):
        # convert to tuple, to match the numpy API
        return tuple(self.desc.shape())

    @property
    def dtype(self):
        return self.desc.dtype()

    @property
    def lod_level(self):
        return self.desc.lod_level()

    @property
    def type(self):
        return self.desc.type()

    @staticmethod
    def _unique_var_name_():
        prefix = "_generated_var"
        uid = core.unique_integer(prefix)  # unique during whole process.
        return "_".join([prefix, str(uid)])


def get_all_op_protos():
    """
    Get all registered op protos from the PaddlePaddle C++ end.

    Returns(list): list of OpProto

    """
    protostrs = core.get_all_op_protos()
    ret_values = []
    for pbstr in protostrs:
        op_proto = framework_pb2.OpProto.FromString(str(pbstr))
        ret_values.append(op_proto)
    return ret_values


class OpProtoHolder(object):
    """
    A global variable to hold all OpProtos from C++ as a map
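
    A usage sketch (assumes a "sum" operator is registered on the C++ side):

    >>> proto = OpProtoHolder.instance().get_op_proto("sum")  # doctest: +SKIP
    >>> proto.type
    'sum'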
    """

    @classmethod
    def instance(cls):
        if not hasattr(cls, '_instance'):
            cls._instance = cls()
        return cls._instance

    def __init__(self):
        assert not hasattr(
            self.__class__,
            '_instance'), 'Please use `instance()` to get OpProtoHolder object!'
        op_protos = get_all_op_protos()
        self.op_proto_map = {}
        for proto in op_protos:
            self.op_proto_map[proto.type] = proto

    def get_op_proto(self, type):
        """
        Get OpProto by a type string.
        Args:
            type(str): The type that operator registered in C++ side.

        Returns(framework_pb2.OpProto): The OpProto

        """
        if type not in self.op_proto_map:
            raise ValueError("Operator \"%s\" has not been registered." % type)
        return self.op_proto_map[type]


class Operator(object):
    """
    Python Operator class. The operator represents the build in instructs in a
    Block. Users can use the build in instructs to describe their neural
    network.
    """

    def __init__(self,
                 block,
                 desc,
                 type=None,
                 inputs=None,
                 outputs=None,
                 attrs=None):
        """
        Constructor.

        Notes: The constructor of operator should not be invoked directly. Use
        Block.append_op or Block.prepend_op instead.

        >>> cur_program = Program()
        >>> cur_block = cur_program.current_block()
        >>> # var1 += var2 + var3
        >>> cur_block.append_op(type="sum",
        >>>                     inputs={"X": [var1, var2, var3]},
        >>>                     outputs={"Out": [var1]})

        Args:
            block(Block): The block has the current operator
            desc(core.OpDesc): The protobuf description
            type(str): The type of operator.
            inputs(dict): The input dictionary. Key is the input parameter name.
                Value is a list of variables.
            outputs(dict): The output dictionary. Has same format with inputs
            attrs(dict): The attributes dictionary. Key is attribute name. Value
                is the attribute value. The attribute type should be the same
                as the type registered in C++.
        """
        self.block = block
        self.desc = desc
        # saved for cloning a new operator
        self.inputs = inputs
        self.outputs = outputs
        self.attrs = attrs
        if len(self.desc.type()) != 0:
            return
        if type is None:
            raise ValueError(
                "`type` to initialize an Operator can not be None.")
        self.desc.set_type(type)
        proto = OpProtoHolder.instance().get_op_proto(type)

        def find_name(var_list, name):
            for var_name in var_list:
                if var_list[var_name] is not None and var_name == name:
                    return True
            return False

        if inputs is not None:
            for in_proto in proto.inputs:
                found = find_name(inputs, in_proto.name)
                assert found or in_proto.dispensable, "Input {} not found".format(
                    in_proto.name)

                if found:
                    in_args = inputs[in_proto.name]
                    if not isinstance(in_args, list):
                        in_args = [in_args]
                    if not in_proto.duplicable and len(in_args) > 1:
                        raise ValueError(
                            "Input %s expects only one input, but %d are given."
                            % (in_proto.name, len(in_args)))
                    in_arg_names = []
                    for arg in in_args:
                        if isinstance(arg, basestring):
                            in_arg_names.append(arg)
                        else:
                            in_arg_names.append(arg.name)
                    self.desc.set_input(in_proto.name, in_arg_names)
                else:
                    self.desc.set_input(in_proto.name, [])

        if outputs is not None:
            given = set()
            need = set()
            for n in outputs:
                given.add(n)
            for m in proto.outputs:
                need.add(m.name)
            if not given == need:
                raise ValueError(
                    "Incorrect setting for output(s) of operator \"%s\". Need: [%s] Given: [%s]"
                    % (type, ", ".join(str(e) for e in need), ", ".join(
                        str(e) for e in given)))

            for out_proto in proto.outputs:
                out_args = outputs[out_proto.name]
                if not isinstance(out_args, list):
                    out_args = [out_args]
                if not out_proto.duplicable and len(out_args) > 1:
                    raise ValueError(
                        "Output %s expects only one output, but %d are given." %
                        (out_proto.name, len(out_args)))
                out_arg_names = []
                for arg in out_args:
                    out_arg_names.append(arg.name)
                    arg.op = self
                self.desc.set_output(out_proto.name, out_arg_names)

        if attrs is not None:
            if not isinstance(attrs, dict):
                raise TypeError("'attrs' should be a dict.")
            for attr in proto.attrs:
                attr_name = attr.name
                if (not attr_name in attrs) or (attrs[attr_name] is None):
                    continue
                if isinstance(attrs[attr_name], Block):
                    self.desc.set_block_attr(attr_name, attrs[attr_name].desc)
                elif isinstance(attrs[attr_name], core.BlockDesc) or \
                   isinstance(attrs[attr_name], core.ProgramDesc):
                    self.desc.set_serialized_attr(
                        attr_name, attrs[attr_name].serialize_to_string())
                else:
                    self.desc.set_attr(attr_name, attrs[attr_name])

        self.desc.check_attrs()
        no_kernel_op_set = {
            'feed', 'fetch', 'save', 'load', 'recurrent',
            'rnn_memory_helper_grad', 'conditional_block', 'while', 'send',
            'recv'
        }
        if type not in no_kernel_op_set:
            self.desc.infer_var_type(self.block.desc)
            self.desc.infer_shape(self.block.desc)

    def to_string(self, throw_on_error):
        """
        Get the debug string.
        Args:
            throw_on_error(bool): True to raise an exception when self is not
                initialized.

        Returns(str): The debug string.

        """
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.OpDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def __str__(self):
        return self.to_string(True)

    __repr__ = __str__

    @property
    def type(self):
        return self.desc.type()

    def input(self, name):
        """
        Get input arguments by the input parameter name
        Args:
            name(str): The input parameter name

        Returns(list): return the list of argument names associated with the
            specific parameter name.
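
        A sketch, assuming an operator whose "X" input was set to variables
        named x1 and x2:

        >>> op.input("X")  # doctest: +SKIP
        ['x1', 'x2']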

        """
        return self.desc.input(name)

    @property
    def input_names(self):
        """
        Get all input parameter names
        Returns(list): return a list of input parameter names

        """
        return self.desc.input_names()

    def output(self, name):
        """
        Get output arguments by the output parameter name
        Args:
            name(str): The output parameter name

        Returns(list): return the list of argument names associated with the
            specific parameter name.

        """
        return self.desc.output(name)

    @property
    def output_names(self):
        """
        Get all output parameter names
        Returns(list): return a list of output parameter names

        """
        return self.desc.output_names()

    @property
    def idx(self):
        """
        Return the array index of current operator.
        Returns(int): The array index in block.ops array
        Raises:
            ValueError: when the operator is not found.
        """
        for i, op in enumerate(self.block.ops):
            if op == self:
                return i
        raise ValueError(
            "Can't find op itself in its block. It could be a bug of Paddle.")

    def has_attr(self, name):
        """
        Whether the operator has an attribute with the given name.
        Args:
            name(str): the attribute name

        Returns(bool): True if has this attribute.

        """
        return self.desc.has_attr(name)

    def attr_type(self, name):
        """
        Get the type of attribute by attribute name
        Args:
            name(str): the attribute name

        Returns(core.AttrType): the attribute type

        """
        return self.desc.attr_type(name)

    @property
    def attr_names(self):
        """
        Get all attribute names
        Returns(list): The list of attribute name

        """
        return self.desc.attr_names()

    def attr(self, name):
        """
        Get attribute by name
        Args:
            name(str): the attribute name

        Returns(bool|int|str|float|list): The attribute value. The return value
            can be any valid attribute type.

        """
        return self.desc.attr(name)

    def block_attr(self, name):
        """
        Get the block attribute by name
        Args:
            name(str): the attribute name

        Returns(int): the block index

        """
        return self.desc.block_attr(name)


class Block(object):
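    """
    A Block holds a list of operators and the variables they read and write.
    Each Program starts with a single global block (index 0); control-flow
    operators may create sub-blocks that refer back to their parents.
    """
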
    def __init__(self, program, idx):
        self.desc = program.desc.block(idx)
        self.vars = dict()  # var_name --> var
        self.ops = collections.deque()  # operator list
        self.program = program
        self.removed_vars = dict()

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.BlockDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    __repr__ = __str__

    @property
    def parent_idx(self):
        return self.desc.parent

    @property
    def idx(self):
        return self.desc.id

    def var(self, name):
        if not isinstance(name, basestring):
            raise TypeError()
        v = self.vars.get(name, None)
        if v is None:
            raise ValueError("var %s not in this block" % name)
        return v

    def var_recursive(self, name):
        if self.has_var(name):
            return self.var(name)
        else:
            if self.idx == 0:
                raise ValueError("var %s is not in block(%d) nor its parents."
                                 % (name, self.idx))
            else:
                parent_block = self.program.block(self.parent_idx)
                return parent_block.var_recursive(name)

    def all_parameters(self):
        return list(self.iter_parameters())

    def iter_parameters(self):
        return (item[1] for item in self.vars.iteritems()
                if isinstance(item[1], Parameter))

    def create_var(self, *args, **kwargs):
        var = Variable(self, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](var, self)
        return var

    def has_var(self, name):
        return name in self.vars

    def create_parameter(self, *args, **kwargs):
        global_block = self.program.global_block()
        param = Parameter(global_block, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](param, self)
        return param

    def append_op(self, *args, **kwargs):
        op_desc = self.desc.append_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.append(op)
        return op

    def delete_ops(self, ops):
        # remove from cpp
        # FIXME(typhoonzero): remove only the first occurrence.
        try:
            start = list(self.ops).index(ops[0])
            end = list(self.ops).index(ops[-1])
        except Exception as e:
            raise e
        self.desc.remove_op(start, end + 1)

    def prepend_op(self, *args, **kwargs):
        op_desc = self.desc.prepend_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.appendleft(op)
        return op

    def sync_with_cpp(self):
        # sync variables from cpp
        for var in self.desc.all_vars():
            if not self.has_var(var.name()):
                self.create_var(name=var.name(), desc=var, type=var.type())

        # sync operators from cpp
        ops_in_cpp = []
        for op_idx in range(0, self.desc.op_size()):
            ops_in_cpp.append(self.desc.op(op_idx))

        if len(self.ops) != 0:
            first_op_in_python = self.ops[0].desc
            last_op_in_python = self.ops[len(self.ops) - 1].desc
            start_index = None
            end_index = None
            for index in range(len(ops_in_cpp)):
                if first_op_in_python == ops_in_cpp[index]:
                    start_index = index
                if last_op_in_python == ops_in_cpp[index]:
                    end_index = index
            assert start_index is not None
            assert end_index is not None
            assert start_index <= end_index
        else:
            start_index = 0
            end_index = -1

        # sync ops prepended at the head of cpp_ops; iterate the cpp-only
        # prefix in reverse so that appendleft preserves the original order
        for index in range(start_index - 1, -1, -1):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.appendleft(op)

        # sync ops append to the end of cpp_ops
        for index in range((end_index + 1), len(ops_in_cpp)):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.append(op)

        assert len(self.ops) == len(ops_in_cpp)
        for index in range(len(self.ops)):
            assert self.ops[index].desc == ops_in_cpp[index]

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from other block
        Args:
            other(Block): other block

        Returns:
            None
        """
        if not isinstance(other, Block):
            raise TypeError("copy_param_info_from should be invoked with Block")
        for p in other.iter_parameters():
            assert isinstance(p, Parameter)
            v = self.vars.get(p.name, None)
            if v is None:
                raise ValueError("copy_param_info_from should be invoked with "
                                 "same topology")
            assert isinstance(v, Variable)
            new_p = Parameter(
                block=self,
                shape=v.shape,
                dtype=v.dtype,
                type=v.type,
                lod_level=v.lod_level,
                stop_gradient=p.stop_gradient,
                trainable=p.trainable,
                optimize_attr=p.optimize_attr,
                regularizer=p.regularizer,
                clip_attr=p.clip_attr,
                error_clip=p.error_clip,
                name=v.name)
            self.vars[new_p.name] = new_p


class Program(object):
    def __init__(self):
        self.desc = core.ProgramDesc()
        self.blocks = [Block(self, 0)]
        self.current_block_idx = 0
        self._seed = 0

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.ProgramDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def clone(self):
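        """
        Create a copy of this program, including its parameter information.

        A usage sketch:

        >>> test_prog = default_main_program().clone()  # doctest: +SKIP
        """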
        p = Program()
        p.desc = core.ProgramDesc(self.desc)
        p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
        p.sync_with_cpp()
        p.copy_param_info_from(self)
        return p
    def prune(self, targets):
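        """
        Build a pruned copy of this program that keeps only the operators
        needed to compute `targets` (a Variable/Operator or a list of them).

        A usage sketch (`loss` is a hypothetical Variable of this program):

        >>> pruned = prog.prune(targets=[loss])  # doctest: +SKIP
        """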
        if not isinstance(targets, list):
            targets = [targets]
        targets_idx = []
        for t in targets:
            if not isinstance(t, Operator):
                if isinstance(t, Variable):
                    t = t.op
                else:
                    raise ValueError(
                        "All targets of prune() can only be Variable or Operator."
                    )

            targets_idx.append([t.block.idx, t.idx])
        res = Program()
        res.desc = core.prune(self.desc, targets_idx)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    def inference_optimize(self):
        res = Program()
        res.desc = core.inference_optimize(self.desc)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    @staticmethod
    def parse_from_string(binary_str):
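        """
        Deserialize a Program from the binary string produced by
        `desc.serialize_to_string()`. A round-trip sketch:

        >>> binary = prog.desc.serialize_to_string()  # doctest: +SKIP
        >>> same_prog = Program.parse_from_string(binary)
        """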
        p = Program()
        p.desc = core.ProgramDesc(binary_str)
        p.blocks = [Block(p, i) for i in xrange(p.desc.num_blocks())]
        p.sync_with_cpp()
        return p

    @property
    def random_seed(self):
        return self._seed

    @random_seed.setter
    def random_seed(self, seed):
        if not isinstance(seed, int):
            raise ValueError("Seed must be an integer.")
        self._seed = seed

    def __repr__(self):
        return str(self)

    def global_block(self):
        return self.blocks[0]

    def block(self, index):
        return self.blocks[index]

    def current_block(self):
        return self.blocks[self.current_block_idx]

    def append_backward(self, target, no_grad_set=None):
        """
        Append backward (gradient) operators for `target`, then
        return map(param_name -> (grad_name, block_index, op_index)).
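
        A usage sketch (`loss` is a hypothetical target Variable):

        >>> param_grad_map = prog.append_backward(loss)  # doctest: +SKIP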
        """
        assert isinstance(target, Variable)
        if no_grad_set is None:
            no_grad_set = set()
        try:
            param_to_grad_info = self.desc.append_backward(target.desc,
                                                           no_grad_set)
        except Exception as e:
            raise core.EnforceNotMet(
                str(e) + "\nCurrent protobuf is\n{0}".format(
                    self.to_string(False)))

        self.sync_with_cpp()
        return param_to_grad_info

    def create_block(self, parent_idx=None):
        new_block_idx = len(self.blocks)
        parent = self.current_block() if parent_idx is None else self.block(
            parent_idx)
        self.desc.append_block(parent.desc)
        self.current_block_idx = new_block_idx
        self.blocks.append(Block(self, self.current_block_idx))
        return self.current_block()

    def rollback(self):
        self.current_block_idx = self.current_block().parent_idx

    def sync_with_cpp(self):
        for block_idx in range(len(self.blocks), self.desc.num_blocks()):
            self.blocks.append(Block(self, block_idx))
        for block in self.blocks:
            block.sync_with_cpp()

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from other program.
        Args:
            other(Program): Other program

        Returns:
            None
        """
        if not isinstance(other, Program):
            raise TypeError("copy_param_info_from should be invoked with "
                            "Program")

        if len(self.blocks) != len(other.blocks):
            raise ValueError("copy_param_info_from should be invoked with two "
                             "programs, which represent the same topology")
        self.global_block().copy_param_info_from(other.global_block())

    def list_vars(self):
        for each_block in self.blocks:
            for each_var in each_block.vars.itervalues():
                yield each_var


class Parameter(Variable):
    def __init__(self, block, shape, dtype, **kwargs):
        if shape is None or dtype is None:
            raise ValueError("Parameter must set shape and dtype")
        if len(shape) == 0:
            raise ValueError("Parameter shape cannot be empty")

        for each in shape:
            if each < 0:
                raise ValueError("Parameter shape should not be related with "
                                 "batch-size")

        Variable.__init__(
            self, block, persistable=True, shape=shape, dtype=dtype, **kwargs)
        self.trainable = kwargs.get('trainable', True)

        self.optimize_attr = kwargs.get('optimize_attr', {'learning_rate': 1.0})

        self.regularizer = kwargs.get('regularizer', None)

        self.clip_attr = kwargs.get('clip_attr', None)


# program is a global instance.
_main_program_ = Program()
_startup_program_ = Program()


def default_startup_program():
    """
    Get default startup program. In startup program, Paddle will initialize
    parameters, initialize nccl handle, etc.
    Returns:
        Program: startup program
    """
    return _startup_program_


def default_main_program():
    """
    Get default main program. The main program is used for training or testing.
    Returns:
        Program: main program
    """
    return _main_program_


def switch_main_program(program):
    """
    Switch the main program to a new program.
    Args:
        program(Program): The new main program

    Returns:
        Program: The previous main program
    """
    global _main_program_
    prev_program = _main_program_
    _main_program_ = program
    return prev_program


def switch_startup_program(program):
    """
    Switch the startup program to a new program.

    Args:
        program(Program): The new startup program

    Returns:
        Program: The previous startup program
    """
    global _startup_program_
    prev_program = _startup_program_
    _startup_program_ = program
    return prev_program


@contextlib.contextmanager
def program_guard(main_program, startup_program=None):
    """
    Switch program with `with` statement
    Examples:
        >>> with program_guard(Program()):
        >>>   data = fluid.layers.data(...)
        >>>   hidden = fluid.layers.fc(...)
    Args:
        main_program(Program): New main program inside `with` statement
        startup_program(Program): New startup program inside `with` statement.
            None means do not change startup program.

    Returns:
        None
    """
    if not isinstance(main_program, Program):
        raise TypeError("main_program should be Program")
    main_program = switch_main_program(main_program)
    if startup_program is not None:
        if not isinstance(startup_program, Program):
            raise TypeError("startup_program should be Program")
        startup_program = switch_startup_program(startup_program)
    yield
    switch_main_program(main_program)
    if startup_program is not None:
        switch_startup_program(startup_program)