#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
import contextlib
import re

import numpy as np

import proto.framework_pb2 as framework_pb2
from . import core
import unique_name

__all__ = [
    'Block',
    'Variable',
    'Program',
    'Operator',
    'default_startup_program',
    'default_main_program',
    'program_guard',
    'switch_startup_program',
    'switch_main_program',
    'get_var',
]

EMPTY_VAR_NAME = core.kEmptyVarName()
TEMP_VAR_NAME = core.kTempVarName()
GRAD_VAR_SUFFIX = core.kGradVarSuffix()
ZERO_VAR_SUFFIX = core.kZeroVarSuffix()


def grad_var_name(var_name):
    """
    Return the gradient name for a given variable name.
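
    Examples:
        >>> # illustrative; GRAD_VAR_SUFFIX comes from core.kGradVarSuffix()
        >>> grad_var_name("x") == "x" + GRAD_VAR_SUFFIX
        True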
    """
    return var_name + GRAD_VAR_SUFFIX


def convert_np_dtype_to_dtype_(np_dtype):
    """
    Convert the data type in numpy to the data type in Paddle
    Args:
        np_dtype(np.dtype): the data type in numpy

57
    Returns(core.VarDesc.VarType): the data type in Paddle
58 59

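    Examples:
        >>> # illustrative; mirrors the mapping implemented below
        >>> convert_np_dtype_to_dtype_(np.float32) == core.VarDesc.VarType.FP32
        True
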
    """
    dtype = np.dtype(np_dtype)
    if dtype == np.float32:
        return core.VarDesc.VarType.FP32
    elif dtype == np.float64:
        return core.VarDesc.VarType.FP64
    elif dtype == np.float16:
        return core.VarDesc.VarType.FP16
    elif dtype == np.int32:
        return core.VarDesc.VarType.INT32
    elif dtype == np.int16:
        return core.VarDesc.VarType.INT16
    elif dtype == np.int64:
        return core.VarDesc.VarType.INT64
    elif dtype == np.bool:
        return core.VarDesc.VarType.BOOL
    else:
        raise ValueError("Not supported numpy dtype " + str(dtype))


def dtype_is_floating(dtype):
    """
    Check whether the data type is floating point or not.
    Args:
        dtype(np.dtype|core.VarDesc.VarType): data type.
            Could be numpy format or Paddle format

    Returns(bool): True if the data type is a floating point type

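    Examples:
        >>> # illustrative; int32 is not in the floating point set below
        >>> dtype_is_floating('float32'), dtype_is_floating(np.int32)
        (True, False)
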
    """
    if not isinstance(dtype, core.VarDesc.VarType):
        dtype = convert_np_dtype_to_dtype_(dtype)

    return dtype in [
        core.VarDesc.VarType.FP16, core.VarDesc.VarType.FP32,
        core.VarDesc.VarType.FP64
    ]


def _debug_string_(proto, throw_on_error=True):
    """
    Get the debug string of a protobuf message. The message may not be
    initialized.
    Args:
        proto(google.protobuf.message.Message): The protobuf message
        throw_on_error(bool): True if raise an error when the protobuf message
            is not initialized.

    Returns(str): The debug string of the protobuf message

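    Examples:
        >>> # sketch, assuming `var` is an existing Variable in some block;
        >>> # this mirrors how Variable.to_string uses this helper
        >>> proto = framework_pb2.VarDesc.FromString(str(var.desc.serialize_to_string()))
        >>> print(_debug_string_(proto, throw_on_error=False))
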
    """
    error_fields = list()
    if not proto.IsInitialized(error_fields) and throw_on_error:
        raise ValueError("{0} are not initialized.\nThe message is {1}:\n".
                         format(error_fields, proto))
    return proto.__str__()


class Variable(object):
    """
    Python variable. Every input and output of an operator is a variable. Every
    variable belongs to a block. The variable has a name and two variables in
    different blocks could have the same name.

    There are many kinds of variables. Please reference the framework.proto for
    details.

    Notes: The constructor of Variable should not be invoked directly. Please
    use `Block.create_var` to create a variable.

    >>> cur_program = Program()
    >>> cur_block = cur_program.current_block()
    >>> new_variable = cur_block.create_var(
    >>>                    name="X", shape=[-1, 23, 48], dtype='float32')

    Args:
        block(Block): The associated block. It will be passed by
            `Block.create_var` automatically.
        type(core.VarDesc.VarType): Variable type. Please reference the
            framework.proto for details.
        shape(tuple|list|None): The shape of variable. -1 means the batch size.
            Some kinds of variable do not contain shape, just set it to None.
        dtype(np.dtype|core.VarDesc.VarType|str): The data type of variable.
        lod_level(int): The level of lod tensor. 0 means it is not a time
            series data.
        persistable(bool): True if the variable should be saved as check point.
            Defaults to False.
        stop_gradient(bool): True if the variable will stop to calculate
            gradients when backward. Defaults to False.
    """

    def __init__(self,
                 block,
                 type=core.VarDesc.VarType.LOD_TENSOR,
                 name=None,
                 shape=None,
                 dtype=None,
                 lod_level=None,
                 persistable=None,
                 error_clip=None,
                 stop_gradient=False,
                 **kwargs):
        self.block = block
        self.error_clip = error_clip

        if name is None:
            name = unique_name.generate('_generated_var')
        is_new_var = False
        self.desc = self.block.desc.find_var(name)

        if self.desc is None:
            self.desc = self.block.desc.var(name)
            is_new_var = True

        if is_new_var:
            self.desc.set_type(type)
        elif self.desc.type() != type:
            raise ValueError("Variable {0} has been created before. The "
                             "previous type is {1}; the new type is {2}. They"
                             " are not matched".format(self.name,
                                                       self.desc.type(), type))

        if shape is not None:
            if is_new_var:
                self.desc.set_shape(shape)
            else:
                old_shape = self.shape
                shape = tuple(shape)
                if shape != old_shape:
                    raise ValueError(
                        "Variable {0} has been created before. the previous "
                        "shape is {1}; the new shape is {2}. They are not "
                        "matched.".format(self.name, old_shape, shape))
        if dtype is not None:
            if not isinstance(dtype, core.VarDesc.VarType):
                dtype = convert_np_dtype_to_dtype_(dtype)
            if is_new_var:
                self.desc.set_dtype(dtype)
            else:
                old_dtype = self.dtype
                if dtype != old_dtype:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous data type is {1}; the new "
                                     "data type is {2}. They are not "
                                     "matched.".format(self.name, old_dtype,
                                                       dtype))

        if lod_level is not None:
            if is_new_var:
                self.desc.set_lod_level(lod_level)
            else:
                if lod_level != self.lod_level:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous lod_level is {1}; the new "
                                     "lod_level is {2}. They are not "
                                     "matched".format(self.name, self.lod_level,
                                                      lod_level))
        if persistable is not None:
            if is_new_var:
                self.desc.set_persistable(persistable)
            else:
                if persistable != self.persistable:
                    raise ValueError(
                        "Variable {0} has been created before."
                        "The previous persistable is {1}; the new "
                        "persistable is {2}. They are not matched".format(
                            self.name, self.persistable, persistable))

        self.block.vars[name] = self
        self.op = None
        self.stop_gradient = stop_gradient

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        Get debug string.

        Args:
            throw_on_error(bool): True if raise an exception when self is not
                intialized.
F
update  
fengjiayi 已提交
241 242
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when with_details is True
243 244 245 246

        Returns(str): The debug string.

        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.VarDesc.FromString(str(protostr))
        res_str = _debug_string_(proto, throw_on_error)
        if with_details:
            additional_attr = ("error_clip", "stop_gradient")
            for attr_name in additional_attr:
                res_str += "%s: %s\n" % (attr_name,
                                         str(getattr(self, attr_name)))
        return res_str

    __repr__ = __str__

    def set_desc(self, input):
        self.desc = input

    @property
    def persistable(self):
        return self.desc.persistable()

    @persistable.setter
    def persistable(self, p):
        self.desc.set_persistable(p)

    @property
    def name(self):
        return self.desc.name()

    @name.setter
    def name(self, new_name):
        self.desc.set_name(new_name)

    @property
    def shape(self):
        # convert to tuple, make it as same as numpy API.
        return tuple(self.desc.shape())

    @property
    def dtype(self):
        return self.desc.dtype()

    @property
    def lod_level(self):
        return self.desc.lod_level()

    @property
    def type(self):
        return self.desc.type()

    def set_error_clip(self, error_clip):
        self.error_clip = error_clip


def get_all_op_protos():
    """
    Get all registered op proto from PaddlePaddle C++ end.

    Returns(list): list of OpProto

    """
    protostrs = core.get_all_op_protos()
    ret_values = []
    for pbstr in protostrs:
        op_proto = framework_pb2.OpProto.FromString(str(pbstr))
        ret_values.append(op_proto)
    return ret_values


class OpProtoHolder(object):
    """
    A global variable to hold all OpProtos from C++ as a map
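
    Examples:
        >>> # illustrative singleton access; "sum" is a registered operator
        >>> sum_proto = OpProtoHolder.instance().get_op_proto("sum")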
    """

    @classmethod
    def instance(cls):
        if not hasattr(cls, '_instance'):
            cls._instance = cls()
        return cls._instance

    def __init__(self):
        assert not hasattr(
            self.__class__,
            '_instance'), 'Please use `instance()` to get OpProtoHolder object!'
        op_protos = get_all_op_protos()
        self.op_proto_map = {}
        for proto in op_protos:
            self.op_proto_map[proto.type] = proto

    def get_op_proto(self, type):
        """
        Get OpProto by a type string.
        Args:
            type(str): The type that operator registered in C++ side.

        Returns(framework_pb2.OpProto): The OpProto

        """
        if type not in self.op_proto_map:
            raise ValueError("Operator \"%s\" has not been registered." % type)
        return self.op_proto_map[type]


class Operator(object):
    """
    Python Operator class. An Operator represents one of the built-in
    instructions in a Block. Users can use these built-in instructions to
    describe their neural network.
    """

    def __init__(self,
                 block,
                 desc,
                 type=None,
                 inputs=None,
                 outputs=None,
                 attrs=None):
        """
        Constructor.

        Notes: The constructor of operator should not be invoked directly. Use
        Block.append_op or Block.prepend_op instead.

        >>> cur_program = Program()
        >>> cur_block = cur_program.current_block()
        >>> # var1 += var2 + var3
        >>> cur_block.append_op(type="sum",
        >>>                     inputs={"X": [var1, var2, var3]},
        >>>                     outputs={"Out": [var1]})

        Args:
            block(Block): The block that contains the current operator.
            desc(core.OpDesc): The protobuf description.
            type(str): The type of operator.
            inputs(dict): The input dictionary. Key is the input parameter name.
                Value is a list of variables.
            outputs(dict): The output dictionary, which has the same format as
                           inputs.
            attrs(dict): The attributes dictionary. Key is the attribute name.
                Value is the attribute value. The attribute type should be the
                same as the type registered in C++.
        """
        self.block = block
        self.desc = desc
        self.attrs = attrs
        if len(self.desc.type()) != 0:
            return
        if type is None:
            raise ValueError(
                "`type` to initialize an Operator can not be None.")
        self.desc.set_type(type)
        proto = OpProtoHolder.instance().get_op_proto(type)

        def find_name(var_list, name):
            for var_name in var_list:
                if var_list[var_name] is not None and var_name == name:
                    return True
            return False

        if inputs is not None:
            for in_proto in proto.inputs:
                found = find_name(inputs, in_proto.name)
                assert found or in_proto.dispensable, "Input {} not found".format(
                    in_proto.name)

                if found:
                    in_args = inputs[in_proto.name]
                    if not isinstance(in_args, list):
                        in_args = [in_args]
                    if not in_proto.duplicable and len(in_args) > 1:
                        raise ValueError(
                            "Input %s expects only one input, but %d are given."
                            % (in_proto.name, len(in_args)))
                    in_arg_names = []
                    for arg in in_args:
                        if isinstance(arg, basestring):
                            in_arg_names.append(arg)
                        else:
                            in_arg_names.append(arg.name)
                    self.desc.set_input(in_proto.name, in_arg_names)
                else:
                    self.desc.set_input(in_proto.name, [])

        if outputs is not None:
            given = set()
            need = set()
            for n in outputs:
                given.add(n)
            for m in proto.outputs:
                need.add(m.name)
            if not given == need:
                raise ValueError(("Incorrect setting for output(s) of "
                                  "operator \"%s\". Need: [%s] Given: [%s]") %
                                 (type, ", ".join(str(e) for e in need),
                                  ", ".join(str(e) for e in given)))

            for out_proto in proto.outputs:
                out_args = outputs[out_proto.name]
                if not isinstance(out_args, list):
                    out_args = [out_args]
                if not out_proto.duplicable and len(out_args) > 1:
                    raise ValueError(
                        "Output %s expects only one output, but %d are given." %
                        (out_proto.name, len(out_args)))
                out_arg_names = []
                for arg in out_args:
                    out_arg_names.append(arg.name)
                    arg.op = self
                self.desc.set_output(out_proto.name, out_arg_names)

        if attrs is not None:
            if not isinstance(attrs, dict):
                raise TypeError("'attrs' should be a dict.")
            for attr in proto.attrs:
                attr_name = attr.name
                if (attr_name not in attrs) or (attrs[attr_name] is None):
                    continue
                if isinstance(attrs[attr_name], Block):
                    self.desc.set_block_attr(attr_name, attrs[attr_name].desc)
                elif isinstance(attrs[attr_name], core.BlockDesc) or \
                   isinstance(attrs[attr_name], core.ProgramDesc):
                    self.desc.set_serialized_attr(
                        attr_name, attrs[attr_name].serialize_to_string())
                else:
                    self.desc.set_attr(attr_name, attrs[attr_name])

        self.desc.check_attrs()
        no_kernel_op_set = {
            'feed', 'fetch', 'save', 'load', 'recurrent',
            'rnn_memory_helper_grad', 'conditional_block', 'while', 'send',
            'recv', 'listen_and_serv', 'parallel_do', 'save_combine',
            'load_combine', 'ncclInit'
        }
        if type not in no_kernel_op_set:
            self.desc.infer_var_type(self.block.desc)
            self.desc.infer_shape(self.block.desc)

    def to_string(self, throw_on_error):
        """
        To debug string.
        Args:
            throw_on_error(bool): raise exception when self is not initialized
                when throw_on_error is True

        Returns(str): The debug string.

        """
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.OpDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def __str__(self):
        return self.to_string(True)

    __repr__ = __str__

    @property
    def type(self):
        return self.desc.type()

    def input(self, name):
        """
        Get input arguments by the input parameter name
        Args:
            name(str): The input parameter name

        Returns(list): return the list of argument names associated with the
            specific parameter name.

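        Examples:
            >>> # sketch, assuming `op` was created with inputs={"X": [var1, var2]}
            >>> op.input("X")  # -> the input argument names, e.g. ['var1', 'var2']
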
        """
        return self.desc.input(name)

    def rename_input(self, old_name, new_name):
        self.desc.rename_input(old_name, new_name)

    def rename_output(self, old_name, new_name):
        self.desc.rename_output(old_name, new_name)

    @property
    def input_names(self):
        """
        Get all input parameter names
        Returns(list): return a list of input parameter names

        """
        return self.desc.input_names()

    @property
    def input_arg_names(self):
        return self.desc.input_arg_names()

    @property
    def output_arg_names(self):
        return self.desc.output_arg_names()

    def output(self, name):
        """
        Get output arguments by the output parameter name
        Args:
            name(str): The output parameter name

        Returns(list): return the list of argument names associated with the
            specific parameter name.

        """
        return self.desc.output(name)

    @property
    def output_names(self):
        """
        Get all output parameter names
        Returns(list): return a list of output parameter names

        """
        return self.desc.output_names()

    @property
    def idx(self):
        """
        Return the array index of current operator.
        Returns(int): The array index in block.ops array
        Raises:
            ValueError: when the operator is not found.
        """
        for i, op in enumerate(self.block.ops):
            if op == self:
                return i
        raise ValueError(
            "Can't find op itself in its block. It could be a bug of Paddle.")

    def has_attr(self, name):
        """
        Whether the operator has an attribute with the given name.
        Args:
            name(str): the attribute name

        Returns(bool): True if has this attribute.

        """
        return self.desc.has_attr(name)

    def attr_type(self, name):
        """
        Get the type of attribute by attribute name
        Args:
            name(str): the attribute name

        Returns(core.AttrType): the attribute type

        """
        return self.desc.attr_type(name)

    @property
    def attr_names(self):
        """
        Get all attribute names
        Returns(list): The list of attribute name

        """
        return self.desc.attr_names()

    def attr(self, name):
        """
        Get attribute by name
        Args:
            name(str): the attribute name

        Returns(bool|int|str|float|list): The attribute value. The return value
            can be any valid attribute type.

        """
        return self.desc.attr(name)

    def block_attr(self, name):
        """
        Get the block attribute by name
        Args:
            name(str): the attribute name

        Returns(int): the block index

        """
        return self.desc.block_attr(name)


class Block(object):
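    """One block of a Program. A Block wraps a C++ BlockDesc and keeps the
    Python-side state for it: a name -> Variable mapping in `self.vars` and
    an ordered operator list in `self.ops`.
    """
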
    def __init__(self, program, idx):
        self.desc = program.desc.block(idx)
        self.vars = dict()  # var_name --> var
        self.ops = collections.deque()  # operator list
        self.program = program
        self.removed_vars = dict()

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        To debug string.
        Args:
            throw_on_error(bool): raise exception when self is not initialized
                when throw_on_error is True
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when with_details is True

        Returns(str): The debug string.

        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        if with_details:
            re_add_indent = re.compile(r"\n(.)")
            res_str = "blocks {\n  idx: %d\n  parent_idx: %d" % (
                self.idx, self.parent_idx)
            for var in self.vars.itervalues():
                res_str += "\n  vars {\n    %s  }" % re_add_indent.sub(
                    r"\n    \1", var.to_string(throw_on_error, with_details))
            for op in self.ops:
                res_str += "\n  ops {\n    %s  }" % re_add_indent.sub(
                    r"\n    \1", op.to_string(throw_on_error))
            res_str += "\n}"
        else:
            protostr = self.desc.serialize_to_string()
            proto = framework_pb2.BlockDesc.FromString(str(protostr))
            res_str = _debug_string_(proto, throw_on_error)
        return res_str

    __repr__ = __str__

    @property
    def parent_idx(self):
        return self.desc.parent

    @property
    def forward_block_idx(self):
        return self.desc.get_forward_block_idx()

    def set_forward_block_idx(self, idx):
        self.desc.set_forward_block_idx(idx)

    @property
    def idx(self):
        return self.desc.id

    def var(self, name):
        if not isinstance(name, basestring):
            raise TypeError()
        v = self.vars.get(name, None)
        if v is None:
            raise ValueError("var %s not in this block" % name)
        return v

    def var_recursive(self, name):
        frontier = list()
        visited = set()

        frontier.append(self)

        prog = self.program

        while len(frontier) != 0:  # BFS
            cur = frontier[0]
            frontier = frontier[1:]

            if id(cur) in visited:
                continue

            if cur.has_var(name):
                return cur.var(name)

            if cur.parent_idx != -1:
                frontier.append(prog.block(cur.parent_idx))

            if cur.forward_block_idx != -1:
                frontier.append(prog.block(cur.forward_block_idx))

            visited.add(id(cur))

        raise ValueError("Var {0} is not found recursively".format(name))

    def all_parameters(self):
        return list(self.iter_parameters())

    def iter_parameters(self):
        return (item[1] for item in self.vars.iteritems()
                if isinstance(item[1], Parameter))

    def create_var(self, *args, **kwargs):
        var = Variable(block=self, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](var, self)
        return var

    def has_var(self, name):
        return name in self.vars

    def rename_var(self, name, new_name):
        """
        Rename variable in vars and ops' inputs and outputs
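
        Examples:
            >>> # illustrative; "X" must already exist in this block
            >>> block.rename_var("X", "X_renamed")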
        """
        if not self.has_var(name):
            raise ValueError("var %s is not in current block" % name)
        v = self.var(name)
        if type(v) == Parameter:
            var_type = "Parameter"
            stop_gradient = v.stop_gradient
            trainable = v.trainable
            optimize_attr = v.optimize_attr
            regularizer = v.regularizer
            gradient_clip_attr = v.gradient_clip_attr
            error_clip = v.error_clip
        elif type(v) == Variable:
            var_type = "Variable"
            error_clip = v.error_clip
            stop_gradient = v.stop_gradient
        else:
            raise ValueError("unsupported var type: %s" % type(v))

        self.desc.rename_var(name, new_name)
        # NOTE: v is destroyed by C++ after calling rename_var.
        d = self.desc.find_var(new_name)
        if var_type == "Parameter":
            var = Parameter(
                self,
                d.shape(),
                d.dtype(),
                name=new_name,
                stop_gradient=stop_gradient,
                trainable=trainable,
                optimize_attr=optimize_attr,
                regularizer=regularizer,
                gradient_clip_attr=gradient_clip_attr,
                error_clip=error_clip)
        elif var_type == "Variable":
            var = Variable(
                self,
                type=v.type,
                name=new_name,
                error_clip=error_clip,
                stop_gradient=stop_gradient)

        # rename the python side, sync_with_cpp will only add
        # new vars/ops to python side.
        self.vars[new_name] = var
        del self.vars[name]
        self.sync_with_cpp()

    def create_parameter(self, *args, **kwargs):
        global_block = self.program.global_block()
        param = Parameter(global_block, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](param, self)
        return param

    def append_op(self, *args, **kwargs):
        op_desc = self.desc.append_op()
        op = Operator(block=self, desc=op_desc, *args, **kwargs)
        self.ops.append(op)
        return op

    def delete_ops(self, ops):
        # remove from cpp
        # FIXME(typhoonzero): remove only the first occurrence.
        try:
            start = list(self.ops).index(ops[0])
            end = list(self.ops).index(ops[-1])
        except Exception, e:
            raise e
        self.desc.remove_op(start, end + 1)

    def slice_ops(self, start, end):
        return list(self.ops)[start:end]

    def prepend_op(self, *args, **kwargs):
        op_desc = self.desc.prepend_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.appendleft(op)
        return op

    def sync_with_cpp(self):
        """
        Sync with the desc on the c++ end.

        This method is used to synchronize the c++ desc instance generated by backward.
        """
        # sync variables from cpp
        for var in self.desc.all_vars():
            if not self.has_var(var.name()):
                self.create_var(name=var.name(), desc=var, type=var.type())

        # sync operators from cpp
        ops_in_cpp = []
        for op_idx in range(0, self.desc.op_size()):
            ops_in_cpp.append(self.desc.op(op_idx))

        if len(self.ops) != 0:
            first_op_in_python = self.ops[0].desc
            last_op_in_python = self.ops[len(self.ops) - 1].desc
            start_index = None
            end_index = None
            for index in range(len(ops_in_cpp)):
                if first_op_in_python == ops_in_cpp[index]:
                    start_index = index
                if last_op_in_python == ops_in_cpp[index]:
                    end_index = index
            assert start_index is not None
            assert end_index is not None
            assert start_index <= end_index
        else:
            start_index = 0
            end_index = -1

        # sync ops append to the head of cpp_ops
        for index in range((start_index - 1 - 1), -1, -1):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.appendleft(op)

        # sync ops append to the end of cpp_ops
        for index in range((end_index + 1), len(ops_in_cpp)):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.append(op)

        assert len(self.ops) == len(ops_in_cpp)
        for index in range(len(self.ops)):
            assert self.ops[index].desc == ops_in_cpp[index]

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from the other block
        Args:
            other(Block): the other block

        Returns:
            None
        """
        if not isinstance(other, Block):
            raise TypeError("copy_param_info_from should be invoked with Block")
        for p in other.iter_parameters():
            assert isinstance(p, Parameter)
            v = self.vars.get(p.name, None)
            if v is None:
                raise ValueError("copy_param_info_from should be invoked with "
                                 "same topology")
            assert isinstance(v, Variable)
            new_p = Parameter(
                block=self,
                shape=v.shape,
                dtype=v.dtype,
                type=v.type,
                lod_level=v.lod_level,
                stop_gradient=p.stop_gradient,
                trainable=p.trainable,
                optimize_attr=p.optimize_attr,
                regularizer=p.regularizer,
                gradient_clip_attr=p.gradient_clip_attr,
                error_clip=p.error_clip,
                name=v.name)
            self.vars[new_p.name] = new_p


class Program(object):
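    """The description of a whole neural network. A Program holds a list of
    Blocks (block 0 is the global block) and wraps the C++ ProgramDesc.
    """
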
    def __init__(self):
        self.desc = core.ProgramDesc()
        self.blocks = [Block(self, 0)]
        self.current_block_idx = 0
        self._seed = 0

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        To debug string.
        Args:
            throw_on_error(bool): raise exception when self is not initialized
                when throw_on_error is True
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when with_details is True

        Returns(str): The debug string.

        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        if with_details:
            res_str = ""
            for block in self.blocks:
                res_str += block.to_string(throw_on_error, with_details)
        else:
            protostr = self.desc.serialize_to_string()
            proto = framework_pb2.ProgramDesc.FromString(str(protostr))
            res_str = _debug_string_(proto, throw_on_error)
        return res_str

    def get_desc(self):
        return self.desc

    def clone(self):
        p = Program()
        p.desc = core.ProgramDesc(self.desc)
        p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
        p.sync_with_cpp()
        p.copy_param_info_from(self)
        return p

    def prune(self, targets):
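        """Prune this program to the sub-graph needed to compute `targets`.

        Illustrative usage (assuming `loss` is a Variable of this program):

        >>> pruned = program.prune([loss])
        """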
        if not isinstance(targets, list):
            targets = [targets]
        targets_idx = []
        for t in targets:
            if not isinstance(t, Operator):
                if isinstance(t, Variable):
                    t = t.op
                else:
                    raise ValueError(("All targets of prune() can only be "
                                      "Variable or Operator."))

            targets_idx.append([t.block.idx, t.idx])
        res = Program()
        res.desc = core.prune(self.desc, targets_idx)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    def inference_optimize(self):
        res = Program()
        res.desc = core.inference_optimize(self.desc)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    @staticmethod
    def parse_from_string(binary_str):
        p = Program()
        p.desc = core.ProgramDesc(binary_str)
        p.blocks = [Block(p, i) for i in xrange(p.desc.num_blocks())]
        p.sync_with_cpp()
        return p

    @property
    def random_seed(self):
        return self._seed

    @random_seed.setter
    def random_seed(self, seed):
        if not isinstance(seed, int):
            raise ValueError("Seed must be an integer.")
        self._seed = seed

    def __repr__(self):
        return str(self)

    def global_block(self):
        return self.blocks[0]

    def block(self, index):
        return self.blocks[index]

    def current_block(self):
        return self.blocks[self.current_block_idx]

    def append_backward(self, target, no_grad_set=None):
        """
        return map(param_name -> (grad_name, block_index, op_index))
        """
        assert isinstance(target, Variable)
        if no_grad_set is None:
            no_grad_set = set()
        try:
            param_to_grad_info = self.desc.append_backward(target.desc,
                                                           no_grad_set)
        except Exception as e:
            raise core.EnforceNotMet(
                str(e) + "\nCurrent protobuf is\n{0}".format(
                    self.to_string(False)))

        self.sync_with_cpp()
        return param_to_grad_info

    def create_block(self, parent_idx=None):
        new_block_idx = len(self.blocks)
        parent = self.current_block() if parent_idx is None else self.block(
            parent_idx)
        self.desc.append_block(parent.desc)
        self.current_block_idx = new_block_idx
        self.blocks.append(Block(self, self.current_block_idx))
        return self.current_block()

    def rollback(self):
        self.current_block_idx = self.current_block().parent_idx

    def sync_with_cpp(self):
        for block_idx in range(len(self.blocks), self.desc.num_blocks()):
            self.blocks.append(Block(self, block_idx))
        for block in self.blocks:
            block.sync_with_cpp()

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from other program.
        Args:
            other(Program): Other program

        Returns:
            None
        """
        if not isinstance(other, Program):
            raise TypeError("copy_param_info_from should be invoked with "
                            "Program")

        if len(self.blocks) != len(other.blocks):
            raise ValueError("copy_param_info_from should be invoked with two "
                             "program, with represent the same topology")
        self.global_block().copy_param_info_from(other.global_block())

    def list_vars(self):
        for each_block in self.blocks:
            for each_var in each_block.vars.itervalues():
                yield each_var


class Parameter(Variable):
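    """A trainable Variable. Parameters are always created as persistable and
    carry optimization metadata: trainable, optimize_attr, regularizer and
    gradient_clip_attr.
    """
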
    def __init__(self, block, shape, dtype, **kwargs):
        if shape is None or dtype is None:
            raise ValueError("Parameter must set shape and dtype")
        if len(shape) == 0:
            raise ValueError("Parameter shape cannot be empty")

        for each in shape:
            if each < 0:
                raise ValueError("Parameter shape should not be related with "
                                 "batch-size")

        Variable.__init__(
            self, block, persistable=True, shape=shape, dtype=dtype, **kwargs)
        self.trainable = kwargs.get('trainable', True)

        self.optimize_attr = kwargs.get('optimize_attr', {'learning_rate': 1.0})

        self.regularizer = kwargs.get('regularizer', None)

        self.gradient_clip_attr = kwargs.get('gradient_clip_attr', None)

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        To debug string.
        Args:
            throw_on_error(bool): raise exception when self is not initialized
                when throw_on_error is True
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when with_details is True

        Returns(str): The debug string.

        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        if with_details:
            res_str = Variable.to_string(self, throw_on_error, True)
            additional_attr = ("trainable", "optimize_attr", "regularizer",
                               "gradient_clip_attr")
            for attr_name in additional_attr:
                res_str += "%s: %s\n" % (attr_name,
                                         str(getattr(self, attr_name)))
        else:
            res_str = Variable.to_string(self, throw_on_error, False)
        return res_str

    __repr__ = __str__


# program is a global instance.
_main_program_ = Program()
_startup_program_ = Program()


def default_startup_program():
    """
    Get default startup program. In startup program, Paddle will initialize
    parameters, initialize nccl handle, etc.

    Returns:
        Program: startup program
    """
    return _startup_program_


def default_main_program():
    """
    Get default main program. The main program is used for training or testing.

    Returns:
        Program: main program
    """
    return _main_program_


def switch_main_program(program):
    """
    Switch the main program to a new program.

    Args:
        program(Program): The new main program

    Returns:
        Program: The previous main program
    """
    global _main_program_
    prev_program = _main_program_
    _main_program_ = program
    return prev_program


def switch_startup_program(program):
    """
    Switch the startup program to a new program
    Args:
        program(Program): The new startup program

    Returns:
        Program: The previous startup program
    """
    global _startup_program_
    prev_program = _startup_program_
    _startup_program_ = program
    return prev_program


@contextlib.contextmanager
def program_guard(main_program, startup_program=None):
    """
    Switch program with `with` statement

    Examples:
        >>> with program_guard(Program()):
        >>>   data = fluid.layers.data(...)
        >>>   hidden = fluid.layers.fc(...)

    Args:
        main_program(Program): New main program inside `with` statement
        startup_program(Program): New startup program inside `with` statement.
Y
Yu Yang 已提交
1193 1194 1195 1196 1197 1198 1199 1200 1201 1202 1203 1204 1205 1206 1207 1208
            None means do not change startup program.

    Returns:
        None
    """
    if not isinstance(main_program, Program):
        raise TypeError("main_program should be Program")
    main_program = switch_main_program(main_program)
    if startup_program is not None:
        if not isinstance(startup_program, Program):
            raise TypeError("startup_program should be Program")
        startup_program = switch_startup_program(startup_program)
    yield
    switch_main_program(main_program)
    if startup_program is not None:
        switch_startup_program(startup_program)


def get_var(name, program=None):
    """
    Get a variable by name from the global block of a program
    Args:
        name(str): name of the variable
        program(Program|None): program object.
             If None, default_main_program() will be used.

    Returns:
        Variable
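
    Examples:
        >>> # illustrative; "fc_0.w_0" stands for any variable name that
        >>> # exists in the program's global block
        >>> w = get_var("fc_0.w_0")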
    """
    if program is None:
        program = default_main_program()
    assert isinstance(name, str)
    assert isinstance(program, Program)

    return program.global_block().var(name)