#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
import contextlib
import re

import numpy as np

import proto.framework_pb2 as framework_pb2
from . import core

__all__ = [
    'Block',
    'Variable',
    'Program',
    'Operator',
    'default_startup_program',
    'default_main_program',
    'program_guard',
    'switch_startup_program',
    'switch_main_program',
    'get_var',
]

EMPTY_VAR_NAME = core.kEmptyVarName()
TEMP_VAR_NAME = core.kTempVarName()
GRAD_VAR_SUFFIX = core.kGradVarSuffix()
ZERO_VAR_SUFFIX = core.kZeroVarSuffix()


def grad_var_name(var_name):
    """
    Return the gradient variable name for a given variable name.
    """
    return var_name + GRAD_VAR_SUFFIX
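
# Illustrative usage (a sketch; assumes the C++-defined suffix is "@GRAD",
# the conventional value of kGradVarSuffix):
#
#   >>> grad_var_name("fc_0.w")
#   'fc_0.w@GRAD'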


def unique_name(prefix):
    """
    Generate a unique name with the given prefix

    Args:
        prefix(str): The prefix of the returned string

    Returns(str): A unique string with the prefix

    """
    uid = core.unique_integer(prefix)  # unique during whole process.
    return "_".join([prefix, str(uid)])


def convert_np_dtype_to_dtype_(np_dtype):
    """
    Convert the data type in numpy to the data type in Paddle
    Args:
        np_dtype(np.dtype): the data type in numpy

    Returns(core.DataType): the data type in Paddle

    """
    dtype = np.dtype(np_dtype)
    if dtype == np.float32:
        return core.DataType.FP32
    elif dtype == np.float64:
        return core.DataType.FP64
    elif dtype == np.float16:
        return core.DataType.FP16
    elif dtype == np.int32:
        return core.DataType.INT32
    elif dtype == np.int16:
        return core.DataType.INT16
    elif dtype == np.int64:
        return core.DataType.INT64
    elif dtype == np.bool:
        return core.DataType.BOOL
    else:
        raise ValueError("Not supported numpy dtype " + str(dtype))


def dtype_is_floating(dtype):
    """
    Check whether the data type is a floating-point type.
    Args:
        dtype(np.dtype|core.DataType): data type.
            Could be numpy format or Paddle format

    Returns(bool): True if data type is a float value

    """
    if not isinstance(dtype, core.DataType):
        dtype = convert_np_dtype_to_dtype_(dtype)

    return dtype in [core.DataType.FP16, core.DataType.FP32, core.DataType.FP64]
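
# Illustrative usage (a sketch):
#
#   >>> dtype_is_floating(np.float32)
#   True
#   >>> dtype_is_floating(core.DataType.INT32)
#   False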


def _debug_string_(proto, throw_on_error=True):
    """
    Get the debug string of a protobuf message. The message may be
    uninitialized.
    Args:
        proto(google.protobuf.message.Message): The protobuf message
        throw_on_error(bool): True if an error should be raised when the
            protobuf message is not initialized.

    Returns(str): The debug string of the protobuf message

    """
    error_fields = list()
    if not proto.IsInitialized(error_fields) and throw_on_error:
        raise ValueError("{0} are not initialized.\nThe message is {1}:\n".
                         format(error_fields, proto))
    return proto.__str__()


class Variable(object):
    """
    Python variable. Every input and output of an operator is a variable. Every
    variable belongs to a block. The variable has a name and two variables in
    different blocks could have the same name.

    There are many kinds of variables. Please reference the framework.proto for
    details.

    Notes: The constructor of Variable should not be invoked directly. Please
    use `Block.create_var` to create a variable.

    >>> cur_program = Program()
    >>> cur_block = cur_program.current_block()
    >>> new_variable = cur_block.create_var(
    >>>                    name="X", shape=[-1, 23, 48], dtype='float32')

    Args:
        block(Block): The associated block. It will be passed by
            `Block.create_var` automatically.
        type(core.VarDesc.VarType): Variable type. Please reference the
            framework.proto for details.
        shape(tuple|list|None): The shape of variable. -1 means the batch size.
            Some kinds of variable do not contain shape, just set it to None.
        dtype(np.dtype|core.DataType|str): The data type of variable.
        lod_level(int): The level of lod tensor. 0 means there is not a time
            series data.
        persistable(bool): True if the variable should be saved as check point.
            Defaults to False.
        stop_gradient(bool): True if the variable will stop to calculate
            gradients when backward. Defaults to False.
    """

    def __init__(self,
                 block,
                 type=core.VarDesc.VarType.LOD_TENSOR,
                 name=None,
                 shape=None,
                 dtype=None,
                 lod_level=None,
                 persistable=None,
                 error_clip=None,
                 stop_gradient=False,
                 **kwargs):
        self.block = block
        self.error_clip = error_clip

        if name is None:
            name = Variable._unique_var_name_()
        is_new_var = False
        self.desc = self.block.desc.find_var(name)

        if self.desc is None:
            self.desc = self.block.desc.var(name)
            is_new_var = True

        if is_new_var:
            self.desc.set_type(type)
        elif self.desc.type() != type:
            raise ValueError("Variable {0} has been created before. The "
                             "previous type is {1}; the new type is {2}. They"
                             " are not matched".format(self.name,
                                                       self.desc.type(), type))

        if shape is not None:
            if is_new_var:
                self.desc.set_shape(shape)
            else:
                old_shape = self.shape
                shape = tuple(shape)
                if shape != old_shape:
                    raise ValueError(
                        "Variable {0} has been created before. the previous "
                        "shape is {1}; the new shape is {2}. They are not "
                        "matched.".format(self.name, old_shape, shape))
        if dtype is not None:
            if not isinstance(dtype, core.DataType):
                dtype = convert_np_dtype_to_dtype_(dtype)
            if is_new_var:
                self.desc.set_dtype(dtype)
            else:
                old_dtype = self.dtype
                if dtype != old_dtype:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous data type is {1}; the new "
                                     "data type is {2}. They are not "
                                     "matched.".format(self.name, old_dtype,
                                                       dtype))

        if lod_level is not None:
            if is_new_var:
                self.desc.set_lod_level(lod_level)
            else:
                if lod_level != self.lod_level:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous lod_level is {1}; the new "
                                     "lod_level is {2}. They are not "
                                     "matched".format(self.name, self.lod_level,
                                                      lod_level))
        if persistable is not None:
            if is_new_var:
                self.desc.set_persistable(persistable)
            else:
                if persistable != self.persistable:
                    raise ValueError(
                        "Variable {0} has been created before."
                        "The previous persistable is {1}; the new "
                        "persistable is {2}. They are not matched".format(
                            self.name, self.persistable, persistable))

        self.block.vars[name] = self
        self.op = None
        self.stop_gradient = stop_gradient

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        Get debug string.

        Args:
            throw_on_error(bool): True if an exception should be raised when
                self is not initialized.
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when with_details is True

        Returns(str): The debug string.

        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.VarDesc.FromString(str(protostr))
        res_str = _debug_string_(proto, throw_on_error)
        if with_details:
            additional_attr = ("error_clip", "stop_gradient")
            for attr_name in additional_attr:
                res_str += "%s: %s\n" % (attr_name,
                                         str(getattr(self, attr_name)))
        return res_str

    __repr__ = __str__

    def set_desc(self, input):
        self.desc = input

    @property
    def persistable(self):
        return self.desc.persistable()

    @persistable.setter
    def persistable(self, p):
        self.desc.set_persistable(p)

    @property
    def name(self):
        return self.desc.name()

    @name.setter
    def name(self, new_name):
        self.desc.set_name(new_name)

    @property
    def shape(self):
        # convert to tuple, make it as same as numpy API.
        return tuple(self.desc.shape())

    @property
    def dtype(self):
        return self.desc.dtype()

    @property
    def lod_level(self):
        return self.desc.lod_level()

    @property
    def type(self):
        return self.desc.type()

    @staticmethod
    def _unique_var_name_():
        prefix = "_generated_var"
        uid = core.unique_integer(prefix)  # unique during whole process.
        return "_".join([prefix, str(uid)])

    def set_error_clip(self, error_clip):
        self.error_clip = error_clip


def get_all_op_protos():
    """
    Get all registered op protos from the PaddlePaddle C++ side.

    Returns(list): list of OpProto

    """
    protostrs = core.get_all_op_protos()
    ret_values = []
    for pbstr in protostrs:
        op_proto = framework_pb2.OpProto.FromString(str(pbstr))
        ret_values.append(op_proto)
    return ret_values


class OpProtoHolder(object):
    """
    A global singleton that holds all OpProtos from C++ as a map
    """

    @classmethod
    def instance(cls):
        if not hasattr(cls, '_instance'):
            cls._instance = cls()
        return cls._instance

    def __init__(self):
        assert not hasattr(
            self.__class__,
            '_instance'), 'Please use `instance()` to get OpProtoHolder object!'
        op_protos = get_all_op_protos()
        self.op_proto_map = {}
        for proto in op_protos:
            self.op_proto_map[proto.type] = proto

    def get_op_proto(self, type):
        """
        Get OpProto by a type string.
        Args:
            type(str): The type that operator registered in C++ side.

        Returns(framework_pb2.OpProto): The OpProto

        """
        if type not in self.op_proto_map:
            raise ValueError("Operator \"%s\" has not been registered." % type)
        return self.op_proto_map[type]
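
    # Illustrative usage (a sketch; assumes a "sum" operator is registered
    # on the C++ side, as in a standard Paddle build):
    #
    #   >>> sum_proto = OpProtoHolder.instance().get_op_proto("sum")
    #   >>> sum_proto.type      # 'sum'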


class Operator(object):
    """
    Python Operator class. An Operator represents one of the built-in
    instructions in a Block. Users compose these built-in instructions to
    describe their neural network.
    """

    def __init__(self,
                 block,
                 desc,
                 type=None,
                 inputs=None,
                 outputs=None,
                 attrs=None):
        """
        Constructor.

        Notes: The constructor of operator should not be invoked directly. Use
        Block.append_op or Block.prepend_op instead.

        >>> cur_program = Program()
        >>> cur_block = cur_program.current_block()
        >>> # var1 += var2 + var3
        >>> cur_block.append_op(type="sum",
        >>>                     inputs={"X": [var1, var2, var3]},
        >>>                     outputs={"Out": [var1]})

        Args:
            block(Block): The block that contains the current operator.
            desc(core.OpDesc): The protobuf description.
            type(str): The type of operator.
            inputs(dict): The input dictionary. Key is the input parameter name.
                Value is a list of variables.
            outputs(dict): The output dictionary which has the same format with
                           inputs.
            attrs(dict): The attributes dictionary. Key is attribute name. Value
                is the attribute value. The attribute type should be the same
                as the type registered in C++.
        """
        self.block = block
        self.desc = desc
        # for cloning a new operator
        self.inputs = inputs
        self.outputs = outputs
        self.attrs = attrs
        if len(self.desc.type()) != 0:
            return
        if type is None:
            raise ValueError(
                "`type` to initilized an Operator can not be None.")
        self.desc.set_type(type)
        proto = OpProtoHolder.instance().get_op_proto(type)

        def find_name(var_list, name):
            for var_name in var_list:
                if var_list[var_name] is not None and var_name == name:
                    return True
            return False

        if inputs is not None:
            for in_proto in proto.inputs:
                found = find_name(inputs, in_proto.name)
                assert found or in_proto.dispensable, "Input {} not found".format(
                    in_proto.name)

                if found:
                    in_args = inputs[in_proto.name]
                    if not isinstance(in_args, list):
                        in_args = [in_args]
                    if not in_proto.duplicable and len(in_args) > 1:
                        raise ValueError(
                            "Input %s expects only one input, but %d are given."
                            % (in_proto.name, len(in_args)))
                    in_arg_names = []
                    for arg in in_args:
                        if isinstance(arg, basestring):
                            in_arg_names.append(arg)
                        else:
                            in_arg_names.append(arg.name)
                    self.desc.set_input(in_proto.name, in_arg_names)
                else:
                    self.desc.set_input(in_proto.name, [])

        if outputs is not None:
            given = set()
            need = set()
            for n in outputs:
                given.add(n)
            for m in proto.outputs:
                need.add(m.name)
            if not given == need:
                raise ValueError(("Incorrect setting for output(s) of "
                                  "operator \"%s\". Need: [%s] Given: [%s]") %
                                 (type, ", ".join(str(e) for e in need),
                                  ", ".join(str(e) for e in given)))

            for out_proto in proto.outputs:
                out_args = outputs[out_proto.name]
                if not isinstance(out_args, list):
                    out_args = [out_args]
                if not out_proto.duplicable and len(out_args) > 1:
                    raise ValueError(
                        "Output %s expects only one output, but %d are given." %
                        (out_proto.name, len(out_args)))
                out_arg_names = []
                for arg in out_args:
                    out_arg_names.append(arg.name)
                    arg.op = self
                self.desc.set_output(out_proto.name, out_arg_names)

        if attrs is not None:
            if not isinstance(attrs, dict):
                raise TypeError("'attrs' should be a dict.")
            for attr in proto.attrs:
                attr_name = attr.name
                if (attr_name not in attrs) or (attrs[attr_name] is None):
                    continue
                if isinstance(attrs[attr_name], Block):
                    self.desc.set_block_attr(attr_name, attrs[attr_name].desc)
                elif isinstance(attrs[attr_name], core.BlockDesc) or \
                   isinstance(attrs[attr_name], core.ProgramDesc):
                    self.desc.set_serialized_attr(
                        attr_name, attrs[attr_name].serialize_to_string())
                else:
                    self.desc.set_attr(attr_name, attrs[attr_name])

        self.desc.check_attrs()
        no_kernel_op_set = {
            'feed', 'fetch', 'save', 'load', 'recurrent',
            'rnn_memory_helper_grad', 'conditional_block', 'while', 'send',
            'recv', 'listen_and_serv', 'parallel_do', 'save_combine',
            'load_combine'
        }
        if type not in no_kernel_op_set:
            self.desc.infer_var_type(self.block.desc)
            self.desc.infer_shape(self.block.desc)

    def to_string(self, throw_on_error):
        """
        Get debug string.
        Args:
            throw_on_error(bool): True if an exception should be raised when
                self is not initialized.

        Returns(str): The debug string.

        """
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.OpDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def __str__(self):
        return self.to_string(True)

    __repr__ = __str__

    @property
    def type(self):
        return self.desc.type()

    def input(self, name):
        """
        Get input arguments by the input parameter name
        Args:
            name(str): The input parameter name

        Returns(list): return the list of argument names associated with the
            specific parameter name.

        """
        return self.desc.input(name)

    def rename_input(self, old_name, new_name):
        self.desc.rename_input(old_name, new_name)

    def rename_output(self, old_name, new_name):
        self.desc.rename_output(old_name, new_name)

    @property
    def input_names(self):
        """
        Get all input parameter names
        Returns(list): return a list of input parameter names

        """
        return self.desc.input_names()

    @property
    def input_arg_names(self):
        return self.desc.input_arg_names()

    @property
    def output_arg_names(self):
        return self.desc.output_arg_names()

    def output(self, name):
        """
        Get output arguments by the output parameter name
        Args:
            name(str): The output parameter name

        Returns(list): return the list of argument names associated with the
            specific parameter name.

        """
        return self.desc.output(name)

    @property
    def output_names(self):
        """
        Get all output parameter names
        Returns(list): return a list of output parameter names

        """
        return self.desc.output_names()

    @property
    def idx(self):
        """
        Return the array index of current operator.
        Returns(int): The array index in block.ops array
        Raises:
            ValueError: when the operator is not found.
        """
        for i, op in enumerate(self.block.ops):
            if op == self:
                return i
        raise ValueError(
            "Can't find op itself in it's block. It could be a bug of Paddle.")

    def has_attr(self, name):
        """
        Whether the operator has an attribute with the given name.
        Args:
            name(str): the attribute name

        Returns(bool): True if has this attribute.

        """
        return self.desc.has_attr(name)

    def attr_type(self, name):
        """
        Get the type of attribute by attribute name
        Args:
            name(str): the attribute name

        Returns(core.AttrType): the attribute type

        """
        return self.desc.attr_type(name)

    @property
    def attr_names(self):
        """
        Get all attribute names
        Returns(list): The list of attribute name

        """
        return self.desc.attr_names()

    def attr(self, name):
        """
        Get attribute by name
        Args:
            name(str): the attribute name

        Returns(bool|int|str|float|list): The attribute value. The return value
            can be any valid attribute type.

        """
        return self.desc.attr(name)

    def block_attr(self, name):
        """
        Get the block attribute by name
        Args:
            name(str): the attribute name

        Returns(int): the block index

        """
        return self.desc.block_attr(name)
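
    # Illustrative introspection (a sketch; assumes `cur_block`, `var1` and
    # `var2` were set up as in the constructor's example above):
    #
    #   >>> op = cur_block.append_op(type="sum",
    #   >>>                         inputs={"X": [var1, var2]},
    #   >>>                         outputs={"Out": [var1]})
    #   >>> op.type          # 'sum'
    #   >>> op.input("X")    # argument names bound to parameter "X"
    #   >>> op.attr_names    # all attribute names registered for this op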


class Block(object):
    def __init__(self, program, idx):
        self.desc = program.desc.block(idx)
        self.vars = dict()  # var_name --> var
        self.ops = collections.deque()  # operator list
        self.program = program
        self.removed_vars = dict()

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        Get debug string.
        Args:
            throw_on_error(bool): True if an exception should be raised when
                self is not initialized.
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when with_details is True

        Returns(str): The debug string.

        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        if with_details:
            re_add_indent = re.compile(r"\n(.)")
            res_str = "blocks {\n  idx: %d\n  parent_idx: %d" % (
                self.idx, self.parent_idx)
            for var in self.vars.itervalues():
                res_str += "\n  vars {\n    %s  }" % re_add_indent.sub(
                    r"\n    \1", var.to_string(throw_on_error, with_details))
            for op in self.ops:
                res_str += "\n  ops {\n    %s  }" % re_add_indent.sub(
                    r"\n    \1", op.to_string(throw_on_error))
            res_str += "\n}"
        else:
            protostr = self.desc.serialize_to_string()
            proto = framework_pb2.BlockDesc.FromString(str(protostr))
            res_str = _debug_string_(proto, throw_on_error)
        return res_str

    __repr__ = __str__

    @property
    def parent_idx(self):
        return self.desc.parent

    @property
    def idx(self):
        return self.desc.id

    def var(self, name):
        if not isinstance(name, basestring):
            raise TypeError()
        v = self.vars.get(name, None)
        if v is None:
            raise ValueError("var %s not in this block" % name)
        return v

    def var_recursive(self, name):
        if self.has_var(name):
            return self.var(name)
        else:
            if self.idx == 0:
                raise ValueError("var %s is not in block(%d) nor its parents." %
                                 name, self.idx)
            else:
                parent_block = self.program.block(self.parent_idx)
                return parent_block.var_recursive(name)

    def all_parameters(self):
        return list(self.iter_parameters())

    def iter_parameters(self):
        return (item[1] for item in self.vars.iteritems()
                if isinstance(item[1], Parameter))
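
    # Illustrative usage (a sketch; Parameters only exist after layers that
    # create them, e.g. a fc layer, have been added to the program):
    #
    #   >>> for p in prog.global_block().iter_parameters():
    #   >>>     print p.name, p.shape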

    def create_var(self, *args, **kwargs):
        var = Variable(self, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](var, self)
        return var

    def has_var(self, name):
        return name in self.vars

    def rename_var(self, name, new_name):
        """
        Rename variable in vars and ops' inputs and outputs
        """
        if not self.has_var(name):
            raise ValueError("var %s is not in current" % name)
        v = self.var(name)
        stop_gradient = None
        trainable = None
        optimize_attr = None
        regularizer = None
        gradient_clip_attr = None
        error_clip = None
        if type(v) == Parameter:
            stop_gradient = v.stop_gradient
            trainable = v.trainable
            optimize_attr = v.optimize_attr
            regularizer = v.regularizer
            gradient_clip_attr = v.gradient_clip_attr
            error_clip = v.error_clip
        elif type(v) == Variable:
            error_clip = v.error_clip
            stop_gradient = v.stop_gradient
        else:
            raise ValueError("unsupported var type: %s", type(v))

        self.desc.rename_var(name, new_name)
        d = self.desc.find_var(new_name)
        var = None
        if type(v) == Parameter:
            var = Parameter(
                self,
                d.shape(),
                d.dtype(),
                name=new_name,
                stop_gradient=stop_gradient,
                trainable=trainable,
                optimize_attr=optimize_attr,
                regularizer=regularizer,
                gradient_clip_attr=gradient_clip_attr,
                error_clip=error_clip)
        elif type(v) == Variable:
            var = Variable(
                self,
                name=new_name,
                error_clip=error_clip,
                stop_gradient=stop_gradient)

        # rename the python side, sync_with_cpp will only add
        # new vars/ops to python side.
        self.vars[new_name] = var
        del self.vars[name]
        self.sync_with_cpp()

    def create_parameter(self, *args, **kwargs):
        global_block = self.program.global_block()
        param = Parameter(global_block, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](param, self)
        return param

    def append_op(self, *args, **kwargs):
        op_desc = self.desc.append_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.append(op)
        return op

    def delete_ops(self, ops):
        # remove from cpp
        # FIXME(typhoonzero): remove only the first occurrence.
        try:
            start = list(self.ops).index(ops[0])
            end = list(self.ops).index(ops[-1])
        except Exception as e:
            raise e
        self.desc.remove_op(start, end + 1)

    def slice_ops(self, start, end):
        return list(self.ops)[start:end]

    def prepend_op(self, *args, **kwargs):
        op_desc = self.desc.prepend_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.appendleft(op)
        return op

    def sync_with_cpp(self):
        # sync variables from cpp
        for var in self.desc.all_vars():
            if not self.has_var(var.name()):
                self.create_var(name=var.name(), desc=var, type=var.type())

        # sync operators from cpp
        ops_in_cpp = []
        for op_idx in range(0, self.desc.op_size()):
            ops_in_cpp.append(self.desc.op(op_idx))

        if len(self.ops) != 0:
            first_op_in_python = self.ops[0].desc
            last_op_in_python = self.ops[len(self.ops) - 1].desc
            start_index = None
            end_index = None
            for index in range(len(ops_in_cpp)):
                if first_op_in_python == ops_in_cpp[index]:
                    start_index = index
                if last_op_in_python == ops_in_cpp[index]:
                    end_index = index
            assert start_index is not None
            assert end_index is not None
            assert start_index <= end_index
        else:
            start_index = 0
            end_index = -1

        # sync ops prepended to the head on the C++ side
        for index in range((start_index - 1), -1, -1):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.appendleft(op)

        # sync ops appended to the end on the C++ side
        for index in range((end_index + 1), len(ops_in_cpp)):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.append(op)

        assert len(self.ops) == len(ops_in_cpp)
        for index in range(len(self.ops)):
            assert self.ops[index].desc == ops_in_cpp[index]

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from another block
        Args:
872
            other(Block): other block
873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895

        Returns:
            None
        """
        if not isinstance(other, Block):
            raise TypeError("copy_param_info_from should be invoked with Block")
        for p in other.iter_parameters():
            assert isinstance(p, Parameter)
            v = self.vars.get(p.name, None)
            if v is None:
                raise ValueError("copy_param_info_from should be invoked with "
                                 "same topology")
            assert isinstance(v, Variable)
            new_p = Parameter(
                block=self,
                shape=v.shape,
                dtype=v.dtype,
                type=v.type,
                lod_level=v.lod_level,
                stop_gradient=p.stop_gradient,
                trainable=p.trainable,
                optimize_attr=p.optimize_attr,
                regularizer=p.regularizer,
                gradient_clip_attr=p.gradient_clip_attr,
                error_clip=p.error_clip,
                name=v.name)
            self.vars[new_p.name] = new_p


class Program(object):
    def __init__(self):
        self.desc = core.ProgramDesc()
        self.blocks = [Block(self, 0)]
        self.current_block_idx = 0
        self._seed = 0

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        Get debug string.
        Args:
            throw_on_error(bool): True if an exception should be raised when
                self is not initialized.
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when with_details is True

        Returns(str): The debug string.

        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        if with_details:
            res_str = ""
            for block in self.blocks:
                res_str += block.to_string(throw_on_error, with_details)
        else:
            protostr = self.desc.serialize_to_string()
            proto = framework_pb2.ProgramDesc.FromString(str(protostr))
            res_str = _debug_string_(proto, throw_on_error)
        return res_str

    def get_desc(self):
        return self.desc

    def clone(self):
        p = Program()
        p.desc = core.ProgramDesc(self.desc)
        p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
        p.sync_with_cpp()
        p.copy_param_info_from(self)
        return p
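
    # Illustrative usage (a sketch): clone() snapshots the current program,
    # e.g. to derive a test program before more ops are appended.
    #
    #   >>> test_program = default_main_program().clone()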

    def prune(self, targets):
        if not isinstance(targets, list):
            targets = [targets]
        targets_idx = []
        for t in targets:
            if not isinstance(t, Operator):
                if isinstance(t, Variable):
                    t = t.op
                else:
                    raise ValueError(("All targets of prune() can only be "
                                      "Variable or Operator."))

            targets_idx.append([t.block.idx, t.idx])
        res = Program()
        res.desc = core.prune(self.desc, targets_idx)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    def inference_optimize(self):
        res = Program()
        res.desc = core.inference_optimize(self.desc)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    @staticmethod
    def parse_from_string(binary_str):
        p = Program()
        p.desc = core.ProgramDesc(binary_str)
        p.blocks = [Block(p, i) for i in xrange(p.desc.num_blocks())]
        p.sync_with_cpp()
        return p
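
    # Illustrative round trip (a sketch; `prog` is any existing Program):
    #
    #   >>> binary_str = prog.desc.serialize_to_string()
    #   >>> prog_copy = Program.parse_from_string(binary_str)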

    @property
    def random_seed(self):
        return self._seed

    @random_seed.setter
    def random_seed(self, seed):
        if not isinstance(seed, int):
            raise ValueError("Seed must be a integer.")
        self._seed = seed
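
    # Illustrative usage (a sketch): set the seed before building ops so
    # that random initializers which consult the program seed are
    # reproducible.
    #
    #   >>> prog = Program()
    #   >>> prog.random_seed = 90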

    def __repr__(self):
        return str(self)

    def global_block(self):
        return self.blocks[0]

    def block(self, index):
        return self.blocks[index]

    def current_block(self):
        return self.blocks[self.current_block_idx]

    def append_backward(self, target, no_grad_set=None):
        """
        Return a map of param_name -> (grad_name, block_index, op_index).
        """
        assert isinstance(target, Variable)
        if no_grad_set is None:
            no_grad_set = set()
        try:
            param_to_grad_info = self.desc.append_backward(target.desc,
                                                           no_grad_set)
        except Exception as e:
            raise core.EnforceNotMet(
                str(e) + "\nCurrent protobuf is\n{0}".format(
                    self.to_string(False)))

        self.sync_with_cpp()
        return param_to_grad_info
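
    # Illustrative usage (a sketch; assumes `loss` is a scalar Variable
    # already built in this program):
    #
    #   >>> param_grad_map = prog.append_backward(loss)
    #   >>> # maps param_name -> (grad_name, block_index, op_index)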

    def create_block(self, parent_idx=None):
        new_block_idx = len(self.blocks)
        parent = self.current_block() if parent_idx is None else self.block(
            parent_idx)
        self.desc.append_block(parent.desc)
        self.current_block_idx = new_block_idx
        self.blocks.append(Block(self, self.current_block_idx))
        return self.current_block()

    def rollback(self):
        self.current_block_idx = self.current_block().parent_idx

    def sync_with_cpp(self):
        for block_idx in range(len(self.blocks), self.desc.num_blocks()):
            self.blocks.append(Block(self, block_idx))
        for block in self.blocks:
            block.sync_with_cpp()

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from another program.
        Args:
            other(Program): Other program

        Returns:
            None
        """
        if not isinstance(other, Program):
            raise TypeError("copy_param_info_from should be invoked with "
                            "Program")

        if len(self.blocks) != len(other.blocks):
            raise ValueError("copy_param_info_from should be invoked with two "
                             "program, with represent the same topology")
        self.global_block().copy_param_info_from(other.global_block())

    def list_vars(self):
        for each_block in self.blocks:
            for each_var in each_block.vars.itervalues():
                yield each_var


class Parameter(Variable):
    def __init__(self, block, shape, dtype, **kwargs):
        if shape is None or dtype is None:
            raise ValueError("Parameter must set shape and dtype")
        if len(shape) == 0:
            raise ValueError("Parameter shape cannot be empty")

        for each in shape:
            if each < 0:
                raise ValueError("Parameter shape should not be related with "
                                 "batch-size")

        Variable.__init__(
            self, block, persistable=True, shape=shape, dtype=dtype, **kwargs)
        self.trainable = kwargs.get('trainable', True)

        self.optimize_attr = kwargs.get('optimize_attr', {'learning_rate': 1.0})

        self.regularizer = kwargs.get('regularizer', None)

        self.gradient_clip_attr = kwargs.get('gradient_clip_attr', None)

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        Get debug string.
        Args:
            throw_on_error(bool): True if an exception should be raised when
                self is not initialized.
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when with_details is True

        Returns(str): The debug string.

        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        if with_details:
            res_str = Variable.to_string(self, throw_on_error, True)
            additional_attr = ("trainable", "optimize_attr", "regularizer",
                               "gradient_clip_attr")
            for attr_name in additional_attr:
                res_str += "%s: %s\n" % (attr_name,
                                         str(getattr(self, attr_name)))
        else:
            res_str = Variable.to_string(self, throw_on_error, False)
        return res_str

    __repr__ = __str__


# program is a global instance.
_main_program_ = Program()
_startup_program_ = Program()


def default_startup_program():
    """
    Get default startup program. In startup program, Paddle will initialize
    parameters, initialize nccl handle, etc.

    Returns:
        Program: startup program
    """
    return _startup_program_


def default_main_program():
    """
    Get default main program. The main program is used for training or testing.

    Returns:
        Program: main program
    """
    return _main_program_


def switch_main_program(program):
    """
    Switch the main program to a new program.

    Args:
        program(Program): The new main program

    Returns:
        Program: The previous main program
    """
    global _main_program_
    prev_program = _main_program_
    _main_program_ = program
    return prev_program


def switch_startup_program(program):
    """
    Switch the startup program to a new program
    Args:
        program(Program): The new startup program

    Returns:
        Program: The previous startup program
    """
    global _startup_program_
    prev_program = _startup_program_
    _startup_program_ = program
    return prev_program


@contextlib.contextmanager
def program_guard(main_program, startup_program=None):
    """
    Switch program with `with` statement

    Examples:
        >>> with program_guard(Program()):
        >>>   data = fluid.layers.data(...)
        >>>   hidden = fluid.layers.fc(...)

    Args:
        main_program(Program): New main program inside `with` statement
        startup_program(Program): New startup program inside `with` statement.
            None means do not change startup program.

    Returns:
        None
    """
    if not isinstance(main_program, Program):
        raise TypeError("main_program should be Program")
    main_program = switch_main_program(main_program)
    if startup_program is not None:
        if not isinstance(startup_program, Program):
            raise TypeError("startup_program should be Program")
        startup_program = switch_startup_program(startup_program)
    yield
    switch_main_program(main_program)
    if startup_program is not None:
        switch_startup_program(startup_program)


def get_var(name, program=None):
    """
    Get a variable by name from the global block of a program
    Args:
        name(str): name of the variable
        program(Program|None): program object.
             If None, default_main_program() will be used.

    Returns:
        Variable
    """
    if program is None:
        program = default_main_program()
    assert isinstance(name, str)
    assert isinstance(program, Program)

    return program.global_block().var(name)
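
# Illustrative usage (a sketch; "fc_0.w_0" is a hypothetical variable name):
#
#   >>> w = get_var("fc_0.w_0")              # from default_main_program()
#   >>> w = get_var("fc_0.w_0", my_program)  # or from an explicit Program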