#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
import contextlib
import re

import numpy as np

import proto.framework_pb2 as framework_pb2
from . import core

__all__ = [
    'Block',
    'Variable',
    'Program',
    'Operator',
    'default_startup_program',
    'default_main_program',
    'program_guard',
    'switch_startup_program',
    'switch_main_program',
    'get_var',
]

EMPTY_VAR_NAME = core.kEmptyVarName()
TEMP_VAR_NAME = core.kTempVarName()
GRAD_VAR_SUFFIX = core.kGradVarSuffix()
ZERO_VAR_SUFFIX = core.kZeroVarSuffix()


def grad_var_name(var_name):
    """
    return gradient name for a certain var name
    """
    return var_name + GRAD_VAR_SUFFIX

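# A minimal usage sketch for grad_var_name. The exact suffix comes from the
# C++ core; "@GRAD" below assumes the default kGradVarSuffix.
#
#   >>> grad_var_name("fc_0.w")
#   'fc_0.w@GRAD'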

def unique_name(prefix):
    """
    Generate a unique name with the given prefix.

    Args:
        prefix(str): The prefix of the returned string.

    Returns(str): A unique string with the prefix.

    """
    uid = core.unique_integer(prefix)  # unique during whole process.
    return "_".join([prefix, str(uid)])

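# A minimal usage sketch for unique_name. The integer part is a process-wide
# counter, so the exact value depends on how many names with this prefix were
# generated before.
#
#   >>> unique_name("fc")
#   'fc_0'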

def convert_np_dtype_to_dtype_(np_dtype):
    """
    Convert a numpy data type to the corresponding Paddle data type.

    Args:
        np_dtype(np.dtype): the data type in numpy

    Returns(core.VarDesc.VarType): the data type in Paddle

    """
    dtype = np.dtype(np_dtype)
    if dtype == np.float32:
        return core.VarDesc.VarType.FP32
    elif dtype == np.float64:
        return core.VarDesc.VarType.FP64
    elif dtype == np.float16:
        return core.VarDesc.VarType.FP16
    elif dtype == np.int32:
        return core.VarDesc.VarType.INT32
    elif dtype == np.int16:
        return core.VarDesc.VarType.INT16
    elif dtype == np.int64:
        return core.VarDesc.VarType.INT64
    elif dtype == np.bool:
        return core.VarDesc.VarType.BOOL
    else:
        raise ValueError("Not supported numpy dtype " + str(dtype))


def dtype_is_floating(dtype):
    """
    Check whether the given data type is a floating-point type.

    Args:
        dtype(np.dtype|core.VarDesc.VarType): data type.
            Could be numpy format or Paddle format.

    Returns(bool): True if the data type is floating-point.

    """
    if not isinstance(dtype, core.VarDesc.VarType):
        dtype = convert_np_dtype_to_dtype_(dtype)

    return dtype in [
        core.VarDesc.VarType.FP16, core.VarDesc.VarType.FP32,
        core.VarDesc.VarType.FP64
    ]

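# A minimal sketch of the two dtype helpers above:
#
#   >>> convert_np_dtype_to_dtype_(np.float32)  # -> core.VarDesc.VarType.FP32
#   >>> dtype_is_floating('float32')
#   True
#   >>> dtype_is_floating(np.int64)
#   False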

def _debug_string_(proto, throw_on_error=True):
    """
    Get the debug string of a protobuf message. The message may be
    uninitialized.

    Args:
        proto(google.protobuf.message.Message): The protobuf message.
        throw_on_error(bool): True if an error should be raised when the
            protobuf message is not initialized.

    Returns(str): The debug string of the protobuf message.

    """
    error_fields = list()
    if not proto.IsInitialized(error_fields) and throw_on_error:
        raise ValueError("{0} are not initialized.\nThe message is {1}:\n".
                         format(error_fields, proto))
    return proto.__str__()


class Variable(object):
    """
    Python variable. Every input and output of an operator is a variable. Every
    variable belongs to a block. The variable has a name, and two variables in
    different blocks may share the same name.

    There are many kinds of variables. Please refer to framework.proto for
    details.

    Notes: The constructor of Variable should not be invoked directly. Please
    use `Block.create_var` to create a variable.

    >>> cur_program = Program()
    >>> cur_block = cur_program.current_block()
    >>> new_variable = cur_block.create_var(
    >>>                    name="X", shape=[-1, 23, 48], dtype='float32')

    Args:
        block(Block): The associated block. It will be passed by
            `Block.create_var` automatically.
        type(core.VarDesc.VarType): Variable type. Please refer to
            framework.proto for details.
        shape(tuple|list|None): The shape of the variable. -1 means the batch
            size. Some kinds of variables do not contain shape; just set it to
            None.
        dtype(np.dtype|core.VarDesc.VarType|str): The data type of the variable.
        lod_level(int): The level of the LoD tensor. 0 means the variable does
            not contain time-series data.
        persistable(bool): True if the variable should be saved as a
            checkpoint. Defaults to False.
        stop_gradient(bool): True if the variable stops computing gradients
            during the backward pass. Defaults to False.
    """

    def __init__(self,
                 block,
                 type=core.VarDesc.VarType.LOD_TENSOR,
                 name=None,
                 shape=None,
                 dtype=None,
                 lod_level=None,
                 persistable=None,
                 error_clip=None,
                 stop_gradient=False,
                 **kwargs):
        self.block = block
        self.error_clip = error_clip

        if name is None:
            name = Variable._unique_var_name_()
        is_new_var = False
        self.desc = self.block.desc.find_var(name)

        if self.desc is None:
            self.desc = self.block.desc.var(name)
            is_new_var = True

        if is_new_var:
            self.desc.set_type(type)
        elif self.desc.type() != type:
            raise ValueError("Variable {0} has been created before. The "
                             "previous type is {1}; the new type is {2}. They"
                             " are not matched".format(self.name,
                                                       self.desc.type(), type))

        if shape is not None:
            if is_new_var:
                self.desc.set_shape(shape)
            else:
                old_shape = self.shape
                shape = tuple(shape)
                if shape != old_shape:
                    raise ValueError(
                        "Variable {0} has been created before. the previous "
                        "shape is {1}; the new shape is {2}. They are not "
                        "matched.".format(self.name, old_shape, shape))
        if dtype is not None:
            if not isinstance(dtype, core.VarDesc.VarType):
                dtype = convert_np_dtype_to_dtype_(dtype)
            if is_new_var:
                self.desc.set_dtype(dtype)
            else:
                old_dtype = self.dtype
                if dtype != old_dtype:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous data type is {1}; the new "
                                     "data type is {2}. They are not "
                                     "matched.".format(self.name, old_dtype,
                                                       dtype))

        if lod_level is not None:
            if is_new_var:
                self.desc.set_lod_level(lod_level)
            else:
                if lod_level != self.lod_level:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous lod_level is {1}; the new "
                                     "lod_level is {2}. They are not "
                                     "matched".format(self.name, self.lod_level,
                                                      lod_level))
        if persistable is not None:
            if is_new_var:
                self.desc.set_persistable(persistable)
            else:
                if persistable != self.persistable:
                    raise ValueError(
                        "Variable {0} has been created before."
                        "The previous persistable is {1}; the new "
                        "persistable is {2}. They are not matched".format(
                            self.name, self.persistable, persistable))

        self.block.vars[name] = self
        self.op = None
        self.stop_gradient = stop_gradient

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        Get the debug string.

        Args:
            throw_on_error(bool): True if an exception should be raised when
                self is not initialized.
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when
                with_details is True

        Returns(str): The debug string.

        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.VarDesc.FromString(str(protostr))
        res_str = _debug_string_(proto, throw_on_error)
        if with_details:
            additional_attr = ("error_clip", "stop_gradient")
            for attr_name in additional_attr:
                res_str += "%s: %s\n" % (attr_name,
                                         str(getattr(self, attr_name)))
        return res_str

    __repr__ = __str__

    def set_desc(self, input):
        self.desc = input

    @property
    def persistable(self):
        return self.desc.persistable()

    @persistable.setter
    def persistable(self, p):
        self.desc.set_persistable(p)

    @property
    def name(self):
        return self.desc.name()

    @property
    def shape(self):
        # convert to tuple, make it the same as the numpy API.
        return tuple(self.desc.shape())

    @property
    def dtype(self):
        return self.desc.dtype()

    @property
    def lod_level(self):
        return self.desc.lod_level()

    @property
    def type(self):
        return self.desc.type()

    @staticmethod
    def _unique_var_name_():
        prefix = "_generated_var"
        uid = core.unique_integer(prefix)  # unique during whole process.
        return "_".join([prefix, str(uid)])

    def set_error_clip(self, error_clip):
        self.error_clip = error_clip


def get_all_op_protos():
    """
    Get all registered op proto from PaddlePaddle C++ end.

    Returns(list): list of OpProto

    """
    protostrs = core.get_all_op_protos()
    ret_values = []
    for pbstr in protostrs:
        op_proto = framework_pb2.OpProto.FromString(str(pbstr))
        ret_values.append(op_proto)
    return ret_values


class OpProtoHolder(object):
    """
    A global variable to hold all OpProtos from C++ as a map
    """

    @classmethod
    def instance(cls):
        if not hasattr(cls, '_instance'):
            cls._instance = cls()
        return cls._instance

    def __init__(self):
        assert not hasattr(
            self.__class__,
            '_instance'), 'Please use `instance()` to get OpProtoHolder object!'
        op_protos = get_all_op_protos()
        self.op_proto_map = {}
        for proto in op_protos:
            self.op_proto_map[proto.type] = proto

    def get_op_proto(self, type):
        """
        Get OpProto by a type string.
        Args:
            type(str): The type that operator registered in C++ side.

        Returns(framework_pb2.OpProto): The OpProto

        """
        if type not in self.op_proto_map:
            raise ValueError("Operator \"%s\" has not been registered." % type)
        return self.op_proto_map[type]

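# A minimal usage sketch for OpProtoHolder (assuming the op type "sum" is
# registered on the C++ side):
#
#   >>> proto = OpProtoHolder.instance().get_op_proto("sum")
#   >>> print proto.type  # 'sum'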

class Operator(object):
    """
    Python Operator class. The operator represents the build in instructs in a
    Block. Users can use the build in instructs to describe their neural
    network.
    """

    def __init__(self,
                 block,
                 desc,
                 type=None,
                 inputs=None,
                 outputs=None,
                 attrs=None):
        """
        Constructor.

        Notes: The constructor of operator should not be invoked directly. Use
        Block.append_op or Block.prepend_op instead.

        >>> cur_program = Program()
        >>> cur_block = cur_program.current_block()
        >>> # var1 += var2 + var3
        >>> cur_block.append_op(type="sum",
        >>>                     inputs={"X": [var1, var2, var3]},
        >>>                     outputs={"Out": [var1]})

        Args:
            block(Block): The block that holds the current operator.
            desc(core.OpDesc): The protobuf description.
            type(str): The type of operator.
            inputs(dict): The input dictionary. Key is the input parameter name.
                Value is a list of variables.
            outputs(dict): The output dictionary, which has the same format as
                           inputs.
            attrs(dict): The attributes dictionary. Key is the attribute name.
                Value is the attribute value. The attribute type should be the
                same as the type registered in C++.
        """
        self.block = block
        self.desc = desc
        self.attrs = attrs
        if len(self.desc.type()) != 0:
            return
        if type is None:
            raise ValueError(
                "`type` to initialize an Operator can not be None.")
        self.desc.set_type(type)
        proto = OpProtoHolder.instance().get_op_proto(type)

        def find_name(var_list, name):
            for var_name in var_list:
                if var_list[var_name] is not None and var_name == name:
                    return True
            return False

        if inputs is not None:
            for in_proto in proto.inputs:
                found = find_name(inputs, in_proto.name)
                assert found or in_proto.dispensable, "Input {} not found".format(
                    in_proto.name)

                if found:
                    in_args = inputs[in_proto.name]
                    if not isinstance(in_args, list):
                        in_args = [in_args]
                    if not in_proto.duplicable and len(in_args) > 1:
                        raise ValueError(
                            "Input %s expects only one input, but %d are given."
                            % (in_proto.name, len(in_args)))
                    in_arg_names = []
                    for arg in in_args:
                        if isinstance(arg, basestring):
                            in_arg_names.append(arg)
                        else:
                            in_arg_names.append(arg.name)
                    self.desc.set_input(in_proto.name, in_arg_names)
                else:
                    self.desc.set_input(in_proto.name, [])

        if outputs is not None:
            given = set()
            need = set()
            for n in outputs:
                given.add(n)
            for m in proto.outputs:
                need.add(m.name)
            if not given == need:
                raise ValueError(("Incorrect setting for output(s) of "
                                  "operator \"%s\". Need: [%s] Given: [%s]") %
                                 (type, ", ".join(str(e) for e in need),
                                  ", ".join(str(e) for e in given)))

            for out_proto in proto.outputs:
                out_args = outputs[out_proto.name]
                if not isinstance(out_args, list):
                    out_args = [out_args]
                if not out_proto.duplicable and len(out_args) > 1:
                    raise ValueError(
                        "Output %s expects only one output, but %d are given." %
                        (out_proto.name, len(out_args)))
                out_arg_names = []
                for arg in out_args:
                    out_arg_names.append(arg.name)
                    arg.op = self
                self.desc.set_output(out_proto.name, out_arg_names)

        if attrs is not None:
            if not isinstance(attrs, dict):
                raise TypeError("'attrs' should be a dict.")
            for attr in proto.attrs:
                attr_name = attr.name
                if (attr_name not in attrs) or (attrs[attr_name] is None):
                    continue
                if isinstance(attrs[attr_name], Block):
                    self.desc.set_block_attr(attr_name, attrs[attr_name].desc)
                elif isinstance(attrs[attr_name], core.BlockDesc) or \
                   isinstance(attrs[attr_name], core.ProgramDesc):
                    self.desc.set_serialized_attr(
                        attr_name, attrs[attr_name].serialize_to_string())
                else:
                    self.desc.set_attr(attr_name, attrs[attr_name])

        self.desc.check_attrs()
        no_kernel_op_set = {
            'feed', 'fetch', 'save', 'load', 'recurrent',
            'rnn_memory_helper_grad', 'conditional_block', 'while', 'send',
            'recv', 'listen_and_serv', 'parallel_do', 'save_combine',
            'load_combine', 'ncclInit'
        }
        if type not in no_kernel_op_set:
            self.desc.infer_var_type(self.block.desc)
            self.desc.infer_shape(self.block.desc)

    def to_string(self, throw_on_error):
        """
        Get the debug string.

        Args:
            throw_on_error(bool): True if an exception should be raised when
                self is not initialized.

        Returns(str): The debug string.

        """
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.OpDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def __str__(self):
        return self.to_string(True)

    __repr__ = __str__

    @property
    def type(self):
        return self.desc.type()

    def input(self, name):
        """
        Get input arguments by the input parameter name
        Args:
            name(str): The input parameter name

        Returns(list): return the list of argument names associated with the
            specific parameter name.

        """
        return self.desc.input(name)

    @property
    def input_names(self):
        """
        Get all input parameter names
        Returns(list): return a list of input parameter names

        """
        return self.desc.input_names()

    def output(self, name):
        """
        Get output arguments by the output parameter name
        Args:
            name(str): The output parameter name

        Returns(list): return the list of argument names associated with the
            specific parameter name.

        """
        return self.desc.output(name)

    @property
    def output_names(self):
        """
        Get all output parameter names
        Returns(list): return a list of output parameter names

        """
        return self.desc.output_names()

    @property
    def idx(self):
        """
        Return the array index of the current operator.
        Returns(int): The array index in the block.ops array
        Raises:
            ValueError: when the operator is not found.
        """
        for i, op in enumerate(self.block.ops):
            if op == self:
                return i
        raise ValueError(
            "Can't find op itself in its block. It could be a bug of Paddle.")

    def has_attr(self, name):
        """
        Whether the operator has an attribute with the given name.
        Args:
            name(str): the attribute name

        Returns(bool): True if the operator has this attribute.

        """
        return self.desc.has_attr(name)

    def attr_type(self, name):
        """
        Get the type of an attribute by the attribute name.
        Args:
            name(str): the attribute name

        Returns(core.AttrType): the attribute type

        """
        return self.desc.attr_type(name)

    @property
    def attr_names(self):
        """
        Get all attribute names.
        Returns(list): The list of attribute names

        """
        return self.desc.attr_names()

    def attr(self, name):
        """
        Get an attribute by name.
        Args:
            name(str): the attribute name

        Returns(bool|int|str|float|list): The attribute value. The return value
            can be any valid attribute type.

        """
        return self.desc.attr(name)

    def block_attr(self, name):
        """
        Get a block attribute by name.
        Args:
            name(str): the attribute name

        Returns(int): the block index

        """
        return self.desc.block_attr(name)

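# A minimal sketch of the Operator attribute accessors (a hypothetical `op`
# instance is assumed):
#
#   >>> if op.has_attr("axis"):
#   ...     print op.attr("axis")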

class Block(object):
    def __init__(self, program, idx):
        self.desc = program.desc.block(idx)
        self.vars = dict()  # var_name --> var
        self.ops = collections.deque()  # operator list
        self.program = program
        self.removed_vars = dict()

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        Get the debug string.

        Args:
            throw_on_error(bool): True if an exception should be raised when
                self is not initialized.
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when
                with_details is True

        Returns(str): The debug string.

        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        if with_details:
            re_add_indent = re.compile(r"\n(.)")
            res_str = "blocks {\n  idx: %d\n  parent_idx: %d" % (
                self.idx, self.parent_idx)
            for var in self.vars.itervalues():
                res_str += "\n  vars {\n    %s  }" % re_add_indent.sub(
                    r"\n    \1", var.to_string(throw_on_error, with_details))
            for op in self.ops:
                res_str += "\n  ops {\n    %s  }" % re_add_indent.sub(
                    r"\n    \1", op.to_string(throw_on_error))
            res_str += "\n}"
        else:
            protostr = self.desc.serialize_to_string()
            proto = framework_pb2.BlockDesc.FromString(str(protostr))
            res_str = _debug_string_(proto, throw_on_error)
        return res_str

    __repr__ = __str__

    @property
    def parent_idx(self):
        return self.desc.parent

    @property
    def idx(self):
        return self.desc.id

    def var(self, name):
        if not isinstance(name, basestring):
            raise TypeError()
        v = self.vars.get(name, None)
        if v is None:
            raise ValueError("var %s not in this block" % name)
        return v

    def var_recursive(self, name):
        if self.has_var(name):
            return self.var(name)
        else:
            if self.idx == 0:
                raise ValueError("var %s is not in block(%d) nor its parents." %
                                 (name, self.idx))
            else:
                parent_block = self.program.block(self.parent_idx)
                return parent_block.var_recursive(name)

    def all_parameters(self):
        return list(self.iter_parameters())

    def iter_parameters(self):
        return (item[1] for item in self.vars.iteritems()
                if isinstance(item[1], Parameter))

    def create_var(self, *args, **kwargs):
        var = Variable(self, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](var, self)
        return var

    def has_var(self, name):
        return name in self.vars

    def create_parameter(self, *args, **kwargs):
        global_block = self.program.global_block()
        param = Parameter(global_block, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](param, self)
        return param

    def append_op(self, *args, **kwargs):
        op_desc = self.desc.append_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.append(op)
        return op

    def delete_ops(self, ops):
        # remove from cpp
        # FIXME(typhoonzero): remove only the first occurrence.
        try:
            start = list(self.ops).index(ops[0])
            end = list(self.ops).index(ops[-1])
        except Exception as e:
            raise e
        self.desc.remove_op(start, end + 1)

    def slice_ops(self, start, end):
        return list(self.ops)[start:end]

    def prepend_op(self, *args, **kwargs):
        op_desc = self.desc.prepend_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.appendleft(op)
        return op

    def sync_with_cpp(self):
        # sync variables from cpp
        for var in self.desc.all_vars():
            if not self.has_var(var.name()):
                self.create_var(name=var.name(), desc=var, type=var.type())

        # sync operators from cpp
        ops_in_cpp = []
        for op_idx in range(0, self.desc.op_size()):
            ops_in_cpp.append(self.desc.op(op_idx))

        if len(self.ops) != 0:
            first_op_in_python = self.ops[0].desc
            last_op_in_python = self.ops[len(self.ops) - 1].desc
            start_index = None
            end_index = None
            for index in range(len(ops_in_cpp)):
                if first_op_in_python == ops_in_cpp[index]:
                    start_index = index
                if last_op_in_python == ops_in_cpp[index]:
                    end_index = index
            assert start_index is not None
            assert end_index is not None
            assert start_index <= end_index
        else:
            start_index = 0
            end_index = -1

        # sync ops append to the head of cpp_ops
        for index in range((start_index - 1 - 1), -1, -1):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.appendleft(op)

        # sync ops append to the end of cpp_ops
        for index in range((end_index + 1), len(ops_in_cpp)):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.append(op)

        assert len(self.ops) == len(ops_in_cpp)
        for index in range(len(self.ops)):
            assert self.ops[index].desc == ops_in_cpp[index]

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from the other block.

        Args:
            other(Block): the other block

        Returns:
            None
        """
        if not isinstance(other, Block):
            raise TypeError("copy_param_info_from should be invoked with Block")
        for p in other.iter_parameters():
            assert isinstance(p, Parameter)
            v = self.vars.get(p.name, None)
            if v is None:
                raise ValueError("copy_param_info_from should be invoked with "
                                 "same topology")
            assert isinstance(v, Variable)
            new_p = Parameter(
                block=self,
                shape=v.shape,
                dtype=v.dtype,
                type=v.type,
                lod_level=v.lod_level,
                stop_gradient=p.stop_gradient,
                trainable=p.trainable,
                optimize_attr=p.optimize_attr,
                regularizer=p.regularizer,
                gradient_clip_attr=p.gradient_clip_attr,
                error_clip=p.error_clip,
                name=v.name)
            self.vars[new_p.name] = new_p


class Program(object):
    def __init__(self):
        self.desc = core.ProgramDesc()
        self.blocks = [Block(self, 0)]
        self.current_block_idx = 0
        self._seed = 0

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        Get the debug string.

        Args:
            throw_on_error(bool): True if an exception should be raised when
                self is not initialized.
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when
                with_details is True

        Returns(str): The debug string.

        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        if with_details:
            res_str = ""
            for block in self.blocks:
                res_str += block.to_string(throw_on_error, with_details)
        else:
            protostr = self.desc.serialize_to_string()
            proto = framework_pb2.ProgramDesc.FromString(str(protostr))
            res_str = _debug_string_(proto, throw_on_error)
        return res_str

    def get_desc(self):
        return self.desc

    def clone(self):
        p = Program()
        p.desc = core.ProgramDesc(self.desc)
        p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
        p.sync_with_cpp()
        p.copy_param_info_from(self)
        return p

    def prune(self, targets):
        if not isinstance(targets, list):
            targets = [targets]
        targets_idx = []
        for t in targets:
            if not isinstance(t, Operator):
                if isinstance(t, Variable):
                    t = t.op
                else:
                    raise ValueError(("All targets of prune() can only be "
                                      "Variable or Operator."))

            targets_idx.append([t.block.idx, t.idx])
        res = Program()
        res.desc = core.prune(self.desc, targets_idx)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    def inference_optimize(self):
        res = Program()
        res.desc = core.inference_optimize(self.desc)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    @staticmethod
    def parse_from_string(binary_str):
        p = Program()
        p.desc = core.ProgramDesc(binary_str)
        p.blocks = [Block(p, i) for i in xrange(p.desc.num_blocks())]
        p.sync_with_cpp()
        return p

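    # A minimal round-trip sketch for Program serialization (a hypothetical
    # `prog` Program instance is assumed):
    #
    #   >>> binary_str = prog.desc.serialize_to_string()
    #   >>> restored = Program.parse_from_string(binary_str)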
    @property
    def random_seed(self):
        return self._seed

    @random_seed.setter
    def random_seed(self, seed):
        if not isinstance(seed, int):
            raise ValueError("Seed must be an integer.")
        self._seed = seed

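    # A minimal usage sketch for the random_seed property:
    #
    #   >>> prog = Program()
    #   >>> prog.random_seed = 42
    #   >>> print prog.random_seed  # 42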
    def __repr__(self):
        return str(self)

    def global_block(self):
        return self.blocks[0]

    def block(self, index):
        return self.blocks[index]

    def current_block(self):
        return self.blocks[self.current_block_idx]

    def append_backward(self, target, no_grad_set=None):
        """
        return map(param_name -> (grad_name, block_index, op_index))
        """
        assert isinstance(target, Variable)
        if no_grad_set is None:
            no_grad_set = set()
        try:
            param_to_grad_info = self.desc.append_backward(target.desc,
                                                           no_grad_set)
        except Exception as e:
            raise core.EnforceNotMet(
                str(e) + "\nCurrent protobuf is\n{0}".format(
                    self.to_string(False)))

        self.sync_with_cpp()
        return param_to_grad_info

    def create_block(self, parent_idx=None):
        new_block_idx = len(self.blocks)
        parent = self.current_block() if parent_idx is None else self.block(
            parent_idx)
        self.desc.append_block(parent.desc)
        self.current_block_idx = new_block_idx
        self.blocks.append(Block(self, self.current_block_idx))
        return self.current_block()

    def rollback(self):
        self.current_block_idx = self.current_block().parent_idx

    def sync_with_cpp(self):
        for block_idx in range(len(self.blocks), self.desc.num_blocks()):
            self.blocks.append(Block(self, block_idx))
        for block in self.blocks:
            block.sync_with_cpp()

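    # A minimal sketch of nested block construction with create_block and
    # rollback:
    #
    #   >>> prog = Program()
    #   >>> sub_block = prog.create_block()  # push a new current block
    #   >>> prog.rollback()                  # pop back to the parent block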
    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from the other program.

        Args:
            other(Program): the other program

        Returns:
            None
        """
        if not isinstance(other, Program):
            raise TypeError("copy_param_info_from should be invoked with "
                            "Program")

        if len(self.blocks) != len(other.blocks):
            raise ValueError("copy_param_info_from should be invoked with two "
                             "programs that represent the same topology")
        self.global_block().copy_param_info_from(other.global_block())

    def list_vars(self):
        for each_block in self.blocks:
            for each_var in each_block.vars.itervalues():
                yield each_var


class Parameter(Variable):
    def __init__(self, block, shape, dtype, **kwargs):
        if shape is None or dtype is None:
            raise ValueError("Parameter must set shape and dtype")
        if len(shape) == 0:
            raise ValueError("Parameter shape cannot be empty")

        for each in shape:
            if each < 0:
                raise ValueError("Parameter shape should not be related with "
                                 "batch-size")

        Variable.__init__(
            self, block, persistable=True, shape=shape, dtype=dtype, **kwargs)
        self.trainable = kwargs.get('trainable', True)

        self.optimize_attr = kwargs.get('optimize_attr', {'learning_rate': 1.0})

        self.regularizer = kwargs.get('regularizer', None)

        self.gradient_clip_attr = kwargs.get('gradient_clip_attr', None)

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        Get the debug string.

        Args:
            throw_on_error(bool): True if an exception should be raised when
                self is not initialized.
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when
                with_details is True

        Returns(str): The debug string.

        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        if with_details:
            res_str = Variable.to_string(self, throw_on_error, True)
            additional_attr = ("trainable", "optimize_attr", "regularizer",
                               "gradient_clip_attr")
            for attr_name in additional_attr:
                res_str += "%s: %s\n" % (attr_name,
                                         str(getattr(self, attr_name)))
        else:
            res_str = Variable.to_string(self, throw_on_error, False)
        return res_str

    __repr__ = __str__


# program is a global instance.
_main_program_ = Program()
_startup_program_ = Program()


def default_startup_program():
    """
    Get the default startup program. In the startup program, Paddle will
    initialize parameters, initialize nccl handles, etc.

    Returns:
        Program: startup program
    """
    return _startup_program_


def default_main_program():
    """
    Get the default main program. The main program is used for training or
    testing.

    Returns:
        Program: main program
    """
    return _main_program_


def switch_main_program(program):
    """
    Switch the main program to a new program.

    Args:
        program(Program): The new main program

    Returns:
        Program: The previous main program
    """
    global _main_program_
    prev_program = _main_program_
    _main_program_ = program
    return prev_program


def switch_startup_program(program):
    """
    Switch the startup program to a new program.
    Args:
        program(Program): The new startup program

    Returns:
        Program: The previous startup program
    """
    global _startup_program_
    prev_program = _startup_program_
    _startup_program_ = program
    return prev_program


@contextlib.contextmanager
def program_guard(main_program, startup_program=None):
    """
    Switch programs with a `with` statement.

    Examples:
        >>> with program_guard(Program()):
        >>>   data = fluid.layers.data(...)
        >>>   hidden = fluid.layers.fc(...)

    Args:
        main_program(Program): New main program inside `with` statement
        startup_program(Program): New startup program inside `with` statement.
            None means do not change startup program.

    Returns:
        None
    """
    if not isinstance(main_program, Program):
        raise TypeError("main_program should be Program")
    main_program = switch_main_program(main_program)
    if startup_program is not None:
        if not isinstance(startup_program, Program):
            raise TypeError("startup_program should be Program")
        startup_program = switch_startup_program(startup_program)
    yield
    switch_main_program(main_program)
    if startup_program is not None:
        switch_startup_program(startup_program)


def get_var(name, program=None):
    """
    Get a variable by name from the global block of a program.

    Args:
        name(str): name of the variable
        program(Program|None): program object.
             If None, default_main_program() will be used.

    Returns:
        Variable
    """
    if program is None:
        program = default_main_program()
    assert isinstance(name, str)
    assert isinstance(program, Program)

    return program.global_block().var(name)
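# A minimal usage sketch for get_var (a variable named "X" is assumed to exist
# in the default main program's global block):
#
#   >>> x = get_var("X")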