# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import contextlib

import numpy as np

import proto.framework_pb2 as framework_pb2
from . import core

__all__ = [
    'Block',
    'Variable',
    'Program',
    'Operator',
    'default_startup_program',
    'default_main_program',
    'program_guard',
    'switch_startup_program',
    'switch_main_program',
]

EMPTY_VAR_NAME = core.kEmptyVarName()
TEMP_VAR_NAME = core.kTempVarName()
GRAD_VAR_SUFFIX = core.kGradVarSuffix()
ZERO_VAR_SUFFIX = core.kZeroVarSuffix()


def grad_var_name(var_name):
    """
    Return the gradient variable name for a given variable name.
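
    A minimal illustrative example (assumes GRAD_VAR_SUFFIX is "@GRAD",
    its usual value):

    >>> grad_var_name("fc_0.w")
    'fc_0.w@GRAD'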
    """
    return var_name + GRAD_VAR_SUFFIX


def unique_name(prefix):
    """
    Generate a unique name with the given prefix.

    Args:
        prefix(str): The prefix of the returned string.

    Returns(str): A unique string with the prefix
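
    A minimal illustrative example (the exact numeric suffix depends on how
    many names with this prefix were generated earlier in the process):

    >>> name = unique_name("fc")  # e.g. 'fc_0' on the first call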

    """
    uid = core.unique_integer(prefix)  # unique during whole process.
    return "_".join([prefix, str(uid)])


def convert_np_dtype_to_dtype_(np_dtype):
    """
    Convert the data type in numpy to the data type in Paddle
    Args:
        np_dtype(np.dtype): the data type in numpy

    Returns(core.DataType): the data type in Paddle
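
    A minimal illustrative example:

    >>> convert_np_dtype_to_dtype_(np.float32) == core.DataType.FP32
    True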

    """
    dtype = np.dtype(np_dtype)
    if dtype == np.float32:
        return core.DataType.FP32
    elif dtype == np.float64:
        return core.DataType.FP64
    elif dtype == np.float16:
        return core.DataType.FP16
    elif dtype == np.int32:
        return core.DataType.INT32
    elif dtype == np.int16:
        return core.DataType.INT16
    elif dtype == np.int64:
        return core.DataType.INT64
    elif dtype == np.bool:
        return core.DataType.BOOL
    else:
        raise ValueError("Not supported numpy dtype " + str(dtype))


def dtype_is_floating(dtype):
    """
    Check whether the data type is a floating-point type.
    Args:
        dtype(np.dtype|core.DataType): data type.
            Could be numpy format or Paddle format

    Returns(bool): True if the data type is a floating-point type.
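
    A minimal illustrative example:

    >>> dtype_is_floating(np.float32)
    True
    >>> dtype_is_floating(np.int32)
    False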

    """
    if not isinstance(dtype, core.DataType):
        dtype = convert_np_dtype_to_dtype_(dtype)

    return dtype in [core.DataType.FP16, core.DataType.FP32, core.DataType.FP64]


def _debug_string_(proto, throw_on_error=True):
    """
    Get the debug string of a protobuf message. The message may not be
    initialized.
    Args:
        proto(google.protobuf.message.Message): The protobuf message
        throw_on_error(bool): Whether to raise an error when the protobuf
            message is not initialized.

    Returns(str): The debug string of the protobuf message

    """
    error_fields = list()
    if not proto.IsInitialized(error_fields) and throw_on_error:
        raise ValueError("{0} are not initialized.\nThe message is {1}:\n".
                         format(error_fields, proto))
    return proto.__str__()


class Variable(object):
    """
    Python variable. Every input and output of an operator is a variable.
    Every variable belongs to a block. Each variable has a name, and two
    variables in different blocks can have the same name.

    There are many kinds of variables. Please reference the framework.proto for
    details.

    Notes: The constructor of Variable should not be invoked directly. Please
    use `Block.create_var` to create a variable.

    >>> cur_program = Program()
    >>> cur_block = cur_program.current_block()
    >>> new_variable = cur_block.create_var(
    >>>                    name="X", shape=[-1, 23, 48], dtype='float32')

    Args:
        block(Block): The associated block. It will be passed by
            `Block.create_var` automatically.
        type(core.VarDesc.VarType): Variable type. Please reference the
            framework.proto for details.
        shape(tuple|list|None): The shape of variable. -1 means the batch size.
            Some kinds of variable do not contain shape, just set it to None.
        dtype(np.dtype|core.DataType|str): The data type of variable.
        lod_level(int): The level of the LoD tensor. 0 means the variable
            holds no time-series data.
        persistable(bool): True if the variable should be saved as a
            checkpoint. Defaults to False.
        stop_gradient(bool): True if the variable stops gradient computation
            in the backward pass. Defaults to False.
    """

    def __init__(self,
                 block,
                 type=core.VarDesc.VarType.LOD_TENSOR,
                 name=None,
                 shape=None,
                 dtype=None,
                 lod_level=None,
                 persistable=None,
                 error_clip=None,
                 stop_gradient=False,
                 **kwargs):
        self.block = block
        self.error_clip = error_clip

        if name is None:
            name = Variable._unique_var_name_()
        is_new_var = False
        self.desc = self.block.desc.find_var(name)

        if self.desc is None:
            self.desc = self.block.desc.var(name)
            is_new_var = True

        if is_new_var:
            self.desc.set_type(type)
        elif self.desc.type() != type:
            raise ValueError("Variable {0} has been created before. The "
                             "previous type is {1}; the new type is {2}. They"
                             " are not matched".format(self.name,
                                                       self.desc.type(), type))

        if shape is not None:
            if is_new_var:
                self.desc.set_shape(shape)
            else:
                old_shape = self.shape
                shape = tuple(shape)
                if shape != old_shape:
                    raise ValueError(
                        "Variable {0} has been created before. the previous "
                        "shape is {1}; the new shape is {2}. They are not "
                        "matched.".format(self.name, old_shape, shape))
        if dtype is not None:
            if not isinstance(dtype, core.DataType):
                dtype = convert_np_dtype_to_dtype_(dtype)
            if is_new_var:
                self.desc.set_dtype(dtype)
            else:
                old_dtype = self.dtype
                if dtype != old_dtype:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous data type is {1}; the new "
                                     "data type is {2}. They are not "
                                     "matched.".format(self.name, old_dtype,
                                                       dtype))

        if lod_level is not None:
            if is_new_var:
                self.desc.set_lod_level(lod_level)
            else:
                if lod_level != self.lod_level:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous lod_level is {1}; the new "
                                     "lod_level is {2}. They are not "
                                     "matched".format(self.name, self.lod_level,
                                                      lod_level))
        if persistable is not None:
            if is_new_var:
                self.desc.set_persistable(persistable)
            else:
                if persistable != self.persistable:
                    raise ValueError(
                        "Variable {0} has been created before."
                        "The previous persistable is {1}; the new "
                        "persistable is {2}. They are not matched".format(
                            self.name, self.persistable, persistable))

        self.block.vars[name] = self
        self.op = None
        self.stop_gradient = stop_gradient

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        """
        Get debug string.

        Args:
            throw_on_error(bool): Whether to raise an exception when self is
                not initialized.

        Returns(str): The debug string.

        """
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.VarDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    __repr__ = __str__

    def set_desc(self, input):
        self.desc = input

    @property
    def persistable(self):
        return self.desc.persistable()

    @persistable.setter
    def persistable(self, p):
        self.desc.set_persistable(p)

    @property
    def name(self):
        return self.desc.name()

    @property
    def shape(self):
        # convert to tuple, make it as same as numpy API.
        return tuple(self.desc.shape())

    @property
    def dtype(self):
        return self.desc.dtype()

    @property
    def lod_level(self):
        return self.desc.lod_level()

    @property
    def type(self):
        return self.desc.type()

    @staticmethod
    def _unique_var_name_():
        prefix = "_generated_var"
        uid = core.unique_integer(prefix)  # unique during whole process.
        return "_".join([prefix, str(uid)])

    def set_error_clip(self, error_clip):
        self.error_clip = error_clip


def get_all_op_protos():
    """
    Get all registered op protos from the PaddlePaddle C++ side.

    Returns(list): list of OpProto
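
    A minimal illustrative sketch (which op types appear depends on the
    operators compiled into the C++ core):

    >>> op_types = [proto.type for proto in get_all_op_protos()]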

    """
    protostrs = core.get_all_op_protos()
    ret_values = []
    for pbstr in protostrs:
        op_proto = framework_pb2.OpProto.FromString(str(pbstr))
        ret_values.append(op_proto)
    return ret_values


class OpProtoHolder(object):
    """
    A global singleton that holds all OpProtos from the C++ side as a map.
    """

    @classmethod
    def instance(cls):
        if not hasattr(cls, '_instance'):
            cls._instance = cls()
        return cls._instance

    def __init__(self):
        assert not hasattr(
            self.__class__,
            '_instance'), 'Please use `instance()` to get OpProtoHolder object!'
        op_protos = get_all_op_protos()
        self.op_proto_map = {}
        for proto in op_protos:
            self.op_proto_map[proto.type] = proto

    def get_op_proto(self, type):
        """
        Get OpProto by a type string.
        Args:
            type(str): The type string under which the operator is registered
                on the C++ side.

        Returns(framework_pb2.OpProto): The OpProto
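
        A minimal illustrative example (assumes the "sum" operator is
        registered, as in a standard build):

        >>> sum_proto = OpProtoHolder.instance().get_op_proto("sum")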

        """
        if type not in self.op_proto_map:
            raise ValueError("Operator \"%s\" has not been registered." % type)
        return self.op_proto_map[type]


class Operator(object):
    """
    Python Operator class. An operator represents one of the built-in
    instructions in a Block. Users compose these built-in instructions to
    describe their neural network.
    """

    def __init__(self,
                 block,
                 desc,
                 type=None,
                 inputs=None,
                 outputs=None,
                 attrs=None):
        """
        Constructor.

        Notes: The constructor of operator should not be invoked directly. Use
        Block.append_op or Block.prepend_op instead.

        >>> cur_program = Program()
        >>> cur_block = cur_program.current_block()
        >>> # var1 += var2 + var3
        >>> cur_block.append_op(type="sum",
        >>>                     inputs={"X": [var1, var2, var3]},
        >>>                     outputs={"Out": [var1]})

        Args:
            block(Block): The block that contains the current operator.
            desc(core.OpDesc): The protobuf description.
            type(str): The type of operator.
            inputs(dict): The input dictionary. Key is the input parameter name.
                Value is a list of variables.
            outputs(dict): The output dictionary which has the same format with
                           inputs.
            attrs(dict): The attributes dictionary. Key is attribute name. Value
                is the attribute value. The attribute type should be the same
                as the type registered in C++.
        """
        self.block = block
        self.desc = desc
        # Saved so that a new operator can be cloned from this one.
        self.inputs = inputs
        self.outputs = outputs
        self.attrs = attrs
        if len(self.desc.type()) != 0:
            return
        if type is None:
            raise ValueError(
                "`type` to initilized an Operator can not be None.")
        self.desc.set_type(type)
        proto = OpProtoHolder.instance().get_op_proto(type)

        def find_name(var_list, name):
            for var_name in var_list:
                if var_list[var_name] is not None and var_name == name:
                    return True
            return False

        if inputs is not None:
            for in_proto in proto.inputs:
                found = find_name(inputs, in_proto.name)
                assert found or in_proto.dispensable, "Input {} not found".format(
                    in_proto.name)

                if found:
                    in_args = inputs[in_proto.name]
                    if not isinstance(in_args, list):
                        in_args = [in_args]
                    if not in_proto.duplicable and len(in_args) > 1:
                        raise ValueError(
                            "Input %s expects only one input, but %d are given."
                            % (in_proto.name, len(in_args)))
                    in_arg_names = []
                    for arg in in_args:
                        if isinstance(arg, basestring):
                            in_arg_names.append(arg)
                        else:
                            in_arg_names.append(arg.name)
                    self.desc.set_input(in_proto.name, in_arg_names)
                else:
                    self.desc.set_input(in_proto.name, [])

        if outputs is not None:
            given = set()
            need = set()
            for n in outputs:
                given.add(n)
            for m in proto.outputs:
                need.add(m.name)
            if not given == need:
                raise ValueError(("Incorrect setting for output(s) of "
                                  "operator \"%s\". Need: [%s] Given: [%s]") %
                                 (type, ", ".join(str(e)
                                                  for e in need), ", ".join(
                                                      str(e) for e in given)))

            for out_proto in proto.outputs:
                out_args = outputs[out_proto.name]
                if not isinstance(out_args, list):
                    out_args = [out_args]
                if not out_proto.duplicable and len(out_args) > 1:
                    raise ValueError(
                        "Output %s expects only one output, but %d are given." %
                        (out_proto.name, len(out_args)))
                out_arg_names = []
                for arg in out_args:
                    out_arg_names.append(arg.name)
                    arg.op = self
                self.desc.set_output(out_proto.name, out_arg_names)

        if attrs is not None:
            if not isinstance(attrs, dict):
                raise TypeError("'attrs' should be a dict.")
            for attr in proto.attrs:
                attr_name = attr.name
                if (attr_name not in attrs) or (attrs[attr_name] is None):
                    continue
                if isinstance(attrs[attr_name], Block):
                    self.desc.set_block_attr(attr_name, attrs[attr_name].desc)
                elif isinstance(attrs[attr_name], core.BlockDesc) or \
                   isinstance(attrs[attr_name], core.ProgramDesc):
                    self.desc.set_serialized_attr(
                        attr_name, attrs[attr_name].serialize_to_string())
                else:
                    self.desc.set_attr(attr_name, attrs[attr_name])

        self.desc.check_attrs()
        no_kernel_op_set = {
            'feed', 'fetch', 'save', 'load', 'recurrent',
            'rnn_memory_helper_grad', 'conditional_block', 'while', 'send',
            'recv', 'parallel_do'
        }
        if type not in no_kernel_op_set:
            self.desc.infer_var_type(self.block.desc)
            self.desc.infer_shape(self.block.desc)

    def to_string(self, throw_on_error):
        """
        Get the debug string.
        Args:
            throw_on_error(bool): Whether to raise an exception when self is
                not initialized.

        Returns(str): The debug string.

        """
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.OpDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def __str__(self):
        return self.to_string(True)

    __repr__ = __str__

    @property
    def type(self):
        return self.desc.type()

    def input(self, name):
        """
        Get input arguments by the input parameter name
        Args:
            name(str): The input parameter name

        Returns(list): return the list of argument names associated with the
            specific parameter name.
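
        A minimal illustrative sketch (assumes `op` was created with
        inputs={"X": [var1, var2, var3]}, as in the constructor example):

        >>> x_args = op.input("X")  # e.g. ['var1', 'var2', 'var3']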

        """
        return self.desc.input(name)

    @property
    def input_names(self):
        """
        Get all input parameter names
        Returns(list): return a list of input parameter names

        """
        return self.desc.input_names()

    def output(self, name):
        """
        Get output arguments by the output parameter name
        Args:
            name(str): The output parameter name

        Returns(list): return the list of argument names associated with the
            specific parameter name.

        """
        return self.desc.output(name)

    @property
    def output_names(self):
        """
        Get all output parameter names
        Returns(list): return a list of output parameter names

        """
        return self.desc.output_names()

    @property
    def idx(self):
        """
        Return the array index of the current operator.
        Returns(int): The index in the block.ops array
        Raises:
            ValueError: when the operator is not found.
        """
        for i, op in enumerate(self.block.ops):
            if op == self:
                return i
        raise ValueError(
            "Can't find op itself in it's block. It could be a bug of Paddle.")

    def has_attr(self, name):
        """
        Whether the operator has an attribute with the given name.
        Args:
            name(str): the attribute name

        Returns(bool): True if has this attribute.

        """
        return self.desc.has_attr(name)

    def attr_type(self, name):
        """
        Get the type of attribute by attribute name
        Args:
            name(str): the attribute name

        Returns(core.AttrType): the attribute type

        """
        return self.desc.attr_type(name)

    @property
    def attr_names(self):
        """
        Get all attribute names
        Returns(list): The list of attribute name

        """
        return self.desc.attr_names()

    def attr(self, name):
        """
        Get attribute by name
        Args:
            name(str): the attribute name

        Returns(bool|int|str|float|list): The attribute value. The return value
            can be any valid attribute type.
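
        A minimal illustrative sketch (assumes `op` has a boolean attribute
        named "is_test"; the available attributes vary per operator):

        >>> is_test = op.attr("is_test")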

        """
        return self.desc.attr(name)

    def block_attr(self, name):
        """
        Get the block attribute by name
        Args:
            name(str): the attribute name

        Returns(int): the block index

        """
        return self.desc.block_attr(name)


class Block(object):
    def __init__(self, program, idx):
        self.desc = program.desc.block(idx)
        self.vars = dict()  # var_name --> var
        self.ops = collections.deque()  # operator list
        self.program = program
        self.removed_vars = dict()

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.BlockDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    __repr__ = __str__

    @property
    def parent_idx(self):
        return self.desc.parent

    @property
    def idx(self):
        return self.desc.id

    def var(self, name):
        if not isinstance(name, basestring):
            raise TypeError()
        v = self.vars.get(name, None)
        if v is None:
            raise ValueError("var %s not in this block" % name)
        return v

    def var_recursive(self, name):
        if self.has_var(name):
            return self.var(name)
        else:
            if self.idx == 0:
                raise ValueError("var %s is not in block(%d) nor its parents." %
                                 name, self.idx)
            else:
                parent_block = self.program.block(self.parent_idx)
                return parent_block.var_recursive(name)

    def all_parameters(self):
        return list(self.iter_parameters())

    def iter_parameters(self):
        return (item[1] for item in self.vars.iteritems()
                if isinstance(item[1], Parameter))

    def create_var(self, *args, **kwargs):
        var = Variable(self, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](var, self)
        return var

    def has_var(self, name):
        return name in self.vars

    def create_parameter(self, *args, **kwargs):
        global_block = self.program.global_block()
        param = Parameter(global_block, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](param, self)
        return param

    def append_op(self, *args, **kwargs):
        op_desc = self.desc.append_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.append(op)
        return op

    def delete_ops(self, ops):
        # remove from cpp
        # FIXME(typhoonzero): remove only the first occurrence.
        try:
            start = list(self.ops).index(ops[0])
            end = list(self.ops).index(ops[-1])
        except Exception as e:
            raise e
        self.desc.remove_op(start, end + 1)

    def prepend_op(self, *args, **kwargs):
        op_desc = self.desc.prepend_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.appendleft(op)
        return op

    def sync_with_cpp(self):
        # sync variables from cpp
        for var in self.desc.all_vars():
            if not self.has_var(var.name()):
                self.create_var(name=var.name(), desc=var, type=var.type())

        # sync operators from cpp
        ops_in_cpp = []
        for op_idx in range(0, self.desc.op_size()):
            ops_in_cpp.append(self.desc.op(op_idx))

        if len(self.ops) != 0:
            first_op_in_python = self.ops[0].desc
            last_op_in_python = self.ops[len(self.ops) - 1].desc
            start_index = None
            end_index = None
            for index in range(len(ops_in_cpp)):
                if first_op_in_python == ops_in_cpp[index]:
                    start_index = index
                if last_op_in_python == ops_in_cpp[index]:
                    end_index = index
            assert start_index is not None
            assert end_index is not None
            assert start_index <= end_index
        else:
            start_index = 0
            end_index = -1

        # sync ops append to the head of cpp_ops
        for index in range((start_index - 1 - 1), -1, -1):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.appendleft(op)

        # sync ops append to the end of cpp_ops
        for index in range((end_index + 1), len(ops_in_cpp)):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.append(op)

        assert len(self.ops) == len(ops_in_cpp)
        for index in range(len(self.ops)):
            assert self.ops[index].desc == ops_in_cpp[index]

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from other block
        Args:
758
            other(Block): other block
759 760 761 762 763 764 765 766 767 768 769 770 771 772 773 774 775 776 777 778 779 780 781

        Returns:
            None
        """
        if not isinstance(other, Block):
            raise TypeError("copy_param_info_from should be invoked with Block")
        for p in other.iter_parameters():
            assert isinstance(p, Parameter)
            v = self.vars.get(p.name, None)
            if v is None:
                raise ValueError("copy_param_info_from should be invoked with "
                                 "same topology")
            assert isinstance(v, Variable)
            new_p = Parameter(
                block=self,
                shape=v.shape,
                dtype=v.dtype,
                type=v.type,
                lod_level=v.lod_level,
                stop_gradient=p.stop_gradient,
                trainable=p.trainable,
                optimize_attr=p.optimize_attr,
                regularizer=p.regularizer,
Y
F
fengjiayi 已提交
783
                error_clip=p.error_clip,
784 785 786
                name=v.name)
            self.vars[new_p.name] = new_p

Y
class Program(object):
789 790
    def __init__(self):
        self.desc = core.ProgramDesc()
Y
        self.current_block_idx = 0
D
dzhwinter 已提交
793
        self._seed = 0
Y
795
    def __str__(self):
Y

    def to_string(self, throw_on_error):
799 800
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.ProgramDesc.FromString(str(protostr))
Y
802

803 804 805
    def get_desc(self):
        return self.desc

Y
        p = Program()
        p.desc = core.ProgramDesc(self.desc)
        p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
        p.sync_with_cpp()
811
        p.copy_param_info_from(self)
Y
        return p
814 815 816 817 818 819 820 821 822
    def prune(self, targets):
        if not isinstance(targets, list):
            targets = [targets]
        targets_idx = []
        for t in targets:
            if not isinstance(t, Operator):
                if isinstance(t, Variable):
                    t = t.op
                else:
C
caoying03 已提交
823 824
                    raise ValueError(("All targets of prune() can only be "
                                      "Variable or Operator."))
825 826 827 828 829 830 831 832

            targets_idx.append([t.block.idx, t.idx])
        res = Program()
        res.desc = core.prune(self.desc, targets_idx)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    def inference_optimize(self):
        res = Program()
        res.desc = core.inference_optimize(self.desc)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    @staticmethod
    def parse_from_string(binary_str):
        p = Program()
        p.desc = core.ProgramDesc(binary_str)
844
        p.blocks = [Block(p, i) for i in xrange(p.desc.num_blocks())]
845 846
        p.sync_with_cpp()
        return p
Y
D
dzhwinter 已提交
848 849 850 851 852 853 854 855 856 857
    @property
    def random_seed(self):
        return self._seed

    @random_seed.setter
    def random_seed(self, seed):
        if not isinstance(seed, int):
            raise ValueError("Seed must be a integer.")
        self._seed = seed

    def __repr__(self):
        return str(self)

    def global_block(self):
        return self.blocks[0]

    def block(self, index):
        return self.blocks[index]

    def current_block(self):
        return self.blocks[self.current_block_idx]

    def append_backward(self, target, no_grad_set=None):
        """
        Append backward operators and return a map of
        param_name -> (grad_name, block_index, op_index).
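
        A minimal illustrative sketch (assumes `loss` is a scalar Variable
        belonging to this program):

        >>> param_grad_map = program.append_backward(loss)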
        """
        assert isinstance(target, Variable)
        if no_grad_set is None:
            no_grad_set = set()
        try:
            param_to_grad_info = self.desc.append_backward(target.desc,
                                                           no_grad_set)
        except Exception as e:
            raise core.EnforceNotMet(
                str(e) + "\nCurrent protobuf is\n{0}".format(
                    self.to_string(False)))

        self.sync_with_cpp()
        return param_to_grad_info

    def create_block(self, parent_idx=None):
        new_block_idx = len(self.blocks)
        parent = self.current_block() if parent_idx is None else self.block(
            parent_idx)
        self.desc.append_block(parent.desc)
        self.current_block_idx = new_block_idx
        self.blocks.append(Block(self, self.current_block_idx))
        return self.current_block()

    def rollback(self):
        self.current_block_idx = self.current_block().parent_idx

    def sync_with_cpp(self):
        for block_idx in range(len(self.blocks), self.desc.num_blocks()):
            self.blocks.append(Block(self, block_idx))
        for block in self.blocks:
            block.sync_with_cpp()

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from other program.
        Args:
            other(Program): Other program

        Returns:
            None
        """
        if not isinstance(other, Program):
            raise TypeError("copy_param_info_from should be invoked with "
                            "Program")

        if len(self.blocks) != len(other.blocks):
            raise ValueError("copy_param_info_from should be invoked with two "
                             "program, with represent the same topology")
        self.global_block().copy_param_info_from(other.global_block())

    def list_vars(self):
        for each_block in self.blocks:
            for each_var in each_block.vars.itervalues():
                yield each_var


class Parameter(Variable):
    def __init__(self, block, shape, dtype, **kwargs):
        if shape is None or dtype is None:
            raise ValueError("Parameter must set shape and dtype")
        if len(shape) == 0:
            raise ValueError("Parameter shape cannot be empty")

        for each in shape:
            if each < 0:
                raise ValueError("Parameter shape should not be related with "
                                 "batch-size")

        Variable.__init__(
            self, block, persistable=True, shape=shape, dtype=dtype, **kwargs)
        self.trainable = kwargs.get('trainable', True)

        self.optimize_attr = kwargs.get('optimize_attr', {'learning_rate': 1.0})

        self.regularizer = kwargs.get('regularizer', None)

        self.clip_attr = kwargs.get('clip_attr', None)


# program is a global instance.
_main_program_ = Program()
_startup_program_ = Program()


def default_startup_program():
    """
    Get the default startup program. In the startup program, Paddle
    initializes parameters, NCCL handles, etc.

    Returns:
        Program: startup program
    """
    return _startup_program_


def default_main_program():
    """
    Get the default main program. The main program is used for training or
    testing.

    Returns:
        Program: main program
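
    A minimal illustrative example:

    >>> prog = default_main_program()
    >>> isinstance(prog, Program)
    True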
    """
    return _main_program_


def switch_main_program(program):
    """
    Switch the main program to a new program.

    Args:
        program(Program): The new main program

    Returns:
        Program: The previous main program
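
    A minimal illustrative example (switches to a fresh program and back):

    >>> new_program = Program()
    >>> prev_program = switch_main_program(new_program)
    >>> default_main_program() is new_program
    True
    >>> _ = switch_main_program(prev_program)  # switch back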
    """
    global _main_program_
    prev_program = _main_program_
    _main_program_ = program
    return prev_program


def switch_startup_program(program):
    """
    Switch the startup program to a new program.
    Args:
        program(Program): The new startup program

    Returns:
        Program: The previous startup program
    """
    global _startup_program_
    prev_program = _startup_program_
    _startup_program_ = program
    return prev_program


@contextlib.contextmanager
def program_guard(main_program, startup_program=None):
    """
    Switch program with `with` statement

    Examples:
        >>> with program_guard(Program()):
        >>>   data = fluid.layers.data(...)
        >>>   hidden = fluid.layers.fc(...)

    Args:
        main_program(Program): New main program inside `with` statement
        startup_program(Program): New startup program inside `with` statement.
            None means do not change startup program.

    Returns:
        None
    """
    if not isinstance(main_program, Program):
        raise TypeError("main_program should be Program")
    main_program = switch_main_program(main_program)
    if startup_program is not None:
        if not isinstance(startup_program, Program):
            raise TypeError("startup_program should be Program")
        startup_program = switch_startup_program(startup_program)
    yield
    switch_main_program(main_program)
    if startup_program is not None:
        switch_startup_program(startup_program)