#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
import contextlib
import re

import numpy as np

import proto.framework_pb2 as framework_pb2
from . import core
import unique_name

__all__ = [
    'Block',
    'Variable',
    'Program',
    'Operator',
    'default_startup_program',
    'default_main_program',
    'program_guard',
    'get_var',
]

EMPTY_VAR_NAME = core.kEmptyVarName()
TEMP_VAR_NAME = core.kTempVarName()
GRAD_VAR_SUFFIX = core.kGradVarSuffix()
ZERO_VAR_SUFFIX = core.kZeroVarSuffix()


def grad_var_name(var_name):
    """
    Returns:
        str: gradient name for a certain var name
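
    Examples:
        A minimal usage sketch (GRAD_VAR_SUFFIX is "@GRAD" by default):

        >>> grad_var_name("fc_0.w_0")
        'fc_0.w_0@GRAD'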
    """
    return var_name + GRAD_VAR_SUFFIX


def convert_np_dtype_to_dtype_(np_dtype):
    """
    Convert the data type in numpy to the data type in Paddle

    Args:
        np_dtype(np.dtype): the data type in numpy.

    Returns:
        core.VarDesc.VarType: the data type in Paddle.

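    Examples:
        A minimal usage sketch:

        >>> convert_np_dtype_to_dtype_(np.float32) == core.VarDesc.VarType.FP32
        True
        >>> convert_np_dtype_to_dtype_('int64') == core.VarDesc.VarType.INT64
        True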
    """
    dtype = np.dtype(np_dtype)
    if dtype == np.float32:
        return core.VarDesc.VarType.FP32
    elif dtype == np.float64:
        return core.VarDesc.VarType.FP64
    elif dtype == np.float16:
        return core.VarDesc.VarType.FP16
    elif dtype == np.int32:
        return core.VarDesc.VarType.INT32
    elif dtype == np.int16:
        return core.VarDesc.VarType.INT16
    elif dtype == np.int64:
        return core.VarDesc.VarType.INT64
    elif dtype == np.bool:
        return core.VarDesc.VarType.BOOL
    elif dtype == np.uint16:
        return core.VarDesc.VarType.INT16
    elif dtype == np.uint8:
        return core.VarDesc.VarType.UINT8
    else:
        raise ValueError("Not supported numpy dtype " + str(dtype))


def dtype_is_floating(dtype):
    """
    Check the data type is floating or not.
    Args:
        dtype(np.dtype|core.VarDesc.VarType): data type.
            Could be numpy format or Paddle format

    Returns(bool): True if the data type is a floating-point type.

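    Examples:
        A minimal usage sketch:

        >>> dtype_is_floating(np.float16)
        True
        >>> dtype_is_floating(core.VarDesc.VarType.INT32)
        False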
    """
    if not isinstance(dtype, core.VarDesc.VarType):
        dtype = convert_np_dtype_to_dtype_(dtype)

    return dtype in [
        core.VarDesc.VarType.FP16, core.VarDesc.VarType.FP32,
        core.VarDesc.VarType.FP64
    ]


def _debug_string_(proto, throw_on_error=True):
    """
    Get the debug string of a protobuf message. The message may be
    uninitialized.
    Args:
        proto(google.protobuf.message.Message): The protobuf message
        throw_on_error(bool): True if an error should be raised when the
            protobuf message is not initialized.

    Returns(str): The debug string of the protobuf message

    """
    error_fields = list()
    if not proto.IsInitialized(error_fields) and throw_on_error:
        raise ValueError("{0} are not initialized.\nThe message is {1}:\n".
                         format(error_fields, proto))
    return proto.__str__()


class Variable(object):
    """
    In Fluid, every input and output of an operator is a variable. In most
    cases, variables are used for holding different kinds of data or training
    labels. A variable belongs to a block. Every variable has its own name, and
    two variables in different blocks can have the same name.

    There are many kinds of variables. Each kind has its own attributes and
    usages. Please refer to framework.proto for details.

    Most of a Variable's member variables can be set to None. It means
    it is not available or will be specified later.

    Args:
        block(Block): The block that the variable belongs to.
        type(core.VarDesc.VarType): Variable type. Please refer to
            framework.proto for details.
        name(str|None): The name of the variable. If set to None, it will be
            generated automatically. Default: None
        shape(tuple|list|None): The shape of the variable. -1 means the batch size.
            Some kinds of variable do not contain shape, just set it to None.
            Default: None
        dtype(np.dtype|core.VarDesc.VarType|str|None): The data type of variable.
            Default: None
        lod_level (int|None): The level of lod tensor. 0 means it is not a time
            series data.
            Default: None
        capacity (int|None): The capacity of a Channel variable. Ignored for
            other types. Default: None
        persistable (bool|None): True if the variable is persistable. A persistable
            variable will not be deleted after the iteration ends. Default: None.
        error_clip (BaseErrorClipAttr|None): The error clip attributes of the
            corresponding gradient variable. Default: None
        stop_gradient (bool): True if the variable stops computing its gradients
            during backward. Default: False.
        is_data (bool): True if the variable is input data. Default: False

    Notes:
        The constructor of Variable should not be invoked directly. Please
        use `Block.create_var` to create a variable.

    Examples:
        .. code-block:: python

            cur_program = Program()
            cur_block = cur_program.current_block()
            new_variable = cur_block.create_var(name="X",
                                                shape=[-1, 23, 48],
                                                dtype='float32')
    """

    def __init__(self,
                 block,
                 type=core.VarDesc.VarType.LOD_TENSOR,
                 name=None,
                 shape=None,
                 dtype=None,
                 lod_level=None,
                 capacity=None,
                 persistable=None,
                 error_clip=None,
                 stop_gradient=False,
                 is_data=False,
                 **kwargs):
        self.block = block
        self.error_clip = error_clip

        if name is None:
            name = unique_name.generate('_generated_var')
        is_new_var = False
        self.desc = self.block.desc.find_var(name)

        if self.desc is None:
            self.desc = self.block.desc.var(name)
            is_new_var = True

        if is_new_var:
            self.desc.set_type(type)
        elif self.desc.type() != type:
            raise ValueError("Variable {0} has been created before. The "
                             "previous type is {1}; the new type is {2}. They"
                             " are not matched".format(self.name,
                                                       self.desc.type(), type))

        if shape is not None:
            if is_new_var:
                self.desc.set_shape(shape)
            else:
                old_shape = self.shape
                shape = tuple(shape)
                if shape != old_shape:
                    raise ValueError(
                        "Variable {0} has been created before. the previous "
                        "shape is {1}; the new shape is {2}. They are not "
                        "matched.".format(self.name, old_shape, shape))
        if dtype is not None:
            if not isinstance(dtype, core.VarDesc.VarType):
                dtype = convert_np_dtype_to_dtype_(dtype)
            if is_new_var:
                self.desc.set_dtype(dtype)
            else:
                old_dtype = self.dtype
                if dtype != old_dtype:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous data type is {1}; the new "
                                     "data type is {2}. They are not "
                                     "matched.".format(self.name, old_dtype,
                                                       dtype))

        if lod_level is not None:
            if is_new_var:
                self.desc.set_lod_level(lod_level)
            else:
                if lod_level != self.lod_level:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous lod_level is {1}; the new "
                                     "lod_level is {2}. They are not "
                                     "matched".format(self.name, self.lod_level,
                                                      lod_level))
        if persistable is not None:
            if is_new_var:
                self.desc.set_persistable(persistable)
            else:
                if persistable != self.persistable:
                    raise ValueError(
                        "Variable {0} has been created before. "
                        "The previous persistable is {1}; the new "
                        "persistable is {2}. They are not matched".format(
                            self.name, self.persistable, persistable))

        if capacity is not None:
            if is_new_var:
                self.desc.set_capacity(capacity)
            else:
                # TODO(abhinavarora): Compare with the set capacity once
                # get_capacity is implemented.
                pass

        self.block.vars[name] = self
        self.op = None
        self.stop_gradient = stop_gradient
        self.is_data = is_data

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        Get debug string.

        Args:
            throw_on_error(bool): True if an exception should be raised when
                self is not initialized.
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when
                with_details is True. Default: False.

        Returns:
            str: The debug string.
        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.VarDesc.FromString(str(protostr))
        res_str = _debug_string_(proto, throw_on_error)
        if with_details:
            additional_attr = ("error_clip", "stop_gradient")
            for attr_name in additional_attr:
                res_str += "%s: %s\n" % (attr_name,
                                         str(getattr(self, attr_name)))
        return res_str

    __repr__ = __str__

    def set_desc(self, input):
        """
        Set the variable description.

        Args:
            input(core.VarDesc): The new VarDesc.

        Returns:
            None
        """
        self.desc = input

    @property
    def persistable(self):
        return self.desc.persistable()

    @persistable.setter
    def persistable(self, p):
        self.desc.set_persistable(p)

    @property
    def name(self):
        return self.desc.name()

    @name.setter
    def name(self, new_name):
        self.desc.set_name(new_name)

    @property
    def shape(self):
        # convert to tuple, make it as same as numpy API.
        return tuple(self.desc.shape())

    @property
    def dtype(self):
        return self.desc.dtype()

    @property
    def lod_level(self):
        return self.desc.lod_level()

    @property
    def type(self):
        return self.desc.type()

    def set_error_clip(self, error_clip):
        """
        Set the error_clip.

        Args:
            error_clip(BaseErrorClipAttr) : The new error_clip.

        Returns:
            None
        """
        self.error_clip = error_clip


def get_all_op_protos():
    """
    Get all registered op proto from PaddlePaddle C++ end.

    Returns:
       list: list of OpProto.
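
    Examples:
        A minimal usage sketch:

        >>> protos = get_all_op_protos()
        >>> len(protos) > 0
        True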
    """
    protostrs = core.get_all_op_protos()
    ret_values = []
    for pbstr in protostrs:
        op_proto = framework_pb2.OpProto.FromString(str(pbstr))
        ret_values.append(op_proto)
    return ret_values


class OpProtoHolder(object):
    """
    A global variable to hold all OpProtos from C++ as a map
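
    Examples:
        A minimal usage sketch ('sum' is assumed to be a registered operator):

        >>> holder = OpProtoHolder.instance()
        >>> holder.get_op_proto('sum').type
        'sum'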
    """

    @classmethod
    def instance(cls):
        if not hasattr(cls, '_instance'):
            cls._instance = cls()
        return cls._instance

    def __init__(self):
        assert not hasattr(
            self.__class__,
            '_instance'), 'Please use `instance()` to get OpProtoHolder object!'
        op_protos = get_all_op_protos()
        self.op_proto_map = {}
        for proto in op_protos:
            self.op_proto_map[proto.type] = proto

    def get_op_proto(self, type):
        """
        Get OpProto by a type string.
        Args:
            type(str): The type that operator registered in C++ side.

        Returns(framework_pb2.OpProto): The OpProto

        """
        if type not in self.op_proto_map:
            raise ValueError("Operator \"%s\" has not been registered." % type)
        return self.op_proto_map[type]

    @staticmethod
    def generated_op_attr_names():
        return {
            core.op_proto_and_checker_maker.kOpRoleAttrName(),
            core.op_proto_and_checker_maker.kOpRoleVarAttrName()
        }


class Operator(object):
    """
    In Fluid, all operations are represented by Operators, and an Operator
    is regarded as a built-in instruction of a Block. Users can use these
    built-in instructions to describe their neural networks.

    Args:
        block(Block): The block that owns the current operator.
        desc(core.OpDesc): The protobuf description of the Operator.
        type(str): The type of operator. Default None.
        inputs(dict): The inputs of this Operator. It is a dictionary: for each
            element, the key is the input parameter name and the value is a
            list of variables. Default None.
        outputs(dict): The outputs of this Operator. It is a dictionary: for
            each element, the key is the output parameter name and the value is
            a list of variables. Default None.
        attrs(dict): The attributes of this Operator. It is a dictionary: for
            each element, the key is the attribute name and the value is the
            attribute value. The attribute type must be the same as the type
            registered on the C++ side. Default None.

    Returns:
        Operator: The initialized Operator.

    Raises:
        ValueError: If the passed inputs, outputs and attrs don't match those
            of the Operator registered on the C++ side.

    Notes:
        The constructor of operator should not be invoked directly. Use
        Block.append_op or Block.prepend_op instead.

    Examples:
        .. code-block:: python

            cur_program = Program()
            cur_block = cur_program.current_block()
            # var1 += var2 + var3
            cur_block.append_op(type="sum",
                                inputs={"X": [var1, var2, var3]},
                                outputs={"Out": [var1]})
    """
    OP_WITHOUT_KERNEL_SET = {
        'feed', 'fetch', 'save', 'load', 'recurrent', 'go',
        'rnn_memory_helper_grad', 'conditional_block', 'while', 'send', 'recv',
        'listen_and_serv', 'parallel_do', 'save_combine', 'load_combine',
        'ncclInit', 'channel_create', 'channel_close', 'channel_send',
        'channel_recv', 'select', 'gen_nccl_id'
    }

    def __init__(self,
                 block,
                 desc,
                 type=None,
                 inputs=None,
                 outputs=None,
                 attrs=None):

        self.block = block
        self.desc = desc
        self.attrs = attrs
        if self.attrs is None:
            self.attrs = dict()
        del attrs

        op_maker = core.op_proto_and_checker_maker

        if op_maker.kOpRoleAttrName() not in self.attrs:
            self.attrs[op_maker.kOpRoleAttrName()] = self.block.program.op_role

        role_var_name = op_maker.kOpRoleVarAttrName()
        if len(self.block.program.
               op_role_var) != 0 and role_var_name not in self.attrs:
            self.attrs[role_var_name] = self.block.program.op_role_var

        if role_var_name in self.attrs and len(self.attrs[role_var_name]) == 0:
            del self.attrs[role_var_name]

        if len(self.desc.type()) != 0:
            return
        if type is None:
            raise ValueError(
                "The `type` used to initialize an Operator cannot be None.")
        self.desc.set_type(type)
        proto = OpProtoHolder.instance().get_op_proto(type)

        def find_name(var_list, name):
            for var_name in var_list:
                if var_list[var_name] is not None and var_name == name:
                    return True
            return False

        if inputs is not None:
            for in_proto in proto.inputs:
                found = find_name(inputs, in_proto.name)
                assert found or in_proto.dispensable, "Input {} not found".format(
                    in_proto.name)

                if found:
                    in_args = inputs[in_proto.name]
                    if not isinstance(in_args, list):
                        in_args = [in_args]
                    if not in_proto.duplicable and len(in_args) > 1:
                        raise ValueError(
                            "Input %s expects only one input, but %d are given."
                            % (in_proto.name, len(in_args)))
                    in_arg_names = []
                    for arg in in_args:
                        if isinstance(arg, basestring):
                            in_arg_names.append(arg)
                        else:
                            in_arg_names.append(arg.name)
                    self.desc.set_input(in_proto.name, in_arg_names)
                else:
                    self.desc.set_input(in_proto.name, [])

        if outputs is not None:
            given = set()
            need = set()
            for n in outputs:
                given.add(n)
            for m in proto.outputs:
                need.add(m.name)
            if not given == need:
                raise ValueError(("Incorrect setting for output(s) of "
                                  "operator \"%s\". Need: [%s] Given: [%s]") %
                                 (type, ", ".join(str(e) for e in need),
                                  ", ".join(str(e) for e in given)))

            for out_proto in proto.outputs:
                out_args = outputs[out_proto.name]
                if not isinstance(out_args, list):
                    out_args = [out_args]
                if not out_proto.duplicable and len(out_args) > 1:
                    raise ValueError(
                        "Output %s expects only one output, but %d are given." %
                        (out_proto.name, len(out_args)))
                out_arg_names = []
                for arg in out_args:
                    out_arg_names.append(arg.name)
                    arg.op = self
                self.desc.set_output(out_proto.name, out_arg_names)

        if self.attrs is not None:
            if not isinstance(self.attrs, dict):
                raise TypeError("'attrs' should be a dict.")
            for attr in proto.attrs:
                attr_name = attr.name
                if (attr_name not in self.attrs) or (
                        self.attrs[attr_name] is None):
                    continue
                if isinstance(self.attrs[attr_name], Block):
                    self.desc.set_block_attr(attr_name,
                                             self.attrs[attr_name].desc)
                elif isinstance(self.attrs[attr_name], list) and \
                      all(isinstance(v, Block) for v in self.attrs[attr_name]):
                    self.desc.set_blocks_attr(
                        attr_name, [v.desc for v in self.attrs[attr_name]])
                elif isinstance(self.attrs[attr_name], core.BlockDesc) or \
                        isinstance(self.attrs[attr_name], core.ProgramDesc):
                    self.desc.set_serialized_attr(
                        attr_name, self.attrs[attr_name].serialize_to_string())
                else:
                    self.desc.set_attr(attr_name, self.attrs[attr_name])
        self.desc.check_attrs()
        if self.has_kernel(type):
            self.desc.infer_var_type(self.block.desc)
            self.desc.infer_shape(self.block.desc)

    def has_kernel(self, op_type):
        return op_type not in self.OP_WITHOUT_KERNEL_SET

    def to_string(self, throw_on_error):
        """
        Get debug string.

        Args:
            throw_on_error(bool): Whether to raise an exception if self is not
                initialized.

        Returns:
            str: The debug string.

        """
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.OpDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def __str__(self):
        return self.to_string(True)

    __repr__ = __str__

    @property
    def type(self):
        return self.desc.type()

    def input(self, name):
        """
        Get the input arguments according to the input parameter name.

        Args:
            name(str): The input parameter name.

        Returns:
            list: return the list of argument names that are associated with \
                the specific parameter name.
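
        Examples:
            A minimal sketch; assumes a "sum" op was appended with two input
            variables var1 and var2 under the parameter name "X":

            >>> sum_op.input("X")
            ['var1', 'var2']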
        """
        return self.desc.input(name)

    def rename_input(self, old_name, new_name):
        """
        Rename the `old_name` to `new_name`.

        Args:
            old_name(str): The old name of the Operator's input.
            new_name(str): The new name of the Operator's input.

        Returns:
            None
        """
        self.desc.rename_input(old_name, new_name)

    def rename_output(self, old_name, new_name):
        """
        Rename the `old_name` to `new_name`.

        Args:
            old_name(str): The old name of the Operator's output.
            new_name(str): The new name of the Operator's output.

        Returns:
            None
        """
        self.desc.rename_output(old_name, new_name)

    @property
    def input_names(self):
        return self.desc.input_names()

    @property
    def input_arg_names(self):
        return self.desc.input_arg_names()

    @property
    def output_arg_names(self):
        return self.desc.output_arg_names()

    def output(self, name):
        """
        Get output arguments by the output parameter name.

        Args:
            name(str): The output parameter name.

        Returns:
            list: return the list of argument names associated with \
                the specific parameter name.
        """
        return self.desc.output(name)

    @property
    def output_names(self):
        return self.desc.output_names()

    @property
    def idx(self):
        for i, op in enumerate(self.block.ops):
            if op == self:
                return i
        raise ValueError(
            "Can't find op itself in its block. It could be a bug of Paddle.")

    def has_attr(self, name):
        """
        Whether this Operator has the attribute with the given name or not.

        Args:
            name(str): the attribute name.

        Returns:
            bool: True if has this attribute.

        """
        return self.desc.has_attr(name)

    def attr_type(self, name):
        """
        Get the type of attribute by attribute's name.

        Args:
            name(str): the attribute name.

        Returns:
            core.AttrType: the attribute type.
        """
        return self.desc.attr_type(name)

    def set_attr(self, name, val):
        """
        Set the value of attribute by attribute's name.

        Args:
            name(str): the attribute name.
            val(bool|int|str|float|list): the value of the attribute.

        Raises:
            ValueError: If the type of value doesn't match with desc.attr_type(name).
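
        Examples:
            A minimal sketch; assumes `op` is an existing Operator whose
            registered attributes include a boolean `is_test`:

            >>> op.set_attr("is_test", True)
            >>> op.attr("is_test")
            True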
        """
        self.attrs[name] = val
        if isinstance(val, Block):
            self.desc.set_block_attr(name, val.desc)
        elif isinstance(val, list) and all(isinstance(v, Block) for v in val):
            self.desc.set_blocks_attr(name, [v.desc for v in val])
        elif isinstance(val, core.BlockDesc) or \
                isinstance(val, core.ProgramDesc):
            self.desc.set_serialized_attr(name, val.serialize_to_string())
        else:
            self.desc.set_attr(name, val)

    @property
    def attr_names(self):
        return self.desc.attr_names()

    def attr(self, name):
        """
        Get the attribute by name.

        Args:
            name(str): the attribute name.

        Returns:
            bool|int|str|float|list: The attribute value. The return value
            can be any valid attribute type.
        """
        return self.desc.attr(name)

    def block_attr(self, name):
        """
        Get the block attribute by name.

        Args:
            name(str): the attribute name.

        Returns:
            int: the block index.
        """
        return self.desc.block_attr(name)

    def all_attrs(self):
        """
        Get the attribute dict.

        Returns:
            dict: The Operator's attribute dict.
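
        Examples:
            A minimal sketch; assumes `op` is an existing Operator:

            >>> attrs = op.all_attrs()
            >>> isinstance(attrs, dict)
            True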
        """
        attr_names = self.attr_names
        attr_map = {}
        for n in attr_names:
            if n == 'sub_block':
                attr_map[n] = self.block_attr(n)
            else:
                attr_map[n] = self.attr(n)
        return attr_map


class Block(object):
    """
    In Fluid, a Program consists of multiple Blocks, and a Block stores
    VarDescs and OpDescs. Within a specific Block, a VarDesc has a unique name.
    One block can have child blocks, and a child block's name scope should
    inherit the parent's, so that an OpDesc in a child block can reference
    a VarDesc that is stored in the parent block.
    Please refer to framework.proto for details.

    Args:
        program(Program): The Program that the Block belongs to.
        idx(int): The block's id in the Program.

    Notes:
        The constructor of Block should not be invoked directly. Please
        use `Program.create_block()` to create a block.

    Examples:
        .. code-block:: python

            cur_program = Program()
            cur_block = cur_program.current_block()
            var = cur_block.create_var(name="X",
                                       shape=[-1, 23, 48],
                                       dtype='float32')
            cur_block.append_op(type="abs",
                                inputs={"X": [var]},
                                outputs={"Out": [var]})
    """

    def __init__(self, program, idx):
        self.desc = program.desc.block(idx)
        self.vars = collections.OrderedDict()  # var_name --> var
        self.ops = list()  # operator list
        self.program = program
        self.removed_vars = collections.OrderedDict()

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        Get debug string.

        Args:
            throw_on_error(bool): raise an exception when self is not initialized
                and throw_on_error is True.
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when
                with_details is True. Default: False.

        Returns:
            str: The debug string.
        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        if with_details:
            re_add_indent = re.compile(r"\n(.)")
            res_str = "blocks {\n  idx: %d\n  parent_idx: %d" % (
                self.idx, self.parent_idx)
            for var in self.vars.itervalues():
                res_str += "\n  vars {\n    %s  }" % re_add_indent.sub(
                    r"\n    \1", var.to_string(throw_on_error, with_details))
            for op in self.ops:
                res_str += "\n  ops {\n    %s  }" % re_add_indent.sub(
                    r"\n    \1", op.to_string(throw_on_error))
            res_str += "\n}"
        else:
            protostr = self.desc.serialize_to_string()
            proto = framework_pb2.BlockDesc.FromString(str(protostr))
            res_str = _debug_string_(proto, throw_on_error)
        return res_str

    __repr__ = __str__

    @property
    def parent_idx(self):
        return self.desc.parent

    @property
    def forward_block_idx(self):
        return self.desc.get_forward_block_idx()

    def set_forward_block_idx(self, idx):
        """
        Set the forward block index.

        Args:
            idx(int): the block index.

        Returns:
            None
        """
        self.desc.set_forward_block_idx(idx)

    @property
    def idx(self):
        return self.desc.id

    def var(self, name):
        """
        Get a Variable by name from this block.

        Args:
            name(str): the Variable's name.

        Raises:
            ValueError: If the input's type is not str, or this block
                doesn't have a Variable with the given name.

        Returns:
            Variable: the Variable with the given name.
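
        Examples:
            A minimal sketch; assumes `cur_block` is an existing Block:

            >>> x = cur_block.create_var(name="X", shape=[1], dtype='float32')
            >>> cur_block.var("X") is x
            True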
        """
        if not isinstance(name, basestring):
            raise TypeError(
                "var require string as parameter, but get %s instead." %
                (type(name)))
        v = self.vars.get(name, None)
        if v is None:
            raise ValueError("var %s not in this block" % name)
        return v

    def var_recursive(self, name):
        """
        Get a Variable by name from this block recursively.

        Args:
            name(str): the Variable's name.

        Raises:
            ValueError: neither this block nor its ancestor blocks
                have a Variable with the given name.

        Returns:
            Variable: the Variable with the given name.
        """
        frontier = list()
        visited = set()

        frontier.append(self)

        prog = self.program

        while len(frontier) != 0:  # BFS
            cur = frontier[0]
            frontier = frontier[1:]

            if id(cur) in visited:
                continue

            if cur.has_var(name):
                return cur.var(name)

            if cur.parent_idx != -1:
                frontier.append(prog.block(cur.parent_idx))

            if cur.forward_block_idx != -1:
                frontier.append(prog.block(cur.forward_block_idx))

            visited.add(id(cur))

        raise ValueError("Var {0} is not found recursively".format(name))

    def all_parameters(self):
        return list(self.iter_parameters())

    def iter_parameters(self):
        return (item[1] for item in self.vars.iteritems()
                if isinstance(item[1], Parameter))

    def create_var(self, *args, **kwargs):
        var = Variable(block=self, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](var, self)
        return var

    def has_var(self, name):
        return name in self.vars

    def rename_var(self, name, new_name):
        """
        Rename variable in vars and ops' inputs and outputs

        Args:
            name(str): the name that needs to be renamed.
            new_name(str): the new name to rename to.

        Raises:
            ValueError: If this block doesn't have a variable with the given
                name, or if the type of the variable with the given name is
                neither Parameter nor Variable.

        Returns:
            Variable: the renamed Variable.
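
        Examples:
            A minimal sketch; assumes `cur_block` has a variable named "X":

            >>> renamed = cur_block.rename_var("X", "X_renamed")
            >>> renamed.name
            'X_renamed'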
        """
        if not self.has_var(name):
            raise ValueError("var %s is not in current block" % name)
        v = self.var(name)
        if type(v) == Parameter:
            var_type = "Parameter"
            stop_gradient = v.stop_gradient
            trainable = v.trainable
            optimize_attr = v.optimize_attr
            regularizer = v.regularizer
            gradient_clip_attr = v.gradient_clip_attr
            error_clip = v.error_clip
        elif type(v) == Variable:
            var_type = "Variable"
            error_clip = v.error_clip
            stop_gradient = v.stop_gradient
        else:
            raise ValueError("unsupported var type: %s" % type(v))
        orig_var_type = v.type
        self.desc.rename_var(name, new_name)
        # NOTE: v is destroyed by C++ after calling rename_var.
        d = self.desc.find_var(new_name)
        if var_type == "Parameter":
            var = Parameter(
                self,
                d.shape(),
                d.dtype(),
                type=orig_var_type,
                name=new_name,
                stop_gradient=stop_gradient,
                trainable=trainable,
                optimize_attr=optimize_attr,
                regularizer=regularizer,
                gradient_clip_attr=gradient_clip_attr,
                error_clip=error_clip)
        elif var_type == "Variable":
            var = Variable(
                self,
                type=orig_var_type,
                name=new_name,
                error_clip=error_clip,
                stop_gradient=stop_gradient)

        # rename the python side, sync_with_cpp will only add
        # new vars/ops to python side.
        self.vars[new_name] = var
        del self.vars[name]
        self.sync_with_cpp()
        return var

    def remove_var(self, name):
        self.sync_with_cpp()
        self.desc.remove_var(name)
        del self.vars[name]

    def create_parameter(self, *args, **kwargs):
        global_block = self.program.global_block()
        param = Parameter(global_block, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](param, self)
        return param

    def append_op(self, *args, **kwargs):
        """
        Appends a new Operator according to the given arguments.

        Returns:
            Operator: the appended Operator.
        """
        op_desc = self.desc.append_op()
        op = Operator(block=self, desc=op_desc, *args, **kwargs)
        self.ops.append(op)
        return op

    def insert_op(self, index, *args, **kwargs):
        """
        Insert an Operator according to the given arguments.

        Args:
            index(int): the index at which to insert the operator.

        Returns:
            Operator: the inserted Operator.
        """
        self.sync_with_cpp()
        op_desc = self.desc.insert_op(index)
        op = Operator(block=self, desc=op_desc, *args, **kwargs)
        self.ops.insert(index, op)
        return op

    def remove_op(self, index):
        """
        Remove the operator at the specified position.

        Args:
            index(int): the position of the operator to remove.

        Returns:
            None
        """
        self.sync_with_cpp()
        self.desc.remove_op(index, index + 1)
        del self.ops[index]

    def slice_ops(self, start, end):
        """
        Return the Operators between start and end.

        Args:
            start(int): the start position.
            end(int): the end position.

        Returns:
            list: the Operators between start and end.
        """
        return self.ops[start:end]

    def prepend_op(self, *args, **kwargs):
        op_desc = self.desc.prepend_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.insert(0, op)
        return op

    def sync_with_cpp(self):
        """
        Sync from the desc on the c++ end. This method is used to synchronize
        the c++ desc instance generated by backward.
        """
        # sync variables from cpp
        for var in self.desc.all_vars():
            if not self.has_var(var.name()):
                self.create_var(name=var.name(), desc=var, type=var.type())

        # sync variables removed from c++ end
        for var in self.vars.keys():
            if not self.desc.find_var(var):
                self.vars.pop(var)

        # sync operators from cpp
        ops_in_cpp = []
        for op_idx in range(0, self.desc.op_size()):
            ops_in_cpp.append(self.desc.op(op_idx))

        if len(self.ops) != 0:
            first_op_in_python = self.ops[0].desc
            last_op_in_python = self.ops[len(self.ops) - 1].desc
            start_index = None
            end_index = None
            for index in range(len(ops_in_cpp)):
                if first_op_in_python == ops_in_cpp[index]:
                    start_index = index
                if last_op_in_python == ops_in_cpp[index]:
                    end_index = index
            assert start_index is not None
            assert end_index is not None
            assert start_index <= end_index
        else:
            start_index = 0
            end_index = -1

        # sync ops appended to the head of cpp_ops
        for index in range((start_index - 1 - 1), -1, -1):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.insert(0, op)

        # sync ops appended to the end of cpp_ops
        for index in range((end_index + 1), len(ops_in_cpp)):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.append(op)

        # sync ops removed from c++ end
        if end_index != -1 and end_index < len(self.ops):
            ops_in_cpp_index = 0
            ops_in_python_index = 0
            while ops_in_python_index < len(
                    self.ops) and ops_in_cpp_index < len(ops_in_cpp):
                if self.ops[ops_in_python_index].desc != ops_in_cpp[
                        ops_in_cpp_index]:
                    del self.ops[ops_in_python_index]
                else:
                    ops_in_cpp_index += 1
                    ops_in_python_index += 1

        assert len(self.ops) == len(ops_in_cpp)
        for index in range(len(self.ops)):
            assert self.ops[index].desc == ops_in_cpp[index]

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from the other block.

        Args:
            other(Block): the other block.

        Raises:
            ValueError: If type of input is not Block, or the `other` and this
                block is not in the same topology.

        Returns:
            None
        """
        if not isinstance(other, Block):
            raise TypeError("copy_param_info_from should be invoked with Block")
        for p in other.iter_parameters():
            assert isinstance(p, Parameter)
            v = self.vars.get(p.name, None)
            if v is None:
                raise ValueError("copy_param_info_from should be invoked with "
                                 "same topology")
            assert isinstance(v, Variable)
            new_p = Parameter(
                block=self,
                shape=v.shape,
                dtype=v.dtype,
                type=v.type,
                lod_level=v.lod_level,
                stop_gradient=p.stop_gradient,
                trainable=p.trainable,
                optimize_attr=p.optimize_attr,
                regularizer=p.regularizer,
                gradient_clip_attr=p.gradient_clip_attr,
                error_clip=p.error_clip,
                name=v.name)
            self.vars[new_p.name] = new_p

    def clone_variable(self, var):
        """
        Clone a variable into the current block.

        Args:
            var: the variable to be cloned.

        Returns:
            Variable: the new variable cloned from 'var' in the current block.
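
        Examples:
            A minimal sketch; assumes `src_var` lives in another block of the
            same program:

            >>> cloned = cur_block.clone_variable(src_var)
            >>> cloned.name == src_var.name
            True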
        """
        assert isinstance(var, Variable)
        ret_var = None
        # make sure the STEP_SCOPES var can be safely cloned.
        if var.type == core.VarDesc.VarType.STEP_SCOPES:
            ret_var = self.create_var(
                name=var.name, persistable=var.persistable, type=var.type)
        elif var.type == core.VarDesc.VarType.SELECTED_ROWS:
            ret_var = self.create_var(
                name=var.name,
                shape=var.shape,
                dtype=var.dtype,
                type=var.type,
                persistable=True,
                is_data=var.is_data)
        else:
            ret_var = self.create_var(
                name=var.name,
                shape=var.shape,
                dtype=var.dtype,
                type=var.type,
                lod_level=var.lod_level,
                persistable=True,
                is_data=var.is_data)
        return ret_var


class Program(object):
    """
    Python Program. Beneath it is a ProgramDesc, which is used to create the
    C++ Program. A Program is a self-contained, programming-language-like
    container. It has at least one Block; when a control flow op such as
    conditional_block or while_op is included, it will contain nested blocks.
    Please refer to framework.proto for details.

    Notes: a default_startup_program and a default_main_program are provided
    by default, and such a pair shares the parameters.
    The default_startup_program only runs once to initialize parameters,
    while the default_main_program runs in every mini-batch and adjusts the weights.

    Returns:
        An empty program.

    Examples:
        >>> main_program = fluid.Program()
        >>> startup_program = fluid.Program()
        >>> with fluid.program_guard(main_program=main_program, startup_program=startup_program):
        >>>     fluid.layers.data(name="x", shape=[-1, 784], dtype='float32')
        >>>     fluid.layers.data(name="y", shape=[-1, 1], dtype='int32')
        >>>     fluid.layers.fc(name="fc", shape=[10], dtype='float32', act="relu")

    """

    def __init__(self):
        self.desc = core.ProgramDesc()
        self.blocks = [Block(self, 0)]
        self.current_block_idx = 0
        self._seed = 0
        self._current_role = core.op_proto_and_checker_maker.OpRole.Forward
        self._op_role_var = []

    @property
    def op_role(self):
        """
        The operator role, which is an enum in {Forward, Backward, Optimize}.

        Notes: this is a low level API. It is used only for ParallelExecutor to
        duplicate or schedule operators onto devices.

        For example, the forward operator should be executed on every device.
        The backward operator should be executed on every device and the
        parameter gradient of backward (use :code:`op_role_var` to get this
        variable) operator should be merged to one device. The optimization
        operators should be executed on only one device and broadcast the
        optimization result, i.e., the new parameter, to every other device.
        """
        return self._current_role

    @op_role.setter
    def set_op_role(self, role):
        self._current_role = role

    @property
    def op_role_var(self):
        """
        The auxiliary variables for :code:`op_role` property.

        See Also: :code:`Program.op_role`'s documentation for details.

        Notes: This is a very low-level API. Users should not use it directly.
        """
        return self._op_role_var

    @op_role_var.setter
    def set_op_role_var(self, var_name):
        self._op_role_var = [var_name]

    @contextlib.contextmanager
    def optimized_guard(self, var):
        """
        A with guard to set :code:`Optimization` :code:`OpRole` and
        :code:`OpRoleVar` automatically.

        Notes: This is a very low level API. Users should not use it directly.

        Args:
            var(Variable|str): The variable (name) to be optimized.

        Examples:

            >>> p, g = backward(...)
            >>> with program.optimized_guard(p):
            >>>     p = p - 0.001 * g
        """
        OpRole = core.op_proto_and_checker_maker.OpRole
        self._current_role = OpRole.Optimize
        self._op_role_var = [var.name if isinstance(var, Variable) else var]
        yield
        self._op_role_var = []
        self._current_role = OpRole.Forward

    def __str__(self):
        """
        Get the protobuf debug string of this Program.

        Returns:
            (str): The protobuf debug string.

        Raises:
            ValueError: If any of required fields is not set.
        """
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        To debug string.

        Args:
            throw_on_error(bool): raise Value error when any of required fields
                is not set.

            with_details(bool): True if more details about variables and
                parameters, e.g., :code:`trainable`, :code:`optimize_attr`, need
                to be printed.

        Returns:
            (str): The debug string.

        Raises:
            ValueError: If any of required fields is not set and throw_on_error is
                True.

        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        if with_details:
            res_str = ""
            for block in self.blocks:
                res_str += block.to_string(throw_on_error, with_details)
        else:
            protostr = self.desc.serialize_to_string()
            proto = framework_pb2.ProgramDesc.FromString(str(protostr))
            res_str = _debug_string_(proto, throw_on_error)
        return res_str

    def get_desc(self):
        """
        Get the C++ side of `ProgramDesc` object pointer. The C++ object is
        exposed by :code:`pybind`.

        Notes: This is a very low level API. Users should not use this API
        directly.
        """
        return self.desc

    def clone(self, for_test=False):
        """
        Create a new, duplicated program.


        Some operators, e.g., :code:`batch_norm`, behave differently between
        training and testing. They have an attribute, :code:`is_test`, to
        control this behaviour. This method will change the :code:`is_test`
        attribute of them to :code:`True` when :code:`for_test=True`.
1391

Y
yuyang18 已提交
1392 1393 1394 1395 1396
        * Set for_test to False when we want to clone the program for training.
        * Set for_test to True when we want to clone the program for testing.

        Notes: This API DOES NOT prune any operator. Use
        :code:`clone(for_test=True)` before backward and optimization please.
1397 1398

        Args:
Y
yuyang18 已提交
1399 1400
            for_test(bool): True if change the :code:`is_test` attribute of
                operators to :code:`True`.
1401

D
dzhwinter 已提交
1402
        Returns:
Y
yuyang18 已提交
1403 1404 1405 1406 1407 1408 1409 1410 1411 1412 1413 1414 1415 1416 1417 1418 1419 1420 1421 1422 1423 1424 1425 1426 1427 1428 1429 1430 1431 1432 1433 1434 1435 1436 1437 1438 1439 1440 1441 1442 1443 1444 1445 1446 1447 1448 1449 1450 1451 1452 1453 1454 1455
            Program: The new, duplicated Program object.

        Examples:

            1. To clone a test program, the sample code is:

            >>> import paddle.fluid as fluid
            >>> train_program = fluid.Program()
            >>> startup_program = fluid.Program()
            >>> with fluid.program_guard(train_program, startup_program):
            >>>     img = fluid.layers.data(name='image', shape=[784])
            >>>     hidden = fluid.layers.fc(input=img, size=200, act='relu')
            >>>     hidden = fluid.layers.dropout(hidden, dropout_prob=0.5)
            >>>     loss = fluid.layers.cross_entropy(
            >>>                 input=fluid.layers.fc(hidden, size=10, act='softmax'),
            >>>                 label=fluid.layers.data(name='label', shape=[1], dtype='int64'))
            >>>
            >>> test_program = train_program.clone(for_test=True)
            >>>
            >>> sgd = fluid.optimizer.SGD(learning_rate=1e-3)
            >>> with fluid.program_guard(train_program, startup_program):
            >>>     sgd.minimize(loss)

            2. The :code:`clone` method can be avoided if you create the
            program for training and the program for testing individually.

            >>> import paddle.fluid as fluid
            >>>
            >>> def network(is_test):
            >>>     img = fluid.layers.data(name='image', shape=[784])
            >>>     hidden = fluid.layers.fc(input=img, size=200, act='relu')
            >>>     hidden = fluid.layers.dropout(hidden, dropout_prob=0.5, is_test=is_test)
            >>>     loss = fluid.layers.cross_entropy(
            >>>                 input=fluid.layers.fc(hidden, size=10, act='softmax'),
            >>>                 label=fluid.layers.data(name='label', shape=[1], dtype='int64'))
            >>>     return loss
            >>>
            >>> train_program = fluid.Program()
            >>> startup_program = fluid.Program()
            >>> test_program = fluid.Program()
            >>>
            >>> with fluid.program_guard(train_program, startup_program):
            >>>     with fluid.unique_name.guard():
            >>>         loss = network(is_test=False)
            >>>         sgd = fluid.optimizer.SGD(learning_rate=1e-3)
            >>>         sgd.minimize(loss)
            >>>
            >>> # the test startup program is not used.
            >>> with fluid.program_guard(test_program, fluid.Program()):
            >>>     with fluid.unique_name.guard():
            >>>         loss = network(is_test=True)

            The two code snippets above will generate the same programs.
        """
        if for_test:
            p = self.inference_optimize()
        else:
            p = Program()
            p.desc = core.ProgramDesc(self.desc)
            p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
            p.sync_with_cpp()

        p.copy_param_info_from(self)
        p.copy_data_info_from(self)
        return p
    def prune(self, targets):
        """
        Prune operators and variables which are not needed to generate
        :code:`targets`.

        Notes: This is a very low level API. Users should not use this API
        directly. This API is in flux and not stable.

        Args:
            targets(list|Variable|Operator): The variables or operators that
                the pruned program must still be able to generate.

        Returns:
            Program: A new, pruned program.
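
        Examples:
            A minimal sketch (assuming :code:`loss` is a Variable computed by
            an operator in this program):

            >>> pruned_program = program.prune(targets=[loss])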

        """
        if not isinstance(targets, list):
            targets = [targets]
        targets_idx = []
        for t in targets:
            if not isinstance(t, Operator):
                if isinstance(t, Variable):
                    # After transpiler processing, the op that outputs this
                    # variable may have been changed, so t.op is not reliable
                    # and we need to find the current op that generates this
                    # variable here.
                    t.op = None
                    global_block = self.global_block()
                    for idx, op in enumerate(global_block.ops):
                        if t.name in op.output_arg_names:
                            t.op = op
                            break

                    t = t.op
                    if t is None:
                        raise ValueError(
                            "The target variable must have an "
                            "associated operator that generates it.")
                else:
                    raise ValueError("All targets of prune() can only be "
                                     "Variable or Operator.")

            targets_idx.append([t.block.idx, t.idx])
        res = Program()
        res.desc = core.prune(self.desc, targets_idx)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    def inference_optimize(self):
        """
        This method will create a new program and change the :code:`is_test`
        attribute of operators to :code:`True`. All the :code:`Parameter`
        information will be lost.

        Notes: This is a very low level API. Use
        :code:`Program.clone(for_test=True)` instead.

        Returns:
            Program: The new program.
        """
        # This is an alternative implementation to be used until
        # core.inference_optimize is fixed.
        res = Program()
        res.desc = core.ProgramDesc(self.desc)
        for i in xrange(res.desc.num_blocks()):
            block = res.desc.block(i)
            for j in xrange(block.op_size()):
                op = block.op(j)
                if op.has_attr('is_test'):
                    op.set_attr('is_test', True)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    @staticmethod
    def parse_from_string(binary_str):
        """
        Deserialize a program desc from protobuf binary string.

        Notes: All information about parameters will be lost after serialization
        and deserialization.

        Args:
            binary_str(str): The binary protobuf string.

        Returns:
            Program: A deserialized program desc.
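
        Examples:
            A minimal round-trip sketch:

            >>> binary_str = program.desc.serialize_to_string()
            >>> restored = Program.parse_from_string(binary_str)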
        """
        p = Program()
        p.desc = core.ProgramDesc(binary_str)
        p.blocks = [Block(p, i) for i in xrange(p.desc.num_blocks())]
        p.sync_with_cpp()
        return p
    @property
    def random_seed(self):
        """
        The default random seed for random operators in this Program. Zero
        means the seed is taken from a random device.

        Notes: It must be set before the operators have been added.
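
        Examples:
            A minimal sketch:

            >>> prog = Program()
            >>> prog.random_seed = 90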
        """
        return self._seed

    @property
    def num_blocks(self):
        """
        The number of blocks in this program.
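
        Examples:
            A minimal sketch:

            >>> prog = Program()
            >>> assert prog.num_blocks == 1  # a new Program has only the global block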
        """
        return self.desc.num_blocks()

    @random_seed.setter
    def random_seed(self, seed):
        if not isinstance(seed, int):
            raise ValueError("Seed must be an integer.")
        self._seed = seed

    def __repr__(self):
        return str(self)

    def global_block(self):
        """
        Get the first block of this program.
        """
        return self.blocks[0]

    def block(self, index):
        """
        Get the :code:`index` block of this program.

        Args:
            index(int): The index of block to get

        Returns:
            Block: The :code:`index` block
        """
        return self.blocks[index]

    def current_block(self):
        """
        Get the current block. The :code:`current` block is the block to which
        operators are appended.
        """
        return self.blocks[self.current_block_idx]

    def create_block(self, parent_idx=None):
        """
        Create a new block with the given :code:`parent_idx` and make it the
        current block.

        Args:
            parent_idx(int): The parent block index.

        Returns:
            Block: The new block.
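
        Examples:
            A minimal sketch of how :code:`create_block` pairs with
            :code:`rollback`:

            >>> prog = Program()
            >>> sub_block = prog.create_block()
            >>> # operators appended now go into sub_block
            >>> prog.rollback()  # the parent block becomes current again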
        """
        new_block_idx = len(self.blocks)
        parent = self.current_block() if parent_idx is None else self.block(
            parent_idx)
        self.desc.append_block(parent.desc)
        self.current_block_idx = new_block_idx
        self.blocks.append(Block(self, self.current_block_idx))
        return self.current_block()

    def rollback(self):
        """
        Exit a code block, i.e., roll back to the parent block.

        Returns:
            None
        """
        self.current_block_idx = self.current_block().parent_idx

    def sync_with_cpp(self):
        """
        Synchronize the Python instance with its bound C++ object instance.
        If the program is modified in C++ space, this method should be invoked.

        Notes: This is a very low level API. Users should not invoke it
        directly.

        Returns:
            None
        """
        for block_idx in range(len(self.blocks), self.desc.num_blocks()):
            self.blocks.append(Block(self, block_idx))
        for block in self.blocks:
            block.sync_with_cpp()

    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from the other program.

        Notes: This is a very low level API. Users should not invoke it
        directly.

        Args:
            other(Program): Other program

        Returns:
            None
        """
        if not isinstance(other, Program):
            raise TypeError("copy_param_info_from should be invoked with a "
                            "Program")

        if len(self.blocks) != len(other.blocks):
            raise ValueError("copy_param_info_from should be invoked with two "
                             "programs that represent the same topology")
        self.global_block().copy_param_info_from(other.global_block())

    def copy_data_info_from(self, other):
        """
        Copy the information of data variables from the other program.

        Notes: This is a very low level API. Users should not invoke it
        directly.

        Args:
            other(Program): Other program

        Returns:
            None
        """
        if not isinstance(other, Program):
            raise TypeError("copy_data_info_from should be invoked with a "
                            "Program")

        if len(self.blocks) != len(other.blocks):
            raise ValueError("copy_data_info_from should be invoked with two "
                             "programs that represent the same topology")
        for var in other.global_block().vars.itervalues():
            if var.is_data:
                self.global_block().var(var.name).is_data = True

    def list_vars(self):
        """
        Get all variables from this Program. An iterable object is returned.

        Returns:
            iterable: The generator will yield every variable in this program.
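
        Examples:
            A minimal sketch:

            >>> for var in program.list_vars():
            >>>     print(var.name)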
        """
        for each_block in self.blocks:
            for each_var in each_block.vars.itervalues():
                yield each_var


class Parameter(Variable):
    """
    Parameter is derived from Variable. A parameter is a persistable
    Variable, and will be updated by optimizers after each iteration.
    The training of a neural network is essentially the updating of
    its parameters.

    Relative to a general Variable, a Parameter has several member
    variables of its own:

    Args:
        trainable(bool): True if the parameter needs to be updated after
            iterations.
        optimize_attr(map): Parameter attributes related to optimization.
            Currently, it only contains 'learning_rate'.
            Default: {'learning_rate': 1.0}
        regularizer(WeightDecayRegularizer): The Regularizer which will
            be applied on the parameter. Default: None
        gradient_clip_attr(BaseGradientClipAttr): The gradient clipping strategy
            which will be applied on the parameter. Default: None
        do_model_average(bool): True if the model average strategy will
            be applied on this parameter.
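
    Examples:
        A minimal sketch (parameters are normally created by layer functions,
        not constructed directly):

        >>> import paddle.fluid as fluid
        >>> img = fluid.layers.data(name='image', shape=[784])
        >>> hidden = fluid.layers.fc(input=img, size=200)
        >>> params = [v for v in fluid.default_main_program().list_vars()
        >>>           if isinstance(v, fluid.framework.Parameter)]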
    """

    def __init__(self, block, shape, dtype, **kwargs):
        if shape is None or dtype is None:
            raise ValueError("Parameter must set shape and dtype")
        if len(shape) == 0:
            raise ValueError("Parameter shape cannot be empty")

        for each in shape:
            if each < 0:
                raise ValueError("Parameter shape should not be related with "
                                 "batch-size")

        Variable.__init__(
            self, block, persistable=True, shape=shape, dtype=dtype, **kwargs)
        self.trainable = kwargs.get('trainable', True)

        self.optimize_attr = kwargs.get('optimize_attr', {'learning_rate': 1.0})

        self.regularizer = kwargs.get('regularizer', None)

        self.gradient_clip_attr = kwargs.get('gradient_clip_attr', None)

        self.do_model_average = kwargs.get('do_model_average', None)

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error, with_details=False):
        """
        Convert this Parameter to a human-readable debug string.

        Args:
            throw_on_error(bool): raise an exception when self is not
                initialized and throw_on_error is True
            with_details(bool): more details about variables and parameters
                (e.g. trainable, optimize_attr, ...) will be printed when with_details is True

        Returns:
            (str): The debug string.

        """
        assert isinstance(throw_on_error, bool) and isinstance(with_details,
                                                               bool)
        if with_details:
            res_str = Variable.to_string(self, throw_on_error, True)
            additional_attr = ("trainable", "optimize_attr", "regularizer",
                               "gradient_clip_attr", "do_model_average")
            for attr_name in additional_attr:
                res_str += "%s: %s\n" % (attr_name,
                                         str(getattr(self, attr_name)))
        else:
            res_str = Variable.to_string(self, throw_on_error, False)
        return res_str

    __repr__ = __str__


# program is a global instance.
_main_program_ = Program()
_startup_program_ = Program()


def default_startup_program():
    """
    Get default/global startup program.

    The layer functions in :code:`fluid.layers` will create parameters, readers,
    and NCCL handles as global variables. The :code:`startup_program` will
    initialize them with the operators it contains. The layer functions will
    append these initialization operators into the startup program.

    This method returns the :code:`default` or the :code:`current` startup
    program. Users can use :code:`fluid.program_guard` to switch programs.
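
    Examples:
        A minimal sketch:

        >>> import paddle.fluid as fluid
        >>> exe = fluid.Executor(fluid.CPUPlace())
        >>> # run the startup program once to initialize global variables
        >>> exe.run(fluid.default_startup_program())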

    Returns:
        Program: startup program
    """
    return _startup_program_


def default_main_program():
    """
    Get default/global main program. The main program is used for training or
    testing.

    All layer functions in :code:`fluid.layers` will append operators and
    variables to the :code:`default_main_program`.

    The :code:`default_main_program` is the default program for many APIs.
    For example, the :code:`Executor.run()` will execute the
    :code:`default_main_program` when the program is not specified.
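
    Examples:
        A minimal sketch:

        >>> import paddle.fluid as fluid
        >>> # layer functions append to the default main program
        >>> image = fluid.layers.data(name='image', shape=[784])
        >>> prog = fluid.default_main_program()
        >>> assert image.block.program is prog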
    Returns:
        Program: main program
    """
    return _main_program_


def switch_main_program(program):
    """
    Switch the main program to a new program.
    Args:
        program(Program): The new main program

    Returns:
        Program: The previous main program
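
    Examples:
        A minimal sketch:

        >>> prog = Program()
        >>> prev = switch_main_program(prog)
        >>> # layer functions now build into prog
        >>> switch_main_program(prev)  # restore the previous main program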
    """
    global _main_program_
    prev_program = _main_program_
    _main_program_ = program
    return prev_program


def switch_startup_program(program):
    """
    Switch the startup program to a new program.

    Args:
        program(Program): The new startup program

    Returns:
        Program: The previous startup program
    """
    global _startup_program_
    prev_program = _startup_program_
    _startup_program_ = program
    return prev_program


@contextlib.contextmanager
def program_guard(main_program, startup_program=None):
    """
    Change the global main program and startup program with the `with`
    statement. Layer functions in the Python `with` block will append
    operators and variables to the new main program.
    Examples:

        >>> import paddle.fluid as fluid
        >>> main_program = fluid.Program()
        >>> startup_program = fluid.Program()
        >>> with fluid.program_guard(main_program, startup_program):
        >>>     data = fluid.layers.data(...)
        >>>     hidden = fluid.layers.fc(...)

    Notes: The temporary :code:`Program` can be used if the user does not need
    to construct either of startup program or main program.
1884

Y
Y
yuyang18 已提交
1886 1887 1888 1889 1890 1891

        >>> import paddle.fluid as fluid
        >>> main_program = fluid.Program()
        >>> # does not care about startup program. Just pass a temporary value.
        >>> with fluid.program_guard(main_program, fluid.Program()):
        >>>     data = ...
1892

Y
Yu Yang 已提交
1893
    Args:
Y
yuyang18 已提交
1894
        main_program(Program): New main program inside `with` statement.
1895
        startup_program(Program): New startup program inside `with` statement.
Y
    """
    if not isinstance(main_program, Program):
        raise TypeError("main_program should be Program")
    main_program = switch_main_program(main_program)
    if startup_program is not None:
        if not isinstance(startup_program, Program):
            raise TypeError("startup_program should be Program")
        startup_program = switch_startup_program(startup_program)
    yield
    switch_main_program(main_program)
    if startup_program is not None:
        switch_startup_program(startup_program)


def get_var(name, program=None):
    """
    Get a variable by name from the global block of a program.

    Args:
        name(str): name of the variable
        program(Program|None): the program object.
            If None, default_main_program() will be used.

    Returns:
        Variable
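
    Examples:
        A minimal sketch:

        >>> import paddle.fluid as fluid
        >>> img = fluid.layers.data(name='image', shape=[784])
        >>> var = get_var('image')  # looks up default_main_program()
        >>> assert var.name == 'image'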
    """
    if program is None:
        program = default_main_program()
    assert isinstance(name, str)
    assert isinstance(program, Program)

    return program.global_block().var(name)