import paddle.v2.fluid.core as core
import paddle.v2.fluid.proto.framework_pb2 as framework_pb2
import collections
import numpy as np
import copy

__all__ = [
    'Block', 'Variable', 'Program', 'Operator', 'default_startup_program',
    'default_main_program'
]


def unique_name(prefix):
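    """Return a name unique within this process: the prefix joined with an
    integer drawn from a process-wide counter, e.g. "fc_0" for prefix "fc".
    """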
    uid = core.unique_integer(prefix)  # unique during whole process.
    return "_".join([prefix, str(uid)])


def _debug_string_(proto, throw_on_error=True):
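    """Return the string form of a protobuf message; if `throw_on_error` is
    set and required fields are missing, raise ValueError instead.
    """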
    error_fields = list()
    if not proto.IsInitialized(error_fields) and throw_on_error:
        raise ValueError("{0} are not initialized\nThe message is {1}".format(
            error_fields, proto))
    return str(proto)


class Variable(object):
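    """A Variable wraps a C++ VarDesc inside a Block.

    If a variable with the same name already exists in the block, the
    existing desc is reused and this constructor checks that the type, shape,
    dtype and lod_level of the new declaration match it.
    """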
    def __init__(self,
                 block,
                 type=core.VarDesc.VarType.LOD_TENSOR,
                 name=None,
                 shape=None,
                 dtype=None,
                 lod_level=None,
                 persistable=None,
                 stop_gradient=False,
                 **kwargs):
        self.block = block

        if name is None:
            name = Variable._unique_var_name_()
        is_new_var = False
        self.desc = self.block.desc.find_var(name)

        if self.desc is None:
            self.desc = self.block.desc.var(name)
            is_new_var = True

        if is_new_var:
            self.desc.set_type(type)
        elif self.desc.type() != type:
            raise ValueError("Variable {0} has been created before. The "
                             "previous type is {1}; the new type is {2}. "
                             "They do not match.".format(self.name,
                                                         self.desc.type(),
                                                         type))

        if shape is not None:
            if is_new_var:
                self.desc.set_shape(shape)
            else:
                old_shape = self.shape
                shape = tuple(shape)
                if shape != old_shape:
                    raise ValueError(
                        "Variable {0} has been created before. The previous "
                        "shape is {1}; the new shape is {2}. They do not "
                        "match.".format(self.name, old_shape, shape))
        if dtype is not None:
            if not isinstance(dtype, core.DataType):
                dtype = Variable._convert_np_dtype_to_dtype_(dtype)
            if is_new_var:
                self.desc.set_data_type(dtype)
            else:
                old_dtype = self.data_type
                if dtype != old_dtype:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous data type is {1}; the new "
                                     "data type is {2}. They do not "
                                     "match.".format(self.name, old_dtype,
                                                     dtype))

        if lod_level is not None:
            if is_new_var:
                self.desc.set_lod_level(lod_level)
            else:
                if lod_level != self.lod_level:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous lod_level is {1}; the new "
                                     "lod_level is {2}. They do not "
                                     "match.".format(self.name, self.lod_level,
                                                     lod_level))
        if persistable is not None:
            if is_new_var:
                self.desc.set_persistable(persistable)
            else:
                if persistable != self.persistable:
                    raise ValueError(
                        "Variable {0} has been created before. "
                        "The previous persistable is {1}; the new "
                        "persistable is {2}. They do not match.".format(
                            self.name, self.persistable, persistable))

        self.block.vars[name] = self
        self.op = None
        self.stop_gradient = stop_gradient

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.VarDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    __repr__ = __str__

    @property
    def persistable(self):
        return self.desc.persistable()

    @persistable.setter
    def persistable(self, p):
        self.desc.set_persistable(p)

    @property
    def name(self):
        return self.desc.name()

    @property
    def shape(self):
        # convert to tuple, make it as same as numpy API.
        return tuple(self.desc.shape())

    @property
    def data_type(self):
        return self.desc.data_type()

    @property
    def lod_level(self):
        return self.desc.lod_level()

    @property
    def type(self):
        return self.desc.type()

    @staticmethod
    def _unique_var_name_():
        return unique_name("_generated_var")

    @staticmethod
    def _convert_np_dtype_to_dtype_(np_dtype):
        dtype = np.dtype(np_dtype)
        if dtype == np.float32:
            return core.DataType.FP32
        elif dtype == np.float64:
            return core.DataType.FP64
        elif dtype == np.float16:
            return core.DataType.FP16
        elif dtype == np.int32:
            return core.DataType.INT32
        elif dtype == np.int16:
            return core.DataType.INT16
        elif dtype == np.int64:
            return core.DataType.INT64
        elif dtype == np.bool:
            return core.DataType.BOOL
        else:
            raise ValueError("Unsupported numpy dtype " + str(dtype))
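    # For example, Variable._convert_np_dtype_to_dtype_("float32") yields
    # core.DataType.FP32 (np.dtype normalizes the string first), while an
    # unsupported dtype such as np.uint8 raises ValueError.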


def get_all_op_protos():
    """
    Get all registered op protos from the PaddlePaddle C++ side.
    :return: A list of registered OpProto messages.
    """
    protostrs = core.get_all_op_protos()
    ret_values = []
    for pbstr in protostrs:
        op_proto = framework_pb2.OpProto.FromString(str(pbstr))
        ret_values.append(op_proto)
    return ret_values


class OpProtoHolder(object):
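    """A process-wide cache of all registered OpProtos, keyed by op type;
    use `OpProtoHolder.instance()` to obtain the singleton.
    """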
    @classmethod
    def instance(cls):
        if not hasattr(cls, '_instance'):
            cls._instance = cls()
        return cls._instance

    def __init__(self):
        assert not hasattr(
            self.__class__,
            '_instance'), 'Please use `instance()` to get an OpProtoHolder object!'
        op_protos = get_all_op_protos()
        self.op_proto_map = {}
        for proto in op_protos:
            self.op_proto_map[proto.type] = proto

    def get_op_proto(self, type):
        if type not in self.op_proto_map:
            raise ValueError("Operator \"%s\" has not been registered." % type)
        return self.op_proto_map[type]


class Operator(object):
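    """An Operator wraps a C++ OpDesc inside a Block.

    If `desc` already carries a type (e.g. it came from C++), the remaining
    arguments are ignored; otherwise `type`, `inputs`, `outputs` and `attrs`
    are written into the desc and validated against the OpProto registered
    for that type.
    """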
    def __init__(self,
                 block,
                 desc,
                 type=None,
                 inputs=None,
                 outputs=None,
                 attrs=None):
        self.block = block
        self.desc = desc
        if len(self.desc.type()) != 0:
            return
        if type is None:
            raise ValueError(
                "`type` to initialize an Operator cannot be None.")
        self.desc.set_type(type)
        proto = OpProtoHolder.instance().get_op_proto(type)

        def find_name(var_list, name):
            for var_name in var_list:
                if var_name == name:
                    return True
            return False

        if inputs is not None:
            for in_proto in proto.inputs:
                found = find_name(inputs, in_proto.name)
                assert found or in_proto.dispensable, "Input {} not found".format(
                    in_proto.name)

                if found:
                    in_args = inputs[in_proto.name]
                    if not isinstance(in_args, list):
                        in_args = [in_args]
                    if not in_proto.duplicable and len(in_args) > 1:
                        raise ValueError(
                            "Input %s expects only one input, but %d are given."
                            % (in_proto.name, len(in_args)))
                    in_arg_names = []
                    for arg in in_args:
                        in_arg_names.append(arg.name)
                    self.desc.set_input(in_proto.name, in_arg_names)
                else:
                    self.desc.set_input(in_proto.name, [])

        if outputs is not None:
            given = set(outputs)
            need = set(m.name for m in proto.outputs)
            if given != need:
                raise ValueError(
                    "Incorrect setting for output(s) of operator \"%s\". "
                    "Need: [%s] Given: [%s]" %
                    (type, ", ".join(str(e) for e in need),
                     ", ".join(str(e) for e in given)))

            for out_proto in proto.outputs:
                out_args = outputs[out_proto.name]
                if not isinstance(out_args, list):
                    out_args = [out_args]
                if not out_proto.duplicable and len(out_args) > 1:
                    raise ValueError(
                        "Output %s expects only one output, but %d are given." %
                        (out_proto.name, len(out_args)))
                out_arg_names = []
                for arg in out_args:
                    out_arg_names.append(arg.name)
                    arg.op = self
                self.desc.set_output(out_proto.name, out_arg_names)

        if attrs is not None:
            if not isinstance(attrs, dict):
                raise TypeError("'attrs' should be a dict.")
            for attr in proto.attrs:
                attr_name = attr.name
                if (attr_name not in attrs) or (attrs[attr_name] is None):
                    continue
                if isinstance(attrs[attr_name], Block):
                    self.desc.set_block_attr(attr_name, attrs[attr_name].desc)
                else:
                    self.desc.set_attr(attr_name, attrs[attr_name])

        self.desc.check_attrs()
        no_kernel_op_set = {
            'feed', 'fetch', 'save', 'load', 'recurrent',
            'rnn_memory_helper_grad', 'conditional_block', 'while'
        }
        if type not in no_kernel_op_set:
            self.desc.infer_var_type(self.block.desc)
            self.desc.infer_shape(self.block.desc)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.OpDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def __str__(self):
        return self.to_string(True)

    __repr__ = __str__

    @property
    def type(self):
        return self.desc.type()

    def input(self, name):
        return self.desc.input(name)

    @property
    def input_names(self):
        return self.desc.input_names()

    def output(self, name):
        return self.desc.output(name)

    @property
    def output_names(self):
        return self.desc.output_names()

    @property
    def idx(self):
        for i, op in enumerate(self.block.ops):
            if op == self:
                return i
        raise ValueError(
            "Can't find op itself in its block. It could be a bug of Paddle.")

    def has_attr(self, name):
        return self.desc.has_attr(name)

    def attr_type(self, name):
        return self.desc.attr_type(name)

    @property
    def attr_names(self):
        return self.desc.attr_names()

    def attr(self, name):
        return self.desc.attr(name)

    def block_attr(self, name):
        return self.desc.block_attr(name)


class Block(object):
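    """A Block wraps a C++ BlockDesc and tracks the Variables and Operators
    declared in it. `ops` is a deque so that operators can be cheaply
    prepended as well as appended.
    """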
    def __init__(self, program, idx):
        self.desc = program.desc.block(idx)
        self.vars = dict()  # var_name --> var
        self.ops = collections.deque()  # operator list
        self.program = program

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.BlockDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    __repr__ = __str__

    @property
    def parent_idx(self):
        return self.desc.parent

    @property
    def idx(self):
        return self.desc.id

    def var(self, name):
        if not isinstance(name, basestring):
            raise TypeError()
        v = self.vars.get(name, None)
        if v is None:
            raise ValueError("Variable %s is not found in this block" % name)
        return v

    def all_parameters(self):
        return {v for k, v in self.vars.iteritems() if isinstance(v, Parameter)}

    def create_var(self, *args, **kwargs):
        var = Variable(self, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](var, self)
        return var

    def has_var(self, name):
        return name in self.vars

    def create_parameter(self, *args, **kwargs):
        global_block = self.program.global_block()
        param = Parameter(global_block, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](param, self)
        return param

    def append_op(self, *args, **kwargs):
        op_desc = self.desc.append_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.append(op)
        return op

    def prepend_op(self, *args, **kwargs):
        op_desc = self.desc.prepend_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.appendleft(op)
        return op

    def sync_with_cpp(self):
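        """Pull variables and operators created on the C++ side into this
        Python block.

        Ops may have been prepended or appended to the underlying BlockDesc
        (e.g. by Program.append_backward), so locate the contiguous span of
        ops Python already knows about and wrap the new ones on both sides.
        """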
        # sync variables from cpp
        for var in self.desc.all_vars():
            if not self.has_var(var.name()):
                self.create_var(name=var.name(), desc=var, type=var.type())

        # sync operators from cpp
        ops_in_cpp = []
        for op_idx in range(0, self.desc.op_size()):
            ops_in_cpp.append(self.desc.op(op_idx))

        if len(self.ops) != 0:
            first_op_in_python = self.ops[0].desc
            last_op_in_python = self.ops[len(self.ops) - 1].desc
            start_index = None
            end_index = None
            for index in range(len(ops_in_cpp)):
                if first_op_in_python == ops_in_cpp[index]:
                    start_index = index
                if last_op_in_python == ops_in_cpp[index]:
                    end_index = index
            assert start_index is not None
            assert end_index is not None
            assert start_index <= end_index
        else:
            start_index = 0
            end_index = -1

        # sync ops prepended on the C++ side to the head of self.ops
        for index in range(start_index - 1, -1, -1):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.appendleft(op)

        # sync ops appended on the C++ side to the tail of self.ops
        for index in range((end_index + 1), len(ops_in_cpp)):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.append(op)

        assert len(self.ops) == len(ops_in_cpp)
        for index in range(len(self.ops)):
            assert self.ops[index].desc == ops_in_cpp[index]

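# A minimal sketch of building a block by hand (the variable and operator
# names below are illustrative, and a "mul" operator with inputs X/Y, output
# Out and the two *_num_col_dims attributes is assumed to be registered;
# the layers API normally constructs all of this for you):
#
#   prog = Program()
#   block = prog.global_block()
#   x = block.create_var(name="x", shape=[-1, 4], dtype="float32")
#   w = block.create_parameter(name="w", shape=[4, 1], dtype="float32")
#   out = block.create_var(name="out", dtype="float32")
#   block.append_op(type="mul", inputs={"X": x, "Y": w},
#                   outputs={"Out": out},
#                   attrs={"x_num_col_dims": 1, "y_num_col_dims": 1})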

class Program(object):
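    """A Program wraps a C++ ProgramDesc: a list of Blocks whose first entry
    is the global block. `current_block_idx` names the block new operators
    are appended to; `create_block` and `rollback` enter and leave nested
    blocks.
    """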
    def __init__(self):
        self.desc = core.ProgramDesc()
        self.blocks = [Block(self, 0)]
        self.current_block_idx = 0

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.ProgramDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def clone(self):
        p = Program()
        p.desc = core.ProgramDesc(self.desc)
        p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
        p.sync_with_cpp()
        return p

    def prune(self, targets):
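        """Build a new Program that keeps only the ops needed to compute
        `targets`, each of which is a Variable or an Operator; the pruning
        itself happens on the C++ side.
        """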
        if not isinstance(targets, list):
            targets = [targets]
        targets_idx = []
        for t in targets:
            if not isinstance(t, Operator):
                if isinstance(t, Variable):
                    t = t.op
                else:
                    raise ValueError(
                        "Each target of prune() must be a Variable or an "
                        "Operator.")

            targets_idx.append([t.block.idx, t.idx])
        res = Program()
        res.desc = core.prune(self.desc, targets_idx)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    @staticmethod
    def parse_from_string(binary_str):
        p = Program()
        p.desc = core.ProgramDesc(binary_str)
        p.blocks = [Block(p, i) for i in xrange(p.desc.num_blocks())]
        p.sync_with_cpp()
        return p

    def __repr__(self):
        return str(self)

    def global_block(self):
        return self.blocks[0]

    def block(self, index):
        return self.blocks[index]

    def current_block(self):
        return self.blocks[self.current_block_idx]

    def append_backward(self, target, no_grad_set=None):
        """Append gradient ops for `target` to this program, and return a
        map: param_name -> (grad_name, block_index, op_index).
        """
        assert isinstance(target, Variable)
        if no_grad_set is None:
            no_grad_set = set()
        try:
            param_to_grad_info = self.desc.append_backward(target.desc,
                                                           no_grad_set)
        except Exception as e:
            raise core.EnforceNotMet(
                str(e) + "\nCurrent protobuf is\n{0}".format(
                    self.to_string(False)))

        self.sync_with_cpp()
        return param_to_grad_info
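
    # Illustrative use: given a scalar loss variable built in this program,
    #   param_grads = program.append_backward(loss)
    # appends the gradient ops and reports, for each parameter, where its
    # gradient variable lives.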

    def create_block(self):
        new_block_idx = len(self.blocks)
        self.desc.append_block(self.current_block().desc)
        self.current_block_idx = new_block_idx
        self.blocks.append(Block(self, self.current_block_idx))
        return self.current_block()

    def rollback(self):
        self.current_block_idx = self.current_block().parent_idx

    def sync_with_cpp(self):
        for block_idx in range(len(self.blocks), self.desc.num_blocks()):
            self.blocks.append(Block(self, block_idx))
        for block in self.blocks:
            block.sync_with_cpp()

    def list_vars(self):
        for each_block in self.blocks:
            for each_var in each_block.vars.itervalues():
                yield each_var


class Parameter(Variable):
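    """A Parameter is a persistable Variable in the global block, typically
    a trainable weight. Its shape must be fully specified; no dimension may
    be negative, since a parameter cannot depend on the batch size.
    """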
    def __init__(self, block, shape, dtype, **kwargs):
        if shape is None or dtype is None:
            raise ValueError("Parameter must set shape and dtype")
        if len(shape) == 0:
            raise ValueError("Parameter shape cannot be empty")

        for each in shape:
            if each < 0:
                raise ValueError("Parameter shape should not be related to "
                                 "batch size")

        Variable.__init__(
            self, block, persistable=True, shape=shape, dtype=dtype, **kwargs)
        self.trainable = kwargs.get('trainable', True)

        self.optimize_attr = kwargs.get('optimize_attr', {'learning_rate': 1.0})

        self.regularizer = kwargs.get('regularizer', None)


# The default main and startup programs are global instances.
g_main_program = Program()
g_startup_program = Program()


def default_startup_program():
    return g_startup_program


def default_main_program():
    return g_main_program
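

# Illustrative use of the two global programs (here `exe` stands for an
# Executor from paddle.v2.fluid.executor; feed/fetch details are omitted):
#
#   exe.run(default_startup_program())   # run once to initialize parameters
#   for data in reader():
#       exe.run(default_main_program(), feed=..., fetch_list=[...])
#
# By convention, parameter-initialization ops go into the startup program,
# while the per-iteration computation lives in the main program.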