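"""Python wrappers around the protobuf descriptions (ProgramDesc, BlockDesc,
VarDesc, OpDesc) that make up a PaddlePaddle Fluid program.

The classes below (Program, Block, Variable, Operator, Parameter) keep a
Python-side view of the descriptions exposed by `paddle.v2.fluid.core` and
keep the two sides in sync.
"""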
import paddle.v2.fluid.core as core
import paddle.v2.fluid.proto.framework_pb2 as framework_pb2
import collections
import numpy as np
import copy

__all__ = [
    'Block', 'Variable', 'Program', 'Operator', 'default_startup_program',
    'default_main_program'
]


def unique_name(prefix):
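    """Return a name that is unique within the current process.

    The result is `prefix` joined with an increasing integer id handed out by
    `core.unique_integer`, so e.g. the first call with prefix "fc" would
    typically yield "fc_0".
    """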
    uid = core.unique_integer(prefix)  # unique during whole process.
    return "_".join([prefix, str(uid)])


def _debug_string_(proto, throw_on_error=True):
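    """Return the human-readable string form of a protobuf message.

    If `proto` has uninitialized required fields and `throw_on_error` is
    True, a ValueError listing those fields is raised instead.
    """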
    error_fields = list()
    if not proto.IsInitialized(error_fields) and throw_on_error:
        raise ValueError("{0} are not initialized\nThe message is {1}".format(
            error_fields, proto))
    return proto.__str__()


class Variable(object):
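    """Python wrapper of a VarDesc inside a Block.

    Constructing a Variable either registers a new VarDesc under `name` in
    the block, or, when a variable of that name already exists, checks that
    type, shape, dtype, lod_level and persistable are consistent with the
    existing description and raises ValueError when they are not.
    """
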
    def __init__(self,
                 block,
                 type=core.VarDesc.VarType.LOD_TENSOR,
                 name=None,
                 shape=None,
                 dtype=None,
                 lod_level=None,
                 persistable=None,
                 stop_gradient=False,
                 **kwargs):
        self.block = block

        if name is None:
            name = Variable._unique_var_name_()
        is_new_var = False
        self.desc = self.block.desc.find_var(name)

        if self.desc is None:
            self.desc = self.block.desc.var(name)
            is_new_var = True

        if is_new_var:
            self.desc.set_type(type)
        elif self.desc.type() != type:
            raise ValueError("Variable {0} has been created before. The "
                             "previous type is {1}; the new type is {2}. They"
                             " are not matched".format(self.name,
                                                       self.desc.type(), type))

        if shape is not None:
            if is_new_var:
                self.desc.set_shape(shape)
            else:
                old_shape = self.shape
                shape = tuple(shape)
                if shape != old_shape:
                    raise ValueError(
                        "Variable {0} has been created before. The previous "
                        "shape is {1}; the new shape is {2}. They are not "
                        "matched.".format(self.name, old_shape, shape))
        if dtype is not None:
            if not isinstance(dtype, core.DataType):
                dtype = Variable._convert_np_dtype_to_dtype_(dtype)
            if is_new_var:
                self.desc.set_data_type(dtype)
            else:
                old_dtype = self.data_type
                if dtype != old_dtype:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous data type is {1}; the new "
                                     "data type is {2}. They are not "
                                     "matched.".format(self.name, old_dtype,
                                                       dtype))

        if lod_level is not None:
            if is_new_var:
                self.desc.set_lod_level(lod_level)
            else:
                if lod_level != self.lod_level:
                    raise ValueError("Variable {0} has been created before. "
                                     "The previous lod_level is {1}; the new "
                                     "lod_level is {2}. They are not "
                                     "matched".format(self.name, self.lod_level,
                                                      lod_level))
        if persistable is not None:
            if is_new_var:
                self.desc.set_persistable(persistable)
            else:
                if persistable != self.persistable:
                    raise ValueError(
                        "Variable {0} has been created before."
                        "The previous persistable is {1}; the new "
                        "persistable is {2}. They are not matched".format(
                            self.name, self.persistable, persistable))

        self.block.vars[name] = self
        self.op = None
        self.stop_gradient = stop_gradient

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.VarDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    __repr__ = __str__

    @property
    def persistable(self):
        return self.desc.persistable()

    @persistable.setter
    def persistable(self, p):
        self.desc.set_persistable(p)

    @property
    def name(self):
        return self.desc.name()

    @property
    def shape(self):
        # convert to a tuple, to be consistent with the numpy API.
        return tuple(self.desc.shape())

    @property
    def data_type(self):
        return self.desc.data_type()

    @property
    def lod_level(self):
        return self.desc.lod_level()

    @property
    def type(self):
        return self.desc.type()

    @staticmethod
    def _unique_var_name_():
        prefix = "_generated_var"
        uid = core.unique_integer(prefix)  # unique during whole process.
        return "_".join([prefix, str(uid)])

    @staticmethod
    def _convert_np_dtype_to_dtype_(np_dtype):
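        """Convert a numpy dtype (or anything `np.dtype` accepts, e.g. the
        string 'float32') to the corresponding core.DataType value.

        Raises ValueError for dtypes without a DataType counterpart.
        """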
        dtype = np.dtype(np_dtype)
        if dtype == np.float32:
            return core.DataType.FP32
        elif dtype == np.float64:
            return core.DataType.FP64
        elif dtype == np.float16:
            return core.DataType.FP16
        elif dtype == np.int32:
            return core.DataType.INT32
        elif dtype == np.int16:
            return core.DataType.INT16
        elif dtype == np.int64:
            return core.DataType.INT64
        elif dtype == np.bool:
            return core.DataType.BOOL
        else:
            raise ValueError("Not supported numpy dtype " + str(dtype))


def get_all_op_protos():
    """
    Get all registered op proto from PaddlePaddle C++ end.
    :return: A list of registered OpProto.
    """
    protostrs = core.get_all_op_protos()
    ret_values = []
    for pbstr in protostrs:
        op_proto = framework_pb2.OpProto.FromString(str(pbstr))
        ret_values.append(op_proto)
    return ret_values


class OpProtoHolder(object):
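    """Process-wide cache of the operator protos registered on the C++ side.

    Use OpProtoHolder.instance() to obtain the singleton and
    get_op_proto(type) to look up the proto of a single operator type.
    """
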
    @classmethod
    def instance(cls):
        if not hasattr(cls, '_instance'):
            cls._instance = cls()
        return cls._instance

    def __init__(self):
        assert not hasattr(
            self.__class__,
            '_instance'), 'Please use `instance()` to get OpProtoHolder object!'
        op_protos = get_all_op_protos()
        self.op_proto_map = {}
        for proto in op_protos:
            self.op_proto_map[proto.type] = proto

    def get_op_proto(self, type):
        if type not in self.op_proto_map:
            raise ValueError("Operator \"%s\" has not been registered." % type)
        return self.op_proto_map[type]


class Operator(object):
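    """Python wrapper of an OpDesc inside a Block.

    If the given desc already carries a type, the wrapper simply attaches to
    it. Otherwise type, inputs, outputs and attrs are written into the desc,
    the attributes are checked, and, for operators that have kernels, the
    variable types and shapes are inferred.
    """
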
    def __init__(self,
                 block,
                 desc,
                 type=None,
                 inputs=None,
                 outputs=None,
                 attrs=None):
        self.block = block
        self.desc = desc
        if len(self.desc.type()) != 0:
            return
        if type is None:
            raise ValueError(
                "`type` to initialize an Operator can not be None.")
        self.desc.set_type(type)
        proto = OpProtoHolder.instance().get_op_proto(type)

        def find_name(var_list, name):
            for var_name in var_list:
                if var_name == name:
                    return True
            return False

        if inputs is not None:
            for in_proto in proto.inputs:
                found = find_name(inputs, in_proto.name)
                assert found or in_proto.dispensable, "Input {} not found".format(
                    in_proto.name)

                if found:
                    in_argus = inputs[in_proto.name]
                    if not isinstance(in_argus, list):
                        in_argus = [in_argus]
                    if not in_proto.duplicable and len(in_argus) > 1:
                        raise ValueError(
                            "Input %s expects only one input, but %d are given."
                            % (in_proto.name, len(in_argus)))
                    in_argu_names = []
                    for argu in in_argus:
                        in_argu_names.append(argu.name)
                    self.desc.set_input(in_proto.name, in_argu_names)
                else:
                    self.desc.set_input(in_proto.name, [])

        if outputs is not None:
            given = set()
            need = set()
            for n in outputs:
                given.add(n)
            for m in proto.outputs:
                need.add(m.name)
            if not given == need:
                raise ValueError(
                    "Incorrect setting for output(s) of operator \"%s\". Need: [%s] Given: [%s]"
                    % (type, ", ".join(str(e) for e in need), ", ".join(
                        str(e) for e in given)))

            for out_proto in proto.outputs:
                out_argus = outputs[out_proto.name]
                if not isinstance(out_argus, list):
                    out_argus = [out_argus]
                if not out_proto.duplicable and len(out_argus) > 1:
                    raise ValueError(
                        "Output %s expects only one output, but %d are given." %
                        (out_proto.name, len(out_argus)))
                out_argu_names = []
                for argu in out_argus:
                    out_argu_names.append(argu.name)
                    argu.op = self
                self.desc.set_output(out_proto.name, out_argu_names)

        if attrs is not None:
            if not isinstance(attrs, dict):
                raise TypeError("'attrs' should be a dict.")
            for attr in proto.attrs:
                attr_name = attr.name
                if (attr_name not in attrs) or (attrs[attr_name] is None):
                    continue
                if isinstance(attrs[attr_name], Block):
                    self.desc.set_block_attr(attr_name, attrs[attr_name].desc)
                else:
                    self.desc.set_attr(attr_name, attrs[attr_name])

        self.desc.check_attrs()
        no_kernel_op_set = {
            'feed', 'fetch', 'save', 'load', 'recurrent',
            'rnn_memory_helper_grad', 'conditional_block', 'while'
        }
        if type not in no_kernel_op_set:
            self.desc.infer_var_type(self.block.desc)
            self.desc.infer_shape(self.block.desc)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.OpDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def __str__(self):
        return self.to_string(True)

    __repr__ = __str__

    @property
    def type(self):
        return self.desc.type()

    def input(self, name):
        return self.desc.input(name)

    @property
    def input_names(self):
        return self.desc.input_names()

    def output(self, name):
        return self.desc.output(name)

    @property
    def output_names(self):
        return self.desc.output_names()

    @property
    def idx(self):
        for i, op in enumerate(self.block.ops):
            if op == self:
                return i
        raise ValueError(
            "Can't find op itself in its block. It could be a bug of Paddle.")

    def has_attr(self, name):
        return self.desc.has_attr(name)

    def attr_type(self, name):
        return self.desc.attr_type(name)

    @property
    def attr_names(self):
        return self.desc.attr_names()

    def attr(self, name):
        return self.desc.attr(name)

    def block_attr(self, name):
        return self.desc.block_attr(name)


class Block(object):
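    """Python wrapper of a BlockDesc inside a Program.

    Keeps the Python-side view of the block: a name -> Variable dict and a
    deque of Operators, together with the Program that owns the block.
    """
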
    def __init__(self, program, idx):
        self.desc = program.desc.block(idx)
        self.vars = dict()  # var_name --> var
        self.ops = collections.deque()  # operator list
        self.program = program

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.BlockDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    __repr__ = __str__

    @property
    def parent_idx(self):
        return self.desc.parent

    @property
    def idx(self):
        return self.desc.id

    def var(self, name):
        if not isinstance(name, basestring):
            raise TypeError()
        v = self.vars.get(name, None)
        if v is None:
            raise ValueError("var %s not in this block" % name)
        return v

    def all_parameters(self):
        return {v for k, v in self.vars.iteritems() if isinstance(v, Parameter)}

    def create_var(self, *args, **kwargs):
        var = Variable(self, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](var, self)
        return var

    def has_var(self, name):
        return name in self.vars

    def create_parameter(self, *args, **kwargs):
        global_block = self.program.global_block()
        param = Parameter(global_block, *args, **kwargs)
        if 'initializer' in kwargs:
            kwargs['initializer'](param, self)
        return param

    def append_op(self, *args, **kwargs):
        op_desc = self.desc.append_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.append(op)
        return op

    def prepend_op(self, *args, **kwargs):
        op_desc = self.desc.prepend_op()
        op = Operator(self, op_desc, *args, **kwargs)
        self.ops.appendleft(op)
        return op

    def sync_with_cpp(self):
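        """Pull variables and operators that exist in the C++ BlockDesc but
        are not yet tracked on the Python side (for example those added by
        Program.append_backward) into self.vars and self.ops.
        """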
        # sync variables from cpp
        for var in self.desc.all_vars():
            if not self.has_var(var.name()):
                self.create_var(name=var.name(), desc=var, type=var.type())

        # sync operators from cpp
        ops_in_cpp = []
        for op_idx in range(0, self.desc.op_size()):
            ops_in_cpp.append(self.desc.op(op_idx))

        if len(self.ops) != 0:
            first_op_in_python = self.ops[0].desc
            last_op_in_python = self.ops[len(self.ops) - 1].desc
            start_index = None
            end_index = None
            for index in range(len(ops_in_cpp)):
                if first_op_in_python == ops_in_cpp[index]:
                    start_index = index
                if last_op_in_python == ops_in_cpp[index]:
                    end_index = index
            assert start_index is not None
            assert end_index is not None
            assert start_index <= end_index
        else:
            start_index = 0
            end_index = -1

        # prepend the ops that exist in the C++ desc but are not yet tracked
        # in Python, walking backwards so their original order is preserved
        for index in range(start_index - 1, -1, -1):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.appendleft(op)

        # append the ops that were added at the tail of the C++ desc
        for index in range((end_index + 1), len(ops_in_cpp)):
            op_desc = ops_in_cpp[index]
            op = Operator(self, op_desc)
            self.ops.append(op)

        assert len(self.ops) == len(ops_in_cpp)
        for index in range(len(self.ops)):
            assert self.ops[index].desc == ops_in_cpp[index]


class Program(object):
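    """Python wrapper of a ProgramDesc.

    Holds the list of Blocks; block 0 is the global block. create_block()
    and rollback() move a cursor (current_block_idx) that is used while
    nested blocks are being constructed.
    """
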
    def __init__(self):
        self.desc = core.ProgramDesc()
        self.blocks = [Block(self, 0)]
        self.current_block_idx = 0

    def __str__(self):
        return self.to_string(True)

    def to_string(self, throw_on_error):
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.ProgramDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)

    def clone(self):
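        """Return a copy of this Program backed by a new ProgramDesc copied
        from the current one.
        """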
        p = Program()
        p.desc = core.ProgramDesc(self.desc)
        p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
        p.sync_with_cpp()
        return p

    def prune(self, targets):
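        """Return a new Program that keeps only the operators required to
        compute the given targets (Variables or Operators), as determined by
        core.prune.
        """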
        if not isinstance(targets, list):
            targets = [targets]
        targets_idx = []
        for t in targets:
            if not isinstance(t, Operator):
                if isinstance(t, Variable):
                    t = t.op
                else:
                    raise ValueError(
                        "All targets of prune() can only be Variable or Operator."
                    )

            targets_idx.append([t.block.idx, t.idx])
        res = Program()
        res.desc = core.prune(self.desc, targets_idx)
        res.blocks = [Block(res, i) for i in xrange(res.desc.num_blocks())]
        res.sync_with_cpp()
        return res

    @staticmethod
    def parse_from_string(binary_str):
        p = Program()
        p.desc = core.ProgramDesc(binary_str)
        p.blocks = [Block(p, i) for i in xrange(p.desc.num_blocks())]
        p.sync_with_cpp()
        return p

    def __repr__(self):
        return str(self)

    def global_block(self):
        return self.blocks[0]

    def block(self, index):
        return self.blocks[index]

    def current_block(self):
        return self.blocks[self.current_block_idx]

    def append_backward(self, target, no_grad_set=None):
        """
        Append backward (gradient) operators for `target` to this program and
        return a map: param_name -> (grad_name, block_index, op_index).
        """
        assert isinstance(target, Variable)
        if no_grad_set is None:
            no_grad_set = set()
        try:
            param_to_grad_info = self.desc.append_backward(target.desc,
                                                           no_grad_set)
        except Exception as e:
            raise core.EnforceNotMet(
                str(e) + "\nCurrent protobuf is\n{0}".format(
                    self.to_string(False)))

        self.sync_with_cpp()
        return param_to_grad_info

    def create_block(self):
        new_block_idx = len(self.blocks)
        self.desc.append_block(self.current_block().desc)
        self.current_block_idx = new_block_idx
        self.blocks.append(Block(self, self.current_block_idx))
        return self.current_block()

    def rollback(self):
        self.current_block_idx = self.current_block().parent_idx

    def sync_with_cpp(self):
        for block_idx in range(len(self.blocks), self.desc.num_blocks()):
            self.blocks.append(Block(self, block_idx))
        for block in self.blocks:
            block.sync_with_cpp()

    def list_vars(self):
        for each_block in self.blocks:
            for each_var in each_block.vars.itervalues():
                yield each_var


class Parameter(Variable):
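    """A persistable Variable carrying training metadata (trainable,
    optimize_attr, regularizer) read from kwargs; typically created in the
    global block via Block.create_parameter.
    """
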
    def __init__(self, block, shape, dtype, **kwargs):
        if shape is None or dtype is None:
            raise ValueError("Parameter must set shape and dtype")
        if len(shape) == 0:
            raise ValueError("Parameter shape cannot be empty")

        for each in shape:
            if each < 0:
                raise ValueError("Parameter shape should not be related with "
                                 "batch-size")

        Variable.__init__(
            self, block, persistable=True, shape=shape, dtype=dtype, **kwargs)
        self.trainable = kwargs.get('trainable', True)

        self.optimize_attr = kwargs.get('optimize_attr', {'learning_rate': 1.0})

        self.regularizer = kwargs.get('regularizer', None)


# The default main and startup programs are global (per-process) instances.
g_main_program = Program()
g_startup_program = Program()


def default_startup_program():
    return g_startup_program


def default_main_program():
    return g_main_program
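

# A minimal usage sketch of this module alone (assuming the compiled `core`
# extension is importable); the name "x" below is purely illustrative:
#
#   prog = Program()
#   block = prog.global_block()
#   x = block.create_var(name="x", shape=[2, 3], dtype="float32")
#   assert x.shape == (2, 3)
#   print(prog)  # protobuf text of the underlying ProgramDesc
#
# Higher-level fluid APIs typically record operators into
# default_main_program() and initialization ops into default_startup_program().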