# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import six

from collections import defaultdict
from paddle.fluid import core
from paddle.fluid import framework

__all__ = ['Tracer']


def release_op(op):
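    """Drop the tracer's references to a finished op's inputs, outputs and
    backward refs so they can be garbage-collected. Registered as a backward
    hook in ``Tracer.trace_op``.
    """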
    del framework._dygraph_tracer()._ops[op._trace_id].inputs
    del framework._dygraph_tracer()._ops[op._trace_id].outputs
    del framework._dygraph_tracer()._ops[op._trace_id].backward_refs


class Tracer(core.Tracer):
    """
    Python wrapper of the dygraph tracer.

    Keeps a Python-side record of traced ops and variables and calls into the
    inherited ``core.Tracer`` to do the actual tracing.
    """

    def __init__(self, block):
        super(Tracer, self).__init__(block)

        # trace id -> Python op wrapper, kept only for ops that require grad
        self._ops = defaultdict()
        # variable name -> Python Variable wrapper
        self._vars = defaultdict()
        # id assigned to the next traced op
        self._trace_id = 0

    def trace_var(self, name, var):
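        """Record the Python ``Variable`` wrapper under ``name`` for later lookup."""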
        self._vars[name] = var

    def all_parameters(self):
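        """Return all traced variables that are ``framework.Parameter`` instances."""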
        return list((item for name, item in six.iteritems(self._vars)
                     if isinstance(item, framework.Parameter)))

    def trace_op(self, op, inputs, outputs, stop_gradient=False):
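        """Trace one imperative op.

        Flattens ``inputs`` and ``outputs`` (single Variables or lists/tuples
        of Variables) into their ``_ivar`` handles, links the Python-side op
        graph via ``op.previous_ops`` and ``var.op``, forwards everything to
        the C++ tracer, and, unless ``stop_gradient`` is set, records the op
        and registers ``release_op`` as its backward hook when the tracer
        reports backward refs.
        """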
        # TODO(minqiyang): remove this line after we take apart all
        # backward grads and forward variables
        op.inputs = inputs
        inps = defaultdict(list)
        for k, vars in six.iteritems(inputs):
            if isinstance(vars, framework.Variable):
                op.previous_ops.append(vars.op)
                inps[k].append(vars._ivar)
            elif isinstance(vars, list) or isinstance(vars, tuple):
                for var in vars:
                    op.previous_ops.append(var.op)
                    inps[k].append(var._ivar)

        op.outputs = outputs
        outs = defaultdict(list)
        for k, vars in six.iteritems(outputs):
            if isinstance(vars, framework.Variable):
                vars.op = op
                outs[k].append(vars._ivar)
            elif isinstance(vars, list) or isinstance(vars, tuple):
                for var in vars:
                    var.op = op
                    outs[k].append(var._ivar)

        # record op's trace id
        op.iop._trace_id = self._trace_id

        backward_refs = self.trace(op.iop, inps, outs, op.attrs,
                                   framework._current_expected_place(),
                                   stop_gradient)

        if not stop_gradient:
            self._trace_id += 1
            self._ops[op.iop._trace_id] = op

            # register backward hooks and variables if needed
            if len(backward_refs) > 0:
                op.iop.register_backward_hooks(release_op)

                # TODO(minqiyang): remove all inputs and outputs after separate
                # var and grad
                op.backward_refs = defaultdict(list)
                for k, v in six.iteritems(inputs):
                    if k in backward_refs:
                        op.backward_refs[k] = inputs[k]

                for k, v in six.iteritems(outputs):
                    if k in backward_refs:
                        op.backward_refs[k] = outputs[k]