#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numpy as np
from op import Operator

from paddle.fluid import core


def create_op(scope, op_type, inputs, outputs, attrs, cache_list=None):
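    '''Create an Operator of op_type and register its input and output
    variables in the given scope.

    Only the variable names are materialized here; the tensor values are
    filled in later (see set_input). cache_list optionally names extra
    variables to create and pass through to the operator unchanged.
    '''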
    kwargs = {}

    op_maker = core.op_proto_and_checker_maker
    op_role_attr_name = op_maker.kOpRoleAttrName()

    if op_role_attr_name not in attrs:
        attrs[op_role_attr_name] = int(op_maker.OpRole.Forward)

    def __create_var__(name, var_name):
        scope.var(var_name).get_tensor()
        kwargs[name].append(var_name)

    for in_name, in_dup in Operator.get_op_inputs(op_type):
        if in_name in inputs:
            kwargs[in_name] = []
            if in_dup:
                sub_in = inputs[in_name]
                for item in sub_in:
                    sub_in_name, _ = item[0], item[1]
                    __create_var__(in_name, sub_in_name)
            else:
                __create_var__(in_name, in_name)
    if cache_list is not None and isinstance(cache_list, list):
        for name in cache_list:
            kwargs[name] = []
            scope.var(name)
            kwargs[name].append(name)

    for out_name, out_dup in Operator.get_op_outputs(op_type):
        if out_name in outputs:
            kwargs[out_name] = []
            if out_dup:
                sub_out = outputs[out_name]
                for item in sub_out:
                    sub_out_name, _ = item[0], item[1]
                    __create_var__(out_name, sub_out_name)
            else:
                __create_var__(out_name, out_name)

    for attr_name in Operator.get_op_attr_names(op_type):
        if attr_name in attrs:
            kwargs[attr_name] = attrs[attr_name]

    for extra_attr_name in Operator.get_op_extra_attr_names(op_type):
        if extra_attr_name in attrs:
            kwargs[extra_attr_name] = attrs[extra_attr_name]

    return Operator(op_type, **kwargs)


def set_input(scope, op, inputs, place):
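    '''Copy the numpy values in inputs into the tensors that create_op
    registered in the scope.

    A tuple value is interpreted as (ndarray, recursive_sequence_lengths)
    for LoD tensors; plain float/int values are stored directly on the
    variable.
    '''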
    def __set_input__(var_name, var):
        if isinstance(var, (tuple, np.ndarray)):
            tensor = scope.find_var(var_name).get_tensor()
            if isinstance(var, tuple):
                tensor.set_recursive_sequence_lengths(var[1])
                var = var[0]
            tensor._set_dims(var.shape)
            tensor.set(var, place)
        elif isinstance(var, float):
            scope.find_var(var_name).set_float(var)
        elif isinstance(var, int):
            scope.find_var(var_name).set_int(var)

    for in_name, in_dup in Operator.get_op_inputs(op.type()):
        if in_name in inputs:
            if in_dup:
                sub_in = inputs[in_name]
                for item in sub_in:
                    sub_in_name, sub_in_val = item[0], item[1]
                    __set_input__(sub_in_name, sub_in_val)
            else:
                __set_input__(in_name, inputs[in_name])
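
# A minimal usage sketch of create_op + set_input (hypothetical operator
# name and shapes; assumes the "elementwise_add" op is registered):
#
#     scope = core.Scope()
#     place = core.CPUPlace()
#     x = np.random.random((2, 3)).astype("float32")
#     y = np.random.random((2, 3)).astype("float32")
#     op = create_op(
#         scope,
#         "elementwise_add",
#         inputs={"X": x, "Y": y},
#         outputs={"Out": np.zeros((2, 3), dtype="float32")},
#         attrs={},
#     )
#     set_input(scope, op, {"X": x, "Y": y}, place)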


def append_input_output(
    block, op_proto, np_list, is_input, dtype, is_calc_ref=False
):
    '''Insert VarDescs into the block for the op proto's inputs or outputs
    and return the generated Python variable instances.'''
    proto_list = op_proto.inputs if is_input else op_proto.outputs

    def create_var(block, name, np_list, var_proto, is_calc_ref=False):
        dtype = None
        shape = None
        lod_level = None
        if name not in np_list:
            assert var_proto.intermediate, f"{name} not found"
        else:
            # Infer the dtype from the numpy value.
            np_value = np_list[name]
            if isinstance(np_value, tuple):
                dtype = np_value[0].dtype
                # Output shape and lod should be inferred from the input.
                if is_input:
                    shape = list(np_value[0].shape)
                    lod_level = len(np_value[1])
            else:
                dtype = np_value.dtype
                if is_input:
                    shape = list(np_value.shape)
                    lod_level = 0
            if is_calc_ref and (dtype == np.float16 or dtype == np.uint16):
                dtype = np.float32
        return block.create_var(
            dtype=dtype, shape=shape, lod_level=lod_level, name=name
        )

    var_dict = {}
    for var_proto in proto_list:
        var_name = str(var_proto.name)
        if (var_name not in np_list) and var_proto.dispensable:
            continue
        if is_input:
            assert (var_name in np_list) or (
                var_proto.dispensable
            ), f"Missing {var_name} as input"
        if var_proto.duplicable:
            assert isinstance(
                np_list[var_name], list
            ), f"Duplicable {var_name} should be set as list"
            var_list = []
            for (name, np_value) in np_list[var_name]:
                var_list.append(
                    create_var(
                        block, name, {name: np_value}, var_proto, is_calc_ref
                    )
                )
            var_dict[var_name] = var_list
        else:
            var_dict[var_name] = create_var(
                block, var_name, np_list, var_proto, is_calc_ref
            )

    return var_dict
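
# A minimal usage sketch (hypothetical op and data; assumes paddle.fluid is
# importable and the "mean" op proto is registered):
#
#     import paddle.fluid as fluid
#     block = fluid.Program().global_block()
#     proto = fluid.framework.OpProtoHolder.instance().get_op_proto("mean")
#     x = np.random.random((4, 4)).astype("float32")
#     in_vars = append_input_output(block, proto, {"X": x}, True, "float32")
#     out_vars = append_input_output(block, proto, {"Out": x}, False, "float32")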


def append_loss_ops(block, output_names):
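    '''Append mean/sum/scale ops that reduce the variables named in
    output_names to a single scalar loss variable, and return it.'''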
    mean_inputs = list(map(block.var, output_names))

    if len(mean_inputs) == 1:
        loss = block.create_var(dtype=mean_inputs[0].dtype, shape=[1])
        op = block.append_op(
            inputs={"X": mean_inputs}, outputs={"Out": loss}, type='mean'
        )
        op.desc.infer_var_type(block.desc)
        op.desc.infer_shape(block.desc)
    else:
        avg_sum = []
        for cur_loss in mean_inputs:
            cur_avg_loss = block.create_var(dtype=cur_loss.dtype, shape=[1])
            op = block.append_op(
                inputs={"X": [cur_loss]},
                outputs={"Out": [cur_avg_loss]},
                type="mean",
            )
            op.desc.infer_var_type(block.desc)
            op.desc.infer_shape(block.desc)
            avg_sum.append(cur_avg_loss)

        loss_sum = block.create_var(dtype=avg_sum[0].dtype, shape=[1])
        op_sum = block.append_op(
            inputs={"X": avg_sum}, outputs={"Out": loss_sum}, type='sum'
        )
        op_sum.desc.infer_var_type(block.desc)
        op_sum.desc.infer_shape(block.desc)

        loss = block.create_var(dtype=loss_sum.dtype, shape=[1])
        op_loss = block.append_op(
            inputs={"X": loss_sum},
            outputs={"Out": loss},
            type='scale',
            attrs={'scale': 1.0 / float(len(avg_sum))},
        )
        op_loss.desc.infer_var_type(block.desc)
        op_loss.desc.infer_shape(block.desc)
    return loss
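
# A minimal usage sketch (continuing the hypothetical block above, where
# "Out" was created as the op's only output variable):
#
#     loss = append_loss_ops(block, ["Out"])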