# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle.fluid as fluid

from paddlerec.core.metric import Metric
from paddle.fluid.layers import accuracy
from paddle.fluid.initializer import Constant
from paddle.fluid.layer_helper import LayerHelper


class Precision(Metric):
    """
    Metric For Fluid Model
    """

    def __init__(self, **kwargs):
        """ """
        helper = LayerHelper("PaddleRec_Precision", **kwargs)
        self.batch_accuracy = accuracy(
            kwargs.get("input"), kwargs.get("label"), kwargs.get("k"))
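        # The local_* counters are persistable and accumulate across batches;
        # the batch_* variables below hold per-batch values only.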
        local_ins_num, _ = helper.create_or_get_global_variable(
            name="local_ins_num", persistable=True, dtype='float32',
            shape=[1])
        local_pos_num, _ = helper.create_or_get_global_variable(
            name="local_pos_num", persistable=True, dtype='float32',
            shape=[1])

        batch_pos_num, _ = helper.create_or_get_global_variable(
            name="batch_pos_num",
            persistable=False,
            dtype='float32',
            shape=[1])
        batch_ins_num, _ = helper.create_or_get_global_variable(
            name="batch_ins_num",
            persistable=False,
            dtype='float32',
            shape=[1])

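        # Helper tensor of ones whose length follows the batch size of the input.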
        tmp_ones = helper.create_global_variable(
            name="batch_size_like_ones",
            persistable=False,
            dtype='float32',
            shape=[-1])

        # Zero-initialize every counter variable.
        for var in [
                batch_pos_num, batch_ins_num, local_pos_num, local_ins_num
        ]:
            helper.set_variable_initializer(
                var, Constant(
                    value=0.0, force_cpu=True))

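        # Fill tmp_ones with 1.0, one entry per instance in the batch of
        # ``label``, then reduce-sum it to get the batch instance count.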
        helper.append_op(
            type='fill_constant_batch_size_like',
            inputs={"Input": kwargs.get("label")},
            outputs={'Out': [tmp_ones]},
            attrs={
                'shape': [-1, 1],
                'dtype': tmp_ones.dtype,
                'value': float(1.0),
            })
        helper.append_op(
            type="reduce_sum",
            inputs={"X": [tmp_ones]},
            outputs={"Out": [batch_ins_num]})

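        # Correct instances in this batch = batch size * batch top-k accuracy.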
        helper.append_op(
            type="elementwise_mul",
            inputs={"X": [batch_ins_num],
                    "Y": [self.batch_accuracy]},
            outputs={"Out": [batch_pos_num]})

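        # Accumulate the per-batch counts into the persistable local counters.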
        helper.append_op(
            type="elementwise_add",
            inputs={"X": [local_pos_num],
                    "Y": [batch_pos_num]},
            outputs={"Out": [local_pos_num]})

        helper.append_op(
            type="elementwise_add",
            inputs={"X": [local_ins_num],
                    "Y": [batch_ins_num]},
            outputs={"Out": [local_ins_num]})

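        # Running precision over every batch processed so far.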
        self.accuracy = local_pos_num / local_ins_num

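        # (name, dtype) pairs of the persistable counters that are cleared
        # when the metric is reset.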
        self._need_clear_list = [("local_ins_num", "float32"),
                                 ("local_pos_num", "float32")]
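        # Expose the running precision under the key "P@k".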
        self.metrics = dict()
        metric_varname = "P@%d" % kwargs.get("k")
        self.metrics[metric_varname] = self.accuracy

    def get_result(self):
        return self.metrics
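

# ---------------------------------------------------------------------------
# Usage sketch (not part of the original module): one way this metric might be
# attached to a small fluid static-graph network. The feature and label layers
# below are hypothetical placeholders chosen only for illustration, and the
# snippet assumes the class can be instantiated standalone under fluid 1.x.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    feats = fluid.data(name="feats", shape=[None, 8], dtype="float32")
    label = fluid.data(name="label", shape=[None, 1], dtype="int64")
    predict = fluid.layers.fc(input=feats, size=2, act="softmax")

    # Builds the counter variables in the default program and returns
    # {"P@1": <running precision Variable>}.
    precision_metric = Precision(input=predict, label=label, k=1)
    print(precision_metric.get_result())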