# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import abc
import paddle.fluid as fluid
import numpy as np


class Metric(object):
    """
    Base class for metrics that are aggregated across distributed workers.

    Subclasses are expected to fill `self._global_communicate_var` with a
    dict mapping a metric key to a tuple of (variable name in the scope,
    numpy dtype string), and to implement `calculate`, `get_result` and
    `__str__`.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, config):
        """Initialize the metric with the given config."""
        pass

    def clear(self, scope=None):
        """
        clear current value
        Args:
            scope: value container
            params: extend varilable for clear
        """
        if scope is None:
            scope = fluid.global_scope()

        place = fluid.CPUPlace()
        for key in self._global_communicate_var:
            varname, dtype = self._global_communicate_var[key]
            if scope.find_var(varname) is None:
                continue
            # overwrite the tensor with zeros of the same shape and dtype
            var = scope.var(varname).get_tensor()
            data_array = np.zeros(var._get_dims()).astype(dtype)
            var.set(data_array, place)

    def get_global_metric(self, fleet, scope, metric_name, mode="sum"):
        """
        Reduce the metric variable named `metric_name` across all workers.
        Return:
            the reduced metric value as a numpy array
        """
        input = np.array(scope.find_var(metric_name).get_tensor())
        # single-process training: nothing to reduce
        if fleet is None:
            return input
        fleet._role_maker._barrier_worker()
        # flatten before the all-reduce, then restore the original shape
        old_shape = np.array(input.shape)
        input = input.reshape(-1)
        output = np.copy(input) * 0
        fleet._role_maker._all_reduce(input, output, mode=mode)
        output = output.reshape(old_shape)
        return output

    def cal_global_metrics(self, fleet, scope=None):
        """
        calculate result
        Args:
            scope: value container
            params: extend varilable for clear
        """
        if scope is None:
            scope = fluid.global_scope()

        global_metrics = dict()
        for key in self._global_communicate_var:
            varname, dtype = self._global_communicate_var[key]
            global_metrics[key] = self.get_global_metric(fleet, scope, varname)

        return self.calculate(global_metrics)

    def calculate(self, global_metrics):
        """
        Compute the final metric from the reduced global values.
        Args:
            global_metrics(dict): reduced value for every key in
                `self._global_communicate_var`
        """
        pass

    @abc.abstractmethod
    def get_result(self):
        """
        Return:
            result(dict) : calculate result
        """
        pass

    def __str__(self):
        """
        Return:
            result(string) : calculate result with string format, for output
        """
        pass
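

# ---------------------------------------------------------------------------
# Illustrative sketch only (not part of the original interface): a minimal
# subclass showing how the hooks above fit together.  The names
# `PosRatioMetric`, `pos_count` and `total_count` are hypothetical; a real
# metric must create and update those variables in its training Program so
# that `clear`/`cal_global_metrics` can find them in the scope.
class PosRatioMetric(Metric):
    def __init__(self, config):
        super(PosRatioMetric, self).__init__(config)
        self._result = 0.0
        # metric key -> (variable name in the scope, numpy dtype)
        self._global_communicate_var = {
            "pos": ("pos_count", "float32"),
            "total": ("total_count", "float32"),
        }

    def calculate(self, global_metrics):
        # sum in case the counter tensors hold more than one element
        pos = float(np.sum(global_metrics["pos"]))
        total = float(np.sum(global_metrics["total"]))
        self._result = pos / total if total > 0 else 0.0
        return self._result

    def get_result(self):
        return {"pos_ratio": self._result}

    def __str__(self):
        return "PosRatio: {:.6f}".format(self._result)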