#   Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fleet Metrics"""

import math
import numpy as np
from paddle.static import Variable
import paddle


def sum(input, scope=None, util=None):
    """
    distributed sum in fleet

    Args:
        input(numpy.array|Variable|string): output of a layer
        scope(Scope): specific scope; if None, the global scope is used
        util(UtilBase): distributed util; if None, paddle.distributed.fleet.util is used

    Returns:
        global_metric(numpy.array): sum array

    Example:
        .. code-block:: python

          # in model.py
          input = fluid.layers.cast(some_input, dtype='float32')
          cnt = fluid.layers.reduce_sum(input)
          global_cnt = fluid.layers.create_global_var(persistable=True, dtype='float32', shape=[1], value=0)
          tmp = fluid.layers.elementwise_add(cnt, global_cnt)
          fluid.layers.assign(tmp, global_cnt)
          
          # in train.py, after train or infer
          res = np.array(scope.find_var(global_cnt.name).get_tensor())
          print("sum array: ", paddle.distributed.fleet.sum(res))
    """
    if scope is None:
        scope = paddle.static.global_scope()
    if util is None:
        util = paddle.distributed.fleet.util
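    # accept a numpy array directly, or fetch the named tensor from the scope
    # when a Variable or a variable name string is given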
    if isinstance(input, Variable):
        input = np.array(scope.find_var(input.name).get_tensor())
    elif isinstance(input, str):
        input = np.array(scope.find_var(input).get_tensor())
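    # all_reduce may flatten the result, so save the shape to restore below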
    old_shape = np.array(input.shape)
    output = util.all_reduce(input, "sum")
    output = output.reshape(old_shape)
    return output


def max(input, scope=None, util=None):
    """
    distributed max in fleet

    Args:
        input(numpy.array|Variable|string): output of a layer
        scope(Scope): specific scope; if None, the global scope is used
        util(UtilBase): distributed util; if None, paddle.distributed.fleet.util is used

    Returns:
        global_metric(numpy.array): max array

    Example:
        .. code-block:: python

          # in model.py
          input = fluid.layers.cast(some_input, dtype='float32')
          cnt = fluid.layers.reduce_sum(input)
          global_cnt = fluid.layers.create_global_var(persistable=True, dtype='float32', shape=[1], value=0)
          tmp = fluid.layers.elementwise_max(cnt, global_cnt)
          fluid.layers.assign(tmp, global_cnt)

          # in train.py, after train or infer
          res = np.array(scope.find_var(global_cnt.name).get_tensor())
          print("max array: ", paddle.distributed.fleet.max(res))
    """
    if scope is None:
        scope = paddle.static.global_scope()
    if util is None:
        util = paddle.distributed.fleet.util
    if isinstance(input, Variable):
        input = np.array(scope.find_var(input.name).get_tensor())
    elif isinstance(input, str):
        input = np.array(scope.find_var(input).get_tensor())
    old_shape = np.array(input.shape)
    output = util.all_reduce(input, "max")
    output = output.reshape(old_shape)
    return output


def min(input, scope=None, util=None):
    """
    distributed min in fleet

    Args:
        input(numpy.array|Variable|string): output of a layer
        scope(Scope): specific scope; if None, the global scope is used
        util(UtilBase): distributed util; if None, paddle.distributed.fleet.util is used

    Returns:
        global_metric(numpy.array): min array

    Example:
        .. code-block:: python

          # in model.py
          input = fluid.layers.cast(some_input, dtype='float32')
          cnt = fluid.layers.reduce_sum(input)
          global_cnt = fluid.layers.create_global_var(persistable=True, dtype='float32', shape=[1], value=0)
          tmp = fluid.layers.elementwise_min(cnt, global_cnt)
          fluid.layers.assign(tmp, global_cnt)

          # in train.py, after train or infer
          res = np.array(scope.find_var(global_cnt.name).get_tensor())
          print("min array: ", paddle.distributed.fleet.min(res))
    """
    if scope is None:
        scope = paddle.static.global_scope()
    if util is None:
        util = paddle.distributed.fleet.util
    if isinstance(input, Variable):
        input = np.array(scope.find_var(input.name).get_tensor())
    elif isinstance(input, str):
        input = np.array(scope.find_var(input).get_tensor())
    old_shape = np.array(input.shape)
    output = util.all_reduce(input, "min")
    output = output.reshape(old_shape)
    return output


def auc(stat_pos, stat_neg, scope=None, util=None):
    """
    distributed auc in fleet

    Args:
        stat_pos(numpy.array|Variable|string): stat_pos in output of fluid.layers.auc
        stat_neg(numpy.array|Variable|string): stat_neg in output of fluid.layers.auc
        scope(Scope): specific scope; if None, the global scope is used
        util(UtilBase): distributed util; if None, paddle.distributed.fleet.util is used

    Returns:
        auc_value(float): global auc value; 0.5 if either class has no samples

    Example:
        .. code-block:: python

          # in model.py
          similarity_norm = fluid.layers.sigmoid(fluid.layers.clip(output, min=-15.0, max=15.0))
          binary_predict = fluid.layers.concat(
              input=[fluid.layers.elementwise_sub(fluid.layers.ceil(similarity_norm), similarity_norm), similarity_norm], axis=1)
          auc_var, batch_auc, [batch_stat_pos, batch_stat_neg, stat_pos, stat_neg] = \
              fluid.layers.auc(input=binary_predict, label=label, curve='ROC', num_thresholds=4096)

          # in train.py, after train or infer
          pos = np.array(scope.find_var(stat_pos.name).get_tensor())
          neg = np.array(scope.find_var(stat_neg.name).get_tensor())
          print("auc: ", paddle.distributed.fleet.auc(pos, neg))
    """
    if scope is None:
        scope = paddle.static.global_scope()
    if util is None:
        util = paddle.distributed.fleet.util

    if isinstance(stat_pos, Variable):
        stat_pos = np.array(scope.find_var(stat_pos.name).get_tensor())
    elif isinstance(stat_pos, str):
        stat_pos = np.array(scope.find_var(stat_pos).get_tensor())
    if isinstance(stat_neg, Variable):
        stat_neg = np.array(scope.find_var(stat_neg.name).get_tensor())
    elif isinstance(stat_neg, str):
        stat_neg = np.array(scope.find_var(stat_neg).get_tensor())
    # auc pos bucket shape
    old_pos_shape = np.array(stat_pos.shape)
    # reshape to one dim
    stat_pos = stat_pos.reshape(-1)
    # mpi allreduce
    global_pos = util.all_reduce(stat_pos, "sum")
    global_pos = global_pos.reshape(old_pos_shape)

    # auc neg bucket
    old_neg_shape = np.array(stat_neg.shape)
    stat_neg = stat_neg.reshape(-1)
    global_neg = util.all_reduce(stat_neg, "sum")
    global_neg = global_neg.reshape(old_neg_shape)

    # calculate auc
    num_bucket = len(global_pos[0])
    area = 0.0
    pos = 0.0
    neg = 0.0
    new_pos = 0.0
    new_neg = 0.0
    total_ins_num = 0
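    # walk the buckets from the highest score down: pos/neg accumulate
    # true/false positive counts, and each bucket adds a trapezoid of width
    # (new_neg - neg) and mean height (pos + new_pos) / 2 to the
    # un-normalized ROC area, which is divided by pos * neg at the end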
    for i in range(num_bucket):
        index = num_bucket - 1 - i
        new_pos = pos + global_pos[0][index]
        total_ins_num += global_pos[0][index]
        new_neg = neg + global_neg[0][index]
        total_ins_num += global_neg[0][index]
        area += (new_neg - neg) * (pos + new_pos) / 2
        pos = new_pos
        neg = new_neg

    auc_value = None
    if pos * neg == 0 or total_ins_num == 0:
        auc_value = 0.5
    else:
        auc_value = area / (pos * neg)

    return auc_value


def mae(abserr, total_ins_num, scope=None, util=None):
    """
    distributed mae in fleet

    Args:
        abserr(numpy.array|Variable|string): abserr in output of fluid.contrib.layers.ctr_metric_bundle
        total_ins_num(numpy.array|Variable|string): total instance count
        scope(Scope): specific scope; if None, the global scope is used
        util(UtilBase): distributed util; if None, paddle.distributed.fleet.util is used

    Returns:
        mae(float): mae value

    Example:
        .. code-block:: python

          # in model.py
          sqrerr, abserr, prob, q, pos, total = fluid.contrib.layers.ctr_metric_bundle(similarity_norm, fluid.layers.cast(x=label, dtype='float32'))

          # in train.py, after train or infer
          res = np.array(scope.find_var(abserr.name).get_tensor())
          print("mae: ", paddle.distributed.fleet.mae(res, total))
    """
    if scope is None:
        scope = paddle.static.global_scope()
    if util is None:
        util = paddle.distributed.fleet.util

    if isinstance(abserr, Variable):
        abserr = np.array(scope.find_var(abserr.name).get_tensor())
    elif isinstance(abserr, str):
        abserr = np.array(scope.find_var(abserr).get_tensor())
    if isinstance(total_ins_num, Variable):
        total_ins_num = np.array(
            scope.find_var(total_ins_num.name).get_tensor())
    elif isinstance(total_ins_num, str):
        total_ins_num = np.array(scope.find_var(total_ins_num).get_tensor())

    old_metric_shape = np.array(abserr.shape)
    abserr = abserr.reshape(-1)

    global_metric = util.all_reduce(abserr, "sum")
    global_metric = global_metric.reshape(old_metric_shape)
    global_total_num = util.all_reduce(total_ins_num, "sum")

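    # MAE = globally summed absolute error / globally summed instance count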
    mae_value = float(global_metric[0]) / float(global_total_num[0])
    return mae_value


def rmse(sqrerr, total_ins_num, scope=None, util=None):
    """
    distributed rmse in fleet

    Args:
        sqrerr(numpy.array|Variable|string): sqrerr in output of fluid.contrib.layers.ctr_metric_bundle
        total_ins_num(numpy.array|Variable|string): total instance count
        scope(Scope): specific scope; if None, the global scope is used
        util(UtilBase): distributed util; if None, paddle.distributed.fleet.util is used

    Returns:
        rmse(float): rmse value

    Example:
        .. code-block:: python

          # in model.py
          sqrerr, abserr, prob, q, pos, total = fluid.contrib.layers.ctr_metric_bundle(similarity_norm, fluid.layers.cast(x=label, dtype='float32'))

          # in train.py, after train or infer
          res = np.array(scope.find_var(sqrerr.name).get_tensor())
          print("rmse: ", paddle.distributed.fleet.rmse(res, total))
    """
    if scope is None:
        scope = paddle.static.global_scope()
    if util is None:
        util = paddle.distributed.fleet.util

    if isinstance(sqrerr, Variable):
        sqrerr = np.array(scope.find_var(sqrerr.name).get_tensor())
    elif isinstance(sqrerr, str):
        sqrerr = np.array(scope.find_var(sqrerr).get_tensor())
    if isinstance(total_ins_num, Variable):
        total_ins_num = np.array(
            scope.find_var(total_ins_num.name).get_tensor())
    elif isinstance(total_ins_num, str):
        total_ins_num = np.array(scope.find_var(total_ins_num).get_tensor())
    old_metric_shape = np.array(sqrerr.shape)
    sqrerr = sqrerr.reshape(-1)

    global_metric = util.all_reduce(sqrerr, "sum")
    global_metric = global_metric.reshape(old_metric_shape)
    global_total_num = util.all_reduce(total_ins_num, "sum")

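    # RMSE = sqrt(globally summed squared error / globally summed instance count)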
    rmse_value = math.sqrt(float(global_metric[0]) / float(global_total_num[0]))

    return rmse_value


def mse(sqrerr, total_ins_num, scope=None, util=None):
    """
    distributed mse in fleet

    Args:
        sqrerr(numpy.array|Variable|string): sqrerr in output of fluid.contrib.layers.ctr_metric_bundle
        total_ins_num(numpy.array|Variable|string): total instance count
        scope(Scope): specific scope; if None, the global scope is used
        util(UtilBase): distributed util; if None, paddle.distributed.fleet.util is used

    Returns:
        mse(float): mse value

    Example:
        .. code-block:: python

          # in model.py
          sqrerr, abserr, prob, q, pos, total = fluid.contrib.layers.ctr_metric_bundle(similarity_norm, fluid.layers.cast(x=label, dtype='float32'))

          # in train.py, after train or infer
          metric = np.array(scope.find_var(sqrerr.name).get_tensor())
          print("mse: ", paddle.distributed.fleet.mse(metric, total))
    """
    if scope is None:
        scope = paddle.static.global_scope()
    if util is None:
        util = paddle.distributed.fleet.util

    if isinstance(sqrerr, Variable):
        sqrerr = np.array(scope.find_var(sqrerr.name).get_tensor())
    elif isinstance(sqrerr, str):
        sqrerr = np.array(scope.find_var(sqrerr).get_tensor())
    if isinstance(total_ins_num, Variable):
        total_ins_num = np.array(
            scope.find_var(total_ins_num.name).get_tensor())
    elif isinstance(total_ins_num, str):
        total_ins_num = np.array(scope.find_var(total_ins_num).get_tensor())
    old_metric_shape = np.array(sqrerr.shape)
    sqrerr = sqrerr.reshape(-1)

    global_metric = util.all_reduce(sqrerr, "sum")
    global_metric = global_metric.reshape(old_metric_shape)
    global_total_num = util.all_reduce(total_ins_num, "sum")

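    # MSE = globally summed squared error / globally summed instance count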
    mse_value = float(global_metric[0]) / float(global_total_num[0])
    return mse_value


def acc(correct, total, scope=None, util=None):
    """
    distributed accuracy in fleet

    Args:
        correct(numpy.array|Variable|string): number of correct predictions
        total(numpy.array|Variable|string): total number of instances
        scope(Scope): specific scope; if None, the global scope is used
        util(UtilBase): distributed util; if None, paddle.distributed.fleet.util is used

    Returns:
        acc(float): accuracy value

    Example:
        .. code-block:: python

          # in model.py
          correct = fluid.layers.create_global_var(dtype='float32', shape=[1], value=0)
          total = fluid.layers.create_global_var(dtype='float32', shape=[1], value=0)
          acc = fluid.layers.accuracy(input=predict, label=label, k=1, correct=correct, total=total)

          global_correct = fluid.layers.create_global_var(persistable=True, dtype='float32', shape=[1], value=0)
          tmp1 = fluid.layers.elementwise_add(correct, global_correct)
          fluid.layers.assign(tmp1, global_correct)

          global_total = fluid.layers.create_global_var(persistable=True, dtype='float32', shape=[1], value=0)
          tmp2 = fluid.layers.elementwise_add(total, global_total)
          fluid.layers.assign(tmp2, global_total)

          # in train.py, after train or infer
          correct_num = np.array(scope.find_var(correct.name).get_tensor())
          total_num = np.array(scope.find_var(total.name).get_tensor())
          print("accuracy: ", paddle.distributed.fleet.acc(correct_num, total_num))
    """
    if scope is None:
        scope = paddle.static.global_scope()
    if util is None:
        util = paddle.distributed.fleet.util

    if isinstance(correct, Variable):
        correct = np.array(scope.find_var(correct.name).get_tensor())
    elif isinstance(correct, str):
        correct = np.array(scope.find_var(correct).get_tensor())
    if isinstance(total, Variable):
        total = np.array(scope.find_var(total.name).get_tensor())
    elif isinstance(total, str):
        total = np.array(scope.find_var(total).get_tensor())

    global_correct_num = util.all_reduce(correct, "sum")
    global_total_num = util.all_reduce(total, "sum")

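    # accuracy = globally summed correct count / globally summed instance count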
    return float(global_correct_num[0]) / float(global_total_num[0])
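

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only; assumes a fleet-initialized static-graph
# job, and that `stat_pos`, `stat_neg`, `abserr` and `total` are the metric
# variables created in model.py as shown in the docstrings above):
#
#   import paddle
#   import paddle.distributed.fleet as fleet
#
#   fleet.init()
#   ...  # build the program, then train or infer one pass
#   scope = paddle.static.global_scope()
#   print("global auc: ", auc(stat_pos, stat_neg, scope, fleet.util))
#   print("global mae: ", mae(abserr, total, scope, fleet.util))
# ---------------------------------------------------------------------------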