From 7ce38826959b55c9eccc4957457943e796c648a8 Mon Sep 17 00:00:00 2001
From: Qiao Longfei
Date: Sun, 26 Aug 2018 10:00:41 +0800
Subject: [PATCH] fix auc layer and add check for auc op (#12954)

* fix auc layer and add check for auc op

* use input to check if states are inited

* optimize code

---
 paddle/fluid/operators/auc_op.h         | 14 ++++++++++++++
 python/paddle/fluid/layers/metric_op.py | 12 ++++++++----
 2 files changed, 22 insertions(+), 4 deletions(-)

diff --git a/paddle/fluid/operators/auc_op.h b/paddle/fluid/operators/auc_op.h
index 0a18585edb5..0651203286c 100644
--- a/paddle/fluid/operators/auc_op.h
+++ b/paddle/fluid/operators/auc_op.h
@@ -60,6 +60,20 @@ class AucKernel : public framework::OpKernel<T> {
     const T* inference_data = predict->data<T>();
     const auto* label_data = label->data<int64_t>();
 
+    // check if states are inited.
+    auto* tp_in = ctx.Input<Tensor>("TP");
+    auto* fp_in = ctx.Input<Tensor>("FP");
+    auto* tn_in = ctx.Input<Tensor>("TN");
+    auto* fn_in = ctx.Input<Tensor>("FN");
+    PADDLE_ENFORCE(tp_in->IsInitialized(), "true_positive is not inited!");
+    PADDLE_ENFORCE(fp_in->IsInitialized(), "false_positive is not inited!");
+    PADDLE_ENFORCE(tn_in->IsInitialized(), "true_negative is not inited!");
+    PADDLE_ENFORCE(fn_in->IsInitialized(), "false_negative is not inited!");
+    PADDLE_ENFORCE_EQ(tp_in->numel(), num_thresholds, "");
+    PADDLE_ENFORCE_EQ(fp_in->numel(), num_thresholds, "");
+    PADDLE_ENFORCE_EQ(tn_in->numel(), num_thresholds, "");
+    PADDLE_ENFORCE_EQ(fn_in->numel(), num_thresholds, "");
+
     auto* tp_data = true_positive->mutable_data<int64_t>(ctx.GetPlace());
     auto* fn_data = false_negative->mutable_data<int64_t>(ctx.GetPlace());
     auto* tn_data = true_negative->mutable_data<int64_t>(ctx.GetPlace());
diff --git a/python/paddle/fluid/layers/metric_op.py b/python/paddle/fluid/layers/metric_op.py
index 2c3bdd77e1f..0182bbeb637 100644
--- a/python/paddle/fluid/layers/metric_op.py
+++ b/python/paddle/fluid/layers/metric_op.py
@@ -119,10 +119,14 @@ def auc(input, label, curve='ROC', num_thresholds=200, topk=1):
     helper = LayerHelper("auc", **locals())
     auc_out = helper.create_tmp_variable(dtype="float64")
     # make tp, tn, fp, fn persistable, so that can accumulate all batches.
-    tp = helper.create_global_variable(persistable=True, dtype='int64')
-    tn = helper.create_global_variable(persistable=True, dtype='int64')
-    fp = helper.create_global_variable(persistable=True, dtype='int64')
-    fn = helper.create_global_variable(persistable=True, dtype='int64')
+    tp = helper.create_global_variable(
+        persistable=True, dtype='int64', shape=[num_thresholds])
+    tn = helper.create_global_variable(
+        persistable=True, dtype='int64', shape=[num_thresholds])
+    fp = helper.create_global_variable(
+        persistable=True, dtype='int64', shape=[num_thresholds])
+    fn = helper.create_global_variable(
+        persistable=True, dtype='int64', shape=[num_thresholds])
     for var in [tp, tn, fp, fn]:
         helper.set_variable_initializer(
             var, Constant(
--
GitLab
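
For context, here is a minimal usage sketch of the patched layer (not part of the patch itself). It assumes the 2018-era paddle.fluid API that this commit targets: a two-column softmax output feeding the predict input, and fluid.layers.auc returning a single AUC tensor at this commit; the variable names, shapes, and batch data below are illustrative.

# Usage sketch for the patched fluid.layers.auc (assumes 2018-era fluid API).
import numpy as np
import paddle.fluid as fluid

# predict: per-class probabilities of a binary classifier; label: int64 ids.
predict = fluid.layers.data(name='predict', shape=[2], dtype='float32')
label = fluid.layers.data(name='label', shape=[1], dtype='int64')

# With this patch, the persistable tp/tn/fp/fn states behind the layer are
# created with shape=[num_thresholds], matching the kernel's numel() checks.
auc_out = fluid.layers.auc(input=predict, label=label, num_thresholds=200)

exe = fluid.Executor(fluid.CPUPlace())
# Running the startup program zero-initializes the states; skipping it now
# fails fast on the kernel's IsInitialized() checks instead of misbehaving.
exe.run(fluid.default_startup_program())

preds = np.random.rand(8, 2).astype('float32')
labels = np.random.randint(0, 2, size=(8, 1)).astype('int64')
auc_val, = exe.run(fluid.default_main_program(),
                   feed={'predict': preds, 'label': labels},
                   fetch_list=[auc_out])
print('accumulated auc:', auc_val)

Because tp/tn/fp/fn are persistable, repeated exe.run calls accumulate counts across batches, which is why the C++ kernel now verifies both that the state tensors are initialized and that each holds exactly num_thresholds elements.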