From 333e83a4e8a5fda955c865572a3ed8a7661e2e51 Mon Sep 17 00:00:00 2001
From: wang001 <1617581104@qq.com>
Date: Sat, 31 Aug 2019 10:12:18 +0800
Subject: [PATCH] fix bug for regression is_prediction ==true

fix using softmax whatever is_classify or is_regression.
---
 finetune/classifier.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/finetune/classifier.py b/finetune/classifier.py
index 69646b5..4487e08 100644
--- a/finetune/classifier.py
+++ b/finetune/classifier.py
@@ -92,8 +92,12 @@ def create_model(args,
             name=task_name + "_cls_out_b",
             initializer=fluid.initializer.Constant(0.)))
 
+    assert is_classify != is_regression, 'is_classify or is_regression must be true and only one of them can be true'
     if is_prediction:
-        probs = fluid.layers.softmax(logits)
+        if is_classify:
+            probs = fluid.layers.softmax(logits)
+        else:
+            probs = logits
         feed_targets_name = [
             src_ids.name, sent_ids.name, pos_ids.name, input_mask.name
         ]
@@ -101,7 +105,6 @@ def create_model(args,
             feed_targets_name += [task_ids.name]
         return pyreader, probs, feed_targets_name
 
-    assert is_classify != is_regression, 'is_classify or is_regression must be true and only one of them can be true'
     num_seqs = fluid.layers.create_tensor(dtype='int64')
     if is_classify:
         ce_loss, probs = fluid.layers.softmax_with_cross_entropy(
-- 
GitLab