From a445c335529beaadc5539fbef159811995e88a47 Mon Sep 17 00:00:00 2001
From: Yiqun Liu
Date: Tue, 6 Aug 2019 16:58:05 +0800
Subject: [PATCH] Add the check of lod in sequence_softmax kernel. (#18996)

* Add the check of lod in sequence_softmax kernel.
  test=develop

* Refine the comments.
  test=develop
---
 paddle/fluid/operators/sequence_ops/sequence_softmax_op.h | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/paddle/fluid/operators/sequence_ops/sequence_softmax_op.h b/paddle/fluid/operators/sequence_ops/sequence_softmax_op.h
index ed49e94714..0555e4ee00 100644
--- a/paddle/fluid/operators/sequence_ops/sequence_softmax_op.h
+++ b/paddle/fluid/operators/sequence_ops/sequence_softmax_op.h
@@ -97,6 +97,9 @@ class SequenceSoftmaxKernel : public framework::OpKernel<T> {
     auto dims = x->dims();
 
     const size_t level = lod.size() - 1;
+    PADDLE_ENFORCE_GT(
+        lod.size(), 0U,
+        "The LoD level of Input X should be larger than 0 (lod.size() > 0).");
     PADDLE_ENFORCE_EQ(dims[0], static_cast<int64_t>(lod[level].back()),
                       "The first dimension of Input(X) should be equal to the "
                       "sum of all sequences' lengths.");
-- 
GitLab
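
Note: the added PADDLE_ENFORCE_GT guard matters because `level` is computed as `lod.size() - 1` on an unsigned `size_t`; with an empty LoD this underflows to a huge index and `lod[level]` reads out of bounds. Below is a minimal standalone C++ sketch (not PaddlePaddle code; the `lod` vector here is a hypothetical stand-in for the kernel's LoD) illustrating that failure mode and the equivalent early check.

// sketch.cc -- illustrates why lod.size() must be checked before
// computing `level = lod.size() - 1`.
#include <cstddef>
#include <iostream>
#include <vector>

int main() {
  // Hypothetical stand-in for the kernel's LoD: empty, as in the bad input case.
  std::vector<std::vector<size_t>> lod;

  // Guard equivalent to PADDLE_ENFORCE_GT(lod.size(), 0U, ...):
  if (lod.size() == 0U) {
    std::cerr << "The LoD level of Input X should be larger than 0 "
                 "(lod.size() > 0)." << std::endl;
    return 1;  // fail early instead of indexing lod[level] out of bounds
  }

  // Without the guard, this underflows to SIZE_MAX on an empty LoD,
  // and lod[level] would be an out-of-bounds access.
  const size_t level = lod.size() - 1;
  std::cout << "last LoD level length: " << lod[level].size() << std::endl;
  return 0;
}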