From 7daa316e764081ba66315f9684082cd3d26365a1 Mon Sep 17 00:00:00 2001
From: guru4elephant
Date: Sat, 15 Feb 2020 00:15:54 +0800
Subject: [PATCH] fix batch size problem for general text infer

---
 core/general-server/op/general_text_infer_op.cpp | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/core/general-server/op/general_text_infer_op.cpp b/core/general-server/op/general_text_infer_op.cpp
index d5acc838..42be5ed0 100644
--- a/core/general-server/op/general_text_infer_op.cpp
+++ b/core/general-server/op/general_text_infer_op.cpp
@@ -56,7 +56,13 @@ int GeneralTextInferOp::inference() {
   const TensorVector *in = &reader_out->tensor_vector;
   TensorVector *out = butil::get_object<TensorVector>();
 
-  int batch_size = (*in)[0].shape[0];
+  int batch_size = 0;
+  if (in->at(0).lod.size() == 1) {
+    batch_size = in->at(0).lod[0].size() - 1;
+  } else {
+    batch_size = in->at(0).shape[0];
+  }
+  VLOG(2) << "infer batch size: " << batch_size;
   // infer
   Timer timeline;
   double infer_time = 0.0;
--
GitLab
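
Note (not part of the patch): the fix relies on PaddlePaddle's LoD (level-of-detail) convention for variable-length inputs, where lod[0] holds N + 1 cumulative row offsets for a batch of N sequences. Before this change, shape[0] was used unconditionally, but for a LoD text input shape[0] is the total number of packed rows, not the number of sequences. The C++ sketch below illustrates the convention; the Tensor struct is a simplified hypothetical stand-in for the real Paddle tensor type, with field names chosen to mirror the diff.

// Minimal sketch (assumption: simplified stand-in for the Paddle tensor type)
// showing why the patch computes batch size as lod[0].size() - 1 for LoD inputs.
#include <cstdio>
#include <vector>

struct Tensor {
  std::vector<int> shape;                // dense dims; shape[0] = row count
  std::vector<std::vector<size_t>> lod;  // level-of-detail sequence offsets
};

// Mirrors the patched logic: a LoD input stores one offset per sequence
// boundary, so a batch of N sequences has N + 1 entries in lod[0].
int batch_size_of(const Tensor &t) {
  if (t.lod.size() == 1) {
    return static_cast<int>(t.lod[0].size()) - 1;
  }
  return t.shape[0];
}

int main() {
  // LoD input: 3 variable-length sequences packed into 10 rows.
  // Offsets {0, 3, 7, 10} mark where each sequence starts and ends.
  Tensor text{{10, 128}, {{0, 3, 7, 10}}};
  // Dense input: shape[0] is the batch size directly.
  Tensor dense{{4, 128}, {}};

  std::printf("lod batch: %d\n", batch_size_of(text));    // prints 3
  std::printf("dense batch: %d\n", batch_size_of(dense)); // prints 4
  return 0;
}

With offsets {0, 3, 7, 10}, shape[0] would report 10 rows while the batch actually contains 3 sequences, which is exactly the miscount the patched branch avoids by preferring lod[0].size() - 1 whenever LoD information is present.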