diff --git a/paddle/fluid/inference/tests/book/CMakeLists.txt b/paddle/fluid/inference/tests/book/CMakeLists.txt
index 90357f99d1defb97eaf7137b947d68443ad316a2..b33df2942a8262167dee1a022e5dd65eec465523 100644
--- a/paddle/fluid/inference/tests/book/CMakeLists.txt
+++ b/paddle/fluid/inference/tests/book/CMakeLists.txt
@@ -35,7 +35,13 @@ inference_test(image_classification ARGS vgg resnet)
 inference_test(label_semantic_roles)
 inference_test(recognize_digits ARGS mlp conv)
 inference_test(recommender_system)
-inference_test(nlp)
 #inference_test(rnn_encoder_decoder)
 #inference_test(understand_sentiment ARGS conv)
 inference_test(word2vec)
+
+# This is an ugly workaround to make this test run
+cc_test(test_inference_nlp
+        SRCS test_inference_nlp.cc
+        DEPS paddle_fluid
+        ARGS
+        --modelpath=${PADDLE_BINARY_DIR}/python/paddle/fluid/tests/book/recognize_digits_mlp.inference.model)
diff --git a/paddle/fluid/inference/tests/book/test_inference_nlp.cc b/paddle/fluid/inference/tests/book/test_inference_nlp.cc
index c4d7b0bbf011f8400ab7e790df1d0217a4a52ab7..5ece6084df272a0db8fc54f6da5206e46e83a821 100644
--- a/paddle/fluid/inference/tests/book/test_inference_nlp.cc
+++ b/paddle/fluid/inference/tests/book/test_inference_nlp.cc
@@ -37,10 +37,22 @@ inline double GetCurrentMs() {
   return 1e+3 * time.tv_sec + 1e-3 * time.tv_usec;
 }
 
+// This function just gives dummy data for the recognize_digits model.
+size_t DummyData(std::vector<paddle::framework::LoDTensor>* out) {
+  paddle::framework::LoDTensor input;
+  SetupTensor<float>(&input, {1, 1, 28, 28}, -1.f, 1.f);
+  out->emplace_back(input);
+  return 1;
+}
+
 // Load the input word index data from file and save into LodTensor.
 // Return the size of words.
 size_t LoadData(std::vector<paddle::framework::LoDTensor>* out,
                 const std::string& filename) {
+  if (filename.empty()) {
+    return DummyData(out);
+  }
+
   size_t sz = 0;
   std::fstream fin(filename);
   std::string line;
@@ -130,9 +142,12 @@ void ThreadRunInfer(
 }
 
 TEST(inference, nlp) {
-  if (FLAGS_modelpath.empty() || FLAGS_datafile.empty()) {
-    LOG(FATAL) << "Usage: ./example --modelpath=path/to/your/model "
-               << "--datafile=path/to/your/data";
+  if (FLAGS_modelpath.empty()) {
+    LOG(FATAL) << "Usage: ./example --modelpath=path/to/your/model";
+  }
+  if (FLAGS_datafile.empty()) {
+    LOG(WARNING) << "No data file provided, will use dummy data! "
+                 << "Note: if you use an nlp model, please provide the data file.";
   }
   LOG(INFO) << "Model Path: " << FLAGS_modelpath;
   LOG(INFO) << "Data File: " << FLAGS_datafile;
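
Note for readers: the new DummyData path relies on the SetupTensor<T> helper from the shared test harness (paddle/fluid/inference/tests/test_helper.h). The shape {1, 1, 28, 28} is a single 28x28 grayscale image, matching the input of the recognize_digits_mlp model that the CMake change feeds in via --modelpath. Below is a minimal sketch of what that helper is assumed to do, namely fill a LoDTensor of the given shape with uniform random values in [lower, upper]; only the name and call signature come from the diff, the body is an illustration, not the harness's exact implementation.

    // Sketch only: assumed behavior of SetupTensor<T>, based on how the diff
    // calls it. Fills a LoDTensor of shape `dims` with uniform random values
    // in [lower, upper] on CPU.
    #include <random>

    #include "paddle/fluid/framework/ddim.h"
    #include "paddle/fluid/framework/lod_tensor.h"
    #include "paddle/fluid/platform/place.h"

    template <typename T>
    void SetupTensor(paddle::framework::LoDTensor* input,
                     paddle::framework::DDim dims, T lower, T upper) {
      static std::mt19937 rng(100);  // fixed seed keeps test inputs reproducible
      std::uniform_real_distribution<double> dist(static_cast<double>(lower),
                                                  static_cast<double>(upper));
      T* data = input->mutable_data<T>(dims, paddle::platform::CPUPlace());
      for (int64_t i = 0; i < paddle::framework::product(dims); ++i) {
        data[i] = static_cast<T>(dist(rng));
      }
    }

With this, LoadData(out, "") degrades gracefully to one random image-shaped tensor, which is why the test can now run with --modelpath alone while real nlp input still requires --datafile.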