From 77599415ba1b93715fa0626e147865c088970ee6 Mon Sep 17 00:00:00 2001
From: tensor-tang
Date: Wed, 30 May 2018 12:15:10 +0800
Subject: [PATCH] enable read dataset

---
 .../tests/book/test_inference_nlp.cc         | 32 +++++++++++++++++--
 1 file changed, 29 insertions(+), 3 deletions(-)

diff --git a/paddle/fluid/inference/tests/book/test_inference_nlp.cc b/paddle/fluid/inference/tests/book/test_inference_nlp.cc
index 6ff8a18cdb1..95cdeb4ad1f 100644
--- a/paddle/fluid/inference/tests/book/test_inference_nlp.cc
+++ b/paddle/fluid/inference/tests/book/test_inference_nlp.cc
@@ -14,7 +14,12 @@ limitations under the License. */
 
 #include <sys/time.h>
 #include <time.h>
+#include <fstream>
+#include <iostream>
+#include <sstream>
+#include <string>
 #include <thread>  // NOLINT
+#include <vector>
 #include "gflags/gflags.h"
 #include "gtest/gtest.h"
 #include "paddle/fluid/inference/tests/test_helper.h"
@@ -31,16 +36,37 @@ inline double get_current_ms() {
   return 1e+3 * time.tv_sec + 1e-3 * time.tv_usec;
 }
 
+void read_data(
+    std::vector<std::vector<int64_t>>* out,
+    const std::string& filename = "/home/tangjian/paddle-tj/out.ids.txt") {
+  using namespace std;  // NOLINT
+  fstream fin(filename);
+  string line;
+  out->clear();
+  while (getline(fin, line)) {
+    istringstream iss(line);
+    vector<int64_t> ids;
+    string field;
+    while (getline(iss, field, ' ')) {
+      ids.push_back(stoi(field));
+    }
+    out->push_back(ids);
+  }
+}
+
 TEST(inference, understand_sentiment) {
   if (FLAGS_dirname.empty()) {
     LOG(FATAL) << "Usage: ./example --dirname=path/to/your/model";
   }
-
+  std::vector<std::vector<int64_t>> inputdatas;
+  read_data(&inputdatas);
+  LOG(INFO) << "---------- dataset size: " << inputdatas.size();
   LOG(INFO) << "FLAGS_dirname: " << FLAGS_dirname << std::endl;
   std::string dirname = FLAGS_dirname;
+
   const bool model_combined = false;
-  int total_work = 100;
-  int num_threads = 10;
+  int total_work = 10;
+  int num_threads = 2;
   int work_per_thread = total_work / num_threads;
   std::vector<std::unique_ptr<std::thread>> infer_threads;
   for (int i = 0; i < num_threads; ++i) {
--
GitLab
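
Reviewer note: the new `read_data` helper assumes a plain-text ids file with one sample per line, each line holding space-separated integer word ids. Below is a minimal standalone sketch (not part of the patch) that mirrors that parsing logic so it can be tried outside the gtest harness; the file name `sample.ids.txt` is a placeholder, and `int64_t` ids are an assumption based on the types used in the diff.

```cpp
// Standalone sketch of the line-oriented ids parsing used by read_data() above.
// Assumptions: one sample per line, ids separated by single spaces;
// "sample.ids.txt" is a placeholder path, not taken from the patch.
#include <cstdint>
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

std::vector<std::vector<int64_t>> load_ids(const std::string& filename) {
  std::vector<std::vector<int64_t>> samples;
  std::ifstream fin(filename);  // read-only stream is enough here
  std::string line;
  while (std::getline(fin, line)) {
    std::istringstream iss(line);
    std::vector<int64_t> ids;
    std::string field;
    while (std::getline(iss, field, ' ')) {
      if (!field.empty()) {  // tolerate accidental repeated spaces
        ids.push_back(std::stoll(field));
      }
    }
    samples.push_back(ids);
  }
  return samples;
}

int main() {
  // Example: a file containing "12 7 4056\n9 9 31\n" yields two samples
  // of lengths 3 and 3.
  auto samples = load_ids("sample.ids.txt");
  std::cout << "dataset size: " << samples.size() << "\n";
  for (const auto& s : samples) {
    std::cout << "sample length: " << s.size() << "\n";
  }
  return 0;
}
```

One design note: unlike the patch, the sketch uses `std::ifstream` and skips empty fields, which keeps the parser tolerant of trailing or doubled spaces without changing the accepted format.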