// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <glog/logging.h>
#include <fstream>
#if !defined(_WIN32)
#include <sys/time.h>
#endif
#include <algorithm>
#include <chrono>  // NOLINT
#include <iterator>
#include <numeric>
#include <sstream>
#include <string>
#include <vector>
#include "paddle/fluid/framework/data_type.h"
#include "paddle/fluid/inference/api/paddle_inference_api.h"
#include "paddle/fluid/platform/enforce.h"
#include "paddle/fluid/platform/port.h"
#include "paddle/fluid/string/printf.h"

extern std::string paddle::framework::DataTypeToString(
    const framework::proto::VarType::Type type);

namespace paddle {
namespace inference {

using paddle::framework::DataTypeToString;

// Simple wall-clock timer: tic() marks a start point, toc() returns the
// elapsed time in milliseconds.
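// Example:
//   Timer timer;
//   timer.tic();
//   /* ... code being measured ... */
//   double elapsed_ms = timer.toc();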
class Timer {
 public:
  std::chrono::high_resolution_clock::time_point start;
  std::chrono::high_resolution_clock::time_point startu;

  void tic() { start = std::chrono::high_resolution_clock::now(); }
  double toc() {
    startu = std::chrono::high_resolution_clock::now();
    std::chrono::duration<double> time_span =
        std::chrono::duration_cast<std::chrono::duration<double>>(startu -
                                                                  start);
    double used_time_ms = static_cast<double>(time_span.count()) * 1000.0;
    return used_time_ms;
  }
};

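// Returns a process-wide, monotonically increasing id. The counter is not
// synchronized, so concurrent callers may observe duplicates.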
static int GetUniqueId() {
  static int id = 0;
  return id++;
}

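// Splits `str` on `sep` into `pieces`. Empty fields between separators are
// kept; a trailing empty field is dropped.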
static void split(const std::string &str, char sep,
                  std::vector<std::string> *pieces) {
  pieces->clear();
  if (str.empty()) {
    return;
  }
  size_t pos = 0;
  size_t next = str.find(sep, pos);
  while (next != std::string::npos) {
    pieces->push_back(str.substr(pos, next - pos));
    pos = next + 1;
    next = str.find(sep, pos);
  }
  if (!str.substr(pos).empty()) {
    pieces->push_back(str.substr(pos));
  }
}
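
// Typed variants of split(): each piece is parsed as float, int64_t or int.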
static void split_to_float(const std::string &str, char sep,
                           std::vector<float> *fs) {
  std::vector<std::string> pieces;
  split(str, sep, &pieces);
  std::transform(pieces.begin(), pieces.end(), std::back_inserter(*fs),
                 [](const std::string &v) { return std::stof(v); });
}
static void split_to_int64(const std::string &str, char sep,
                           std::vector<int64_t> *is) {
  std::vector<std::string> pieces;
  split(str, sep, &pieces);
  std::transform(pieces.begin(), pieces.end(), std::back_inserter(*is),
                 [](const std::string &v) { return std::stoll(v); });
}
static void split_to_int(const std::string &str, char sep,
                         std::vector<int> *is) {
  std::vector<std::string> pieces;
  split(str, sep, &pieces);
  std::transform(pieces.begin(), pieces.end(), std::back_inserter(*is),
                 [](const std::string &v) { return std::stoi(v); });
}
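
// Joins the elements of `vec` into a single space-separated string.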
template <typename T>
std::string to_string(const std::vector<T> &vec) {
  std::stringstream ss;
  for (const auto &c : vec) {
    ss << c << " ";
  }
  return ss.str();
}
template <>
std::string to_string<std::vector<float>>(
    const std::vector<std::vector<float>> &vec);

template <>
std::string to_string<std::vector<std::vector<float>>>(
    const std::vector<std::vector<std::vector<float>>> &vec);

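// Multiplies all elements of `v`; typically used to turn a shape vector into
// the total element count.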
template <typename T>
int VecReduceToInt(const std::vector<T> &v) {
  return std::accumulate(v.begin(), v.end(), 1, [](T a, T b) { return a * b; });
}

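// Copies the nested `data` into `tensor`'s buffer. The caller must have set
// `tensor->shape` so that its element count covers all values in `data`.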
template <typename T>
static void TensorAssignData(PaddleTensor *tensor,
                             const std::vector<std::vector<T>> &data) {
  // Assign buffer
  int num_elems = VecReduceToInt(tensor->shape);
  tensor->data.Resize(sizeof(T) * num_elems);
  int c = 0;
  for (const auto &f : data) {
    for (T v : f) {
      static_cast<T *>(tensor->data.data())[c++] = v;
    }
  }
}

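// LoD variant: derives the shape {total_length, 1} from the last LoD offset,
// assigns the LoD, then copies `data`.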
template <typename T>
static void TensorAssignData(PaddleTensor *tensor,
                             const std::vector<std::vector<T>> &data,
                             const std::vector<size_t> &lod) {
  int size = lod[lod.size() - 1];
  tensor->shape.assign({size, 1});
  tensor->lod.assign({lod});
  TensorAssignData(tensor, data);
}

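// Writes the nested `data` into the tensor's CPU buffer obtained via
// mutable_data(); the tensor is expected to be reshaped beforehand.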
template <typename T>
static void ZeroCopyTensorAssignData(ZeroCopyTensor *tensor,
                                     const std::vector<std::vector<T>> &data) {
  auto *ptr = tensor->mutable_data<T>(PaddlePlace::kCPU);
  int c = 0;
  for (const auto &f : data) {
    for (T v : f) {
      ptr[c++] = v;
    }
  }
}

template <typename T>
static void ZeroCopyTensorAssignData(ZeroCopyTensor *tensor,
                                     const PaddleBuf &data) {
  auto *ptr = tensor->mutable_data<T>(PaddlePlace::kCPU);
  for (size_t i = 0; i < data.length() / sizeof(T); i++) {
    ptr[i] = *(reinterpret_cast<T *>(data.data()) + i);
  }
}

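// Returns true iff `a` and `b` agree on dtype, LoD, shape, and every element
// (the buffers are compared as float, with exact equality).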
static bool CompareTensor(const PaddleTensor &a, const PaddleTensor &b) {
  if (a.dtype != b.dtype) {
    LOG(ERROR) << "dtype not match";
    return false;
  }

  if (a.lod.size() != b.lod.size()) {
    LOG(ERROR) << "lod not match";
    return false;
  }
  for (size_t i = 0; i < a.lod.size(); i++) {
    if (a.lod[i].size() != b.lod[i].size()) {
      LOG(ERROR) << "lod not match";
      return false;
    }
    for (size_t j = 0; j < a.lod[i].size(); j++) {
      if (a.lod[i][j] != b.lod[i][j]) {
        LOG(ERROR) << "lod not match";
        return false;
      }
    }
  }

  if (a.shape.size() != b.shape.size()) {
    LOG(INFO) << "shape not match";
    return false;
  }
  for (size_t i = 0; i < a.shape.size(); i++) {
    if (a.shape[i] != b.shape[i]) {
      LOG(ERROR) << "shape not match";
      return false;
    }
  }

  auto *adata = static_cast<float *>(a.data.data());
  auto *bdata = static_cast<float *>(b.data.data());
  for (int i = 0; i < VecReduceToInt(a.shape); i++) {
    if (adata[i] != bdata[i]) {
      LOG(ERROR) << "data not match";
      return false;
    }
  }
  return true;
}

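// Builds a human-readable dump of `tensor`: name, dtype, shape, LoD, buffer
// length, and up to `max_num_of_data` leading elements interpreted as float.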
static std::string DescribeTensor(const PaddleTensor &tensor,
                                  int max_num_of_data = 15) {
  std::stringstream os;
  os << "Tensor [" << tensor.name << "]\n";
  os << " - type: ";
  switch (tensor.dtype) {
    case PaddleDType::FLOAT32:
      os << "float32";
      break;
    case PaddleDType::INT64:
      os << "int64";
      break;
    case PaddleDType::INT32:
      os << "int32";
      break;
    default:
      os << "unset";
  }
  os << '\n';

  os << " - shape: " << to_string(tensor.shape) << '\n';
  os << " - lod: ";
  for (auto &l : tensor.lod) {
    os << to_string(l) << "; ";
  }
  os << "\n";
  os << " - memory length: " << tensor.data.length();
  os << "\n";

  os << " - data: ";
  int dim = VecReduceToInt(tensor.shape);
  float *pdata = static_cast<float *>(tensor.data.data());
  for (int i = 0; i < std::min(dim, max_num_of_data); i++) {
    os << pdata[i] << " ";
  }
  os << '\n';
  return os.str();
}

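// ZeroCopyTensor counterpart of DescribeTensor: dumps name, shape, LoD,
// element count, and all elements interpreted as float.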
static std::string DescribeZeroCopyTensor(const ZeroCopyTensor &tensor) {
  std::stringstream os;
  os << "Tensor [" << tensor.name() << "]\n";

  os << " - shape: " << to_string(tensor.shape()) << '\n';
  os << " - lod: ";
  for (auto &l : tensor.lod()) {
    os << to_string(l) << "; ";
  }
  os << "\n";
  PaddlePlace place;
  int size;
  const auto *data = tensor.data<float>(&place, &size);
  os << " - numel: " << size;
  os << "\n";
  os << " - data: ";
  for (int i = 0; i < size; i++) {
    os << data[i] << " ";
  }
  return os.str();
}

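// Logs a benchmark summary: thread info, batch size, iteration and repetition
// counts, batch and per-sample latency, fps, and the data type used.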
static void PrintTime(int batch_size, int repeat, int num_threads, int tid,
                      double batch_latency, int epoch = 1,
                      const framework::proto::VarType::Type data_type =
                          framework::proto::VarType::FP32) {
  PADDLE_ENFORCE(batch_size > 0, "Non-positive batch size.");
  double sample_latency = batch_latency / batch_size;
  LOG(INFO) << "====== threads: " << num_threads << ", thread id: " << tid
            << " ======";
  LOG(INFO) << "====== batch size: " << batch_size << ", iterations: " << epoch
            << ", repetitions: " << repeat << " ======";
  LOG(INFO) << "====== batch latency: " << batch_latency
            << "ms, number of samples: " << batch_size * epoch
            << ", sample latency: " << sample_latency
            << "ms, fps: " << 1000.f / sample_latency
            << ", data type: " << DataTypeToString(data_type) << " ======";
}

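// Returns true if `path` exists and can be opened for reading.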
static bool IsFileExists(const std::string &path) {
  std::ifstream file(path);
  bool exists = file.is_open();
  file.close();
  return exists;
}

}  // namespace inference
}  // namespace paddle