/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#ifdef __GNUC__
#include <cxxabi.h>  // for __cxa_demangle
#endif               // __GNUC__

#ifdef PADDLE_WITH_CUDA
#include <cublas_v2.h>
#include <cudnn.h>
#include <curand.h>
#include <thrust/system/cuda/error.h>
#include <thrust/system_error.h>
#endif  // PADDLE_WITH_CUDA

#include <iomanip>
#include <memory>
#include <sstream>
#include <stdexcept>
#include <string>
#include <type_traits>
#include <utility>

#define GLOG_NO_ABBREVIATED_SEVERITIES  // msvc conflict logging with windows.h
#include "glog/logging.h"
#include "paddle/fluid/platform/macros.h"
#include "paddle/fluid/platform/port.h"
#include "paddle/fluid/string/printf.h"
#include "paddle/fluid/string/to_string.h"

#ifdef PADDLE_WITH_CUDA
#include "paddle/fluid/platform/dynload/cublas.h"
#include "paddle/fluid/platform/dynload/cudnn.h"
#include "paddle/fluid/platform/dynload/curand.h"
#if !defined(__APPLE__) && !defined(_WIN32)
#include "paddle/fluid/platform/dynload/nccl.h"
#endif  // __APPLE__
#endif  // PADDLE_WITH_CUDA

namespace paddle {
namespace platform {

#ifdef __GNUC__
inline std::string demangle(std::string name) {
  int status = -4;  // some arbitrary value to eliminate the compiler warning
  std::unique_ptr<char, void (*)(void*)> res{
      abi::__cxa_demangle(name.c_str(), NULL, NULL, &status), std::free};
  return (status == 0) ? res.get() : name;
}
#else
inline std::string demangle(std::string name) { return name; }
#endif
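
// Illustrative use of demangle(); the mangled name below is a generic
// Itanium-ABI example rather than a symbol taken from Paddle:
//   demangle("_ZN3fooC1Ev");  // -> "foo::foo()" with GCC/Clang, unchanged otherwise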

struct EnforceNotMet : public std::exception {
  std::string err_str_;
  EnforceNotMet(std::exception_ptr e, const char* f, int l) {
    try {
      std::rethrow_exception(e);
    } catch (std::exception& e) {
      Init(e.what(), f, l);
    }
  }

  EnforceNotMet(const std::string& str, const char* f, int l) {
    Init(str, f, l);
  }

  const char* what() const noexcept override { return err_str_.c_str(); }

 private:
  template <typename StrType>
  inline void Init(StrType what, const char* f, int l) {
    static constexpr int TRACE_STACK_LIMIT = 100;
    std::ostringstream sout;

    sout << string::Sprintf("%s at [%s:%d]", what, f, l) << std::endl;
    sout << "PaddlePaddle Call Stacks: " << std::endl;
#if !defined(_WIN32)
    void* call_stack[TRACE_STACK_LIMIT];
    auto size = backtrace(call_stack, TRACE_STACK_LIMIT);
    auto symbols = backtrace_symbols(call_stack, size);
    Dl_info info;
    for (int i = 0; i < size; ++i) {
      if (dladdr(call_stack[i], &info) && info.dli_sname) {
        auto demangled = demangle(info.dli_sname);
        auto addr_offset = static_cast<char*>(call_stack[i]) -
                           static_cast<char*>(info.dli_saddr);
        sout << string::Sprintf("%-3d %*0p %s + %zd\n", i,
                                2 + sizeof(void*) * 2, call_stack[i], demangled,
                                addr_offset);
      } else {
        sout << string::Sprintf("%-3d %*0p\n", i, 2 + sizeof(void*) * 2,
                                call_stack[i]);
      }
    }
    free(symbols);
#else
    sout << "Windows does not support stack backtrace yet.";
#endif
    err_str_ = sout.str();
  }
};

struct EOFException : public std::exception {
  std::string err_str_;
  EOFException(const char* err_msg, const char* f, int l) {
    err_str_ = string::Sprintf("%s at [%s:%d]", err_msg, f, l);
  }

  const char* what() const noexcept { return err_str_.c_str(); }
};
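
// EOFException is raised through the PADDLE_THROW_EOF() macro defined below,
// carrying the message "There is no next data." together with the throw site.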

// Because most enforce conditions evaluate to true, we can use
// __builtin_expect to instruct the compiler to generate code that favors
// the true branch, which yields slightly faster binaries.
// __builtin_expect is a GCC/Clang builtin; MSVC has no equivalent, so the
// macros below fall back to the plain condition there.
// For more details, please check https://stackoverflow.com/a/43870188/724872.
#if !defined(_WIN32)
#define UNLIKELY(condition) __builtin_expect(static_cast<bool>(condition), 0)
#else
// There is no equivalent intrinsic in MSVC.
#define UNLIKELY(condition) (condition)
#endif

#if !defined(_WIN32)
#define LIKELY(condition) __builtin_expect(static_cast<bool>(condition), 1)
#else
// There is no equivalent intrinsic in MSVC.
#define LIKELY(condition) (condition)
#endif
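
// Illustrative use (the null check here is a hypothetical example):
//   if (UNLIKELY(buf == nullptr)) {
//     PADDLE_THROW("buf must not be null");
//   }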

inline bool is_error(bool stat) { return !stat; }

inline void throw_on_error(bool stat, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  throw std::runtime_error(msg);
#else
  LOG(FATAL) << msg;
#endif
}
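
// Note: is_error() / throw_on_error() are overloaded below for each CUDA
// status type (cudaError_t, curandStatus_t, cudnnStatus_t, cublasStatus_t and,
// where available, ncclResult_t). PADDLE_ENFORCE resolves the overload from
// the type of its condition, so the same macro handles bool conditions and
// status codes alike.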

#ifdef PADDLE_WITH_CUDA

inline bool is_error(cudaError_t e) { return e != cudaSuccess; }

inline void throw_on_error(cudaError_t e, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  throw thrust::system_error(e, thrust::cuda_category(), msg);
#else
  LOG(FATAL) << msg;
#endif
}

inline bool is_error(curandStatus_t stat) {
  return stat != CURAND_STATUS_SUCCESS;
}

inline void throw_on_error(curandStatus_t stat, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  throw thrust::system_error(cudaErrorLaunchFailure, thrust::cuda_category(),
                             msg);
#else
  LOG(FATAL) << msg;
#endif
}

inline bool is_error(cudnnStatus_t stat) {
  return stat != CUDNN_STATUS_SUCCESS;
}

inline void throw_on_error(cudnnStatus_t stat, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  throw std::runtime_error(platform::dynload::cudnnGetErrorString(stat) + msg);
#else
  LOG(FATAL) << platform::dynload::cudnnGetErrorString(stat) << msg;
#endif
}

inline bool is_error(cublasStatus_t stat) {
  return stat != CUBLAS_STATUS_SUCCESS;
}

inline void throw_on_error(cublasStatus_t stat, const std::string& msg) {
  std::string err;
  if (stat == CUBLAS_STATUS_NOT_INITIALIZED) {
    err = "CUBLAS: not initialized, ";
  } else if (stat == CUBLAS_STATUS_ALLOC_FAILED) {
    err = "CUBLAS: alloc failed, ";
  } else if (stat == CUBLAS_STATUS_INVALID_VALUE) {
    err = "CUBLAS: invalid value, ";
  } else if (stat == CUBLAS_STATUS_ARCH_MISMATCH) {
    err = "CUBLAS: arch mismatch, ";
  } else if (stat == CUBLAS_STATUS_MAPPING_ERROR) {
    err = "CUBLAS: mapping error, ";
  } else if (stat == CUBLAS_STATUS_EXECUTION_FAILED) {
    err = "CUBLAS: execution failed, ";
  } else if (stat == CUBLAS_STATUS_INTERNAL_ERROR) {
    err = "CUBLAS: internal error, ";
  } else if (stat == CUBLAS_STATUS_NOT_SUPPORTED) {
    err = "CUBLAS: not supported, ";
  } else if (stat == CUBLAS_STATUS_LICENSE_ERROR) {
    err = "CUBLAS: license error, ";
  }
#ifndef REPLACE_ENFORCE_GLOG
  throw std::runtime_error(err + msg);
#else
  LOG(FATAL) << err << msg;
#endif
}

#if !defined(__APPLE__) && !defined(_WIN32)
inline bool is_error(ncclResult_t nccl_result) {
  return nccl_result != ncclSuccess;
}

inline void throw_on_error(ncclResult_t stat, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  throw std::runtime_error(platform::dynload::ncclGetErrorString(stat) + msg);
#else
  LOG(FATAL) << platform::dynload::ncclGetErrorString(stat) << msg;
#endif
}
#endif  // __APPLE__ and windows
#endif  // PADDLE_WITH_CUDA

#ifdef PADDLE_WITH_CUDA
namespace details {

template <typename T>
struct CudaStatusType {};

#define DEFINE_CUDA_STATUS_TYPE(type, success_value) \
  template <>                                        \
  struct CudaStatusType<type> {                      \
    using Type = type;                               \
    static constexpr Type kSuccess = success_value;  \
  }

DEFINE_CUDA_STATUS_TYPE(cudaError_t, cudaSuccess);
DEFINE_CUDA_STATUS_TYPE(curandStatus_t, CURAND_STATUS_SUCCESS);
DEFINE_CUDA_STATUS_TYPE(cudnnStatus_t, CUDNN_STATUS_SUCCESS);
DEFINE_CUDA_STATUS_TYPE(cublasStatus_t, CUBLAS_STATUS_SUCCESS);

#if !defined(__APPLE__) && !defined(_WIN32)
DEFINE_CUDA_STATUS_TYPE(ncclResult_t, ncclSuccess);
#endif

}  // namespace details
#endif
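
// CudaStatusType maps each supported CUDA status enum to its success value
// (e.g. CudaStatusType<cudaError_t>::kSuccess is cudaSuccess), which lets
// PADDLE_ENFORCE_CUDA_SUCCESS below compare any status code against "success"
// generically.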

#define PADDLE_THROW(...)                                            \
  do {                                                               \
    throw ::paddle::platform::EnforceNotMet(                         \
        ::paddle::string::Sprintf(__VA_ARGS__), __FILE__, __LINE__); \
  } while (0)
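
// Illustrative use (the message and its format argument are hypothetical):
//   PADDLE_THROW("Unsupported data type: %s", dtype_name);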

#define PADDLE_ENFORCE(COND, ...)                                         \
  do {                                                                    \
    auto __cond__ = (COND);                                               \
    if (UNLIKELY(::paddle::platform::is_error(__cond__))) {               \
      try {                                                               \
        ::paddle::platform::throw_on_error(                               \
            __cond__, ::paddle::string::Sprintf(__VA_ARGS__));            \
      } catch (...) {                                                     \
        throw ::paddle::platform::EnforceNotMet(std::current_exception(), \
                                                __FILE__, __LINE__);      \
      }                                                                   \
    }                                                                     \
  } while (0)
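
// Illustrative use (variable names are hypothetical); the condition may be a
// bool or any status type that has is_error()/throw_on_error() overloads above:
//   PADDLE_ENFORCE(offset < total_size, "offset %d exceeds size %d", offset,
//                  total_size);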

#ifdef PADDLE_WITH_CUDA
#define PADDLE_ENFORCE_CUDA_SUCCESS(COND, ...)                            \
  do {                                                                    \
    auto __cond__ = (COND);                                               \
    using __CUDA_STATUS_TYPE__ = decltype(__cond__);                      \
    constexpr auto __success_type__ =                                     \
        ::paddle::platform::details::CudaStatusType<                      \
            __CUDA_STATUS_TYPE__>::kSuccess;                              \
    if (UNLIKELY(__cond__ != __success_type__)) {                         \
      try {                                                               \
        ::paddle::platform::throw_on_error(                               \
            __cond__, ::paddle::string::Sprintf(__VA_ARGS__));            \
      } catch (...) {                                                     \
        throw ::paddle::platform::EnforceNotMet(std::current_exception(), \
                                                __FILE__, __LINE__);      \
      }                                                                   \
    }                                                                     \
  } while (0)

#undef DEFINE_CUDA_STATUS_TYPE
#endif
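
// Illustrative use in a CUDA build (buffer names and sizes are hypothetical):
//   PADDLE_ENFORCE_CUDA_SUCCESS(
//       cudaMemcpyAsync(dst, src, num_bytes, cudaMemcpyHostToDevice, stream),
//       "cudaMemcpyAsync failed");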

#define PADDLE_THROW_EOF()                                                     \
  do {                                                                         \
    throw ::paddle::platform::EOFException("There is no next data.", __FILE__, \
                                           __LINE__);                          \
  } while (false)
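
// Illustrative use (HasNext() is a hypothetical reader method):
//   if (!reader.HasNext()) {
//     PADDLE_THROW_EOF();
//   }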

/*
 * Some enforce helpers here, usage:
 *    int a = 1;
 *    int b = 2;
 *    PADDLE_ENFORCE_EQ(a, b);
 *
 *    will raise an exception with a message like:
 *    "Enforce failed. Expected a == b, but received a:1 != b:2."
 *      with detailed stack information.
 *
 *    Extra messages are also supported, for example:
 *    PADDLE_ENFORCE_EQ(a, b, "some simple enforce failed between %d numbers", 2)
 */
#define PADDLE_ENFORCE_NOT_NULL(__VAL, ...)                 \
  do {                                                      \
    if (UNLIKELY(nullptr == (__VAL))) {                     \
      PADDLE_THROW(#__VAL " should not be null\n%s",        \
                   ::paddle::string::Sprintf(__VA_ARGS__)); \
    }                                                       \
  } while (0)
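
// Illustrative use (the names are hypothetical):
//   PADDLE_ENFORCE_NOT_NULL(input_tensor, "Input of operator %s is missing",
//                           op_type);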

namespace details {
template <typename T>
inline constexpr bool IsArithmetic() {
  return std::is_arithmetic<T>::value;
}

template <typename T1, typename T2, bool kIsArithmetic /* = true */>
struct TypeConverterImpl {
  using Type1 = typename std::common_type<T1, T2>::type;
  using Type2 = Type1;
};

template <typename T1, typename T2>
struct TypeConverterImpl<T1, T2, false> {
  using Type1 = T1;
  using Type2 = T2;
};

template <typename T1, typename T2>
struct TypeConverter {
 private:
  static constexpr bool kIsArithmetic =
      IsArithmetic<T1>() && IsArithmetic<T2>();

 public:
  using Type1 = typename TypeConverterImpl<T1, T2, kIsArithmetic>::Type1;
  using Type2 = typename TypeConverterImpl<T1, T2, kIsArithmetic>::Type2;
};

template <typename T1, typename T2>
using CommonType1 = typename std::add_lvalue_reference<
    typename std::add_const<typename TypeConverter<T1, T2>::Type1>::type>::type;

template <typename T1, typename T2>
using CommonType2 = typename std::add_lvalue_reference<
    typename std::add_const<typename TypeConverter<T1, T2>::Type2>::type>::type;
}  // namespace details
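
// CommonType1/CommonType2 cast both sides of a comparison to their common
// arithmetic type when both operands are arithmetic (e.g. int vs. size_t), so
// the comparisons below do not trigger signed/unsigned warnings; non-arithmetic
// operands are left as-is.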

#define __PADDLE_BINARY_COMPARE(__VAL1, __VAL2, __CMP, __INV_CMP, ...)  \
  do {                                                                  \
    auto __val1 = (__VAL1);                                             \
    auto __val2 = (__VAL2);                                             \
    using __TYPE1__ = decltype(__val1);                                 \
    using __TYPE2__ = decltype(__val2);                                 \
    using __COMMON_TYPE1__ =                                            \
        ::paddle::platform::details::CommonType1<__TYPE1__, __TYPE2__>; \
    using __COMMON_TYPE2__ =                                            \
        ::paddle::platform::details::CommonType2<__TYPE1__, __TYPE2__>; \
    bool __is_not_error = (static_cast<__COMMON_TYPE1__>(__val1))__CMP( \
        static_cast<__COMMON_TYPE2__>(__val2));                         \
    if (UNLIKELY(!__is_not_error)) {                                    \
      PADDLE_THROW("Enforce failed. Expected %s " #__CMP                \
                   " %s, but received %s:%s " #__INV_CMP " %s:%s.\n%s", \
                   #__VAL1, #__VAL2, #__VAL1,                           \
                   ::paddle::string::to_string(__val1), #__VAL2,        \
                   ::paddle::string::to_string(__val2),                 \
                   ::paddle::string::Sprintf(__VA_ARGS__));             \
    }                                                                   \
  } while (0)

#define PADDLE_ENFORCE_EQ(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, ==, !=, __VA_ARGS__)
#define PADDLE_ENFORCE_NE(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, !=, ==, __VA_ARGS__)
#define PADDLE_ENFORCE_GT(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, >, <=, __VA_ARGS__)
#define PADDLE_ENFORCE_GE(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, >=, <, __VA_ARGS__)
#define PADDLE_ENFORCE_LT(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, <, >=, __VA_ARGS__)
#define PADDLE_ENFORCE_LE(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, <=, >, __VA_ARGS__)

#define __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL1, __VAL2, __CMP, \
                                           __INV_CMP, ...)               \
  do {                                                                   \
    auto __val1 = (__VAL1);                                              \
    auto __val2 = (__VAL2);                                              \
    if (!__CTX->IsRuntime()) {                                           \
      if (__val1 == -1 || __val2 == -1) {                                \
        break;                                                           \
      }                                                                  \
    }                                                                    \
    using __TYPE1__ = decltype(__val1);                                  \
    using __TYPE2__ = decltype(__val2);                                  \
    using __COMMON_TYPE1__ =                                             \
        ::paddle::platform::details::CommonType1<__TYPE1__, __TYPE2__>;  \
    using __COMMON_TYPE2__ =                                             \
        ::paddle::platform::details::CommonType2<__TYPE1__, __TYPE2__>;  \
    bool __is_not_error = (static_cast<__COMMON_TYPE1__>(__val1))__CMP(  \
        static_cast<__COMMON_TYPE2__>(__val2));                          \
    if (UNLIKELY(!__is_not_error)) {                                     \
      PADDLE_THROW("Enforce failed. Expected %s " #__CMP                 \
                   " %s, but received %s:%s " #__INV_CMP " %s:%s.\n%s",  \
                   #__VAL1, #__VAL2, #__VAL1,                            \
                   ::paddle::string::to_string(__val1), #__VAL2,         \
                   ::paddle::string::to_string(__val2),                  \
                   ::paddle::string::Sprintf(__VA_ARGS__));              \
    }                                                                    \
  } while (0)

#define PADDLE_INFERSHAPE_ENFORCE_EQ(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, ==, !=, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_NE(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, !=, ==, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_GT(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, >, <=, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_GE(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, >=, <, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_LT(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, <, >=, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_LE(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, <=, >, __VA_ARGS__)
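
// Note: the PADDLE_INFERSHAPE_ENFORCE_* variants above skip the comparison
// when the InferShape context is not in runtime mode and either operand is -1,
// i.e. when the corresponding dimension is still unknown at graph-building time.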

}  // namespace platform
}  // namespace paddle