/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/data_type_transform.h"

#include "paddle/fluid/framework/convert_utils.h"
#include "paddle/fluid/framework/selected_rows_utils.h"
#include "paddle/phi/common/transform.h"

#if defined(PADDLE_WITH_XPU)
#include "paddle/fluid/platform/device/device_wrapper.h"
#endif

namespace paddle {
namespace framework {

template <typename InType, typename OutType>
struct CastDataTypeFunctor {
  // Element-wise conversion functor used by phi::Transform: converts one
  // value from InType to OutType via static_cast. HOSTDEVICE makes it
  // callable from both host and device compilation paths.
  HOSTDEVICE inline OutType operator()(InType value) const {
    return static_cast<OutType>(value);
  }
};

#if defined(PADDLE_WITH_XPU)

// Casts every element of `in` from InType to OutType on an XPU device and
// writes the result into `out` (allocated on the same place as `in`).
// Blocks until the device kernel has finished.
template <typename InType, typename OutType>
static void XPUCastData(const phi::DenseTensor& in,
                        phi::DenseTensor* out,
                        const platform::XPUDeviceContext* dev_ctx) {
  // Map framework-level element types to the XPU kernel library's
  // corresponding device types (e.g. fp16 wrapper types).
  using XPUInTDType = typename XPUTypeTrait<InType>::Type;
  using XPUOutTDType = typename XPUTypeTrait<OutType>::Type;
  int r = xpu::cast<XPUInTDType, XPUOutTDType>(
      dev_ctx->x_context(),
      reinterpret_cast<const XPUInTDType*>(in.data<InType>()),
      reinterpret_cast<XPUOutTDType*>(out->mutable_data<OutType>(in.place())),
      in.numel());
  // xpu::cast returns an XDNN status code; raise on failure.
  PADDLE_ENFORCE_XDNN_SUCCESS(r, "cast");
  // Synchronize so callers can safely read `out` immediately.
  dev_ctx->Wait();
}

template <typename InType>
static void XPUTransDataType(
54 55
    const phi::DenseTensor& in,
    phi::DenseTensor* out,
56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79
    const paddle::framework::proto::VarType::Type& dst_type,
    const platform::DeviceContext* ctx) {
  auto* context = static_cast<const platform::XPUDeviceContext*>(ctx);

#define XPUCastCallback(cpp_type, proto_type)          \
  do {                                                 \
    if (dst_type == proto_type) {                      \
      XPUCastData<InType, cpp_type>(in, out, context); \
    }                                                  \
  } while (0)

  if (dst_type == proto::VarType::FP32 && dst_type == proto::VarType::FP16 &&
      dst_type == proto::VarType::BOOL && dst_type == proto::VarType::INT16 &&
      dst_type == proto::VarType::INT32 && dst_type == proto::VarType::INT64) {
    _ForEachDataType_(XPUCastCallback);
  } else {
    PADDLE_THROW(platform::errors::Unimplemented(
        "Data type (%s) is not supported in XPU when casting data type.",
        DataTypeToString(dst_type)));
  }
}

#endif

// Visitor functor used with framework::VisitDataType: casts `in_` (whose
// element type is InType) into `out_` with the element type chosen by the
// visited OutType. Supports CPU always, GPU only when compiled with
// NVCC/HIPCC, and IPU (which reuses the CPU transform path).
template <typename InType>
struct CastDataType {
  CastDataType(const phi::DenseTensor& in,
               phi::DenseTensor* out,
               const platform::DeviceContext* ctx)
      : in_(in), out_(out), ctx_(ctx) {}
  // Held by value; DenseTensor copies share the underlying allocation,
  // so this does not deep-copy the data.
  const phi::DenseTensor in_;
  phi::DenseTensor* out_;
  const platform::DeviceContext* ctx_;

  // Called once by VisitDataType with OutType bound to the runtime dst type.
  template <typename OutType>
  void apply() {
    auto* in_begin = in_.data<InType>();
    auto* in_end = in_begin + in_.numel();
    // Allocates out_'s buffer on the same place as the input.
    auto* out_begin = out_->mutable_data<OutType>(in_.place());

    if (platform::is_cpu_place(in_.place())) {
      phi::Transform<phi::CPUContext> trans;
      auto* context = static_cast<const phi::CPUContext*>(ctx_);
      trans(*context,
            in_begin,
            in_end,
            out_begin,
            CastDataTypeFunctor<InType, OutType>());
#if defined(__NVCC__) || defined(__HIPCC__)
    } else if (platform::is_gpu_place(in_.place())) {
      phi::Transform<phi::GPUContext> trans;
      auto* context = static_cast<const phi::GPUContext*>(ctx_);
      trans(*context,
            in_begin,
            in_end,
            out_begin,
            CastDataTypeFunctor<InType, OutType>());
      // GPU transform is asynchronous; wait so the result is ready for
      // the caller.
      context->Wait();
#endif
#if defined(PADDLE_WITH_IPU)
    } else if (platform::is_ipu_place(in_.place())) {
      // IPU tensors are transformed with the host (CPU) context.
      phi::Transform<phi::CPUContext> trans;
      auto* context = static_cast<const phi::CPUContext*>(ctx_);
      trans(*context,
            in_begin,
            in_end,
            out_begin,
            CastDataTypeFunctor<InType, OutType>());
#endif
    } else {
      // Reached for places whose support was not compiled in (e.g. GPU
      // place in a CPU-only build).
      PADDLE_THROW(platform::errors::Unimplemented(
          "Place type is not supported when casting data type."));
    }
  }
};

// Kernel-key convenience overload: validates that the input tensor's dtype
// matches the dtype recorded in kernel_type_for_var, then casts `in` to the
// dtype expected by the target kernel, delegating to the proto-type overload.
void TransDataType(const phi::KernelKey& kernel_type_for_var,
                   const phi::KernelKey& expected_kernel_type,
                   const phi::DenseTensor& in,
                   phi::DenseTensor* out) {
  // BUG FIX: error message previously read "conststent" — typo corrected.
  PADDLE_ENFORCE_EQ(in.dtype(),
                    kernel_type_for_var.dtype(),
                    platform::errors::InvalidArgument(
                        "The src dtype(%s) of input tensor and kernel_type(%s) "
                        "are not consistent.",
                        DataTypeToString(in.dtype()),
                        DataTypeToString(kernel_type_for_var.dtype())));
  auto dst_type = framework::TransToProtoVarType(expected_kernel_type.dtype());
  TransDataType(in, dst_type, out);
}

// Casts `in` to the destination proto data type `type`, writing into `out`
// (resized to in.dims(), allocated on in.place()). On XPU builds, dispatch
// goes through the XPU cast kernels; otherwise the cast is performed via
// VisitDataType + CastDataType on CPU/GPU/IPU.
void TransDataType(const phi::DenseTensor& in,
                   const paddle::framework::proto::VarType::Type& type,
                   phi::DenseTensor* out) {
  platform::DeviceContextPool& pool = platform::DeviceContextPool::Instance();

  out->Resize(in.dims());
  auto src_type = framework::TransToProtoVarType(in.dtype());
  auto dst_type = type;
  // Device context matching the input tensor's placement.
  auto ctx = pool.Get(in.place());

#if defined(PADDLE_WITH_XPU)
  // XPU path: outer switch selects the source element type; the destination
  // type is dispatched inside XPUTransDataType. NOTE(review): fewer source
  // types are supported here than on the generic path below (no BF16/FP64/
  // UINT8) — presumably an XPU kernel limitation; confirm before relying
  // on those types on XPU.
  switch (src_type) {
    case proto::VarType::FP16:
      XPUTransDataType<platform::float16>(in, out, dst_type, ctx);
      break;
    case proto::VarType::FP32:
      XPUTransDataType<float>(in, out, dst_type, ctx);
      break;
    case proto::VarType::BOOL:
      XPUTransDataType<bool>(in, out, dst_type, ctx);
      break;
    case proto::VarType::INT16:
      XPUTransDataType<int16_t>(in, out, dst_type, ctx);
      break;
    case proto::VarType::INT32:
      XPUTransDataType<int>(in, out, dst_type, ctx);
      break;
    case proto::VarType::INT64:
      XPUTransDataType<int64_t>(in, out, dst_type, ctx);
      break;
    default:
      PADDLE_THROW(platform::errors::Unimplemented(
          "Data type (%s) is not supported in XPU when casting data type.",
          DataTypeToString(src_type)));
  }

#else

  // Generic path: outer switch selects the source element type; VisitDataType
  // binds the destination type and invokes CastDataType::apply.
  switch (src_type) {
    case proto::VarType::FP16:
      framework::VisitDataType(dst_type,
                               CastDataType<platform::float16>(in, out, ctx));
      break;
    case proto::VarType::BF16:
      framework::VisitDataType(dst_type,
                               CastDataType<platform::bfloat16>(in, out, ctx));
      break;
    case proto::VarType::FP32:
      framework::VisitDataType(dst_type, CastDataType<float>(in, out, ctx));
      break;
    case proto::VarType::FP64:
      framework::VisitDataType(dst_type, CastDataType<double>(in, out, ctx));
      break;
    case proto::VarType::INT32:
      framework::VisitDataType(dst_type, CastDataType<int>(in, out, ctx));
      break;
    case proto::VarType::INT64:
      framework::VisitDataType(dst_type, CastDataType<int64_t>(in, out, ctx));
      break;
    case proto::VarType::BOOL:
      framework::VisitDataType(dst_type, CastDataType<bool>(in, out, ctx));
      break;
    case proto::VarType::INT16:
      framework::VisitDataType(dst_type, CastDataType<int16_t>(in, out, ctx));
      break;
    case proto::VarType::UINT8:
      framework::VisitDataType(dst_type, CastDataType<uint8_t>(in, out, ctx));
      break;
    default:
      PADDLE_THROW(platform::errors::Unimplemented(
          "Data type (%s) is not supported when casting data type.",
          DataTypeToString(src_type)));
  }
#endif
}

// Converts a complex-valued tensor `in` (COMPLEX64 or COMPLEX128, given by
// src_type) into a real-valued tensor of dst_type, writing into `out`.
// Any other src_type raises Unimplemented.
void TransComplexToReal(const proto::VarType::Type& dst_type,
                        const proto::VarType::Type& src_type,
                        const phi::DenseTensor& in,
                        phi::DenseTensor* out) {
  auto* dev_ctx = platform::DeviceContextPool::Instance().Get(in.place());
  out->Resize(in.dims());

  // complex -> real: pick the source element type, then let VisitDataType
  // bind the (real) destination element type.
  if (src_type == proto::VarType::COMPLEX64) {
    framework::VisitDataType(
        dst_type, CastDataType<platform::complex<float>>(in, out, dev_ctx));
  } else if (src_type == proto::VarType::COMPLEX128) {
    framework::VisitDataType(
        dst_type, CastDataType<platform::complex<double>>(in, out, dev_ctx));
  } else {
    PADDLE_THROW(platform::errors::Unimplemented(
        "Data type (%s) is not supported when casting complex tensor to real "
        "data type.",
        DataTypeToString(src_type)));
  }
}

}  // namespace framework
}  // namespace paddle