reshape_grad_kernel.cc
//   Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/phi/kernels/reshape_grad_kernel.h"

#include "paddle/phi/backends/all_context.h"
#include "paddle/phi/core/kernel_registry.h"
#include "paddle/phi/core/tensor_utils.h"
#ifdef PADDLE_WITH_XPU
#include "paddle/phi/backends/xpu/enforce_xpu.h"
#endif

namespace phi {

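// Backward pass of reshape. Reshape only changes tensor metadata, so the
// gradient w.r.t. x is out_grad itself viewed with x's original shape: e.g.
// if x has shape [2, 3] and out has shape [6], x_grad holds out_grad's six
// elements resized back to [2, 3]. The dims already stored on x_grad are
// saved before the copy (which also copies out_grad's meta) and then
// restored with Resize.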
template <typename Context>
void ReshapeGradKernel(const Context& dev_ctx,
                       const DenseTensor& out_grad,
                       DenseTensor* x_grad) {
  auto x_dims = x_grad->dims();
  phi::Copy(dev_ctx, out_grad, dev_ctx.GetPlace(), false, x_grad);
  x_grad->Resize(x_dims);
}

#ifdef PADDLE_WITH_XPU
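// XPU specialization: rather than going through phi::Copy, allocate x_grad
// with out_grad's dtype and copy the raw bytes on the device via xpu::copy,
// then restore x's shape with Resize.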
template <>
void ReshapeGradKernel<phi::XPUContext>(const XPUContext& dev_ctx,
                                        const DenseTensor& out_grad,
                                        DenseTensor* x_grad) {
  auto x_dims = x_grad->dims();
  dev_ctx.Alloc(x_grad, out_grad.dtype());
  auto* src_ptr = out_grad.data();
  auto* dst_ptr = x_grad->data();
  auto size = out_grad.numel() * paddle::experimental::SizeOf(out_grad.dtype());
  int ret = xpu::copy(dev_ctx.x_context(),
                      reinterpret_cast<const int8_t*>(src_ptr),
                      reinterpret_cast<int8_t*>(dst_ptr),
                      size);
  PADDLE_ENFORCE_XDNN_SUCCESS(ret, "copy");
  x_grad->Resize(x_dims);
}
#endif

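// Second-order backward: the grad-of-grad of reshape is again only a shape
// change, so x_grad_grad is copied into out_grad_grad by reusing
// ReshapeGradKernel; the out_grad argument is not used in the computation.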
template <typename Context>
void ReshapeDoubleGradKernel(const Context& dev_ctx,
                             const DenseTensor& out_grad,
                             const DenseTensor& x_grad_grad,
                             DenseTensor* out_grad_grad) {
  ReshapeGradKernel(dev_ctx, x_grad_grad, out_grad_grad);
}

}  // namespace phi

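// Kernel registrations. The kernels only move bytes and never interpret
// element values, so they are registered for every dtype and layout.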
PD_REGISTER_GENERAL_KERNEL(reshape_grad,
                           CPU,
                           ALL_LAYOUT,
                           phi::ReshapeGradKernel<phi::CPUContext>,
                           ALL_DTYPE) {}
PD_REGISTER_GENERAL_KERNEL(reshape_double_grad,
                           CPU,
                           ALL_LAYOUT,
                           phi::ReshapeDoubleGradKernel<phi::CPUContext>,
                           ALL_DTYPE) {}

#if defined(PADDLE_WITH_CUDA) || defined(PADDLE_WITH_HIP)
PD_REGISTER_GENERAL_KERNEL(reshape_grad,
                           GPU,
                           ALL_LAYOUT,
                           phi::ReshapeGradKernel<phi::GPUContext>,
                           ALL_DTYPE) {}
PD_REGISTER_GENERAL_KERNEL(reshape_double_grad,
                           GPU,
                           ALL_LAYOUT,
                           phi::ReshapeDoubleGradKernel<phi::GPUContext>,
                           ALL_DTYPE) {}
#endif

#ifdef PADDLE_WITH_XPU
PD_REGISTER_GENERAL_KERNEL(reshape_grad,
                           XPU,
                           ALL_LAYOUT,
                           phi::ReshapeGradKernel<phi::XPUContext>,
                           ALL_DTYPE) {}
PD_REGISTER_GENERAL_KERNEL(reshape_double_grad,
                           XPU,
                           ALL_LAYOUT,
                           phi::ReshapeDoubleGradKernel<phi::XPUContext>,
                           ALL_DTYPE) {}
#endif
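
#if 0
// Illustrative sketch only, not part of this kernel and not Paddle API: a
// minimal standalone analogue of the generic ReshapeGradKernel above, using a
// hypothetical MiniTensor type, showing that the backward of reshape is a
// plain element copy followed by restoring the input's shape. Compiled out on
// purpose via #if 0.
#include <cassert>
#include <cstdint>
#include <functional>
#include <numeric>
#include <vector>

struct MiniTensor {
  std::vector<int64_t> dims;
  std::vector<float> data;
  int64_t numel() const {
    return std::accumulate(
        dims.begin(), dims.end(), int64_t{1}, std::multiplies<int64_t>());
  }
};

// Analogue of ReshapeGradKernel: x_grad already carries x's shape; copy the
// gradient elements from out_grad and keep that shape.
void MiniReshapeGrad(const MiniTensor& out_grad, MiniTensor* x_grad) {
  auto x_dims = x_grad->dims;    // save x's shape (in the real kernel the copy
                                 // also overwrites the destination's metadata)
  x_grad->data = out_grad.data;  // gradient of reshape is the identity on data
  x_grad->dims = x_dims;         // restore x's shape, mirroring Resize()
}

int main() {
  // Forward: x of shape [2, 3] was reshaped to out of shape [6].
  MiniTensor out_grad{{6}, {1, 2, 3, 4, 5, 6}};
  MiniTensor x_grad{{2, 3}, {}};
  MiniReshapeGrad(out_grad, &x_grad);
  assert(x_grad.dims.size() == 2 && x_grad.numel() == 6);
  return 0;
}
#endif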