// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/phi/core/compat/op_utils.h"

namespace phi {

// Maps the fluid `batch_norm` op to a phi kernel signature.
//
// When the op runs in pure inference mode (is_test, no global stats, no
// trainable statistics, no relu fusion) it dispatches to the lighter
// `batch_norm_infer` kernel, which needs neither the saved statistics nor
// the reserve-space outputs. Otherwise it maps to the full `batch_norm`
// kernel.
KernelSignature BatchNormOpArgumentMapping(const ArgumentMappingContext& ctx) {
  // `is_test` is assumed to always be present on this op; the remaining
  // attributes may be absent on older program descs, so fall back to false.
  bool is_test = paddle::any_cast<bool>(ctx.Attr("is_test"));
  bool use_global_stats =
      ctx.HasAttr("use_global_stats")
          ? paddle::any_cast<bool>(ctx.Attr("use_global_stats"))
          : false;
  bool trainable_statistics =
      ctx.HasAttr("trainable_statistics")
          ? paddle::any_cast<bool>(ctx.Attr("trainable_statistics"))
          : false;
  bool fuse_with_relu = ctx.HasAttr("fuse_with_relu")
                            ? paddle::any_cast<bool>(ctx.Attr("fuse_with_relu"))
                            : false;
  // Dispenable `MomentumTensor` is useless now
  if (is_test && !use_global_stats && !trainable_statistics &&
      !fuse_with_relu) {
    return KernelSignature("batch_norm_infer",
                           {"X", "Scale", "Bias", "Mean", "Variance"},
                           {"momentum", "epsilon", "data_layout"},
                           {"Y", "MeanOut", "VarianceOut"});
  } else {
    return KernelSignature("batch_norm",
                           {"X", "Scale", "Bias", "Mean", "Variance"},
                           {"momentum",
                            "epsilon",
                            "data_layout",
                            "is_test",
                            "use_global_stats",
                            "trainable_statistics",
                            "fuse_with_relu"},
                           {"Y",
                            "MeanOut",
                            "VarianceOut",
                            "SavedMean",
                            "SavedVariance",
                            "ReserveSpace"});
  }
}

// Maps the fluid `batch_norm_grad` op to the phi `batch_norm_grad` kernel.
// Inputs include the forward statistics (SavedMean/SavedVariance) and the
// cuDNN ReserveSpace; outputs are the gradients w.r.t. X, Scale and Bias.
KernelSignature BatchNormGradOpArgumentMapping(
    const ArgumentMappingContext& ctx) {
  return KernelSignature("batch_norm_grad",
                         {
                             "X",
                             "Scale",
                             "Bias",
                             "Mean",
                             "Variance",
                             "SavedMean",
                             "SavedVariance",
                             "ReserveSpace",
                             "Y@GRAD",
                         },
                         {"momentum",
                          "epsilon",
                          "data_layout",
                          "is_test",
                          "use_global_stats",
                          "trainable_statistics",
                          "fuse_with_relu"},
                         {"X@GRAD", "Scale@GRAD", "Bias@GRAD"});
}

// Maps the fluid `batch_norm_grad_grad` (double-grad) op to the phi
// `batch_norm_grad_grad` kernel. DDX/DDScale/DDBias are the incoming
// second-order gradients; DX/DScale/DDY are produced.
KernelSignature BatchNormGradGradOpArgumentMapping(
    const ArgumentMappingContext& ctx) {
  return KernelSignature("batch_norm_grad_grad",
                         {"X",
                          "Scale",
                          "Mean",
                          "Variance",
                          "SavedMean",
                          "SavedVariance",
                          "DY",
                          "DDX",
                          "DDScale",
                          "DDBias"},
                         {"momentum",
                          "epsilon",
                          "data_layout",
                          "is_test",
                          "use_global_stats",
                          "trainable_statistics",
                          "fuse_with_relu"},
                         {"DX", "DScale", "DDY"});
}

}  // namespace phi

// Register the fluid-op-name -> phi-argument-mapping functions so the
// framework can translate legacy op descs into phi kernel signatures.
PD_REGISTER_ARG_MAPPING_FN(batch_norm, phi::BatchNormOpArgumentMapping);
PD_REGISTER_ARG_MAPPING_FN(batch_norm_grad,
                           phi::BatchNormGradOpArgumentMapping);
PD_REGISTER_ARG_MAPPING_FN(batch_norm_grad_grad,
                           phi::BatchNormGradGradOpArgumentMapping);