batch_norm_sig.cc
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/phi/core/compat/op_utils.h"

namespace phi {

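// Chooses the PHI kernel for the fluid `batch_norm` op: maps to the
// `batch_norm_infer` kernel (fewer outputs) when the op runs in pure
// inference mode, and to the full `batch_norm` kernel otherwise.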
KernelSignature BatchNormOpArgumentMapping(const ArgumentMappingContext& ctx) {
  bool is_test = paddle::any_cast<bool>(ctx.Attr("is_test"));
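  // The following attributes are optional on some op definitions, so fall
  // back to false when they are not declared.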
  bool use_global_stats =
      ctx.HasAttr("use_global_stats")
          ? paddle::any_cast<bool>(ctx.Attr("use_global_stats"))
          : false;
  bool trainable_statistics =
      ctx.HasAttr("trainable_statistics")
          ? paddle::any_cast<bool>(ctx.Attr("trainable_statistics"))
          : false;
  bool fuse_with_relu = ctx.HasAttr("fuse_with_relu")
                            ? paddle::any_cast<bool>(ctx.Attr("fuse_with_relu"))
                            : false;
  // The dispensable `MomentumTensor` input is unused now
  if (is_test && !use_global_stats && !trainable_statistics &&
      !fuse_with_relu) {
    return KernelSignature("batch_norm_infer",
                           {"X", "Mean", "Variance", "Scale", "Bias"},
                           {"momentum", "epsilon", "data_layout"},
                           {"Y", "MeanOut", "VarianceOut"});
  } else {
    return KernelSignature("batch_norm",
                           {"X", "Mean", "Variance", "Scale", "Bias"},
                           {"is_test",
                            "momentum",
                            "epsilon",
                            "data_layout",
                            "use_global_stats",
                            "trainable_statistics"},
                           {"Y",
                            "MeanOut",
                            "VarianceOut",
                            "SavedMean",
                            "SavedVariance",
                            "ReserveSpace"});
  }
}

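// Maps the fluid `batch_norm_grad` op to the PHI `batch_norm_grad` kernel,
// forwarding the forward-pass inputs, the saved statistics, and the output
// gradient `Y@GRAD`.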
KernelSignature BatchNormGradOpArgumentMapping(
    const ArgumentMappingContext& ctx) {
  return KernelSignature("batch_norm_grad",
                         {
                             "X",
                             "Scale",
                             "Bias",
                             "Mean",
                             "Variance",
                             "SavedMean",
                             "SavedVariance",
                             "ReserveSpace",
                             "Y@GRAD",
                         },
                         {"momentum",
                          "epsilon",
                          "data_layout",
                          "is_test",
                          "use_global_stats",
                          "trainable_statistics"},
                         {"X@GRAD", "Scale@GRAD", "Bias@GRAD"});
}

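// Maps the fluid `batch_norm_grad_grad` op to the PHI `batch_norm_double_grad`
// kernel used for second-order gradients.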
KernelSignature BatchNormGradGradOpArgumentMapping(
    const ArgumentMappingContext& ctx) {
  return KernelSignature("batch_norm_double_grad",
                         {"X",
                          "Scale",
                          "Mean",
                          "Variance",
                          "SavedMean",
                          "SavedVariance",
                          "DY",
                          "DDX",
                          "DDScale",
                          "DDBias"},
                         {"momentum",
                          "epsilon",
                          "data_layout",
                          "is_test",
                          "use_global_stats",
                          "trainable_statistics"},
                         {"DX", "DScale", "DDY"});
}

}  // namespace phi

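// Register the mapping functions so the framework can translate the legacy
// fluid ops into the PHI kernel signatures defined above.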
PD_REGISTER_ARG_MAPPING_FN(batch_norm, phi::BatchNormOpArgumentMapping);
PD_REGISTER_ARG_MAPPING_FN(batch_norm_grad,
                           phi::BatchNormGradOpArgumentMapping);
PD_REGISTER_ARG_MAPPING_FN(batch_norm_grad_grad,
                           phi::BatchNormGradGradOpArgumentMapping);