fc_compute.cc
// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/lite/kernels/host/fc_compute.h"
#include <Eigen/Core>
#include "paddle/fluid/lite/core/op_registry.h"
#include "paddle/fluid/lite/core/type_system.h"

namespace paddle {
namespace lite {
namespace kernels {
namespace host {

// NOTE: should use a pure std C++ implementation.
void FcCompute::Run() {
  auto& param = this->Param<operators::FcParam>();

  CHECK_GE(param.input->dims().size(), 2UL);
  CHECK_EQ(param.output->dims().size(), 2UL);

  fc_compute_eigen(
      param.input->data<float>(),  // x
      param.input->dims().Slice(0, param.in_num_col_dims).production(),
      param.input->dims()
          .Slice(param.in_num_col_dims, param.input->dims().size())
          .production(),
      param.w->data<float>(),     // w
      param.w->dims()[1],         // w_w
      param.w->dims()[0],         // w_h
      param.bias->data<float>(),  // b
      param.output->mutable_data<float>());
}
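// A minimal sketch of the pure std C++ path the NOTE above asks for. The
// helper below (fc_compute_naive) is hypothetical, not existing Paddle-Lite
// API, and nothing in this kernel calls it; it assumes row-major layouts
// with x: [x_h, x_w], w: [w_h, w_w] where w_h == x_w, and out: [x_h, w_w].
static void fc_compute_naive(const float* x, int x_h, int x_w,
                             const float* w, int w_w, int w_h,
                             const float* b, float* out) {
  CHECK_EQ(x_w, w_h);  // inner dimensions must match
  for (int r = 0; r < x_h; ++r) {      // each output row
    for (int c = 0; c < w_w; ++c) {    // each output column
      float sum = b ? b[c] : 0.f;      // start from the bias term
      for (int k = 0; k < x_w; ++k) {
        sum += x[r * x_w + k] * w[k * w_w + c];
      }
      out[r * w_w + c] = sum;
    }
  }
}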

// TargetType FcCompute::target() const { return TARGET(kHost); }

// PrecisionType FcCompute::precision() const { return PRECISION(kFloat); }

}  // namespace host
}  // namespace kernels
}  // namespace lite
}  // namespace paddle

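// Register FcCompute as the host-side kernel for the "fc" op at float
// precision and NCHW layout; Input, Bias, W and Out are all bound to
// FP32 NCHW tensors on the host target.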
REGISTER_LITE_KERNEL(fc, kHost, kFloat, kNCHW,
                     paddle::lite::kernels::host::FcCompute, def)
    .BindInput("Input",
               {paddle::lite::Type::Get<paddle::lite::TensorFp32NCHWTy>(
                   TARGET(kHost))})
    .BindInput("Bias", {paddle::lite::Type::Get<paddle::lite::TensorFp32NCHWTy>(
                           TARGET(kHost))})
    .BindInput("W", {paddle::lite::Type::Get<paddle::lite::TensorFp32NCHWTy>(
                        TARGET(kHost))})
    .BindOutput("Out", {paddle::lite::Type::Get<paddle::lite::TensorFp32NCHWTy>(
                           TARGET(kHost))})
    .Finalize();