From 542008c0213e7ea96c093a8b90e19f329c49ead3 Mon Sep 17 00:00:00 2001
From: jameswu2014 <545426914@qq.com>
Date: Fri, 10 May 2019 13:16:30 +0800
Subject: [PATCH] io+softmax update for FPGA V1 track (#1613)

* V2-conv-hellocase pass & V1 verify-pass

* V1 paralvl equal to 2

* softmax channel =2 bug

* softmaxbug update2

* io update2
---
 src/io/paddle_inference_api.h                 |  1 +
 .../kernel/fpga/V1/softmax_kernel.cpp         | 32 +++++++++----------
 2 files changed, 17 insertions(+), 16 deletions(-)

diff --git a/src/io/paddle_inference_api.h b/src/io/paddle_inference_api.h
index 1771278308..e01b5abb78 100644
--- a/src/io/paddle_inference_api.h
+++ b/src/io/paddle_inference_api.h
@@ -33,6 +33,7 @@ namespace paddle_mobile {
 namespace fpga {
 
 int open_device();
+int close_device();
 void* fpga_malloc(size_t size);
 void fpga_free(void* ptr);
 
diff --git a/src/operators/kernel/fpga/V1/softmax_kernel.cpp b/src/operators/kernel/fpga/V1/softmax_kernel.cpp
index ba86787c64..ff5ff5380f 100644
--- a/src/operators/kernel/fpga/V1/softmax_kernel.cpp
+++ b/src/operators/kernel/fpga/V1/softmax_kernel.cpp
@@ -47,7 +47,22 @@ bool SoftmaxKernel<FPGA, float>::Init(SoftmaxParam<FPGA> *param) {
   input->Resize(framework::make_ddim(dims));
   float_input->Resize(framework::make_ddim(dims));
 
-  if (channel != 2) {  // Use CPU
+  if (channel == 2 && input->type() == type_id<half>()) {  // Use FPGA
+    fpga::format_fp16_ofm(out);
+    fpga::BypassArgs args = {fpga::DATA_TYPE_FP16};
+    args.input_layout_type = fpga::LAYOUT_HWC;
+    args.output_layout_type = fpga::LAYOUT_CHW;
+    args.input_data_type = fpga::DATA_TYPE_FP16;
+    args.output_data_type = fpga::DATA_TYPE_FP16;
+    args.image.address = input_ptr;
+    args.image.height = (uint32_t)input->dims()[1];
+    args.image.width = (uint32_t)input->dims()[2];
+    args.image.channels = (uint32_t)input->dims()[3];
+    args.output.address = out->data<half>();
+    args.output.scale_address = out->scale;
+    args.output.activation.activation_type = fpga::SOFTMAX;
+    param->SetFpgaArgs(args);
+  } else {  // Use CPU
     out->Resize(framework::make_ddim(dims));
     out->mutable_data<float>(framework::make_ddim(dims));
     float_input->init(type_id<float>().hash_code());
@@ -68,21 +83,6 @@ bool SoftmaxKernel<FPGA, float>::Init(SoftmaxParam<FPGA> *param) {
     args.output.scale_address = float_input->scale;
     param->SetFloatInput(float_input);
     param->SetFpgaArgs(args);
-  } else {  // Use FPGA
-    fpga::format_fp16_ofm(out);
-    fpga::BypassArgs args = {fpga::DATA_TYPE_FP16};
-    args.input_layout_type = fpga::LAYOUT_HWC;
-    args.output_layout_type = fpga::LAYOUT_CHW;
-    args.input_data_type = fpga::DATA_TYPE_FP16;
-    args.output_data_type = fpga::DATA_TYPE_FP16;
-    args.image.address = input_ptr;
-    args.image.height = (uint32_t)input->dims()[1];
-    args.image.width = (uint32_t)input->dims()[2];
-    args.image.channels = (uint32_t)input->dims()[3];
-    args.output.address = out->data<half>();
-    args.output.scale_address = out->scale;
-    args.output.activation.activation_type = fpga::SOFTMAX;
-    param->SetFpgaArgs(args);
   }
 
   return true;
--
GitLab
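
Note on the io change (not part of the patch itself): the hunk above only adds the close_device() declaration to paddle_inference_api.h. The caller-side sketch below is a minimal illustration, assuming close_device() is intended to mirror open_device() at the end of an FPGA session; the include path, buffer size, and call order are illustrative assumptions, not code from this patch.

#include "io/paddle_inference_api.h"  // path assumed relative to src/

int main() {
  // Acquire the FPGA device before any FPGA work (existing API).
  paddle_mobile::fpga::open_device();

  // Allocate a device-visible buffer; 1024 bytes is an arbitrary example size.
  void* buf = paddle_mobile::fpga::fpga_malloc(1024);

  // ... build a predictor and run inference here ...

  paddle_mobile::fpga::fpga_free(buf);

  // Release the device when the session ends (declaration added by this patch).
  paddle_mobile::fpga::close_device();
  return 0;
}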
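
Note on the softmax change (illustrative, not part of the patch): before this change the FPGA bypass path was taken for every 2-channel input; afterwards it additionally requires the input tensor to already be fp16, and everything else, including 2-channel float input, falls back to the CPU softmax path. The sketch below is a hypothetical standalone restatement of that dispatch rule; use_fpga_softmax and the boolean flag are invented for illustration, the real kernel inspects the tensor's runtime type.

#include <cstdio>

// Hypothetical helper restating the dispatch rule from SoftmaxKernel::Init().
static bool use_fpga_softmax(int channel, bool input_is_fp16) {
  // FPGA bypass with the SOFTMAX activation is used only for 2-channel,
  // fp16 inputs; all other shapes and types run the CPU fallback.
  return channel == 2 && input_is_fp16;
}

int main() {
  std::printf("2ch fp16 -> %s\n", use_fpga_softmax(2, true) ? "FPGA" : "CPU");
  std::printf("2ch fp32 -> %s\n", use_fpga_softmax(2, false) ? "FPGA" : "CPU");
  std::printf("4ch fp16 -> %s\n", use_fpga_softmax(4, true) ? "FPGA" : "CPU");
  return 0;
}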