提交 2389d3f1 编写于 作者: liuqi

Rename fc to fully_connected to keep consistency.

上级 07927d5a
@@ -26,12 +26,12 @@ struct FullyConnectedBase {
const float prelu_alpha_; const float prelu_alpha_;
}; };
template <DeviceType D, typename T> template<DeviceType D, typename T>
struct FullyConnectedFunctor : FullyConnectedBase{ struct FullyConnectedFunctor : FullyConnectedBase {
FullyConnectedFunctor(const ActivationType activation, FullyConnectedFunctor(const ActivationType activation,
const float relux_max_limit, const float relux_max_limit,
const float prelu_alpha) : const float prelu_alpha) :
FullyConnectedBase(activation, relux_max_limit, prelu_alpha){} FullyConnectedBase(activation, relux_max_limit, prelu_alpha) {}
void operator()(const Tensor *input, void operator()(const Tensor *input,
const Tensor *weight, const Tensor *weight,
@@ -74,13 +74,12 @@ struct FullyConnectedFunctor : FullyConnectedBase {
} }
}; };
template<typename T>
template <typename T> struct FullyConnectedFunctor<DeviceType::OPENCL, T> : FullyConnectedBase {
struct FullyConnectedFunctor<DeviceType::OPENCL, T> : FullyConnectedBase{
FullyConnectedFunctor(const ActivationType activation, FullyConnectedFunctor(const ActivationType activation,
const float relux_max_limit, const float relux_max_limit,
const float prelu_alpha) : const float prelu_alpha) :
FullyConnectedBase(activation, relux_max_limit, prelu_alpha){} FullyConnectedBase(activation, relux_max_limit, prelu_alpha) {}
void operator()(const Tensor *input, void operator()(const Tensor *input,
const Tensor *weight, const Tensor *weight,
......
#include <common.h> #include <common.h>
// output = weight * input + bias // output = weight * input + bias
__kernel void fc(__read_only image2d_t input, __kernel void fully_connected(__read_only image2d_t input,
__read_only image2d_t weight, __read_only image2d_t weight,
#ifdef BIAS #ifdef BIAS
__read_only image2d_t bias, __read_only image2d_t bias,
......
@@ -32,8 +32,8 @@ void FullyConnectedFunctor<DeviceType::OPENCL, T>::operator()(
auto runtime = OpenCLRuntime::Global(); auto runtime = OpenCLRuntime::Global();
std::set<std::string> built_options; std::set<std::string> built_options;
auto dt = DataTypeToEnum<T>::value; auto dt = DataTypeToEnum<T>::value;
std::string kernel_name = MACE_OBFUSCATE_SYMBOL("fc"); std::string kernel_name = MACE_OBFUSCATE_SYMBOL("fully_connected");
built_options.emplace("-Dfc=" + kernel_name); built_options.emplace("-Dfully_connected=" + kernel_name);
built_options.emplace("-DDATA_TYPE=" + DtToUpstreamCLDt(dt)); built_options.emplace("-DDATA_TYPE=" + DtToUpstreamCLDt(dt));
built_options.emplace("-DCMD_DATA_TYPE=" + DtToUpstreamCLCMDDt(dt)); built_options.emplace("-DCMD_DATA_TYPE=" + DtToUpstreamCLCMDDt(dt));
if (bias != nullptr) { if (bias != nullptr) {
@@ -60,7 +60,7 @@ void FullyConnectedFunctor<DeviceType::OPENCL, T>::operator()(
default: default:
LOG(FATAL) << "Unknown activation type: " << activation_; LOG(FATAL) << "Unknown activation type: " << activation_;
} }
kernel_ = runtime->BuildKernel("fc", kernel_name, built_options); kernel_ = runtime->BuildKernel("fully_connected", kernel_name, built_options);
uint32_t idx = 0; uint32_t idx = 0;
kernel_.setArg(idx++, kernel_.setArg(idx++,
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册