diff --git a/modules/dnn/include/opencv2/dnn/all_layers.hpp b/modules/dnn/include/opencv2/dnn/all_layers.hpp index b8a0c3ed40ea01a14ad81ef6614b23c5e4feba00..31fda7e456b419300df7d38cdc95fa1969d6ab30 100644 --- a/modules/dnn/include/opencv2/dnn/all_layers.hpp +++ b/modules/dnn/include/opencv2/dnn/all_layers.hpp @@ -422,7 +422,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN class CV_EXPORTS ChannelsPReLULayer : public ActivationLayer { public: - static Ptr<ChannelsPReLULayer> create(const LayerParams& params); + static Ptr<Layer> create(const LayerParams& params); }; class CV_EXPORTS ELULayer : public ActivationLayer diff --git a/modules/dnn/src/caffe/caffe_importer.cpp b/modules/dnn/src/caffe/caffe_importer.cpp index 70d439af4080e16f67312c7d0c16a3eae7aa3b02..1a22882bf819ba22229afc2899179798b25ca4a5 100644 --- a/modules/dnn/src/caffe/caffe_importer.cpp +++ b/modules/dnn/src/caffe/caffe_importer.cpp @@ -216,7 +216,7 @@ public: shape.push_back((int)_shape.dim(i)); } else - CV_Error(Error::StsError, "Unknown shape of input blob"); + shape.resize(1, 1); // Is a scalar. 
} void blobFromProto(const caffe::BlobProto &pbBlob, cv::Mat &dstBlob) @@ -274,9 +274,9 @@ public: struct BlobNote { BlobNote(const std::string &_name, int _layerId, int _outNum) : - name(_name.c_str()), layerId(_layerId), outNum(_outNum) {} + name(_name), layerId(_layerId), outNum(_outNum) {} - const char *name; + std::string name; int layerId, outNum; }; diff --git a/modules/dnn/src/init.cpp b/modules/dnn/src/init.cpp index 13f687fb26f3ecdb1f5c5446897f6ec019b9e7e3..ec1f0f4d7b6a34b41b2c3f89f2f835fc9b0f02e0 100644 --- a/modules/dnn/src/init.cpp +++ b/modules/dnn/src/init.cpp @@ -97,6 +97,7 @@ void initializeLayerFactory() CV_DNN_REGISTER_LAYER_CLASS(ReLU, ReLULayer); CV_DNN_REGISTER_LAYER_CLASS(ReLU6, ReLU6Layer); CV_DNN_REGISTER_LAYER_CLASS(ChannelsPReLU, ChannelsPReLULayer); + CV_DNN_REGISTER_LAYER_CLASS(PReLU, ChannelsPReLULayer); CV_DNN_REGISTER_LAYER_CLASS(Sigmoid, SigmoidLayer); CV_DNN_REGISTER_LAYER_CLASS(TanH, TanHLayer); CV_DNN_REGISTER_LAYER_CLASS(ELU, ELULayer); diff --git a/modules/dnn/src/layers/elementwise_layers.cpp b/modules/dnn/src/layers/elementwise_layers.cpp index 027eda4cc2951d8dca96ba2791f53f9cf3d05a09..eb93363085473fa3b11b8e365cc7271e0f1ecd92 100644 --- a/modules/dnn/src/layers/elementwise_layers.cpp +++ b/modules/dnn/src/layers/elementwise_layers.cpp @@ -754,8 +754,15 @@ Ptr<PowerLayer> PowerLayer::create(const LayerParams& params) return l; } -Ptr<ChannelsPReLULayer> ChannelsPReLULayer::create(const LayerParams& params) +Ptr<Layer> ChannelsPReLULayer::create(const LayerParams& params) { + CV_Assert(params.blobs.size() == 1); + if (params.blobs[0].total() == 1) + { + LayerParams reluParams = params; + reluParams.set("negative_slope", params.blobs[0].at<float>(0)); + return ReLULayer::create(reluParams); + } Ptr<ChannelsPReLULayer> l(new ElementWiseLayer<ChannelsPReLUFunctor>(ChannelsPReLUFunctor(params.blobs[0]))); l->setParamsFrom(params); diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp index 75861c96acdeb3d0b5d7ed9c3480ab579aabb27e..1e583b33c8f60a953f4f2a04bf3f2f74b2a0b4ac 100644 --- 
a/modules/dnn/test/test_layers.cpp +++ b/modules/dnn/test/test_layers.cpp @@ -279,6 +279,11 @@ TEST(Layer_Test_Eltwise, Accuracy) testLayerUsingCaffeModels("layer_eltwise"); } +TEST(Layer_Test_PReLU, Accuracy) +{ + testLayerUsingCaffeModels("layer_prelu", DNN_TARGET_CPU, true); +} + //template<typename XMat> //static void test_Layer_Concat() //{