Commit 0be1f4a

Merge pull request opencv#9811 from dkurt:prelu_with_shared_channels
2 parents: 556768e + eabf728
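
This merge adds support for PReLU activations whose slope is shared across all channels: the Caffe importer now accepts dimensionless (scalar) weight blobs, "PReLU" is registered as a layer type name, and ChannelsPReLULayer::create() folds a single shared slope into an equivalent ReLU with a negative_slope parameter.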

5 files changed (+18 −5 lines)

modules/dnn/include/opencv2/dnn/all_layers.hpp

Lines changed: 1 addition & 1 deletion
@@ -422,7 +422,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
     class CV_EXPORTS ChannelsPReLULayer : public ActivationLayer
     {
     public:
-        static Ptr<ChannelsPReLULayer> create(const LayerParams& params);
+        static Ptr<Layer> create(const LayerParams& params);
     };
 
     class CV_EXPORTS ELULayer : public ActivationLayer
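
The return type widens from Ptr<ChannelsPReLULayer> to Ptr<Layer> because the factory can now hand back a ReLULayer (see the elementwise_layers.cpp hunk below), so only the common base type is a valid return. A minimal sketch of what this means for callers; the LayerParams contents are hypothetical:

    #include <opencv2/dnn.hpp>
    using namespace cv;
    using namespace cv::dnn;

    void callerSketch(const LayerParams& params)
    {
        Ptr<Layer> layer = ChannelsPReLULayer::create(params);
        // Callers that relied on the derived type must now downcast; the
        // cast comes back empty when a shared-slope PReLU was folded into
        // a ReLU.
        Ptr<ChannelsPReLULayer> prelu = layer.dynamicCast<ChannelsPReLULayer>();
        if (prelu.empty())
        {
            // Single shared slope: the object is actually a ReLULayer.
        }
    }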

modules/dnn/src/caffe/caffe_importer.cpp

Lines changed: 3 additions & 3 deletions
@@ -216,7 +216,7 @@ class CaffeImporter : public Importer
                 shape.push_back((int)_shape.dim(i));
         }
         else
-            CV_Error(Error::StsError, "Unknown shape of input blob");
+            shape.resize(1, 1);  // Is a scalar.
     }
 
     void blobFromProto(const caffe::BlobProto &pbBlob, cv::Mat &dstBlob)
@@ -274,9 +274,9 @@ class CaffeImporter : public Importer
     struct BlobNote
     {
         BlobNote(const std::string &_name, int _layerId, int _outNum) :
-            name(_name.c_str()), layerId(_layerId), outNum(_outNum) {}
+            name(_name), layerId(_layerId), outNum(_outNum) {}
 
-        const char *name;
+        std::string name;
         int layerId, outNum;
     };
 
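
Two fixes here. First, a Caffe blob proto that carries no dimensions is now read as a scalar instead of raising an error, which is evidently how the shared PReLU slope arrives from a caffemodel. Second, BlobNote now owns its name: caching the c_str() of a string whose storage can move or be destroyed leaves a dangling pointer. An illustrative sketch of that hazard (not OpenCV code):

    #include <string>
    #include <vector>

    struct Note
    {
        const char *name;  // points into someone else's buffer
    };

    void danglingSketch()
    {
        std::vector<std::string> names;
        names.push_back("conv1");
        Note n = { names.back().c_str() };  // borrows the string's buffer
        names.push_back("prelu1");          // may reallocate and move the strings
        // n.name can now dangle; storing std::string by value avoids this.
        (void)n;
    }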

modules/dnn/src/init.cpp

Lines changed: 1 addition & 0 deletions
@@ -97,6 +97,7 @@ void initializeLayerFactory()
     CV_DNN_REGISTER_LAYER_CLASS(ReLU, ReLULayer);
     CV_DNN_REGISTER_LAYER_CLASS(ReLU6, ReLU6Layer);
     CV_DNN_REGISTER_LAYER_CLASS(ChannelsPReLU, ChannelsPReLULayer);
+    CV_DNN_REGISTER_LAYER_CLASS(PReLU, ChannelsPReLULayer);
     CV_DNN_REGISTER_LAYER_CLASS(Sigmoid, SigmoidLayer);
     CV_DNN_REGISTER_LAYER_CLASS(TanH, TanHLayer);
     CV_DNN_REGISTER_LAYER_CLASS(ELU, ELULayer);
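
Registering PReLU alongside ChannelsPReLU lets importers resolve the layer by its Caffe type name. A quick sketch of what the alias enables; the slope values here are made up:

    #include <opencv2/dnn.hpp>
    using namespace cv;
    using namespace cv::dnn;

    void aliasSketch()
    {
        LayerParams params;
        // Hypothetical per-channel slopes; a non-scalar blob keeps the
        // layer a genuine ChannelsPReLU.
        params.blobs.push_back((Mat_<float>(3, 1) << 0.1f, 0.2f, 0.3f));
        // Both names now resolve to the same factory.
        Ptr<Layer> byAlias = LayerFactory::createLayerInstance("PReLU", params);
        Ptr<Layer> byName  = LayerFactory::createLayerInstance("ChannelsPReLU", params);
        CV_Assert(!byAlias.empty() && !byName.empty());
    }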

modules/dnn/src/layers/elementwise_layers.cpp

Lines changed: 8 additions & 1 deletion
@@ -754,8 +754,15 @@ Ptr<PowerLayer> PowerLayer::create(const LayerParams& params)
     return l;
 }
 
-Ptr<ChannelsPReLULayer> ChannelsPReLULayer::create(const LayerParams& params)
+Ptr<Layer> ChannelsPReLULayer::create(const LayerParams& params)
 {
+    CV_Assert(params.blobs.size() == 1);
+    if (params.blobs[0].total() == 1)
+    {
+        LayerParams reluParams = params;
+        reluParams.set("negative_slope", params.blobs[0].at<float>(0));
+        return ReLULayer::create(reluParams);
+    }
     Ptr<ChannelsPReLULayer> l(new ElementWiseLayer<ChannelsPReLUFunctor>(ChannelsPReLUFunctor(params.blobs[0])));
     l->setParamsFrom(params);
 
modules/dnn/test/test_layers.cpp

Lines changed: 5 additions & 0 deletions
@@ -279,6 +279,11 @@ TEST(Layer_Test_Eltwise, Accuracy)
     testLayerUsingCaffeModels("layer_eltwise");
 }
 
+TEST(Layer_Test_PReLU, Accuracy)
+{
+    testLayerUsingCaffeModels("layer_prelu", DNN_TARGET_CPU, true);
+}
+
 //template<typename XMat>
 //static void test_Layer_Concat()
 //{
