Commit 1ce9ffc

Merge pull request opencv#9285 from arrybn:issue_9223
2 parents: da0a36c + 8d6b8b4

File tree

5 files changed: +69 -0


modules/dnn/include/opencv2/dnn/all_layers.hpp
Lines changed: 6 additions & 0 deletions

@@ -349,6 +349,12 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
         static Ptr<ChannelsPReLULayer> create(const LayerParams& params);
     };

+    class CV_EXPORTS ELULayer : public ActivationLayer
+    {
+    public:
+        static Ptr<ELULayer> create(const LayerParams &params);
+    };
+
     class CV_EXPORTS TanHLayer : public ActivationLayer
     {
     public:
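For context, ELU (exponential linear unit) computes f(x) = x for x >= 0 and alpha * (exp(x) - 1) otherwise; this commit hard-codes alpha = 1. A minimal standalone sketch of the same activation, independent of the OpenCV classes above (the helper name is ours, not the library's):

#include <cmath>

// Hypothetical free-function version of the activation this commit adds;
// alpha is fixed at 1, matching ELUFunctor in elementwise_layers.cpp below.
static inline float elu(float x)
{
    return x >= 0.f ? x : std::exp(x) - 1.f;
}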

modules/dnn/src/init.cpp
Lines changed: 1 addition & 0 deletions

@@ -96,6 +96,7 @@ void initializeLayerFactory()
     CV_DNN_REGISTER_LAYER_CLASS(ChannelsPReLU, ChannelsPReLULayer);
     CV_DNN_REGISTER_LAYER_CLASS(Sigmoid, SigmoidLayer);
     CV_DNN_REGISTER_LAYER_CLASS(TanH, TanHLayer);
+    CV_DNN_REGISTER_LAYER_CLASS(ELU, ELULayer);
     CV_DNN_REGISTER_LAYER_CLASS(BNLL, BNLLLayer);
     CV_DNN_REGISTER_LAYER_CLASS(AbsVal, AbsLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Power, PowerLayer);
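Registering under the type string "ELU" is what lets the TensorFlow importer (and user code) instantiate the layer by name. A rough sketch of what the registration enables, assuming the public LayerFactory::createLayerInstance API behaves here as elsewhere in the module (treat it as illustrative, not verified against this branch):

#include <opencv2/dnn.hpp>

int main()
{
    cv::dnn::LayerParams params;
    // "ELU" now resolves to ELULayer via the factory registration above.
    cv::Ptr<cv::dnn::Layer> elu =
        cv::dnn::LayerFactory::createLayerInstance("ELU", params);
    return elu.empty() ? 1 : 0;
}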

modules/dnn/src/layers/elementwise_layers.cpp
Lines changed: 37 additions & 0 deletions

@@ -302,6 +302,35 @@ struct SigmoidFunctor
     int64 getFLOPSPerElement() const { return 3; }
 };

+struct ELUFunctor
+{
+    typedef ELULayer Layer;
+
+    explicit ELUFunctor() {}
+
+    void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
+    {
+        for( int cn = cn0; cn < cn1; cn++, srcptr += planeSize, dstptr += planeSize )
+        {
+            for(int i = 0; i < len; i++ )
+            {
+                float x = srcptr[i];
+                dstptr[i] = x >= 0.f ? x : exp(x) - 1;
+            }
+        }
+    }
+
+#ifdef HAVE_HALIDE
+    void attachHalide(const Halide::Expr& input, Halide::Func& top)
+    {
+        Halide::Var x("x"), y("y"), c("c"), n("n");
+        top(x, y, c, n) = select(input >= 0.0f, input, exp(input) - 1);
+    }
+#endif // HAVE_HALIDE
+
+    int64 getFLOPSPerElement() const { return 2; }
+};
+
 struct AbsValFunctor
 {
     typedef AbsLayer Layer;

@@ -504,6 +533,14 @@ Ptr<SigmoidLayer> SigmoidLayer::create(const LayerParams& params)
     return l;
 }

+Ptr<ELULayer> ELULayer::create(const LayerParams& params)
+{
+    Ptr<ELULayer> l(new ElementWiseLayer<ELUFunctor>(ELUFunctor()));
+    l->setParamsFrom(params);
+
+    return l;
+}
+
 Ptr<AbsLayer> AbsLayer::create(const LayerParams& params)
 {
     Ptr<AbsLayer> l(new ElementWiseLayer<AbsValFunctor>());
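apply() walks channel planes [cn0, cn1), each planeSize elements long, and evaluates ELU elementwise; the Halide branch expresses the same select. A quick self-contained sanity check of that arithmetic (a copy of the inner loop, not OpenCV code):

#include <cassert>
#include <cmath>
#include <cstdio>

int main()
{
    // One plane of four values, mirroring the inner loop of ELUFunctor::apply.
    const float src[4] = { -2.f, -0.5f, 0.f, 3.f };
    float dst[4];
    for (int i = 0; i < 4; i++)
    {
        float x = src[i];
        dst[i] = x >= 0.f ? x : std::exp(x) - 1.f;  // ELU with alpha = 1
    }
    assert(dst[2] == 0.f && dst[3] == 3.f);  // identity for x >= 0
    std::printf("elu(-2) = %.4f, elu(-0.5) = %.4f\n", dst[0], dst[1]);  // ~ -0.8647, ~ -0.3935
    return 0;
}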

modules/dnn/src/tensorflow/tf_importer.cpp
Lines changed: 7 additions & 0 deletions

@@ -677,6 +677,13 @@ void TFImporter::populateNet(Net dstNet)

             connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, layer.input_size());
         }
+        else if (type == "Elu")
+        {
+            int id = dstNet.addLayer(name, "ELU", layerParams);
+            layer_id[name] = id;
+
+            connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, layer.input_size());
+        }
         else if (type == "MaxPool")
         {
            layerParams.set("pool", "max");
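With this mapping in place, a frozen TensorFlow graph containing an Elu op imports onto the newly registered ELU layer. A usage sketch built on the same importer API the test below uses (the model path is a placeholder):

#include <opencv2/dnn.hpp>

int main()
{
    using namespace cv::dnn;
    Net net;
    // "model_with_elu.pb" is a placeholder for a frozen TF graph with an Elu node.
    Ptr<Importer> importer = createTensorflowImporter("model_with_elu.pb");
    if (!importer.empty())
        importer->populateNet(net);
    return net.empty() ? 1 : 0;
}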

modules/dnn/test/test_layers.cpp
Lines changed: 18 additions & 0 deletions

@@ -268,11 +268,29 @@ static void test_Reshape_Split_Slice_layers()

     normAssert(input, output);
 }
+
 TEST(Layer_Test_Reshape_Split_Slice, Accuracy)
 {
     test_Reshape_Split_Slice_layers();
 }

+TEST(Layer_Conv_Elu, Accuracy)
+{
+    Net net;
+    {
+        Ptr<Importer> importer = createTensorflowImporter(_tf("layer_elu_model.pb"));
+        ASSERT_TRUE(importer != NULL);
+        importer->populateNet(net);
+    }
+    Mat inp = blobFromNPY(_tf("layer_elu_in.npy"));
+    Mat ref = blobFromNPY(_tf("layer_elu_out.npy"));
+
+    net.setInput(inp, "input");
+    Mat out = net.forward();
+
+    normAssert(ref, out);
+}
+
 class Layer_LSTM_Test : public ::testing::Test
 {
 public:
