@@ -430,7 +430,7 @@ TEST_P(ReLU, Accuracy)
430
430
}
431
431
432
432
INSTANTIATE_TEST_CASE_P (Layer_Test_Halide, ReLU, Values(
433
- /* negative slope*/ 2.0f, 0.3f, -0.1f
433
+ /* negative slope*/ 2.0f, 0.3f, -0.1f, 0.0f
434
434
));
435
435
436
436
typedef TestWithParam<tuple<std::string> > NoParamActivation;
@@ -515,12 +515,7 @@ TEST_P(Concat, Accuracy)
515
515
516
516
Net net;
517
517
518
- LayerParams concatParam;
519
- concatParam.type = "Concat";
520
- concatParam.name = "testLayer";
521
- int concatId = net.addLayer (concatParam.name , concatParam.type , concatParam);
522
- net.connect (0 , 0 , concatId, 0 );
523
-
518
+ std::vector<int > convLayerIds (numChannels.channels );
524
519
for (int i = 0 , n = numChannels.channels ; i < n; ++i)
525
520
{
526
521
if (!numChannels[i])
@@ -540,9 +535,18 @@ TEST_P(Concat, Accuracy)
540
535
convParam.name = ss.str ();
541
536
convParam.blobs .push_back (weights);
542
537
543
- int convId = net.addLayer (convParam.name , convParam.type , convParam);
544
- net.connect (0 , 0 , convId, 0 );
545
- net.connect (convId, 0 , concatId, i + 1 );
538
+ convLayerIds[i] = net.addLayer (convParam.name , convParam.type , convParam);
539
+ net.connect (0 , 0 , convLayerIds[i], 0 );
540
+ }
541
+
542
+ LayerParams concatParam;
543
+ concatParam.type = "Concat";
544
+ concatParam.name = "testLayer";
545
+ int concatId = net.addLayer (concatParam.name , concatParam.type , concatParam);
546
+ net.connect (0 , 0 , concatId, 0 );
547
+ for (int i = 0 ; i < convLayerIds.size (); ++i)
548
+ {
549
+ net.connect (convLayerIds[i], 0 , concatId, i + 1 );
546
550
}
547
551
548
552
Mat input ({1 , inSize[0 ], inSize[1 ], inSize[2 ]}, CV_32F);
@@ -578,12 +582,7 @@ TEST_P(Eltwise, Accuracy)
578
582
579
583
Net net;
580
584
581
- LayerParams eltwiseParam;
582
- eltwiseParam.type = "Eltwise";
583
- eltwiseParam.name = "testLayer";
584
- int eltwiseId = net.addLayer (eltwiseParam.name , eltwiseParam.type , eltwiseParam);
585
- net.connect (0 , 0 , eltwiseId, 0 );
586
-
585
+ std::vector<int > convLayerIds (numConv);
587
586
for (int i = 0 ; i < numConv; ++i)
588
587
{
589
588
Mat weights ({inSize[0 ], inSize[0 ], 1 , 1 }, CV_32F);
@@ -600,9 +599,18 @@ TEST_P(Eltwise, Accuracy)
600
599
convParam.name = ss.str ();
601
600
convParam.blobs .push_back (weights);
602
601
603
- int convId = net.addLayer (convParam.name , convParam.type , convParam);
604
- net.connect (0 , 0 , convId, 0 );
605
- net.connect (convId, 0 , eltwiseId, i + 1 );
602
+ convLayerIds[i] = net.addLayer (convParam.name , convParam.type , convParam);
603
+ net.connect (0 , 0 , convLayerIds[i], 0 );
604
+ }
605
+
606
+ LayerParams eltwiseParam;
607
+ eltwiseParam.type = "Eltwise";
608
+ eltwiseParam.name = "testLayer";
609
+ int eltwiseId = net.addLayer (eltwiseParam.name , eltwiseParam.type , eltwiseParam);
610
+ net.connect (0 , 0 , eltwiseId, 0 );
611
+ for (int i = 0 ; i < numConv; ++i)
612
+ {
613
+ net.connect (convLayerIds[i], 0 , eltwiseId, i + 1 );
606
614
}
607
615
608
616
Mat input ({1 , inSize[0 ], inSize[1 ], inSize[2 ]}, CV_32F);
0 commit comments