Commit 70c605a

Limit Concat layer optimization
1 parent a8844de commit 70c605a

File tree

  modules/dnn/src/dnn.cpp
  modules/dnn/test/test_layers.cpp

2 files changed: +7 −9 lines changed

modules/dnn/src/dnn.cpp

Lines changed: 6 additions & 9 deletions
@@ -1398,7 +1398,8 @@ struct Net::Impl
                 LayerPin pin = ld.inputBlobsId[i];
                 LayerData* inp_i_data = &layers[pin.lid];
                 while(inp_i_data->skipFlags[DNN_BACKEND_DEFAULT] &&
-                      inp_i_data->inputBlobsId.size() == 1)
+                      inp_i_data->inputBlobsId.size() == 1 &&
+                      inp_i_data->consumers.size() == 1)
                 {
                     pin = inp_i_data->inputBlobsId[0];
                     inp_i_data = &layers[pin.lid];
@@ -1428,15 +1429,11 @@ struct Net::Impl
                 Mat output_slice = output(chrange);
                 Mat& curr_output = inp_i_data->outputBlobs[pin.oid];
                 CV_Assert(output_slice.isContinuous() && output_slice.size == curr_output.size);
+                Mat* oldPtr = &curr_output;
                 curr_output = output_slice;
-
-                pin = ld.inputBlobsId[i];
-                inp_i_data = &layers[pin.lid];
-                for (int j = 0; j < inp_i_data->consumers.size(); ++j)
-                {
-                    LayerPin consumer = inp_i_data->consumers[j];
-                    layers[consumer.lid].inputBlobs[consumer.oid] = &curr_output;
-                }
+                // Layers that refer old input Mat will refer to the
+                // new data but the same Mat object.
+                CV_Assert(curr_output.data == output_slice.data, oldPtr == &curr_output);
             }
             ld.skipFlags[DNN_BACKEND_DEFAULT] = true;
             printf_(("\toptimized out Concat layer %s\n", concatLayer->name.c_str()));
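A hedged reading of this change (commentary, not part of the commit): the Concat fusion lets each input layer write straight into a channel slice of the Concat output blob by reassigning that input's output Mat header to the slice. The old code then repointed every consumer's inputBlobs entry to the reassigned Mat; the new code drops that loop and instead asserts that consumers already hold the same Mat object, which now carries the new data pointer. The while loop also stops walking through skipped layers once a layer has more than one consumer, so a blob shared by other layers is not aliased into the concat buffer. The sketch below uses only cv::Mat with illustrative names (inputA, inputB, output are not dnn internals) to show the header reassignment the fusion relies on and the two invariants the added CV_Assert checks.

// Minimal sketch of the Mat-header aliasing used by the Concat fusion.
// Illustrative only; assumes OpenCV core, not the dnn module internals.
#include <opencv2/core.hpp>
#include <cstdio>

int main()
{
    // Pretend these are the output blobs of two layers feeding a Concat
    // over the channel axis (flattened to 1-D here for simplicity).
    cv::Mat inputA = (cv::Mat_<float>(1, 3) << 1, 2, 3);
    cv::Mat inputB = (cv::Mat_<float>(1, 2) << 4, 5);

    // Shared output buffer of the fused Concat layer.
    cv::Mat output(1, 5, CV_32F);

    // Redirect each input Mat header into a slice of `output`,
    // mirroring `curr_output = output_slice;` in dnn.cpp.
    cv::Mat* oldPtr = &inputA;
    inputA = output.colRange(0, 3);
    inputB = output.colRange(3, 5);

    // The Mat objects stay the same C++ objects; only their data
    // pointers changed -- the property the commit's CV_Assert verifies.
    CV_Assert(oldPtr == &inputA);
    CV_Assert(inputA.data == output.data);

    // Any layer that had cached the old data pointer of `inputA` (i.e. a
    // second consumer) would now read stale memory; hence the new
    // `consumers.size() == 1` restriction.
    std::printf("inputA now aliases the concat output buffer\n");
    return 0;
}

To try the sketch, build it against OpenCV core only, e.g. g++ sketch.cpp $(pkg-config --cflags --libs opencv4) on a typical OpenCV 4 install (package name may differ).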

modules/dnn/test/test_layers.cpp

Lines changed: 1 addition & 0 deletions
@@ -314,6 +314,7 @@ TEST(Layer_Test_Fused_Concat, Accuracy)
     //
 
     testLayerUsingCaffeModels("layer_concat_optim", DNN_TARGET_CPU, true, false);
+    testLayerUsingCaffeModels("layer_concat_shared_input", DNN_TARGET_CPU, true, false);
 }
 
 TEST(Layer_Test_Eltwise, Accuracy)
