@@ -1398,7 +1398,8 @@ struct Net::Impl
             LayerPin pin = ld.inputBlobsId[i];
             LayerData* inp_i_data = &layers[pin.lid];
             while (inp_i_data->skipFlags[DNN_BACKEND_DEFAULT] &&
-                   inp_i_data->inputBlobsId.size() == 1)
+                   inp_i_data->inputBlobsId.size() == 1 &&
+                   inp_i_data->consumers.size() == 1)
             {
                 pin = inp_i_data->inputBlobsId[0];
                 inp_i_data = &layers[pin.lid];
@@ -1428,15 +1429,11 @@ struct Net::Impl
                 Mat output_slice = output(chrange);
                 Mat& curr_output = inp_i_data->outputBlobs[pin.oid];
                 CV_Assert(output_slice.isContinuous() && output_slice.size == curr_output.size);
+                Mat* oldPtr = &curr_output;
                 curr_output = output_slice;
-
-                pin = ld.inputBlobsId[i];
-                inp_i_data = &layers[pin.lid];
-                for (int j = 0; j < inp_i_data->consumers.size(); ++j)
-                {
-                    LayerPin consumer = inp_i_data->consumers[j];
-                    layers[consumer.lid].inputBlobs[consumer.oid] = &curr_output;
-                }
+                // Layers that referred to the old input Mat will now refer
+                // to the new data through the same Mat object.
+                CV_Assert(curr_output.data == output_slice.data, oldPtr == &curr_output);
             }
             ld.skipFlags[DNN_BACKEND_DEFAULT] = true;
             printf_(("\toptimized out Concat layer %s\n", concatLayer->name.c_str()));
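
Aside for reviewers (not part of the patch): the new CV_Assert documents the cv::Mat semantics this optimization leans on. Assigning one Mat to another rebinds the shared data pointer but leaves the Mat object itself at the same address, so any consumer that stored a Mat* to a producer's output automatically sees the concatenated buffer, which is why the removed pointer-rewiring loop is unnecessary. A minimal standalone sketch of that behavior follows; the variable names are illustrative and do not come from dnn.cpp:

// Sketch of the Mat-assignment semantics the patch asserts.
// Assumes OpenCV is available; names are hypothetical stand-ins.
#include <opencv2/core.hpp>
#include <cstdio>

int main()
{
    cv::Mat big = cv::Mat::zeros(4, 4, CV_32F);   // stand-in for the Concat output
    cv::Mat slice = big.rowRange(0, 2);           // stand-in for output_slice
    cv::Mat out = cv::Mat::ones(2, 4, CV_32F);    // stand-in for a producer's outputBlobs entry

    cv::Mat* consumerPtr = &out;   // consumers keep raw pointers to the Mat object
    cv::Mat* oldPtr = &out;

    out = slice;                   // Mat assignment: shares data, same Mat object

    // Same invariants the patch asserts: the object address is unchanged,
    // while its data pointer now aliases the slice of the big buffer.
    std::printf("same object: %d, shares data: %d\n",
                oldPtr == &out, out.data == slice.data);
    std::printf("consumer still valid, sees new data: %d\n",
                consumerPtr->data == slice.data);
    return 0;
}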