Skip to content

Commit 20a2dc6

Browse files
committed
Fix models with multiple inputs imported from Caffe.
Fixed Concat optimization.
1 parent bc348eb commit 20a2dc6

File tree

4 files changed

+97
-4
lines changed

4 files changed

+97
-4
lines changed

modules/dnn/src/caffe/caffe_importer.cpp

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -318,8 +318,12 @@ class CaffeImporter : public Importer
318318

319319
if (type == "Input")
320320
{
321-
addedBlobs.push_back(BlobNote(name, 0, netInputs.size()));
322-
netInputs.push_back(name);
321+
for (int outNum = 0; outNum < layer.top_size(); outNum++)
322+
{
323+
addOutput(layer, 0, outNum);
324+
addedBlobs.back().outNum = netInputs.size();
325+
netInputs.push_back(addedBlobs.back().name);
326+
}
323327
continue;
324328
}
325329

modules/dnn/src/dnn.cpp

Lines changed: 22 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -275,6 +275,16 @@ struct DataLayer : public Layer
275275
outNames.assign(names.begin(), names.end());
276276
}
277277

278+
bool getMemoryShapes(const std::vector<MatShape> &inputs,
279+
const int requiredOutputs,
280+
std::vector<MatShape> &outputs,
281+
std::vector<MatShape> &internals) const
282+
{
283+
CV_Assert(inputs.size() == requiredOutputs);
284+
outputs.assign(inputs.begin(), inputs.end());
285+
return false;
286+
}
287+
278288
private:
279289
std::vector<String> outNames;
280290
};
@@ -1184,7 +1194,7 @@ struct Net::Impl
11841194
layers[ld.inputBlobsId[i].lid].getLayerInstance()->name.c_str(),
11851195
inp_i_data->getLayerInstance()->name.c_str()));
11861196

1187-
if(inp_i_data->skipFlags[DNN_BACKEND_DEFAULT])
1197+
if(inp_i_data->skipFlags[DNN_BACKEND_DEFAULT] || inp_i_data->consumers.size() != 1)
11881198
break;
11891199
realinputs[i] = pin;
11901200
}
@@ -1206,6 +1216,14 @@ struct Net::Impl
12061216
Mat& curr_output = inp_i_data->outputBlobs[pin.oid];
12071217
CV_Assert(output_slice.isContinuous() && output_slice.size == curr_output.size);
12081218
curr_output = output_slice;
1219+
1220+
pin = ld.inputBlobsId[i];
1221+
inp_i_data = &layers[pin.lid];
1222+
for (int j = 0; j < inp_i_data->consumers.size(); ++j)
1223+
{
1224+
LayerPin consumer = inp_i_data->consumers[j];
1225+
layers[consumer.lid].inputBlobs[consumer.oid] = &curr_output;
1226+
}
12091227
}
12101228
ld.skipFlags[DNN_BACKEND_DEFAULT] = true;
12111229
printf_(("\toptimized out Concat layer %s\n", concatLayer->name.c_str()));
@@ -1235,7 +1253,9 @@ struct Net::Impl
12351253

12361254
blobManager.reset();
12371255
backendWrappers.clear();
1238-
blobManager.addReference(LayerPin(0, 0));
1256+
// Fake references to input blobs.
1257+
for (int i = 0; i < layers[0].outputBlobs.size(); ++i)
1258+
blobManager.addReference(LayerPin(0, i));
12391259
for (it = layers.begin(); it != layers.end(); ++it)
12401260
{
12411261
const LayerData& ld = it->second;

modules/dnn/test/test_caffe_importer.cpp

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -280,4 +280,31 @@ TEST(Reproducibility_DenseNet_121, Accuracy)
280280
normAssert(out, ref);
281281
}
282282

283+
TEST(Test_Caffe, multiple_inputs)
284+
{
285+
const string proto = findDataFile("dnn/layers/net_input.prototxt", false);
286+
Net net = readNetFromCaffe(proto);
287+
288+
Mat first_image(10, 11, CV_32FC3);
289+
Mat second_image(10, 11, CV_32FC3);
290+
randu(first_image, -1, 1);
291+
randu(second_image, -1, 1);
292+
293+
first_image = blobFromImage(first_image);
294+
second_image = blobFromImage(second_image);
295+
296+
Mat first_image_blue_green = slice(first_image, Range::all(), Range(0, 2), Range::all(), Range::all());
297+
Mat first_image_red = slice(first_image, Range::all(), Range(2, 3), Range::all(), Range::all());
298+
Mat second_image_blue_green = slice(second_image, Range::all(), Range(0, 2), Range::all(), Range::all());
299+
Mat second_image_red = slice(second_image, Range::all(), Range(2, 3), Range::all(), Range::all());
300+
301+
net.setInput(first_image_blue_green, "old_style_input_blue_green");
302+
net.setInput(first_image_red, "different_name_for_red");
303+
net.setInput(second_image_blue_green, "input_layer_blue_green");
304+
net.setInput(second_image_red, "old_style_input_red");
305+
Mat out = net.forward();
306+
307+
normAssert(out, first_image + second_image);
308+
}
309+
283310
}

modules/dnn/test/test_layers.cpp

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -274,6 +274,48 @@ OCL_TEST(Layer_Test_Concat, Accuracy)
274274
testLayerUsingCaffeModels("layer_concat", DNN_TARGET_OPENCL);
275275
}
276276

277+
TEST(Layer_Test_Fused_Concat, Accuracy)
278+
{
279+
// Test case
280+
// input
281+
// |
282+
// v
283+
// some_layer
284+
// | |
285+
// v v
286+
// concat
287+
Net net;
288+
int interLayer;
289+
{
290+
LayerParams lp;
291+
lp.type = "AbsVal";
292+
lp.name = "someLayer";
293+
interLayer = net.addLayerToPrev(lp.name, lp.type, lp);
294+
}
295+
{
296+
LayerParams lp;
297+
lp.set("axis", 1);
298+
lp.type = "Concat";
299+
lp.name = "testConcat";
300+
int id = net.addLayer(lp.name, lp.type, lp);
301+
net.connect(interLayer, 0, id, 0);
302+
net.connect(interLayer, 0, id, 1);
303+
}
304+
int shape[] = {1, 2, 3, 4};
305+
Mat input(4, shape, CV_32F);
306+
randu(input, 0.0f, 1.0f); // [0, 1] to make AbsVal an identity transformation.
307+
308+
net.setInput(input);
309+
Mat out = net.forward();
310+
311+
normAssert(slice(out, Range::all(), Range(0, 2), Range::all(), Range::all()), input);
312+
normAssert(slice(out, Range::all(), Range(2, 4), Range::all(), Range::all()), input);
313+
314+
//
315+
316+
testLayerUsingCaffeModels("layer_concat_optim", DNN_TARGET_CPU, true, false);
317+
}
318+
277319
TEST(Layer_Test_Eltwise, Accuracy)
278320
{
279321
testLayerUsingCaffeModels("layer_eltwise");

0 commit comments

Comments (0)