Skip to content

Commit a0d3d11

Browse files
committed
Merge pull request opencv#9649 from dkurt:dnn_reshape_transpose
2 parents f1695bd + 17a85b1 commit a0d3d11

File tree

5 files changed

+11
-61
lines changed

5 files changed

+11
-61
lines changed

modules/dnn/src/layers/permute_layer.cpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -132,6 +132,7 @@ class PermuteLayerImpl : public PermuteLayer
132132

133133
for (size_t i = 0; i < inputs.size(); i++)
134134
{
135+
CV_Assert(inputs[i].size() == 4);
135136
CV_Assert(inputs[i][2] == shapeBefore[2] && inputs[i][3] == shapeBefore[3]);
136137
CV_Assert(total(inputs[i]) == total(shapeAfter));
137138
outputs.push_back(shapeAfter);

modules/dnn/src/layers/reshape_layer.cpp

Lines changed: 3 additions & 57 deletions
Original file line numberDiff line numberDiff line change
@@ -146,13 +146,11 @@ static void computeShapeByReshapeMask(const MatShape &srcShape,
146146
class ReshapeLayerImpl : public ReshapeLayer
147147
{
148148
public:
149-
ReshapeLayerImpl(const LayerParams& params):
150-
performReordering(false)
149+
ReshapeLayerImpl(const LayerParams& params)
151150
{
152151
setParamsFrom(params);
153152
int axis = params.get<int>("axis", 0);
154153
int numAxes = params.get<int>("num_axes", -1);
155-
enableReordering = params.get<bool>("reorder_dims", false);
156154
CV_Assert(numAxes >= -1);
157155
newShapeRange = (numAxes == -1) ? Range(axis, INT_MAX) : Range(axis, axis + numAxes);
158156

@@ -184,25 +182,6 @@ class ReshapeLayerImpl : public ReshapeLayer
184182
return true;
185183
}
186184

187-
void finalize(const std::vector<Mat*> &inputs, std::vector<Mat> &outputs)
188-
{
189-
CV_Assert(inputs.size());
190-
CV_Assert(outputs.size());
191-
Mat srcBlob = *inputs[0];
192-
int dims = srcBlob.dims;
193-
MatShape inputShape = shape(srcBlob), outShape = shape(outputs[0]);
194-
195-
// input.total() == output.total(). So if reordering is required,
196-
// one of the sizes will not be equal.
197-
// Example where reordering is required: from 1x128x4x4 to 1x2048
198-
// Example where reordering is NOT required: from 1x1024x1x1 to 1x1024.
199-
bool reorderingRequire = false;
200-
const int minDims = min(dims, (int)outShape.size());
201-
for (int i = 0; !reorderingRequire && i < minDims; ++i)
202-
reorderingRequire = inputShape[i] != outShape[i];
203-
performReordering = enableReordering && reorderingRequire;
204-
}
205-
206185
void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
207186
{
208187
CV_TRACE_FUNCTION();
@@ -211,43 +190,10 @@ class ReshapeLayerImpl : public ReshapeLayer
211190
for (size_t i = 0; i < inputs.size(); i++)
212191
{
213192
Mat srcBlob = *inputs[i];
214-
MatShape inputShape = shape(srcBlob), outShape = shape(outputs[i]);
215-
216-
if (performReordering)
217-
{
218-
float *dstData = internals[i].ptr<float>();
219-
const float *srcData = srcBlob.ptr<float>();
220-
221-
int num = inputShape[0], channels = inputShape[1], height = inputShape[2], width = inputShape[3];
222-
int total = num*channels*height*width;
223-
for(int i_n = 0; i_n < num; i_n++) {
224-
for(int i_c = 0; i_c < channels; i_c++) {
225-
for(int i_h = 0; i_h < height; i_h++) {
226-
for(int i_w = 0; i_w < width; i_w++) {
227-
int src_i = channels*height*width*i_n + height*width*i_c + width*i_h + i_w;
228-
int dst_i = channels*height*width*i_n + i_c + channels*width*i_h + channels*i_w;
229-
230-
CV_Assert(dst_i < total);
231-
CV_Assert(src_i < total);
232-
233-
dstData[dst_i] = srcData[src_i];
234-
}
235-
}
236-
}
237-
}
238-
internals[i].copyTo(outputs[i]);
239-
}
240-
else
241-
{
242-
if (outputs[i].data != srcBlob.data)
243-
srcBlob.reshape(1, outShape).copyTo(outputs[i]);
244-
}
193+
if (outputs[i].data != srcBlob.data)
194+
srcBlob.reshape(1, shape(outputs[i])).copyTo(outputs[i]);
245195
}
246196
}
247-
248-
private:
249-
std::vector<std::vector<int> > outShapes;
250-
bool enableReordering, performReordering;
251197
};
252198

253199
Ptr<ReshapeLayer> ReshapeLayer::create(const LayerParams& params)

modules/dnn/src/tensorflow/tf_importer.cpp

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -771,7 +771,6 @@ void TFImporter::populateNet(Net dstNet)
771771
else if (type == "Reshape")
772772
{
773773
layerParams.set("dim", parseDims(getConstBlob(layer, value_id, 1)));
774-
layerParams.set("reorder_dims", true);
775774

776775
int id = dstNet.addLayer(name, "Reshape", layerParams);
777776
layer_id[name] = id;

modules/dnn/test/test_layers.cpp

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -166,13 +166,12 @@ TEST(Layer_Test_MVN, Accuracy)
166166
}
167167

168168
void testReshape(const MatShape& inputShape, const MatShape& targetShape,
169-
int axis = 0, int num_axes = -1, bool reorder_dims = false,
169+
int axis = 0, int num_axes = -1,
170170
MatShape mask = MatShape())
171171
{
172172
LayerParams params;
173173
params.set("axis", axis);
174174
params.set("num_axes", num_axes);
175-
params.set("reorder_dims", reorder_dims);
176175
if (!mask.empty())
177176
{
178177
params.set("dim", DictValue::arrayInt<int*>(&mask[0], mask.size()));
@@ -201,7 +200,7 @@ TEST(Layer_Test_Reshape, Accuracy)
201200
int inp[] = {1, 128, 4, 4};
202201
int out[] = {1, 2048};
203202
int mask[] = {-1, 2048};
204-
testReshape(MatShape(inp, inp + 4), MatShape(out, out + 2), 0, -1, true,
203+
testReshape(MatShape(inp, inp + 4), MatShape(out, out + 2), 0, -1,
205204
MatShape(mask, mask + 2));
206205
}
207206
}

modules/dnn/test/test_tf_importer.cpp

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -143,6 +143,11 @@ TEST(Test_TensorFlow, defun)
143143
runTensorFlowNet("defun_dropout");
144144
}
145145

146+
TEST(Test_TensorFlow, reshape)
147+
{
148+
runTensorFlowNet("shift_reshape_no_reorder");
149+
}
150+
146151
TEST(Test_TensorFlow, fp16)
147152
{
148153
const float l1 = 1e-3;

0 commit comments

Comments (0)