@@ -559,21 +559,39 @@ void TFImporter::populateNet(Net dstNet)
         }
         else if (type == "BiasAdd" || type == "Add")
         {
-            layerParams.blobs.resize(1);
-            blobFromTensor(getConstBlob(layer, value_id), layerParams.blobs[0]);
+            bool haveConst = false;
+            for (int ii = 0; !haveConst && ii < layer.input_size(); ++ii)
+            {
+                Pin input = parsePin(layer.input(ii));
+                haveConst = value_id.find(input.name) != value_id.end();
+            }
+            CV_Assert(!haveConst || layer.input_size() == 2);
 
-            int id = dstNet.addLayer(name, "Shift", layerParams);
-            layer_id[name] = id;
+            if (haveConst)
+            {
+                layerParams.blobs.resize(1);
+                blobFromTensor(getConstBlob(layer, value_id), layerParams.blobs[0]);
 
-            // one input only
-            connect(layer_id, dstNet, parsePin(layer.input(0)), id, 0);
-        }
-        else if (type == "Identity")
-        {
-            int id = dstNet.addLayer(name, "Identity", layerParams);
-            layer_id[name] = id;
+                int id = dstNet.addLayer(name, "Shift", layerParams);
+                layer_id[name] = id;
 
-            connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, layer.input_size());
+                // one input only
+                connect(layer_id, dstNet, parsePin(layer.input(0)), id, 0);
+            }
+            else
+            {
+                layerParams.set("operation", "sum");
+                int id = dstNet.addLayer(name, "Eltwise", layerParams);
+                layer_id[name] = id;
+
+                for (int ii = 0; ii < layer.input_size(); ii++)
+                {
+                    Pin inp = parsePin(layer.input(ii));
+                    if (layer_id.find(inp.name) == layer_id.end())
+                        CV_Error(Error::StsError, "Input layer not found: " + inp.name);
+                    dstNet.connect(layer_id.at(inp.name), inp.blobIndex, id, ii);
+                }
+            }
         }
         else if (type == "MatMul")
         {
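
Note: the `BiasAdd`/`Add` branch above dispatches on whether any input resolves to a Const node: with a constant operand the op maps to a single-input "Shift" (bias) layer, otherwise it becomes an "Eltwise" sum over all inputs. A minimal standalone sketch of that dispatch rule; the `hasConstInput` helper and the `std::set` standing in for `value_id` are illustrative, not part of the importer:

```cpp
#include <cassert>
#include <set>
#include <string>
#include <vector>

// Illustrative stand-in for the haveConst scan: value_id is modeled as a
// set of Const node names; any hit means "treat this Add as a bias Shift".
static bool hasConstInput(const std::vector<std::string>& inputs,
                          const std::set<std::string>& constNodes)
{
    bool haveConst = false;
    for (size_t ii = 0; !haveConst && ii < inputs.size(); ++ii)
        haveConst = constNodes.count(inputs[ii]) != 0;
    return haveConst;
}

int main()
{
    std::set<std::string> constNodes;
    constNodes.insert("conv1/bias");

    std::vector<std::string> biasAdd;              // Add(conv1, conv1/bias)
    biasAdd.push_back("conv1");
    biasAdd.push_back("conv1/bias");

    std::vector<std::string> residual;             // Add(branch_a, branch_b)
    residual.push_back("branch_a");
    residual.push_back("branch_b");

    assert(hasConstInput(biasAdd, constNodes));    // -> "Shift" layer
    assert(!hasConstInput(residual, constNodes));  // -> "Eltwise" sum
    return 0;
}
```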
@@ -624,13 +642,6 @@ void TFImporter::populateNet(Net dstNet)
         else if (type == "Const")
         {
         }
-        else if (type == "Softmax")
-        {
-            int id = dstNet.addLayer(name, "Softmax", layerParams);
-            layer_id[name] = id;
-
-            connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, layer.input_size());
-        }
         else if (type == "LRN")
         {
             if (hasLayerAttr(layer, "alpha")) {
@@ -653,37 +664,28 @@ void TFImporter::populateNet(Net dstNet)
 
             connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, layer.input_size());
         }
-        else if (type == "Concat")
+        else if (type == "Concat" || type == "ConcatV2")
         {
-            int axis = getConstBlob(layer, value_id, 0).int_val().Get(0);
+            int axisId = (type == "Concat" ? 0 : layer.input_size() - 1);
+            int axis = getConstBlob(layer, value_id, axisId).int_val().Get(0);
             layerParams.set("axis", toNCHW[axis]);
 
             int id = dstNet.addLayer(name, "Concat", layerParams);
             layer_id[name] = id;
 
-            // input(0) is concat_dim
-            for (int ii = 1; ii < layer.input_size(); ii++)
+
+            int from = (type == "Concat" ? 1 : 0);
+            int to = (type == "Concat" ? layer.input_size() : layer.input_size() - 1);
+
+            // input(0) or input(n-1) is concat_dim
+            for (int ii = from; ii < to; ii++)
             {
                 Pin inp = parsePin(layer.input(ii));
                 if (layer_id.find(inp.name) == layer_id.end())
                     CV_Error(Error::StsError, "Input layer not found: " + inp.name);
-                dstNet.connect(layer_id.at(inp.name), inp.blobIndex, id, ii - 1);
+                dstNet.connect(layer_id.at(inp.name), inp.blobIndex, id, ii - from);
             }
         }
-        else if (type == "Relu")
-        {
-            int id = dstNet.addLayer(name, "ReLU", layerParams);
-            layer_id[name] = id;
-
-            connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, layer.input_size());
-        }
-        else if (type == "Elu")
-        {
-            int id = dstNet.addLayer(name, "ELU", layerParams);
-            layer_id[name] = id;
-
-            connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, layer.input_size());
-        }
         else if (type == "MaxPool")
         {
             layerParams.set("pool", "max");
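
For reference, TF's `Concat` carries the concat axis as its first input while `ConcatV2` carries it as the last one; the `axisId`/`from`/`to` arithmetic above encodes exactly that. A self-contained sketch of the index math; the `valueInputRange` helper is illustrative only:

```cpp
#include <cassert>
#include <string>

// Illustrative index math for both TF concat flavours with n inputs:
//   Concat:   input(0) is the axis, tensors occupy inputs 1..n-1.
//   ConcatV2: input(n-1) is the axis, tensors occupy inputs 0..n-2.
static void valueInputRange(const std::string& type, int n,
                            int& axisId, int& from, int& to)
{
    axisId = (type == "Concat" ? 0 : n - 1);
    from = (type == "Concat" ? 1 : 0);
    to = (type == "Concat" ? n : n - 1);
}

int main()
{
    int axisId, from, to;
    valueInputRange("Concat", 4, axisId, from, to);
    assert(axisId == 0 && from == 1 && to == 4);   // tensors at 1, 2, 3

    valueInputRange("ConcatV2", 4, axisId, from, to);
    assert(axisId == 3 && from == 0 && to == 3);   // tensors at 0, 1, 2
    return 0;
}
```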
@@ -736,6 +738,145 @@ void TFImporter::populateNet(Net dstNet)
             // one input only
             connect(layer_id, dstNet, parsePin(layer.input(1)), id, 0);
         }
+        else if (type == "Mul")
+        {
+            bool haveConst = false;
+            for (int ii = 0; !haveConst && ii < layer.input_size(); ++ii)
+            {
+                Pin input = parsePin(layer.input(ii));
+                haveConst = value_id.find(input.name) != value_id.end();
+            }
+            CV_Assert(!haveConst || layer.input_size() == 2);
+
+            if (haveConst)
+            {
+                // Multiplication by constant.
+                CV_Assert(layer.input_size() == 2);
+
+                float scale = getConstBlob(layer, value_id).float_val()[0];
+                layerParams.set("scale", scale);
+
+                int id = dstNet.addLayer(name, "Power", layerParams);
+                layer_id[name] = id;
+
+                Pin inp0 = parsePin(layer.input(0));
+                if (layer_id.find(inp0.name) != layer_id.end())
+                    // First operand is a layer output; the constant is input(1).
+                    connect(layer_id, dstNet, parsePin(layer.input(0)), id, 0);
+                else
+                    connect(layer_id, dstNet, parsePin(layer.input(1)), id, 0);
+            }
+            else
+            {
+                layerParams.set("operation", "prod");
+                int id = dstNet.addLayer(name, "Eltwise", layerParams);
+                layer_id[name] = id;
+
+                for (int ii = 0; ii < layer.input_size(); ii++)
+                {
+                    Pin inp = parsePin(layer.input(ii));
+                    if (layer_id.find(inp.name) == layer_id.end())
+                        CV_Error(Error::StsError, "Input layer not found: " + inp.name);
+                    dstNet.connect(layer_id.at(inp.name), inp.blobIndex, id, ii);
+                }
+            }
+        }
+        else if (type == "Pad")
+        {
+            tensorflow::TensorProto paddings = getConstBlob(layer, value_id, 1);
+            MatShape shape;
+            blobShapeFromTensor(paddings, shape);
+            if (shape[0] != 4)
+                CV_Error(Error::StsError, "Expected NHWC data format");
+
+            // Copy tensor with paddings.
+            std::vector<int32_t> values(shape[0] * 2);
+            CV_Assert(sizeof(int32_t) * values.size() ==
+                      paddings.tensor_content().size());
+            memcpy(&values[0], &paddings.tensor_content()[0],
+                   paddings.tensor_content().size());
+
+            // Allow only one padding operation per layer.
+            bool padded = false;
+            for (int i = 0; i < values.size(); ++i)
+            {
+                if (values[i])
+                {
+                    if (padded)
+                        CV_Error(Error::StsError,
+                                 "Only single padding operation per layer is supported");
+                    padded = true;
+
+                    int axis = i / 2;
+                    // Remap NHWC to NCHW.
+                    // 0 -> 0
+                    // 1 -> 2
+                    // 2 -> 3
+                    // 3 -> 1
+                    if (axis != 0)
+                        axis = axis % 3 + 1;
+
+                    layerParams.set("padding_dim", axis);
+                    if (i % 2)  // Pad after
+                        layerParams.set("padding", values[i]);
+                    else  // Pad before
+                        layerParams.set("padding", -1 * values[i]);
+
+                    int id = dstNet.addLayer(name, "Padding", layerParams);
+                    layer_id[name] = id;
+
+                    connect(layer_id, dstNet, parsePin(layer.input(0)), id, 0);
+                }
+            }
+        }
+        else if (type == "FusedBatchNorm")
+        {
+            // op: "FusedBatchNorm"
+            // input: "input"
+            // input: "BatchNorm/gamma"
+            // input: "BatchNorm/beta"
+            // input: "BatchNorm/moving_mean"
+            // input: "BatchNorm/moving_variance"
+            if (layer.input_size() != 5)
+                CV_Error(Error::StsNotImplemented,
+                         "Expected gamma, beta, mean and std");
+
+            layerParams.blobs.resize(4);
+            // gamma
+            blobFromTensor(getConstBlob(layer, value_id, 1), layerParams.blobs[2]);
+            // beta
+            blobFromTensor(getConstBlob(layer, value_id, 2), layerParams.blobs[3]);
+            // mean
+            blobFromTensor(getConstBlob(layer, value_id, 3), layerParams.blobs[0]);
+            // std
+            blobFromTensor(getConstBlob(layer, value_id, 4), layerParams.blobs[1]);
+
+            if (hasLayerAttr(layer, "epsilon"))
+                layerParams.set("eps", getLayerAttr(layer, "epsilon").f());
+
+            layerParams.set("has_weight", true);
+            layerParams.set("has_bias", true);
+
+            int id = dstNet.addLayer(name, "BatchNorm", layerParams);
+            layer_id[name] = id;
+
+            // one input only
+            connect(layer_id, dstNet, parsePin(layer.input(0)), id, 0);
+        }
+        else if (type == "Abs" || type == "Tanh" || type == "Sigmoid" ||
+                 type == "Relu" || type == "Elu" || type == "Softmax" ||
+                 type == "Identity")
+        {
+            std::string dnnType = type;
+            if (type == "Abs") dnnType = "AbsVal";
+            else if (type == "Tanh") dnnType = "TanH";
+            else if (type == "Relu") dnnType = "ReLU";
+            else if (type == "Elu") dnnType = "ELU";
+
+            int id = dstNet.addLayer(name, dnnType, layerParams);
+            layer_id[name] = id;
+            connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, layer.input_size());
+        }
         else
         {
             printLayerAttr(layer);
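
A note on the `Pad` handler's axis remap: TF paddings arrive in NHWC order while dnn layers run in NCHW, hence the 0 -> 0, 1 -> 2, 2 -> 3, 3 -> 1 mapping computed by `axis % 3 + 1`. A tiny sketch verifying that mapping; the `remapNHWCtoNCHW` helper is illustrative only:

```cpp
#include <cassert>

// Illustrative check of the remap used above: batch stays at 0,
// height 1 -> 2, width 2 -> 3, channels 3 -> 1.
static int remapNHWCtoNCHW(int axis)
{
    return axis == 0 ? 0 : axis % 3 + 1;
}

int main()
{
    assert(remapNHWCtoNCHW(0) == 0);  // N
    assert(remapNHWCtoNCHW(1) == 2);  // H
    assert(remapNHWCtoNCHW(2) == 3);  // W
    assert(remapNHWCtoNCHW(3) == 1);  // C
    return 0;
}
```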
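And for `FusedBatchNorm`: the four blobs are wired so that blobs[0]/blobs[1] hold the running mean/variance and blobs[2]/blobs[3] hold gamma/beta, matching the standard inference-time transform. A sketch of that per-channel formula; the `fusedBatchNorm` helper is illustrative only:

```cpp
#include <cassert>
#include <cmath>

// Standard batch-norm inference transform applied per channel:
//   y = gamma * (x - mean) / sqrt(variance + eps) + beta
static float fusedBatchNorm(float x, float mean, float variance,
                            float gamma, float beta, float eps)
{
    return gamma * (x - mean) / std::sqrt(variance + eps) + beta;
}

int main()
{
    // Identity parameters (mean 0, variance 1, gamma 1, beta 0, eps 0)
    // leave the input unchanged.
    assert(std::fabs(fusedBatchNorm(2.5f, 0.f, 1.f, 1.f, 0.f, 0.f) - 2.5f) < 1e-6f);
    return 0;
}
```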