@@ -155,7 +155,9 @@ - (IplImage*)getOutput;
     uint8_t *mOutImagedata;
     IplImage *mOutImage;
     size_t currSize;
+/*
     int mMode;
+*/
     int mFormat;
 
     bool setupReadingAt(CMTime position);
@@ -677,7 +679,9 @@ -(int) updateImage {
     mOutImage = NULL;
     mOutImagedata = NULL;
     currSize = 0;
+/*
     mMode = CV_CAP_MODE_BGR;
+*/
     mFormat = CV_8UC3;
     mCurrentSampleBuffer = NULL;
     mGrabbedPixels = NULL;
@@ -741,12 +745,15 @@ -(int) updateImage {
 
     // Capture in a pixel format that can be converted efficiently to the output mode.
     OSType pixelFormat;
+/*
     if (mMode == CV_CAP_MODE_BGR || mMode == CV_CAP_MODE_RGB) {
+*/
         // For CV_CAP_MODE_BGR, read frames as BGRA (AV Foundation's YUV->RGB conversion is slightly faster than OpenCV's CV_YUV2BGR_YV12)
         // kCVPixelFormatType_32ABGR is reportedly faster on OS X, but OpenCV doesn't have a CV_ABGR2BGR conversion.
         // kCVPixelFormatType_24RGB is significantly slower than kCVPixelFormatType_32BGRA.
         pixelFormat = kCVPixelFormatType_32BGRA;
         mFormat = CV_8UC3;
+/*
     } else if (mMode == CV_CAP_MODE_GRAY) {
         // For CV_CAP_MODE_GRAY, read frames as 420v (faster than 420f or 422 -- at least for H.264 files)
         pixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
@@ -759,6 +766,7 @@ -(int) updateImage {
         fprintf(stderr, "VIDEOIO ERROR: AVF Mac: Unsupported mode: %d\n", mMode);
         return false;
     }
+*/
 
     NSDictionary *settings =
         @{
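The hunks above pin the reader output to kCVPixelFormatType_32BGRA and comment out the mode-dependent branches, so every decoded frame now arrives as 32-bit BGRA and is reduced to the 3-channel BGR output that mFormat = CV_8UC3 advertises. The following is a minimal sketch of that reduction using OpenCV's public C++ API rather than the legacy CV_* constants; baseAddress, bytesPerRow, width and height are hypothetical stand-ins for the values the real code reads from the locked CVPixelBuffer.

    #include <opencv2/imgproc.hpp>
    #include <cstdint>
    #include <cstddef>

    // Sketch only: wrap a locked 32BGRA device buffer without copying and
    // convert it to the CV_8UC3 BGR output the capture now always produces.
    cv::Mat bgraToBgr(const uint8_t *baseAddress, int width, int height, size_t bytesPerRow)
    {
        // View the BGRA frame in place, honouring the device row stride.
        cv::Mat bgra(height, width, CV_8UC4,
                     const_cast<uint8_t *>(baseAddress), bytesPerRow);

        cv::Mat bgr;
        cv::cvtColor(bgra, bgr, cv::COLOR_BGRA2BGR);  // same conversion as CV_BGRA2BGR in the diff
        return bgr;
    }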
@@ -848,8 +856,11 @@ -(int) updateImage {
 
     // Output image parameters.
     int outChannels;
+/*
     if (mMode == CV_CAP_MODE_BGR || mMode == CV_CAP_MODE_RGB) {
+*/
         outChannels = 3;
+/*
     } else if (mMode == CV_CAP_MODE_GRAY) {
         outChannels = 1;
     } else if (mMode == CV_CAP_MODE_YUYV) {
@@ -861,6 +872,7 @@ -(int) updateImage {
         mGrabbedPixels = NULL;
         return 0;
     }
+*/
 
     if ( currSize != width*outChannels*height ) {
         currSize = width*outChannels*height;
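With the GRAY and YUYV branches commented out, outChannels is always 3, so the output buffer is sized as width*3*height bytes (e.g. 1920*3*1080 = 6,220,800 bytes for a 1080p frame) and only reallocated when that product changes. A small illustrative sketch of the same guard, assuming a heap-managed buffer; the member names mirror the patch but this is not the verbatim method body.

    #include <cstdlib>
    #include <cstdint>
    #include <cstddef>

    // Reallocate the output pixel buffer only when its required size changes.
    static void ensureOutputBuffer(uint8_t *&data, size_t &currSize,
                                   size_t width, size_t height, int outChannels)
    {
        const size_t needed = width * outChannels * height;
        if (currSize != needed) {
            currSize = needed;
            // Old pixel contents are not needed, so a plain realloc suffices
            // (error handling omitted in this sketch).
            data = static_cast<uint8_t *>(std::realloc(data, currSize));
        }
    }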
@@ -888,8 +900,11 @@ -(int) updateImage {
     if ( pixelFormat == kCVPixelFormatType_32BGRA ) {
         deviceChannels = 4;
 
+/*
         if (mMode == CV_CAP_MODE_BGR) {
+*/
             cvtCode = CV_BGRA2BGR;
+/*
         } else if (mMode == CV_CAP_MODE_RGB) {
             cvtCode = CV_BGRA2RGB;
         } else if (mMode == CV_CAP_MODE_GRAY) {
@@ -901,11 +916,15 @@ -(int) updateImage {
             fprintf(stderr, "OpenCV: unsupported pixel conversion mode\n");
             return 0;
         }
+*/
     } else if ( pixelFormat == kCVPixelFormatType_24RGB ) {
         deviceChannels = 3;
 
+/*
         if (mMode == CV_CAP_MODE_BGR) {
+*/
             cvtCode = CV_RGB2BGR;
+/*
         } else if (mMode == CV_CAP_MODE_RGB) {
             cvtCode = 0;
         } else if (mMode == CV_CAP_MODE_GRAY) {
@@ -917,11 +936,15 @@ -(int) updateImage {
             fprintf(stderr, "OpenCV: unsupported pixel conversion mode\n");
             return 0;
         }
+*/
     } else if ( pixelFormat == kCVPixelFormatType_422YpCbCr8 ) { // 422 (2vuy, UYVY)
         deviceChannels = 2;
 
+/*
         if (mMode == CV_CAP_MODE_BGR) {
+*/
             cvtCode = CV_YUV2BGR_UYVY;
+/*
         } else if (mMode == CV_CAP_MODE_RGB) {
             cvtCode = CV_YUV2RGB_UYVY;
         } else if (mMode == CV_CAP_MODE_GRAY) {
@@ -935,15 +958,19 @@ -(int) updateImage {
             fprintf(stderr, "OpenCV: unsupported pixel conversion mode\n");
             return 0;
         }
+*/
     } else if ( pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange ||   // 420v
                 pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ) {   // 420f
         // cvCvtColor(CV_YUV2GRAY_420) is expecting a single buffer with both the Y plane and the CrCb planes.
         // So, lie about the height of the buffer. cvCvtColor(CV_YUV2GRAY_420) will only read the first 2/3 of it.
         height = height * 3 / 2;
         deviceChannels = 1;
 
+/*
         if (mMode == CV_CAP_MODE_BGR) {
+*/
             cvtCode = CV_YUV2BGR_YV12;
+/*
         } else if (mMode == CV_CAP_MODE_RGB) {
             cvtCode = CV_YUV2RGB_YV12;
         } else if (mMode == CV_CAP_MODE_GRAY) {
@@ -955,6 +982,7 @@ -(int) updateImage {
             fprintf(stderr, "OpenCV: unsupported pixel conversion mode\n");
             return 0;
         }
+*/
     } else {
         fprintf(stderr, "OpenCV: unsupported pixel format 0x%08X\n", pixelFormat);
         CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
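The 420v/420f branch above relies on the trick described in its comment: the Y plane (width*height bytes) and the interleaved CbCr plane (width*height/2 bytes) are presented to the converter as one single-channel image of height*3/2 rows. Below is a hedged sketch of that trick with the C++ API, assuming the two planes are contiguous and unpadded (a real CVPixelBuffer may have per-plane strides, which the sketch ignores); COLOR_YUV2BGR_NV12 is used here for a truly biplanar layout, whereas the patch keeps the CV_YUV2BGR_YV12 code shown above.

    #include <opencv2/imgproc.hpp>
    #include <cstdint>

    // yuvBase points at width*height Y bytes followed immediately by
    // width*height/2 interleaved CbCr bytes (an assumption, see above).
    cv::Mat biplanar420ToBgr(const uint8_t *yuvBase, int width, int height)
    {
        // "Lie" about the height: one single-channel image of 3/2 the rows
        // covers both planes; the converter treats the first 2/3 as luma and
        // the remainder as chroma.
        cv::Mat yuv(height * 3 / 2, width, CV_8UC1,
                    const_cast<uint8_t *>(yuvBase));

        cv::Mat bgr;
        cv::cvtColor(yuv, bgr, cv::COLOR_YUV2BGR_NV12);
        return bgr;
    }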
@@ -1020,8 +1048,10 @@ -(int) updateImage {
             return round((t.value * mAssetTrack.nominalFrameRate) / double(t.timescale));
         case CV_CAP_PROP_FORMAT:
             return mFormat;
+/*
         case CV_CAP_PROP_MODE:
             return mMode;
+*/
         default:
             break;
     }
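With the CV_CAP_PROP_MODE case commented out, a mode query now falls through to the default branch, while CV_CAP_PROP_FORMAT keeps reporting mFormat (always CV_8UC3 after this patch). A usage sketch through the public VideoCapture API follows; the file name is a placeholder, and the modern cv::CAP_PROP_* names correspond to the legacy CV_CAP_PROP_* constants used in the diff.

    #include <opencv2/videoio.hpp>
    #include <cstdio>

    int main()
    {
        cv::VideoCapture cap("movie.mov", cv::CAP_AVFOUNDATION);
        if (!cap.isOpened())
            return 1;

        // Still handled above: reports the OpenCV type of returned frames.
        std::printf("CAP_PROP_FORMAT: %d\n", static_cast<int>(cap.get(cv::CAP_PROP_FORMAT)));

        // No longer handled above: the query hits the default case, so no
        // meaningful mode value is reported any more.
        std::printf("CAP_PROP_MODE:   %g\n", cap.get(cv::CAP_PROP_MODE));
        return 0;
    }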
@@ -1054,6 +1084,7 @@ -(int) updateImage {
             setupReadingAt(t);
             retval = true;
             break;
+/*
         case CV_CAP_PROP_MODE:
             int mode;
             mode = cvRound(value);
@@ -1076,6 +1107,7 @@ -(int) updateImage {
                 }
             }
             break;
+*/
         default:
             break;
     }
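Symmetrically, the CV_CAP_PROP_MODE case in the property setter is commented out, so a set() call for the mode falls through to the default branch and is ignored, while the position case that ends in setupReadingAt(t) above keeps working. A short usage sketch, again with a placeholder file name.

    #include <opencv2/videoio.hpp>

    int main()
    {
        cv::VideoCapture cap("movie.mov", cv::CAP_AVFOUNDATION);
        if (!cap.isOpened())
            return 1;

        // Seeking still goes through setupReadingAt() as in the hunk above.
        cap.set(cv::CAP_PROP_POS_MSEC, 5000.0);

        // The mode case is commented out, so this request falls through to
        // the default branch and has no effect on the backend.
        cap.set(cv::CAP_PROP_MODE, 1.0);

        cv::Mat frame;
        cap.read(frame);  // frames arrive as 3-channel BGR regardless
        return 0;
    }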