@@ -1107,6 +1107,69 @@ void Mat::push_back(const MatExpr& expr)
1107
1107
push_back (static_cast <Mat>(expr));
1108
1108
}
1109
1109
1110
+ #ifdef CV_CXX_MOVE_SEMANTICS
1111
+
1112
+ inline
1113
+ Mat::Mat (Mat&& m)
1114
+ : flags(m.flags), dims(m.dims), rows(m.rows), cols(m.cols), data(m.data),
1115
+ datastart(m.datastart), dataend(m.dataend), datalimit(m.datalimit), allocator(m.allocator),
1116
+ u(m.u), size(&rows)
1117
+ {
1118
+ if (m.dims <= 2 ) // move new step/size info
1119
+ {
1120
+ step[0 ] = m.step [0 ];
1121
+ step[1 ] = m.step [1 ];
1122
+ }
1123
+ else
1124
+ {
1125
+ CV_DbgAssert (m.step .p != m.step .buf );
1126
+ step.p = m.step .p ;
1127
+ size.p = m.size .p ;
1128
+ m.step .p = m.step .buf ;
1129
+ m.size .p = &m.rows ;
1130
+ }
1131
+ m.flags = MAGIC_VAL; m.dims = m.rows = m.cols = 0 ;
1132
+ m.data = NULL ; m.datastart = NULL ; m.dataend = NULL ; m.datalimit = NULL ;
1133
+ m.allocator = NULL ;
1134
+ m.u = NULL ;
1135
+ }
1136
+
1137
+ inline
1138
+ Mat& Mat::operator = (Mat&& m)
1139
+ {
1140
+ release ();
1141
+ flags = m.flags ; dims = m.dims ; rows = m.rows ; cols = m.cols ; data = m.data ;
1142
+ datastart = m.datastart ; dataend = m.dataend ; datalimit = m.datalimit ; allocator = m.allocator ;
1143
+ u = m.u ;
1144
+ if (step.p != step.buf ) // release self step/size
1145
+ {
1146
+ fastFree (step.p );
1147
+ step.p = step.buf ;
1148
+ size.p = &rows;
1149
+ }
1150
+ if (m.dims <= 2 ) // move new step/size info
1151
+ {
1152
+ step[0 ] = m.step [0 ];
1153
+ step[1 ] = m.step [1 ];
1154
+ }
1155
+ else
1156
+ {
1157
+ CV_DbgAssert (m.step .p != m.step .buf );
1158
+ step.p = m.step .p ;
1159
+ size.p = m.size .p ;
1160
+ m.step .p = m.step .buf ;
1161
+ m.size .p = &m.rows ;
1162
+ }
1163
+ m.flags = MAGIC_VAL; m.dims = m.rows = m.cols = 0 ;
1164
+ m.data = NULL ; m.datastart = NULL ; m.dataend = NULL ; m.datalimit = NULL ;
1165
+ m.allocator = NULL ;
1166
+ m.u = NULL ;
1167
+ return *this ;
1168
+ }
1169
+
1170
+ #endif
1171
+
1172
+
1110
1173
// /////////////////////////// MatSize ////////////////////////////
1111
1174
1112
1175
inline
@@ -1655,6 +1718,57 @@ void Mat_<_Tp>::forEach(const Functor& operation) const {
1655
1718
Mat::forEach<_Tp, Functor>(operation);
1656
1719
}
1657
1720
1721
+ #ifdef CV_CXX_MOVE_SEMANTICS
1722
+
1723
+ template <typename _Tp> inline
1724
+ Mat_<_Tp>::Mat_(Mat_&& m)
1725
+ : Mat(m)
1726
+ {
1727
+ }
1728
+
1729
+ template <typename _Tp> inline
1730
+ Mat_<_Tp>& Mat_<_Tp>::operator = (Mat_&& m)
1731
+ {
1732
+ Mat::operator = (m);
1733
+ return *this ;
1734
+ }
1735
+
1736
+ template <typename _Tp> inline
1737
+ Mat_<_Tp>::Mat_(Mat&& m)
1738
+ : Mat()
1739
+ {
1740
+ flags = (flags & ~CV_MAT_TYPE_MASK) | DataType<_Tp>::type;
1741
+ *this = m;
1742
+ }
1743
+
1744
+ template <typename _Tp> inline
1745
+ Mat_<_Tp>& Mat_<_Tp>::operator = (Mat&& m)
1746
+ {
1747
+ if ( DataType<_Tp>::type == m.type () )
1748
+ {
1749
+ Mat::operator = ((Mat&&)m);
1750
+ return *this ;
1751
+ }
1752
+ if ( DataType<_Tp>::depth == m.depth () )
1753
+ {
1754
+ Mat::operator = ((Mat&&)m.reshape (DataType<_Tp>::channels, m.dims , 0 ));
1755
+ return *this ;
1756
+ }
1757
+ CV_DbgAssert (DataType<_Tp>::channels == m.channels ());
1758
+ m.convertTo (*this , type ());
1759
+ return *this ;
1760
+ }
1761
+
1762
+ template <typename _Tp> inline
1763
+ Mat_<_Tp>::Mat_(MatExpr&& e)
1764
+ : Mat()
1765
+ {
1766
+ flags = (flags & ~CV_MAT_TYPE_MASK) | DataType<_Tp>::type;
1767
+ *this = Mat (e);
1768
+ }
1769
+
1770
+ #endif
1771
+
1658
1772
// /////////////////////////// SparseMat /////////////////////////////
1659
1773
1660
1774
inline
@@ -3419,6 +3533,69 @@ size_t UMat::total() const
3419
3533
return p;
3420
3534
}
3421
3535
3536
+ #ifdef CV_CXX_MOVE_SEMANTICS
3537
+
3538
+ inline
3539
+ UMat::UMat (UMat&& m)
3540
+ : flags(m.flags), dims(m.dims), rows(m.rows), cols(m.cols), allocator(m.allocator),
3541
+ usageFlags(m.usageFlags), u(m.u), offset(m.offset), size(&rows)
3542
+ {
3543
+ if (m.dims <= 2 ) // move new step/size info
3544
+ {
3545
+ step[0 ] = m.step [0 ];
3546
+ step[1 ] = m.step [1 ];
3547
+ }
3548
+ else
3549
+ {
3550
+ CV_DbgAssert (m.step .p != m.step .buf );
3551
+ step.p = m.step .p ;
3552
+ size.p = m.size .p ;
3553
+ m.step .p = m.step .buf ;
3554
+ m.size .p = &m.rows ;
3555
+ }
3556
+ m.flags = MAGIC_VAL; m.dims = m.rows = m.cols = 0 ;
3557
+ m.allocator = NULL ;
3558
+ m.u = NULL ;
3559
+ m.offset = 0 ;
3560
+ }
3561
+
3562
+ inline
3563
+ UMat& UMat::operator = (UMat&& m)
3564
+ {
3565
+ release ();
3566
+ flags = m.flags ; dims = m.dims ; rows = m.rows ; cols = m.cols ;
3567
+ allocator = m.allocator ; usageFlags = m.usageFlags ;
3568
+ u = m.u ;
3569
+ offset = m.offset ;
3570
+ if (step.p != step.buf ) // release self step/size
3571
+ {
3572
+ fastFree (step.p );
3573
+ step.p = step.buf ;
3574
+ size.p = &rows;
3575
+ }
3576
+ if (m.dims <= 2 ) // move new step/size info
3577
+ {
3578
+ step[0 ] = m.step [0 ];
3579
+ step[1 ] = m.step [1 ];
3580
+ }
3581
+ else
3582
+ {
3583
+ CV_DbgAssert (m.step .p != m.step .buf );
3584
+ step.p = m.step .p ;
3585
+ size.p = m.size .p ;
3586
+ m.step .p = m.step .buf ;
3587
+ m.size .p = &m.rows ;
3588
+ }
3589
+ m.flags = MAGIC_VAL; m.dims = m.rows = m.cols = 0 ;
3590
+ m.allocator = NULL ;
3591
+ m.u = NULL ;
3592
+ m.offset = 0 ;
3593
+ return *this ;
3594
+ }
3595
+
3596
+ #endif
3597
+
3598
+
3422
3599
// True when the HOST_COPY_OBSOLETE flag bit is set on this buffer record.
inline bool UMatData::hostCopyObsolete() const
{
    return 0 != (flags & HOST_COPY_OBSOLETE);
}
3423
3600
// True when the DEVICE_COPY_OBSOLETE flag bit is set on this buffer record.
inline bool UMatData::deviceCopyObsolete() const
{
    return 0 != (flags & DEVICE_COPY_OBSOLETE);
}
3424
3601
// True when the DEVICE_MEM_MAPPED flag bit is set on this buffer record.
inline bool UMatData::deviceMemMapped() const
{
    return 0 != (flags & DEVICE_MEM_MAPPED);
}
0 commit comments