@@ -1102,6 +1102,8 @@ static void xilinx_vdma_start_transfer(struct xilinx_dma_chan *chan)
 	struct xilinx_dma_tx_descriptor *desc, *tail_desc;
 	u32 reg, j;
 	struct xilinx_vdma_tx_segment *tail_segment;
+	struct xilinx_vdma_tx_segment *segment, *last = NULL;
+	int i = 0;
 
 	/* This function was invoked with lock held */
 	if (chan->err)
@@ -1121,14 +1123,6 @@ static void xilinx_vdma_start_transfer(struct xilinx_dma_chan *chan)
 	tail_segment = list_last_entry(&tail_desc->segments,
 				       struct xilinx_vdma_tx_segment, node);
 
-	/*
-	 * If hardware is idle, then all descriptors on the running lists are
-	 * done, start new transfers
-	 */
-	if (chan->has_sg)
-		dma_ctrl_write(chan, XILINX_DMA_REG_CURDESC,
-			       desc->async_tx.phys);
-
 	/* Configure the hardware using info in the config structure */
 	if (chan->has_vflip) {
 		reg = dma_read(chan, XILINX_VDMA_REG_ENABLE_VERTICAL_FLIP);
@@ -1145,15 +1139,11 @@ static void xilinx_vdma_start_transfer(struct xilinx_dma_chan *chan)
 	else
 		reg &= ~XILINX_DMA_DMACR_FRAMECNT_EN;
 
-	/*
-	 * With SG, start with circular mode, so that BDs can be fetched.
-	 * In direct register mode, if not parking, enable circular mode
-	 */
-	if (chan->has_sg || !config->park)
-		reg |= XILINX_DMA_DMACR_CIRC_EN;
-
+	/* If not parking, enable circular mode */
 	if (config->park)
 		reg &= ~XILINX_DMA_DMACR_CIRC_EN;
+	else
+		reg |= XILINX_DMA_DMACR_CIRC_EN;
 
 	dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg);
 
@@ -1175,48 +1165,38 @@ static void xilinx_vdma_start_transfer(struct xilinx_dma_chan *chan)
 		return;
 
 	/* Start the transfer */
-	if (chan->has_sg) {
-		dma_ctrl_write(chan, XILINX_DMA_REG_TAILDESC,
-			       tail_segment->phys);
-		list_splice_tail_init(&chan->pending_list, &chan->active_list);
-		chan->desc_pendingcount = 0;
-	} else {
-		struct xilinx_vdma_tx_segment *segment, *last = NULL;
-		int i = 0;
-
-		if (chan->desc_submitcount < chan->num_frms)
-			i = chan->desc_submitcount;
-
-		list_for_each_entry(segment, &desc->segments, node) {
-			if (chan->ext_addr)
-				vdma_desc_write_64(chan,
-					XILINX_VDMA_REG_START_ADDRESS_64(i++),
-					segment->hw.buf_addr,
-					segment->hw.buf_addr_msb);
-			else
-				vdma_desc_write(chan,
+	if (chan->desc_submitcount < chan->num_frms)
+		i = chan->desc_submitcount;
+
+	list_for_each_entry(segment, &desc->segments, node) {
+		if (chan->ext_addr)
+			vdma_desc_write_64(chan,
+					   XILINX_VDMA_REG_START_ADDRESS_64(i++),
+					   segment->hw.buf_addr,
+					   segment->hw.buf_addr_msb);
+		else
+			vdma_desc_write(chan,
 					XILINX_VDMA_REG_START_ADDRESS(i++),
 					segment->hw.buf_addr);
 
-			last = segment;
-		}
-
-		if (!last)
-			return;
+		last = segment;
+	}
 
-		/* HW expects these parameters to be same for one transaction */
-		vdma_desc_write(chan, XILINX_DMA_REG_HSIZE, last->hw.hsize);
-		vdma_desc_write(chan, XILINX_DMA_REG_FRMDLY_STRIDE,
-				last->hw.stride);
-		vdma_desc_write(chan, XILINX_DMA_REG_VSIZE, last->hw.vsize);
+	if (!last)
+		return;
 
-		chan->desc_submitcount++;
-		chan->desc_pendingcount--;
-		list_del(&desc->node);
-		list_add_tail(&desc->node, &chan->active_list);
-		if (chan->desc_submitcount == chan->num_frms)
-			chan->desc_submitcount = 0;
-	}
+	/* HW expects these parameters to be same for one transaction */
+	vdma_desc_write(chan, XILINX_DMA_REG_HSIZE, last->hw.hsize);
+	vdma_desc_write(chan, XILINX_DMA_REG_FRMDLY_STRIDE,
+			last->hw.stride);
+	vdma_desc_write(chan, XILINX_DMA_REG_VSIZE, last->hw.vsize);
+
+	chan->desc_submitcount++;
+	chan->desc_pendingcount--;
+	list_del(&desc->node);
+	list_add_tail(&desc->node, &chan->active_list);
+	if (chan->desc_submitcount == chan->num_frms)
+		chan->desc_submitcount = 0;
 
 	chan->idle = false;
 }