@@ -1756,55 +1756,56 @@ static enum dma_status edma_tx_status(struct dma_chan *chan,
 	return ret;
 }
 
-static void __init edma_chan_init(struct edma_cc *ecc, struct dma_device *dma,
-				  struct edma_chan *echans)
+#define EDMA_DMA_BUSWIDTHS	(BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) | \
+				 BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) | \
+				 BIT(DMA_SLAVE_BUSWIDTH_3_BYTES) | \
+				 BIT(DMA_SLAVE_BUSWIDTH_4_BYTES))
+
+static void edma_dma_init(struct edma_cc *ecc)
 {
+	struct dma_device *ddev = &ecc->dma_slave;
 	int i, j;
 
+	dma_cap_zero(ddev->cap_mask);
+	dma_cap_set(DMA_SLAVE, ddev->cap_mask);
+	dma_cap_set(DMA_CYCLIC, ddev->cap_mask);
+	dma_cap_set(DMA_MEMCPY, ddev->cap_mask);
+
+	ddev->device_prep_slave_sg = edma_prep_slave_sg;
+	ddev->device_prep_dma_cyclic = edma_prep_dma_cyclic;
+	ddev->device_prep_dma_memcpy = edma_prep_dma_memcpy;
+	ddev->device_alloc_chan_resources = edma_alloc_chan_resources;
+	ddev->device_free_chan_resources = edma_free_chan_resources;
+	ddev->device_issue_pending = edma_issue_pending;
+	ddev->device_tx_status = edma_tx_status;
+	ddev->device_config = edma_slave_config;
+	ddev->device_pause = edma_dma_pause;
+	ddev->device_resume = edma_dma_resume;
+	ddev->device_terminate_all = edma_terminate_all;
+
+	ddev->src_addr_widths = EDMA_DMA_BUSWIDTHS;
+	ddev->dst_addr_widths = EDMA_DMA_BUSWIDTHS;
+	ddev->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
+	ddev->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
+
+	ddev->dev = ecc->dev;
+
+	INIT_LIST_HEAD(&ddev->channels);
+
 	for (i = 0; i < ecc->num_channels; i++) {
-		struct edma_chan *echan = &echans[i];
+		struct edma_chan *echan = &ecc->slave_chans[i];
 		echan->ch_num = EDMA_CTLR_CHAN(ecc->id, i);
 		echan->ecc = ecc;
 		echan->vchan.desc_free = edma_desc_free;
 
-		vchan_init(&echan->vchan, dma);
+		vchan_init(&echan->vchan, ddev);
 
 		INIT_LIST_HEAD(&echan->node);
 		for (j = 0; j < EDMA_MAX_SLOTS; j++)
 			echan->slot[j] = -1;
 	}
 }
 
-#define EDMA_DMA_BUSWIDTHS	(BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) | \
-				 BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) | \
-				 BIT(DMA_SLAVE_BUSWIDTH_3_BYTES) | \
-				 BIT(DMA_SLAVE_BUSWIDTH_4_BYTES))
-
-static void edma_dma_init(struct edma_cc *ecc, struct dma_device *dma,
-			  struct device *dev)
-{
-	dma->device_prep_slave_sg = edma_prep_slave_sg;
-	dma->device_prep_dma_cyclic = edma_prep_dma_cyclic;
-	dma->device_prep_dma_memcpy = edma_prep_dma_memcpy;
-	dma->device_alloc_chan_resources = edma_alloc_chan_resources;
-	dma->device_free_chan_resources = edma_free_chan_resources;
-	dma->device_issue_pending = edma_issue_pending;
-	dma->device_tx_status = edma_tx_status;
-	dma->device_config = edma_slave_config;
-	dma->device_pause = edma_dma_pause;
-	dma->device_resume = edma_dma_resume;
-	dma->device_terminate_all = edma_terminate_all;
-
-	dma->src_addr_widths = EDMA_DMA_BUSWIDTHS;
-	dma->dst_addr_widths = EDMA_DMA_BUSWIDTHS;
-	dma->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
-	dma->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
-
-	dma->dev = dev;
-
-	INIT_LIST_HEAD(&dma->channels);
-}
-
 static int edma_setup_from_hw(struct device *dev, struct edma_soc_info *pdata,
 			      struct edma_cc *ecc)
 {
@@ -2137,14 +2138,8 @@ static int edma_probe(struct platform_device *pdev)
 	}
 	ecc->info = info;
 
-	dma_cap_zero(ecc->dma_slave.cap_mask);
-	dma_cap_set(DMA_SLAVE, ecc->dma_slave.cap_mask);
-	dma_cap_set(DMA_CYCLIC, ecc->dma_slave.cap_mask);
-	dma_cap_set(DMA_MEMCPY, ecc->dma_slave.cap_mask);
-
-	edma_dma_init(ecc, &ecc->dma_slave, dev);
-
-	edma_chan_init(ecc, &ecc->dma_slave, ecc->slave_chans);
+	/* Init the dma device and channels */
+	edma_dma_init(ecc);
 
 	for (i = 0; i < ecc->num_channels; i++) {
 		/* Assign all channels to the default queue */
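
For reference, a minimal and hypothetical dmaengine client sketch (not part of this patch) showing how the callbacks consolidated into edma_dma_init() are reached from a consumer: dmaengine_slave_config() lands in edma_slave_config(), dmaengine_prep_slave_single() in edma_prep_slave_sg(), and dma_async_issue_pending() in edma_issue_pending(). The device pointer, the "rx" channel name, the FIFO address and the buffer are placeholders, and the chosen bus width must be one advertised by EDMA_DMA_BUSWIDTHS.

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>

/* Hypothetical consumer: receive 'len' bytes from a device FIFO via EDMA. */
static int example_edma_rx(struct device *dev, dma_addr_t fifo_addr,
			   dma_addr_t buf_dma, size_t len)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_DEV_TO_MEM,
		.src_addr	= fifo_addr,
		/* must be one of the widths in EDMA_DMA_BUSWIDTHS */
		.src_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
		.src_maxburst	= 1,
	};
	struct dma_async_tx_descriptor *desc;
	struct dma_chan *chan;
	int ret;

	chan = dma_request_chan(dev, "rx");	/* "rx" is a placeholder name */
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	ret = dmaengine_slave_config(chan, &cfg);	/* -> edma_slave_config() */
	if (ret)
		goto out;

	desc = dmaengine_prep_slave_single(chan, buf_dma, len, DMA_DEV_TO_MEM,
					   DMA_PREP_INTERRUPT);	/* -> edma_prep_slave_sg() */
	if (!desc) {
		ret = -ENOMEM;
		goto out;
	}

	dmaengine_submit(desc);
	dma_async_issue_pending(chan);	/* -> edma_issue_pending() */

	/* A real driver would wait for the completion callback here. */
out:
	dma_release_channel(chan);
	return ret;
}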