@@ -632,7 +632,7 @@ static bool skd_preop_sg_list(struct skd_device *skdev,
 	 * Map scatterlist to PCI bus addresses.
 	 * Note PCI might change the number of entries.
 	 */
-	n_sg = pci_map_sg(skdev->pdev, sgl, n_sg, skreq->data_dir);
+	n_sg = dma_map_sg(&skdev->pdev->dev, sgl, n_sg, skreq->data_dir);
 	if (n_sg <= 0)
 		return false;
 
@@ -682,7 +682,8 @@ static void skd_postop_sg_list(struct skd_device *skdev,
 	skreq->sksg_list[skreq->n_sg - 1].next_desc_ptr =
 		skreq->sksg_dma_address +
 		((skreq->n_sg) * sizeof(struct fit_sg_descriptor));
-	pci_unmap_sg(skdev->pdev, &skreq->sg[0], skreq->n_sg, skreq->data_dir);
+	dma_unmap_sg(&skdev->pdev->dev, &skreq->sg[0], skreq->n_sg,
+		     skreq->data_dir);
 }
 
 /*
@@ -2632,8 +2633,8 @@ static int skd_cons_skcomp(struct skd_device *skdev)
 		"comp pci_alloc, total bytes %zd entries %d\n",
 		SKD_SKCOMP_SIZE, SKD_N_COMPLETION_ENTRY);
 
-	skcomp = pci_zalloc_consistent(skdev->pdev, SKD_SKCOMP_SIZE,
-				       &skdev->cq_dma_address);
+	skcomp = dma_zalloc_coherent(&skdev->pdev->dev, SKD_SKCOMP_SIZE,
+				     &skdev->cq_dma_address, GFP_KERNEL);
 
 	if (skcomp == NULL) {
 		rc = -ENOMEM;
@@ -2674,10 +2675,10 @@ static int skd_cons_skmsg(struct skd_device *skdev)
 
 		skmsg->id = i + SKD_ID_FIT_MSG;
 
-		skmsg->msg_buf = pci_alloc_consistent(skdev->pdev,
-						      SKD_N_FITMSG_BYTES,
-						      &skmsg->mb_dma_address);
-
+		skmsg->msg_buf = dma_alloc_coherent(&skdev->pdev->dev,
+						    SKD_N_FITMSG_BYTES,
+						    &skmsg->mb_dma_address,
+						    GFP_KERNEL);
 		if (skmsg->msg_buf == NULL) {
 			rc = -ENOMEM;
 			goto err_out;
@@ -2971,8 +2972,8 @@ static struct skd_device *skd_construct(struct pci_dev *pdev)
 static void skd_free_skcomp(struct skd_device *skdev)
 {
 	if (skdev->skcomp_table)
-		pci_free_consistent(skdev->pdev, SKD_SKCOMP_SIZE,
-				    skdev->skcomp_table, skdev->cq_dma_address);
+		dma_free_coherent(&skdev->pdev->dev, SKD_SKCOMP_SIZE,
+				  skdev->skcomp_table, skdev->cq_dma_address);
 
 	skdev->skcomp_table = NULL;
 	skdev->cq_dma_address = 0;
@@ -2991,8 +2992,8 @@ static void skd_free_skmsg(struct skd_device *skdev)
 		skmsg = &skdev->skmsg_table[i];
 
 		if (skmsg->msg_buf != NULL) {
-			pci_free_consistent(skdev->pdev, SKD_N_FITMSG_BYTES,
-					    skmsg->msg_buf,
+			dma_free_coherent(&skdev->pdev->dev, SKD_N_FITMSG_BYTES,
+					  skmsg->msg_buf,
 					    skmsg->mb_dma_address);
 		}
 		skmsg->msg_buf = NULL;
@@ -3172,18 +3173,12 @@ static int skd_pci_probe(struct pci_dev *pdev, const struct pci_device_id *ent)
 	rc = pci_request_regions(pdev, DRV_NAME);
 	if (rc)
 		goto err_out;
-	rc = pci_set_dma_mask(pdev, DMA_BIT_MASK(64));
-	if (!rc) {
-		if (pci_set_consistent_dma_mask(pdev, DMA_BIT_MASK(64))) {
-			dev_err(&pdev->dev, "consistent DMA mask error %d\n",
-				rc);
-		}
-	} else {
-		rc = pci_set_dma_mask(pdev, DMA_BIT_MASK(32));
-		if (rc) {
-			dev_err(&pdev->dev, "DMA mask error %d\n", rc);
-			goto err_out_regions;
-		}
+	rc = dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(64));
+	if (rc)
+		dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(32));
+	if (rc) {
+		dev_err(&pdev->dev, "DMA mask error %d\n", rc);
+		goto err_out_regions;
 	}
 
 	if (!skd_major) {
@@ -3367,20 +3362,12 @@ static int skd_pci_resume(struct pci_dev *pdev)
 	rc = pci_request_regions(pdev, DRV_NAME);
 	if (rc)
 		goto err_out;
-	rc = pci_set_dma_mask(pdev, DMA_BIT_MASK(64));
-	if (!rc) {
-		if (pci_set_consistent_dma_mask(pdev, DMA_BIT_MASK(64))) {
-
-			dev_err(&pdev->dev, "consistent DMA mask error %d\n",
-				rc);
-		}
-	} else {
-		rc = pci_set_dma_mask(pdev, DMA_BIT_MASK(32));
-		if (rc) {
-
-			dev_err(&pdev->dev, "DMA mask error %d\n", rc);
-			goto err_out_regions;
-		}
+	rc = dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(64));
+	if (rc)
+		dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(32));
+	if (rc) {
+		dev_err(&pdev->dev, "DMA mask error %d\n", rc);
+		goto err_out_regions;
 	}
 
 	pci_set_master(pdev);
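The hunks above all follow the same idiom: pass the underlying &pdev->dev to the generic dma_* helpers instead of the pci_* DMA wrappers, and supply an explicit GFP flag to the coherent allocators. Below is a minimal sketch of that idiom for a hypothetical PCI driver; the function and buffer names (example_setup_dma, ring, ring_dma) are illustrative and not part of the skd driver or this patch.

/*
 * Sketch only: probe-time DMA setup using the generic DMA API.
 * Assumes a hypothetical driver allocating one coherent ring buffer.
 */
#include <linux/dma-mapping.h>
#include <linux/pci.h>

static int example_setup_dma(struct pci_dev *pdev, size_t ring_bytes,
			     void **ring, dma_addr_t *ring_dma)
{
	int rc;

	/* Prefer a 64-bit mask; fall back to 32-bit, keeping the result in rc. */
	rc = dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(64));
	if (rc)
		rc = dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(32));
	if (rc)
		return rc;

	/*
	 * dma_alloc_coherent() takes the struct device and an explicit GFP
	 * flag, unlike the pci_alloc_consistent() wrapper it replaces.
	 */
	*ring = dma_alloc_coherent(&pdev->dev, ring_bytes, ring_dma, GFP_KERNEL);
	if (!*ring)
		return -ENOMEM;

	return 0;
}

The matching teardown would call dma_free_coherent(&pdev->dev, ring_bytes, *ring, *ring_dma), mirroring the skd_free_skcomp()/skd_free_skmsg() changes above.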