@@ -252,7 +252,7 @@ enum atc_status {
 
 /**
  * struct at_dma_chan - internal representation of an Atmel HDMAC channel
- * @chan_common: common dmaengine channel object members
+ * @dma_chan: common dmaengine channel object members
  * @device: parent device
  * @ch_regs: memory mapped register base
  * @mask: channel index in a mask
@@ -272,7 +272,7 @@ enum atc_status {
  * @free_list: list of descriptors usable by the channel
  */
 struct at_dma_chan {
-	struct dma_chan		chan_common;
+	struct dma_chan		dma_chan;
 	struct at_dma		*device;
 	void __iomem		*ch_regs;
 	u8			mask;
@@ -300,7 +300,7 @@ struct at_dma_chan {
 
 static inline struct at_dma_chan *to_at_dma_chan(struct dma_chan *dchan)
 {
-	return container_of(dchan, struct at_dma_chan, chan_common);
+	return container_of(dchan, struct at_dma_chan, dma_chan);
 }
 
 /*
@@ -382,15 +382,15 @@ static struct device *chan2dev(struct dma_chan *chan)
 #if defined(VERBOSE_DEBUG)
 static void vdbg_dump_regs(struct at_dma_chan *atchan)
 {
-	struct at_dma	*atdma = to_at_dma(atchan->chan_common.device);
+	struct at_dma	*atdma = to_at_dma(atchan->dma_chan.device);
 
-	dev_err(chan2dev(&atchan->chan_common),
+	dev_err(chan2dev(&atchan->dma_chan),
 		" channel %d : imr = 0x%x, chsr = 0x%x\n",
-		atchan->chan_common.chan_id,
+		atchan->dma_chan.chan_id,
 		dma_readl(atdma, EBCIMR),
 		dma_readl(atdma, CHSR));
 
-	dev_err(chan2dev(&atchan->chan_common),
+	dev_err(chan2dev(&atchan->dma_chan),
 		" channel: s0x%x d0x%x ctrl0x%x:0x%x cfg0x%x l0x%x\n",
 		channel_readl(atchan, SADDR),
 		channel_readl(atchan, DADDR),
@@ -405,7 +405,7 @@ static void vdbg_dump_regs(struct at_dma_chan *atchan) {}
 
 static void atc_dump_lli(struct at_dma_chan *atchan, struct at_lli *lli)
 {
-	dev_crit(chan2dev(&atchan->chan_common),
+	dev_crit(chan2dev(&atchan->dma_chan),
 		 "desc: s%pad d%pad ctrl0x%x:0x%x l%pad\n",
 		 &lli->saddr, &lli->daddr,
 		 lli->ctrla, lli->ctrlb, &lli->dscr);
@@ -442,7 +442,7 @@ static void atc_disable_chan_irq(struct at_dma *atdma, int chan_id)
  */
 static inline int atc_chan_is_enabled(struct at_dma_chan *atchan)
 {
-	struct at_dma	*atdma = to_at_dma(atchan->chan_common.device);
+	struct at_dma	*atdma = to_at_dma(atchan->dma_chan.device);
 
 	return !!(dma_readl(atdma, CHSR) & atchan->mask);
 }
@@ -603,16 +603,16 @@ static struct at_desc *atc_desc_get(struct at_dma_chan *atchan)
 			ret = desc;
 			break;
 		}
-		dev_dbg(chan2dev(&atchan->chan_common),
+		dev_dbg(chan2dev(&atchan->dma_chan),
 				"desc %p not ACKed\n", desc);
 	}
 	spin_unlock_irqrestore(&atchan->lock, flags);
-	dev_vdbg(chan2dev(&atchan->chan_common),
+	dev_vdbg(chan2dev(&atchan->dma_chan),
 		"scanned %u descriptors on freelist\n", i);
 
 	/* no more descriptor available in initial pool: create one more */
 	if (!ret)
-		ret = atc_alloc_descriptor(&atchan->chan_common, GFP_NOWAIT);
+		ret = atc_alloc_descriptor(&atchan->dma_chan, GFP_NOWAIT);
 
 	return ret;
 }
@@ -630,11 +630,11 @@ static void atc_desc_put(struct at_dma_chan *atchan, struct at_desc *desc)
 
 	spin_lock_irqsave(&atchan->lock, flags);
 	list_for_each_entry(child, &desc->tx_list, desc_node)
-		dev_vdbg(chan2dev(&atchan->chan_common),
+		dev_vdbg(chan2dev(&atchan->dma_chan),
 				"moving child desc %p to freelist\n",
 				child);
 	list_splice_init(&desc->tx_list, &atchan->free_list);
-	dev_vdbg(chan2dev(&atchan->chan_common),
+	dev_vdbg(chan2dev(&atchan->dma_chan),
 		 "moving desc %p to freelist\n", desc);
 	list_add(&desc->desc_node, &atchan->free_list);
 	spin_unlock_irqrestore(&atchan->lock, flags);
@@ -673,13 +673,13 @@ static void atc_desc_chain(struct at_desc **first, struct at_desc **prev,
  */
 static void atc_dostart(struct at_dma_chan *atchan, struct at_desc *first)
 {
-	struct at_dma	*atdma = to_at_dma(atchan->chan_common.device);
+	struct at_dma	*atdma = to_at_dma(atchan->dma_chan.device);
 
 	/* ASSERT: channel is idle */
 	if (atc_chan_is_enabled(atchan)) {
-		dev_err(chan2dev(&atchan->chan_common),
+		dev_err(chan2dev(&atchan->dma_chan),
 			"BUG: Attempted to start non-idle channel\n");
-		dev_err(chan2dev(&atchan->chan_common),
+		dev_err(chan2dev(&atchan->dma_chan),
 			" channel: s0x%x d0x%x ctrl0x%x:0x%x l0x%x\n",
 			channel_readl(atchan, SADDR),
 			channel_readl(atchan, DADDR),
@@ -905,10 +905,10 @@ static void
 atc_chain_complete(struct at_dma_chan *atchan, struct at_desc *desc)
 {
 	struct dma_async_tx_descriptor	*txd = &desc->txd;
-	struct at_dma			*atdma = to_at_dma(atchan->chan_common.device);
+	struct at_dma			*atdma = to_at_dma(atchan->dma_chan.device);
 	unsigned long flags;
 
-	dev_vdbg(chan2dev(&atchan->chan_common),
+	dev_vdbg(chan2dev(&atchan->dma_chan),
 		"descriptor %u complete\n", txd->cookie);
 
 	spin_lock_irqsave(&atchan->lock, flags);
@@ -951,7 +951,7 @@ static void atc_advance_work(struct at_dma_chan *atchan)
 	struct at_desc *desc;
 	unsigned long flags;
 
-	dev_vdbg(chan2dev(&atchan->chan_common), "advance_work\n");
+	dev_vdbg(chan2dev(&atchan->dma_chan), "advance_work\n");
 
 	spin_lock_irqsave(&atchan->lock, flags);
 	if (atc_chan_is_enabled(atchan) || list_empty(&atchan->active_list))
@@ -1010,9 +1010,9 @@ static void atc_handle_error(struct at_dma_chan *atchan)
 	 * controller flagged an error instead of scribbling over
 	 * random memory locations.
 	 */
-	dev_crit(chan2dev(&atchan->chan_common),
+	dev_crit(chan2dev(&atchan->dma_chan),
 			"Bad descriptor submitted for DMA!\n");
-	dev_crit(chan2dev(&atchan->chan_common),
+	dev_crit(chan2dev(&atchan->dma_chan),
 			" cookie: %d\n", bad_desc->txd.cookie);
 	atc_dump_lli(atchan, &bad_desc->lli);
 	list_for_each_entry(child, &bad_desc->tx_list, desc_node)
@@ -1031,7 +1031,7 @@ static void atc_handle_cyclic(struct at_dma_chan *atchan)
 	struct at_desc			*first = atc_first_active(atchan);
 	struct dma_async_tx_descriptor	*txd = &first->txd;
 
-	dev_vdbg(chan2dev(&atchan->chan_common),
+	dev_vdbg(chan2dev(&atchan->dma_chan),
 			"new cyclic period llp 0x%08x\n",
 			channel_readl(atchan, DSCR));
 
@@ -1825,7 +1825,7 @@ static int atc_pause(struct dma_chan *chan)
 {
 	struct at_dma_chan	*atchan = to_at_dma_chan(chan);
 	struct at_dma		*atdma = to_at_dma(chan->device);
-	int			chan_id = atchan->chan_common.chan_id;
+	int			chan_id = atchan->dma_chan.chan_id;
 	unsigned long		flags;
 
 	dev_vdbg(chan2dev(chan), "%s\n", __func__);
@@ -1844,7 +1844,7 @@ static int atc_resume(struct dma_chan *chan)
 {
 	struct at_dma_chan	*atchan = to_at_dma_chan(chan);
 	struct at_dma		*atdma = to_at_dma(chan->device);
-	int			chan_id = atchan->chan_common.chan_id;
+	int			chan_id = atchan->dma_chan.chan_id;
 	unsigned long		flags;
 
 	dev_vdbg(chan2dev(chan), "%s\n", __func__);
@@ -1866,7 +1866,7 @@ static int atc_terminate_all(struct dma_chan *chan)
 {
 	struct at_dma_chan	*atchan = to_at_dma_chan(chan);
 	struct at_dma		*atdma = to_at_dma(chan->device);
-	int			chan_id = atchan->chan_common.chan_id;
+	int			chan_id = atchan->dma_chan.chan_id;
 	unsigned long		flags;
 
 	dev_vdbg(chan2dev(chan), "%s\n", __func__);
@@ -2305,9 +2305,9 @@ static int __init at_dma_probe(struct platform_device *pdev)
 
 		atchan->mem_if = AT_DMA_MEM_IF;
 		atchan->per_if = AT_DMA_PER_IF;
-		atchan->chan_common.device = &atdma->dma_device;
-		dma_cookie_init(&atchan->chan_common);
-		list_add_tail(&atchan->chan_common.device_node,
+		atchan->dma_chan.device = &atdma->dma_device;
+		dma_cookie_init(&atchan->dma_chan);
+		list_add_tail(&atchan->dma_chan.device_node,
 				&atdma->dma_device.channels);
 
 		atchan->ch_regs = atdma->regs + ch_regs(i);
@@ -2455,7 +2455,7 @@ static int at_dma_prepare(struct device *dev)
 
 static void atc_suspend_cyclic(struct at_dma_chan *atchan)
 {
-	struct dma_chan	*chan = &atchan->chan_common;
+	struct dma_chan	*chan = &atchan->dma_chan;
 
 	/* Channel should be paused by user
 	 * do it anyway even if it is not done already */
@@ -2496,7 +2496,7 @@ static int at_dma_suspend_noirq(struct device *dev)
 
 static void atc_resume_cyclic(struct at_dma_chan *atchan)
 {
-	struct at_dma	*atdma = to_at_dma(atchan->chan_common.device);
+	struct at_dma	*atdma = to_at_dma(atchan->dma_chan.device);
 
 	/* restore channel status for cyclic descriptors list:
 	 * next descriptor in the cyclic list at the time of suspend */
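
Aside: the rename is purely mechanical, but every hunk leans on the same container_of() pattern, with to_at_dma_chan() recovering the wrapper structure from the embedded struct dma_chan member. Below is a minimal, self-contained userspace sketch of that pattern; the struct bodies and the reduced container_of() macro are simplified stand-ins for illustration, not the real dmaengine definitions.

#include <stddef.h>
#include <stdio.h>

/* Simplified stand-ins for the kernel types (illustration only). */
struct dma_chan {
	int chan_id;
};

struct at_dma_chan {
	struct dma_chan	dma_chan;	/* renamed from chan_common */
	unsigned char	mask;
};

/* container_of() reduced to its essence: subtract the member offset. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

static inline struct at_dma_chan *to_at_dma_chan(struct dma_chan *dchan)
{
	return container_of(dchan, struct at_dma_chan, dma_chan);
}

int main(void)
{
	struct at_dma_chan atchan = {
		.dma_chan = { .chan_id = 3 },
		.mask = 1 << 3,
	};
	/* The framework only ever hands back the embedded dma_chan pointer. */
	struct dma_chan *dchan = &atchan.dma_chan;

	/* Recover the enclosing at_dma_chan from the embedded member. */
	struct at_dma_chan *back = to_at_dma_chan(dchan);

	printf("chan_id=%d mask=0x%x same=%d\n",
	       back->dma_chan.chan_id, (unsigned)back->mask, back == &atchan);
	return 0;
}

Because the member offset is resolved at compile time, the accessor works the same no matter what the member is called, which is why the patch only touches the identifier and never the surrounding logic.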