@@ -92,6 +92,12 @@ struct dma_mcux_channel_transfer_edma_settings {
     enum dma_channel_direction direction;
     edma_transfer_type_t transfer_type;
     bool valid;
+    /* Loop (cyclic) mode or not */
+    bool cyclic;
+    /* These two fields are only used in cyclic mode. edma_handle_t has similar
+     * members, but those are reserved for the dynamic scatter/gather mode. */
+    volatile int8_t write_idx;   /* Next empty TCD index usable for a transfer */
+    volatile uint8_t empty_tcds; /* Number of empty (writable) TCDs in the pool */
 };
 
 
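
The two counters added above implement simple ring-buffer bookkeeping over the statically allocated TCD pool: write_idx is the next free slot and empty_tcds the number of free slots. A minimal, self-contained model of that invariant is sketched below, assuming a pool of CONFIG_DMA_TCD_QUEUE_SIZE entries; the type and function names are illustrative only and not part of the driver.

#include <stdint.h>

#define TCD_QUEUE_SIZE 4 /* stand-in for CONFIG_DMA_TCD_QUEUE_SIZE */

struct tcd_ring {
    volatile int8_t write_idx;   /* next empty slot in the pool */
    volatile uint8_t empty_tcds; /* free slots left */
};

/* Called when a new transfer is appended; returns -1 when the ring is full */
static int tcd_ring_put(struct tcd_ring *r)
{
    if (r->empty_tcds == 0) {
        return -1;
    }
    r->write_idx = (r->write_idx + 1) % TCD_QUEUE_SIZE;
    r->empty_tcds--;
    return 0;
}

/* Called from the completion interrupt: one finished major loop frees a slot */
static void tcd_ring_complete(struct tcd_ring *r)
{
    r->empty_tcds++;
}
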
@@ -181,7 +187,13 @@ static void nxp_edma_callback(edma_handle_t *handle, void *param, bool transferD
 
     uint32_t channel = dma_mcux_edma_remove_channel_gap(data->dev, handle->channel);
 
-    if (transferDone) {
+    if (data->transfer_settings.cyclic) {
+        /* Each major loop completion frees one TCD in the pool; in loop
+         * mode the channel is considered always busy. */
+        data->transfer_settings.empty_tcds++;
+        data->busy = 1;
+        ret = DMA_STATUS_COMPLETE;
+    } else if (transferDone) {
         /* DMA is no longer busy when there are no remaining TCDs to transfer */
         data->busy = (handle->tcdPool != NULL) && (handle->tcdUsed > 0);
         ret = DMA_STATUS_COMPLETE;
@@ -316,6 +328,7 @@ static int dma_mcux_edma_configure(const struct device *dev, uint32_t channel,
     data->transfer_settings.direction = config->channel_direction;
     data->transfer_settings.transfer_type = transfer_type;
     data->transfer_settings.valid = true;
+    data->transfer_settings.cyclic = config->cyclic;
 
 
     /* Lock and page in the channel configuration */
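
For context, a client opts into this new path through the standard Zephyr dma_config() call by setting the cyclic flag, with scatter/gather enabled on the blocks so the driver takes the TCD-pool branch. The sketch below assumes a ping-pong, peripheral-to-memory setup; the buffer names, sizes, and peripheral address are hypothetical.

#include <zephyr/drivers/dma.h>

static uint32_t buf_a[256], buf_b[256];
static struct dma_block_config blk_a, blk_b;

/* Hypothetical helper: configure one channel of dma_dev in loop mode */
static int start_cyclic_rx(const struct device *dma_dev, uint32_t chan,
                           uint32_t periph_addr)
{
    struct dma_config cfg = {0};

    blk_a.source_address = periph_addr;
    blk_a.dest_address = (uint32_t)buf_a;
    blk_a.block_size = sizeof(buf_a);
    blk_a.dest_scatter_en = 1; /* needed so the driver uses the TCD pool */
    blk_a.next_block = &blk_b;

    blk_b.source_address = periph_addr;
    blk_b.dest_address = (uint32_t)buf_b;
    blk_b.block_size = sizeof(buf_b);
    blk_b.dest_scatter_en = 1;
    blk_b.next_block = NULL;

    cfg.channel_direction = PERIPHERAL_TO_MEMORY;
    cfg.source_data_size = 4;
    cfg.dest_data_size = 4;
    cfg.source_burst_length = 4;
    cfg.block_count = 2;
    cfg.head_block = &blk_a;
    cfg.cyclic = 1; /* exercises the new loop-mode path */
    /* A real client would also install cfg.dma_callback / cfg.user_data. */

    return dma_config(dma_dev, chan, &cfg);
}
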
@@ -358,26 +371,88 @@ static int dma_mcux_edma_configure(const struct device *dev, uint32_t channel,
     EDMA_EnableChannelInterrupts(DEV_BASE(dev), hw_channel, kEDMA_ErrorInterruptEnable);
 
     if (block_config->source_gather_en || block_config->dest_scatter_en) {
-        EDMA_InstallTCDMemory(p_handle, DEV_CFG(dev)->tcdpool[channel],
-            CONFIG_DMA_TCD_QUEUE_SIZE);
-        while (block_config != NULL) {
-            EDMA_PrepareTransfer(
-                &(data->transferConfig),
-                (void *)block_config->source_address,
-                config->source_data_size,
-                (void *)block_config->dest_address,
-                config->dest_data_size,
-                config->source_burst_length,
-                block_config->block_size, transfer_type);
-
-            const status_t submit_status =
-                EDMA_SubmitTransfer(p_handle, &(data->transferConfig));
-            if (submit_status != kStatus_Success) {
-                LOG_ERR("Error submitting EDMA Transfer: 0x%x", submit_status);
-                ret = -EFAULT;
-            }
-            block_config = block_config->next_block;
-        }
+        if (config->cyclic) {
+            /* Loop (cyclic) mode: the driver manages the TCD pool itself */
+            data->transfer_settings.write_idx = 0;
+            data->transfer_settings.empty_tcds = CONFIG_DMA_TCD_QUEUE_SIZE;
+
+            /* Prepare a template transfer config from the first block */
+            EDMA_PrepareTransfer(
+                &data->transferConfig,
+                (void *)block_config->source_address,
+                config->source_data_size,
+                (void *)block_config->dest_address,
+                config->dest_data_size,
+                config->source_burst_length,
+                block_config->block_size,
+                transfer_type);
+
+            /* Init all TCDs from the transfer config and link them into a loop */
+            for (int i = 0; i < CONFIG_DMA_TCD_QUEUE_SIZE; i++) {
+                /* Link each TCD to the next pool entry so that the chain wraps
+                 * around and the transfers form a ring. */
+                EDMA_TcdSetTransferConfig(&DEV_CFG(dev)->tcdpool[channel][i],
+                    &data->transferConfig,
+                    &DEV_CFG(dev)->tcdpool[channel][(i + 1) % CONFIG_DMA_TCD_QUEUE_SIZE]);
+
+                /* In some SDK driver versions the major interrupt is not enabled by default */
+                EDMA_TcdEnableInterrupts(&DEV_CFG(dev)->tcdpool[channel][i],
+                    kEDMA_MajorInterruptEnable);
+            }
+
+            /* Load the valid transfers from the block list */
+            while (block_config != NULL &&
+                   data->transfer_settings.write_idx < CONFIG_DMA_TCD_QUEUE_SIZE) {
+                /* Fill the next free TCD with this block's parameters */
+                DEV_CFG(dev)->tcdpool[channel][data->transfer_settings.write_idx].SADDR =
+                    block_config->source_address;
+                DEV_CFG(dev)->tcdpool[channel][data->transfer_settings.write_idx].DADDR =
+                    block_config->dest_address;
+                DEV_CFG(dev)->tcdpool[channel][data->transfer_settings.write_idx].BITER =
+                    block_config->block_size / config->source_data_size;
+                DEV_CFG(dev)->tcdpool[channel][data->transfer_settings.write_idx].CITER =
+                    block_config->block_size / config->source_data_size;
+                /* The last block enables auto stop (DREQ) so the channel halts
+                 * once all queued transfers have been consumed. */
+                if (block_config->next_block == NULL) {
+                    DEV_CFG(dev)->tcdpool[channel][data->transfer_settings.write_idx].CSR |= DMA_CSR_DREQ(1U);
+                } else {
+                    DEV_CFG(dev)->tcdpool[channel][data->transfer_settings.write_idx].CSR &= ~DMA_CSR_DREQ(1U);
+                }
+
+                data->transfer_settings.write_idx =
+                    (data->transfer_settings.write_idx + 1) % CONFIG_DMA_TCD_QUEUE_SIZE;
+                data->transfer_settings.empty_tcds--;
+                block_config = block_config->next_block;
+            }
+
+            /* Push the first TCD into the hardware */
+            EDMA_InstallTCD(p_handle->base, p_handle->channel,
+                &DEV_CFG(dev)->tcdpool[channel][0]);
+
+        } else {
+            /* Dynamic scatter/gather mode */
+            EDMA_InstallTCDMemory(p_handle, DEV_CFG(dev)->tcdpool[channel],
+                CONFIG_DMA_TCD_QUEUE_SIZE);
+            while (block_config != NULL) {
+                EDMA_PrepareTransfer(
+                    &(data->transferConfig),
+                    (void *)block_config->source_address,
+                    config->source_data_size,
+                    (void *)block_config->dest_address,
+                    config->dest_data_size,
+                    config->source_burst_length,
+                    block_config->block_size, transfer_type);
+
+                const status_t submit_status =
+                    EDMA_SubmitTransfer(p_handle, &(data->transferConfig));
+                if (submit_status != kStatus_Success) {
+                    LOG_ERR("Error submitting EDMA Transfer: 0x%x",
+                        submit_status);
+                    ret = -EFAULT;
+                }
+                block_config = block_config->next_block;
+            }
+        }
     } else {
         /* block_count shall be 1 */
         LOG_DBG("block size is: %d", block_config->block_size);
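
One detail worth spelling out from the hunk above: BITER and CITER hold the major loop count, i.e. the number of minor loops in a block, where each minor loop moves source_data_size bytes (NBYTES). A small worked example of that conversion, with illustrative numbers only:

#include <assert.h>
#include <stdint.h>

int main(void)
{
    uint32_t block_size = 1024; /* bytes in one dma_block_config block */
    uint32_t data_size = 4;     /* source_data_size: bytes per minor loop */

    /* Major loop count written into BITER and CITER for that block */
    uint32_t major_loops = block_size / data_size;

    assert(major_loops == 256);
    return 0;
}
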
@@ -491,6 +566,26 @@ static int dma_mcux_edma_resume(const struct device *dev, uint32_t channel)
     return 0;
 }
 
+/* Write src/dst/major-loop count straight into the hardware TCD of the
+ * channel and set DREQ so the channel auto stops afterwards. */
+static void dma_mcux_edma_update_hw_tcd(const struct device *dev, uint32_t channel,
+                                        uint32_t src, uint32_t dst, size_t size)
+{
+#ifdef CONFIG_DMA_MCUX_EDMA
+    DEV_BASE(dev)->TCD[channel].SADDR = src;
+    DEV_BASE(dev)->TCD[channel].DADDR = dst;
+    DEV_BASE(dev)->TCD[channel].BITER_ELINKNO = size;
+    DEV_BASE(dev)->TCD[channel].CITER_ELINKNO = size;
+    DEV_BASE(dev)->TCD[channel].CSR |= DMA_CSR_DREQ(1U);
+
+#elif (CONFIG_DMA_MCUX_EDMA_V3 || CONFIG_DMA_MCUX_EDMA_V4)
+    DEV_BASE(dev)->CH[channel].TCD_SADDR = src;
+    DEV_BASE(dev)->CH[channel].TCD_DADDR = dst;
+    DEV_BASE(dev)->CH[channel].TCD_BITER_ELINKNO = size;
+    DEV_BASE(dev)->CH[channel].TCD_CITER_ELINKNO = size;
+    DEV_BASE(dev)->CH[channel].TCD_CSR |= DMA_CSR_DREQ(1U);
+#endif
+}
 
 static int dma_mcux_edma_reload(const struct device *dev, uint32_t channel,
                                 uint32_t src, uint32_t dst, size_t size)
@@ -507,30 +602,111 @@ static int dma_mcux_edma_reload(const struct device *dev, uint32_t channel,
         goto cleanup;
     }
 
-    /* If the tcdPool is not in use (no s/g) then only a single TCD can be active at once. */
-    if (data->busy && data->edma_handle.tcdPool == NULL) {
-        LOG_ERR("EDMA busy. Wait until the transfer completes before reloading.");
-        ret = -EBUSY;
-        goto cleanup;
-    }
+    if (data->transfer_settings.cyclic) {
+        /* Loop (cyclic) mode: append the new transfer to the TCD ring */
+        if (data->transfer_settings.empty_tcds == 0) {
+            LOG_DBG("TCD list is full in loop mode.");
+            ret = -ENOBUFS;
+            goto cleanup;
+        }
+
+        /* Convert size into the major loop count */
+        size = size / data->transfer_settings.dest_data_size;
+
+        /* Append the transfer to the next free TCD */
+        DEV_CFG(dev)->tcdpool[channel][data->transfer_settings.write_idx].SADDR = src;
+        DEV_CFG(dev)->tcdpool[channel][data->transfer_settings.write_idx].DADDR = dst;
+        DEV_CFG(dev)->tcdpool[channel][data->transfer_settings.write_idx].BITER = size;
+        DEV_CFG(dev)->tcdpool[channel][data->transfer_settings.write_idx].CITER = size;
+        /* Enable auto stop (DREQ) on the newly appended TCD */
+        DEV_CFG(dev)->tcdpool[channel][data->transfer_settings.write_idx].CSR |= DMA_CSR_DREQ(1U);
+
+        /* Stop the channel manually, whether it is still running or has already
+         * stopped automatically; keep the window between disabling and
+         * re-enabling the channel request as short as possible. */
+        EDMA_DisableChannelRequest(DEV_BASE(dev), channel);
+#ifdef CONFIG_DMA_MCUX_EDMA
+        while (DEV_BASE(dev)->TCD[channel].CSR & DMA_CSR_ACTIVE_MASK) {
+#elif (CONFIG_DMA_MCUX_EDMA_V3 || CONFIG_DMA_MCUX_EDMA_V4)
+        while (DEV_BASE(dev)->CH[channel].TCD_CSR & DMA_CH_CSR_ACTIVE_MASK) {
+#endif
+            ;
+        }
+
+        /* Use the DLAST_SGA field of the next TCD as an identifier */
+        uint32_t hw_id = EDMA_GetNextTCDAddress(DEV_EDMA_HANDLE(dev, channel));
+
+        if (data->transfer_settings.empty_tcds >= CONFIG_DMA_TCD_QUEUE_SIZE ||
+            hw_id == DEV_CFG(dev)->tcdpool[channel][data->transfer_settings.write_idx].DLAST_SGA) {
+            /* All queued transfers are done: the channel stopped automatically
+             * and an invalid TCD is loaded in the hardware, so update the HW
+             * TCD directly. */
+            dma_mcux_edma_update_hw_tcd(dev, channel, src, dst, size);
+            LOG_DBG("Transfer done, auto stop");
+        } else {
+            /* Clear the auto stop (DREQ) bit of the previous TCD */
+            DEV_CFG(dev)->tcdpool[channel]
+                [(data->transfer_settings.write_idx + CONFIG_DMA_TCD_QUEUE_SIZE - 1) %
+                 CONFIG_DMA_TCD_QUEUE_SIZE].CSR &= ~DMA_CSR_DREQ(1U);
+            if (data->transfer_settings.empty_tcds == CONFIG_DMA_TCD_QUEUE_SIZE - 1 ||
+                hw_id == (uint32_t)&DEV_CFG(dev)->tcdpool[channel][data->transfer_settings.write_idx]) {
+                /* The channel is running on the last transfer and the hardware
+                 * has already loaded it, so make sure its DREQ bit is cleared
+                 * as well. */
+                EDMA_EnableAutoStopRequest(DEV_BASE(dev), channel, false);
+                LOG_DBG("Last transfer.");
+            }
+            LOG_DBG("Manual stop");
+        }
+        /* Update the write index and the number of available TCDs */
+        data->transfer_settings.write_idx =
+            (data->transfer_settings.write_idx + 1) % CONFIG_DMA_TCD_QUEUE_SIZE;
+        data->transfer_settings.empty_tcds--;
 
-    EDMA_PrepareTransfer(
-        &(data->transferConfig),
-        (void *)src,
-        data->transfer_settings.source_data_size,
-        (void *)dst,
-        data->transfer_settings.dest_data_size,
-        data->transfer_settings.source_burst_length,
-        size,
-        data->transfer_settings.transfer_type);
-
-    const status_t submit_status =
-        EDMA_SubmitTransfer(DEV_EDMA_HANDLE(dev, channel), &(data->transferConfig));
-
-    if (submit_status != kStatus_Success) {
-        LOG_ERR("Error submitting EDMA Transfer: 0x%x", submit_status);
-        ret = -EFAULT;
-    }
+
+#ifdef CONFIG_DMA_MCUX_EDMA
+        /* It seems that there is a HW issue that may clear the ESG bit. As a
+         * workaround, clear the DONE bit before setting the ESG bit. */
+        EDMA_ClearChannelStatusFlags(DEV_BASE(dev), channel, kEDMA_DoneFlag);
+        DEV_BASE(dev)->TCD[channel].CSR |= DMA_CSR_ESG_MASK;
+#elif (CONFIG_DMA_MCUX_EDMA_V3 || CONFIG_DMA_MCUX_EDMA_V4)
+        /* Not verified whether this issue exists on V3/V4 hardware; placeholder: */
+        /* EDMA_ClearChannelStatusFlags(DEV_BASE(dev), channel, kEDMA_DoneFlag); */
+        /* DEV_BASE(dev)->CH[channel].TCD_CSR |= DMA_CSR_ESG_MASK; */
+#endif
+        /* End of the workaround. */
+
+        EDMA_EnableChannelRequest(DEV_BASE(dev), channel);
+        LOG_DBG("w_idx:%d no:%d(ch:%d)",
+                data->transfer_settings.write_idx, data->transfer_settings.empty_tcds,
+                channel);
+
+    } else {
+        /* If the tcdPool is not in use (no s/g) then only a single TCD can be active at once. */
+        if (data->busy && data->edma_handle.tcdPool == NULL) {
+            LOG_ERR("EDMA busy. Wait until the transfer completes before reloading.");
+            ret = -EBUSY;
+            goto cleanup;
+        }
+
+        EDMA_PrepareTransfer(
+            &(data->transferConfig),
+            (void *)src,
+            data->transfer_settings.source_data_size,
+            (void *)dst,
+            data->transfer_settings.dest_data_size,
+            data->transfer_settings.source_burst_length,
+            size,
+            data->transfer_settings.transfer_type);
+
+        const status_t submit_status =
+            EDMA_SubmitTransfer(DEV_EDMA_HANDLE(dev, channel), &(data->transferConfig));
+
+        if (submit_status != kStatus_Success) {
+            LOG_ERR("Error submitting EDMA Transfer: 0x%x", submit_status);
+            ret = -EFAULT;
+        }
+    }
 
 cleanup:
     irq_unlock(key);
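
From the application side, the loop-mode reload path above is intended to be driven from the completion callback: every DMA_STATUS_COMPLETE notification frees one TCD, so the callback can immediately queue the next buffer. A hedged sketch of such a consumer follows; the buffer array, its size, and the peripheral destination address are hypothetical.

#include <zephyr/drivers/dma.h>

#define BUF_SIZE 1024
static uint8_t bufs[4][BUF_SIZE];
static uint32_t next_buf;
static uint32_t periph_dest; /* hypothetical peripheral register address */

static void stream_dma_cb(const struct device *dma_dev, void *user_data,
                          uint32_t channel, int status)
{
    if (status != DMA_STATUS_COMPLETE) {
        return; /* error or unexpected state */
    }
    /* One TCD was consumed, so there is room to append another buffer;
     * -ENOBUFS would mean the ring is momentarily full.
     */
    (void)dma_reload(dma_dev, channel, (uint32_t)bufs[next_buf],
                     periph_dest, BUF_SIZE);
    next_buf = (next_buf + 1) % 4;
}
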
@@ -544,8 +720,10 @@ static int dma_mcux_edma_get_status(const struct device *dev, uint32_t channel,
 
     if (DEV_CHANNEL_DATA(dev, channel)->busy) {
         status->busy = true;
+        /* Multiply the remaining major loop count by NBYTES per minor loop */
         status->pending_length =
-            EDMA_GetRemainingMajorLoopCount(DEV_BASE(dev), hw_channel);
+            EDMA_GetRemainingMajorLoopCount(DEV_BASE(dev), hw_channel) *
+            DEV_CHANNEL_DATA(dev, channel)->transfer_settings.source_data_size;
     } else {
         status->busy = false;
         status->pending_length = 0;
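
With this change dma_get_status() reports pending_length in bytes (remaining major loops times NBYTES) rather than as a raw major loop count. A brief sketch of a caller relying on that; the helper name is illustrative:

#include <zephyr/drivers/dma.h>

/* Sketch: how many bytes are still outstanding on a channel */
static size_t bytes_pending(const struct device *dma_dev, uint32_t chan)
{
    struct dma_status st;

    if (dma_get_status(dma_dev, chan, &st) != 0) {
        return 0;
    }
    /* With this patch, pending_length already includes the NBYTES factor,
     * so no extra multiplication is needed on the caller side.
     */
    return st.busy ? st.pending_length : 0;
}
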