@@ -8,6 +8,9 @@

 #include <string.h>
 #include <errno.h>
+#if CONFIG_SPI_MAX32_DMA
+#include <zephyr/drivers/dma.h>
+#endif
 #include <zephyr/drivers/pinctrl.h>
 #include <zephyr/drivers/spi.h>
 #include <zephyr/drivers/spi/rtio.h>
@@ -20,6 +23,14 @@
 LOG_MODULE_REGISTER(spi_max32, CONFIG_SPI_LOG_LEVEL);
 #include "spi_context.h"

+#ifdef CONFIG_SPI_MAX32_DMA
+struct max32_spi_dma_config {
+        const struct device *dev;
+        const uint32_t channel;
+        const uint32_t slot;
+};
+#endif /* CONFIG_SPI_MAX32_DMA */
+
 struct max32_spi_config {
         mxc_spi_regs_t *regs;
         const struct pinctrl_dev_config *pctrl;
@@ -28,6 +39,10 @@ struct max32_spi_config {
 #ifdef CONFIG_SPI_MAX32_INTERRUPT
         void (*irq_config_func)(const struct device *dev);
 #endif /* CONFIG_SPI_MAX32_INTERRUPT */
+#ifdef CONFIG_SPI_MAX32_DMA
+        struct max32_spi_dma_config tx_dma;
+        struct max32_spi_dma_config rx_dma;
+#endif /* CONFIG_SPI_MAX32_DMA */
 };

 /* Device run time data */
@@ -36,11 +51,21 @@ struct max32_spi_data {
         const struct device *dev;
         mxc_spi_req_t req;
         uint8_t dummy[2];
+#ifdef CONFIG_SPI_MAX32_DMA
+        volatile uint8_t dma_stat;
+#endif /* CONFIG_SPI_MAX32_DMA */
 #ifdef CONFIG_SPI_ASYNC
         struct k_work async_work;
 #endif /* CONFIG_SPI_ASYNC */
 };

+#ifdef CONFIG_SPI_MAX32_DMA
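+/*
+ * Transfer-status bits kept in max32_spi_data.dma_stat: the DMA callback sets
+ * the TX/RX done bits as each channel finishes, and a chunk is treated as
+ * complete only once both done bits are set.
+ */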
+#define SPI_MAX32_DMA_ERROR_FLAG 0x01U
+#define SPI_MAX32_DMA_RX_DONE_FLAG 0x02U
+#define SPI_MAX32_DMA_TX_DONE_FLAG 0x04U
+#define SPI_MAX32_DMA_DONE_FLAG (SPI_MAX32_DMA_RX_DONE_FLAG | SPI_MAX32_DMA_TX_DONE_FLAG)
+#endif /* CONFIG_SPI_MAX32_DMA */
+
 #ifdef CONFIG_SPI_MAX32_INTERRUPT
 static void spi_max32_callback(mxc_spi_req_t *req, int error);
 #endif /* CONFIG_SPI_MAX32_INTERRUPT */
@@ -344,9 +369,212 @@ static int transceive(const struct device *dev, const struct spi_config *config,
         return ret;
 }

+#ifdef CONFIG_SPI_MAX32_DMA
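+/*
+ * Completion callback shared by the TX and RX DMA channels: it flags which
+ * direction finished and, once both are done, advances the spi_context
+ * buffers by the chunk length and signals completion.
+ */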
+static void spi_max32_dma_callback(const struct device *dev, void *arg, uint32_t channel,
+                                   int status)
+{
+        struct max32_spi_data *data = arg;
+        const struct device *spi_dev = data->dev;
+        const struct max32_spi_config *config = spi_dev->config;
+        uint32_t len;
+
+        if (status < 0) {
+                LOG_ERR("DMA callback error with channel %d.", channel);
+        } else {
+                /* identify the origin of this callback */
+                if (channel == config->tx_dma.channel) {
+                        data->dma_stat |= SPI_MAX32_DMA_TX_DONE_FLAG;
+                } else if (channel == config->rx_dma.channel) {
+                        data->dma_stat |= SPI_MAX32_DMA_RX_DONE_FLAG;
+                }
+        }
+        if ((data->dma_stat & SPI_MAX32_DMA_DONE_FLAG) == SPI_MAX32_DMA_DONE_FLAG) {
+                len = spi_context_max_continuous_chunk(&data->ctx);
+                spi_context_update_tx(&data->ctx, 1, len);
+                spi_context_update_rx(&data->ctx, 1, len);
+                spi_context_complete(&data->ctx, spi_dev, status == 0 ? 0 : -EIO);
+        }
+}
+
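+/*
+ * Configure and start a single-block memory-to-peripheral transfer feeding the
+ * SPI TX FIFO. The element size is derived from the data-frame size
+ * (1 << word_shift); when no TX buffer is supplied, the dummy bytes are sent
+ * without address increment. The RX helper below mirrors this for the
+ * peripheral-to-memory direction.
+ */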
+static int spi_max32_tx_dma_load(const struct device *dev, const uint8_t *buf, uint32_t len,
+                                 uint8_t word_shift)
+{
+        int ret;
+        const struct max32_spi_config *config = dev->config;
+        struct max32_spi_data *data = dev->data;
+        struct dma_config dma_cfg = {0};
+        struct dma_block_config dma_blk = {0};
+
+        dma_cfg.channel_direction = MEMORY_TO_PERIPHERAL;
+        dma_cfg.dma_callback = spi_max32_dma_callback;
+        dma_cfg.user_data = (void *)data;
+        dma_cfg.dma_slot = config->tx_dma.slot;
+        dma_cfg.block_count = 1;
+        dma_cfg.source_data_size = 1U << word_shift;
+        dma_cfg.source_burst_length = 1U;
+        dma_cfg.dest_data_size = 1U << word_shift;
+        dma_cfg.head_block = &dma_blk;
+        dma_blk.block_size = len;
+        if (buf) {
+                dma_blk.source_addr_adj = DMA_ADDR_ADJ_INCREMENT;
+                dma_blk.source_address = (uint32_t)buf;
+        } else {
+                dma_blk.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
+                dma_blk.source_address = (uint32_t)data->dummy;
+        }
+
+        ret = dma_config(config->tx_dma.dev, config->tx_dma.channel, &dma_cfg);
+        if (ret < 0) {
+                LOG_ERR("Error configuring Tx DMA (%d)", ret);
+        }
+
+        return dma_start(config->tx_dma.dev, config->tx_dma.channel);
+}
+
+static int spi_max32_rx_dma_load(const struct device *dev, const uint8_t *buf, uint32_t len,
+                                 uint8_t word_shift)
+{
+        int ret;
+        const struct max32_spi_config *config = dev->config;
+        struct max32_spi_data *data = dev->data;
+        struct dma_config dma_cfg = {0};
+        struct dma_block_config dma_blk = {0};
+
+        dma_cfg.channel_direction = PERIPHERAL_TO_MEMORY;
+        dma_cfg.dma_callback = spi_max32_dma_callback;
+        dma_cfg.user_data = (void *)data;
+        dma_cfg.dma_slot = config->rx_dma.slot;
+        dma_cfg.block_count = 1;
+        dma_cfg.source_data_size = 1U << word_shift;
+        dma_cfg.source_burst_length = 1U;
+        dma_cfg.dest_data_size = 1U << word_shift;
+        dma_cfg.head_block = &dma_blk;
+        dma_blk.block_size = len;
+        if (buf) {
+                dma_blk.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT;
+                dma_blk.dest_address = (uint32_t)buf;
+        } else {
+                dma_blk.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
+                dma_blk.dest_address = (uint32_t)data->dummy;
+        }
+        ret = dma_config(config->rx_dma.dev, config->rx_dma.channel, &dma_cfg);
+        if (ret < 0) {
+                LOG_ERR("Error configuring Rx DMA (%d)", ret);
+        }
+
+        return dma_start(config->rx_dma.dev, config->rx_dma.channel);
+}
+
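+/*
+ * DMA-driven transceive path: both channels must be idle before the transfer
+ * starts, then the contiguous-chunk loop below re-programs the controller and
+ * both DMA channels for each chunk until the TX and RX buffer sets are
+ * exhausted.
+ */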
+static int transceive_dma(const struct device *dev, const struct spi_config *config,
+                          const struct spi_buf_set *tx_bufs, const struct spi_buf_set *rx_bufs,
+                          bool async, spi_callback_t cb, void *userdata)
+{
+        int ret = 0;
+        const struct max32_spi_config *cfg = dev->config;
+        struct max32_spi_data *data = dev->data;
+        struct spi_context *ctx = &data->ctx;
+        mxc_spi_regs_t *spi = cfg->regs;
+        struct dma_status status;
+        uint32_t len, word_count;
+        uint8_t dfs_shift;
+
+        bool hw_cs_ctrl = true;
+
+        spi_context_lock(ctx, async, cb, userdata, config);
+
+        ret = dma_get_status(cfg->tx_dma.dev, cfg->tx_dma.channel, &status);
+        if (ret < 0 || status.busy) {
+                ret = ret < 0 ? ret : -EBUSY;
+                goto unlock;
+        }
+
+        ret = dma_get_status(cfg->rx_dma.dev, cfg->rx_dma.channel, &status);
+        if (ret < 0 || status.busy) {
+                ret = ret < 0 ? ret : -EBUSY;
+                goto unlock;
+        }
+
+        ret = spi_configure(dev, config);
+        if (ret != 0) {
+                ret = -EIO;
+                goto unlock;
+        }
+
+        spi_context_buffers_setup(ctx, tx_bufs, rx_bufs, 1);
+
+        /* Check if CS GPIO exists */
+        if (spi_cs_is_gpio(config)) {
+                hw_cs_ctrl = false;
+        }
+        MXC_SPI_HWSSControl(cfg->regs, hw_cs_ctrl);
+
+        /* Assert the CS line if HW control disabled */
+        if (!hw_cs_ctrl) {
+                spi_context_cs_control(ctx, true);
+        }
+
+        MXC_SPI_SetSlave(cfg->regs, ctx->config->slave);
+
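+        /*
+         * Process one contiguous chunk per iteration: the SPI block is
+         * disabled while the character counts, FIFOs and DMA channels are
+         * (re)programmed, then re-enabled before the transmission starts.
+         */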
+        do {
+                spi->ctrl0 &= ~(MXC_F_SPI_CTRL0_EN);
+
+                len = spi_context_max_continuous_chunk(ctx);
+                dfs_shift = spi_max32_get_dfs_shift(ctx);
+                word_count = len >> dfs_shift;
+
+                MXC_SETFIELD(spi->ctrl1, MXC_F_SPI_CTRL1_RX_NUM_CHAR,
+                             word_count << MXC_F_SPI_CTRL1_RX_NUM_CHAR_POS);
+                spi->dma |= ADI_MAX32_SPI_DMA_RX_FIFO_CLEAR;
+                spi->dma |= MXC_F_SPI_DMA_RX_FIFO_EN;
+                spi->dma |= ADI_MAX32_SPI_DMA_RX_DMA_EN;
+                MXC_SPI_SetRXThreshold(spi, 0);
+
+                ret = spi_max32_rx_dma_load(dev, ctx->rx_buf, len, dfs_shift);
+                if (ret < 0) {
+                        goto unlock;
+                }
+
+                MXC_SETFIELD(spi->ctrl1, MXC_F_SPI_CTRL1_TX_NUM_CHAR,
+                             word_count << MXC_F_SPI_CTRL1_TX_NUM_CHAR_POS);
+                spi->dma |= ADI_MAX32_SPI_DMA_TX_FIFO_CLEAR;
+                spi->dma |= MXC_F_SPI_DMA_TX_FIFO_EN;
+                spi->dma |= ADI_MAX32_SPI_DMA_TX_DMA_EN;
+                MXC_SPI_SetTXThreshold(spi, 1);
+
+                ret = spi_max32_tx_dma_load(dev, ctx->tx_buf, len, dfs_shift);
+                if (ret < 0) {
+                        goto unlock;
+                }
+
+                spi->ctrl0 |= MXC_F_SPI_CTRL0_EN;
+
+                data->dma_stat = 0;
+                MXC_SPI_StartTransmission(spi);
+                ret = spi_context_wait_for_completion(ctx);
+        } while (!ret && (spi_context_tx_on(ctx) || spi_context_rx_on(ctx)));
+
+unlock:
+        /* Deassert the CS line if hw control disabled */
+        if (!hw_cs_ctrl) {
+                spi_context_cs_control(ctx, false);
+        }
+
+        spi_context_release(ctx, ret);
+
+        return ret;
+}
+#endif /* CONFIG_SPI_MAX32_DMA */
+
 static int api_transceive(const struct device *dev, const struct spi_config *config,
                           const struct spi_buf_set *tx_bufs, const struct spi_buf_set *rx_bufs)
 {
+#ifdef CONFIG_SPI_MAX32_DMA
+        const struct max32_spi_config *cfg = dev->config;
+
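+        /* A channel value of 0xFF marks a missing DMA assignment in devicetree. */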
+        if (cfg->tx_dma.channel != 0xFF && cfg->rx_dma.channel != 0xFF) {
+                return transceive_dma(dev, config, tx_bufs, rx_bufs, false, NULL, NULL);
+        }
+#endif /* CONFIG_SPI_MAX32_DMA */
         return transceive(dev, config, tx_bufs, rx_bufs, false, NULL, NULL);
 }

@@ -536,6 +764,26 @@ static const struct spi_driver_api spi_max32_api = {
 #define SPI_MAX32_IRQ_CONFIG_FUNC(n)
 #endif /* CONFIG_SPI_MAX32_INTERRUPT */

+#if CONFIG_SPI_MAX32_DMA
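+/*
+ * Pull the optional DMA controller, channel and slot for each direction from
+ * the instance's dmas devicetree property. When the property is absent, the
+ * controller pointer is NULL and the cells default to 0xff, which the driver
+ * treats as "no DMA" and falls back to the non-DMA transceive() path.
+ */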
+#define MAX32_DT_INST_DMA_CTLR(n, name) \
+        COND_CODE_1(DT_INST_NODE_HAS_PROP(n, dmas), \
+                    (DEVICE_DT_GET(DT_INST_DMAS_CTLR_BY_NAME(n, name))), (NULL))
+
+#define MAX32_DT_INST_DMA_CELL(n, name, cell) \
+        COND_CODE_1(DT_INST_NODE_HAS_PROP(n, dmas), (DT_INST_DMAS_CELL_BY_NAME(n, name, cell)), \
+                    (0xff))
+
+#define MAX32_SPI_DMA_INIT(n) \
+        .tx_dma.dev = MAX32_DT_INST_DMA_CTLR(n, tx), \
+        .tx_dma.channel = MAX32_DT_INST_DMA_CELL(n, tx, channel), \
+        .tx_dma.slot = MAX32_DT_INST_DMA_CELL(n, tx, slot), \
+        .rx_dma.dev = MAX32_DT_INST_DMA_CTLR(n, rx), \
+        .rx_dma.channel = MAX32_DT_INST_DMA_CELL(n, rx, channel), \
+        .rx_dma.slot = MAX32_DT_INST_DMA_CELL(n, rx, slot),
+#else
+#define MAX32_SPI_DMA_INIT(n)
+#endif
+
 #define DEFINE_SPI_MAX32(_num) \
         PINCTRL_DT_INST_DEFINE(_num); \
         SPI_MAX32_IRQ_CONFIG_FUNC(_num) \
@@ -545,7 +793,7 @@ static const struct spi_driver_api spi_max32_api = {
                 .clock = DEVICE_DT_GET(DT_INST_CLOCKS_CTLR(_num)), \
                 .perclk.bus = DT_INST_CLOCKS_CELL(_num, offset), \
                 .perclk.bit = DT_INST_CLOCKS_CELL(_num, bit), \
-                SPI_MAX32_CONFIG_IRQ_FUNC(_num)}; \
+                MAX32_SPI_DMA_INIT(_num) SPI_MAX32_CONFIG_IRQ_FUNC(_num)}; \
         static struct max32_spi_data max32_spi_data_##_num = { \
                 SPI_CONTEXT_INIT_LOCK(max32_spi_data_##_num, ctx), \
                 SPI_CONTEXT_INIT_SYNC(max32_spi_data_##_num, ctx), \