 #include <zephyr/irq.h>
 #include <zephyr/sys/util.h>
 #include <zephyr/sys/sys_io.h>
+#include <zephyr/sys/mem_blocks.h>
 #include <zephyr/device.h>
 #include <zephyr/drivers/dma.h>
 #include <zephyr/drivers/clock_control.h>
 #include <zephyr/logging/log.h>
 #include <zephyr/types.h>
+#include "rsi_rom_udma.h"
 #include "rsi_rom_udma_wrapper.h"
 #include "rsi_udma.h"
 #include "sl_status.h"
 
-#define DT_DRV_COMPAT silabs_siwx91x_dma
-#define DMA_MAX_TRANSFER_COUNT 1024
-#define DMA_CH_PRIORITY_HIGH 1
-#define DMA_CH_PRIORITY_LOW 0
-#define UDMA_ADDR_INC_NONE 0x03
+#define DT_DRV_COMPAT                    silabs_siwx91x_dma
+#define DMA_MAX_TRANSFER_COUNT           1024
+#define DMA_CH_PRIORITY_HIGH             1
+#define DMA_CH_PRIORITY_LOW              0
+#define UDMA_ADDR_INC_NONE               0x03
+#define UDMA_MODE_PER_ALT_SCATTER_GATHER 0x07
 
 LOG_MODULE_REGISTER(si91x_dma, CONFIG_DMA_LOG_LEVEL);
 
 };
 
 struct dma_siwx91x_channel_info {
-	dma_callback_t dma_callback; /* User callback */
-	void *cb_data;               /* User callback data */
+	dma_callback_t dma_callback;        /* User callback */
+	void *cb_data;                      /* User callback data */
+	RSI_UDMA_DESC_T *sg_desc_addr_info; /* Scatter-Gather table start address */
 };
 
 struct dma_siwx91x_config {
@@ -48,6 +52,7 @@ struct dma_siwx91x_data {
 	struct dma_context dma_ctx;
 	UDMA_Channel_Info *chan_info;
 	struct dma_siwx91x_channel_info *zephyr_channel_info;
+	struct sys_mem_blocks *dma_desc_pool; /* Pointer to the memory pool for DMA descriptors */
 	RSI_UDMA_DATACONTEXT_T udma_handle; /* Buffer to store UDMA handle
					     * related information
					     */
@@ -104,8 +109,158 @@ static int siwx91x_addr_adjustment(uint32_t adjustment)
 	}
 }
 
-static int siwx91x_channel_config(const struct device *dev, RSI_UDMA_HANDLE_T udma_handle,
+/* Sets up the scatter-gather descriptor table for a DMA transfer */
+static int siwx91x_sg_fill_desc(RSI_UDMA_DESC_T *descs, const struct dma_config *config_zephyr)
+{
+	const struct dma_block_config *block_addr = config_zephyr->head_block;
+	RSI_UDMA_CHA_CONFIG_DATA_T *cfg_91x;
+
+	for (int i = 0; i < config_zephyr->block_count; i++) {
+		sys_write32((uint32_t)&descs[i].vsUDMAChaConfigData1, (mem_addr_t)&cfg_91x);
+
+		if (siwx91x_addr_adjustment(block_addr->source_addr_adj) == UDMA_ADDR_INC_NONE) {
+			descs[i].pSrcEndAddr = (void *)block_addr->source_address;
+		} else {
+			descs[i].pSrcEndAddr = (void *)(block_addr->source_address +
+							(block_addr->block_size -
+							 config_zephyr->source_burst_length));
+		}
+		if (siwx91x_addr_adjustment(block_addr->dest_addr_adj) == UDMA_ADDR_INC_NONE) {
+			descs[i].pDstEndAddr = (void *)block_addr->dest_address;
+		} else {
+			descs[i].pDstEndAddr = (void *)(block_addr->dest_address +
+							(block_addr->block_size -
+							 config_zephyr->dest_burst_length));
+		}
+
+		cfg_91x->srcSize = siwx91x_burst_length(config_zephyr->source_burst_length);
+		cfg_91x->dstSize = siwx91x_burst_length(config_zephyr->dest_burst_length);
+
+		/* Calculate the number of DMA transfers required */
+		if (block_addr->block_size / config_zephyr->source_burst_length >
+		    DMA_MAX_TRANSFER_COUNT) {
+			return -EINVAL;
+		}
+
+		cfg_91x->totalNumOfDMATrans =
+			block_addr->block_size / config_zephyr->source_burst_length - 1;
+
+		/* Set the transfer type based on whether it is a peripheral request */
+		if (siwx91x_transfer_direction(config_zephyr->channel_direction) ==
+		    TRANSFER_TO_OR_FROM_PER) {
+			cfg_91x->transferType = UDMA_MODE_PER_ALT_SCATTER_GATHER;
+		} else {
+			cfg_91x->transferType = UDMA_MODE_MEM_ALT_SCATTER_GATHER;
+		}
+
+		cfg_91x->rPower = ARBSIZE_1;
+
+		if (siwx91x_addr_adjustment(block_addr->source_addr_adj) < 0 ||
+		    siwx91x_addr_adjustment(block_addr->dest_addr_adj) < 0) {
+			return -EINVAL;
+		}
+
+		if (siwx91x_addr_adjustment(block_addr->source_addr_adj) == UDMA_ADDR_INC_NONE) {
+			cfg_91x->srcInc = UDMA_SRC_INC_NONE;
+		} else {
+			cfg_91x->srcInc = siwx91x_burst_length(config_zephyr->source_burst_length);
+		}
+
+		if (siwx91x_addr_adjustment(block_addr->dest_addr_adj) == UDMA_ADDR_INC_NONE) {
+			cfg_91x->dstInc = UDMA_DST_INC_NONE;
+		} else {
+			cfg_91x->dstInc = siwx91x_burst_length(config_zephyr->dest_burst_length);
+		}
+
+		/* Move to the next block */
+		block_addr = block_addr->next_block;
+	}
+
+	if (block_addr != NULL) {
+		/* next_block address for the last block must be NULL */
+		return -EINVAL;
+	}
+
+	/* Set the transfer type for the last descriptor */
+	switch (siwx91x_transfer_direction(config_zephyr->channel_direction)) {
+	case TRANSFER_TO_OR_FROM_PER:
+		descs[config_zephyr->block_count - 1].vsUDMAChaConfigData1.transferType =
+			UDMA_MODE_BASIC;
+		break;
+	case TRANSFER_MEM_TO_MEM:
+		descs[config_zephyr->block_count - 1].vsUDMAChaConfigData1.transferType =
+			UDMA_MODE_AUTO;
+		break;
+	default:
+		return -EINVAL;
+	}
+
+	return 0;
+}
+
+/* Configure DMA for scatter-gather transfer */
+static int siwx91x_sg_chan_config(const struct device *dev, RSI_UDMA_HANDLE_T udma_handle,
 				  uint32_t channel, const struct dma_config *config)
+{
+	const struct dma_siwx91x_config *cfg = dev->config;
+	struct dma_siwx91x_data *data = dev->data;
+	RSI_UDMA_DESC_T *sg_desc_base_addr = NULL;
+	uint8_t transfer_type;
+	int ret;
+
+	ret = siwx91x_transfer_direction(config->channel_direction);
+	if (ret < 0) {
+		return -EINVAL;
+	}
+	transfer_type = ret ? UDMA_MODE_PER_SCATTER_GATHER : UDMA_MODE_MEM_SCATTER_GATHER;
+
+	if (!siwx91x_is_data_width_valid(config->source_data_size) ||
+	    !siwx91x_is_data_width_valid(config->dest_data_size)) {
+		return -EINVAL;
+	}
+
+	if (siwx91x_burst_length(config->source_burst_length) < 0 ||
+	    siwx91x_burst_length(config->dest_burst_length) < 0) {
+		return -EINVAL;
+	}
+
+	/* Request start index for scatter-gather descriptor table */
+	if (sys_mem_blocks_alloc_contiguous(data->dma_desc_pool, config->block_count,
+					    (void **)&sg_desc_base_addr)) {
+		return -EINVAL;
+	}
+
+	if (siwx91x_sg_fill_desc(sg_desc_base_addr, config)) {
+		return -EINVAL;
+	}
+
+	/* This channel information is used to distinguish scatter-gather transfers and
+	 * free the allocated descriptors in sg_transfer_desc_block
+	 */
+	data->chan_info[channel].Cnt = config->block_count;
+	data->zephyr_channel_info[channel].sg_desc_addr_info = sg_desc_base_addr;
+	RSI_UDMA_InterruptClear(udma_handle, channel);
+	RSI_UDMA_ErrorStatusClear(udma_handle);
+
+	if (cfg->reg == UDMA0) {
+		/* UDMA0 is accessible by both TA and M4, so an interrupt should be configured in
+		 * the TA-M4 common register set to signal the TA when UDMA0 is actively in use.
+		 */
+		sys_write32((BIT(channel) | M4SS_UDMA_INTR_SEL), (mem_addr_t)&M4SS_UDMA_INTR_SEL);
+	} else {
+		sys_set_bit((mem_addr_t)&cfg->reg->UDMA_INTR_MASK_REG, channel);
+	}
+
+	sys_write32(BIT(channel), (mem_addr_t)&cfg->reg->CHNL_PRI_ALT_SET);
+	sys_write32(BIT(channel), (mem_addr_t)&cfg->reg->CHNL_REQ_MASK_CLR);
+
+	RSI_UDMA_SetChannelScatterGatherTransfer(udma_handle, channel, config->block_count,
+						 sg_desc_base_addr, transfer_type);
+	return 0;
+}
+
+static int siwx91x_direct_chan_config(const struct device *dev, RSI_UDMA_HANDLE_T udma_handle,
+				      uint32_t channel, const struct dma_config *config)
 {
 	uint32_t dma_transfer_num = config->head_block->block_size / config->source_burst_length;
 	const struct dma_siwx91x_config *cfg = dev->config;
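
Reviewer note: as a quick cross-check of the end-address and transfer-count arithmetic in siwx91x_sg_fill_desc() above, here is the same math written out as standalone helpers. The numeric values are illustrative assumptions, not taken from the patch.

```c
#include <stdint.h>

/* Mirrors the per-descriptor math in siwx91x_sg_fill_desc() for an incrementing buffer */
static inline uint32_t sg_end_addr(uint32_t start, uint32_t block_size, uint32_t burst)
{
	/* The end address points at the start of the last burst in the block */
	return start + (block_size - burst);
}

static inline uint32_t sg_total_dma_trans(uint32_t block_size, uint32_t burst)
{
	/* The hardware field holds "number of transfers minus one" */
	return block_size / burst - 1;
}

/* Illustrative example: block_size = 1024, burst = 4, start = 0x20001000
 *   sg_end_addr(0x20001000, 1024, 4)  == 0x200013FC
 *   sg_total_dma_trans(1024, 4)       == 255
 * block_size / burst must not exceed DMA_MAX_TRANSFER_COUNT (1024), as checked above.
 */
```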
@@ -210,8 +365,20 @@ static int siwx91x_dma_configure(const struct device *dev, uint32_t channel,
 		return -EINVAL;
 	}
 
+	if (config->cyclic || config->complete_callback_en) {
+		/* Cyclic DMA and a completion callback for each block
+		 * are not supported
+		 */
+		return -EINVAL;
+	}
+
 	/* Configure dma channel for transfer */
-	status = siwx91x_channel_config(dev, udma_handle, channel, config);
+	if (config->head_block->next_block != NULL) {
+		/* Configure DMA for a Scatter-Gather transfer */
+		status = siwx91x_sg_chan_config(dev, udma_handle, channel, config);
+	} else {
+		status = siwx91x_direct_chan_config(dev, udma_handle, channel, config);
+	}
 	if (status) {
 		return status;
 	}
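
Reviewer note: a caller-side sketch of how the new branch is reached through the generic Zephyr DMA API. Chaining a second dma_block_config makes head_block->next_block non-NULL, which routes the request to siwx91x_sg_chan_config(). Buffer names, sizes, data/burst widths and the helper name are illustrative assumptions, not part of this patch.

```c
#include <zephyr/kernel.h>
#include <zephyr/drivers/dma.h>

/* Hypothetical buffers and callback; only the chained blocks matter here */
static uint8_t src_a[64], src_b[64], dst_a[64], dst_b[64];

static void xfer_done(const struct device *dev, void *user_data, uint32_t channel, int status)
{
	ARG_UNUSED(dev);
	ARG_UNUSED(user_data);
	ARG_UNUSED(channel);
	ARG_UNUSED(status);
}

static int start_sg_copy(const struct device *dma_dev, uint32_t channel)
{
	struct dma_block_config blk1 = {
		.source_address = (uint32_t)src_b,
		.dest_address = (uint32_t)dst_b,
		.block_size = sizeof(src_b),
	};
	struct dma_block_config blk0 = {
		.source_address = (uint32_t)src_a,
		.dest_address = (uint32_t)dst_a,
		.block_size = sizeof(src_a),
		.next_block = &blk1, /* non-NULL next_block selects the scatter-gather path */
	};
	struct dma_config cfg = {
		.channel_direction = MEMORY_TO_MEMORY,
		.source_data_size = 1,
		.dest_data_size = 1,
		.source_burst_length = 1,
		.dest_burst_length = 1,
		.block_count = 2,
		.head_block = &blk0,
		.dma_callback = xfer_done,
	};
	int ret = dma_config(dma_dev, channel, &cfg);

	return ret ? ret : dma_start(dma_dev, channel);
}
```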
@@ -429,6 +596,20 @@ static void siwx91x_dma_isr(const struct device *dev)
 	/* find_lsb_set() returns 1 indexed value */
 	channel -= 1;
 
+	if (data->zephyr_channel_info[channel].sg_desc_addr_info) {
+		/* A scatter-gather transfer has completed; free the allocated descriptors */
+		if (sys_mem_blocks_free_contiguous(
+			    data->dma_desc_pool,
+			    (void *)data->zephyr_channel_info[channel].sg_desc_addr_info,
+			    data->chan_info[channel].Cnt)) {
+			sys_write32(BIT(channel), (mem_addr_t)&cfg->reg->UDMA_DONE_STATUS_REG);
+			goto out;
+		}
+		data->chan_info[channel].Cnt = 0;
+		data->chan_info[channel].Size = 0;
+		data->zephyr_channel_info[channel].sg_desc_addr_info = NULL;
+	}
+
 	if (data->chan_info[channel].Cnt == data->chan_info[channel].Size) {
 		if (data->zephyr_channel_info[channel].dma_callback) {
 			/* Transfer complete, call user callback */
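
Reviewer note: the descriptor lifetime added by this patch hinges on the sys_mem_blocks pairing: siwx91x_sg_chan_config() takes block_count contiguous blocks from the pool and this ISR hands them back once the channel completes. A standalone sketch of that allocator contract follows; the pool name and sizes are made-up values for illustration.

```c
#include <zephyr/sys/mem_blocks.h>

/* Hypothetical pool: 16 blocks of 16 bytes, 4-byte aligned, mirroring desc_pool_##inst below */
SYS_MEM_BLOCKS_DEFINE_STATIC(demo_desc_pool, 16, 16, 4);

static int demo_alloc_then_release(size_t block_count)
{
	void *base = NULL;
	int ret;

	/* Configure path: reserve block_count contiguous descriptor slots */
	ret = sys_mem_blocks_alloc_contiguous(&demo_desc_pool, block_count, &base);
	if (ret) {
		return ret;
	}

	/* ... the hardware would be programmed with `base` here ... */

	/* ISR path: return the same slots once the transfer has completed */
	return sys_mem_blocks_free_contiguous(&demo_desc_pool, base, block_count);
}
```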
@@ -466,6 +647,8 @@ static DEVICE_API(dma, siwx91x_dma_api) = {
 #define SIWX91X_DMA_INIT(inst)                                                                     \
 	static ATOMIC_DEFINE(dma_channels_atomic_##inst, DT_INST_PROP(inst, dma_channels));        \
 	static UDMA_Channel_Info dma_channel_info_##inst[DT_INST_PROP(inst, dma_channels)];        \
+	SYS_MEM_BLOCKS_DEFINE_STATIC(desc_pool_##inst, sizeof(RSI_UDMA_DESC_T),                    \
+				     CONFIG_DMA_SILABS_SIWX91X_SG_BUFFER_COUNT, 4);                \
 	static struct dma_siwx91x_channel_info                                                     \
 		zephyr_channel_info_##inst[DT_INST_PROP(inst, dma_channels)];                      \
 	static struct dma_siwx91x_data dma_data_##inst = {                                         \
@@ -474,6 +657,7 @@ static DEVICE_API(dma, siwx91x_dma_api) = {
 		.dma_ctx.atomic = dma_channels_atomic_##inst,                                      \
 		.chan_info = dma_channel_info_##inst,                                              \
 		.zephyr_channel_info = zephyr_channel_info_##inst,                                 \
+		.dma_desc_pool = &desc_pool_##inst,                                                \
 	};                                                                                         \
 	static void siwx91x_dma_irq_configure_##inst(void)                                         \
 	{                                                                                          \
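
Reviewer note: for instance 0, the two added macro lines contribute roughly the following after preprocessing. This is approximate and illustrative only; the ##inst pasting yields the desc_pool_0 name, the other initializer fields are elided, and the SYS_MEM_BLOCKS_DEFINE_STATIC internals are not expanded.

```c
/* Approximate expansion of the added lines for inst = 0 (illustrative only) */
SYS_MEM_BLOCKS_DEFINE_STATIC(desc_pool_0, sizeof(RSI_UDMA_DESC_T),
			     CONFIG_DMA_SILABS_SIWX91X_SG_BUFFER_COUNT, 4);

static struct dma_siwx91x_data dma_data_0 = {
	/* ... existing fields ... */
	.dma_desc_pool = &desc_pool_0,
};
```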