@@ -1021,6 +1021,81 @@ static struct dma_async_tx_descriptor *stm32_dma3_prep_slave_sg(struct dma_chan
 	return NULL;
 }
 
+static struct dma_async_tx_descriptor *stm32_dma3_prep_dma_cyclic(struct dma_chan *c,
+								   dma_addr_t buf_addr,
+								   size_t buf_len, size_t period_len,
+								   enum dma_transfer_direction dir,
+								   unsigned long flags)
+{
+	struct stm32_dma3_chan *chan = to_stm32_dma3_chan(c);
+	struct stm32_dma3_swdesc *swdesc;
+	dma_addr_t src, dst;
+	u32 count, i, ctr1, ctr2;
+	int ret;
+
+	if (!buf_len || !period_len || period_len > STM32_DMA3_MAX_BLOCK_SIZE) {
+		dev_err(chan2dev(chan), "Invalid buffer/period length\n");
+		return NULL;
+	}
+
+	if (buf_len % period_len) {
+		dev_err(chan2dev(chan), "Buffer length not multiple of period length\n");
+		return NULL;
+	}
+
+	count = buf_len / period_len;
+	swdesc = stm32_dma3_chan_desc_alloc(chan, count);
+	if (!swdesc)
+		return NULL;
+
+	if (dir == DMA_MEM_TO_DEV) {
+		src = buf_addr;
+		dst = chan->dma_config.dst_addr;
+
+		ret = stm32_dma3_chan_prep_hw(chan, DMA_MEM_TO_DEV, &swdesc->ccr, &ctr1, &ctr2,
+					      src, dst, period_len);
+	} else if (dir == DMA_DEV_TO_MEM) {
+		src = chan->dma_config.src_addr;
+		dst = buf_addr;
+
+		ret = stm32_dma3_chan_prep_hw(chan, DMA_DEV_TO_MEM, &swdesc->ccr, &ctr1, &ctr2,
+					      src, dst, period_len);
+	} else {
+		dev_err(chan2dev(chan), "Invalid direction\n");
+		ret = -EINVAL;
+	}
+
+	if (ret)
+		goto err_desc_free;
+
+	for (i = 0; i < count; i++) {
+		if (dir == DMA_MEM_TO_DEV) {
+			src = buf_addr + i * period_len;
+			dst = chan->dma_config.dst_addr;
+		} else { /* (dir == DMA_DEV_TO_MEM) */
+			src = chan->dma_config.src_addr;
+			dst = buf_addr + i * period_len;
+		}
+
+		stm32_dma3_chan_prep_hwdesc(chan, swdesc, i, src, dst, period_len,
+					    ctr1, ctr2, i == (count - 1), true);
+	}
+
+	/* Enable Error interrupts */
+	swdesc->ccr |= CCR_USEIE | CCR_ULEIE | CCR_DTEIE;
+	/* Enable Transfer state interrupts */
+	swdesc->ccr |= CCR_TCIE;
+
+	swdesc->cyclic = true;
+
+	return vchan_tx_prep(&chan->vchan, &swdesc->vdesc, flags);
+
+err_desc_free:
+	stm32_dma3_chan_desc_free(chan, swdesc);
+
+	return NULL;
+}
+
 static void stm32_dma3_caps(struct dma_chan *c, struct dma_slave_caps *caps)
 {
 	struct stm32_dma3_chan *chan = to_stm32_dma3_chan(c);
@@ -1255,6 +1330,7 @@ static int stm32_dma3_probe(struct platform_device *pdev)
 
 	dma_cap_set(DMA_SLAVE, dma_dev->cap_mask);
 	dma_cap_set(DMA_PRIVATE, dma_dev->cap_mask);
+	dma_cap_set(DMA_CYCLIC, dma_dev->cap_mask);
 	dma_dev->dev = &pdev->dev;
 	/*
 	 * This controller supports up to 8-byte buswidth depending on the port used and the
@@ -1277,6 +1353,7 @@ static int stm32_dma3_probe(struct platform_device *pdev)
 	dma_dev->device_alloc_chan_resources = stm32_dma3_alloc_chan_resources;
 	dma_dev->device_free_chan_resources = stm32_dma3_free_chan_resources;
 	dma_dev->device_prep_slave_sg = stm32_dma3_prep_slave_sg;
+	dma_dev->device_prep_dma_cyclic = stm32_dma3_prep_dma_cyclic;
 	dma_dev->device_caps = stm32_dma3_caps;
 	dma_dev->device_config = stm32_dma3_config;
 	dma_dev->device_terminate_all = stm32_dma3_terminate_all;
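
For context, a minimal consumer-side sketch of how the newly advertised DMA_CYCLIC capability might be exercised through the generic dmaengine API, assuming a channel has already been requested and a DMA-able buffer allocated; the my_start_cyclic_rx() helper, the peripheral register address and the buffer/period sizes are illustrative assumptions, not part of this patch.

/* A minimal sketch, not part of this patch: cyclic DEV_TO_MEM setup via dmaengine. */
#include <linux/dmaengine.h>

static int my_start_cyclic_rx(struct dma_chan *chan, dma_addr_t buf,
			      size_t buf_len, size_t period_len)
{
	struct dma_slave_config cfg = {
		.src_addr	= 0x40004424,	/* assumed peripheral RX data register */
		.src_addr_width	= DMA_SLAVE_BUSWIDTH_1_BYTE,
	};
	struct dma_async_tx_descriptor *desc;
	int ret;

	ret = dmaengine_slave_config(chan, &cfg);
	if (ret)
		return ret;

	/* buf_len must be a multiple of period_len, as checked by the driver above */
	desc = dmaengine_prep_dma_cyclic(chan, buf, buf_len, period_len,
					 DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
	if (!desc)
		return -EINVAL;

	dmaengine_submit(desc);
	dma_async_issue_pending(chan);

	return 0;
}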