@@ -34,6 +34,7 @@
 #include "block_device_ev3.h"
 
 #include <pbdrv/block_device.h>
+#include <pbdrv/cache.h>
 #include <pbdrv/clock.h>
 #include <pbdrv/compiler.h>
 #include <pbdrv/gpio.h>
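The new <pbdrv/cache.h> header supplies the DMA cache-maintenance helpers and uncached-access macros used throughout this change. A rough sketch of the surface the diff relies on is below; only the identifiers are taken from the diff, while the signatures, the alias offset, and the alias mechanism are assumptions for illustration.

// Hypothetical sketch of pbdrv/cache.h; only the names are confirmed by this diff.
#include <stddef.h>
#include <stdint.h>

// Clean (write back) a buffer the CPU has written, so DMA reads fresh data.
void pbdrv_cache_prepare_before_dma(const void *buf, size_t size);

// Invalidate a buffer that DMA has written, so the CPU does not read stale lines.
void pbdrv_cache_prepare_after_dma(void *buf, size_t size);

// One common way to get uncached access is a non-cacheable MMU alias of RAM at a
// fixed offset; the offset below is purely illustrative.
#define PBDRV_CACHE_UNCACHED_OFFSET 0x10000000u
#define PBDRV_UNCACHED_ADDR(x) ((void *)((uintptr_t)&(x) + PBDRV_CACHE_UNCACHED_OFFSET))
#define PBDRV_UNCACHED(x) (*(volatile __typeof__(x) *)PBDRV_UNCACHED_ADDR(x))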
@@ -103,6 +104,10 @@ static struct {
     uint8_t spi_cmd_buf_tx[SPI_CMD_BUF_SZ];
     // This is used to hold the replies to commands to the SPI peripheral.
     uint8_t spi_cmd_buf_rx[SPI_CMD_BUF_SZ];
+    // This stores the RX buffer address so that we can invalidate the cache when DMA is complete.
+    uint32_t rx_user_buf_addr;
+    // This stores the RX buffer size.
+    uint32_t rx_user_buf_sz;
 } spi_dev;
 
 static uint32_t last_spi_dma_complete_time;
@@ -114,6 +119,9 @@ static void spi_dma_complete(void) {
     }
     SPIIntDisable(SOC_SPI_0_REGS, SPI_DMA_REQUEST_ENA_INT);
     pbio_os_request_poll();
+    if (spi_dev.rx_user_buf_addr && spi_dev.rx_user_buf_sz) {
+        pbdrv_cache_prepare_after_dma((void *)spi_dev.rx_user_buf_addr, spi_dev.rx_user_buf_sz);
+    }
     last_spi_dma_complete_time = pbdrv_clock_get_ms();
 }
 
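Storing the RX buffer address and size lets the DMA-complete handler invalidate exactly the lines DMA just wrote, so the waiting thread does not read stale cached data. A minimal sketch of what such an invalidation could look like on the EV3's ARM926EJ-S core is shown here (32-byte cache lines assumed; the real pbdrv_cache_prepare_after_dma() may differ):

// Illustrative only: invalidate D-cache lines covering a DMA RX buffer on an
// ARM926EJ-S-class core. Assumes 32-byte cache lines.
#include <stdint.h>

#define CACHE_LINE_SIZE 32u

static void cache_invalidate_range(void *buf, uint32_t size) {
    uintptr_t addr = (uintptr_t)buf & ~(uintptr_t)(CACHE_LINE_SIZE - 1);
    uintptr_t end = (uintptr_t)buf + size;
    for (; addr < end; addr += CACHE_LINE_SIZE) {
        // MCR p15, 0, <addr>, c7, c6, 1: invalidate D-cache line by MVA.
        __asm__ volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (addr) : "memory");
    }
    // Drain the write buffer before any subsequent loads from the buffer.
    __asm__ volatile ("mcr p15, 0, %0, c7, c10, 4" : : "r" (0) : "memory");
}

A real implementation also has to deal with buffers that do not start or end on a cache-line boundary, where a plain invalidate can discard unrelated neighbouring data.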
@@ -153,7 +161,7 @@ static void spi0_isr(void) {
             continue;
         }
 
-        spi_dev.spi_cmd_buf_rx[0] = HWREG(SOC_SPI_0_REGS + SPI_SPIBUF);
+        PBDRV_UNCACHED(spi_dev.spi_cmd_buf_rx[0]) = HWREG(SOC_SPI_0_REGS + SPI_SPIBUF);
         spi_dev.status &= ~SPI_STATUS_WAIT_RX;
         SPIIntDisable(SOC_SPI_0_REGS, SPI_RECV_INT);
         pbio_os_request_poll();
@@ -351,16 +359,21 @@ static pbio_error_t spi_begin_for_flash(
 
         spi_dev.status = SPI_STATUS_WAIT_RX;
 
+        // Prevent write to spi_dev.status from being reordered
+        pbdrv_compiler_memory_barrier();
+
         uint32_t tx = spi0_last_dat1_for_flash(cmd[0]);
         SPIIntEnable(SOC_SPI_0_REGS, SPI_RECV_INT);
         HWREG(SOC_SPI_0_REGS + SPI_SPIDAT1) = tx;
     } else {
-        memcpy(&spi_dev.spi_cmd_buf_tx, cmd, cmd_len);
+        memcpy(PBDRV_UNCACHED_ADDR(spi_dev.spi_cmd_buf_tx), cmd, cmd_len);
+        spi_dev.rx_user_buf_addr = (uint32_t)user_data_rx;
+        spi_dev.rx_user_buf_sz = user_data_len;
 
         if (user_data_len == 0) {
             // Only a command, no user data
 
-            spi_dev.tx_last_word = spi0_last_dat1_for_flash(cmd[cmd_len - 1]);
+            PBDRV_UNCACHED(spi_dev.tx_last_word) = spi0_last_dat1_for_flash(cmd[cmd_len - 1]);
 
             // TX everything except last byte
             ps.p.srcAddr = (unsigned int)(&spi_dev.spi_cmd_buf_tx);
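The barrier added above documents an ordering requirement rather than a cache one: spi_dev.status must be committed to memory before the register write that can trigger the SPI interrupt or DMA completion, otherwise the handler could clear a flag that has not been set yet. The driver relies on a compiler barrier for this on the single-core EV3 target; a typical (assumed) implementation is an empty asm statement with a memory clobber that stops the compiler from reordering memory accesses across it:

// Assumed implementation; the real pbdrv_compiler_memory_barrier() may differ.
#define pbdrv_compiler_memory_barrier() __asm__ volatile ("" : : : "memory")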
@@ -418,7 +431,8 @@ static pbio_error_t spi_begin_for_flash(
         edma3_set_param(EDMA3_CHA_SPI0_TX, &ps);
 
         if (user_data_tx) {
-            spi_dev.tx_last_word = spi0_last_dat1_for_flash(user_data_tx[user_data_len - 1]);
+            pbdrv_cache_prepare_before_dma(user_data_tx, user_data_len);
+            PBDRV_UNCACHED(spi_dev.tx_last_word) = spi0_last_dat1_for_flash(user_data_tx[user_data_len - 1]);
 
             // TX all but the last byte
             ps.p.srcAddr = (unsigned int)(user_data_tx);
@@ -434,7 +448,7 @@ static pbio_error_t spi_begin_for_flash(
             ps.p.opt = EDMA3CC_OPT_TCINTEN | (EDMA3_CHA_SPI0_TX << EDMA3CC_OPT_TCC_SHIFT);
             edma3_set_param(127, &ps);
         } else {
-            spi_dev.tx_last_word = spi0_last_dat1_for_flash(0);
+            PBDRV_UNCACHED(spi_dev.tx_last_word) = spi0_last_dat1_for_flash(0);
 
             // TX all but the last byte
             ps.p.srcAddr = (unsigned int)(&spi_dev.tx_dummy_byte);
@@ -487,7 +501,7 @@ static pbio_error_t spi_begin_for_flash(
 
     spi_dev.status = SPI_STATUS_WAIT_TX | SPI_STATUS_WAIT_RX;
 
-    // TODO: eventually needs DMA cache management
+    // Prevent write to spi_dev.status from being reordered
     pbdrv_compiler_memory_barrier();
 
     EDMA3EnableTransfer(SOC_EDMA30CC_0_REGS, EDMA3_CHA_SPI0_TX, EDMA3_TRIG_MODE_EVENT);
@@ -600,7 +614,7 @@ static pbio_error_t flash_wait_write(pbio_os_state_t *state) {
         }
         PBIO_OS_AWAIT_WHILE(state, spi_dev.status & SPI_STATUS_WAIT_ANY);
 
-        status = spi_dev.spi_cmd_buf_rx[1];
+        status = PBDRV_UNCACHED(spi_dev.spi_cmd_buf_rx[1]);
     } while (status & FLASH_STATUS_BUSY);
 
     PBIO_OS_ASYNC_END(PBIO_SUCCESS);
@@ -804,9 +818,14 @@ static pbio_error_t pbdrv_block_device_ev3_spi_begin_for_adc(const uint32_t *cmd
     ps.p.opt = EDMA3CC_OPT_TCINTEN | (EDMA3_CHA_SPI0_RX << EDMA3CC_OPT_TCC_SHIFT);
     edma3_set_param(EDMA3_CHA_SPI0_RX, &ps);
 
+    // We play dangerously and don't clean the cache for the command buffer (since it is const),
+    // but we do need to invalidate the cache for the data which is read back.
+    spi_dev.rx_user_buf_addr = (uint32_t)data;
+    spi_dev.rx_user_buf_sz = sizeof(uint16_t) * len;
+
     spi_dev.status = SPI_STATUS_WAIT_TX | SPI_STATUS_WAIT_RX;
 
-    // TODO: eventually needs DMA cache management
+    // Prevent write to spi_dev.status from being reordered
    pbdrv_compiler_memory_barrier();
 
     EDMA3EnableTransfer(SOC_EDMA30CC_0_REGS, EDMA3_CHA_SPI0_TX, EDMA3_TRIG_MODE_EVENT);
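The "play dangerously" note means the clean step is skipped for the const ADC command words: the CPU never writes them at runtime, so there should be no dirty cache lines to write back before the TX DMA. If that assumption ever stops holding, the conservative alternative is to clean the command buffer as well, roughly as sketched here (the command length is assumed to be len 32-bit words, mirroring the RX side):

// Conservative variant (illustrative): clean the ADC command words too before
// starting the TX DMA, at the cost of a few extra cache operations.
pbdrv_cache_prepare_before_dma(cmd, sizeof(uint32_t) * len);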
@@ -869,7 +888,7 @@ pbio_error_t ev3_spi_process_thread(pbio_os_state_t *state, void *context) {
         return err;
     }
     PBIO_OS_AWAIT_WHILE(state, spi_dev.status & SPI_STATUS_WAIT_ANY);
-    if (memcmp(device_id, &spi_dev.spi_cmd_buf_rx[1], sizeof(device_id))) {
+    if (memcmp(device_id, PBDRV_UNCACHED_ADDR(spi_dev.spi_cmd_buf_rx[1]), sizeof(device_id))) {
         return PBIO_ERROR_FAILED;
     }
 