@@ -139,8 +139,8 @@ typedef struct {
     uint8_t buffer[NUMBER_OF_BANKS][DMA_BUFFER_SIZE];
     uint32_t usage_counter;
     uint8_t tx_data;
-    volatile uint8_t tx_in_progress;
-    volatile uint8_t rx_in_progress;
+    bool tx_in_progress;
+    bool rx_in_progress;
     bool tx_asynch;
     bool rx_asynch;
     bool callback_posted;
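A note on the struct change: dropping `volatile` is sound only because every cross-context access to the two flags now goes through the `core_util_atomic_*` helpers. `volatile` forces the compiler to emit each load and store, but it neither makes a read-modify-write sequence atomic nor orders it against an interrupt. A minimal sketch of the race being closed (illustrative only, not driver code):

```c
#include <stdint.h>

static volatile uint8_t flag;   /* hypothetical flag */

/* A volatile flag does not make a read-modify-write atomic: an
 * interrupt can fire between the load and the store, and both
 * contexts then believe they claimed the flag. */
void racy_claim(void)
{
    if (flag == 0) {    /* 1: load                                  */
                        /* <- ISR may run here and set flag itself  */
        flag = 1;       /* 2: store clobbers the ISR's claim        */
    }
}
```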
@@ -253,7 +253,7 @@ static void nordic_nrf5_uart_callback_handler(uint32_t instance)
 static void nordic_nrf5_uart_event_handler_endtx(int instance)
 {
     /* Release mutex. As the owner this call is safe. */
-    nordic_nrf5_uart_state[instance].tx_in_progress = 0;
+    core_util_atomic_store_bool(&nordic_nrf5_uart_state[instance].tx_in_progress, false);
 
     /* Check if callback handler and Tx event mask is set. */
     uart_irq_handler callback = (uart_irq_handler) nordic_nrf5_uart_state[instance].owner->handler;
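Since only the current owner ever clears the flag, a plain atomic store is enough on the release side; no compare or exchange is needed. In C11 terms (a sketch, assuming `core_util_atomic_store_bool()` behaves like an ordinary atomic store; the names are hypothetical stand-ins):

```c
#include <stdatomic.h>
#include <stdbool.h>

static atomic_bool tx_in_progress;  /* stand-in for the driver flag */

/* ISR context: publish "transfer done" so a thread spinning on the
 * flag in the transmit path can claim it. */
void endtx_isr(void)
{
    atomic_store(&tx_in_progress, false);
}
```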
@@ -276,8 +276,8 @@ static void nordic_nrf5_uart_event_handler_endtx(int instance)
 static void nordic_nrf5_uart_event_handler_endtx_asynch(int instance)
 {
     /* Set Tx done and reset Tx mode to be not asynchronous. */
-    nordic_nrf5_uart_state[instance].tx_in_progress = 0;
     nordic_nrf5_uart_state[instance].tx_asynch = false;
+    core_util_atomic_store_bool(&nordic_nrf5_uart_state[instance].tx_in_progress, false);
 
     /* Cast handler to callback function pointer. */
     void (*callback)(void) = (void (*)(void)) nordic_nrf5_uart_state[instance].owner->tx_handler;
@@ -483,8 +483,8 @@ static void nordic_nrf5_uart_event_handler_rxstarted(int instance)
 static void nordic_nrf5_uart_event_handler_endrx_asynch(int instance)
 {
     /* Set Rx done and reset Rx mode to be not asynchronous. */
-    nordic_nrf5_uart_state[instance].rx_in_progress = 0;
     nordic_nrf5_uart_state[instance].rx_asynch = false;
+    core_util_atomic_store_bool(&nordic_nrf5_uart_state[instance].rx_in_progress, false);
 
     /* Cast handler to callback function pointer. */
     void (*callback)(void) = (void (*)(void)) nordic_nrf5_uart_state[instance].owner->rx_handler;
@@ -1411,7 +1411,7 @@ int serial_writable(serial_t *obj)
 
     int instance = uart_object->instance;
 
-    return ((nordic_nrf5_uart_state[instance].tx_in_progress == 0) &&
+    return (!core_util_atomic_load_bool(&nordic_nrf5_uart_state[instance].tx_in_progress) &&
            (nrf_uarte_event_extra_check(nordic_nrf5_uart_register[instance], NRF_UARTE_EVENT_TXDRDY)));
 }
 
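For context, `serial_writable()` is the HAL's non-blocking poll: with this change the flag is read through an atomic load rather than a direct `volatile` read, and the port reports writable only when no transfer is in flight and the UARTE's TXDRDY event has fired. A typical caller (hypothetical usage sketch, assuming an initialized `serial_t`):

```c
#include "hal/serial_api.h"

/* Spin until the Tx path is free, then write one character. */
void putc_blocking(serial_t *uart, int c)
{
    while (!serial_writable(uart)) {
        /* busy-wait: tx_in_progress set or TXDRDY not yet signalled */
    }
    serial_putc(uart, c);
}
```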
@@ -1470,16 +1470,14 @@ int serial_tx_asynch(serial_t *obj, const void *tx, size_t tx_length, uint8_t tx
 
     /**
      * tx_in_progress acts like a mutex to ensure only one transmission can be active at a time.
-     * The flag is modified using the atomic compare-and-set function.
+     * The flag is modified using the atomic exchange function - only proceed when we see the
+     * flag clear and we set it to true.
      */
-    bool mutex = false;
+    bool old_mutex;
 
     do {
-        uint8_t expected = 0;
-        uint8_t desired = 1;
-
-        mutex = core_util_atomic_cas_u8((uint8_t *) &nordic_nrf5_uart_state[instance].tx_in_progress, &expected, desired);
-    } while (mutex == false);
+        old_mutex = core_util_atomic_exchange_bool(&nordic_nrf5_uart_state[instance].tx_in_progress, true);
+    } while (old_mutex == true);
 
     /* State variables. */
     int result = 0;
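The do/while above is the classic test-and-set lock: unconditionally write `true` and retry while the previous value was already `true`. Unlike the `core_util_atomic_cas_u8()` loop it replaces, the exchange needs no expected/desired pair, and the loop condition reads directly as "someone else held it". The same pattern in portable C11, as a standalone sketch (assuming `core_util_atomic_exchange_bool()` has plain atomic-exchange semantics):

```c
#include <stdatomic.h>
#include <stdbool.h>

static atomic_bool in_progress;  /* stand-in for tx_in_progress */

void acquire(void)
{
    /* Set the flag unconditionally; loop while it was already set. */
    while (atomic_exchange(&in_progress, true)) {
        /* spin: another context owns the "mutex" */
    }
}

void release(void)
{
    atomic_store(&in_progress, false);  /* what the ENDTX handler does */
}
```

The Rx path below applies the identical acquire discipline to `rx_in_progress`.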
@@ -1596,16 +1594,14 @@ void serial_rx_asynch(serial_t *obj, void *rx, size_t rx_length, uint8_t rx_widt
 
     /**
      * rx_in_progress acts like a mutex to ensure only one asynchronous reception can be active at a time.
-     * The flag is modified using the atomic compare-and-set function.
+     * The flag is modified using the atomic exchange function - only proceed when we see the
+     * flag clear and we set it to true.
      */
-    bool mutex = false;
+    bool old_mutex;
 
     do {
-        uint8_t expected = 0;
-        uint8_t desired = 1;
-
-        mutex = core_util_atomic_cas_u8((uint8_t *) &nordic_nrf5_uart_state[instance].rx_in_progress, &expected, desired);
-    } while (mutex == false);
+        old_mutex = core_util_atomic_exchange_bool(&nordic_nrf5_uart_state[instance].rx_in_progress, true);
+    } while (old_mutex == true);
 
     /* Store callback handler, mask and reset event value. */
     obj->serial.rx_handler = handler;
@@ -1684,8 +1680,8 @@ void serial_tx_abort_asynch(serial_t *obj)
     nrf_uarte_event_clear(nordic_nrf5_uart_register[instance], NRF_UARTE_EVENT_ENDTX);
 
     /* Reset Tx flags. */
-    nordic_nrf5_uart_state[instance].tx_in_progress = 0;
     nordic_nrf5_uart_state[instance].tx_asynch = false;
+    nordic_nrf5_uart_state[instance].tx_in_progress = false;
 
     /* Force reconfiguration. */
     obj->serial.update = true;
@@ -1712,8 +1708,8 @@ void serial_rx_abort_asynch(serial_t *obj)
     core_util_critical_section_enter();
 
     /* Reset Rx flags. */
-    nordic_nrf5_uart_state[obj->serial.instance].rx_in_progress = 0;
     nordic_nrf5_uart_state[obj->serial.instance].rx_asynch = false;
+    nordic_nrf5_uart_state[obj->serial.instance].rx_in_progress = false;
     obj->serial.rx_asynch = false;
 
     /* Force reconfiguration. */
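Both abort paths clear the flags with plain assignments rather than the atomic helpers. For `serial_rx_abort_asynch()` this visibly happens inside a critical section, where interrupts are masked and a plain store cannot race the UARTE ISR; presumably the Tx abort relies on the same protection outside the visible context. The discipline, sketched (hypothetical flag, not driver state):

```c
#include "platform/mbed_critical.h"
#include <stdbool.h>

static bool rx_in_progress;

void abort_rx(void)
{
    core_util_critical_section_enter();  /* masks interrupts */
    rx_in_progress = false;              /* plain store is safe here */
    core_util_critical_section_exit();
}
```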