2222
2323LOG_MODULE_REGISTER (uart_silabs_eusart , CONFIG_UART_LOG_LEVEL );
2424
25+ /* Compatibility section for older EUSART IP versions (e.g., xg22 SoC Series).
26+ *
27+ * Older EUSART hardware (IP version 0x00000000) lacks the RXTO (RX Timeout) interrupt,
28+ * which is used to detect when the RX line becomes idle after receiving partial data.
29+ *
30+ * For these devices, we implement a pure polling mode using a work queue that periodically
30+ * checks the RXIDLE status instead of relying on hardware timeout interrupts. Note that this
31+ * can introduce some undesirable behavior with very slow baud rates or high timeout values.
33+ *
34+ * The EUSART_RXTO flag is used to enable/disable this workaround, and we define dummy values for the
35+ * missing RXTO interrupt flags to avoid compilation errors.
36+ */
#if (_EUSART_IPVERSION_RESETVALUE == 0x00000000UL)

/*
 * Older EUSART IP (e.g. xg22): no RXTO interrupt. Define the missing
 * flags as 0 so that code guarded by IS_ENABLED(EUSART_RXTO) still
 * compiles. A value of 0 is a safe no-op if it is ever passed to
 * EUSART_IntClear()/EUSART_IntEnable() or OR-ed into CFG1, whereas -1
 * would set every bit of the mask/register.
 */
#define EUSART_IF_RXTO                 0
#define EUSART_CFG1_RXTIMEOUT_ONEFRAME 0

#else

/* Newer EUSART IP: the hardware RXTO (RX timeout) interrupt is available. */
#define EUSART_RXTO 1

#endif
47+
2548struct eusart_dma_channel {
2649 const struct device * dma_dev ;
2750 uint32_t dma_channel ;
@@ -457,6 +480,11 @@ __maybe_unused static void eusart_dma_rx_cb(const struct device *dma_dev, void *
457480 if (data -> rx_next_buffer ) {
458481 eusart_async_evt_rx_buf_release (data );
459482 eusart_dma_replace_buffer (uart_dev );
483+
484+ if (!IS_ENABLED (EUSART_RXTO )) {
485+ eusart_async_timer_start (& data -> dma_rx .timeout_work , data -> dma_rx .timeout );
486+ }
487+
460488 } else {
461489 dma_stop (data -> dma_rx .dma_dev , data -> dma_rx .dma_channel );
462490 data -> dma_rx .enabled = false;
@@ -595,9 +623,16 @@ static int eusart_async_rx_enable(const struct device *dev, uint8_t *rx_buf, siz
595623 }
596624
597625 eusart_pm_lock_get (dev , EUSART_PM_LOCK_RX );
598- EUSART_IntClear (config -> eusart , EUSART_IF_RXOF | EUSART_IF_RXTO );
626+ EUSART_IntClear (config -> eusart , EUSART_IF_RXOF );
599627 EUSART_IntEnable (config -> eusart , EUSART_IF_RXOF );
600- EUSART_IntEnable (config -> eusart , EUSART_IF_RXTO );
628+
629+ if (IS_ENABLED (EUSART_RXTO )) {
630+ EUSART_IntClear (config -> eusart , EUSART_IF_RXTO );
631+ EUSART_IntEnable (config -> eusart , EUSART_IF_RXTO );
632+ } else {
633+ /* Use pure polling via timeout work instead of RXTO interrupt.*/
634+ eusart_async_timer_start (& data -> dma_rx .timeout_work , data -> dma_rx .timeout );
635+ }
601636
602637 data -> dma_rx .enabled = true;
603638
@@ -622,12 +657,17 @@ static int eusart_async_rx_disable(const struct device *dev)
622657 dma_stop (data -> dma_rx .dma_dev , data -> dma_rx .dma_channel );
623658
624659 EUSART_IntDisable (eusart , EUSART_IF_RXOF );
625- EUSART_IntDisable (eusart , EUSART_IF_RXTO );
626- EUSART_IntClear (eusart , EUSART_IF_RXOF | EUSART_IF_RXTO );
627- eusart_pm_lock_put (dev , EUSART_PM_LOCK_RX );
660+ EUSART_IntClear (eusart , EUSART_IF_RXOF );
628661
629662 k_work_cancel_delayable (& data -> dma_rx .timeout_work );
630663
664+ if (IS_ENABLED (EUSART_RXTO )) {
665+ EUSART_IntDisable (eusart , EUSART_IF_RXTO );
666+ EUSART_IntClear (eusart , EUSART_IF_RXTO );
667+ }
668+
669+ eusart_pm_lock_put (dev , EUSART_PM_LOCK_RX );
670+
631671 eusart_dma_rx_flush (data );
632672
633673 eusart_async_evt_rx_buf_release (data );
@@ -687,8 +727,33 @@ static void eusart_async_rx_timeout(struct k_work *work)
687727 struct eusart_dma_channel * rx_channel =
688728 CONTAINER_OF (dwork , struct eusart_dma_channel , timeout_work );
689729 struct eusart_data * data = CONTAINER_OF (rx_channel , struct eusart_data , dma_rx );
730+ const struct eusart_config * config = data -> uart_dev -> config ;
731+ struct dma_status stat ;
732+ size_t pending = 0 ;
690733
691- eusart_dma_rx_flush (data );
734+ if (IS_ENABLED (EUSART_RXTO )) {
735+ eusart_dma_rx_flush (data );
736+ return ;
737+ }
738+
739+ if (!dma_get_status (data -> dma_rx .dma_dev , data -> dma_rx .dma_channel , & stat )) {
740+ pending = stat .pending_length ;
741+ }
742+
743+ if (!(EUSART_StatusGet (config -> eusart ) & EUSART_STATUS_RXIDLE )) {
744+ eusart_async_timer_start (& data -> dma_rx .timeout_work , data -> dma_rx .timeout );
745+ return ;
746+ }
747+
748+ /* some data has been received and the dma is not done yet*/
749+ if (pending < data -> dma_rx .buffer_length ) {
750+ eusart_dma_rx_flush (data );
751+ }
752+
753+ /* Continue polling if RX is still enabled (for next transmission) */
754+ if (data -> dma_rx .enabled ) {
755+ eusart_async_timer_start (& data -> dma_rx .timeout_work , data -> dma_rx .timeout );
756+ }
692757}
693758
694759static void eusart_async_tx_timeout (struct k_work * work )
@@ -957,7 +1022,9 @@ static void eusart_configure_peripheral(const struct device *dev, bool enable)
9571022 EUSART_UartInitHf (config -> eusart , & eusartInit );
9581023
9591024#ifdef CONFIG_UART_SILABS_EUSART_ASYNC
960- config -> eusart -> CFG1 |= EUSART_CFG1_RXTIMEOUT_ONEFRAME ;
1025+ if (IS_ENABLED (EUSART_RXTO )) {
1026+ config -> eusart -> CFG1 |= EUSART_CFG1_RXTIMEOUT_ONEFRAME ;
1027+ }
9611028#endif
9621029
9631030 if (enable ) {
0 commit comments