@@ -700,11 +700,11 @@ static void dma_callback(const struct device *dev, void *user_data,
 	LOG_DBG("dma callback");
 
 	if (channel == data->dma.channel) {
-#if !defined(CONFIG_SOC_SERIES_STM32F1X)
+#if !defined(CONFIG_SOC_SERIES_STM32F1X) && !defined(STM32F3X_ADC_V2_5)
 		if (LL_ADC_IsActiveFlag_OVR(adc) || (status >= 0)) {
 #else
 		if (status >= 0) {
-#endif /* !defined(CONFIG_SOC_SERIES_STM32F1X) */
+#endif /* !defined(CONFIG_SOC_SERIES_STM32F1X) && !defined(STM32F3X_ADC_V2_5) */
 			data->samples_count = data->channel_count;
 			data->buffer += data->channel_count;
 			/* Stop the DMA engine, only to start it again when the callback returns
@@ -713,9 +713,9 @@ static void dma_callback(const struct device *dev, void *user_data,
 			 * within adc_context_start_sampling
 			 */
 			dma_stop(data->dma.dma_dev, data->dma.channel);
-#if !defined(CONFIG_SOC_SERIES_STM32F1X)
+#if !defined(CONFIG_SOC_SERIES_STM32F1X) && !defined(STM32F3X_ADC_V2_5)
 			LL_ADC_ClearFlag_OVR(adc);
-#endif /* !defined(CONFIG_SOC_SERIES_STM32F1X) */
+#endif /* !defined(CONFIG_SOC_SERIES_STM32F1X) && !defined(STM32F3X_ADC_V2_5) */
 			/* No need to invalidate the cache because it's assumed that
 			 * the address is in a non-cacheable SRAM region.
 			 */
@@ -1005,9 +1005,9 @@ static int start_read(const struct device *dev,
 	 */
 	adc_stm32_enable(adc);
 
-#if !defined(CONFIG_SOC_SERIES_STM32F1X)
+#if !defined(CONFIG_SOC_SERIES_STM32F1X) && !defined(STM32F3X_ADC_V2_5)
 	LL_ADC_ClearFlag_OVR(adc);
-#endif /* !defined(CONFIG_SOC_SERIES_STM32F1X) */
+#endif /* !defined(CONFIG_SOC_SERIES_STM32F1X) && !defined(STM32F3X_ADC_V2_5) */
 
 #if !defined(CONFIG_ADC_STM32_DMA)
 #if defined(CONFIG_SOC_SERIES_STM32F2X) || \
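The guard these hunks extend is a plain compile-time selection: when neither CONFIG_SOC_SERIES_STM32F1X nor STM32F3X_ADC_V2_5 is defined, the DMA callback consults the ADC overrun flag in addition to the DMA status; otherwise only the status is checked and no OVR flag is cleared. The standalone sketch below shows that selection in isolation — it is not driver code, and adc_overrun_flag_is_set() is a hypothetical stub standing in for LL_ADC_IsActiveFlag_OVR().

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical stand-in for LL_ADC_IsActiveFlag_OVR(adc), only so the
 * example compiles on its own.
 */
static bool adc_overrun_flag_is_set(void)
{
	return false;
}

static void on_dma_complete(int status)
{
#if !defined(CONFIG_SOC_SERIES_STM32F1X) && !defined(STM32F3X_ADC_V2_5)
	/* ADC variants with an overrun flag: react to a good DMA status
	 * or to a pending overrun that must be acknowledged.
	 */
	if (adc_overrun_flag_is_set() || (status >= 0)) {
		printf("sampling round done, status %d\n", status);
	}
#else
	/* STM32F1 series and the F3 ADC v2.5 variant take the simpler
	 * path: only the DMA status is evaluated.
	 */
	if (status >= 0) {
		printf("sampling round done, status %d\n", status);
	}
#endif
}

int main(void)
{
	on_dma_complete(0);
	return 0;
}

Building the sketch with, for example, -DSTM32F3X_ADC_V2_5 exercises the reduced branch that the patch now routes the F3 ADC v2.5 variant onto.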