diff --git a/boards/st/nucleo_n657x0_q/twister.yaml b/boards/st/nucleo_n657x0_q/twister.yaml
index 1b3b669ae47ec..b3175f3bc52cb 100644
--- a/boards/st/nucleo_n657x0_q/twister.yaml
+++ b/boards/st/nucleo_n657x0_q/twister.yaml
@@ -12,10 +12,11 @@ supported:
   - dma
   - i2c
   - gpio
+  - netif:eth
   - spi
   - uart
   - usb_device
   - usbd
 vendor: st
 variants:
   nucleo_n657x0_q/stm32n657xx:
diff --git a/boards/st/stm32n6570_dk/twister.yaml b/boards/st/stm32n6570_dk/twister.yaml
index 6044f38c7afb8..4bc6494b55572 100644
--- a/boards/st/stm32n6570_dk/twister.yaml
+++ b/boards/st/stm32n6570_dk/twister.yaml
@@ -14,12 +14,14 @@ supported:
   - dma
   - i2c
   - gpio
-  - pwm
   - memc
+  - netif:eth
+  - pwm
   - spi
   - uart
   - usb_device
   - usbd
+  - video
 variants:
   stm32n6570_dk/stm32n657xx:
     twister: false
diff --git a/doc/build/dts/api/api.rst b/doc/build/dts/api/api.rst
index 31cfb3842edc6..7352317e9dfa5 100644
--- a/doc/build/dts/api/api.rst
+++ b/doc/build/dts/api/api.rst
@@ -466,6 +466,8 @@ device.
        WS2812 GPIO driver
    * - zephyr,touch
      - touchscreen controller device node.
+   * - zephyr,videoenc
+     - Video encoder device, typically an H.264 or MJPEG video encoder.
    * - mcuboot,ram-load-dev
      - When a Zephyr application is built to be loaded to RAM by MCUboot, with
        :kconfig:option:`CONFIG_MCUBOOT_BOOTLOADER_MODE_SINGLE_APP_RAM_LOAD`,
diff --git a/drivers/ethernet/eth_stm32_hal_common.c b/drivers/ethernet/eth_stm32_hal_common.c
index dba7cf1f90fd9..966083abb3f4b 100644
--- a/drivers/ethernet/eth_stm32_hal_common.c
+++ b/drivers/ethernet/eth_stm32_hal_common.c
@@ -46,8 +46,8 @@ uint8_t dma_rx_buffer[ETH_RXBUFNB][ETH_STM32_RX_BUF_SIZE] __eth_stm32_buf;
 uint8_t dma_tx_buffer[ETH_TXBUFNB][ETH_STM32_TX_BUF_SIZE] __eth_stm32_buf;
 
 #if DT_HAS_COMPAT_STATUS_OKAY(st_stm32n6_ethernet)
-ETH_DMADescTypeDef dma_rx_desc_tab[ETH_DMA_RX_CH_CNT][ETH_RXBUFNB] ALIGN_32BYTES(__eth_stm32_desc);
-ETH_DMADescTypeDef dma_tx_desc_tab[ETH_DMA_TX_CH_CNT][ETH_TXBUFNB] ALIGN_32BYTES(__eth_stm32_desc);
+ETH_DMADescTypeDef dma_rx_desc_tab[ETH_DMA_RX_CH_CNT][ETH_RXBUFNB] __eth_stm32_desc __aligned(32);
+ETH_DMADescTypeDef dma_tx_desc_tab[ETH_DMA_TX_CH_CNT][ETH_TXBUFNB] __eth_stm32_desc __aligned(32);
 #else
 ETH_DMADescTypeDef dma_rx_desc_tab[ETH_RXBUFNB] __eth_stm32_desc;
 ETH_DMADescTypeDef dma_tx_desc_tab[ETH_TXBUFNB] __eth_stm32_desc;
diff --git a/drivers/ethernet/eth_stm32_hal_priv.h b/drivers/ethernet/eth_stm32_hal_priv.h
index f94ff046bef84..cfe063e1a81b4 100644
--- a/drivers/ethernet/eth_stm32_hal_priv.h
+++ b/drivers/ethernet/eth_stm32_hal_priv.h
@@ -44,24 +44,12 @@ extern const struct device *eth_stm32_phy_dev;
 #define __eth_stm32_buf __aligned(4)
 #endif
 
-#if DT_HAS_COMPAT_STATUS_OKAY(st_stm32n6_ethernet)
-#define STM32_ETH_PHY_MODE(inst) \
-	((DT_INST_ENUM_HAS_VALUE(inst, phy_connection_type, rgmii) ? ETH_RGMII_MODE : \
-	 (DT_INST_ENUM_HAS_VALUE(inst, phy_connection_type, gmii) ? ETH_GMII_MODE : \
-	 (DT_INST_ENUM_HAS_VALUE(inst, phy_connection_type, mii) ? ETH_MII_MODE : \
-	  ETH_RMII_MODE))))
-#else
-#define STM32_ETH_PHY_MODE(inst) \
-	(DT_INST_ENUM_HAS_VALUE(inst, phy_connection_type, mii) ?
\ - ETH_MII_MODE : ETH_RMII_MODE) -#endif - #if defined(CONFIG_ETH_STM32_HAL_API_V1) #define ETH_MII_MODE ETH_MEDIA_INTERFACE_MII #define ETH_RMII_MODE ETH_MEDIA_INTERFACE_RMII -#define ETH_STM32_AUTO_NEGOTIATION_ENABLE \ +#define ETH_STM32_AUTO_NEGOTIATION_ENABLE \ UTIL_NOT(DT_NODE_HAS_PROP(DT_INST_PHANDLE(0, phy_handle), fixed_link)) #else /* CONFIG_ETH_STM32_HAL_API_V2 */ @@ -77,23 +65,37 @@ struct eth_stm32_tx_context { #endif /* CONFIG_ETH_STM32_HAL_API_V2 */ +#if DT_HAS_COMPAT_STATUS_OKAY(st_stm32n6_ethernet) + +#define ETH_GMII_MODE HAL_ETH_GMII_MODE +#define ETH_RGMII_MODE HAL_ETH_RGMII_MODE + +#define STM32_ETH_PHY_MODE(inst) \ + ((DT_INST_ENUM_HAS_VALUE(inst, phy_connection_type, rgmii) ? ETH_RGMII_MODE : \ + (DT_INST_ENUM_HAS_VALUE(inst, phy_connection_type, gmii) ? ETH_GMII_MODE : \ + (DT_INST_ENUM_HAS_VALUE(inst, phy_connection_type, mii) ? ETH_MII_MODE : \ + ETH_RMII_MODE)))) +#else +#define STM32_ETH_PHY_MODE(inst) \ + (DT_INST_ENUM_HAS_VALUE(inst, phy_connection_type, mii) ? \ + ETH_MII_MODE : ETH_RMII_MODE) +#endif + /* Definition of the Ethernet driver buffers size and count */ #define ETH_STM32_RX_BUF_SIZE ETH_MAX_PACKET_SIZE /* buffer size for receive */ #define ETH_STM32_TX_BUF_SIZE ETH_MAX_PACKET_SIZE /* buffer size for transmit */ BUILD_ASSERT(ETH_STM32_RX_BUF_SIZE % 4 == 0, "Rx buffer size must be a multiple of 4"); -extern uint8_t dma_rx_buffer[ETH_RXBUFNB][ETH_STM32_RX_BUF_SIZE] __eth_stm32_buf; -extern uint8_t dma_tx_buffer[ETH_TXBUFNB][ETH_STM32_TX_BUF_SIZE] __eth_stm32_buf; +extern uint8_t dma_rx_buffer[ETH_RXBUFNB][ETH_STM32_RX_BUF_SIZE]; +extern uint8_t dma_tx_buffer[ETH_TXBUFNB][ETH_STM32_TX_BUF_SIZE]; #if DT_HAS_COMPAT_STATUS_OKAY(st_stm32n6_ethernet) -extern ETH_DMADescTypeDef dma_rx_desc_tab[ETH_DMA_RX_CH_CNT][ETH_RXBUFNB] - ALIGN_32BYTES(__eth_stm32_desc); -extern ETH_DMADescTypeDef dma_tx_desc_tab[ETH_DMA_TX_CH_CNT][ETH_TXBUFNB] - ALIGN_32BYTES(__eth_stm32_desc); +extern ETH_DMADescTypeDef dma_rx_desc_tab[ETH_DMA_RX_CH_CNT][ETH_RXBUFNB]; +extern ETH_DMADescTypeDef dma_tx_desc_tab[ETH_DMA_TX_CH_CNT][ETH_TXBUFNB]; #else -extern ETH_DMADescTypeDef dma_rx_desc_tab[ETH_RXBUFNB] __eth_stm32_desc; -extern ETH_DMADescTypeDef dma_tx_desc_tab[ETH_TXBUFNB] __eth_stm32_desc; +extern ETH_DMADescTypeDef dma_rx_desc_tab[ETH_RXBUFNB]; +extern ETH_DMADescTypeDef dma_tx_desc_tab[ETH_TXBUFNB]; #endif /* Device constant configuration parameters */ diff --git a/drivers/ethernet/eth_stm32_hal_v2.c b/drivers/ethernet/eth_stm32_hal_v2.c index f53fc3b0efab5..bc95ac2ca8394 100644 --- a/drivers/ethernet/eth_stm32_hal_v2.c +++ b/drivers/ethernet/eth_stm32_hal_v2.c @@ -19,11 +19,6 @@ LOG_MODULE_DECLARE(eth_stm32_hal, CONFIG_ETHERNET_LOG_LEVEL); -#if DT_HAS_COMPAT_STATUS_OKAY(st_stm32n6_ethernet) -#define ETH_GMII_MODE HAL_ETH_GMII_MODE -#define ETH_RGMII_MODE HAL_ETH_RGMII_MODE -#endif - #define ETH_DMA_TX_TIMEOUT_MS 20U /* transmit timeout in milliseconds */ struct eth_stm32_rx_buffer_header { diff --git a/drivers/video/CMakeLists.txt b/drivers/video/CMakeLists.txt index 7a03156165ce0..2f6a3244b5ad9 100644 --- a/drivers/video/CMakeLists.txt +++ b/drivers/video/CMakeLists.txt @@ -15,6 +15,7 @@ zephyr_library_sources_ifdef(CONFIG_VIDEO_OV7725 ov7725.c) zephyr_library_sources_ifdef(CONFIG_VIDEO_OV2640 ov2640.c) zephyr_library_sources_ifdef(CONFIG_VIDEO_GC2145 gc2145.c) zephyr_library_sources_ifdef(CONFIG_VIDEO_STM32_DCMI video_stm32_dcmi.c) +zephyr_library_sources_ifdef(CONFIG_VIDEO_STM32_VENC video_stm32_venc.c) zephyr_library_sources_ifdef(CONFIG_VIDEO_OV5640 ov5640.c) 
zephyr_library_sources_ifdef(CONFIG_VIDEO_OV7670 ov7670.c) zephyr_library_sources_ifdef(CONFIG_VIDEO_OV9655 ov9655.c) diff --git a/drivers/video/Kconfig b/drivers/video/Kconfig index 7b1f57a83c487..9d7fa8417b35d 100644 --- a/drivers/video/Kconfig +++ b/drivers/video/Kconfig @@ -76,6 +76,8 @@ source "drivers/video/Kconfig.ov2640" source "drivers/video/Kconfig.stm32_dcmi" +source "drivers/video/Kconfig.stm32_venc" + source "drivers/video/Kconfig.ov5640" source "drivers/video/Kconfig.ov7670" diff --git a/drivers/video/Kconfig.stm32_venc b/drivers/video/Kconfig.stm32_venc new file mode 100644 index 0000000000000..63efe52f5bb1a --- /dev/null +++ b/drivers/video/Kconfig.stm32_venc @@ -0,0 +1,23 @@ +# STM32 VENC driver configuration options + +# Copyright (c) 2025 STMicroelectronics. +# SPDX-License-Identifier: Apache-2.0 + +config VIDEO_STM32_VENC + bool "STM32 video encoder (VENC) driver" + default y + depends on DT_HAS_ST_STM32_VENC_ENABLED + select HAS_STM32LIB + select USE_STM32_LL_VENC + select USE_STM32_HAL_RIF if SOC_SERIES_STM32N6X + select RESET + help + Enable driver for STM32 video encoder peripheral. + +if VIDEO_STM32_VENC + +module = VC8000NANOE +module-str = vc8000nanoe +source "subsys/logging/Kconfig.template.log_config" + +endif diff --git a/drivers/video/video_stm32_venc.c b/drivers/video/video_stm32_venc.c new file mode 100644 index 0000000000000..6a6920e0e201e --- /dev/null +++ b/drivers/video/video_stm32_venc.c @@ -0,0 +1,903 @@ +/* + * Copyright (c) 2025 STMicroelectronics. + * + * SPDX-License-Identifier: Apache-2.0 + */ + +#define DT_DRV_COMPAT st_stm32_venc + +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "ewl.h" +#include "h264encapi.h" +#include "reg_offset_v7.h" + +LOG_MODULE_REGISTER(video_stm32_venc, CONFIG_VIDEO_LOG_LEVEL); + +#define VENC_DEFAULT_WIDTH 320 +#define VENC_DEFAULT_HEIGHT 240 +#define VENC_DEFAULT_FMT_IN VIDEO_PIX_FMT_NV12 +#define VENC_DEFAULT_FMT_OUT VIDEO_PIX_FMT_H264 +#define VENC_DEFAULT_FRAMERATE 30 +#define VENC_DEFAULT_LEVEL H264ENC_LEVEL_4 +#define VENC_DEFAULT_QP 25 +#define VENC_ESTIMATED_COMPRESSION_RATIO 10 + +#define ALIGNMENT_INCR 8UL + +#define EWL_HEAP_ALIGNED_ALLOC(size)\ + shared_multi_heap_aligned_alloc(CONFIG_VIDEO_BUFFER_SMH_ATTRIBUTE, ALIGNMENT_INCR, size) +#define EWL_HEAP_ALIGNED_FREE(block) shared_multi_heap_free(block) + +#define EWL_TIMEOUT 100UL + +#define MEM_CHUNKS 32 + +typedef void (*irq_config_func_t)(const struct device *dev); + +struct video_stm32_venc_config { + mm_reg_t reg; + const struct stm32_pclken pclken; + const struct reset_dt_spec reset; + irq_config_func_t irq_config; +}; + +typedef struct { + uint32_t clientType; + uint32_t *chunks[MEM_CHUNKS]; + uint32_t *alignedChunks[MEM_CHUNKS]; + uint32_t totalChunks; + const struct video_stm32_venc_config *config; + struct k_sem complete; + uint32_t irq_status; + uint32_t irq_cnt; + uint32_t mem_cnt; +} VENC_EWL_TypeDef; + +static VENC_EWL_TypeDef ewl_instance; + +struct video_stm32_venc_data { + const struct device *dev; + struct video_format fmt_in; + struct video_format fmt_out; + struct k_fifo fifo_input; + struct k_fifo fifo_output_in; + struct k_fifo fifo_output_out; + struct video_buffer *vbuf; + H264EncInst encoder; + uint32_t frame_nb; + bool resync; +}; + +static int encoder_prepare(struct video_stm32_venc_data *data); +static int encode_frame(struct video_stm32_venc_data *data); +static int encoder_end(struct video_stm32_venc_data *data); +static int encoder_start(struct 
video_stm32_venc_data *data); + +static inline H264EncPictureType to_h264pixfmt(uint32_t pixelformat) +{ + switch (pixelformat) { + case VIDEO_PIX_FMT_NV12: + return H264ENC_YUV420_SEMIPLANAR; + case VIDEO_PIX_FMT_RGB565: + return H264ENC_RGB565; + default: + CODE_UNREACHABLE; + } +} + +u32 EWLReadAsicID(void) +{ + const struct video_stm32_venc_config *config = ewl_instance.config; + + return sys_read32(config->reg + BASE_HEncASIC); +} + +EWLHwConfig_t EWLReadAsicConfig(void) +{ + const struct video_stm32_venc_config *config = ewl_instance.config; + EWLHwConfig_t cfg_info; + uint32_t cfgval, cfgval2; + + cfgval = sys_read32(config->reg + BASE_HEncSynth); + cfgval2 = sys_read32(config->reg + BASE_HEncSynth1); + + cfg_info = EWLBuildHwConfig(cfgval, cfgval2); + + LOG_DBG("maxEncodedWidth = %d", cfg_info.maxEncodedWidth); + LOG_DBG("h264Enabled = %d", cfg_info.h264Enabled); + LOG_DBG("jpegEnabled = %d", cfg_info.jpegEnabled); + LOG_DBG("vp8Enabled = %d", cfg_info.vp8Enabled); + LOG_DBG("vsEnabled = %d", cfg_info.vsEnabled); + LOG_DBG("rgbEnabled = %d", cfg_info.rgbEnabled); + LOG_DBG("searchAreaSmall = %d", cfg_info.searchAreaSmall); + LOG_DBG("scalingEnabled = %d", cfg_info.scalingEnabled); + LOG_DBG("address64bits = %d", cfg_info.addr64Support); + LOG_DBG("denoiseEnabled = %d", cfg_info.dnfSupport); + LOG_DBG("rfcEnabled = %d", cfg_info.rfcSupport); + LOG_DBG("instanctEnabled = %d", cfg_info.instantSupport); + LOG_DBG("busType = %d", cfg_info.busType); + LOG_DBG("synthesisLanguage = %d", cfg_info.synthesisLanguage); + LOG_DBG("busWidth = %d", cfg_info.busWidth * 32); + + return cfg_info; +} + +const void *EWLInit(EWLInitParam_t *param) +{ + __ASSERT_NO_MSG(param != NULL); + __ASSERT_NO_MSG(param->clientType == EWL_CLIENT_TYPE_H264_ENC); + + /* sync */ + k_sem_init(&ewl_instance.complete, 0, 1); + k_sem_reset(&ewl_instance.complete); + + /* set client type */ + ewl_instance.clientType = param->clientType; + ewl_instance.irq_cnt = 0; + + return (void *)&ewl_instance; +} + +i32 EWLRelease(const void *inst) +{ + __ASSERT_NO_MSG(inst != NULL); + + return EWL_OK; +} + +void EWLWriteReg(const void *inst, uint32_t offset, uint32_t val) +{ + const struct video_stm32_venc_config *config = ewl_instance.config; + + sys_write32(val, config->reg + offset); +} + +void EWLEnableHW(const void *inst, uint32_t offset, uint32_t val) +{ + const struct video_stm32_venc_config *config = ewl_instance.config; + + sys_write32(val, config->reg + offset); +} + +void EWLDisableHW(const void *inst, uint32_t offset, uint32_t val) +{ + const struct video_stm32_venc_config *config = ewl_instance.config; + + sys_write32(val, config->reg + offset); +} + +uint32_t EWLReadReg(const void *inst, uint32_t offset) +{ + const struct video_stm32_venc_config *config = ewl_instance.config; + + return sys_read32(config->reg + offset); +} + +i32 EWLMallocRefFrm(const void *instance, uint32_t size, EWLLinearMem_t *info) +{ + return EWLMallocLinear(instance, size, info); +} + +void EWLFreeRefFrm(const void *instance, EWLLinearMem_t *info) +{ + EWLFreeLinear(instance, info); +} + +i32 EWLMallocLinear(const void *instance, uint32_t size, EWLLinearMem_t *info) +{ + VENC_EWL_TypeDef *inst = (VENC_EWL_TypeDef *) instance; + + __ASSERT_NO_MSG(inst != NULL); + __ASSERT_NO_MSG(info != NULL); + + /* align size */ + uint32_t size_aligned = ROUND_UP(size, ALIGNMENT_INCR); + + info->size = size_aligned; + + /* allocate */ + inst->chunks[inst->totalChunks] = (uint32_t *)EWL_HEAP_ALIGNED_ALLOC(size_aligned); + if (inst->chunks[inst->totalChunks] == 
NULL) { + LOG_DBG("unable to allocate %8d bytes", size_aligned); + return EWL_ERROR; + } + + /* align given allocated buffer */ + inst->alignedChunks[inst->totalChunks] = + (uint32_t *)ROUND_UP((uint32_t)inst->chunks[inst->totalChunks], ALIGNMENT_INCR); + /* put the aligned pointer in the return structure */ + info->virtualAddress = inst->alignedChunks[inst->totalChunks++]; + + if (info->virtualAddress == NULL) { + LOG_DBG("unable to get chunk for %8d bytes", size_aligned); + return EWL_ERROR; + } + + /* bus address is the same as virtual address because no MMU */ + info->busAddress = (ptr_t)info->virtualAddress; + + inst->mem_cnt += size; + LOG_DBG("allocated %8d bytes --> %p / 0x%x. Total : %d", + size_aligned, (void *)info->virtualAddress, + info->busAddress, inst->mem_cnt); + + return EWL_OK; +} + +void EWLFreeLinear(const void *instance, EWLLinearMem_t *info) +{ + VENC_EWL_TypeDef *inst = (VENC_EWL_TypeDef *) instance; + + __ASSERT_NO_MSG(inst != NULL); + __ASSERT_NO_MSG(info != NULL); + + /* find the pointer corresponding to the aligned buffer */ + for (uint32_t i = 0; i < inst->totalChunks; i++) { + if (inst->alignedChunks[i] == info->virtualAddress) { + EWL_HEAP_ALIGNED_FREE(inst->chunks[i]); + break; + } + } + info->virtualAddress = NULL; + info->busAddress = 0; + info->size = 0; +} + +i32 EWLReserveHw(const void *inst) +{ + __ASSERT_NO_MSG(inst != NULL); + + return EWL_OK; +} + +void EWLReleaseHw(const void *inst) +{ + __ASSERT_NO_MSG(inst != NULL); +} + +void *EWLmalloc(uint32_t n) +{ + VENC_EWL_TypeDef *inst = &ewl_instance; + void *p = NULL; + + p = EWL_HEAP_ALIGNED_ALLOC(n); + if (p == NULL) { + LOG_ERR("alloc failed for size=%d", n); + return NULL; + } + + inst->mem_cnt += n; + LOG_DBG("%8d bytes --> %p, total : %d", n, p, inst->mem_cnt); + + return p; +} + +void EWLfree(void *p) +{ + EWL_HEAP_ALIGNED_FREE(p); +} + +void *EWLcalloc(uint32_t n, uint32_t s) +{ + void *p = EWLmalloc(n * s); + + EWLmemset(p, 0, n * s); + + return p; +} + +void *EWLmemcpy(void *d, const void *s, uint32_t n) +{ + return memcpy(d, s, (size_t)n); +} + +void *EWLmemset(void *d, i32 c, uint32_t n) +{ + return memset(d, c, (size_t)n); +} + +int EWLmemcmp(const void *s1, const void *s2, uint32_t n) +{ + return memcmp((const uint8_t *) s1, (const uint8_t *) s2, (size_t)n); +} + +#define NUM_SLICES_READY_MASK GENMASK(23, 16) +#define LOW_LATENCY_HW_ITF_EN 29 + +i32 EWLWaitHwRdy(const void *inst, uint32_t *slicesReady) +{ + __ASSERT_NO_MSG(inst != NULL); + const struct video_stm32_venc_config *config = ewl_instance.config; + uint32_t ret = EWL_HW_WAIT_TIMEOUT; + volatile uint32_t irq_stats; + uint32_t prevSlicesReady = 0; + k_timepoint_t timeout = sys_timepoint_calc(K_MSEC(EWL_TIMEOUT)); + uint32_t start = sys_clock_tick_get_32(); + + /* check how to clear IRQ flags for VENC */ + uint32_t clrByWrite1 = (EWLReadReg(inst, BASE_HWFuse2) & HWCFGIrqClearSupport); + + do { + irq_stats = sys_read32(config->reg + BASE_HEncIRQ); + /* get the number of completed slices from ASIC registers. 
*/ + if (slicesReady != NULL && *slicesReady > prevSlicesReady) { + *slicesReady = FIELD_GET(NUM_SLICES_READY_MASK, + sys_read32(config->reg + BASE_HEncControl7)); + } + + LOG_DBG("IRQ stat = %08x", irq_stats); + uint32_t hw_handshake_status = + IS_BIT_SET(sys_read32(config->reg + BASE_HEncInstantInput), + LOW_LATENCY_HW_ITF_EN); + + /* ignore the irq status of input line buffer in hw handshake mode */ + if ((irq_stats == ASIC_STATUS_LINE_BUFFER_DONE) && (hw_handshake_status != 0UL)) { + sys_write32(ASIC_STATUS_FUSE, config->reg + BASE_HEncIRQ); + continue; + } + + if ((irq_stats & ASIC_STATUS_ALL) != 0UL) { + /* clear IRQ and slice ready status */ + uint32_t clr_stats; + + irq_stats &= (~(ASIC_STATUS_SLICE_READY | ASIC_IRQ_LINE)); + + if (clrByWrite1 != 0UL) { + clr_stats = ASIC_STATUS_SLICE_READY | ASIC_IRQ_LINE; + } else { + clr_stats = irq_stats; + } + + sys_write32(clr_stats, config->reg + BASE_HEncIRQ); + ret = EWL_OK; + break; + } + + if (slicesReady != NULL) { + if (*slicesReady > prevSlicesReady) { + ret = EWL_OK; + break; + } + } + + } while (!sys_timepoint_expired(timeout)); + + LOG_DBG("encoding = %d ms", k_ticks_to_ms_ceil32(sys_clock_tick_get_32() - start)); + + if (slicesReady != NULL) { + LOG_DBG("slicesReady = %d", *slicesReady); + } + + return ret; +} + +void EWLassert(bool expr, const char *str_expr, const char *file, unsigned int line) +{ + __ASSERT(expr, "ASSERTION FAIL [%s] @ %s:%d", str_expr, file, line); +} + +/* Set CONFIG_VC8000NANOE_LOG_LEVEL_DBG to enable library tracing */ +void EWLtrace(const char *s) +{ + printk("%s\n", s); +} + +void EWLtraceparam(const char *fmt, const char *param, unsigned int val) +{ + printk(fmt, param, val); +} + +static int stm32_venc_enable_clock(const struct device *dev) +{ + const struct video_stm32_venc_config *config = dev->config; + const struct device *clk = DEVICE_DT_GET(STM32_CLOCK_CONTROL_NODE); + + if (!device_is_ready(clk)) { + LOG_ERR("clock control device not ready"); + return -ENODEV; + } + + if (clock_control_on(clk, + (clock_control_subsys_t)&config->pclken) != 0) { + return -EIO; + } + + return 0; +} + +static int video_stm32_venc_set_fmt(const struct device *dev, + struct video_format *fmt) +{ + struct video_stm32_venc_data *data = dev->data; + + if (fmt->type == VIDEO_BUF_TYPE_INPUT) { + if ((fmt->pixelformat != VIDEO_PIX_FMT_NV12) && + (fmt->pixelformat != VIDEO_PIX_FMT_RGB565)) { + LOG_ERR("invalid input pixel format"); + return -EINVAL; + } + + fmt->pitch = fmt->width * + video_bits_per_pixel(fmt->pixelformat) / BITS_PER_BYTE; + data->fmt_in = *fmt; + } else { + if (fmt->pixelformat != VIDEO_PIX_FMT_H264) { + LOG_ERR("invalid output pixel format"); + return -EINVAL; + } + + fmt->sizeimage = fmt->width * fmt->height / VENC_ESTIMATED_COMPRESSION_RATIO; + data->fmt_out = *fmt; + } + + return 0; +} + +static int video_stm32_venc_get_fmt(const struct device *dev, + struct video_format *fmt) +{ + struct video_stm32_venc_data *data = dev->data; + + if (fmt->type == VIDEO_BUF_TYPE_INPUT) { + *fmt = data->fmt_in; + } else { + *fmt = data->fmt_out; + } + + return 0; +} + +static int encoder_prepare(struct video_stm32_venc_data *data) +{ + H264EncRet ret; + H264EncConfig cfg = {0}; + H264EncPreProcessingCfg preproc_cfg = {0}; + H264EncRateCtrl ratectrl_cfg = {0}; + H264EncCodingCtrl codingctrl_cfg = {0}; + + data->frame_nb = 0; + + /* set config to 1 reference frame */ + cfg.refFrameAmount = 1; + /* frame rate */ + cfg.frameRateDenom = 1; + cfg.frameRateNum = VENC_DEFAULT_FRAMERATE; + /* image resolution */ + cfg.width = 
data->fmt_out.width; + cfg.height = data->fmt_out.height; + /* stream type */ + cfg.streamType = H264ENC_BYTE_STREAM; + + /* encoding level*/ + cfg.level = VENC_DEFAULT_LEVEL; + cfg.svctLevel = 0; + cfg.viewMode = H264ENC_BASE_VIEW_SINGLE_BUFFER; + + ret = H264EncInit(&cfg, &data->encoder); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncInit error=%d", ret); + return -EIO; + } + + /* set format conversion for preprocessing */ + ret = H264EncGetPreProcessing(data->encoder, &preproc_cfg); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncGetPreProcessing error=%d", ret); + return -EIO; + } + preproc_cfg.inputType = to_h264pixfmt(data->fmt_in.pixelformat); + ret = H264EncSetPreProcessing(data->encoder, &preproc_cfg); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncSetPreProcessing error=%d", ret); + return -EIO; + } + + /* setup coding ctrl */ + ret = H264EncGetCodingCtrl(data->encoder, &codingctrl_cfg); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncGetCodingCtrl error=%d", ret); + return -EIO; + } + + ret = H264EncSetCodingCtrl(data->encoder, &codingctrl_cfg); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncSetCodingCtrl error=%d", ret); + return -EIO; + } + + /* set bit rate configuration */ + ret = H264EncGetRateCtrl(data->encoder, &ratectrl_cfg); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncGetRateCtrl error=%d", ret); + return -EIO; + } + + /* Constant bitrate */ + ratectrl_cfg.pictureRc = 0; + ratectrl_cfg.mbRc = 0; + ratectrl_cfg.pictureSkip = 0; + ratectrl_cfg.hrd = 0; + ratectrl_cfg.qpHdr = VENC_DEFAULT_QP; + ratectrl_cfg.qpMin = ratectrl_cfg.qpHdr; + ratectrl_cfg.qpMax = ratectrl_cfg.qpHdr; + + ret = H264EncSetRateCtrl(data->encoder, &ratectrl_cfg); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncSetRateCtrl error=%d", ret); + return -EIO; + } + + return 0; +} + +static int encoder_start(struct video_stm32_venc_data *data) +{ + H264EncRet ret; + struct video_buffer *output; + H264EncIn encIn = {0}; + H264EncOut encOut = {0}; + + output = k_fifo_get(&data->fifo_output_in, K_FOREVER); + + encIn.pOutBuf = (uint32_t *)output->buffer; + encIn.busOutBuf = (uint32_t) encIn.pOutBuf; + encIn.outBufSize = ROUND_UP(output->size, ALIGNMENT_INCR); + + /* create stream */ + ret = H264EncStrmStart(data->encoder, &encIn, &encOut); + if (ret != H264ENC_OK) { + LOG_ERR("H264EncStrmStart error=%d", ret); + return -EIO; + } + + output->bytesused = encOut.streamSize; + LOG_DBG("SPS/PPS generated, size= %d", output->bytesused); + + k_fifo_put(&data->fifo_output_in, output); + k_fifo_put(&data->fifo_output_out, output); + + data->resync = true; + + return 0; +} + +static int encode_frame(struct video_stm32_venc_data *data) +{ + int ret = H264ENC_FRAME_READY; + struct video_buffer *input; + struct video_buffer *output; + H264EncIn encIn = {0}; + H264EncOut encOut = {0}; + + if (data->encoder == NULL) { + ret = encoder_prepare(data); + if (ret) { + return ret; + } + + ret = encoder_start(data); + if (ret) { + return ret; + } + + LOG_DBG("SPS/PPS generated and pushed"); + return 0; + } + + input = k_fifo_get(&data->fifo_input, K_NO_WAIT); + if (input == NULL) { + return 0; + } + + output = k_fifo_get(&data->fifo_output_in, K_FOREVER); + + /* one key frame every seconds */ + if (!(data->frame_nb % VENC_DEFAULT_FRAMERATE) || data->resync) { + /* if frame is the first or resync needed: set as intra coded */ + encIn.codingType = H264ENC_INTRA_FRAME; + } else { + /* if there was a frame previously, set as predicted */ + encIn.timeIncrement = 1; + encIn.codingType = H264ENC_PREDICTED_FRAME; + } + + encIn.ipf = 
H264ENC_REFERENCE_AND_REFRESH; + encIn.ltrf = H264ENC_REFERENCE; + + /* set input buffers to structures */ + encIn.busLuma = (ptr_t)input->buffer; + encIn.busChromaU = (ptr_t)encIn.busLuma + data->fmt_in.width * data->fmt_in.height; + + encIn.pOutBuf = (uint32_t *)output->buffer; + encIn.busOutBuf = (uint32_t)encIn.pOutBuf; + encIn.outBufSize = ROUND_UP(output->size, ALIGNMENT_INCR); + encOut.streamSize = 0; + + ret = H264EncStrmEncode(data->encoder, &encIn, &encOut, NULL, NULL, NULL); + output->bytesused = encOut.streamSize; + LOG_DBG("output=%p, encOut.streamSize=%d", output, encOut.streamSize); + + k_fifo_put(&data->fifo_output_in, output); + k_fifo_put(&data->fifo_output_out, output); + + switch (ret) { + case H264ENC_FRAME_READY: + /* save stream */ + if (encOut.streamSize == 0) { + /* Nothing encoded */ + data->resync = true; + return -ENODATA; + } + output->bytesused = encOut.streamSize; + break; + case H264ENC_FUSE_ERROR: + LOG_ERR("H264EncStrmEncode error=%d", ret); + + LOG_ERR("DCMIPP and VENC desync at frame %d, restart the video", data->frame_nb); + encoder_end(data); + + ret = encoder_start(data); + if (ret) { + return ret; + } + + break; + default: + LOG_ERR("H264EncStrmEncode error=%d", ret); + LOG_ERR("error encoding frame %d", data->frame_nb); + + encoder_end(data); + + ret = encoder_start(data); + if (ret) { + return ret; + } + + data->resync = true; + + return -EIO; + break; + } + + data->frame_nb++; + + return 0; +} + + +static int encoder_end(struct video_stm32_venc_data *data) +{ + H264EncIn encIn = {0}; + H264EncOut encOut = {0}; + + if (data->encoder != NULL) { + H264EncStrmEnd(data->encoder, &encIn, &encOut); + data->encoder = NULL; + } + + return 0; +} + +static int video_stm32_venc_set_stream(const struct device *dev, bool enable, + enum video_buf_type type) +{ + struct video_stm32_venc_data *data = dev->data; + + ARG_UNUSED(type); + + if (!enable) { + /* Stop VENC */ + encoder_end(data); + + return 0; + } + + return 0; +} + +static int video_stm32_venc_enqueue(const struct device *dev, + struct video_buffer *vbuf) +{ + struct video_stm32_venc_data *data = dev->data; + int ret = 0; + + if (vbuf->type == VIDEO_BUF_TYPE_INPUT) { + k_fifo_put(&data->fifo_input, vbuf); + ret = encode_frame(data); + } else { + k_fifo_put(&data->fifo_output_in, vbuf); + } + + return ret; +} + +static int video_stm32_venc_dequeue(const struct device *dev, + struct video_buffer **vbuf, + k_timeout_t timeout) +{ + struct video_stm32_venc_data *data = dev->data; + + *vbuf = k_fifo_get(&data->fifo_output_out, timeout); + if (*vbuf == NULL) { + return -EAGAIN; + } + + return 0; +} + +ISR_DIRECT_DECLARE(stm32_venc_isr) +{ + const struct video_stm32_venc_config *config = ewl_instance.config; + VENC_EWL_TypeDef *inst = &ewl_instance; + uint32_t hw_handshake_status = + IS_BIT_SET(sys_read32(config->reg + BASE_HEncInstantInput), + LOW_LATENCY_HW_ITF_EN); + uint32_t irq_status = sys_read32(config->reg + BASE_HEncIRQ); + + inst->irq_status = irq_status; + inst->irq_cnt++; + + if (!hw_handshake_status && (irq_status & ASIC_STATUS_FUSE)) { + sys_write32(ASIC_STATUS_FUSE | ASIC_IRQ_LINE, config->reg + BASE_HEncIRQ); + /* read back the IRQ status to update its value */ + irq_status = sys_read32(config->reg + BASE_HEncIRQ); + } + + if (irq_status != 0U) { + /* status flag is raised, + * clear the ones that the IRQ needs to clear + * and signal to EWLWaitHwRdy + */ + sys_write32(ASIC_STATUS_SLICE_READY | ASIC_IRQ_LINE, config->reg + BASE_HEncIRQ); + } + + k_sem_give(&inst->complete); + + return 0; +} + 
+#define VENC_FORMAT_CAP(pixfmt) \ + { \ + .pixelformat = pixfmt, \ + .width_min = 48, \ + .width_max = 1920, \ + .height_min = 48, \ + .height_max = 1088, \ + .width_step = 16, \ + .height_step = 16, \ + } + +static const struct video_format_cap fmts[] = { + VENC_FORMAT_CAP(VIDEO_PIX_FMT_H264), + {0}, +}; + +static int video_stm32_venc_get_caps(const struct device *dev, + struct video_caps *caps) +{ + caps->format_caps = fmts; + + /* VENC produces full frames */ + caps->min_line_count = caps->max_line_count = LINE_COUNT_HEIGHT; + caps->min_vbuf_count = 1; + + return 0; +} + +static DEVICE_API(video, video_stm32_venc_driver_api) = { + .set_format = video_stm32_venc_set_fmt, + .get_format = video_stm32_venc_get_fmt, + .set_stream = video_stm32_venc_set_stream, + .enqueue = video_stm32_venc_enqueue, + .dequeue = video_stm32_venc_dequeue, + .get_caps = video_stm32_venc_get_caps, +}; + +static void video_stm32_venc_irq_config_func(const struct device *dev) +{ + IRQ_DIRECT_CONNECT(DT_INST_IRQN(0), DT_INST_IRQ(0, priority), + stm32_venc_isr, 0); + irq_enable(DT_INST_IRQN(0)); +} + +static struct video_stm32_venc_data video_stm32_venc_data_0 = { +}; + +static const struct video_stm32_venc_config video_stm32_venc_config_0 = { + .reg = DT_INST_REG_ADDR(0), + .pclken = { + .bus = DT_INST_CLOCKS_CELL(0, bus), + .enr = DT_INST_CLOCKS_CELL(0, bits) + }, + .reset = RESET_DT_SPEC_INST_GET_BY_IDX(0, 0), + .irq_config = video_stm32_venc_irq_config_func, +}; + +static void RISAF_Config(void) +{ + /* Define and initialize the master configuration structure */ + RIMC_MasterConfig_t RIMC_master = {0}; + + /* Enable the clock for the RIFSC (RIF Security Controller) */ + __HAL_RCC_RIFSC_CLK_ENABLE(); + + RIMC_master.MasterCID = RIF_CID_1; + RIMC_master.SecPriv = RIF_ATTRIBUTE_SEC | RIF_ATTRIBUTE_PRIV; + + /* Configure the master attributes for the Ethernet peripheral (VENC) */ + HAL_RIF_RIMC_ConfigMasterAttributes(RIF_MASTER_INDEX_VENC, &RIMC_master); + + /* Set the secure and privileged attributes for the Ethernet peripheral (VENC) as a slave */ + HAL_RIF_RISC_SetSlaveSecureAttributes(RIF_RISC_PERIPH_INDEX_VENC, + RIF_ATTRIBUTE_SEC | RIF_ATTRIBUTE_PRIV); +} + +static int video_stm32_venc_init(const struct device *dev) +{ + const struct video_stm32_venc_config *config = dev->config; + struct video_stm32_venc_data *data = dev->data; + int err; + + /* Enable VENC clock */ + err = stm32_venc_enable_clock(dev); + if (err < 0) { + LOG_ERR("clock enabling failed."); + return err; + } + + /* Reset VENC */ + if (!device_is_ready(config->reset.dev)) { + LOG_ERR("reset controller not ready"); + return -ENODEV; + } + reset_line_toggle_dt(&config->reset); + + data->dev = dev; + k_fifo_init(&data->fifo_input); + k_fifo_init(&data->fifo_output_in); + k_fifo_init(&data->fifo_output_out); + + /* Run IRQ init */ + config->irq_config(dev); + + RISAF_Config(); + + LOG_DBG("CPU frequency : %d", HAL_RCC_GetCpuClockFreq() / 1000000); + LOG_DBG("sysclk frequency : %d", HAL_RCC_GetSysClockFreq() / 1000000); + LOG_DBG("pclk5 frequency : %d", HAL_RCC_GetPCLK5Freq() / 1000000); + + /* default input */ + data->fmt_in.width = VENC_DEFAULT_WIDTH; + data->fmt_in.height = VENC_DEFAULT_HEIGHT; + data->fmt_in.pixelformat = VENC_DEFAULT_FMT_IN; + data->fmt_in.pitch = data->fmt_in.width; + + /* default output */ + data->fmt_out.width = VENC_DEFAULT_WIDTH; + data->fmt_out.height = VENC_DEFAULT_HEIGHT; + data->fmt_out.pixelformat = VENC_DEFAULT_FMT_OUT; + + /* store config for register accesses */ + ewl_instance.config = config; + + LOG_DBG("%s 
inited", dev->name); + + return 0; +} + +DEVICE_DT_INST_DEFINE(0, &video_stm32_venc_init, + NULL, &video_stm32_venc_data_0, + &video_stm32_venc_config_0, + POST_KERNEL, CONFIG_VIDEO_INIT_PRIORITY, + &video_stm32_venc_driver_api); diff --git a/dts/arm/st/n6/stm32n6.dtsi b/dts/arm/st/n6/stm32n6.dtsi index c06a499b4cfce..e49cc68690b44 100644 --- a/dts/arm/st/n6/stm32n6.dtsi +++ b/dts/arm/st/n6/stm32n6.dtsi @@ -1273,6 +1273,15 @@ resets = <&rctl STM32_RESET(AHB5, 31)>; status = "disabled"; }; + + venc: venc@58005000 { + compatible = "st,stm32-venc"; + reg = <0x58005000 0x1000>; + interrupts = <62 0>; + clocks = <&rcc STM32_CLOCK(APB5, 5)>; + resets = <&rctl STM32_RESET(APB5, 5)>; + status = "disabled"; + }; }; }; diff --git a/dts/bindings/video/st,stm32-venc.yaml b/dts/bindings/video/st,stm32-venc.yaml new file mode 100644 index 0000000000000..086f6c7512292 --- /dev/null +++ b/dts/bindings/video/st,stm32-venc.yaml @@ -0,0 +1,23 @@ +# +# Copyright (c) 2025 STMicroelectronics. +# +# SPDX-License-Identifier: Apache-2.0 +# + +description: | + STMicroelectronics STM32 video encoder peripheral (VENC). + Example of node configuration at board level: + +compatible: "st,stm32-venc" + +include: [base.yaml, reset-device.yaml] + +properties: + interrupts: + required: true + + clocks: + required: true + + resets: + required: true diff --git a/include/zephyr/drivers/video.h b/include/zephyr/drivers/video.h index d4407420d0859..161a51545ff82 100644 --- a/include/zephyr/drivers/video.h +++ b/include/zephyr/drivers/video.h @@ -79,6 +79,12 @@ struct video_format { * the next row (>=width). */ uint32_t pitch; + /** + * @brief size of buffer data. + * + * This is the maximum size in bytes required for buffer data. + */ + uint32_t sizeimage; }; /** @@ -1768,6 +1774,16 @@ int64_t video_get_csi_link_freq(const struct device *dev, uint8_t bpp, uint8_t l */ #define VIDEO_PIX_FMT_JPEG VIDEO_FOURCC('J', 'P', 'E', 'G') +/** + * H264 with start code + */ +#define VIDEO_PIX_FMT_H264 VIDEO_FOURCC('H', '2', '6', '4') + +/** + * H264 without start code + */ +#define VIDEO_PIX_FMT_H264_NO_SC VIDEO_FOURCC('A', 'V', 'C', '1') + /** * @} */ diff --git a/samples/drivers/video/tcpserversink/Kconfig b/samples/drivers/video/tcpserversink/Kconfig new file mode 100644 index 0000000000000..d8ab8be1c7c13 --- /dev/null +++ b/samples/drivers/video/tcpserversink/Kconfig @@ -0,0 +1,78 @@ +# Copyright (c) 2024 Espressif Systems (Shanghai) Co., Ltd. +# Copyright (c) 2025 STMicroelectronics. +# SPDX-License-Identifier: Apache-2.0 + +mainmenu "TCP camera streaming sample application" + +menu "Video capture configuration" + +config VIDEO_SOURCE_CROP_LEFT + int "Crop area left value" + default 0 + help + Left value of the crop area within the video source. + +config VIDEO_SOURCE_CROP_TOP + int "Crop area top value" + default 0 + help + Top value of the crop area within the video source. + +config VIDEO_SOURCE_CROP_WIDTH + int "Crop area width value" + default 0 + help + Width value of the crop area within the video source. + If set to 0, the crop is not applied. + +config VIDEO_SOURCE_CROP_HEIGHT + int "Crop area height value" + default 0 + help + Height value of the crop area within the video source. + If set to 0, the crop is not applied. + +config VIDEO_FRAME_HEIGHT + int "Height of the video frame" + default 0 + help + Height of the video frame. If set to 0, the default height is used. + +config VIDEO_FRAME_WIDTH + int "Width of the video frame" + default 0 + help + Width of the video frame. If set to 0, the default width is used. 
+
+config VIDEO_PIXEL_FORMAT
+	string "Pixel format of the video frame"
+	help
+	  Pixel format of the video frame. If not set, the default pixel format is used.
+
+config VIDEO_CAPTURE_N_BUFFERING
+	int "Capture N-buffering"
+	default 2
+	help
+	  Framerate versus memory usage tradeoff.
+	  "2" allows capturing while sending data (optimal framerate).
+	  "1" reduces memory usage but lowers the capture framerate.
+	  If not set, defaults to "2".
+
+config VIDEO_CTRL_HFLIP
+	bool "Mirror the video frame horizontally"
+	help
+	  If set, mirror the video frame horizontally.
+
+config VIDEO_CTRL_VFLIP
+	bool "Mirror the video frame vertically"
+	help
+	  If set, mirror the video frame vertically.
+
+config VIDEO_ENCODED_PIXEL_FORMAT
+	string "Pixel format of the encoded frame"
+	help
+	  Pixel format of the encoded frame.
+
+endmenu
+
+source "Kconfig.zephyr"
diff --git a/samples/drivers/video/tcpserversink/README.rst b/samples/drivers/video/tcpserversink/README.rst
index b44d9f3105ad7..4ff4c03573981 100644
--- a/samples/drivers/video/tcpserversink/README.rst
+++ b/samples/drivers/video/tcpserversink/README.rst
@@ -18,6 +18,9 @@ This samples requires a video capture device and network support.
 - :zephyr:board:`mimxrt1064_evk`
 - `MT9M114 camera module`_
 
+- :zephyr:board:`stm32n6570_dk`
+- `MB1854 camera module`_
+
 Wiring
 ******
 
@@ -26,6 +29,12 @@ J35 camera connector. A USB cable should be connected from a host to the micro
 USB debug connector (J41) in order to get console output via the freelink
 interface. Ethernet cable must be connected to RJ45 connector.
 
+On :zephyr:board:`stm32n6570_dk`, the MB1854 IMX335 camera module must be plugged into
+the CSI-2 camera connector. An RJ45 Ethernet cable must be plugged into the CN6 Ethernet
+connector. For optimal image quality, it is advised to use the STM32 image signal
+processing middleware: https://github.com/stm32-hotspot/zephyr-stm32-mw-isp.
+
+
 Building and Running
 ********************
 
@@ -49,6 +58,15 @@ a video software pattern generator is supported by using :ref:`snippet-video-sw-
    :goals: build
    :compact:
 
+For :zephyr:board:`stm32n6570_dk`, the sample can be built with the following command:
+
+.. zephyr-app-commands::
+   :zephyr-app: samples/drivers/video/tcpserversink
+   :board: stm32n6570_dk
+   :shield: st_b_cams_imx_mb1854
+   :goals: build
+   :compact:
+
 Sample Output
 =============
 
@@ -71,6 +89,13 @@ Example with gstreamer:
 For video software generator, the default resolution should be width=320 and
 height=160.
 
+When using compression support, use the following GStreamer command line:
+
+.. code-block:: console
+
+   gst-launch-1.0 tcpclientsrc host=192.0.2.1 port=5000 \
+   ! queue ! decodebin ! queue !
fpsdisplaysink sync=false + References ********** diff --git a/samples/drivers/video/tcpserversink/boards/stm32n6570_dk.conf b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk.conf new file mode 100644 index 0000000000000..8bd71ecc50267 --- /dev/null +++ b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk.conf @@ -0,0 +1,24 @@ +# Video buffer pool +CONFIG_VIDEO_BUFFER_POOL_SZ_MAX=10000000 +CONFIG_VIDEO_BUFFER_POOL_NUM_MAX=10 + +# Camera interface +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_PIXEL_FORMAT="pRAA" +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_WIDTH=2592 +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_HEIGHT=1944 +CONFIG_FPU=y + +# Capture +CONFIG_VIDEO_FRAME_WIDTH=1920 +CONFIG_VIDEO_FRAME_HEIGHT=1080 +CONFIG_VIDEO_PIXEL_FORMAT="NV12" +CONFIG_VIDEO_CAPTURE_N_BUFFERING=2 + +# Video encoder +CONFIG_VIDEO_STM32_VENC=y +CONFIG_MAIN_STACK_SIZE=4096 +CONFIG_VIDEO_ENCODED_PIXEL_FORMAT="H264" + +# Network buffers +CONFIG_NET_BUF_RX_COUNT=4 +CONFIG_NET_BUF_TX_COUNT=8 diff --git a/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_fsbl.conf b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_fsbl.conf new file mode 100644 index 0000000000000..8bd71ecc50267 --- /dev/null +++ b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_fsbl.conf @@ -0,0 +1,24 @@ +# Video buffer pool +CONFIG_VIDEO_BUFFER_POOL_SZ_MAX=10000000 +CONFIG_VIDEO_BUFFER_POOL_NUM_MAX=10 + +# Camera interface +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_PIXEL_FORMAT="pRAA" +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_WIDTH=2592 +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_HEIGHT=1944 +CONFIG_FPU=y + +# Capture +CONFIG_VIDEO_FRAME_WIDTH=1920 +CONFIG_VIDEO_FRAME_HEIGHT=1080 +CONFIG_VIDEO_PIXEL_FORMAT="NV12" +CONFIG_VIDEO_CAPTURE_N_BUFFERING=2 + +# Video encoder +CONFIG_VIDEO_STM32_VENC=y +CONFIG_MAIN_STACK_SIZE=4096 +CONFIG_VIDEO_ENCODED_PIXEL_FORMAT="H264" + +# Network buffers +CONFIG_NET_BUF_RX_COUNT=4 +CONFIG_NET_BUF_TX_COUNT=8 diff --git a/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_fsbl.overlay b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_fsbl.overlay new file mode 100644 index 0000000000000..23d7b74de9fce --- /dev/null +++ b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_fsbl.overlay @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2025 STMicroelectronics. 
+ * + * SPDX-License-Identifier: Apache-2.0 + */ + +/ { + chosen { + zephyr,videoenc = &venc; + }; +}; + +&venc { + status = "okay"; +}; diff --git a/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_sb.conf b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_sb.conf new file mode 100644 index 0000000000000..8bd71ecc50267 --- /dev/null +++ b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_sb.conf @@ -0,0 +1,24 @@ +# Video buffer pool +CONFIG_VIDEO_BUFFER_POOL_SZ_MAX=10000000 +CONFIG_VIDEO_BUFFER_POOL_NUM_MAX=10 + +# Camera interface +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_PIXEL_FORMAT="pRAA" +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_WIDTH=2592 +CONFIG_VIDEO_STM32_DCMIPP_SENSOR_HEIGHT=1944 +CONFIG_FPU=y + +# Capture +CONFIG_VIDEO_FRAME_WIDTH=1920 +CONFIG_VIDEO_FRAME_HEIGHT=1080 +CONFIG_VIDEO_PIXEL_FORMAT="NV12" +CONFIG_VIDEO_CAPTURE_N_BUFFERING=2 + +# Video encoder +CONFIG_VIDEO_STM32_VENC=y +CONFIG_MAIN_STACK_SIZE=4096 +CONFIG_VIDEO_ENCODED_PIXEL_FORMAT="H264" + +# Network buffers +CONFIG_NET_BUF_RX_COUNT=4 +CONFIG_NET_BUF_TX_COUNT=8 diff --git a/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_sb.overlay b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_sb.overlay new file mode 100644 index 0000000000000..23d7b74de9fce --- /dev/null +++ b/samples/drivers/video/tcpserversink/boards/stm32n6570_dk_stm32n657xx_sb.overlay @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2025 STMicroelectronics. + * + * SPDX-License-Identifier: Apache-2.0 + */ + +/ { + chosen { + zephyr,videoenc = &venc; + }; +}; + +&venc { + status = "okay"; +}; diff --git a/samples/drivers/video/tcpserversink/prj.conf b/samples/drivers/video/tcpserversink/prj.conf index a49aa3de90ff3..befb3e73fcbe7 100644 --- a/samples/drivers/video/tcpserversink/prj.conf +++ b/samples/drivers/video/tcpserversink/prj.conf @@ -37,6 +37,6 @@ CONFIG_NET_SHELL=y # Network application options and configuration CONFIG_NET_CONFIG_SETTINGS=y -CONFIG_NET_CONFIG_MY_IPV4_ADDR="192.0.2.1" +CONFIG_NET_CONFIG_MY_IPV4_ADDR="192.168.0.4" CONFIG_VIDEO=y diff --git a/samples/drivers/video/tcpserversink/sample.yaml b/samples/drivers/video/tcpserversink/sample.yaml index 9e9123b48c070..eb629027ead33 100644 --- a/samples/drivers/video/tcpserversink/sample.yaml +++ b/samples/drivers/video/tcpserversink/sample.yaml @@ -8,11 +8,15 @@ tests: - net - socket - shield - platform_allow: mimxrt1064_evk/mimxrt1064 + platform_allow: + - mimxrt1064_evk/mimxrt1064 + - stm32n6570_dk/stm32n657xx/sb depends_on: - video - netif integration_platforms: - mimxrt1064_evk/mimxrt1064 + - stm32n6570_dk/stm32n657xx/sb extra_args: - platform:mimxrt1064_evk/mimxrt1064:SHIELD=dvp_fpc24_mt9m114 + - platform:stm32n6570_dk/stm32n657xx/sb:SHIELD=st_b_cams_imx_mb1854 diff --git a/samples/drivers/video/tcpserversink/src/main.c b/samples/drivers/video/tcpserversink/src/main.c index 640a728f64eea..ad3e2b1c9e05d 100644 --- a/samples/drivers/video/tcpserversink/src/main.c +++ b/samples/drivers/video/tcpserversink/src/main.c @@ -1,11 +1,14 @@ /* * Copyright (c) 2019 Linaro Limited + * Copyright 2025 NXP + * Copyright (c) 2025 STMicroelectronics. 
* * SPDX-License-Identifier: Apache-2.0 */ #include #include +#include #include #include #include @@ -15,6 +18,9 @@ LOG_MODULE_REGISTER(main, CONFIG_LOG_DEFAULT_LEVEL); #define MY_PORT 5000 #define MAX_CLIENT_QUEUE 1 +/* Assuming that video encoder will at least compress to this ratio */ +#define ESTIMATED_COMPRESSION_RATIO 10 + static ssize_t sendall(int sock, const void *buf, size_t len) { while (len) { @@ -30,17 +36,165 @@ static ssize_t sendall(int sock, const void *buf, size_t len) return 0; } +#if DT_HAS_CHOSEN(zephyr_videoenc) +const struct device *encoder_dev = NULL; + +int configure_encoder() +{ + struct video_buffer *buffer; + struct video_format fmt; + struct video_caps caps; + uint32_t size; + int i = 0; + + encoder_dev = DEVICE_DT_GET(DT_CHOSEN(zephyr_videoenc)); + if (!device_is_ready(encoder_dev)) { + LOG_ERR("%s: encoder video device not ready.", + encoder_dev->name); + return -1; + } + + /* Get capabilities */ + caps.type = VIDEO_BUF_TYPE_OUTPUT; + if (video_get_caps(encoder_dev, &caps)) { + LOG_ERR("Unable to retrieve video capabilities"); + return -1; + } + + LOG_INF("- Capabilities:"); + while (caps.format_caps[i].pixelformat) { + const struct video_format_cap *fcap = &caps.format_caps[i]; + /* fourcc to string */ + LOG_INF(" %s width [%u; %u; %u] height [%u; %u; %u]", + VIDEO_FOURCC_TO_STR(fcap->pixelformat), + fcap->width_min, fcap->width_max, fcap->width_step, + fcap->height_min, fcap->height_max, fcap->height_step); + i++; + } + + /* Get default/native format */ + fmt.type = VIDEO_BUF_TYPE_OUTPUT; + if (video_get_format(encoder_dev, &fmt)) { + LOG_ERR("Unable to retrieve video format"); + return -1; + } + + printk("Video encoder device detected, format: %s %ux%u\n", + VIDEO_FOURCC_TO_STR(fmt.pixelformat), fmt.width, fmt.height); + +#if CONFIG_VIDEO_FRAME_HEIGHT + fmt.height = CONFIG_VIDEO_FRAME_HEIGHT; +#endif + +#if CONFIG_VIDEO_FRAME_WIDTH + fmt.width = CONFIG_VIDEO_FRAME_WIDTH; +#endif + + /* Set output format */ + if (strcmp(CONFIG_VIDEO_ENCODED_PIXEL_FORMAT, "")) { + fmt.pixelformat = VIDEO_FOURCC_FROM_STR(CONFIG_VIDEO_ENCODED_PIXEL_FORMAT); + } + + LOG_INF("- Video encoded format: %s %ux%u", + VIDEO_FOURCC_TO_STR(fmt.pixelformat), fmt.width, fmt.height); + + fmt.type = VIDEO_BUF_TYPE_OUTPUT; + if (video_set_format(encoder_dev, &fmt)) { + LOG_ERR("Unable to set format"); + return -1; + } + + /* Alloc output buffer */ + size = fmt.sizeimage; + if (size == 0) { + LOG_ERR("Encoder driver must set sizeimage"); + return -1; + } + buffer = video_buffer_aligned_alloc(size, CONFIG_VIDEO_BUFFER_POOL_ALIGN, + K_FOREVER); + if (buffer == NULL) { + LOG_ERR("Unable to alloc compressed video buffer size=%d", size); + return -1; + } + buffer->type = VIDEO_BUF_TYPE_OUTPUT; + video_enqueue(encoder_dev, buffer); + + /* Set input format */ + if (strcmp(CONFIG_VIDEO_PIXEL_FORMAT, "")) { + fmt.pixelformat = VIDEO_FOURCC_FROM_STR(CONFIG_VIDEO_PIXEL_FORMAT); + } + + LOG_INF("- Video input format: %s %ux%u", + VIDEO_FOURCC_TO_STR(fmt.pixelformat), fmt.width, fmt.height); + + fmt.type = VIDEO_BUF_TYPE_INPUT; + if (video_set_format(encoder_dev, &fmt)) { + LOG_ERR("Unable to set input format"); + return 0; + } + + /* Start video encoder */ + if (video_stream_start(encoder_dev, VIDEO_BUF_TYPE_INPUT)) { + LOG_ERR("Unable to start video encoder (input)"); + return -1; + } + if (video_stream_start(encoder_dev, VIDEO_BUF_TYPE_OUTPUT)) { + LOG_ERR("Unable to start video encoder (output)"); + return -1; + } + + return 0; +} + +int encode_frame(struct video_buffer *in, struct video_buffer **out) 
+{ + int ret; + + in->type = VIDEO_BUF_TYPE_INPUT; + video_enqueue(encoder_dev, in); + + (*out)->type = VIDEO_BUF_TYPE_OUTPUT; + ret = video_dequeue(encoder_dev, out, K_FOREVER); + if (ret) { + LOG_ERR("Unable to dequeue encoder buf"); + return ret; + } + + return 0; +} + +void stop_encoder(void) +{ + if (video_stream_stop(encoder_dev, VIDEO_BUF_TYPE_OUTPUT)) + LOG_ERR("Unable to stop encoder"); +} +#endif + int main(void) { struct sockaddr_in addr, client_addr; socklen_t client_addr_len = sizeof(client_addr); - struct video_buffer *buffers[2]; + struct video_buffer *buffers[CONFIG_VIDEO_CAPTURE_N_BUFFERING]; struct video_buffer *vbuf = &(struct video_buffer){}; - int i, ret, sock, client; +#if DT_HAS_CHOSEN(zephyr_videoenc) + struct video_buffer *vbuf_out = &(struct video_buffer){}; +#endif + int ret, sock, client; struct video_format fmt; struct video_caps caps; + struct video_frmival frmival; + struct video_frmival_enum fie; enum video_buf_type type = VIDEO_BUF_TYPE_OUTPUT; const struct device *video_dev; +#if (CONFIG_VIDEO_SOURCE_CROP_WIDTH && CONFIG_VIDEO_SOURCE_CROP_HEIGHT) || \ + CONFIG_VIDEO_FRAME_HEIGHT || CONFIG_VIDEO_FRAME_WIDTH + struct video_selection sel = { + .type = VIDEO_BUF_TYPE_OUTPUT, + }; +#endif + size_t bsize; + int i = 0; + int err; video_dev = DEVICE_DT_GET(DT_CHOSEN(zephyr_camera)); if (!device_is_ready(video_dev)) { @@ -80,6 +234,17 @@ int main(void) return 0; } + LOG_INF("- Capabilities:"); + while (caps.format_caps[i].pixelformat) { + const struct video_format_cap *fcap = &caps.format_caps[i]; + /* fourcc to string */ + LOG_INF(" %s width [%u; %u; %u] height [%u; %u; %u]", + VIDEO_FOURCC_TO_STR(fcap->pixelformat), + fcap->width_min, fcap->width_max, fcap->width_step, + fcap->height_min, fcap->height_max, fcap->height_step); + i++; + } + /* Get default/native format */ fmt.type = type; if (video_get_format(video_dev, &fmt)) { @@ -95,9 +260,139 @@ int main(void) return 0; } + /* Set the crop setting if necessary */ +#if CONFIG_VIDEO_SOURCE_CROP_WIDTH && CONFIG_VIDEO_SOURCE_CROP_HEIGHT + sel.target = VIDEO_SEL_TGT_CROP; + sel.rect.left = CONFIG_VIDEO_SOURCE_CROP_LEFT; + sel.rect.top = CONFIG_VIDEO_SOURCE_CROP_TOP; + sel.rect.width = CONFIG_VIDEO_SOURCE_CROP_WIDTH; + sel.rect.height = CONFIG_VIDEO_SOURCE_CROP_HEIGHT; + if (video_set_selection(video_dev, &sel)) { + LOG_ERR("Unable to set selection crop"); + return 0; + } + LOG_INF("Selection crop set to (%u,%u)/%ux%u", + sel.rect.left, sel.rect.top, sel.rect.width, sel.rect.height); +#endif + +#if CONFIG_VIDEO_FRAME_HEIGHT || CONFIG_VIDEO_FRAME_WIDTH +#if CONFIG_VIDEO_FRAME_HEIGHT + fmt.height = CONFIG_VIDEO_FRAME_HEIGHT; +#endif + +#if CONFIG_VIDEO_FRAME_WIDTH + fmt.width = CONFIG_VIDEO_FRAME_WIDTH; +#endif + + /* + * Check (if possible) if targeted size is same as crop + * and if compose is necessary + */ + sel.target = VIDEO_SEL_TGT_CROP; + err = video_get_selection(video_dev, &sel); + if (err < 0 && err != -ENOSYS) { + LOG_ERR("Unable to get selection crop"); + return 0; + } + + if (err == 0 && (sel.rect.width != fmt.width || sel.rect.height != fmt.height)) { + sel.target = VIDEO_SEL_TGT_COMPOSE; + sel.rect.left = 0; + sel.rect.top = 0; + sel.rect.width = fmt.width; + sel.rect.height = fmt.height; + err = video_set_selection(video_dev, &sel); + if (err < 0 && err != -ENOSYS) { + LOG_ERR("Unable to set selection compose"); + return 0; + } + } +#endif + + if (strcmp(CONFIG_VIDEO_PIXEL_FORMAT, "")) { + fmt.pixelformat = VIDEO_FOURCC_FROM_STR(CONFIG_VIDEO_PIXEL_FORMAT); + } + + LOG_INF("- Video format: %s %ux%u", + 
VIDEO_FOURCC_TO_STR(fmt.pixelformat), fmt.width, fmt.height); + + if (video_set_format(video_dev, &fmt)) { + LOG_ERR("Unable to set format"); + return 0; + } + + if (!video_get_frmival(video_dev, &frmival)) { + LOG_INF("- Default frame rate : %f fps", + 1.0 * frmival.denominator / frmival.numerator); + } + + LOG_INF("- Supported frame intervals for the default format:"); + memset(&fie, 0, sizeof(fie)); + fie.format = &fmt; + while (video_enum_frmival(video_dev, &fie) == 0) { + if (fie.type == VIDEO_FRMIVAL_TYPE_DISCRETE) { + LOG_INF(" %u/%u", fie.discrete.numerator, fie.discrete.denominator); + } else { + LOG_INF(" [min = %u/%u; max = %u/%u; step = %u/%u]", + fie.stepwise.min.numerator, fie.stepwise.min.denominator, + fie.stepwise.max.numerator, fie.stepwise.max.denominator, + fie.stepwise.step.numerator, fie.stepwise.step.denominator); + } + fie.index++; + } + + /* Get supported controls */ + LOG_INF("- Supported controls:"); + const struct device *last_dev = NULL; + struct video_ctrl_query cq = {.dev = video_dev, .id = VIDEO_CTRL_FLAG_NEXT_CTRL}; + + while (!video_query_ctrl(&cq)) { + if (cq.dev != last_dev) { + last_dev = cq.dev; + LOG_INF("\t\tdevice: %s", cq.dev->name); + } + video_print_ctrl(&cq); + cq.id |= VIDEO_CTRL_FLAG_NEXT_CTRL; + } + + /* Set controls */ + struct video_control ctrl = {.id = VIDEO_CID_HFLIP, .val = 1}; + int tp_set_ret = -ENOTSUP; + + if (IS_ENABLED(CONFIG_VIDEO_CTRL_HFLIP)) { + video_set_ctrl(video_dev, &ctrl); + } + + if (IS_ENABLED(CONFIG_VIDEO_CTRL_VFLIP)) { + ctrl.id = VIDEO_CID_VFLIP; + video_set_ctrl(video_dev, &ctrl); + } + + if (IS_ENABLED(CONFIG_TEST)) { + ctrl.id = VIDEO_CID_TEST_PATTERN; + tp_set_ret = video_set_ctrl(video_dev, &ctrl); + } + + /* Size to allocate for each buffer */ + if (caps.min_line_count == LINE_COUNT_HEIGHT) { + if (fmt.pixelformat == VIDEO_PIX_FMT_NV12) { + bsize = fmt.width * fmt.height * + video_bits_per_pixel(fmt.pixelformat) / BITS_PER_BYTE; + } else { + bsize = fmt.pitch * fmt.height; + } + } else { + bsize = fmt.pitch * caps.min_line_count; + } + /* Alloc Buffers */ for (i = 0; i < ARRAY_SIZE(buffers); i++) { - buffers[i] = video_buffer_alloc(fmt.pitch * fmt.height, K_FOREVER); + /* + * For some hardwares, such as the PxP used on i.MX RT1170 to do image rotation, + * buffer alignment is needed in order to achieve the best performance + */ + buffers[i] = video_buffer_aligned_alloc(bsize, CONFIG_VIDEO_BUFFER_POOL_ALIGN, + K_FOREVER); if (buffers[i] == NULL) { LOG_ERR("Unable to alloc video buffer"); return 0; @@ -117,6 +412,13 @@ int main(void) printk("TCP: Accepted connection\n"); +#if DT_HAS_CHOSEN(zephyr_videoenc) + if (configure_encoder()) { + LOG_ERR("Unable to configure video encoder"); + return 0; + } +#endif + /* Enqueue Buffers */ for (i = 0; i < ARRAY_SIZE(buffers); i++) { video_enqueue(video_dev, buffers[i]); @@ -140,16 +442,27 @@ int main(void) return 0; } - printk("\rSending frame %d\n", i++); +#if DT_HAS_CHOSEN(zephyr_videoenc) + encode_frame(vbuf, &vbuf_out); + printk("\rSending compressed frame %d (size=%d bytes)\n", i++, vbuf_out->bytesused); + /* Send compressed video buffer to TCP client */ + ret = sendall(client, vbuf_out->buffer, vbuf_out->bytesused); + + vbuf_out->type = VIDEO_BUF_TYPE_OUTPUT; + video_enqueue(encoder_dev, vbuf_out); +#else + printk("\rSending frame %d\n", i++); /* Send video buffer to TCP client */ ret = sendall(client, vbuf->buffer, vbuf->bytesused); +#endif if (ret && ret != -EAGAIN) { /* client disconnected */ printk("\nTCP: Client disconnected %d\n", ret); close(client); } + 
vbuf->type = VIDEO_BUF_TYPE_INPUT; (void)video_enqueue(video_dev, vbuf); } while (!ret); @@ -159,8 +472,13 @@ int main(void) return 0; } +#if DT_HAS_CHOSEN(zephyr_videoenc) + stop_encoder(); +#endif + /* Flush remaining buffers */ do { + vbuf->type = VIDEO_BUF_TYPE_INPUT; ret = video_dequeue(video_dev, &vbuf, K_NO_WAIT); } while (!ret); diff --git a/tests/drivers/build_all/video/testcase.yaml b/tests/drivers/build_all/video/testcase.yaml index 30430a791ee11..dc8300b6e37a1 100644 --- a/tests/drivers/build_all/video/testcase.yaml +++ b/tests/drivers/build_all/video/testcase.yaml @@ -41,3 +41,6 @@ tests: - ek_ra8d1/r7fa8d1bhecbd extra_args: - platform:ek_ra81/r7fa8d1bhecbd:SHIELD="dvp_20pin_ov7670" + drivers.video.stm32_venc.build: + platform_allow: + - stm32n6570_dk/stm32n657xx/sb diff --git a/west.yml b/west.yml index 319084e4118ce..01a70fd787f71 100644 --- a/west.yml +++ b/west.yml @@ -250,7 +250,7 @@ manifest: groups: - hal - name: hal_stm32 - revision: dc7c2543a079c57a4e7dbee54ed7877880dc235b + revision: pull/295/head path: modules/hal/stm32 groups: - hal
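
The tcpserversink changes above exercise the encoder through the generic video API. Condensed
to the encoder path only, an application is expected to drive the new ``zephyr,videoenc`` device
roughly as in the sketch below. This is an illustrative sketch rather than part of the patch:
the function name ``encode_one_frame``, the ``raw_nv12`` input buffer and the 1920x1080
resolution are placeholders, while the video API calls mirror those used in the sample and
driver.

.. code-block:: c

   #include <zephyr/device.h>
   #include <zephyr/drivers/video.h>
   #include <zephyr/kernel.h>

   int encode_one_frame(struct video_buffer *raw_nv12)
   {
       const struct device *venc = DEVICE_DT_GET(DT_CHOSEN(zephyr_videoenc));
       struct video_format fmt = {0};
       struct video_buffer *bitstream;
       int ret;

       if (!device_is_ready(venc)) {
           return -ENODEV;
       }

       /* Describe the raw frames handed to the encoder */
       fmt.type = VIDEO_BUF_TYPE_INPUT;
       fmt.pixelformat = VIDEO_PIX_FMT_NV12;
       fmt.width = 1920;
       fmt.height = 1080;
       ret = video_set_format(venc, &fmt);
       if (ret < 0) {
           return ret;
       }

       /* Describe the compressed output; the driver reports the needed size in fmt.sizeimage */
       fmt.type = VIDEO_BUF_TYPE_OUTPUT;
       fmt.pixelformat = VIDEO_PIX_FMT_H264;
       ret = video_set_format(venc, &fmt);
       if (ret < 0) {
           return ret;
       }

       /* Hand one output buffer to the encoder, sized from fmt.sizeimage */
       bitstream = video_buffer_aligned_alloc(fmt.sizeimage,
                                              CONFIG_VIDEO_BUFFER_POOL_ALIGN, K_FOREVER);
       if (bitstream == NULL) {
           return -ENOMEM;
       }
       bitstream->type = VIDEO_BUF_TYPE_OUTPUT;
       video_enqueue(venc, bitstream);

       video_stream_start(venc, VIDEO_BUF_TYPE_INPUT);
       video_stream_start(venc, VIDEO_BUF_TYPE_OUTPUT);

       /* Feed one raw frame and wait for the encoded data */
       raw_nv12->type = VIDEO_BUF_TYPE_INPUT;
       video_enqueue(venc, raw_nv12);

       ret = video_dequeue(venc, &bitstream, K_FOREVER);
       if (ret == 0) {
           /* bitstream->buffer holds bitstream->bytesused bytes of H.264 byte stream */
       }

       return ret;
   }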