diff --git a/doc/releases/migration-guide-4.3.rst b/doc/releases/migration-guide-4.3.rst index 72eb4db90327c..1d900e6926e08 100644 --- a/doc/releases/migration-guide-4.3.rst +++ b/doc/releases/migration-guide-4.3.rst @@ -47,6 +47,12 @@ Stepper * :dtcompatible:`zephyr,gpio-stepper` has been replaced by :dtcompatible:`zephyr,h-bridge-stepper`. +USB +=== + +* The USB Video Class was configuring the framerate and format of the source video device. + This is now to be done by the application after the host selected the format (:github:`93192`). + .. zephyr-keep-sorted-stop Bluetooth diff --git a/doc/releases/release-notes-4.3.rst b/doc/releases/release-notes-4.3.rst index 1ec3f4af22093..13d99b5e16c62 100644 --- a/doc/releases/release-notes-4.3.rst +++ b/doc/releases/release-notes-4.3.rst @@ -123,6 +123,12 @@ New APIs and options * :kconfig:option:`CONFIG_SETTINGS_TFM_ITS` +* USB + + * Video + + * :c:func:`uvc_add_format` + .. zephyr-keep-sorted-stop New Boards diff --git a/drivers/video/video_common.c b/drivers/video/video_common.c index c77b3e704f06b..d5c1693906c81 100644 --- a/drivers/video/video_common.c +++ b/drivers/video/video_common.c @@ -443,3 +443,24 @@ int64_t video_get_csi_link_freq(const struct device *dev, uint8_t bpp, uint8_t l /* CSI D-PHY is using a DDR data bus so bitrate is twice the frequency */ return ctrl.val64 * bpp / (2 * lane_nb); } + +int video_set_compose_format(const struct device *dev, struct video_format *fmt) +{ + struct video_selection sel = { + .type = fmt->type, + .target = VIDEO_SEL_TGT_COMPOSE, + .rect.left = 0, + .rect.top = 0, + .rect.width = fmt->width, + .rect.height = fmt->height, + }; + int ret; + + ret = video_set_selection(dev, &sel); + if (ret < 0 && ret != -ENOSYS) { + LOG_ERR("Unable to set selection compose"); + return ret; + } + + return video_set_format(dev, fmt); +} diff --git a/drivers/video/video_stm32_dcmipp.c b/drivers/video/video_stm32_dcmipp.c index a7c182a89e22d..cc80ddc977b28 100644 --- 
a/drivers/video/video_stm32_dcmipp.c +++ b/drivers/video/video_stm32_dcmipp.c @@ -34,6 +34,7 @@ #define STM32_DCMIPP_HAS_PIXEL_PIPES #endif +#if defined(STM32_DCMIPP_HAS_PIXEL_PIPES) /* Weak function declaration in order to interface with external ISP handler */ void __weak stm32_dcmipp_isp_vsync_update(DCMIPP_HandleTypeDef *hdcmipp, uint32_t Pipe) { @@ -53,6 +54,7 @@ int __weak stm32_dcmipp_isp_stop(void) { return 0; } +#endif LOG_MODULE_REGISTER(stm32_dcmipp, CONFIG_VIDEO_LOG_LEVEL); @@ -132,6 +134,49 @@ struct stm32_dcmipp_config { #define STM32_DCMIPP_WIDTH_MAX 4094 #define STM32_DCMIPP_HEIGHT_MAX 4094 +static void stm32_dcmipp_set_next_buffer_addr(struct stm32_dcmipp_pipe_data *pipe) +{ + struct stm32_dcmipp_data *dcmipp = pipe->dcmipp; +#if defined(STM32_DCMIPP_HAS_PIXEL_PIPES) + struct video_format *fmt = &pipe->fmt; +#endif + uint8_t *plane = pipe->next->buffer; + + /* TODO - the HAL is missing a SetMemoryAddress for auxiliary addresses */ + /* Update main buffer address */ + if (pipe->id == DCMIPP_PIPE0) { + WRITE_REG(dcmipp->hdcmipp.Instance->P0PPM0AR1, (uint32_t)plane); + } +#if defined(STM32_DCMIPP_HAS_PIXEL_PIPES) + else if (pipe->id == DCMIPP_PIPE1) { + WRITE_REG(dcmipp->hdcmipp.Instance->P1PPM0AR1, (uint32_t)plane); + } else { + WRITE_REG(dcmipp->hdcmipp.Instance->P2PPM0AR1, (uint32_t)plane); + } + + if (pipe->id != DCMIPP_PIPE1) { + return; + } + + if (fmt->pixelformat == VIDEO_PIX_FMT_NV12 || fmt->pixelformat == VIDEO_PIX_FMT_NV21 || + fmt->pixelformat == VIDEO_PIX_FMT_NV16 || fmt->pixelformat == VIDEO_PIX_FMT_NV61 || + fmt->pixelformat == VIDEO_PIX_FMT_YUV420 || fmt->pixelformat == VIDEO_PIX_FMT_YVU420) { + /* Y plane has 8 bit per pixel, next plane is located at off + width * height */ + plane += (fmt->width * fmt->height); + + WRITE_REG(dcmipp->hdcmipp.Instance->P1PPM1AR1, (uint32_t)plane); + + if (fmt->pixelformat == VIDEO_PIX_FMT_YUV420 || + fmt->pixelformat == VIDEO_PIX_FMT_YVU420) { + /* In case of YUV420 / YVU420, U plane has half width / 
half height */ + plane += (fmt->width * fmt->height) / 4; + + WRITE_REG(dcmipp->hdcmipp.Instance->P1PPM2AR1, (uint32_t)plane); + } + } +#endif +} + /* Callback getting called for each frame written into memory */ void HAL_DCMIPP_PIPE_FrameEventCallback(DCMIPP_HandleTypeDef *hdcmipp, uint32_t Pipe) { @@ -169,13 +214,14 @@ void HAL_DCMIPP_PIPE_VsyncEventCallback(DCMIPP_HandleTypeDef *hdcmipp, uint32_t struct stm32_dcmipp_data *dcmipp = CONTAINER_OF(hdcmipp, struct stm32_dcmipp_data, hdcmipp); struct stm32_dcmipp_pipe_data *pipe = dcmipp->pipe[Pipe]; - int ret; +#if defined(STM32_DCMIPP_HAS_PIXEL_PIPES) /* * Let the external ISP handler know that a VSYNC happened a new statistics are * thus available */ stm32_dcmipp_isp_vsync_update(hdcmipp, Pipe); +#endif if (pipe->state != STM32_DCMIPP_RUNNING) { return; @@ -205,17 +251,8 @@ void HAL_DCMIPP_PIPE_VsyncEventCallback(DCMIPP_HandleTypeDef *hdcmipp, uint32_t return; } - /* - * TODO - we only support 1 buffer formats for the time being, setting of - * MEMORY_ADDRESS_1 and MEMORY_ADDRESS_2 required depending on the pixelformat - * for Pipe1 - */ - ret = HAL_DCMIPP_PIPE_SetMemoryAddress(&dcmipp->hdcmipp, Pipe, DCMIPP_MEMORY_ADDRESS_0, - (uint32_t)pipe->next->buffer); - if (ret != HAL_OK) { - LOG_ERR("Failed to update memory address"); - return; - } + /* Update buffer address */ + stm32_dcmipp_set_next_buffer_addr(pipe); } #if defined(STM32_DCMIPP_HAS_CSI) @@ -439,7 +476,13 @@ static const struct stm32_dcmipp_mapping { PIXEL_PIPE_FMT(ABGR32, ARGB8888, 0, (BIT(1) | BIT(2))), PIXEL_PIPE_FMT(RGBA32, ARGB8888, 1, (BIT(1) | BIT(2))), PIXEL_PIPE_FMT(BGRA32, RGBA888, 0, (BIT(1) | BIT(2))), - /* TODO - need to add the semiplanar & planar formats */ + /* Multi-planes are only available on Pipe main (1) */ + PIXEL_PIPE_FMT(NV12, YUV420_2, 0, BIT(1)), + PIXEL_PIPE_FMT(NV21, YUV420_2, 1, BIT(1)), + PIXEL_PIPE_FMT(NV16, YUV422_2, 0, BIT(1)), + PIXEL_PIPE_FMT(NV61, YUV422_2, 1, BIT(1)), + PIXEL_PIPE_FMT(YUV420, YUV420_3, 0, BIT(1)), + 
PIXEL_PIPE_FMT(YVU420, YUV420_3, 1, BIT(1)), #endif }; @@ -460,6 +503,9 @@ static const struct stm32_dcmipp_mapping { ((fmt) == VIDEO_PIX_FMT_GREY || \ (fmt) == VIDEO_PIX_FMT_YUYV || (fmt) == VIDEO_PIX_FMT_YVYU || \ (fmt) == VIDEO_PIX_FMT_VYUY || (fmt) == VIDEO_PIX_FMT_UYVY || \ + (fmt) == VIDEO_PIX_FMT_NV12 || (fmt) == VIDEO_PIX_FMT_NV21 || \ + (fmt) == VIDEO_PIX_FMT_NV16 || (fmt) == VIDEO_PIX_FMT_NV61 || \ + (fmt) == VIDEO_PIX_FMT_YUV420 || (fmt) == VIDEO_PIX_FMT_YVU420 || \ (fmt) == VIDEO_PIX_FMT_XYUV32) ? VIDEO_COLORSPACE_YUV : \ \ VIDEO_COLORSPACE_RAW) @@ -855,6 +901,98 @@ static int stm32_dcmipp_set_yuv_conversion(struct stm32_dcmipp_pipe_data *pipe, } #endif +static int stm32_dcmipp_start_pipeline(const struct device *dev, + struct stm32_dcmipp_pipe_data *pipe) +{ + const struct stm32_dcmipp_config *config = dev->config; + struct stm32_dcmipp_data *dcmipp = pipe->dcmipp; +#if defined(STM32_DCMIPP_HAS_PIXEL_PIPES) + struct video_format *fmt = &pipe->fmt; +#endif + int ret; + +#if defined(STM32_DCMIPP_HAS_PIXEL_PIPES) + if (fmt->pixelformat == VIDEO_PIX_FMT_YUV420 || fmt->pixelformat == VIDEO_PIX_FMT_YVU420) { + uint8_t *u_addr = pipe->next->buffer + fmt->width * fmt->height; + uint8_t *v_addr = u_addr + (fmt->width * fmt->height / 4); + DCMIPP_FullPlanarDstAddressTypeDef planar_addr = { + .YAddress = (uint32_t)pipe->next->buffer, + .UAddress = (uint32_t)u_addr, + .VAddress = (uint32_t)v_addr, + }; + + if (config->bus_type == VIDEO_BUS_TYPE_PARALLEL) { + ret = HAL_DCMIPP_PIPE_FullPlanarStart(&dcmipp->hdcmipp, pipe->id, + &planar_addr, DCMIPP_MODE_CONTINUOUS); + } +#if defined(STM32_DCMIPP_HAS_CSI) + else if (config->bus_type == VIDEO_BUS_TYPE_CSI2_DPHY) { + ret = HAL_DCMIPP_CSI_PIPE_FullPlanarStart(&dcmipp->hdcmipp, pipe->id, + DCMIPP_VIRTUAL_CHANNEL0, + &planar_addr, + DCMIPP_MODE_CONTINUOUS); + } +#endif + else { + LOG_ERR("Invalid bus_type"); + ret = -EINVAL; + } + } else if (fmt->pixelformat == VIDEO_PIX_FMT_NV12 || + fmt->pixelformat == VIDEO_PIX_FMT_NV21 
|| + fmt->pixelformat == VIDEO_PIX_FMT_NV16 || + fmt->pixelformat == VIDEO_PIX_FMT_NV61) { + uint8_t *uv_addr = pipe->next->buffer + fmt->width * fmt->height; + DCMIPP_SemiPlanarDstAddressTypeDef semiplanar_addr = { + .YAddress = (uint32_t)pipe->next->buffer, + .UVAddress = (uint32_t)uv_addr, + }; + + if (config->bus_type == VIDEO_BUS_TYPE_PARALLEL) { + ret = HAL_DCMIPP_PIPE_SemiPlanarStart(&dcmipp->hdcmipp, pipe->id, + &semiplanar_addr, + DCMIPP_MODE_CONTINUOUS); + } +#if defined(STM32_DCMIPP_HAS_CSI) + else if (config->bus_type == VIDEO_BUS_TYPE_CSI2_DPHY) { + ret = HAL_DCMIPP_CSI_PIPE_SemiPlanarStart(&dcmipp->hdcmipp, pipe->id, + DCMIPP_VIRTUAL_CHANNEL0, + &semiplanar_addr, + DCMIPP_MODE_CONTINUOUS); + } +#endif + else { + LOG_ERR("Invalid bus_type"); + ret = -EINVAL; + } + } else { +#endif + if (config->bus_type == VIDEO_BUS_TYPE_PARALLEL) { + ret = HAL_DCMIPP_PIPE_Start(&dcmipp->hdcmipp, pipe->id, + (uint32_t)pipe->next->buffer, + DCMIPP_MODE_CONTINUOUS); + } +#if defined(STM32_DCMIPP_HAS_CSI) + else if (config->bus_type == VIDEO_BUS_TYPE_CSI2_DPHY) { + ret = HAL_DCMIPP_CSI_PIPE_Start(&dcmipp->hdcmipp, pipe->id, + DCMIPP_VIRTUAL_CHANNEL0, + (uint32_t)pipe->next->buffer, + DCMIPP_MODE_CONTINUOUS); + } +#endif + else { + LOG_ERR("Invalid bus_type"); + ret = -EINVAL; + } +#if defined(STM32_DCMIPP_HAS_PIXEL_PIPES) + } +#endif + if (ret != HAL_OK) { + return -EIO; + } + + return 0; +} + static int stm32_dcmipp_stream_enable(const struct device *dev) { struct stm32_dcmipp_pipe_data *pipe = dev->data; @@ -942,7 +1080,16 @@ static int stm32_dcmipp_stream_enable(const struct device *dev) pipe_cfg.FrameRate = DCMIPP_FRAME_RATE_ALL; #if defined(STM32_DCMIPP_HAS_PIXEL_PIPES) if (pipe->id == DCMIPP_PIPE1 || pipe->id == DCMIPP_PIPE2) { - pipe_cfg.PixelPipePitch = fmt->pitch; + if (fmt->pixelformat == VIDEO_PIX_FMT_NV12 || + fmt->pixelformat == VIDEO_PIX_FMT_NV21 || + fmt->pixelformat == VIDEO_PIX_FMT_NV16 || + fmt->pixelformat == VIDEO_PIX_FMT_NV61 || + fmt->pixelformat == 
VIDEO_PIX_FMT_YUV420 || + fmt->pixelformat == VIDEO_PIX_FMT_YVU420) { + pipe_cfg.PixelPipePitch = fmt->width; + } else { + pipe_cfg.PixelPipePitch = fmt->pitch; + } pipe_cfg.PixelPackerFormat = mapping->pixels.dcmipp_format; } #endif @@ -1029,34 +1176,18 @@ static int stm32_dcmipp_stream_enable(const struct device *dev) goto out; } } -#endif /* Initialize the external ISP handling stack */ ret = stm32_dcmipp_isp_init(&dcmipp->hdcmipp, config->source_dev); if (ret < 0) { goto out; } +#endif /* Enable the DCMIPP Pipeline */ - if (config->bus_type == VIDEO_BUS_TYPE_PARALLEL) { - ret = HAL_DCMIPP_PIPE_Start(&dcmipp->hdcmipp, pipe->id, - (uint32_t)pipe->next->buffer, DCMIPP_MODE_CONTINUOUS); - } -#if defined(STM32_DCMIPP_HAS_CSI) - else if (config->bus_type == VIDEO_BUS_TYPE_CSI2_DPHY) { - ret = HAL_DCMIPP_CSI_PIPE_Start(&dcmipp->hdcmipp, pipe->id, DCMIPP_VIRTUAL_CHANNEL0, - (uint32_t)pipe->next->buffer, - DCMIPP_MODE_CONTINUOUS); - } -#endif - else { - LOG_ERR("Invalid bus_type"); - ret = -EINVAL; - goto out; - } - if (ret != HAL_OK) { + ret = stm32_dcmipp_start_pipeline(dev, pipe); + if (ret < 0) { LOG_ERR("Failed to start the pipeline"); - ret = -EIO; goto out; } @@ -1082,11 +1213,13 @@ static int stm32_dcmipp_stream_enable(const struct device *dev) } } +#if defined(STM32_DCMIPP_HAS_PIXEL_PIPES) /* Start the external ISP handling */ ret = stm32_dcmipp_isp_start(); if (ret < 0) { goto out; } +#endif pipe->state = STM32_DCMIPP_RUNNING; pipe->is_streaming = true; @@ -1112,11 +1245,13 @@ static int stm32_dcmipp_stream_disable(const struct device *dev) goto out; } +#if defined(STM32_DCMIPP_HAS_PIXEL_PIPES) /* Stop the external ISP handling */ ret = stm32_dcmipp_isp_stop(); if (ret < 0) { goto out; } +#endif /* Disable the DCMIPP Pipeline */ if (config->bus_type == VIDEO_BUS_TYPE_PARALLEL) { @@ -1175,7 +1310,6 @@ static int stm32_dcmipp_enqueue(const struct device *dev, struct video_buffer *v { struct stm32_dcmipp_pipe_data *pipe = dev->data; struct stm32_dcmipp_data 
*dcmipp = pipe->dcmipp; - int ret; k_mutex_lock(&pipe->lock, K_FOREVER); @@ -1186,13 +1320,7 @@ static int stm32_dcmipp_enqueue(const struct device *dev, struct video_buffer *v if (pipe->state == STM32_DCMIPP_WAIT_FOR_BUFFER) { LOG_DBG("Restart CPTREQ after wait for buffer"); pipe->next = vbuf; - ret = HAL_DCMIPP_PIPE_SetMemoryAddress(&dcmipp->hdcmipp, pipe->id, - DCMIPP_MEMORY_ADDRESS_0, - (uint32_t)pipe->next->buffer); - if (ret != HAL_OK) { - LOG_ERR("Failed to update memory address"); - return -EIO; - } + stm32_dcmipp_set_next_buffer_addr(pipe); if (pipe->id == DCMIPP_PIPE0) { SET_BIT(dcmipp->hdcmipp.Instance->P0FCTCR, DCMIPP_P0FCTCR_CPTREQ); } @@ -1226,25 +1354,89 @@ static int stm32_dcmipp_dequeue(const struct device *dev, struct video_buffer ** return 0; } -/* - * TODO: caps aren't yet handled hence give back straight the caps given by the - * source. Normally this should be the intersection of what the source produces - * vs what the DCMIPP can input (for pipe0) and, for pipe 1 and 2, for a given - * input format, generate caps based on capabilities, color conversion, decimation - * etc - */ +#define DCMIPP_CEIL_DIV(a, b) (((a) + (b) - 1) / (b)) +#define DCMIPP_VIDEO_FORMAT_CAP(format) \ + { \ + .pixelformat = VIDEO_PIX_FMT_##format, \ + .width_min = DCMIPP_CEIL_DIV(CONFIG_VIDEO_STM32_DCMIPP_SENSOR_WIDTH, \ + STM32_DCMIPP_MAX_PIPE_SCALE_FACTOR), \ + .width_max = CONFIG_VIDEO_STM32_DCMIPP_SENSOR_WIDTH, \ + .height_min = DCMIPP_CEIL_DIV(CONFIG_VIDEO_STM32_DCMIPP_SENSOR_HEIGHT, \ + STM32_DCMIPP_MAX_PIPE_SCALE_FACTOR), \ + .height_max = CONFIG_VIDEO_STM32_DCMIPP_SENSOR_HEIGHT, \ + .width_step = 1, .height_step = 1, \ + } + +static const struct video_format_cap stm32_dcmipp_dump_fmt[] = { + { + .pixelformat = + VIDEO_FOURCC_FROM_STR(CONFIG_VIDEO_STM32_DCMIPP_SENSOR_PIXEL_FORMAT), + .width_min = CONFIG_VIDEO_STM32_DCMIPP_SENSOR_WIDTH, + .width_max = CONFIG_VIDEO_STM32_DCMIPP_SENSOR_WIDTH, + .height_min = CONFIG_VIDEO_STM32_DCMIPP_SENSOR_HEIGHT, + .height_max = 
CONFIG_VIDEO_STM32_DCMIPP_SENSOR_HEIGHT, + .width_step = 1, .height_step = 1, + }, + {0}, +}; + +static const struct video_format_cap stm32_dcmipp_main_fmts[] = { + DCMIPP_VIDEO_FORMAT_CAP(RGB565), + DCMIPP_VIDEO_FORMAT_CAP(YUYV), + DCMIPP_VIDEO_FORMAT_CAP(YVYU), + DCMIPP_VIDEO_FORMAT_CAP(GREY), + DCMIPP_VIDEO_FORMAT_CAP(RGB24), + DCMIPP_VIDEO_FORMAT_CAP(BGR24), + DCMIPP_VIDEO_FORMAT_CAP(ARGB32), + DCMIPP_VIDEO_FORMAT_CAP(ABGR32), + DCMIPP_VIDEO_FORMAT_CAP(RGBA32), + DCMIPP_VIDEO_FORMAT_CAP(BGRA32), + DCMIPP_VIDEO_FORMAT_CAP(NV12), + DCMIPP_VIDEO_FORMAT_CAP(NV21), + DCMIPP_VIDEO_FORMAT_CAP(NV16), + DCMIPP_VIDEO_FORMAT_CAP(NV61), + DCMIPP_VIDEO_FORMAT_CAP(YUV420), + DCMIPP_VIDEO_FORMAT_CAP(YVU420), + {0}, +}; + +static const struct video_format_cap stm32_dcmipp_aux_fmts[] = { + DCMIPP_VIDEO_FORMAT_CAP(RGB565), + DCMIPP_VIDEO_FORMAT_CAP(YUYV), + DCMIPP_VIDEO_FORMAT_CAP(YVYU), + DCMIPP_VIDEO_FORMAT_CAP(GREY), + DCMIPP_VIDEO_FORMAT_CAP(RGB24), + DCMIPP_VIDEO_FORMAT_CAP(BGR24), + DCMIPP_VIDEO_FORMAT_CAP(ARGB32), + DCMIPP_VIDEO_FORMAT_CAP(ABGR32), + DCMIPP_VIDEO_FORMAT_CAP(RGBA32), + DCMIPP_VIDEO_FORMAT_CAP(BGRA32), + {0}, +}; + static int stm32_dcmipp_get_caps(const struct device *dev, struct video_caps *caps) { - const struct stm32_dcmipp_config *config = dev->config; - int ret; + struct stm32_dcmipp_pipe_data *pipe = dev->data; - ret = video_get_caps(config->source_dev, caps); + switch (pipe->id) { + case DCMIPP_PIPE0: + caps->format_caps = stm32_dcmipp_dump_fmt; + break; + case DCMIPP_PIPE1: + caps->format_caps = stm32_dcmipp_main_fmts; + break; + case DCMIPP_PIPE2: + caps->format_caps = stm32_dcmipp_aux_fmts; + break; + default: + CODE_UNREACHABLE; + } caps->min_vbuf_count = 1; caps->min_line_count = LINE_COUNT_HEIGHT; caps->max_line_count = LINE_COUNT_HEIGHT; - return ret; + return 0; } static int stm32_dcmipp_get_frmival(const struct device *dev, struct video_frmival *frmival) diff --git a/include/zephyr/drivers/video.h b/include/zephyr/drivers/video.h index 
0744219a76960..b0ef0339416de 100644 --- a/include/zephyr/drivers/video.h +++ b/include/zephyr/drivers/video.h @@ -988,6 +988,22 @@ void video_closest_frmival(const struct device *dev, struct video_frmival_enum * */ int64_t video_get_csi_link_freq(const struct device *dev, uint8_t bpp, uint8_t lane_nb); +/** + * @brief Set compose rectangle (if applicable) prior to setting format + * + * Some devices expose compose capabilities, allowing them to apply a transformation + * (downscale / upscale) to the frame. For those devices, it is necessary to set the + * compose rectangle before being able to apply the frame format (which must have the + * same width / height and the compose rectangle width / height. + * In order to allow non-compose aware application to be able to control such devices, + * introduce a helper which, if available, will apply the compose rectangle prior to + * setting the format. + * + * @param dev Video device to query. + * @param fmt Video format structure pointer + */ +int video_set_compose_format(const struct device *dev, struct video_format *fmt); + /** * @defgroup video_pixel_formats Video pixel formats * The '|' characters separate the pixels or logical blocks, and spaces separate the bytes. @@ -1551,6 +1567,196 @@ int64_t video_get_csi_link_freq(const struct device *dev, uint8_t bpp, uint8_t l */ #define VIDEO_PIX_FMT_XYUV32 VIDEO_FOURCC('X', 'Y', 'U', 'V') +/** + * Planar formats + */ +/** + * Chroma (U/V) are subsampled horizontaly and vertically + * + * @code{.unparsed} + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | ... + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | ... + * | ... | + * | Uuuuuuuu Vvvvvvvv | Uuuuuuuu Vvvvvvvv | ... + * | ... | + * @endcode + * + * Below diagram show how luma and chroma relate to each others + * Y0 Y1 Y2 Y3 ... + * Y6 Y7 Y8 Y9 ... + * ... + * + * U0/1/6/7 V0/1/6/7 U2/3/8/9 V2/3/8/9 ... + * ... 
+ */ +#define VIDEO_PIX_FMT_NV12 VIDEO_FOURCC('N', 'V', '1', '2') + +/** + * Chroma (U/V) are subsampled horizontaly and vertically + * + * @code{.unparsed} + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | ... + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | ... + * | ... | + * | Vvvvvvvv Uuuuuuuu | Vvvvvvvv Uuuuuuuu | ... + * | ... | + * @endcode + * + * Below diagram show how luma and chroma relate to each others + * Y0 Y1 Y2 Y3 ... + * Y6 Y7 Y8 Y9 ... + * ... + * + * V0/1/6/7 U0/1/6/7 V2/3/8/9 U2/3/8/9 ... + * ... + */ +#define VIDEO_PIX_FMT_NV21 VIDEO_FOURCC('N', 'V', '2', '1') + +/** + * Chroma (U/V) are subsampled horizontaly + * + * @code{.unparsed} + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | ... + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | ... + * | ... | + * | Uuuuuuuu Vvvvvvvv | Uuuuuuuu Vvvvvvvv | ... + * | Uuuuuuuu Vvvvvvvv | Uuuuuuuu Vvvvvvvv | ... + * | ... | + * @endcode + * + * Below diagram show how luma and chroma relate to each others + * Y0 Y1 Y2 Y3 ... + * Y6 Y7 Y8 Y9 ... + * ... + * + * U0/1 V0/1 U2/3 V2/3 ... + * U6/7 V6/7 U8/9 V8/9 ... + * ... + */ +#define VIDEO_PIX_FMT_NV16 VIDEO_FOURCC('N', 'V', '1', '6') + +/** + * Chroma (U/V) are subsampled horizontaly + * + * @code{.unparsed} + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | ... + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | ... + * | ... | + * | Vvvvvvvv Uuuuuuuu | Vvvvvvvv Uuuuuuuu | ... + * | Vvvvvvvv Uuuuuuuu | Vvvvvvvv Uuuuuuuu | ... + * | ... | + * + * Below diagram show how luma and chroma relate to each others + * Y0 Y1 Y2 Y3 ... + * Y6 Y7 Y8 Y9 ... + * ... + * + * V0/1 U0/1 V2/3 U2/3 ... + * V6/7 U6/7 V8/9 U8/9 ... + * ... + * @endcode + */ + +#define VIDEO_PIX_FMT_NV61 VIDEO_FOURCC('N', 'V', '6', '1') + +/** + * Chroma (U/V) are not subsampled + * + * @code{.unparsed} + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | + * | ... 
| + * | Uuuuuuuu Vvvvvvvv | Uuuuuuuu Vvvvvvvv | Uuuuuuuu Vvvvvvvv | Uuuuuuuu Vvvvvvvv | + * | Uuuuuuuu Vvvvvvvv | Uuuuuuuu Vvvvvvvv | Uuuuuuuu Vvvvvvvv | Uuuuuuuu Vvvvvvvv | + * | ... | + * @endcode + * + * Below diagram show how luma and chroma relate to each others + * Y0 Y1 Y2 Y3 ... + * Y6 Y7 Y8 Y9 ... + * ... + * + * U0 V0 U1 V1 U2 V2 U3 V3 ... + * U6 V6 U7 V7 U8 V8 U9 V9 ... + * ... + */ +#define VIDEO_PIX_FMT_NV24 VIDEO_FOURCC('N', 'V', '2', '4') + +/** + * Chroma (U/V) are not subsampled + * + * @code{.unparsed} + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | + * | ... | + * | Vvvvvvvv Uuuuuuuu | Vvvvvvvv Uuuuuuuu | Vvvvvvvv Uuuuuuuu | Vvvvvvvv Uuuuuuuu | + * | Vvvvvvvv Uuuuuuuu | Vvvvvvvv Uuuuuuuu | Vvvvvvvv Uuuuuuuu | Vvvvvvvv Uuuuuuuu | + * | ... | + * @endcode + * + * Below diagram show how luma and chroma relate to each others + * Y0 Y1 Y2 Y3 ... + * Y6 Y7 Y8 Y9 ... + * ... + * + * V0 U0 V1 U1 V2 U2 V3 U3 ... + * V6 U6 V7 U7 V8 U8 V9 U9 ... + * ... + */ +#define VIDEO_PIX_FMT_NV42 VIDEO_FOURCC('N', 'V', '4', '2') + +/** + * Chroma (U/V) are subsampled horizontaly and vertically + * + * @code{.unparsed} + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | + * | ... | + * | Uuuuuuuu | Uuuuuuuu | + * | ... | + * | Vvvvvvvv | Vvvvvvvv | + * | ... | + * @endcode + * + * Below diagram show how luma and chroma relate to each others + * Y0 Y1 Y2 Y3 ... + * Y6 Y7 Y8 Y9 ... + * ... + * + * U0/1/6/7 U2/3/8/9 ... + * ... + * + * V0/1/6/7 V2/3/8/9 ... + * ... + */ +#define VIDEO_PIX_FMT_YUV420 VIDEO_FOURCC('Y', 'U', '1', '2') + +/** + * Chroma (U/V) are subsampled horizontaly and vertically + * + * @code{.unparsed} + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | + * | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | Yyyyyyyy | + * | ... | + * | Vvvvvvvv | Vvvvvvvv | + * | ... | + * | Uuuuuuuu | Uuuuuuuu | + * | ... 
| + * @endcode + * + * Below diagram show how luma and chroma relate to each others + * Y0 Y1 Y2 Y3 ... + * Y6 Y7 Y8 Y9 ... + * ... + * + * V0/1/6/7 V2/3/8/9 ... + * ... + * + * U0/1/6/7 U2/3/8/9 ... + * ... + */ +#define VIDEO_PIX_FMT_YVU420 VIDEO_FOURCC('Y', 'V', '1', '2') + /** * @} */ @@ -1597,6 +1803,10 @@ static inline unsigned int video_bits_per_pixel(uint32_t pixfmt) case VIDEO_PIX_FMT_SGRBG12P: case VIDEO_PIX_FMT_SRGGB12P: case VIDEO_PIX_FMT_Y12P: + case VIDEO_PIX_FMT_NV12: + case VIDEO_PIX_FMT_NV21: + case VIDEO_PIX_FMT_YUV420: + case VIDEO_PIX_FMT_YVU420: return 12; case VIDEO_PIX_FMT_SBGGR14P: case VIDEO_PIX_FMT_SGBRG14P: @@ -1629,9 +1839,13 @@ static inline unsigned int video_bits_per_pixel(uint32_t pixfmt) case VIDEO_PIX_FMT_Y12: case VIDEO_PIX_FMT_Y14: case VIDEO_PIX_FMT_Y16: + case VIDEO_PIX_FMT_NV16: + case VIDEO_PIX_FMT_NV61: return 16; case VIDEO_PIX_FMT_BGR24: case VIDEO_PIX_FMT_RGB24: + case VIDEO_PIX_FMT_NV24: + case VIDEO_PIX_FMT_NV42: return 24; case VIDEO_PIX_FMT_XRGB32: case VIDEO_PIX_FMT_XYUV32: diff --git a/include/zephyr/usb/class/usbd_uvc.h b/include/zephyr/usb/class/usbd_uvc.h index bd5562389206b..b99a3376d19c9 100644 --- a/include/zephyr/usb/class/usbd_uvc.h +++ b/include/zephyr/usb/class/usbd_uvc.h @@ -26,20 +26,33 @@ */ /** - * @brief Set the video device that a UVC instance will use. + * @brief Set the video device that a UVC instance will use for control requests. * - * It will query its supported controls, formats and frame rates, and use this information to - * generate USB descriptors sent to the host. - * - * At runtime, it will forward all USB controls from the host to this device. + * It will query its supported video controls and frame intervals and use this information to + * generate the USB descriptors presented to the host. At runtime, it will forward all USB controls + * from the host to this device. * * @note This function must be called before @ref usbd_enable. 
* * @param uvc_dev The UVC device - * @param video_dev The video device that this UVC instance controls + * @param video_dev The video device that this UVC instance sends control requests to */ void uvc_set_video_dev(const struct device *uvc_dev, const struct device *video_dev); +/** + * @brief Set the video format capabilities that a UVC instance will present to the host. + * + * This information will be used to generate USB descriptors. + * The particular format selected by the host can be queried with @ref video_get_format. + * + * @note This function must be called before @ref usbd_enable and before @ref uvc_set_video_dev. + * + * @param uvc_dev The UVC device to configure + * @param fmt The video format to add to this UVC instance + * @return 0 on success, negative value on error + */ +int uvc_add_format(const struct device *const uvc_dev, const struct video_format *const fmt); + /** * @} */ diff --git a/samples/drivers/video/capture/src/main.c b/samples/drivers/video/capture/src/main.c index bfb8e051f2993..1d355baf82abb 100644 --- a/samples/drivers/video/capture/src/main.c +++ b/samples/drivers/video/capture/src/main.c @@ -98,10 +98,14 @@ int main(void) struct video_frmival frmival; struct video_frmival_enum fie; enum video_buf_type type = VIDEO_BUF_TYPE_OUTPUT; -#if (CONFIG_VIDEO_SOURCE_CROP_WIDTH && CONFIG_VIDEO_SOURCE_CROP_HEIGHT) || \ - CONFIG_VIDEO_FRAME_HEIGHT || CONFIG_VIDEO_FRAME_WIDTH - struct video_selection sel = { +#if (CONFIG_VIDEO_SOURCE_CROP_WIDTH && CONFIG_VIDEO_SOURCE_CROP_HEIGHT) + struct video_selection crop_sel = { .type = VIDEO_BUF_TYPE_OUTPUT, + .target = VIDEO_SEL_TGT_CROP, + .rect.left = CONFIG_VIDEO_SOURCE_CROP_LEFT, + .rect.top = CONFIG_VIDEO_SOURCE_CROP_TOP, + .rect.width = CONFIG_VIDEO_SOURCE_CROP_WIDTH, + .rect.height = CONFIG_VIDEO_SOURCE_CROP_HEIGHT, }; #endif unsigned int frame = 0; @@ -150,12 +154,7 @@ int main(void) /* Set the crop setting if necessary */ #if CONFIG_VIDEO_SOURCE_CROP_WIDTH && CONFIG_VIDEO_SOURCE_CROP_HEIGHT -
sel.target = VIDEO_SEL_TGT_CROP; - sel.rect.left = CONFIG_VIDEO_SOURCE_CROP_LEFT; - sel.rect.top = CONFIG_VIDEO_SOURCE_CROP_TOP; - sel.rect.width = CONFIG_VIDEO_SOURCE_CROP_WIDTH; - sel.rect.height = CONFIG_VIDEO_SOURCE_CROP_HEIGHT; - if (video_set_selection(video_dev, &sel)) { + if (video_set_selection(video_dev, &crop_sel)) { LOG_ERR("Unable to set selection crop"); return 0; } @@ -163,7 +162,6 @@ int main(void) sel.rect.left, sel.rect.top, sel.rect.width, sel.rect.height); #endif -#if CONFIG_VIDEO_FRAME_HEIGHT || CONFIG_VIDEO_FRAME_WIDTH #if CONFIG_VIDEO_FRAME_HEIGHT fmt.height = CONFIG_VIDEO_FRAME_HEIGHT; #endif @@ -172,31 +170,6 @@ int main(void) fmt.width = CONFIG_VIDEO_FRAME_WIDTH; #endif - /* - * Check (if possible) if targeted size is same as crop - * and if compose is necessary - */ - sel.target = VIDEO_SEL_TGT_CROP; - err = video_get_selection(video_dev, &sel); - if (err < 0 && err != -ENOSYS) { - LOG_ERR("Unable to get selection crop"); - return 0; - } - - if (err == 0 && (sel.rect.width != fmt.width || sel.rect.height != fmt.height)) { - sel.target = VIDEO_SEL_TGT_COMPOSE; - sel.rect.left = 0; - sel.rect.top = 0; - sel.rect.width = fmt.width; - sel.rect.height = fmt.height; - err = video_set_selection(video_dev, &sel); - if (err < 0 && err != -ENOSYS) { - LOG_ERR("Unable to set selection compose"); - return 0; - } - } -#endif - if (strcmp(CONFIG_VIDEO_PIXEL_FORMAT, "")) { fmt.pixelformat = VIDEO_FOURCC_FROM_STR(CONFIG_VIDEO_PIXEL_FORMAT); } @@ -204,7 +177,7 @@ int main(void) LOG_INF("- Video format: %s %ux%u", VIDEO_FOURCC_TO_STR(fmt.pixelformat), fmt.width, fmt.height); - if (video_set_format(video_dev, &fmt)) { + if (video_set_compose_format(video_dev, &fmt)) { LOG_ERR("Unable to set format"); return 0; } diff --git a/samples/subsys/usb/uvc/Kconfig b/samples/subsys/usb/uvc/Kconfig index d1b0c2bec39c3..76ee42eceee14 100644 --- a/samples/subsys/usb/uvc/Kconfig +++ b/samples/subsys/usb/uvc/Kconfig @@ -6,4 +6,16 @@ # tree, you cannot use them in your own 
application. source "samples/subsys/usb/common/Kconfig.sample_usbd" +menu "UVC specific configuration" + +config VIDEO_MAX_RANGE_RESOLUTIONS + int "Maximum number of intermediate resolutions" + default 5 + help + Control the maximum number of resolutions that will be advertised + to the USB client in case the video capture device supports a range + of resolutions. + +endmenu + source "Kconfig.zephyr" diff --git a/samples/subsys/usb/uvc/src/main.c b/samples/subsys/usb/uvc/src/main.c index e2520556ba10e..f69d22387bf8c 100644 --- a/samples/subsys/usb/uvc/src/main.c +++ b/samples/subsys/usb/uvc/src/main.c @@ -17,15 +17,137 @@ LOG_MODULE_REGISTER(uvc_sample, LOG_LEVEL_INF); -const struct device *const uvc_dev = DEVICE_DT_GET(DT_NODELABEL(uvc)); -const struct device *const video_dev = DEVICE_DT_GET(DT_CHOSEN(zephyr_camera)); +const static struct device *const uvc_dev = DEVICE_DT_GET(DT_NODELABEL(uvc)); +const static struct device *const video_dev = DEVICE_DT_GET(DT_CHOSEN(zephyr_camera)); + +/* Format capabilities of video_dev, used everywhere throughout the sample */ +static struct video_caps video_caps = {.type = VIDEO_BUF_TYPE_OUTPUT}; + +static size_t app_get_min_buf_size(const struct video_format *const fmt) +{ + if (video_caps.min_line_count == LINE_COUNT_HEIGHT) { + return fmt->pitch * fmt->height; + } else { + return fmt->pitch * video_caps.min_line_count; + } +} + +static bool app_is_standard_format(uint32_t pixfmt) +{ + return pixfmt == VIDEO_PIX_FMT_GREY || pixfmt == VIDEO_PIX_FMT_JPEG || + pixfmt == VIDEO_PIX_FMT_YUYV; +} + +/* Check whether the video device supports one of the widespread image sensor formats */ +static bool app_has_standard_formats(void) +{ + for (int i = 0;; i++) { + uint32_t pixfmt = video_caps.format_caps[i].pixelformat; + + if (pixfmt == 0) { + return false; + } + if (app_is_standard_format(pixfmt)) { + return true; + } + } +} + +static void app_add_format(uint32_t pixfmt, uint16_t width, uint16_t height, bool has_std_fmts) +{ + struct 
video_format fmt = { + .pixelformat = pixfmt, + .width = width, + .height = height, + .type = VIDEO_BUF_TYPE_OUTPUT, + }; + int ret; + + /* If the system has any standard pixel format, only propose them to the host */ + if (has_std_fmts && !app_is_standard_format(pixfmt)) { + return; + } + + /* Set the format to get the pitch */ + ret = video_set_compose_format(video_dev, &fmt); + if (ret != 0) { + LOG_ERR("Could not set the format of %s", video_dev->name); + return; + } + + if (app_get_min_buf_size(&fmt) > CONFIG_VIDEO_BUFFER_POOL_SZ_MAX) { + LOG_WRN("Skipping format %ux%u", fmt.width, fmt.height); + return; + } + + uvc_add_format(uvc_dev, &fmt); +} + +struct video_resolution { + uint16_t width; + uint16_t height; +}; + +static struct video_resolution video_common_fmts[] = { + { .width = 3840, .height = 2160, }, /* UHD */ + { .width = 1920, .height = 1080, }, /* FHD */ + { .width = 1280, .height = 1024, }, /* SXGA */ + { .width = 1280, .height = 720, }, /* HD */ + { .width = 800, .height = 600, }, /* SVGA */ + { .width = 854, .height = 480, }, /* WVGA */ + { .width = 640, .height = 480, }, /* VGA */ + { .width = 320, .height = 240, }, /* QVGA */ + { .width = 160, .height = 120, }, /* QQVGA */ +}; + +/* Submit to UVC only the formats expected to be working (enough memory for the size, etc.) 
*/ +static void app_add_filtered_formats(void) +{ + const bool has_std_fmts = app_has_standard_formats(); + + for (int i = 0; video_caps.format_caps[i].pixelformat != 0; i++) { + const struct video_format_cap *vcap = &video_caps.format_caps[i]; + int range_count = 0; + + app_add_format(vcap->pixelformat, vcap->width_min, vcap->height_min, has_std_fmts); + + if (vcap->width_min != vcap->width_max || vcap->height_min != vcap->height_max) { + app_add_format(vcap->pixelformat, vcap->width_max, vcap->height_max, + has_std_fmts); + } + + if (vcap->width_step == 0 && vcap->height_step == 0) { + continue; + } + + /* RANGE Resolution processing */ + for (int j = 0; j < ARRAY_SIZE(video_common_fmts); j++) { + if (range_count >= CONFIG_VIDEO_MAX_RANGE_RESOLUTIONS) { + break; + } + if (!IN_RANGE(video_common_fmts[j].width, + vcap->width_min, vcap->width_max) || + !IN_RANGE(video_common_fmts[j].height, + vcap->height_min, vcap->height_max)) { + continue; + } + if ((video_common_fmts[j].width - vcap->width_min) % vcap->width_step || + (video_common_fmts[j].height - vcap->height_min) % vcap->height_step) { + continue; + } + + app_add_format(vcap->pixelformat, video_common_fmts[j].width, + video_common_fmts[j].height, has_std_fmts); + } + } +} int main(void) { struct usbd_context *sample_usbd; struct video_buffer *vbuf; struct video_format fmt = {0}; - struct video_caps caps; + struct video_frmival frmival = {0}; struct k_poll_signal sig; struct k_poll_event evt[1]; k_timeout_t timeout = K_FOREVER; @@ -37,16 +159,18 @@ int main(void) return -ENODEV; } - caps.type = VIDEO_BUF_TYPE_OUTPUT; - - if (video_get_caps(video_dev, &caps)) { + ret = video_get_caps(video_dev, &video_caps); + if (ret != 0) { LOG_ERR("Unable to retrieve video capabilities"); return 0; } - /* Must be done before initializing USB */ + /* Must be called before usb_enable() */ uvc_set_video_dev(uvc_dev, video_dev); + /* Must be called before uvc_set_video_dev() */ + app_add_filtered_formats(); + sample_usbd = 
sample_usbd_init_device(NULL); if (sample_usbd == NULL) { return -ENODEV; @@ -59,7 +183,6 @@ int main(void) LOG_INF("Waiting the host to select the video format"); - /* Get the video format once it is selected by the host */ while (true) { fmt.type = VIDEO_BUF_TYPE_INPUT; @@ -75,19 +198,34 @@ int main(void) k_sleep(K_MSEC(10)); } - LOG_INF("The host selected format '%s' %ux%u, preparing %u buffers of %u bytes", + ret = video_get_frmival(uvc_dev, &frmival); + if (ret != 0) { + LOG_ERR("Failed to get the video frame interval"); + return ret; + } + + LOG_INF("The host selected format '%s' %ux%u at frame interval %u/%u", VIDEO_FOURCC_TO_STR(fmt.pixelformat), fmt.width, fmt.height, - CONFIG_VIDEO_BUFFER_POOL_NUM_MAX, fmt.pitch * fmt.height); + frmival.numerator, frmival.denominator); - /* Size to allocate for each buffer */ - if (caps.min_line_count == LINE_COUNT_HEIGHT) { - bsize = fmt.pitch * fmt.height; - } else { - bsize = fmt.pitch * caps.min_line_count; + fmt.type = VIDEO_BUF_TYPE_OUTPUT; + + ret = video_set_compose_format(video_dev, &fmt); + if (ret != 0) { + LOG_WRN("Could not set the format of %s", video_dev->name); } + ret = video_set_frmival(video_dev, &frmival); + if (ret != 0) { + LOG_WRN("Could not set the framerate of %s", video_dev->name); + } + + bsize = app_get_min_buf_size(&fmt); + + LOG_INF("Preparing %u buffers of %u bytes", CONFIG_VIDEO_BUFFER_POOL_NUM_MAX, bsize); + for (int i = 0; i < CONFIG_VIDEO_BUFFER_POOL_NUM_MAX; i++) { - vbuf = video_buffer_alloc(bsize, K_NO_WAIT); + vbuf = video_buffer_aligned_alloc(bsize, CONFIG_VIDEO_BUFFER_POOL_ALIGN, K_NO_WAIT); if (vbuf == NULL) { LOG_ERR("Could not allocate the video buffer"); return -ENOMEM; diff --git a/subsys/usb/device_next/class/usbd_uvc.c b/subsys/usb/device_next/class/usbd_uvc.c index 9038a8e0d3f15..f281e9e9b623d 100644 --- a/subsys/usb/device_next/class/usbd_uvc.c +++ b/subsys/usb/device_next/class/usbd_uvc.c @@ -111,6 +111,10 @@ struct uvc_data { struct video_frmival video_frmival; /* Signal 
to alert video devices of buffer-related evenets */ struct k_poll_signal *video_sig; + /* Last pixel format that was added by uvc_add_format() */ + uint32_t last_pix_fmt; + /* Last format descriptor that was added by uvc_add_format() */ + struct uvc_format_descriptor *last_format_desc; /* Makes sure flushing the stream only happens in one context at a time */ struct k_mutex mutex; /* Zero Length packet used to reset a stream when restarted */ @@ -179,15 +183,6 @@ UDC_BUF_POOL_VAR_DEFINE(uvc_buf_pool, UVC_TOTAL_BUFS, UVC_TOTAL_BUFS * USBD_MAX_ static void uvc_flush_queue(const struct device *dev); -/* UVC public API */ - -void uvc_set_video_dev(const struct device *const dev, const struct device *const video_dev) -{ - struct uvc_data *data = dev->data; - - data->video_dev = video_dev; -} - /* UVC helper functions */ static const struct uvc_guid_quirk uvc_guid_quirks[] = { @@ -541,8 +536,8 @@ static int uvc_get_vs_probe_frame_interval(const struct device *dev, struct uvc_ const uint8_t request) { struct uvc_data *data = dev->data; - struct uvc_format_descriptor *format_desc; - struct uvc_frame_discrete_descriptor *frame_desc; + struct uvc_format_descriptor *format_desc = NULL; + struct uvc_frame_discrete_descriptor *frame_desc = NULL; int max; uvc_get_vs_fmtfrm_desc(dev, &format_desc, &frame_desc); @@ -604,7 +599,7 @@ static int uvc_get_vs_format_from_desc(const struct device *dev, struct video_fo { struct uvc_data *data = dev->data; struct uvc_format_descriptor *format_desc = NULL; - struct uvc_frame_discrete_descriptor *frame_desc; + struct uvc_frame_discrete_descriptor *frame_desc = NULL; /* Update the format based on the probe message from the host */ uvc_get_vs_fmtfrm_desc(dev, &format_desc, &frame_desc); @@ -793,8 +788,8 @@ static int uvc_get_vs_commit(const struct device *dev, struct net_buf *const buf static int uvc_set_vs_commit(const struct device *dev, const struct net_buf *const buf) { struct uvc_data *data = dev->data; - struct video_format fmt = 
data->video_fmt; - struct video_frmival frmival = data->video_frmival; + struct video_format *fmt = &data->video_fmt; + struct video_frmival *frmival = &data->video_frmival; int ret; __ASSERT_NO_MSG(data->video_dev != NULL); @@ -804,27 +799,9 @@ static int uvc_set_vs_commit(const struct device *dev, const struct net_buf *con return ret; } - LOG_INF("Ready to transfer, setting source format to '%s' %ux%u", - VIDEO_FOURCC_TO_STR(fmt.pixelformat), fmt.width, fmt.height); - - fmt.type = VIDEO_BUF_TYPE_OUTPUT; - - ret = video_set_format(data->video_dev, &fmt); - if (ret != 0) { - LOG_ERR("Could not set the format of %s", data->video_dev->name); - return ret; - } - - LOG_DBG("Setting frame interval of %s to %u/%u", - data->video_dev->name, - data->video_frmival.numerator, data->video_frmival.denominator); - - ret = video_set_frmival(data->video_dev, &frmival); - if (ret != 0) { - LOG_WRN("Could not set the framerate of %s", data->video_dev->name); - } - - LOG_DBG("UVC device ready, %s can now be started", data->video_dev->name); + LOG_INF("Host selected format '%s' %ux%u, frame interval %u/%u", + VIDEO_FOURCC_TO_STR(fmt->pixelformat), fmt->width, fmt->height, + frmival->numerator, frmival->denominator); if (atomic_test_bit(&data->state, UVC_STATE_STREAM_READY)) { atomic_set_bit(&data->state, UVC_STATE_STREAM_RESTART); @@ -1409,13 +1386,13 @@ static union uvc_fmt_desc *uvc_new_fmt_desc(const struct device *dev) static int uvc_add_vs_format_desc(const struct device *dev, struct uvc_format_descriptor **const format_desc, - const struct video_format_cap *const cap) + uint32_t fourcc) { const struct uvc_config *cfg = dev->config; __ASSERT_NO_MSG(format_desc != NULL); - if (cap->pixelformat == VIDEO_PIX_FMT_JPEG) { + if (fourcc == VIDEO_PIX_FMT_JPEG) { struct uvc_format_mjpeg_descriptor *desc; LOG_INF("Adding format descriptor #%u for MJPEG", @@ -1438,7 +1415,7 @@ static int uvc_add_vs_format_desc(const struct device *dev, struct uvc_format_uncomp_descriptor *desc; 
LOG_INF("Adding format descriptor #%u for '%s'", - cfg->desc->if1_hdr.bNumFormats + 1, VIDEO_FOURCC_TO_STR(cap->pixelformat)); + cfg->desc->if1_hdr.bNumFormats + 1, VIDEO_FOURCC_TO_STR(fourcc)); desc = &uvc_new_fmt_desc(dev)->fmt_uncomp; if (desc == NULL) { @@ -1449,8 +1426,8 @@ static int uvc_add_vs_format_desc(const struct device *dev, desc->bFormatIndex = cfg->desc->if1_hdr.bNumFormats + 1; desc->bLength = sizeof(*desc); desc->bDescriptorSubtype = UVC_VS_FORMAT_UNCOMPRESSED; - uvc_fourcc_to_guid(desc->guidFormat, cap->pixelformat); - desc->bBitsPerPixel = video_bits_per_pixel(cap->pixelformat); + uvc_fourcc_to_guid(desc->guidFormat, fourcc); + desc->bBitsPerPixel = video_bits_per_pixel(fourcc); desc->bDefaultFrameIndex = 1; cfg->desc->if1_hdr.bNumFormats++; cfg->desc->if1_hdr.wTotalLength += desc->bLength; @@ -1474,7 +1451,8 @@ static int uvc_compare_frmival_desc(const void *const a, const void *const b) } static void uvc_set_vs_bitrate_range(struct uvc_frame_discrete_descriptor *const desc, - const uint64_t frmival_nsec, struct video_format *const fmt) + const uint64_t frmival_nsec, + const struct video_format *const fmt) { uint32_t bitrate_min = sys_le32_to_cpu(desc->dwMinBitRate); uint32_t bitrate_max = sys_le32_to_cpu(desc->dwMaxBitRate); @@ -1501,7 +1479,7 @@ static void uvc_set_vs_bitrate_range(struct uvc_frame_discrete_descriptor *const static int uvc_add_vs_frame_interval(struct uvc_frame_discrete_descriptor *const desc, const struct video_frmival *const frmival, - struct video_format *const fmt) + const struct video_format *const fmt) { int i = desc->bFrameIntervalType; @@ -1521,24 +1499,19 @@ static int uvc_add_vs_frame_interval(struct uvc_frame_discrete_descriptor *const static int uvc_add_vs_frame_desc(const struct device *dev, struct uvc_format_descriptor *const format_desc, - const struct video_format_cap *const cap, const bool min) + const struct video_format *const fmt) { const struct uvc_config *cfg = dev->config; struct uvc_data *data = 
dev->data; struct uvc_frame_discrete_descriptor *desc; - uint16_t w = min ? cap->width_min : cap->width_max; - uint16_t h = min ? cap->height_min : cap->height_max; - uint16_t p = MAX(video_bits_per_pixel(cap->pixelformat), 8) * w / BITS_PER_BYTE; - struct video_format fmt = {.pixelformat = cap->pixelformat, - .width = w, .height = h, .pitch = p}; - struct video_frmival_enum fie = {.format = &fmt}; - uint32_t max_size = MAX(p, w) * h; + struct video_frmival_enum fie = {.format = fmt}; + uint32_t max_size = MAX(fmt->pitch, fmt->width) * fmt->height; __ASSERT_NO_MSG(data->video_dev != NULL); __ASSERT_NO_MSG(format_desc != NULL); LOG_INF("Adding frame descriptor #%u for %ux%u", - format_desc->bNumFrameDescriptors + 1, w, h); + format_desc->bNumFrameDescriptors + 1, fmt->width, fmt->height); desc = &uvc_new_fmt_desc(dev)->frm_disc; if (desc == NULL) { @@ -1548,8 +1521,8 @@ static int uvc_add_vs_frame_desc(const struct device *dev, desc->bLength = sizeof(*desc) - CONFIG_USBD_VIDEO_MAX_FRMIVAL * sizeof(uint32_t); desc->bDescriptorType = USB_DESC_CS_INTERFACE; desc->bFrameIndex = format_desc->bNumFrameDescriptors + 1; - desc->wWidth = sys_cpu_to_le16(w); - desc->wHeight = sys_cpu_to_le16(h); + desc->wWidth = sys_cpu_to_le16(fmt->width); + desc->wHeight = sys_cpu_to_le16(fmt->height); desc->dwMaxVideoFrameBufferSize = sys_cpu_to_le32(max_size); desc->bDescriptorSubtype = (format_desc->bDescriptorSubtype == UVC_VS_FORMAT_UNCOMPRESSED) ? 
UVC_VS_FRAME_UNCOMPRESSED : UVC_VS_FRAME_MJPEG; @@ -1561,12 +1534,12 @@ static int uvc_add_vs_frame_desc(const struct device *dev, switch (fie.type) { case VIDEO_FRMIVAL_TYPE_DISCRETE: LOG_DBG("Adding discrete frame interval %u", fie.index); - uvc_add_vs_frame_interval(desc, &fie.discrete, &fmt); + uvc_add_vs_frame_interval(desc, &fie.discrete, fmt); break; case VIDEO_FRMIVAL_TYPE_STEPWISE: LOG_DBG("Adding stepwise frame interval %u", fie.index); - uvc_add_vs_frame_interval(desc, &fie.stepwise.min, &fmt); - uvc_add_vs_frame_interval(desc, &fie.stepwise.max, &fmt); + uvc_add_vs_frame_interval(desc, &fie.stepwise.min, fmt); + uvc_add_vs_frame_interval(desc, &fie.stepwise.max, fmt); break; default: CODE_UNREACHABLE; @@ -1578,7 +1551,7 @@ static int uvc_add_vs_frame_desc(const struct device *dev, if (desc->bFrameIntervalType == 0) { struct video_frmival frmival = {.numerator = 1, .denominator = 30}; - uvc_add_vs_frame_interval(desc, &frmival, &fmt); + uvc_add_vs_frame_interval(desc, &frmival, fmt); } /* UVC requires the frame intervals to be sorted, but not Zephyr */ @@ -1620,10 +1593,6 @@ static int uvc_init(struct usbd_class_data *const c_data) const struct device *dev = usbd_class_get_private(c_data); const struct uvc_config *cfg = dev->config; struct uvc_data *data = dev->data; - struct uvc_format_descriptor *format_desc = NULL; - struct video_caps caps; - uint32_t prev_pixfmt = 0; - uint32_t mask = 0; int ret; __ASSERT_NO_MSG(data->video_dev != NULL); @@ -1633,9 +1602,41 @@ static int uvc_init(struct usbd_class_data *const c_data) return 0; } - cfg->desc->if0_hdr.baInterfaceNr[0] = cfg->desc->if1.bInterfaceNumber; + cfg->desc->if1_hdr.wTotalLength = sys_le16_to_cpu(cfg->desc->if1_hdr.wTotalLength); + cfg->desc->if1_hdr.wTotalLength += cfg->desc->if1_color.bLength; - /* Generating VideoControl descriptors (interface 0) */ + uvc_assign_desc(dev, &cfg->desc->if1_color, true, true); + uvc_assign_desc(dev, &cfg->desc->if1_ep_fs, true, false); + uvc_assign_desc(dev, 
&cfg->desc->if1_ep_hs, false, true); + + cfg->desc->if1_hdr.wTotalLength = sys_cpu_to_le16(cfg->desc->if1_hdr.wTotalLength); + + /* Generating the default probe message now that descriptors are complete */ + + ret = uvc_get_vs_probe_struct(dev, &data->default_probe, UVC_GET_CUR); + if (ret != 0) { + LOG_ERR("init: failed to query the default probe"); + return ret; + } + + atomic_set_bit(&data->state, UVC_STATE_INITIALIZED); + + return 0; +} + +/* UVC public API */ + +void uvc_set_video_dev(const struct device *const dev, const struct device *const video_dev) +{ + struct uvc_data *data = dev->data; + const struct uvc_config *cfg = dev->config; + uint32_t mask = 0; + + data->video_dev = video_dev; + + /* Generate VideoControl descriptors (interface 0) */ + + cfg->desc->if0_hdr.baInterfaceNr[0] = cfg->desc->if1.bInterfaceNumber; mask = uvc_get_mask(data->video_dev, uvc_control_map_ct, ARRAY_SIZE(uvc_control_map_ct)); cfg->desc->if0_ct.bmControls[0] = mask >> 0; @@ -1652,65 +1653,37 @@ static int uvc_init(struct usbd_class_data *const c_data) cfg->desc->if0_xu.bmControls[1] = mask >> 8; cfg->desc->if0_xu.bmControls[2] = mask >> 16; cfg->desc->if0_xu.bmControls[3] = mask >> 24; +} - /* Generating VideoStreaming descriptors (interface 1) */ - - caps.type = VIDEO_BUF_TYPE_OUTPUT; +int uvc_add_format(const struct device *const dev, const struct video_format *const fmt) +{ + struct uvc_data *data = dev->data; + const struct uvc_config *cfg = dev->config; + int ret; - ret = video_get_caps(data->video_dev, &caps); - if (ret != 0) { - LOG_ERR("Could not load %s video format list", data->video_dev->name); - return ret; + if (data->video_dev == NULL) { + LOG_ERR("Video device not yet configured into UVC"); + return -EINVAL; } - cfg->desc->if1_hdr.wTotalLength = sys_le16_to_cpu(cfg->desc->if1_hdr.wTotalLength); - - for (int i = 0; caps.format_caps[i].pixelformat != 0; i++) { - const struct video_format_cap *cap = &caps.format_caps[i]; - - if (prev_pixfmt != cap->pixelformat) { - 
if (prev_pixfmt != 0) { - cfg->desc->if1_hdr.wTotalLength += cfg->desc->if1_color.bLength; - uvc_assign_desc(dev, &cfg->desc->if1_color, true, true); - } - - ret = uvc_add_vs_format_desc(dev, &format_desc, cap); - if (ret != 0) { - return ret; - } + if (data->last_pix_fmt != fmt->pixelformat) { + if (data->last_pix_fmt != 0) { + cfg->desc->if1_hdr.wTotalLength += cfg->desc->if1_color.bLength; + uvc_assign_desc(dev, &cfg->desc->if1_color, true, true); } - ret = uvc_add_vs_frame_desc(dev, format_desc, cap, true); + ret = uvc_add_vs_format_desc(dev, &data->last_format_desc, fmt->pixelformat); if (ret != 0) { return ret; } - - if (cap->width_min != cap->width_max || cap->height_min != cap->height_max) { - ret = uvc_add_vs_frame_desc(dev, format_desc, cap, false); - if (ret != 0) { - return ret; - } - } - - prev_pixfmt = cap->pixelformat; } - cfg->desc->if1_hdr.wTotalLength += cfg->desc->if1_color.bLength; - uvc_assign_desc(dev, &cfg->desc->if1_color, true, true); - uvc_assign_desc(dev, &cfg->desc->if1_ep_fs, true, false); - uvc_assign_desc(dev, &cfg->desc->if1_ep_hs, false, true); - - cfg->desc->if1_hdr.wTotalLength = sys_cpu_to_le16(cfg->desc->if1_hdr.wTotalLength); - - /* Generating the default probe message now that descriptors are complete */ - - ret = uvc_get_vs_probe_struct(dev, &data->default_probe, UVC_GET_CUR); + ret = uvc_add_vs_frame_desc(dev, data->last_format_desc, fmt); if (ret != 0) { - LOG_ERR("init: failed to query the default probe"); return ret; } - atomic_set_bit(&data->state, UVC_STATE_INITIALIZED); + data->last_pix_fmt = fmt->pixelformat; return 0; } @@ -2055,26 +2028,27 @@ static int uvc_dequeue(const struct device *dev, struct video_buffer **const vbu static int uvc_get_format(const struct device *dev, struct video_format *const fmt) { struct uvc_data *data = dev->data; - struct video_format tmp_fmt = {0}; - int ret; - - __ASSERT_NO_MSG(data->video_dev != NULL); if (!atomic_test_bit(&data->state, UVC_STATE_ENABLED) || 
!atomic_test_bit(&data->state, UVC_STATE_STREAM_READY)) { return -EAGAIN; } - LOG_DBG("Querying the format from %s", data->video_dev->name); + *fmt = data->video_fmt; - tmp_fmt.type = VIDEO_BUF_TYPE_OUTPUT; + return 0; +} - ret = video_get_format(data->video_dev, &tmp_fmt); - if (ret != 0) { - return ret; +static int uvc_get_frmival(const struct device *dev, struct video_frmival *const frmival) +{ + struct uvc_data *data = dev->data; + + if (!atomic_test_bit(&data->state, UVC_STATE_ENABLED) || + !atomic_test_bit(&data->state, UVC_STATE_STREAM_READY)) { + return -EAGAIN; } - *fmt = tmp_fmt; + *frmival = data->video_frmival; return 0; } @@ -2107,6 +2081,7 @@ static int uvc_set_signal(const struct device *dev, struct k_poll_signal *const static DEVICE_API(video, uvc_video_api) = { .get_format = uvc_get_format, + .get_frmival = uvc_get_frmival, .set_stream = uvc_set_stream, .enqueue = uvc_enqueue, .dequeue = uvc_dequeue,