From 1ef7752d64cbe9af2f27cc65aba3a2ca3831c128 Mon Sep 17 00:00:00 2001
From: Jorge Ramirez-Ortiz
Date: Wed, 20 Sep 2017 18:55:40 -0700
Subject: libavcodec: v4l2: add support for v4l2 mem2mem codecs

This patchset enhances Alexis Ballier's original patch and validates it
using Qualcomm's Venus hardware (driver recently landed upstream [1]).

This has been tested on Qualcomm's DragonBoard 410c and 820c.
Configure/make scripts have been validated on Ubuntu 10.04 and 16.04.

Tested decoders:
   - h264
   - h263
   - mpeg4
   - vp8
   - vp9
   - hevc

Tested encoders:
   - h264
   - h263
   - mpeg4

Tested transcoding (concurrent encoding/decoding)

Some of the changes introduced:
   - v4l2: code cleanup and abstractions added
   - v4l2: follow the new encode/decode api.
   - v4l2: fix display size for NV12 output pool.
   - v4l2: handle EOS (EPIPE and draining)
   - v4l2: vp8 and mpeg4 decoding and encoding.
   - v4l2: hevc and vp9 support.
   - v4l2: generate EOF on dequeue errors.
   - v4l2: h264_mp4toannexb filtering.
   - v4l2: fixed make install and fate issues.
   - v4l2: codecs enabled/disabled depending on pixfmt defined
   - v4l2: pass timebase/framerate to the context
   - v4l2: runtime decoder reconfiguration.
   - v4l2: add more frame information
   - v4l2: free hardware resources on last reference being released
   - v4l2: encoding: disable b-frames for upstreaming (patch required)

[1] https://lwn.net/Articles/697956/

System Level view:
   v4l2_m2m_enc/dec --> v4l2_m2m --> v4l2_context --> v4l2_buffers

Reviewed-by: Jorge Ramirez
Reviewed-by: Alexis Ballier
Tested-by: Jorge Ramirez
Signed-off-by: wm4
---
 libavcodec/v4l2_context.c | 667 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 667 insertions(+)
 create mode 100644 libavcodec/v4l2_context.c

diff --git a/libavcodec/v4l2_context.c b/libavcodec/v4l2_context.c
new file mode 100644
index 0000000000..d675c55f2b
--- /dev/null
+++ b/libavcodec/v4l2_context.c
@@ -0,0 +1,667 @@
+/*
+ * V4L2 context helper functions.
+ *
+ * Copyright (C) 2017 Alexis Ballier
+ * Copyright (C) 2017 Jorge Ramirez
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include <linux/videodev2.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <string.h>
+#include <unistd.h>
+#include <poll.h>
+#include "libavcodec/avcodec.h"
+#include "libavcodec/internal.h"
+#include "v4l2_buffers.h"
+#include "v4l2_fmt.h"
+#include "v4l2_m2m.h"
+
+struct v4l2_format_update {
+    uint32_t v4l2_fmt;
+    int update_v4l2;
+
+    enum AVPixelFormat av_fmt;
+    int update_avfmt;
+};
+
+static inline V4L2m2mContext *ctx_to_m2mctx(V4L2Context *ctx)
+{
+    return V4L2_TYPE_IS_OUTPUT(ctx->type) ?
+            container_of(ctx, V4L2m2mContext, output) :
+            container_of(ctx, V4L2m2mContext, capture);
+}
+
+static inline AVCodecContext *logger(V4L2Context *ctx)
+{
+    return ctx_to_m2mctx(ctx)->avctx;
+}
+
+static inline unsigned int v4l2_get_width(struct v4l2_format *fmt)
+{
+    return V4L2_TYPE_IS_MULTIPLANAR(fmt->type) ? fmt->fmt.pix_mp.width : fmt->fmt.pix.width;
+}
+
+static inline unsigned int v4l2_get_height(struct v4l2_format *fmt)
+{
+    return V4L2_TYPE_IS_MULTIPLANAR(fmt->type) ? fmt->fmt.pix_mp.height : fmt->fmt.pix.height;
+}
+
+static inline unsigned int v4l2_resolution_changed(V4L2Context *ctx, struct v4l2_format *fmt2)
+{
+    struct v4l2_format *fmt1 = &ctx->format;
+    int ret = V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ?
+        fmt1->fmt.pix_mp.width != fmt2->fmt.pix_mp.width ||
+        fmt1->fmt.pix_mp.height != fmt2->fmt.pix_mp.height
+        :
+        fmt1->fmt.pix.width != fmt2->fmt.pix.width ||
+        fmt1->fmt.pix.height != fmt2->fmt.pix.height;
+
+    if (ret)
+        av_log(logger(ctx), AV_LOG_DEBUG, "%s changed (%dx%d) -> (%dx%d)\n",
+            ctx->name,
+            v4l2_get_width(fmt1), v4l2_get_height(fmt1),
+            v4l2_get_width(fmt2), v4l2_get_height(fmt2));
+
+    return ret;
+}
+
+static inline int v4l2_type_supported(V4L2Context *ctx)
+{
+    return ctx->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE ||
+        ctx->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE ||
+        ctx->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
+        ctx->type == V4L2_BUF_TYPE_VIDEO_OUTPUT;
+}
+
+static inline void v4l2_save_to_context(V4L2Context* ctx, struct v4l2_format_update *fmt)
+{
+    ctx->format.type = ctx->type;
+
+    if (fmt->update_avfmt)
+        ctx->av_pix_fmt = fmt->av_fmt;
+
+    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
+        /* update the sizes to handle the reconfiguration of the capture stream at runtime */
+        ctx->format.fmt.pix_mp.height = ctx->height;
+        ctx->format.fmt.pix_mp.width = ctx->width;
+        if (fmt->update_v4l2)
+            ctx->format.fmt.pix_mp.pixelformat = fmt->v4l2_fmt;
+    } else {
+        ctx->format.fmt.pix.height = ctx->height;
+        ctx->format.fmt.pix.width = ctx->width;
+        if (fmt->update_v4l2)
+            ctx->format.fmt.pix.pixelformat = fmt->v4l2_fmt;
+    }
+}
+
+/**
+ * returns 1 if reinit was successful, negative if it failed
+ * returns 0 if reinit was not executed
+ */
+static int v4l2_handle_event(V4L2Context *ctx)
+{
+    V4L2m2mContext *s = ctx_to_m2mctx(ctx);
+    struct v4l2_format cap_fmt = s->capture.format;
+    struct v4l2_format out_fmt = s->output.format;
+    struct v4l2_event evt = { 0 };
+    int full_reinit, reinit, ret;
+
+    ret = ioctl(s->fd, VIDIOC_DQEVENT, &evt);
+    if (ret < 0) {
+        av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_DQEVENT\n", ctx->name);
+        return 0;
+    }
+
+    if (evt.type != V4L2_EVENT_SOURCE_CHANGE)
+        return 0;
+
+    ret = ioctl(s->fd, VIDIOC_G_FMT, &out_fmt);
+    if (ret) {
+        av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT\n", s->output.name);
+        return 0;
+    }
+
+    ret = ioctl(s->fd, VIDIOC_G_FMT, &cap_fmt);
+    if (ret) {
+        av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT\n", s->capture.name);
+        return 0;
+    }
+
+    full_reinit = v4l2_resolution_changed(&s->output, &out_fmt);
+    if (full_reinit) {
+        s->output.height = v4l2_get_height(&out_fmt);
+        s->output.width = v4l2_get_width(&out_fmt);
+    }
+
+    reinit = v4l2_resolution_changed(&s->capture, &cap_fmt);
+    if (reinit) {
+        s->capture.height = v4l2_get_height(&cap_fmt);
+        s->capture.width = v4l2_get_width(&cap_fmt);
+    }
+
+    if (full_reinit || reinit)
+        s->reinit = 1;
+
+    if (full_reinit) {
+        ret = ff_v4l2_m2m_codec_full_reinit(s);
+        if (ret) {
+            av_log(logger(ctx), AV_LOG_ERROR, "v4l2_m2m_codec_full_reinit\n");
+            return -EINVAL;
+        }
+        goto reinit_run;
+    }
+
+    if (reinit) {
+        ret = ff_set_dimensions(s->avctx, s->capture.width, s->capture.height);
+        if (ret < 0)
+            av_log(logger(ctx), AV_LOG_WARNING, "update avcodec height and width\n");
+
+        ret = ff_v4l2_m2m_codec_reinit(s);
+        if (ret) {
+            av_log(logger(ctx), AV_LOG_ERROR, "v4l2_m2m_codec_reinit\n");
+            return -EINVAL;
+        }
+        goto reinit_run;
+    }
+
+    /* dummy event received */
+    return 0;
+
+    /* reinit executed */
+reinit_run:
+    return 1;
+}
+
+static int v4l2_stop_decode(V4L2Context *ctx)
+{
+    struct v4l2_decoder_cmd cmd = {
+        .cmd = V4L2_DEC_CMD_STOP,
+    };
+    int ret;
+
+    ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_DECODER_CMD, &cmd);
+    if (ret) {
+        /* DECODER_CMD is optional */
+        if (errno == ENOTTY)
+            return ff_v4l2_context_set_status(ctx, VIDIOC_STREAMOFF);
+    }
+
+    return 0;
+}
+
+static int v4l2_stop_encode(V4L2Context *ctx)
+{
+    struct v4l2_encoder_cmd cmd = {
+        .cmd = V4L2_ENC_CMD_STOP,
+    };
+    int ret;
+
+    ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENCODER_CMD, &cmd);
+    if (ret) {
+        /* ENCODER_CMD is optional */
+        if (errno == ENOTTY)
+            return ff_v4l2_context_set_status(ctx, VIDIOC_STREAMOFF);
+    }
+
+    return 0;
+}
+
+static V4L2Buffer* v4l2_dequeue_v4l2buf(V4L2Context *ctx, int timeout)
+{
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
+    struct v4l2_buffer buf = { 0 };
+    V4L2Buffer* avbuf = NULL;
+    struct pollfd pfd = {
+        .events = POLLIN | POLLRDNORM | POLLPRI | POLLOUT | POLLWRNORM, /* default blocking capture */
+        .fd = ctx_to_m2mctx(ctx)->fd,
+    };
+    int ret;
+
+    if (V4L2_TYPE_IS_OUTPUT(ctx->type))
+        pfd.events = POLLOUT | POLLWRNORM;
+
+    for (;;) {
+        ret = poll(&pfd, 1, timeout);
+        if (ret > 0)
+            break;
+        if (errno == EINTR)
+            continue;
+
+        /* timeout is being used to indicate last valid buffer when draining */
+        if (ctx_to_m2mctx(ctx)->draining)
+            ctx->done = 1;
+
+        return NULL;
+    }
+
+    /* 0. handle errors */
+    if (pfd.revents & POLLERR) {
+        av_log(logger(ctx), AV_LOG_WARNING, "%s POLLERR\n", ctx->name);
+        return NULL;
+    }
+
+    /* 1. handle resolution changes */
+    if (pfd.revents & POLLPRI) {
+        ret = v4l2_handle_event(ctx);
+        if (ret < 0) {
+            /* if re-init failed, abort */
+            ctx->done = EINVAL;
+            return NULL;
+        }
+        if (ret) {
+            /* if re-init was successful, drop the buffer (if there was one)
+             * since we had to reconfigure capture (unmap all buffers)
+             */
+            return NULL;
+        }
+    }
+
+    /* 2. dequeue the buffer */
+    if (pfd.revents & (POLLIN | POLLRDNORM | POLLOUT | POLLWRNORM)) {
+
+        if (!V4L2_TYPE_IS_OUTPUT(ctx->type)) {
+            /* there is a capture buffer ready */
+            if (pfd.revents & (POLLIN | POLLRDNORM))
+                goto dequeue;
+
+            /* the driver is ready to accept more input; instead of waiting for the capture
+             * buffer to complete we return NULL so input can proceed (we are single threaded)
+             */
+            if (pfd.revents & (POLLOUT | POLLWRNORM))
+                return NULL;
+        }
+
+dequeue:
+        memset(&buf, 0, sizeof(buf));
+        buf.memory = V4L2_MEMORY_MMAP;
+        buf.type = ctx->type;
+        if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
+            memset(planes, 0, sizeof(planes));
+            buf.length = VIDEO_MAX_PLANES;
+            buf.m.planes = planes;
+        }
+
+        ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_DQBUF, &buf);
+        if (ret) {
+            if (errno != EAGAIN) {
+                ctx->done = errno;
+                if (errno != EPIPE)
+                    av_log(logger(ctx), AV_LOG_DEBUG, "%s VIDIOC_DQBUF, errno (%s)\n",
+                        ctx->name, av_err2str(AVERROR(errno)));
+            }
+        } else {
+            avbuf = &ctx->buffers[buf.index];
+            avbuf->status = V4L2BUF_AVAILABLE;
+            avbuf->buf = buf;
+            if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
+                memcpy(avbuf->planes, planes, sizeof(planes));
+                avbuf->buf.m.planes = avbuf->planes;
+            }
+        }
+    }
+
+    return avbuf;
+}
+
+static V4L2Buffer* v4l2_getfree_v4l2buf(V4L2Context *ctx)
+{
+    int timeout = 0; /* return when no more buffers to dequeue */
+    int i;
+
+    /* get back as many output buffers as possible */
+    if (V4L2_TYPE_IS_OUTPUT(ctx->type)) {
+        do {
+        } while (v4l2_dequeue_v4l2buf(ctx, timeout));
+    }
+
+    for (i = 0; i < ctx->num_buffers; i++) {
+        if (ctx->buffers[i].status == V4L2BUF_AVAILABLE)
+            return &ctx->buffers[i];
+    }
+
+    return NULL;
+}
+
+static int v4l2_release_buffers(V4L2Context* ctx)
+{
+    struct v4l2_requestbuffers req = {
+        .memory = V4L2_MEMORY_MMAP,
+        .type = ctx->type,
+        .count = 0, /* 0 -> unmaps buffers from the driver */
+    };
+    int i, j;
+
+    for (i = 0; i < ctx->num_buffers; i++) {
+        V4L2Buffer *buffer = &ctx->buffers[i];
+
+        for (j = 0; j < buffer->num_planes; j++) {
+            struct V4L2Plane_info *p = &buffer->plane_info[j];
+            if (p->mm_addr && p->length)
+                if (munmap(p->mm_addr, p->length) < 0)
+                    av_log(logger(ctx), AV_LOG_ERROR, "%s unmap plane (%s))\n", ctx->name, av_err2str(AVERROR(errno)));
+        }
+    }
+
+    return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_REQBUFS, &req);
+}
+
+static inline int v4l2_try_raw_format(V4L2Context* ctx, enum AVPixelFormat pixfmt)
+{
+    struct v4l2_format *fmt = &ctx->format;
+    uint32_t v4l2_fmt;
+    int ret;
+
+    v4l2_fmt = ff_v4l2_format_avfmt_to_v4l2(pixfmt);
+    if (!v4l2_fmt)
+        return AVERROR(EINVAL);
+
+    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type))
+        fmt->fmt.pix_mp.pixelformat = v4l2_fmt;
+    else
+        fmt->fmt.pix.pixelformat = v4l2_fmt;
+
+    fmt->type = ctx->type;
+
+    ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_TRY_FMT, fmt);
+    if (ret)
+        return AVERROR(EINVAL);
+
+    return 0;
+}
+
+static int v4l2_get_raw_format(V4L2Context* ctx, enum AVPixelFormat *p)
+{
+    enum AVPixelFormat pixfmt = ctx->av_pix_fmt;
+    struct v4l2_fmtdesc fdesc;
+    int ret;
+
+    memset(&fdesc, 0, sizeof(fdesc));
+    fdesc.type = ctx->type;
+
+    if (pixfmt != AV_PIX_FMT_NONE) {
+        ret = v4l2_try_raw_format(ctx, pixfmt);
+        if (ret)
+            pixfmt = AV_PIX_FMT_NONE;
+        else
+            return 0;
+    }
+
+    for (;;) {
+        ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENUM_FMT, &fdesc);
+        if (ret)
+            return AVERROR(EINVAL);
+
+        pixfmt = ff_v4l2_format_v4l2_to_avfmt(fdesc.pixelformat, AV_CODEC_ID_RAWVIDEO);
+        ret = v4l2_try_raw_format(ctx, pixfmt);
+        if (ret) {
+            fdesc.index++;
+            continue;
+        }
+
+        *p = pixfmt;
+
+        return 0;
+    }
+
+    return AVERROR(EINVAL);
+}
+
+static int v4l2_get_coded_format(V4L2Context* ctx, uint32_t *p)
+{
+    struct v4l2_fmtdesc fdesc;
+    uint32_t v4l2_fmt;
+    int ret;
+
+    /* translate to a valid v4l2 format */
+    v4l2_fmt = ff_v4l2_format_avcodec_to_v4l2(ctx->av_codec_id);
+    if (!v4l2_fmt)
+        return AVERROR(EINVAL);
+
+    /* check if the driver supports this format */
+    memset(&fdesc, 0, sizeof(fdesc));
+    fdesc.type = ctx->type;
+
+    for (;;) {
+        ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENUM_FMT, &fdesc);
+        if (ret)
+            return AVERROR(EINVAL);
+
+        if (fdesc.pixelformat == v4l2_fmt)
+            break;
+
+        fdesc.index++;
+    }
+
+    *p = v4l2_fmt;
+
+    return 0;
+}
+
+ /*****************************************************************************
+  *
+  *             V4L2 Context Interface
+  *
+  *****************************************************************************/
+
+int ff_v4l2_context_set_status(V4L2Context* ctx, int cmd)
+{
+    int type = ctx->type;
+    int ret;
+
+    ret = ioctl(ctx_to_m2mctx(ctx)->fd, cmd, &type);
+    if (ret < 0)
+        return AVERROR(errno);
+
+    ctx->streamon = (cmd == VIDIOC_STREAMON);
+
+    return 0;
+}
+
+int ff_v4l2_context_enqueue_frame(V4L2Context* ctx, const AVFrame* frame)
+{
+    V4L2m2mContext *s = ctx_to_m2mctx(ctx);
+    V4L2Buffer* avbuf;
+    int ret;
+
+    if (!frame) {
+        ret = v4l2_stop_encode(ctx);
+        if (ret)
+            av_log(logger(ctx), AV_LOG_ERROR, "%s stop_encode\n", ctx->name);
+        s->draining = 1;
+        return 0;
+    }
+
+    avbuf = v4l2_getfree_v4l2buf(ctx);
+    if (!avbuf)
+        return AVERROR(ENOMEM);
+
+    ret = ff_v4l2_buffer_avframe_to_buf(frame, avbuf);
+    if (ret)
+        return ret;
+
+    return ff_v4l2_buffer_enqueue(avbuf);
+}
+
+int ff_v4l2_context_enqueue_packet(V4L2Context* ctx, const AVPacket* pkt)
+{
+    V4L2m2mContext *s = ctx_to_m2mctx(ctx);
+    V4L2Buffer* avbuf;
+    int ret;
+
+    if (!pkt->size) {
+        ret = v4l2_stop_decode(ctx);
+        if (ret)
+            av_log(logger(ctx), AV_LOG_ERROR, "%s stop_decode\n", ctx->name);
+        s->draining = 1;
+        return 0;
+    }
+
+    avbuf = v4l2_getfree_v4l2buf(ctx);
+    if (!avbuf)
+        return AVERROR(ENOMEM);
+
+    ret = ff_v4l2_buffer_avpkt_to_buf(pkt, avbuf);
+    if (ret)
+        return ret;
+
+    return ff_v4l2_buffer_enqueue(avbuf);
+}
+
+int ff_v4l2_context_dequeue_frame(V4L2Context* ctx, AVFrame* frame)
+{
+    V4L2Buffer* avbuf = NULL;
+
+    /* if we are draining, we are no longer inputting data, therefore enable a
+     * timeout so we can dequeue and flag the last valid buffer.
+     *
+     * blocks until:
+     *  1. decoded frame available
+     *  2. an input buffer is ready to be dequeued
+     */
+    avbuf = v4l2_dequeue_v4l2buf(ctx, ctx_to_m2mctx(ctx)->draining ? 200 : -1);
+    if (!avbuf) {
+        if (ctx->done)
+            return AVERROR_EOF;
+
+        return AVERROR(EAGAIN);
+    }
+
+    return ff_v4l2_buffer_buf_to_avframe(frame, avbuf);
+}
+
+int ff_v4l2_context_dequeue_packet(V4L2Context* ctx, AVPacket* pkt)
+{
+    V4L2Buffer* avbuf = NULL;
+
+    /* if we are draining, we are no longer inputting data, therefore enable a
+     * timeout so we can dequeue and flag the last valid buffer.
+     *
+     * blocks until:
+     *  1. encoded packet available
+     *  2. an input buffer is ready to be dequeued
+     */
+    avbuf = v4l2_dequeue_v4l2buf(ctx, ctx_to_m2mctx(ctx)->draining ? 200 : -1);
+    if (!avbuf) {
+        if (ctx->done)
+            return AVERROR_EOF;
+
+        return AVERROR(EAGAIN);
+    }
+
+    return ff_v4l2_buffer_buf_to_avpkt(pkt, avbuf);
+}
+
+int ff_v4l2_context_get_format(V4L2Context* ctx)
+{
+    struct v4l2_format_update fmt = { 0 };
+    int ret;
+
+    if (ctx->av_codec_id == AV_CODEC_ID_RAWVIDEO) {
+        ret = v4l2_get_raw_format(ctx, &fmt.av_fmt);
+        if (ret)
+            return ret;
+
+        fmt.update_avfmt = 1;
+        v4l2_save_to_context(ctx, &fmt);
+
+        /* format has been tried already */
+        return ret;
+    }
+
+    ret = v4l2_get_coded_format(ctx, &fmt.v4l2_fmt);
+    if (ret)
+        return ret;
+
+    fmt.update_v4l2 = 1;
+    v4l2_save_to_context(ctx, &fmt);
+
+    return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_TRY_FMT, &ctx->format);
+}
+
+int ff_v4l2_context_set_format(V4L2Context* ctx)
+{
+    return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_S_FMT, &ctx->format);
+}
+
+void ff_v4l2_context_release(V4L2Context* ctx)
+{
+    int ret;
+
+    if (!ctx->buffers)
+        return;
+
+    ret = v4l2_release_buffers(ctx);
+    if (ret)
+        av_log(logger(ctx), AV_LOG_WARNING, "V4L2 failed to unmap the %s buffers\n", ctx->name);
+
+    av_free(ctx->buffers);
+    ctx->buffers = NULL;
+}
+
+int ff_v4l2_context_init(V4L2Context* ctx)
+{
+    V4L2m2mContext *s = ctx_to_m2mctx(ctx);
+    struct v4l2_requestbuffers req;
+    int ret, i;
+
+    if (!v4l2_type_supported(ctx)) {
+        av_log(logger(ctx), AV_LOG_ERROR, "type %i not supported\n", ctx->type);
+        return AVERROR_PATCHWELCOME;
+    }
+
+    ret = ioctl(s->fd, VIDIOC_G_FMT, &ctx->format);
+    if (ret)
+        av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT failed\n", ctx->name);
+
+    memset(&req, 0, sizeof(req));
+    req.count = ctx->num_buffers;
+    req.memory = V4L2_MEMORY_MMAP;
+    req.type = ctx->type;
+    ret = ioctl(s->fd, VIDIOC_REQBUFS, &req);
+    if (ret < 0)
+        return AVERROR(errno);
+
+    ctx->num_buffers = req.count;
+    ctx->buffers = av_mallocz(ctx->num_buffers * sizeof(V4L2Buffer));
+    if (!ctx->buffers) {
+        av_log(logger(ctx), AV_LOG_ERROR, "%s malloc enomem\n", ctx->name);
+        return AVERROR(ENOMEM);
+    }
+
+    for (i = 0; i < req.count; i++) {
+        ctx->buffers[i].context = ctx;
+        ret = ff_v4l2_buffer_initialize(&ctx->buffers[i], i);
+        if (ret < 0) {
+            av_log(logger(ctx), AV_LOG_ERROR, "%s buffer initialization (%s)\n", ctx->name, av_err2str(ret));
+            av_free(ctx->buffers);
+            return ret;
+        }
+    }
+
+    av_log(logger(ctx), AV_LOG_DEBUG, "%s: %s %02d buffers initialized: %04ux%04u, sizeimage %08u, bytesperline %08u\n", ctx->name,
+        V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? av_fourcc2str(ctx->format.fmt.pix_mp.pixelformat) : av_fourcc2str(ctx->format.fmt.pix.pixelformat),
+        req.count,
+        v4l2_get_width(&ctx->format),
+        v4l2_get_height(&ctx->format),
+        V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? ctx->format.fmt.pix_mp.plane_fmt[0].sizeimage : ctx->format.fmt.pix.sizeimage,
+        V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? ctx->format.fmt.pix_mp.plane_fmt[0].bytesperline : ctx->format.fmt.pix.bytesperline);
+
+    return 0;
+}
--
cgit v1.2.3
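
For reference, below is a minimal usage sketch (not part of the patch) of how a decoder
wrapper -- v4l2_m2m_dec in the system-level view above -- might drive the V4L2 Context
Interface. The helper name v4l2_m2m_decode_step and its control flow are assumptions;
only the ff_v4l2_context_* calls and the V4L2m2mContext/V4L2Context fields it uses
(output, capture, streamon) come from the code in this patch, and the headers are the
ones this patchset introduces.

/* Illustrative sketch only; assumes v4l2_m2m.h from this patchset is available. */
static int v4l2_m2m_decode_step(V4L2m2mContext *s, const AVPacket *pkt, AVFrame *frame)
{
    int ret;

    /* queue the coded data on the output (input) side of the m2m device;
     * an empty packet triggers the drain sequence (V4L2_DEC_CMD_STOP) */
    ret = ff_v4l2_context_enqueue_packet(&s->output, pkt);
    if (ret < 0)
        return ret;

    /* make sure the capture side is streaming before dequeuing frames */
    if (!s->capture.streamon) {
        ret = ff_v4l2_context_set_status(&s->capture, VIDIOC_STREAMON);
        if (ret < 0)
            return ret;
    }

    /* AVERROR(EAGAIN) means no decoded frame yet (feed more input);
     * AVERROR_EOF means draining completed and the last buffer was flagged */
    return ff_v4l2_context_dequeue_frame(&s->capture, frame);
}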