From 737298b4f7b60bc2b755fe8fa9135f50a496d94d Mon Sep 17 00:00:00 2001
From: Philip Langdale
Date: Thu, 4 Aug 2022 20:24:48 -0700
Subject: lavc/vaapi_decode: add missing flag when picking best pixel format

vaapi_decode_find_best_format currently does not set the
VA_SURFACE_ATTRIB_SETTABLE flag on the pixel format attribute that it
returns.

Without this flag, the attribute will be ignored by vaCreateSurfaces,
meaning that the driver's default logic for picking a pixel format will
kick in.

So far, this hasn't produced visible problems, but when trying to
decode 4:4:4 content, at least on Intel, the driver will pick the 444P
planar format, even though the decoder can only return the AYUV packed
format.

The hwcontext_vaapi code that sets surface attributes when picking
formats does not have this bug.

Applications may use their own logic for finding the best format, and
so may not hit this bug. eg: mpv is unaffected.
---
 libavcodec/vaapi_decode.c | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/libavcodec/vaapi_decode.c b/libavcodec/vaapi_decode.c
index db48efc3ed..bc2d3ed803 100644
--- a/libavcodec/vaapi_decode.c
+++ b/libavcodec/vaapi_decode.c
@@ -358,6 +358,8 @@ static int vaapi_decode_find_best_format(AVCodecContext *avctx,
 
     ctx->pixel_format_attribute = (VASurfaceAttrib) {
         .type          = VASurfaceAttribPixelFormat,
+        .flags         = VA_SURFACE_ATTRIB_SETTABLE,
+        .value.type    = VAGenericValueTypeInteger,
         .value.value.i = best_fourcc,
     };
 
--
cgit v1.2.3
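
For reference, a minimal sketch (not part of the patch) of how a SETTABLE
pixel format attribute is consumed by vaCreateSurfaces; the display handle,
dimensions, and the choice of AYUV/YUV444 below are illustrative placeholders:

#include <va/va.h>

/* Request a 4:4:4 surface and pin the exact fourcc via a surface attribute.
 * Without VA_SURFACE_ATTRIB_SETTABLE in .flags, the driver treats the
 * attribute as get-only and falls back to its default fourcc choice
 * (e.g. 444P instead of AYUV). */
VAStatus create_ayuv_surface(VADisplay dpy, unsigned int width,
                             unsigned int height, VASurfaceID *surface)
{
    VASurfaceAttrib attr = {
        .type          = VASurfaceAttribPixelFormat,
        .flags         = VA_SURFACE_ATTRIB_SETTABLE,
        .value.type    = VAGenericValueTypeInteger,
        .value.value.i = VA_FOURCC_AYUV,
    };

    /* VA_RT_FORMAT_YUV444 selects the render-target class; the attribute
     * narrows it to a specific packed fourcc. */
    return vaCreateSurfaces(dpy, VA_RT_FORMAT_YUV444, width, height,
                            surface, 1, &attr, 1);
}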