From 3d7b3996d287014697d9080ef20285c6e804e09e Mon Sep 17 00:00:00 2001 From: wm4 Date: Thu, 12 Jan 2017 13:27:57 +0100 Subject: vaapi: explicitly reject 10 bit surfaces outside of copy mode Rendering support in Mesa probably doesn't exist yet. In theory it might be possible to use VPP to convert the surfaces to 8 bit (like we do it with dxva2/d3d11va as ANGLE doesn't support rendering 10 bit surfaces either), but that too would require explicit mechanisms. This can't be implemented either until I have a GPU with actual support. --- video/decode/vaapi.c | 7 +++++++ 1 file changed, 7 insertions(+) (limited to 'video/decode') diff --git a/video/decode/vaapi.c b/video/decode/vaapi.c index aeadeb488e..e657663bbb 100644 --- a/video/decode/vaapi.c +++ b/video/decode/vaapi.c @@ -55,6 +55,13 @@ static int init_decoder(struct lavc_ctx *ctx, int w, int h) assert(!ctx->avctx->hw_frames_ctx); + // If we use direct rendering, disallow 10 bit - it's probably not + // implemented yet, and our downstream components can't deal with it. + if (!p->own_ctx && required_sw_format != AV_PIX_FMT_NV12) { + MP_WARN(ctx, "10 bit surfaces are currently not supported.\n"); + return -1; + } + if (p->frames_ref) { AVHWFramesContext *fctx = (void *)p->frames_ref->data; if (fctx->width != w || fctx->height != h || -- cgit v1.2.3