 filters/f_hwtransfer.c              | 78
 video/hwdec.h                       |  4
 video/out/hwdec/dmabuf_interop_wl.c |  3
 video/out/hwdec/hwdec_vaapi.c       | 23
 4 files changed, 84 insertions(+), 24 deletions(-)
diff --git a/filters/f_hwtransfer.c b/filters/f_hwtransfer.c
index a8f01e4054..9e1e06e188 100644
--- a/filters/f_hwtransfer.c
+++ b/filters/f_hwtransfer.c
@@ -260,7 +260,7 @@ static bool vo_supports(struct mp_hwdec_ctx *ctx, int hw_fmt, int sw_fmt)
return false;
}
-static bool probe_formats(struct mp_filter *f, int hw_imgfmt)
+static bool probe_formats(struct mp_filter *f, int hw_imgfmt, bool use_conversion_filter)
{
struct priv *p = f->priv;
@@ -276,6 +276,7 @@ static bool probe_formats(struct mp_filter *f, int hw_imgfmt)
struct mp_hwdec_ctx *ctx = NULL;
AVHWFramesConstraints *cstr = NULL;
+ AVHWFramesConstraints *conversion_cstr = NULL;
struct hwdec_imgfmt_request params = {
.imgfmt = hw_imgfmt,
@@ -326,6 +327,16 @@ static bool probe_formats(struct mp_filter *f, int hw_imgfmt)
}
}
+ if (use_conversion_filter) {
+ // We will not be doing a transfer, so do not probe for transfer
+ // formats. This can produce incorrect results. Instead, we need to
+ // obtain the constraints for a conversion configuration.
+
+ conversion_cstr =
+ av_hwdevice_get_hwframe_constraints(ctx->av_device_ref,
+ ctx->conversion_config);
+ }
+
for (int n = 0; cstr->valid_sw_formats &&
cstr->valid_sw_formats[n] != AV_PIX_FMT_NONE; n++)
{
@@ -345,19 +356,10 @@ static bool probe_formats(struct mp_filter *f, int hw_imgfmt)
continue;
}
- // Creates an AVHWFramesContexts with the given parameters.
- AVBufferRef *frames = NULL;
- if (!mp_update_av_hw_frames_pool(&frames, ctx->av_device_ref,
- hw_imgfmt, imgfmt, 128, 128, false))
- {
- MP_WARN(f, "failed to allocate pool\n");
- continue;
- }
-
- enum AVPixelFormat *fmts;
- if (av_hwframe_transfer_get_formats(frames,
- AV_HWFRAME_TRANSFER_DIRECTION_TO, &fmts, 0) >= 0)
- {
+ if (use_conversion_filter) {
+ // The conversion constraints are universal, and do not vary with
+ // source format, so we will associate the same set of target formats
+ // with all source formats.
int index = p->num_fmts;
MP_TARRAY_APPEND(p, p->fmts, p->num_fmts, imgfmt);
MP_TARRAY_GROW(p, p->fmt_upload_index, index);
@@ -365,7 +367,8 @@ static bool probe_formats(struct mp_filter *f, int hw_imgfmt)
p->fmt_upload_index[index] = p->num_upload_fmts;
- for (int i = 0; fmts[i] != AV_PIX_FMT_NONE; i++) {
+ enum AVPixelFormat *fmts = conversion_cstr->valid_sw_formats;
+ for (int i = 0; fmts && fmts[i] != AV_PIX_FMT_NONE; i++) {
int fmt = pixfmt2imgfmt(fmts[i]);
if (!fmt)
continue;
@@ -379,14 +382,51 @@ static bool probe_formats(struct mp_filter *f, int hw_imgfmt)
p->fmt_upload_num[index] =
p->num_upload_fmts - p->fmt_upload_index[index];
+ } else {
+ // Creates an AVHWFramesContexts with the given parameters.
+ AVBufferRef *frames = NULL;
+ if (!mp_update_av_hw_frames_pool(&frames, ctx->av_device_ref,
+ hw_imgfmt, imgfmt, 128, 128, false))
+ {
+ MP_WARN(f, "failed to allocate pool\n");
+ continue;
+ }
- av_free(fmts);
- }
+ enum AVPixelFormat *fmts;
+ if (av_hwframe_transfer_get_formats(frames,
+ AV_HWFRAME_TRANSFER_DIRECTION_TO, &fmts, 0) >= 0)
+ {
+ int index = p->num_fmts;
+ MP_TARRAY_APPEND(p, p->fmts, p->num_fmts, imgfmt);
+ MP_TARRAY_GROW(p, p->fmt_upload_index, index);
+ MP_TARRAY_GROW(p, p->fmt_upload_num, index);
+
+ p->fmt_upload_index[index] = p->num_upload_fmts;
+
+ for (int i = 0; fmts[i] != AV_PIX_FMT_NONE; i++) {
+ int fmt = pixfmt2imgfmt(fmts[i]);
+ if (!fmt)
+ continue;
+ MP_VERBOSE(f, " supports %s\n", mp_imgfmt_to_name(fmt));
+ if (!vo_supports(ctx, hw_imgfmt, fmt)) {
+ MP_VERBOSE(f, " ... not supported by VO\n");
+ continue;
+ }
+ MP_TARRAY_APPEND(p, p->upload_fmts, p->num_upload_fmts, fmt);
+ }
- av_buffer_unref(&frames);
+ p->fmt_upload_num[index] =
+ p->num_upload_fmts - p->fmt_upload_index[index];
+
+ av_free(fmts);
+ }
+
+ av_buffer_unref(&frames);
+ }
}
av_hwframe_constraints_free(&cstr);
+ av_hwframe_constraints_free(&conversion_cstr);
p->av_device_ctx = av_buffer_ref(ctx->av_device_ref);
if (!p->av_device_ctx)
return false;
@@ -407,7 +447,7 @@ struct mp_hwupload mp_hwupload_create(struct mp_filter *parent, int hw_imgfmt,
mp_filter_add_pin(f, MP_PIN_IN, "in");
mp_filter_add_pin(f, MP_PIN_OUT, "out");
- if (!probe_formats(f, hw_imgfmt)) {
+ if (!probe_formats(f, hw_imgfmt, src_is_same_hw)) {
MP_INFO(f, "hardware format not supported\n");
goto fail;
}
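
For reference, a minimal sketch of the constraint-based probing path added above, assuming a valid AVBufferRef *device_ref and an optional backend-specific hwconfig (such as the AVVAAPIHWConfig set up in hwdec_vaapi.c further down). It prints the software formats reachable through hardware conversion, which is what probe_formats() now collects when use_conversion_filter is set, instead of allocating a frames pool and calling av_hwframe_transfer_get_formats(). The helper name is hypothetical, not part of the patch.

#include <libavutil/hwcontext.h>
#include <libavutil/pixdesc.h>
#include <stdio.h>

/* Hypothetical helper: list the sw formats a hardware conversion filter can
 * produce by querying frame constraints for the given hwconfig. */
static void list_conversion_formats(AVBufferRef *device_ref, void *hwconfig)
{
    AVHWFramesConstraints *cstr =
        av_hwdevice_get_hwframe_constraints(device_ref, hwconfig);
    if (!cstr)
        return;

    for (int i = 0; cstr->valid_sw_formats &&
         cstr->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++)
    {
        printf("conversion target: %s\n",
               av_get_pix_fmt_name(cstr->valid_sw_formats[i]));
    }

    av_hwframe_constraints_free(&cstr);
}

Because these constraints do not depend on the source format, the same target list is associated with every source format, unlike the transfer path, which probes per source format.
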
diff --git a/video/hwdec.h b/video/hwdec.h
index 44008dece0..723c60f427 100644
--- a/video/hwdec.h
+++ b/video/hwdec.h
@@ -23,6 +23,10 @@ struct mp_hwdec_ctx {
// This will be used for hardware conversion of frame formats.
// NULL otherwise.
const char *conversion_filter_name;
+
+ // The libavutil hwconfig to be used when querying constraints for the
+ // conversion filter. Can be NULL if no special config is required.
+ void *conversion_config;
};
// Used to communicate hardware decoder device handles from VO to video decoder.
diff --git a/video/out/hwdec/dmabuf_interop_wl.c b/video/out/hwdec/dmabuf_interop_wl.c
index 9f0d490228..606a0aa601 100644
--- a/video/out/hwdec/dmabuf_interop_wl.c
+++ b/video/out/hwdec/dmabuf_interop_wl.c
@@ -54,7 +54,8 @@ static bool map(struct ra_hwdec_mapper *mapper,
return false;
}
- MP_VERBOSE(mapper, "Supported Wayland display format: '%s(%016lx)'\n",
+ MP_VERBOSE(mapper, "Supported Wayland display format %s: '%s(%016lx)'\n",
+ mp_imgfmt_to_name(mapper->src->params.hw_subfmt),
mp_tag_str(drm_format), mapper_p->desc.objects[0].format_modifier);
return true;
diff --git a/video/out/hwdec/hwdec_vaapi.c b/video/out/hwdec/hwdec_vaapi.c
index fafc331277..2eb458fa99 100644
--- a/video/out/hwdec/hwdec_vaapi.c
+++ b/video/out/hwdec/hwdec_vaapi.c
@@ -113,8 +113,14 @@ struct priv_owner {
static void uninit(struct ra_hwdec *hw)
{
struct priv_owner *p = hw->priv;
- if (p->ctx)
+ if (p->ctx) {
hwdec_devices_remove(hw->devs, &p->ctx->hwctx);
+ if (p->ctx->hwctx.conversion_config) {
+ AVVAAPIHWConfig *hwconfig = p->ctx->hwctx.conversion_config;
+ vaDestroyConfig(p->ctx->display, hwconfig->config_id);
+ av_freep(&p->ctx->hwctx.conversion_config);
+ }
+ }
va_destroy(p->ctx);
}
@@ -131,11 +137,10 @@ const static dmabuf_interop_init interop_inits[] = {
NULL
};
-const static char *conversion_filter_name = "scale_vaapi";
-
static int init(struct ra_hwdec *hw)
{
struct priv_owner *p = hw->priv;
+ VAStatus vas;
for (int i = 0; interop_inits[i]; i++) {
if (interop_inits[i](hw, &p->dmabuf_interop)) {
@@ -173,13 +178,23 @@ static int init(struct ra_hwdec *hw)
return -1;
}
+ VAConfigID config_id;
+ AVVAAPIHWConfig *hwconfig = NULL;
+ vas = vaCreateConfig(p->display, VAProfileNone, VAEntrypointVideoProc, NULL,
+ 0, &config_id);
+ if (vas == VA_STATUS_SUCCESS) {
+ hwconfig = av_hwdevice_hwconfig_alloc(p->ctx->av_device_ref);
+ hwconfig->config_id = config_id;
+ }
+
// it's now safe to set the display resource
ra_add_native_resource(hw->ra_ctx->ra, "VADisplay", p->display);
p->ctx->hwctx.hw_imgfmt = IMGFMT_VAAPI;
p->ctx->hwctx.supported_formats = p->formats;
p->ctx->hwctx.driver_name = hw->driver->name;
- p->ctx->hwctx.conversion_filter_name = conversion_filter_name;
+ p->ctx->hwctx.conversion_filter_name = "scale_vaapi";
+ p->ctx->hwctx.conversion_config = hwconfig;
hwdec_devices_add(hw->devs, &p->ctx->hwctx);
return 0;
}
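
A minimal sketch of the VAConfig lifecycle introduced above, assuming an initialised VADisplay and a matching AVBufferRef *device_ref. The config binds the VAEntrypointVideoProc entrypoint so that constraint queries against it reflect what scale_vaapi can actually output; uninit() must destroy the VAConfigID before freeing the AVVAAPIHWConfig wrapper, as the patch does. The helper names are hypothetical and only mirror the init()/uninit() changes.

#include <va/va.h>
#include <libavutil/hwcontext.h>
#include <libavutil/hwcontext_vaapi.h>
#include <libavutil/mem.h>

/* Create a video-processing VAConfig and wrap it in an AVVAAPIHWConfig
 * suitable for av_hwdevice_get_hwframe_constraints(). */
static AVVAAPIHWConfig *make_vpp_hwconfig(VADisplay display,
                                          AVBufferRef *device_ref)
{
    VAConfigID config_id;
    VAStatus vas = vaCreateConfig(display, VAProfileNone,
                                  VAEntrypointVideoProc, NULL, 0, &config_id);
    if (vas != VA_STATUS_SUCCESS)
        return NULL;

    AVVAAPIHWConfig *hwconfig = av_hwdevice_hwconfig_alloc(device_ref);
    if (!hwconfig) {
        vaDestroyConfig(display, config_id);
        return NULL;
    }
    hwconfig->config_id = config_id;
    return hwconfig;
}

/* Release the VAConfig first, then the libavutil wrapper. */
static void free_vpp_hwconfig(VADisplay display, AVVAAPIHWConfig **hwconfig)
{
    if (*hwconfig) {
        vaDestroyConfig(display, (*hwconfig)->config_id);
        av_freep(hwconfig);
    }
}
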