summary refs log tree commit diff stats
path: root/video
diff options
context:
space:
mode:
authorwm4 <wm4@nowhere>2015-01-29 19:53:49 +0100
committerwm4 <wm4@nowhere>2015-01-29 19:53:49 +0100
commitc80a1b7aa9936802e64189f9474847fd3fe57e8d (patch)
treeba9baf58a55ae281aa5ec7889113991144adcfb8 /video
parente0e06f0f0c9d6f00cb02b2c98b7286e231d65794 (diff)
downloadmpv-c80a1b7aa9936802e64189f9474847fd3fe57e8d.tar.bz2
mpv-c80a1b7aa9936802e64189f9474847fd3fe57e8d.tar.xz
vo_opengl: let hwdec driver report the exact image format
Hardware decoding/displaying with vo_opengl is done by replacing the normal video textures with textures provided by the hardware decoding API OpenGL interop code. Often, this changes the format (vaglx and vdpau return RGBA, vda returns packed YUV). If the format is changed, there was a chance (or at least a higher potential for bugs) that the shader generation code could be confused by the mismatch of formats, and would create incorrect conversions. Simplify this by requiring the hwdec interop driver to set the format it will return to us. This affects all fields, not just some (done by replacing the format with the value of the converted_imgfmt field in init_format), in particular fields like colorlevels. Currently, no hwdec interop driver does anything sophisticated, and the win is mostly from the mp_image_params_guess_csp() function, which will reset fields like colorlevels to the expected value if RGBA is used.
Diffstat (limited to 'video')
-rw-r--r--video/out/gl_hwdec.h3
-rw-r--r--video/out/gl_hwdec_vaglx.c4
-rw-r--r--video/out/gl_hwdec_vda.c3
-rw-r--r--video/out/gl_hwdec_vdpau.c5
-rw-r--r--video/out/gl_video.c40
5 files changed, 31 insertions, 24 deletions
diff --git a/video/out/gl_hwdec.h b/video/out/gl_hwdec.h
index d4e500e329..cd2ab26892 100644
--- a/video/out/gl_hwdec.h
+++ b/video/out/gl_hwdec.h
@@ -35,7 +35,8 @@ struct gl_hwdec_driver {
int (*create)(struct gl_hwdec *hw);
// Prepare for rendering video. (E.g. create textures.)
// Called on initialization, and every time the video size changes.
- int (*reinit)(struct gl_hwdec *hw, const struct mp_image_params *params);
+ // *params must be set to the format the hw textures return.
+ int (*reinit)(struct gl_hwdec *hw, struct mp_image_params *params);
// Return textures that contain the given hw_image.
// Note that the caller keeps a reference to hw_image until unmap_image
// is called, so the hwdec driver doesn't need to do that.
diff --git a/video/out/gl_hwdec_vaglx.c b/video/out/gl_hwdec_vaglx.c
index f9dc42911d..99d29dc385 100644
--- a/video/out/gl_hwdec_vaglx.c
+++ b/video/out/gl_hwdec_vaglx.c
@@ -87,7 +87,7 @@ static int create(struct gl_hwdec *hw)
return 0;
}
-static int reinit(struct gl_hwdec *hw, const struct mp_image_params *params)
+static int reinit(struct gl_hwdec *hw, struct mp_image_params *params)
{
struct priv *p = hw->priv;
GL *gl = hw->gl;
@@ -95,6 +95,8 @@ static int reinit(struct gl_hwdec *hw, const struct mp_image_params *params)
destroy_texture(hw);
+ params->imgfmt = hw->driver->imgfmt;
+
gl->GenTextures(1, &p->gl_texture);
gl->BindTexture(GL_TEXTURE_2D, p->gl_texture);
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
diff --git a/video/out/gl_hwdec_vda.c b/video/out/gl_hwdec_vda.c
index d90b3419ae..bc18983d3d 100644
--- a/video/out/gl_hwdec_vda.c
+++ b/video/out/gl_hwdec_vda.c
@@ -97,8 +97,9 @@ static int create(struct gl_hwdec *hw)
return 0;
}
-static int reinit(struct gl_hwdec *hw, const struct mp_image_params *params)
+static int reinit(struct gl_hwdec *hw, struct mp_image_params *params)
{
+ params->imgfmt = hw->driver->imgfmt;
return 0;
}
diff --git a/video/out/gl_hwdec_vdpau.c b/video/out/gl_hwdec_vdpau.c
index 3028fa10e1..f0086b35a5 100644
--- a/video/out/gl_hwdec_vdpau.c
+++ b/video/out/gl_hwdec_vdpau.c
@@ -29,7 +29,7 @@
// follow it. I'm not sure about the original nvidia headers.
#define BRAINDEATH(x) ((void *)(uintptr_t)(x))
-static int reinit(struct gl_hwdec *hw, const struct mp_image_params *params);
+static int reinit(struct gl_hwdec *hw, struct mp_image_params *params);
struct priv {
struct mp_log *log;
@@ -119,7 +119,7 @@ static int create(struct gl_hwdec *hw)
return 0;
}
-static int reinit(struct gl_hwdec *hw, const struct mp_image_params *params)
+static int reinit(struct gl_hwdec *hw, struct mp_image_params *params)
{
struct priv *p = hw->priv;
GL *gl = hw->gl;
@@ -128,6 +128,7 @@ static int reinit(struct gl_hwdec *hw, const struct mp_image_params *params)
destroy_objects(hw);
+ params->imgfmt = hw->driver->imgfmt;
p->image_params = *params;
if (mp_vdpau_handle_preemption(p->ctx, &p->preemption_counter) < 1)
diff --git a/video/out/gl_video.c b/video/out/gl_video.c
index 49b9ba528c..2aae1ad781 100644
--- a/video/out/gl_video.c
+++ b/video/out/gl_video.c
@@ -153,7 +153,8 @@ struct gl_video {
float dither_center;
int dither_size;
- struct mp_image_params image_params;
+ struct mp_image_params real_image_params; // configured format
+ struct mp_image_params image_params; // texture format (mind hwdec case)
struct mp_imgfmt_desc image_desc;
int plane_count;
int image_w, image_h;
@@ -945,9 +946,7 @@ static void compile_shaders(struct gl_video *p)
// treated as linear.
if (is_xyz) {
gamma_fun = MP_CSP_TRC_LINEAR;
- } else if (p->image_params.colorlevels == MP_CSP_LEVELS_PC && !p->hwdec_active) {
- // FIXME: I don't know if hwdec sets the color levels to PC or not,
- // but let's avoid the bug just in case.
+ } else if (p->image_params.colorlevels == MP_CSP_LEVELS_PC) {
gamma_fun = MP_CSP_TRC_SRGB;
} else {
gamma_fun = MP_CSP_TRC_BT_1886;
@@ -1524,21 +1523,26 @@ static int align_pow2(int s)
return r;
}
-static void init_video(struct gl_video *p, const struct mp_image_params *params)
+static void init_video(struct gl_video *p)
{
GL *gl = p->gl;
- init_format(params->imgfmt, p);
+ check_gl_features(p);
+ init_format(p->image_params.imgfmt, p);
p->gl_target = p->opts.use_rectangle ? GL_TEXTURE_RECTANGLE : GL_TEXTURE_2D;
- if (p->hwdec_active)
+
+ if (p->hwdec_active) {
+ if (p->hwdec->driver->reinit(p->hwdec, &p->image_params) < 0)
+ MP_ERR(p, "Initializing texture for hardware decoding failed.\n");
+ init_format(p->image_params.imgfmt, p);
p->gl_target = p->hwdec->gl_texture_target;
+ }
- check_gl_features(p);
+ mp_image_params_guess_csp(&p->image_params);
- p->image_w = params->w;
- p->image_h = params->h;
- p->image_params = *params;
+ p->image_w = p->image_params.w;
+ p->image_h = p->image_params.h;
int eq_caps = MP_CSP_EQ_CAPS_GAMMA;
if (p->is_yuv && p->image_params.colorspace != MP_CSP_BT_2020_C)
@@ -1598,11 +1602,6 @@ static void init_video(struct gl_video *p, const struct mp_image_params *params)
debug_check_gl(p, "after video texture creation");
- if (p->hwdec_active) {
- if (p->hwdec->driver->reinit(p->hwdec, &p->image_params) < 0)
- MP_ERR(p, "Initializing texture for hardware decoding failed.\n");
- }
-
reinit_rendering(p);
}
@@ -1628,7 +1627,8 @@ static void uninit_video(struct gl_video *p)
// Invalidate image_params to ensure that gl_video_config() will call
// init_video() on uninitialized gl_video.
- p->image_params = (struct mp_image_params){0};
+ p->real_image_params = (struct mp_image_params){0};
+ p->image_params = p->real_image_params;
}
static void change_dither_trafo(struct gl_video *p)
@@ -2370,10 +2370,12 @@ void gl_video_config(struct gl_video *p, struct mp_image_params *params)
{
mp_image_unrefp(&p->image.mpi);
- if (!mp_image_params_equal(&p->image_params, params)) {
+ if (!mp_image_params_equal(&p->real_image_params, params)) {
uninit_video(p);
+ p->real_image_params = *params;
+ p->image_params = *params;
if (params->imgfmt)
- init_video(p, params);
+ init_video(p);
}
check_resize(p);