author     wm4 <wm4@nowhere>    2016-12-30 20:02:59 +0100
committer  wm4 <wm4@nowhere>    2016-12-30 20:04:47 +0100
commit     b14fc38590097117bd2eb2b71cf1a2f8b736f778 (patch)
tree       d941aa1208a878b80cf34997a18fdeee9920f9f5 /video
parent     58a0c43cf45bab993e842b13d89d70335457a5a0 (diff)
download   mpv-b14fc38590097117bd2eb2b71cf1a2f8b736f778.tar.bz2
           mpv-b14fc38590097117bd2eb2b71cf1a2f8b736f778.tar.xz
vo_opengl: x11egl: fix alpha mode
The way it should (probably) work is that selecting an RGBA framebuffer format will simply make the compositor use the alpha. It works this way on Wayland. On X11, this is... not done. Instead, both GLX and EGL report two FB configs, which are exactly the same, except for the platform-specific visual. Only the latter (non-default) one points to a visual that actually has alpha. So you can't make the pure GLX and EGL APIs select alpha mode, and you have to override it manually.

Or in other words, alpha was hacked violently into X11, in a way that doesn't really make sense, for the sake of compatibility, and it forces API users to wade through metaphorical cow shit to deal with it. To be fair, some other platforms also require you to enable alpha explicitly (rather than looking at the framebuffer type), but they skip the metaphorical cow shit step.
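For illustration only: the RGBA check used in the diff below (vo_x11_is_rgba_visual()) boils down to asking whether a visual's depth covers more bits than its red/green/blue masks account for, so that leftover bits can act as alpha. A minimal sketch of that idea, under the hypothetical name visual_has_alpha, might look like this; mpv's actual helper may differ in detail:

#include <stdbool.h>
#include <X11/Xutil.h>

// Sketch: treat a visual as RGBA if its depth leaves bits that the
// red/green/blue channel masks do not claim; those bits are the alpha.
static bool visual_has_alpha(const XVisualInfo *v)
{
    unsigned long rgb_bits = v->red_mask | v->green_mask | v->blue_mask;
    unsigned long depth_mask = v->depth >= (int)(sizeof(unsigned long) * 8)
                                   ? ~0UL : (1UL << v->depth) - 1;
    return (depth_mask & ~rgb_bits) != 0;
}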
Diffstat (limited to 'video')
-rw-r--r--   video/out/opengl/context_x11egl.c   34
 1 file changed, 32 insertions(+), 2 deletions(-)
diff --git a/video/out/opengl/context_x11egl.c b/video/out/opengl/context_x11egl.c
index 7a9e4d31fc..2cf249fe1a 100644
--- a/video/out/opengl/context_x11egl.c
+++ b/video/out/opengl/context_x11egl.c
@@ -49,6 +49,29 @@ static void mpegl_uninit(MPGLContext *ctx)
     vo_x11_uninit(ctx->vo);
 }
 
+static int pick_xrgba_config(void *user_data, EGLConfig *configs, int num_configs)
+{
+    struct MPGLContext *ctx = user_data;
+    struct priv *p = ctx->priv;
+    struct vo *vo = ctx->vo;
+
+    for (int n = 0; n < num_configs; n++) {
+        int vID = 0, num;
+        eglGetConfigAttrib(p->egl_display, configs[n], EGL_NATIVE_VISUAL_ID, &vID);
+        XVisualInfo template = {.visualid = vID};
+        XVisualInfo *vi = XGetVisualInfo(vo->x11->display, VisualIDMask,
+                                         &template, &num);
+        if (vi) {
+            bool is_rgba = vo_x11_is_rgba_visual(vi);
+            XFree(vi);
+            if (is_rgba)
+                return n;
+        }
+    }
+
+    return 0;
+}
+
 static int mpegl_init(struct MPGLContext *ctx, int flags)
 {
     struct priv *p = ctx->priv;
@@ -64,13 +87,20 @@ static int mpegl_init(struct MPGLContext *ctx, int flags)
         goto uninit;
     }
 
+    struct mpegl_opts opts = {
+        .vo_flags = flags,
+        .user_data = ctx,
+        .refine_config = (flags & VOFLAG_ALPHA) ? pick_xrgba_config : NULL,
+    };
+
     EGLConfig config;
-    if (!mpegl_create_context(p->egl_display, vo->log, flags, &p->egl_context,
-                              &config))
+    if (!mpegl_create_context_opts(p->egl_display, vo->log, &opts,
+                                   &p->egl_context, &config))
         goto uninit;
 
     int vID, n;
     eglGetConfigAttrib(p->egl_display, config, EGL_NATIVE_VISUAL_ID, &vID);
+    MP_VERBOSE(vo, "chose visual 0x%x\n", vID);
     XVisualInfo template = {.visualid = vID};
     XVisualInfo *vi = XGetVisualInfo(vo->x11->display, VisualIDMask, &template, &n);
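As a rough illustration of how the new refine_config hook fits in: mpegl_create_context_opts() presumably lets eglChooseConfig() produce its candidate list first, then gives the callback a chance to pick a different index, falling back to index 0. The helper below and its name apply_refine_config are assumptions for illustration, not the actual egl_helpers.c code; only the struct fields shown in the diff above are taken from the source.

#include <EGL/egl.h>

// Fields as used in the diff above; the real struct in egl_helpers.h may
// contain additional members.
struct mpegl_opts {
    int vo_flags;
    void *user_data;
    int (*refine_config)(void *user_data, EGLConfig *configs, int num_configs);
};

// Hypothetical helper sketching how a refine_config callback could be
// consulted after eglChooseConfig() has returned its candidates.
static EGLConfig apply_refine_config(const struct mpegl_opts *opts,
                                     EGLConfig *configs, int num_configs)
{
    int chosen = 0;                     // default: EGL's preferred config
    if (opts->refine_config)
        chosen = opts->refine_config(opts->user_data, configs, num_configs);
    if (chosen < 0 || chosen >= num_configs)
        chosen = 0;                     // guard against out-of-range picks
    return configs[chosen];
}

Since pick_xrgba_config() returns 0 when no RGBA visual is found among the candidates, the default config is kept in that case and alpha mode simply stays unavailable.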