From 66dfb96fa1e49127fb76eddec8ecfce973acdc21 Mon Sep 17 00:00:00 2001
From: Niklas Haas
Date: Wed, 21 Feb 2018 20:16:30 +0100
Subject: vo_gpu: don't tone-map for pure gamut reductions

Based on testing with real-world non-HDR BT.2020 clips, clipping the
color space looks better than attempting to gamut map using a tone
mapping shader that's (by now) optimized for HDR content.

If anything, we'd have to develop a separate gamut mapping shader that
works in LCh space.
---
 video/out/gpu/video_shaders.c | 3 ---
 1 file changed, 3 deletions(-)

(limited to 'video')

diff --git a/video/out/gpu/video_shaders.c b/video/out/gpu/video_shaders.c
index b588b8e500..b3eec11066 100644
--- a/video/out/gpu/video_shaders.c
+++ b/video/out/gpu/video_shaders.c
@@ -797,9 +797,6 @@ void pass_color_map(struct gl_shader_cache *sc,
         mp_get_cms_matrix(csp_src, csp_dst, MP_INTENT_RELATIVE_COLORIMETRIC, m);
         gl_sc_uniform_mat3(sc, "cms_matrix", true, &m[0][0]);
         GLSL(color.rgb = cms_matrix * color.rgb;)
-        // Since this can reduce the gamut, figure out by how much
-        for (int c = 0; c < 3; c++)
-            src.sig_peak = MPMAX(src.sig_peak, m[c][c]);
     }
 
     // Tone map to prevent clipping when the source signal peak exceeds the
--
cgit v1.2.3
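
For illustration only, not part of the patch: a minimal standalone GLSL sketch of the two behaviours the commit message contrasts. gamut_clip shows the hard clipping this change falls back to for pure gamut reductions, and gamut_desat is a hypothetical, much cruder stand-in for the LCh-space gamut mapping the message says would be needed. Both function names are invented here; the sketch assumes linear light in the destination (BT.709) primaries and ignores degenerate cases such as a grey pixel whose luma already exceeds 1.0.

    // Hard gamut clipping: out-of-range values produced by cms_matrix are
    // simply clamped to the destination gamut boundary.
    vec3 gamut_clip(vec3 rgb)
    {
        return clamp(rgb, 0.0, 1.0);
    }

    // Hypothetical alternative (far simpler than a real LCh-space mapper):
    // desaturate towards luma just enough to pull the pixel back into gamut,
    // trading saturation for brightness preservation.
    vec3 gamut_desat(vec3 rgb)
    {
        float luma = dot(rgb, vec3(0.2126, 0.7152, 0.0722)); // BT.709 weights
        float hi = max(max(rgb.r, rgb.g), rgb.b);
        float lo = min(min(rgb.r, rgb.g), rgb.b);
        float t = 0.0; // mix factor towards luma
        if (hi > 1.0)
            t = max(t, (hi - 1.0) / (hi - luma));
        if (lo < 0.0)
            t = max(t, -lo / (luma - lo));
        return mix(rgb, vec3(luma), clamp(t, 0.0, 1.0));
    }

A real replacement along the lines the commit message suggests would instead convert to LCh, compress chroma towards the gamut boundary while preserving lightness and hue, and convert back; the sketch above only illustrates why such a dedicated gamut-aware step, rather than the HDR tone-mapping path, would be the place to do that.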