author     Niklas Haas <git@haasn.xyz>    2017-07-17 21:39:06 +0200
committer  Niklas Haas <git@haasn.xyz>    2017-07-24 17:19:31 +0200
commit     b196cadf9f9f6ea210db9236c2b26523a9a2719f (patch)
tree       7faa5a77c65d84e45c074eb248fe0b54a62288ad /video/out/opengl/video.h
parent     aad6ba018a17eded2b3f4af2212e0123cfb29b79 (diff)
vo_opengl: support HDR peak detection
This is done via compute shaders. As a consequence, the tone mapping algorithms had to be rewritten to compute their known constants in GLSL (ahead of time), instead of doing it once. Didn't affect performance.

Using shmem/SSBO atomics in this way is extremely fast on nvidia, but it might be slow on other platforms. Needs testing.

Unfortunately, setting up the SSBO still requires OpenGL calls, which means I can't have it in video_shaders.c, where it belongs. But I'll defer worrying about that until the backend refactor, since then I'll be breaking up the video/video_shaders structure anyway.
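As a rough illustration of the approach described above (and not the shader this commit adds), a compute pass can reduce each workgroup's maximum signal level through a shared-memory atomic, and then fold the per-workgroup results into an SSBO with one global atomic per workgroup. All names below (source_tex, PeakSSBO, frame_max, wg_max, PEAK_FP) and the fixed-point encoding are hypothetical choices made for this sketch.

    #version 430

    layout(local_size_x = 16, local_size_y = 16) in;

    layout(binding = 0) uniform sampler2D source_tex;   // linear-light input frame

    // Hypothetical SSBO: one running maximum per frame, stored in fixed point
    // so that integer atomics can be used on it.
    layout(std430, binding = 1) buffer PeakSSBO {
        uint frame_max;
    };

    #define PEAK_FP 10000.0   // fixed-point scale for the integer atomics

    shared uint wg_max;       // per-workgroup maximum, reduced via shmem atomics

    void main()
    {
        if (gl_LocalInvocationIndex == 0u)
            wg_max = 0u;
        barrier();

        // Fold this invocation's sample into the workgroup maximum; the atomic
        // stays in shared memory, which keeps this part cheap.
        ivec2 pos = ivec2(gl_GlobalInvocationID.xy);
        if (all(lessThan(pos, textureSize(source_tex, 0)))) {
            vec3 color = texelFetch(source_tex, pos, 0).rgb;
            float sig = max(max(color.r, color.g), color.b);
            atomicMax(wg_max, uint(sig * PEAK_FP));
        }
        barrier();

        // Only one invocation per workgroup touches the global SSBO.
        if (gl_LocalInvocationIndex == 0u)
            atomicMax(frame_max, wg_max);
    }

Issuing only a single atomicMax against the SSBO per workgroup is what makes this style of reduction cheap on hardware with fast shared-memory atomics, which matches the nvidia-vs-other-platforms caveat in the commit message.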
Diffstat (limited to 'video/out/opengl/video.h')
-rw-r--r--  video/out/opengl/video.h  |  4 ++++
1 file changed, 4 insertions(+), 0 deletions(-)
diff --git a/video/out/opengl/video.h b/video/out/opengl/video.h
index f3608626e4..b19f6e099d 100644
--- a/video/out/opengl/video.h
+++ b/video/out/opengl/video.h
@@ -99,6 +99,9 @@ enum tone_mapping {
     TONE_MAPPING_LINEAR,
 };
 
+// How many frames to average over for HDR peak detection
+#define PEAK_DETECT_FRAMES 100
+
 struct gl_video_opts {
     int dumb_mode;
     struct scaler_config scaler[4];
@@ -109,6 +112,7 @@ struct gl_video_opts {
     int target_trc;
     int target_brightness;
     int hdr_tone_mapping;
+    int compute_hdr_peak;
     float tone_mapping_param;
     float tone_mapping_desat;
     int linear_scaling;
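The PEAK_DETECT_FRAMES constant added above indicates that the detected peak is averaged over a window of recent frames rather than applied raw. The following sketch shows one plausible way to do that in GLSL: a ring buffer of per-frame maxima kept in the SSBO and averaged once per frame. The buffer layout, the frame_idx/frame_hist names, the single-invocation bookkeeping pass, and the assumption that PEAK_DETECT_FRAMES and the PEAK_FP scale from the previous sketch are made available to the shader source are all illustrative, not mpv's actual implementation.

    // Hypothetical extension of the peak SSBO: a ring buffer of recent
    // per-frame maxima, averaged to get a temporally stable peak.
    // PEAK_DETECT_FRAMES is assumed to be injected into the shader source;
    // PEAK_FP is the fixed-point scale from the sketch above.
    layout(std430, binding = 1) buffer PeakSSBO {
        uint frame_max;                       // maximum of the current frame
        uint frame_idx;                       // write position in the ring buffer
        uint frame_hist[PEAK_DETECT_FRAMES];  // recent per-frame maxima
    };

    // Run by a single invocation once per frame, after the reduction pass.
    float update_peak_average()
    {
        frame_hist[frame_idx] = frame_max;
        frame_idx = (frame_idx + 1u) % uint(PEAK_DETECT_FRAMES);
        frame_max = 0u;

        uint sum = 0u;
        for (uint i = 0u; i < uint(PEAK_DETECT_FRAMES); i++)
            sum += frame_hist[i];

        // Back from fixed point to a linear signal level, averaged over the window.
        return float(sum) / float(PEAK_DETECT_FRAMES) / PEAK_FP;
    }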