summary refs log tree commit diff stats
diff options
context:
space:
mode:
-rw-r--r--DOCS/interface-changes.rst1
-rw-r--r--DOCS/man/options.rst8
-rw-r--r--video/out/gpu/video.c2
-rw-r--r--video/out/gpu/video.h1
-rw-r--r--video/out/gpu/video_shaders.c2
5 files changed, 13 insertions(+), 1 deletion(-)
diff --git a/DOCS/interface-changes.rst b/DOCS/interface-changes.rst
index ce7e33176a..2fd30628d8 100644
--- a/DOCS/interface-changes.rst
+++ b/DOCS/interface-changes.rst
@@ -52,6 +52,7 @@ Interface changes
The strength now linearly blends between the linear and nonlinear tone
mapped versions of a color.
- add --hdr-peak-decay-rate and --hdr-scene-threshold-low/high
+ - add --tone-mapping-max-boost
--- mpv 0.29.0 ---
- drop --opensles-sample-rate, as --audio-samplerate should be used if desired
- drop deprecated --videotoolbox-format, --ff-aid, --ff-vid, --ff-sid,
diff --git a/DOCS/man/options.rst b/DOCS/man/options.rst
index 0f7007bf89..e5a897ba4f 100644
--- a/DOCS/man/options.rst
+++ b/DOCS/man/options.rst
@@ -5235,6 +5235,14 @@ The following video options are currently all specific to ``--vo=gpu`` and
linear
Specifies the scale factor to use while stretching. Defaults to 1.0.
+``--tone-mapping-max-boost=<1.0..10.0>``
+ Upper limit for how much the tone mapping algorithm is allowed to boost
+ the average brightness by over-exposing the image. The default value of 1.0
+ allows no additional brightness boost. A value of 2.0 would allow
+ over-exposing by a factor of 2, and so on. Raising this setting can help
+ reveal details that would otherwise be hidden in dark scenes, but raising
+ it too high will make dark scenes appear unnaturally bright.
+
``--hdr-compute-peak=<auto|yes|no>``
Compute the HDR peak and frame average brightness per-frame instead of
relying on tagged metadata. These values are averaged over local regions as
diff --git a/video/out/gpu/video.c b/video/out/gpu/video.c
index a29f09bc3d..6bf0bb31a1 100644
--- a/video/out/gpu/video.c
+++ b/video/out/gpu/video.c
@@ -316,6 +316,7 @@ static const struct gl_video_opts gl_video_opts_def = {
.tone_map = {
.curve = TONE_MAPPING_HABLE,
.curve_param = NAN,
+ .max_boost = 1.0,
.decay_rate = 100.0,
.scene_threshold_low = 50,
.scene_threshold_high = 200,
@@ -376,6 +377,7 @@ const struct m_sub_options gl_video_conf = {
OPT_INTRANGE("hdr-scene-threshold-high",
tone_map.scene_threshold_high, 0, 0, 10000),
OPT_FLOAT("tone-mapping-param", tone_map.curve_param, 0),
+ OPT_FLOATRANGE("tone-mapping-max-boost", tone_map.max_boost, 0, 1.0, 10.0),
OPT_FLOAT("tone-mapping-desaturate", tone_map.desat, 0),
OPT_FLOATRANGE("tone-mapping-desaturate-exponent",
tone_map.desat_exp, 0, 0.0, 20.0),
diff --git a/video/out/gpu/video.h b/video/out/gpu/video.h
index 077f69332f..0bd5c57e8f 100644
--- a/video/out/gpu/video.h
+++ b/video/out/gpu/video.h
@@ -98,6 +98,7 @@ enum tone_mapping {
struct gl_tone_map_opts {
int curve;
float curve_param;
+ float max_boost;
int compute_peak;
float decay_rate;
int scene_threshold_low;
diff --git a/video/out/gpu/video_shaders.c b/video/out/gpu/video_shaders.c
index 0fff8f05f2..fbccd56eb3 100644
--- a/video/out/gpu/video_shaders.c
+++ b/video/out/gpu/video_shaders.c
@@ -652,7 +652,7 @@ static void pass_tone_map(struct gl_shader_cache *sc,
}
GLSL(float sig_orig = sig[sig_idx];)
- GLSLF("float slope = min(1.0, %f / sig_avg);\n", sdr_avg);
+ GLSLF("float slope = min(%f, %f / sig_avg);\n", opts->max_boost, sdr_avg);
GLSL(sig *= slope;)
GLSL(sig_peak *= slope;)