path: root/audio/out/ao.h
diff options
authorwm4 <wm4@nowhere>2014-04-17 23:48:09 +0200
committerwm4 <wm4@nowhere>2014-04-17 23:48:09 +0200
commit5059039c95f51cfe33f828a66d55b1255ab1626d (patch)
tree5ff63cdba84e0531a234ecd4f12d8dca8596e305 /audio/out/ao.h
parent0ab3482f73a199b2e839ad4ec0a4b21adc1e75d5 (diff)
player: untangle one aspect of audio EOF handling
For some reason, the buffered_audio variable was used to "cache" the ao_get_delay() result. But I can't really see any reason why this should be done, and it just seems to complicate everything. One reason might be that the value should be checked only if the AO buffers have been recently filled (as otherwise the delay could go low and trigger an accidental EOF condition), but this didn't work anyway, since buffered_audio is set from ao_get_delay() anyway at a later point if it was unset. And in both cases, the value is used _after_ filling the audio buffers anyway. Simplify it. Also, move the audio EOF condition to a separate function. (Note that ao_eof_reached() probably could/should check whether the last ao_play() call had AOPLAY_FINAL_CHUNK set to avoid accidental EOF on underflows, but for now let's keep the code equivalent.)
Diffstat (limited to 'audio/out/ao.h')
1 file changed, 1 insertion, 5 deletions
diff --git a/audio/out/ao.h b/audio/out/ao.h
index 056cdf7295..59de0c08b1 100644
--- a/audio/out/ao.h
+++ b/audio/out/ao.h
@@ -49,11 +49,6 @@ typedef struct ao_control_vol {
float right;
} ao_control_vol_t;
-// If ao_get_delay() reaches this value after ao_play() was called with the
-// AOPLAY_FINAL_CHUNK flag set, the playback core expects that the audio has
-// all been played.
-#define AO_EOF_DELAY 0.05
struct ao;
struct mpv_global;
struct input_ctx;
@@ -77,5 +72,6 @@ void ao_reset(struct ao *ao);
void ao_pause(struct ao *ao);
void ao_resume(struct ao *ao);
void ao_drain(struct ao *ao);
+bool ao_eof_reached(struct ao *ao);
#endif /* MPLAYER_AUDIO_OUT_H */