path: root/src/jogl/classes/com/jogamp/opengl/util/av
author    Sven Gothel <[email protected]>    2013-08-16 20:18:36 +0200
committer Sven Gothel <[email protected]>    2013-08-16 20:18:36 +0200
commit    c200045aa661cf82474c2b3c1db0ac69db40452a (patch)
tree      669f350d6d18beb60ce18d41715c374f9dc57b1d /src/jogl/classes/com/jogamp/opengl/util/av
parent    cd0e0465d753255ba0f98a21e3c72f22d8a4b598 (diff)
GLMediaPlayer Multithreaded Decoding: GLMediaPlayer* (Part-4) - WIP
- Use Platform.currentTimeMillis() for accurate timing!

- GLMediaPlayer / GLMediaPlayerImpl
  - Add DEBUG_NATIVE property jogl.debug.GLMediaPlayer.Native
    for verbose impl. messages, i.e. ffmpeg/libav
  - Add 'synchronization' section in GLMediaPlayer API doc (WIP)
  - Use passive non-blocking video synchronization,
    i.e. repeat frames instead of 'sleep' (sketched below). Thx to Xerxes's suggestion.
  - Add flushing of cached decoded frames,
    allowing to remove complicated 'videoSCR_reset_latch'
  - FramePusher (threaded decoding):
    - Always create a shared context!
    - Release context while pausing
    - Pre/post 'getNextTextureImpl()' actions only at makeCurrent/release.
    - newFrameAvailable(..) signal after decoded frame is enqueued

- FFMPEGDynamicLibraryBundleInfo
  - Bind add. functions of libavcodec:
    + "av_init_packet",
    + "av_new_packet",
    + "av_destruct_packet",
  - Bind add. functions of libavformat:
    + "avformat_seek_file",
    + "av_read_play",
    + "av_read_pause",
  - DEBUG property := FFMPEGMediaPlayer.DEBUG || DynamicLibraryBundleInfo.DEBUG;

- FFMPEGMediaPlayer
  - Use libavformat's 'av_read_play()' and 'av_read_pause()',
    which may get utilized for network streams, e.g. RTSP
  - getNextTextureImpl(..):
    - Fix retry loop
    - Use postNextTextureImpl/preNextTextureImpl if desired (PSM)
  - Native:
    - Use fixed my_av_q2i32(..) macro (again)
    - Use INVALID_PTS marker (synced w/ Java code)
    - DEBUG: Dump more detailed frame information
    - TODO: Consider passing frame_delay, especially for repeated frames!

- Tests (MovieSimple, MovieCube):
  - Refine KeyEvents control for seek and speed.

- TODO:
  - Proper audio clock calculation - difficult w/ OpenAL!
  - Video / Audio sync:
    - seek!
    - streams w/ very async A/V frames
  - Test Streams:
    - Five-minute-sync-test.mp4
    - Audio-Video-Sync-Test-Calibration-23.98fps-24fps.mp4
    - sound_in_sync_test.mp4
    - big_buck_bunny_1080p_surround.avi
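For illustration only, a minimal sketch of the passive non-blocking video synchronization mentioned above: instead of sleeping until a frame's presentation time, the presenter keeps returning the previously shown frame until the next decoded frame's PTS is close enough to the current playback time. VideoFrame, the queue and the class itself are hypothetical stand-ins, not part of this patch.

    // Hedged sketch of passive A/V sync: repeat the last frame while the next one is still early.
    final class PassiveVideoSync {
        static final int MAX_VIDEO_ASYNC_MS = 22;        // mirrors GLMediaPlayer.MAXIMUM_VIDEO_ASYNC
        static final class VideoFrame { final int pts; VideoFrame(final int pts) { this.pts = pts; } }

        private final java.util.ArrayDeque<VideoFrame> queue = new java.util.ArrayDeque<VideoFrame>();
        private VideoFrame lastFrame;                    // frame currently on screen

        VideoFrame getNextFrame(final int currentTimeMillis) {
            final VideoFrame next = queue.peek();        // next decoded frame, may be null
            if (null != next && next.pts - currentTimeMillis < MAX_VIDEO_ASYNC_MS) {
                lastFrame = queue.poll();                // due (or late): present it
            }                                            // otherwise: repeat lastFrame, no sleep
            return lastFrame;
        }
    }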
Diffstat (limited to 'src/jogl/classes/com/jogamp/opengl/util/av')
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/AudioSink.java       3
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java  51
2 files changed, 49 insertions, 5 deletions
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/AudioSink.java b/src/jogl/classes/com/jogamp/opengl/util/av/AudioSink.java
index ba785ac31..d5db73c6b 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/AudioSink.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/AudioSink.java
@@ -71,6 +71,9 @@ public interface AudioSink {
public static final AudioDataFormat DefaultFormat = new AudioDataFormat(AudioDataType.PCM, 44100, 16, 2, true /* signed */, true /* fixed point */, true /* littleEndian */);
public static class AudioFrame {
+ /** Constant marking an invalid PTS, i.e. Integer.MIN_VALUE 0x80000000 {@value}. */
+ public static final int INVALID_PTS = 0x80000000 ; // == -2147483648 == Integer.MIN_VALUE;
+
public final ByteBuffer data;
public final int dataSize;
public final int pts;
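A brief, hedged usage sketch of the new INVALID_PTS marker when consuming an AudioFrame; the consumer class and its clock field are hypothetical, and an import of com.jogamp.opengl.util.av.AudioSink is assumed:

    // Hypothetical consumer: only frames carrying a valid PTS advance the audio clock.
    final class AudioFrameConsumer {
        private int audioClockMillis;                    // assumed audio clock field, in milliseconds

        void enqueue(final AudioSink.AudioFrame frame) {
            if (AudioSink.AudioFrame.INVALID_PTS != frame.pts) {
                audioClockMillis = frame.pts;
            }
            // ... hand frame.data / frame.dataSize to the actual sink ...
        }
    }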
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
index fae88ea18..1b82bb994 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
@@ -34,7 +34,6 @@ import javax.media.opengl.GL;
import javax.media.opengl.GLException;
import jogamp.opengl.Debug;
-import jogamp.opengl.util.av.GLMediaPlayerImpl;
import com.jogamp.opengl.util.texture.TextureSequence;
@@ -44,7 +43,8 @@ import com.jogamp.opengl.util.texture.TextureSequence;
* <p>
* Audio may be supported and played back internally or via an {@link AudioSink} implementation,
* if an audio stream is selected in {@link #initGLStream(GL, int, URLConnection, int, int)}.
- * </p>
+ * </p>
+ *
* <a name="lifecycle"><h5>GLMediaPlayer Lifecycle</h5></a>
* <p>
* <table border="1">
@@ -56,6 +56,7 @@ import com.jogamp.opengl.util.texture.TextureSequence;
* <tr><td>{@link #destroy(GL)}</td> <td>ANY</td> <td>Uninitialized</td></tr>
* </table>
* </p>
+ *
* <a name="streamIDs"><h5>Audio and video Stream IDs</h5></a>
* <p>
* <table border="1">
@@ -91,14 +92,47 @@ import com.jogamp.opengl.util.texture.TextureSequence;
* Milliseconds granularity is also more than enough to deal with A-V synchronization,
* where the threshold usually lies within 100ms.
* </p>
+ *
+ * <a name="synchronization"><h5>Audio and video synchronization</h5></a>
+ * <p>
+ * The class follows a passive A/V synchronization pattern.
+ * Audio is left untouched, while {@link #getNextTexture(GL, boolean)} delivers a new video frame
+ * only if its timestamp is less than 22ms ahead of <i>time</i>.
+ * Otherwise the early frame is cached for later retrieval and the previous frame is returned.
+ * FIXME: Refine!
+ * </p>
+ * <p>
+ * https://en.wikipedia.org/wiki/Audio_to_video_synchronization
+ * <pre>
+ * d_av = v_pts - a_pts;
+ * </pre>
+ * </p>
+ * <p>
+ * Recommendation of audio/video pts time lead/lag at production:
+ * <ul>
+ * <li>Overall: +40ms and -60ms, audio ahead of video / audio behind video</li>
+ * <li>Each stage: +5ms and -15ms, audio ahead of video / audio behind video</li>
+ * </ul>
+ * </p>
+ * <p>
+ * Recommendation of av pts time lead/lag at presentation:
+ * <ul>
+ * <li>TV: +15ms and -45ms, audio ahead of video / audio behind video.</li>
+ * <li>Film: +22ms and -22ms, audio ahead of video / audio behind video.</li>
+ * </ul>
+ * </p>
*/
public interface GLMediaPlayer extends TextureSequence {
public static final boolean DEBUG = Debug.debug("GLMediaPlayer");
+ public static final boolean DEBUG_NATIVE = Debug.debug("GLMediaPlayer.Native");
/** Constant {@value} for <i>mute</i> or <i>not available</i>. See <a href="#streamIDs">Audio and video Stream IDs</a>. */
public static final int STREAM_ID_NONE = -2;
/** Constant {@value} for <i>auto</i> or <i>unspecified</i>. See <a href="#streamIDs">Audio and video Stream IDs</a>. */
public static final int STREAM_ID_AUTO = -1;
+
+ /** Maximum tolerated video frame asynchronicity in milliseconds, i.e. {@value} ms. */
+ public static final int MAXIMUM_VIDEO_ASYNC = 22;
public interface GLMediaEventListener extends TexSeqEventListener<GLMediaPlayer> {
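To make the documented thresholds concrete, a small hedged example of the check implied by the synchronization section above: a video frame whose PTS runs more than MAXIMUM_VIDEO_ASYNC milliseconds ahead of the current playback time counts as early and would be cached rather than presented. The helper below is illustrative only and assumes an import of com.jogamp.opengl.util.av.GLMediaPlayer.

    // Hypothetical helper, not part of the interface:
    // compares the video PTS against the current playback time, both in milliseconds.
    static boolean isFrameEarly(final int v_pts, final int currentTimeMillis) {
        return v_pts - currentTimeMillis >= GLMediaPlayer.MAXIMUM_VIDEO_ASYNC;   // 22 ms
    }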
@@ -228,13 +262,14 @@ public interface GLMediaPlayer extends TextureSequence {
public int getAID();
/**
- * @return the current decoded frame count since {@link #initGLStream(GL, int, URLConnection, int, int)}.
+ * @return the current decoded frame count since {@link #play()} and {@link #seek(int)}
+ * as increased by {@link #getNextTexture(GL, boolean)} or the decoding thread.
*/
public int getDecodedFrameCount();
/**
- * @return the current presented frame count since {@link #initGLStream(GL, int, URLConnection, int, int)},
- * increased by {@link #getNextTexture(GL, boolean)}.
+ * @return the current presented frame count since {@link #play()} and {@link #seek(int)}
+ * as increased by {@link #getNextTexture(GL, boolean)} for new frames.
*/
public int getPresentedFrameCount();
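As a quick illustration of how the two counters above might be used together; a purely hypothetical monitoring snippet, not part of the API:

    // With passive synchronization, repeated frames do not increase the presented count,
    // so the difference hints at frames decoded but not (yet) shown as new frames.
    void logFrameCounters(final GLMediaPlayer player) {
        final int decoded   = player.getDecodedFrameCount();
        final int presented = player.getPresentedFrameCount();
        System.err.println("frames: decoded "+decoded+", presented "+presented+
                           ", pending/dropped "+(decoded-presented));
    }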
@@ -250,6 +285,9 @@ public interface GLMediaPlayer extends TextureSequence {
/**
* {@inheritDoc}
+ * <p>
+ * See <a href="#synchronization">audio and video synchronization</a>.
+ * </p>
*/
@Override
public TextureSequence.TextureFrame getLastTexture() throws IllegalStateException;
@@ -260,6 +298,9 @@ public interface GLMediaPlayer extends TextureSequence {
* <p>
* In case the current state is not {@link State#Playing}, {@link #getLastTexture()} is returned.
* </p>
+ * <p>
+ * See <a href="#synchronization">audio and video synchronization</a>.
+ * </p>
*
* @see #addEventListener(GLMediaEventListener)
* @see GLMediaEventListener#newFrameAvailable(GLMediaPlayer, TextureFrame, long)