author     Sven Göthel <[email protected]>    2024-02-02 08:47:52 +0100
committer  Sven Göthel <[email protected]>    2024-02-02 08:47:52 +0100
commit     a676fb259c310246975f9c2f67f7a9795967f7bb (patch)
tree       0584367854f2f16e78ae7281fbe13a83efa499ad /src/jogl/classes
parent     1672233124e425e5446e1ae87974af248784be3c (diff)
GLMediaPlayer: Adjust API doc and certain names differentiating video (from subtitle) etc.
Diffstat (limited to 'src/jogl/classes')
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java      | 18
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java      | 26
2 files changed, 28 insertions, 16 deletions
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
index 4011bddcb..958dc72b5 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
@@ -310,6 +310,12 @@ public interface GLMediaPlayer extends TextureSequence {
     /**
      * {@inheritDoc}
      * <p>
+     * Optional Video {@link TextureFrame} listener.
+     * Usually {@link GLMediaPlayer#getNextTexture(GL)} is used to retrieve the next frame and keep
+     * decoding going, while {@link GLMediaPlayer#getLastTexture(GL)} is used to simply retrieve the
+     * last decoded frame.
+     * </p>
+     * <p>
      * As the contract of {@link TexSeqEventListener} requests,
      * implementations of {@link GLMediaEventListener} shall also:
      * <ul>
@@ -760,13 +766,13 @@ public interface GLMediaPlayer extends TextureSequence {
     public String getLang(int id);
 
     /**
-     * @return the current decoded frame count since {@link #resume()} and {@link #seek(int)}
+     * @return the current decoded video frame count since {@link #resume()} and {@link #seek(int)}
      * as increased by {@link #getNextTexture(GL)} or the decoding thread.
      */
     public int getDecodedFrameCount();
 
     /**
-     * @return the current presented frame count since {@link #resume()} and {@link #seek(int)}
+     * @return the current presented video frame count since {@link #resume()} and {@link #seek(int)}
      * as increased by {@link #getNextTexture(GL)} for new frames.
      */
     public int getPresentedFrameCount();
@@ -810,6 +816,9 @@ public interface GLMediaPlayer extends TextureSequence {
     /**
      * {@inheritDoc}
      * <p>
+     * Returns the last decoded Video {@link TextureSequence.TextureFrame}.
+     * </p>
+     * <p>
      * See <a href="#synchronization">audio and video synchronization</a>.
      * </p>
      * @throws IllegalStateException if not invoked in {@link State#Paused} or {@link State#Playing}
@@ -819,7 +828,10 @@ public interface GLMediaPlayer extends TextureSequence {
     /**
      * {@inheritDoc}
-     *
+     * <p>
+     * Returns the next Video {@link TextureSequence.TextureFrame} to be rendered in sync with {@link #getPTS()}
+     * and keeps decoding going.
+     * </p>
      * <p>
      * In case the current state is not {@link State#Playing}, {@link #getLastTexture()} is returned.
      * </p>
diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
index 2446fb15b..836c96c08 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
@@ -1465,7 +1465,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
                 subTexFree.putBlocking(subTex); // return unused
             }
             if( TimeFrameI.INVALID_PTS != pts ) {
-                newFrameAvailable(nextVFrame, Clock.currentMillis());
+                newVideoFrameAvailable(nextVFrame, Clock.currentMillis());
                 gotVFrame[0] = true;
             } else {
                 gotVFrame[0] = false;
@@ -1536,7 +1536,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
         return dpts_avg_diff;
     }
-    private final void newFrameAvailable(final TextureFrame frame, final long currentMillis) {
+    private final void newVideoFrameAvailable(final TextureFrame frame, final long currentMillis) {
        decodedFrameCount++; // safe: only written-to either from stream-worker or user thread
        if( 0 == frame.getDuration() ) { // patch frame duration if not set already
            frame.setDuration( (int) frame_duration );
        }
@@ -1689,13 +1689,13 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
    };
    private final WorkerThread.Callback action = (final WorkerThread self) -> {
        final GL gl;
-       TextureFrame nextFrame = null;
+       TextureFrame vidFrame = null;
        final boolean[] subTexUsed = { false };
        Texture subTex = null;
        try {
            if( STREAM_ID_NONE != vid ) {
-               nextFrame = videoFramesFree.getBlocking();
-               nextFrame.setPTS( TimeFrameI.INVALID_PTS ); // mark invalid until processed!
+               vidFrame = videoFramesFree.getBlocking();
+               vidFrame.setPTS( TimeFrameI.INVALID_PTS ); // mark invalid until processed!
                gl = sharedGLCtx.getGL();
            } else {
                gl = null;
@@ -1703,7 +1703,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
            }
            if( null != gl && STREAM_ID_NONE != sid && null != subTexFree ) {
                subTex = subTexFree.getBlocking();
            }
-           final int vPTS = getNextTextureImpl(gl, nextFrame, subTex, subTexUsed);
+           final int vPTS = getNextTextureImpl(gl, vidFrame, subTex, subTexUsed);
            if( null != subTex ) {
                if( !subTexUsed[0] ) {
                    subTexFree.putBlocking(subTex);// return unused
@@ -1713,15 +1713,15 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
            }
            boolean audioEOS = false;
            if( TimeFrameI.INVALID_PTS != vPTS ) {
-               if( null != nextFrame ) {
+               if( null != vidFrame ) {
                    if( STREAM_WORKER_DELAY > 0 ) {
                        java.lang.Thread.sleep(STREAM_WORKER_DELAY);
                    }
-                   if( !videoFramesDecoded.put(nextFrame) ) {
+                   if( !videoFramesDecoded.put(vidFrame) ) {
                        throw new InternalError("XXX: free "+videoFramesFree+", decoded "+videoFramesDecoded+", "+GLMediaPlayerImpl.this);
                    }
-                   newFrameAvailable(nextFrame, Clock.currentMillis());
-                   nextFrame = null;
+                   newVideoFrameAvailable(vidFrame, Clock.currentMillis());
+                   vidFrame = null;
                } else {
                    // audio only
                    if( TimeFrameI.END_OF_STREAM_PTS == vPTS || ( duration > 0 && duration < vPTS ) ) {
@@ -1730,7 +1730,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
                        nullFrameCount = 0;
                    }
                }
-           } else if( null == nextFrame ) {
+           } else if( null == vidFrame ) {
                // audio only
                audioEOS = maxNullFrameCountUntilEOS <= nullFrameCount;
                if( null == audioSink || 0 == audioSink.getEnqueuedFrameCount() ) {
@@ -1746,8 +1746,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
                pauseImpl(true, new GLMediaPlayer.EventMask(GLMediaPlayer.EventMask.Bit.EOS));
            }
        } finally {
-           if( null != nextFrame ) { // put back
-               videoFramesFree.putBlocking(nextFrame);
+           if( null != vidFrame ) { // put back
+               videoFramesFree.putBlocking(vidFrame);
            }
        }
    };
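The Javadoc added above distinguishes the two video frame accessors: getNextTexture(GL) keeps decoding going and returns the frame to present in sync with getPTS(), while getLastTexture(..) simply re-delivers the last decoded frame and throws IllegalStateException outside the Paused/Playing states. Below is a minimal, illustrative sketch of how a renderer might consume video frames under that contract. It is not part of this commit or of the JOGL API: the class and field names (VideoFrameRenderer, player) are made up for illustration, and the GLMediaPlayer is assumed to have been created and set playing elsewhere.

```java
import com.jogamp.opengl.GL;
import com.jogamp.opengl.GLAutoDrawable;
import com.jogamp.opengl.GLEventListener;
import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureSequence;

/**
 * Illustrative sketch only: pulls video TextureFrames from an already
 * initialized and playing GLMediaPlayer inside the GL render loop.
 */
public class VideoFrameRenderer implements GLEventListener {
    private final GLMediaPlayer player; // assumed initialized and started elsewhere

    public VideoFrameRenderer(final GLMediaPlayer player) {
        this.player = player;
    }

    @Override
    public void init(final GLAutoDrawable drawable) {
        // shader / geometry setup omitted for brevity
    }

    @Override
    public void display(final GLAutoDrawable drawable) {
        final GL gl = drawable.getGL();
        final GLMediaPlayer.State state = player.getState();
        if( GLMediaPlayer.State.Playing != state && GLMediaPlayer.State.Paused != state ) {
            return; // getNextTexture(GL) would throw IllegalStateException otherwise
        }
        // getNextTexture(GL) advances decoding and returns the video frame to be
        // rendered in sync with getPTS(); when not playing, the last decoded
        // frame is returned instead, per the Javadoc in this commit.
        final TextureSequence.TextureFrame frame = player.getNextTexture(gl);
        if( null != frame ) { // defensive; skip drawing if no frame is available
            final Texture tex = frame.getTexture();
            tex.enable(gl);
            tex.bind(gl);
            // ... draw a textured quad sampling the player's texture here ...
            tex.disable(gl);
        }
    }

    @Override
    public void reshape(final GLAutoDrawable drawable, final int x, final int y,
                        final int width, final int height) { }

    @Override
    public void dispose(final GLAutoDrawable drawable) { }
}
```

A renderer that only needs to re-display the most recent frame, for example while paused, would call getLastTexture(..) instead, which does not advance decoding and therefore does not increase the decoded or presented video frame counts queried via getDecodedFrameCount() and getPresentedFrameCount().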