From 474ce65081ecd452215bc07ab866666cb11ca8b1 Mon Sep 17 00:00:00 2001
From: Sven Gothel
Date: Fri, 23 Aug 2013 01:02:33 +0200
Subject: GLMediaPlayer Multithreaded Decoding: GLMediaPlayer* (Part-5) - WIP
- Update/fix GLMediaPlayer API doc
- GLMediaEventListener: Add event bits for all state changes to be delivered via attributesChanged(..)
- StreamWorker / Decoder Thread:
- Use StreamWorker only !
- Handle exceptions on StreamWorker via StreamException
- Handles stream initialization and decoding (-> initStream(..))
- Split initGLStream(..) -> initStream(..) + initGL(GL)
- allow initStream(..)'s implementation to be executed on StreamWorker
- allow GL initialization to be 'postponed' until the stream is ready,
i.e. non-blocking stream initialization (UI .. etc)
- Handle EOS via END_OF_STREAM_PTS -> pause/event
- Video: Use lock-free LFRingbuffer, similar to
ALAudioSink (commit f18a94b3defef16e98badd6d99f2422609aa56c5)
+++
- FFMPEGDynamicLibraryBundleInfo
- Add avcodec's:
- avcodec_get_frame_defaults, avcodec_free_frame (54.28.0), avcodec_flush_buffers,
- Add avutil's:
- av_frame_unref (55.0.0)
- Add avformat's:
- avformat_seek_file (??)
+++
- FFMPEGMediaPlayer Native:
- add 'snoop' video frames for a/v frame count relation.
disabled by default, since it is no longer needed due to ALAudioSink's
grow-buffer usage of LFRingbuffer.
- use sp_avcodec_free_frame if available
- 'useRefCountedFrames=1' for libav 55.0 to cache more than one audio frame,
not used since ALAudioSink's OpenAL usage does not require it (copies data once).
Note: the above snooped-video frame count is used here.
- use only one cached audio-frame (-> see above, OpenAL copies data once),
while reusing the NIO buffer!
- Perform OpenGL sync (glFinish) in native code!
- find proper PTS value, i.e. either frame's PTS or DTS,
see 'PTSStats'.
- FFMPEGMediaPlayer Java:
- use private fields
- simplified code due to above changes.
+++
Working Tests: MovieSimple and MovieCube
TODO-1: Fix
- Android
- OMXGLMediaPlayer
TODO-2:
- Fix issue where async audio frames arrive much later than 1st video frame, i.e. around 300ms.
- Default TextureCount .. maybe 3 ?
- Adding Audio synchronization ?
- Find 'truth' about the correlation of audio and video PTS values;
currently we assume both to be unrelated.
---
.../android/av/AndroidGLMediaPlayerAPI14.java | 13 +-
.../jogamp/opengl/util/av/EGLMediaPlayerImpl.java | 6 +-
.../jogamp/opengl/util/av/GLMediaPlayerImpl.java | 514 +++++++++++++--------
.../jogamp/opengl/util/av/NullGLMediaPlayer.java | 9 +-
.../av/impl/FFMPEGDynamicLibraryBundleInfo.java | 23 +-
.../opengl/util/av/impl/FFMPEGMediaPlayer.java | 239 +++++-----
.../opengl/util/av/impl/OMXGLMediaPlayer.java | 10 +-
7 files changed, 471 insertions(+), 343 deletions(-)
(limited to 'src/jogl/classes/jogamp')
diff --git a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
index f87df950c..578a219e9 100644
--- a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
+++ b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
@@ -31,6 +31,7 @@ import java.io.IOException;
import javax.media.opengl.GL;
import javax.media.opengl.GLES2;
+import javax.media.opengl.GLException;
import com.jogamp.common.os.AndroidVersion;
import com.jogamp.common.os.Platform;
@@ -179,14 +180,14 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
public final Surface getSurface() { return surface; }
public String toString() {
- return "SurfaceTextureFrame[" + pts + "ms: " + texture + ", " + surfaceTex + "]";
+ return "SurfaceTextureFrame[pts " + pts + " ms, l " + duration + " ms, texID "+ texture.getTextureObject() + ", " + surfaceTex + "]";
}
private final SurfaceTexture surfaceTex;
private final Surface surface;
}
@Override
- protected final void initGLStreamImpl(GL gl, int vid, int aid) throws IOException {
+ protected final void initStreamImpl(int vid, int aid) throws IOException {
if(null!=mp && null!=streamLoc) {
if( GLMediaPlayer.STREAM_ID_NONE == aid ) {
mp.setVolume(0f, 0f);
@@ -220,9 +221,13 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
0, 0, mp.getDuration(), icodec, icodec);
}
}
+ @Override
+ protected final void initGLImpl(GL gl) throws IOException, GLException {
+ // NOP
+ }
@Override
- protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking, boolean issuePreAndPost) {
+ protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame) {
if(null != stex && null != mp) {
final SurfaceTextureFrame nextSFrame = (SurfaceTextureFrame) nextFrame;
final Surface nextSurface = nextSFrame.getSurface();
@@ -232,7 +237,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
// Only block once, no while-loop.
// This relaxes locking code of non crucial resources/events.
boolean update = updateSurface;
- if(!update && blocking) {
+ if( !update ) {
synchronized(updateSurfaceLock) {
if(!updateSurface) { // volatile OK.
try {
diff --git a/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
index db2146cdc..ec375406d 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
@@ -69,7 +69,7 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
public final long getSync() { return sync; }
public String toString() {
- return "EGLTextureFrame[" + texture + ", img "+ image + ", sync "+ sync+", clientBuffer "+clientBuffer+"]";
+ return "EGLTextureFrame[pts " + pts + " ms, l " + duration + " ms, texID "+ texture.getTextureObject() + ", img "+ image + ", sync "+ sync+", clientBuffer "+clientBuffer+"]";
}
protected final Buffer clientBuffer;
protected final long image;
@@ -82,10 +82,6 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
this.texType = texType;
this.useKHRSync = useKHRSync;
}
- @Override
- protected final int validateTextureCount(int desiredTextureCount) {
- return desiredTextureCount>2 ? Math.max(4, desiredTextureCount) : 2;
- }
@Override
protected TextureSequence.TextureFrame createTexImage(GL gl, int texName) {
diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
index a82c84d17..8193175b7 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
@@ -43,6 +43,8 @@ import javax.media.opengl.GLException;
import javax.media.opengl.GLProfile;
import com.jogamp.common.os.Platform;
+import com.jogamp.common.util.LFRingbuffer;
+import com.jogamp.common.util.Ringbuffer;
import com.jogamp.opengl.util.av.AudioSink;
import com.jogamp.opengl.util.av.AudioSink.AudioFrame;
import com.jogamp.opengl.util.av.GLMediaPlayer;
@@ -65,9 +67,6 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected static final String unknown = "unknown";
- /** Default texture count w/o threading, value {@value}. */
- protected static final int TEXTURE_COUNT_DEFAULT = 2;
-
protected volatile State state;
private Object stateLock = new Object();
@@ -86,33 +85,33 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected volatile float playSpeed = 1.0f;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int vid = GLMediaPlayer.STREAM_ID_AUTO;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int aid = GLMediaPlayer.STREAM_ID_AUTO;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int width = 0;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int height = 0;
- /** Video avg. fps. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Video avg. fps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected float fps = 0;
- /** Video avg. frame duration in ms. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Video avg. frame duration in ms. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected float frame_duration = 0f;
- /** Stream bps. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Stream bps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int bps_stream = 0;
- /** Video bps. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Video bps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int bps_video = 0;
- /** Audio bps. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Audio bps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int bps_audio = 0;
- /** In frames. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** In frames. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int videoFrames = 0;
- /** In frames. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** In frames. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int audioFrames = 0;
- /** In ms. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** In ms. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int duration = 0;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected String acodec = unknown;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected String vcodec = unknown;
protected volatile int decodedFrameCount = 0;
@@ -120,7 +119,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected int displayedFrameCount = 0;
protected volatile int video_pts_last = 0;
- /** See {@link #getAudioSink()}. Set by implementation if used from within {@link #initGLStreamImpl(GL, int, int)}! */
+ /** See {@link #getAudioSink()}. Set by implementation if used from within {@link #initStreamImpl(int, int)}! */
protected AudioSink audioSink = null;
protected boolean audioSinkPlaySpeedSet = false;
@@ -145,12 +144,20 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
/** Trigger video PTS reset with given cause as bitfield. */
private boolean videoSCR_reset = false;
- protected SyncedRingbuffer videoFramesFree = null;
- protected SyncedRingbuffer videoFramesDecoded = null;
+ protected TextureFrame[] videoFramesOrig = null;
+ protected Ringbuffer videoFramesFree = null;
+ protected Ringbuffer videoFramesDecoded = null;
protected volatile TextureFrame lastFrame = null;
private ArrayList eventListeners = new ArrayList();
+ private static Ringbuffer.AllocEmptyArray rbAllocTextureFrameArray = new Ringbuffer.AllocEmptyArray() {
+ @Override
+ public TextureFrame[] newArray(int size) {
+ return new TextureFrame[size];
+ }
+ };
+
protected GLMediaPlayerImpl() {
this.textureCount=0;
this.textureTarget=GL.GL_TEXTURE_2D;
@@ -186,11 +193,21 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
public final void setTextureWrapST(int[] wrapST) { texWrapST[0] = wrapST[0]; texWrapST[1] = wrapST[1];}
public final int[] getTextureWrapST() { return texWrapST; }
+ private final void checkStreamInit() {
+ if(State.Uninitialized == state ) {
+ throw new IllegalStateException("Stream not initialized: "+this);
+ }
+ }
+
+ private final void checkGLInit() {
+ if(State.Uninitialized == state || State.Initialized == state ) {
+ throw new IllegalStateException("GL not initialized: "+this);
+ }
+ }
+
@Override
public String getRequiredExtensionsShaderStub() throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
+ checkGLInit();
if(GLES2.GL_TEXTURE_EXTERNAL_OES == textureTarget) {
return TextureSequence.GL_OES_EGL_image_external_Required_Prelude;
}
@@ -199,9 +216,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
@Override
public String getTextureSampler2DType() throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
+ checkGLInit();
switch(textureTarget) {
case GL.GL_TEXTURE_2D:
case GL2.GL_TEXTURE_RECTANGLE:
@@ -221,9 +236,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
*/
@Override
public String getTextureLookupFunctionName(String desiredFuncName) throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
+ checkGLInit();
return "texture2D";
}
@@ -236,9 +249,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
*/
@Override
public String getTextureLookupFragmentShaderImpl() throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
+ checkGLInit();
return "";
}
@@ -246,7 +257,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
public final int getDecodedFrameCount() { return decodedFrameCount; }
@Override
- public final int getPresentedFrameCount() { return this.presentedFrameCount; }
+ public final int getPresentedFrameCount() { return presentedFrameCount; }
@Override
public final int getVideoPTS() { return video_pts_last; }
@@ -267,20 +278,21 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
}
+ @Override
public final State getState() { return state; }
+ @Override
public final State play() {
synchronized( stateLock ) {
switch( state ) {
case Paused:
if( playImpl() ) {
- // FIXME
resetAudioVideoPTS();
if( null != audioSink ) {
audioSink.play(); // cont. w/ new data
- }
- resumeFramePusher();
- state = State.Playing;
+ }
+ streamWorker.doResume();
+ changeState(0, State.Playing);
}
default:
}
@@ -290,15 +302,20 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
protected abstract boolean playImpl();
+ @Override
public final State pause() {
+ return pauseImpl(0);
+ }
+ private final State pauseImpl(int event_mask) {
synchronized( stateLock ) {
if( State.Playing == state ) {
+ event_mask = addStateEventMask(event_mask, GLMediaPlayer.State.Paused);
state = State.Paused;
- // FIXME
- pauseFramePusher();
+ streamWorker.doPause();
if( null != audioSink ) {
audioSink.pause();
}
+ attributesUpdated( event_mask );
if( !pauseImpl() ) {
play();
}
@@ -309,6 +326,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
protected abstract boolean pauseImpl();
+ @Override
public final int seek(int msec) {
synchronized( stateLock ) {
final int pts1;
@@ -317,14 +335,14 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
case Paused:
final State _state = state;
state = State.Paused;
- // FIXME
- pauseFramePusher();
+ streamWorker.doPause();
pts1 = seekImpl(msec);
resetAllAudioVideoSync();
if( null != audioSink && State.Playing == _state ) {
audioSink.play(); // cont. w/ new data
}
- resumeFramePusher();
+ System.err.println("SEEK XXX: "+getPerfString());
+ streamWorker.doResume();
state = _state;
break;
default:
@@ -378,41 +396,31 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
@Override
- public final State initGLStream(GL gl, int reqTextureCount, URI streamLoc, int vid, int aid) throws IllegalStateException, GLException, IOException {
+ public final void initStream(URI streamLoc, int vid, int aid, int reqTextureCount) throws IllegalStateException, IllegalArgumentException {
synchronized( stateLock ) {
if(State.Uninitialized != state) {
- throw new IllegalStateException("Instance not in state "+State.Uninitialized+", but "+state+", "+this);
+ throw new IllegalStateException("Instance not unintialized: "+this);
+ }
+ if(null == streamLoc) {
+ throw new IllegalArgumentException("streamLock is null");
+ }
+ if( STREAM_ID_NONE != vid ) {
+ textureCount = validateTextureCount(reqTextureCount);
+ if( textureCount < TEXTURE_COUNT_MIN ) {
+ throw new InternalError("Validated texture count < "+TEXTURE_COUNT_MIN+": "+textureCount);
+ }
+ } else {
+ textureCount = 0;
}
decodedFrameCount = 0;
presentedFrameCount = 0;
- displayedFrameCount = 0;
+ displayedFrameCount = 0;
this.streamLoc = streamLoc;
+ this.vid = vid;
+ this.aid = aid;
if (this.streamLoc != null) {
- try {
- if( null != gl ) {
- removeAllTextureFrames(gl);
- textureCount = validateTextureCount(reqTextureCount);
- if( textureCount < TEXTURE_COUNT_DEFAULT ) {
- throw new InternalError("Validated texture count < "+TEXTURE_COUNT_DEFAULT+": "+textureCount);
- }
- initGLStreamImpl(gl, vid, aid); // also initializes width, height, .. etc
- videoFramesFree = new SyncedRingbuffer(createTexFrames(gl, textureCount), true /* full */);
- if( TEXTURE_COUNT_DEFAULT < textureCount ) {
- videoFramesDecoded = new SyncedRingbuffer(new TextureFrame[textureCount], false /* full */);
- framePusher = new FramePusher(gl);
- framePusher.doStart();
- } else {
- videoFramesDecoded = null;
- }
- lastFrame = videoFramesFree.getBlocking(false /* clearRef */ );
- state = State.Paused;
- }
- return state;
- } catch (Throwable t) {
- throw new GLException("Error initializing GL resources", t);
- }
+ streamWorker = new StreamWorker();
}
- return state;
}
}
/**
@@ -428,19 +436,62 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
* @see #acodec
* @see #vcodec
*/
- protected abstract void initGLStreamImpl(GL gl, int vid, int aid) throws IOException;
+ protected abstract void initStreamImpl(int vid, int aid) throws Exception;
+
+ @Override
+ public final StreamException getStreamException() {
+ synchronized( stateLock ) {
+ if( null != streamWorker ) {
+ return streamWorker.getStreamErr();
+ } else {
+ return null;
+ }
+ }
+ }
+
+ @Override
+ public final void initGL(GL gl) throws IllegalStateException, StreamException, GLException {
+ synchronized( stateLock ) {
+ checkStreamInit();
+ final StreamException streamInitErr = streamWorker.getStreamErr();
+ if( null != streamInitErr ) {
+ streamWorker = null;
+ destroy(null);
+ throw streamInitErr;
+ }
+ try {
+ if( STREAM_ID_NONE != vid ) {
+ removeAllTextureFrames(gl);
+ initGLImpl(gl);
+ videoFramesOrig = createTexFrames(gl, textureCount);
+ videoFramesFree = new LFRingbuffer(videoFramesOrig, rbAllocTextureFrameArray);
+ videoFramesDecoded = new LFRingbuffer(textureCount, rbAllocTextureFrameArray);
+ lastFrame = videoFramesFree.getBlocking( );
+ streamWorker.initGL(gl);
+ } else {
+ initGLImpl(null);
+ setTextureFormat(-1, -1);
+ setTextureType(-1);
+ videoFramesFree = null;
+ videoFramesDecoded = null;
+ lastFrame = null;
+ }
+ changeState(0, State.Paused);
+ } catch (Throwable t) {
+ throw new GLException("Error initializing GL resources", t);
+ }
+ }
+ }
+ protected abstract void initGLImpl(GL gl) throws IOException, GLException;
/**
* Returns the validated number of textures to be handled.
*
- * Default is 2 textures w/o threading, last texture and the decoding texture.
- *
- *
- * > 2 textures is used for threaded decoding, a minimum of 4 textures seems reasonable in this case.
+ * Default is {@link #TEXTURE_COUNT_MIN} textures, last texture and the decoding texture.
*
*/
protected int validateTextureCount(int desiredTextureCount) {
- return TEXTURE_COUNT_DEFAULT;
+ return desiredTextureCount < TEXTURE_COUNT_MIN ? TEXTURE_COUNT_MIN : desiredTextureCount;
}
private final TextureFrame[] createTexFrames(GL gl, final int count) {
@@ -512,42 +563,43 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
@Override
public final TextureFrame getLastTexture() throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
+ if( State.Paused != state && State.Playing != state ) {
+ throw new IllegalStateException("Instance not paused or playing: "+this);
}
return lastFrame;
}
private final void removeAllTextureFrames(GL gl) {
- if( null != videoFramesFree ) {
- final TextureFrame[] texFrames = videoFramesFree.getArray();
+ if( null != videoFramesOrig ) {
+ final TextureFrame[] texFrames = videoFramesOrig;
+ videoFramesOrig = null;
videoFramesFree = null;
videoFramesDecoded = null;
lastFrame = null;
for(int i=0; i Clear TexFrame["+i+"]: "+frame+" -> null");
}
}
- textureCount=0;
}
protected TextureFrame cachedFrame = null;
protected long lastTimeMillis = 0;
@Override
- public final TextureFrame getNextTexture(GL gl, boolean blocking) throws IllegalStateException {
+ public final TextureFrame getNextTexture(GL gl) throws IllegalStateException {
synchronized( stateLock ) {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
+ if( State.Paused != state && State.Playing != state ) {
+ throw new IllegalStateException("Instance not paused or playing: "+this);
}
if(State.Playing == state) {
TextureFrame nextFrame = null;
- boolean ok = true;
boolean dropFrame = false;
try {
do {
@@ -561,23 +613,16 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
nextFrame = cachedFrame;
cachedFrame = null;
presentedFrameCount--;
- currentTimeMillis = Platform.currentTimeMillis();
- } else if( TEXTURE_COUNT_DEFAULT < textureCount ) {
- nextFrame = videoFramesDecoded.getBlocking(false /* clearRef */ );
- currentTimeMillis = Platform.currentTimeMillis();
- } else {
- nextFrame = videoFramesFree.getBlocking(false /* clearRef */ );
- nextFrame.setPTS( TextureFrame.INVALID_PTS ); // mark invalid until processed!
- ok = getNextTextureImpl(gl, nextFrame, blocking, true /* issuePreAndPost */);
- currentTimeMillis = Platform.currentTimeMillis();
- if( ok ) {
- newFrameAvailable(nextFrame, currentTimeMillis);
- }
+ } else if( STREAM_ID_NONE != vid ) {
+ nextFrame = videoFramesDecoded.getBlocking();
}
- if( ok ) {
+ currentTimeMillis = Platform.currentTimeMillis();
+ if( null != nextFrame ) {
presentedFrameCount++;
final int video_pts = nextFrame.getPTS();
- if( video_pts != TextureFrame.INVALID_PTS ) {
+ if( video_pts == TextureFrame.END_OF_STREAM_PTS ) {
+ pauseImpl(GLMediaEventListener.EVENT_CHANGE_EOS);
+ } else if( video_pts != TextureFrame.INVALID_PTS ) {
final int audio_pts = getAudioPTSImpl();
final int audio_scr = (int) ( ( currentTimeMillis - audio_scr_t0 ) * playSpeed );
final int d_apts;
@@ -613,6 +658,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
final int dt = (int) ( video_dpts_avg_diff / playSpeed + 0.5f );
// final int dt = (int) ( d_vpts / playSpeed + 0.5f );
// final int dt = (int) ( d_avpts / playSpeed + 0.5f );
+ final TextureFrame _nextFrame = nextFrame;
if( dt > maxVideoDelay ) {
cachedFrame = nextFrame;
nextFrame = null;
@@ -625,7 +671,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
getPerfStringImpl( video_scr, video_pts, d_vpts,
audio_scr, audio_pts, d_apts,
video_dpts_avg_diff ) +
- ", avg dpy-fps "+avg_dpy_duration+" ms/f, maxD "+maxVideoDelay+" ms, "+nextFrame+", playCached " + playCached + ", dropFrame "+dropFrame);
+ ", avg dpy-fps "+avg_dpy_duration+" ms/f, maxD "+maxVideoDelay+" ms, "+_nextFrame+", playCached " + playCached + ", dropFrame "+dropFrame);
}
}
} else if( DEBUG ) {
@@ -640,14 +686,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
lastTimeMillis = currentTimeMillis;
} while( dropFrame );
} catch (InterruptedException e) {
- ok = false;
e.printStackTrace();
- } finally {
- if( !ok && null != nextFrame ) { // put back
- if( !videoFramesFree.put(nextFrame) ) {
- throw new InternalError("XXX: free "+videoFramesFree+", decoded "+videoFramesDecoded+", "+GLMediaPlayerImpl.this);
- }
- }
}
}
displayedFrameCount++;
@@ -656,8 +695,13 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
protected void preNextTextureImpl(GL gl) {}
protected void postNextTextureImpl(GL gl) {}
- protected abstract boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking, boolean issuePreAndPost);
- protected boolean syncAVRequired() { return false; }
+ /**
+ * Implementation shall care of OpenGL synchronization as required, e.g. glFinish()/glFlush()!
+ * @param gl
+ * @param nextFrame
+ * @return
+ */
+ protected abstract boolean getNextTextureImpl(GL gl, TextureFrame nextFrame);
/**
* {@inheritDoc}
@@ -667,7 +711,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
*
*
* Implementations using an {@link AudioSink} shall write it's instance to {@link #audioSink}
- * from within their {@link #initGLStreamImpl(GL, int, int)} implementation.
+ * from within their {@link #initStreamImpl(int, int)} implementation.
*