From c200045aa661cf82474c2b3c1db0ac69db40452a Mon Sep 17 00:00:00 2001
From: Sven Gothel
Date: Fri, 16 Aug 2013 20:18:36 +0200
Subject: GLMediaPlayer Multithreaded Decoding: GLMediaPlayer* (Part-4) - WIP
- Use Platform.currentTimeMillis() for accurate timing!
- GLMediaPlayer / GLMediaPlayerImpl
- Add DEBUG_NATIVE property jogl.debug.GLMediaPlayer.Native
for verbose impl. messages, i.e. ffmpeg/libav
- Add 'synchronization' section in GLMediaPlayer API doc (WIP)
- Use passive non-blocking video synchronization,
i.e. repeat frames instead of 'sleep'.
    Thanks to Xerxes's suggestion.
- Add flushing of cached decoded frames,
allowing to remove complicated 'videoSCR_reset_latch'
- FramePusher (threaded decoding):
- Always create a shared context!
- Release context while pausing
    - Pre/post 'getNextTextureImpl()' actions are performed only
      at makeCurrent/release time.
- newFrameAvailable(..) signal after decoded frame is enqueued
- FFMPEGDynamicLibraryBundleInfo
- Bind add. functions of libavcodec:
+ "av_init_packet",
+ "av_new_packet",
+ "av_destruct_packet",
- Bind add. functions of libavformat:
+ "avformat_seek_file",
+ "av_read_play",
+ "av_read_pause",
- DEBUG property := FFMPEGMediaPlayer.DEBUG || DynamicLibraryBundleInfo.DEBUG;
- FFMPEGMediaPlayer
- Use libavformat's 'av_read_play()' and 'av_read_pause()',
which may get utilized for network streams, e.g. RTSP
- getNextTextureImpl(..):
- Fix retry loop
- Use postNextTextureImpl/preNextTextureImpl if desired (PSM)
- Native:
- Use fixed my_av_q2i32(..) macro (again)
- Use INVALID_PTS marker (synced w/ Java code)
- DEBUG: Dump more detailed frame information
- TODO: Consider passing frame_delay, especially for repeated frames!
- Tests (MovieSimple, MovieCube):
- Refine KeyEvents control for seek and speed.
- TODO:
- Proper audio clock calculation - difficult w/ OpenAL !
- Video / Audio sync:
- seek !
- streams w/ very async A/V frames
- Test Streams:
- Five-minute-sync-test.mp4
- Audio-Video-Sync-Test-Calibration-23.98fps-24fps.mp4
- sound_in_sync_test.mp4
- big_buck_bunny_1080p_surround.avi
---
.../android/av/AndroidGLMediaPlayerAPI14.java | 2 +-
.../jogamp/opengl/openal/av/ALAudioSink.java | 18 +-
.../jogamp/opengl/util/av/GLMediaPlayerImpl.java | 354 ++++++++++-----------
.../jogamp/opengl/util/av/NullGLMediaPlayer.java | 9 +-
.../av/impl/FFMPEGDynamicLibraryBundleInfo.java | 22 +-
.../opengl/util/av/impl/FFMPEGMediaPlayer.java | 59 ++--
.../opengl/util/av/impl/OMXGLMediaPlayer.java | 2 +-
7 files changed, 245 insertions(+), 221 deletions(-)
(limited to 'src/jogl/classes/jogamp/opengl')
diff --git a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
index e14642c34..86e6bc121 100644
--- a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
+++ b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
@@ -222,7 +222,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
}
@Override
- protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking) {
+ protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking, boolean issuePreAndPost) {
if(null != stex && null != mp) {
final SurfaceTextureFrame nextSFrame = (SurfaceTextureFrame) nextFrame;
final Surface nextSurface = nextSFrame.getSurface();
diff --git a/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java b/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
index 5783c32f1..217ab2954 100644
--- a/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
+++ b/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
@@ -80,7 +80,7 @@ public class ALAudioSink implements AudioSink {
private SyncedRingbuffer alBufferAvail = null;
private SyncedRingbuffer alBufferPlaying = null;
private volatile int alBufferBytesQueued = 0;
- private volatile int ptsPlaying = 0;
+ private volatile int playingPTS = AudioFrame.INVALID_PTS;
private volatile int enqueuedFrameCount;
private int[] alSource = null;
@@ -207,11 +207,11 @@ public class ALAudioSink implements AudioSink {
return "ALAudioSink[init "+initialized+", playRequested "+playRequested+", device "+deviceSpecifier+", ctx "+toHexString(ctxHash)+", alSource "+alSrcName+
", chosen "+chosenFormat+", alFormat "+toHexString(alFormat)+
", playSpeed "+playSpeed+", buffers[total "+alBuffersLen+", avail "+alBufferAvail.size()+", "+
- "queued["+alBufferPlaying.size()+", apts "+ptsPlaying+", "+getQueuedTime() + " ms, " + alBufferBytesQueued+" bytes]";
+ "queued["+alBufferPlaying.size()+", apts "+getPTS()+", "+getQueuedTime() + " ms, " + alBufferBytesQueued+" bytes]";
}
public final String getPerfString() {
final int alBuffersLen = null != alBuffers ? alBuffers.length : 0;
- return "Play [buffer "+alBufferPlaying.size()+"/"+alBuffersLen+", apts "+ptsPlaying+", "+getQueuedTime() + " ms, " + alBufferBytesQueued+" bytes]";
+ return "Play [buffer "+alBufferPlaying.size()+"/"+alBuffersLen+", apts "+getPTS()+", "+getQueuedTime() + " ms, " + alBufferBytesQueued+" bytes]";
}
@Override
@@ -289,9 +289,9 @@ public class ALAudioSink implements AudioSink {
t.printStackTrace();
}
}
- alBufferAvail.clear(true);
+ alBufferAvail.clear();
alBufferAvail = null;
- alBufferPlaying.clear(true);
+ alBufferPlaying.clear();
alBufferPlaying = null;
alBufferBytesQueued = 0;
alBuffers = null;
@@ -434,7 +434,11 @@ public class ALAudioSink implements AudioSink {
}
final int dequeuedBufferCount = dequeueBuffer( false /* all */, wait );
final ActiveBuffer currentBuffer = alBufferPlaying.peek();
- ptsPlaying = null != currentBuffer ? currentBuffer.pts : audioFrame.pts;
+ if( null != currentBuffer ) {
+ playingPTS = currentBuffer.pts;
+ } else {
+ playingPTS = audioFrame.pts;
+ }
if( DEBUG ) {
System.err.println(getThreadName()+": ALAudioSink: Write "+audioFrame.pts+", "+getQueuedTimeImpl(audioFrame.dataSize)+" ms, dequeued "+dequeuedBufferCount+", wait "+wait+", "+getPerfString());
}
@@ -652,5 +656,5 @@ public class ALAudioSink implements AudioSink {
}
@Override
- public final int getPTS() { return ptsPlaying; }
+ public final int getPTS() { return playingPTS; }
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
index c1cfc0d95..85b599c0e 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
@@ -42,7 +42,9 @@ import javax.media.opengl.GLES2;
import javax.media.opengl.GLException;
import javax.media.opengl.GLProfile;
+import com.jogamp.common.os.Platform;
import com.jogamp.opengl.util.av.AudioSink;
+import com.jogamp.opengl.util.av.AudioSink.AudioFrame;
import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureSequence;
@@ -92,9 +94,10 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected int width = 0;
/** Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
protected int height = 0;
- /** Video fps. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Video avg. fps. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
protected float fps = 0;
- protected int frame_period = 0;
+ /** Video avg. frame duration in ms. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ protected float frame_duration = 0f;
/** Stream bps. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
protected int bps_stream = 0;
/** Video bps. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
@@ -114,6 +117,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected volatile int decodedFrameCount = 0;
protected int presentedFrameCount = 0;
+ protected int displayedFrameCount = 0;
protected volatile int video_pts_last = 0;
/** See {@link #getAudioSink()}. Set by implementation if used from within {@link #initGLStreamImpl(GL, int, int)}! */
@@ -139,17 +143,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
/** Maximum valid video pts diff. */
private static final int VIDEO_DPTS_MAX = 5000; // 5s max diff
/** Trigger video PTS reset with given cause as bitfield. */
- private volatile int videoSCR_reset = 0;
+ private boolean videoSCR_reset = false;
- private final boolean isSCRCause(int bit) { return 0 != ( bit & videoSCR_reset); }
- /** SCR reset due to: Start, Resume, Seek, .. */
- private static final int SCR_RESET_FORCE = 1 << 0;
- /** SCR reset due to: PlaySpeed */
- private static final int SCR_RESET_SPEED = 1 << 1;
-
- /** Latched video PTS reset, to wait until valid pts after invalidation of cached ones. Currently [1..{@link #VIDEO_DPTS_NUM}] frames. */
- private int videoSCR_reset_latch = 0;
-
protected SyncedRingbuffer videoFramesFree = null;
protected SyncedRingbuffer videoFramesDecoded = null;
protected volatile TextureFrame lastFrame = null;
@@ -279,16 +274,17 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
switch( state ) {
case Paused:
if( playImpl() ) {
- resetAudioVideoSCR(SCR_RESET_FORCE);
- resumeFramePusher();
+ // FIXME
+ resetAudioVideoPTS();
if( null != audioSink ) {
- audioSink.play();
- }
+ audioSink.play(); // cont. w/ new data
+ }
+ resumeFramePusher();
state = State.Playing;
}
default:
}
- if(DEBUG) { System.err.println("Start: "+toString()); }
+ if(DEBUG) { System.err.println("Play: "+toString()); }
return state;
}
}
@@ -297,16 +293,15 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
public final State pause() {
synchronized( stateLock ) {
if( State.Playing == state ) {
- State _state = state;
state = State.Paused;
- if( pauseImpl() ) {
- _state = State.Paused;
- pauseFramePusher();
- if( null != audioSink ) {
- audioSink.pause();
- }
+ // FIXME
+ pauseFramePusher();
+ if( null != audioSink ) {
+ audioSink.pause();
+ }
+ if( !pauseImpl() ) {
+ play();
}
- state = _state;
}
if(DEBUG) { System.err.println("Pause: "+toString()); }
return state;
@@ -322,14 +317,12 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
case Paused:
final State _state = state;
state = State.Paused;
+ // FIXME
pauseFramePusher();
- resetAudioVideoSCR(SCR_RESET_FORCE);
pts1 = seekImpl(msec);
- if( null != audioSink ) {
- audioSink.flush();
- if( State.Playing == _state ) {
- audioSink.play(); // cont. w/ new data
- }
+ resetAllAudioVideoSync();
+ if( null != audioSink && State.Playing == _state ) {
+ audioSink.play(); // cont. w/ new data
}
resumeFramePusher();
state = _state;
@@ -358,7 +351,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
rate = 1.0f;
}
if( setPlaySpeedImpl(rate) ) {
- resetAudioVideoSCR(SCR_RESET_SPEED);
+ resetAudioVideoPTS();
playSpeed = rate;
if(DEBUG) { System.err.println("SetPlaySpeed: "+toString()); }
res = true;
@@ -392,6 +385,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
decodedFrameCount = 0;
presentedFrameCount = 0;
+ displayedFrameCount = 0;
this.urlConn = urlConn;
if (this.urlConn != null) {
try {
@@ -405,7 +399,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
videoFramesFree = new SyncedRingbuffer(createTexFrames(gl, textureCount), true /* full */);
if( TEXTURE_COUNT_DEFAULT < textureCount ) {
videoFramesDecoded = new SyncedRingbuffer(new TextureFrame[textureCount], false /* full */);
- framePusher = new FramePusher(gl, requiresOffthreadGLCtx());
+ framePusher = new FramePusher(gl);
framePusher.doStart();
} else {
videoFramesDecoded = null;
@@ -448,7 +442,6 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected int validateTextureCount(int desiredTextureCount) {
return TEXTURE_COUNT_DEFAULT;
}
- protected boolean requiresOffthreadGLCtx() { return false; }
private final TextureFrame[] createTexFrames(GL gl, final int count) {
final int[] texNames = new int[count];
@@ -543,6 +536,9 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
textureCount=0;
}
+ protected TextureFrame cachedFrame = null;
+ protected long lastTimeMillis = 0;
+
@Override
public final TextureFrame getNextTexture(GL gl, boolean blocking) throws IllegalStateException {
synchronized( stateLock ) {
@@ -554,68 +550,95 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
boolean ok = true;
boolean dropFrame = false;
try {
- do {
- if( TEXTURE_COUNT_DEFAULT < textureCount ) {
+ do {
+ final long currentTimeMillis;
+ final boolean playCached = null != cachedFrame;
+ if( dropFrame ) {
+ presentedFrameCount--;
+ dropFrame = false;
+ }
+ if( playCached ) {
+ nextFrame = cachedFrame;
+ cachedFrame = null;
+ presentedFrameCount--;
+ currentTimeMillis = Platform.currentTimeMillis();
+ } else if( TEXTURE_COUNT_DEFAULT < textureCount ) {
nextFrame = videoFramesDecoded.getBlocking(false /* clearRef */ );
+ currentTimeMillis = Platform.currentTimeMillis();
} else {
nextFrame = videoFramesFree.getBlocking(false /* clearRef */ );
- if( getNextTextureImpl(gl, nextFrame, blocking) ) {
- newFrameAvailable(nextFrame);
- } else {
- ok = false;
+ nextFrame.setPTS( TextureFrame.INVALID_PTS ); // mark invalid until processed!
+ ok = getNextTextureImpl(gl, nextFrame, blocking, true /* issuePreAndPost */);
+ currentTimeMillis = Platform.currentTimeMillis();
+ if( ok ) {
+ newFrameAvailable(nextFrame, currentTimeMillis);
}
}
+ if( DEBUG ) {
+ System.err.println("> "+currentTimeMillis+", d "+(currentTimeMillis-lastTimeMillis)+", playCached "+playCached);
+ }
if( ok ) {
presentedFrameCount++;
- final int video_pts;
- if( 0 != videoSCR_reset ) {
- if( isSCRCause(SCR_RESET_FORCE) ) {
- videoSCR_reset_latch = VIDEO_DPTS_NUM / 2;
- resetVideoDPTS();
- resetAllVideoPTS();
+ final int video_pts = nextFrame.getPTS();
+ if( video_pts != TextureFrame.INVALID_PTS ) {
+ lastTimeMillis = currentTimeMillis;
+
+ final int audio_pts = getAudioPTSImpl();
+ final int audio_scr = (int) ( ( currentTimeMillis - audio_scr_t0 ) * playSpeed );
+ final int d_apts;
+ if( audio_pts != AudioFrame.INVALID_PTS ) {
+ d_apts = audio_pts - audio_scr;
} else {
- // SCR_RESET_SPEED
- videoSCR_reset_latch = 1;
+ d_apts = 0;
}
- videoSCR_reset = 0;
- video_pts = TextureFrame.INVALID_PTS;
- } else {
- video_pts = nextFrame.getPTS();
- }
- if( video_pts != TextureFrame.INVALID_PTS ) {
+
final int frame_period_last = video_pts - video_pts_last; // rendering loop interrupted ?
- if( videoSCR_reset_latch > 0 || frame_period_last > frame_period*10 ) {
- if( videoSCR_reset_latch > 0 ) {
- videoSCR_reset_latch--;
- }
- setFirstVideoPTS2SCR( video_pts );
+ if( videoSCR_reset || frame_period_last > frame_duration*10 ) {
+ videoSCR_reset = false;
+ video_scr_t0 = currentTimeMillis;
+ video_scr_pts = video_pts;
}
- final int scr_pts = video_scr_pts +
- (int) ( ( System.currentTimeMillis() - video_scr_t0 ) * playSpeed );
- final int d_vpts = video_pts - scr_pts;
+ final int video_scr = video_scr_pts + (int) ( ( currentTimeMillis - video_scr_t0 ) * playSpeed );
+ final int d_vpts = video_pts - video_scr;
+ // final int d_avpts = d_vpts - d_apts;
if( -VIDEO_DPTS_MAX > d_vpts || d_vpts > VIDEO_DPTS_MAX ) {
+ // if( -VIDEO_DPTS_MAX > d_avpts || d_avpts > VIDEO_DPTS_MAX ) {
if( DEBUG ) {
- System.err.println( getPerfStringImpl( scr_pts, video_pts, d_vpts, 0 ) );
+ System.err.println( "AV*: "+getPerfStringImpl( video_scr, video_pts, d_vpts, audio_scr, audio_pts, d_apts, 0 ) + ", "+nextFrame+", playCached " + playCached+ ", dropFrame "+dropFrame);
}
} else {
+ final int dpy_den = displayedFrameCount > 0 ? displayedFrameCount : 1;
+ final int avg_dpy_duration = ( (int) ( currentTimeMillis - video_scr_t0 ) ) / dpy_den ; // ms/f
+ final int maxVideoDelay = Math.min(avg_dpy_duration, MAXIMUM_VIDEO_ASYNC);
video_dpts_count++;
+ // video_dpts_cum = d_avpts + VIDEO_DPTS_COEFF * video_dpts_cum;
video_dpts_cum = d_vpts + VIDEO_DPTS_COEFF * video_dpts_cum;
- final int video_dpts_avg_diff = getVideoDPTSAvg();
- if( DEBUG ) {
- System.err.println( getPerfStringImpl( scr_pts, video_pts, d_vpts, video_dpts_avg_diff ) );
- }
- if( blocking && syncAVRequired() ) {
- if( !syncAV( (int) ( video_dpts_avg_diff / playSpeed + 0.5f ) ) ) {
- resetVideoDPTS();
- dropFrame = true;
- }
+ final int video_dpts_avg_diff = video_dpts_count >= VIDEO_DPTS_NUM ? getVideoDPTSAvg() : 0;
+ final int dt = (int) ( video_dpts_avg_diff / playSpeed + 0.5f );
+ // final int dt = (int) ( d_vpts / playSpeed + 0.5f );
+ // final int dt = (int) ( d_avpts / playSpeed + 0.5f );
+ if( dt > maxVideoDelay ) {
+ cachedFrame = nextFrame;
+ nextFrame = null;
+ } else if ( dt < -maxVideoDelay ) {
+ dropFrame = true;
}
video_pts_last = video_pts;
+ if( DEBUG ) {
+ System.err.println( "AV_: "+getPerfStringImpl( video_scr, video_pts, d_vpts,
+ audio_scr, audio_pts, d_apts,
+ video_dpts_avg_diff ) +
+ ", avg dpy-fps "+avg_dpy_duration+" ms/f, maxD "+maxVideoDelay+" ms, "+nextFrame+", playCached " + playCached + ", dropFrame "+dropFrame);
+ }
}
+ } else if( DEBUG ) {
+ System.err.println("Invalid PTS: "+nextFrame);
+ }
+ if( null != nextFrame ) {
+ final TextureFrame _lastFrame = lastFrame;
+ lastFrame = nextFrame;
+ videoFramesFree.putBlocking(_lastFrame);
}
- final TextureFrame _lastFrame = lastFrame;
- lastFrame = nextFrame;
- videoFramesFree.putBlocking(_lastFrame);
}
} while( dropFrame );
} catch (InterruptedException e) {
@@ -623,14 +646,19 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
e.printStackTrace();
} finally {
if( !ok && null != nextFrame ) { // put back
- videoFramesFree.put(nextFrame);
+ if( !videoFramesFree.put(nextFrame) ) {
+ throw new InternalError("XXX: free "+videoFramesFree+", decoded "+videoFramesDecoded+", "+GLMediaPlayerImpl.this);
+ }
}
}
}
+ displayedFrameCount++;
return lastFrame;
}
}
- protected abstract boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking);
+ protected void preNextTextureImpl(GL gl) {}
+ protected void postNextTextureImpl(GL gl) {}
+ protected abstract boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking, boolean issuePreAndPost);
protected boolean syncAVRequired() { return false; }
/**
@@ -654,95 +682,49 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
*/
protected void setFirstAudioPTS2SCR(int pts) {
if( audioSCR_reset ) {
- audio_scr_t0 = System.currentTimeMillis() - pts;
+ audio_scr_t0 = Platform.currentTimeMillis() - pts;
audioSCR_reset = false;
}
}
- private void setFirstVideoPTS2SCR(int pts) {
- // video_scr_t0 = System.currentTimeMillis() - pts;
- video_scr_t0 = System.currentTimeMillis();
- video_scr_pts = pts;
- }
- private void resetAllVideoPTS() {
+ private void flushAllVideoFrames() {
if( null != videoFramesFree ) {
- final TextureFrame[] texFrames = videoFramesFree.getArray();
- for(int i=0; i
- * https://en.wikipedia.org/wiki/Audio_to_video_synchronization
- *
- * d_av = v_pts - a_pts;
- *
- *
- *
- * Recommendation of audio/video pts time lead/lag at production:
- *
- * - Overall: +40ms and -60ms audio ahead video / audio after video
- * - Each stage: +5ms and -15ms. audio ahead video / audio after video
- *
- *
- *
- * Recommendation of av pts time lead/lag at presentation:
- *
- * - TV: +15ms and -45ms. audio ahead video / audio after video.
- * - Film: +22ms and -22ms. audio ahead video / audio after video.
- *
- *
- *
- * Maybe implemented as follows:
- *
- * d_av = vpts - apts;
- * d_av < -22: audio after video == video ahead audio -> drop
- * d_av > 22: audio ahead video == video after audio -> sleep(d_av - 10)
- *
- *
- *
- * Returns true if audio is ahead of video, otherwise false (video is ahead of audio).
- * In case of the latter (false), the video frame shall be dropped!
- *
- * @param frame
- * @return true if audio is ahead of video, otherwise false (video is ahead of audio)
- */
- protected boolean syncAV(int d_vpts) {
- if( d_vpts > 22 ) {
- if( DEBUG ) {
- System.err.println("V (sleep): "+(d_vpts - 22 / 2)+" ms");
- }
- try {
- Thread.sleep( d_vpts - 22 / 2 );
- } catch (InterruptedException e) { }
- }
- return true;
+ private final int getVideoDPTSAvg() {
+ return (int) ( video_dpts_cum * (1.0f - VIDEO_DPTS_COEFF) + 0.5f );
}
- private final void newFrameAvailable(TextureFrame frame) {
- decodedFrameCount++;
+ private final void newFrameAvailable(TextureFrame frame, long currentTimeMillis) {
+ decodedFrameCount++;
+ if( 0 == frame.getDuration() ) { // patch frame duration if not set already
+ frame.setDuration( (int) frame_duration );
+ }
synchronized(eventListenersLock) {
for(Iterator i = eventListeners.iterator(); i.hasNext(); ) {
- i.next().newFrameAvailable(this, frame, System.currentTimeMillis());
+ i.next().newFrameAvailable(this, frame, currentTimeMillis);
}
}
}
@@ -759,12 +741,9 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
private GLDrawable dummyDrawable = null;
private GLContext sharedGLCtx = null;
- FramePusher(GL gl, boolean createSharedCtx) {
+ FramePusher(GL gl) {
setDaemon(true);
- this.gl = createSharedCtx ? createSharedGL(gl) : gl;
- }
-
- private GL createSharedGL(GL gl) {
+
final GLContext glCtx = gl.getContext();
final boolean glCtxCurrent = glCtx.isCurrent();
final GLProfile glp = gl.getGLProfile();
@@ -779,8 +758,9 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
} else {
sharedGLCtx.release();
}
- return sharedGLCtx.getGL();
+ this.gl = sharedGLCtx.getGL();
}
+
private void makeCurrent(GLContext ctx) {
if( GLContext.CONTEXT_NOT_CURRENT >= ctx.makeCurrent() ) {
throw new GLException("Couldn't make ctx current: "+ctx);
@@ -789,6 +769,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
private void destroySharedGL() {
if( null != sharedGLCtx ) {
+ postNextTextureImpl(gl);
if( sharedGLCtx.isCreated() ) {
// Catch dispose GLExceptions by GLEventListener, just 'print' them
// so we can continue with the destruction.
@@ -870,9 +851,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
FramePusherInstanceId++;
synchronized ( this ) {
- if( null != sharedGLCtx ) {
- makeCurrent( sharedGLCtx );
- }
+ makeCurrent( sharedGLCtx );
+ preNextTextureImpl(gl);
isRunning = true;
this.notify(); // wake-up doStart()
}
@@ -880,10 +860,13 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
while( !shallStop ){
if( shallPause ) {
synchronized ( this ) {
+ postNextTextureImpl(gl);
+ sharedGLCtx.release();
while( shallPause && !shallStop ) {
isActive = false;
this.notify(); // wake-up doPause()
try {
+ System.err.println("!!! PAUSE ON"); // FIXME
this.wait(); // wait until resumed
} catch (InterruptedException e) {
if( !shallPause ) {
@@ -891,6 +874,9 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
}
}
+ makeCurrent(sharedGLCtx);
+ preNextTextureImpl(gl);
+ System.err.println("!!! PAUSE OFF"); // FIXME
isActive = true;
this.notify(); // wake-up doResume()
}
@@ -903,15 +889,14 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
nextFrame = videoFramesFree.getBlocking(false /* clearRef */ );
isBlocked = false;
nextFrame.setPTS( TextureFrame.INVALID_PTS ); // mark invalid until processed!
- if( getNextTextureImpl(gl, nextFrame, true) ) {
+ if( getNextTextureImpl(gl, nextFrame, true, false /* issuePreAndPost */) ) {
// gl.glFinish();
gl.glFlush(); // even better: sync object!
if( !videoFramesDecoded.put(nextFrame) ) {
- throw new InternalError("XXX: "+GLMediaPlayerImpl.this);
+ throw new InternalError("XXX: free "+videoFramesFree+", decoded "+videoFramesDecoded+", "+GLMediaPlayerImpl.this);
}
- final TextureFrame _nextFrame = nextFrame;
+ newFrameAvailable(nextFrame, Platform.currentTimeMillis());
nextFrame = null;
- newFrameAvailable(_nextFrame);
}
} catch (InterruptedException e) {
isBlocked = false;
@@ -927,6 +912,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
}
}
+ postNextTextureImpl(gl);
destroySharedGL();
synchronized ( this ) {
isRunning = false;
@@ -975,7 +961,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
if( this.fps != fps ) {
event_mask |= GLMediaEventListener.EVENT_CHANGE_FPS;
this.fps = fps;
- this.frame_period = (int) ( 1000f / fps + 0.5f );
+ this.frame_duration = 1000f / (float)fps;
}
if( this.bps_stream != bps_stream || this.bps_video != bps_video || this.bps_audio != bps_audio ) {
event_mask |= GLMediaEventListener.EVENT_CHANGE_BPS;
@@ -1006,7 +992,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected final void attributesUpdated(int event_mask) {
synchronized(eventListenersLock) {
for(Iterator i = eventListeners.iterator(); i.hasNext(); ) {
- i.next().attributesChanges(this, event_mask, System.currentTimeMillis());
+ i.next().attributesChanges(this, event_mask, Platform.currentTimeMillis());
}
}
}
@@ -1017,6 +1003,12 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
destroyFramePusher();
destroyImpl(gl);
removeAllTextureFrames(gl);
+ if( null != videoFramesFree ) {
+ videoFramesFree.clear();
+ }
+ if( null != videoFramesDecoded ) {
+ videoFramesDecoded.clear();
+ }
state = State.Uninitialized;
return state;
}
@@ -1094,25 +1086,28 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
final String loc = ( null != urlConn ) ? urlConn.getURL().toExternalForm() : "" ;
final int freeVideoFrames = null != videoFramesFree ? videoFramesFree.size() : 0;
final int decVideoFrames = null != videoFramesDecoded ? videoFramesDecoded.size() : 0;
- return "GLMediaPlayer["+state+", frames[p "+presentedFrameCount+", d "+decodedFrameCount+", t "+videoFrames+" ("+tt+" s)], "+
+ final int video_scr = video_scr_pts + (int) ( ( Platform.currentTimeMillis() - video_scr_t0 ) * playSpeed );
+ return "GLMediaPlayer["+state+", vSCR "+video_scr+", frames[p "+presentedFrameCount+", d "+decodedFrameCount+", t "+videoFrames+" ("+tt+" s)], "+
"speed "+playSpeed+", "+bps_stream+" bps, "+
"Texture[count "+textureCount+", free "+freeVideoFrames+", dec "+decVideoFrames+", target "+toHexString(textureTarget)+", format "+toHexString(textureFormat)+", type "+toHexString(textureType)+"], "+
- "Video[id "+vid+", <"+vcodec+">, "+width+"x"+height+", "+fps+" fps, "+bps_video+" bps], "+
+ "Video[id "+vid+", <"+vcodec+">, "+width+"x"+height+", "+fps+" fps, "+frame_duration+" fdur, "+bps_video+" bps], "+
"Audio[id "+aid+", <"+acodec+">, "+bps_audio+" bps, "+audioFrames+" frames], uri "+loc+"]";
}
@Override
public final String getPerfString() {
- final int scr_pts = video_scr_pts +
- (int) ( ( System.currentTimeMillis() - video_scr_t0 ) * playSpeed );
- final int d_vpts = video_pts_last - scr_pts;
- return getPerfStringImpl( scr_pts, video_pts_last, d_vpts, getVideoDPTSAvg() );
- }
- private final String getPerfStringImpl(final int scr_pts, final int video_pts, final int d_vpts, final int video_dpts_avg_diff) {
- final float tt = getDuration() / 1000.0f;
- final int audio_scr = (int) ( ( System.currentTimeMillis() - audio_scr_t0 ) * playSpeed );
+ final long currentTimeMillis = Platform.currentTimeMillis();
+ final int video_scr = video_scr_pts + (int) ( ( currentTimeMillis - video_scr_t0 ) * playSpeed );
+ final int d_vpts = video_pts_last - video_scr;
+ final int audio_scr = (int) ( ( currentTimeMillis - audio_scr_t0 ) * playSpeed );
final int audio_pts = getAudioPTSImpl();
final int d_apts = audio_pts - audio_scr;
+ return getPerfStringImpl( video_scr, video_pts_last, d_vpts, audio_scr, audio_pts, d_apts, getVideoDPTSAvg() );
+ }
+ private final String getPerfStringImpl(final int video_scr, final int video_pts, final int d_vpts,
+ final int audio_scr, final int audio_pts, final int d_apts,
+ final int video_dpts_avg_diff) {
+ final float tt = getDuration() / 1000.0f;
final String audioSinkInfo;
final AudioSink audioSink = getAudioSink();
if( null != audioSink ) {
@@ -1122,8 +1117,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
final int freeVideoFrames = null != videoFramesFree ? videoFramesFree.size() : 0;
final int decVideoFrames = null != videoFramesDecoded ? videoFramesDecoded.size() : 0;
- return state+", frames[p "+presentedFrameCount+", d "+decodedFrameCount+", t "+videoFrames+" ("+tt+" s)], "+
- "speed " + playSpeed+", vSCR "+scr_pts+", vpts "+video_pts+", dSCR["+d_vpts+", avrg "+video_dpts_avg_diff+"], "+
+ return state+", frames[(p "+presentedFrameCount+", d "+decodedFrameCount+") / "+videoFrames+", "+tt+" s], "+
+ "speed " + playSpeed+", dAV "+( d_vpts - d_apts )+", vSCR "+video_scr+", vpts "+video_pts+", dSCR["+d_vpts+", avrg "+video_dpts_avg_diff+"], "+
"aSCR "+audio_scr+", apts "+audio_pts+" ( "+d_apts+" ), "+audioSinkInfo+
", Texture[count "+textureCount+", free "+freeVideoFrames+", dec "+decVideoFrames+"]";
}
@@ -1163,5 +1158,4 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected static final String toHexString(int v) {
return "0x"+Integer.toHexString(v);
}
-
}
\ No newline at end of file
diff --git a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
index 5d70ca33d..ad8587e6b 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
@@ -37,6 +37,7 @@ import javax.media.opengl.GLProfile;
import jogamp.opengl.util.av.GLMediaPlayerImpl;
import com.jogamp.common.nio.Buffers;
+import com.jogamp.common.os.Platform;
import com.jogamp.common.util.IOUtil;
import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.texture.Texture;
@@ -51,7 +52,7 @@ import com.jogamp.opengl.util.texture.TextureSequence;
public class NullGLMediaPlayer extends GLMediaPlayerImpl {
private TextureData texData = null;
private int pos_ms = 0;
- private int pos_start = 0;
+ private long pos_start = 0;
public NullGLMediaPlayer() {
super();
@@ -64,7 +65,7 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl {
@Override
protected final boolean playImpl() {
- pos_start = (int)System.currentTimeMillis();
+ pos_start = Platform.currentTimeMillis();
return true;
}
@@ -81,14 +82,14 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl {
}
@Override
- protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking) {
+ protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking, boolean issuePreAndPost) {
nextFrame.setPTS( getAudioPTSImpl() );
return true;
}
@Override
protected final int getAudioPTSImpl() {
- pos_ms = (int)System.currentTimeMillis() - pos_start;
+ pos_ms = (int) ( Platform.currentTimeMillis() - pos_start );
validatePos();
return pos_ms;
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
index 3680da1a8..cf864daa2 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
@@ -55,9 +55,11 @@ import com.jogamp.common.util.RunnableExecutor;
* Tue Feb 28 12:07:53 2012 322537478b63c6bc01e640643550ff539864d790 minor 1 -> 2
*/
class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
+ private static final boolean DEBUG = FFMPEGMediaPlayer.DEBUG || DynamicLibraryBundleInfo.DEBUG;
+
private static final List glueLibNames = new ArrayList(); // none
- private static final int symbolCount = 32;
+ private static final int symbolCount = 38;
private static final String[] symbolNames = {
"avcodec_version",
"avformat_version",
@@ -71,17 +73,20 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
"avcodec_open",
"avcodec_alloc_frame",
"avcodec_default_get_buffer",
- "avcodec_default_release_buffer",
+ "avcodec_default_release_buffer",
+ "av_init_packet",
+ "av_new_packet",
+ "av_destruct_packet",
"av_free_packet",
"avcodec_decode_audio4", // 53.25.0 (opt)
"avcodec_decode_audio3", // 52.23.0
-/* 15 */ "avcodec_decode_video2", // 52.23.0
+/* 18 */ "avcodec_decode_video2", // 52.23.0
// libavutil
"av_pix_fmt_descriptors",
"av_free",
"av_get_bits_per_pixel",
-/* 19 */ "av_samples_get_buffer_size",
+/* 22 */ "av_samples_get_buffer_size",
// libavformat
"avformat_alloc_context",
@@ -93,10 +98,13 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
"av_dump_format",
"av_read_frame",
"av_seek_frame",
+ "avformat_seek_file",
+ "av_read_play",
+ "av_read_pause",
"avformat_network_init", // 53.13.0 (opt)
"avformat_network_deinit", // 53.13.0 (opt)
"avformat_find_stream_info", // 53.3.0 (opt)
-/* 32 */ "av_find_stream_info",
+/* 38 */ "av_find_stream_info",
};
// alternate symbol names
@@ -203,7 +211,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
                    for(int j=0; !ok && j<altSymbolNames.length; j++) {
                    /* NOTE(review): intermediate lines of this hunk — including its actual -/+ change —
                       were lost in HTML-to-text extraction (span eaten between "j<" and the first ">"
                       inside the string literal below); reconstructed loop header and println tail only,
                       verify against the upstream commit. */
                            System.err.println("OK: Unresolved symbol <"+symbolNames[i]+">, but has alternative <"+symbolNames[si]+">");
}
}
@@ -212,7 +220,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
System.err.println("Fail: Could not resolve symbol <"+symbolNames[i]+">: not optional, no alternatives.");
return false;
}
- } else if(true || DEBUG ) { // keep it verbose per default for now ..
+ } else if(DEBUG) {
System.err.println("OK: Unresolved optional symbol <"+symbolNames[i]+">");
}
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
index dc7ceae39..8998f689a 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
@@ -156,7 +156,6 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
protected int[] vLinesize = { 0, 0, 0 }; // per plane
protected int[] vTexWidth = { 0, 0, 0 }; // per plane
protected int texWidth, texHeight; // overall (stuffing planes in one texture)
- protected ByteBuffer texCopy;
protected String singleTexComp = "r";
protected GLPixelStorageModes psm;
@@ -174,7 +173,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
if(!available) {
throw new RuntimeException("FFMPEGMediaPlayer not available");
}
- moviePtr = createInstance0(DEBUG);
+ moviePtr = createInstance0( DEBUG_NATIVE );
if(0==moviePtr) {
throw new GLException("Couldn't create FFMPEGInstance");
}
@@ -185,8 +184,6 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
protected final int validateTextureCount(int desiredTextureCount) {
return desiredTextureCount>2 ? Math.max(4, desiredTextureCount) : 2;
}
- @Override
- protected final boolean requiresOffthreadGLCtx() { return true; }
@Override
protected final void destroyImpl(GL gl) {
@@ -320,7 +317,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
case BGRA:
texWidth = vTexWidth[0]; texHeight = height;
break;
- default: // FIXME: Add more planar formats !
+ default: // FIXME: Add more formats !
throw new RuntimeException("Unsupported pixelformat: "+vPixelFmt);
}
@@ -358,7 +355,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
signed = true;
fixedP = true;
break;
- default: // FIXME: Add more planar formats !
+ default: // FIXME: Add more formats !
throw new RuntimeException("Unsupported sampleformat: "+aSampleFmt);
}
avChosenAudioFormat = new AudioDataFormat(AudioDataType.PCM, sampleRate, sampleSize, channels, signed, fixedP, true /* littleEndian */);
@@ -427,7 +424,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
" return vec4(r, g, b, 1);\n"+
"}\n"
;
- default: // FIXME: Add more planar formats !
+ default: // FIXME: Add more formats !
return super.getTextureLookupFragmentShaderImpl();
}
}
@@ -437,7 +434,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
if(0==moviePtr) {
return false;
}
- return true;
+ return play0(moviePtr);
}
@Override
@@ -445,7 +442,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
if(0==moviePtr) {
return false;
}
- return true;
+ return pause0(moviePtr);
}
@Override
@@ -457,28 +454,41 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
}
@Override
- protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking) {
+ protected void preNextTextureImpl(GL gl) {
+ psm.setUnpackAlignment(gl, 1); // RGBA ? 4 : 1
+ }
+
+ @Override
+ protected void postNextTextureImpl(GL gl) {
+ psm.restore(gl);
+ }
+
+ @Override
+ protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking, boolean issuePreAndPost) {
if(0==moviePtr) {
throw new GLException("FFMPEG native instance null");
- }
- psm.setUnpackAlignment(gl, 1); // RGBA ? 4 : 1
- int avPTS = 0;
+ }
+ if( issuePreAndPost ) {
+ preNextTextureImpl(gl);
+ }
+ int vPTS = TextureFrame.INVALID_PTS;
try {
final Texture tex = nextFrame.getTexture();
gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit());
tex.enable(gl);
tex.bind(gl);
- /** Try decode up to 10 packets to find one containing video, i.e. vPTS > 0 */
- for(int retry=10; 0 >= avPTS && 0 < retry; retry--) {
- avPTS = readNextPacket0(moviePtr, textureTarget, textureFormat, textureType);
- retry--;
+ /** Try decode up to 10 packets to find one containing video. */
+ for(int i=0; TextureFrame.INVALID_PTS == vPTS && 10 > i; i++) {
+ vPTS = readNextPacket0(moviePtr, textureTarget, textureFormat, textureType);
}
} finally {
- psm.restore(gl);
+ if( issuePreAndPost ) {
+ postNextTextureImpl(gl);
+ }
}
- if( 0 < avPTS ) {
- nextFrame.setPTS(avPTS);
+ if( TextureFrame.INVALID_PTS != vPTS ) {
+ nextFrame.setPTS(vPTS);
return true;
} else {
return false;
@@ -492,6 +502,11 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
}
}
+ private final int getBytesPerMS(int time) {
+ final int bytesPerSample = sinkChosenAudioFormat.sampleSize >>> 3; // /8
+ return time * ( sinkChosenAudioFormat.channelCount * bytesPerSample * ( sinkChosenAudioFormat.sampleRate / 1000 ) );
+ }
+
@Override
protected final boolean syncAVRequired() { return true; }
@@ -522,10 +537,12 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
private native Buffer getAudioBuffer0(long moviePtr, int plane);
/**
- * @return resulting current PTS: audio < 0, video > 0, invalid == 0
+ * @return resulting current video PTS, or {@link TextureFrame#INVALID_PTS}
*/
private native int readNextPacket0(long moviePtr, int texTarget, int texFmt, int texType);
+ private native boolean play0(long moviePtr);
+ private native boolean pause0(long moviePtr);
private native int seek0(long moviePtr, int position);
public static enum SampleFormat {
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
index d03cad28a..c6f31d81e 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
@@ -156,7 +156,7 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
}
@Override
- protected boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking) {
+ protected boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking, boolean issuePreAndPost) {
if(0==moviePtr) {
throw new GLException("OMX native instance null");
}
--
cgit v1.2.3