Diffstat (limited to 'src/jogl')
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java                          210
-rw-r--r--  src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java                13
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java                           6
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java                           514
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java                             9
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java          23
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java                      239
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java                        10
-rw-r--r--  src/jogl/native/libav/ffmpeg_tool.h                                                      36
-rw-r--r--  src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c                    463
10 files changed, 1070 insertions, 453 deletions
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
index 512567f33..726eddb01 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
@@ -27,7 +27,6 @@
*/
package com.jogamp.opengl.util.av;
-import java.io.IOException;
import java.net.URI;
import javax.media.opengl.GL;
@@ -38,22 +37,56 @@ import jogamp.opengl.Debug;
import com.jogamp.opengl.util.texture.TextureSequence;
/**
- * GLMediaPlayer interface specifies a {@link TextureSequence}
- * with a video stream as it's source.
+ * GLMediaPlayer interface specifies a {@link TextureSequence} state machine
+ * using a multiplexed audio/video stream as its source.
* <p>
- * Audio maybe supported and played back internally or via an {@link AudioSink} implementation,
- * if an audio stream is selected in {@link #initGLStream(GL, int, URI, int, int)}.
+ * Audio may be supported and played back internally or via an {@link AudioSink} implementation.
+ * </p>
+ * <p>
+ * Audio and video streams can be selected or muted via {@link #initStream(URI, int, int, int)}
+ * using the appropriate <a href="#streamIDs">stream id</a>s.
* </p>
*
+ * <a name="streamworker"><h5><i>StreamWorker</i> Decoding Thread</h5></a>
+ * <p>
+ * Most of the stream processing is performed on the decoding thread, a.k.a. <i>StreamWorker</i>:
+ * <ul>
+ * <li>Stream initialization triggered by {@link #initStream(URI, int, int, int) initStream(..)} - User gets notified whether the stream has been initialized or not via {@link GLMediaEventListener#attributesChanged(GLMediaPlayer, int, long) attributesChanged(..)}.</li>
+ * <li>Stream decoding - User gets notified of a new frame via {@link GLMediaEventListener#newFrameAvailable(GLMediaPlayer, com.jogamp.opengl.util.texture.TextureSequence.TextureFrame, long) newFrameAvailable(...)}.</li>
+ * <li>Caught <a href="#streamerror">exceptions on the decoding thread</a> are delivered as {@link StreamException}s.</li>
+ * </ul>
+ * <i>StreamWorker</i> generates its own {@link GLContext}, shared with the one passed to {@link #initGL(GL)}.
+ * The shared {@link GLContext} allows the decoding thread to push the video frame data directly into
+ * the designated {@link TextureFrame}, later returned via {@link #getNextTexture(GL)} and used by the user.
+ * </p>
+ * <a name="streamerror"><h7><i>StreamWorker</i> Error Handling</h7></a>
+ * <p>
+ * Caught exceptions on <a href="#streamworker">StreamWorker</a> are delivered as {@link StreamException}s,
+ * which degrade the {@link State} to either {@link State#Uninitialized} or {@link State#Paused}.
+ * </p>
+ * <p>
+ * An occurring {@link StreamException} triggers a {@link GLMediaEventListener#EVENT_CHANGE_ERR EVENT_CHANGE_ERR} event,
+ * which can be listened to via {@link GLMediaEventListener#attributesChanged(GLMediaPlayer, int, long)}.
+ * </p>
+ * <p>
+ * A caught {@link StreamException} can be retrieved via {@link #getStreamException()}.
+ * </p>
+ *
+ *
* <a name="lifecycle"><h5>GLMediaPlayer Lifecycle</h5></a>
* <p>
* <table border="1">
- * <tr><th>action</th> <th>state before</th> <th>state after</th></tr>
- * <tr><td>{@link #initGLStream(GL, int, URI, int, int)}</td> <td>Uninitialized</td> <td>Paused</td></tr>
- * <tr><td>{@link #play()}</td> <td>Paused</td> <td>Playing</td></tr>
- * <tr><td>{@link #pause()}</td> <td>Playing</td> <td>Paused</td></tr>
- * <tr><td>{@link #seek(int)}</td> <td>Playing, Paused</td> <td>Unchanged</td></tr>
- * <tr><td>{@link #destroy(GL)}</td> <td>ANY</td> <td>Uninitialized</td></tr>
+ * <tr><th>Action</th> <th>{@link State} Before</th> <th>{@link State} After</th> <th>{@link GLMediaEventListener Event}</th></tr>
+ * <tr><td>{@link #initStream(URI, int, int, int)}</td> <td>{@link State#Uninitialized Uninitialized}</td> <td>{@link State#Initialized Initialized}<sup><a href="#streamworker">1</a></sup>, {@link State#Uninitialized Uninitialized}</td> <td>{@link GLMediaEventListener#EVENT_CHANGE_INIT EVENT_CHANGE_INIT} or ( {@link GLMediaEventListener#EVENT_CHANGE_ERR EVENT_CHANGE_ERR} + {@link GLMediaEventListener#EVENT_CHANGE_UNINIT EVENT_CHANGE_UNINIT} )</td></tr>
+ * <tr><td>{@link #initGL(GL)}</td> <td>{@link State#Initialized Initialized}</td> <td>{@link State#Paused Paused}, {@link State#Initialized Initialized}</td> <td>{@link GLMediaEventListener#EVENT_CHANGE_PAUSE EVENT_CHANGE_PAUSE}</td></tr>
+ * <tr><td>{@link #play()}</td> <td>{@link State#Paused Paused}</td> <td>{@link State#Playing Playing}</td> <td>{@link GLMediaEventListener#EVENT_CHANGE_PLAY EVENT_CHANGE_PLAY}</td></tr>
+ * <tr><td>{@link #pause()}</td> <td>{@link State#Playing Playing}</td> <td>{@link State#Paused Paused}</td> <td>{@link GLMediaEventListener#EVENT_CHANGE_PAUSE EVENT_CHANGE_PAUSE}</td></tr>
+ * <tr><td>{@link #seek(int)}</td> <td>{@link State#Paused Paused}, {@link State#Playing Playing}</td> <td>{@link State#Paused Paused}, {@link State#Playing Playing}</td> <td>none</td></tr>
+ * <tr><td>{@link #getNextTexture(GL)}</td> <td>{@link State#Paused Paused}, {@link State#Playing Playing}</td> <td>{@link State#Paused Paused}, {@link State#Playing Playing}</td> <td>none</td></tr>
+ * <tr><td>{@link #getLastTexture()}</td> <td>{@link State#Paused Paused}, {@link State#Playing Playing}</td> <td>{@link State#Paused Paused}, {@link State#Playing Playing}</td> <td>none</td></tr>
+ * <tr><td>{@link TextureFrame#END_OF_STREAM_PTS END_OF_STREAM}</td> <td>{@link State#Playing Playing}</td> <td>{@link State#Paused Paused}</td> <td>{@link GLMediaEventListener#EVENT_CHANGE_EOS EVENT_CHANGE_EOS} + {@link GLMediaEventListener#EVENT_CHANGE_PAUSE EVENT_CHANGE_PAUSE}</td></tr>
+ * <tr><td>{@link StreamException}</td> <td>ANY</td> <td>{@link State#Paused Paused}, {@link State#Uninitialized Uninitialized}</td> <td>{@link GLMediaEventListener#EVENT_CHANGE_ERR EVENT_CHANGE_ERR} + ( {@link GLMediaEventListener#EVENT_CHANGE_PAUSE EVENT_CHANGE_PAUSE} or {@link GLMediaEventListener#EVENT_CHANGE_UNINIT EVENT_CHANGE_UNINIT} )</td></tr>
+ * <tr><td>{@link #destroy(GL)}</td> <td>ANY</td> <td>{@link State#Uninitialized Uninitialized}</td> <td>{@link GLMediaEventListener#EVENT_CHANGE_UNINIT EVENT_CHANGE_UNINIT}</td></tr>
* </table>
* </p>
*
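
The lifecycle above can be exercised roughly as follows; a minimal Java fragment, not part of this patch, where the factory call GLMediaPlayerFactory.createDefault() and the streamUri variable are assumptions of this sketch:

    // Hypothetical usage of the asynchronous lifecycle; factory name and streamUri are assumed.
    final GLMediaPlayer mp = GLMediaPlayerFactory.createDefault();
    mp.addEventListener(new GLMediaPlayer.GLMediaEventListener() {
        @Override
        public void attributesChanged(final GLMediaPlayer p, final int event_mask, final long when) {
            if( 0 != ( event_mask & EVENT_CHANGE_INIT ) ) {
                // stream metadata is now available; initGL(gl) may follow on the render thread
            }
            if( 0 != ( event_mask & EVENT_CHANGE_ERR ) ) {
                final GLMediaPlayer.StreamException se = p.getStreamException();
                if( null != se ) { se.printStackTrace(); }
            }
        }
        @Override
        public void newFrameAvailable(final GLMediaPlayer p, final TextureSequence.TextureFrame frame, final long when) { }
    });
    // Returns immediately; stream initialization and decoding happen on the StreamWorker thread.
    mp.initStream(streamUri, GLMediaPlayer.STREAM_ID_AUTO, GLMediaPlayer.STREAM_ID_AUTO,
                  GLMediaPlayer.TEXTURE_COUNT_MIN);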
@@ -90,16 +123,18 @@ import com.jogamp.opengl.util.texture.TextureSequence;
 * will allow tracking time up to 2,147,483.647 seconds or
* 24 days 20 hours 31 minutes and 23 seconds.
* Milliseconds granularity is also more than enough to deal with A-V synchronization,
- * where the threshold usually lies within 100ms.
+ * where the threshold usually lies within 22ms.
* </p>
*
* <a name="synchronization"><h5>Audio and video synchronization</h5></a>
* <p>
* The class follows a passive A/V synchronization pattern.
- * Audio is being untouched, while {@link #getNextTexture(GL, boolean)} delivers a new video frame
- * only, if its timestamp is less than 22ms ahead of <i>time</i>.
- * Otherwise the early frame is cached for later retrieval and the previous frame is returned.
- * FIXME: Refine!
+ * Audio is left untouched, while {@link #getNextTexture(GL)} delivers a new video frame
+ * only if its timestamp is less than {@link #MAXIMUM_VIDEO_ASYNC} ahead of <i>time</i>.
+ * If its timestamp is more than {@link #MAXIMUM_VIDEO_ASYNC} ahead of <i>time</i>,
+ * the early frame is cached and the previous frame is returned.
+ * If its timestamp is more than {@link #MAXIMUM_VIDEO_ASYNC} behind <i>time</i>,
+ * the frame is dropped and the next frame is fetched.
* </p>
* <p>
* https://en.wikipedia.org/wiki/Audio_to_video_synchronization
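
The passive synchronization rule above boils down to comparing the frame's PTS against the current time; a simplified Java sketch derived from the description, not the shipped implementation, with d_vpts = video_pts - time in milliseconds:

    // Simplified decision logic only; constants below are illustrative, not API.
    static final int PRESENT = 0, CACHE_AND_REPEAT_LAST = 1, DROP_AND_FETCH_NEXT = 2;

    static int syncAction(final int d_vpts /* video_pts - time, in ms */) {
        if( d_vpts > GLMediaPlayer.MAXIMUM_VIDEO_ASYNC ) {
            return CACHE_AND_REPEAT_LAST;   // frame is too early: cache it, return the previous frame
        }
        if( d_vpts < -GLMediaPlayer.MAXIMUM_VIDEO_ASYNC ) {
            return DROP_AND_FETCH_NEXT;     // frame is too late: drop it, fetch the next one
        }
        return PRESENT;                     // within +/- 22 ms: present it
    }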
@@ -149,37 +184,80 @@ public interface GLMediaPlayer extends TextureSequence {
public static final boolean DEBUG = Debug.debug("GLMediaPlayer");
public static final boolean DEBUG_NATIVE = Debug.debug("GLMediaPlayer.Native");
+ /** Minimum texture count, value {@value}. */
+ public static final int TEXTURE_COUNT_MIN = 4;
+
/** Constant {@value} for <i>mute</i> or <i>not available</i>. See <a href="#streamIDs">Audio and video Stream IDs</a>. */
public static final int STREAM_ID_NONE = -2;
/** Constant {@value} for <i>auto</i> or <i>unspecified</i>. See <a href="#streamIDs">Audio and video Stream IDs</a>. */
public static final int STREAM_ID_AUTO = -1;
- /** Maximum video frame async .. */
+ /** Maximum tolerated video frame asynchrony of {@value} milliseconds. */
public static final int MAXIMUM_VIDEO_ASYNC = 22;
+ /**
+ * A StreamException encapsulates an exception caught in the decoder thread, a.k.a. <i>StreamWorker</i>,
+ * see <a href="#streamerror"><i>StreamWorker</i> Error Handling</a>.
+ */
+ @SuppressWarnings("serial")
+ public static class StreamException extends Exception {
+ public StreamException(Throwable cause) {
+ super(cause);
+ }
+ public StreamException(String message, Throwable cause) {
+ super(message, cause);
+ }
+ }
public interface GLMediaEventListener extends TexSeqEventListener<GLMediaPlayer> {
- static final int EVENT_CHANGE_VID = 1<<0;
- static final int EVENT_CHANGE_AID = 1<<1;
- static final int EVENT_CHANGE_SIZE = 1<<2;
- static final int EVENT_CHANGE_FPS = 1<<3;
- static final int EVENT_CHANGE_BPS = 1<<4;
- static final int EVENT_CHANGE_LENGTH = 1<<5;
- static final int EVENT_CHANGE_CODEC = 1<<6;
+ /** State changed to {@link State#Initialized}. See <a href="#lifecycle">Lifecycle</a>.*/
+ static final int EVENT_CHANGE_INIT = 1<<0;
+ /** State changed to {@link State#Uninitialized}. See <a href="#lifecycle">Lifecycle</a>.*/
+ static final int EVENT_CHANGE_UNINIT = 1<<1;
+ /** State changed to {@link State#Playing}. See <a href="#lifecycle">Lifecycle</a>.*/
+ static final int EVENT_CHANGE_PLAY = 1<<2;
+ /** State changed to {@link State#Paused}. See <a href="#lifecycle">Lifecycle</a>.*/
+ static final int EVENT_CHANGE_PAUSE = 1<<3;
+ /** End of stream reached. See <a href="#lifecycle">Lifecycle</a>.*/
+ static final int EVENT_CHANGE_EOS = 1<<4;
+ /** An error occurred, e.g. during off-thread initialization. See {@link StreamException} and <a href="#lifecycle">Lifecycle</a>. */
+ static final int EVENT_CHANGE_ERR = 1<<5;
+
+ /** Stream video id change. */
+ static final int EVENT_CHANGE_VID = 1<<16;
+ /** Stream audio id change. */
+ static final int EVENT_CHANGE_AID = 1<<17;
+ /** TextureFrame size change. */
+ static final int EVENT_CHANGE_SIZE = 1<<18;
+ /** Stream fps change. */
+ static final int EVENT_CHANGE_FPS = 1<<19;
+ /** Stream bps change. */
+ static final int EVENT_CHANGE_BPS = 1<<20;
+ /** Stream length change. */
+ static final int EVENT_CHANGE_LENGTH = 1<<21;
+ /** Stream codec change. */
+ static final int EVENT_CHANGE_CODEC = 1<<22;
/**
* @param mp the event source
 * @param event_mask the changed attributes
* @param when system time in msec.
*/
- public void attributesChanges(GLMediaPlayer mp, int event_mask, long when);
+ public void attributesChanged(GLMediaPlayer mp, int event_mask, long when);
}
/**
- * See <a href="#lifecycle">GLMediaPlayer Lifecycle</a>.
+ * See <a href="#lifecycle">Lifecycle</a>.
*/
public enum State {
- Uninitialized(0), Playing(1), Paused(2);
+ /** Uninitialized player, no resources shall be held. */
+ Uninitialized(0),
+ /** Stream has been initialized, user may play or call {@link #initGL(GL)}. */
+ Initialized(1),
+ /** Stream is playing. */
+ Playing(2),
+ /** Stream is paused. */
+ Paused(3);
public final int id;
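
Since state-change events now occupy the low bits and attribute-change events start at 1<<16, a listener can cheaply decode the mask for logging; an illustrative Java helper, not part of this patch:

    // Illustrative event_mask decoder for debugging output.
    static String eventMaskToString(final int event_mask) {
        final StringBuilder sb = new StringBuilder();
        if( 0 != ( event_mask & GLMediaEventListener.EVENT_CHANGE_INIT ) )   { sb.append("init "); }
        if( 0 != ( event_mask & GLMediaEventListener.EVENT_CHANGE_UNINIT ) ) { sb.append("uninit "); }
        if( 0 != ( event_mask & GLMediaEventListener.EVENT_CHANGE_PLAY ) )   { sb.append("play "); }
        if( 0 != ( event_mask & GLMediaEventListener.EVENT_CHANGE_PAUSE ) )  { sb.append("pause "); }
        if( 0 != ( event_mask & GLMediaEventListener.EVENT_CHANGE_EOS ) )    { sb.append("eos "); }
        if( 0 != ( event_mask & GLMediaEventListener.EVENT_CHANGE_ERR ) )    { sb.append("err "); }
        if( 0 != ( event_mask & GLMediaEventListener.EVENT_CHANGE_SIZE ) )   { sb.append("size "); }
        if( 0 != ( event_mask & GLMediaEventListener.EVENT_CHANGE_FPS ) )    { sb.append("fps "); }
        return sb.toString().trim();
    }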
@@ -202,28 +280,62 @@ public interface GLMediaPlayer extends TextureSequence {
public void setTextureWrapST(int[] wrapST);
/**
- * Sets the stream to be used. Initializes all stream related states inclusive OpenGL ones,
- * if <code>gl</code> is not null.
+ * Issues asynchronous stream initialization.
+ * <p>
+ * <a href="#lifecycle">Lifecycle</a>: {@link State#Uninitialized} -> {@link State#Initialized}<sup><a href="#streamworker">1</a></sup> or {@link State#Uninitialized}
+ * </p>
+ * <p>
+ * {@link State#Initialized} is reached asynchronously,
+ * i.e. the user gets notified via {@link GLMediaEventListener#attributesChanged(GLMediaPlayer, int, long) attributesChanged(..)}.
+ * </p>
+ * <p>
+ * A {@link StreamException} caught while initializing the stream off-thread
+ * will be thrown at {@link #initGL(GL)}.
+ * </p>
+ * <p>
+ * Muted audio can be achieved by passing {@link #STREAM_ID_NONE} to <code>aid</code>.
+ * </p>
* <p>
- * <a href="#lifecycle">GLMediaPlayer Lifecycle</a>: Uninitialized -> Paused
+ * Muted video can be achieved by passing {@link #STREAM_ID_NONE} to <code>vid</code>,
+ * in which case <code>textureCount</code> is ignored, as is the GL object passed to the subsequent {@link #initGL(GL)} call.
* </p>
- * @param gl current GL object. If null, no video output and textures will be available.
- * @param textureCount desired number of buffered textures to be decoded off-thread, use <code>1</code> for on-thread decoding.
* @param streamLoc the stream location
* @param vid video stream id, see <a href="#streamIDs">audio and video Stream IDs</a>
 * @param aid audio stream id, see <a href="#streamIDs">audio and video Stream IDs</a>
- * @return the new state
+ * @param textureCount desired number of buffered textures to be decoded off-thread, will be validated by implementation.
+ * The minimum value is {@link #TEXTURE_COUNT_MIN}.
+ * Ignored if video is muted.
+ * @throws IllegalStateException if not invoked in {@link State#Uninitialized}
+ * @throws IllegalArgumentException if arguments are invalid
+ */
+ public void initStream(URI streamLoc, int vid, int aid, int textureCount) throws IllegalStateException, IllegalArgumentException;
+
+ /**
+ * Returns the {@link StreamException} caught in the decoder thread, or <code>null</code>.
+ * @see GLMediaEventListener#EVENT_CHANGE_ERR
+ * @see StreamException
+ */
+ public StreamException getStreamException();
+
+ /**
+ * Initializes OpenGL related resources.
+ * <p>
+ * <a href="#lifecycle">Lifecycle</a>: {@link State#Initialized} -> {@link State#Paused} or {@link State#Initialized}
+ * </p>
+ * Argument <code>gl</code> is ignored if video is muted, see {@link #initStream(URI, int, int, int)}.
*
- * @throws IllegalStateException if not invoked in state Uninitialized
- * @throws IOException in case of difficulties to open or process the stream
+ * @param gl current GL object. May be <code>null</code> for audio-only playback.
+ * @throws IllegalStateException if not invoked in {@link State#Initialized}.
+ * @throws IllegalArgumentException if arguments are invalid
+ * @throws StreamException forwarded from the off-thread stream initialization
* @throws GLException in case of difficulties to initialize the GL resources
*/
- public State initGLStream(GL gl, int textureCount, URI streamLoc, int vid, int aid) throws IllegalStateException, GLException, IOException;
+ public void initGL(GL gl) throws IllegalStateException, IllegalArgumentException, StreamException, GLException;
/**
* If implementation uses a {@link AudioSink}, it's instance will be returned.
* <p>
- * The {@link AudioSink} instance is available after {@link #initGLStream(GL, int, URI, int, int)},
+ * The {@link AudioSink} instance is available after {@link #initStream(URI, int, int, int)},
* if used by implementation.
* </p>
*/
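
On the GL side, the deferred StreamException surfaces at initGL(GL); a hedged Java fragment of this second init step, typically run from GLEventListener.init(..) or display(..), with mp and gl assumed to be in scope:

    // Sketch only: completing initialization on the rendering thread.
    try {
        if( GLMediaPlayer.State.Initialized == mp.getState() ) {
            mp.initGL(gl);   // rethrows a StreamException caught during off-thread init
            mp.play();
        }
    } catch (final GLMediaPlayer.StreamException se) {
        // off-thread stream initialization failed; the player is Uninitialized again
        se.printStackTrace();
    } catch (final GLException ge) {
        ge.printStackTrace();
    }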
@@ -232,7 +344,7 @@ public interface GLMediaPlayer extends TextureSequence {
/**
* Releases the GL and stream resources.
* <p>
- * <a href="#lifecycle">GLMediaPlayer Lifecycle</a>: <code>ANY</code> -> Uninitialized
+ * <a href="#lifecycle">Lifecycle</a>: <code>ANY</code> -> {@link State#Uninitialized}
* </p>
*/
public State destroy(GL gl);
@@ -250,18 +362,18 @@ public interface GLMediaPlayer extends TextureSequence {
public float getPlaySpeed();
/**
- * <a href="#lifecycle">GLMediaPlayer Lifecycle</a>: Paused -> Playing
+ * <a href="#lifecycle">Lifecycle</a>: {@link State#Paused} -> {@link State#Playing}
*/
public State play();
/**
- * <a href="#lifecycle">GLMediaPlayer Lifecycle</a>: Playing -> Paused
+ * <a href="#lifecycle">Lifecycle</a>: {@link State#Playing} -> {@link State#Paused}
*/
public State pause();
/**
- * Allowed in state Playing and Paused, otherwise ignored,
- * see <a href="#lifecycle">GLMediaPlayer Lifecycle</a>.
+ * Allowed in state {@link State#Playing} and {@link State#Paused}, otherwise ignored,
+ * see <a href="#lifecycle">Lifecycle</a>.
*
* @param msec absolute desired time position in milliseconds
 * @return the current position in milliseconds, after seeking to the desired position
@@ -269,8 +381,8 @@ public interface GLMediaPlayer extends TextureSequence {
public int seek(int msec);
/**
- * See <a href="#lifecycle">GLMediaPlayer Lifecycle</a>.
- * @return the current state, either Uninitialized, Playing, Paused
+ * See <a href="#lifecycle">Lifecycle</a>.
+ * @return the current state, either {@link State#Uninitialized}, {@link State#Initialized}, {@link State#Playing} or {@link State#Paused}
*/
public State getState();
@@ -286,13 +398,13 @@ public interface GLMediaPlayer extends TextureSequence {
/**
* @return the current decoded frame count since {@link #play()} and {@link #seek(int)}
- * as increased by {@link #getNextTexture(GL, boolean)} or the decoding thread.
+ * as increased by {@link #getNextTexture(GL)} or the decoding thread.
*/
public int getDecodedFrameCount();
/**
* @return the current presented frame count since {@link #play()} and {@link #seek(int)}
- * as increased by {@link #getNextTexture(GL, boolean)} for new frames.
+ * as increased by {@link #getNextTexture(GL)} for new frames.
*/
public int getPresentedFrameCount();
@@ -311,6 +423,7 @@ public interface GLMediaPlayer extends TextureSequence {
* <p>
* See <a href="#synchronization">audio and video synchronization</a>.
* </p>
+ * @throws IllegalStateException if not invoked in {@link State#Paused} or {@link State#Playing}
*/
@Override
public TextureSequence.TextureFrame getLastTexture() throws IllegalStateException;
@@ -324,14 +437,15 @@ public interface GLMediaPlayer extends TextureSequence {
* <p>
* See <a href="#synchronization">audio and video synchronization</a>.
* </p>
+ * @throws IllegalStateException if not invoked in {@link State#Paused} or {@link State#Playing}
*
* @see #addEventListener(GLMediaEventListener)
* @see GLMediaEventListener#newFrameAvailable(GLMediaPlayer, TextureFrame, long)
*/
@Override
- public TextureSequence.TextureFrame getNextTexture(GL gl, boolean blocking) throws IllegalStateException;
+ public TextureSequence.TextureFrame getNextTexture(GL gl) throws IllegalStateException;
- /** Return the stream location, as set by {@link #initGLStream(GL, int, URI, int, int)}. */
+ /** Return the stream location, as set by {@link #initStream(URI, int, int, int)}. */
public URI getURI();
/**
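
A per-frame consumer of the simplified getNextTexture(GL) signature might look as follows inside GLEventListener.display(..); an illustrative Java fragment, not part of this patch:

    // Sketch: presenting the A/V-synchronized frame each display cycle.
    final TextureSequence.TextureFrame frame = mp.getNextTexture(gl); // may return the previous frame
    if( null != frame ) {
        final Texture tex = frame.getTexture();
        tex.enable(gl);
        tex.bind(gl);
        // ... draw the textured geometry here ...
        tex.disable(gl);
    }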
diff --git a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
index f87df950c..578a219e9 100644
--- a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
+++ b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
@@ -31,6 +31,7 @@ import java.io.IOException;
import javax.media.opengl.GL;
import javax.media.opengl.GLES2;
+import javax.media.opengl.GLException;
import com.jogamp.common.os.AndroidVersion;
import com.jogamp.common.os.Platform;
@@ -179,14 +180,14 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
public final Surface getSurface() { return surface; }
public String toString() {
- return "SurfaceTextureFrame[" + pts + "ms: " + texture + ", " + surfaceTex + "]";
+ return "SurfaceTextureFrame[pts " + pts + " ms, l " + duration + " ms, texID "+ texture.getTextureObject() + ", " + surfaceTex + "]";
}
private final SurfaceTexture surfaceTex;
private final Surface surface;
}
@Override
- protected final void initGLStreamImpl(GL gl, int vid, int aid) throws IOException {
+ protected final void initStreamImpl(int vid, int aid) throws IOException {
if(null!=mp && null!=streamLoc) {
if( GLMediaPlayer.STREAM_ID_NONE == aid ) {
mp.setVolume(0f, 0f);
@@ -220,9 +221,13 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
0, 0, mp.getDuration(), icodec, icodec);
}
}
+ @Override
+ protected final void initGLImpl(GL gl) throws IOException, GLException {
+ // NOP
+ }
@Override
- protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking, boolean issuePreAndPost) {
+ protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame) {
if(null != stex && null != mp) {
final SurfaceTextureFrame nextSFrame = (SurfaceTextureFrame) nextFrame;
final Surface nextSurface = nextSFrame.getSurface();
@@ -232,7 +237,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
// Only block once, no while-loop.
// This relaxes locking code of non crucial resources/events.
boolean update = updateSurface;
- if(!update && blocking) {
+ if( !update ) {
synchronized(updateSurfaceLock) {
if(!updateSurface) { // volatile OK.
try {
diff --git a/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
index db2146cdc..ec375406d 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
@@ -69,7 +69,7 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
public final long getSync() { return sync; }
public String toString() {
- return "EGLTextureFrame[" + texture + ", img "+ image + ", sync "+ sync+", clientBuffer "+clientBuffer+"]";
+ return "EGLTextureFrame[pts " + pts + " ms, l " + duration + " ms, texID "+ texture.getTextureObject() + ", img "+ image + ", sync "+ sync+", clientBuffer "+clientBuffer+"]";
}
protected final Buffer clientBuffer;
protected final long image;
@@ -82,10 +82,6 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
this.texType = texType;
this.useKHRSync = useKHRSync;
}
- @Override
- protected final int validateTextureCount(int desiredTextureCount) {
- return desiredTextureCount>2 ? Math.max(4, desiredTextureCount) : 2;
- }
@Override
protected TextureSequence.TextureFrame createTexImage(GL gl, int texName) {
diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
index a82c84d17..8193175b7 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
@@ -43,6 +43,8 @@ import javax.media.opengl.GLException;
import javax.media.opengl.GLProfile;
import com.jogamp.common.os.Platform;
+import com.jogamp.common.util.LFRingbuffer;
+import com.jogamp.common.util.Ringbuffer;
import com.jogamp.opengl.util.av.AudioSink;
import com.jogamp.opengl.util.av.AudioSink.AudioFrame;
import com.jogamp.opengl.util.av.GLMediaPlayer;
@@ -65,9 +67,6 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected static final String unknown = "unknown";
- /** Default texture count w/o threading, value {@value}. */
- protected static final int TEXTURE_COUNT_DEFAULT = 2;
-
protected volatile State state;
private Object stateLock = new Object();
@@ -86,33 +85,33 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected volatile float playSpeed = 1.0f;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int vid = GLMediaPlayer.STREAM_ID_AUTO;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int aid = GLMediaPlayer.STREAM_ID_AUTO;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int width = 0;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int height = 0;
- /** Video avg. fps. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Video avg. fps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected float fps = 0;
- /** Video avg. frame duration in ms. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Video avg. frame duration in ms. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected float frame_duration = 0f;
- /** Stream bps. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Stream bps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int bps_stream = 0;
- /** Video bps. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Video bps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int bps_video = 0;
- /** Audio bps. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Audio bps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int bps_audio = 0;
- /** In frames. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** In frames. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int videoFrames = 0;
- /** In frames. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** In frames. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int audioFrames = 0;
- /** In ms. Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** In ms. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected int duration = 0;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected String acodec = unknown;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
protected String vcodec = unknown;
protected volatile int decodedFrameCount = 0;
@@ -120,7 +119,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected int displayedFrameCount = 0;
protected volatile int video_pts_last = 0;
- /** See {@link #getAudioSink()}. Set by implementation if used from within {@link #initGLStreamImpl(GL, int, int)}! */
+ /** See {@link #getAudioSink()}. Set by implementation if used from within {@link #initStreamImpl(int, int)}! */
protected AudioSink audioSink = null;
protected boolean audioSinkPlaySpeedSet = false;
@@ -145,12 +144,20 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
/** Trigger video PTS reset with given cause as bitfield. */
private boolean videoSCR_reset = false;
- protected SyncedRingbuffer<TextureFrame> videoFramesFree = null;
- protected SyncedRingbuffer<TextureFrame> videoFramesDecoded = null;
+ protected TextureFrame[] videoFramesOrig = null;
+ protected Ringbuffer<TextureFrame> videoFramesFree = null;
+ protected Ringbuffer<TextureFrame> videoFramesDecoded = null;
protected volatile TextureFrame lastFrame = null;
private ArrayList<GLMediaEventListener> eventListeners = new ArrayList<GLMediaEventListener>();
+ private static Ringbuffer.AllocEmptyArray<TextureFrame> rbAllocTextureFrameArray = new Ringbuffer.AllocEmptyArray<TextureFrame>() {
+ @Override
+ public TextureFrame[] newArray(int size) {
+ return new TextureFrame[size];
+ }
+ };
+
protected GLMediaPlayerImpl() {
this.textureCount=0;
this.textureTarget=GL.GL_TEXTURE_2D;
@@ -186,11 +193,21 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
public final void setTextureWrapST(int[] wrapST) { texWrapST[0] = wrapST[0]; texWrapST[1] = wrapST[1];}
public final int[] getTextureWrapST() { return texWrapST; }
+ private final void checkStreamInit() {
+ if(State.Uninitialized == state ) {
+ throw new IllegalStateException("Stream not initialized: "+this);
+ }
+ }
+
+ private final void checkGLInit() {
+ if(State.Uninitialized == state || State.Initialized == state ) {
+ throw new IllegalStateException("GL not initialized: "+this);
+ }
+ }
+
@Override
public String getRequiredExtensionsShaderStub() throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
+ checkGLInit();
if(GLES2.GL_TEXTURE_EXTERNAL_OES == textureTarget) {
return TextureSequence.GL_OES_EGL_image_external_Required_Prelude;
}
@@ -199,9 +216,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
@Override
public String getTextureSampler2DType() throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
+ checkGLInit();
switch(textureTarget) {
case GL.GL_TEXTURE_2D:
case GL2.GL_TEXTURE_RECTANGLE:
@@ -221,9 +236,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
*/
@Override
public String getTextureLookupFunctionName(String desiredFuncName) throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
+ checkGLInit();
return "texture2D";
}
@@ -236,9 +249,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
*/
@Override
public String getTextureLookupFragmentShaderImpl() throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
+ checkGLInit();
return "";
}
@@ -246,7 +257,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
public final int getDecodedFrameCount() { return decodedFrameCount; }
@Override
- public final int getPresentedFrameCount() { return this.presentedFrameCount; }
+ public final int getPresentedFrameCount() { return presentedFrameCount; }
@Override
public final int getVideoPTS() { return video_pts_last; }
@@ -267,20 +278,21 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
}
+ @Override
public final State getState() { return state; }
+ @Override
public final State play() {
synchronized( stateLock ) {
switch( state ) {
case Paused:
if( playImpl() ) {
- // FIXME
resetAudioVideoPTS();
if( null != audioSink ) {
audioSink.play(); // cont. w/ new data
- }
- resumeFramePusher();
- state = State.Playing;
+ }
+ streamWorker.doResume();
+ changeState(0, State.Playing);
}
default:
}
@@ -290,15 +302,20 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
protected abstract boolean playImpl();
+ @Override
public final State pause() {
+ return pauseImpl(0);
+ }
+ private final State pauseImpl(int event_mask) {
synchronized( stateLock ) {
if( State.Playing == state ) {
+ event_mask = addStateEventMask(event_mask, GLMediaPlayer.State.Paused);
state = State.Paused;
- // FIXME
- pauseFramePusher();
+ streamWorker.doPause();
if( null != audioSink ) {
audioSink.pause();
}
+ attributesUpdated( event_mask );
if( !pauseImpl() ) {
play();
}
@@ -309,6 +326,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
protected abstract boolean pauseImpl();
+ @Override
public final int seek(int msec) {
synchronized( stateLock ) {
final int pts1;
@@ -317,14 +335,14 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
case Paused:
final State _state = state;
state = State.Paused;
- // FIXME
- pauseFramePusher();
+ streamWorker.doPause();
pts1 = seekImpl(msec);
resetAllAudioVideoSync();
if( null != audioSink && State.Playing == _state ) {
audioSink.play(); // cont. w/ new data
}
- resumeFramePusher();
+ System.err.println("SEEK XXX: "+getPerfString());
+ streamWorker.doResume();
state = _state;
break;
default:
@@ -378,41 +396,31 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
@Override
- public final State initGLStream(GL gl, int reqTextureCount, URI streamLoc, int vid, int aid) throws IllegalStateException, GLException, IOException {
+ public final void initStream(URI streamLoc, int vid, int aid, int reqTextureCount) throws IllegalStateException, IllegalArgumentException {
synchronized( stateLock ) {
if(State.Uninitialized != state) {
- throw new IllegalStateException("Instance not in state "+State.Uninitialized+", but "+state+", "+this);
+ throw new IllegalStateException("Instance not unintialized: "+this);
+ }
+ if(null == streamLoc) {
+ throw new IllegalArgumentException("streamLock is null");
+ }
+ if( STREAM_ID_NONE != vid ) {
+ textureCount = validateTextureCount(reqTextureCount);
+ if( textureCount < TEXTURE_COUNT_MIN ) {
+ throw new InternalError("Validated texture count < "+TEXTURE_COUNT_MIN+": "+textureCount);
+ }
+ } else {
+ textureCount = 0;
}
decodedFrameCount = 0;
presentedFrameCount = 0;
- displayedFrameCount = 0;
+ displayedFrameCount = 0;
this.streamLoc = streamLoc;
+ this.vid = vid;
+ this.aid = aid;
if (this.streamLoc != null) {
- try {
- if( null != gl ) {
- removeAllTextureFrames(gl);
- textureCount = validateTextureCount(reqTextureCount);
- if( textureCount < TEXTURE_COUNT_DEFAULT ) {
- throw new InternalError("Validated texture count < "+TEXTURE_COUNT_DEFAULT+": "+textureCount);
- }
- initGLStreamImpl(gl, vid, aid); // also initializes width, height, .. etc
- videoFramesFree = new SyncedRingbuffer<TextureFrame>(createTexFrames(gl, textureCount), true /* full */);
- if( TEXTURE_COUNT_DEFAULT < textureCount ) {
- videoFramesDecoded = new SyncedRingbuffer<TextureFrame>(new TextureFrame[textureCount], false /* full */);
- framePusher = new FramePusher(gl);
- framePusher.doStart();
- } else {
- videoFramesDecoded = null;
- }
- lastFrame = videoFramesFree.getBlocking(false /* clearRef */ );
- state = State.Paused;
- }
- return state;
- } catch (Throwable t) {
- throw new GLException("Error initializing GL resources", t);
- }
+ streamWorker = new StreamWorker();
}
- return state;
}
}
/**
@@ -428,19 +436,62 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
* @see #acodec
* @see #vcodec
*/
- protected abstract void initGLStreamImpl(GL gl, int vid, int aid) throws IOException;
+ protected abstract void initStreamImpl(int vid, int aid) throws Exception;
+
+ @Override
+ public final StreamException getStreamException() {
+ synchronized( stateLock ) {
+ if( null != streamWorker ) {
+ return streamWorker.getStreamErr();
+ } else {
+ return null;
+ }
+ }
+ }
+
+ @Override
+ public final void initGL(GL gl) throws IllegalStateException, StreamException, GLException {
+ synchronized( stateLock ) {
+ checkStreamInit();
+ final StreamException streamInitErr = streamWorker.getStreamErr();
+ if( null != streamInitErr ) {
+ streamWorker = null;
+ destroy(null);
+ throw streamInitErr;
+ }
+ try {
+ if( STREAM_ID_NONE != vid ) {
+ removeAllTextureFrames(gl);
+ initGLImpl(gl);
+ videoFramesOrig = createTexFrames(gl, textureCount);
+ videoFramesFree = new LFRingbuffer<TextureFrame>(videoFramesOrig, rbAllocTextureFrameArray);
+ videoFramesDecoded = new LFRingbuffer<TextureFrame>(textureCount, rbAllocTextureFrameArray);
+ lastFrame = videoFramesFree.getBlocking( );
+ streamWorker.initGL(gl);
+ } else {
+ initGLImpl(null);
+ setTextureFormat(-1, -1);
+ setTextureType(-1);
+ videoFramesFree = null;
+ videoFramesDecoded = null;
+ lastFrame = null;
+ }
+ changeState(0, State.Paused);
+ } catch (Throwable t) {
+ throw new GLException("Error initializing GL resources", t);
+ }
+ }
+ }
+ protected abstract void initGLImpl(GL gl) throws IOException, GLException;
/**
* Returns the validated number of textures to be handled.
* <p>
- * Default is 2 textures w/o threading, last texture and the decoding texture.
- * </p>
- * <p>
- * &gt; 2 textures is used for threaded decoding, a minimum of 4 textures seems reasonable in this case.
+ * Default is {@link #TEXTURE_COUNT_MIN} textures, last texture and the decoding texture.
* </p>
*/
protected int validateTextureCount(int desiredTextureCount) {
- return TEXTURE_COUNT_DEFAULT;
+ return desiredTextureCount < TEXTURE_COUNT_MIN ? TEXTURE_COUNT_MIN : desiredTextureCount;
}
private final TextureFrame[] createTexFrames(GL gl, final int count) {
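
The two ring buffers created above form a producer/consumer handoff between the StreamWorker and the render thread; a simplified Java sketch of the flow (the actual code recycles frames via lastFrame and differs in detail, and getBlocking() may throw InterruptedException):

    // Producer side (StreamWorker thread), simplified:
    TextureFrame frame = videoFramesFree.getBlocking();    // take an empty frame
    if( getNextTextureImpl(gl, frame) ) {                   // decode into its texture
        videoFramesDecoded.put(frame);                      // publish to the render thread
    } else {
        videoFramesFree.put(frame);                         // put back on failure
    }

    // Consumer side (render thread, getNextTexture(GL)), simplified:
    final TextureFrame next = videoFramesDecoded.getBlocking();  // frames arrive in decode order
    // ... present 'next', then recycle the previously shown frame back to videoFramesFree ...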
@@ -512,42 +563,43 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
@Override
public final TextureFrame getLastTexture() throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
+ if( State.Paused != state && State.Playing != state ) {
+ throw new IllegalStateException("Instance not paused or playing: "+this);
}
return lastFrame;
}
private final void removeAllTextureFrames(GL gl) {
- if( null != videoFramesFree ) {
- final TextureFrame[] texFrames = videoFramesFree.getArray();
+ if( null != videoFramesOrig ) {
+ final TextureFrame[] texFrames = videoFramesOrig;
+ videoFramesOrig = null;
videoFramesFree = null;
videoFramesDecoded = null;
lastFrame = null;
for(int i=0; i<texFrames.length; i++) {
final TextureFrame frame = texFrames[i];
if(null != frame) {
- destroyTexFrame(gl, frame);
+ if( null != gl ) {
+ destroyTexFrame(gl, frame);
+ }
texFrames[i] = null;
}
System.err.println(Thread.currentThread().getName()+"> Clear TexFrame["+i+"]: "+frame+" -> null");
}
}
- textureCount=0;
}
protected TextureFrame cachedFrame = null;
protected long lastTimeMillis = 0;
@Override
- public final TextureFrame getNextTexture(GL gl, boolean blocking) throws IllegalStateException {
+ public final TextureFrame getNextTexture(GL gl) throws IllegalStateException {
synchronized( stateLock ) {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
+ if( State.Paused != state && State.Playing != state ) {
+ throw new IllegalStateException("Instance not paused or playing: "+this);
}
if(State.Playing == state) {
TextureFrame nextFrame = null;
- boolean ok = true;
boolean dropFrame = false;
try {
do {
@@ -561,23 +613,16 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
nextFrame = cachedFrame;
cachedFrame = null;
presentedFrameCount--;
- currentTimeMillis = Platform.currentTimeMillis();
- } else if( TEXTURE_COUNT_DEFAULT < textureCount ) {
- nextFrame = videoFramesDecoded.getBlocking(false /* clearRef */ );
- currentTimeMillis = Platform.currentTimeMillis();
- } else {
- nextFrame = videoFramesFree.getBlocking(false /* clearRef */ );
- nextFrame.setPTS( TextureFrame.INVALID_PTS ); // mark invalid until processed!
- ok = getNextTextureImpl(gl, nextFrame, blocking, true /* issuePreAndPost */);
- currentTimeMillis = Platform.currentTimeMillis();
- if( ok ) {
- newFrameAvailable(nextFrame, currentTimeMillis);
- }
+ } else if( STREAM_ID_NONE != vid ) {
+ nextFrame = videoFramesDecoded.getBlocking();
}
- if( ok ) {
+ currentTimeMillis = Platform.currentTimeMillis();
+ if( null != nextFrame ) {
presentedFrameCount++;
final int video_pts = nextFrame.getPTS();
- if( video_pts != TextureFrame.INVALID_PTS ) {
+ if( video_pts == TextureFrame.END_OF_STREAM_PTS ) {
+ pauseImpl(GLMediaEventListener.EVENT_CHANGE_EOS);
+ } else if( video_pts != TextureFrame.INVALID_PTS ) {
final int audio_pts = getAudioPTSImpl();
final int audio_scr = (int) ( ( currentTimeMillis - audio_scr_t0 ) * playSpeed );
final int d_apts;
@@ -613,6 +658,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
final int dt = (int) ( video_dpts_avg_diff / playSpeed + 0.5f );
// final int dt = (int) ( d_vpts / playSpeed + 0.5f );
// final int dt = (int) ( d_avpts / playSpeed + 0.5f );
+ final TextureFrame _nextFrame = nextFrame;
if( dt > maxVideoDelay ) {
cachedFrame = nextFrame;
nextFrame = null;
@@ -625,7 +671,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
getPerfStringImpl( video_scr, video_pts, d_vpts,
audio_scr, audio_pts, d_apts,
video_dpts_avg_diff ) +
- ", avg dpy-fps "+avg_dpy_duration+" ms/f, maxD "+maxVideoDelay+" ms, "+nextFrame+", playCached " + playCached + ", dropFrame "+dropFrame);
+ ", avg dpy-fps "+avg_dpy_duration+" ms/f, maxD "+maxVideoDelay+" ms, "+_nextFrame+", playCached " + playCached + ", dropFrame "+dropFrame);
}
}
} else if( DEBUG ) {
@@ -640,14 +686,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
lastTimeMillis = currentTimeMillis;
} while( dropFrame );
} catch (InterruptedException e) {
- ok = false;
e.printStackTrace();
- } finally {
- if( !ok && null != nextFrame ) { // put back
- if( !videoFramesFree.put(nextFrame) ) {
- throw new InternalError("XXX: free "+videoFramesFree+", decoded "+videoFramesDecoded+", "+GLMediaPlayerImpl.this);
- }
- }
}
}
displayedFrameCount++;
@@ -656,8 +695,13 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
protected void preNextTextureImpl(GL gl) {}
protected void postNextTextureImpl(GL gl) {}
- protected abstract boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking, boolean issuePreAndPost);
- protected boolean syncAVRequired() { return false; }
+ /**
+ * Implementation shall take care of OpenGL synchronization as required, e.g. glFinish()/glFlush()!
+ * @param gl
+ * @param nextFrame
+ * @return
+ */
+ protected abstract boolean getNextTextureImpl(GL gl, TextureFrame nextFrame);
/**
* {@inheritDoc}
@@ -667,7 +711,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
* </p>
* <p>
* Implementations using an {@link AudioSink} shall write it's instance to {@link #audioSink}
- * from within their {@link #initGLStreamImpl(GL, int, int)} implementation.
+ * from within their {@link #initStreamImpl(int, int)} implementation.
* </p>
*/
@Override
@@ -686,12 +730,12 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
private void flushAllVideoFrames() {
if( null != videoFramesFree ) {
- videoFramesFree.reset(true);
+ videoFramesFree.resetFull(videoFramesOrig);
}
if( null != videoFramesDecoded ) {
- videoFramesDecoded.reset(false);
+ videoFramesDecoded.clear();
}
- lastFrame = videoFramesFree.get(false /* clearRef */ );
+ lastFrame = videoFramesFree.get( );
if( null == lastFrame ) { throw new InternalError("XXX"); }
cachedFrame = null;
}
@@ -727,7 +771,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
}
- class FramePusher extends Thread {
+ class StreamWorker extends Thread {
private volatile boolean isRunning = false;
private volatile boolean isActive = false;
private volatile boolean isBlocked = false;
@@ -735,28 +779,21 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
private volatile boolean shallPause = true;
private volatile boolean shallStop = false;
- private final GL gl;
+ private volatile StreamException streamErr = null;
+ private volatile GLContext sharedGLCtx = null;
+ private boolean sharedGLCtxCurrent = false;
private GLDrawable dummyDrawable = null;
- private GLContext sharedGLCtx = null;
- FramePusher(GL gl) {
+ /**
+ * Starts this daemon thread,
+ * which first initializes the stream via {@link GLMediaPlayerImpl#initStreamImpl(int, int)}.
+ * <p>
+ * After stream initialization, this thread pauses!
+ * </p>
+ **/
+ StreamWorker() {
setDaemon(true);
-
- final GLContext glCtx = gl.getContext();
- final boolean glCtxCurrent = glCtx.isCurrent();
- final GLProfile glp = gl.getGLProfile();
- final GLDrawableFactory factory = GLDrawableFactory.getFactory(glp);
- final AbstractGraphicsDevice device = glCtx.getGLDrawable().getNativeSurface().getGraphicsConfiguration().getScreen().getDevice();
- dummyDrawable = factory.createDummyDrawable(device, true, glp); // own device!
- dummyDrawable.setRealized(true);
- sharedGLCtx = dummyDrawable.createContext(glCtx);
- makeCurrent(sharedGLCtx);
- if( glCtxCurrent ) {
- makeCurrent(glCtx);
- } else {
- sharedGLCtx.release();
- }
- this.gl = sharedGLCtx.getGL();
+ start();
}
private void makeCurrent(GLContext ctx) {
@@ -767,7 +804,6 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
private void destroySharedGL() {
if( null != sharedGLCtx ) {
- postNextTextureImpl(gl);
if( sharedGLCtx.isCreated() ) {
// Catch dispose GLExceptions by GLEventListener, just 'print' them
// so we can continue with the destruction.
@@ -787,6 +823,22 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
}
+ public synchronized void initGL(GL gl) {
+ final GLContext glCtx = gl.getContext();
+ final boolean glCtxCurrent = glCtx.isCurrent();
+ final GLProfile glp = gl.getGLProfile();
+ final GLDrawableFactory factory = GLDrawableFactory.getFactory(glp);
+ final AbstractGraphicsDevice device = glCtx.getGLDrawable().getNativeSurface().getGraphicsConfiguration().getScreen().getDevice();
+ dummyDrawable = factory.createDummyDrawable(device, true, glp); // own device!
+ dummyDrawable.setRealized(true);
+ sharedGLCtx = dummyDrawable.createContext(glCtx);
+ makeCurrent(sharedGLCtx);
+ if( glCtxCurrent ) {
+ makeCurrent(glCtx);
+ } else {
+ sharedGLCtx.release();
+ }
+ }
public synchronized void doPause() {
if( isActive ) {
shallPause = true;
@@ -815,16 +867,6 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
}
}
- public synchronized void doStart() {
- start();
- while( !isRunning ) {
- try {
- this.wait(); // wait until started
- } catch (InterruptedException e) {
- e.printStackTrace();
- }
- }
- }
public synchronized void doStop() {
if( isRunning ) {
shallStop = true;
@@ -843,28 +885,38 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
public boolean isRunning() { return isRunning; }
public boolean isActive() { return isActive; }
+ public StreamException getStreamErr() { return streamErr; }
public void run() {
- setName(getName()+"-FramePusher_"+FramePusherInstanceId);
- FramePusherInstanceId++;
+ setName(getName()+"-StreamWorker_"+StreamWorkerInstanceId);
+ StreamWorkerInstanceId++;
synchronized ( this ) {
- makeCurrent( sharedGLCtx );
- preNextTextureImpl(gl);
isRunning = true;
- this.notify(); // wake-up doStart()
+ try {
+ isBlocked = true;
+ initStreamImpl(vid, aid);
+ isBlocked = false;
+ } catch (Throwable t) {
+ streamErr = new StreamException(t.getClass().getSimpleName()+" while initializing: "+GLMediaPlayerImpl.this.toString(), t);
+ isBlocked = false;
+ isRunning = false;
+ changeState(GLMediaEventListener.EVENT_CHANGE_ERR, GLMediaPlayer.State.Uninitialized);
+ return; // end of thread!
+ } // also initializes width, height, .. etc
}
while( !shallStop ){
if( shallPause ) {
synchronized ( this ) {
- postNextTextureImpl(gl);
- sharedGLCtx.release();
+ if( sharedGLCtxCurrent ) {
+ postNextTextureImpl(sharedGLCtx.getGL());
+ sharedGLCtx.release();
+ }
while( shallPause && !shallStop ) {
isActive = false;
this.notify(); // wake-up doPause()
try {
- System.err.println("!!! PAUSE ON"); // FIXME
this.wait(); // wait until resumed
} catch (InterruptedException e) {
if( !shallPause ) {
@@ -872,70 +924,119 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
}
}
- makeCurrent(sharedGLCtx);
- preNextTextureImpl(gl);
- System.err.println("!!! PAUSE OFF"); // FIXME
+ if( sharedGLCtxCurrent ) {
+ makeCurrent(sharedGLCtx);
+ preNextTextureImpl(sharedGLCtx.getGL());
+ }
isActive = true;
this.notify(); // wake-up doResume()
}
}
+ if( !sharedGLCtxCurrent && null != sharedGLCtx ) {
+ synchronized ( this ) {
+ if( null != sharedGLCtx ) {
+ makeCurrent( sharedGLCtx );
+ preNextTextureImpl(sharedGLCtx.getGL());
+ sharedGLCtxCurrent = true;
+ }
+ }
+ }
if( !shallStop ) {
TextureFrame nextFrame = null;
- try {
- isBlocked = true;
- nextFrame = videoFramesFree.getBlocking(false /* clearRef */ );
- isBlocked = false;
- nextFrame.setPTS( TextureFrame.INVALID_PTS ); // mark invalid until processed!
- if( getNextTextureImpl(gl, nextFrame, true, false /* issuePreAndPost */) ) {
- // gl.glFinish();
- gl.glFlush(); // even better: sync object!
- if( !videoFramesDecoded.put(nextFrame) ) {
- throw new InternalError("XXX: free "+videoFramesFree+", decoded "+videoFramesDecoded+", "+GLMediaPlayerImpl.this);
+ if( null != sharedGLCtx ) {
+ try {
+ isBlocked = true;
+ nextFrame = videoFramesFree.getBlocking( );
+ isBlocked = false;
+ nextFrame.setPTS( TextureFrame.INVALID_PTS ); // mark invalid until processed!
+ final GL gl = sharedGLCtxCurrent ? sharedGLCtx.getGL() : null;
+ if( getNextTextureImpl(gl, nextFrame) ) {
+ if( !videoFramesDecoded.put(nextFrame) ) {
+ throw new InternalError("XXX: free "+videoFramesFree+", decoded "+videoFramesDecoded+", "+GLMediaPlayerImpl.this);
+ }
+ newFrameAvailable(nextFrame, Platform.currentTimeMillis());
+ nextFrame = null;
+ }
+ } catch (InterruptedException e) {
+ isBlocked = false;
+ if( !shallStop && !shallPause ) {
+ streamErr = new StreamException("InterruptedException while decoding: "+GLMediaPlayerImpl.this.toString(), e);
+ }
+ } catch (Throwable t) {
+ streamErr = new StreamException(t.getClass().getSimpleName()+" while decoding: "+GLMediaPlayerImpl.this.toString(), t);
+ } finally {
+ if( null != nextFrame ) { // put back
+ videoFramesFree.put(nextFrame);
+ }
+ if( null != streamErr ) {
+ if( DEBUG ) {
+ final Throwable t = null != streamErr.getCause() ? streamErr.getCause() : streamErr;
+ System.err.println("Caught StreamException: "+t.getMessage());
+ t.printStackTrace();
+ }
+ // state transition incl. notification
+ shallPause = true;
+ isActive = false;
+ pause();
}
- newFrameAvailable(nextFrame, Platform.currentTimeMillis());
- nextFrame = null;
- }
- } catch (InterruptedException e) {
- isBlocked = false;
- if( !shallStop && !shallPause ) {
- e.printStackTrace(); // oops
- shallPause = false;
- shallStop = true;
- }
- } finally {
- if( null != nextFrame ) { // put back
- videoFramesFree.put(nextFrame);
}
+ } else {
+ // audio only
+ getNextTextureImpl(null, null);
}
}
}
- postNextTextureImpl(gl);
- destroySharedGL();
synchronized ( this ) {
+ if( sharedGLCtxCurrent ) {
+ postNextTextureImpl(sharedGLCtx.getGL());
+ }
+ destroySharedGL();
isRunning = false;
isActive = false;
this.notify(); // wake-up doStop()
}
}
}
- static int FramePusherInstanceId = 0;
- private FramePusher framePusher = null;
-
- private final void pauseFramePusher() {
- if( null != framePusher ) {
- framePusher.doPause();
+ static int StreamWorkerInstanceId = 0;
+ private StreamWorker streamWorker = null;
+
+ protected final int addStateEventMask(int event_mask, State newState) {
+ if( state != newState ) {
+ switch( newState ) {
+ case Uninitialized:
+ event_mask |= GLMediaEventListener.EVENT_CHANGE_UNINIT;
+ break;
+ case Initialized:
+ event_mask |= GLMediaEventListener.EVENT_CHANGE_INIT;
+ break;
+ case Playing:
+ event_mask |= GLMediaEventListener.EVENT_CHANGE_PLAY;
+ break;
+ case Paused:
+ event_mask |= GLMediaEventListener.EVENT_CHANGE_PAUSE;
+ break;
+ }
}
+ return event_mask;
}
- private final void resumeFramePusher() {
- if( null != framePusher ) {
- framePusher.doResume();
+
+ protected final void attributesUpdated(int event_mask) {
+ if( 0 != event_mask ) {
+ final long now = Platform.currentTimeMillis();
+ synchronized(eventListenersLock) {
+ for(Iterator<GLMediaEventListener> i = eventListeners.iterator(); i.hasNext(); ) {
+ i.next().attributesChanged(this, event_mask, now);
+ }
+ }
}
}
- private final void destroyFramePusher() {
- if( null != framePusher ) {
- framePusher.doStop();
- framePusher = null;
+
+ protected final void changeState(int event_mask, State newState) {
+ event_mask = addStateEventMask(event_mask, newState);
+ if( 0 != event_mask ) {
+ state = newState;
+ attributesUpdated( event_mask );
}
}
@@ -943,10 +1044,20 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
int bps_video, int bps_audio, float fps,
int videoFrames, int audioFrames, int duration, String vcodec, String acodec) {
int event_mask = 0;
+ if( state == State.Uninitialized ) {
+ event_mask |= GLMediaEventListener.EVENT_CHANGE_INIT;
+ state = State.Initialized;
+ }
+ if( STREAM_ID_AUTO == vid ) {
+ vid = STREAM_ID_NONE;
+ }
if( this.vid != vid ) {
event_mask |= GLMediaEventListener.EVENT_CHANGE_VID;
this.vid = vid;
}
+ if( STREAM_ID_AUTO == aid ) {
+ aid = STREAM_ID_NONE;
+ }
if( this.aid != aid ) {
event_mask |= GLMediaEventListener.EVENT_CHANGE_AID;
this.aid = aid;
@@ -984,30 +1095,24 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
if(0==event_mask) {
return;
}
- attributesUpdated(event_mask);
+ attributesUpdated(event_mask);
}
- protected final void attributesUpdated(int event_mask) {
- synchronized(eventListenersLock) {
- for(Iterator<GLMediaEventListener> i = eventListeners.iterator(); i.hasNext(); ) {
- i.next().attributesChanges(this, event_mask, Platform.currentTimeMillis());
- }
- }
- }
-
@Override
public final State destroy(GL gl) {
synchronized( stateLock ) {
- destroyFramePusher();
+ streamWorker.doStop();
+ streamWorker = null;
destroyImpl(gl);
removeAllTextureFrames(gl);
+ textureCount=0;
if( null != videoFramesFree ) {
videoFramesFree.clear();
}
if( null != videoFramesDecoded ) {
videoFramesDecoded.clear();
}
- state = State.Uninitialized;
+ changeState(0, State.Uninitialized);
return state;
}
}
@@ -1039,6 +1144,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
return videoFrames;
}
+ @Override
public final int getAudioFrames() {
return audioFrames;
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
index ad8587e6b..31ac55ec3 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
@@ -32,6 +32,7 @@ import java.net.URLConnection;
import java.nio.ByteBuffer;
import javax.media.opengl.GL;
+import javax.media.opengl.GLException;
import javax.media.opengl.GLProfile;
import jogamp.opengl.util.av.GLMediaPlayerImpl;
@@ -82,7 +83,7 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl {
}
@Override
- protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking, boolean issuePreAndPost) {
+ protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame) {
nextFrame.setPTS( getAudioPTSImpl() );
return true;
}
@@ -103,7 +104,7 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl {
}
@Override
- protected final void initGLStreamImpl(GL gl, int vid, int aid) throws IOException {
+ protected final void initStreamImpl(int vid, int aid) throws IOException {
try {
URLConnection urlConn = IOUtil.getResource("jogl/util/data/av/test-ntsc01-160x90.png", this.getClass().getClassLoader());
if(null != urlConn) {
@@ -137,6 +138,10 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl {
_w, _h, 0,
0, 0, _fps,
_totalFrames, 0, _duration, "png-static", null);
+ }
+ @Override
+ protected final void initGLImpl(GL gl) throws IOException, GLException {
+ // NOP
}
@Override
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
index cf864daa2..8865d47be 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
@@ -59,7 +59,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
private static final List<String> glueLibNames = new ArrayList<String>(); // none
- private static final int symbolCount = 38;
+ private static final int symbolCount = 42;
private static final String[] symbolNames = {
"avcodec_version",
"avformat_version",
@@ -71,22 +71,26 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
"avcodec_find_decoder",
"avcodec_open2", // 53.6.0 (opt)
"avcodec_open",
- "avcodec_alloc_frame",
+ "avcodec_alloc_frame",
+ "avcodec_get_frame_defaults",
+ "avcodec_free_frame", // 54.28.0 (opt)
"avcodec_default_get_buffer",
"avcodec_default_release_buffer",
+ "avcodec_flush_buffers",
"av_init_packet",
"av_new_packet",
"av_destruct_packet",
"av_free_packet",
"avcodec_decode_audio4", // 53.25.0 (opt)
"avcodec_decode_audio3", // 52.23.0
-/* 18 */ "avcodec_decode_video2", // 52.23.0
+/* 21 */ "avcodec_decode_video2", // 52.23.0
// libavutil
- "av_pix_fmt_descriptors",
- "av_free",
+ "av_pix_fmt_descriptors",
+ "av_frame_unref", // 55.0.0 (opt)
+ "av_free",
"av_get_bits_per_pixel",
-/* 22 */ "av_samples_get_buffer_size",
+/* 26 */ "av_samples_get_buffer_size",
// libavformat
"avformat_alloc_context",
@@ -98,13 +102,13 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
"av_dump_format",
"av_read_frame",
"av_seek_frame",
- "avformat_seek_file",
+ "avformat_seek_file", // ??? (opt)
"av_read_play",
"av_read_pause",
"avformat_network_init", // 53.13.0 (opt)
"avformat_network_deinit", // 53.13.0 (opt)
"avformat_find_stream_info", // 53.3.0 (opt)
-/* 38 */ "av_find_stream_info",
+/* 42 */ "av_find_stream_info",
};
// alternate symbol names
@@ -120,6 +124,9 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
"avformat_free_context", // 52.96.0 (opt)
"avformat_network_init", // 53.13.0 (opt)
"avformat_network_deinit", // 53.13.0 (opt)
+ "avformat_seek_file", // ??? (opt)
+ "avcodec_free_frame", // 54.28.0 (opt)
+ "av_frame_unref", // 55.0.0 (opt)
};
private static long[] symbolAddr;
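The symbol table above grew from 38 to 42 entries; avcodec_get_frame_defaults, avcodec_free_frame, avcodec_flush_buffers and av_frame_unref are new, and the version-dependent ones are repeated in the optional-symbol list so a failed lookup does not abort initialization. A sketch of the resulting guard pattern, under the assumption that an unresolved optional symbol is simply left at address 0 (the native code expresses the same check as HAS_FUNC(..)); the index constant below is hypothetical:

    // hypothetical sketch: branch on whether the optional symbol resolved
    if( 0 != symbolAddr[SYM_AVCODEC_FREE_FRAME] ) { // SYM_AVCODEC_FREE_FRAME is a made-up index
        // libavcodec >= 54.28.0 path: avcodec_free_frame(&frame)
    } else {
        // legacy fallback: av_free(frame)
    }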
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
index 9838181ab..11073b2e3 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
@@ -108,14 +108,14 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
/** POSIX ENOSYS {@value}: Function not implemented. FIXME: Move to GlueGen ?!*/
private static final int ENOSYS = 38;
- /** Count of zeroed buffers to return before switching to real sample provider */
- private static final int TEMP_BUFFER_COUNT = 20;
+ /** Default number of audio frames per video frame */
+ private static final int AV_DEFAULT_AFRAMES = 8;
// Instance data
- public static final VersionNumber avUtilVersion;
- public static final VersionNumber avFormatVersion;
- public static final VersionNumber avCodecVersion;
- static final boolean available;
+ private static final VersionNumber avUtilVersion;
+ private static final VersionNumber avFormatVersion;
+ private static final VersionNumber avCodecVersion;
+ private static final boolean available;
static {
final boolean libAVGood = FFMPEGDynamicLibraryBundleInfo.initSingleton();
@@ -146,31 +146,33 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
// General
//
- protected long moviePtr = 0;
+ private long moviePtr = 0;
//
// Video
//
- protected PixelFormat vPixelFmt = null;
- protected int vPlanes = 0;
- protected int vBitsPerPixel = 0;
- protected int vBytesPerPixelPerPlane = 0;
- protected int[] vLinesize = { 0, 0, 0 }; // per plane
- protected int[] vTexWidth = { 0, 0, 0 }; // per plane
- protected int texWidth, texHeight; // overall (stuffing planes in one texture)
- protected String singleTexComp = "r";
- protected GLPixelStorageModes psm;
+ private PixelFormat vPixelFmt = null;
+ private int vPlanes = 0;
+ private int vBitsPerPixel = 0;
+ private int vBytesPerPixelPerPlane = 0;
+ private int[] vLinesize = { 0, 0, 0 }; // per plane
+ private int[] vTexWidth = { 0, 0, 0 }; // per plane
+ private int texWidth, texHeight; // overall (stuffing planes in one texture)
+ private String singleTexComp = "r";
+ private GLPixelStorageModes psm;
//
// Audio
//
- protected static final int AFRAMES_PER_VFRAME = 8;
- protected int aFrameCount = 0;
- protected SampleFormat aSampleFmt = null;
- protected AudioSink.AudioDataFormat avChosenAudioFormat;
- protected AudioSink.AudioDataFormat sinkChosenAudioFormat;
+ /** Initial audio frame count, ALAudioSink may grow buffer! */
+ private int initialAudioFrameCount = AV_DEFAULT_AFRAMES;
+ private final int audioFrameGrowAmount = 8;
+ private final int audioFrameLimit = 128;
+ private SampleFormat aSampleFmt = null;
+ private AudioSink.AudioDataFormat avChosenAudioFormat;
+ private AudioSink.AudioDataFormat sinkChosenAudioFormat;
public FFMPEGMediaPlayer() {
if(!available) {
@@ -183,10 +185,6 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
psm = new GLPixelStorageModes();
audioSink = null;
}
- @Override
- protected final int validateTextureCount(int desiredTextureCount) {
- return desiredTextureCount>2 ? Math.max(4, desiredTextureCount) : 2;
- }
@Override
protected final void destroyImpl(GL gl) {
@@ -205,101 +203,108 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
}
@Override
- protected final void initGLStreamImpl(GL gl, int vid, int aid) throws IOException {
+ protected final void initStreamImpl(int vid, int aid) throws IOException {
if(0==moviePtr) {
throw new GLException("FFMPEG native instance null");
}
- {
- final GLContextImpl ctx = (GLContextImpl)gl.getContext();
- final ProcAddressTable pt = ctx.getGLProcAddressTable();
- final long procAddrGLTexSubImage2D = getAddressFor(pt, "glTexSubImage2D");
- if( 0 == procAddrGLTexSubImage2D ) {
- throw new InternalError("glTexSubImage2D n/a in ProcAddressTable: "+pt.getClass().getName()+" of "+ctx.getGLVersion());
- }
- final long procAddrGLGetError = getAddressFor(pt, "glGetError");
- if( 0 == procAddrGLGetError ) {
- throw new InternalError("glGetError n/a in ProcAddressTable: "+pt.getClass().getName()+" of "+ctx.getGLVersion());
- }
- setGLFuncs0(moviePtr, procAddrGLTexSubImage2D, procAddrGLGetError);
- }
-
- final String streamLocS=streamLoc.toString();
-
- aFrameCount = AFRAMES_PER_VFRAME * textureCount + AFRAMES_PER_VFRAME/2;
-
if(DEBUG) {
- System.err.println("initGLStream: p1 "+this);
+ System.err.println("initStream: p1 "+this);
}
+
+ final String streamLocS=streamLoc.toString();
destroyAudioSink();
- AudioSink _audioSink;
if( GLMediaPlayer.STREAM_ID_NONE == aid ) {
- _audioSink = AudioSinkFactory.createNull();
+ audioSink = AudioSinkFactory.createNull();
} else {
- _audioSink = AudioSinkFactory.createDefault();
+ audioSink = AudioSinkFactory.createDefault();
}
- final AudioDataFormat preferredAudioFormat = _audioSink.getPreferredFormat();
- // setStream(..) issues updateAttributes*(..), and defines avChosenAudioFormat, vid, aid, .. etc
- setStream0(moviePtr, streamLocS, vid, aid, aFrameCount, preferredAudioFormat.channelCount, preferredAudioFormat.sampleRate);
- // final int audioBytesPerFrame = bps_audio/8000 * frame_period * textureCount;
-
+ final AudioDataFormat preferredAudioFormat = audioSink.getPreferredFormat();
if(DEBUG) {
- System.err.println("initGLStream: p2 preferred "+preferredAudioFormat+", avChosen "+avChosenAudioFormat+", "+this);
+ System.err.println("initStream: p2 preferred "+preferredAudioFormat+", "+this);
+ }
+ // setStream(..) issues updateAttributes*(..), and defines avChosenAudioFormat, vid, aid, .. etc
+ final int snoopVideoFrameCount = 0; // 10*textureCount
+ setStream0(moviePtr, streamLocS, vid, aid, snoopVideoFrameCount, preferredAudioFormat.channelCount, preferredAudioFormat.sampleRate);
+ }
+
+ @Override
+ protected final void initGLImpl(GL gl) throws IOException, GLException {
+ if(0==moviePtr) {
+ throw new GLException("FFMPEG native instance null");
+ }
+ if(null == audioSink) {
+ throw new GLException("AudioSink null");
}
- sinkChosenAudioFormat = _audioSink.initSink(avChosenAudioFormat, aFrameCount);
+ if( null != gl ) {
+ final GLContextImpl ctx = (GLContextImpl)gl.getContext();
+ AccessController.doPrivileged(new PrivilegedAction<Object>() {
+ public Object run() {
+ final ProcAddressTable pt = ctx.getGLProcAddressTable();
+ final long procAddrGLTexSubImage2D = pt.getAddressFor("glTexSubImage2D");
+ final long procAddrGLGetError = pt.getAddressFor("glGetError");
+ final long procAddrGLFlush = pt.getAddressFor("glFlush");
+ final long procAddrGLFinish = pt.getAddressFor("glFinish");
+ setGLFuncs0(moviePtr, procAddrGLTexSubImage2D, procAddrGLGetError, procAddrGLFlush, procAddrGLFinish);
+ return null;
+ } } );
+ }
+
+ sinkChosenAudioFormat = audioSink.initSink(avChosenAudioFormat, initialAudioFrameCount, audioFrameGrowAmount, audioFrameLimit);
if(DEBUG) {
- System.err.println("initGLStream: p3 avChosen "+avChosenAudioFormat+", chosen "+sinkChosenAudioFormat);
+ System.err.println("initGL: p3 avChosen "+avChosenAudioFormat+", chosen "+sinkChosenAudioFormat);
}
if( null == sinkChosenAudioFormat ) {
- System.err.println("AudioSink "+_audioSink.getClass().getName()+" does not support "+avChosenAudioFormat+", using Null");
- _audioSink.destroy();
- _audioSink = AudioSinkFactory.createNull();
- sinkChosenAudioFormat = _audioSink.initSink(avChosenAudioFormat, aFrameCount);
+ System.err.println("AudioSink "+audioSink.getClass().getName()+" does not support "+avChosenAudioFormat+", using Null");
+ audioSink.destroy();
+ audioSink = AudioSinkFactory.createNull();
+ sinkChosenAudioFormat = audioSink.initSink(avChosenAudioFormat, initialAudioFrameCount, audioFrameGrowAmount, audioFrameLimit);
}
- audioSink = _audioSink;
- int tf, tif=GL.GL_RGBA; // texture format and internal format
- switch(vBytesPerPixelPerPlane) {
- case 1:
- if( gl.isGL3ES3() ) {
- // RED is supported on ES3 and >= GL3 [core]; ALPHA is deprecated on core
- tf = GL2ES2.GL_RED; tif=GL2ES2.GL_RED; singleTexComp = "r";
- } else {
- // ALPHA is supported on ES2 and GL2, i.e. <= GL3 [core] or compatibility
- tf = GL2ES2.GL_ALPHA; tif=GL2ES2.GL_ALPHA; singleTexComp = "a";
- }
- break;
- case 3: tf = GL2ES2.GL_RGB; tif=GL.GL_RGB; break;
- case 4: tf = GL2ES2.GL_RGBA; tif=GL.GL_RGBA; break;
- default: throw new RuntimeException("Unsupported bytes-per-pixel / plane "+vBytesPerPixelPerPlane);
- }
- setTextureFormat(tif, tf);
- setTextureType(GL.GL_UNSIGNED_BYTE);
- }
+ if( null != gl ) {
+ int tf, tif=GL.GL_RGBA; // texture format and internal format
+ switch(vBytesPerPixelPerPlane) {
+ case 1:
+ if( gl.isGL3ES3() ) {
+ // RED is supported on ES3 and >= GL3 [core]; ALPHA is deprecated on core
+ tf = GL2ES2.GL_RED; tif=GL2ES2.GL_RED; singleTexComp = "r";
+ } else {
+ // ALPHA is supported on ES2 and GL2, i.e. <= GL3 [core] or compatibility
+ tf = GL2ES2.GL_ALPHA; tif=GL2ES2.GL_ALPHA; singleTexComp = "a";
+ }
+ break;
+ case 3: tf = GL2ES2.GL_RGB; tif=GL.GL_RGB; break;
+ case 4: tf = GL2ES2.GL_RGBA; tif=GL.GL_RGBA; break;
+ default: throw new RuntimeException("Unsupported bytes-per-pixel / plane "+vBytesPerPixelPerPlane);
+ }
+ setTextureFormat(tif, tf);
+ setTextureType(GL.GL_UNSIGNED_BYTE);
+ }
+ }
@Override
protected final TextureFrame createTexImage(GL gl, int texName) {
return new TextureFrame( createTexImageImpl(gl, texName, texWidth, texHeight, true) );
}
/**
- * Catches IllegalArgumentException and returns 0 if functionName is n/a,
- * otherwise the ProcAddressTable's field value.
+ * @param pixFmt
+ * @param planes
+ * @param bitsPerPixel
+ * @param bytesPerPixelPerPlane
+ * @param lSz0
+ * @param lSz1
+ * @param lSz2
+ * @param tWd0
+ * @param tWd1
+ * @param tWd2
+ * @param audioFrameCount snooped audio-frame-count per video-frame, may be 0
+ * @param sampleFmt
+ * @param sampleRate
+ * @param channels
*/
- private final long getAddressFor(final ProcAddressTable table, final String functionName) {
- return AccessController.doPrivileged(new PrivilegedAction<Long>() {
- public Long run() {
- try {
- return Long.valueOf( table.getAddressFor(functionName) );
- } catch (IllegalArgumentException iae) {
- return Long.valueOf(0);
- }
- }
- } ).longValue();
- }
-
private void updateAttributes2(int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
int lSz0, int lSz1, int lSz2,
int tWd0, int tWd1, int tWd2,
- int sampleFmt, int sampleRate, int channels) {
+ int audioFrameCount, int sampleFmt, int sampleRate, int channels) {
vPixelFmt = PixelFormat.valueOf(pixFmt);
vPlanes = planes;
vBitsPerPixel = bitsPerPixel;
@@ -329,7 +334,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
default: // FIXME: Add more formats !
throw new RuntimeException("Unsupported pixelformat: "+vPixelFmt);
}
-
+ initialAudioFrameCount = audioFrameCount > 0 ? audioFrameCount : AV_DEFAULT_AFRAMES * 2;
aSampleFmt = SampleFormat.valueOf(sampleFmt);
final int sampleSize;
final boolean signed, fixedP;
@@ -370,7 +375,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
avChosenAudioFormat = new AudioDataFormat(AudioDataType.PCM, sampleRate, sampleSize, channels, signed, fixedP, true /* littleEndian */);
if(DEBUG) {
- System.err.println("audio: fmt "+aSampleFmt+", "+avChosenAudioFormat);
+ System.err.println("audio: fmt "+aSampleFmt+", "+avChosenAudioFormat+", aFrameCount "+audioFrameCount+" -> "+initialAudioFrameCount);
System.err.println("video: fmt "+vPixelFmt+", planes "+vPlanes+", bpp "+vBitsPerPixel+"/"+vBytesPerPixelPerPlane);
for(int i=0; i<3; i++) {
System.err.println("video: "+i+": "+vTexWidth[i]+"/"+vLinesize[i]);
@@ -473,6 +478,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
@Override
protected void preNextTextureImpl(GL gl) {
psm.setUnpackAlignment(gl, 1); // RGBA ? 4 : 1
+ gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit());
}
@Override
@@ -481,31 +487,25 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
}
@Override
- protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking, boolean issuePreAndPost) {
+ protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame) {
if(0==moviePtr) {
throw new GLException("FFMPEG native instance null");
}
- if( issuePreAndPost ) {
- preNextTextureImpl(gl);
- }
int vPTS = TextureFrame.INVALID_PTS;
- try {
+ if( null != nextFrame ) {
final Texture tex = nextFrame.getTexture();
- gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit());
tex.enable(gl);
tex.bind(gl);
+ }
- /** Try decode up to 10 packets to find one containing video. */
- for(int i=0; TextureFrame.INVALID_PTS == vPTS && 10 > i; i++) {
- vPTS = readNextPacket0(moviePtr, textureTarget, textureFormat, textureType);
- }
- } finally {
- if( issuePreAndPost ) {
- postNextTextureImpl(gl);
- }
+ /** Try decode up to 10 packets to find one containing video. */
+ for(int i=0; TextureFrame.INVALID_PTS == vPTS && 10 > i; i++) {
+ vPTS = readNextPacket0(moviePtr, textureTarget, textureFormat, textureType);
}
if( TextureFrame.INVALID_PTS != vPTS ) {
- nextFrame.setPTS(vPTS);
+ if( null != nextFrame ) {
+ nextFrame.setPTS(vPTS);
+ }
return true;
} else {
return false;
@@ -524,9 +524,6 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
return time * ( sinkChosenAudioFormat.channelCount * bytesPerSample * ( sinkChosenAudioFormat.sampleRate / 1000 ) );
}
- @Override
- protected final boolean syncAVRequired() { return true; }
-
private static native int getAvUtilVersion0();
private static native int getAvFormatVersion0();
private static native int getAvCodecVersion0();
@@ -543,10 +540,18 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
* [type PCM, sampleRate [10000(?)..44100..48000], sampleSize 16, channelCount 1-2, signed, littleEndian]
* </pre>
* </p>
+ *
+ * @param moviePtr
+ * @param url
+ * @param vid
+ * @param aid
+ * @param snoopVideoFrameCount snoop this number of video-frames to gather audio-frame-count per video-frame.
+ * If zero, gathering audio-frame-count is disabled!
+ * @param aChannelCount
+ * @param aSampleRate
*/
- private native void setStream0(long moviePtr, String url, int vid, int aid, int audioFrameCount,
- int aChannelCount, int aSampleRate);
- private native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError);
+ private native void setStream0(long moviePtr, String url, int vid, int aid, int snoopVideoFrameCount, int aChannelCount, int aSampleRate);
+ private native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish);
private native int getVideoPTS0(long moviePtr);
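The millisecond-to-byte estimate near the end of the FFMPEGMediaPlayer hunk above, time * ( channelCount * bytesPerSample * ( sampleRate / 1000 ) ), uses integer division on the sample rate. A short worked example, assuming the sink settled on 44100 Hz, 2 channels, 16 bit:

    // worked example of the ms -> byte conversion (values assumed for illustration)
    final int channelCount = 2, bytesPerSample = 2, sampleRate = 44100;
    final int timeMs = 500;
    final int bytes = timeMs * ( channelCount * bytesPerSample * ( sampleRate / 1000 ) );
    // 500 * ( 2 * 2 * 44 ) == 88000 bytes; the integer division 44100/1000 == 44
    // makes the estimate slightly below the exact 88200 bytes for 500 ms.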
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
index d0517fc5f..e1b773e9b 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
@@ -96,7 +96,7 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
}
@Override
- protected void initGLStreamImpl(GL gl, int vid, int aid) throws IOException {
+ protected void initStreamImpl(int vid, int aid) throws IOException {
if(0==moviePtr) {
throw new GLException("OMX native instance null");
}
@@ -116,6 +116,10 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
System.out.println("initGLStream: p2 "+this);
}
}
+ @Override
+ protected final void initGLImpl(GL gl) throws IOException, GLException {
+ // NOP
+ }
@Override
protected int getAudioPTSImpl() {
@@ -160,11 +164,11 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
}
@Override
- protected boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking, boolean issuePreAndPost) {
+ protected boolean getNextTextureImpl(GL gl, TextureFrame nextFrame) {
if(0==moviePtr) {
throw new GLException("OMX native instance null");
}
- final int nextTex = _getNextTextureID(moviePtr, blocking);
+ final int nextTex = _getNextTextureID(moviePtr, true);
if(0 < nextTex) {
/* FIXME
final TextureSequence.TextureFrame eglImgTex =
diff --git a/src/jogl/native/libav/ffmpeg_tool.h b/src/jogl/native/libav/ffmpeg_tool.h
index 06c3862db..76de406ae 100644
--- a/src/jogl/native/libav/ffmpeg_tool.h
+++ b/src/jogl/native/libav/ffmpeg_tool.h
@@ -52,6 +52,8 @@
typedef void (APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const GLvoid *pixels);
typedef GLenum (APIENTRYP PFNGLGETERRORPROC) (void);
+typedef void (APIENTRYP PFNGLFLUSH) (void);
+typedef void (APIENTRYP PFNGLFINISH) (void);
/**
* AV_TIME_BASE 1000000
@@ -68,11 +70,21 @@ typedef GLenum (APIENTRYP PFNGLGETERRORPROC) (void);
/** Sync w/ GLMediaPlayer.STREAM_ID_AUTO */
#define AV_STREAM_ID_AUTO -1
-/** Constant marking an invalid PTS, i.e. Integer.MIN_VALUE 0x80000000 {@value}. Sync w/ TextureFrame.INVALID_PTS */
+/** Default number of audio frames per video frame. Sync w/ FFMPEGMediaPlayer.AV_DEFAULT_AFRAMES. */
+#define AV_DEFAULT_AFRAMES 8
+
+/** Constant PTS marking an invalid PTS, i.e. Integer.MIN_VALUE == 0x80000000 == {@value}. Sync w/ TextureFrame.INVALID_PTS */
#define INVALID_PTS 0x80000000
+/** Constant PTS marking the end of the stream, i.e. Integer.MAX_VALUE == 0x7FFFFFFF == {@value}. Sync w/ TextureFrame.END_OF_STREAM_PTS */
+#define END_OF_STREAM_PTS 0x7FFFFFFF
+
+/** Until 55.0.0 */
#define AV_HAS_API_REQUEST_CHANNELS(pAV) (AV_VERSION_MAJOR(pAV->avcodecVersion) < 55)
+/** Since 55.0.0 */
+#define AV_HAS_API_REFCOUNTED_FRAMES(pAV) (AV_VERSION_MAJOR(pAV->avcodecVersion) >= 55)
+
static inline float my_av_q2f(AVRational a){
return a.num / (float) a.den;
}
@@ -81,14 +93,32 @@ static inline int32_t my_av_q2i32(int64_t snum, AVRational a){
}
typedef struct {
+ void *origPtr;
+ jobject nioRef;
+ int32_t size;
+} NIOBuffer_t;
+
+typedef struct {
+ int64_t ptsError; // Number of backward PTS values (earlier than last PTS, excluding AV_NOPTS_VALUE)
+ int64_t dtsError; // Number of backward DTS values (earlier than last DTS, excluding AV_NOPTS_VALUE)
+ int64_t ptsLast; // PTS of the last frame
+ int64_t dtsLast; // DTS of the last frame
+} PTSStats;
+
+
+typedef struct {
int32_t verbose;
uint32_t avcodecVersion;
uint32_t avformatVersion;
uint32_t avutilVersion;
+ int32_t useRefCountedFrames;
+
PFNGLTEXSUBIMAGE2DPROC procAddrGLTexSubImage2D;
PFNGLGETERRORPROC procAddrGLGetError;
+ PFNGLFLUSH procAddrGLFlush;
+ PFNGLFINISH procAddrGLFinish;
AVFormatContext* pFormatCtx;
int32_t vid;
@@ -101,6 +131,7 @@ typedef struct {
uint32_t vBytesPerPixelPerPlane;
enum PixelFormat vPixFmt; // native decoder fmt
int32_t vPTS; // msec - overall last video PTS
+ PTSStats vPTSStats;
int32_t vLinesize[3]; // decoded video linesize in bytes for each plane
int32_t vTexWidth[3]; // decoded video tex width in bytes for each plane
@@ -110,6 +141,7 @@ typedef struct {
AVCodecContext* pACodecCtx;
AVCodec* pACodec;
AVFrame** pAFrames;
+ NIOBuffer_t* pANIOBuffers;
int32_t aFrameCount;
int32_t aFrameCurrent;
int32_t aSampleRate;
@@ -117,6 +149,8 @@ typedef struct {
int32_t aFrameSize;
enum AVSampleFormat aSampleFmt; // native decoder fmt
int32_t aPTS; // msec - overall last audio PTS
+ PTSStats aPTSStats;
+ int32_t aFramesPerVideoFrame; // is 'snooped'
float fps; // frames per seconds
int32_t bps_stream; // bits per seconds
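The new PTSStats entries count how often PTS respectively DTS ran backwards; the evalPTS(..) helper added further below in the .c file prefers whichever timestamp stream has misbehaved less. For clarity, a transcription of that heuristic into Java-like form, where the PTSStats fields and AV_NOPTS_VALUE stand in for the native struct and constant:

    // sketch only; the authoritative implementation is the C evalPTS(..) below
    static long evalPTS(PTSStats s, long inPTS, long inDTS) {
        if( inDTS != AV_NOPTS_VALUE ) { if( inDTS <= s.dtsLast ) { s.dtsError++; } s.dtsLast = inDTS; }
        if( inPTS != AV_NOPTS_VALUE ) { if( inPTS <= s.ptsLast ) { s.ptsError++; } s.ptsLast = inPTS; }
        // prefer PTS unless it produced more back-steps than DTS, or PTS is unavailable
        if( inPTS != AV_NOPTS_VALUE && ( s.ptsError <= s.dtsError || inDTS == AV_NOPTS_VALUE ) ) {
            return inPTS;
        }
        return inDTS;
    }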
diff --git a/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c b/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c
index 99b385e3c..99ef02da5 100644
--- a/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c
+++ b/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c
@@ -60,8 +60,11 @@ typedef AVCodec *(APIENTRYP AVCODEC_FIND_DECODER)(enum CodecID id);
typedef int (APIENTRYP AVCODEC_OPEN2)(AVCodecContext *avctx, AVCodec *codec, AVDictionary **options); // 53.6.0
typedef int (APIENTRYP AVCODEC_OPEN)(AVCodecContext *avctx, AVCodec *codec);
typedef AVFrame *(APIENTRYP AVCODEC_ALLOC_FRAME)(void);
+typedef void (APIENTRYP AVCODEC_GET_FRAME_DEFAULTS)(AVFrame *frame);
+typedef void (APIENTRYP AVCODEC_FREE_FRAME)(AVFrame **frame);
typedef int (APIENTRYP AVCODEC_DEFAULT_GET_BUFFER)(AVCodecContext *s, AVFrame *pic);
typedef void (APIENTRYP AVCODEC_DEFAULT_RELEASE_BUFFER)(AVCodecContext *s, AVFrame *pic);
+typedef void (APIENTRYP AVCODEC_FLUSH_BUFFERS)(AVCodecContext *avctx);
typedef void (APIENTRYP AV_INIT_PACKET)(AVPacket *pkt);
typedef int (APIENTRYP AV_NEW_PACKET)(AVPacket *pkt, int size);
typedef void (APIENTRYP AV_DESTRUCT_PACKET)(AVPacket *pkt);
@@ -76,8 +79,11 @@ static AVCODEC_FIND_DECODER sp_avcodec_find_decoder;
static AVCODEC_OPEN2 sp_avcodec_open2; // 53.6.0
static AVCODEC_OPEN sp_avcodec_open;
static AVCODEC_ALLOC_FRAME sp_avcodec_alloc_frame;
+static AVCODEC_GET_FRAME_DEFAULTS sp_avcodec_get_frame_defaults;
+static AVCODEC_FREE_FRAME sp_avcodec_free_frame;
static AVCODEC_DEFAULT_GET_BUFFER sp_avcodec_default_get_buffer;
static AVCODEC_DEFAULT_RELEASE_BUFFER sp_avcodec_default_release_buffer;
+static AVCODEC_FLUSH_BUFFERS sp_avcodec_flush_buffers;
static AV_INIT_PACKET sp_av_init_packet;
static AV_NEW_PACKET sp_av_new_packet;
static AV_DESTRUCT_PACKET sp_av_destruct_packet;
@@ -85,17 +91,19 @@ static AV_FREE_PACKET sp_av_free_packet;
static AVCODEC_DECODE_AUDIO4 sp_avcodec_decode_audio4; // 53.25.0
static AVCODEC_DECODE_AUDIO3 sp_avcodec_decode_audio3; // 52.23.0
static AVCODEC_DECODE_VIDEO2 sp_avcodec_decode_video2; // 52.23.0
-// count: 15
+// count: 21
// libavutil
+typedef void (APIENTRYP AV_FRAME_UNREF)(AVFrame *frame);
typedef void (APIENTRYP AV_FREE)(void *ptr);
typedef int (APIENTRYP AV_GET_BITS_PER_PIXEL)(const AVPixFmtDescriptor *pixdesc);
typedef int (APIENTRYP AV_SAMPLES_GET_BUFFER_SIZE)(int *linesize, int nb_channels, int nb_samples, enum AVSampleFormat sample_fmt, int align);
static const AVPixFmtDescriptor* sp_av_pix_fmt_descriptors;
+static AV_FRAME_UNREF sp_av_frame_unref;
static AV_FREE sp_av_free;
static AV_GET_BITS_PER_PIXEL sp_av_get_bits_per_pixel;
static AV_SAMPLES_GET_BUFFER_SIZE sp_av_samples_get_buffer_size;
-// count: 22
+// count: 26
// libavformat
typedef AVFormatContext *(APIENTRYP AVFORMAT_ALLOC_CONTEXT)(void);
@@ -131,9 +139,9 @@ static AVFORMAT_NETWORK_INIT sp_avformat_network_init; // 53.13.0
static AVFORMAT_NETWORK_DEINIT sp_avformat_network_deinit; // 53.13.0
static AVFORMAT_FIND_STREAM_INFO sp_avformat_find_stream_info; // 53.3.0
static AV_FIND_STREAM_INFO sp_av_find_stream_info;
-// count: 38
+// count: 42
-#define SYMBOL_COUNT 38
+#define SYMBOL_COUNT 42
JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGDynamicLibraryBundleInfo_initSymbols0
(JNIEnv *env, jclass clazz, jobject jSymbols, jint count)
@@ -162,8 +170,11 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGDynamicLibraryB
sp_avcodec_open2 = (AVCODEC_OPEN2) (intptr_t) symbols[i++];
sp_avcodec_open = (AVCODEC_OPEN) (intptr_t) symbols[i++];
sp_avcodec_alloc_frame = (AVCODEC_ALLOC_FRAME) (intptr_t) symbols[i++];
+ sp_avcodec_get_frame_defaults = (AVCODEC_GET_FRAME_DEFAULTS) (intptr_t) symbols[i++];
+ sp_avcodec_free_frame = (AVCODEC_FREE_FRAME) (intptr_t) symbols[i++];
sp_avcodec_default_get_buffer = (AVCODEC_DEFAULT_GET_BUFFER) (intptr_t) symbols[i++];
sp_avcodec_default_release_buffer = (AVCODEC_DEFAULT_RELEASE_BUFFER) (intptr_t) symbols[i++];
+ sp_avcodec_flush_buffers = (AVCODEC_FLUSH_BUFFERS) (intptr_t) symbols[i++];
sp_av_init_packet = (AV_INIT_PACKET) (intptr_t) symbols[i++];
sp_av_new_packet = (AV_NEW_PACKET) (intptr_t) symbols[i++];
sp_av_destruct_packet = (AV_DESTRUCT_PACKET) (intptr_t) symbols[i++];
@@ -174,6 +185,7 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGDynamicLibraryB
// count: 18
sp_av_pix_fmt_descriptors = (const AVPixFmtDescriptor*) (intptr_t) symbols[i++];
+ sp_av_frame_unref = (AV_FRAME_UNREF) (intptr_t) symbols[i++];
sp_av_free = (AV_FREE) (intptr_t) symbols[i++];
sp_av_get_bits_per_pixel = (AV_GET_BITS_PER_PIXEL) (intptr_t) symbols[i++];
sp_av_samples_get_buffer_size = (AV_SAMPLES_GET_BUFFER_SIZE) (intptr_t) symbols[i++];
@@ -234,11 +246,11 @@ static void _updateJavaAttributes(JNIEnv *env, jobject instance, FFMPEGToolBasic
pAV->vBitsPerPixel, pAV->vBytesPerPixelPerPlane,
pAV->vLinesize[0], pAV->vLinesize[1], pAV->vLinesize[2],
pAV->vTexWidth[0], pAV->vTexWidth[1], pAV->vTexWidth[2],
- pAV->aSampleFmt, pAV->aSampleRate, pAV->aChannels);
+ pAV->aFramesPerVideoFrame, pAV->aSampleFmt, pAV->aSampleRate, pAV->aChannels);
}
}
-static void freeInstance(FFMPEGToolBasicAV_t* pAV) {
+static void freeInstance(JNIEnv *env, FFMPEGToolBasicAV_t* pAV) {
int i;
if(NULL != pAV) {
// Close the V codec
@@ -257,12 +269,34 @@ static void freeInstance(FFMPEGToolBasicAV_t* pAV) {
// Close the frames
if(NULL != pAV->pVFrame) {
- sp_av_free(pAV->pVFrame);
+ if(HAS_FUNC(sp_avcodec_free_frame)) {
+ sp_avcodec_free_frame(&pAV->pVFrame);
+ } else {
+ sp_av_free(pAV->pVFrame);
+ }
pAV->pVFrame = NULL;
}
+ if(NULL != pAV->pANIOBuffers) {
+ for(i=0; i<pAV->aFrameCount; i++) {
+ NIOBuffer_t * pNIOBuffer = &pAV->pANIOBuffers[i];
+ if( NULL != pNIOBuffer->nioRef ) {
+ if(pAV->verbose) {
+ fprintf(stderr, "A NIO: Free.X ptr %p / ref %p, %d bytes\n",
+ pNIOBuffer->origPtr, pNIOBuffer->nioRef, pNIOBuffer->size);
+ }
+ (*env)->DeleteGlobalRef(env, pNIOBuffer->nioRef);
+ }
+ }
+ free(pAV->pANIOBuffers);
+ pAV->pANIOBuffers = NULL;
+ }
if(NULL != pAV->pAFrames) {
for(i=0; i<pAV->aFrameCount; i++) {
- sp_av_free(pAV->pAFrames[i]);
+ if(HAS_FUNC(sp_avcodec_free_frame)) {
+ sp_avcodec_free_frame(&pAV->pAFrames[i]);
+ } else {
+ sp_av_free(pAV->pAFrames[i]);
+ }
}
free(pAV->pAFrames);
pAV->pAFrames = NULL;
@@ -358,7 +392,7 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_ini
jni_mid_pushSound = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "pushSound", "(Ljava/nio/ByteBuffer;II)V");
jni_mid_updateAttributes1 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes", "(IIIIIIIFIIILjava/lang/String;Ljava/lang/String;)V");
- jni_mid_updateAttributes2 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes2", "(IIIIIIIIIIIII)V");
+ jni_mid_updateAttributes2 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes2", "(IIIIIIIIIIIIII)V");
if(jni_mid_pushSound == NULL ||
jni_mid_updateAttributes1 == NULL ||
@@ -380,6 +414,16 @@ JNIEXPORT jlong JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_create
pAV->avformatVersion = sp_avformat_version();
pAV->avutilVersion = sp_avutil_version();
+ #if LIBAVCODEC_VERSION_MAJOR >= 55
+ // TODO: We keep the code using 1 a/v frame per decoding cycle for now.
+ // This is compatible w/ OpenAL's alBufferData(..)
+ // and w/ OpenGL's texture update command; both copy the data immediately.
+ // pAV->useRefCountedFrames = AV_HAS_API_REFCOUNTED_FRAMES(pAV);
+ pAV->useRefCountedFrames = 0;
+ #else
+ pAV->useRefCountedFrames = 0;
+ #endif
+
// Register all formats and codecs
sp_av_register_all();
// Network too ..
@@ -400,7 +444,7 @@ JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_destroy
FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr));
if (pAV != NULL) {
// stop assumed ..
- freeInstance(pAV);
+ freeInstance(env, pAV);
}
}
@@ -418,9 +462,14 @@ static uint64_t getDefaultAudioChannelLayout(int channelCount) {
}
}
+static int countAudioPacketsTillVideo(const int maxPackets, FFMPEGToolBasicAV_t *pAV, AVPacket* pPacket, int packetFull, AVFrame* pAFrame, int * pAudioFrames, int *pMaxDataSize);
+static int countVideoPacketsTillAudio(const int maxPackets, FFMPEGToolBasicAV_t *pAV, AVPacket* pPacket, int packetFull, int * pVideoFrames);
+static void initPTSStats(PTSStats *ptsStats);
+static int64_t evalPTS(PTSStats *ptsStats, int64_t inPTS, int64_t inDTS);
+
JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setStream0
- (JNIEnv *env, jobject instance, jlong ptr, jstring jURL, jint vid, jint aid, jint audioFrameCount,
- jint aChannelCount, jint aSampleRate)
+ (JNIEnv *env, jobject instance, jlong ptr, jstring jURL, jint vid, jint aid,
+ jint snoopVideoFrameCount, jint aChannelCount, jint aSampleRate)
{
int res, i;
jboolean iscopy;
@@ -437,8 +486,8 @@ JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setStre
const char *urlPath = (*env)->GetStringUTFChars(env, jURL, &iscopy);
res = sp_avformat_open_input(&pAV->pFormatCtx, urlPath, NULL, NULL);
if(res != 0) {
+ JoglCommon_throwNewRuntimeException(env, "Couldn't open URI: %s", urlPath);
(*env)->ReleaseStringChars(env, jURL, (const jchar *)urlPath);
- JoglCommon_throwNewRuntimeException(env, "Couldn't open URL");
return;
}
@@ -508,6 +557,12 @@ JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setStre
}
if(0<=pAV->aid) {
+ AVFrame * pAFrame0 = sp_avcodec_alloc_frame();
+ if( NULL == pAFrame0 ) {
+ JoglCommon_throwNewRuntimeException(env, "Couldn't alloc 1st audio frame\n");
+ return;
+ }
+
// Get a pointer to the codec context for the audio stream
// FIXME: Libav Binary compatibility! JAU01
pAV->pACodecCtx=pAV->pAStream->codec;
@@ -543,6 +598,9 @@ JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setStre
}
// Open codec
+ #if LIBAVCODEC_VERSION_MAJOR >= 55
+ pAV->pACodecCtx->refcounted_frames = pAV->useRefCountedFrames;
+ #endif
if(HAS_FUNC(sp_avcodec_open2)) {
res = sp_avcodec_open2(pAV->pACodecCtx, pAV->pACodec, NULL);
} else {
@@ -553,14 +611,11 @@ JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setStre
return;
}
- // Allocate audio frames
- // FIXME: Libav Binary compatibility! JAU01
pAV->aSampleRate = pAV->pACodecCtx->sample_rate;
pAV->aChannels = pAV->pACodecCtx->channels;
pAV->aFrameSize = pAV->pACodecCtx->frame_size; // in samples!
pAV->aSampleFmt = pAV->pACodecCtx->sample_fmt;
pAV->frames_audio = pAV->pAStream->nb_frames;
-
if( pAV->verbose ) {
fprintf(stderr, "A channels %d, sample_rate %d, frame_size %d, frame_number %d, r_frame_rate %f, avg_frame_rate %f, nb_frames %d, \n",
pAV->aChannels, pAV->aSampleRate, pAV->aFrameSize, pAV->pACodecCtx->frame_number,
@@ -569,12 +624,97 @@ JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setStre
pAV->pAStream->nb_frames);
}
- pAV->aFrameCount = audioFrameCount;
- pAV->pAFrames = calloc(audioFrameCount, sizeof(AVFrame*));
- for(i=0; i<pAV->aFrameCount; i++) {
- pAV->pAFrames[i]=sp_avcodec_alloc_frame();
- if(pAV->pAFrames[i]==NULL) {
- JoglCommon_throwNewRuntimeException(env, "Couldn't alloc audio frame %d / %d", i, audioFrameCount);
+ if( 0 >= snoopVideoFrameCount ) {
+ pAV->aFramesPerVideoFrame = 0;
+ } else {
+ if( 0<=pAV->vid ) {
+ int aFramesPerVideoFrame;
+ int aFramesSequential = 0;
+ int aMaxDataSize = 0;
+ AVPacket packet;
+ int packetFull = 0;
+ int _aFramesBeforeVideo;
+ int _audioFramesOverlap=0;
+ int _aMaxDataSize;
+ int _vFrames;
+ int _vFramesOverlap=0;
+ int _packetCount;
+ int totalVFrames = 0;
+ int totalAFrames = 0;
+ int totalPackets = 0;
+
+ while( totalVFrames < snoopVideoFrameCount ) {
+ int _packetCount = countAudioPacketsTillVideo(40, pAV, &packet, packetFull, pAFrame0, &_aFramesBeforeVideo, &_aMaxDataSize);
+ if( _packetCount >= 0 ) {
+ totalPackets += _packetCount;
+ if( _aFramesBeforeVideo > 0 ) {
+ // one video frame!
+ _vFramesOverlap=1;
+ packetFull = 1;
+ }
+ _aFramesBeforeVideo += _audioFramesOverlap;
+ totalAFrames += _aFramesBeforeVideo;
+ if( _aFramesBeforeVideo > aFramesSequential ) {
+ aFramesSequential = _aFramesBeforeVideo;
+ }
+ if( _aMaxDataSize > aMaxDataSize ) {
+ aMaxDataSize = _aMaxDataSize;
+ }
+ _packetCount = countVideoPacketsTillAudio(40, pAV, &packet, packetFull, &_vFrames);
+ if( _packetCount >= 0 ) {
+ totalPackets += _packetCount;
+ if( _vFrames > 0 ) {
+ // one audio frame!
+ _audioFramesOverlap=1;
+ packetFull = 1;
+ }
+ _vFrames += _vFramesOverlap;
+ totalVFrames += _vFrames;
+ }
+ if( pAV->verbose ) {
+ fprintf(stderr, "Snoop Packet #%d, V-Frames: %d, A-frames %d Seq(now %d, max %d), max-size (now %d, max %d)\n",
+ totalPackets, totalVFrames, totalAFrames, _aFramesBeforeVideo, aFramesSequential, _aMaxDataSize, aMaxDataSize);
+ }
+ }
+ }
+ const int audioFramesReadAhead = totalAFrames - totalVFrames;
+ if( audioFramesReadAhead > aFramesSequential ) {
+ aFramesPerVideoFrame = audioFramesReadAhead;
+ } else {
+ aFramesPerVideoFrame = aFramesSequential;
+ }
+ if( AV_DEFAULT_AFRAMES > aFramesPerVideoFrame || aFramesPerVideoFrame > 10*AV_DEFAULT_AFRAMES ) {
+ aFramesPerVideoFrame = AV_DEFAULT_AFRAMES;
+ }
+ pAV->aFramesPerVideoFrame = aFramesPerVideoFrame;
+ sp_av_seek_frame(pAV->pFormatCtx, -1, 0, AVSEEK_FLAG_BACKWARD);
+ sp_avcodec_flush_buffers( pAV->pACodecCtx );
+ if( pAV->verbose ) {
+ fprintf(stderr, "Snooped Packets %d, V-Frames: %d, A-frames %d Seq %d, readAhead %d -> Cached %d/%d, max-size %d\n",
+ totalPackets, totalVFrames, totalAFrames, aFramesSequential, audioFramesReadAhead, aFramesPerVideoFrame, pAV->aFramesPerVideoFrame, aMaxDataSize);
+ }
+ } else {
+ pAV->aFramesPerVideoFrame = AV_DEFAULT_AFRAMES;
+ if( pAV->verbose ) {
+ fprintf(stderr, "A-frame Count %d\n", pAV->aFramesPerVideoFrame);
+ }
+ }
+ }
+
+ // Allocate audio frames
+ // FIXME: Libav Binary compatibility! JAU01
+ if( pAV->useRefCountedFrames && pAV->aFramesPerVideoFrame > 0 ) {
+ pAV->aFrameCount = pAV->aFramesPerVideoFrame;
+ } else {
+ pAV->aFrameCount = 1;
+ }
+ pAV->pANIOBuffers = calloc(pAV->aFrameCount, sizeof(NIOBuffer_t));
+ pAV->pAFrames = calloc(pAV->aFrameCount, sizeof(AVFrame*));
+ pAV->pAFrames[0] = pAFrame0;
+ for(i=1; i<pAV->aFrameCount; i++) {
+ pAV->pAFrames[i] = sp_avcodec_alloc_frame();
+ if( NULL == pAV->pAFrames[i] ) {
+ JoglCommon_throwNewRuntimeException(env, "Couldn't alloc audio frame %d / %d", i, pAV->aFrameCount);
return;
}
}
@@ -612,6 +752,9 @@ JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setStre
}
// Open codec
+ #if LIBAVCODEC_VERSION_MAJOR >= 55
+ pAV->pVCodecCtx->refcounted_frames = pAV->useRefCountedFrames;
+ #endif
if(HAS_FUNC(sp_avcodec_open2)) {
res = sp_avcodec_open2(pAV->pVCodecCtx, pAV->pVCodec, NULL);
} else {
@@ -643,7 +786,7 @@ JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setStre
pAV->pVStream->nb_frames);
}
- // Allocate video frames
+ // Allocate video frame
// FIXME: Libav Binary compatibility! JAU01
pAV->vPixFmt = pAV->pVCodecCtx->pix_fmt;
{
@@ -677,15 +820,19 @@ JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setStre
}
pAV->vPTS=0;
pAV->aPTS=0;
+ initPTSStats(&pAV->vPTSStats);
+ initPTSStats(&pAV->aPTSStats);
_updateJavaAttributes(env, instance, pAV);
}
JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setGLFuncs0
- (JNIEnv *env, jobject instance, jlong ptr, jlong jProcAddrGLTexSubImage2D, jlong jProcAddrGLGetError)
+ (JNIEnv *env, jobject instance, jlong ptr, jlong jProcAddrGLTexSubImage2D, jlong jProcAddrGLGetError, jlong jProcAddrGLFlush, jlong jProcAddrGLFinish)
{
FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr));
pAV->procAddrGLTexSubImage2D = (PFNGLTEXSUBIMAGE2DPROC) (intptr_t)jProcAddrGLTexSubImage2D;
pAV->procAddrGLGetError = (PFNGLGETERRORPROC) (intptr_t)jProcAddrGLGetError;
+ pAV->procAddrGLFlush = (PFNGLFLUSH) (intptr_t)jProcAddrGLFlush;
+ pAV->procAddrGLFinish = (PFNGLFINISH) (intptr_t)jProcAddrGLFinish;
}
#if 0
@@ -707,15 +854,19 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
sp_av_init_packet(&packet);
- if( sp_av_read_frame(pAV->pFormatCtx, &packet) >= 0 ) {
+ const int avRes = sp_av_read_frame(pAV->pFormatCtx, &packet);
+ if( AVERROR_EOF == avRes || ( pAV->pFormatCtx->pb && pAV->pFormatCtx->pb->eof_reached ) ) {
+ resPTS = END_OF_STREAM_PTS;
+ } else if( 0 <= avRes ) {
+ if( pAV->verbose ) {
+ fprintf(stderr, "P: ptr %p, size %d\n", packet.data, packet.size);
+ }
if(packet.stream_index==pAV->aid) {
// Decode audio frame
if(NULL == pAV->pAFrames) { // no audio registered
sp_av_free_packet(&packet);
return 0;
}
- AVFrame* pAFrameCurrent = pAV->pAFrames[pAV->aFrameCurrent];
- pAV->aFrameCurrent = ( pAV->aFrameCurrent + 1 ) % pAV->aFrameCount ;
int frameCount;
int flush_complete = 0;
for ( frameCount=0; 0 < packet.size || 0 == frameCount; frameCount++ ) {
@@ -723,6 +874,13 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
if (flush_complete) {
break;
}
+ NIOBuffer_t * pNIOBufferCurrent = &pAV->pANIOBuffers[pAV->aFrameCurrent];
+ AVFrame* pAFrameCurrent = pAV->pAFrames[pAV->aFrameCurrent];
+ if( pAV->useRefCountedFrames ) {
+ sp_av_frame_unref(pAFrameCurrent);
+ pAV->aFrameCurrent = ( pAV->aFrameCurrent + 1 ) % pAV->aFrameCount ;
+ }
+ sp_avcodec_get_frame_defaults(pAFrameCurrent);
if(HAS_FUNC(sp_avcodec_decode_audio4)) {
len1 = sp_avcodec_decode_audio4(pAV->pACodecCtx, pAFrameCurrent, &frameDecoded, &packet);
} else {
@@ -763,24 +921,41 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
const AVRational time_base = pAV->pAStream->time_base;
const int64_t pkt_pts = pAFrameCurrent->pkt_pts;
- int aptsMode;
if( 0 == frameCount && AV_NOPTS_VALUE != pkt_pts ) { // 1st frame only, discard invalid PTS ..
pAV->aPTS = my_av_q2i32( pkt_pts * 1000, time_base);
- aptsMode = 0;
} else { // subsequent frames or invalid PTS ..
const int32_t bytesPerSample = 2; // av_get_bytes_per_sample( pAV->pACodecCtx->sample_fmt );
pAV->aPTS += data_size / ( pAV->aChannels * bytesPerSample * ( pAV->aSampleRate / 1000 ) );
- aptsMode = 1;
}
if( pAV->verbose ) {
int32_t aDTS = my_av_q2i32( pAFrameCurrent->pkt_dts * 1000, time_base);
- fprintf(stderr, "A pts %d [pkt_pts %ld], dts %d [pkt_dts %ld], dataSize %d, f# %d, pts-mode %d\n",
- pAV->aPTS, pkt_pts, aDTS, pAFrameCurrent->pkt_dts, data_size, frameCount, aptsMode);
+ fprintf(stderr, "A pts %d [pkt_pts %ld], dts %d [pkt_dts %ld], f# %d, aFrame %d/%d %p, dataPtr %p, dataSize %d\n",
+ pAV->aPTS, pkt_pts, aDTS, pAFrameCurrent->pkt_dts, frameCount,
+ pAV->aFrameCurrent, pAV->aFrameCount, pAFrameCurrent, pAFrameCurrent->data[0], data_size);
}
if( NULL != env ) {
- jobject jSampleData = (*env)->NewDirectByteBuffer(env, pAFrameCurrent->data[0], data_size);
- (*env)->CallVoidMethod(env, instance, jni_mid_pushSound, jSampleData, data_size, pAV->aPTS);
+ NIOBuffer_t * pNIOBufferCurrent = &pAV->pANIOBuffers[pAV->aFrameCurrent];
+ int newNIO = NULL == pNIOBufferCurrent->nioRef;
+ if( !newNIO && ( pAFrameCurrent->data[0] != pNIOBufferCurrent->origPtr || data_size > pNIOBufferCurrent->size ) ) {
+ if(pAV->verbose) {
+ fprintf(stderr, "A NIO: Free.0 ptr %p / ref %p, %d bytes\n",
+ pNIOBufferCurrent->origPtr, pNIOBufferCurrent->nioRef, pNIOBufferCurrent->size);
+ }
+ (*env)->DeleteGlobalRef(env, pNIOBufferCurrent->nioRef);
+ newNIO = 1;
+ }
+ if( newNIO ) {
+ jobject jSampleData = (*env)->NewDirectByteBuffer(env, pAFrameCurrent->data[0], data_size);
+ pNIOBufferCurrent->nioRef = (*env)->NewGlobalRef(env, jSampleData);
+ pNIOBufferCurrent->origPtr = pAFrameCurrent->data[0];
+ pNIOBufferCurrent->size = data_size;
+ if(pAV->verbose) {
+ fprintf(stderr, "A NIO: Alloc ptr %p / ref %p, %d bytes\n",
+ pNIOBufferCurrent->origPtr, pNIOBufferCurrent->nioRef, pNIOBufferCurrent->size);
+ }
+ }
+ (*env)->CallVoidMethod(env, instance, jni_mid_pushSound, pNIOBufferCurrent->nioRef, data_size, pAV->aPTS);
}
}
} else if(packet.stream_index==pAV->vid) {
@@ -796,6 +971,7 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
if (flush_complete) {
break;
}
+ sp_avcodec_get_frame_defaults(pAV->pVFrame);
len1 = sp_avcodec_decode_video2(pAV->pVCodecCtx, pAV->pVFrame, &frameDecoded, &packet);
if (len1 < 0) {
// if error, we skip the frame
@@ -816,26 +992,27 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
// FIXME: Libav Binary compatibility! JAU01
const AVRational time_base = pAV->pVStream->time_base;
const int64_t pkt_pts = pAV->pVFrame->pkt_pts;
- if(AV_NOPTS_VALUE != pkt_pts) { // discard invalid PTS ..
- pAV->vPTS = my_av_q2i32( pkt_pts * 1000, time_base);
- if( pAV->verbose ) {
- int32_t vDTS = my_av_q2i32( pAV->pVFrame->pkt_dts * 1000, time_base);
+ const int64_t pkt_dts = pAV->pVFrame->pkt_dts;
+ const int64_t fix_pts = evalPTS(&pAV->vPTSStats, pkt_pts, pkt_dts);
+ if( AV_NOPTS_VALUE != fix_pts ) { // discard invalid PTS ..
+ pAV->vPTS = my_av_q2i32( fix_pts * 1000, time_base);
+ }
+ if( pAV->verbose ) {
+ const int32_t vPTS = AV_NOPTS_VALUE != pkt_pts ? my_av_q2i32( pkt_pts * 1000, time_base) : 0;
+ const int32_t vDTS = AV_NOPTS_VALUE != pkt_dts ? my_av_q2i32( pkt_dts * 1000, time_base) : 0;
- double frame_delay_d = av_q2d(pAV->pVCodecCtx->time_base);
- double frame_repeat_d = pAV->pVFrame->repeat_pict * (frame_delay_d * 0.5);
+ const double frame_delay_d = av_q2d(pAV->pVCodecCtx->time_base);
+ const double frame_repeat_d = pAV->pVFrame->repeat_pict * (frame_delay_d * 0.5);
- int32_t frame_delay_i = my_av_q2i32(1000, pAV->pVCodecCtx->time_base);
- int32_t frame_repeat_i = pAV->pVFrame->repeat_pict * (frame_delay_i / 2);
+ const int32_t frame_delay_i = my_av_q2i32(1000, pAV->pVCodecCtx->time_base);
+ const int32_t frame_repeat_i = pAV->pVFrame->repeat_pict * (frame_delay_i / 2);
- const char * warn = frame_repeat_i > 0 ? "REPEAT" : "NORMAL" ;
+ const char * warn = frame_repeat_i > 0 ? "REPEAT" : "NORMAL" ;
- fprintf(stderr, "V pts %d [pkt_pts %ld], dts %d [pkt_dts %ld], time d(%lf s + r %lf = %lf s), i(%d ms + r %d = %d ms) - %s - f# %d\n",
- pAV->vPTS, pkt_pts, vDTS, pAV->pVFrame->pkt_dts,
- frame_delay_d, frame_repeat_d, (frame_delay_d + frame_repeat_d),
- frame_delay_i, frame_repeat_i, (frame_delay_i + frame_repeat_i), warn, frameCount);
- }
- } else if( pAV->verbose ) {
- fprintf(stderr, "V pts ?? [pkt_pts %ld], pts2 ?? [pkt_dts %ld], f# %d\n", pkt_pts, pAV->pVFrame->pkt_dts, frameCount);
+ fprintf(stderr, "V fix_pts %d, pts %d [pkt_pts %ld], dts %d [pkt_dts %ld], time d(%lf s + r %lf = %lf s), i(%d ms + r %d = %d ms) - %s - f# %d\n",
+ pAV->vPTS, vPTS, pkt_pts, vDTS, pkt_dts,
+ frame_delay_d, frame_repeat_d, (frame_delay_d + frame_repeat_d),
+ frame_delay_i, frame_repeat_i, (frame_delay_i + frame_repeat_i), warn, frameCount);
}
resPTS = pAV->vPTS; // Video Frame!
@@ -866,6 +1043,11 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
texFmt, texType, pAV->pVFrame->data[2]);
DBG_TEXSUBIMG2D_b(pAV);
} // FIXME: Add more planar formats !
+ pAV->procAddrGLFinish();
+ //pAV->procAddrGLFlush();
+ if( pAV->useRefCountedFrames ) {
+ sp_av_frame_unref(pAV->pVFrame);
+ }
}
}
@@ -877,6 +1059,135 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
return resPTS;
}
+static int countAudioPacketsTillVideo(const int maxPackets, FFMPEGToolBasicAV_t *pAV, AVPacket* pPacket, int packetFull, AVFrame* pAFrame, int * pAudioFrames, int *pMaxDataSize) {
+ int frameDecoded;
+ int audioFrames = 0;
+ int maxDataSize = 0;
+ int packetCount = 0;
+
+ for( packetCount = 0; packetCount < maxPackets; packetCount++ ) {
+ int readRes;
+ if( !packetFull ) {
+ sp_av_init_packet(pPacket);
+ readRes = sp_av_read_frame(pAV->pFormatCtx, pPacket);
+ } else {
+ readRes = 1;
+ packetFull = 0;
+ }
+ if( readRes >= 0 ) {
+ if(pPacket->stream_index==pAV->aid) {
+ // Decode audio frame
+ int frameCount;
+ int flush_complete = 0;
+ for ( frameCount=0; 0 < pPacket->size || 0 == frameCount; frameCount++ ) {
+ int len1;
+ if (flush_complete) {
+ break;
+ }
+ if(HAS_FUNC(sp_avcodec_decode_audio4)) {
+ len1 = sp_avcodec_decode_audio4(pAV->pACodecCtx, pAFrame, &frameDecoded, pPacket);
+ } else {
+ #if 0
+ len1 = sp_avcodec_decode_audio3(pAV->pACodecCtx, int16_t *samples, int *frame_size_ptr, &frameDecoded, pPacket);
+ #endif
+ return -1;
+ }
+ if (len1 < 0) {
+ // if error, we skip the frame
+ pPacket->size = 0;
+ break;
+ }
+ pPacket->data += len1;
+ pPacket->size -= len1;
+
+ if (!frameDecoded) {
+ // stop sending empty packets if the decoder is finished
+ if (!pPacket->data && pAV->pACodecCtx->codec->capabilities & CODEC_CAP_DELAY) {
+ flush_complete = 1;
+ }
+ continue;
+ }
+
+ int32_t data_size = 0;
+ if(HAS_FUNC(sp_av_samples_get_buffer_size)) {
+ data_size = sp_av_samples_get_buffer_size(NULL /* linesize, may be NULL */,
+ pAV->aChannels,
+ pAFrame->nb_samples,
+ pAFrame->format,
+ 1 /* align */);
+ if( data_size > maxDataSize ) {
+ maxDataSize = data_size;
+ }
+ }
+ if( pAV->useRefCountedFrames ) {
+ sp_av_frame_unref(pAFrame);
+ }
+ audioFrames++;
+ }
+ } else if(pPacket->stream_index==pAV->vid) {
+ if( 0 < audioFrames ) {
+ break; // done!
+ }
+ }
+ }
+ }
+ *pAudioFrames = audioFrames;
+ *pMaxDataSize = maxDataSize;
+ return packetCount;
+}
+static int countVideoPacketsTillAudio(const int maxPackets, FFMPEGToolBasicAV_t *pAV, AVPacket* pPacket, int packetFull, int * pVideoFrames) {
+ int videoFrames = 0;
+ int packetCount = 0;
+
+ for( packetCount = 0; packetCount < maxPackets; packetCount++ ) {
+ int readRes;
+ if( !packetFull ) {
+ sp_av_init_packet(pPacket);
+ readRes = sp_av_read_frame(pAV->pFormatCtx, pPacket);
+ } else {
+ readRes = 1;
+ packetFull = 0;
+ }
+ if( readRes >= 0 ) {
+ if(pPacket->stream_index==pAV->aid) {
+ if( 0 < videoFrames ) {
+ break; // done!
+ }
+ } else if(pPacket->stream_index==pAV->vid) {
+ videoFrames++;
+ }
+ }
+ }
+ *pVideoFrames = videoFrames;
+ return packetCount;
+}
+static void initPTSStats(PTSStats *ptsStats) {
+ ptsStats->ptsError = 0;
+ ptsStats->dtsError = 0;
+ ptsStats->ptsLast = INT64_MIN;
+ ptsStats->dtsLast = INT64_MIN;
+}
+static int64_t evalPTS(PTSStats *ptsStats, int64_t inPTS, int64_t inDTS) {
+ int64_t resPTS = AV_NOPTS_VALUE;
+
+ if ( inDTS != AV_NOPTS_VALUE ) {
+ ptsStats->dtsError += inDTS <= ptsStats->dtsLast;
+ ptsStats->dtsLast = inDTS;
+ }
+ if ( inPTS != AV_NOPTS_VALUE ) {
+ ptsStats->ptsError += inPTS <= ptsStats->ptsLast;
+ ptsStats->ptsLast = inPTS;
+ }
+ if ( inPTS != AV_NOPTS_VALUE &&
+ ( ptsStats->ptsError<=ptsStats->dtsError || inDTS == AV_NOPTS_VALUE ) ) {
+ resPTS = inPTS;
+ } else {
+ resPTS = inDTS;
+ }
+ return resPTS;
+}
+
+
JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_play0
(JNIEnv *env, jobject instance, jlong ptr)
{
@@ -893,21 +1204,51 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_pause0
JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_seek0
(JNIEnv *env, jobject instance, jlong ptr, jint pos1)
{
- FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr));
- int64_t pos0 = pAV->vPTS;
- int64_t pts0 = pAV->pVFrame->pkt_pts;
- int64_t pts1 = (int64_t) (pos1 * (int64_t) pAV->pVStream->time_base.den)
- / (1000 * (int64_t) pAV->pVStream->time_base.num);
-
+ const FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr));
+ const int64_t pos0 = pAV->vPTS;
+ const int64_t pts0 = pAV->pVFrame->pkt_pts;
+ int streamID;
+ AVRational time_base;
+ if( pAV->vid >= 0 ) {
+ streamID = pAV->vid;
+ time_base = pAV->pVStream->time_base;
+ } else if( pAV->aid >= 0 ) {
+ streamID = pAV->aid;
+ time_base = pAV->pAStream->time_base;
+ } else {
+ return pAV->vPTS;
+ }
+ int64_t pts1 = (int64_t) (pos1 * (int64_t) time_base.den)
+ / (1000 * (int64_t) time_base.num);
int flags = 0;
if(pos1 < pos0) {
flags |= AVSEEK_FLAG_BACKWARD;
}
- fprintf(stderr, "SEEK: pre : u %ld, p %ld -> u %ld, p %ld\n", pos0, pts0, pos1, pts1);
- sp_av_seek_frame(pAV->pFormatCtx, pAV->vid, pts1, flags);
- pAV->vPTS = my_av_q2i32( pAV->pVFrame->pkt_pts * 1000, pAV->pVStream->time_base);
- fprintf(stderr, "SEEK: post : u %ld, p %ld\n", pAV->vPTS, pAV->pVFrame->pkt_pts);
- return pAV->vPTS;
+ int res;
+ if(HAS_FUNC(sp_av_seek_frame)) {
+ if(pos1 < pos0) {
+ flags |= AVSEEK_FLAG_BACKWARD;
+ }
+ fprintf(stderr, "SEEK.0: pre : s %ld / %ld -> t %ld / %ld\n", pos0, pts0, pos1, pts1);
+ sp_av_seek_frame(pAV->pFormatCtx, streamID, pts1, flags);
+
+ } else if(HAS_FUNC(sp_avformat_seek_file)) {
+ int64_t ptsD = pts1 - pts0;
+ int64_t seek_min = ptsD > 0 ? pts1 - ptsD : INT64_MIN;
+ int64_t seek_max = ptsD < 0 ? pts1 - ptsD : INT64_MAX;
+ fprintf(stderr, "SEEK.1: pre : s %ld / %ld -> t %ld / %ld [%ld .. %ld]\n",
+ pos0, pts0, pos1, pts1, seek_min, seek_max);
+ res = sp_avformat_seek_file(pAV->pFormatCtx, -1, seek_min, pts1, seek_max, flags);
+ }
+ if(NULL != pAV->pVCodecCtx) {
+ sp_avcodec_flush_buffers( pAV->pVCodecCtx );
+ }
+ if(NULL != pAV->pACodecCtx) {
+ sp_avcodec_flush_buffers( pAV->pACodecCtx );
+ }
+ const jint vPTS = my_av_q2i32( pAV->pVFrame->pkt_pts * 1000, pAV->pVStream->time_base);
+ fprintf(stderr, "SEEK: post : res %d, u %ld, p %ld\n", res, vPTS, pAV->pVFrame->pkt_pts);
+ return vPTS;
}
JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_getVideoPTS0