Diffstat (limited to 'src/jogl')
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java             | 113
-rw-r--r--  src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java  |  32
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java             | 217
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java             |  12
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java        |   4
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java            |   6
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0400Natives.java       |   2
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0500Natives.java       |   2
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0600Natives.java       |   2
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java         |   2
-rw-r--r--  src/jogl/native/libav/ffmpeg_impl_template.c                              | 151
-rw-r--r--  src/jogl/native/libav/ffmpeg_static.c                                     |   3
-rw-r--r--  src/jogl/native/libav/ffmpeg_tool.h                                       |  11
13 files changed, 456 insertions, 101 deletions
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
index 4ca6ae503..d129af621 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
@@ -50,7 +50,7 @@ import com.jogamp.opengl.util.texture.TextureSequence;
 * Audio may be supported and played back internally or via an {@link AudioSink} implementation.
* </p>
* <p>
- * Audio and video streams can be selected or muted via {@link #playStream(Uri, int, int, int)}
+ * Audio and video streams can be selected or muted via {@link #playStream(Uri, int, int, int, int)}
* using the appropriate <a href="#streamIDs">stream id</a>'s.
* </p>
* <p>
@@ -61,7 +61,7 @@ import com.jogamp.opengl.util.texture.TextureSequence;
* <p>
* Most of the stream processing is performed on the decoding thread, a.k.a. <i>StreamWorker</i>:
* <ul>
- * <li>Stream initialization triggered by {@link #playStream(Uri, int, int, int) playStream(..)} - User gets notified whether the stream has been initialized or not via {@link GLMediaEventListener#attributesChanged(GLMediaPlayer, int, long) attributesChanges(..)}.</li>
+ * <li>Stream initialization triggered by {@link #playStream(Uri, int, int, int, int) playStream(..)} - User gets notified whether the stream has been initialized or not via {@link GLMediaEventListener#attributesChanged(GLMediaPlayer, int, long) attributesChanges(..)}.</li>
* <li>Stream decoding - User gets notified of a new frame via {@link GLMediaEventListener#newFrameAvailable(GLMediaPlayer, com.jogamp.opengl.util.texture.TextureSequence.TextureFrame, long) newFrameAvailable(...)}.</li>
* <li>Caught <a href="#streamerror">exceptions on the decoding thread</a> are delivered as {@link StreamException}s.</li>
* </ul>
@@ -87,7 +87,7 @@ import com.jogamp.opengl.util.texture.TextureSequence;
* <p>
* <table border="1">
* <tr><th>Action</th> <th>{@link State} Before</th> <th>{@link State} After</th> <th>{@link EventMask#Bit Event}</th></tr>
- * <tr><td>{@link #playStream(Uri, int, int, int)}</td> <td>{@link State#Uninitialized Uninitialized}</td> <td>{@link State#Initialized Initialized}<sup><a href="#streamworker">1</a></sup>, {@link State#Uninitialized Uninitialized}</td> <td>{@link EventMask.Bit#Init Init} or ( {@link EventMask.Bit#Error Error} + {@link EventMask.Bit#Uninit Uninit} )</td></tr>
+ * <tr><td>{@link #playStream(Uri, int, int, int, int)}</td> <td>{@link State#Uninitialized Uninitialized}</td> <td>{@link State#Initialized Initialized}<sup><a href="#streamworker">1</a></sup>, {@link State#Uninitialized Uninitialized}</td> <td>{@link EventMask.Bit#Init Init} or ( {@link EventMask.Bit#Error Error} + {@link EventMask.Bit#Uninit Uninit} )</td></tr>
* <tr><td>{@link #initGL(GL)}</td> <td>{@link State#Initialized Initialized}, {@link State#Uninitialized Uninitialized} </td> <td>{@link State#Playing Playing}, {@link State#Uninitialized Uninitialized}</td> <td>{@link EventMask.Bit#Play Play} or ( {@link EventMask.Bit#Error Error} + {@link EventMask.Bit#Uninit Uninit} )</td></tr>
* <tr><td>{@link #pause(boolean)}</td> <td>{@link State#Playing Playing}</td> <td>{@link State#Paused Paused}</td> <td>{@link EventMask.Bit#Pause Pause}</td></tr>
* <tr><td>{@link #resume()}</td> <td>{@link State#Paused Paused}</td> <td>{@link State#Playing Playing}</td> <td>{@link EventMask.Bit#Play Play}</td></tr>
@@ -332,16 +332,18 @@ public interface GLMediaPlayer extends TextureSequence {
VID ( 1<<16 ),
/** Stream audio id change. */
AID ( 1<<17 ),
+ /** Stream subtitle id change. */
+ SID ( 1<<18 ),
/** TextureFrame size or vertical flip change. */
- Size ( 1<<18 ),
+ Size ( 1<<19 ),
/** Stream fps change. */
- FPS ( 1<<19 ),
+ FPS ( 1<<20 ),
/** Stream bps change. */
- BPS ( 1<<20 ),
+ BPS ( 1<<21 ),
/** Stream length change. */
- Length ( 1<<21 ),
+ Length ( 1<<22 ),
/** Stream codec change. */
- Codec ( 1<<22 );
+ Codec ( 1<<23 );
Bit(final int v) { value = v; }
public final int value;
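For reference, a minimal sketch (not part of the patch) of testing the new SID bit from a listener, assuming the attribute mask arrives as a plain int bitfield as in the attributesChanged(GLMediaPlayer, int, long) link above; it relies only on the public 'value' field shown here:

    // Sketch: detect a subtitle-stream-id change in an int attribute bitfield.
    static boolean sidChanged(final int eventMask) {
        return 0 != ( eventMask & GLMediaPlayer.EventMask.Bit.SID.value );
    }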
@@ -446,14 +448,14 @@ public interface GLMediaPlayer extends TextureSequence {
/**
* Limit maximum supported audio channels by user.
* <p>
- * Must be set before {@link #playStream(Uri, int, int, int)}
+ * Must be set before {@link #playStream(Uri, int, int, int, int)}
* </p>
* <p>
* May be utilized to enforce 1 channel (mono) downsampling
* in combination with JOAL/OpenAL to experience spatial 3D position effects.
* </p>
* @param cc maximum supported audio channels, will be clipped [1..x], with x being the underlying audio subsystem's maximum
- * @see #playStream(Uri, int, int, int)
+ * @see #playStream(Uri, int, int, int, int)
*/
public void setAudioChannelLimit(final int cc);
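A brief usage sketch of the channel limit described above (not part of the patch; 'player' and 'streamLoc' are placeholders); the call must precede playStream(..):

    player.setAudioChannelLimit(1);  // enforce mono downsampling, e.g. for spatial 3D audio via JOAL/OpenAL
    player.playStream(streamLoc, GLMediaPlayer.STREAM_ID_AUTO, GLMediaPlayer.STREAM_ID_AUTO,
                      GLMediaPlayer.STREAM_ID_NONE, GLMediaPlayer.TEXTURE_COUNT_DEFAULT);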
@@ -479,16 +481,31 @@ public interface GLMediaPlayer extends TextureSequence {
* </p>
* @param streamLoc the stream location
* @param vid video stream id, see <a href="#streamIDs">audio and video Stream IDs</a>
- * @param aid video stream id, see <a href="#streamIDs">audio and video Stream IDs</a>
+ * @param aid audio stream id, see <a href="#streamIDs">audio and video Stream IDs</a>
+ * @param sid subtitle stream id, see <a href="#streamIDs">audio and video Stream IDs</a>
* @param textureCount desired number of buffered textures to be decoded off-thread, will be validated by implementation.
* The minimum value is {@link #TEXTURE_COUNT_MIN} (single-threaded) or above to enable multi-threaded stream decoding.
* Default is {@link #TEXTURE_COUNT_DEFAULT}.
* Value is ignored if video is muted.
* @throws IllegalStateException if not invoked in {@link State#Uninitialized}
* @throws IllegalArgumentException if arguments are invalid
- * @since 2.3.0
+ * @since 2.6.0
*/
- public void playStream(Uri streamLoc, int vid, int aid, int textureCount) throws IllegalStateException, IllegalArgumentException;
+ public void playStream(Uri streamLoc, int vid, int aid, int sid, int textureCount) throws IllegalStateException, IllegalArgumentException;
+
+ /**
+     * Switches the current {@link #playStream(Uri, int, int, int, int)} to the given stream IDs and continues at the same {@link #getVideoPTS()}.
+     * <p>
+     * The implementation simply issues {@link #stop()}, {@link #seek(int)} and {@link #playStream(Uri, int, int, int, int)}.
+ * </p>
+ * @param vid video stream id, see <a href="#streamIDs">audio and video Stream IDs</a>
+ * @param aid audio stream id, see <a href="#streamIDs">audio and video Stream IDs</a>
+ * @param sid subtitle stream id, see <a href="#streamIDs">audio and video Stream IDs</a>
+ * @throws IllegalStateException
+ * @throws IllegalArgumentException
+ * @since 2.6.0
+ */
+ public void switchStream(final int vid, final int aid, final int sid) throws IllegalStateException, IllegalArgumentException;
/**
     * Returns the {@link StreamException} caught in the decoder thread, or <code>null</code> if none occurred.
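For reference, a minimal usage sketch of the extended playStream(..) and the new switchStream(..) (not part of the patch; 'player', the stream location and the subtitle id 2 are placeholders, and Uri.cast(..) from com.jogamp.common.net.Uri throws URISyntaxException):

    final Uri streamLoc = Uri.cast("file:///tmp/movie.mkv");       // placeholder location
    player.playStream(streamLoc,
                      GLMediaPlayer.STREAM_ID_AUTO,                // vid: auto-select video stream
                      GLMediaPlayer.STREAM_ID_AUTO,                // aid: auto-select audio stream
                      GLMediaPlayer.STREAM_ID_NONE,                // sid: start without subtitles
                      GLMediaPlayer.TEXTURE_COUNT_DEFAULT);
    // Later: keep video and audio, enable subtitle stream 2, continue at the current video PTS.
    player.switchStream(player.getVID(), player.getAID(), 2 /* placeholder sid */);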
@@ -505,7 +522,7 @@ public interface GLMediaPlayer extends TextureSequence {
* <p>
* <a href="#lifecycle">Lifecycle</a>: {@link State#Initialized} -> {@link State#Paused} or {@link State#Initialized}
* </p>
- * Argument <code>gl</code> is ignored if video is muted, see {@link #playStream(Uri, int, int, int)}.
+ * Argument <code>gl</code> is ignored if video is muted, see {@link #playStream(Uri, int, int, int, int)}.
*
* @param gl current GL object. Maybe <code>null</code>, for audio only.
* @throws IllegalStateException if not invoked in {@link State#Initialized}.
@@ -517,7 +534,7 @@ public interface GLMediaPlayer extends TextureSequence {
/**
* If implementation uses a {@link AudioSink}, it's instance will be returned.
* <p>
- * The {@link AudioSink} instance is available after {@link #playStream(Uri, int, int, int)},
+ * The {@link AudioSink} instance is available after {@link #playStream(Uri, int, int, int, int)},
* if used by implementation.
* </p>
*/
@@ -615,15 +632,72 @@ public interface GLMediaPlayer extends TextureSequence {
public State getState();
/**
+ * Return an array of detected video stream IDs.
+ */
+ public int[] getVStreams();
+
+ /**
+ * Return an array of detected video stream languages, matching {@link #getVStreams()} array and its indices.
+ */
+ public String[] getVLangs();
+
+ /**
* Return the video stream id, see <a href="#streamIDs">audio and video Stream IDs</a>.
*/
public int getVID();
+ /** Returns the next video stream id, rotates. */
+ public int getNextVID();
+
+ /**
+ * Return an array of detected audio stream IDs.
+ */
+ public int[] getAStreams();
+
+ /**
+ * Return an array of detected audio stream languages, matching {@link #getAStreams()} array and its indices.
+ */
+ public String[] getALangs();
+
/**
* Return the audio stream id, see <a href="#streamIDs">audio and video Stream IDs</a>.
*/
public int getAID();
+ /** Returns the next audio stream id, rotates. */
+ public int getNextAID();
+
+ /**
+ * Return an array of detected subtitle stream IDs.
+ */
+ public int[] getSStreams();
+
+ /**
+ * Return an array of detected subtitle stream languages, matching {@link #getSStreams()} array and its indices.
+ */
+ public String[] getSLangs();
+
+ /**
+ * Return the subtitle stream id, see <a href="#streamIDs">audio and video Stream IDs</a>.
+ */
+ public int getSID();
+
+    /** Returns the next subtitle stream id, rotates including no-stream. */
+ public int getNextSID();
+
+ /**
+ * Return whether the given stream ID is available, i.e. matching one of the stream IDs in {@link #getVStreams()}, {@link #getAStreams()} or {@link #getSStreams()}.
+ */
+ public boolean hasStreamID(int id);
+
+ /**
+     * Return the matching language of the given stream ID, matching one of the stream IDs in {@link #getVStreams()}, {@link #getAStreams()} or {@link #getSStreams()}.
+     * <p>
+     * If the stream ID is not available, {@code "und"} is returned.
+ * </p>
+ */
+ public String getLang(int id);
+
/**
* @return the current decoded frame count since {@link #resume()} and {@link #seek(int)}
* as increased by {@link #getNextTexture(GL)} or the decoding thread.
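An illustrative sketch (not part of the patch) of the new accessors: enumerating detected subtitle streams and rotating to the next one; 'player' stands for an initialized GLMediaPlayer:

    final int[] sids = player.getSStreams();
    final String[] slangs = player.getSLangs();                // same length and order as getSStreams()
    for(int i = 0; i < sids.length; ++i) {
        System.err.println("subtitle stream " + sids[i] + ": " + slangs[i]);
    }
    // getNextSID() rotates through the detected subtitle streams including the 'no stream' case.
    final int nextSid = player.getNextSID();
    System.err.println("next sid " + nextSid + " (" + player.getLang(nextSid) + ")");
    player.switchStream(player.getVID(), player.getAID(), nextSid);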
@@ -700,7 +774,7 @@ public interface GLMediaPlayer extends TextureSequence {
public TextureSequence.TextureFrame getNextTexture(GL gl) throws IllegalStateException;
/**
- * Return the stream location, as set by {@link #playStream(Uri, int, int, int)}.
+ * Return the stream location, as set by {@link #playStream(Uri, int, int, int, int)}.
* @since 2.3.0
*/
public Uri getUri();
@@ -788,7 +862,7 @@ public interface GLMediaPlayer extends TextureSequence {
/** Returns the height of the video. */
public int getHeight();
- /** Returns {@link Chapter} meta-data from stream, available after {@link State#Initialized} is reached after issuing {@link #playStream(Uri, int, int, int)}. */
+ /** Returns {@link Chapter} meta-data from stream, available after {@link State#Initialized} is reached after issuing {@link #playStream(Uri, int, int, int, int)}. */
public Chapter[] getChapters();
/**
* Returns {@link Chapter} covering given time position in milliseconds or null if none covers given time
@@ -796,6 +870,11 @@ public interface GLMediaPlayer extends TextureSequence {
*/
public Chapter getChapter(int msec);
+ /**
+ * Returns the stream language metadata for the given stream id if available, otherwise {@code null}.
+ */
+ public String getStreamLang(int id);
+
/** Returns a string representation of this player, incl. state and audio/video details. */
@Override
public String toString();
diff --git a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
index 5fc406a72..3900e4089 100644
--- a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
+++ b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
@@ -281,7 +281,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
}
@Override
- protected final void initStreamImpl(final int vid, final int aid) throws IOException {
+ protected final void initStreamImpl(final int vid, final int aid, int sid) throws IOException {
if( null == getUri() ) {
return;
}
@@ -323,12 +323,23 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
} catch (final IOException ioe) {
throw new IOException("MediaPlayer failed to process stream <"+getUri().toString()+">: "+ioe.getMessage(), ioe);
}
- final int r_aid = GLMediaPlayer.STREAM_ID_NONE == aid ? GLMediaPlayer.STREAM_ID_NONE : 1 /* fake */;
+ final int r_aid;
+ final int[] r_aids;
+ final String[] r_alangs;
+ if( GLMediaPlayer.STREAM_ID_NONE == aid ) {
+ r_aid = GLMediaPlayer.STREAM_ID_NONE;
+ r_aids = new int[0];
+ r_alangs = new String[0];
+ } else {
+ r_aid = 1; // fake
+ r_aids = new int[] { 1 }; // fake
+ r_alangs = new String[] { "n/a" };
+ }
final String icodec = "android";
- updateAttributes(0 /* fake */, r_aid,
- mp.getVideoWidth(), mp.getVideoHeight(), 0,
- 0, 0, 0f,
- 0, 0, mp.getDuration(), icodec, icodec);
+ updateAttributes(new int[] { 0 }, new String[] { "und" }, 0 /* fake */,
+ r_aids, r_alangs, r_aid,
+ new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE,
+ mp.getVideoWidth(), mp.getVideoHeight(), 0, 0, 0, 0f, 0, 0, mp.getDuration(), icodec, icodec);
/**
mp.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
@@ -361,11 +372,10 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
}
}
}
- updateAttributes(0 /* fake */, GLMediaPlayer.STREAM_ID_NONE,
- size.width, size.height,
- 0, 0, 0,
- fpsRange[1]/1000f,
- 0, 0, 0, icodec, icodec);
+ updateAttributes(new int[]{0}, new String[] { "und" }, 0 /* fake */,
+ new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE,
+ new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE,
+ size.width, size.height, 0, 0, 0, fpsRange[1]/1000f, 0, 0, 0, icodec, icodec);
}
}
private static String camSz2Str(final Camera.Size csize) {
diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
index 0525f7f17..01a385b0d 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
@@ -32,6 +32,7 @@ import java.net.URISyntaxException;
import java.net.URLConnection;
import java.nio.ByteBuffer;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
@@ -120,7 +121,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
/**
* In case {@link #streamLoc} is a {@link GLMediaPlayer#CameraInputScheme},
* {@link #cameraPath} holds the URI's path portion
- * as parsed in {@link #playStream(Uri, int, int, int)}.
+ * as parsed in {@link #playStream(Uri, int, int, int, int)}.
* @see #cameraProps
*/
protected Uri.Encoded cameraPath = null;
@@ -130,33 +131,47 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
private volatile float playSpeed = 1.0f;
private float audioVolume = 1.0f;
- /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
+ private int[] v_streams = new int[0];
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
+ private String[] v_langs = new String[0];
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private int vid = GLMediaPlayer.STREAM_ID_NONE;
- /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
+ private int[] a_streams = new int[0];
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
+ private String[] a_langs = new String[0];
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private int aid = GLMediaPlayer.STREAM_ID_NONE;
- /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
+ private int[] s_streams = new int[0];
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
+ private String[] s_langs = new String[0];
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
+ private int sid = GLMediaPlayer.STREAM_ID_NONE;
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private int width = 0;
- /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private int height = 0;
- /** Video avg. fps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ /** Video avg. fps. Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private float fps = 0;
- /** Video avg. frame duration in ms. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ /** Video avg. frame duration in ms. Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private float frame_duration = 0f;
- /** Stream bps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ /** Stream bps. Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private int bps_stream = 0;
- /** Video bps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ /** Video bps. Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private int bps_video = 0;
- /** Audio bps. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ /** Audio bps. Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private int bps_audio = 0;
- /** In frames. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ /** In frames. Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private int videoFrames = 0;
- /** In frames. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ /** In frames. Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private int audioFrames = 0;
- /** In ms. Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ /** In ms. Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private int duration = 0;
- /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private String acodec = unknown;
- /** Shall be set by the {@link #initStreamImpl(int, int)} method implementation. */
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private String vcodec = unknown;
private volatile int decodedFrameCount = 0;
@@ -175,7 +190,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
private static final int MAX_FRAMELESS_MS_UNTIL_EOS = 5000;
private static final int MAX_FRAMELESS_UNTIL_EOS_DEFAULT = MAX_FRAMELESS_MS_UNTIL_EOS / 30; // default value assuming 30fps
- /** See {@link #getAudioSink()}. Set by implementation if used from within {@link #initStreamImpl(int, int)}! */
+ /** See {@link #getAudioSink()}. Set by implementation if used from within {@link #initStreamImpl(int, int, int)}! */
protected AudioSink audioSink = null;
protected boolean audioSinkPlaySpeedSet = false;
@@ -421,6 +436,13 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
streamWorker.resume();
}
changeState(new GLMediaPlayer.EventMask(), State.Playing);
+ {
+ final int _pending_seek = pending_seek;
+ pending_seek = -1;
+ if( 0 <= _pending_seek ) {
+ this.seek(_pending_seek);
+ }
+ }
}
}
if(DEBUG) { logout.println("Play: "+preState+" -> "+state+", "+toString()); }
@@ -531,12 +553,14 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
setState( _state );
break;
default:
+ pending_seek = msec;
pts1 = 0;
}
if(DEBUG) { logout.println("Seek("+msec+"): "+preState+" -> "+state+", "+toString()); }
return pts1;
}
}
+ protected int pending_seek = -1;
protected abstract int seekImpl(int msec);
@Override
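A condensed sketch (not part of the patch) of the pending-seek hand-off added above: a seek(..) issued outside the playing/paused states only records the target, and the next successful play applies it; isPlayingOrPaused() and startStream() are simplified placeholders for the actual state handling:

    private int pending_seek = -1;

    public int seek(final int msec) {
        if( isPlayingOrPaused() ) {            // placeholder for the state switch above
            return seekImpl(msec);             // immediate seek while playing or paused
        }
        pending_seek = msec;                   // defer until the stream is running again
        return 0;
    }

    public void play() {
        startStream();                         // placeholder for the actual state transition
        final int _pending_seek = pending_seek;
        pending_seek = -1;
        if( 0 <= _pending_seek ) {
            seek(_pending_seek);               // apply the deferred seek once Playing
        }
    }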
@@ -650,7 +674,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
@Override
- public final void playStream(final Uri streamLoc, final int vid, final int aid, final int reqTextureCount) throws IllegalStateException, IllegalArgumentException {
+ public final void playStream(final Uri streamLoc, final int vid, final int aid, final int sid, final int reqTextureCount) throws IllegalStateException, IllegalArgumentException {
synchronized( stateLock ) {
if(State.Uninitialized != state) {
                throw new IllegalStateException("Instance not in state uninitialized: "+this);
@@ -692,12 +716,13 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
this.vid = vid;
this.aid = aid;
+ this.sid = sid;
new InterruptSource.Thread() {
@Override
public void run() {
try {
// StreamWorker may be used, see API-doc of StreamWorker
- initStreamImpl(vid, aid);
+ initStreamImpl(vid, aid, sid);
} catch (final Throwable t) {
streamErr = new StreamException(t.getClass().getSimpleName()+" while initializing: "+GLMediaPlayerImpl.this.toString(), t);
changeState(new GLMediaPlayer.EventMask(GLMediaPlayer.EventMask.Bit.Error), GLMediaPlayer.State.Uninitialized);
@@ -708,6 +733,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
/**
* Implementation shall set the following set of data here
+ * @param sid TODO
* @see #vid
* @see #aid
* @see #width
@@ -719,7 +745,18 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
* @see #acodec
* @see #vcodec
*/
- protected abstract void initStreamImpl(int vid, int aid) throws Exception;
+ protected abstract void initStreamImpl(int vid, int aid, int sid) throws Exception;
+
+ @Override
+ public void switchStream(final int vid, final int aid, final int sid) throws IllegalStateException, IllegalArgumentException {
+ System.err.println("XXX VID "+getVID()+" -> "+vid);
+ System.err.println("XXX AID "+getAID()+" -> "+aid);
+ System.err.println("XXX SID "+getSID()+" -> "+sid);
+ final int v_pts = getVideoPTS();
+ stop();
+ seek(v_pts);
+ playStream(getUri(), vid, aid, sid, getTextureCount());
+ }
@Override
public final StreamException getStreamException() {
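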
@@ -916,10 +953,11 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
final float _fps = 24f;
final int _duration = 10*60*1000; // msec
final int _totalFrames = (int) ( (_duration/1000)*_fps );
- updateAttributes(GLMediaPlayer.STREAM_ID_NONE, GLMediaPlayer.STREAM_ID_NONE,
- TestTexture.singleton.getWidth(), TestTexture.singleton.getHeight(), 0,
- 0, 0, _fps,
- _totalFrames, 0, _duration, "png-static", null);
+ updateAttributes(new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE,
+ new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE, // audio
+ new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE, // subs
+ TestTexture.singleton.getWidth(),
+ TestTexture.singleton.getHeight(), 0, 0, 0, _fps, _totalFrames, 0, _duration, "png-static", null);
}
protected abstract TextureFrame createTexImage(GL gl, int texName);
@@ -1331,7 +1369,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
* </p>
* <p>
* Implementations using an {@link AudioSink} shall write it's instance to {@link #audioSink}
- * from within their {@link #initStreamImpl(int, int)} implementation.
+ * from within their {@link #initStreamImpl(int, int, int)} implementation.
* </p>
*/
@Override
@@ -1393,8 +1431,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
/**
- * After {@link GLMediaPlayerImpl#initStreamImpl(int, int) initStreamImpl(..)} is completed via
- * {@link GLMediaPlayerImpl#updateAttributes(int, int, int, int, int, int, int, float, int, int, int, String, String) updateAttributes(..)},
+ * After {@link GLMediaPlayerImpl#initStreamImpl(int, int, int) initStreamImpl(..)} is completed via
+     * {@link GLMediaPlayerImpl#updateAttributes(int[], String[], int, int[], String[], int, int[], String[], int, int, int, int, int, int, float, int, int, int, String, String) updateAttributes(..)},
* the latter decides whether StreamWorker is being used.
*/
private final class StreamWorker {
@@ -1435,6 +1473,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
System.err.println("ZZZ: singleDevice "+singleDevice.getClass()+", "+singleDevice);
}
device.close();
+ singleOwner = null;
+ singleDevice = null;
}
}
}
@@ -1651,8 +1691,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
/**
- * Called initially by {@link #initStreamImpl(int, int)}, which
- * is called off-thread by {@link #playStream(Uri, int, int, int)}.
+ * Called initially by {@link #initStreamImpl(int, int, int)}, which
+ * is called off-thread by {@link #playStream(Uri, int, int, int, int)}.
* <p>
     * The latter catches an occurring exception, sets the state and delivers the error events.
* </p>
@@ -1660,9 +1700,11 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
     * Further calls are issued off-thread by the decoder implementation.
* </p>
*/
- protected final void updateAttributes(int vid, final int aid, final int width, final int height, final int bps_stream,
- final int bps_video, final int bps_audio, final float fps,
- final int videoFrames, final int audioFrames, final int duration, final String vcodec, final String acodec) {
+ protected final void updateAttributes(final int[] v_streams, final String[] v_langs, int vid,
+ final int[] a_streams, final String[] a_langs, int aid,
+ final int[] s_streams, final String[] s_langs, int sid,
+ final int width, final int height, final int bps_stream,
+ final int bps_video, final int bps_audio, final float fps, final int videoFrames, final int audioFrames, final int duration, final String vcodec, final String acodec) {
final GLMediaPlayer.EventMask eventMask = new GLMediaPlayer.EventMask();
final boolean wasUninitialized = state == State.Uninitialized;
@@ -1670,20 +1712,37 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
eventMask.setBit(GLMediaPlayer.EventMask.Bit.Init);
setState( State.Initialized );
}
- if( STREAM_ID_AUTO == vid ) {
+ this.v_streams = v_streams;
+ this.v_langs = v_langs;
+ this.a_streams = a_streams;
+ this.a_langs = a_langs;
+ this.s_streams = s_streams;
+ this.s_langs = s_langs;
+
+ if( STREAM_ID_AUTO == vid || 0 == v_streams.length ) {
vid = STREAM_ID_NONE;
}
if( this.vid != vid ) {
eventMask.setBit(GLMediaPlayer.EventMask.Bit.VID);
this.vid = vid;
}
- if( STREAM_ID_AUTO == vid ) {
- vid = STREAM_ID_NONE;
+
+ if( STREAM_ID_AUTO == aid || 0 == a_streams.length ) {
+ aid = STREAM_ID_NONE;
}
if( this.aid != aid ) {
eventMask.setBit(GLMediaPlayer.EventMask.Bit.AID);
this.aid = aid;
}
+
+ if( STREAM_ID_AUTO == sid || 0 == s_streams.length ) {
+ sid = STREAM_ID_NONE;
+ }
+ if( this.sid != sid ) {
+ eventMask.setBit(GLMediaPlayer.EventMask.Bit.SID);
+ this.sid = sid;
+ }
+
if( this.width != width || this.height != height ) {
eventMask.setBit(GLMediaPlayer.EventMask.Bit.Size);
this.width = width;
@@ -1755,13 +1814,95 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
@Override
public final Uri getUri() { return streamLoc; }
+ private static int getNextImpl(final int[] array, final int current, final boolean use_no_stream) {
+ final int alt = array.length > 0 ? array[0] : STREAM_ID_NONE;
+ if( STREAM_ID_NONE == current ) {
+ return alt;
+ }
+ if( array.length > 1 ) {
+ for(int i=0; i<array.length; ++i) {
+ if( current == array[i] ) {
+ if( i+1 < array.length ) {
+ return array[i+1];
+ } else {
+ return use_no_stream ? STREAM_ID_NONE : array[0];
+ }
+ }
+ }
+ }
+ return alt;
+ }
+
+ @Override
+ public final int[] getVStreams() { return v_streams; }
+
+ @Override
+ public String[] getVLangs() { return v_langs; }
+
@Override
public final int getVID() { return vid; }
@Override
+ public int getNextVID() {
+ return getNextImpl(v_streams, vid, false);
+ }
+
+ @Override
+ public final int[] getAStreams() { return a_streams; }
+
+ @Override
+ public String[] getALangs() { return a_langs; }
+
+ @Override
public final int getAID() { return aid; }
@Override
+ public final int getNextAID() {
+ return getNextImpl(a_streams, aid, false);
+ }
+ @Override
+ public final int[] getSStreams() { return s_streams; }
+
+ @Override
+ public String[] getSLangs() { return s_langs; }
+
+ @Override
+ public final int getSID() { return sid; }
+
+ @Override
+ public int getNextSID() {
+ return getNextImpl(s_streams, sid, true);
+ }
+
+ @Override
+ public final boolean hasStreamID(final int id) {
+ for(int i = v_streams.length-1; i>=0; --i) {
+ if( v_streams[i] == id ) { return true; }
+ }
+ for(int i = a_streams.length-1; i>=0; --i) {
+ if( a_streams[i] == id ) { return true; }
+ }
+ for(int i = s_streams.length-1; i>=0; --i) {
+ if( s_streams[i] == id ) { return true; }
+ }
+ return false;
+ }
+
+ @Override
+ public String getLang(final int id) {
+ for(int i = v_streams.length-1; i>=0; --i) {
+ if( v_streams[i] == id ) { return v_langs[i]; }
+ }
+ for(int i = a_streams.length-1; i>=0; --i) {
+ if( a_streams[i] == id ) { return a_langs[i]; }
+ }
+ for(int i = s_streams.length-1; i>=0; --i) {
+ if( s_streams[i] == id ) { return s_langs[i]; }
+ }
+ return "undef";
+ }
+
+ @Override
public final String getVideoCodec() { return vcodec; }
@Override
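For illustration (not part of the patch), a stand-alone copy of the rotation logic in getNextImpl(..) above, with example values; -2 is assumed here as the value of STREAM_ID_NONE:

    static int getNext(final int[] streams, final int current, final boolean useNoStream) {
        final int NONE = -2;                                    // assumed STREAM_ID_NONE value
        final int alt = streams.length > 0 ? streams[0] : NONE;
        if( NONE == current ) {
            return alt;
        }
        if( streams.length > 1 ) {
            for(int i = 0; i < streams.length; ++i) {
                if( current == streams[i] ) {
                    if( i + 1 < streams.length ) {
                        return streams[i+1];                    // advance to the next detected stream
                    }
                    return useNoStream ? NONE : streams[0];     // wrap around, optionally via 'no stream'
                }
            }
        }
        return alt;
    }
    // Example with streams = {2, 3, 5}:
    //   getNext(streams, 2, false)   -> 3
    //   getNext(streams, 5, false)   -> 2     (video/audio: wraps to the first stream)
    //   getNext(streams, 5, true)    -> NONE  (subtitles: wrap includes 'no stream')
    //   getNext(streams, NONE, true) -> 2     (from 'no stream' back to the first stream)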
@@ -1814,6 +1955,9 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
@Override
+ public String getStreamLang(final int id) { return "n/a"; }
+
+ @Override
public final String toString() {
final String tt = PTS.millisToTimeStr(getDuration());
final String loc = ( null != streamLoc ) ? streamLoc.toString() : "<undefined stream>" ;
@@ -1824,8 +1968,9 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
return getClass().getSimpleName()+"["+state+", vSCR "+video_scr_ms+", "+getChapters().length+" chapters, duration "+tt+", frames[p "+presentedFrameCount+", d "+decodedFrameCount+", t "+videoFrames+", z "+nullFrameCount+" / "+maxNullFrameCountUntilEOS+"], "+
"speed "+playSpeed+", "+bps_stream+" bps, hasSW "+(null!=streamWorker)+
", Texture[count "+textureCount+", free "+freeVideoFrames+", dec "+decVideoFrames+", tagt "+toHexString(textureTarget)+", ifmt "+toHexString(textureInternalFormat)+", fmt "+toHexString(textureFormat)+", type "+toHexString(textureType)+"], "+
- "Video[id "+vid+", <"+vcodec+">, "+width+"x"+height+", glOrient "+isInGLOrientation+", "+fps+" fps, "+frame_duration+" fdur, "+bps_video+" bps], "+
- "Audio[id "+aid+", <"+acodec+">, "+bps_audio+" bps, "+audioFrames+" frames], uri "+loc+camPath+"]";
+ "Video[id "+vid+"/"+Arrays.toString(v_streams)+"/"+Arrays.toString(v_langs)+", <"+vcodec+">, "+width+"x"+height+", glOrient "+isInGLOrientation+", "+fps+" fps, "+frame_duration+" fdur, "+bps_video+" bps], "+
+ "Audio[id "+aid+"/"+Arrays.toString(a_streams)+"/"+Arrays.toString(a_langs)+", <"+acodec+">, "+bps_audio+" bps, "+audioFrames+" frames], "+
+ "Subs[id "+sid+"/"+Arrays.toString(s_streams)+"/"+Arrays.toString(s_langs)+"], uri "+loc+camPath+"]";
}
@Override
diff --git a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
index 9b2b3869c..f88894ce4 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
@@ -38,10 +38,8 @@ import com.jogamp.opengl.GLProfile;
import com.jogamp.common.av.PTS;
import com.jogamp.common.nio.Buffers;
import com.jogamp.common.os.Clock;
-import com.jogamp.common.os.Platform;
import com.jogamp.common.util.IOUtil;
import com.jogamp.opengl.util.av.GLMediaPlayer;
-import com.jogamp.opengl.util.av.GLMediaPlayer.State;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureData;
import com.jogamp.opengl.util.texture.TextureIO;
@@ -146,15 +144,15 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl {
}
@Override
- protected final void initStreamImpl(final int vid, final int aid) throws IOException {
+ protected final void initStreamImpl(final int vid, final int aid, int sid) throws IOException {
texData = createTestTextureData();
final float _fps = 24f;
final int _duration = 10*60*1000; // msec
final int _totalFrames = (int) ( (_duration/1000)*_fps );
- updateAttributes(0 /* fake */, GLMediaPlayer.STREAM_ID_NONE,
- texData.getWidth(), texData.getHeight(), 0,
- 0, 0, _fps,
- _totalFrames, 0, _duration, "png-static", null);
+ updateAttributes(new int[] { 0 }, new String[] { "und" }, 0 /* fake */,
+ new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE,
+ new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE,
+ texData.getWidth(), texData.getHeight(), 0, 0, 0, _fps, _totalFrames, 0, _duration, "png-static", null);
}
@Override
protected final void initGLImpl(final GL gl) throws IOException, GLException {
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
index d62b14ffb..464b8c29d 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
@@ -343,7 +343,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
public static final String dev_video_linux = "/dev/video";
@Override
- protected final void initStreamImpl(final int vid, final int aid) throws IOException {
+ protected final void initStreamImpl(final int vid, final int aid, final int sid) throws IOException {
synchronized( moviePtrLock ) {
if(0==moviePtr) {
throw new GLException("FFMPEG native instance null");
@@ -413,7 +413,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
System.err.println("initStream: p3 stream "+getUri()+" -> "+streamLocS+" -> "+resStreamLocS);
System.err.println("initStream: p3 vid "+vid+", sizes "+sizes+", reqVideo "+rw+"x"+rh+"@"+rr+", aid "+aid+", aMaxChannelCount "+aMaxChannelCount+", aPrefSampleRate "+aPrefSampleRate);
}
- natives.setStream0(moviePtr, resStreamLocS, isCameraInput, vid, sizes, rw, rh, rr, aid, aMaxChannelCount, aPrefSampleRate);
+ natives.setStream0(moviePtr, resStreamLocS, isCameraInput, vid, sizes, rw, rh, rr, aid, aMaxChannelCount, aPrefSampleRate, sid);
}
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java
index 03b61b9ef..ed34d6f0c 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java
@@ -60,10 +60,12 @@ import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;
* @param aid
* @param aPrefSampleRate
* @param aPrefChannelCount
+ * @param sid subtitle id
*/
abstract void setStream0(long moviePtr, String url, boolean isCameraInput,
- int vid, String sizes, int vWidth, int vHeight,
- int vRate, int aid, int aMaxChannelCount, int aPrefSampleRate);
+ int vid, String sizes, int vWidth, int vHeight, int vRate,
+ int aid, int aMaxChannelCount, int aPrefSampleRate,
+ int sid);
abstract void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish);
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0400Natives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0400Natives.java
index bb60cbcc9..ceb4b904f 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0400Natives.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0400Natives.java
@@ -53,7 +53,7 @@ class FFMPEGv0400Natives extends FFMPEGNatives {
native void destroyInstance0(long moviePtr);
@Override
- native void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, String sizes, int vWidth, int vHeight, int vRate, int aid, int aMaxChannelCount, int aPrefSampleRate);
+ native void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, String sizes, int vWidth, int vHeight, int vRate, int aid, int aMaxChannelCount, int aPrefSampleRate, int sid);
@Override
native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish);
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0500Natives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0500Natives.java
index 1ab4ee50a..92c8d36bd 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0500Natives.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0500Natives.java
@@ -53,7 +53,7 @@ class FFMPEGv0500Natives extends FFMPEGNatives {
native void destroyInstance0(long moviePtr);
@Override
- native void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, String sizes, int vWidth, int vHeight, int vRate, int aid, int aMaxChannelCount, int aPrefSampleRate);
+ native void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, String sizes, int vWidth, int vHeight, int vRate, int aid, int aMaxChannelCount, int aPrefSampleRate, int sid);
@Override
native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish);
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0600Natives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0600Natives.java
index bf68002ff..8f33413ac 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0600Natives.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0600Natives.java
@@ -53,7 +53,7 @@ class FFMPEGv0600Natives extends FFMPEGNatives {
native void destroyInstance0(long moviePtr);
@Override
- native void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, String sizes, int vWidth, int vHeight, int vRate, int aid, int aMaxChannelCount, int aPrefSampleRate);
+ native void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, String sizes, int vWidth, int vHeight, int vRate, int aid, int aMaxChannelCount, int aPrefSampleRate, int sid);
@Override
native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish);
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
index f37083d8a..24b1ad4a9 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
@@ -105,7 +105,7 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
}
@Override
- protected void initStreamImpl(final int vid, final int aid) throws IOException {
+ protected void initStreamImpl(final int vid, final int aid, int sid) throws IOException {
if(0==moviePtr) {
throw new GLException("OMX native instance null");
}
diff --git a/src/jogl/native/libav/ffmpeg_impl_template.c b/src/jogl/native/libav/ffmpeg_impl_template.c
index 6d09a9d60..b6569751a 100644
--- a/src/jogl/native/libav/ffmpeg_impl_template.c
+++ b/src/jogl/native/libav/ffmpeg_impl_template.c
@@ -182,6 +182,8 @@ static SWR_CONVERT sp_swr_convert;
static SWR_GET_OUT_SAMPLES sp_swr_get_out_samples;
// count: +6 = 61
+static const char * const ClazzNameString = "java/lang/String";
+
// We use JNI Monitor Locking, since this removes the need
// to statically link-in pthreads on window ..
// #define USE_PTHREAD_LOCKING 1
@@ -339,6 +341,27 @@ JNIEXPORT jboolean JNICALL FF_FUNC(initSymbols0)
return JNI_TRUE;
}
+static inline const char* meta_get_value(AVDictionary *tags, const char* key)
+{
+ // SECTION_ID_CHAPTER_TAGS
+ if (!tags) {
+ return NULL;
+ }
+ const AVDictionaryEntry *entry = NULL;
+ if ((entry = sp_av_dict_get(tags, key, entry, AV_DICT_IGNORE_SUFFIX))) {
+ return entry->value;
+ }
+ return NULL;
+}
+static inline const char* meta_get_chapter_title(AVChapter *chapter)
+{
+ return meta_get_value(chapter->metadata, "title");
+}
+static inline const char* meta_get_language(AVDictionary *tags)
+{
+ return meta_get_value(tags, "language");
+}
+
static int _isAudioFormatSupported(JNIEnv *env, jobject ffmpegMediaPlayer, enum AVSampleFormat aSampleFmt, int32_t aSampleRate, int32_t aChannels) {
int res = JNI_TRUE == (*env)->CallBooleanMethod(env, ffmpegMediaPlayer, ffmpeg_jni_mid_isAudioFormatSupported, aSampleFmt, aSampleRate, aChannels);
JoglCommon_ExceptionCheck1_throwNewRuntimeException(env, "FFmpeg: Exception occured at isAudioFormatSupported(..)");
@@ -346,6 +369,69 @@ static int _isAudioFormatSupported(JNIEnv *env, jobject ffmpegMediaPlayer, enum
}
static void _updateJavaAttributes(JNIEnv *env, FFMPEGToolBasicAV_t* pAV) {
if(NULL!=env) {
+ jclass strclazz = (*env)->FindClass(env, ClazzNameString);
+ if( strclazz == NULL ) {
+ JoglCommon_throwNewRuntimeException(env, "FFmpeg: No Java String Class");
+ return;
+ }
+ jintArray a_streams = (*env)->NewIntArray(env, pAV->a_stream_count);
+ if (a_streams == NULL) {
+ JoglCommon_throwNewRuntimeException(env, "FFmpeg: Out of memory (a_streams %u)", pAV->a_stream_count);
+ return;
+ }
+ jintArray v_streams = (*env)->NewIntArray(env, pAV->v_stream_count);
+ if (v_streams == NULL) {
+ JoglCommon_throwNewRuntimeException(env, "FFmpeg: Out of memory (v_streams %u)", pAV->v_stream_count);
+ return;
+ }
+ jintArray s_streams = (*env)->NewIntArray(env, pAV->s_stream_count);
+ if (s_streams == NULL) {
+ JoglCommon_throwNewRuntimeException(env, "FFmpeg: Out of memory (s_streams %u)", pAV->s_stream_count);
+ return;
+ }
+ jobjectArray a_langs = (*env)->NewObjectArray(env, pAV->a_stream_count, strclazz, NULL);
+ if (a_langs == NULL) {
+ JoglCommon_throwNewRuntimeException(env, "FFmpeg: Out of memory (a_langs %u)", pAV->a_stream_count);
+ return;
+ }
+ jobjectArray v_langs = (*env)->NewObjectArray(env, pAV->v_stream_count, strclazz, NULL);
+ if (v_langs == NULL) {
+ JoglCommon_throwNewRuntimeException(env, "FFmpeg: Out of memory (v_langs %u)", pAV->v_stream_count);
+ return;
+ }
+ jobjectArray s_langs = (*env)->NewObjectArray(env, pAV->s_stream_count, strclazz, NULL);
+ if (s_langs == NULL) {
+ JoglCommon_throwNewRuntimeException(env, "FFmpeg: Out of memory (s_langs %u)", pAV->s_stream_count);
+ return;
+ }
+ if( 0 < pAV->a_stream_count ) {
+ (*env)->SetIntArrayRegion(env, a_streams, 0, pAV->a_stream_count, pAV->a_streams);
+ for(int i=0; i<pAV->a_stream_count; ++i) {
+ AVStream *st = pAV->pFormatCtx->streams[pAV->a_streams[i]];
+ const char* lang0 = meta_get_language(st->metadata);
+ const char* lang1 = NULL != lang0 ? lang0 : "und";
+ (*env)->SetObjectArrayElement(env, a_langs, i, (*env)->NewStringUTF(env, lang1));
+ }
+ }
+ if( 0 < pAV->v_stream_count ) {
+ (*env)->SetIntArrayRegion(env, v_streams, 0, pAV->v_stream_count, pAV->v_streams);
+ for(int i=0; i<pAV->v_stream_count; ++i) {
+ AVStream *st = pAV->pFormatCtx->streams[pAV->v_streams[i]];
+ const char* lang0 = meta_get_language(st->metadata);
+ const char* lang1 = NULL != lang0 ? lang0 : "und";
+ (*env)->SetObjectArrayElement(env, v_langs, i, (*env)->NewStringUTF(env, lang1));
+ }
+ }
+ if( 0 < pAV->s_stream_count ) {
+ (*env)->SetIntArrayRegion(env, s_streams, 0, pAV->s_stream_count, pAV->s_streams);
+ for(int i=0; i<pAV->s_stream_count; ++i) {
+ AVStream *st = pAV->pFormatCtx->streams[pAV->s_streams[i]];
+ const char* lang0 = meta_get_language(st->metadata);
+ const char* lang1 = NULL != lang0 ? lang0 : "und";
+ (*env)->SetObjectArrayElement(env, s_langs, i, (*env)->NewStringUTF(env, lang1));
+ }
+ }
+
(*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, ffmpeg_jni_mid_setupFFAttributes,
pAV->vid, pAV->vPixFmt, pAV->vBufferPlanes,
pAV->vBitsPerPixel, pAV->vBytesPerPixelPerPlane,
@@ -353,8 +439,11 @@ static void _updateJavaAttributes(JNIEnv *env, FFMPEGToolBasicAV_t* pAV) {
pAV->vWidth, pAV->vHeight,
pAV->aid, pAV->aSampleFmtOut, pAV->aSampleRateOut, pAV->aChannelsOut, pAV->aFrameSize);
JoglCommon_ExceptionCheck1_throwNewRuntimeException(env, "FFmpeg: Exception occured at setupFFAttributes(..)");
+
(*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, ffmpeg_jni_mid_updateAttributes,
- pAV->vid, pAV->aid,
+ v_streams, v_langs, pAV->vid,
+ a_streams, a_langs, pAV->aid,
+ s_streams, s_langs, pAV->sid,
pAV->vWidth, pAV->vHeight,
pAV->bps_stream, pAV->bps_video, pAV->bps_audio,
pAV->fps, pAV->frames_video, pAV->frames_audio, pAV->duration,
@@ -547,6 +636,15 @@ JNIEXPORT jlong JNICALL FF_FUNC(createInstance0)
pAV->verbose = verbose;
pAV->vid=AV_STREAM_ID_AUTO;
pAV->aid=AV_STREAM_ID_AUTO;
+ pAV->sid=AV_STREAM_ID_AUTO;
+ pAV->a_stream_count=0;
+ pAV->v_stream_count=0;
+ pAV->s_stream_count=0;
+ for(int i=0; i<MAX_STREAM_COUNT; ++i) {
+ pAV->a_streams[i]=AV_STREAM_ID_NONE;
+ pAV->v_streams[i]=AV_STREAM_ID_NONE;
+ pAV->s_streams[i]=AV_STREAM_ID_NONE;
+ }
if(pAV->verbose) {
fprintf(stderr, "Info: Has swresample %d, device %d\n",
@@ -683,7 +781,8 @@ static int64_t getFrameNum(const AVCodecContext *avctx) {
JNIEXPORT void JNICALL FF_FUNC(setStream0)
(JNIEnv *env, jobject instance, jlong ptr, jstring jURL, jboolean jIsCameraInput,
jint vid, jstring jSizeS, jint vWidth, jint vHeight, jint vRate,
- jint aid, jint aMaxChannelCount, jint aPrefSampleRate)
+ jint aid, jint aMaxChannelCount, jint aPrefSampleRate,
+ jint sid)
{
char cameraName[256];
int res, i;
@@ -819,21 +918,47 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
// Find the first audio and video stream, or the one matching vid
// FIXME: Libav Binary compatibility! JAU01
- for(i=0; ( AV_STREAM_ID_AUTO==pAV->aid || AV_STREAM_ID_AUTO==pAV->vid ) && i<pAV->pFormatCtx->nb_streams; i++) {
+ pAV->a_stream_count=0;
+ pAV->v_stream_count=0;
+ pAV->s_stream_count=0;
+ for(int i=0; i<MAX_STREAM_COUNT; ++i) {
+ pAV->a_streams[i]=AV_STREAM_ID_NONE;
+ pAV->v_streams[i]=AV_STREAM_ID_NONE;
+ pAV->s_streams[i]=AV_STREAM_ID_NONE;
+ }
+ for(i=0; i<pAV->pFormatCtx->nb_streams; i++) {
AVStream *st = pAV->pFormatCtx->streams[i];
if(pAV->verbose) {
- fprintf(stderr, "Stream: %d: is-video %d, is-audio %d\n", i, (AVMEDIA_TYPE_VIDEO == st->codecpar->codec_type), AVMEDIA_TYPE_AUDIO == st->codecpar->codec_type);
+ const char* lang0 = meta_get_language(st->metadata);
+ const char* lang1 = NULL != lang0 ? lang0 : "n/a";
+ fprintf(stderr, "Stream: %d: is-video %d, is-audio %d, is-sub %d, lang %s\n", i,
+ AVMEDIA_TYPE_VIDEO == st->codecpar->codec_type, AVMEDIA_TYPE_AUDIO == st->codecpar->codec_type,
+ AVMEDIA_TYPE_SUBTITLE == st->codecpar->codec_type, lang1);
}
if(AVMEDIA_TYPE_VIDEO == st->codecpar->codec_type) {
+ if( pAV->v_stream_count < MAX_STREAM_COUNT-1 ) {
+ pAV->v_streams[pAV->v_stream_count++] = i;
+ }
if(AV_STREAM_ID_AUTO==pAV->vid && (AV_STREAM_ID_AUTO==vid || vid == i) ) {
pAV->pVStream = st;
pAV->vid=i;
}
} else if(AVMEDIA_TYPE_AUDIO == st->codecpar->codec_type) {
+ if( pAV->a_stream_count < MAX_STREAM_COUNT-1 ) {
+ pAV->a_streams[pAV->a_stream_count++] = i;
+ }
if(AV_STREAM_ID_AUTO==pAV->aid && (AV_STREAM_ID_AUTO==aid || aid == i) ) {
pAV->pAStream = st;
pAV->aid=i;
}
+ } else if(AVMEDIA_TYPE_SUBTITLE == st->codecpar->codec_type) {
+ if( pAV->s_stream_count < MAX_STREAM_COUNT-1 ) {
+ pAV->s_streams[pAV->s_stream_count++] = i;
+ }
+ if(AV_STREAM_ID_AUTO==pAV->sid && (AV_STREAM_ID_AUTO==sid || sid == i) ) {
+ pAV->pSStream = st;
+ pAV->sid=i;
+ }
}
}
if( AV_STREAM_ID_AUTO == pAV->aid ) {
@@ -844,7 +969,7 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
}
if( pAV->verbose ) {
- fprintf(stderr, "Found vid %d, aid %d\n", pAV->vid, pAV->aid);
+ fprintf(stderr, "Found vid %d, aid %d, sid %d\n", pAV->vid, pAV->aid, pAV->sid);
}
if(0<=pAV->aid) {
@@ -1709,22 +1834,6 @@ JNIEXPORT jint JNICALL FF_FUNC(getAudioPTS0)
return pAV->aPTS;
}
-static inline const char* meta_get_value(AVDictionary *tags, const char* key)
-{
- // SECTION_ID_CHAPTER_TAGS
- if (!tags) {
- return NULL;
- }
- const AVDictionaryEntry *entry = NULL;
- if ((entry = sp_av_dict_get(tags, key, entry, AV_DICT_IGNORE_SUFFIX))) {
- return entry->value;
- }
- return NULL;
-}
-static inline const char* meta_get_chapter_title(AVChapter *chapter)
-{
- return meta_get_value(chapter->metadata, "title");
-}
JNIEXPORT jint JNICALL FF_FUNC(getChapterCount0)
(JNIEnv *env, jobject instance, jlong ptr)
{
diff --git a/src/jogl/native/libav/ffmpeg_static.c b/src/jogl/native/libav/ffmpeg_static.c
index c8af59540..b1ca77e1a 100644
--- a/src/jogl/native/libav/ffmpeg_static.c
+++ b/src/jogl/native/libav/ffmpeg_static.c
@@ -65,7 +65,8 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGStaticNatives_i
}
ffmpeg_jni_mid_pushSound = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "pushSound", "(Ljava/nio/ByteBuffer;II)V");
- ffmpeg_jni_mid_updateAttributes = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes", "(IIIIIIIFIIILjava/lang/String;Ljava/lang/String;)V");
+ ffmpeg_jni_mid_updateAttributes = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes",
+ "([I[Ljava/lang/String;I[I[Ljava/lang/String;I[I[Ljava/lang/String;IIIIIIFIIILjava/lang/String;Ljava/lang/String;)V");
ffmpeg_jni_mid_setIsGLOriented = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "setIsGLOriented", "(Z)V");
ffmpeg_jni_mid_setupFFAttributes = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "setupFFAttributes", "(IIIIIIIIIIIIIII)V");
ffmpeg_jni_mid_isAudioFormatSupported = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "isAudioFormatSupported", "(III)Z");
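For readability (not part of the patch), the updated method descriptor maps to the Java-side updateAttributes(..) added in GLMediaPlayerImpl as follows (spaces inserted into the descriptor for grouping only):

    // ( [I [Ljava/lang/String; I   [I [Ljava/lang/String; I   [I [Ljava/lang/String; I   IIIII F III   Ljava/lang/String; Ljava/lang/String; ) V
    //
    // void updateAttributes(int[] v_streams, String[] v_langs, int vid,          // [I [Ljava/lang/String; I
    //                       int[] a_streams, String[] a_langs, int aid,          // [I [Ljava/lang/String; I
    //                       int[] s_streams, String[] s_langs, int sid,          // [I [Ljava/lang/String; I
    //                       int width, int height, int bps_stream,
    //                       int bps_video, int bps_audio,                        // I I I I I
    //                       float fps,                                           // F
    //                       int videoFrames, int audioFrames, int duration,      // I I I
    //                       String vcodec, String acodec)                        // Ljava/lang/String; Ljava/lang/String;
    //                                                                            // V = void return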
diff --git a/src/jogl/native/libav/ffmpeg_tool.h b/src/jogl/native/libav/ffmpeg_tool.h
index 02a297f73..5e23ca882 100644
--- a/src/jogl/native/libav/ffmpeg_tool.h
+++ b/src/jogl/native/libav/ffmpeg_tool.h
@@ -134,6 +134,8 @@ typedef struct {
int64_t dtsLast; // DTS of the last frame
} PTSStats;
+#define MAX_STREAM_COUNT 64
+
typedef struct {
jobject ffmpegMediaPlayer;
int32_t verbose;
@@ -151,6 +153,8 @@ typedef struct {
AVPacket* packet;
AVFormatContext* pFormatCtx;
+ uint32_t v_stream_count;
+ int32_t v_streams[MAX_STREAM_COUNT];
int32_t vid;
AVStream* pVStream;
AVCodecParameters* pVCodecPar;
@@ -168,6 +172,8 @@ typedef struct {
int32_t vHeight;
jboolean vFlipped; // false: !GL-Orientation, true: GL-Orientation
+ uint32_t a_stream_count;
+ int32_t a_streams[MAX_STREAM_COUNT];
int32_t aid;
AVStream* pAStream;
AVCodecParameters* pACodecPar;
@@ -190,6 +196,11 @@ typedef struct {
int32_t aPTS; // msec - overall last audio PTS
PTSStats aPTSStats;
+ uint32_t s_stream_count;
+ int32_t s_streams[MAX_STREAM_COUNT];
+ int32_t sid;
+ AVStream* pSStream;
+
float fps; // frames per seconds
int32_t bps_stream; // bits per seconds
int32_t bps_video; // bits per seconds