author    Sven Gothel <[email protected]>    2013-08-30 17:31:34 +0200
committer Sven Gothel <[email protected]>    2013-08-30 17:31:34 +0200
commit    4cb3763415bb5f82520fd02f56412076f80a84e6 (patch)
tree      33f864d513f9c94a3514cb9fab38fd1130ce9d32 /src/jogl
parent    ad4797e6875d8fc635971ed6d8dd68326c083540 (diff)
GLMediaPlayer enhancements: State, Camera options, detect and act on orientation change (flipped), API-doc,
- State
  - Fix state transition (initGL() error)
- Camera options
  - options uses ';' as query separator
  - don't use 'default' options, driver should know
- Detect and act on orientation change (flipped)
  - ffmpeg impl detects if flipped changes and triggers a SIZE update event.
    This allows application to react, i.e. re-init GL and use new TextureCoord's.
  - Test: Works well on Windows w/ rawvideo dshow camera driver/codec.
- API-doc
  - TexSeqEventListener/GLMediaEventListener usage / constraints (GL, ..)
  - State transition fix
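Illustrative sketch (hypothetical, not part of this change-set) of how an application could react to the new SIZE/orientation event. The attributesChanged/newFrameAvailable signatures, EVENT_CHANGE_SIZE and isGLOriented() are assumed from the GLMediaPlayer API touched below; the volatile dirty-flag handshake is just one way to defer the GL-side reaction, per the TexSeqEventListener constraints documented in this commit.

import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.av.GLMediaPlayer.GLMediaEventListener;
import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;

public class OrientationAwareListener implements GLMediaEventListener {
    // Volatile flag polled by the renderer on the GL thread; the event
    // callback itself must not issue GL calls (it may arrive off-thread).
    private volatile boolean texCoordsDirty = false;

    @Override
    public void attributesChanged(final GLMediaPlayer mp, final int event_mask, final long when) {
        if( 0 != ( event_mask & EVENT_CHANGE_SIZE ) ) {
            // Size or vertical-flip change: the renderer re-reads mp.isGLOriented()
            // and rebuilds its texture coordinates before drawing the next frame.
            texCoordsDirty = true;
        }
    }

    @Override
    public void newFrameAvailable(final GLMediaPlayer mp, final TextureFrame newFrame, final long when) {
        // no GL work here either, see the TexSeqEventListener documentation
    }

    /** Renderer-side: returns and clears the dirty flag. */
    public boolean consumeTexCoordsDirty() {
        final boolean dirty = texCoordsDirty;
        texCoordsDirty = false;
        return dirty;
    }
}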
Diffstat (limited to 'src/jogl')
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java             |  50
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/texture/TextureSequence.java      |  23
-rw-r--r--  src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java  |   2
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java            |   2
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java             |  72
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java             |   5
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java        |  28
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java         |   1
-rw-r--r--  src/jogl/native/libav/ffmpeg_impl_template.c                              |  80
-rw-r--r--  src/jogl/native/libav/ffmpeg_tool.h                                       |   1
10 files changed, 187 insertions, 77 deletions
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
index 1fb0608fb..5072c410d 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
@@ -34,6 +34,7 @@ import javax.media.opengl.GLException;
import jogamp.opengl.Debug;
+import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureSequence;
import com.jogamp.opengl.util.TimeFrameI;
@@ -81,8 +82,8 @@ import com.jogamp.opengl.util.TimeFrameI;
* <p>
* <table border="1">
* <tr><th>Action</th> <th>{@link State} Before</th> <th>{@link State} After</th> <th>{@link GLMediaEventListener Event}</th></tr>
- * <tr><td>{@link #initStream(URI, int, int, int)}</td> <td>{@link State#Uninitialized Uninitialized}</td> <td>{@link State#Initialized Initialized}<sup><a href="#streamworker">1</a></sup>, {@link State#Uninitialized Uninitialized}</td> <td>{@link GLMediaEventListener#EVENT_CHANGE_INIT EVENT_CHANGE_INIT} or ( {@link GLMediaEventListener#EVENT_CHANGE_ERR EVENT_CHANGE_ERR} + {@link GLMediaEventListener#EVENT_CHANGE_UNINIT EVENT_CHANGE_UNINIT} )</td></tr>
- * <tr><td>{@link #initGL(GL)}</td> <td>{@link State#Initialized Initialized}</td> <td>{@link State#Paused Paused}, {@link State#Initialized Initialized}</td> <td>{@link GLMediaEventListener#EVENT_CHANGE_PAUSE EVENT_CHANGE_PAUSE}</td></tr>
+ * <tr><td>{@link #initStream(URI, int, int, int)}</td> <td>{@link State#Uninitialized Uninitialized}</td> <td>{@link State#Initialized Initialized}<sup><a href="#streamworker">1</a></sup>, {@link State#Uninitialized Uninitialized}</td> <td>{@link GLMediaEventListener#EVENT_CHANGE_INIT EVENT_CHANGE_INIT} or ( {@link GLMediaEventListener#EVENT_CHANGE_ERR EVENT_CHANGE_ERR} + {@link GLMediaEventListener#EVENT_CHANGE_UNINIT EVENT_CHANGE_UNINIT} )</td></tr>
+ * <tr><td>{@link #initGL(GL)}</td> <td>{@link State#Initialized Initialized}</td> <td>{@link State#Paused Paused}, {@link State#Uninitialized Uninitialized}</td> <td>{@link GLMediaEventListener#EVENT_CHANGE_PAUSE EVENT_CHANGE_PAUSE} or ( {@link GLMediaEventListener#EVENT_CHANGE_ERR EVENT_CHANGE_ERR} + {@link GLMediaEventListener#EVENT_CHANGE_UNINIT EVENT_CHANGE_UNINIT} )</td></tr>
* <tr><td>{@link #play()}</td> <td>{@link State#Paused Paused}</td> <td>{@link State#Playing Playing}</td> <td>{@link GLMediaEventListener#EVENT_CHANGE_PLAY EVENT_CHANGE_PLAY}</td></tr>
* <tr><td>{@link #pause()}</td> <td>{@link State#Playing Playing}</td> <td>{@link State#Paused Paused}</td> <td>{@link GLMediaEventListener#EVENT_CHANGE_PAUSE EVENT_CHANGE_PAUSE}</td></tr>
* <tr><td>{@link #seek(int)}</td> <td>{@link State#Paused Paused}, {@link State#Playing Playing}</td> <td>{@link State#Paused Paused}, {@link State#Playing Playing}</td> <td>none</td></tr>
@@ -205,12 +206,14 @@ public interface GLMediaPlayer extends TextureSequence {
* ranging from [0..<i>max-number</i>].
* </p>
* <p>
- * The {@link URI#getRawQuery() URI query} is used to pass options to the camera.
+ * The {@link URI#getRawQuery() URI query} is used to pass options to the camera
+ * using <i>;</i> as the separator. The latter avoids trouble w/ escaping.
* </p>
* <pre>
* camera:/<id>
* camera://somewhere/<id>
- * camera://somewhere/<id>?width=640&height=480&rate=15
+ * camera://somewhere/<id>?width=640;height=480;rate=15
+ * camera://somewhere/<id>?size=640x480;rate=15
* </pre>
* <pre>
* URI: [scheme:][//authority][path][?query][#fragment]
@@ -245,6 +248,13 @@ public interface GLMediaPlayer extends TextureSequence {
super(message, cause);
}
}
+
+ /**
+ * {@inheritDoc}
+ * <p>
+ * See {@link TexSeqEventListener} for semantics and usage.
+ * </p>
+ */
public interface GLMediaEventListener extends TexSeqEventListener<GLMediaPlayer> {
/** State changed to {@link State#Initialized}. See <a href="#lifecycle">Lifecycle</a>.*/
@@ -264,7 +274,7 @@ public interface GLMediaPlayer extends TextureSequence {
static final int EVENT_CHANGE_VID = 1<<16;
/** Stream audio id change. */
static final int EVENT_CHANGE_AID = 1<<17;
- /** TextureFrame size change. */
+ /** TextureFrame size or vertical flip change. */
static final int EVENT_CHANGE_SIZE = 1<<18;
/** Stream fps change. */
static final int EVENT_CHANGE_FPS = 1<<19;
@@ -556,18 +566,44 @@ public interface GLMediaPlayer extends TextureSequence {
*/
public float getFramerate();
+ /**
+ * Returns <code>true</code> if the video frame is oriented in
+ * OpenGL's coordinate system, <i>origin at bottom left</i>.
+ * <p>
+ * Otherwise returns <code>false</code>, i.e.
+ * video frame is oriented <i>origin at top left</i>.
+ * </p>
+ * <p>
+ * <code>false</code> is the default assumption for videos,
+ * but the user shall not rely on it.
+ * </p>
+ * <p>
+ * <code>false</code> GL orientation leads to
+ * {@link Texture#getMustFlipVertically()} == <code>true</code>,
+ * as reflected by all {@link TextureFrame}'s {@link Texture}s
+ * retrieved via {@link #getLastTexture()} or {@link #getNextTexture(GL)}.
+ * </p>
+ */
+ public boolean isGLOriented();
+
+ /** Returns the width of the video. */
public int getWidth();
+ /** Returns the height of the video. */
public int getHeight();
+ /** Returns a string representation of this player, incl. state and audio/video details. */
public String toString();
+ /** Returns a string representation of this player's performance values. */
public String getPerfString();
+ /** Adds a {@link GLMediaEventListener} to this player. */
public void addEventListener(GLMediaEventListener l);
+ /** Removes a {@link GLMediaEventListener} from this player. */
public void removeEventListener(GLMediaEventListener l);
- public GLMediaEventListener[] getEventListeners();
-
+ /** Returns all {@link GLMediaEventListener} of this player. */
+ public GLMediaEventListener[] getEventListeners();
}
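Usage illustration for the camera URI and lifecycle documented in the javadoc above (hedged sketch, not part of the commit). GLMediaPlayerFactory.createDefault(), the STREAM_ID_AUTO/STREAM_ID_NONE constants and the texture-frame count of 4 are assumptions about the surrounding JOGL API; the ';'-separated query follows the documentation added in this file.

import java.net.URI;
import javax.media.opengl.GL;
import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.av.GLMediaPlayerFactory;

public final class CameraPlayerSketch {
    /** Uninitialized -> Initialized (or back to Uninitialized + EVENT_CHANGE_ERR). */
    public static GLMediaPlayer open() throws Exception {
        // ';' separates the camera options, avoiding URI escaping trouble;
        // which keys the driver honors (size, rate, ...) is platform dependent.
        final URI camera = new URI("camera://somewhere/0?size=640x480;rate=15");
        final GLMediaPlayer mp = GLMediaPlayerFactory.createDefault();
        mp.initStream(camera, GLMediaPlayer.STREAM_ID_AUTO, GLMediaPlayer.STREAM_ID_NONE, 4 /* texture frames */);
        return mp;
    }

    /** Requires State.Initialized and a current GL context on the calling thread. */
    public static void startOnGLThread(final GLMediaPlayer mp, final GL gl) throws GLMediaPlayer.StreamException {
        mp.initGL(gl);   // Initialized -> Paused, or Uninitialized + EVENT_CHANGE_ERR on failure
        mp.play();       // Paused -> Playing
    }
}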
diff --git a/src/jogl/classes/com/jogamp/opengl/util/texture/TextureSequence.java b/src/jogl/classes/com/jogamp/opengl/util/texture/TextureSequence.java
index 8b6cc1bf9..5c6b63535 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/texture/TextureSequence.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/texture/TextureSequence.java
@@ -28,7 +28,9 @@
package com.jogamp.opengl.util.texture;
import javax.media.opengl.GL;
-
+import javax.media.opengl.GLAutoDrawable;
+import javax.media.opengl.GLRunnable;
+import javax.media.opengl.GLEventListener;
import com.jogamp.opengl.util.TimeFrameI;
/**
@@ -128,6 +130,25 @@ public interface TextureSequence {
protected final Texture texture;
}
+ /**
+ * Event listener to notify users of updates regarding the {@link TextureSequence}.
+ * <p>
+ * The implementation sending the events, and hence calling down to all listeners,
+ * does not necessarily make the user's OpenGL context current.
+ * </p>
+ * <p>
+ * Furthermore, the call may happen off-thread, possibly holding another, possibly shared, OpenGL context current.
+ * </p>
+ * Hence a user shall not issue <i>any</i> OpenGL, time consuming
+ * or {@link TextureSequence} lifecycle operations directly.<br>
+ * Instead, the user shall:
+ * <ul>
+ * <li>issue commands off-thread via spawning off another thread, or</li>
+ * <li>injecting {@link GLRunnable} objects via {@link GLAutoDrawable#invoke(boolean, GLRunnable)}, or</li>
+ * <li>simply changing a volatile state of their {@link GLEventListener} implementation.</li>
+ * </ul>
+ * </p>
+ * */
public interface TexSeqEventListener<T extends TextureSequence> {
/**
* Signaling listeners that a new {@link TextureFrame} is available.
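The TexSeqEventListener documentation added above states that the callback may arrive off-thread without the user's GL context current, so GL work should be deferred, e.g. via GLAutoDrawable#invoke(boolean, GLRunnable). A minimal, hypothetical sketch (the listener's generic signature is assumed from this interface):

import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.GLRunnable;
import com.jogamp.opengl.util.texture.TextureSequence;
import com.jogamp.opengl.util.texture.TextureSequence.TexSeqEventListener;
import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;

public final class DeferredFrameListener implements TexSeqEventListener<TextureSequence> {
    private final GLAutoDrawable drawable; // the application's drawable

    public DeferredFrameListener(final GLAutoDrawable drawable) {
        this.drawable = drawable;
    }

    @Override
    public void newFrameAvailable(final TextureSequence ts, final TextureFrame newFrame, final long when) {
        // No GL calls here: this may run on the decoder thread, possibly with
        // another (shared) context current. Defer the GL work onto the GL thread:
        drawable.invoke(false /* don't wait */, new GLRunnable() {
            @Override
            public boolean run(final GLAutoDrawable d) {
                // Safe place for GL work, e.g. binding ts.getLastTexture().
                return true; // see GLRunnable#run() for the return-value semantics
            }
        });
    }
}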
diff --git a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
index 38faf62a6..35084f1c5 100644
--- a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
+++ b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
@@ -433,7 +433,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
protected final TextureSequence.TextureFrame createTexImage(GL gl, int texName) {
sTexFrameCount++;
if( 1 == sTexFrameCount ) {
- singleSTexFrame = new SurfaceTextureFrame( createTexImageImpl(gl, texName, width, height, true), new SurfaceTexture(texName) );
+ singleSTexFrame = new SurfaceTextureFrame( createTexImageImpl(gl, texName, width, height), new SurfaceTexture(texName) );
}
return singleSTexFrame;
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
index ec375406d..31af8e4db 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
@@ -85,7 +85,7 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
@Override
protected TextureSequence.TextureFrame createTexImage(GL gl, int texName) {
- final Texture texture = super.createTexImageImpl(gl, texName, width, height, false);
+ final Texture texture = super.createTexImageImpl(gl, texName, width, height);
final Buffer clientBuffer;
final long image;
final long sync;
diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
index ab0e2eebd..205642eb0 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
@@ -165,6 +165,10 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected Ringbuffer<TextureFrame> videoFramesFree = null;
protected Ringbuffer<TextureFrame> videoFramesDecoded = null;
protected volatile TextureFrame lastFrame = null;
+ /**
+ * @see #isGLOriented()
+ */
+ protected boolean isInGLOrientation = false;
private ArrayList<GLMediaEventListener> eventListeners = new ArrayList<GLMediaEventListener>();
@@ -203,12 +207,6 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
public final void setTextureWrapST(int[] wrapST) { texWrapST[0] = wrapST[0]; texWrapST[1] = wrapST[1];}
public final int[] getTextureWrapST() { return texWrapST; }
- private final void checkStreamInit() {
- if(State.Uninitialized == state ) {
- throw new IllegalStateException("Stream not initialized: "+this);
- }
- }
-
private final void checkGLInit() {
if(State.Uninitialized == state || State.Initialized == state ) {
throw new IllegalStateException("GL not initialized: "+this);
@@ -339,6 +337,23 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected abstract boolean pauseImpl();
@Override
+ public final State destroy(GL gl) {
+ return destroyImpl(gl, 0);
+ }
+ private final State destroyImpl(GL gl, int event_mask) {
+ synchronized( stateLock ) {
+ streamWorker.doStop();
+ streamWorker = null;
+ destroyImpl(gl);
+ removeAllTextureFrames(gl);
+ textureCount=0;
+ changeState(event_mask, State.Uninitialized);
+ return state;
+ }
+ }
+ protected abstract void destroyImpl(GL gl);
+
+ @Override
public final int seek(int msec) {
synchronized( stateLock ) {
final State preState = state;
@@ -458,7 +473,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
public final void initStream(URI streamLoc, int vid, int aid, int reqTextureCount) throws IllegalStateException, IllegalArgumentException {
synchronized( stateLock ) {
if(State.Uninitialized != state) {
- throw new IllegalStateException("Instance not unintialized: "+this);
+ throw new IllegalStateException("Instance not in state uninitialized: "+this);
}
if(null == streamLoc) {
throw new IllegalArgumentException("streamLock is null");
@@ -485,7 +500,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
if( null != rawPath && rawPath.length() > 0 ) {
// cut-off root fwd-slash
cameraPath = rawPath.substring(1);
- final URIQueryProps props = URIQueryProps.create(streamLoc);
+ final URIQueryProps props = URIQueryProps.create(streamLoc, ';');
cameraProps = props.getProperties();
} else {
throw new IllegalArgumentException("Camera path is empty: "+streamLoc.toString());
@@ -528,10 +543,12 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
@Override
public final void initGL(GL gl) throws IllegalStateException, StreamException, GLException {
synchronized( stateLock ) {
- checkStreamInit();
+ if(State.Initialized != state ) {
+ throw new IllegalStateException("Stream not in state initialized: "+this);
+ }
final StreamException streamInitErr = streamWorker.getStreamErr();
if( null != streamInitErr ) {
- streamWorker = null;
+ streamWorker = null; // already terminated!
destroy(null);
throw streamInitErr;
}
@@ -559,6 +576,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
changeState(0, State.Paused);
} catch (Throwable t) {
+ destroyImpl(gl, GLMediaEventListener.EVENT_CHANGE_ERR); // -> GLMediaPlayer.State.Uninitialized
throw new GLException("Error initializing GL resources", t);
}
}
@@ -602,7 +620,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
protected abstract TextureFrame createTexImage(GL gl, int texName);
- protected final Texture createTexImageImpl(GL gl, int texName, int tWidth, int tHeight, boolean mustFlipVertically) {
+ protected final Texture createTexImageImpl(GL gl, int texName, int tWidth, int tHeight) {
if( 0 > texName ) {
throw new RuntimeException("TextureName "+toHexString(texName)+" invalid.");
}
@@ -649,7 +667,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
texName, textureTarget,
tWidth, tHeight,
width, height,
- mustFlipVertically);
+ !isInGLOrientation);
}
protected void destroyTexFrame(GL gl, TextureFrame frame) {
@@ -1251,20 +1269,19 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
attributesUpdated(event_mask);
}
- @Override
- public final State destroy(GL gl) {
- synchronized( stateLock ) {
- streamWorker.doStop();
- streamWorker = null;
- destroyImpl(gl);
- removeAllTextureFrames(gl);
- textureCount=0;
- changeState(0, State.Uninitialized);
- return state;
+ protected void setIsGLOriented(boolean isGLOriented) {
+ if( isInGLOrientation != isGLOriented ) {
+ if( DEBUG ) {
+ System.err.println("XXX gl-orient "+isInGLOrientation+" -> "+isGLOriented);
+ }
+ isInGLOrientation = isGLOriented;
+ for(int i=0; i<videoFramesOrig.length; i++) {
+ videoFramesOrig[i].getTexture().setMustFlipVertically(!isGLOriented);
+ }
+ attributesUpdated(GLMediaEventListener.EVENT_CHANGE_SIZE);
}
}
- protected abstract void destroyImpl(GL gl);
-
+
@Override
public final URI getURI() {
return streamLoc;
@@ -1322,6 +1339,11 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
@Override
+ public final boolean isGLOriented() {
+ return isInGLOrientation;
+ }
+
+ @Override
public final int getWidth() {
return width;
}
@@ -1342,7 +1364,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
return "GLMediaPlayer["+state+", vSCR "+video_scr+", frames[p "+presentedFrameCount+", d "+decodedFrameCount+", t "+videoFrames+" ("+tt+" s)], "+
"speed "+playSpeed+", "+bps_stream+" bps, "+
"Texture[count "+textureCount+", free "+freeVideoFrames+", dec "+decVideoFrames+", tagt "+toHexString(textureTarget)+", ifmt "+toHexString(textureInternalFormat)+", fmt "+toHexString(textureFormat)+", type "+toHexString(textureType)+"], "+
- "Video[id "+vid+", <"+vcodec+">, "+width+"x"+height+", "+fps+" fps, "+frame_duration+" fdur, "+bps_video+" bps], "+
+ "Video[id "+vid+", <"+vcodec+">, "+width+"x"+height+", glOrient "+isInGLOrientation+", "+fps+" fps, "+frame_duration+" fdur, "+bps_video+" bps], "+
"Audio[id "+aid+", <"+acodec+">, "+bps_audio+" bps, "+audioFrames+" frames], uri "+loc+camPath+"]";
}
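On the application side, the orientation change handled by setIsGLOriented(..) above surfaces as EVENT_CHANGE_SIZE plus an updated Texture#getMustFlipVertically(); re-querying the frame's texture coordinates then yields correctly flipped values. A hedged sketch, assuming the usual Texture.getImageTexCoords() behavior and the TextureCoords accessors:

import javax.media.opengl.GL;
import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureCoords;
import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;

public final class TexCoordHelper {
    /** Render-loop helper, GL context current; e.g. re-run after EVENT_CHANGE_SIZE. */
    public static float[] currentTexCoords(final GLMediaPlayer mp, final GL gl) {
        final TextureFrame frame = mp.getNextTexture(gl); // or getLastTexture()
        final Texture tex = frame.getTexture();
        // getImageTexCoords() accounts for mustFlipVertically, which the
        // player keeps in sync with isGLOriented() via setIsGLOriented(..).
        final TextureCoords tc = tex.getImageTexCoords();
        return new float[] { tc.left(), tc.bottom(), tc.right(), tc.top() };
    }
}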
diff --git a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
index 6fa7c7a54..1cddaa9cf 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
@@ -57,6 +57,7 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl {
public NullGLMediaPlayer() {
super();
+
}
@Override
@@ -143,7 +144,7 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl {
}
@Override
protected final void initGLImpl(GL gl) throws IOException, GLException {
- // NOP
+ isInGLOrientation = true;
}
/**
@@ -159,7 +160,7 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl {
@Override
protected final TextureSequence.TextureFrame createTexImage(GL gl, int texName) {
- final Texture texture = super.createTexImageImpl(gl, texName, width, height, false);
+ final Texture texture = super.createTexImageImpl(gl, texName, width, height);
if(null != texData) {
texture.updateImage(gl, texData);
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
index 2737a0b6a..c329e880f 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
@@ -293,7 +293,8 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
final boolean isCameraInput = null != cameraPath;
final String resStreamLocS;
- int rw=640, rh=480, rr=15;
+ // int rw=640, rh=480, rr=15;
+ int rw=-1, rh=-1, rr=-1;
String sizes = null;
if( isCameraInput ) {
switch(Platform.OS_TYPE) {
@@ -424,7 +425,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
}
@Override
protected final TextureFrame createTexImage(GL gl, int texName) {
- return new TextureFrame( createTexImageImpl(gl, texName, texWidth, texHeight, true) );
+ return new TextureFrame( createTexImageImpl(gl, texName, texWidth, texHeight) );
}
/**
@@ -521,10 +522,10 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
* @param audioChannels
* @param audioSamplesPerFrameAndChannel in audio samples per frame and channel
*/
- void updateAttributes2(int vid, int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
- int tWd0, int tWd1, int tWd2, int vW, int vH,
- int aid, int audioSampleFmt, int audioSampleRate,
- int audioChannels, int audioSamplesPerFrameAndChannel) {
+ void setupFFAttributes(int vid, int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
+ int tWd0, int tWd1, int tWd2, int vW, int vH,
+ int aid, int audioSampleFmt, int audioSampleRate,
+ int audioChannels, int audioSamplesPerFrameAndChannel) {
// defaults ..
vPixelFmt = null;
vPlanes = 0;
@@ -612,6 +613,21 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
}
/**
+ * Native callback
+ * @param isInGLOrientation
+ * @param pixFmt
+ * @param planes
+ * @param bitsPerPixel
+ * @param bytesPerPixelPerPlane
+ * @param tWd0
+ * @param tWd1
+ * @param tWd2
+ */
+ void updateVidAttributes(boolean isInGLOrientation, int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
+ int tWd0, int tWd1, int tWd2, int vW, int vH) {
+ }
+
+ /**
* {@inheritDoc}
*
* If this implementation generates a specialized shader,
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
index faa6a56c4..a5a701a4f 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
@@ -119,6 +119,7 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
@Override
protected final void initGLImpl(GL gl) throws IOException, GLException {
// NOP
+ isInGLOrientation = true;
}
@Override
diff --git a/src/jogl/native/libav/ffmpeg_impl_template.c b/src/jogl/native/libav/ffmpeg_impl_template.c
index 6d28e77b6..7035545d0 100644
--- a/src/jogl/native/libav/ffmpeg_impl_template.c
+++ b/src/jogl/native/libav/ffmpeg_impl_template.c
@@ -45,8 +45,9 @@ static const char * const ClazzNameFFMPEGMediaPlayer = "jogamp/opengl/util/av/im
static jclass ffmpegMediaPlayerClazz = NULL;
static jmethodID jni_mid_pushSound = NULL;
-static jmethodID jni_mid_updateAttributes1 = NULL;
-static jmethodID jni_mid_updateAttributes2 = NULL;
+static jmethodID jni_mid_updateAttributes = NULL;
+static jmethodID jni_mid_setIsGLOriented = NULL;
+static jmethodID jni_mid_setupFFAttributes = NULL;
static jmethodID jni_mid_isAudioFormatSupported = NULL;
#define HAS_FUNC(f) (NULL!=(f))
@@ -309,22 +310,18 @@ JNIEXPORT jboolean JNICALL FF_FUNC(initSymbols0)
return JNI_TRUE;
}
-static int _isAudioFormatSupported(JNIEnv *env, jobject ffmpegMediaPlayer, enum AVSampleFormat aSampleFmt, int32_t aSampleRate, int32_t aChannels)
-{
+static int _isAudioFormatSupported(JNIEnv *env, jobject ffmpegMediaPlayer, enum AVSampleFormat aSampleFmt, int32_t aSampleRate, int32_t aChannels) {
return JNI_TRUE == (*env)->CallBooleanMethod(env, ffmpegMediaPlayer, jni_mid_isAudioFormatSupported, aSampleFmt, aSampleRate, aChannels);
}
-static void _updateJavaAttributes(JNIEnv *env, jobject instance, FFMPEGToolBasicAV_t* pAV)
-{
- // int shallBeDetached = 0;
- // JNIEnv * env = JoglCommon_GetJNIEnv (&shallBeDetached);
+static void _updateJavaAttributes(JNIEnv *env, FFMPEGToolBasicAV_t* pAV) {
if(NULL!=env) {
- (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, jni_mid_updateAttributes2,
+ (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, jni_mid_setupFFAttributes,
pAV->vid, pAV->vPixFmt, pAV->vBufferPlanes,
pAV->vBitsPerPixel, pAV->vBytesPerPixelPerPlane,
pAV->vTexWidth[0], pAV->vTexWidth[1], pAV->vTexWidth[2],
pAV->vWidth, pAV->vHeight,
pAV->aid, pAV->aSampleFmtOut, pAV->aSampleRateOut, pAV->aChannelsOut, pAV->aFrameSize);
- (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, jni_mid_updateAttributes1,
+ (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, jni_mid_updateAttributes,
pAV->vid, pAV->aid,
pAV->vWidth, pAV->vHeight,
pAV->bps_stream, pAV->bps_video, pAV->bps_audio,
@@ -333,6 +330,11 @@ static void _updateJavaAttributes(JNIEnv *env, jobject instance, FFMPEGToolBasic
(*env)->NewStringUTF(env, pAV->acodec) );
}
}
+static void _setIsGLOriented(JNIEnv *env, FFMPEGToolBasicAV_t* pAV) {
+ if(NULL!=env) {
+ (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, jni_mid_setIsGLOriented, pAV->vFlipped);
+ }
+}
static void freeInstance(JNIEnv *env, FFMPEGToolBasicAV_t* pAV) {
int i;
@@ -499,13 +501,15 @@ JNIEXPORT jboolean JNICALL FF_FUNC(initIDs0)
}
jni_mid_pushSound = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "pushSound", "(Ljava/nio/ByteBuffer;II)V");
- jni_mid_updateAttributes1 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes", "(IIIIIIIFIIILjava/lang/String;Ljava/lang/String;)V");
- jni_mid_updateAttributes2 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes2", "(IIIIIIIIIIIIIII)V");
+ jni_mid_updateAttributes = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes", "(IIIIIIIFIIILjava/lang/String;Ljava/lang/String;)V");
+ jni_mid_setIsGLOriented = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "setIsGLOriented", "(Z)V");
+ jni_mid_setupFFAttributes = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "setupFFAttributes", "(IIIIIIIIIIIIIII)V");
jni_mid_isAudioFormatSupported = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "isAudioFormatSupported", "(III)Z");
if(jni_mid_pushSound == NULL ||
- jni_mid_updateAttributes1 == NULL ||
- jni_mid_updateAttributes2 == NULL ||
+ jni_mid_updateAttributes == NULL ||
+ jni_mid_setIsGLOriented == NULL ||
+ jni_mid_setupFFAttributes == NULL ||
jni_mid_isAudioFormatSupported == NULL) {
return JNI_FALSE;
}
@@ -726,21 +730,28 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
}
const char *sizeS = NULL != jSizeS ? (*env)->GetStringUTFChars(env, jSizeS, &iscopy) : NULL;
+ int hasSize = 0;
if( NULL != sizeS ) {
snprintf(buffer, sizeof(buffer), "%s", sizeS);
(*env)->ReleaseStringChars(env, jSizeS, (const jchar *)sizeS);
- } else {
+ hasSize = 1;
+ } else if( vWidth > 0 && vHeight > 0 ) {
snprintf(buffer, sizeof(buffer), "%dx%d", vWidth, vHeight);
+ hasSize = 1;
}
- if(pAV->verbose) {
- fprintf(stderr, "Camera: Size: %s\n", buffer);
+ if( hasSize ) {
+ if(pAV->verbose) {
+ fprintf(stderr, "Camera: Size: %s\n", buffer);
+ }
+ sp_av_dict_set(&inOpts, "video_size", buffer, 0);
}
- sp_av_dict_set(&inOpts, "video_size", buffer, 0);
- snprintf(buffer, sizeof(buffer), "%d", vRate);
- if(pAV->verbose) {
- fprintf(stderr, "Camera: FPS: %s\n", buffer);
+ if( vRate > 0 ) {
+ snprintf(buffer, sizeof(buffer), "%d", vRate);
+ if(pAV->verbose) {
+ fprintf(stderr, "Camera: FPS: %s\n", buffer);
+ }
+ sp_av_dict_set(&inOpts, "framerate", buffer, 0);
}
- sp_av_dict_set(&inOpts, "framerate", buffer, 0); // not setting a framerate causes some drivers to crash!
}
res = sp_avformat_open_input(&pAV->pFormatCtx, filename, inFmt, NULL != inOpts ? &inOpts : NULL);
if( NULL != inOpts ) {
@@ -1055,6 +1066,7 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
pAV->vWidth = pAV->pVCodecCtx->width;
pAV->vHeight = pAV->pVCodecCtx->height;
pAV->vPixFmt = pAV->pVCodecCtx->pix_fmt;
+ pAV->vFlipped = JNI_FALSE;
{
AVPixFmtDescriptor pixDesc = sp_av_pix_fmt_descriptors[pAV->vPixFmt];
pAV->vBitsPerPixel = sp_av_get_bits_per_pixel(&pixDesc);
@@ -1076,16 +1088,6 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
pAV->pVStream->nb_frames,
pAV->vWidth, pAV->vHeight, pAV->vPixFmt, pAV->vBitsPerPixel, pAV->vBufferPlanes, pAV->pVCodecCtx->codec->capabilities);
}
- #if 0
- // Check CODEC_CAP_DR1, i.e. codec must handle get_buffer(), i.e. allocs 'em.
- {
- int codecHandlesBuffers = 0 != ( pAV->pVCodecCtx->codec->capabilities & CODEC_CAP_DR1 );
- if( !codecHandlesBuffers ) {
- JoglCommon_throwNewRuntimeException(env, "Codec does not handle buffers (!CODEC_CAP_DR1)");
- return;
- }
- }
- #endif
pAV->pVFrame=sp_avcodec_alloc_frame();
if( pAV->pVFrame == NULL ) {
@@ -1152,7 +1154,7 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
pAV->aPTS=0;
initPTSStats(&pAV->vPTSStats);
initPTSStats(&pAV->aPTSStats);
- _updateJavaAttributes(env, instance, pAV);
+ _updateJavaAttributes(env, pAV);
}
JNIEXPORT void JNICALL FF_FUNC(setGLFuncs0)
@@ -1179,7 +1181,6 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0)
FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr));
AVPacket packet;
- int frameDecoded;
jint resPTS = INVALID_PTS;
uint8_t * pkt_odata;
int pkt_osize;
@@ -1206,6 +1207,7 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0)
int frameCount;
int flush_complete = 0;
for ( frameCount=0; 0 < packet.size || 0 == frameCount; frameCount++ ) {
+ int frameDecoded;
int len1;
NIOBuffer_t * pNIOBufferCurrent = &pAV->pANIOBuffers[pAV->aFrameCurrent];
AVFrame* pAFrameCurrent = pAV->pAFrames[pAV->aFrameCurrent];
@@ -1339,6 +1341,7 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0)
int frameCount;
int flush_complete = 0;
for ( frameCount=0; 0 < packet.size || 0 == frameCount; frameCount++ ) {
+ int frameDecoded;
int len1;
sp_avcodec_get_frame_defaults(pAV->pVFrame);
if (flush_complete) {
@@ -1398,6 +1401,11 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0)
int p_offset[] = { 0, 0, 0, 0 };
if( pAV->pVFrame->linesize[0] < 0 ) {
+ if( JNI_FALSE == pAV->vFlipped ) {
+ pAV->vFlipped = JNI_TRUE;
+ _setIsGLOriented(env, pAV);
+ }
+
// image bottom-up
int h_1 = pAV->pVCodecCtx->height - 1;
p_offset[0] = pAV->pVFrame->linesize[0] * h_1;
@@ -1411,7 +1419,11 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0)
if( pAV->vBufferPlanes > 3 ) {
p_offset[3] = pAV->pVFrame->linesize[3] * h_1;
} */
+ } else if( JNI_TRUE == pAV->vFlipped ) {
+ pAV->vFlipped = JNI_FALSE;
+ _setIsGLOriented(env, pAV);
}
+
// 1st plane or complete packed frame
// FIXME: Libav Binary compatibility! JAU01
DBG_TEXSUBIMG2D_a('Y',pAV,1,1,1,0);
diff --git a/src/jogl/native/libav/ffmpeg_tool.h b/src/jogl/native/libav/ffmpeg_tool.h
index 72345fbb7..61d121f24 100644
--- a/src/jogl/native/libav/ffmpeg_tool.h
+++ b/src/jogl/native/libav/ffmpeg_tool.h
@@ -163,6 +163,7 @@ typedef struct {
int32_t vTexWidth[4]; // decoded video tex width in bytes for each plane (max 4)
int32_t vWidth;
int32_t vHeight;
+ jboolean vFlipped; // false: !GL-Orientation, true: GL-Orientation
int32_t aid;
AVStream* pAStream;