18 files changed, 798 insertions, 514 deletions
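For orientation, below is a hedged usage sketch of the GLMediaPlayer API as revised by this change: initGLStream(GL, int, URLConnection) now takes the desired texture (buffer) count, the new getVideoPTS()/getAudioPTS() getters expose separate video/audio timestamps, and newFrameAvailable(..) now carries the enqueued TextureFrame. Only those signatures are taken from the diff; the movie URL, the texture-count value of 4 and the surrounding GLEventListener skeleton are illustrative assumptions, not part of this commit.

import java.net.URL;
import java.net.URLConnection;

import javax.media.opengl.GL;
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.GLEventListener;

import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.av.GLMediaPlayerFactory;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureSequence;

public class MediaPlayerSketch implements GLEventListener {
    private GLMediaPlayer player;

    public void init(GLAutoDrawable drawable) {
        try {
            final GL gl = drawable.getGL();
            // Illustrative URL; any URLConnection to a supported stream works.
            final URLConnection conn = new URL("file:///tmp/test.mp4").openConnection();
            player = GLMediaPlayerFactory.createDefault();
            // Request 4 buffered textures: per the diff, a count > 2 enables
            // off-thread decoding (FramePusher); 1 keeps decoding on-thread.
            player.initGLStream(gl, 4, conn);
            player.start();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    public void display(GLAutoDrawable drawable) {
        final GL gl = drawable.getGL();
        // Dequeue the next decoded frame (non-blocking here) and bind its texture.
        final TextureSequence.TextureFrame frame = player.getNextTexture(gl, false);
        final Texture tex = frame.getTexture();
        gl.glActiveTexture(GL.GL_TEXTURE0 + player.getTextureUnit());
        tex.enable(gl);
        tex.bind(gl);
        // ... draw textured geometry here ...

        // The new PTS getters: video PTS refers to getLastTexture(), audio PTS to the sink.
        final int vPTS = player.getVideoPTS();
        final int aPTS = player.getAudioPTS();
        if (Math.abs(vPTS - aPTS) > 100) {
            // Illustrative reaction to A/V drift; the commit itself only exposes the values.
            System.err.println("A/V drift: " + (vPTS - aPTS) + " ms");
        }
    }

    public void dispose(GLAutoDrawable drawable) {
        player.destroy(drawable.getGL());
    }

    public void reshape(GLAutoDrawable drawable, int x, int y, int w, int h) { }
}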
diff --git a/make/scripts/tests.sh b/make/scripts/tests.sh index 454bf21f3..ba4589d14 100644 --- a/make/scripts/tests.sh +++ b/make/scripts/tests.sh @@ -131,7 +131,7 @@ function jrun() { #D_ARGS="-Djogl.1thread=true -Djogl.debug.Threading" #D_ARGS="-Djogl.debug.DebugGL -Djogl.debug.TraceGL -Djogl.debug.GLContext.TraceSwitch -Djogl.debug=all" #D_ARGS="-Djogamp.debug.IOUtil -Djogl.debug.GLSLCode -Djogl.debug.GLMediaPlayer" - D_ARGS="-Djogl.debug.GLMediaPlayer -Djogl.debug.AudioSink" + #D_ARGS="-Djogl.debug.GLMediaPlayer -Djogl.debug.AudioSink" #D_ARGS="-Djogl.debug.GLArrayData" #D_ARGS="-Djogl.debug.GLDrawable" #D_ARGS="-Djogl.debug.EGLDisplayUtil -Dnativewindow.debug.GraphicsConfiguration -Djogl.debug.GLDrawable" @@ -314,8 +314,8 @@ function testawtswt() { # av demos # #testnoawt jogamp.opengl.openal.av.ALDummyUsage $* -#testnoawt com.jogamp.opengl.test.junit.jogl.demos.es2.av.MovieCube $* -testnoawt com.jogamp.opengl.test.junit.jogl.demos.es2.av.MovieSimple $* +testnoawt com.jogamp.opengl.test.junit.jogl.demos.es2.av.MovieCube $* +#testnoawt com.jogamp.opengl.test.junit.jogl.demos.es2.av.MovieSimple $* # # core/newt (testnoawt and testawt) diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java index 1825dbd47..a36bce305 100644 --- a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java +++ b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java @@ -40,12 +40,12 @@ import com.jogamp.opengl.util.texture.TextureSequence; /** * Lifecycle of an GLMediaPlayer: * <table border="1"> - * <tr><th>action</th> <th>state before</th> <th>state after</th></tr> - * <tr><td>{@link #initGLStream(GL, URLConnection)}</td> <td>Uninitialized</td> <td>Stopped</td></tr> - * <tr><td>{@link #start()}</td> <td>Stopped, Paused</td> <td>Playing</td></tr> - * <tr><td>{@link #stop()}</td> <td>Playing, Paused</td> <td>Stopped</td></tr> - * <tr><td>{@link #pause()}</td> <td>Playing</td> <td>Paused</td></tr> - * <tr><td>{@link #destroy(GL)}</td> <td>ANY</td> <td>Uninitialized</td></tr> + * <tr><th>action</th> <th>state before</th> <th>state after</th></tr> + * <tr><td>{@link #initGLStream(GL, int, URLConnection)}</td> <td>Uninitialized</td> <td>Stopped</td></tr> + * <tr><td>{@link #start()}</td> <td>Stopped, Paused</td> <td>Playing</td></tr> + * <tr><td>{@link #stop()}</td> <td>Playing, Paused</td> <td>Stopped</td></tr> + * <tr><td>{@link #pause()}</td> <td>Playing</td> <td>Paused</td></tr> + * <tr><td>{@link #destroy(GL)}</td> <td>ANY</td> <td>Uninitialized</td></tr> * </table> * <p> * Current implementations (check each API doc link for details): @@ -105,8 +105,12 @@ public interface GLMediaPlayer extends TextureSequence { public int getTextureCount(); - /** Defaults to 0 */ + /** Returns the texture target used by implementation. */ + public int getTextureTarget(); + + /** Sets the texture unit. Defaults to 0. */ public void setTextureUnit(int u); + /** Sets the texture min-mag filter, defaults to {@link GL#GL_NEAREST}. */ public void setTextureMinMagFilter(int[] minMagFilter); /** Sets the texture min-mag filter, defaults to {@link GL#GL_CLAMP_TO_EDGE}. */ @@ -119,6 +123,7 @@ public interface GLMediaPlayer extends TextureSequence { * Uninitialized -> Stopped * </p> * @param gl current GL object. If null, no video output and textures will be available. + * @param textureCount desired number of buffered textures to be decoded off-thread, use <code>1</code> for on-thread decoding. 
* @param urlConn the stream connection * @return the new state * @@ -126,7 +131,7 @@ public interface GLMediaPlayer extends TextureSequence { * @throws IOException in case of difficulties to open or process the stream * @throws GLException in case of difficulties to initialize the GL resources */ - public State initGLStream(GL gl, URLConnection urlConn) throws IllegalStateException, GLException, IOException; + public State initGLStream(GL gl, int textureCount, URLConnection urlConn) throws IllegalStateException, GLException, IOException; /** * Releases the GL and stream resources. @@ -161,11 +166,21 @@ public interface GLMediaPlayer extends TextureSequence { public State getState(); /** - * @return time current position in milliseconds + * @return current streaming position in milliseconds **/ public int getCurrentPosition(); /** + * @return current video PTS in milliseconds of {@link #getLastTexture()} + **/ + public int getVideoPTS(); + + /** + * @return current audio PTS in milliseconds. + **/ + public int getAudioPTS(); + + /** * Allowed in state Stopped, Playing and Paused, otherwise ignored. * * @param msec absolute desired time position in milliseconds @@ -187,7 +202,7 @@ public interface GLMediaPlayer extends TextureSequence { * </p> * * @see #addEventListener(GLMediaEventListener) - * @see GLMediaEventListener#newFrameAvailable(GLMediaPlayer, long) + * @see GLMediaEventListener#newFrameAvailable(GLMediaPlayer, TextureFrame, long) */ @Override public TextureSequence.TextureFrame getNextTexture(GL gl, boolean blocking) throws IllegalStateException; diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayerFactory.java b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayerFactory.java index f09531f7f..2707dd6d2 100644 --- a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayerFactory.java +++ b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayerFactory.java @@ -29,18 +29,20 @@ package com.jogamp.opengl.util.av; import jogamp.opengl.util.av.NullGLMediaPlayer; -import com.jogamp.common.os.AndroidVersion; -import com.jogamp.common.os.Platform; import com.jogamp.common.util.ReflectionUtil; public class GLMediaPlayerFactory { private static final String AndroidGLMediaPlayerAPI14ClazzName = "jogamp.opengl.android.av.AndroidGLMediaPlayerAPI14"; private static final String FFMPEGMediaPlayerClazzName = "jogamp.opengl.util.av.impl.FFMPEGMediaPlayer"; + private static final String OMXGLMediaPlayerClazzName = "jogamp.opengl.util.av.impl.OMXGLMediaPlayer"; private static final String isAvailableMethodName = "isAvailable"; public static GLMediaPlayer createDefault() { final ClassLoader cl = GLMediaPlayerFactory.class.getClassLoader(); - GLMediaPlayer sink = create(cl, AndroidGLMediaPlayerAPI14ClazzName); + GLMediaPlayer sink = create(cl, OMXGLMediaPlayerClazzName); + if( null == sink ) { + sink = create(cl, AndroidGLMediaPlayerAPI14ClazzName); + } if( null == sink ) { sink = create(cl, FFMPEGMediaPlayerClazzName); } diff --git a/src/jogl/classes/com/jogamp/opengl/util/texture/TextureSequence.java b/src/jogl/classes/com/jogamp/opengl/util/texture/TextureSequence.java index 9f951d5da..3f739b2cc 100644 --- a/src/jogl/classes/com/jogamp/opengl/util/texture/TextureSequence.java +++ b/src/jogl/classes/com/jogamp/opengl/util/texture/TextureSequence.java @@ -112,26 +112,35 @@ public interface TextureSequence { public static class TextureFrame { public TextureFrame(Texture t) { texture = t; + pts = 0; } public final Texture getTexture() { return texture; } + public final int getPTS() 
{ return pts; } + public final void setPTS(int pts) { this.pts = pts; } public String toString() { - return "TextureFrame[" + texture + "]"; + return "TextureFrame[" + pts + "ms: " + texture + "]"; } protected final Texture texture; + protected int pts; } public interface TexSeqEventListener<T extends TextureSequence> { /** - * Signaling listeners that {@link TextureSequence#getNextTexture(GL, boolean)} is able to deliver a new frame. + * Signaling listeners that a new {@link TextureFrame} is available. + * <p> + * User shall utilize {@link TextureSequence#getNextTexture(GL, boolean)} to dequeue it to maintain + * a consistent queue. + * </p> * @param ts the event source + * @param newFrame the newly enqueued frame * @param when system time in msec. **/ - public void newFrameAvailable(T ts, long when); + public void newFrameAvailable(T ts, TextureFrame newFrame, long when); } - /** Return the texture unit to be used with this frame. */ + /** Return the texture unit used to render the current frame. */ public int getTextureUnit(); public int[] getTextureMinMagFilter(); diff --git a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java index 8356a2bae..765cda084 100644 --- a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java +++ b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java @@ -28,12 +28,14 @@ package jogamp.opengl.android.av; import java.io.IOException; +import java.nio.Buffer; import javax.media.opengl.GL; import javax.media.opengl.GLES2; import com.jogamp.common.os.AndroidVersion; import com.jogamp.common.os.Platform; +import com.jogamp.opengl.util.texture.Texture; import com.jogamp.opengl.util.texture.TextureSequence; import jogamp.common.os.android.StaticContext; @@ -76,7 +78,6 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { MediaPlayer mp; volatile boolean updateSurface = false; Object updateSurfaceLock = new Object(); - TextureSequence.TextureFrame lastTexFrame = null; /** private static String toString(MediaPlayer m) { @@ -90,17 +91,16 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { throw new RuntimeException("AndroidGLMediaPlayerAPI14 not available"); } this.setTextureTarget(GLES2.GL_TEXTURE_EXTERNAL_OES); - this.setTextureCount(1); mp = new MediaPlayer(); } @Override - protected boolean setPlaySpeedImpl(float rate) { + protected final boolean setPlaySpeedImpl(float rate) { return false; } @Override - protected boolean startImpl() { + protected final boolean startImpl() { if(null != mp) { try { mp.start(); @@ -115,7 +115,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { } @Override - protected boolean pauseImpl() { + protected final boolean pauseImpl() { if(null != mp) { wakeUp(false); try { @@ -131,7 +131,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { } @Override - protected boolean stopImpl() { + protected final boolean stopImpl() { if(null != mp) { wakeUp(false); try { @@ -147,7 +147,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { } @Override - protected int seekImpl(int msec) { + protected final int seekImpl(int msec) { if(null != mp) { mp.seekTo(msec); return mp.getCurrentPosition(); @@ -155,40 +155,6 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { return 0; } - @Override - protected TextureSequence.TextureFrame getLastTextureImpl() { - return lastTexFrame; - } - - @Override - protected 
TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) { - if(null != stex && null != mp) { - // Only block once, no while-loop. - // This relaxes locking code of non crucial resources/events. - boolean update = updateSurface; - if(!update && blocking) { - synchronized(updateSurfaceLock) { - if(!updateSurface) { // volatile OK. - try { - updateSurfaceLock.wait(); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - updateSurface = false; - update = true; - } - } - if(update) { - stex.updateTexImage(); - // stex.getTransformMatrix(atex.getSTMatrix()); - lastTexFrame=texFrames[0]; - } - - } - return lastTexFrame; - } - private void wakeUp(boolean newFrame) { synchronized(updateSurfaceLock) { if(newFrame) { @@ -199,12 +165,13 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { } @Override - protected int getCurrentPositionImpl() { - return null != mp ? mp.getCurrentPosition() : 0; - } + protected final int getCurrentPositionImpl() { return null != mp ? mp.getCurrentPosition() : 0; } + + @Override + protected final int getAudioPTSImpl() { return getCurrentPositionImpl(); } @Override - protected void destroyImpl(GL gl) { + protected final void destroyImpl(GL gl) { if(null != mp) { wakeUp(false); mp.release(); @@ -213,9 +180,25 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { } SurfaceTexture stex = null; + public static class SurfaceTextureFrame extends TextureSequence.TextureFrame { + public SurfaceTextureFrame(Texture t, SurfaceTexture stex) { + super(t); + this.surfaceTex = stex; + this.surface = new Surface(stex); + } + + public final SurfaceTexture getSurfaceTexture() { return surfaceTex; } + public final Surface getSurface() { return surface; } + + public String toString() { + return "SurfaceTextureFrame[" + pts + "ms: " + texture + ", " + surfaceTex + "]"; + } + private final SurfaceTexture surfaceTex; + private final Surface surface; + } @Override - protected void initGLStreamImpl(GL gl, int[] texNames) throws IOException { + protected final void initGLStreamImpl(GL gl) throws IOException { if(null!=mp && null!=urlConn) { try { final Uri uri = Uri.parse(urlConn.getURL().toExternalForm()); @@ -227,44 +210,86 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { } catch (IllegalStateException e) { throw new RuntimeException(e); } - stex = new SurfaceTexture(texNames[0]); // only 1 texture - stex.setOnFrameAvailableListener(onFrameAvailableListener); + if( null == stex ) { + throw new InternalError("XXX"); + } final Surface surf = new Surface(stex); mp.setSurface(surf); surf.release(); + mp.setSurface(null); try { mp.prepare(); } catch (IOException ioe) { throw new IOException("MediaPlayer failed to process stream <"+urlConn.getURL().toExternalForm()+">: "+ioe.getMessage(), ioe); } + final String icodec = "android"; updateAttributes(mp.getVideoWidth(), mp.getVideoHeight(), 0, 0, 0, 0f, 0, mp.getDuration(), - null, null); + icodec, icodec); + } + } + + @Override + protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking) { + if(null != stex && null != mp) { + final SurfaceTextureFrame nextSFrame = (SurfaceTextureFrame) nextFrame; + final Surface nextSurface = nextSFrame.getSurface(); + mp.setSurface(nextSurface); + nextSurface.release(); + + // Only block once, no while-loop. + // This relaxes locking code of non crucial resources/events. 
+ boolean update = updateSurface; + if(!update && blocking) { + synchronized(updateSurfaceLock) { + if(!updateSurface) { // volatile OK. + try { + updateSurfaceLock.wait(); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + update = updateSurface; + updateSurface = false; + } + } + if(update) { + final SurfaceTexture nextSTex = nextSFrame.getSurfaceTexture(); + nextSTex.updateTexImage(); + // nextFrame.setPTS( (int) ( nextSTex.getTimestamp() / 1000000L ) ); // nano -9 -> milli -3 + nextFrame.setPTS( mp.getCurrentPosition() ); + // stex.getTransformMatrix(atex.getSTMatrix()); + } } + return true; } + @Override + protected final void syncFrame2Audio(TextureFrame frame) {} @Override - protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) { - lastTexFrame = new TextureSequence.TextureFrame( createTexImageImpl(gl, idx, tex, width, height, true) ); - return lastTexFrame; + protected final TextureSequence.TextureFrame createTexImage(GL gl, int texName) { + if( null != stex ) { + throw new InternalError("XXX"); + } + stex = new SurfaceTexture(texName); // only 1 texture + stex.setOnFrameAvailableListener(onFrameAvailableListener); + return new TextureSequence.TextureFrame( createTexImageImpl(gl, texName, width, height, true) ); } @Override - protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) { + protected final void destroyTexFrame(GL gl, TextureSequence.TextureFrame imgTex) { if(null != stex) { stex.release(); stex = null; } - lastTexFrame = null; - super.destroyTexImage(gl, imgTex); + super.destroyTexFrame(gl, imgTex); } protected OnFrameAvailableListener onFrameAvailableListener = new OnFrameAvailableListener() { @Override public void onFrameAvailable(SurfaceTexture surfaceTexture) { wakeUp(true); - AndroidGLMediaPlayerAPI14.this.newFrameAvailable(); } - }; + }; } diff --git a/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java index 274ccffd5..57d5ff625 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java +++ b/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java @@ -77,19 +77,19 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl { } - protected EGLMediaPlayerImpl() { - this(TextureType.GL, false); - } - protected EGLMediaPlayerImpl(TextureType texType, boolean useKHRSync) { super(); this.texType = texType; this.useKHRSync = useKHRSync; } + @Override + protected final int validateTextureCount(int desiredTextureCount) { + return desiredTextureCount>1 ? 
desiredTextureCount : 2; + } @Override - protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) { - final Texture texture = super.createTexImageImpl(gl, idx, tex, width, height, false); + protected TextureSequence.TextureFrame createTexImage(GL gl, int texName) { + final Texture texture = super.createTexImageImpl(gl, texName, width, height, false); final Buffer clientBuffer; final long image; final long sync; @@ -117,7 +117,7 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl { EGLExt.EGL_GL_TEXTURE_2D_KHR, clientBuffer, nioTmp); if (0==image) { - throw new RuntimeException("EGLImage creation failed: "+EGL.eglGetError()+", ctx "+eglCtx+", tex "+tex[idx]+", err "+toHexString(EGL.eglGetError())); + throw new RuntimeException("EGLImage creation failed: "+EGL.eglGetError()+", ctx "+eglCtx+", tex "+texName+", err "+toHexString(EGL.eglGetError())); } } else { clientBuffer = null; @@ -141,7 +141,7 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl { } @Override - protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) { + protected void destroyTexFrame(GL gl, TextureSequence.TextureFrame frame) { final boolean eglUsage = TextureType.KHRImage == texType || useKHRSync ; final EGLContext eglCtx; final EGLExt eglExt; @@ -156,7 +156,7 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl { eglExt = null; eglDrawable = null; } - final EGLTextureFrame eglTex = (EGLTextureFrame) imgTex; + final EGLTextureFrame eglTex = (EGLTextureFrame) frame; if(0!=eglTex.getImage()) { eglExt.eglDestroyImageKHR(eglDrawable.getNativeSurface().getDisplayHandle(), eglTex.getImage()); @@ -164,6 +164,6 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl { if(0!=eglTex.getSync()) { eglExt.eglDestroySyncKHR(eglDrawable.getNativeSurface().getDisplayHandle(), eglTex.getSync()); } - super.destroyTexImage(gl, imgTex); + super.destroyTexFrame(gl, frame); } } diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java index 2ff91a3f6..bc297dc21 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java +++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java @@ -30,13 +30,17 @@ package jogamp.opengl.util.av; import java.io.IOException; import java.net.URLConnection; import java.util.ArrayList; -import java.util.HashMap; import java.util.Iterator; +import javax.media.nativewindow.AbstractGraphicsDevice; import javax.media.opengl.GL; import javax.media.opengl.GL2; +import javax.media.opengl.GLContext; +import javax.media.opengl.GLDrawable; +import javax.media.opengl.GLDrawableFactory; import javax.media.opengl.GLES2; import javax.media.opengl.GLException; +import javax.media.opengl.GLProfile; import com.jogamp.opengl.util.av.GLMediaPlayer; import com.jogamp.opengl.util.texture.Texture; @@ -45,7 +49,7 @@ import com.jogamp.opengl.util.texture.TextureSequence; /** * After object creation an implementation may customize the behavior: * <ul> - * <li>{@link #setTextureCount(int)}</li> + * <li>{@link #setDesTextureCount(int)}</li> * <li>{@link #setTextureTarget(int)}</li> * <li>{@link EGLMediaPlayerImpl#setEGLTexImageAttribs(boolean, boolean)}.</li> * </ul> @@ -59,6 +63,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { protected static final String unknown = "unknown"; protected State state; + protected int textureCount; protected int textureTarget; protected int textureFormat; @@ -74,35 +79,38 @@ 
public abstract class GLMediaPlayerImpl implements GLMediaPlayer { protected volatile float playSpeed = 1.0f; - /** Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */ + /** Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */ protected int width = 0; - /** Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */ + /** Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */ protected int height = 0; - /** Video fps. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */ + /** Video fps. Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */ protected float fps = 0; - /** Stream bps. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */ + /** Stream bps. Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */ protected int bps_stream = 0; - /** Video bps. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */ + /** Video bps. Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */ protected int bps_video = 0; - /** Audio bps. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */ + /** Audio bps. Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */ protected int bps_audio = 0; - /** In frames. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */ + /** In frames. Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */ protected int totalFrames = 0; - /** In ms. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */ + /** In ms. Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */ protected int duration = 0; - /** Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */ + /** Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */ protected String acodec = unknown; - /** Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */ + /** Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. 
*/ protected String vcodec = unknown; protected int frameNumber = 0; + protected int currentVideoPTS = 0; - protected TextureSequence.TextureFrame[] texFrames = null; - protected HashMap<Integer, TextureSequence.TextureFrame> texFrameMap = new HashMap<Integer, TextureSequence.TextureFrame>(); + protected SyncedRingbuffer<TextureFrame> videoFramesFree = null; + protected SyncedRingbuffer<TextureFrame> videoFramesDecoded = null; + protected volatile TextureFrame lastFrame = null; + private ArrayList<GLMediaEventListener> eventListeners = new ArrayList<GLMediaEventListener>(); protected GLMediaPlayerImpl() { - this.textureCount=3; + this.textureCount=0; this.textureTarget=GL.GL_TEXTURE_2D; this.textureFormat = GL.GL_RGBA; this.textureInternalFormat = GL.GL_RGBA; @@ -112,14 +120,14 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { } @Override - public void setTextureUnit(int u) { texUnit = u; } + public final void setTextureUnit(int u) { texUnit = u; } @Override - public int getTextureUnit() { return texUnit; } + public final int getTextureUnit() { return texUnit; } + + @Override + public final int getTextureTarget() { return textureTarget; } - protected final void setTextureCount(int textureCount) { - this.textureCount=textureCount; - } @Override public final int getTextureCount() { return textureCount; } @@ -134,29 +142,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { public final int[] getTextureMinMagFilter() { return texMinMagFilter; } public final void setTextureWrapST(int[] wrapST) { texWrapST[0] = wrapST[0]; texWrapST[1] = wrapST[1];} - public final int[] getTextureWrapST() { return texWrapST; } - - @Override - public final TextureSequence.TextureFrame getLastTexture() throws IllegalStateException { - if(State.Uninitialized == state) { - throw new IllegalStateException("Instance not initialized: "+this); - } - return getLastTextureImpl(); - } - protected abstract TextureSequence.TextureFrame getLastTextureImpl(); - - @Override - public final synchronized TextureSequence.TextureFrame getNextTexture(GL gl, boolean blocking) throws IllegalStateException { - if(State.Uninitialized == state) { - throw new IllegalStateException("Instance not initialized: "+this); - } - if(State.Playing == state) { - final TextureSequence.TextureFrame f = getNextTextureImpl(gl, blocking); - return f; - } - return getLastTextureImpl(); - } - protected abstract TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking); + public final int[] getTextureWrapST() { return texWrapST; } @Override public String getRequiredExtensionsShaderStub() throws IllegalStateException { @@ -229,12 +215,15 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { protected abstract boolean setPlaySpeedImpl(float rate); public final State start() { - switch(state) { + switch( state ) { case Stopped: + /** fall-through intended */ case Paused: - if(startImpl()) { + if( startImpl() ) { + resumeFramePusher(); state = State.Playing; } + default: } if(DEBUG) { System.err.println("Start: "+toString()); } return state; @@ -242,7 +231,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { protected abstract boolean startImpl(); public final State pause() { - if(State.Playing == state && pauseImpl()) { + if( State.Playing == state && pauseImpl() ) { + pauseFramePusher(); state = State.Paused; } if(DEBUG) { System.err.println("Pause: "+toString()); } @@ -251,12 +241,15 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { protected abstract 
boolean pauseImpl(); public final State stop() { - switch(state) { + switch( state ) { case Playing: + /** fall-through intended */ case Paused: - if(stopImpl()) { + if( stopImpl() ) { + pauseFramePusher(); state = State.Stopped; } + default: } if(DEBUG) { System.err.println("Stop: "+toString()); } return state; @@ -265,61 +258,70 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { @Override public final int getCurrentPosition() { - if(State.Uninitialized != state) { + if( State.Uninitialized != state ) { return getCurrentPositionImpl(); } return 0; } protected abstract int getCurrentPositionImpl(); + @Override + public final int getVideoPTS() { return currentVideoPTS; } + + @Override + public final int getAudioPTS() { + if( State.Uninitialized != state ) { + return getAudioPTSImpl(); + } + return 0; + } + protected abstract int getAudioPTSImpl(); + public final int seek(int msec) { - final int cp; + final int pts1; switch(state) { case Stopped: case Playing: case Paused: - cp = seekImpl(msec); + pauseFramePusher(); + pts1 = seekImpl(msec); + currentVideoPTS=pts1; + resumeFramePusher(); break; default: - cp = 0; + pts1 = 0; } if(DEBUG) { System.err.println("Seek("+msec+"): "+toString()); } - return cp; + return pts1; } protected abstract int seekImpl(int msec); public final State getState() { return state; } @Override - public final State initGLStream(GL gl, URLConnection urlConn) throws IllegalStateException, GLException, IOException { + public final State initGLStream(GL gl, int reqTextureCount, URLConnection urlConn) throws IllegalStateException, GLException, IOException { if(State.Uninitialized != state) { throw new IllegalStateException("Instance not in state "+State.Uninitialized+", but "+state+", "+this); } this.urlConn = urlConn; if (this.urlConn != null) { try { - if(null != gl) { - if(null!=texFrames) { - // re-init .. - removeAllImageTextures(gl); - } else { - texFrames = new TextureSequence.TextureFrame[textureCount]; - } - final int[] tex = new int[textureCount]; - { - gl.glGenTextures(textureCount, tex, 0); - final int err = gl.glGetError(); - if( GL.GL_NO_ERROR != err ) { - throw new RuntimeException("TextureNames creation failed (num: "+textureCount+"): err "+toHexString(err)); - } + if( null != gl ) { + removeAllTextureFrames(gl); + textureCount = validateTextureCount(reqTextureCount); + if( textureCount < 2 ) { + throw new InternalError("Validated texture count < 2: "+textureCount); } - initGLStreamImpl(gl, tex); - - for(int i=0; i<textureCount; i++) { - final TextureSequence.TextureFrame tf = createTexImage(gl, i, tex); - texFrames[i] = tf; - texFrameMap.put(tex[i], tf); + initGLStreamImpl(gl); // also initializes width, height, .. etc + videoFramesFree = new SyncedRingbuffer<TextureFrame>(createTexFrames(gl, textureCount), true /* full */); + if( 2 < textureCount ) { + videoFramesDecoded = new SyncedRingbuffer<TextureFrame>(new TextureFrame[textureCount], false /* full */); + framePusher = new FramePusher(gl, requiresOffthreadGLCtx()); + framePusher.doStart(); + } else { + videoFramesDecoded = null; } + lastFrame = videoFramesFree.getBlocking(false /* clearRef */ ); } state = State.Stopped; return state; @@ -329,35 +331,42 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { } return state; } + /** + * Returns the validated number of textures to be handled. + * <p> + * Default is always 2 textures, last texture and the decoding texture. 
+ * </p> + */ + protected int validateTextureCount(int desiredTextureCount) { + return 2; + } + protected boolean requiresOffthreadGLCtx() { return false; } - /** - * Implementation shall set the following set of data here - * @param gl TODO - * @param texNames TODO - * @see #width - * @see #height - * @see #fps - * @see #bps_stream - * @see #totalFrames - * @see #acodec - * @see #vcodec - */ - protected abstract void initGLStreamImpl(GL gl, int[] texNames) throws IOException; - - protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) { - return new TextureSequence.TextureFrame( createTexImageImpl(gl, idx, tex, width, height, false) ); + private final TextureFrame[] createTexFrames(GL gl, final int count) { + final int[] texNames = new int[count]; + gl.glGenTextures(count, texNames, 0); + final int err = gl.glGetError(); + if( GL.GL_NO_ERROR != err ) { + throw new RuntimeException("TextureNames creation failed (num: "+count+"): err "+toHexString(err)); + } + final TextureFrame[] texFrames = new TextureFrame[count]; + for(int i=0; i<count; i++) { + texFrames[i] = createTexImage(gl, texNames[i]); + } + return texFrames; } + protected abstract TextureFrame createTexImage(GL gl, int texName); - protected Texture createTexImageImpl(GL gl, int idx, int[] tex, int tWidth, int tHeight, boolean mustFlipVertically) { - if( 0 > tex[idx] ) { - throw new RuntimeException("TextureName "+toHexString(tex[idx])+" invalid."); + protected final Texture createTexImageImpl(GL gl, int texName, int tWidth, int tHeight, boolean mustFlipVertically) { + if( 0 > texName ) { + throw new RuntimeException("TextureName "+toHexString(texName)+" invalid."); } gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit()); - gl.glBindTexture(textureTarget, tex[idx]); + gl.glBindTexture(textureTarget, texName); { final int err = gl.glGetError(); if( GL.GL_NO_ERROR != err ) { - throw new RuntimeException("Couldn't bind textureName "+toHexString(tex[idx])+" to 2D target, err "+toHexString(err)); + throw new RuntimeException("Couldn't bind textureName "+toHexString(texName)+" to 2D target, err "+toHexString(err)); } } @@ -389,30 +398,297 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { gl.glTexParameteri(textureTarget, GL.GL_TEXTURE_WRAP_S, texWrapST[0]); gl.glTexParameteri(textureTarget, GL.GL_TEXTURE_WRAP_T, texWrapST[1]); - return com.jogamp.opengl.util.texture.TextureIO.newTexture(tex[idx], - textureTarget, + return com.jogamp.opengl.util.texture.TextureIO.newTexture( + texName, textureTarget, tWidth, tHeight, width, height, mustFlipVertically); } + + private final void removeAllTextureFrames(GL gl) { + if( null != videoFramesFree ) { + final TextureFrame[] texFrames = videoFramesFree.getArray(); + videoFramesFree = null; + videoFramesDecoded = null; + lastFrame = null; + for(int i=0; i<texFrames.length; i++) { + final TextureFrame frame = texFrames[i]; + if(null != frame) { + destroyTexFrame(gl, frame); + texFrames[i] = null; + } + } + } + textureCount=0; + } + protected void destroyTexFrame(GL gl, TextureFrame frame) { + frame.getTexture().destroy(gl); + } + + /** + * Implementation shall set the following set of data here + * @param gl TODO + * @see #width + * @see #height + * @see #fps + * @see #bps_stream + * @see #totalFrames + * @see #acodec + * @see #vcodec + */ + protected abstract void initGLStreamImpl(GL gl) throws IOException; - protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) { - imgTex.getTexture().destroy(gl); + @Override + public final 
TextureFrame getLastTexture() throws IllegalStateException { + if(State.Uninitialized == state) { + throw new IllegalStateException("Instance not initialized: "+this); + } + return lastFrame; } - protected void removeAllImageTextures(GL gl) { - if(null != texFrames) { - for(int i=0; i<textureCount; i++) { - final TextureSequence.TextureFrame imgTex = texFrames[i]; - if(null != imgTex) { - destroyTexImage(gl, imgTex); - texFrames[i] = null; + @Override + public final synchronized TextureFrame getNextTexture(GL gl, boolean blocking) throws IllegalStateException { + if(State.Uninitialized == state) { + throw new IllegalStateException("Instance not initialized: "+this); + } + if(State.Playing == state) { + TextureFrame nextFrame = null; + boolean ok = true; + try { + if( 2 < textureCount ) { + nextFrame = videoFramesDecoded.getBlocking(false /* clearRef */ ); + } else { + nextFrame = videoFramesFree.getBlocking(false /* clearRef */ ); + if( getNextTextureImpl(gl, nextFrame, blocking) ) { + newFrameAvailable(nextFrame); + } else { + ok = false; + } + } + if( ok ) { + currentVideoPTS = nextFrame.getPTS(); + if( blocking ) { + syncFrame2Audio(nextFrame); + } + final TextureFrame _lastFrame = lastFrame; + lastFrame = nextFrame; + videoFramesFree.putBlocking(_lastFrame); + } + } catch (InterruptedException e) { + ok = false; + e.printStackTrace(); + } finally { + if( !ok && null != nextFrame ) { // put back + videoFramesFree.put(nextFrame); } } } - texFrameMap.clear(); + return lastFrame; } + protected abstract boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking); + protected abstract void syncFrame2Audio(TextureFrame frame); + + private final void newFrameAvailable(TextureFrame frame) { + frameNumber++; + synchronized(eventListenersLock) { + for(Iterator<GLMediaEventListener> i = eventListeners.iterator(); i.hasNext(); ) { + i.next().newFrameAvailable(this, frame, System.currentTimeMillis()); + } + } + } + + class FramePusher extends Thread { + private volatile boolean isRunning = false; + private volatile boolean isActive = false; + + private volatile boolean shallPause = true; + private volatile boolean shallStop = false; + + private final GL gl; + private GLDrawable dummyDrawable = null; + private GLContext sharedGLCtx = null; + + FramePusher(GL gl, boolean createSharedCtx) { + setDaemon(true); + this.gl = createSharedCtx ? createSharedGL(gl) : gl; + } + + private GL createSharedGL(GL gl) { + final GLContext glCtx = gl.getContext(); + final boolean glCtxCurrent = glCtx.isCurrent(); + final GLProfile glp = gl.getGLProfile(); + final GLDrawableFactory factory = GLDrawableFactory.getFactory(glp); + final AbstractGraphicsDevice device = glCtx.getGLDrawable().getNativeSurface().getGraphicsConfiguration().getScreen().getDevice(); + dummyDrawable = factory.createDummyDrawable(device, true, glp); // own device! + dummyDrawable.setRealized(true); + sharedGLCtx = dummyDrawable.createContext(glCtx); + makeCurrent(sharedGLCtx); + if( glCtxCurrent ) { + makeCurrent(glCtx); + } else { + sharedGLCtx.release(); + } + return sharedGLCtx.getGL(); + } + private void makeCurrent(GLContext ctx) { + if( GLContext.CONTEXT_NOT_CURRENT >= ctx.makeCurrent() ) { + throw new GLException("Couldn't make ctx current: "+ctx); + } + } + + private void destroySharedGL() { + if( null != sharedGLCtx ) { + if( sharedGLCtx.isCreated() ) { + // Catch dispose GLExceptions by GLEventListener, just 'print' them + // so we can continue with the destruction. 
+ try { + sharedGLCtx.destroy(); + } catch (GLException gle) { + gle.printStackTrace(); + } + } + sharedGLCtx = null; + } + if( null != dummyDrawable ) { + final AbstractGraphicsDevice device = dummyDrawable.getNativeSurface().getGraphicsConfiguration().getScreen().getDevice(); + dummyDrawable.setRealized(false); + dummyDrawable = null; + device.close(); + } + } + + public synchronized void doPause() { + if( isActive ) { + shallPause = true; + while( isActive ) { + try { + this.wait(); // wait until paused + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + } + public synchronized void doResume() { + if( isRunning && !isActive ) { + shallPause = false; + while( !isActive ) { + this.notify(); // wake-up pause-block + try { + this.wait(); // wait until resumed + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + } + public synchronized void doStart() { + start(); + while( !isRunning ) { + try { + this.wait(); // wait until started + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + public synchronized void doStop() { + if( isRunning ) { + shallStop = true; + while( isRunning ) { + this.notify(); // wake-up pause-block (opt) + try { + this.wait(); // wait until stopped + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + } + public boolean isRunning() { return isRunning; } + public boolean isActive() { return isActive; } + + public void run() { + setName(getName()+"-FramePusher_"+FramePusherInstanceId); + FramePusherInstanceId++; + + synchronized ( this ) { + if( null != sharedGLCtx ) { + makeCurrent( sharedGLCtx ); + } + isRunning = true; + this.notify(); // wake-up doStart() + } + + while( !shallStop ){ + if( shallPause ) { + synchronized ( this ) { + while( shallPause && !shallStop ) { + isActive = false; + this.notify(); // wake-up doPause() + try { + this.wait(); // wait until resumed + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + isActive = true; + this.notify(); // wake-up doResume() + } + } + + if( !shallStop ) { + TextureFrame nextFrame = null; + boolean ok = false; + try { + nextFrame = videoFramesFree.getBlocking(true /* clearRef */ ); + if( getNextTextureImpl(gl, nextFrame, true) ) { + gl.glFinish(); + videoFramesDecoded.putBlocking(nextFrame); + newFrameAvailable(nextFrame); + ok = true; + } + } catch (InterruptedException e) { + if( !shallStop && !shallPause ) { + e.printStackTrace(); // oops + shallPause = false; + shallStop = true; + } + } finally { + if( !ok && null != nextFrame ) { // put back + videoFramesFree.put(nextFrame); + } + } + } + } + destroySharedGL(); + synchronized ( this ) { + isRunning = false; + isActive = false; + this.notify(); // wake-up doStop() + } + } + } + static int FramePusherInstanceId = 0; + private FramePusher framePusher = null; + private final void pauseFramePusher() { + if( null != framePusher ) { + framePusher.doPause(); + } + } + private final void resumeFramePusher() { + if( null != framePusher ) { + framePusher.doResume(); + } + } + private final void destroyFramePusher() { + if( null != framePusher ) { + framePusher.doStop(); + framePusher = null; + } + } + protected final void updateAttributes(int width, int height, int bps_stream, int bps_video, int bps_audio, float fps, int totalFrames, int duration, String vcodec, String acodec) { @@ -458,19 +734,12 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { } } } - protected final void newFrameAvailable() { - frameNumber++; - synchronized(eventListenersLock) { - 
for(Iterator<GLMediaEventListener> i = eventListeners.iterator(); i.hasNext(); ) { - i.next().newFrameAvailable(this, System.currentTimeMillis()); - } - } - } @Override public final synchronized State destroy(GL gl) { + destroyFramePusher(); destroyImpl(gl); - removeAllImageTextures(gl); + removeAllTextureFrames(gl); state = State.Uninitialized; return state; } diff --git a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java index cd48c3962..f1ce42257 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java +++ b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java @@ -49,66 +49,68 @@ import com.jogamp.opengl.util.texture.TextureSequence; */ public class NullGLMediaPlayer extends GLMediaPlayerImpl { private TextureData texData = null; - private TextureSequence.TextureFrame frame = null; private int pos_ms = 0; private int pos_start = 0; public NullGLMediaPlayer() { super(); - this.setTextureCount(1); } @Override - protected boolean setPlaySpeedImpl(float rate) { + protected final boolean setPlaySpeedImpl(float rate) { return false; } @Override - protected boolean startImpl() { + protected final boolean startImpl() { pos_start = (int)System.currentTimeMillis(); return true; } @Override - protected boolean pauseImpl() { + protected final boolean pauseImpl() { return true; } @Override - protected boolean stopImpl() { + protected final boolean stopImpl() { return true; } @Override - protected int seekImpl(int msec) { + protected final int seekImpl(int msec) { pos_ms = msec; validatePos(); return pos_ms; } @Override - protected TextureSequence.TextureFrame getLastTextureImpl() { - return frame; + protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking) { + return true; } - @Override - protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) { - return frame; - } + protected final void syncFrame2Audio(TextureFrame frame) { } @Override - protected int getCurrentPositionImpl() { + protected final int getCurrentPositionImpl() { pos_ms = (int)System.currentTimeMillis() - pos_start; validatePos(); return pos_ms; } + @Override + protected final int getAudioPTSImpl() { return getCurrentPositionImpl(); } + @Override - protected void destroyImpl(GL gl) { + protected final void destroyImpl(GL gl) { + if(null != texData) { + texData.destroy(); + texData = null; + } } - + @Override - protected void initGLStreamImpl(GL gl, int[] texNames) throws IOException { + protected final void initGLStreamImpl(GL gl) throws IOException { try { URLConnection urlConn = IOUtil.getResource("jogl/util/data/av/test-ntsc01-160x90.png", this.getClass().getClassLoader()); if(null != urlConn) { @@ -117,44 +119,44 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl { } catch (Exception e) { e.printStackTrace(); } + final int _w, _h; if(null != texData) { - width = texData.getWidth(); - height = texData.getHeight(); + _w = texData.getWidth(); + _h = texData.getHeight(); } else { - width = 640; - height = 480; - ByteBuffer buffer = Buffers.newDirectByteBuffer(width*height*4); + _w = 640; + _h = 480; + ByteBuffer buffer = Buffers.newDirectByteBuffer(_w*_h*4); while(buffer.hasRemaining()) { buffer.put((byte) 0xEA); buffer.put((byte) 0xEA); buffer.put((byte) 0xEA); buffer.put((byte) 0xEA); } buffer.rewind(); texData = new TextureData(GLProfile.getGL2ES2(), - GL.GL_RGBA, width, height, 0, + GL.GL_RGBA, _w, _h, 0, GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, false, false, false, 
buffer, null); } - fps = 24f; - duration = 10*60*1000; // msec - totalFrames = (int) ( (duration/1000)*fps ); - vcodec = "png-static"; + final float _fps = 24f; + final int _duration = 10*60*1000; // msec + final int _totalFrames = (int) ( (_duration/1000)*_fps ); + updateAttributes(_w, _h, + 0, 0, 0, + _fps, _totalFrames, _duration, + "png-static", null); } @Override - protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) { - Texture texture = super.createTexImageImpl(gl, idx, tex, width, height, false); + protected final TextureSequence.TextureFrame createTexImage(GL gl, int texName) { + final Texture texture = super.createTexImageImpl(gl, texName, width, height, false); if(null != texData) { texture.updateImage(gl, texData); - texData.destroy(); - texData = null; } - frame = new TextureSequence.TextureFrame( texture ); - return frame; + return new TextureSequence.TextureFrame( texture ); } @Override - protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) { - frame = null; - super.destroyTexImage(gl, imgTex); + protected final void destroyTexFrame(GL gl, TextureSequence.TextureFrame frame) { + super.destroyTexFrame(gl, frame); } private void validatePos() { diff --git a/src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java b/src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java index 5f5d69cf8..ea67387a0 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java +++ b/src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java @@ -74,6 +74,8 @@ public class SyncedRingbuffer<T> { } } + public final T[] getArray() { return array; } + public final int capacity() { return capacity; } diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java index 852e5149c..3680da1a8 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java +++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java @@ -116,25 +116,32 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo { private static long[] symbolAddr; private static final boolean ready; + private static final boolean libsLoaded; static { // native ffmpeg media player implementation is included in jogl_desktop and jogl_mobile GLProfile.initSingleton(); boolean _ready = false; + boolean[] _libsLoaded= { false }; try { - _ready = initSymbols(); + _ready = initSymbols(_libsLoaded); } catch (Throwable t) { t.printStackTrace(); } + libsLoaded = _libsLoaded[0]; ready = _ready; - if(!ready) { - System.err.println("FFMPEG: Not Available"); + if(!libsLoaded) { + System.err.println("LIB_AV Not Available"); + } else if(!ready) { + System.err.println("LIB_AV Not Matching"); } } + static boolean libsLoaded() { return libsLoaded; } static boolean initSingleton() { return ready; } - private static final boolean initSymbols() { + private static final boolean initSymbols(boolean[] libsLoaded) { + libsLoaded[0] = false; final DynamicLibraryBundle dl = AccessController.doPrivileged(new PrivilegedAction<DynamicLibraryBundle>() { public DynamicLibraryBundle run() { return new DynamicLibraryBundle(new FFMPEGDynamicLibraryBundleInfo()); @@ -148,6 +155,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo { if(!dl.isToolLibComplete()) { throw new RuntimeException("FFMPEG Tool libraries incomplete"); } + libsLoaded[0] = true; if(symbolNames.length != symbolCount) { throw new 
InternalError("XXX0 "+symbolNames.length+" != "+symbolCount); } diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java index b6708b379..83a5960f1 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java +++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java @@ -44,11 +44,10 @@ import com.jogamp.opengl.util.GLPixelStorageModes; import com.jogamp.opengl.util.av.AudioSink; import com.jogamp.opengl.util.av.AudioSinkFactory; import com.jogamp.opengl.util.texture.Texture; -import com.jogamp.opengl.util.texture.TextureSequence; +import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame; import jogamp.opengl.GLContextImpl; -import jogamp.opengl.util.av.EGLMediaPlayerImpl; -import jogamp.opengl.util.av.SyncedRingbuffer; +import jogamp.opengl.util.av.GLMediaPlayerImpl; /*** * Implementation utilizes <a href="http://libav.org/">Libav</a> @@ -102,7 +101,7 @@ import jogamp.opengl.util.av.SyncedRingbuffer; * </pre></li> * </ul> */ -public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl { +public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { // Count of zeroed buffers to return before switching to real sample provider private static final int TEMP_BUFFER_COUNT = 20; @@ -114,21 +113,20 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl { static final boolean available; static { - if(FFMPEGDynamicLibraryBundleInfo.initSingleton()) { + final boolean libAVGood = FFMPEGDynamicLibraryBundleInfo.initSingleton(); + if( FFMPEGDynamicLibraryBundleInfo.libsLoaded() ) { avUtilVersion = getAVVersion(getAvUtilVersion0()); avFormatVersion = getAVVersion(getAvFormatVersion0()); avCodecVersion = getAVVersion(getAvCodecVersion0()); System.err.println("LIB_AV Util : "+avUtilVersion); System.err.println("LIB_AV Format: "+avFormatVersion); System.err.println("LIB_AV Codec : "+avCodecVersion); - initIDs0(); - available = true; } else { avUtilVersion = null; avFormatVersion = null; avCodecVersion = null; - available = false; } + available = libAVGood ? initIDs0() : false; } public static final boolean isAvailable() { return available; } @@ -144,8 +142,6 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl { // protected long moviePtr = 0; - protected long procAddrGLTexSubImage2D = 0; - protected EGLMediaPlayerImpl.EGLTextureFrame lastTex = null; protected GLPixelStorageModes psm; protected PixelFormat vPixelFmt = null; protected int vPlanes = 0; @@ -161,21 +157,15 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl { // Audio // - protected static final boolean USE_AUDIO_PUSHER = false; protected final int AudioFrameCount = 8; protected final AudioSink audioSink; protected final int maxAvailableAudio; protected AudioSink.AudioDataFormat chosenAudioFormat; - protected final SyncedRingbuffer<AudioSink.AudioFrame> audioFramesBuffer = - USE_AUDIO_PUSHER ? 
new SyncedRingbuffer<AudioSink.AudioFrame>(new AudioSink.AudioFrame[AudioFrameCount], false /* full */) - : null; public FFMPEGMediaPlayer() { - super(TextureType.GL, false); if(!available) { throw new RuntimeException("FFMPEGMediaPlayer not available"); } - setTextureCount(1); moviePtr = createInstance0(DEBUG); if(0==moviePtr) { throw new GLException("Couldn't create FFMPEGInstance"); @@ -184,26 +174,15 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl { audioSink = AudioSinkFactory.createDefault(); maxAvailableAudio = audioSink.getQueuedByteCount(); } - @Override - protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) { - if(TextureType.GL == texType) { - final Texture texture = super.createTexImageImpl(gl, idx, tex, texWidth, texHeight, true); - lastTex = new EGLTextureFrame(null, texture, 0, 0); - } else { - throw new InternalError("n/a"); - } - return lastTex; + protected final int validateTextureCount(int desiredTextureCount) { + return desiredTextureCount>1 ? desiredTextureCount : 2; } - @Override - protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) { - lastTex = null; - super.destroyTexImage(gl, imgTex); - } - + protected final boolean requiresOffthreadGLCtx() { return true; } + @Override - protected void destroyImpl(GL gl) { + protected final void destroyImpl(GL gl) { if (moviePtr != 0) { destroyInstance0(moviePtr); moviePtr = 0; @@ -211,15 +190,29 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl { } @Override - protected void initGLStreamImpl(GL gl, int[] texNames) throws IOException { + protected final void initGLStreamImpl(GL gl) throws IOException { if(0==moviePtr) { throw new GLException("FFMPEG native instance null"); } + { + final GLContextImpl ctx = (GLContextImpl)gl.getContext(); + final ProcAddressTable pt = ctx.getGLProcAddressTable(); + final long procAddrGLTexSubImage2D = getAddressFor(pt, "glTexSubImage2D"); + if( 0 == procAddrGLTexSubImage2D ) { + throw new InternalError("glTexSubImage2D n/a in ProcAddressTable: "+pt.getClass().getName()+" of "+ctx.getGLVersion()); + } + final long procAddrGLGetError = getAddressFor(pt, "glGetError"); + if( 0 == procAddrGLGetError ) { + throw new InternalError("glGetError n/a in ProcAddressTable: "+pt.getClass().getName()+" of "+ctx.getGLVersion()); + } + setGLFuncs0(moviePtr, procAddrGLTexSubImage2D, procAddrGLGetError); + } + final String urlS=urlConn.getURL().toExternalForm(); chosenAudioFormat = audioSink.initSink(audioSink.getPreferredFormat(), AudioFrameCount); System.err.println("setURL: p1 "+this); - setStream0(moviePtr, urlS, -1, -1, AudioFrameCount); + setStream0(moviePtr, urlS, -1, -1, AudioFrameCount); // issues updateAttributes*(..) 
System.err.println("setURL: p2 "+this); int tf, tif=GL.GL_RGBA; // texture format and internal format @@ -239,12 +232,10 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl { } setTextureFormat(tif, tf); setTextureType(GL.GL_UNSIGNED_BYTE); - final GLContextImpl ctx = (GLContextImpl)gl.getContext(); - final ProcAddressTable pt = ctx.getGLProcAddressTable(); - procAddrGLTexSubImage2D = getAddressFor(pt, "glTexSubImage2D"); - if( 0 == procAddrGLTexSubImage2D ) { - throw new InternalError("glTexSubImage2D n/a in ProcAddressTable: "+pt.getClass().getName()+" of "+ctx.getGLVersion()); - } + } + @Override + protected final TextureFrame createTexImage(GL gl, int texName) { + return new TextureFrame( createTexImageImpl(gl, texName, texWidth, texHeight, true) ); } /** @@ -263,107 +254,6 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl { } ).longValue(); } - private final void pushSound(ByteBuffer sampleData, int data_size, int audio_pts) { - if( USE_AUDIO_PUSHER ) { - if( audioPusher != null && audioPusher.isRunning() ) { - try { - audioFramesBuffer.putBlocking(new AudioSink.AudioFrame(sampleData, data_size, audio_pts)); - } catch (InterruptedException e) { - e.printStackTrace(); // oops - } - } - } else { - pushAudioFrame(new AudioSink.AudioFrame(sampleData, data_size, audio_pts)); - } - } - - private final void pushAudioFrame(AudioSink.AudioFrame audioFrame) { - // poor mans audio sync .. - final long now = System.currentTimeMillis(); - final long now_d = now - lastAudioTime; - final long pts_d = audioFrame.audioPTS - lastAudioPTS; - final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ; - final boolean sleep = dt > audio_dt_d; - final long sleepP = dt - ( audio_dt_d / 2 ); - if(DEBUG) { - final int qAT = audioSink.getQueuedTime(); - System.err.println("s: pts-a "+audioFrame.audioPTS+", qAT "+qAT+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt+", sleep "+sleep+", sleepP "+sleepP+" ms"); - } - if( sleep ) { - try { - Thread.sleep( sleepP ); - } catch (InterruptedException e) { - e.printStackTrace(); // oops - } - lastAudioTime = System.currentTimeMillis(); - } else { - lastAudioTime = now; - } - if( audioSink.isDataAvailable(audioFrame.dataSize) ) { - audioSink.writeData(audioFrame); - lastAudioPTS=audioFrame.audioPTS; - } - } - - class AudioPusher extends Thread { - volatile boolean shallStop = false; - volatile boolean isBlocked = false; - - AudioPusher() { - setDaemon(true); - } - public void requestStop() { - shallStop = true; - if( isBlocked ) { - interrupt(); - } - } - public boolean isRunning() { return !shallStop; } - - public void run() { - setName(getName()+"-AudioPusher_"+AudioPusherInstanceId); - AudioPusherInstanceId++; - - while( !shallStop ){ - final AudioSink.AudioFrame audioFrame; - try { - isBlocked = true; - audioFrame = audioFramesBuffer.getBlocking(true /* clearRef */); - } catch (InterruptedException e) { - if( !shallStop ) { - e.printStackTrace(); // oops - } - shallStop = true; - return; - } - isBlocked = false; - - if( null != audioFrame ) { - FFMPEGMediaPlayer.this.pushAudioFrame(audioFrame); - } - } - } - } - static int AudioPusherInstanceId = 0; - private AudioPusher audioPusher = null; - - private final void stopAudioPusher() { - if( USE_AUDIO_PUSHER ) { - if( null != audioPusher ) { - audioPusher.requestStop(); - audioPusher = null; - } - audioFramesBuffer.clear(true); - } - } - private final void startAudioPusher() { - if( USE_AUDIO_PUSHER ) { - stopAudioPusher(); - audioPusher = new AudioPusher(); - audioPusher.start(); - } - } - 
private void updateAttributes2(int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane, int lSz0, int lSz1, int lSz2, int tWd0, int tWd1, int tWd2) { @@ -413,7 +303,7 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl { * Otherwise the call is delegated to it's super class. */ @Override - public String getTextureLookupFunctionName(String desiredFuncName) throws IllegalStateException { + public final String getTextureLookupFunctionName(String desiredFuncName) throws IllegalStateException { if(State.Uninitialized == state) { throw new IllegalStateException("Instance not initialized: "+this); } @@ -434,7 +324,7 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl { * e.g. YUV420P to RGB. Otherwise the call is delegated to it's super class. */ @Override - public String getTextureLookupFragmentShaderImpl() throws IllegalStateException { + public final String getTextureLookupFragmentShaderImpl() throws IllegalStateException { if(State.Uninitialized == state) { throw new IllegalStateException("Instance not initialized: "+this); } @@ -465,134 +355,131 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl { } @Override - protected synchronized int getCurrentPositionImpl() { + protected final synchronized int getCurrentPositionImpl() { return 0!=moviePtr ? getVideoPTS0(moviePtr) : 0; } @Override - protected synchronized boolean setPlaySpeedImpl(float rate) { + public final int getAudioPTSImpl() { return 0; } + + @Override + protected final synchronized boolean setPlaySpeedImpl(float rate) { return true; } @Override - public synchronized boolean startImpl() { + public final synchronized boolean startImpl() { if(0==moviePtr) { return false; } - startAudioPusher(); return true; } /** @return time position after issuing the command */ @Override - public synchronized boolean pauseImpl() { + public final synchronized boolean pauseImpl() { if(0==moviePtr) { return false; } - stopAudioPusher(); return true; } /** @return time position after issuing the command */ @Override - public synchronized boolean stopImpl() { + public final synchronized boolean stopImpl() { if(0==moviePtr) { return false; } - stopAudioPusher(); return true; } /** @return time position after issuing the command */ @Override - protected synchronized int seekImpl(int msec) { + protected final synchronized int seekImpl(int msec) { if(0==moviePtr) { throw new GLException("FFMPEG native instance null"); } - stopAudioPusher(); int pts0 = getVideoPTS0(moviePtr); int pts1 = seek0(moviePtr, msec); System.err.println("Seek: "+pts0+" -> "+msec+" : "+pts1); - lastAudioPTS=pts1; - lastVideoPTS=pts1; - startAudioPusher(); return pts1; } @Override - protected TextureSequence.TextureFrame getLastTextureImpl() { - return lastTex; + protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking) { + if(0==moviePtr) { + throw new GLException("FFMPEG native instance null"); + } + psm.setUnpackAlignment(gl, 1); // RGBA ? 4 : 1 + int avPTS = 0; + try { + final Texture tex = nextFrame.getTexture(); + gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit()); + tex.enable(gl); + tex.bind(gl); + + /** Try decode up to 10 packets to find one containing video, i.e. 
vPTS > 0 */ + for(int retry=10; 0 >= avPTS && 0 < retry; retry--) { + avPTS = readNextPacket0(moviePtr, textureTarget, textureFormat, textureType); + retry--; + } + } finally { + psm.restore(gl); + } + if( 0 < avPTS ) { + vSTS = avPTS; + nextFrame.setPTS(avPTS); + return true; + } else { + return false; + } } + private final void pushSound(ByteBuffer sampleData, int data_size, int audio_pts) { + aSTS = audio_pts; + final AudioSink.AudioFrame frame = new AudioSink.AudioFrame(sampleData, data_size, audio_pts); + if( audioSink.isDataAvailable(frame.dataSize) ) { + audioSink.writeData(frame); + } + } + + /** last audio streaming TS */ + private int aSTS = 0; + /** last video streaming TS */ + private int vSTS = 0; + private long lastAudioTime = 0; - private int lastAudioPTS = 0; private static final int audio_dt_d = 400; private long lastVideoTime = 0; - private int lastVideoPTS = 0; private static final int video_dt_d = 9; @Override - protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) { - if(0==moviePtr) { - throw new GLException("FFMPEG native instance null"); - } - if(null != lastTex) { - psm.setUnpackAlignment(gl, 1); // RGBA ? 4 : 1 - try { - final Texture tex = lastTex.getTexture(); - gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit()); - tex.enable(gl); - tex.bind(gl); - - if( USE_AUDIO_PUSHER ) { - try { - audioFramesBuffer.waitForFreeSlots(2); - } catch (InterruptedException e) { - e.printStackTrace(); // oops - } - } - - /* try decode 10 packets to find one containing video - (res == 2) */ - int res = 0; - int retry = 10; - while(res!=2 && retry >= 0) { - res = readNextPacket0(moviePtr, procAddrGLTexSubImage2D, textureTarget, textureFormat, textureType); - retry--; - } - } finally { - psm.restore(gl); - } - final int pts = getVideoPTS0(moviePtr); // this frame - if(blocking) { - // poor mans video sync .. TODO: off thread 'readNextPackage0(..)' on shared GLContext and multi textures/unit! - final long now = System.currentTimeMillis(); - // Try sync video to audio - final long now_d = now - lastAudioTime; - final long pts_d = pts - lastAudioPTS - 444; /* hack 444 == play video 444ms ahead of audio */ - final long dt = Math.min(47, (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ) ; - //final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ; - final boolean sleep = dt>video_dt_d && dt<1000 && audioSink.getQueuedByteCount()<maxAvailableAudio-10000; - final long sleepP = dt-video_dt_d; - if(DEBUG) { - final int qAT = audioSink.getQueuedTime(); - System.err.println("s: pts-v "+pts+", qAT "+qAT+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt+", sleep "+sleep+", sleepP "+sleepP+" ms"); - } - // ?? Maybe use audioSink.getQueuedTime(); - if( sleep ) { - try { - Thread.sleep(sleepP); - } catch (InterruptedException e) { } - lastVideoTime = System.currentTimeMillis(); - } else { - lastVideoTime = now; - } - } - lastVideoPTS = pts; + protected final void syncFrame2Audio(TextureFrame frame) { + /** + // poor mans video sync .. TODO: off thread 'readNextPackage0(..)' on shared GLContext and multi textures/unit! 
+ final long now = System.currentTimeMillis(); + // Try sync video to audio + final long now_d = now - lastAudioTime; + final long pts_d = vSTS - aSTS - 444; // hack 444 == play video 444ms ahead of audio + final long dt = Math.min(47, (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ) ; + //final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ; + final boolean sleep = dt>video_dt_d && dt<1000 && audioSink.getQueuedByteCount()<maxAvailableAudio-10000; + final long sleepP = dt-video_dt_d; + if(DEBUG) { + final int qAT = audioSink.getQueuedTime(); + System.err.println("s: pts-v "+vSTS+", qAT "+qAT+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt+", sleep "+sleep+", sleepP "+sleepP+" ms"); } - return lastTex; + // ?? Maybe use audioSink.getQueuedTime(); + if( sleep ) { + try { + Thread.sleep(sleepP); + } catch (InterruptedException e) { } + lastVideoTime = System.currentTimeMillis(); + } else { + lastVideoTime = now; + } + */ } - private static native int getAvUtilVersion0(); private static native int getAvFormatVersion0(); private static native int getAvCodecVersion0(); @@ -600,14 +487,22 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl { private native long createInstance0(boolean verbose); private native void destroyInstance0(long moviePtr); + /** + * Issues {@link #updateAttributes(int, int, int, int, int, float, int, int, String, String)} + * and {@link #updateAttributes2(int, int, int, int, int, int, int, int, int, int)}. + */ private native void setStream0(long moviePtr, String url, int vid, int aid, int audioFrameCount); + private native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError); private native int getVideoPTS0(long moviePtr); private native int getAudioPTS0(long moviePtr); private native Buffer getAudioBuffer0(long moviePtr, int plane); - private native int readNextPacket0(long moviePtr, long procAddrGLTexSubImage2D, int texTarget, int texFmt, int texType); + /** + * @return resulting current PTS: audio < 0, video > 0, invalid == 0 + */ + private native int readNextPacket0(long moviePtr, int texTarget, int texFmt, int texType); private native int seek0(long moviePtr, int position); @@ -720,6 +615,5 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl { return null; } } - } diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java index aef98fcde..a21bb40a8 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java +++ b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java @@ -33,7 +33,6 @@ import java.net.URL; import javax.media.opengl.GL; import javax.media.opengl.GLException; -import javax.media.opengl.GLProfile; import com.jogamp.opengl.util.texture.TextureSequence; @@ -49,17 +48,17 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl { static final boolean available; static { + available = false; + /** FIXME! 
// OMX binding is included in jogl_desktop and jogl_mobile GLProfile.initSingleton(); - available = initIDs0(); + available = initIDs0(); */ } public static final boolean isAvailable() { return available; } protected long moviePtr = 0; - protected TextureSequence.TextureFrame lastTex = null; - public OMXGLMediaPlayer() { super(TextureType.KHRImage, true); if(!available) { @@ -76,17 +75,15 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl { } @Override - protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) { - final EGLTextureFrame eglTex = (EGLTextureFrame) super.createTexImage(gl, idx, tex); - _setStreamEGLImageTexture2D(moviePtr, idx, tex[idx], eglTex.getImage(), eglTex.getSync()); - lastTex = eglTex; + protected TextureSequence.TextureFrame createTexImage(GL gl, int texName) { + final EGLTextureFrame eglTex = (EGLTextureFrame) super.createTexImage(gl, texName); + _setStreamEGLImageTexture2D(moviePtr, texName, eglTex.getImage(), eglTex.getSync()); return eglTex; } @Override - protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) { - lastTex = null; - super.destroyTexImage(gl, imgTex); + protected void destroyTexFrame(GL gl, TextureSequence.TextureFrame imgTex) { + super.destroyTexFrame(gl, imgTex); } @Override @@ -99,7 +96,7 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl { } @Override - protected void initGLStreamImpl(GL gl, int[] texNames) throws IOException { + protected void initGLStreamImpl(GL gl) throws IOException { if(0==moviePtr) { throw new GLException("OMX native instance null"); } @@ -119,6 +116,10 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl { protected int getCurrentPositionImpl() { return 0!=moviePtr ? _getCurrentPosition(moviePtr) : 0; } + @Override + protected int getAudioPTSImpl() { + return getCurrentPositionImpl(); + } @Override protected boolean setPlaySpeedImpl(float rate) { @@ -168,24 +169,23 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl { } @Override - protected TextureSequence.TextureFrame getLastTextureImpl() { - return lastTex; - } - - @Override - protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) { + protected boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking) { if(0==moviePtr) { throw new GLException("OMX native instance null"); } final int nextTex = _getNextTextureID(moviePtr, blocking); if(0 < nextTex) { - final TextureSequence.TextureFrame eglImgTex = texFrameMap.get(new Integer(_getNextTextureID(moviePtr, blocking))); + /* FIXME + final TextureSequence.TextureFrame eglImgTex = + texFrameMap.get(new Integer(_getNextTextureID(moviePtr, blocking))); if(null!=eglImgTex) { lastTex = eglImgTex; - } + } */ } - return lastTex; + return true; } + @Override + protected void syncFrame2Audio(TextureFrame frame) { } private String replaceAll(String orig, String search, String repl) { String dest=null; @@ -216,7 +216,7 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl { private native void _setStream(long moviePtr, int textureNum, String path); private native void _activateStream(long moviePtr); - private native void _setStreamEGLImageTexture2D(long moviePtr, int i, int tex, long image, long sync); + private native void _setStreamEGLImageTexture2D(long moviePtr, int tex, long image, long sync); private native int _seek(long moviePtr, int position); private native void _setPlaySpeed(long moviePtr, float rate); private native void _play(long moviePtr); diff --git a/src/jogl/native/libav/ffmpeg_tool.h 
b/src/jogl/native/libav/ffmpeg_tool.h index 5560b8617..2dff1110c 100644 --- a/src/jogl/native/libav/ffmpeg_tool.h +++ b/src/jogl/native/libav/ffmpeg_tool.h @@ -48,6 +48,11 @@ #include <stdio.h> #include <stdlib.h> +#include <GL/gl.h> + +typedef void (APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const GLvoid *pixels); +typedef GLenum (APIENTRYP PFNGLGETERRORPROC) (void); + /** * AV_TIME_BASE 1000000 */ @@ -63,6 +68,9 @@ static inline int32_t my_av_q2i32(int32_t snum, AVRational a){ typedef struct { int32_t verbose; + PFNGLTEXSUBIMAGE2DPROC procAddrGLTexSubImage2D; + PFNGLGETERRORPROC procAddrGLGetError; + AVFormatContext* pFormatCtx; int32_t vid; AVStream* pVStream; diff --git a/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c b/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c index 623bdcac7..346ba6c07 100644 --- a/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c +++ b/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c @@ -33,8 +33,6 @@ #include <libavutil/pixdesc.h> #include <GL/gl.h> -typedef void (APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const GLvoid *pixels); - static const char * const ClazzNameFFMPEGMediaPlayer = "jogamp/opengl/util/av/impl/FFMPEGMediaPlayer"; static jclass ffmpegMediaPlayerClazz = NULL; @@ -583,22 +581,29 @@ JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setStre _updateJavaAttributes(env, instance, pAV); } +JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setGLFuncs0 + (JNIEnv *env, jobject instance, jlong ptr, jlong jProcAddrGLTexSubImage2D, jlong jProcAddrGLGetError) +{ + FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr)); + pAV->procAddrGLTexSubImage2D = (PFNGLTEXSUBIMAGE2DPROC) (intptr_t)jProcAddrGLTexSubImage2D; + pAV->procAddrGLGetError = (PFNGLGETERRORPROC) (intptr_t)jProcAddrGLGetError; +} + JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNextPacket0 - (JNIEnv *env, jobject instance, jlong ptr, jlong jProcAddrGLTexSubImage2D, jint texTarget, jint texFmt, jint texType) + (JNIEnv *env, jobject instance, jlong ptr, jint texTarget, jint texFmt, jint texType) { FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr)); - PFNGLTEXSUBIMAGE2DPROC procAddrGLTexSubImage2D = (PFNGLTEXSUBIMAGE2DPROC) (intptr_t)jProcAddrGLTexSubImage2D; - jint res = 0; // 1 - audio, 2 - video AVPacket packet; int frameFinished; + jint resPTS = 0; // resulting current PTS: audio < 0, video > 0, invalid == 0 if(sp_av_read_frame(pAV->pFormatCtx, &packet)>=0) { if(packet.stream_index==pAV->aid) { // Decode audio frame if(NULL == pAV->pAFrames) { // no audio registered sp_av_free_packet(&packet); - return res; + return 0; } AVFrame* pAFrameCurrent = pAV->pAFrames[pAV->aFrameCurrent]; pAV->aFrameCurrent = ( pAV->aFrameCurrent + 1 ) % pAV->aFrameCount ; @@ -658,14 +663,13 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex jobject jSampleData = (*env)->NewDirectByteBuffer(env, pAFrameCurrent->data[0], data_size); (*env)->CallVoidMethod(env, instance, jni_mid_pushSound, jSampleData, data_size, pAV->aPTS); } - - res = 1; + resPTS = pAV->aPTS * -1; // Audio Frame! 
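
Returning pAV->aPTS * -1 here is what implements the return contract documented on readNextPacket0 above: audio packets report their PTS as a negative value, video packets as a positive one, and 0 means the packet carried no usable frame. The Java caller in this patch (getNextTextureImpl) simply keeps reading while the result is <= 0; a caller that also wanted to pick up the audio timestamp could decode the sign explicitly, roughly as sketched below (illustration only, not the behaviour of this patch).

    // Illustrative decoding of the readNextPacket0 return convention:
    //   avPTS < 0  -> audio packet, PTS is the negated value (samples already handed to pushSound)
    //   avPTS > 0  -> video packet, PTS as-is (texture data already uploaded natively)
    //   avPTS == 0 -> no usable frame in this packet, read the next one
    final int avPTS = readNextPacket0(moviePtr, textureTarget, textureFormat, textureType);
    if( avPTS < 0 ) {
        aSTS = -avPTS;
    } else if( avPTS > 0 ) {
        vSTS = avPTS;
    }
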
} } else if(packet.stream_index==pAV->vid) { // Decode video frame if(NULL == pAV->pVFrame) { sp_av_free_packet(&packet); - return res; + return 0; } int new_packet = 1; @@ -696,7 +700,6 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex continue; } - res = 2; // FIXME: Libav Binary compatibility! JAU01 const AVRational time_base = pAV->pVStream->time_base; const int64_t pts = pAV->pVFrame->pkt_pts; @@ -708,6 +711,7 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex pAV->vPTS, pAV->pVFrame->pkt_pts, time_base.num, time_base.den, (time_base.num/(double)time_base.den)); #endif } + resPTS = pAV->vPTS; // Video Frame! #if 0 printf("tex2D codec %dx%d - frame %dx%d - width %d tex / %d linesize, pixfmt 0x%X, texType 0x%x, texTarget 0x%x\n", @@ -718,24 +722,51 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex // 1st plane or complete packed frame // FIXME: Libav Binary compatibility! JAU01 - procAddrGLTexSubImage2D(texTarget, 0, + #if 0 + GLenum glerr = pAV->procAddrGLGetError(); + printf("TexSubImage2D.1 texTarget 0x%x, offset %d / %d, size %d x %d, fmt 0x%X, type 0x%X, pre-err 0x%X, ", + texTarget, 0, 0, pAV->vTexWidth[0], pAV->pVCodecCtx->height, texFmt, texType, glerr); + #endif + pAV->procAddrGLTexSubImage2D(texTarget, 0, 0, 0, pAV->vTexWidth[0], pAV->pVCodecCtx->height, texFmt, texType, pAV->pVFrame->data[0]); + #if 0 + glerr = pAV->procAddrGLGetError(); + printf("err 0x%X\n", glerr); + #endif if(pAV->vPixFmt == PIX_FMT_YUV420P) { // U plane // FIXME: Libav Binary compatibility! JAU01 - procAddrGLTexSubImage2D(texTarget, 0, + #if 0 + printf("TexSubImage2D.U texTarget 0x%x, offset %d / %d, size %d x %d, fmt 0x%X, type 0x%X, ", + texTarget, pAV->pVCodecCtx->width, 0, pAV->vTexWidth[1], pAV->pVCodecCtx->height/2, + texFmt, texType); + #endif + pAV->procAddrGLTexSubImage2D(texTarget, 0, pAV->pVCodecCtx->width, 0, pAV->vTexWidth[1], pAV->pVCodecCtx->height/2, texFmt, texType, pAV->pVFrame->data[1]); + #if 0 + glerr = pAV->procAddrGLGetError(); + printf("err 0x%X\n", glerr); + #endif // V plane // FIXME: Libav Binary compatibility! JAU01 - procAddrGLTexSubImage2D(texTarget, 0, + #if 0 + printf("TexSubImage2D.V texTarget 0x%x, offset %d / %d, size %d x %d, fmt 0x%X, type 0x%X, ", + texTarget, pAV->pVCodecCtx->width, pAV->pVCodecCtx->height/2, pAV->vTexWidth[2], pAV->pVCodecCtx->height/2, + texFmt, texType); + #endif + pAV->procAddrGLTexSubImage2D(texTarget, 0, pAV->pVCodecCtx->width, pAV->pVCodecCtx->height/2, pAV->vTexWidth[2], pAV->pVCodecCtx->height/2, texFmt, texType, pAV->pVFrame->data[2]); + #if 0 + glerr = pAV->procAddrGLGetError(); + printf("err 0x%X\n", glerr); + #endif } // FIXME: Add more planar formats ! } } @@ -745,7 +776,7 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex // TODO: check what release the packets memory. 
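
The uploads above go through function pointers cached in the native pAV struct rather than through a per-call argument: setGLFuncs0(..) stores the addresses of glTexSubImage2D and glGetError once, and readNextPacket0 reuses them for every frame (the #if 0 blocks keep optional glGetError tracing around each upload). Where the Java side now resolves those addresses is not shown in this diff; presumably it happens once a GL context is available, much like the lookup removed from the constructor earlier in this patch. A sketch of that assumed wiring, reusing the existing getAddressFor helper:

    // Assumed Java-side wiring (placement not shown in this patch):
    // resolve both GL entry points once and install them in the native instance.
    final GLContextImpl ctx = (GLContextImpl)gl.getContext();
    final ProcAddressTable pt = ctx.getGLProcAddressTable();
    final long procAddrGLTexSubImage2D = getAddressFor(pt, "glTexSubImage2D");
    final long procAddrGLGetError      = getAddressFor(pt, "glGetError");
    if( 0 == procAddrGLTexSubImage2D || 0 == procAddrGLGetError ) {
        throw new InternalError("GL proc address n/a in "+pt.getClass().getName());
    }
    setGLFuncs0(moviePtr, procAddrGLTexSubImage2D, procAddrGLGetError);
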
// sp_av_free_packet(&packet); } - return res; + return resPTS; } JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_seek0 diff --git a/src/jogl/native/openmax/omx_tool.c b/src/jogl/native/openmax/omx_tool.c index 16c43f6a8..26a3b93f1 100644 --- a/src/jogl/native/openmax/omx_tool.c +++ b/src/jogl/native/openmax/omx_tool.c @@ -1049,17 +1049,13 @@ void OMXToolBasicAV_SetStream(OMXToolBasicAV_t * pOMXAV, int vBufferNum, const K DBG_PRINT( "SetStream X\n"); } -void OMXToolBasicAV_SetStreamEGLImageTexture2D(OMXToolBasicAV_t * pOMXAV, KDint i, GLuint tex, EGLImageKHR image, EGLSyncKHR sync) +void OMXToolBasicAV_SetStreamEGLImageTexture2D(OMXToolBasicAV_t * pOMXAV, GLuint tex, EGLImageKHR image, EGLSyncKHR sync) { if(NULL==pOMXAV) { JoglCommon_throwNewRuntimeException(0, "OMX instance null\n"); return; } - DBG_PRINT( "SetStreamEGLImg %p #%d/%d t:%d i:%p s:%p..\n", pOMXAV, i, pOMXAV->vBufferNum, tex, image, sync); - if(i<0||i>=pOMXAV->vBufferNum) { - JoglCommon_throwNewRuntimeException(0, "Buffer index out of range: %d\n", i); - return; - } + DBG_PRINT( "SetStreamEGLImg %p count %d t:%d i:%p s:%p..\n", pOMXAV, pOMXAV->vBufferNum, tex, image, sync); kdThreadMutexLock(pOMXAV->mutex); { diff --git a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/TextureSequenceCubeES2.java b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/TextureSequenceCubeES2.java index 20c28c3ea..adccecba0 100644 --- a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/TextureSequenceCubeES2.java +++ b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/TextureSequenceCubeES2.java @@ -136,7 +136,7 @@ public class TextureSequenceCubeES2 implements GLEventListener { } } public void mouseWheelMoved(MouseEvent e) { - System.err.println("XXX "+e); + // System.err.println("XXX "+e); if( !e.isShiftDown() ) { final float o = zoom; final float d = e.getRotation()[1]/10f; // vertical: wheel diff --git a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieCube.java b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieCube.java index 921710fed..3f979e16f 100644 --- a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieCube.java +++ b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieCube.java @@ -61,9 +61,11 @@ import com.jogamp.opengl.util.Animator; import com.jogamp.opengl.util.av.GLMediaPlayer; import com.jogamp.opengl.util.av.GLMediaPlayer.GLMediaEventListener; import com.jogamp.opengl.util.av.GLMediaPlayerFactory; +import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame; public class MovieCube implements GLEventListener, GLMediaEventListener { static boolean waitForKey = false; + int textureCount = 3; // default - threaded final URLConnection stream; final float zoom0, rotx, roty; TextureSequenceCubeES2 cube=null; @@ -81,6 +83,10 @@ public class MovieCube implements GLEventListener, GLMediaEventListener { this.roty = roty; } + public void setTextureCount(int v) { + textureCount = v; + } + private final KeyListener keyAction = new KeyAdapter() { public void keyReleased(KeyEvent e) { if( !e.isPrintableKey() || e.isAutoRepeat() ) { @@ -130,7 +136,7 @@ public class MovieCube implements GLEventListener, GLMediaEventListener { } @Override - public void newFrameAvailable(GLMediaPlayer mp, long when) { + public void newFrameAvailable(GLMediaPlayer mp, TextureFrame newFrame, long when) { // System.out.println("newFrameAvailable: "+mp+", when "+when); } @@ -151,7 +157,7 @@ public class MovieCube implements GLEventListener, GLMediaEventListener { } try { 
System.out.println("p0 "+mPlayer); - mPlayer.initGLStream(gl, stream); + mPlayer.initGLStream(gl, textureCount, stream); System.out.println("p1 "+mPlayer); } catch (Exception e) { e.printStackTrace(); @@ -202,7 +208,7 @@ public class MovieCube implements GLEventListener, GLMediaEventListener { public static void main(String[] args) throws MalformedURLException, IOException, InterruptedException { int width = 510; int height = 300; - System.err.println("TexCubeES2.run()"); + int textureCount = 3; // default - threaded boolean forceES2 = false; boolean forceES3 = false; @@ -217,6 +223,9 @@ public class MovieCube implements GLEventListener, GLMediaEventListener { } else if(args[i].equals("-height")) { i++; height = MiscUtils.atoi(args[i], height); + } else if(args[i].equals("-textureCount")) { + i++; + textureCount = MiscUtils.atoi(args[i], textureCount); } else if(args[i].equals("-url")) { i++; url_s = args[i]; @@ -232,6 +241,7 @@ public class MovieCube implements GLEventListener, GLMediaEventListener { waitForKey = true; } } + System.err.println("textureCount "+textureCount); System.err.println("forceES2 "+forceES2); System.err.println("forceES3 "+forceES3); System.err.println("forceGL3 "+forceGL3); @@ -264,7 +274,7 @@ public class MovieCube implements GLEventListener, GLMediaEventListener { anim.stop(); } }); - // anim.setUpdateFPSFrames(60, System.err); + anim.setUpdateFPSFrames(60, System.err); anim.start(); window.setVisible(true); } diff --git a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSimple.java b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSimple.java index 7e0dcd909..90c73661a 100644 --- a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSimple.java +++ b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSimple.java @@ -66,9 +66,11 @@ import com.jogamp.opengl.util.glsl.ShaderState; import com.jogamp.opengl.util.texture.Texture; import com.jogamp.opengl.util.texture.TextureCoords; import com.jogamp.opengl.util.texture.TextureSequence; +import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame; public class MovieSimple implements GLEventListener, GLMediaEventListener { private int winWidth, winHeight; + int textureCount = 3; // default - threaded private int prevMouseX; // , prevMouseY; private int rotate = 0; private boolean orthoProjection = true; @@ -169,6 +171,9 @@ public class MovieSimple implements GLEventListener, GLMediaEventListener { public GLMediaPlayer getGLMediaPlayer() { return mPlayer; } + public void setTextureCount(int v) { + textureCount = v; + } public void setScaleOrig(boolean v) { mPlayerScaleOrig = v; } @@ -179,7 +184,7 @@ public class MovieSimple implements GLEventListener, GLMediaEventListener { } @Override - public void newFrameAvailable(GLMediaPlayer mp, long when) { + public void newFrameAvailable(GLMediaPlayer mp, TextureFrame newFrame, long when) { // System.out.println("newFrameAvailable: "+mp+", when "+when); } @@ -252,7 +257,7 @@ public class MovieSimple implements GLEventListener, GLMediaEventListener { try { System.out.println("p0 "+mPlayer+", shared "+mPlayerShared); if(!mPlayerShared) { - mPlayer.initGLStream(gl, stream); + mPlayer.initGLStream(gl, textureCount, stream); } tex = mPlayer.getLastTexture().getTexture(); System.out.println("p1 "+mPlayer+", shared "+mPlayerShared); @@ -263,7 +268,8 @@ public class MovieSimple implements GLEventListener, GLMediaEventListener { if(!mPlayerShared) { mPlayer.setTextureMinMagFilter( new int[] { GL.GL_NEAREST, GL.GL_LINEAR } ); } - } catch 
(Exception glex) { + } catch (Exception glex) { + glex.printStackTrace(); if(!mPlayerShared && null != mPlayer) { mPlayer.destroy(gl); mPlayer = null; @@ -506,6 +512,7 @@ public class MovieSimple implements GLEventListener, GLMediaEventListener { public static void main(String[] args) throws IOException, MalformedURLException { int width = 640; int height = 600; + int textureCount = 3; // default - threaded boolean ortho = true; boolean zoom = false; @@ -522,6 +529,9 @@ public class MovieSimple implements GLEventListener, GLMediaEventListener { } else if(args[i].equals("-height")) { i++; height = MiscUtils.atoi(args[i], height); + } else if(args[i].equals("-textureCount")) { + i++; + textureCount = MiscUtils.atoi(args[i], textureCount); } else if(args[i].equals("-es2")) { forceES2 = true; } else if(args[i].equals("-es3")) { @@ -539,12 +549,14 @@ public class MovieSimple implements GLEventListener, GLMediaEventListener { url_s = args[i]; } } + System.err.println("textureCount "+textureCount); System.err.println("forceES2 "+forceES2); System.err.println("forceES3 "+forceES3); System.err.println("forceGL3 "+forceGL3); System.err.println("forceGLDef "+forceGLDef); final MovieSimple ms = new MovieSimple(new URL(url_s).openConnection()); + ms.setTextureCount(textureCount); ms.setScaleOrig(!zoom); ms.setOrthoProjection(ortho); @@ -570,6 +582,7 @@ public class MovieSimple implements GLEventListener, GLMediaEventListener { window.setSize(width, height); window.setVisible(true); final Animator anim = new Animator(window); + anim.setUpdateFPSFrames(60, System.err); anim.start(); window.addWindowListener(new WindowAdapter() { public void windowDestroyed(WindowEvent e) { |