| author | Sven Gothel <[email protected]> | 2013-08-10 09:14:19 +0200 |
| --- | --- | --- |
| committer | Sven Gothel <[email protected]> | 2013-08-10 09:14:19 +0200 |
| commit | 6332e13b2f0aa9818d37802302f04c90a4fa4239 (patch) | |
| tree | b615630b4a886270721f82636a323ec36dac341c | /src/jogl/classes/jogamp/opengl/android |
| parent | 590d78dc2ff24ce80976a30e35a99c06ef6750b0 (diff) | |
GLMediaPlayer: Add multithreaded decoding with textureCount > 2 where available (EGL/FFmpeg). WIP!
Off-thread decoding:
If the validated (implementation-specific) textureCount is > 2, decoding happens on an extra thread.
If decoding requires a GL context, a shared context is created for the decoding thread (see the sketch below).
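A minimal sketch of the shared-context idea for the decoder thread, using the JOGL API of this era (javax.media.opengl); the class name, `startDecoder`, and the thread plumbing are illustrative, not part of this commit:

```java
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.GLContext;

/** Illustrative only: run decoding on its own thread with a GL context shared with the renderer. */
public class SharedCtxDecoderSketch {

    public static Thread startDecoder(final GLAutoDrawable drawable) {
        // createContext(shareWith) yields a context whose texture objects are visible
        // to the render thread's context, so decoded frames can be uploaded off-thread.
        final GLContext decodeCtx = drawable.createContext(drawable.getContext());

        final Thread t = new Thread(new Runnable() {
            public void run() {
                if( GLContext.CONTEXT_NOT_CURRENT == decodeCtx.makeCurrent() ) {
                    throw new RuntimeException("Could not make shared decode context current");
                }
                try {
                    // ... decode frames and upload them into the shared TextureFrame set ...
                } finally {
                    decodeCtx.release();
                    decodeCtx.destroy();
                }
            }
        }, "GLMediaPlayer-Decoder");
        t.start();
        return t;
    }
}
```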
API Changes:
- initGLStream(..): adds 'textureCount' as argument.
- TextureSequence.TexSeqEventListener.newFrameAvailable(..): now exposes the newly available TextureFrame.
- TextureSequence.TextureFrame: exposes the video PTS.
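A hedged client-side sketch of the API changes above. The exact initGLStream(..) parameter order and the exact newFrameAvailable(..) signature are assumptions read off this commit message, not verified against the final API:

```java
import java.net.URLConnection;

import javax.media.opengl.GL;
import javax.media.opengl.GLAutoDrawable;

import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.texture.TextureSequence;
import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;

public class PlayerClientSketch {

    public static void setup(final GLAutoDrawable drawable, final GL gl,
                             final GLMediaPlayer player, final URLConnection urlConn) throws Exception {
        // Assumed parameter order: a textureCount > 2 requests multithreaded (off-thread)
        // decoding where the implementation supports it, otherwise it falls back to 2.
        player.initGLStream(gl, 4 /* textureCount */, urlConn);

        // Assumed callback shape: the newly decoded frame is now passed to the listener.
        final TextureSequence.TexSeqEventListener<GLMediaPlayer> listener =
                new TextureSequence.TexSeqEventListener<GLMediaPlayer>() {
                    public void newFrameAvailable(GLMediaPlayer mp, TextureFrame newFrame, long when) {
                        // Likely invoked from the decoder thread; only schedule a repaint here.
                        drawable.display();
                    }
                };
        // How 'listener' is registered is omitted; the listener-management API is not
        // spelled out in this commit message.

        player.start();
    }
}
```

The listener body deliberately does no GL work: with off-thread decoding the callback may fire outside the render thread, so it should only trigger a repaint.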
Implementation:
- 'int validateTextureCount(int)': the implementation decides whether textureCount may be > 2, i.e. whether off-thread decoding is allowed;
  the default is NO, with textureCount == 2.
- 'boolean requiresOffthreadGLCtx()': the implementation decides whether a shared GL context is required for off-thread decoding.
- 'void syncFrame2Audio(TextureFrame frame)': the implementation shall handle a/v sync, since it knows the audio stream details (PTS, buffered frames).
- FFMPEGMediaPlayer now extends GLMediaPlayerImpl directly; EGLMediaPlayerImpl is no longer needed (redundant).
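A minimal sketch of how a GLMediaPlayerImpl subclass might fill in these hooks. Only the hook names come from this commit; the class, the base-class package import, and the method bodies are illustrative assumptions (including TextureFrame.getPTS(), assumed from "TextureFrame exposes the PTS" above):

```java
import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;

import jogamp.opengl.util.av.GLMediaPlayerImpl;

/** Illustrative and abstract on purpose: only the new hooks are shown. */
public abstract class SketchMediaPlayer extends GLMediaPlayerImpl {

    @Override
    protected int validateTextureCount(int desiredTextureCount) {
        // Opt into off-thread decoding: accept any request > 2, otherwise fall back to 2.
        return desiredTextureCount > 2 ? desiredTextureCount : 2;
    }

    @Override
    protected boolean requiresOffthreadGLCtx() {
        // This hypothetical decoder uploads textures itself, so it needs a shared GL context.
        return true;
    }

    @Override
    protected void syncFrame2Audio(TextureFrame frame) {
        // Illustrative a/v sync: wait until the audio PTS has caught up with the video PTS.
        final int delay = frame.getPTS() - getAudioPTSImpl(); // getPTS() assumed, see above
        if( delay > 0 ) {
            try {
                Thread.sleep(delay);
            } catch (InterruptedException e) { /* ignore for the sketch */ }
        }
    }
}
```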
+++
- SyncedRingbuffer: expose the backing T[] array (see the sketch after this block).
+++
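For illustration, a tiny synchronized ring buffer exposing its backing array, mirroring the idea behind the SyncedRingbuffer change; this is not the actual SyncedRingbuffer API, just a self-contained stand-in:

```java
/** Illustrative stand-in: a minimal synchronized ring buffer that exposes its backing array. */
public class TinySyncedRingbuffer<T> {
    private final T[] array;
    private int readPos = 0;
    private int size = 0;

    public TinySyncedRingbuffer(T[] backing) {
        this.array = backing;
    }

    /** Exposes the backing array, e.g. to visit all pre-allocated frames at once. */
    public final T[] getArray() { return array; }

    public final synchronized boolean put(T e) {
        if( size == array.length ) {
            return false; // full
        }
        array[ (readPos + size) % array.length ] = e;
        size++;
        return true;
    }

    public final synchronized T get() {
        if( size == 0 ) {
            return null; // empty
        }
        final T e = array[readPos];
        readPos = (readPos + 1) % array.length;
        size--;
        return e;
    }
}
```

Exposing the array lets a caller walk over every pre-allocated slot (for example, to release per-frame GL resources) without draining the buffer.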
TODO:
- syncAV (proper a/v synchronization)
- test on Android
Diffstat (limited to 'src/jogl/classes/jogamp/opengl/android')
-rw-r--r-- | src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java | 139 |
1 file changed, 82 insertions(+), 57 deletions(-)
```diff
diff --git a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
index 8356a2bae..765cda084 100644
--- a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
+++ b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
@@ -28,12 +28,14 @@
 package jogamp.opengl.android.av;
 
 import java.io.IOException;
+import java.nio.Buffer;
 
 import javax.media.opengl.GL;
 import javax.media.opengl.GLES2;
 
 import com.jogamp.common.os.AndroidVersion;
 import com.jogamp.common.os.Platform;
+import com.jogamp.opengl.util.texture.Texture;
 import com.jogamp.opengl.util.texture.TextureSequence;
 
 import jogamp.common.os.android.StaticContext;
@@ -76,7 +78,6 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
     MediaPlayer mp;
     volatile boolean updateSurface = false;
     Object updateSurfaceLock = new Object();
-    TextureSequence.TextureFrame lastTexFrame = null;
 
     /**
     private static String toString(MediaPlayer m) {
@@ -90,17 +91,16 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
             throw new RuntimeException("AndroidGLMediaPlayerAPI14 not available");
         }
         this.setTextureTarget(GLES2.GL_TEXTURE_EXTERNAL_OES);
-        this.setTextureCount(1);
         mp = new MediaPlayer();
     }
 
     @Override
-    protected boolean setPlaySpeedImpl(float rate) {
+    protected final boolean setPlaySpeedImpl(float rate) {
         return false;
     }
 
     @Override
-    protected boolean startImpl() {
+    protected final boolean startImpl() {
         if(null != mp) {
             try {
                 mp.start();
@@ -115,7 +115,7 @@
     }
 
     @Override
-    protected boolean pauseImpl() {
+    protected final boolean pauseImpl() {
         if(null != mp) {
             wakeUp(false);
             try {
@@ -131,7 +131,7 @@
     }
 
     @Override
-    protected boolean stopImpl() {
+    protected final boolean stopImpl() {
         if(null != mp) {
             wakeUp(false);
             try {
@@ -147,7 +147,7 @@
     }
 
     @Override
-    protected int seekImpl(int msec) {
+    protected final int seekImpl(int msec) {
         if(null != mp) {
             mp.seekTo(msec);
             return mp.getCurrentPosition();
@@ -155,40 +155,6 @@
         return 0;
     }
 
-    @Override
-    protected TextureSequence.TextureFrame getLastTextureImpl() {
-        return lastTexFrame;
-    }
-
-    @Override
-    protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) {
-        if(null != stex && null != mp) {
-            // Only block once, no while-loop.
-            // This relaxes locking code of non crucial resources/events.
-            boolean update = updateSurface;
-            if(!update && blocking) {
-                synchronized(updateSurfaceLock) {
-                    if(!updateSurface) { // volatile OK.
-                        try {
-                            updateSurfaceLock.wait();
-                        } catch (InterruptedException e) {
-                            e.printStackTrace();
-                        }
-                    }
-                    updateSurface = false;
-                    update = true;
-                }
-            }
-            if(update) {
-                stex.updateTexImage();
-                // stex.getTransformMatrix(atex.getSTMatrix());
-                lastTexFrame=texFrames[0];
-            }
-
-        }
-        return lastTexFrame;
-    }
-
     private void wakeUp(boolean newFrame) {
         synchronized(updateSurfaceLock) {
             if(newFrame) {
@@ -199,12 +165,13 @@
     }
 
     @Override
-    protected int getCurrentPositionImpl() {
-        return null != mp ? mp.getCurrentPosition() : 0;
-    }
+    protected final int getCurrentPositionImpl() { return null != mp ? mp.getCurrentPosition() : 0; }
+
+    @Override
+    protected final int getAudioPTSImpl() { return getCurrentPositionImpl(); }
 
     @Override
-    protected void destroyImpl(GL gl) {
+    protected final void destroyImpl(GL gl) {
         if(null != mp) {
             wakeUp(false);
             mp.release();
@@ -213,9 +180,25 @@
     }
 
     SurfaceTexture stex = null;
+    public static class SurfaceTextureFrame extends TextureSequence.TextureFrame {
+        public SurfaceTextureFrame(Texture t, SurfaceTexture stex) {
+            super(t);
+            this.surfaceTex = stex;
+            this.surface = new Surface(stex);
+        }
+
+        public final SurfaceTexture getSurfaceTexture() { return surfaceTex; }
+        public final Surface getSurface() { return surface; }
+
+        public String toString() {
+            return "SurfaceTextureFrame[" + pts + "ms: " + texture + ", " + surfaceTex + "]";
+        }
+        private final SurfaceTexture surfaceTex;
+        private final Surface surface;
+    }
 
     @Override
-    protected void initGLStreamImpl(GL gl, int[] texNames) throws IOException {
+    protected final void initGLStreamImpl(GL gl) throws IOException {
         if(null!=mp && null!=urlConn) {
             try {
                 final Uri uri = Uri.parse(urlConn.getURL().toExternalForm());
@@ -227,44 +210,86 @@
             } catch (IllegalStateException e) {
                 throw new RuntimeException(e);
             }
-            stex = new SurfaceTexture(texNames[0]); // only 1 texture
-            stex.setOnFrameAvailableListener(onFrameAvailableListener);
+            if( null == stex ) {
+                throw new InternalError("XXX");
+            }
             final Surface surf = new Surface(stex);
             mp.setSurface(surf);
             surf.release();
+            mp.setSurface(null);
             try {
                 mp.prepare();
             } catch (IOException ioe) {
                 throw new IOException("MediaPlayer failed to process stream <"+urlConn.getURL().toExternalForm()+">: "+ioe.getMessage(), ioe);
             }
+            final String icodec = "android";
             updateAttributes(mp.getVideoWidth(), mp.getVideoHeight(),
                              0, 0, 0, 0f, 0, mp.getDuration(),
-                             null, null);
+                             icodec, icodec);
+        }
+    }
+
+    @Override
+    protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking) {
+        if(null != stex && null != mp) {
+            final SurfaceTextureFrame nextSFrame = (SurfaceTextureFrame) nextFrame;
+            final Surface nextSurface = nextSFrame.getSurface();
+            mp.setSurface(nextSurface);
+            nextSurface.release();
+
+            // Only block once, no while-loop.
+            // This relaxes locking code of non crucial resources/events.
+            boolean update = updateSurface;
+            if(!update && blocking) {
+                synchronized(updateSurfaceLock) {
+                    if(!updateSurface) { // volatile OK.
+                        try {
+                            updateSurfaceLock.wait();
+                        } catch (InterruptedException e) {
+                            e.printStackTrace();
+                        }
+                    }
+                    update = updateSurface;
+                    updateSurface = false;
+                }
+            }
+            if(update) {
+                final SurfaceTexture nextSTex = nextSFrame.getSurfaceTexture();
+                nextSTex.updateTexImage();
+                // nextFrame.setPTS( (int) ( nextSTex.getTimestamp() / 1000000L ) ); // nano -9 -> milli -3
+                nextFrame.setPTS( mp.getCurrentPosition() );
+                // stex.getTransformMatrix(atex.getSTMatrix());
+            }
         }
+        return true;
     }
 
+    @Override
+    protected final void syncFrame2Audio(TextureFrame frame) {}
+
     @Override
-    protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) {
-        lastTexFrame = new TextureSequence.TextureFrame( createTexImageImpl(gl, idx, tex, width, height, true) );
-        return lastTexFrame;
+    protected final TextureSequence.TextureFrame createTexImage(GL gl, int texName) {
+        if( null != stex ) {
+            throw new InternalError("XXX");
+        }
+        stex = new SurfaceTexture(texName); // only 1 texture
+        stex.setOnFrameAvailableListener(onFrameAvailableListener);
+        return new TextureSequence.TextureFrame( createTexImageImpl(gl, texName, width, height, true) );
    }
 
     @Override
-    protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) {
+    protected final void destroyTexFrame(GL gl, TextureSequence.TextureFrame imgTex) {
         if(null != stex) {
             stex.release();
             stex = null;
         }
-        lastTexFrame = null;
-        super.destroyTexImage(gl, imgTex);
+        super.destroyTexFrame(gl, imgTex);
     }
 
     protected OnFrameAvailableListener onFrameAvailableListener = new OnFrameAvailableListener() {
         @Override
         public void onFrameAvailable(SurfaceTexture surfaceTexture) {
             wakeUp(true);
-            AndroidGLMediaPlayerAPI14.this.newFrameAvailable();
         }
-    };
+    };
 }
```
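The getNextTextureImpl(..) above keeps the "only block once, no while-loop" wait: the consumer waits at most once for the onFrameAvailable signal instead of looping until the flag is set, accepting a possible spurious wakeup in exchange for less lock traffic. A distilled, self-contained version of that pattern (names are illustrative):

```java
/** Illustrative distillation of the wake-up handshake between onFrameAvailable(..) and getNextTextureImpl(..). */
public class FrameSignal {
    private final Object lock = new Object();
    private volatile boolean frameAvailable = false;

    /** Producer side, e.g. called from OnFrameAvailableListener.onFrameAvailable(..). */
    public void signalNewFrame() {
        synchronized (lock) {
            frameAvailable = true;
            lock.notifyAll();
        }
    }

    /** Consumer side: waits at most once; a spurious wakeup simply reports 'no new frame'. */
    public boolean awaitNewFrameOnce(final boolean blocking) {
        boolean update = frameAvailable; // volatile read outside the lock, as in the patch
        if( !update && blocking ) {
            synchronized (lock) {
                if( !frameAvailable ) {
                    try {
                        lock.wait(); // woken by signalNewFrame() or spuriously
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                }
                update = frameAvailable;  // re-read after waking
                frameAvailable = false;   // consume the signal
            }
        }
        return update;
    }
}
```

Note that, as in the patch, the flag is only consumed inside the blocking path; a flag that was already set when polling non-blocking stays set for the next call.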