author     Sven Gothel <[email protected]>    2013-08-10 09:14:19 +0200
committer  Sven Gothel <[email protected]>    2013-08-10 09:14:19 +0200
commit     6332e13b2f0aa9818d37802302f04c90a4fa4239 (patch)
tree       b615630b4a886270721f82636a323ec36dac341c /src/jogl/classes/jogamp/opengl
parent     590d78dc2ff24ce80976a30e35a99c06ef6750b0 (diff)
GLMediaPlayer: Add multithreaded decoding w/ textureCount > 2 where available (EGL/FFMPeg). WIP!
Off-thread decoding: If validated (impl) textureCount > 2, decoding happens on extra thread.
If decoding requires GL context, a shared context is created for decoding thread.

API Changes:
  - initGLStream(..): Adds 'textureCount' as argument.
  - TextureSequence.TexSeqEventListener.newFrameAvailable(..) exposes the new frame available
  - TextureSequence.TextureFrame exposes the PTS (video)

Implementation:
  - 'int validateTextureCount(int)': implementation decides whether textureCount can be > 2,
    i.e. off-thread decoding allowed, default is NO w/ textureCount==2!
  - 'boolean requiresOffthreadGLCtx()': implementation decides whether shared context is
    required for off-thread decoding
  - 'syncFrame2Audio(TextureFrame frame)': implementation shall handle a/v sync, due to
    audio stream details (pts, buffered frames)
  - FFMPEGMediaPlayer extends GLMediaPlayerImpl, no more EGLMediaPlayerImpl (redundant)

+++

  - SyncedRingbuffer: Expose T[] array

+++

TODO:
  - syncAV!
  - test Android
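Usage sketch (not part of the commit): the snippet below illustrates how a client would drive the changed API.
Only initGLStream(gl, textureCount, urlConn), getNextTexture(..), TextureFrame.getTexture() and
TextureFrame.getPTS() are taken from the diff below; GLMediaPlayerFactory.createDefault() and the surrounding
wiring are assumptions for illustration.

    import java.net.URLConnection;

    import javax.media.opengl.GL;

    import com.jogamp.opengl.util.av.GLMediaPlayer;
    import com.jogamp.opengl.util.av.GLMediaPlayerFactory;
    import com.jogamp.opengl.util.texture.Texture;
    import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;

    public class TextureCountUsageSketch {
        /** Open a stream requesting 4 textures; implementations validating only 2 fall back to in-thread decoding. */
        public static GLMediaPlayer open(GL gl, URLConnection urlConn) throws Exception {
            final GLMediaPlayer mp = GLMediaPlayerFactory.createDefault(); // assumed factory entry point
            mp.initGLStream(gl, 4 /* textureCount */, urlConn);            // new 'textureCount' argument
            mp.start();
            return mp;
        }

        /** Per frame on the render thread: fetch the next decoded frame and bind its texture. */
        public static void draw(GL gl, GLMediaPlayer mp) {
            final TextureFrame frame = mp.getNextTexture(gl, true /* blocking */);
            final Texture tex = frame.getTexture();
            tex.enable(gl);
            tex.bind(gl);
            // frame.getPTS() exposes the video presentation timestamp (ms) of this frame.
        }
    }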
Diffstat (limited to 'src/jogl/classes/jogamp/opengl')
-rw-r--r--  src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java        | 139
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java                   |  20
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java                    | 501
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java                    |  74
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java                     |   2
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java  |  16
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java               | 336
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java                |  44
8 files changed, 666 insertions, 466 deletions
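Background note (illustration only): with textureCount > 2, GLMediaPlayerImpl hands TextureFrames between the
render thread and the FramePusher decoder thread through two ring buffers, 'videoFramesFree' and
'videoFramesDecoded'. The sketch below shows that hand-off pattern with java.util.concurrent queues standing in
for the commit's SyncedRingbuffer; it is a simplified model of the pattern, not the actual implementation.

    import java.util.concurrent.ArrayBlockingQueue;
    import java.util.concurrent.BlockingQueue;

    public class FrameHandOffSketch {
        static final class Frame { int pts; }

        private final BlockingQueue<Frame> framesFree;     // stands in for videoFramesFree
        private final BlockingQueue<Frame> framesDecoded;  // stands in for videoFramesDecoded

        FrameHandOffSketch(int textureCount) {
            framesFree = new ArrayBlockingQueue<Frame>(textureCount);
            framesDecoded = new ArrayBlockingQueue<Frame>(textureCount);
            for (int i = 0; i < textureCount; i++) {
                framesFree.add(new Frame()); // all frames start out free, as after initGLStream(..)
            }
        }

        /** Decoder thread ("FramePusher"): take a free frame, decode into it, publish it. */
        void decodeLoop() throws InterruptedException {
            while (!Thread.interrupted()) {
                final Frame f = framesFree.take();
                f.pts = decodeNextPacketInto(f); // stands in for getNextTextureImpl(gl, frame, true)
                framesDecoded.put(f);
            }
        }

        /** Render thread: consume the next decoded frame, recycle the previously displayed one. */
        Frame nextFrame(Frame lastFrame) throws InterruptedException {
            final Frame next = framesDecoded.take();
            if (lastFrame != null) {
                framesFree.put(lastFrame); // previous frame becomes free for decoding again
            }
            return next;
        }

        private int decodeNextPacketInto(Frame f) { return 0; } // placeholder for the real decode step
    }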
diff --git a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
index 8356a2bae..765cda084 100644
--- a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
+++ b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
@@ -28,12 +28,14 @@
package jogamp.opengl.android.av;
import java.io.IOException;
+import java.nio.Buffer;
import javax.media.opengl.GL;
import javax.media.opengl.GLES2;
import com.jogamp.common.os.AndroidVersion;
import com.jogamp.common.os.Platform;
+import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureSequence;
import jogamp.common.os.android.StaticContext;
@@ -76,7 +78,6 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
MediaPlayer mp;
volatile boolean updateSurface = false;
Object updateSurfaceLock = new Object();
- TextureSequence.TextureFrame lastTexFrame = null;
/**
private static String toString(MediaPlayer m) {
@@ -90,17 +91,16 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
throw new RuntimeException("AndroidGLMediaPlayerAPI14 not available");
}
this.setTextureTarget(GLES2.GL_TEXTURE_EXTERNAL_OES);
- this.setTextureCount(1);
mp = new MediaPlayer();
}
@Override
- protected boolean setPlaySpeedImpl(float rate) {
+ protected final boolean setPlaySpeedImpl(float rate) {
return false;
}
@Override
- protected boolean startImpl() {
+ protected final boolean startImpl() {
if(null != mp) {
try {
mp.start();
@@ -115,7 +115,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
}
@Override
- protected boolean pauseImpl() {
+ protected final boolean pauseImpl() {
if(null != mp) {
wakeUp(false);
try {
@@ -131,7 +131,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
}
@Override
- protected boolean stopImpl() {
+ protected final boolean stopImpl() {
if(null != mp) {
wakeUp(false);
try {
@@ -147,7 +147,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
}
@Override
- protected int seekImpl(int msec) {
+ protected final int seekImpl(int msec) {
if(null != mp) {
mp.seekTo(msec);
return mp.getCurrentPosition();
@@ -155,40 +155,6 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
return 0;
}
- @Override
- protected TextureSequence.TextureFrame getLastTextureImpl() {
- return lastTexFrame;
- }
-
- @Override
- protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) {
- if(null != stex && null != mp) {
- // Only block once, no while-loop.
- // This relaxes locking code of non crucial resources/events.
- boolean update = updateSurface;
- if(!update && blocking) {
- synchronized(updateSurfaceLock) {
- if(!updateSurface) { // volatile OK.
- try {
- updateSurfaceLock.wait();
- } catch (InterruptedException e) {
- e.printStackTrace();
- }
- }
- updateSurface = false;
- update = true;
- }
- }
- if(update) {
- stex.updateTexImage();
- // stex.getTransformMatrix(atex.getSTMatrix());
- lastTexFrame=texFrames[0];
- }
-
- }
- return lastTexFrame;
- }
-
private void wakeUp(boolean newFrame) {
synchronized(updateSurfaceLock) {
if(newFrame) {
@@ -199,12 +165,13 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
}
@Override
- protected int getCurrentPositionImpl() {
- return null != mp ? mp.getCurrentPosition() : 0;
- }
+ protected final int getCurrentPositionImpl() { return null != mp ? mp.getCurrentPosition() : 0; }
+
+ @Override
+ protected final int getAudioPTSImpl() { return getCurrentPositionImpl(); }
@Override
- protected void destroyImpl(GL gl) {
+ protected final void destroyImpl(GL gl) {
if(null != mp) {
wakeUp(false);
mp.release();
@@ -213,9 +180,25 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
}
SurfaceTexture stex = null;
+ public static class SurfaceTextureFrame extends TextureSequence.TextureFrame {
+ public SurfaceTextureFrame(Texture t, SurfaceTexture stex) {
+ super(t);
+ this.surfaceTex = stex;
+ this.surface = new Surface(stex);
+ }
+
+ public final SurfaceTexture getSurfaceTexture() { return surfaceTex; }
+ public final Surface getSurface() { return surface; }
+
+ public String toString() {
+ return "SurfaceTextureFrame[" + pts + "ms: " + texture + ", " + surfaceTex + "]";
+ }
+ private final SurfaceTexture surfaceTex;
+ private final Surface surface;
+ }
@Override
- protected void initGLStreamImpl(GL gl, int[] texNames) throws IOException {
+ protected final void initGLStreamImpl(GL gl) throws IOException {
if(null!=mp && null!=urlConn) {
try {
final Uri uri = Uri.parse(urlConn.getURL().toExternalForm());
@@ -227,44 +210,86 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
} catch (IllegalStateException e) {
throw new RuntimeException(e);
}
- stex = new SurfaceTexture(texNames[0]); // only 1 texture
- stex.setOnFrameAvailableListener(onFrameAvailableListener);
+ if( null == stex ) {
+ throw new InternalError("XXX");
+ }
final Surface surf = new Surface(stex);
mp.setSurface(surf);
surf.release();
+ mp.setSurface(null);
try {
mp.prepare();
} catch (IOException ioe) {
throw new IOException("MediaPlayer failed to process stream <"+urlConn.getURL().toExternalForm()+">: "+ioe.getMessage(), ioe);
}
+ final String icodec = "android";
updateAttributes(mp.getVideoWidth(), mp.getVideoHeight(),
0, 0, 0,
0f, 0, mp.getDuration(),
- null, null);
+ icodec, icodec);
+ }
+ }
+
+ @Override
+ protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking) {
+ if(null != stex && null != mp) {
+ final SurfaceTextureFrame nextSFrame = (SurfaceTextureFrame) nextFrame;
+ final Surface nextSurface = nextSFrame.getSurface();
+ mp.setSurface(nextSurface);
+ nextSurface.release();
+
+ // Only block once, no while-loop.
+ // This relaxes locking code of non crucial resources/events.
+ boolean update = updateSurface;
+ if(!update && blocking) {
+ synchronized(updateSurfaceLock) {
+ if(!updateSurface) { // volatile OK.
+ try {
+ updateSurfaceLock.wait();
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ update = updateSurface;
+ updateSurface = false;
+ }
+ }
+ if(update) {
+ final SurfaceTexture nextSTex = nextSFrame.getSurfaceTexture();
+ nextSTex.updateTexImage();
+ // nextFrame.setPTS( (int) ( nextSTex.getTimestamp() / 1000000L ) ); // nano -9 -> milli -3
+ nextFrame.setPTS( mp.getCurrentPosition() );
+ // stex.getTransformMatrix(atex.getSTMatrix());
+ }
}
+ return true;
}
+ @Override
+ protected final void syncFrame2Audio(TextureFrame frame) {}
@Override
- protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) {
- lastTexFrame = new TextureSequence.TextureFrame( createTexImageImpl(gl, idx, tex, width, height, true) );
- return lastTexFrame;
+ protected final TextureSequence.TextureFrame createTexImage(GL gl, int texName) {
+ if( null != stex ) {
+ throw new InternalError("XXX");
+ }
+ stex = new SurfaceTexture(texName); // only 1 texture
+ stex.setOnFrameAvailableListener(onFrameAvailableListener);
+ return new TextureSequence.TextureFrame( createTexImageImpl(gl, texName, width, height, true) );
}
@Override
- protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) {
+ protected final void destroyTexFrame(GL gl, TextureSequence.TextureFrame imgTex) {
if(null != stex) {
stex.release();
stex = null;
}
- lastTexFrame = null;
- super.destroyTexImage(gl, imgTex);
+ super.destroyTexFrame(gl, imgTex);
}
protected OnFrameAvailableListener onFrameAvailableListener = new OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
wakeUp(true);
- AndroidGLMediaPlayerAPI14.this.newFrameAvailable();
}
- };
+ };
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
index 274ccffd5..57d5ff625 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/EGLMediaPlayerImpl.java
@@ -77,19 +77,19 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
}
- protected EGLMediaPlayerImpl() {
- this(TextureType.GL, false);
- }
-
protected EGLMediaPlayerImpl(TextureType texType, boolean useKHRSync) {
super();
this.texType = texType;
this.useKHRSync = useKHRSync;
}
+ @Override
+ protected final int validateTextureCount(int desiredTextureCount) {
+ return desiredTextureCount>1 ? desiredTextureCount : 2;
+ }
@Override
- protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) {
- final Texture texture = super.createTexImageImpl(gl, idx, tex, width, height, false);
+ protected TextureSequence.TextureFrame createTexImage(GL gl, int texName) {
+ final Texture texture = super.createTexImageImpl(gl, texName, width, height, false);
final Buffer clientBuffer;
final long image;
final long sync;
@@ -117,7 +117,7 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
EGLExt.EGL_GL_TEXTURE_2D_KHR,
clientBuffer, nioTmp);
if (0==image) {
- throw new RuntimeException("EGLImage creation failed: "+EGL.eglGetError()+", ctx "+eglCtx+", tex "+tex[idx]+", err "+toHexString(EGL.eglGetError()));
+ throw new RuntimeException("EGLImage creation failed: "+EGL.eglGetError()+", ctx "+eglCtx+", tex "+texName+", err "+toHexString(EGL.eglGetError()));
}
} else {
clientBuffer = null;
@@ -141,7 +141,7 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
}
@Override
- protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) {
+ protected void destroyTexFrame(GL gl, TextureSequence.TextureFrame frame) {
final boolean eglUsage = TextureType.KHRImage == texType || useKHRSync ;
final EGLContext eglCtx;
final EGLExt eglExt;
@@ -156,7 +156,7 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
eglExt = null;
eglDrawable = null;
}
- final EGLTextureFrame eglTex = (EGLTextureFrame) imgTex;
+ final EGLTextureFrame eglTex = (EGLTextureFrame) frame;
if(0!=eglTex.getImage()) {
eglExt.eglDestroyImageKHR(eglDrawable.getNativeSurface().getDisplayHandle(), eglTex.getImage());
@@ -164,6 +164,6 @@ public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
if(0!=eglTex.getSync()) {
eglExt.eglDestroySyncKHR(eglDrawable.getNativeSurface().getDisplayHandle(), eglTex.getSync());
}
- super.destroyTexImage(gl, imgTex);
+ super.destroyTexFrame(gl, frame);
}
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
index 2ff91a3f6..bc297dc21 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
@@ -30,13 +30,17 @@ package jogamp.opengl.util.av;
import java.io.IOException;
import java.net.URLConnection;
import java.util.ArrayList;
-import java.util.HashMap;
import java.util.Iterator;
+import javax.media.nativewindow.AbstractGraphicsDevice;
import javax.media.opengl.GL;
import javax.media.opengl.GL2;
+import javax.media.opengl.GLContext;
+import javax.media.opengl.GLDrawable;
+import javax.media.opengl.GLDrawableFactory;
import javax.media.opengl.GLES2;
import javax.media.opengl.GLException;
+import javax.media.opengl.GLProfile;
import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.texture.Texture;
@@ -45,7 +49,7 @@ import com.jogamp.opengl.util.texture.TextureSequence;
/**
* After object creation an implementation may customize the behavior:
* <ul>
- * <li>{@link #setTextureCount(int)}</li>
+ * <li>{@link #setDesTextureCount(int)}</li>
* <li>{@link #setTextureTarget(int)}</li>
* <li>{@link EGLMediaPlayerImpl#setEGLTexImageAttribs(boolean, boolean)}.</li>
* </ul>
@@ -59,6 +63,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected static final String unknown = "unknown";
protected State state;
+
protected int textureCount;
protected int textureTarget;
protected int textureFormat;
@@ -74,35 +79,38 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected volatile float playSpeed = 1.0f;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */
protected int width = 0;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */
protected int height = 0;
- /** Video fps. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Video fps. Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */
protected float fps = 0;
- /** Stream bps. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Stream bps. Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */
protected int bps_stream = 0;
- /** Video bps. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Video bps. Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */
protected int bps_video = 0;
- /** Audio bps. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Audio bps. Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */
protected int bps_audio = 0;
- /** In frames. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** In frames. Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */
protected int totalFrames = 0;
- /** In ms. Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** In ms. Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */
protected int duration = 0;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */
protected String acodec = unknown;
- /** Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
+ /** Shall be set by the {@link #initGLStreamImpl(GL)} method implementation. */
protected String vcodec = unknown;
protected int frameNumber = 0;
+ protected int currentVideoPTS = 0;
- protected TextureSequence.TextureFrame[] texFrames = null;
- protected HashMap<Integer, TextureSequence.TextureFrame> texFrameMap = new HashMap<Integer, TextureSequence.TextureFrame>();
+ protected SyncedRingbuffer<TextureFrame> videoFramesFree = null;
+ protected SyncedRingbuffer<TextureFrame> videoFramesDecoded = null;
+ protected volatile TextureFrame lastFrame = null;
+
private ArrayList<GLMediaEventListener> eventListeners = new ArrayList<GLMediaEventListener>();
protected GLMediaPlayerImpl() {
- this.textureCount=3;
+ this.textureCount=0;
this.textureTarget=GL.GL_TEXTURE_2D;
this.textureFormat = GL.GL_RGBA;
this.textureInternalFormat = GL.GL_RGBA;
@@ -112,14 +120,14 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
@Override
- public void setTextureUnit(int u) { texUnit = u; }
+ public final void setTextureUnit(int u) { texUnit = u; }
@Override
- public int getTextureUnit() { return texUnit; }
+ public final int getTextureUnit() { return texUnit; }
+
+ @Override
+ public final int getTextureTarget() { return textureTarget; }
- protected final void setTextureCount(int textureCount) {
- this.textureCount=textureCount;
- }
@Override
public final int getTextureCount() { return textureCount; }
@@ -134,29 +142,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
public final int[] getTextureMinMagFilter() { return texMinMagFilter; }
public final void setTextureWrapST(int[] wrapST) { texWrapST[0] = wrapST[0]; texWrapST[1] = wrapST[1];}
- public final int[] getTextureWrapST() { return texWrapST; }
-
- @Override
- public final TextureSequence.TextureFrame getLastTexture() throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
- return getLastTextureImpl();
- }
- protected abstract TextureSequence.TextureFrame getLastTextureImpl();
-
- @Override
- public final synchronized TextureSequence.TextureFrame getNextTexture(GL gl, boolean blocking) throws IllegalStateException {
- if(State.Uninitialized == state) {
- throw new IllegalStateException("Instance not initialized: "+this);
- }
- if(State.Playing == state) {
- final TextureSequence.TextureFrame f = getNextTextureImpl(gl, blocking);
- return f;
- }
- return getLastTextureImpl();
- }
- protected abstract TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking);
+ public final int[] getTextureWrapST() { return texWrapST; }
@Override
public String getRequiredExtensionsShaderStub() throws IllegalStateException {
@@ -229,12 +215,15 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected abstract boolean setPlaySpeedImpl(float rate);
public final State start() {
- switch(state) {
+ switch( state ) {
case Stopped:
+ /** fall-through intended */
case Paused:
- if(startImpl()) {
+ if( startImpl() ) {
+ resumeFramePusher();
state = State.Playing;
}
+ default:
}
if(DEBUG) { System.err.println("Start: "+toString()); }
return state;
@@ -242,7 +231,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected abstract boolean startImpl();
public final State pause() {
- if(State.Playing == state && pauseImpl()) {
+ if( State.Playing == state && pauseImpl() ) {
+ pauseFramePusher();
state = State.Paused;
}
if(DEBUG) { System.err.println("Pause: "+toString()); }
@@ -251,12 +241,15 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected abstract boolean pauseImpl();
public final State stop() {
- switch(state) {
+ switch( state ) {
case Playing:
+ /** fall-through intended */
case Paused:
- if(stopImpl()) {
+ if( stopImpl() ) {
+ pauseFramePusher();
state = State.Stopped;
}
+ default:
}
if(DEBUG) { System.err.println("Stop: "+toString()); }
return state;
@@ -265,61 +258,70 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
@Override
public final int getCurrentPosition() {
- if(State.Uninitialized != state) {
+ if( State.Uninitialized != state ) {
return getCurrentPositionImpl();
}
return 0;
}
protected abstract int getCurrentPositionImpl();
+ @Override
+ public final int getVideoPTS() { return currentVideoPTS; }
+
+ @Override
+ public final int getAudioPTS() {
+ if( State.Uninitialized != state ) {
+ return getAudioPTSImpl();
+ }
+ return 0;
+ }
+ protected abstract int getAudioPTSImpl();
+
public final int seek(int msec) {
- final int cp;
+ final int pts1;
switch(state) {
case Stopped:
case Playing:
case Paused:
- cp = seekImpl(msec);
+ pauseFramePusher();
+ pts1 = seekImpl(msec);
+ currentVideoPTS=pts1;
+ resumeFramePusher();
break;
default:
- cp = 0;
+ pts1 = 0;
}
if(DEBUG) { System.err.println("Seek("+msec+"): "+toString()); }
- return cp;
+ return pts1;
}
protected abstract int seekImpl(int msec);
public final State getState() { return state; }
@Override
- public final State initGLStream(GL gl, URLConnection urlConn) throws IllegalStateException, GLException, IOException {
+ public final State initGLStream(GL gl, int reqTextureCount, URLConnection urlConn) throws IllegalStateException, GLException, IOException {
if(State.Uninitialized != state) {
throw new IllegalStateException("Instance not in state "+State.Uninitialized+", but "+state+", "+this);
}
this.urlConn = urlConn;
if (this.urlConn != null) {
try {
- if(null != gl) {
- if(null!=texFrames) {
- // re-init ..
- removeAllImageTextures(gl);
- } else {
- texFrames = new TextureSequence.TextureFrame[textureCount];
- }
- final int[] tex = new int[textureCount];
- {
- gl.glGenTextures(textureCount, tex, 0);
- final int err = gl.glGetError();
- if( GL.GL_NO_ERROR != err ) {
- throw new RuntimeException("TextureNames creation failed (num: "+textureCount+"): err "+toHexString(err));
- }
+ if( null != gl ) {
+ removeAllTextureFrames(gl);
+ textureCount = validateTextureCount(reqTextureCount);
+ if( textureCount < 2 ) {
+ throw new InternalError("Validated texture count < 2: "+textureCount);
}
- initGLStreamImpl(gl, tex);
-
- for(int i=0; i<textureCount; i++) {
- final TextureSequence.TextureFrame tf = createTexImage(gl, i, tex);
- texFrames[i] = tf;
- texFrameMap.put(tex[i], tf);
+ initGLStreamImpl(gl); // also initializes width, height, .. etc
+ videoFramesFree = new SyncedRingbuffer<TextureFrame>(createTexFrames(gl, textureCount), true /* full */);
+ if( 2 < textureCount ) {
+ videoFramesDecoded = new SyncedRingbuffer<TextureFrame>(new TextureFrame[textureCount], false /* full */);
+ framePusher = new FramePusher(gl, requiresOffthreadGLCtx());
+ framePusher.doStart();
+ } else {
+ videoFramesDecoded = null;
}
+ lastFrame = videoFramesFree.getBlocking(false /* clearRef */ );
}
state = State.Stopped;
return state;
@@ -329,35 +331,42 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
return state;
}
+ /**
+ * Returns the validated number of textures to be handled.
+ * <p>
+ * Default is always 2 textures, last texture and the decoding texture.
+ * </p>
+ */
+ protected int validateTextureCount(int desiredTextureCount) {
+ return 2;
+ }
+ protected boolean requiresOffthreadGLCtx() { return false; }
- /**
- * Implementation shall set the following set of data here
- * @param gl TODO
- * @param texNames TODO
- * @see #width
- * @see #height
- * @see #fps
- * @see #bps_stream
- * @see #totalFrames
- * @see #acodec
- * @see #vcodec
- */
- protected abstract void initGLStreamImpl(GL gl, int[] texNames) throws IOException;
-
- protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) {
- return new TextureSequence.TextureFrame( createTexImageImpl(gl, idx, tex, width, height, false) );
+ private final TextureFrame[] createTexFrames(GL gl, final int count) {
+ final int[] texNames = new int[count];
+ gl.glGenTextures(count, texNames, 0);
+ final int err = gl.glGetError();
+ if( GL.GL_NO_ERROR != err ) {
+ throw new RuntimeException("TextureNames creation failed (num: "+count+"): err "+toHexString(err));
+ }
+ final TextureFrame[] texFrames = new TextureFrame[count];
+ for(int i=0; i<count; i++) {
+ texFrames[i] = createTexImage(gl, texNames[i]);
+ }
+ return texFrames;
}
+ protected abstract TextureFrame createTexImage(GL gl, int texName);
- protected Texture createTexImageImpl(GL gl, int idx, int[] tex, int tWidth, int tHeight, boolean mustFlipVertically) {
- if( 0 > tex[idx] ) {
- throw new RuntimeException("TextureName "+toHexString(tex[idx])+" invalid.");
+ protected final Texture createTexImageImpl(GL gl, int texName, int tWidth, int tHeight, boolean mustFlipVertically) {
+ if( 0 > texName ) {
+ throw new RuntimeException("TextureName "+toHexString(texName)+" invalid.");
}
gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit());
- gl.glBindTexture(textureTarget, tex[idx]);
+ gl.glBindTexture(textureTarget, texName);
{
final int err = gl.glGetError();
if( GL.GL_NO_ERROR != err ) {
- throw new RuntimeException("Couldn't bind textureName "+toHexString(tex[idx])+" to 2D target, err "+toHexString(err));
+ throw new RuntimeException("Couldn't bind textureName "+toHexString(texName)+" to 2D target, err "+toHexString(err));
}
}
@@ -389,30 +398,297 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
gl.glTexParameteri(textureTarget, GL.GL_TEXTURE_WRAP_S, texWrapST[0]);
gl.glTexParameteri(textureTarget, GL.GL_TEXTURE_WRAP_T, texWrapST[1]);
- return com.jogamp.opengl.util.texture.TextureIO.newTexture(tex[idx],
- textureTarget,
+ return com.jogamp.opengl.util.texture.TextureIO.newTexture(
+ texName, textureTarget,
tWidth, tHeight,
width, height,
mustFlipVertically);
}
+
+ private final void removeAllTextureFrames(GL gl) {
+ if( null != videoFramesFree ) {
+ final TextureFrame[] texFrames = videoFramesFree.getArray();
+ videoFramesFree = null;
+ videoFramesDecoded = null;
+ lastFrame = null;
+ for(int i=0; i<texFrames.length; i++) {
+ final TextureFrame frame = texFrames[i];
+ if(null != frame) {
+ destroyTexFrame(gl, frame);
+ texFrames[i] = null;
+ }
+ }
+ }
+ textureCount=0;
+ }
+ protected void destroyTexFrame(GL gl, TextureFrame frame) {
+ frame.getTexture().destroy(gl);
+ }
+
+ /**
+ * Implementation shall set the following set of data here
+ * @param gl TODO
+ * @see #width
+ * @see #height
+ * @see #fps
+ * @see #bps_stream
+ * @see #totalFrames
+ * @see #acodec
+ * @see #vcodec
+ */
+ protected abstract void initGLStreamImpl(GL gl) throws IOException;
- protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) {
- imgTex.getTexture().destroy(gl);
+ @Override
+ public final TextureFrame getLastTexture() throws IllegalStateException {
+ if(State.Uninitialized == state) {
+ throw new IllegalStateException("Instance not initialized: "+this);
+ }
+ return lastFrame;
}
- protected void removeAllImageTextures(GL gl) {
- if(null != texFrames) {
- for(int i=0; i<textureCount; i++) {
- final TextureSequence.TextureFrame imgTex = texFrames[i];
- if(null != imgTex) {
- destroyTexImage(gl, imgTex);
- texFrames[i] = null;
+ @Override
+ public final synchronized TextureFrame getNextTexture(GL gl, boolean blocking) throws IllegalStateException {
+ if(State.Uninitialized == state) {
+ throw new IllegalStateException("Instance not initialized: "+this);
+ }
+ if(State.Playing == state) {
+ TextureFrame nextFrame = null;
+ boolean ok = true;
+ try {
+ if( 2 < textureCount ) {
+ nextFrame = videoFramesDecoded.getBlocking(false /* clearRef */ );
+ } else {
+ nextFrame = videoFramesFree.getBlocking(false /* clearRef */ );
+ if( getNextTextureImpl(gl, nextFrame, blocking) ) {
+ newFrameAvailable(nextFrame);
+ } else {
+ ok = false;
+ }
+ }
+ if( ok ) {
+ currentVideoPTS = nextFrame.getPTS();
+ if( blocking ) {
+ syncFrame2Audio(nextFrame);
+ }
+ final TextureFrame _lastFrame = lastFrame;
+ lastFrame = nextFrame;
+ videoFramesFree.putBlocking(_lastFrame);
+ }
+ } catch (InterruptedException e) {
+ ok = false;
+ e.printStackTrace();
+ } finally {
+ if( !ok && null != nextFrame ) { // put back
+ videoFramesFree.put(nextFrame);
}
}
}
- texFrameMap.clear();
+ return lastFrame;
}
+ protected abstract boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking);
+ protected abstract void syncFrame2Audio(TextureFrame frame);
+
+ private final void newFrameAvailable(TextureFrame frame) {
+ frameNumber++;
+ synchronized(eventListenersLock) {
+ for(Iterator<GLMediaEventListener> i = eventListeners.iterator(); i.hasNext(); ) {
+ i.next().newFrameAvailable(this, frame, System.currentTimeMillis());
+ }
+ }
+ }
+
+ class FramePusher extends Thread {
+ private volatile boolean isRunning = false;
+ private volatile boolean isActive = false;
+
+ private volatile boolean shallPause = true;
+ private volatile boolean shallStop = false;
+
+ private final GL gl;
+ private GLDrawable dummyDrawable = null;
+ private GLContext sharedGLCtx = null;
+
+ FramePusher(GL gl, boolean createSharedCtx) {
+ setDaemon(true);
+ this.gl = createSharedCtx ? createSharedGL(gl) : gl;
+ }
+
+ private GL createSharedGL(GL gl) {
+ final GLContext glCtx = gl.getContext();
+ final boolean glCtxCurrent = glCtx.isCurrent();
+ final GLProfile glp = gl.getGLProfile();
+ final GLDrawableFactory factory = GLDrawableFactory.getFactory(glp);
+ final AbstractGraphicsDevice device = glCtx.getGLDrawable().getNativeSurface().getGraphicsConfiguration().getScreen().getDevice();
+ dummyDrawable = factory.createDummyDrawable(device, true, glp); // own device!
+ dummyDrawable.setRealized(true);
+ sharedGLCtx = dummyDrawable.createContext(glCtx);
+ makeCurrent(sharedGLCtx);
+ if( glCtxCurrent ) {
+ makeCurrent(glCtx);
+ } else {
+ sharedGLCtx.release();
+ }
+ return sharedGLCtx.getGL();
+ }
+ private void makeCurrent(GLContext ctx) {
+ if( GLContext.CONTEXT_NOT_CURRENT >= ctx.makeCurrent() ) {
+ throw new GLException("Couldn't make ctx current: "+ctx);
+ }
+ }
+
+ private void destroySharedGL() {
+ if( null != sharedGLCtx ) {
+ if( sharedGLCtx.isCreated() ) {
+ // Catch dispose GLExceptions by GLEventListener, just 'print' them
+ // so we can continue with the destruction.
+ try {
+ sharedGLCtx.destroy();
+ } catch (GLException gle) {
+ gle.printStackTrace();
+ }
+ }
+ sharedGLCtx = null;
+ }
+ if( null != dummyDrawable ) {
+ final AbstractGraphicsDevice device = dummyDrawable.getNativeSurface().getGraphicsConfiguration().getScreen().getDevice();
+ dummyDrawable.setRealized(false);
+ dummyDrawable = null;
+ device.close();
+ }
+ }
+
+ public synchronized void doPause() {
+ if( isActive ) {
+ shallPause = true;
+ while( isActive ) {
+ try {
+ this.wait(); // wait until paused
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+ public synchronized void doResume() {
+ if( isRunning && !isActive ) {
+ shallPause = false;
+ while( !isActive ) {
+ this.notify(); // wake-up pause-block
+ try {
+ this.wait(); // wait until resumed
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+ public synchronized void doStart() {
+ start();
+ while( !isRunning ) {
+ try {
+ this.wait(); // wait until started
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ public synchronized void doStop() {
+ if( isRunning ) {
+ shallStop = true;
+ while( isRunning ) {
+ this.notify(); // wake-up pause-block (opt)
+ try {
+ this.wait(); // wait until stopped
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+ public boolean isRunning() { return isRunning; }
+ public boolean isActive() { return isActive; }
+
+ public void run() {
+ setName(getName()+"-FramePusher_"+FramePusherInstanceId);
+ FramePusherInstanceId++;
+
+ synchronized ( this ) {
+ if( null != sharedGLCtx ) {
+ makeCurrent( sharedGLCtx );
+ }
+ isRunning = true;
+ this.notify(); // wake-up doStart()
+ }
+
+ while( !shallStop ){
+ if( shallPause ) {
+ synchronized ( this ) {
+ while( shallPause && !shallStop ) {
+ isActive = false;
+ this.notify(); // wake-up doPause()
+ try {
+ this.wait(); // wait until resumed
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ isActive = true;
+ this.notify(); // wake-up doResume()
+ }
+ }
+
+ if( !shallStop ) {
+ TextureFrame nextFrame = null;
+ boolean ok = false;
+ try {
+ nextFrame = videoFramesFree.getBlocking(true /* clearRef */ );
+ if( getNextTextureImpl(gl, nextFrame, true) ) {
+ gl.glFinish();
+ videoFramesDecoded.putBlocking(nextFrame);
+ newFrameAvailable(nextFrame);
+ ok = true;
+ }
+ } catch (InterruptedException e) {
+ if( !shallStop && !shallPause ) {
+ e.printStackTrace(); // oops
+ shallPause = false;
+ shallStop = true;
+ }
+ } finally {
+ if( !ok && null != nextFrame ) { // put back
+ videoFramesFree.put(nextFrame);
+ }
+ }
+ }
+ }
+ destroySharedGL();
+ synchronized ( this ) {
+ isRunning = false;
+ isActive = false;
+ this.notify(); // wake-up doStop()
+ }
+ }
+ }
+ static int FramePusherInstanceId = 0;
+ private FramePusher framePusher = null;
+ private final void pauseFramePusher() {
+ if( null != framePusher ) {
+ framePusher.doPause();
+ }
+ }
+ private final void resumeFramePusher() {
+ if( null != framePusher ) {
+ framePusher.doResume();
+ }
+ }
+ private final void destroyFramePusher() {
+ if( null != framePusher ) {
+ framePusher.doStop();
+ framePusher = null;
+ }
+ }
+
protected final void updateAttributes(int width, int height, int bps_stream, int bps_video, int bps_audio,
float fps, int totalFrames, int duration,
String vcodec, String acodec) {
@@ -458,19 +734,12 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
}
}
- protected final void newFrameAvailable() {
- frameNumber++;
- synchronized(eventListenersLock) {
- for(Iterator<GLMediaEventListener> i = eventListeners.iterator(); i.hasNext(); ) {
- i.next().newFrameAvailable(this, System.currentTimeMillis());
- }
- }
- }
@Override
public final synchronized State destroy(GL gl) {
+ destroyFramePusher();
destroyImpl(gl);
- removeAllImageTextures(gl);
+ removeAllTextureFrames(gl);
state = State.Uninitialized;
return state;
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
index cd48c3962..f1ce42257 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
@@ -49,66 +49,68 @@ import com.jogamp.opengl.util.texture.TextureSequence;
*/
public class NullGLMediaPlayer extends GLMediaPlayerImpl {
private TextureData texData = null;
- private TextureSequence.TextureFrame frame = null;
private int pos_ms = 0;
private int pos_start = 0;
public NullGLMediaPlayer() {
super();
- this.setTextureCount(1);
}
@Override
- protected boolean setPlaySpeedImpl(float rate) {
+ protected final boolean setPlaySpeedImpl(float rate) {
return false;
}
@Override
- protected boolean startImpl() {
+ protected final boolean startImpl() {
pos_start = (int)System.currentTimeMillis();
return true;
}
@Override
- protected boolean pauseImpl() {
+ protected final boolean pauseImpl() {
return true;
}
@Override
- protected boolean stopImpl() {
+ protected final boolean stopImpl() {
return true;
}
@Override
- protected int seekImpl(int msec) {
+ protected final int seekImpl(int msec) {
pos_ms = msec;
validatePos();
return pos_ms;
}
@Override
- protected TextureSequence.TextureFrame getLastTextureImpl() {
- return frame;
+ protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking) {
+ return true;
}
-
@Override
- protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) {
- return frame;
- }
+ protected final void syncFrame2Audio(TextureFrame frame) { }
@Override
- protected int getCurrentPositionImpl() {
+ protected final int getCurrentPositionImpl() {
pos_ms = (int)System.currentTimeMillis() - pos_start;
validatePos();
return pos_ms;
}
+ @Override
+ protected final int getAudioPTSImpl() { return getCurrentPositionImpl(); }
+
@Override
- protected void destroyImpl(GL gl) {
+ protected final void destroyImpl(GL gl) {
+ if(null != texData) {
+ texData.destroy();
+ texData = null;
+ }
}
-
+
@Override
- protected void initGLStreamImpl(GL gl, int[] texNames) throws IOException {
+ protected final void initGLStreamImpl(GL gl) throws IOException {
try {
URLConnection urlConn = IOUtil.getResource("jogl/util/data/av/test-ntsc01-160x90.png", this.getClass().getClassLoader());
if(null != urlConn) {
@@ -117,44 +119,44 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl {
} catch (Exception e) {
e.printStackTrace();
}
+ final int _w, _h;
if(null != texData) {
- width = texData.getWidth();
- height = texData.getHeight();
+ _w = texData.getWidth();
+ _h = texData.getHeight();
} else {
- width = 640;
- height = 480;
- ByteBuffer buffer = Buffers.newDirectByteBuffer(width*height*4);
+ _w = 640;
+ _h = 480;
+ ByteBuffer buffer = Buffers.newDirectByteBuffer(_w*_h*4);
while(buffer.hasRemaining()) {
buffer.put((byte) 0xEA); buffer.put((byte) 0xEA); buffer.put((byte) 0xEA); buffer.put((byte) 0xEA);
}
buffer.rewind();
texData = new TextureData(GLProfile.getGL2ES2(),
- GL.GL_RGBA, width, height, 0,
+ GL.GL_RGBA, _w, _h, 0,
GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, false,
false, false, buffer, null);
}
- fps = 24f;
- duration = 10*60*1000; // msec
- totalFrames = (int) ( (duration/1000)*fps );
- vcodec = "png-static";
+ final float _fps = 24f;
+ final int _duration = 10*60*1000; // msec
+ final int _totalFrames = (int) ( (_duration/1000)*_fps );
+ updateAttributes(_w, _h,
+ 0, 0, 0,
+ _fps, _totalFrames, _duration,
+ "png-static", null);
}
@Override
- protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) {
- Texture texture = super.createTexImageImpl(gl, idx, tex, width, height, false);
+ protected final TextureSequence.TextureFrame createTexImage(GL gl, int texName) {
+ final Texture texture = super.createTexImageImpl(gl, texName, width, height, false);
if(null != texData) {
texture.updateImage(gl, texData);
- texData.destroy();
- texData = null;
}
- frame = new TextureSequence.TextureFrame( texture );
- return frame;
+ return new TextureSequence.TextureFrame( texture );
}
@Override
- protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) {
- frame = null;
- super.destroyTexImage(gl, imgTex);
+ protected final void destroyTexFrame(GL gl, TextureSequence.TextureFrame frame) {
+ super.destroyTexFrame(gl, frame);
}
private void validatePos() {
diff --git a/src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java b/src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java
index 5f5d69cf8..ea67387a0 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java
@@ -74,6 +74,8 @@ public class SyncedRingbuffer<T> {
}
}
+ public final T[] getArray() { return array; }
+
public final int capacity() {
return capacity;
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
index 852e5149c..3680da1a8 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
@@ -116,25 +116,32 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
private static long[] symbolAddr;
private static final boolean ready;
+ private static final boolean libsLoaded;
static {
// native ffmpeg media player implementation is included in jogl_desktop and jogl_mobile
GLProfile.initSingleton();
boolean _ready = false;
+ boolean[] _libsLoaded= { false };
try {
- _ready = initSymbols();
+ _ready = initSymbols(_libsLoaded);
} catch (Throwable t) {
t.printStackTrace();
}
+ libsLoaded = _libsLoaded[0];
ready = _ready;
- if(!ready) {
- System.err.println("FFMPEG: Not Available");
+ if(!libsLoaded) {
+ System.err.println("LIB_AV Not Available");
+ } else if(!ready) {
+ System.err.println("LIB_AV Not Matching");
}
}
+ static boolean libsLoaded() { return libsLoaded; }
static boolean initSingleton() { return ready; }
- private static final boolean initSymbols() {
+ private static final boolean initSymbols(boolean[] libsLoaded) {
+ libsLoaded[0] = false;
final DynamicLibraryBundle dl = AccessController.doPrivileged(new PrivilegedAction<DynamicLibraryBundle>() {
public DynamicLibraryBundle run() {
return new DynamicLibraryBundle(new FFMPEGDynamicLibraryBundleInfo());
@@ -148,6 +155,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
if(!dl.isToolLibComplete()) {
throw new RuntimeException("FFMPEG Tool libraries incomplete");
}
+ libsLoaded[0] = true;
if(symbolNames.length != symbolCount) {
throw new InternalError("XXX0 "+symbolNames.length+" != "+symbolCount);
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
index b6708b379..83a5960f1 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
@@ -44,11 +44,10 @@ import com.jogamp.opengl.util.GLPixelStorageModes;
import com.jogamp.opengl.util.av.AudioSink;
import com.jogamp.opengl.util.av.AudioSinkFactory;
import com.jogamp.opengl.util.texture.Texture;
-import com.jogamp.opengl.util.texture.TextureSequence;
+import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;
import jogamp.opengl.GLContextImpl;
-import jogamp.opengl.util.av.EGLMediaPlayerImpl;
-import jogamp.opengl.util.av.SyncedRingbuffer;
+import jogamp.opengl.util.av.GLMediaPlayerImpl;
/***
* Implementation utilizes <a href="http://libav.org/">Libav</a>
@@ -102,7 +101,7 @@ import jogamp.opengl.util.av.SyncedRingbuffer;
* </pre></li>
* </ul>
*/
-public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
+public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
// Count of zeroed buffers to return before switching to real sample provider
private static final int TEMP_BUFFER_COUNT = 20;
@@ -114,21 +113,20 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
static final boolean available;
static {
- if(FFMPEGDynamicLibraryBundleInfo.initSingleton()) {
+ final boolean libAVGood = FFMPEGDynamicLibraryBundleInfo.initSingleton();
+ if( FFMPEGDynamicLibraryBundleInfo.libsLoaded() ) {
avUtilVersion = getAVVersion(getAvUtilVersion0());
avFormatVersion = getAVVersion(getAvFormatVersion0());
avCodecVersion = getAVVersion(getAvCodecVersion0());
System.err.println("LIB_AV Util : "+avUtilVersion);
System.err.println("LIB_AV Format: "+avFormatVersion);
System.err.println("LIB_AV Codec : "+avCodecVersion);
- initIDs0();
- available = true;
} else {
avUtilVersion = null;
avFormatVersion = null;
avCodecVersion = null;
- available = false;
}
+ available = libAVGood ? initIDs0() : false;
}
public static final boolean isAvailable() { return available; }
@@ -144,8 +142,6 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
//
protected long moviePtr = 0;
- protected long procAddrGLTexSubImage2D = 0;
- protected EGLMediaPlayerImpl.EGLTextureFrame lastTex = null;
protected GLPixelStorageModes psm;
protected PixelFormat vPixelFmt = null;
protected int vPlanes = 0;
@@ -161,21 +157,15 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
// Audio
//
- protected static final boolean USE_AUDIO_PUSHER = false;
protected final int AudioFrameCount = 8;
protected final AudioSink audioSink;
protected final int maxAvailableAudio;
protected AudioSink.AudioDataFormat chosenAudioFormat;
- protected final SyncedRingbuffer<AudioSink.AudioFrame> audioFramesBuffer =
- USE_AUDIO_PUSHER ? new SyncedRingbuffer<AudioSink.AudioFrame>(new AudioSink.AudioFrame[AudioFrameCount], false /* full */)
- : null;
public FFMPEGMediaPlayer() {
- super(TextureType.GL, false);
if(!available) {
throw new RuntimeException("FFMPEGMediaPlayer not available");
}
- setTextureCount(1);
moviePtr = createInstance0(DEBUG);
if(0==moviePtr) {
throw new GLException("Couldn't create FFMPEGInstance");
@@ -184,26 +174,15 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
audioSink = AudioSinkFactory.createDefault();
maxAvailableAudio = audioSink.getQueuedByteCount();
}
-
@Override
- protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) {
- if(TextureType.GL == texType) {
- final Texture texture = super.createTexImageImpl(gl, idx, tex, texWidth, texHeight, true);
- lastTex = new EGLTextureFrame(null, texture, 0, 0);
- } else {
- throw new InternalError("n/a");
- }
- return lastTex;
+ protected final int validateTextureCount(int desiredTextureCount) {
+ return desiredTextureCount>1 ? desiredTextureCount : 2;
}
-
@Override
- protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) {
- lastTex = null;
- super.destroyTexImage(gl, imgTex);
- }
-
+ protected final boolean requiresOffthreadGLCtx() { return true; }
+
@Override
- protected void destroyImpl(GL gl) {
+ protected final void destroyImpl(GL gl) {
if (moviePtr != 0) {
destroyInstance0(moviePtr);
moviePtr = 0;
@@ -211,15 +190,29 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
}
@Override
- protected void initGLStreamImpl(GL gl, int[] texNames) throws IOException {
+ protected final void initGLStreamImpl(GL gl) throws IOException {
if(0==moviePtr) {
throw new GLException("FFMPEG native instance null");
}
+ {
+ final GLContextImpl ctx = (GLContextImpl)gl.getContext();
+ final ProcAddressTable pt = ctx.getGLProcAddressTable();
+ final long procAddrGLTexSubImage2D = getAddressFor(pt, "glTexSubImage2D");
+ if( 0 == procAddrGLTexSubImage2D ) {
+ throw new InternalError("glTexSubImage2D n/a in ProcAddressTable: "+pt.getClass().getName()+" of "+ctx.getGLVersion());
+ }
+ final long procAddrGLGetError = getAddressFor(pt, "glGetError");
+ if( 0 == procAddrGLGetError ) {
+ throw new InternalError("glGetError n/a in ProcAddressTable: "+pt.getClass().getName()+" of "+ctx.getGLVersion());
+ }
+ setGLFuncs0(moviePtr, procAddrGLTexSubImage2D, procAddrGLGetError);
+ }
+
final String urlS=urlConn.getURL().toExternalForm();
chosenAudioFormat = audioSink.initSink(audioSink.getPreferredFormat(), AudioFrameCount);
System.err.println("setURL: p1 "+this);
- setStream0(moviePtr, urlS, -1, -1, AudioFrameCount);
+ setStream0(moviePtr, urlS, -1, -1, AudioFrameCount); // issues updateAttributes*(..)
System.err.println("setURL: p2 "+this);
int tf, tif=GL.GL_RGBA; // texture format and internal format
@@ -239,12 +232,10 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
}
setTextureFormat(tif, tf);
setTextureType(GL.GL_UNSIGNED_BYTE);
- final GLContextImpl ctx = (GLContextImpl)gl.getContext();
- final ProcAddressTable pt = ctx.getGLProcAddressTable();
- procAddrGLTexSubImage2D = getAddressFor(pt, "glTexSubImage2D");
- if( 0 == procAddrGLTexSubImage2D ) {
- throw new InternalError("glTexSubImage2D n/a in ProcAddressTable: "+pt.getClass().getName()+" of "+ctx.getGLVersion());
- }
+ }
+ @Override
+ protected final TextureFrame createTexImage(GL gl, int texName) {
+ return new TextureFrame( createTexImageImpl(gl, texName, texWidth, texHeight, true) );
}
/**
@@ -263,107 +254,6 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
} ).longValue();
}
- private final void pushSound(ByteBuffer sampleData, int data_size, int audio_pts) {
- if( USE_AUDIO_PUSHER ) {
- if( audioPusher != null && audioPusher.isRunning() ) {
- try {
- audioFramesBuffer.putBlocking(new AudioSink.AudioFrame(sampleData, data_size, audio_pts));
- } catch (InterruptedException e) {
- e.printStackTrace(); // oops
- }
- }
- } else {
- pushAudioFrame(new AudioSink.AudioFrame(sampleData, data_size, audio_pts));
- }
- }
-
- private final void pushAudioFrame(AudioSink.AudioFrame audioFrame) {
- // poor mans audio sync ..
- final long now = System.currentTimeMillis();
- final long now_d = now - lastAudioTime;
- final long pts_d = audioFrame.audioPTS - lastAudioPTS;
- final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
- final boolean sleep = dt > audio_dt_d;
- final long sleepP = dt - ( audio_dt_d / 2 );
- if(DEBUG) {
- final int qAT = audioSink.getQueuedTime();
- System.err.println("s: pts-a "+audioFrame.audioPTS+", qAT "+qAT+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt+", sleep "+sleep+", sleepP "+sleepP+" ms");
- }
- if( sleep ) {
- try {
- Thread.sleep( sleepP );
- } catch (InterruptedException e) {
- e.printStackTrace(); // oops
- }
- lastAudioTime = System.currentTimeMillis();
- } else {
- lastAudioTime = now;
- }
- if( audioSink.isDataAvailable(audioFrame.dataSize) ) {
- audioSink.writeData(audioFrame);
- lastAudioPTS=audioFrame.audioPTS;
- }
- }
-
- class AudioPusher extends Thread {
- volatile boolean shallStop = false;
- volatile boolean isBlocked = false;
-
- AudioPusher() {
- setDaemon(true);
- }
- public void requestStop() {
- shallStop = true;
- if( isBlocked ) {
- interrupt();
- }
- }
- public boolean isRunning() { return !shallStop; }
-
- public void run() {
- setName(getName()+"-AudioPusher_"+AudioPusherInstanceId);
- AudioPusherInstanceId++;
-
- while( !shallStop ){
- final AudioSink.AudioFrame audioFrame;
- try {
- isBlocked = true;
- audioFrame = audioFramesBuffer.getBlocking(true /* clearRef */);
- } catch (InterruptedException e) {
- if( !shallStop ) {
- e.printStackTrace(); // oops
- }
- shallStop = true;
- return;
- }
- isBlocked = false;
-
- if( null != audioFrame ) {
- FFMPEGMediaPlayer.this.pushAudioFrame(audioFrame);
- }
- }
- }
- }
- static int AudioPusherInstanceId = 0;
- private AudioPusher audioPusher = null;
-
- private final void stopAudioPusher() {
- if( USE_AUDIO_PUSHER ) {
- if( null != audioPusher ) {
- audioPusher.requestStop();
- audioPusher = null;
- }
- audioFramesBuffer.clear(true);
- }
- }
- private final void startAudioPusher() {
- if( USE_AUDIO_PUSHER ) {
- stopAudioPusher();
- audioPusher = new AudioPusher();
- audioPusher.start();
- }
- }
-
private void updateAttributes2(int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
int lSz0, int lSz1, int lSz2,
int tWd0, int tWd1, int tWd2) {
@@ -413,7 +303,7 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
* Otherwise the call is delegated to it's super class.
*/
@Override
- public String getTextureLookupFunctionName(String desiredFuncName) throws IllegalStateException {
+ public final String getTextureLookupFunctionName(String desiredFuncName) throws IllegalStateException {
if(State.Uninitialized == state) {
throw new IllegalStateException("Instance not initialized: "+this);
}
@@ -434,7 +324,7 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
* e.g. YUV420P to RGB. Otherwise the call is delegated to it's super class.
*/
@Override
- public String getTextureLookupFragmentShaderImpl() throws IllegalStateException {
+ public final String getTextureLookupFragmentShaderImpl() throws IllegalStateException {
if(State.Uninitialized == state) {
throw new IllegalStateException("Instance not initialized: "+this);
}
@@ -465,134 +355,131 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
}
@Override
- protected synchronized int getCurrentPositionImpl() {
+ protected final synchronized int getCurrentPositionImpl() {
return 0!=moviePtr ? getVideoPTS0(moviePtr) : 0;
}
@Override
- protected synchronized boolean setPlaySpeedImpl(float rate) {
+ public final int getAudioPTSImpl() { return 0; }
+
+ @Override
+ protected final synchronized boolean setPlaySpeedImpl(float rate) {
return true;
}
@Override
- public synchronized boolean startImpl() {
+ public final synchronized boolean startImpl() {
if(0==moviePtr) {
return false;
}
- startAudioPusher();
return true;
}
/** @return time position after issuing the command */
@Override
- public synchronized boolean pauseImpl() {
+ public final synchronized boolean pauseImpl() {
if(0==moviePtr) {
return false;
}
- stopAudioPusher();
return true;
}
/** @return time position after issuing the command */
@Override
- public synchronized boolean stopImpl() {
+ public final synchronized boolean stopImpl() {
if(0==moviePtr) {
return false;
}
- stopAudioPusher();
return true;
}
/** @return time position after issuing the command */
@Override
- protected synchronized int seekImpl(int msec) {
+ protected final synchronized int seekImpl(int msec) {
if(0==moviePtr) {
throw new GLException("FFMPEG native instance null");
}
- stopAudioPusher();
int pts0 = getVideoPTS0(moviePtr);
int pts1 = seek0(moviePtr, msec);
System.err.println("Seek: "+pts0+" -> "+msec+" : "+pts1);
- lastAudioPTS=pts1;
- lastVideoPTS=pts1;
- startAudioPusher();
return pts1;
}
@Override
- protected TextureSequence.TextureFrame getLastTextureImpl() {
- return lastTex;
+ protected final boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking) {
+ if(0==moviePtr) {
+ throw new GLException("FFMPEG native instance null");
+ }
+ psm.setUnpackAlignment(gl, 1); // RGBA ? 4 : 1
+ int avPTS = 0;
+ try {
+ final Texture tex = nextFrame.getTexture();
+ gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit());
+ tex.enable(gl);
+ tex.bind(gl);
+
+ /** Try decode up to 10 packets to find one containing video, i.e. vPTS > 0 */
+ for(int retry=10; 0 >= avPTS && 0 < retry; retry--) {
+ avPTS = readNextPacket0(moviePtr, textureTarget, textureFormat, textureType);
+ retry--;
+ }
+ } finally {
+ psm.restore(gl);
+ }
+ if( 0 < avPTS ) {
+ vSTS = avPTS;
+ nextFrame.setPTS(avPTS);
+ return true;
+ } else {
+ return false;
+ }
}
+ private final void pushSound(ByteBuffer sampleData, int data_size, int audio_pts) {
+ aSTS = audio_pts;
+ final AudioSink.AudioFrame frame = new AudioSink.AudioFrame(sampleData, data_size, audio_pts);
+ if( audioSink.isDataAvailable(frame.dataSize) ) {
+ audioSink.writeData(frame);
+ }
+ }
+
+ /** last audio streaming TS */
+ private int aSTS = 0;
+ /** last video streaming TS */
+ private int vSTS = 0;
+
private long lastAudioTime = 0;
- private int lastAudioPTS = 0;
private static final int audio_dt_d = 400;
private long lastVideoTime = 0;
- private int lastVideoPTS = 0;
private static final int video_dt_d = 9;
@Override
- protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) {
- if(0==moviePtr) {
- throw new GLException("FFMPEG native instance null");
- }
- if(null != lastTex) {
- psm.setUnpackAlignment(gl, 1); // RGBA ? 4 : 1
- try {
- final Texture tex = lastTex.getTexture();
- gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit());
- tex.enable(gl);
- tex.bind(gl);
-
- if( USE_AUDIO_PUSHER ) {
- try {
- audioFramesBuffer.waitForFreeSlots(2);
- } catch (InterruptedException e) {
- e.printStackTrace(); // oops
- }
- }
-
- /* try decode 10 packets to find one containing video
- (res == 2) */
- int res = 0;
- int retry = 10;
- while(res!=2 && retry >= 0) {
- res = readNextPacket0(moviePtr, procAddrGLTexSubImage2D, textureTarget, textureFormat, textureType);
- retry--;
- }
- } finally {
- psm.restore(gl);
- }
- final int pts = getVideoPTS0(moviePtr); // this frame
- if(blocking) {
- // poor mans video sync .. TODO: off thread 'readNextPackage0(..)' on shared GLContext and multi textures/unit!
- final long now = System.currentTimeMillis();
- // Try sync video to audio
- final long now_d = now - lastAudioTime;
- final long pts_d = pts - lastAudioPTS - 444; /* hack 444 == play video 444ms ahead of audio */
- final long dt = Math.min(47, (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ) ;
- //final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
- final boolean sleep = dt>video_dt_d && dt<1000 && audioSink.getQueuedByteCount()<maxAvailableAudio-10000;
- final long sleepP = dt-video_dt_d;
- if(DEBUG) {
- final int qAT = audioSink.getQueuedTime();
- System.err.println("s: pts-v "+pts+", qAT "+qAT+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt+", sleep "+sleep+", sleepP "+sleepP+" ms");
- }
- // ?? Maybe use audioSink.getQueuedTime();
- if( sleep ) {
- try {
- Thread.sleep(sleepP);
- } catch (InterruptedException e) { }
- lastVideoTime = System.currentTimeMillis();
- } else {
- lastVideoTime = now;
- }
- }
- lastVideoPTS = pts;
+ protected final void syncFrame2Audio(TextureFrame frame) {
+ /**
+ // poor mans video sync .. TODO: off thread 'readNextPackage0(..)' on shared GLContext and multi textures/unit!
+ final long now = System.currentTimeMillis();
+ // Try sync video to audio
+ final long now_d = now - lastAudioTime;
+ final long pts_d = vSTS - aSTS - 444; // hack 444 == play video 444ms ahead of audio
+ final long dt = Math.min(47, (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ) ;
+ //final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
+ final boolean sleep = dt>video_dt_d && dt<1000 && audioSink.getQueuedByteCount()<maxAvailableAudio-10000;
+ final long sleepP = dt-video_dt_d;
+ if(DEBUG) {
+ final int qAT = audioSink.getQueuedTime();
+ System.err.println("s: pts-v "+vSTS+", qAT "+qAT+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt+", sleep "+sleep+", sleepP "+sleepP+" ms");
}
- return lastTex;
+ // ?? Maybe use audioSink.getQueuedTime();
+ if( sleep ) {
+ try {
+ Thread.sleep(sleepP);
+ } catch (InterruptedException e) { }
+ lastVideoTime = System.currentTimeMillis();
+ } else {
+ lastVideoTime = now;
+ }
+ */
}
-
private static native int getAvUtilVersion0();
private static native int getAvFormatVersion0();
private static native int getAvCodecVersion0();
@@ -600,14 +487,22 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
private native long createInstance0(boolean verbose);
private native void destroyInstance0(long moviePtr);
+ /**
+ * Issues {@link #updateAttributes(int, int, int, int, int, float, int, int, String, String)}
+ * and {@link #updateAttributes2(int, int, int, int, int, int, int, int, int, int)}.
+ */
private native void setStream0(long moviePtr, String url, int vid, int aid, int audioFrameCount);
+ private native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError);
private native int getVideoPTS0(long moviePtr);
private native int getAudioPTS0(long moviePtr);
private native Buffer getAudioBuffer0(long moviePtr, int plane);
- private native int readNextPacket0(long moviePtr, long procAddrGLTexSubImage2D, int texTarget, int texFmt, int texType);
+ /**
+ * @return resulting current PTS: audio < 0, video > 0, invalid == 0
+ */
+ private native int readNextPacket0(long moviePtr, int texTarget, int texFmt, int texType);
private native int seek0(long moviePtr, int position);
@@ -720,6 +615,5 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
return null;
}
}
-
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
index aef98fcde..a21bb40a8 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
@@ -33,7 +33,6 @@ import java.net.URL;
import javax.media.opengl.GL;
import javax.media.opengl.GLException;
-import javax.media.opengl.GLProfile;
import com.jogamp.opengl.util.texture.TextureSequence;
@@ -49,17 +48,17 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
static final boolean available;
static {
+ available = false;
+ /** FIXME!
// OMX binding is included in jogl_desktop and jogl_mobile
GLProfile.initSingleton();
- available = initIDs0();
+ available = initIDs0(); */
}
public static final boolean isAvailable() { return available; }
protected long moviePtr = 0;
- protected TextureSequence.TextureFrame lastTex = null;
-
public OMXGLMediaPlayer() {
super(TextureType.KHRImage, true);
if(!available) {
@@ -76,17 +75,15 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
}
@Override
- protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) {
- final EGLTextureFrame eglTex = (EGLTextureFrame) super.createTexImage(gl, idx, tex);
- _setStreamEGLImageTexture2D(moviePtr, idx, tex[idx], eglTex.getImage(), eglTex.getSync());
- lastTex = eglTex;
+ protected TextureSequence.TextureFrame createTexImage(GL gl, int texName) {
+ final EGLTextureFrame eglTex = (EGLTextureFrame) super.createTexImage(gl, texName);
+ _setStreamEGLImageTexture2D(moviePtr, texName, eglTex.getImage(), eglTex.getSync());
return eglTex;
}
@Override
- protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) {
- lastTex = null;
- super.destroyTexImage(gl, imgTex);
+ protected void destroyTexFrame(GL gl, TextureSequence.TextureFrame imgTex) {
+ super.destroyTexFrame(gl, imgTex);
}
@Override
@@ -99,7 +96,7 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
}
@Override
- protected void initGLStreamImpl(GL gl, int[] texNames) throws IOException {
+ protected void initGLStreamImpl(GL gl) throws IOException {
if(0==moviePtr) {
throw new GLException("OMX native instance null");
}
@@ -119,6 +116,10 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
protected int getCurrentPositionImpl() {
return 0!=moviePtr ? _getCurrentPosition(moviePtr) : 0;
}
+ @Override
+ protected int getAudioPTSImpl() {
+ return getCurrentPositionImpl();
+ }
@Override
protected boolean setPlaySpeedImpl(float rate) {
@@ -168,24 +169,23 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
}
@Override
- protected TextureSequence.TextureFrame getLastTextureImpl() {
- return lastTex;
- }
-
- @Override
- protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) {
+ protected boolean getNextTextureImpl(GL gl, TextureFrame nextFrame, boolean blocking) {
if(0==moviePtr) {
throw new GLException("OMX native instance null");
}
final int nextTex = _getNextTextureID(moviePtr, blocking);
if(0 < nextTex) {
- final TextureSequence.TextureFrame eglImgTex = texFrameMap.get(new Integer(_getNextTextureID(moviePtr, blocking)));
+ /* FIXME
+ final TextureSequence.TextureFrame eglImgTex =
+ texFrameMap.get(new Integer(_getNextTextureID(moviePtr, blocking)));
if(null!=eglImgTex) {
lastTex = eglImgTex;
- }
+ } */
}
- return lastTex;
+ return true;
}
+ @Override
+ protected void syncFrame2Audio(TextureFrame frame) { }
private String replaceAll(String orig, String search, String repl) {
String dest=null;
@@ -216,7 +216,7 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
private native void _setStream(long moviePtr, int textureNum, String path);
private native void _activateStream(long moviePtr);
- private native void _setStreamEGLImageTexture2D(long moviePtr, int i, int tex, long image, long sync);
+ private native void _setStreamEGLImageTexture2D(long moviePtr, int tex, long image, long sync);
private native int _seek(long moviePtr, int position);
private native void _setPlaySpeed(long moviePtr, float rate);
private native void _play(long moviePtr);