From 5911b729b69b7fb327e441b33f22eb1ef31a03df Mon Sep 17 00:00:00 2001
From: Sven Gothel
Date: Mon, 2 Apr 2012 08:36:38 +0200
Subject: Initial commit for AudioVideo (com.jogamp.opengl.av) rework,
 introducing Android API 14 MediaPlayer impl of GLMediaPlayer.

Android API 14 MediaPlayer allows usage of OMX AL direct decode to texture
via libstagefright (OMX AL usage included).

Status: Untested, not working - Need to fix native OMX IL (stream detect and
split) and/or GStreamer implementation.
---
 .../jogamp/opengl/av/EGLMediaPlayerImpl.java | 109 ++++++++
 .../jogamp/opengl/av/GLMediaPlayerImpl.java  | 307 +++++++++++++++++++++
 2 files changed, 416 insertions(+)
 create mode 100644 src/jogl/classes/jogamp/opengl/av/EGLMediaPlayerImpl.java
 create mode 100644 src/jogl/classes/jogamp/opengl/av/GLMediaPlayerImpl.java

(limited to 'src/jogl/classes/jogamp/opengl/av')

diff --git a/src/jogl/classes/jogamp/opengl/av/EGLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/av/EGLMediaPlayerImpl.java
new file mode 100644
index 000000000..ef3d5072c
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/av/EGLMediaPlayerImpl.java
@@ -0,0 +1,109 @@
+package jogamp.opengl.av;
+
+import javax.media.opengl.GLContext;
+
+import com.jogamp.opengl.util.texture.Texture;
+
+import jogamp.opengl.egl.EGL;
+import jogamp.opengl.egl.EGLContext;
+import jogamp.opengl.egl.EGLDrawable;
+import jogamp.opengl.egl.EGLExt;
+
+public abstract class EGLMediaPlayerImpl extends GLMediaPlayerImpl {
+    TextureType texType;
+    boolean useKHRSync;
+
+    public enum TextureType {
+        GL(0), KHRImage(1);
+
+        public final int id;
+
+        TextureType(int id){
+            this.id = id;
+        }
+    }
+
+    public static class EGLTextureFrame extends TextureFrame {
+
+        public EGLTextureFrame(Texture t, long khrImage, long khrSync) {
+            super(t);
+            this.image = khrImage;
+            this.sync = khrSync;
+        }
+
+        public final long getImage() { return image; }
+        public final long getSync() { return sync; }
+
+        public String toString() {
+            return "EGLTextureFrame[" + texture + ", img "+ image + ", sync "+ sync+"]";
+        }
+        protected final long image;
+        protected final long sync;
+    }
+
+
+    protected EGLMediaPlayerImpl() {
+        this(TextureType.GL, false);
+    }
+
+    protected EGLMediaPlayerImpl(TextureType texType, boolean useKHRSync) {
+        super();
+        this.texType = texType;
+        this.useKHRSync = useKHRSync;
+    }
+
+    @Override
+    protected TextureFrame createTexImage(GLContext ctx, int idx, int[] tex) {
+        final Texture texture = super.createTexImageImpl(ctx, idx, tex, true);
+        final long image;
+        final long sync;
+
+        final EGLContext eglCtx = (EGLContext) ctx;
+        final EGLExt eglExt = eglCtx.getEGLExt();
+        final EGLDrawable eglDrawable = (EGLDrawable) eglCtx.getGLDrawable();
+        int[] tmp = new int[1];
+
+        if(TextureType.KHRImage == texType) {
+            // create EGLImage from texture
+            tmp[0] = EGL.EGL_NONE;
+            image = eglExt.eglCreateImageKHR( eglDrawable.getDisplay(), eglCtx.getHandle(),
+                                              EGLExt.EGL_GL_TEXTURE_2D_KHR,
+                                              tex[idx], tmp, 0);
+            if (0==image) {
+                throw new RuntimeException("EGLImage creation failed: "+EGL.eglGetError()+", ctx "+eglCtx+", tex "+tex[idx]+", err "+toHexString(EGL.eglGetError()));
+            }
+        } else {
+            image = 0;
+        }
+
+        if(useKHRSync) {
+            // Create sync object so that we can be sure that gl has finished
+            // rendering the EGLImage texture before we tell OpenMAX to fill
+            // it with a new frame.
+            tmp[0] = EGL.EGL_NONE;
+            sync = eglExt.eglCreateSyncKHR(eglDrawable.getDisplay(), EGLExt.EGL_SYNC_FENCE_KHR, tmp, 0);
+            if (0==sync) {
+                throw new RuntimeException("EGLSync creation failed: "+EGL.eglGetError()+", ctx "+eglCtx+", err "+toHexString(EGL.eglGetError()));
+            }
+        } else {
+            sync = 0;
+        }
+        return new EGLTextureFrame(texture, image, sync);
+    }
+
+    @Override
+    protected void destroyTexImage(GLContext ctx, TextureFrame imgTex) {
+        final EGLContext eglCtx = (EGLContext) ctx;
+        final EGLExt eglExt = eglCtx.getEGLExt();
+        final EGLDrawable eglDrawable = (EGLDrawable) eglCtx.getGLDrawable();
+        final EGLTextureFrame eglTex = (EGLTextureFrame) imgTex;
+
+        if(0!=eglTex.getImage()) {
+            eglExt.eglDestroyImageKHR(eglDrawable.getDisplay(), eglTex.getImage());
+        }
+        if(0!=eglTex.getSync()) {
+            eglExt.eglDestroySyncKHR(eglDrawable.getDisplay(), eglTex.getSync());
+        }
+        super.destroyTexImage(ctx, imgTex);
+    }
+}
diff --git a/src/jogl/classes/jogamp/opengl/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/av/GLMediaPlayerImpl.java
new file mode 100644
index 000000000..826bb6953
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/av/GLMediaPlayerImpl.java
@@ -0,0 +1,307 @@
+package jogamp.opengl.av;
+
+import java.io.IOException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+
+import javax.media.opengl.GL;
+import javax.media.opengl.GLContext;
+import javax.media.opengl.GLDrawable;
+import javax.media.opengl.GLES2;
+
+import com.jogamp.opengl.av.GLMediaPlayer;
+import com.jogamp.opengl.av.GLMediaEventListener;
+import com.jogamp.opengl.util.texture.Texture;
+
+/**
+ * After object creation an implementation may customize the behavior:
+ * <ul>
+ *   <li>{@link #setTextureCount(int)}</li>
+ *   <li>{@link #setTextureTarget(int)}</li>
+ * </ul>
+ * <p>
+ * See {@link GLMediaPlayer}.
+ * </p>
+ */
+public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
+
+    protected int textureCount;
+    protected int textureTarget;
+
+    private int sWidth = 0;
+    private int sHeight = 0;
+    protected URL url = null;
+
+    protected Texture texture = null;
+    protected float playSpeed = 1.0f;
+
+    /** Shall be set by the {@link #setStreamImpl()} method implementation. */
+    protected int width = 0;
+    /** Shall be set by the {@link #setStreamImpl()} method implementation. */
+    protected int height = 0;
+    /** Shall be set by the {@link #setStreamImpl()} method implementation. */
+    protected int fps = 0;
+    /** Shall be set by the {@link #setStreamImpl()} method implementation. */
+    protected long bps = 0;
+    /** Shall be set by the {@link #setStreamImpl()} method implementation. */
+    protected long totalFrames = 0;
+    /** Shall be set by the {@link #setStreamImpl()} method implementation. */
+    protected String acodec = null;
+    /** Shall be set by the {@link #setStreamImpl()} method implementation. */
+    protected String vcodec = null;
+
+    protected long frameNumber = 0;
+
+    private TextureFrame[] texFrames = null;
+    protected HashMap texFrameMap = new HashMap();
+    private ArrayList<GLMediaEventListener> eventListeners = new ArrayList<GLMediaEventListener>();
+
+    protected GLMediaPlayerImpl() {
+        this.textureCount=3;
+        this.textureTarget=GL.GL_TEXTURE_2D;
+    }
+
+    protected final void setTextureCount(int textureCount) {
+        this.textureCount=textureCount;
+    }
+    protected final void setTextureTarget(int textureTarget) {
+        this.textureTarget=textureTarget;
+    }
+
+    @Override
+    public final void setStream(GL gl, URL url) throws IOException {
+        this.url = url;
+        if (this.url == null) {
+            System.out.println("setURL (null)");
+            stop();
+            return;
+        }
+        setStreamImpl();
+        init(gl);
+    }
+
+    /**
+     * Implementation shall set the following set of data:
+     * @see #width
+     * @see #height
+     * @see #fps
+     * @see #bps
+     * @see #totalFrames
+     * @see #acodec
+     * @see #vcodec
+     */
+    protected abstract void setStreamImpl() throws IOException;
+
+    protected final void init(GL gl) {
+        final GLContext ctx = gl.getContext();
+        if(!ctx.isCurrent()) {
+            throw new RuntimeException("Not current: "+ctx);
+        }
+
+        final GLDrawable drawable = ctx.getGLDrawable();
+        sWidth = drawable.getWidth();
+        sHeight = drawable.getHeight();
+        System.out.println("surface size: "+sWidth+"x"+sHeight);
+        System.out.println("Platform Extensions : "+ctx.getPlatformExtensionsString());
+
+        if(null!=texFrames) {
+            removeAllImageTextures(ctx);
+        } else {
+            texFrames = new TextureFrame[textureCount];
+        }
+
+        final int[] tex = new int[textureCount];
+        {
+            gl.glGenTextures(textureCount, tex, 0);
+            final int err = gl.glGetError();
+            if( GL.GL_NO_ERROR != err ) {
+                throw new RuntimeException("TextureNames creation failed (num: "+textureCount+"): err "+toHexString(err));
+            }
+        }
+
+        for(int i=0; i<textureCount; i++) {
+            texFrames[i] = createTexImage(ctx, i, tex);
+        }
+    }
+
+    protected abstract TextureFrame createTexImage(GLContext ctx, int idx, int[] tex);
+
+    protected Texture createTexImageImpl(GLContext ctx, int idx, int[] tex, boolean mustFlipVertically) {
+        final GL gl = ctx.getGL();
+        if( 0 > tex[idx] ) {
+            throw new RuntimeException("TextureName "+toHexString(tex[idx])+" invalid.");
+        }
+        gl.glBindTexture(textureTarget, tex[idx]);
+        {
+            final int err = gl.glGetError();
+            if( GL.GL_NO_ERROR != err ) {
+                throw new RuntimeException("Couldn't bind textureName "+toHexString(tex[idx])+" to 2D target, err "+toHexString(err));
+            }
+        }
+
+        // create space for buffer with a texture
+        gl.glTexImage2D(
+                textureTarget,    // target
+                0,                // level
+                GL.GL_RGBA,       // internal format
+                width,            // width
+                height,           // height
+                0,                // border
+                GL.GL_RGBA,       // format
+                GL.GL_UNSIGNED_BYTE, // type
+                null);            // pixels -- will be provided later
+        {
+            final int err = gl.glGetError();
+            if( GL.GL_NO_ERROR != err ) {
+                throw new RuntimeException("Couldn't create TexImage2D RGBA "+width+"x"+height+", err "+toHexString(err));
+            }
+        }
+        gl.glTexParameteri(textureTarget, GL.GL_TEXTURE_MIN_FILTER, GL.GL_NEAREST);
+        gl.glTexParameteri(textureTarget, GL.GL_TEXTURE_MAG_FILTER, GL.GL_NEAREST);
+        // Clamp to edge is only option.
+        gl.glTexParameteri(textureTarget, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP_TO_EDGE);
+        gl.glTexParameteri(textureTarget, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP_TO_EDGE);
+
+        return com.jogamp.opengl.util.texture.TextureIO.newTexture(tex[idx],
+                textureTarget,
+                width, height,
+                width, height,
+                mustFlipVertically);
+    }
+
+    protected void destroyTexImage(GLContext ctx, TextureFrame imgTex) {
+        imgTex.getTexture().destroy(ctx.getGL());
+    }
+
+    protected void removeAllImageTextures(GLContext ctx) {
+        texture = null;
+        for(int i=0; i<textureCount; i++) {
+            destroyTexImage(ctx, texFrames[i]);
+            texFrames[i] = null;
+        }
+    }
+
+    protected void attributesUpdated(int event_mask) {
+        synchronized(eventListenersLock) {
+            for(Iterator<GLMediaEventListener> i = eventListeners.iterator(); i.hasNext(); ) {
+                i.next().attributesChanges(this, event_mask);
+            }
+        }
+    }
+    protected void newFrameAvailable(TextureFrame frame) {
+        synchronized(eventListenersLock) {
+            for(Iterator<GLMediaEventListener> i = eventListeners.iterator(); i.hasNext(); ) {
+                i.next().newFrameAvailable(this, frame);
+            }
+        }
+    }
+
+    @Override
+    public synchronized float getPlaySpeed() {
+        return playSpeed;
+    }
+
+    @Override
+    public synchronized Texture getLastTextureID() {
+        return texture;
+    }
+
+    @Override
+    public synchronized void destroy(GL gl) {
+        destroyImpl(gl);
+        removeAllImageTextures(gl.getContext());
+    }
+    protected abstract void destroyImpl(GL gl);
+
+    @Override
+    public synchronized URL getURL() {
+        return url;
+    }
+
+    @Override
+    public synchronized String getVideoCodec() {
+        return vcodec;
+    }
+
+    @Override
+    public synchronized String getAudioCodec() {
+        return acodec;
+    }
+
+    @Override
+    public synchronized long getTotalFrames() {
+        return totalFrames;
+    }
+
+    @Override
+    public synchronized long getBitrate() {
+        return bps;
+    }
+
+    @Override
+    public synchronized int getFramerate() {
+        return fps;
+    }
+
+    @Override
+    public synchronized int getWidth() {
+        return width;
+    }
+
+    @Override
+    public synchronized int getHeight() {
+        return height;
+    }
+
+    @Override
+    public synchronized String toString() {
+        return "GLMediaPlayer [ stream [ video [ "+vcodec+", "+width+"x"+height+", "+fps+"fps, "+bps+"bsp, "+totalFrames+"f ] ] ]";
+    }
+
+    @Override
+    public void addEventListener(GLMediaEventListener l) {
+        if(l == null) {
+            return;
+        }
+        synchronized(eventListenersLock) {
+            eventListeners.add(l);
+        }
+    }
+
+    @Override
+    public void removeEventListener(GLMediaEventListener l) {
+        if (l == null) {
+            return;
+        }
+        synchronized(eventListenersLock) {
+            eventListeners.remove(l);
+        }
+    }
+
+    @Override
+    public synchronized GLMediaEventListener[] getEventListeners() {
+        synchronized(eventListenersLock) {
+            return eventListeners.toArray(new GLMediaEventListener[eventListeners.size()]);
+        }
+    }
+
+    private Object eventListenersLock = new Object();
+
+    protected static final String toHexString(long v) {
+        return "0x"+Long.toHexString(v);
+    }
+    protected static final String toHexString(int v) {
+        return "0x"+Integer.toHexString(v);
+    }
+
+}
\ No newline at end of file