/**
 * Copyright 2012 JogAmp Community. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 *    1. Redistributions of source code must retain the above copyright notice, this list of
 *       conditions and the following disclaimer.
 *
 *    2. Redistributions in binary form must reproduce the above copyright notice, this list
 *       of conditions and the following disclaimer in the documentation and/or other materials
 *       provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * The views and conclusions contained in the software and documentation are those of the
 * authors and should not be interpreted as representing official policies, either expressed
 * or implied, of JogAmp Community.
 */

package jogamp.opengl.util.av.impl;

import java.io.IOException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.security.AccessController;
import java.security.PrivilegedAction;

import javax.media.opengl.GL;
import javax.media.opengl.GL2ES2;
import javax.media.opengl.GLException;

import com.jogamp.common.util.VersionNumber;
import com.jogamp.gluegen.runtime.ProcAddressTable;
import com.jogamp.opengl.util.GLPixelStorageModes;
import com.jogamp.opengl.util.av.AudioSink;
import com.jogamp.opengl.util.av.AudioSinkFactory;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureSequence;

import jogamp.opengl.GLContextImpl;
import jogamp.opengl.util.av.EGLMediaPlayerImpl;
import jogamp.opengl.util.av.SyncedRingbuffer;

/***
 * Implementation utilizes Libav or FFmpeg, which are ubiquitously
 * available and usually pre-installed on Unix platforms. For legal
 * reasons we cannot deploy binaries of them, since they contain patented codecs.
 * Besides the default BSD/Linux/.. repositories and installations,
 * pre-compiled binaries can be found at the locations listed below.
 * <p>
 * Implements YUV420P to RGB fragment shader conversion
 * as well as the usual packed RGB formats.
 * The decoded video frame is written directly into an OpenGL texture
 * on the GPU in its native format. A custom fragment shader converts
 * the native pixel format to a usable RGB format if required.
 * Hence only one copy is performed before the picture is bloated
 * from YUV to RGB, for example.
 * </p>
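 * <p>
 * For YUV420P, all three planes are stuffed into one texture; a sketch of the
 * layout produced by <code>updateAttributes2(..)</code> and sampled by the
 * generated fragment shader, with <code>w x h</code> denoting the video size:
 * <pre>
 *    +----------------+--------+
 *    |                |   U    | h/2
 *    |   Y  (w x h)   +--------+
 *    |                |   V    | h/2
 *    +----------------+--------+
 *           w            w/2         =  texWidth ~ 3/2 w,  texHeight = h
 * </pre>
 * </p>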

 * <p>
 * Utilizes a slim dynamic and native binding to the Lib_av
 * libraries:
 * <ul>
 *   <li>libavutil</li>
 *   <li>libavformat</li>
 *   <li>libavcodec</li>
 * </ul>
 * </p>
 * <p>
 * http://libav.org/
 * </p>
 * <p>
 * Check tag 'FIXME: Add more planar formats !'
 * here and in the corresponding native code
 * <code>jogl/src/jogl/native/ffmpeg/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c</code>
 * </p>
 * <p>
 * TODO:
 * </p>
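 * <p>
 * Hypothetical usage sketch via the <code>GLMediaPlayer</code> front-end; the
 * factory and stream-init calls below are assumptions and may differ per JOGL
 * version, only <code>FFMPEGMediaPlayer.isAvailable()</code> is defined here:
 * <pre>
 *    if( FFMPEGMediaPlayer.isAvailable() ) {
 *        GLMediaPlayer mp = GLMediaPlayerFactory.create();   // assumed factory call
 *        mp.initGLStream(gl, urlConnection);                 // assumed init signature
 *        mp.start();
 *        TextureSequence.TextureFrame frame = mp.getNextTexture(gl, true);
 *        // bind frame.getTexture() and sample it using the generated
 *        // texture-lookup function (see getTextureLookupFragmentShaderImpl())
 *    }
 * </pre>
 * </p>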

 * <p>
 * Pre-compiled Libav / FFmpeg packages:
 * </p>
 */
public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {

    // Count of zeroed buffers to return before switching to real sample provider
    private static final int TEMP_BUFFER_COUNT = 20;

    // Instance data
    public static final VersionNumber avUtilVersion;
    public static final VersionNumber avFormatVersion;
    public static final VersionNumber avCodecVersion;
    static final boolean available;

    static {
        if(FFMPEGDynamicLibraryBundleInfo.initSingleton()) {
            avUtilVersion = getAVVersion(getAvUtilVersion0());
            avFormatVersion = getAVVersion(getAvFormatVersion0());
            avCodecVersion = getAVVersion(getAvCodecVersion0());
            System.err.println("LIB_AV Util : "+avUtilVersion);
            System.err.println("LIB_AV Format: "+avFormatVersion);
            System.err.println("LIB_AV Codec : "+avCodecVersion);
            initIDs0();
            available = true;
        } else {
            avUtilVersion = null;
            avFormatVersion = null;
            avCodecVersion = null;
            available = false;
        }
    }

    public static final boolean isAvailable() { return available; }

    private static VersionNumber getAVVersion(int vers) {
        return new VersionNumber( ( vers >> 16 ) & 0xFF,
                                  ( vers >>  8 ) & 0xFF,
                                  ( vers >>  0 ) & 0xFF );
    }
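
    // Note: the native getAv*Version0() calls return the libav/FFmpeg version packed
    // as (major << 16) | (minor << 8) | micro, which getAVVersion(..) above unpacks.
    // A hypothetical value for illustration: 0x350664 -> major 53, minor 6, micro 100.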

    //
    // Video
    //

    protected long moviePtr = 0;
    protected long procAddrGLTexSubImage2D = 0;
    protected EGLMediaPlayerImpl.EGLTextureFrame lastTex = null;
    protected GLPixelStorageModes psm;
    protected PixelFormat vPixelFmt = null;
    protected int vPlanes = 0;
    protected int vBitsPerPixel = 0;
    protected int vBytesPerPixelPerPlane = 0;
    protected int[] vLinesize = { 0, 0, 0 }; // per plane
    protected int[] vTexWidth = { 0, 0, 0 }; // per plane
    protected int texWidth, texHeight; // overall (stuffing planes in one texture)
    protected ByteBuffer texCopy;

    //
    // Audio
    //

    protected final int AudioFrameCount = 8;
    protected final AudioSink audioSink;
    protected final int maxAvailableAudio;
    protected AudioSink.AudioDataFormat chosenAudioFormat;
    protected final SyncedRingbuffer<AudioSink.AudioFrame> audioFramesBuffer =
        new SyncedRingbuffer<AudioSink.AudioFrame>(new AudioSink.AudioFrame[AudioFrameCount], false /* full */);

    public FFMPEGMediaPlayer() {
        super(TextureType.GL, false);
        if(!available) {
            throw new RuntimeException("FFMPEGMediaPlayer not available");
        }
        setTextureCount(1);
        moviePtr = createInstance0(DEBUG);
        if(0==moviePtr) {
            throw new GLException("Couldn't create FFMPEGInstance");
        }
        psm = new GLPixelStorageModes();
        audioSink = AudioSinkFactory.createDefault();
        maxAvailableAudio = audioSink.getQueuedByteCount();
    }

    @Override
    protected TextureSequence.TextureFrame createTexImage(GL gl, int idx, int[] tex) {
        if(TextureType.GL == texType) {
            final Texture texture = super.createTexImageImpl(gl, idx, tex, texWidth, texHeight, true);
            lastTex = new EGLTextureFrame(null, texture, 0, 0);
        } else {
            throw new InternalError("n/a");
        }
        return lastTex;
    }

    @Override
    protected void destroyTexImage(GL gl, TextureSequence.TextureFrame imgTex) {
        lastTex = null;
        super.destroyTexImage(gl, imgTex);
    }

    @Override
    protected void destroyImpl(GL gl) {
        if (moviePtr != 0) {
            destroyInstance0(moviePtr);
            moviePtr = 0;
        }
    }

    @Override
    protected void initGLStreamImpl(GL gl, int[] texNames) throws IOException {
        if(0==moviePtr) {
            throw new GLException("FFMPEG native instance null");
        }
        final String urlS = urlConn.getURL().toExternalForm();
        chosenAudioFormat = audioSink.initSink(audioSink.getPreferredFormat(), AudioFrameCount);
        System.err.println("setURL: p1 "+this);
        setStream0(moviePtr, urlS, -1, -1, AudioFrameCount);
        System.err.println("setURL: p2 "+this);

        int tf, tif = GL.GL_RGBA; // texture format and internal format
        switch(vBytesPerPixelPerPlane) {
            case 1:
                if( gl.isGL3ES3() ) {
                    tf = GL2ES2.GL_RED;   tif = GL2ES2.GL_RED;   // RED is supported on ES3 and >= GL3 [core]; ALPHA is deprecated on core!
                } else {
                    tf = GL2ES2.GL_ALPHA; tif = GL2ES2.GL_ALPHA; // ALPHA is supported on ES2 and GL2
                }
                break;
            case 3: tf = GL2ES2.GL_RGB;  tif = GL.GL_RGB;  break;
            case 4: tf = GL2ES2.GL_RGBA; tif = GL.GL_RGBA; break;
            default: throw new RuntimeException("Unsupported bytes-per-pixel / plane "+vBytesPerPixelPerPlane);
        }
        setTextureFormat(tif, tf);
        setTextureType(GL.GL_UNSIGNED_BYTE);

        final GLContextImpl ctx = (GLContextImpl)gl.getContext();
        final ProcAddressTable pt = ctx.getGLProcAddressTable();
        procAddrGLTexSubImage2D = getAddressFor(pt, "glTexSubImage2D");
        if( 0 == procAddrGLTexSubImage2D ) {
            throw new InternalError("glTexSubImage2D n/a in ProcAddressTable: "+pt.getClass().getName()+" of "+ctx.getGLVersion());
        }
    }

    /**
     * Returns the ProcAddressTable's field value for the given function name,
     * or 0 if the function is n/a, catching the IllegalArgumentException.
     */
    private final long getAddressFor(final ProcAddressTable table, final String functionName) {
        return AccessController.doPrivileged(new PrivilegedAction<Long>() {
            public Long run() {
                try {
                    return Long.valueOf( table.getAddressFor(functionName) );
                } catch (IllegalArgumentException iae) {
                    return Long.valueOf(0);
                }
            }
        } ).longValue();
    }
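
    // Note: the glTexSubImage2D entry point resolved in initGLStreamImpl(..) via
    // getAddressFor(..) is handed down to the native readNextPacket0(..) call
    // (see getNextTextureImpl(..)), allowing the native decoder to upload the
    // decoded frame directly into the currently bound texture, i.e. without an
    // extra Java-side copy of the pixel data.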

    private final void pushSound(ByteBuffer sampleData, int data_size, int audio_pts) {
        if( audioPusher != null && audioPusher.isRunning() ) {
            try {
                audioFramesBuffer.putBlocking(new AudioSink.AudioFrame(sampleData, data_size, audio_pts));
            } catch (InterruptedException e) {
                e.printStackTrace(); // oops
            }
            if( null != audioPusher ) {
                audioPusher.pushOne();
            }
        }
    }

    class AudioPusher extends Thread {
        volatile boolean shallStop = false;
        volatile boolean isBlocked = false;

        AudioPusher() {
            setDaemon(true);
        }
        public void requestStop() {
            shallStop = true;
            if( isBlocked ) {
                // interrupt();
            }
        }
        public boolean isRunning() { return !shallStop; }

        public void run() {
            setName(getName()+"-AudioPusher_"+AudioPusherInstanceId);
            AudioPusherInstanceId++;
            while( !shallStop ) {
                pushOne();
            }
        }
        public void pushOne() {
            final AudioSink.AudioFrame audioFrame;
            try {
                isBlocked = true;
                audioFrame = audioFramesBuffer.getBlocking(true /* clearRef */);
            } catch (InterruptedException e) {
                if( !shallStop ) {
                    e.printStackTrace(); // oops
                }
                shallStop = true;
                return;
            }
            isBlocked = false;

            if( null != audioFrame ) {
                // poor man's audio sync ..
                final long now = System.currentTimeMillis();
                final long now_d = now - lastAudioTime;
                final long pts_d = audioFrame.audioPTS - lastAudioPTS;
                final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
                final boolean sleep = dt > audio_dt_d && !shallStop;
                final long sleepP = dt - ( audio_dt_d / 2 );
                if(DEBUG) {
                    final int qAT = audioSink.getQueuedTime();
                    System.err.println("s: pts-a "+audioFrame.audioPTS+", qAT "+qAT+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt+", sleep "+sleep+", sleepP "+sleepP+" ms");
                }
                if( sleep ) {
                    try {
                        isBlocked = true;
                        Thread.sleep( sleepP );
                    } catch (InterruptedException e) {
                        e.printStackTrace(); // oops
                    }
                    isBlocked = false;
                    lastAudioTime = System.currentTimeMillis();
                } else {
                    lastAudioTime = now;
                }
                if( !shallStop && audioSink.isDataAvailable(audioFrame.dataSize) ) {
                    audioSink.writeData(audioFrame);
                    lastAudioPTS = audioFrame.audioPTS;
                }
            }
        }
    }
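
    // Worked example (assumed numbers) for the sleep computation in pushOne() above:
    // at play speed 1.0, if the frame PTS advanced pts_d = 900 ms while only
    // now_d = 400 ms of wall-clock time passed, then dt = 500 ms > audio_dt_d (400 ms),
    // hence the pusher sleeps sleepP = dt - audio_dt_d/2 = 300 ms before queueing the
    // frame, roughly bounding how far audio is queued ahead of its presentation time.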
    static int AudioPusherInstanceId = 0;
    private AudioPusher audioPusher = null;

    private final void stopAudioPusher() {
        if( null != audioPusher ) {
            audioPusher.requestStop();
            audioPusher = null;
        }
        audioFramesBuffer.clear(true);
    }
    private final void startAudioPusher() {
        stopAudioPusher();
        audioPusher = new AudioPusher();
        // audioPusher.start();
    }

    private void updateAttributes2(int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
                                   int lSz0, int lSz1, int lSz2,
                                   int tWd0, int tWd1, int tWd2) {
        vPixelFmt = PixelFormat.valueOf(pixFmt);
        vPlanes = planes;
        vBitsPerPixel = bitsPerPixel;
        vBytesPerPixelPerPlane = bytesPerPixelPerPlane;
        vLinesize[0] = lSz0; vLinesize[1] = lSz1; vLinesize[2] = lSz2;
        vTexWidth[0] = tWd0; vTexWidth[1] = tWd1; vTexWidth[2] = tWd2;

        switch(vPixelFmt) {
            case YUV420P:
                // YUV420P: Adding U+V on right side of fixed height texture,
                //          since width is already aligned by decoder.
                // Y = w*h, U = w/2 * h/2, V = w/2 * h/2
                //   w*h + 2 * ( w/2 * h/2 )
                // = w*h + w*h/2
                // = 3/2 * w * h, i.e. a 3/2*w x h texture
                texWidth = vTexWidth[0] + vTexWidth[1]; texHeight = height;
                break;
            // case PIX_FMT_YUYV422:
            case RGB24:
            case BGR24:
            case ARGB:
            case RGBA:
            case ABGR:
            case BGRA:
                texWidth = vTexWidth[0]; texHeight = height;
                break;
            default: // FIXME: Add more planar formats !
                throw new RuntimeException("Unsupported pixelformat: "+vPixelFmt);
        }
        if(DEBUG) {
            System.err.println("XXX0: fmt "+vPixelFmt+", planes "+vPlanes+", bpp "+vBitsPerPixel+"/"+vBytesPerPixelPerPlane);
            for(int i=0; i<3; i++) {
                System.err.println("XXX0 "+i+": "+vTexWidth[i]+"/"+vLinesize[i]);
            }
            System.err.println("XXX0 total tex "+texWidth+"x"+texHeight);
        }
    }

    /**
     * {@inheritDoc}
     *
     * If this implementation generates a specialized shader,
     * it allows the user to override the default function name <code>ffmpegTexture2D</code>.
     * Otherwise the call is delegated to its super class.
     */
    @Override
    public String getTextureLookupFunctionName(String desiredFuncName) throws IllegalStateException {
        if(State.Uninitialized == state) {
            throw new IllegalStateException("Instance not initialized: "+this);
        }
        if(PixelFormat.YUV420P == vPixelFmt) {
            if(null != desiredFuncName && desiredFuncName.length()>0) {
                textureLookupFunctionName = desiredFuncName;
            }
            return textureLookupFunctionName;
        }
        return super.getTextureLookupFunctionName(desiredFuncName);
    }
    private String textureLookupFunctionName = "ffmpegTexture2D";

    /**
     * {@inheritDoc}
     *
     * Depending on the pixel format, a specific conversion shader is being created,
     * e.g. YUV420P to RGB. Otherwise the call is delegated to its super class.
     */
    @Override
    public String getTextureLookupFragmentShaderImpl() throws IllegalStateException {
        if(State.Uninitialized == state) {
            throw new IllegalStateException("Instance not initialized: "+this);
        }
        final float tc_w_1 = (float)getWidth() / (float)texWidth;
        switch(vPixelFmt) {
            case YUV420P:
                return
                    "vec4 "+textureLookupFunctionName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
                    "  vec2 u_off = vec2("+tc_w_1+", 0.0);\n"+
                    "  vec2 v_off = vec2("+tc_w_1+", 0.5);\n"+
                    "  vec2 tc_half = texCoord*0.5;\n"+
                    "  float y,u,v,r,g,b;\n"+
                    "  y = texture2D(image, texCoord).r;\n"+
                    "  u = texture2D(image, u_off+tc_half).r;\n"+
                    "  v = texture2D(image, v_off+tc_half).r;\n"+
                    "  y = 1.1643*(y-0.0625);\n"+
                    "  u = u-0.5;\n"+
                    "  v = v-0.5;\n"+
                    "  r = y+1.5958*v;\n"+
                    "  g = y-0.39173*u-0.81290*v;\n"+
                    "  b = y+2.017*u;\n"+
                    "  return vec4(r, g, b, 1);\n"+
                    "}\n"
                ;
            default: // FIXME: Add more planar formats !
                return super.getTextureLookupFragmentShaderImpl();
        }
    }
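
    // The constants in the YUV420P shader above correspond to the ITU-R BT.601
    // limited-range YCbCr -> RGB transform:
    //   y' = 1.1643 * (y - 0.0625)           // expand luma from [16/255..235/255] to [0..1]
    //   r  = y' + 1.5958  * v                // v = Cr - 0.5
    //   g  = y' - 0.39173 * u - 0.81290 * v  // u = Cb - 0.5
    //   b  = y' + 2.017   * u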

    @Override
    protected synchronized int getCurrentPositionImpl() {
        return 0!=moviePtr ? getVideoPTS0(moviePtr) : 0;
    }

    @Override
    protected synchronized boolean setPlaySpeedImpl(float rate) {
        return true;
    }

    @Override
    public synchronized boolean startImpl() {
        if(0==moviePtr) {
            return false;
        }
        startAudioPusher();
        return true;
    }

    /** @return time position after issuing the command */
    @Override
    public synchronized boolean pauseImpl() {
        if(0==moviePtr) {
            return false;
        }
        stopAudioPusher();
        return true;
    }

    /** @return time position after issuing the command */
    @Override
    public synchronized boolean stopImpl() {
        if(0==moviePtr) {
            return false;
        }
        stopAudioPusher();
        return true;
    }

    /** @return time position after issuing the command */
    @Override
    protected synchronized int seekImpl(int msec) {
        if(0==moviePtr) {
            throw new GLException("FFMPEG native instance null");
        }
        stopAudioPusher();
        int pts0 = getVideoPTS0(moviePtr);
        int pts1 = seek0(moviePtr, msec);
        System.err.println("Seek: "+pts0+" -> "+msec+" : "+pts1);
        lastAudioPTS = pts1;
        lastVideoPTS = pts1;
        startAudioPusher();
        return pts1;
    }

    @Override
    protected TextureSequence.TextureFrame getLastTextureImpl() {
        return lastTex;
    }

    private long lastAudioTime = 0;
    private int  lastAudioPTS = 0;
    private static final int audio_dt_d = 400;
    private long lastVideoTime = 0;
    private int  lastVideoPTS = 0;
    private static final int video_dt_d = 9;

    @Override
    protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) {
        if(0==moviePtr) {
            throw new GLException("FFMPEG native instance null");
        }
        if(null != lastTex) {
            psm.setUnpackAlignment(gl, 1); // RGBA ? 4 : 1
            try {
                final Texture tex = lastTex.getTexture();
                gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit());
                tex.enable(gl);
                tex.bind(gl);

                try {
                    audioFramesBuffer.waitForFreeSlots(2);
                } catch (InterruptedException e) {
                    e.printStackTrace(); // oops
                }

                /* try decode 10 packets to find one containing video (res == 2) */
                int res = 0;
                int retry = 10;
                while(res!=2 && retry >= 0) {
                    res = readNextPacket0(moviePtr, procAddrGLTexSubImage2D, textureTarget, textureFormat, textureType);
                    retry--;
                }
            } finally {
                psm.restore(gl);
            }
            final int pts = getVideoPTS0(moviePtr); // this frame
            if(blocking) {
                // poor man's video sync .. TODO: off-thread 'readNextPacket0(..)' on shared GLContext and multiple textures/units!
                final long now = System.currentTimeMillis();
                // Try to sync video to audio
                final long now_d = now - lastAudioTime;
                final long pts_d = pts - lastAudioPTS - 444; /* hack: 444 == play video 444 ms ahead of audio */
                final long dt = Math.min(47, (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ) ;
                //final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
                final boolean sleep = dt>video_dt_d && dt<1000 && audioSink.getQueuedByteCount()