/** * Copyright 2012 JogAmp Community. All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of * conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list * of conditions and the following disclaimer in the documentation and/or other materials * provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * The views and conclusions contained in the software and documentation are those of the * authors and should not be interpreted as representing official policies, either expressed * or implied, of JogAmp Community. 
*/ package jogamp.opengl.util.av.impl; import java.io.IOException; import java.nio.Buffer; import java.nio.ByteBuffer; import java.security.AccessController; import java.security.PrivilegedAction; import javax.media.opengl.GL; import javax.media.opengl.GL2ES2; import javax.media.opengl.GLException; import com.jogamp.common.util.VersionNumber; import com.jogamp.gluegen.runtime.ProcAddressTable; import com.jogamp.opengl.util.GLPixelStorageModes; import com.jogamp.opengl.util.av.AudioSink; import com.jogamp.opengl.util.av.AudioSinkFactory; import com.jogamp.opengl.util.texture.Texture; import com.jogamp.opengl.util.texture.TextureSequence; import jogamp.opengl.GLContextImpl; import jogamp.opengl.util.av.EGLMediaPlayerImpl; import jogamp.opengl.util.av.SyncedRingbuffer; /*** * Implementation utilizes Libav * or FFmpeg which is ubiquitous * available and usually pre-installed on Unix platforms. Due to legal * reasons we cannot deploy binaries of it, which contains patented codecs. * Besides the default BSD/Linux/.. repositories and installations, * precompiled binaries can be found at the listed location below. *
* Implements YUV420P to RGB fragment shader conversion * and the usual packed RGB formats. * The decoded video frame is written directly into an OpenGL texture * on the GPU in it's native format. A custom fragment shader converts * the native pixelformat to a usable RGB format if required. * Hence only 1 copy is required before bloating the picture * from YUV to RGB, for example. *
** Utilizes a slim dynamic and native binding to the Lib_av * libraries: *
* http://libav.org/ *
*
* Check tag 'FIXME: Add more planar formats !'
* here and in the corresponding native code
* jogl/src/jogl/native/ffmpeg/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c
*
* TODO: *
 * pkg set-publisher -p http://pkg.openindiana.org/sfe-encumbered. * pkg install pkg:/video/ffmpeg *
 * If a specialized conversion shader is generated for the current pixel format,
 * the desired lookup function name (default "ffmpegTexture2D") may be overridden
 * by the caller. Otherwise the call is delegated to its super class.
 */
@Override
public String getTextureLookupFunctionName(String desiredFuncName) throws IllegalStateException {
    if(State.Uninitialized == state) {
        throw new IllegalStateException("Instance not initialized: "+this);
    }
    if(PixelFormat.YUV420P != vPixelFmt) {
        // No specialized conversion shader for this pixel format - use the default lookup.
        return super.getTextureLookupFunctionName(desiredFuncName);
    }
    // Allow the caller to rename the generated conversion function.
    if(null != desiredFuncName && 0 < desiredFuncName.length()) {
        textureLookupFunctionName = desiredFuncName;
    }
    return textureLookupFunctionName;
}
/** GLSL lookup function name used for the generated YUV420P conversion shader; caller-overridable via getTextureLookupFunctionName(String). */
private String textureLookupFunctionName = "ffmpegTexture2D";
/**
* {@inheritDoc}
*
* Depending on the pixelformat, a specific conversion shader is being created,
 * e.g. YUV420P to RGB. Otherwise the call is delegated to its super class.
*/
@Override
public String getTextureLookupFragmentShaderImpl() throws IllegalStateException {
    if(State.Uninitialized == state) {
        throw new IllegalStateException("Instance not initialized: "+this);
    }
    // Right edge of the visible image in texture coordinates, i.e. width / texWidth.
    final float tc_w_1 = (float)getWidth() / (float)texWidth;
    switch(vPixelFmt) {
        case YUV420P: {
            // U and V planes are sampled from the same texture via an offset plus
            // half-scaled coordinates; the constants are ITU-R BT.601 style
            // video-range YCbCr -> RGB conversion coefficients.
            final StringBuilder sb = new StringBuilder();
            sb.append("vec4 ").append(textureLookupFunctionName)
              .append("(in ").append(getTextureSampler2DType())
              .append(" image, in vec2 texCoord) {\n");
            sb.append(" vec2 u_off = vec2(").append(tc_w_1).append(", 0.0);\n");
            sb.append(" vec2 v_off = vec2(").append(tc_w_1).append(", 0.5);\n");
            sb.append(" vec2 tc_half = texCoord*0.5;\n");
            sb.append(" float y,u,v,r,g,b;\n");
            sb.append(" y = texture2D(image, texCoord).r;\n");
            sb.append(" u = texture2D(image, u_off+tc_half).r;\n");
            sb.append(" v = texture2D(image, v_off+tc_half).r;\n");
            sb.append(" y = 1.1643*(y-0.0625);\n");
            sb.append(" u = u-0.5;\n");
            sb.append(" v = v-0.5;\n");
            sb.append(" r = y+1.5958*v;\n");
            sb.append(" g = y-0.39173*u-0.81290*v;\n");
            sb.append(" b = y+2.017*u;\n");
            sb.append(" return vec4(r, g, b, 1);\n");
            sb.append("}\n");
            return sb.toString();
        }
        default: // FIXME: Add more planar formats !
            return super.getTextureLookupFragmentShaderImpl();
    }
}
/** Returns the current video PTS from the native instance, or 0 if not yet created. */
@Override
protected synchronized int getCurrentPositionImpl() {
    if( 0 == moviePtr ) {
        return 0;
    }
    return getVideoPTS0(moviePtr);
}
/**
 * {@inheritDoc}
 *
 * NOTE(review): the requested rate is not forwarded to the native layer
 * here — the method only reports success. Confirm whether the play speed
 * is applied elsewhere (e.g. during A/V synchronization via getPlaySpeed()).
 */
@Override
protected synchronized boolean setPlaySpeedImpl(float rate) {
return true;
}
/**
 * Starts the audio pusher thread.
 * @return {@code true} if the native instance exists and the command was issued,
 *         otherwise {@code false}
 */
@Override
public synchronized boolean startImpl() {
    final boolean hasInstance = 0 != moviePtr;
    if( hasInstance ) {
        startAudioPusher();
    }
    return hasInstance;
}
/**
 * Stops the audio pusher thread.
 * @return {@code true} if the native instance exists and the command was issued,
 *         otherwise {@code false}
 */
@Override
public synchronized boolean pauseImpl() {
    final boolean hasInstance = 0 != moviePtr;
    if( hasInstance ) {
        stopAudioPusher();
    }
    return hasInstance;
}
/**
 * Stops the audio pusher thread.
 * @return {@code true} if the native instance exists and the command was issued,
 *         otherwise {@code false}
 */
@Override
public synchronized boolean stopImpl() {
    final boolean hasInstance = 0 != moviePtr;
    if( hasInstance ) {
        stopAudioPusher();
    }
    return hasInstance;
}
/**
 * Pauses audio pushing, performs the native seek and resynchronizes the
 * last known audio/video PTS values to the position actually reached.
 *
 * @param msec the desired target position in milliseconds
 * @return the time position (PTS in ms) after issuing the command; may
 *         differ from {@code msec} (e.g. keyframe granularity)
 * @throws GLException if the native FFmpeg instance has not been created
 */
@Override
protected synchronized int seekImpl(int msec) {
    if(0==moviePtr) {
        throw new GLException("FFMPEG native instance null");
    }
    stopAudioPusher();
    // seek0 returns the PTS actually reached — resync both clocks to it.
    // (Removed leftover unconditional debug print to System.err.)
    final int pts1 = seek0(moviePtr, msec);
    lastAudioPTS = pts1;
    lastVideoPTS = pts1;
    startAudioPusher();
    return pts1;
}
/** Returns the most recently decoded texture frame; may be null before the first frame was produced. */
@Override
protected TextureSequence.TextureFrame getLastTextureImpl() {
return lastTex;
}
private long lastAudioTime = 0;   // wall-clock time (System.currentTimeMillis) of the last audio sync point
private int lastAudioPTS = 0;     // last known audio presentation timestamp [ms]
private static final int audio_dt_d = 400;   // audio sync delta [ms] — assumed tolerance threshold, TODO confirm usage
private long lastVideoTime = 0;   // wall-clock time of the last video frame — usage not visible in this chunk, TODO confirm
private int lastVideoPTS = 0;     // last known video presentation timestamp [ms]
private static final int video_dt_d = 9;     // minimum computed sync delta [ms] before the video path will sleep
@Override
protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) {
if(0==moviePtr) {
throw new GLException("FFMPEG native instance null");
}
if(null != lastTex) {
psm.setUnpackAlignment(gl, 1); // RGBA ? 4 : 1
try {
final Texture tex = lastTex.getTexture();
gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit());
tex.enable(gl);
tex.bind(gl);
try {
audioFramesBuffer.waitForFreeSlots(2);
} catch (InterruptedException e) {
e.printStackTrace(); // oops
}
/* try decode 10 packets to find one containing video
(res == 2) */
int res = 0;
int retry = 10;
while(res!=2 && retry >= 0) {
res = readNextPacket0(moviePtr, procAddrGLTexSubImage2D, textureTarget, textureFormat, textureType);
retry--;
}
} finally {
psm.restore(gl);
}
final int pts = getVideoPTS0(moviePtr); // this frame
if(blocking) {
// poor mans video sync .. TODO: off thread 'readNextPackage0(..)' on shared GLContext and multi textures/unit!
final long now = System.currentTimeMillis();
// Try sync video to audio
final long now_d = now - lastAudioTime;
final long pts_d = pts - lastAudioPTS - 444; /* hack 444 == play video 444ms ahead of audio */
final long dt = Math.min(47, (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ) ;
//final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
final boolean sleep = dt>video_dt_d && dt<1000 && audioSink.getQueuedByteCount()