/** * Copyright 2012-2024 JogAmp Community. All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of * conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list * of conditions and the following disclaimer in the documentation and/or other materials * provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * The views and conclusions contained in the software and documentation are those of the * authors and should not be interpreted as representing official policies, either expressed * or implied, of JogAmp Community. 
*/ package jogamp.opengl.util.av.impl; import java.io.IOException; import java.io.PrintStream; import java.nio.ByteBuffer; import java.security.PrivilegedAction; import com.jogamp.opengl.GL; import com.jogamp.opengl.GL2ES2; import com.jogamp.opengl.GLES2; import com.jogamp.opengl.GLException; import com.jogamp.common.av.AudioFormat; import com.jogamp.common.av.AudioSink; import com.jogamp.common.av.AudioSinkFactory; import com.jogamp.common.av.TimeFrameI; import com.jogamp.common.os.Clock; import com.jogamp.common.util.IOUtil; import com.jogamp.common.util.PropertyAccess; import com.jogamp.common.util.SecurityUtil; import com.jogamp.gluegen.runtime.ProcAddressTable; import com.jogamp.opengl.util.GLPixelStorageModes; import com.jogamp.opengl.util.av.ASSEventLine; import com.jogamp.opengl.util.av.GLMediaPlayer; import com.jogamp.opengl.util.texture.Texture; import jogamp.common.os.PlatformPropsImpl; import jogamp.opengl.GLContextImpl; import jogamp.opengl.util.av.AudioSampleFormat; import jogamp.opengl.util.av.GLMediaPlayerImpl; import jogamp.opengl.util.av.VideoPixelFormat; import jogamp.opengl.util.av.impl.FFMPEGDynamicLibraryBundleInfo.VersionedLib; /*** * Implementation utilizes FFmpeg which is ubiquitous * available and usually pre-installed on Unix platforms. *
* Besides the default BSD/Linux/.. repositories and installations, * precompiled binaries can be found at the * listed location below. *
* ** The decoded video frame is written directly into an OpenGL texture * on the GPU in its native format. A custom fragment shader converts * the native pixelformat to a usable RGB format if required. * Hence only 1 copy is required before bloating the picture * from YUV* to RGB, for example. *
** Implements pixel format conversion to RGB via * fragment shader texture-lookup functions: *
*
* FFmpeg Specifics
*
* Utilizes a slim dynamic and native binding to the FFmpeg libraries: *
* Currently we are binary compatible with the following major versions: *
ffmpeg | avcodec | avformat | avdevice | avutil | swresample | FFMPEG* class |
---|---|---|---|---|---|---|
4 | 58 | 58 | 58 | 56 | 03 | FFMPEGv0400 |
5 | 59 | 59 | 59 | 57 | 04 | FFMPEGv0500 |
6 | 60 | 60 | 60 | 58 | 04 | FFMPEGv0600 |
* See FFmpeg: *
* Check tag 'FIXME: Add more planar formats !'
* here and in the corresponding native code
* jogl/src/jogl/native/libav/ffmpeg_impl_template.c
*
*
*
ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" brew install ffmpeg *
pkg set-publisher -p http://pkg.openindiana.org/sfe-encumbered. pkg install pkg:/video/ffmpeg *
* Property {@code jogl.ffmpeg.lib} set to {@code internal} * will set {@code PREFER_SYSTEM_LIBS} to {@code false}. *
*
* Non system internal libraries are named 'internal_
* System default libraries are named '
* If {@code PREFER_SYSTEM_LIBS} is {@code true} (default), * we lookup the default library first, * then the versioned library names and last the internal library. *
** If {@code PREFER_SYSTEM_LIBS} is {@code false}, * we lookup the internal library first, * then the versioned library names and last the default library. *
*/ /* pp */ static final boolean PREFER_SYSTEM_LIBS; /** POSIX ENOSYS {@value}: Function not implemented. FIXME: Move to GlueGen ?!*/ private static final int ENOSYS = 38; // Instance data private static final FFMPEGNatives natives; private static final int avUtilMajorVersionCC; private static final int avFormatMajorVersionCC; private static final int avCodecMajorVersionCC; private static final int avDeviceMajorVersionCC; private static final int swResampleMajorVersionCC; private static final boolean available; static { // PREFER_SYSTEM_LIBS default on all systems is true for now! final String choice = PropertyAccess.getProperty("jogl.ffmpeg.lib", true); PREFER_SYSTEM_LIBS = null == choice || !choice.equals("internal"); final boolean libAVGood = FFMPEGDynamicLibraryBundleInfo.initSingleton(); final boolean libAVVersionGood; if( FFMPEGDynamicLibraryBundleInfo.libsLoaded() ) { natives = FFMPEGDynamicLibraryBundleInfo.getNatives(); if( null != natives ) { avCodecMajorVersionCC = natives.getAvCodecMajorVersionCC0(); avFormatMajorVersionCC = natives.getAvFormatMajorVersionCC0(); avUtilMajorVersionCC = natives.getAvUtilMajorVersionCC0(); avDeviceMajorVersionCC = natives.getAvDeviceMajorVersionCC0(); swResampleMajorVersionCC = natives.getSwResampleMajorVersionCC0(); } else { avUtilMajorVersionCC = 0; avFormatMajorVersionCC = 0; avCodecMajorVersionCC = 0; avDeviceMajorVersionCC = 0; swResampleMajorVersionCC = 0; } final VersionedLib avCodec = FFMPEGDynamicLibraryBundleInfo.avCodec; final VersionedLib avFormat = FFMPEGDynamicLibraryBundleInfo.avFormat; final VersionedLib avUtil = FFMPEGDynamicLibraryBundleInfo.avUtil; final VersionedLib avDevice = FFMPEGDynamicLibraryBundleInfo.avDevice; final VersionedLib swResample = FFMPEGDynamicLibraryBundleInfo.swResample; // final boolean avDeviceLoaded = FFMPEGDynamicLibraryBundleInfo.avDeviceLoaded(); // final boolean swResampleLoaded = FFMPEGDynamicLibraryBundleInfo.swResampleLoaded(); final int avCodecMajor = 
avCodec.version.getMajor(); final int avFormatMajor = avFormat.version.getMajor(); final int avUtilMajor = avUtil.version.getMajor(); final int avDeviceMajor = avDevice.version.getMajor(); final int swResampleMajor = swResample.version.getMajor(); libAVVersionGood = avCodecMajorVersionCC == avCodecMajor && avFormatMajorVersionCC == avFormatMajor && avUtilMajorVersionCC == avUtilMajor && ( avDeviceMajorVersionCC == avDeviceMajor || 0 == avDeviceMajor ) && swResampleMajorVersionCC == swResampleMajor; if( !libAVVersionGood ) { System.err.println("FFmpeg Not Matching Compile-Time / Runtime Major-Version"); } if( !libAVVersionGood || DEBUG ) { printNativeInfoImpl(System.err); } } else { natives = null; avUtilMajorVersionCC = 0; avFormatMajorVersionCC = 0; avCodecMajorVersionCC = 0; avDeviceMajorVersionCC = 0; swResampleMajorVersionCC = 0; libAVVersionGood = false; } available = libAVGood && libAVVersionGood && null != natives; } public static final boolean isAvailable() { return available; } private static final void printNativeInfoImpl(final PrintStream out) { out.println("FFmpeg Codec : "+FFMPEGDynamicLibraryBundleInfo.avCodec+" [cc "+avCodecMajorVersionCC+"]"); out.println("FFmpeg Format : "+FFMPEGDynamicLibraryBundleInfo.avFormat+" [cc "+avFormatMajorVersionCC+"]"); out.println("FFmpeg Util : "+FFMPEGDynamicLibraryBundleInfo.avUtil+" [cc "+avUtilMajorVersionCC+"]"); out.println("FFmpeg Device : "+FFMPEGDynamicLibraryBundleInfo.avDevice+" [cc "+avDeviceMajorVersionCC+", loaded "+FFMPEGDynamicLibraryBundleInfo.avDeviceLoaded()+"]"); out.println("FFmpeg Resample: "+FFMPEGDynamicLibraryBundleInfo.swResample+" [cc "+swResampleMajorVersionCC+", loaded "+FFMPEGDynamicLibraryBundleInfo.swResampleLoaded()+"]"); out.println("FFmpeg Class : "+(null!= natives ? 
natives.getClass().getSimpleName() : "n/a")); } @Override public final void printNativeInfo(final PrintStream out) { printNativeInfoImpl(out); } // // General // private final Object moviePtrLock = new Object(); private long moviePtr = 0; // // Video // private VideoPixelFormat vPixelFmt = VideoPixelFormat.RGBA; private int vPlanes = 0; private int vBitsPerPixel = 0; private int vBytesPerPixelPerPlane = 0; private int vWidth = 0, vHeight = 0; private int texWidth = 0, texHeight = 0; // overall (stuffing planes in one texture) private String singleTexComp = "r"; private final GLPixelStorageModes psm; // // Audio // private AudioFormat avChosenAudioFormat; private int audioSamplesPerFrameAndChannel = 0; public FFMPEGMediaPlayer() { if(!available) { throw new RuntimeException("FFMPEGMediaPlayer not available"); } psm = new GLPixelStorageModes(); initSelf(); } private void initSelf() { moviePtr = natives.createInstance0(this, DEBUG_NATIVE); if(0==moviePtr) { throw new GLException("Couldn't create FFMPEGInstance"); } audioSink = null; } @Override protected final void destroyImpl() { synchronized( moviePtrLock ) { if (moviePtr != 0) { natives.destroyInstance0(moviePtr); moviePtr = 0; } } destroyAudioSink(); } private final void destroyAudioSink() { final AudioSink _audioSink = audioSink; if( null != _audioSink ) { audioSink = null; _audioSink.destroy(); } } @Override protected void stopImpl() { destroyImpl(); initSelf(); } public static final String dev_video_linux = "/dev/video"; @Override protected final void initStreamImpl(final int vid, final int aid, final int sid) throws IOException { synchronized( moviePtrLock ) { if(0==moviePtr) { throw new GLException("FFMPEG native instance null"); } if(DEBUG) { System.err.println("initStream: p1 "+this); } final String streamLocS = IOUtil.getUriFilePathOrASCII(getUri()); destroyAudioSink(); if( GLMediaPlayer.STREAM_ID_NONE == aid ) { audioSink = AudioSinkFactory.createNull(); } else { // audioSink = new 
jogamp.common.av.JavaSoundAudioSink(); audioSink = AudioSinkFactory.createDefault(FFMPEGMediaPlayer.class.getClassLoader()); } { final int audioChannelLimit = getAudioChannelLimit(); if( audioChannelLimit >= 1 ) { audioSink.setChannelLimit(audioChannelLimit); } } final AudioFormat preferredAudioFormat = audioSink.getPreferredFormat(); if(DEBUG) { System.err.println("initStream: p2 aid "+aid+", preferred "+preferredAudioFormat+" on "+audioSink+", "+this); } final boolean isCameraInput = null != cameraPath; final String resStreamLocS; // int rw=640, rh=480, rr=15; int rw=-1, rh=-1, rr=-1; String sizes = null; if( isCameraInput ) { switch(PlatformPropsImpl.OS_TYPE) { case ANDROID: // ?? case FREEBSD: case HPUX: case LINUX: case SUNOS: resStreamLocS = dev_video_linux + cameraPath.decode(); break; case WINDOWS: case MACOS: case OPENKODE: default: resStreamLocS = cameraPath.decode(); break; } if( null != cameraProps ) { sizes = cameraProps.get(CameraPropSizeS); int v = getPropIntVal(cameraProps, CameraPropWidth); if( v > 0 ) { rw = v; } v = getPropIntVal(cameraProps, CameraPropHeight); if( v > 0 ) { rh = v; } v = getPropIntVal(cameraProps, CameraPropRate); if( v > 0 ) { rr = v; } } } else { resStreamLocS = streamLocS; } final int aMaxChannelCount = preferredAudioFormat.channelCount; final int aPrefSampleRate = preferredAudioFormat.sampleRate; // setStream(..) issues updateAttributes*(..), and defines avChosenAudioFormat, vid, aid, .. 
etc if(DEBUG) { System.err.println("initStream: p3 cameraPath "+cameraPath+", isCameraInput "+isCameraInput); System.err.println("initStream: p3 stream "+getUri()+" -> "+streamLocS+" -> "+resStreamLocS); System.err.println("initStream: p3 vid "+vid+", sizes "+sizes+", reqVideo "+rw+"x"+rh+"@"+rr+", aid "+aid+", aMaxChannelCount "+aMaxChannelCount+", aPrefSampleRate "+aPrefSampleRate); } natives.setStream0(moviePtr, resStreamLocS, isCameraInput, vid, sizes, rw, rh, rr, aid, aMaxChannelCount, aPrefSampleRate, sid); } } @Override protected void updateMetadata() { final Chapter[] chapters = new Chapter[natives.getChapterCount0(moviePtr)]; for(int i=0; iffmpegTexture2D
.
* Otherwise the call is delegated to its super class.
*/
@Override
public String setTextureLookupFunctionName(final String texLookupFuncName) throws IllegalStateException {
    // A null or empty name resets to the default lookup function name.
    final boolean useDefault = null == texLookupFuncName || 0 == texLookupFuncName.length();
    if( useDefault ) {
        textureLookupFunctionName = "ffmpegTexture2D";
    } else {
        // The GLSL built-in name must not be shadowed by the generated lookup function.
        if( texLookupFuncName.equals("texture2D") ) {
            throw new IllegalArgumentException("Build in 'texture2D' lookup-func-name not allowed!");
        }
        textureLookupFunctionName = texLookupFuncName;
    }
    // The generated shader source embeds the function name, hence drop the cached hash.
    resetTextureFragmentShaderHashCode();
    return textureLookupFunctionName;
}
/**
* {@inheritDoc}
*
* Depending on the pixelformat, a specific conversion shader is being created,
* e.g. YUV420P to RGB. Otherwise the call is delegated to its super class.
*/
@Override
public final String getTextureLookupFragmentShaderImpl() {
// Generates a GLSL function (named per getTextureLookupFunctionName()) converting
// the current vPixelFmt sample layout to an RGBA vec4.
// tc_w_1: normalized width of the picture inside the (possibly wider) texture,
// used as a horizontal offset to reach the chroma planes stuffed next to luma.
final float tc_w_1 = (float)getWidth() / (float)texWidth;
final String texLookupFuncName = getTextureLookupFunctionName();
// Defensive re-check; setTextureLookupFunctionName(..) rejects this name already.
if( texLookupFuncName.equals("texture2D") ) {
throw new IllegalArgumentException("Build in 'texture2D' lookup-func-name not allowed!");
}
switch(vPixelFmt) {
// Planar 4:2:0 — Y plane full size, U/V planes at quarter size beside/below it.
case YUVJ420P:
case YUV420P: // < planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
return
"// YUV420P: planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)\n"+
"vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
" const vec2 u_off = vec2("+tc_w_1+", 0.0);\n"+
" const vec2 v_off = vec2("+tc_w_1+", 0.5);\n"+
" vec2 tc_half = texCoord*0.5;\n"+
" float y,u,v,r,g,b;\n"+
" y = texture2D(image, texCoord)."+singleTexComp+";\n"+
" u = texture2D(image, u_off+tc_half)."+singleTexComp+";\n"+
" v = texture2D(image, v_off+tc_half)."+singleTexComp+";\n"+
" y = 1.1643*(y-0.0625);\n"+
" u = u-0.5;\n"+
" v = v-0.5;\n"+
" r = y+1.5958*v;\n"+
" g = y-0.39173*u-0.81290*v;\n"+
" b = y+2.017*u;\n"+
" return vec4(r, g, b, 1);\n"+
"}\n"
;
// Planar 4:2:2 — chroma half width, full height.
case YUVJ422P:
case YUV422P: // < planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
return
"// YUV422P: planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)\n"+
"vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
" const vec2 u_off = vec2("+tc_w_1+" , 0.0);\n"+
" const vec2 v_off = vec2("+tc_w_1+" * 1.5, 0.0);\n"+
" vec2 tc_halfw = vec2(texCoord.x*0.5, texCoord.y);\n"+
" float y,u,v,r,g,b;\n"+
" y = texture2D(image, texCoord)."+singleTexComp+";\n"+
" u = texture2D(image, u_off+tc_halfw)."+singleTexComp+";\n"+
" v = texture2D(image, v_off+tc_halfw)."+singleTexComp+";\n"+
" y = 1.1643*(y-0.0625);\n"+
" u = u-0.5;\n"+
" v = v-0.5;\n"+
" r = y+1.5958*v;\n"+
" g = y-0.39173*u-0.81290*v;\n"+
" b = y+2.017*u;\n"+
" return vec4(r, g, b, 1);\n"+
"}\n"
;
case YUYV422: // < packed YUV 4:2:2, 2 x 16bpp, [Y0 Cb] [Y1 Cr]
// Stuffed into RGBA half width texture
// NOTE(review): 'mod(gl_FragCoord.x, 2)' uses an int literal as the second operand;
// strict GLSL ES compilers only accept mod(float, float) — confirm target GLSL
// version or whether this should read 'mod(gl_FragCoord.x, 2.0)'.
return
"// YUYV422: packed YUV 4:2:2, 2 x 16bpp, [Y0 Cb] [Y1 Cr]\n"+
"vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
" "+
" float y1,u,y2,v,y,r,g,b;\n"+
" vec2 tc_halfw = vec2(texCoord.x*0.5, texCoord.y);\n"+
" vec4 yuyv = texture2D(image, tc_halfw).rgba;\n"+
" y1 = yuyv.r;\n"+
" u = yuyv.g;\n"+
" y2 = yuyv.b;\n"+
" v = yuyv.a;\n"+
" y = mix( y1, y2, mod(gl_FragCoord.x, 2) ); /* avoid branching! */\n"+
" y = 1.1643*(y-0.0625);\n"+
" u = u-0.5;\n"+
" v = v-0.5;\n"+
" r = y+1.5958*v;\n"+
" g = y-0.39173*u-0.81290*v;\n"+
" b = y+2.017*u;\n"+
" return vec4(r, g, b, 1);\n"+
"}\n"
;
case UYVY422: // < packed YUV 4:2:2, 2 x 16bpp, Cb Y0 Cr Y1
// Stuffed into RGBA half width texture
// NOTE(review): same int-literal 'mod(.., 2)' concern as the YUYV422 case above.
return
"// UYVY422: packed YUV 4:2:2, 2 x 16bpp, Cb Y0 Cr Y1\n"+
"vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
" "+
" float y1,u,y2,v,y,r,g,b;\n"+
" vec2 tc_halfw = vec2(texCoord.x*0.5, texCoord.y);\n"+
" vec4 uyvy = texture2D(image, tc_halfw).rgba;\n"+
" u = uyvy.r;\n"+
" y1 = uyvy.g;\n"+
" v = uyvy.b;\n"+
" y2 = uyvy.a;\n"+
" y = mix( y1, y2, mod(gl_FragCoord.x, 2) ); /* avoid branching! */\n"+
" y = 1.1643*(y-0.0625);\n"+
" u = u-0.5;\n"+
" v = v-0.5;\n"+
" r = y+1.5958*v;\n"+
" g = y-0.39173*u-0.81290*v;\n"+
" b = y+2.017*u;\n"+
" return vec4(r, g, b, 1);\n"+
"}\n"
;
// Packed RGB variants below only swizzle components.
case BGR24:
return
"// BGR24\n"+
"vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
" "+
" vec3 bgr = texture2D(image, texCoord).rgb;\n"+
" return vec4(bgr.b, bgr.g, bgr.r, 1);\n"+ /* just swizzle */
"}\n"
;
case RGB24:
return
"// RGB24\n"+
"vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
" "+
" vec3 t = texture2D(image, texCoord).rgb;\n"+
" return vec4(t.rgb, 1);\n"+
"}\n"
;
case ARGB:
return
"// ARGB\n"+
"vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
" "+
" vec4 t = texture2D(image, texCoord);\n"+
" return vec4(t.g, t.b, t.a, t.r);\n"+ /* just swizzle */
"}\n"
;
case RGBA:
return
"// RGBA\n"+
"vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
" "+
" return texture2D(image, texCoord);\n"+
"}\n"
;
case ABGR:
return
"// ABGR\n"+
"vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
" "+
" vec4 t = texture2D(image, texCoord);\n"+
" return vec4(t.a, t.b, t.g, t.r);\n"+ /* just swizzle */
"}\n"
;
case BGRA:
// NOTE(review): emitted shader header says "// BGR24" although this is the BGRA
// case — harmless (comment inside generated GLSL), but likely a copy-paste slip.
return
"// BGR24\n"+
"vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
" "+
" vec3 bgr = texture2D(image, texCoord).rgb;\n"+
" return vec4(bgr.b, bgr.g, bgr.r, 1);\n"+ /* just swizzle */
"}\n"
;
default: // FIXME: Add more formats !
throw new InternalError("Add proper mapping of: vPixelFmt "+vPixelFmt);
}
}
@Override
public final String getTextureFragmentShaderHashID() {
    // The generated shader source already embeds the sampler type and the lookup
    // function name, hence the source text alone identifies the shader variant.
    return getTextureLookupFragmentShaderImpl();
}
@Override
public final boolean resumeImpl() {
    // Without a live native instance there is nothing to resume.
    if( 0 == moviePtr ) {
        return false;
    }
    final int errno = natives.play0(moviePtr);
    // -ENOSYS merely signals the native side has no explicit play hook — not an error.
    final boolean noteworthy = 0 != errno && -ENOSYS != errno;
    if( DEBUG_NATIVE && noteworthy ) {
        System.err.println("ffmpeg play err: "+errno);
    }
    return true;
}
@Override
public final boolean pauseImpl() {
    // Without a live native instance there is nothing to pause.
    if( 0 == moviePtr ) {
        return false;
    }
    final int errno = natives.pause0(moviePtr);
    // -ENOSYS merely signals the native side has no explicit pause hook — not an error.
    final boolean noteworthy = 0 != errno && -ENOSYS != errno;
    if( DEBUG_NATIVE && noteworthy ) {
        System.err.println("ffmpeg pause err: "+errno);
    }
    return true;
}
@Override
protected final synchronized int seekImpl(final int msec) {
    // A live native instance is mandatory for seeking.
    if( 0 == moviePtr ) {
        throw new GLException("FFMPEG native instance null");
    }
    // Delegate to the native seek and hand its result back to the caller.
    return natives.seek0(moviePtr, msec);
}
@Override
protected void preNextTextureImpl(final GL gl) {
    // Tight (1-byte) row packing for the upcoming texture upload; the previous
    // pixel-storage state is restored in postNextTextureImpl(..).
    psm.setUnpackAlignment(gl, 1); // RGBA ? 4 : 1
    final int texUnit = GL.GL_TEXTURE0 + getTextureUnit();
    gl.glActiveTexture(texUnit);
}
@Override
protected void postNextTextureImpl(final GL gl) {
// Restore the pixel-storage modes changed in preNextTextureImpl(..).
psm.restore(gl);
}
@Override
protected final int getNextTextureImpl(final GL gl, final TextureFrame nextFrame) {
    if( 0 == moviePtr ) {
        throw new GLException("FFMPEG native instance null");
    }
    int vPTS = TimeFrameI.INVALID_PTS;
    int vTexID = 0; // invalid
    // Fix: also guard against a null nextFrame — the trailing 'null != nextFrame'
    // check below documents that it may be null, yet the original dereferenced it
    // whenever gl was non-null, yielding an NPE on that path.
    if( null != gl && null != nextFrame ) {
        // glEnable() and glBindTexture() are performed in native readNextPacket0()
        // final Texture tex = nextFrame.getTexture();
        // tex.enable(gl);
        // tex.bind(gl);
        vTexID = nextFrame.getTexture().getTextureObject();
    }
    /** Try decode up to 10 packets to find one containing video. */
    for(int i=0; TimeFrameI.INVALID_PTS == vPTS && 10 > i; i++) {
        vPTS = natives.readNextPacket0(moviePtr, getTextureTarget(), vTexID, getTextureFormat(), getTextureType(), GL.GL_TEXTURE_2D, 0);
    }
    if( null != nextFrame ) {
        nextFrame.setPTS(vPTS);
    }
    return vPTS;
}
final void pushSound(final ByteBuffer sampleData, final int data_size, final int audio_pts) {
    // Called from native code; forward decoded samples only while audio is active.
    if( !audioStreamEnabled() ) {
        return;
    }
    audioSink.enqueueData( audio_pts, sampleData, data_size);
}
final void pushSubtitleText(final String text, final int pts, final int start_display_pts, final int end_display_pts) {
    // Called from native code. Without a listener the event is dropped silently.
    if( null == assEventListener ) {
        return;
    }
    // Only forward events whose display window has not yet begun.
    final int currentPTS = getPTS().get(Clock.currentMillis());
    if( start_display_pts > currentPTS ) {
        assEventListener.run( new ASSEventLine(ASSEventLine.Format.TEXT, text, start_display_pts, end_display_pts) );
    }
}
final void pushSubtitleASS(final String ass, final int pts, final int start_display_pts, final int end_display_pts) {
    // Called from native code. Without a listener the event is dropped silently.
    if( null == assEventListener ) {
        return;
    }
    assEventListener.run( new ASSEventLine(ASSEventLine.Format.FFMPEG, ass, start_display_pts, end_display_pts) );
}
// Called from native code for bitmap (texture) subtitles; intentionally a no-op stub
// apart from the debug line kept below.
// NOTE(review): texture-based subtitle rendering appears unimplemented here — confirm
// whether a handler is provided elsewhere or this is a known gap.
final void pushSubtitleTex(final int texID, final int x, final int y, final int width, final int height, final int pts, final int start_display_pts, final int end_display_pts) {
// System.err.println("SubTex["+texID+"]: "+x+"/"+y+" "+width+"x"+height+", pts "+pts+" ["+start_display_pts+".."+end_display_pts+"] "+(end_display_pts-start_display_pts+1));
}
}