Diffstat (limited to 'src')
-rw-r--r--   src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java |  38
-rw-r--r--   src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java              | 275
-rw-r--r--   src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java                  | 127
-rw-r--r--   src/jogl/native/GLContext.c                                                     |  46
-rw-r--r--   src/jogl/native/JoglCommon.c                                                    |  40
-rw-r--r--   src/jogl/native/libav/ffmpeg_impl_template.c                                    | 160
6 files changed, 413 insertions, 273 deletions
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
index 040d152f5..f327cddd4 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
@@ -46,14 +46,7 @@ import com.jogamp.common.util.RunnableExecutor;
 import com.jogamp.common.util.VersionNumber;
 
 /**
- * FIXME: We need native structure access methods to deal with API changes
- *        in the libav headers, which break binary compatibility!
- *        Currently we are binary compatible w/ [0.6 ?, ] 0.7 and 0.8 but not w/ trunk.
- *
- *        ChangeList for trunk:
- *          Thu Jan 12 11:21:02 2012 a17479dfce67fbea2d0a1bf303010dce1e79059f major 53 -> 54
- *          Mon Feb 27 22:40:11 2012 ee42df8a35c2b795f524c856834d0823dbd4e75d reorder AVStream and AVFormatContext
- *          Tue Feb 28 12:07:53 2012 322537478b63c6bc01e640643550ff539864d790 minor 1 -> 2
+ * See {@link FFMPEGMediaPlayer#compatibility}.
  */
 class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
     private static final boolean DEBUG = FFMPEGMediaPlayer.DEBUG || DynamicLibraryBundleInfo.DEBUG;
@@ -73,7 +66,6 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
         "avcodec_string",
         "avcodec_find_decoder",
         "avcodec_open2",             // 53.6.0    (opt)
-        "avcodec_open",
         "avcodec_alloc_frame",
         "avcodec_get_frame_defaults",
         "avcodec_free_frame",        // 54.28.0   (opt)
@@ -85,8 +77,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
         "av_destruct_packet",
         "av_free_packet",
         "avcodec_decode_audio4",     // 53.25.0   (opt)
-        "avcodec_decode_audio3",     // 52.23.0
-/* 23 */ "avcodec_decode_video2",    // 52.23.0
+/* 21 */ "avcodec_decode_video2",    // 52.23.0
 
         // libavutil
         "av_pix_fmt_descriptors",
@@ -96,13 +87,16 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
         "av_get_bits_per_pixel",
         "av_samples_get_buffer_size",
         "av_get_bytes_per_sample",   // 51.4.0
-/* 31 */ "av_opt_set_int",           // 51.12.0
-
+        "av_opt_set_int",            // 51.12.0
+        "av_dict_get",
+        "av_dict_count",             // 54.*      (opt)
+        "av_dict_set",
+/* 33 */ "av_dict_free",
+
         // libavformat
         "avformat_alloc_context",
         "avformat_free_context",     // 52.96.0   (opt)
         "avformat_close_input",      // 53.17.0   (opt)
-        "av_close_input_file",
         "av_register_all",
         "av_find_input_format",
         "avformat_open_input",
@@ -114,8 +108,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
         "av_read_pause",
         "avformat_network_init",     // 53.13.0   (opt)
         "avformat_network_deinit",   // 53.13.0   (opt)
-        "avformat_find_stream_info", // 53.3.0    (opt)
-/* 48 */ "av_find_stream_info",
+/* 48 */ "avformat_find_stream_info", // 53.3.0   (opt)
 
         // libavdevice
/* 49 */ "avdevice_register_all",    // ???
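A note on the symbol table above: entries marked (opt) may be missing at runtime on older libav/ffmpeg installations, and the alternate/optional symbol tables in the next hunk drive that lookup. A minimal, hypothetical sketch of the resulting guard pattern in Java (class and field names are illustrative only, not the actual JOGL plumbing):

    // Hypothetical sketch of version-gated optional symbol use; the real
    // lookup lives in DynamicLibraryBundle/FFMPEGDynamicLibraryBundleInfo.
    import java.util.HashMap;
    import java.util.Map;

    class OptionalSymbolsSketch {
        // symbol name -> native address (0 if the loader could not resolve it)
        private final Map<String, Long> addr = new HashMap<String, Long>();

        boolean has(final String name) {
            final Long a = addr.get(name);
            return null != a && 0 != a.longValue();
        }

        int dictCount(final long dict) {
            if( has("av_dict_count") ) {   // 54.* (opt), absent in libav 0.8
                return nativeDictCount(dict);
            }
            return -1; // caller falls back to iterating av_dict_get
        }
        private native int nativeDictCount(long dict);
    }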
@@ -130,20 +123,15 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
 
     // alternate symbol names
     private static final String[][] altSymbolNames = {
-        { "avcodec_open",          "avcodec_open2" },             // old, 53.6.0
-        { "avcodec_decode_audio3", "avcodec_decode_audio4" },     // old, 53.25.0
-        { "av_close_input_file",   "avformat_close_input" },      // old, 53.17.0
-        { "av_find_stream_info",   "avformat_find_stream_info" }, // old, 53.3.0
+        // { "av_find_stream_info", "avformat_find_stream_info" }, // old, 53.3.0
    };
 
     // optional symbol names
     private static final String[] optionalSymbolNames = {
-        "avformat_free_context",     // 52.96.0   (opt)
-        "avformat_network_init",     // 53.13.0   (opt)
-        "avformat_network_deinit",   // 53.13.0   (opt)
         "avformat_seek_file",        // ???       (opt)
         "avcodec_free_frame",        // 54.28.0   (opt)
         "av_frame_unref",            // 55.0.0    (opt)
+        "av_dict_count",             // 54.*      (opt)
         // libavdevice
         "avdevice_register_all",     // 53.0.0    (opt)
         // libavresample
@@ -377,10 +365,10 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
         avdevice.add("avdevice");              // default
 
         avdevice.add("libavdevice.so.54");     // dummy future proof
-        avdevice.add("libavdevice.so.53");     // 8 && 9
+        avdevice.add("libavdevice.so.53");     // 0.8 && 9
 
         avdevice.add("avdevice-54");           // dummy future proof
-        avdevice.add("avdevice-53");           // 8 && 9
+        avdevice.add("avdevice-53");           // 0.8 && 9
 
         libsList.add(avdevice);
 
         final List<String> avresample = new ArrayList<String>();
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
index 952587ed9..2dd60074c 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
@@ -38,6 +38,7 @@ import javax.media.opengl.GL2ES2;
 import javax.media.opengl.GLException;
 
 import com.jogamp.common.os.Platform;
+import com.jogamp.common.util.VersionNumber;
 import com.jogamp.gluegen.runtime.ProcAddressTable;
 import com.jogamp.opengl.util.TimeFrameI;
 import com.jogamp.opengl.util.GLPixelStorageModes;
@@ -55,54 +56,87 @@ import jogamp.opengl.util.av.impl.FFMPEGNatives.SampleFormat;
 /***
  * Implementation utilizes <a href="http://libav.org/">Libav</a>
  * or <a href="http://ffmpeg.org/">FFmpeg</a> which is ubiquitously
- * available and usually pre-installed on Unix platforms. Due to legal
- * reasons we cannot deploy binaries of it, which contains patented codecs.
+ * available and usually pre-installed on Unix platforms.
+ * <p>
+ * Due to legal reasons we cannot deploy binaries of it, which contains patented codecs.
+ * </p>
+ * <p>
  * Besides the default BSD/Linux/.. repositories and installations,
- * precompiled binaries can be found at the listed location below.
+ * precompiled binaries can be found at the
+ * <a href="#libavavail">listed location below</a>.
+ * </p>
+ *
+ * <a name="implspecifics"><h5>Implementation specifics</h5></a>
  * <p>
- * Implements YUV420P to RGB fragment shader conversion
- * and the usual packed RGB formats.
  * The decoded video frame is written directly into an OpenGL texture
  * on the GPU in its native format. A custom fragment shader converts
- * the native pixelformat to a usable RGB format if required.
+ * the native pixelformat to a usable <i>RGB</i> format if required.
  * Hence only 1 copy is required before bloating the picture
- * from YUV to RGB, for example.
+ * from <i>YUV*</i> to <i>RGB</i>, for example.
  * </p>
  * <p>
+ * Implements pixel format conversion to <i>RGB</i> via
+ * fragment shader texture-lookup functions:
+ * <ul>
+ *   <li>{@link PixelFormat#YUV420P}</li>
+ *   <li>{@link PixelFormat#YUVJ420P}</li>
+ *   <li>{@link PixelFormat#YUV422P}</li>
+ *   <li>{@link PixelFormat#YUVJ422P}</li>
+ *   <li>{@link PixelFormat#YUYV422}</li>
+ * </ul>
+ * </p>
+ * <p>
+ *
+ * <a name="libavspecifics"><h5>Libav Specifics</h5></a>
+ * <p>
  * Utilizes a slim dynamic and native binding to the Lib_av
  * libraries:
  * <ul>
- *   <li>libavutil</li>
- *   <li>libavformat</li>
  *   <li>libavcodec</li>
+ *   <li>libavformat</li>
+ *   <li>libavutil</li>
+ *   <li>libavresample (opt)</li>
+ *   <li>libavdevice (opt)</li>
  * </ul>
  * </p>
+ *
+ * <a name="compatibility"><h5>LibAV Compatibility</h5></a>
+ * <p>
+ * Currently we are binary compatible w/:
+ * <table border="1">
+ *   <tr><th>release</th><th>lavc</th><th>lavf</th><th>lavu</th><th>lavr</th> <th>FFMPEG* class</th></tr>
+ *   <tr><td>0.8</td>    <td>53</td>  <td>53</td>  <td>51</td>  <td></td>    <td>FFMPEGv08</td></tr>
+ *   <tr><td>9.0</td>    <td>54</td>  <td>54</td>  <td>52</td>  <td>01</td>  <td>FFMPEGv09</td></tr>
+ * </table>
+ * </p>
  * <p>
- * http://libav.org/
+ * See http://upstream-tracker.org/versions/libav.html
  * </p>
  * <p>
  * Check tag 'FIXME: Add more planar formats !'
  * here and in the corresponding native code
- * <code>jogl/src/jogl/native/ffmpeg/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c</code>
+ * <code>jogl/src/jogl/native/libav/ffmpeg_impl_template.c</code>
  * </p>
+ *
+ *
+ * <a name="todo"><h5>TODO:</h5></a>
  * <p>
- * TODO:
  * <ul>
- *   <li>Audio Output</li>
- *   <li>Off thread <i>next frame</i> processing using multiple target textures</li>
  *   <li>better pts sync handling</li>
- *   <li>fix seek</li>
  * </ul>
  * </p>
- * Pre-compiled Libav / FFmpeg packages:
+ *
+ * <a name="libavavail"><h5>LibAV Availability</h5></a>
+ * <p>
  * <ul>
- *   <li>Windows: http://ffmpeg.zeranoe.com/builds/</li>
- *   <li>MacOSX: http://www.ffmpegx.com/</li>
+ *   <li>Windows: http://win32.libav.org/releases/</li>
+ *   <li>MacOSX: http://ffmpegmac.net/</li>
  *   <li>OpenIndiana/Solaris:<pre>
  *         pkg set-publisher -p http://pkg.openindiana.org/sfe-encumbered.
 *         pkg install pkg:/video/ffmpeg
 *         </pre></li>
- * </ul>
+ * </ul>
+ * </p>
  */
 public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
@@ -122,19 +156,24 @@
         final boolean libAVVersionGood;
         if( FFMPEGDynamicLibraryBundleInfo.libsLoaded() ) {
             natives = FFMPEGDynamicLibraryBundleInfo.getNatives();
-            avUtilMajorVersionCC = natives.getAvUtilMajorVersionCC0();
-            avFormatMajorVersionCC = natives.getAvFormatMajorVersionCC0();
             avCodecMajorVersionCC = natives.getAvCodecMajorVersionCC0();
+            avFormatMajorVersionCC = natives.getAvFormatMajorVersionCC0();
+            avUtilMajorVersionCC = natives.getAvUtilMajorVersionCC0();
             avResampleMajorVersionCC = natives.getAvResampleMajorVersionCC0();
-            System.err.println("LIB_AV Util    : "+FFMPEGDynamicLibraryBundleInfo.avUtilVersion+" [cc "+avUtilMajorVersionCC+"]");
-            System.err.println("LIB_AV Format  : "+FFMPEGDynamicLibraryBundleInfo.avFormatVersion+" [cc "+avFormatMajorVersionCC+"]");
-            System.err.println("LIB_AV Codec   : "+FFMPEGDynamicLibraryBundleInfo.avCodecVersion+" [cc "+avCodecMajorVersionCC+"]");
-            System.err.println("LIB_AV Device  : [loaded "+FFMPEGDynamicLibraryBundleInfo.avDeviceLoaded()+"]");
+            final VersionNumber avCodecVersion = FFMPEGDynamicLibraryBundleInfo.avCodecVersion;
+            final VersionNumber avFormatVersion = FFMPEGDynamicLibraryBundleInfo.avFormatVersion;
+            final VersionNumber avUtilVersion = FFMPEGDynamicLibraryBundleInfo.avUtilVersion;
+            final VersionNumber avResampleVersion = FFMPEGDynamicLibraryBundleInfo.avResampleVersion;
+            System.err.println("LIB_AV Codec   : "+avCodecVersion+" [cc "+avCodecMajorVersionCC+"]");
+            System.err.println("LIB_AV Format  : "+avFormatVersion+" [cc "+avFormatMajorVersionCC+"]");
+            System.err.println("LIB_AV Util    : "+avUtilVersion+" [cc "+avUtilMajorVersionCC+"]");
             System.err.println("LIB_AV Resample: "+FFMPEGDynamicLibraryBundleInfo.avResampleVersion+" [cc "+avResampleMajorVersionCC+", loaded "+FFMPEGDynamicLibraryBundleInfo.avResampleLoaded()+"]");
-            libAVVersionGood = avUtilMajorVersionCC == FFMPEGDynamicLibraryBundleInfo.avUtilVersion.getMajor() &&
-                               avFormatMajorVersionCC == FFMPEGDynamicLibraryBundleInfo.avFormatVersion.getMajor() &&
-                               avCodecMajorVersionCC == FFMPEGDynamicLibraryBundleInfo.avCodecVersion.getMajor() &&
-                               avResampleMajorVersionCC == FFMPEGDynamicLibraryBundleInfo.avResampleVersion.getMajor();
+            System.err.println("LIB_AV Device  : [loaded "+FFMPEGDynamicLibraryBundleInfo.avDeviceLoaded()+"]");
+            System.err.println("LIB_AV Class   : "+natives.getClass().getSimpleName());
+            libAVVersionGood = avCodecMajorVersionCC == avCodecVersion.getMajor() &&
+                               avFormatMajorVersionCC == avFormatVersion.getMajor() &&
+                               avUtilMajorVersionCC == avUtilVersion.getMajor() &&
+                               avResampleMajorVersionCC == avResampleVersion.getMajor();
             if( !libAVVersionGood ) {
                 System.err.println("LIB_AV Not Matching Compile-Time / Runtime Major-Version");
             }
@@ -167,8 +206,6 @@
     private int vPlanes = 0;
     private int vBitsPerPixel = 0;
     private int vBytesPerPixelPerPlane = 0;
-    private int[] vLinesize = { 0, 0, 0 };      // per plane
-    private int[] vTexWidth = { 0, 0, 0 };      // per plane
     private int texWidth, texHeight; // overall (stuffing planes in one texture)
     private String singleTexComp = "r";
     private GLPixelStorageModes psm;
@@ -270,7 +307,7 @@
             throw new GLException("AudioSink null");
         }
         final int audioQueueLimit;
-        if( null != gl ) {
+        if( null != gl && STREAM_ID_NONE != vid ) {
             final GLContextImpl ctx = (GLContextImpl)gl.getContext();
             AccessController.doPrivileged(new PrivilegedAction<Object>() {
                 public Object run() {
@@ -286,12 +323,6 @@
         } else {
             audioQueueLimit = AudioSink.DefaultQueueLimitAudioOnly;
         }
-        final float frameDuration;
-        if( audioSamplesPerFrameAndChannel > 0 ) {
-            frameDuration= avChosenAudioFormat.getSamplesDuration(audioSamplesPerFrameAndChannel);
-        } else {
-            frameDuration = AudioSink.DefaultFrameDuration;
-        }
         if(DEBUG) {
             System.err.println("initGL: p3 avChosen "+avChosenAudioFormat);
         }
@@ -299,20 +330,28 @@
         if( STREAM_ID_NONE == aid ) {
             audioSink.destroy();
             audioSink = AudioSinkFactory.createNull();
-        }
-        final boolean audioSinkOK = audioSink.init(avChosenAudioFormat, frameDuration, AudioSink.DefaultInitialQueueSize, AudioSink.DefaultQueueGrowAmount, audioQueueLimit);
-        if( !audioSinkOK ) {
-            System.err.println("AudioSink "+audioSink.getClass().getName()+" does not support "+avChosenAudioFormat+", using Null");
-            audioSink.destroy();
-            audioSink = AudioSinkFactory.createNull();
-            audioSink.init(avChosenAudioFormat, 0, AudioSink.DefaultInitialQueueSize, AudioSink.DefaultQueueGrowAmount, audioQueueLimit);
+            audioSink.init(AudioSink.DefaultFormat, 0, AudioSink.DefaultInitialQueueSize, AudioSink.DefaultQueueGrowAmount, audioQueueLimit);
+        } else {
+            final float frameDuration;
+            if( audioSamplesPerFrameAndChannel > 0 ) {
+                frameDuration= avChosenAudioFormat.getSamplesDuration(audioSamplesPerFrameAndChannel);
+            } else {
+                frameDuration = AudioSink.DefaultFrameDuration;
+            }
+            final boolean audioSinkOK = audioSink.init(avChosenAudioFormat, frameDuration, AudioSink.DefaultInitialQueueSize, AudioSink.DefaultQueueGrowAmount, audioQueueLimit);
+            if( !audioSinkOK ) {
+                System.err.println("AudioSink "+audioSink.getClass().getName()+" does not support "+avChosenAudioFormat+", using Null");
+                audioSink.destroy();
+                audioSink = AudioSinkFactory.createNull();
+                audioSink.init(avChosenAudioFormat, 0, AudioSink.DefaultInitialQueueSize, AudioSink.DefaultQueueGrowAmount, audioQueueLimit);
+            }
         }
         if(DEBUG) {
             System.err.println("initGL: p4 chosen "+avChosenAudioFormat);
             System.err.println("initGL: p4 chosen "+audioSink);
         }
-        if( null != gl ) {
+        if( null != gl && STREAM_ID_NONE != vid ) {
             int tf, tif=GL.GL_RGBA; // texture format and internal format
             int tt = GL.GL_UNSIGNED_BYTE;
             switch(vBytesPerPixelPerPlane) {
@@ -325,7 +364,7 @@
                     tf = GL2ES2.GL_ALPHA; tif=GL2ES2.GL_ALPHA; singleTexComp = "a";
                 }
                 break;
-
+
             case 2: if( vPixelFmt == PixelFormat.YUYV422 ) {
                         // YUYV422: // < packed YUV 4:2:2, 2x 16bpp, Y0 Cb Y1 Cr
                         // Stuffed into RGBA half width texture
@@ -426,6 +465,7 @@
 
     /**
      * Native callback
+     * @param vid
      * @param pixFmt
     * @param planes
     * @param bitsPerPixel
@@ -436,58 +476,96 @@
      * @param tWd0
      * @param tWd1
      * @param tWd2
+     * @param aid
      * @param audioSampleFmt
      * @param audioSampleRate
      * @param audioChannels
      * @param audioSamplesPerFrameAndChannel in audio samples per frame and channel
      */
-    void updateAttributes2(int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
+    void updateAttributes2(int vid, int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
                            int lSz0, int lSz1, int lSz2,
                            int tWd0, int tWd1, int tWd2, int vW, int vH,
-                           int audioSampleFmt, int audioSampleRate,
+                           int aid, int audioSampleFmt, int audioSampleRate,
                            int audioChannels, int audioSamplesPerFrameAndChannel) {
-        vPixelFmt = PixelFormat.valueOf(pixFmt);
-        vPlanes = planes;
-        vBitsPerPixel = bitsPerPixel;
-        vBytesPerPixelPerPlane = bytesPerPixelPerPlane;
-        vLinesize[0] = lSz0; vLinesize[1] = lSz1; vLinesize[2] = lSz2;
-        vTexWidth[0] = tWd0; vTexWidth[1] = tWd1; vTexWidth[2] = tWd2;
+        // defaults ..
+        vPixelFmt = null;
+        vPlanes = 0;
+        vBitsPerPixel = 0;
+        vBytesPerPixelPerPlane = 0;
+        usesTexLookupShader = false;
+        texWidth = 0; texHeight = 0;
 
-        switch(vPixelFmt) {
-            case YUV420P: // < planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
-                usesTexLookupShader = true;
-                // YUV420P: Adding U+V on right side of fixed height texture,
-                //          since width is already aligned by decoder.
-                // Y=w*h, Y=w/2*h/2, U=w/2*h/2
-                //   w*h + 2 ( w/2 * h/2 )
-                //   w*h + w*h/2
-                //   2*w/2 * h
-                texWidth = vTexWidth[0] + vTexWidth[1]; texHeight = vH;
-                break;
-            case YUYV422: // < packed YUV 4:2:2, 2x 16bpp, Y0 Cb Y1 Cr - stuffed into RGBA half width texture
-                usesTexLookupShader = true;
-                texWidth = vTexWidth[0]; texHeight = vH;
-                break;
-            case RGB24:
-            case BGR24:
-            case ARGB:
-            case RGBA:
-            case ABGR:
-            case BGRA:
-                usesTexLookupShader = false;
-                texWidth = vTexWidth[0]; texHeight = vH;
-                break;
-            default: // FIXME: Add more formats !
-                throw new RuntimeException("Unsupported pixelformat: "+vPixelFmt);
+        final int[] vLinesize = { 0, 0, 0 }; // per plane
+        final int[] vTexWidth = { 0, 0, 0 }; // per plane
+
+        if( STREAM_ID_NONE != vid ) {
+            vPixelFmt = PixelFormat.valueOf(pixFmt);
+            vPlanes = planes;
+            vBitsPerPixel = bitsPerPixel;
+            vBytesPerPixelPerPlane = bytesPerPixelPerPlane;
+            vLinesize[0] = lSz0; vLinesize[1] = lSz1; vLinesize[2] = lSz2;
+            vTexWidth[0] = tWd0; vTexWidth[1] = tWd1; vTexWidth[2] = tWd2;
+
+            switch(vPixelFmt) {
+                case YUVJ420P:
+                case YUV420P: // < planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
+                    usesTexLookupShader = true;
+                    // YUV420P: Adding U+V on right side of fixed height texture,
+                    //          since width is already aligned by decoder.
+                    //          Splitting texture to 4 quadrants:
+                    //            Y covers left top/low quadrant
+                    //            U on top-right quadrant.
+                    //            V on low-right quadrant.
+                    // Y=w*h, U=w/2*h/2, V=w/2*h/2
+                    //   w*h + 2 ( w/2 * h/2 )
+                    //   w*h + w*h/2
+                    texWidth = vTexWidth[0] + vTexWidth[1]; texHeight = vH;
+                    break;
+                case YUVJ422P:
+                case YUV422P:
+                    usesTexLookupShader = true;
+                    // YUV422P: Adding U+V on right side of fixed height texture,
+                    //          since width is already aligned by decoder.
+                    //          Splitting texture to 4 columns
+                    //            Y covers columns 1+2
+                    //            U covers columns 3
+                    //            V covers columns 4
+                    texWidth = vTexWidth[0] + vTexWidth[1] + vTexWidth[2]; texHeight = vH;
+                    break;
+                case YUYV422: // < packed YUV 4:2:2, 2x 16bpp, Y0 Cb Y1 Cr - stuffed into RGBA half width texture
+                    usesTexLookupShader = true;
+                    texWidth = vTexWidth[0]; texHeight = vH;
+                    break;
+                case RGB24:
+                case BGR24:
+                case ARGB:
+                case RGBA:
+                case ABGR:
+                case BGRA:
+                    usesTexLookupShader = false;
+                    texWidth = vTexWidth[0]; texHeight = vH;
+                    break;
+                default: // FIXME: Add more formats !
+                    throw new RuntimeException("Unsupported pixelformat: "+vPixelFmt);
+            }
+        }
+
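The planar cases above pack all planes into one texture, so the overall texture width follows directly from the per-plane widths the decoder reports. A standalone restatement of that arithmetic (hypothetical fixed dimensions; the real values come from the decoder via vTexWidth[]):

    // Hypothetical, self-contained illustration of the texture-size arithmetic
    // used above; w/h stand in for the decoder-aligned video dimensions.
    public class PlanarTexSizeSketch {
        public static void main(String[] args) {
            final int w = 640, h = 480;
            // YUV420P: Y (w x h) left, U (w/2 x h/2) top-right, V (w/2 x h/2) low-right
            final int tw420 = w + w/2, th420 = h;      // w*h + 2*(w/2*h/2) texels used
            // YUV422P: Y covers columns 1+2, U column 3, V column 4, all full height
            final int tw422 = w + w/2 + w/2, th422 = h;
            System.out.println("YUV420P texture: " + tw420 + "x" + th420);
            System.out.println("YUV422P texture: " + tw422 + "x" + th422);
        }
    }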
+        // defaults ..
+        final SampleFormat aSampleFmt;
+        avChosenAudioFormat = null;;
+        this.audioSamplesPerFrameAndChannel = 0;
+
+        if( STREAM_ID_NONE != aid ) {
+            aSampleFmt = SampleFormat.valueOf(audioSampleFmt);
+            avChosenAudioFormat = avAudioFormat2Local(aSampleFmt, audioSampleRate, audioChannels);
+            this.audioSamplesPerFrameAndChannel = audioSamplesPerFrameAndChannel;
+        } else {
+            aSampleFmt = null;
+        }
-        final SampleFormat aSampleFmt = SampleFormat.valueOf(audioSampleFmt);
-        avChosenAudioFormat = avAudioFormat2Local(aSampleFmt, audioSampleRate, audioChannels);
-
-        this.audioSamplesPerFrameAndChannel = audioSamplesPerFrameAndChannel;
 
         if(DEBUG) {
-            System.err.println("audio: fmt "+aSampleFmt+", "+avChosenAudioFormat+", aFrameSize/fc "+audioSamplesPerFrameAndChannel);
-            System.err.println("video: fmt "+vW+"x"+vH+", "+vPixelFmt+", planes "+vPlanes+", bpp "+vBitsPerPixel+"/"+vBytesPerPixelPerPlane+", usesTexLookupShader "+usesTexLookupShader);
+            System.err.println("audio: id "+aid+", fmt "+aSampleFmt+", "+avChosenAudioFormat+", aFrameSize/fc "+audioSamplesPerFrameAndChannel);
+            System.err.println("video: id "+vid+", fmt "+vW+"x"+vH+", "+vPixelFmt+", planes "+vPlanes+", bpp "+vBitsPerPixel+"/"+vBytesPerPixelPerPlane+", usesTexLookupShader "+usesTexLookupShader);
             for(int i=0; i<3; i++) {
                 System.err.println("video: "+i+": "+vTexWidth[i]+"/"+vLinesize[i]);
             }
@@ -532,6 +610,7 @@
         }
         final float tc_w_1 = (float)getWidth() / (float)texWidth;
         switch(vPixelFmt) {
+            case YUVJ420P:
             case YUV420P: // < planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
                 return
                     "vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
@@ -551,6 +630,28 @@
                     "  return vec4(r, g, b, 1);\n"+
                     "}\n"
                 ;
+
+            case YUVJ422P:
+            case YUV422P: ///< planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
+                return
+                    "vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+
+                    "  vec2 u_off = vec2("+tc_w_1+"      , 0.0);\n"+
+                    "  vec2 v_off = vec2("+tc_w_1+" * 1.5, 0.0);\n"+
+                    "  vec2 tc_halfw = vec2(texCoord.x*0.5, texCoord.y);\n"+
+                    "  float y,u,v,r,g,b;\n"+
+                    "  y = texture2D(image, texCoord)."+singleTexComp+";\n"+
+                    "  u = texture2D(image, u_off+tc_halfw)."+singleTexComp+";\n"+
+                    "  v = texture2D(image, v_off+tc_halfw)."+singleTexComp+";\n"+
+                    "  y = 1.1643*(y-0.0625);\n"+
+                    "  u = u-0.5;\n"+
+                    "  v = v-0.5;\n"+
+                    "  r = y+1.5958*v;\n"+
+                    "  g = y-0.39173*u-0.81290*v;\n"+
+                    "  b = y+2.017*u;\n"+
+                    "  return vec4(r, g, b, 1);\n"+
+                    "}\n"
+                ;
+
             case YUYV422: // < packed YUV 4:2:2, 2 x 16bpp, [Y0 Cb] [Y1 Cr]
                           // Stuffed into RGBA half width texture
                 return
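The shader cases above share the same BT.601 limited-range dequantization. For clarity, here is the identical conversion expressed as plain Java, a hedged standalone illustration of the constants embedded in the GLSL strings, not JOGL API:

    // Standalone illustration of the BT.601 limited-range YUV -> RGB math
    // used by the texture-lookup shaders above (inputs normalized to [0,1]).
    public class Yuv2RgbSketch {
        static float[] yuvToRgb(float y, float u, float v) {
            y = 1.1643f * (y - 0.0625f);   // expand 16..235 luma to full range
            u = u - 0.5f;                  // center chroma around zero
            v = v - 0.5f;
            final float r = y + 1.5958f * v;
            final float g = y - 0.39173f * u - 0.81290f * v;
            final float b = y + 2.017f * u;
            return new float[] { r, g, b };
        }
        public static void main(String[] args) {
            final float[] rgb = yuvToRgb(0.5f, 0.5f, 0.5f); // u=v=0.5 -> grey, r=g=b
            System.out.printf("r=%.3f g=%.3f b=%.3f%n", rgb[0], rgb[1], rgb[2]);
        }
    }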
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java
index 9dd1ac74a..3ee87b5da 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java
@@ -105,51 +105,94 @@ interface FFMPEGNatives {
     /** FFMPEG/libAV Pixel Format */
     public static enum PixelFormat {
         // NONE= -1,
-        YUV420P,       ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
-        YUYV422,       ///< packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
-        RGB24,         ///< packed RGB 8:8:8, 24bpp, RGBRGB...
-        BGR24,         ///< packed RGB 8:8:8, 24bpp, BGRBGR...
-        YUV422P,       ///< planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
-        YUV444P,       ///< planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
-        YUV410P,       ///< planar YUV 4:1:0,  9bpp, (1 Cr & Cb sample per 4x4 Y samples)
-        YUV411P,       ///< planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
-        GRAY8,         ///< Y, 8bpp
-        MONOWHITE,     ///< Y, 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb
-        MONOBLACK,     ///< Y, 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb
-        PAL8,          ///< 8 bit with RGB32 palette
-        YUVJ420P,      ///< planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of YUV420P and setting color_range
-        YUVJ422P,      ///< planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of YUV422P and setting color_range
-        YUVJ444P,      ///< planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of YUV444P and setting color_range
-        XVMC_MPEG2_MC, ///< XVideo Motion Acceleration via common packet passing
+        /** planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples) */
+        YUV420P,
+        /** packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr ( sharing Cb and Cr w/ 2 pixels )*/
+        YUYV422,
+        /** packed RGB 8:8:8, 24bpp, RGBRGB... */
+        RGB24,
+        /** packed RGB 8:8:8, 24bpp, BGRBGR... */
+        BGR24,
+        /** planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples) */
+        YUV422P,
+        /** planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples) */
+        YUV444P,
+        /** planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples) */
+        YUV410P,
+        /** planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) */
+        YUV411P,
+        /** Y, 8bpp */
+        GRAY8,
+        /** Y, 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb */
+        MONOWHITE,
+        /** Y, 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb */
+        MONOBLACK,
+        /** 8 bit with RGB32 palette */
+        PAL8,
+        /** planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of YUV420P and setting color_range */
+        YUVJ420P,
+        /** planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of YUV422P and setting color_range */
+        YUVJ422P,
+        /** planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of YUV444P and setting color_range */
+        YUVJ444P,
+        /** XVideo Motion Acceleration via common packet passing */
+        XVMC_MPEG2_MC,
+        /** */
         XVMC_MPEG2_IDCT,
-        UYVY422,       ///< packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
-        UYYVYY411,     ///< packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
-        BGR8,          ///< packed RGB 3:3:2,  8bpp, (msb)2B 3G 3R(lsb)
-        BGR4,          ///< packed RGB 1:2:1 bitstream,  4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
-        BGR4_BYTE,     ///< packed RGB 1:2:1,  8bpp, (msb)1B 2G 1R(lsb)
-        RGB8,          ///< packed RGB 3:3:2,  8bpp, (msb)2R 3G 3B(lsb)
-        RGB4,          ///< packed RGB 1:2:1 bitstream,  4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
-        RGB4_BYTE,     ///< packed RGB 1:2:1,  8bpp, (msb)1R 2G 1B(lsb)
-        NV12,          ///< planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
-        NV21,          ///< as above, but U and V bytes are swapped
+        /** packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1 */
+        UYVY422,
+        /** packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3 */
+        UYYVYY411,
+        /** packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb) */
+        BGR8,
+        /** packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits */
+        BGR4,
+        /** packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb) */
+        BGR4_BYTE,
+        /** packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb) */
+        RGB8,
+        /** packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits */
+        RGB4,
+        /** packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb) */
+        RGB4_BYTE,
+        /** planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V) */
+        NV12,
+        /** as above, but U and V bytes are swapped */
+        NV21,
 
-        ARGB,          ///< packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
-        RGBA,          ///< packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
-        ABGR,          ///< packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
-        BGRA,          ///< packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
+        /** packed ARGB 8:8:8:8, 32bpp, ARGBARGB... */
+        ARGB,
+        /** packed RGBA 8:8:8:8, 32bpp, RGBARGBA... */
+        RGBA,
+        /** packed ABGR 8:8:8:8, 32bpp, ABGRABGR... */
+        ABGR,
+        /** packed BGRA 8:8:8:8, 32bpp, BGRABGRA... */
+        BGRA,
 
-        GRAY16BE,      ///< Y , 16bpp, big-endian
-        GRAY16LE,      ///< Y , 16bpp, little-endian
-        YUV440P,       ///< planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
-        YUVJ440P,      ///< planar YUV 4:4:0 full scale (JPEG), deprecated in favor of YUV440P and setting color_range
-        YUVA420P,      ///< planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
-        VDPAU_H264,    ///< H.264 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
-        VDPAU_MPEG1,   ///< MPEG-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
-        VDPAU_MPEG2,   ///< MPEG-2 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
-        VDPAU_WMV3,    ///< WMV3 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
-        VDPAU_VC1,     ///< VC-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
-        RGB48BE,       ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian
-        RGB48LE,       ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian
+        /** Y, 16bpp, big-endian */
+        GRAY16BE,
+        /** Y , 16bpp, little-endian */
+        GRAY16LE,
+        /** planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples) */
+        YUV440P,
+        /** planar YUV 4:4:0 full scale (JPEG), deprecated in favor of YUV440P and setting color_range */
+        YUVJ440P,
+        /** planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples) */
+        YUVA420P,
+        /** H.264 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers */
+        VDPAU_H264,
+        /** MPEG-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers */
+        VDPAU_MPEG1,
+        /** MPEG-2 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers */
+        VDPAU_MPEG2,
+        /** WMV3 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers */
+        VDPAU_WMV3,
+        /** VC-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers */
+        VDPAU_VC1,
+        /** packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian */
+        RGB48BE,
+        /** packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian */
+        RGB48LE,
 
         RGB565BE,      ///< packed RGB 5:6:5, 16bpp, (msb)   5R 6G 5B(lsb), big-endian
         RGB565LE,      ///< packed RGB 5:6:5, 16bpp, (msb)   5R 6G 5B(lsb), little-endian
diff --git a/src/jogl/native/GLContext.c b/src/jogl/native/GLContext.c
new file mode 100644
index 000000000..f10d0e421
--- /dev/null
+++ b/src/jogl/native/GLContext.c
@@ -0,0 +1,46 @@
+
+#include "jogamp_opengl_GLContextImpl.h"
+#include "JoglCommon.h"
+
+#include <assert.h>
+#include <KHR/khrplatform.h>
+
+/*
+ * Class:     jogamp_opengl_GLContextImpl
+ * Method:    glGetStringInt
+ * Signature: (IJ)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_jogamp_opengl_GLContextImpl_glGetStringInt(JNIEnv *env, jclass _unused, jint name, jlong procAddress) {
+    typedef const khronos_uint8_t * (KHRONOS_APIENTRY*_local_PFNGLGETSTRINGPROC)(unsigned int name);
+    _local_PFNGLGETSTRINGPROC ptr_glGetString;
+    const khronos_uint8_t * _res;
+    ptr_glGetString = (_local_PFNGLGETSTRINGPROC) (intptr_t) procAddress;
+    assert(ptr_glGetString != NULL);
+    _res = (* ptr_glGetString) ((unsigned int) name);
+    if (NULL == _res) return NULL;
+    return (*env)->NewStringUTF(env, _res);
+}
+
+/*
+ * Class:     jogamp_opengl_GLContextImpl
+ * Method:    glGetIntegervInt
+ * Signature: (ILjava/lang/Object;I)V
+ */
+JNIEXPORT void JNICALL
+Java_jogamp_opengl_GLContextImpl_glGetIntegervInt(JNIEnv *env, jclass _unused, jint pname, jobject params, jint params_byte_offset, jlong procAddress) {
+    typedef void (KHRONOS_APIENTRY*_local_PFNGLGETINTEGERVPROC)(unsigned int pname, int * params);
+
+    _local_PFNGLGETINTEGERVPROC ptr_glGetIntegerv;
+    int * _params_ptr = NULL;
+    if ( NULL != params ) {
+        _params_ptr = (int *) (((char*) (*env)->GetPrimitiveArrayCritical(env, params, NULL) ) + params_byte_offset);
+    }
+    ptr_glGetIntegerv = (_local_PFNGLGETINTEGERVPROC) (intptr_t) procAddress;
+    assert(ptr_glGetIntegerv != NULL);
+    (* ptr_glGetIntegerv) ((unsigned int) pname, (int *) _params_ptr);
+    if ( NULL != params ) {
+        (*env)->ReleasePrimitiveArrayCritical(env, params, _params_ptr, 0);
+    }
+}
+
diff --git a/src/jogl/native/JoglCommon.c b/src/jogl/native/JoglCommon.c
index 62dd8ef21..4170b13ec 100644
--- a/src/jogl/native/JoglCommon.c
+++ b/src/jogl/native/JoglCommon.c
@@ -1,5 +1,4 @@
 
-#include "jogamp_opengl_GLContextImpl.h"
 #include "JoglCommon.h"
 
 #include <assert.h>
@@ -132,42 +131,3 @@ void JoglCommon_ReleaseJNIEnv (int shallBeDetached) {
     }
 }
 
-/*
- * Class:     jogamp_opengl_GLContextImpl
- * Method:    glGetStringInt
- * Signature: (IJ)Ljava/lang/String;
- */
-JNIEXPORT jstring JNICALL
-Java_jogamp_opengl_GLContextImpl_glGetStringInt(JNIEnv *env, jclass _unused, jint name, jlong procAddress) {
-    typedef const khronos_uint8_t * (KHRONOS_APIENTRY*_local_PFNGLGETSTRINGPROC)(unsigned int name);
-    _local_PFNGLGETSTRINGPROC ptr_glGetString;
-    const khronos_uint8_t * _res;
-    ptr_glGetString = (_local_PFNGLGETSTRINGPROC) (intptr_t) procAddress;
-    assert(ptr_glGetString != NULL);
-    _res = (* ptr_glGetString) ((unsigned int) name);
-    if (NULL == _res) return NULL;
-    return (*env)->NewStringUTF(env, _res);
-}
-
-/*
- * Class:     jogamp_opengl_GLContextImpl
- * Method:    glGetIntegervInt
- * Signature: (ILjava/lang/Object;I)V
- */
-JNIEXPORT void JNICALL
-Java_jogamp_opengl_GLContextImpl_glGetIntegervInt(JNIEnv *env, jclass _unused, jint pname, jobject params, jint params_byte_offset, jlong procAddress) {
-    typedef void (KHRONOS_APIENTRY*_local_PFNGLGETINTEGERVPROC)(unsigned int pname, int * params);
-
-    _local_PFNGLGETINTEGERVPROC ptr_glGetIntegerv;
-    int * _params_ptr = NULL;
-    if ( NULL != params ) {
-        _params_ptr = (int *) (((char*) (*env)->GetPrimitiveArrayCritical(env, params, NULL) ) + params_byte_offset);
-    }
-    ptr_glGetIntegerv = (_local_PFNGLGETINTEGERVPROC) (intptr_t) procAddress;
-    assert(ptr_glGetIntegerv != NULL);
-    (* ptr_glGetIntegerv) ((unsigned int) pname, (int *) _params_ptr);
-    if ( NULL != params ) {
-        (*env)->ReleasePrimitiveArrayCritical(env, params, _params_ptr, 0);
-    }
-}
-
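The JNI entry points moved above dispatch through a raw GL function pointer passed in from Java as a jlong. For orientation, a hedged sketch of the matching Java-side native declarations (the enclosing GLContextImpl class body is assumed and not part of this patch):

    // Hedged sketch: Java-side native declarations corresponding to the JNI
    // symbols Java_jogamp_opengl_GLContextImpl_glGetStringInt / glGetIntegervInt.
    // 'procAddress' is the GL entry point resolved at runtime (e.g. via a
    // ProcAddressTable); passing it per call keeps the C side context-free.
    public abstract class GLContextImplSketch {
        protected static native String glGetStringInt(int name, long procAddress);
        protected static native void glGetIntegervInt(int pname, int[] params, int params_byte_offset, long procAddress);
    }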
diff --git a/src/jogl/native/libav/ffmpeg_impl_template.c b/src/jogl/native/libav/ffmpeg_impl_template.c
index 60cddef45..822007136 100644
--- a/src/jogl/native/libav/ffmpeg_impl_template.c
+++ b/src/jogl/native/libav/ffmpeg_impl_template.c
@@ -67,7 +67,6 @@ typedef int (APIENTRYP AVCODEC_CLOSE)(AVCodecContext *avctx);
 typedef void (APIENTRYP AVCODEC_STRING)(char *buf, int buf_size, AVCodecContext *enc, int encode);
 typedef AVCodec *(APIENTRYP AVCODEC_FIND_DECODER)(enum CodecID id);
 typedef int (APIENTRYP AVCODEC_OPEN2)(AVCodecContext *avctx, AVCodec *codec, AVDictionary **options); // 53.6.0
-typedef int (APIENTRYP AVCODEC_OPEN)(AVCodecContext *avctx, AVCodec *codec);
 typedef AVFrame *(APIENTRYP AVCODEC_ALLOC_FRAME)(void);
 typedef void (APIENTRYP AVCODEC_GET_FRAME_DEFAULTS)(AVFrame *frame);
 typedef void (APIENTRYP AVCODEC_FREE_FRAME)(AVFrame **frame);
@@ -79,7 +78,6 @@ typedef int (APIENTRYP AV_NEW_PACKET)(AVPacket *pkt, int size);
 typedef void (APIENTRYP AV_DESTRUCT_PACKET)(AVPacket *pkt);
 typedef void (APIENTRYP AV_FREE_PACKET)(AVPacket *pkt);
 typedef int (APIENTRYP AVCODEC_DECODE_AUDIO4)(AVCodecContext *avctx, AVFrame *frame, int *got_frame_ptr, AVPacket *avpkt); // 53.25.0
-typedef int (APIENTRYP AVCODEC_DECODE_AUDIO3)(AVCodecContext *avctx, int16_t *samples, int *frame_size_ptr, AVPacket *avpkt); // 52.23.0
 typedef int (APIENTRYP AVCODEC_DECODE_VIDEO2)(AVCodecContext *avctx, AVFrame *picture, int *got_picture_ptr, AVPacket *avpkt); // 52.23.0
 
 static AVCODEC_REGISTER_ALL sp_avcodec_register_all;
@@ -87,7 +85,6 @@ static AVCODEC_CLOSE sp_avcodec_close;
 static AVCODEC_STRING sp_avcodec_string;
 static AVCODEC_FIND_DECODER sp_avcodec_find_decoder;
 static AVCODEC_OPEN2 sp_avcodec_open2;                    // 53.6.0
-static AVCODEC_OPEN sp_avcodec_open;
 static AVCODEC_ALLOC_FRAME sp_avcodec_alloc_frame;
 static AVCODEC_GET_FRAME_DEFAULTS sp_avcodec_get_frame_defaults;
 static AVCODEC_FREE_FRAME sp_avcodec_free_frame;
@@ -99,9 +96,8 @@ static AV_NEW_PACKET sp_av_new_packet;
 static AV_DESTRUCT_PACKET sp_av_destruct_packet;
 static AV_FREE_PACKET sp_av_free_packet;
 static AVCODEC_DECODE_AUDIO4 sp_avcodec_decode_audio4;    // 53.25.0
-static AVCODEC_DECODE_AUDIO3 sp_avcodec_decode_audio3;    // 52.23.0
 static AVCODEC_DECODE_VIDEO2 sp_avcodec_decode_video2;    // 52.23.0
-// count: 23
+// count: 21
 
 // libavutil
 typedef void (APIENTRYP AV_FRAME_UNREF)(AVFrame *frame);
@@ -111,6 +107,11 @@ typedef int (APIENTRYP AV_GET_BITS_PER_PIXEL)(const AVPixFmtDescriptor *pixdesc)
 typedef int (APIENTRYP AV_SAMPLES_GET_BUFFER_SIZE)(int *linesize, int nb_channels, int nb_samples, enum AVSampleFormat sample_fmt, int align);
 typedef int (APIENTRYP AV_GET_BYTES_PER_SAMPLE)(enum AVSampleFormat sample_fmt);
 typedef int (APIENTRYP AV_OPT_SET_INT)(void *obj, const char *name, int64_t val, int search_flags);
+typedef AVDictionaryEntry* (APIENTRYP AV_DICT_GET)(AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags);
+typedef int (APIENTRYP AV_DICT_COUNT)(AVDictionary **m);
+typedef int (APIENTRYP AV_DICT_SET)(AVDictionary **pm, const char *key, const char *value, int flags);
+typedef void (APIENTRYP AV_DICT_FREE)(AVDictionary **m);
+
 static const AVPixFmtDescriptor* sp_av_pix_fmt_descriptors;
 static AV_FRAME_UNREF sp_av_frame_unref;
 static AV_REALLOC sp_av_realloc;
@@ -119,13 +120,16 @@ static AV_GET_BITS_PER_PIXEL sp_av_get_bits_per_pixel;
 static AV_SAMPLES_GET_BUFFER_SIZE sp_av_samples_get_buffer_size;
 static AV_GET_BYTES_PER_SAMPLE sp_av_get_bytes_per_sample;
 static AV_OPT_SET_INT sp_av_opt_set_int;
-// count: 30
+static AV_DICT_GET sp_av_dict_get;
+static AV_DICT_COUNT sp_av_dict_count;
+static AV_DICT_SET sp_av_dict_set;
+static AV_DICT_FREE sp_av_dict_free;
+// count: 33
 
 // libavformat
 typedef AVFormatContext *(APIENTRYP AVFORMAT_ALLOC_CONTEXT)(void);
 typedef void (APIENTRYP AVFORMAT_FREE_CONTEXT)(AVFormatContext *s);  // 52.96.0
 typedef void (APIENTRYP AVFORMAT_CLOSE_INPUT)(AVFormatContext **s);  // 53.17.0
-typedef void (APIENTRYP AV_CLOSE_INPUT_FILE)(AVFormatContext *s);
 typedef void (APIENTRYP AV_REGISTER_ALL)(void);
 typedef AVInputFormat *(APIENTRYP AV_FIND_INPUT_FORMAT)(const char *short_name);
 typedef int (APIENTRYP AVFORMAT_OPEN_INPUT)(AVFormatContext **ps, const char *filename, AVInputFormat *fmt, AVDictionary **options);
@@ -138,12 +142,10 @@ typedef int (APIENTRYP AV_READ_PAUSE)(AVFormatContext *s);
 typedef int (APIENTRYP AVFORMAT_NETWORK_INIT)(void);                 // 53.13.0
 typedef int (APIENTRYP AVFORMAT_NETWORK_DEINIT)(void);               // 53.13.0
 typedef int (APIENTRYP AVFORMAT_FIND_STREAM_INFO)(AVFormatContext *ic, AVDictionary **options); // 53.3.0
-typedef int (APIENTRYP AV_FIND_STREAM_INFO)(AVFormatContext *ic);
 
 static AVFORMAT_ALLOC_CONTEXT sp_avformat_alloc_context;
-static AVFORMAT_FREE_CONTEXT sp_avformat_free_context; // 52.96.0
+static AVFORMAT_FREE_CONTEXT sp_avformat_free_context; // 52.96.0 (not used, only for outfile cts)
 static AVFORMAT_CLOSE_INPUT sp_avformat_close_input;   // 53.17.0
-static AV_CLOSE_INPUT_FILE sp_av_close_input_file;
 static AV_REGISTER_ALL sp_av_register_all;
 static AV_FIND_INPUT_FORMAT sp_av_find_input_format;
 static AVFORMAT_OPEN_INPUT sp_avformat_open_input;
@@ -156,8 +158,7 @@ static AV_READ_PAUSE sp_av_read_pause;
 static AVFORMAT_NETWORK_INIT sp_avformat_network_init;     // 53.13.0
 static AVFORMAT_NETWORK_DEINIT sp_avformat_network_deinit; // 53.13.0
 static AVFORMAT_FIND_STREAM_INFO sp_avformat_find_stream_info; // 53.3.0
-static AV_FIND_STREAM_INFO sp_av_find_stream_info;
-// count: 47
+// count: 48
 
 // libavdevice [53.0.0]
 typedef int (APIENTRYP AVDEVICE_REGISTER_ALL)(void);
@@ -201,14 +202,12 @@ JNIEXPORT jboolean JNICALL FF_FUNC(initSymbols0)
     sp_avformat_version = (AVFORMAT_VERSION) (intptr_t) symbols[i++];
     sp_avutil_version = (AVUTIL_VERSION) (intptr_t) symbols[i++];
     sp_avresample_version = (AVRESAMPLE_VERSION) (intptr_t) symbols[i++];
-    // count: 4
 
     sp_avcodec_register_all = (AVCODEC_REGISTER_ALL) (intptr_t) symbols[i++];
     sp_avcodec_close = (AVCODEC_CLOSE) (intptr_t) symbols[i++];
     sp_avcodec_string = (AVCODEC_STRING) (intptr_t) symbols[i++];
     sp_avcodec_find_decoder = (AVCODEC_FIND_DECODER) (intptr_t) symbols[i++];
     sp_avcodec_open2 = (AVCODEC_OPEN2) (intptr_t) symbols[i++];
-    sp_avcodec_open = (AVCODEC_OPEN) (intptr_t) symbols[i++];
     sp_avcodec_alloc_frame = (AVCODEC_ALLOC_FRAME) (intptr_t) symbols[i++];
     sp_avcodec_get_frame_defaults = (AVCODEC_GET_FRAME_DEFAULTS) (intptr_t) symbols[i++];
     sp_avcodec_free_frame = (AVCODEC_FREE_FRAME) (intptr_t) symbols[i++];
@@ -220,9 +219,7 @@ JNIEXPORT jboolean JNICALL FF_FUNC(initSymbols0)
     sp_av_destruct_packet = (AV_DESTRUCT_PACKET) (intptr_t) symbols[i++];
     sp_av_free_packet = (AV_FREE_PACKET) (intptr_t) symbols[i++];
     sp_avcodec_decode_audio4 = (AVCODEC_DECODE_AUDIO4) (intptr_t) symbols[i++];
-    sp_avcodec_decode_audio3 = (AVCODEC_DECODE_AUDIO3) (intptr_t) symbols[i++];
     sp_avcodec_decode_video2 = (AVCODEC_DECODE_VIDEO2) (intptr_t) symbols[i++];
-    // count: 23
 
     sp_av_pix_fmt_descriptors = (const AVPixFmtDescriptor*) (intptr_t) symbols[i++];
     sp_av_frame_unref = (AV_FRAME_UNREF) (intptr_t) symbols[i++];
@@ -232,12 +229,14 @@ JNIEXPORT jboolean JNICALL FF_FUNC(initSymbols0)
     sp_av_samples_get_buffer_size = (AV_SAMPLES_GET_BUFFER_SIZE) (intptr_t) symbols[i++];
     sp_av_get_bytes_per_sample = (AV_GET_BYTES_PER_SAMPLE) (intptr_t) symbols[i++];
     sp_av_opt_set_int = (AV_OPT_SET_INT) (intptr_t) symbols[i++];
-    // count: 31
+    sp_av_dict_get = (AV_DICT_GET) (intptr_t) symbols[i++];
+    sp_av_dict_count = (AV_DICT_COUNT) (intptr_t) symbols[i++];
+    sp_av_dict_set = (AV_DICT_SET) (intptr_t) symbols[i++];
+    sp_av_dict_free = (AV_DICT_FREE) (intptr_t) symbols[i++];
 
     sp_avformat_alloc_context = (AVFORMAT_ALLOC_CONTEXT) (intptr_t) symbols[i++];;
     sp_avformat_free_context = (AVFORMAT_FREE_CONTEXT) (intptr_t) symbols[i++];
     sp_avformat_close_input = (AVFORMAT_CLOSE_INPUT) (intptr_t) symbols[i++];
-    sp_av_close_input_file = (AV_CLOSE_INPUT_FILE) (intptr_t) symbols[i++];
     sp_av_register_all = (AV_REGISTER_ALL) (intptr_t) symbols[i++];
     sp_av_find_input_format = (AV_FIND_INPUT_FORMAT) (intptr_t) symbols[i++];
     sp_avformat_open_input = (AVFORMAT_OPEN_INPUT) (intptr_t) symbols[i++];
@@ -250,18 +249,14 @@ JNIEXPORT jboolean JNICALL FF_FUNC(initSymbols0)
     sp_avformat_network_init = (AVFORMAT_NETWORK_INIT) (intptr_t) symbols[i++];
     sp_avformat_network_deinit = (AVFORMAT_NETWORK_DEINIT) (intptr_t) symbols[i++];
     sp_avformat_find_stream_info = (AVFORMAT_FIND_STREAM_INFO) (intptr_t) symbols[i++];
-    sp_av_find_stream_info = (AV_FIND_STREAM_INFO) (intptr_t) symbols[i++];
-    // count: 48
 
     sp_avdevice_register_all = (AVDEVICE_REGISTER_ALL) (intptr_t) symbols[i++];
-    // count: 49
 
     sp_avresample_alloc_context = (AVRESAMPLE_ALLOC_CONTEXT) (intptr_t) symbols[i++];
     sp_avresample_open = (AVRESAMPLE_OPEN) (intptr_t) symbols[i++];
     sp_avresample_close = (AVRESAMPLE_CLOSE) (intptr_t) symbols[i++];
     sp_avresample_free = (AVRESAMPLE_FREE) (intptr_t) symbols[i++];
     sp_avresample_convert = (AVRESAMPLE_CONVERT) (intptr_t) symbols[i++];
-    // count: 54
 
     (*env)->ReleasePrimitiveArrayCritical(env, jSymbols, symbols, 0);
 
@@ -285,12 +280,12 @@ static void _updateJavaAttributes(JNIEnv *env, jobject instance, FFMPEGToolBasic
     // JNIEnv * env = JoglCommon_GetJNIEnv (&shallBeDetached);
     if(NULL!=env) {
         (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, jni_mid_updateAttributes2,
-                               pAV->vPixFmt, pAV->vBufferPlanes,
+                               pAV->vid, pAV->vPixFmt, pAV->vBufferPlanes,
                                pAV->vBitsPerPixel, pAV->vBytesPerPixelPerPlane,
                                pAV->vLinesize[0], pAV->vLinesize[1], pAV->vLinesize[2],
                                pAV->vTexWidth[0], pAV->vTexWidth[1], pAV->vTexWidth[2],
                                pAV->vWidth, pAV->vHeight,
-                               pAV->aSampleFmtOut, pAV->aSampleRateOut, pAV->aChannelsOut, pAV->aFrameSize);
+                               pAV->aid, pAV->aSampleFmtOut, pAV->aSampleRateOut, pAV->aChannelsOut, pAV->aFrameSize);
         (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, jni_mid_updateAttributes1,
                                pAV->vid, pAV->aid,
                                pAV->vWidth, pAV->vHeight,
@@ -365,14 +360,9 @@ static void freeInstance(JNIEnv *env, FFMPEGToolBasicAV_t* pAV) {
 
         // Close the video file
         if(NULL != pAV->pFormatCtx) {
-            if(HAS_FUNC(sp_avformat_close_input)) {
-                sp_avformat_close_input(&pAV->pFormatCtx);
-            } else {
-                sp_av_close_input_file(pAV->pFormatCtx);
-                if(HAS_FUNC(sp_avformat_free_context)) {
-                    sp_avformat_free_context(pAV->pFormatCtx);
-                }
-            }
+            sp_avformat_close_input(&pAV->pFormatCtx);
+            // Only for output files!
+            // sp_avformat_free_context(pAV->pFormatCtx);
             pAV->pFormatCtx = NULL;
         }
         if( NULL != pAV->ffmpegMediaPlayer ) {
@@ -462,7 +452,7 @@ JNIEXPORT jboolean JNICALL FF_FUNC(initIDs0)
 
     jni_mid_pushSound = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "pushSound", "(Ljava/nio/ByteBuffer;II)V");
     jni_mid_updateAttributes1 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes", "(IIIIIIIFIIILjava/lang/String;Ljava/lang/String;)V");
-    jni_mid_updateAttributes2 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes2", "(IIIIIIIIIIIIIIII)V");
+    jni_mid_updateAttributes2 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes2", "(IIIIIIIIIIIIIIIIII)V");
     jni_mid_isAudioFormatSupported = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "isAudioFormatSupported", "(III)Z");
 
     if(jni_mid_pushSound == NULL ||
@@ -548,10 +538,10 @@ static AVInputFormat* tryAVInputFormat(const char * name, int verbose) {
     return inFmt;
 }
 static const char * inFmtNames[] = {
-    "video4linux2",
-    "video4linux",
-    "vfwcap",
-    "dshow",
+    "video4linux2", // linux
+    "video4linux",  // linux (old)
+    "dshow",        // windows
+    "vfwcap",       // windows (old)
     "mpg",
     "yuv2",
     "mjpeg",
@@ -601,30 +591,41 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
 
     pAV->pFormatCtx = sp_avformat_alloc_context();
 
-    // Open video file
-    AVInputFormat* inFmt = jIsCameraInput ? findAVInputFormat(pAV->verbose) : NULL;
-    const char *urlPath = (*env)->GetStringUTFChars(env, jURL, &iscopy);
-    res = sp_avformat_open_input(&pAV->pFormatCtx, urlPath, inFmt, NULL);
+
+    // Open video file
+    AVDictionary *inOpts = NULL;
+    AVInputFormat* inFmt = NULL;
+    if( jIsCameraInput ) {
+        inFmt = findAVInputFormat(pAV->verbose);
+        if( NULL == inFmt ) {
+            JoglCommon_throwNewRuntimeException(env, "Couldn't find input format for camera: %s", urlPath);
+            (*env)->ReleaseStringChars(env, jURL, (const jchar *)urlPath);
+            return;
+        }
+        // set maximum values, driver shall 'degrade' ..
+        // sp_av_dict_set(&inOpts, "video_size", "640x480", 0);
+        // sp_av_dict_set(&inOpts, "video_size", "1280x720", 0);
+        sp_av_dict_set(&inOpts, "video_size", "hd720", 0); // video4linux, vfwcap, ..
+        // sp_av_dict_set(&inOpts, "video_size", "1280x1024", 0);
+        // sp_av_dict_set(&inOpts, "video_size", "320x240", 0);
+        sp_av_dict_set(&inOpts, "framerate", "60", 0); // not setting a framerate causes some drivers to crash!
+    }
+    res = sp_avformat_open_input(&pAV->pFormatCtx, urlPath, inFmt, NULL != inOpts ? &inOpts : NULL);
+    if( NULL != inOpts ) {
+        sp_av_dict_free(&inOpts);
+    }
     if(res != 0) {
-        JoglCommon_throwNewRuntimeException(env, "Couldn't open URI: %s", urlPath);
+        JoglCommon_throwNewRuntimeException(env, "Couldn't open URI: %s, err %d", urlPath, res);
         (*env)->ReleaseStringChars(env, jURL, (const jchar *)urlPath);
         return;
     }
 
     // Retrieve detailed stream information
-    if(HAS_FUNC(sp_avformat_find_stream_info)) {
-        if(sp_avformat_find_stream_info(pAV->pFormatCtx, NULL)<0) {
-            (*env)->ReleaseStringChars(env, jURL, (const jchar *)urlPath);
-            JoglCommon_throwNewRuntimeException(env, "Couldn't find stream information");
-            return;
-        }
-    } else {
-        if(sp_av_find_stream_info(pAV->pFormatCtx)<0) {
-            (*env)->ReleaseStringChars(env, jURL, (const jchar *)urlPath);
-            JoglCommon_throwNewRuntimeException(env, "Couldn't find stream information");
-            return;
-        }
+    if(sp_avformat_find_stream_info(pAV->pFormatCtx, NULL)<0) {
+        (*env)->ReleaseStringChars(env, jURL, (const jchar *)urlPath);
+        JoglCommon_throwNewRuntimeException(env, "Couldn't find stream information");
+        return;
     }
 
     if(pAV->verbose) {
@@ -725,11 +726,7 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
 #if LIBAVCODEC_VERSION_MAJOR >= 55
     pAV->pACodecCtx->refcounted_frames = pAV->useRefCountedFrames;
 #endif
-    if(HAS_FUNC(sp_avcodec_open2)) {
-        res = sp_avcodec_open2(pAV->pACodecCtx, pAV->pACodec, NULL);
-    } else {
-        res = sp_avcodec_open(pAV->pACodecCtx, pAV->pACodec);
-    }
+    res = sp_avcodec_open2(pAV->pACodecCtx, pAV->pACodec, NULL);
     if(res<0) {
         JoglCommon_throwNewRuntimeException(env, "Couldn't open audio codec %d, %s", pAV->pACodecCtx->codec_id, pAV->acodec);
         return;
@@ -858,11 +855,7 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
 #if LIBAVCODEC_VERSION_MAJOR >= 55
     pAV->pVCodecCtx->refcounted_frames = pAV->useRefCountedFrames;
 #endif
-    if(HAS_FUNC(sp_avcodec_open2)) {
-        res = sp_avcodec_open2(pAV->pVCodecCtx, pAV->pVCodec, NULL);
-    } else {
-        res = sp_avcodec_open(pAV->pVCodecCtx, pAV->pVCodec);
-    }
+    res = sp_avcodec_open2(pAV->pVCodecCtx, pAV->pVCodec, NULL);
     if(res<0) {
         JoglCommon_throwNewRuntimeException(env, "Couldn't open video codec %d, %s", pAV->pVCodecCtx->codec_id, pAV->vcodec);
         return;
@@ -953,10 +946,10 @@ JNIEXPORT void JNICALL FF_FUNC(setGLFuncs0)
 }
 
 #if 0
-#define DBG_TEXSUBIMG2D_a(c,p,d,i) fprintf(stderr, "TexSubImage2D.%c offset %d / %d, size %d x %d, ", c, p->pVCodecCtx->width, p->pVCodecCtx->height/d, p->vTexWidth[i], p->pVCodecCtx->height/d)
+#define DBG_TEXSUBIMG2D_a(c,p,w1,w2,h,i) fprintf(stderr, "TexSubImage2D.%c offset %d / %d, size %d x %d, ", c, (w1*p->pVCodecCtx->width)/w2, p->pVCodecCtx->height/h, p->vTexWidth[i], p->pVCodecCtx->height/h)
 #define DBG_TEXSUBIMG2D_b(p) fprintf(stderr, "err 0x%X\n", pAV->procAddrGLGetError())
 #else
-#define DBG_TEXSUBIMG2D_a(c,p,d,i)
+#define DBG_TEXSUBIMG2D_a(c,p,w1,w2,h,i)
 #define DBG_TEXSUBIMG2D_b(p)
 #endif
 
@@ -1005,15 +998,7 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0)
             if (flush_complete) {
                 break;
             }
-            if(HAS_FUNC(sp_avcodec_decode_audio4)) {
-                len1 = sp_avcodec_decode_audio4(pAV->pACodecCtx, pAFrameCurrent, &frameDecoded, &packet);
-            } else {
-                #if 0
-                len1 = sp_avcodec_decode_audio3(pAV->pACodecCtx, int16_t *samples, int *frame_size_ptr, &frameDecoded, &packet);
-                #endif
-                JoglCommon_throwNewRuntimeException(env, "Unimplemented: FFMPEGNatives sp_avcodec_decode_audio3 fallback");
-                return 0;
-            }
+            len1 = sp_avcodec_decode_audio4(pAV->pACodecCtx, pAFrameCurrent, &frameDecoded, &packet);
             if (len1 < 0) {
                 // if error, we skip the frame
                 packet.size = 0;
@@ -1179,17 +1164,17 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0)
 
                 // 1st plane or complete packed frame
                 // FIXME: Libav Binary compatibility! JAU01
-                DBG_TEXSUBIMG2D_a('Y',pAV,1,0);
+                DBG_TEXSUBIMG2D_a('Y',pAV,1,1,1,0);
                 pAV->procAddrGLTexSubImage2D(texTarget, 0,
                                              0,                 0,
                                              pAV->vTexWidth[0], pAV->pVCodecCtx->height,
                                              texFmt, texType, pAV->pVFrame->data[0]);
                 DBG_TEXSUBIMG2D_b(pAV);
 
-                if(pAV->vPixFmt == PIX_FMT_YUV420P) {
+                if( pAV->vPixFmt == PIX_FMT_YUV420P || pAV->vPixFmt == PIX_FMT_YUVJ420P ) {
                     // U plane
                     // FIXME: Libav Binary compatibility! JAU01
-                    DBG_TEXSUBIMG2D_a('U',pAV,2,1);
+                    DBG_TEXSUBIMG2D_a('U',pAV,1,1,2,1);
                     pAV->procAddrGLTexSubImage2D(texTarget, 0,
                                                  pAV->pVCodecCtx->width, 0,
                                                  pAV->vTexWidth[1],      pAV->pVCodecCtx->height/2,
@@ -1197,12 +1182,29 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0)
                     DBG_TEXSUBIMG2D_b(pAV);
                     // V plane
                     // FIXME: Libav Binary compatibility! JAU01
-                    DBG_TEXSUBIMG2D_a('V',pAV,2,2);
+                    DBG_TEXSUBIMG2D_a('V',pAV,1,1,2,2);
                     pAV->procAddrGLTexSubImage2D(texTarget, 0,
                                                  pAV->pVCodecCtx->width, pAV->pVCodecCtx->height/2,
                                                  pAV->vTexWidth[2],      pAV->pVCodecCtx->height/2,
                                                  texFmt, texType, pAV->pVFrame->data[2]);
                     DBG_TEXSUBIMG2D_b(pAV);
+                } else if( pAV->vPixFmt == PIX_FMT_YUV422P || pAV->vPixFmt == PIX_FMT_YUVJ422P ) {
+                    // U plane
+                    // FIXME: Libav Binary compatibility! JAU01
+                    DBG_TEXSUBIMG2D_a('U',pAV,1,1,1,1);
+                    pAV->procAddrGLTexSubImage2D(texTarget, 0,
+                                                 pAV->pVCodecCtx->width, 0,
+                                                 pAV->vTexWidth[1],      pAV->pVCodecCtx->height,
+                                                 texFmt, texType, pAV->pVFrame->data[1]);
+                    DBG_TEXSUBIMG2D_b(pAV);
+                    // V plane
+                    // FIXME: Libav Binary compatibility! JAU01
+                    DBG_TEXSUBIMG2D_a('V',pAV,3,2,1,1);
+                    pAV->procAddrGLTexSubImage2D(texTarget, 0,
+                                                 pAV->pVCodecCtx->width+pAV->pVCodecCtx->width/2, 0,
+                                                 pAV->vTexWidth[2],      pAV->pVCodecCtx->height,
+                                                 texFmt, texType, pAV->pVFrame->data[2]);
+                    DBG_TEXSUBIMG2D_b(pAV);
                 } // FIXME: Add more planar formats !
 
                 pAV->procAddrGLFinish();
                 //pAV->procAddrGLFlush();
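A closing note on the upload hunk above: for (YUVJ)422P the U and V planes land at fixed x-offsets to the right of the Y plane, matching the 4-column texture layout chosen in updateAttributes2. A standalone restatement of that offset arithmetic in Java (hypothetical fixed dimensions, mirroring the glTexSubImage2D calls above):

    // Hypothetical restatement of the YUV422P plane placement used above:
    // Y at x=0, U at x=w, V at x=w+w/2, all planes full height.
    public class Yuv422pOffsetsSketch {
        public static void main(String[] args) {
            final int w = 640, h = 480; // decoder-aligned video size
            System.out.println("Y plane: x=0, y=0, "          + w   + "x" + h);
            System.out.println("U plane: x=" + w         + ", y=0, " + w/2 + "x" + h);
            System.out.println("V plane: x=" + (w + w/2) + ", y=0, " + w/2 + "x" + h);
        }
    }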