author     Sven Gothel <[email protected]>  2013-08-26 13:38:40 +0200
committer  Sven Gothel <[email protected]>  2013-08-26 13:38:40 +0200
commit  b5876b598ecf3eb32ceb183ecbd5d29b8885a304 (patch)
tree    9b63eb4a58e12e1df27e1406e0c041218b7fe2c5 /src/jogl/classes
parent  ee64b0b3e47cc5d52fb3679c582d3ad0134adb9e (diff)
libav/ffmpeg: Compile/Link 2 versions of native FFMPEGMediaPlayer methods FFMPEGNatives -> FFMPEGv08Natives + FFMPEGv09Natives
Enables FFMPEGMediaPlayer to work w/ either ffmpeg/libav version 8 or 9 w/ the same JOGL binary.

The same C source code is compiled against:
  1: version 0.8, FFMPEGv08Natives, lavc53.lavf53.lavu51
  2: version 0.9, FFMPEGv09Natives, lavc54.lavf54.lavu52.lavr01

FFMPEGv08Natives and FFMPEGv09Natives implement FFMPEGNatives; the native C code uses CPP '##' macro concatenation to produce unique function names.

To enable 'cpp' to find the libav* header files matching the desired version, we have placed them in the C file's folder, issued '#include "path/file.h"', and added symbolic links so that the same module and its 'sister modules' can be found:

  ls -l libavformat/
  ..
  lrwxrwxrwx 1 sven sven 13 Aug 26 12:56 libavcodec -> ../libavcodec
  lrwxrwxrwx 1 sven sven 14 Aug 26 12:56 libavformat -> ../libavformat
  lrwxrwxrwx 1 sven sven 12 Aug 26 12:57 libavutil -> ../libavutil
  ..

At static init, FFMPEGDynamicLibraryBundleInfo determines the runtime version and instantiates the matching FFMPEGNatives, or null if none matches. FFMPEGMediaPlayer still compares the compile-time and runtime versions. FFMPEGMediaPlayer passes its own instance to FFMPEGNatives for callbacks.
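For illustration only, a minimal C sketch of the '##' token-pasting scheme described above (the FF_PREFIX / FF_FUNC macro names are hypothetical, not the actual JOGL symbols); the single translation unit is compiled twice, once per libav/ffmpeg version, with a different prefix supplied on the compiler command line:

  #define FF_CONCAT_(a, b) a ## b
  #define FF_CONCAT(a, b)  FF_CONCAT_(a, b)

  /* Hypothetical: selected per build, e.g. -DFF_PREFIX=FFMPEGv08Natives_
     or -DFF_PREFIX=FFMPEGv09Natives_ */
  #ifndef FF_PREFIX
  #define FF_PREFIX FFMPEGv08Natives_
  #endif
  #define FF_FUNC(name) FF_CONCAT(FF_PREFIX, name)

  /* Expands to FFMPEGv08Natives_play0 or FFMPEGv09Natives_play0,
     depending on which prefix the build defines. */
  int FF_FUNC(play0)(long moviePtr) {
      (void)moviePtr;
      return 0; /* stub */
  }

Each of the two resulting native objects backs one Java binding class, FFMPEGv08Natives or FFMPEGv09Natives, both implementing the common FFMPEGNatives interface.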
Diffstat (limited to 'src/jogl/classes')
-rw-r--r--  src/jogl/classes/jogamp/opengl/openal/av/ALDummyUsage.java                         3
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java   56
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java               269
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java                   188
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGStaticNatives.java              15
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv08Natives.java                 51
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv09Natives.java                 51
7 files changed, 383 insertions(+), 250 deletions(-)
diff --git a/src/jogl/classes/jogamp/opengl/openal/av/ALDummyUsage.java b/src/jogl/classes/jogamp/opengl/openal/av/ALDummyUsage.java
index 69223d0b9..2c1dfa237 100644
--- a/src/jogl/classes/jogamp/opengl/openal/av/ALDummyUsage.java
+++ b/src/jogl/classes/jogamp/opengl/openal/av/ALDummyUsage.java
@@ -1,7 +1,5 @@
package jogamp.opengl.openal.av;
-import jogamp.opengl.util.av.impl.FFMPEGMediaPlayer;
-
import com.jogamp.openal.AL;
import com.jogamp.openal.JoalVersion;
@@ -10,7 +8,6 @@ import com.jogamp.openal.JoalVersion;
*/
public class ALDummyUsage {
static AL al;
- static FFMPEGMediaPlayer.PixelFormat pfmt;
public static void main(String args[]) {
System.err.println("JOGL> Hello JOAL");
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
index 2f92f9bf3..ab16c5e5e 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
@@ -43,6 +43,7 @@ import javax.media.opengl.GLProfile;
import com.jogamp.common.os.DynamicLibraryBundle;
import com.jogamp.common.os.DynamicLibraryBundleInfo;
import com.jogamp.common.util.RunnableExecutor;
+import com.jogamp.common.util.VersionNumber;
/**
* FIXME: We need native structure access methods to deal with API changes
@@ -148,10 +149,15 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
"avresample_convert",
};
- private static long[] symbolAddr;
+ private static final long[] symbolAddr = new long[symbolCount];
private static final boolean ready;
private static final boolean libsLoaded;
private static final boolean avresampleLoaded; // optional
+ static final VersionNumber avCodecVersion;
+ static final VersionNumber avFormatVersion;
+ static final VersionNumber avUtilVersion;
+ static final VersionNumber avResampleVersion;
+ private static final FFMPEGNatives natives;
static {
// native ffmpeg media player implementation is included in jogl_desktop and jogl_mobile
@@ -159,26 +165,51 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
boolean _ready = false;
boolean[] _libsLoaded= { false };
boolean[] _avresampleLoaded= { false };
+ VersionNumber[] _versions = new VersionNumber[4];
try {
- _ready = initSymbols(_libsLoaded, _avresampleLoaded);
+ _ready = initSymbols(_libsLoaded, _avresampleLoaded, _versions);
} catch (Throwable t) {
t.printStackTrace();
}
libsLoaded = _libsLoaded[0];
avresampleLoaded = _avresampleLoaded[0];
- ready = _ready;
+ avCodecVersion = _versions[0];
+ avFormatVersion = _versions[1];
+ avUtilVersion = _versions[2];
+ avResampleVersion = _versions[3];
if(!libsLoaded) {
System.err.println("LIB_AV Not Available");
- } else if(!ready) {
+ natives = null;
+ ready = false;
+ } else if(!_ready) {
System.err.println("LIB_AV Not Matching");
+ natives = null;
+ ready = false;
+ } else {
+ if( avCodecVersion.getMajor() <= 53 && avFormatVersion.getMajor() <= 53 && avUtilVersion.getMajor() <= 51 ) {
+ // lavc53.lavf53.lavu51
+ natives = new FFMPEGv08Natives();
+ } else if( avCodecVersion.getMajor() == 54 && avFormatVersion.getMajor() <= 54 && avUtilVersion.getMajor() <= 52 ) {
+ // lavc54.lavf54.lavu52.lavr01
+ natives = new FFMPEGv09Natives();
+ } else {
+ System.err.println("LIB_AV No Version/Native-Impl Match");
+ natives = null;
+ }
+ if( null != natives ) {
+ ready = natives.initSymbols0(symbolAddr, symbolCount);
+ } else {
+ ready = false;
+ }
}
}
static boolean libsLoaded() { return libsLoaded; }
static boolean avResampleLoaded() { return avresampleLoaded; }
+ static FFMPEGNatives getNatives() { return natives; }
static boolean initSingleton() { return ready; }
- private static final boolean initSymbols(boolean[] libsLoaded, boolean[] avresampleLoaded) {
+ private static final boolean initSymbols(boolean[] libsLoaded, boolean[] avresampleLoaded, VersionNumber[] versions) {
libsLoaded[0] = false;
final DynamicLibraryBundle dl = AccessController.doPrivileged(new PrivilegedAction<DynamicLibraryBundle>() {
public DynamicLibraryBundle run() {
@@ -191,15 +222,11 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
throw new RuntimeException("FFMPEG Tool library incomplete: [ avutil "+avutilLoaded+", avformat "+avformatLoaded+", avcodec "+avcodecLoaded+"]");
}
avresampleLoaded[0] = dl.isToolLibLoaded(3);
- /** Ignore .. due to optional libavresample
- if(!dl.isToolLibComplete()) {
- throw new RuntimeException("FFMPEG Tool libraries incomplete");
- } */
libsLoaded[0] = true;
+
if(symbolNames.length != symbolCount) {
throw new InternalError("XXX0 "+symbolNames.length+" != "+symbolCount);
}
- symbolAddr = new long[symbolCount];
// optional symbol name set
final Set<String> optionalSymbolNameSet = new HashSet<String>();
@@ -258,7 +285,12 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
}
}
}
- return initSymbols0(symbolAddr, symbolCount) && res;
+ versions[0] = FFMPEGStaticNatives.getAVVersion(FFMPEGStaticNatives.getAvCodecVersion0(symbolAddr[0]));
+ versions[1] = FFMPEGStaticNatives.getAVVersion(FFMPEGStaticNatives.getAvFormatVersion0(symbolAddr[1]));
+ versions[2] = FFMPEGStaticNatives.getAVVersion(FFMPEGStaticNatives.getAvUtilVersion0(symbolAddr[2]));
+ versions[3] = FFMPEGStaticNatives.getAVVersion(FFMPEGStaticNatives.getAvResampleVersion0(symbolAddr[3]));
+
+ return res;
}
protected FFMPEGDynamicLibraryBundleInfo() {
@@ -361,6 +393,4 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
public final RunnableExecutor getLibLoaderExecutor() {
return DynamicLibraryBundle.getDefaultRunnableExecutor();
}
-
- private static native boolean initSymbols0(long[] symbols, int count);
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
index 258c6757b..bcf4994b5 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
@@ -29,7 +29,6 @@
package jogamp.opengl.util.av.impl;
import java.io.IOException;
-import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.security.AccessController;
import java.security.PrivilegedAction;
@@ -39,7 +38,6 @@ import javax.media.opengl.GL2ES2;
import javax.media.opengl.GLException;
import com.jogamp.common.os.Platform;
-import com.jogamp.common.util.VersionNumber;
import com.jogamp.gluegen.runtime.ProcAddressTable;
import com.jogamp.opengl.util.TimeFrameI;
import com.jogamp.opengl.util.GLPixelStorageModes;
@@ -51,6 +49,8 @@ import com.jogamp.opengl.util.texture.Texture;
import jogamp.opengl.GLContextImpl;
import jogamp.opengl.util.av.GLMediaPlayerImpl;
+import jogamp.opengl.util.av.impl.FFMPEGNatives.PixelFormat;
+import jogamp.opengl.util.av.impl.FFMPEGNatives.SampleFormat;
/***
* Implementation utilizes <a href="http://libav.org/">Libav</a>
@@ -109,65 +109,47 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
/** POSIX ENOSYS {@value}: Function not implemented. FIXME: Move to GlueGen ?!*/
private static final int ENOSYS = 38;
- /** Default number of audio frames per video frame */
- private static final int AV_DEFAULT_AFRAMES = 8;
-
// Instance data
+ private static final FFMPEGNatives natives;
private static final int avUtilMajorVersionCC;
private static final int avFormatMajorVersionCC;
private static final int avCodecMajorVersionCC;
private static final int avResampleMajorVersionCC;
- private static final VersionNumber avUtilVersion;
- private static final VersionNumber avFormatVersion;
- private static final VersionNumber avCodecVersion;
- private static final VersionNumber avResampleVersion;
private static final boolean available;
static {
final boolean libAVGood = FFMPEGDynamicLibraryBundleInfo.initSingleton();
final boolean libAVVersionGood;
if( FFMPEGDynamicLibraryBundleInfo.libsLoaded() ) {
- avUtilMajorVersionCC = getAvUtilMajorVersionCC0();
- avFormatMajorVersionCC = getAvFormatMajorVersionCC0();
- avCodecMajorVersionCC = getAvCodecMajorVersionCC0();
- avResampleMajorVersionCC = getAvResampleMajorVersionCC0();
- avUtilVersion = getAVVersion(getAvUtilVersion0());
- avFormatVersion = getAVVersion(getAvFormatVersion0());
- avCodecVersion = getAVVersion(getAvCodecVersion0());
- avResampleVersion = getAVVersion(getAvResampleVersion0());
- System.err.println("LIB_AV Util : "+avUtilVersion+" [cc "+avUtilMajorVersionCC+"]");
- System.err.println("LIB_AV Format : "+avFormatVersion+" [cc "+avFormatMajorVersionCC+"]");
- System.err.println("LIB_AV Codec : "+avCodecVersion+" [cc "+avCodecMajorVersionCC+"]");
- System.err.println("LIB_AV Resample: "+avResampleVersion+" [cc "+avResampleMajorVersionCC+"]");
- libAVVersionGood = avUtilMajorVersionCC == avUtilVersion.getMajor() &&
- avFormatMajorVersionCC == avFormatVersion.getMajor() &&
- avCodecMajorVersionCC == avCodecVersion.getMajor() &&
- avResampleMajorVersionCC == avResampleVersion.getMajor();
+ natives = FFMPEGDynamicLibraryBundleInfo.getNatives();
+ avUtilMajorVersionCC = natives.getAvUtilMajorVersionCC0();
+ avFormatMajorVersionCC = natives.getAvFormatMajorVersionCC0();
+ avCodecMajorVersionCC = natives.getAvCodecMajorVersionCC0();
+ avResampleMajorVersionCC = natives.getAvResampleMajorVersionCC0();
+ System.err.println("LIB_AV Util : "+FFMPEGDynamicLibraryBundleInfo.avUtilVersion+" [cc "+avUtilMajorVersionCC+"]");
+ System.err.println("LIB_AV Format : "+FFMPEGDynamicLibraryBundleInfo.avFormatVersion+" [cc "+avFormatMajorVersionCC+"]");
+ System.err.println("LIB_AV Codec : "+FFMPEGDynamicLibraryBundleInfo.avCodecVersion+" [cc "+avCodecMajorVersionCC+"]");
+ System.err.println("LIB_AV Resample: "+FFMPEGDynamicLibraryBundleInfo.avResampleVersion+" [cc "+avResampleMajorVersionCC+"]");
+ libAVVersionGood = avUtilMajorVersionCC == FFMPEGDynamicLibraryBundleInfo.avUtilVersion.getMajor() &&
+ avFormatMajorVersionCC == FFMPEGDynamicLibraryBundleInfo.avFormatVersion.getMajor() &&
+ avCodecMajorVersionCC == FFMPEGDynamicLibraryBundleInfo.avCodecVersion.getMajor() &&
+ avResampleMajorVersionCC == FFMPEGDynamicLibraryBundleInfo.avResampleVersion.getMajor();
if( !libAVVersionGood ) {
System.err.println("LIB_AV Not Matching Compile-Time / Runtime Major-Version");
}
} else {
+ natives = null;
avUtilMajorVersionCC = 0;
avFormatMajorVersionCC = 0;
avCodecMajorVersionCC = 0;
avResampleMajorVersionCC = 0;
- avUtilVersion = null;
- avFormatVersion = null;
- avCodecVersion = null;
- avResampleVersion = null;
libAVVersionGood = false;
}
- available = libAVGood && libAVVersionGood ? initIDs0() : false;
+ available = libAVGood && libAVVersionGood && null != natives ? natives.initIDs0() : false;
}
public static final boolean isAvailable() { return available; }
- private static VersionNumber getAVVersion(int vers) {
- return new VersionNumber( ( vers >> 16 ) & 0xFF,
- ( vers >> 8 ) & 0xFF,
- ( vers >> 0 ) & 0xFF );
- }
-
//
// General
//
@@ -199,7 +181,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
if(!available) {
throw new RuntimeException("FFMPEGMediaPlayer not available");
}
- moviePtr = createInstance0( DEBUG_NATIVE );
+ moviePtr = natives.createInstance0(this, DEBUG_NATIVE);
if(0==moviePtr) {
throw new GLException("Couldn't create FFMPEGInstance");
}
@@ -210,7 +192,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
@Override
protected final void destroyImpl(GL gl) {
if (moviePtr != 0) {
- destroyInstance0(moviePtr);
+ natives.destroyInstance0(moviePtr);
moviePtr = 0;
}
destroyAudioSink();
@@ -280,7 +262,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
final int aMaxChannelCount = audioSink.getMaxSupportedChannels();
final int aPrefSampleRate = preferredAudioFormat.sampleRate;
// setStream(..) issues updateAttributes*(..), and defines avChosenAudioFormat, vid, aid, .. etc
- setStream0(moviePtr, resStreamLocS, inFormat, vid, aid, aMaxChannelCount, aPrefSampleRate);
+ natives.setStream0(moviePtr, resStreamLocS, inFormat, vid, aid, aMaxChannelCount, aPrefSampleRate);
}
@Override
@@ -301,7 +283,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
final long procAddrGLGetError = pt.getAddressFor("glGetError");
final long procAddrGLFlush = pt.getAddressFor("glFlush");
final long procAddrGLFinish = pt.getAddressFor("glFinish");
- setGLFuncs0(moviePtr, procAddrGLTexSubImage2D, procAddrGLGetError, procAddrGLFlush, procAddrGLFinish);
+ natives.setGLFuncs0(moviePtr, procAddrGLTexSubImage2D, procAddrGLGetError, procAddrGLFlush, procAddrGLFinish);
return null;
} } );
audioQueueLimit = AudioSink.DefaultQueueLimitWithVideo;
@@ -367,12 +349,13 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
*/
/**
+ * Native callback
* Converts the given libav/ffmpeg values to {@link AudioFormat} and returns {@link AudioSink#isSupported(AudioFormat)}.
* @param audioSampleFmt ffmpeg/libav audio-sample-format, see {@link SampleFormat}.
* @param audioSampleRate sample rate in Hz (1/s)
* @param audioChannels number of channels
*/
- private final boolean isAudioFormatSupported(int audioSampleFmt, int audioSampleRate, int audioChannels) {
+ final boolean isAudioFormatSupported(int audioSampleFmt, int audioSampleRate, int audioChannels) {
final AudioFormat audioFormat = avAudioFormat2Local(SampleFormat.valueOf(audioSampleFmt), audioSampleRate, audioChannels);
final boolean res = audioSink.isSupported(audioFormat);
if( DEBUG ) {
@@ -434,6 +417,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
}
/**
+ * Native callback
* @param pixFmt
* @param planes
* @param bitsPerPixel
@@ -449,11 +433,11 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
* @param audioChannels
* @param audioSamplesPerFrameAndChannel in audio samples per frame and channel
*/
- private void updateAttributes2(int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
- int lSz0, int lSz1, int lSz2,
- int tWd0, int tWd1, int tWd2, int tH,
- int audioSampleFmt, int audioSampleRate,
- int audioChannels, int audioSamplesPerFrameAndChannel) {
+ void updateAttributes2(int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
+ int lSz0, int lSz1, int lSz2,
+ int tWd0, int tWd1, int tWd2, int tH,
+ int audioSampleFmt, int audioSampleRate,
+ int audioChannels, int audioSamplesPerFrameAndChannel) {
vPixelFmt = PixelFormat.valueOf(pixFmt);
vPlanes = planes;
vBitsPerPixel = bitsPerPixel;
@@ -562,7 +546,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
if(0==moviePtr) {
return false;
}
- final int errno = play0(moviePtr);
+ final int errno = natives.play0(moviePtr);
if( DEBUG_NATIVE && errno != 0 && errno != -ENOSYS) {
System.err.println("libav play err: "+errno);
}
@@ -574,7 +558,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
if(0==moviePtr) {
return false;
}
- final int errno = pause0(moviePtr);
+ final int errno = natives.pause0(moviePtr);
if( DEBUG_NATIVE && errno != 0 && errno != -ENOSYS) {
System.err.println("libav pause err: "+errno);
}
@@ -586,7 +570,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
if(0==moviePtr) {
throw new GLException("FFMPEG native instance null");
}
- return seek0(moviePtr, msec);
+ return natives.seek0(moviePtr, msec);
}
@Override
@@ -614,7 +598,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
/** Try decode up to 10 packets to find one containing video. */
for(int i=0; TimeFrameI.INVALID_PTS == vPTS && 10 > i; i++) {
- vPTS = readNextPacket0(moviePtr, textureTarget, textureFormat, textureType);
+ vPTS = natives.readNextPacket0(moviePtr, textureTarget, textureFormat, textureType);
}
if( null != nextFrame ) {
nextFrame.setPTS(vPTS);
@@ -622,195 +606,12 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
return vPTS;
}
- private final void pushSound(ByteBuffer sampleData, int data_size, int audio_pts) {
+ final void pushSound(ByteBuffer sampleData, int data_size, int audio_pts) {
setFirstAudioPTS2SCR( audio_pts );
if( 1.0f == playSpeed || audioSinkPlaySpeedSet ) {
audioSink.enqueueData( audio_pts, sampleData, data_size);
}
}
- private static native int getAvUtilVersion0();
- private static native int getAvUtilMajorVersionCC0();
- private static native int getAvFormatVersion0();
- private static native int getAvFormatMajorVersionCC0();
- private static native int getAvCodecVersion0();
- private static native int getAvCodecMajorVersionCC0();
- private static native int getAvResampleVersion0();
- private static native int getAvResampleMajorVersionCC0();
- private static native boolean initIDs0();
- private native long createInstance0(boolean verbose);
- private native void destroyInstance0(long moviePtr);
-
- /**
- * Issues {@link #updateAttributes(int, int, int, int, int, int, int, float, int, int, String, String)}
- * and {@link #updateAttributes2(int, int, int, int, int, int, int, int, int, int)}.
- * <p>
- * Always uses {@link AudioSink.AudioFormat}:
- * <pre>
- * [type PCM, sampleRate [10000(?)..44100..48000], sampleSize 16, channelCount 1-2, signed, littleEndian]
- * </pre>
- * </p>
- *
- * @param moviePtr
- * @param url
- * @param vid
- * @param aid
- * @param aPrefChannelCount
- * @param aPrefSampleRate
- */
- private native void setStream0(long moviePtr, String url, String inFormat, int vid, int aid, int aMaxChannelCount, int aPrefSampleRate);
- private native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish);
-
- private native int getVideoPTS0(long moviePtr);
-
- private native int getAudioPTS0(long moviePtr);
- private native Buffer getAudioBuffer0(long moviePtr, int plane);
-
- /**
- * @return resulting current video PTS, or {@link TextureFrame#INVALID_PTS}
- */
- private native int readNextPacket0(long moviePtr, int texTarget, int texFmt, int texType);
-
- private native int play0(long moviePtr);
- private native int pause0(long moviePtr);
- private native int seek0(long moviePtr, int position);
-
- /** FFMPEG/libAV Audio Sample Format */
- public static enum SampleFormat {
- // NONE = -1,
- U8, ///< unsigned 8 bits
- S16, ///< signed 16 bits
- S32, ///< signed 32 bits
- FLT, ///< float
- DBL, ///< double
-
- U8P, ///< unsigned 8 bits, planar
- S16P, ///< signed 16 bits, planar
- S32P, ///< signed 32 bits, planar
- FLTP, ///< float, planar
- DBLP, ///< double, planar
-
- COUNT; ///< Number of sample formats.
-
- public static SampleFormat valueOf(int i) {
- for (SampleFormat fmt : SampleFormat.values()) {
- if(fmt.ordinal() == i) {
- return fmt;
- }
- }
- return null;
- }
- };
-
- /** FFMPEG/libAV Pixel Format */
- public static enum PixelFormat {
- // NONE= -1,
- YUV420P, ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
- YUYV422, ///< packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
- RGB24, ///< packed RGB 8:8:8, 24bpp, RGBRGB...
- BGR24, ///< packed RGB 8:8:8, 24bpp, BGRBGR...
- YUV422P, ///< planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
- YUV444P, ///< planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
- YUV410P, ///< planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
- YUV411P, ///< planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
- GRAY8, ///< Y , 8bpp
- MONOWHITE, ///< Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb
- MONOBLACK, ///< Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb
- PAL8, ///< 8 bit with RGB32 palette
- YUVJ420P, ///< planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of YUV420P and setting color_range
- YUVJ422P, ///< planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of YUV422P and setting color_range
- YUVJ444P, ///< planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of YUV444P and setting color_range
- XVMC_MPEG2_MC,///< XVideo Motion Acceleration via common packet passing
- XVMC_MPEG2_IDCT,
- UYVY422, ///< packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
- UYYVYY411, ///< packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
- BGR8, ///< packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
- BGR4, ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
- BGR4_BYTE, ///< packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb)
- RGB8, ///< packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb)
- RGB4, ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
- RGB4_BYTE, ///< packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb)
- NV12, ///< planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
- NV21, ///< as above, but U and V bytes are swapped
-
- ARGB, ///< packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
- RGBA, ///< packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
- ABGR, ///< packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
- BGRA, ///< packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
-
- GRAY16BE, ///< Y , 16bpp, big-endian
- GRAY16LE, ///< Y , 16bpp, little-endian
- YUV440P, ///< planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
- YUVJ440P, ///< planar YUV 4:4:0 full scale (JPEG), deprecated in favor of YUV440P and setting color_range
- YUVA420P, ///< planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
- VDPAU_H264,///< H.264 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
- VDPAU_MPEG1,///< MPEG-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
- VDPAU_MPEG2,///< MPEG-2 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
- VDPAU_WMV3,///< WMV3 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
- VDPAU_VC1, ///< VC-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
- RGB48BE, ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian
- RGB48LE, ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian
-
- RGB565BE, ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian
- RGB565LE, ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian
- RGB555BE, ///< packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), big-endian, most significant bit to 0
- RGB555LE, ///< packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), little-endian, most significant bit to 0
-
- BGR565BE, ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian
- BGR565LE, ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian
- BGR555BE, ///< packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), big-endian, most significant bit to 1
- BGR555LE, ///< packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), little-endian, most significant bit to 1
-
- VAAPI_MOCO, ///< HW acceleration through VA API at motion compensation entry-point, Picture.data[3] contains a vaapi_render_state struct which contains macroblocks as well as various fields extracted from headers
- VAAPI_IDCT, ///< HW acceleration through VA API at IDCT entry-point, Picture.data[3] contains a vaapi_render_state struct which contains fields extracted from headers
- VAAPI_VLD, ///< HW decoding through VA API, Picture.data[3] contains a vaapi_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
-
- YUV420P16LE, ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
- YUV420P16BE, ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
- YUV422P16LE, ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
- YUV422P16BE, ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
- YUV444P16LE, ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
- YUV444P16BE, ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
- VDPAU_MPEG4, ///< MPEG4 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
- DXVA2_VLD, ///< HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer
-
- RGB444LE, ///< packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), little-endian, most significant bits to 0
- RGB444BE, ///< packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), big-endian, most significant bits to 0
- BGR444LE, ///< packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), little-endian, most significant bits to 1
- BGR444BE, ///< packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), big-endian, most significant bits to 1
- Y400A, ///< 8bit gray, 8bit alpha
- BGR48BE, ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian
- BGR48LE, ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian
- YUV420P9BE, ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
- YUV420P9LE, ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
- YUV420P10BE,///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
- YUV420P10LE,///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
- YUV422P10BE,///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
- YUV422P10LE,///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
- YUV444P9BE, ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
- YUV444P9LE, ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
- YUV444P10BE,///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
- YUV444P10LE,///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
- YUV422P9BE, ///< planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
- YUV422P9LE, ///< planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
- VDA_VLD, ///< hardware decoding through VDA
- GBRP, ///< planar GBR 4:4:4 24bpp
- GBRP9BE, ///< planar GBR 4:4:4 27bpp, big endian
- GBRP9LE, ///< planar GBR 4:4:4 27bpp, little endian
- GBRP10BE, ///< planar GBR 4:4:4 30bpp, big endian
- GBRP10LE, ///< planar GBR 4:4:4 30bpp, little endian
- GBRP16BE, ///< planar GBR 4:4:4 48bpp, big endian
- GBRP16LE, ///< planar GBR 4:4:4 48bpp, little endian
- COUNT ///< number of pixel formats in this list
- ;
- public static PixelFormat valueOf(int i) {
- for (PixelFormat fmt : PixelFormat.values()) {
- if(fmt.ordinal() == i) {
- return fmt;
- }
- }
- return null;
- }
- }
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java
new file mode 100644
index 000000000..5ff15564c
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java
@@ -0,0 +1,188 @@
+package jogamp.opengl.util.av.impl;
+
+import com.jogamp.opengl.util.av.AudioSink;
+import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;
+
+interface FFMPEGNatives {
+
+ boolean initSymbols0(long[] symbols, int count);
+ int getAvUtilMajorVersionCC0();
+ int getAvFormatMajorVersionCC0();
+ int getAvCodecMajorVersionCC0();
+ int getAvResampleMajorVersionCC0();
+ boolean initIDs0();
+
+ long createInstance0(FFMPEGMediaPlayer upstream, boolean verbose);
+ void destroyInstance0(long moviePtr);
+
+ /**
+ * Issues {@link #updateAttributes(int, int, int, int, int, int, int, float, int, int, String, String)}
+ * and {@link #updateAttributes2(int, int, int, int, int, int, int, int, int, int)}.
+ * <p>
+ * Always uses {@link AudioSink.AudioFormat}:
+ * <pre>
+ * [type PCM, sampleRate [10000(?)..44100..48000], sampleSize 16, channelCount 1-2, signed, littleEndian]
+ * </pre>
+ * </p>
+ *
+ * @param moviePtr
+ * @param url
+ * @param vid
+ * @param aid
+ * @param aPrefChannelCount
+ * @param aPrefSampleRate
+ */
+ void setStream0(long moviePtr, String url, String inFormat, int vid, int aid, int aMaxChannelCount, int aPrefSampleRate);
+ void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish);
+
+ int getVideoPTS0(long moviePtr);
+
+ int getAudioPTS0(long moviePtr);
+
+ /**
+ * @return resulting current video PTS, or {@link TextureFrame#INVALID_PTS}
+ */
+ int readNextPacket0(long moviePtr, int texTarget, int texFmt, int texType);
+
+ int play0(long moviePtr);
+ int pause0(long moviePtr);
+ int seek0(long moviePtr, int position);
+
+ /** FFMPEG/libAV Audio Sample Format */
+ public static enum SampleFormat {
+ // NONE = -1,
+ U8, ///< unsigned 8 bits
+ S16, ///< signed 16 bits
+ S32, ///< signed 32 bits
+ FLT, ///< float
+ DBL, ///< double
+
+ U8P, ///< unsigned 8 bits, planar
+ S16P, ///< signed 16 bits, planar
+ S32P, ///< signed 32 bits, planar
+ FLTP, ///< float, planar
+ DBLP, ///< double, planar
+
+ COUNT; ///< Number of sample formats.
+
+ public static SampleFormat valueOf(int i) {
+ for (SampleFormat fmt : SampleFormat.values()) {
+ if(fmt.ordinal() == i) {
+ return fmt;
+ }
+ }
+ return null;
+ }
+ };
+
+ /** FFMPEG/libAV Pixel Format */
+ public static enum PixelFormat {
+ // NONE= -1,
+ YUV420P, ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
+ YUYV422, ///< packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
+ RGB24, ///< packed RGB 8:8:8, 24bpp, RGBRGB...
+ BGR24, ///< packed RGB 8:8:8, 24bpp, BGRBGR...
+ YUV422P, ///< planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
+ YUV444P, ///< planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
+ YUV410P, ///< planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
+ YUV411P, ///< planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
+ GRAY8, ///< Y , 8bpp
+ MONOWHITE, ///< Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb
+ MONOBLACK, ///< Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb
+ PAL8, ///< 8 bit with RGB32 palette
+ YUVJ420P, ///< planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of YUV420P and setting color_range
+ YUVJ422P, ///< planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of YUV422P and setting color_range
+ YUVJ444P, ///< planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of YUV444P and setting color_range
+ XVMC_MPEG2_MC,///< XVideo Motion Acceleration via common packet passing
+ XVMC_MPEG2_IDCT,
+ UYVY422, ///< packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
+ UYYVYY411, ///< packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
+ BGR8, ///< packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
+ BGR4, ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
+ BGR4_BYTE, ///< packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb)
+ RGB8, ///< packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb)
+ RGB4, ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
+ RGB4_BYTE, ///< packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb)
+ NV12, ///< planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
+ NV21, ///< as above, but U and V bytes are swapped
+
+ ARGB, ///< packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
+ RGBA, ///< packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
+ ABGR, ///< packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
+ BGRA, ///< packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
+
+ GRAY16BE, ///< Y , 16bpp, big-endian
+ GRAY16LE, ///< Y , 16bpp, little-endian
+ YUV440P, ///< planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
+ YUVJ440P, ///< planar YUV 4:4:0 full scale (JPEG), deprecated in favor of YUV440P and setting color_range
+ YUVA420P, ///< planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
+ VDPAU_H264,///< H.264 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
+ VDPAU_MPEG1,///< MPEG-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
+ VDPAU_MPEG2,///< MPEG-2 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
+ VDPAU_WMV3,///< WMV3 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
+ VDPAU_VC1, ///< VC-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
+ RGB48BE, ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian
+ RGB48LE, ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian
+
+ RGB565BE, ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian
+ RGB565LE, ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian
+ RGB555BE, ///< packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), big-endian, most significant bit to 0
+ RGB555LE, ///< packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), little-endian, most significant bit to 0
+
+ BGR565BE, ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian
+ BGR565LE, ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian
+ BGR555BE, ///< packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), big-endian, most significant bit to 1
+ BGR555LE, ///< packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), little-endian, most significant bit to 1
+
+ VAAPI_MOCO, ///< HW acceleration through VA API at motion compensation entry-point, Picture.data[3] contains a vaapi_render_state struct which contains macroblocks as well as various fields extracted from headers
+ VAAPI_IDCT, ///< HW acceleration through VA API at IDCT entry-point, Picture.data[3] contains a vaapi_render_state struct which contains fields extracted from headers
+ VAAPI_VLD, ///< HW decoding through VA API, Picture.data[3] contains a vaapi_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
+
+ YUV420P16LE, ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
+ YUV420P16BE, ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
+ YUV422P16LE, ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
+ YUV422P16BE, ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
+ YUV444P16LE, ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
+ YUV444P16BE, ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
+ VDPAU_MPEG4, ///< MPEG4 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
+ DXVA2_VLD, ///< HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer
+
+ RGB444LE, ///< packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), little-endian, most significant bits to 0
+ RGB444BE, ///< packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), big-endian, most significant bits to 0
+ BGR444LE, ///< packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), little-endian, most significant bits to 1
+ BGR444BE, ///< packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), big-endian, most significant bits to 1
+ Y400A, ///< 8bit gray, 8bit alpha
+ BGR48BE, ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian
+ BGR48LE, ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian
+ YUV420P9BE, ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
+ YUV420P9LE, ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
+ YUV420P10BE,///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
+ YUV420P10LE,///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
+ YUV422P10BE,///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
+ YUV422P10LE,///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
+ YUV444P9BE, ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
+ YUV444P9LE, ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
+ YUV444P10BE,///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
+ YUV444P10LE,///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
+ YUV422P9BE, ///< planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
+ YUV422P9LE, ///< planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
+ VDA_VLD, ///< hardware decoding through VDA
+ GBRP, ///< planar GBR 4:4:4 24bpp
+ GBRP9BE, ///< planar GBR 4:4:4 27bpp, big endian
+ GBRP9LE, ///< planar GBR 4:4:4 27bpp, little endian
+ GBRP10BE, ///< planar GBR 4:4:4 30bpp, big endian
+ GBRP10LE, ///< planar GBR 4:4:4 30bpp, little endian
+ GBRP16BE, ///< planar GBR 4:4:4 48bpp, big endian
+ GBRP16LE, ///< planar GBR 4:4:4 48bpp, little endian
+ COUNT ///< number of pixel formats in this list
+ ;
+ public static PixelFormat valueOf(int i) {
+ for (PixelFormat fmt : PixelFormat.values()) {
+ if(fmt.ordinal() == i) {
+ return fmt;
+ }
+ }
+ return null;
+ }
+ }
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGStaticNatives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGStaticNatives.java
new file mode 100644
index 000000000..3e9c4bf36
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGStaticNatives.java
@@ -0,0 +1,15 @@
+package jogamp.opengl.util.av.impl;
+
+import com.jogamp.common.util.VersionNumber;
+
+class FFMPEGStaticNatives {
+ static VersionNumber getAVVersion(int vers) {
+ return new VersionNumber( ( vers >> 16 ) & 0xFF,
+ ( vers >> 8 ) & 0xFF,
+ ( vers >> 0 ) & 0xFF );
+ }
+ static native int getAvUtilVersion0(long func);
+ static native int getAvFormatVersion0(long func);
+ static native int getAvCodecVersion0(long func);
+ static native int getAvResampleVersion0(long func);
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv08Natives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv08Natives.java
new file mode 100644
index 000000000..2a4d20f37
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv08Natives.java
@@ -0,0 +1,51 @@
+package jogamp.opengl.util.av.impl;
+
+class FFMPEGv08Natives implements FFMPEGNatives {
+ @Override
+ public native boolean initSymbols0(long[] symbols, int count);
+
+ @Override
+ public native int getAvUtilMajorVersionCC0();
+
+ @Override
+ public native int getAvFormatMajorVersionCC0();
+
+ @Override
+ public native int getAvCodecMajorVersionCC0();
+
+ @Override
+ public native int getAvResampleMajorVersionCC0();
+
+ @Override
+ public native boolean initIDs0();
+
+ @Override
+ public native long createInstance0(FFMPEGMediaPlayer upstream, boolean verbose);
+
+ @Override
+ public native void destroyInstance0(long moviePtr);
+
+ @Override
+ public native void setStream0(long moviePtr, String url, String inFormat, int vid, int aid, int aMaxChannelCount, int aPrefSampleRate);
+
+ @Override
+ public native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish);
+
+ @Override
+ public native int getVideoPTS0(long moviePtr);
+
+ @Override
+ public native int getAudioPTS0(long moviePtr);
+
+ @Override
+ public native int readNextPacket0(long moviePtr, int texTarget, int texFmt, int texType);
+
+ @Override
+ public native int play0(long moviePtr);
+
+ @Override
+ public native int pause0(long moviePtr);
+
+ @Override
+ public native int seek0(long moviePtr, int position);
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv09Natives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv09Natives.java
new file mode 100644
index 000000000..b250fbac8
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv09Natives.java
@@ -0,0 +1,51 @@
+package jogamp.opengl.util.av.impl;
+
+class FFMPEGv09Natives implements FFMPEGNatives {
+ @Override
+ public native boolean initSymbols0(long[] symbols, int count);
+
+ @Override
+ public native int getAvUtilMajorVersionCC0();
+
+ @Override
+ public native int getAvFormatMajorVersionCC0();
+
+ @Override
+ public native int getAvCodecMajorVersionCC0();
+
+ @Override
+ public native int getAvResampleMajorVersionCC0();
+
+ @Override
+ public native boolean initIDs0();
+
+ @Override
+ public native long createInstance0(FFMPEGMediaPlayer upstream, boolean verbose);
+
+ @Override
+ public native void destroyInstance0(long moviePtr);
+
+ @Override
+ public native void setStream0(long moviePtr, String url, String inFormat, int vid, int aid, int aMaxChannelCount, int aPrefSampleRate);
+
+ @Override
+ public native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish);
+
+ @Override
+ public native int getVideoPTS0(long moviePtr);
+
+ @Override
+ public native int getAudioPTS0(long moviePtr);
+
+ @Override
+ public native int readNextPacket0(long moviePtr, int texTarget, int texFmt, int texType);
+
+ @Override
+ public native int play0(long moviePtr);
+
+ @Override
+ public native int pause0(long moviePtr);
+
+ @Override
+ public native int seek0(long moviePtr, int position);
+}