author     Sven Gothel <[email protected]>    2013-07-17 16:18:27 +0200
committer  Sven Gothel <[email protected]>    2013-07-17 16:18:27 +0200
commit     f26d591b484c7516e6addaaff0b6b40cb4138da9 (patch)
tree       297529ecdb4bd9d7a3d265d2194df6994a635b93 /src/jogl
parent     5a1bd0c70ffce4a0530de4f3902050a87c669343 (diff)
parent     5c8699690ac907882615ba8f01eebb9e39699f50 (diff)
Merge remote-tracking branch 'xranby/FFMPEGMediaPlayer'
Diffstat (limited to 'src/jogl')
-rw-r--r--  src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java                        176
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/AudioSink.java                              13
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java                     79
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java                          23
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java     7
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java                143
-rw-r--r--  src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c              130
7 files changed, 537 insertions(+), 34 deletions(-)
diff --git a/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java b/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
new file mode 100644
index 000000000..690948c5a
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
@@ -0,0 +1,176 @@
+package jogamp.opengl.openal.av;
+
+import java.nio.Buffer;
+import java.nio.ByteBuffer;
+
+import java.lang.InterruptedException;
+
+import jogamp.opengl.util.av.AudioSink;
+
+import com.jogamp.common.nio.Buffers;
+import com.jogamp.openal.*;
+
+public class ALAudioSink implements AudioSink {
+
+ static ALC alc;
+ static AL al;
+ static ALCdevice device;
+ static ALCcontext context;
+
+ // AudioFormat parameters
+ public static final int SAMPLE_RATE = 44100;
+
+ // Chunk of audio processed at one time
+ public static final int BUFFER_SIZE = 1000;
+ public static final int SAMPLES_PER_BUFFER = BUFFER_SIZE / 2;
+
+ // Sample time values
+ public static final double SAMPLE_TIME_IN_SECS = 1.0 / SAMPLE_RATE;
+ public static final double BUFFER_TIME_IN_SECS = SAMPLE_TIME_IN_SECS * SAMPLES_PER_BUFFER;
+
+ private static int NUM_BUFFERS = 5;
+ private static int bufferNumber = 0;
+ private static int[] buffers = new int[NUM_BUFFERS];
+ private static int[] source = new int[1];
+ private static boolean initBuffer = true;
+ private static int frequency = 44100;
+ private static int format = AL.AL_FORMAT_STEREO16;
+
+ private static boolean available = false;
+
+ static {
+
+ boolean joalFound = false;
+ try {
+ Class.forName("com.jogamp.openal.ALFactory");
+ joalFound = true;
+ } catch(ClassNotFoundException e){
+ // Joal not found on classpath
+ }
+
+ if(joalFound) {
+
+ alc = ALFactory.getALC();
+ String deviceSpecifier;
+
+ // Get handle to default device.
+ device = alc.alcOpenDevice(null);
+ if (device == null) {
+ throw new ALException("Error opening default OpenAL device");
+ }
+
+ // Get the device specifier.
+ deviceSpecifier = alc.alcGetString(device, ALC.ALC_DEVICE_SPECIFIER);
+ if (deviceSpecifier == null) {
+ throw new ALException("Error getting specifier for default OpenAL device");
+ }
+
+ // Create audio context.
+ context = alc.alcCreateContext(device, null);
+ if (context == null) {
+ throw new ALException("Error creating OpenAL context");
+ }
+
+ // Set active context.
+ alc.alcMakeContextCurrent(context);
+
+ // Check for an error.
+ if (alc.alcGetError(device) != ALC.ALC_NO_ERROR) {
+ throw new ALException("Error making OpenAL context current");
+ }
+
+ al = ALFactory.getAL();
+
+ // Allocate buffers
+ al.alGenBuffers(NUM_BUFFERS, buffers, 0);
+ al.alGenSources(1, source, 0);
+
+ if(al.alGetError() != AL.AL_NO_ERROR) {
+ throw new ALException("Error generating :(");
+ }
+
+ System.out.println("OpenAL audio sink using device: " + deviceSpecifier);
+ available = true;
+ }
+ }
+
+ @Override
+ public boolean isDataAvailable(int data_size) {
+ return true;
+ }
+
+ @Override
+ public void writeData(byte[] sampleData, int data_size) {
+ // OpenAL consumes queued buffers in the background.
+ // We first need to fill and queue the initial set of OpenAL buffers,
+ // then start continuous playback.
+ alc.alcMakeContextCurrent(context);
+ if(initBuffer) {
+
+ ByteBuffer data = Buffers.newDirectByteBuffer(sampleData);
+ al.alBufferData(buffers[bufferNumber], format, data, data_size, frequency);
+ int error = al.alGetError();
+ if(error != AL.AL_NO_ERROR) {
+ System.out.println("bufferNumber"+bufferNumber+" Data "+sampleData+" size"+data_size);
+ throw new ALException("Error loading :( error code: " + error);
+ }
+
+ if(bufferNumber==NUM_BUFFERS-1){
+ // all buffers queued
+ al.alSourceQueueBuffers(source[0], NUM_BUFFERS, buffers, 0);
+ // start playback
+ al.alSourcePlay(source[0]);
+ if(al.alGetError() != AL.AL_NO_ERROR) {
+ throw new ALException("Error starting :(");
+ }
+ initBuffer=false;
+ }
+
+ // Advance to the next buffer to be filled
+ bufferNumber=(bufferNumber+1)%NUM_BUFFERS;
+ } else {
+ // OpenAL is playing in the background.
+ // One new frame with audio data is ready.
+
+ // First wait for OpenAL to release one processed buffer.
+ int[] buffer=new int[1];
+ int[] val=new int[1];
+ do {
+ al.alGetSourcei(source[0], AL.AL_BUFFERS_PROCESSED, val, 0);
+ if(val[0] <= 0){
+ try {
+ Thread.sleep(1);
+ } catch (InterruptedException e){
+ }
+ }
+ } while (val[0] <= 0);
+
+ // fill and requeue the empty buffer
+ al.alSourceUnqueueBuffers(source[0], 1, buffer , 0);
+ Buffer data = Buffers.newDirectByteBuffer(sampleData);
+ al.alBufferData(buffer[0], format, data, data_size, frequency);
+ al.alSourceQueueBuffers(source[0], 1, buffer, 0);
+ if(al.alGetError() != AL.AL_NO_ERROR) {
+ throw new ALException("Error buffering :(");
+ }
+
+ // Restart OpenAL playback if needed
+ al.alGetSourcei(source[0], AL.AL_SOURCE_STATE, val, 0);
+ if(val[0] != AL.AL_PLAYING) {
+ al.alSourcePlay(source[0]);
+ }
+ }
+ }
+
+ @Override
+ public int getDataAvailable() {
+ int[] val=new int[1];
+ al.alGetSourcei(source[0], AL.AL_BUFFERS_PROCESSED, val, 0);
+ return (NUM_BUFFERS-val[0])*4096; // rough estimate: assumes ~4096 bytes per queued buffer
+ }
+
+ @Override
+ public boolean isAudioSinkAvailable() {
+ return available;
+ }
+}
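For reference, the core JOAL calls ALAudioSink builds on can be exercised in isolation. The following standalone sketch is an illustration only, not part of this change: it opens the default device, uploads one buffer of generated 16-bit mono PCM and plays it through the same alGenBuffers / alBufferData / alSourceQueueBuffers / alSourcePlay path used above. The sine-tone generation and the fixed sleep are assumptions made for the demo.

    import java.nio.ByteBuffer;

    import com.jogamp.common.nio.Buffers;
    import com.jogamp.openal.*;

    /** Plays one second of a generated 440 Hz tone through OpenAL. */
    public class ALToneSketch {
        public static void main(String[] args) throws InterruptedException {
            final ALC alc = ALFactory.getALC();
            final AL al = ALFactory.getAL();

            // Open the default device and make a context current, as ALAudioSink does.
            final ALCdevice device = alc.alcOpenDevice(null);
            final ALCcontext context = alc.alcCreateContext(device, null);
            alc.alcMakeContextCurrent(context);

            // Generate one second of 16-bit mono PCM (demo data, native byte order).
            final int rate = 44100;
            final ByteBuffer pcm = Buffers.newDirectByteBuffer(rate * 2);
            for (int i = 0; i < rate; i++) {
                pcm.putShort((short) (Math.sin(2.0 * Math.PI * 440.0 * i / rate) * 8000));
            }
            pcm.rewind();

            // Upload, queue and play via the same calls used in the sink above.
            final int[] buffer = new int[1];
            final int[] source = new int[1];
            al.alGenBuffers(1, buffer, 0);
            al.alGenSources(1, source, 0);
            al.alBufferData(buffer[0], AL.AL_FORMAT_MONO16, pcm, pcm.capacity(), rate);
            al.alSourceQueueBuffers(source[0], 1, buffer, 0);
            al.alSourcePlay(source[0]);

            Thread.sleep(1100); // let playback finish before tearing down

            alc.alcDestroyContext(context);
            alc.alcCloseDevice(device);
        }
    }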
diff --git a/src/jogl/classes/jogamp/opengl/util/av/AudioSink.java b/src/jogl/classes/jogamp/opengl/util/av/AudioSink.java
new file mode 100644
index 000000000..504e4b2db
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/AudioSink.java
@@ -0,0 +1,13 @@
+package jogamp.opengl.util.av;
+
+public interface AudioSink {
+
+ boolean isAudioSinkAvailable();
+
+ int getDataAvailable();
+
+ boolean isDataAvailable(int data_size);
+
+ void writeData(byte[] sampleData, int data_size);
+
+}
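The AudioSink interface above is intentionally small: a caller probes availability, asks how much data the sink can accept, and pushes interleaved PCM bytes. A minimal usage sketch follows; it is hypothetical (FFMPEGMediaPlayer itself selects the sink reflectively, as shown further down), and the direct instantiation and silence data are illustrative assumptions.

    import jogamp.opengl.util.av.AudioSink;
    import jogamp.opengl.util.av.JavaSoundAudioSink;
    import jogamp.opengl.util.av.NullAudioSink;

    /** Pushes a few chunks of silence into whichever sink is available. */
    public class AudioSinkDemo {
        public static void main(String[] args) {
            AudioSink sink = new JavaSoundAudioSink();
            if (!sink.isAudioSinkAvailable()) {
                sink = new NullAudioSink(); // no-op fallback, always "available"
            }

            final byte[] chunk = new byte[1000]; // interleaved 16-bit stereo PCM, silence
            for (int i = 0; i < 100; i++) {
                // Only push data the sink can accept right now.
                if (sink.isDataAvailable(chunk.length)) {
                    sink.writeData(chunk, chunk.length);
                }
            }
            System.out.println("bytes the sink can still accept: " + sink.getDataAvailable());
        }
    }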
diff --git a/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java b/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java
new file mode 100644
index 000000000..653a02111
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java
@@ -0,0 +1,79 @@
+package jogamp.opengl.util.av;
+
+import java.util.Arrays;
+
+import javax.sound.sampled.AudioFormat;
+import javax.sound.sampled.AudioSystem;
+import javax.sound.sampled.DataLine;
+import javax.sound.sampled.SourceDataLine;
+
+public class JavaSoundAudioSink implements AudioSink {
+
+ // AudioFormat parameters
+ public static final int SAMPLE_RATE = 44100;
+ private static final int SAMPLE_SIZE = 16;
+ private static final int CHANNELS = 2;
+ private static final boolean SIGNED = true;
+ private static final boolean BIG_ENDIAN = false;
+
+ // Chunk of audio processed at one time
+ public static final int BUFFER_SIZE = 1000;
+ public static final int SAMPLES_PER_BUFFER = BUFFER_SIZE / 2;
+
+ // Sample time values
+ public static final double SAMPLE_TIME_IN_SECS = 1.0 / SAMPLE_RATE;
+ public static final double BUFFER_TIME_IN_SECS = SAMPLE_TIME_IN_SECS * SAMPLES_PER_BUFFER;
+
+ private static AudioFormat format;
+ private static DataLine.Info info;
+ private static SourceDataLine auline;
+ private static int bufferCount;
+ private static byte [] sampleData = new byte[BUFFER_SIZE];
+
+ private static boolean available;
+
+ static {
+ // Create the audio format we wish to use
+ format = new AudioFormat(SAMPLE_RATE, SAMPLE_SIZE, CHANNELS, SIGNED, BIG_ENDIAN);
+
+ // Create dataline info object describing line format
+ info = new DataLine.Info(SourceDataLine.class, format);
+
+ // Clear buffer initially
+ Arrays.fill(sampleData, (byte) 0);
+ try{
+ // Get line to write data to
+ auline = (SourceDataLine) AudioSystem.getLine(info);
+ auline.open(format);
+ auline.start();
+ System.out.println("JavaSound audio sink");
+ available=true;
+ } catch (Exception e) {
+ available=false;
+ }
+ }
+
+ public void writeData(byte[] sampleData, int data_size) {
+ int written = 0;
+ int len;
+ while (data_size > 0) {
+ len = auline.write(sampleData, written, data_size);
+ data_size -= len;
+ written += len;
+ }
+ }
+
+ public int getDataAvailable() {
+ return auline.available();
+ }
+
+ public boolean isDataAvailable(int data_size) {
+ return auline.available()>=data_size;
+ }
+
+ @Override
+ public boolean isAudioSinkAvailable() {
+ return available;
+ }
+
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java b/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java
new file mode 100644
index 000000000..d92967849
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java
@@ -0,0 +1,23 @@
+package jogamp.opengl.util.av;
+
+public class NullAudioSink implements AudioSink {
+
+ @Override
+ public int getDataAvailable() {
+ return 0;
+ }
+
+ @Override
+ public boolean isDataAvailable(int data_size) {
+ return false;
+ }
+
+ @Override
+ public void writeData(byte[] sampleData, int data_size) {
+ }
+
+ @Override
+ public boolean isAudioSinkAvailable() {
+ return true;
+ }
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
index 2d40fe4ec..852e5149c 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
@@ -57,7 +57,7 @@ import com.jogamp.common.util.RunnableExecutor;
class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
private static final List<String> glueLibNames = new ArrayList<String>(); // none
- private static final int symbolCount = 31;
+ private static final int symbolCount = 32;
private static final String[] symbolNames = {
"avcodec_version",
"avformat_version",
@@ -80,7 +80,8 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
// libavutil
"av_pix_fmt_descriptors",
"av_free",
-/* 18 */ "av_get_bits_per_pixel",
+ "av_get_bits_per_pixel",
+/* 19 */ "av_samples_get_buffer_size",
// libavformat
"avformat_alloc_context",
@@ -95,7 +96,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
"avformat_network_init", // 53.13.0 (opt)
"avformat_network_deinit", // 53.13.0 (opt)
"avformat_find_stream_info", // 53.3.0 (opt)
-/* 29 */ "av_find_stream_info",
+/* 32 */ "av_find_stream_info",
};
// alternate symbol names
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
index f416bb1f8..33b5b3b20 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
@@ -38,13 +38,19 @@ import javax.media.opengl.GL;
import javax.media.opengl.GL2ES2;
import javax.media.opengl.GLException;
+import java.util.Arrays;
+import java.util.Queue;
+
+import com.jogamp.common.util.ReflectionUtil;
import com.jogamp.common.util.VersionNumber;
import com.jogamp.gluegen.runtime.ProcAddressTable;
import com.jogamp.opengl.util.GLPixelStorageModes;
+import com.jogamp.opengl.util.av.GLMediaPlayerFactory;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureSequence;
import jogamp.opengl.GLContextImpl;
+import jogamp.opengl.util.av.AudioSink;
import jogamp.opengl.util.av.EGLMediaPlayerImpl;
/***
@@ -100,10 +106,18 @@ import jogamp.opengl.util.av.EGLMediaPlayerImpl;
* </ul>
*/
public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
+
+ // Count of zeroed buffers to return before switching to real sample provider
+ private static final int TEMP_BUFFER_COUNT = 20;
+
+ // Shared audio sink state, resolved once in the static initializer below
+ private static AudioSink audioSink;
+ private static int maxAvailableAudio;
+
public static final VersionNumber avUtilVersion;
public static final VersionNumber avFormatVersion;
public static final VersionNumber avCodecVersion;
- static final boolean available;
+ static boolean available;
static {
if(FFMPEGDynamicLibraryBundleInfo.initSingleton()) {
@@ -113,7 +127,26 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
System.err.println("LIB_AV Util : "+avUtilVersion);
System.err.println("LIB_AV Format: "+avFormatVersion);
System.err.println("LIB_AV Codec : "+avCodecVersion);
- available = initIDs0();
+ initIDs0();
+ available = true;
+ final ClassLoader cl = GLMediaPlayerFactory.class.getClassLoader();
+
+ if(ReflectionUtil.isClassAvailable("com.jogamp.openal.ALFactory", cl)){
+ // Only instantiate ALAudioSink if JOAL is found on the classpath.
+ audioSink = (AudioSink) ReflectionUtil.createInstance("jogamp.opengl.openal.av.ALAudioSink", cl);
+ if(!audioSink.isAudioSinkAvailable()){
+ // Failed to initialize OpenAL.
+ audioSink=null;
+ }
+ }
+ if(audioSink==null) {
+ audioSink = (AudioSink) ReflectionUtil.createInstance("jogamp.opengl.util.av.JavaSoundAudioSink", cl);
+ if(!audioSink.isAudioSinkAvailable()) {
+ audioSink = (AudioSink) ReflectionUtil.createInstance("jogamp.opengl.util.av.NullAudioSink", cl);
+ }
+ }
+ maxAvailableAudio = audioSink.getDataAvailable();
+
} else {
avUtilVersion = null;
avFormatVersion = null;
@@ -229,7 +262,77 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
}
} ).longValue();
}
-
+
+
+ private class AudioFrame {
+ final byte[] sampleData;
+ final int data_size;
+ final int audio_pts;
+ AudioFrame(byte[] sampleData, int data_size, int audio_pts) {
+ this.sampleData=sampleData;
+ this.data_size=data_size;
+ this.audio_pts=audio_pts;
+ }
+ }
+
+ static final Queue<AudioFrame> audioFrameBuffer = new java.util.LinkedList<AudioFrame>();
+
+ private void updateSound(byte[] sampleData, int data_size, int audio_pts) {
+/*
+ // Visualize incoming sample data
+ int c=0;
+ for(byte b: sampleData){
+ if(b<0) {
+ System.out.print(" ");
+ } else if(b<64) {
+ System.out.print("_");
+ } else if(b < 128) {
+ System.out.print("-");
+ } else if(b == 128) {
+ System.out.print("=");
+ } else if(b < 256-64) {
+ System.out.print("\"");
+ } else {
+ System.out.print("'");
+ }
+
+ c++;
+ if(c>=40)
+ break;
+ }
+ System.out.println("jA");
+*/
+
+ // TODO: reduce GC pressure
+ audioFrameBuffer.add(new AudioFrame(sampleData, data_size, audio_pts));
+ pumpAudio();
+ }
+
+ private void pumpAudio() {
+ if(audioSink.getDataAvailable()==maxAvailableAudio){
+ System.out.println("warning: audio buffer underrun");
+ }
+ while(audioFrameBuffer.peek()!=null){
+ AudioFrame a = audioFrameBuffer.peek();
+
+ // Poor man's audio sync .. TODO: move off-thread
+ final long now = System.currentTimeMillis();
+ final long now_d = now - lastAudioTime;
+ final long pts_d = a.audio_pts - lastAudioPTS;
+ final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
+
+ System.err.println("s: pts-a "+a.audio_pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
+ lastAudioTime = now;
+ if( (dt<audio_dt_d ) && audioSink.isDataAvailable(a.data_size)) {
+ audioFrameBuffer.poll(); /* remove first item from the queue */
+ audioSink.writeData(a.sampleData, a.data_size);
+ lastAudioPTS=a.audio_pts;
+ } else {
+ break;
+ }
+ }
+ }
+
private void updateAttributes2(int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
int lSz0, int lSz1, int lSz2,
int tWd0, int tWd1, int tWd2) {
@@ -375,6 +478,9 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
int pts0 = getVideoPTS0(moviePtr);
int pts1 = seek0(moviePtr, msec);
System.err.println("Seek: "+pts0+" -> "+msec+" : "+pts1);
+ audioFrameBuffer.clear();
+ lastAudioPTS=pts1;
+ lastVideoPTS=pts1;
return pts1;
}
@@ -383,9 +489,12 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
return lastTex;
}
+ private long lastAudioTime = 0;
+ private int lastAudioPTS = 0;
+ private static final int audio_dt_d = 400;
private long lastVideoTime = 0;
private int lastVideoPTS = 0;
- private static final int dt_d = 9;
+ private static final int video_dt_d = 9;
@Override
protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) {
@@ -399,7 +508,15 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit());
tex.enable(gl);
tex.bind(gl);
- readNextPacket0(moviePtr, procAddrGLTexSubImage2D, textureTarget, textureFormat, textureType);
+
+ /* Decode packets until one yields a finished video frame (res == 2),
+ giving up after a bounded number of attempts. */
+ int res = 0;
+ int retry = 10;
+ while(res!=2 && retry >= 0) {
+ res = readNextPacket0(moviePtr, procAddrGLTexSubImage2D, textureTarget, textureFormat, textureType);
+ retry--;
+ }
} finally {
psm.restore(gl);
}
@@ -407,19 +524,23 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
if(blocking) {
// poor man's video sync .. TODO: off-thread 'readNextPacket0(..)' on shared GLContext and multi textures/unit!
final long now = System.currentTimeMillis();
- final long now_d = now - lastVideoTime;
- final long pts_d = pts - lastVideoPTS;
- final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
+ // Try sync video to audio
+ final long now_d = now - lastAudioTime;
+ final long pts_d = pts - lastAudioPTS - 444; /* hack 444 == play video 444ms ahead of audio */
+ final long dt = Math.min(47, (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ) ;
+ //final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
lastVideoTime = now;
- // System.err.println("s: pts-v "+pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
- if(dt>dt_d) {
+ System.err.println("s: pts-v "+pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
+
+ if(dt>video_dt_d && dt<1000 && audioSink.getDataAvailable()<maxAvailableAudio-10000) {
try {
- Thread.sleep(dt-dt_d);
+ Thread.sleep(dt-video_dt_d);
} catch (InterruptedException e) { }
} /* else if(0>pts_d) {
System.err.println("s: pts-v "+pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
} */
}
+ pumpAudio();
lastVideoPTS = pts;
}
return lastTex;
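The "poor man's" sync above boils down to comparing how far the stream PTS advanced against elapsed wall-clock time and sleeping off the surplus, scaled by the play speed. A standalone sketch of that arithmetic, with made-up numbers:

    /** Worked example of the PTS-vs-wall-clock delta used for the sleep above. */
    public class SyncSketch {
        public static void main(String[] args) {
            final float playSpeed = 1.0f;    // assumed normal-speed playback
            final long lastVideoTime = 1000; // ms wall clock at the previous frame
            final long now = 1030;           // ms wall clock now
            final int lastVideoPTS = 0;      // ms stream time of the previous frame
            final int pts = 40;              // ms stream time of the current frame

            final long now_d = now - lastVideoTime;               // 30 ms of real time elapsed
            final long pts_d = pts - lastVideoPTS;                 // 40 ms of media to present
            final long dt = (long) ((pts_d - now_d) / playSpeed);  // 10 ms surplus

            // Sleeping roughly dt ms keeps the frame from being shown too early.
            System.out.println("sleep ~" + dt + " ms before presenting");
        }
    }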
diff --git a/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c b/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c
index 28e9e852c..d93caa30b 100644
--- a/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c
+++ b/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c
@@ -38,6 +38,7 @@ typedef void (APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLi
static const char * const ClazzNameFFMPEGMediaPlayer = "jogamp/opengl/util/av/impl/FFMPEGMediaPlayer";
static jclass ffmpegMediaPlayerClazz = NULL;
+static jmethodID jni_mid_updateSound = NULL;
static jmethodID jni_mid_updateAttributes1 = NULL;
static jmethodID jni_mid_updateAttributes2 = NULL;
@@ -84,10 +85,12 @@ static AVCODEC_DECODE_VIDEO2 sp_avcodec_decode_video2; // 52.23.0
// libavutil
typedef void (APIENTRYP AV_FREE)(void *ptr);
typedef int (APIENTRYP AV_GET_BITS_PER_PIXEL)(const AVPixFmtDescriptor *pixdesc);
+typedef int (APIENTRYP AV_SAMPLES_GET_BUFFER_SIZE)(int *linesize, int nb_channels, int nb_samples, enum AVSampleFormat sample_fmt, int align);
static const AVPixFmtDescriptor* sp_av_pix_fmt_descriptors;
static AV_FREE sp_av_free;
static AV_GET_BITS_PER_PIXEL sp_av_get_bits_per_pixel;
-// count: 18
+static AV_SAMPLES_GET_BUFFER_SIZE sp_av_samples_get_buffer_size;
+// count: 19
// libavformat
typedef AVFormatContext *(APIENTRYP AVFORMAT_ALLOC_CONTEXT)(void);
@@ -117,9 +120,9 @@ static AVFORMAT_NETWORK_INIT sp_avformat_network_init; // 53.13.0
static AVFORMAT_NETWORK_DEINIT sp_avformat_network_deinit; // 53.13.0
static AVFORMAT_FIND_STREAM_INFO sp_avformat_find_stream_info; // 53.3.0
static AV_FIND_STREAM_INFO sp_av_find_stream_info;
-// count: 31
+// count: 32
-#define SYMBOL_COUNT 31
+#define SYMBOL_COUNT 32
JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGDynamicLibraryBundleInfo_initSymbols0
(JNIEnv *env, jclass clazz, jobject jSymbols, jint count)
@@ -159,7 +162,8 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGDynamicLibraryB
sp_av_pix_fmt_descriptors = (const AVPixFmtDescriptor*) (intptr_t) symbols[i++];
sp_av_free = (AV_FREE) (intptr_t) symbols[i++];
sp_av_get_bits_per_pixel = (AV_GET_BITS_PER_PIXEL) (intptr_t) symbols[i++];
- // count: 18
+ sp_av_samples_get_buffer_size = (AV_SAMPLES_GET_BUFFER_SIZE) (intptr_t) symbols[i++];
+ // count: 19
sp_avformat_alloc_context = (AVFORMAT_ALLOC_CONTEXT) (intptr_t) symbols[i++];;
sp_avformat_free_context = (AVFORMAT_FREE_CONTEXT) (intptr_t) symbols[i++];
@@ -174,7 +178,7 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGDynamicLibraryB
sp_avformat_network_deinit = (AVFORMAT_NETWORK_DEINIT) (intptr_t) symbols[i++];
sp_avformat_find_stream_info = (AVFORMAT_FIND_STREAM_INFO) (intptr_t) symbols[i++];
sp_av_find_stream_info = (AV_FIND_STREAM_INFO) (intptr_t) symbols[i++];
- // count: 31
+ // count: 32
(*env)->ReleasePrimitiveArrayCritical(env, jSymbols, symbols, 0);
@@ -188,6 +192,41 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGDynamicLibraryB
return JNI_TRUE;
}
+static void _updateSound(JNIEnv *env, jobject instance, int8_t *data, int32_t data_size, int32_t aPTS) {
+ if(NULL!=env) {
+ jbyteArray jbArray = (*env)->NewByteArray(env, data_size);
+ if (jbArray == NULL) {
+ fprintf(stderr, "FFMPEGMediaPlayer: out of memory in native _updateSound\n");
+ return; /* out of memory error thrown */
+ }
+
+/*
+ // Visualize sample waveform
+ int i;
+ for(i=0;i<40;i++){
+ int8_t b = data[i];
+ if(b<0) {
+ printf(" ");
+ } else if(b<64) {
+ printf("_");
+ } else if(b < 128) {
+ printf("-");
+ } else if(b == 128) {
+ printf("=");
+ } else if(b < 256-64) {
+ printf("\"");
+ } else {
+ printf("'");
+ }
+ }
+ printf("nA\n");
+*/
+
+ (*env)->SetByteArrayRegion(env, jbArray, 0, data_size, data);
+ (*env)->CallVoidMethod(env, instance, jni_mid_updateSound, jbArray, data_size, aPTS);
+ }
+}
+
static void _updateJavaAttributes(JNIEnv *env, jobject instance, FFMPEGToolBasicAV_t* pAV)
{
// int shallBeDetached = 0;
@@ -331,10 +370,12 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_ini
JoglCommon_FatalError(env, "JOGL FFMPEG: can't use %s", ClazzNameFFMPEGMediaPlayer);
}
+ jni_mid_updateSound = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateSound", "([BII)V");
jni_mid_updateAttributes1 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes", "(IIIIIFIILjava/lang/String;Ljava/lang/String;)V");
jni_mid_updateAttributes2 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes2", "(IIIIIIIIII)V");
- if(jni_mid_updateAttributes1 == NULL ||
+ if(jni_mid_updateSound == NULL ||
+ jni_mid_updateAttributes1 == NULL ||
jni_mid_updateAttributes2 == NULL) {
return JNI_FALSE;
}
@@ -579,7 +620,6 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
int frameFinished;
if(sp_av_read_frame(pAV->pFormatCtx, &packet)>=0) {
- /**
if(packet.stream_index==pAV->aid) {
// Decode audio frame
if(NULL == pAV->pAFrame) {
@@ -590,16 +630,19 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
int new_packet = 1;
int len1;
int flush_complete = 0;
- int data_size = 0;
while (packet.size > 0 || (!packet.data && new_packet)) {
new_packet = 0;
if (flush_complete) {
break;
}
if(HAS_FUNC(sp_avcodec_decode_audio4)) {
- len1 = sp_avcodec_decode_audio4(pAV->pVCodecCtx, pAV->pAFrame, &frameFinished, &packet);
+ len1 = sp_avcodec_decode_audio4(pAV->pACodecCtx, pAV->pAFrame, &frameFinished, &packet);
} else {
- len1 = sp_avcodec_decode_audio3(pAV->pVCodecCtx, int16_t *samples, int *frame_size_ptr, &frameFinished, &packet);
+ #if 0
+ len1 = sp_avcodec_decode_audio3(pAV->pACodecCtx, int16_t *samples, int *frame_size_ptr, &frameFinished, &packet);
+ #endif
+ JoglCommon_throwNewRuntimeException(env, "Unimplemented: FFMPEGMediaPlayer sp_avcodec_decode_audio3 fallback");
+ return 0;
}
if (len1 < 0) {
// if error, we skip the frame
@@ -611,28 +654,73 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
if (!frameFinished) {
// stop sending empty packets if the decoder is finished
- if (!packet.data && pAV->pVCodecCtx->codec->capabilities & CODEC_CAP_DELAY) {
+ if (!packet.data && pAV->pACodecCtx->codec->capabilities & CODEC_CAP_DELAY) {
flush_complete = 1;
}
continue;
}
- int32_t pts = pAV->pAFrame->pkt_pts * my_av_q2i32(1000, pAV->pAStream->time_base);
- pAV->aPTS += ( data_size * 1000 ) / (2 * pAV->pVCodecCtx->channels * pAV->pVCodecCtx->sample_rate);
- printf("A pts %d - %d\n", pts, pAV->aPTS);
+ int data_size = 0;
+ if(HAS_FUNC(sp_av_samples_get_buffer_size)) {
+ data_size = sp_av_samples_get_buffer_size(NULL /* linesize, may be NULL */,
+ pAV->aChannels,
+ pAV->pAFrame->nb_samples,
+ pAV->pAFrame->format,
+ 1 /* align */);
+ }
+ int32_t pts = (int64_t) ( pAV->pAFrame->pkt_pts * (int64_t) 1000 * (int64_t) pAV->pAStream->time_base.num )
+ / (int64_t) pAV->pAStream->time_base.den;
+ #if 0
+ printf("channels %d sample_rate %d \n", pAV->aChannels , pAV->aSampleRate);
+ printf("data %d \n", pAV->aFrameSize);
+ #endif
+ pAV->aPTS += (int64_t) ( data_size * (int64_t) 1000 )
+ / (int64_t) (2 * (int64_t) pAV->aChannels * (int64_t) pAV->aSampleRate);
+ if( pAV->verbose ) {
+ printf("A pts %d - %d\n", pts, pAV->aPTS);
+ }
+ // TODO: Wrap audio buffer data in a com.jogamp.openal.sound3d.Buffer or similar
+ // and hand it over to the user using a suitable API.
+ // TODO: OR send the audio buffer data down to sound card directly using JOAL.
+ _updateSound(env, instance, pAV->pAFrame->data[0], data_size, pAV->aPTS);
+
res = 1;
}
- } else */ if(packet.stream_index==pAV->vid) {
+ } else if(packet.stream_index==pAV->vid) {
// Decode video frame
if(NULL == pAV->pVFrame) {
sp_av_free_packet(&packet);
return res;
}
- sp_avcodec_decode_video2(pAV->pVCodecCtx, pAV->pVFrame, &frameFinished, &packet);
- // Did we get a video frame?
- if(frameFinished)
- {
+ int new_packet = 1;
+ int len1;
+ int flush_complete = 0;
+ while (packet.size > 0 || (!packet.data && new_packet)) {
+
+ new_packet = 0;
+ if (flush_complete) {
+ break;
+ }
+
+ len1 = sp_avcodec_decode_video2(pAV->pVCodecCtx, pAV->pVFrame, &frameFinished, &packet);
+
+ if (len1 < 0) {
+ // if error, we skip the frame
+ packet.size = 0;
+ break;
+ }
+ packet.data += len1;
+ packet.size -= len1;
+
+ if (!frameFinished) {
+ // stop sending empty packets if the decoder is finished
+ if (!packet.data && pAV->pVCodecCtx->codec->capabilities & CODEC_CAP_DELAY) {
+ flush_complete = 1;
+ }
+ continue;
+ }
+
res = 2;
// FIXME: Libav Binary compatibility! JAU01
const AVRational time_base = pAV->pVStream->time_base;
@@ -678,7 +766,9 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
}
// Free the packet that was allocated by av_read_frame
- sp_av_free_packet(&packet);
+ // This call caused a double free and has been commented out.
+ // TODO: check what actually releases the packet's memory.
+ // sp_av_free_packet(&packet);
}
return res;
}
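The PTS handling in the native audio path rescales a packet PTS, counted in stream time_base units, to milliseconds, and advances the running audio clock by the duration of the decoded 16-bit PCM samples. A worked example of both formulas, written in Java purely for illustration with assumed stream parameters:

    /** Rescales a packet PTS to milliseconds and computes the per-frame audio duration. */
    public class PtsMath {
        public static void main(String[] args) {
            // Assumed stream time base of 1/90000 (typical for MPEG-TS).
            final long timeBaseNum = 1, timeBaseDen = 90000;
            final long pktPts = 450000; // 5 seconds into the stream, in time_base units
            final long ptsMillis = pktPts * 1000L * timeBaseNum / timeBaseDen;
            System.out.println("packet pts = " + ptsMillis + " ms"); // 5000 ms

            // Duration added to the running audio PTS per decoded frame:
            // data_size bytes of 16-bit PCM at the given channel count and sample rate.
            final int channels = 2, sampleRate = 48000; // assumed stream parameters
            final int dataSize = 4096;                  // bytes reported for this frame
            final long frameMillis = (dataSize * 1000L) / (2L * channels * sampleRate);
            System.out.println("frame advances aPTS by " + frameMillis + " ms"); // 21 ms
        }
    }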