author     Sven Gothel <[email protected]>    2013-07-19 05:08:34 +0200
committer  Sven Gothel <[email protected]>    2013-07-19 05:08:34 +0200
commit     da7210c6498b6fcc2fbf829684ea399a6b4c3f65 (patch)
tree       8acf7efc5006cfeabc8e3f453c344ff3b4ddc456 /src
parent     24ba0ac528603ee527f002d350bcfb19754f2457 (diff)
FFMPEGPlayer Audio Sink Refactoring ..
- AudioSink w/ AudioFrame and formats public
- ALAudioSink uses a circular buffer now, hence relaxes the one-threaded player mode
- FFMPEGMediaPlayer uses multiple audio frames (equal to the ALAudioSink number) and wraps data to NIO buffer w/o copy.
- FFMPEGMediaPlayer audio threading currently disabled: distorted sound.
  Seems that the ALAudioSink's circular buffer usage is good enough for now.
- Verbosity only w/ DEBUG flag
- New SyncedRingbuffer for efficient synced buffering
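To make the new public API concrete, here is a minimal usage sketch of the refactored audio path. It is a hypothetical caller: the class name, the buffer count of 8 and the 4096-byte placeholder buffer are illustrative assumptions; the AudioSink / AudioSinkFactory calls themselves are taken from the diff below.

    import java.nio.ByteBuffer;

    import com.jogamp.opengl.util.av.AudioSink;
    import com.jogamp.opengl.util.av.AudioSinkFactory;

    public class AudioSinkUsageSketch {
        public static void main(String[] args) {
            // Pick the best available sink: ALAudioSink, else JavaSoundAudioSink, else NullAudioSink.
            final AudioSink sink = AudioSinkFactory.createDefault();

            // Negotiate the format: request the sink's preferred format and a fixed buffer count.
            final AudioSink.AudioDataFormat chosen =
                sink.initSink(sink.getPreferredFormat(), 8 /* bufferCount, arbitrary here */);
            if (null == chosen) {
                throw new RuntimeException("AudioSink could not satisfy the requested format");
            }

            // Wrap decoded PCM samples in an AudioFrame (direct NIO buffer, no copy) and enqueue them.
            final ByteBuffer pcm = ByteBuffer.allocateDirect(4096); // placeholder sample data
            final AudioSink.AudioFrame frame =
                new AudioSink.AudioFrame(pcm, pcm.remaining(), 0 /* audio PTS in ms */);
            if (sink.isDataAvailable(frame.dataSize)) {
                sink.writeData(frame);
            }

            sink.destroy();
        }
    }

The AudioFrame hands the decoded samples to the sink as a direct NIO buffer without an intermediate copy, which is the "w/o copy" point of the commit message.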
Diffstat (limited to 'src')
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/AudioSink.java | 168
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/AudioSinkFactory.java | 65
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java | 8
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayerFactory.java | 26
-rw-r--r--  src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java | 10
-rw-r--r--  src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java | 484
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/AudioSink.java | 13
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java | 4
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java | 120
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java | 41
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java | 286
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java | 245
-rw-r--r--  src/jogl/native/libav/ffmpeg_tool.h | 4
-rw-r--r--  src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c | 87
-rw-r--r--  src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieCube.java | 2
-rw-r--r--  src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSimple.java | 2
16 files changed, 1219 insertions, 346 deletions
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/AudioSink.java b/src/jogl/classes/com/jogamp/opengl/util/av/AudioSink.java
new file mode 100644
index 000000000..5caeb969a
--- /dev/null
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/AudioSink.java
@@ -0,0 +1,168 @@
+/**
+ * Copyright 2013 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package com.jogamp.opengl.util.av;
+
+import java.nio.ByteBuffer;
+
+import jogamp.opengl.Debug;
+
+public interface AudioSink {
+ public static final boolean DEBUG = Debug.debug("AudioSink");
+
+ /** Specifies the audio data type. Currently only PCM is supported. */
+ public static enum AudioDataType { PCM };
+
+ /**
+ * Specifies the audio data format.
+ */
+ public static class AudioDataFormat {
+ public AudioDataFormat(AudioDataType dataType, int sampleRate, int sampleSize, int channelCount, boolean signed, boolean littleEndian) {
+ this.dataType = dataType;
+ this.sampleRate = sampleRate;
+ this.sampleSize = sampleSize;
+ this.channelCount = channelCount;
+ this.signed = signed;
+ this.littleEndian = littleEndian;
+ }
+ /** Audio data type. */
+ public final AudioDataType dataType;
+ /** Sample rate in Hz (1/s). */
+ public final int sampleRate;
+ /** Sample size in bits. */
+ public final int sampleSize;
+ /** Number of channels. */
+ public final int channelCount;
+ public final boolean signed;
+ public final boolean littleEndian;
+
+ public String toString() {
+ return "AudioDataFormat[type "+dataType+", sampleRate "+sampleRate+", sampleSize "+sampleSize+", channelCount "+channelCount+
+ ", signed "+signed+", "+(littleEndian?"little":"big")+"endian]"; }
+ }
+ /** Default {@link AudioDataFormat}, [type PCM, sampleRate 44100, sampleSize 16, channelCount 2, signed, littleEndian]. */
+ public static final AudioDataFormat DefaultFormat = new AudioDataFormat(AudioDataType.PCM, 44100, 16, 2, true /* signed */, true /* littleEndian */);
+
+ public static class AudioFrame {
+ public final ByteBuffer data;
+ public final int dataSize;
+ public final int audioPTS;
+
+ public AudioFrame(ByteBuffer data, int dataSize, int audioPTS) {
+ if( dataSize > data.remaining() ) {
+ throw new IllegalArgumentException("Give size "+dataSize+" exceeds remaining bytes in ls "+data+". "+this);
+ }
+ this.data=data;
+ this.dataSize=dataSize;
+ this.audioPTS=audioPTS;
+ }
+
+ public String toString() { return "AudioFrame[apts "+audioPTS+", data "+data+", payloadSize "+dataSize+"]"; }
+ }
+
+ /**
+ * Returns the <code>initialized state</code> of this instance.
+ * <p>
+ * The <code>initialized state</code> is affected by this instance's
+ * overall availability, i.e. after instantiation,
+ * as well as by {@link #destroy()}.
+ * </p>
+ */
+ public boolean isInitialized();
+
+ /**
+ * Returns the preferred {@link AudioDataFormat} by this sink.
+ * <p>
+ * The preferred format shall reflect this sink's most native format,
+ * i.e. best performance w/o data conversion.
+ * </p>
+ * @see #initSink(AudioDataFormat, int)
+ */
+ public AudioDataFormat getPreferredFormat();
+
+ /**
+ * Initializes the sink.
+ * <p>
+ * Implementation shall try to match the given <code>requestedFormat</code> {@link AudioDataFormat}
+ * as closely as possible, regarding its capabilities.
+ * </p>
+ * <p>
+ * A user may consider {@link #getPreferredFormat()} and pass this value
+ * to utilize best performance and <i>behavior</i>.
+ * </p>
+ * <p>The {@link #DefaultFormat} <i>should be</i> supported by all implementations.
+ * </p>
+ * @param requestedFormat the requested {@link AudioDataFormat}.
+ * @param bufferCount number of buffers for sink
+ * @return if successful the chosen AudioDataFormat based on the <code>requestedFormat</code> and this sinks capabilities, otherwise <code>null</code>.
+ */
+ public AudioDataFormat initSink(AudioDataFormat requestedFormat, int bufferCount);
+
+
+ /** Destroys this instance, i.e. closes all streams and devices allocated. */
+ public void destroy();
+
+ /**
+ * Returns the number of bytes queued for playing.
+ * <p>
+ * {@link #initSink(AudioDataFormat, int)} must be called first.
+ * </p>
+ */
+ public int getQueuedByteCount();
+
+ /**
+ * Returns the queued buffer time in milliseconds for playing.
+ * <p>
+ * {@link #initSink(AudioDataFormat, int)} must be called first.
+ * </p>
+ */
+ public int getQueuedTime();
+
+ /**
+ * Returns the number of buffers in the sink available for writing.
+ * <p>
+ * {@link #initSink(AudioDataFormat, int)} must be called first.
+ * </p>
+ */
+ public int getWritableBufferCount();
+
+ /**
+ * Returns true if data is available to be written in the sink.
+ * <p>
+ * {@link #initSink(AudioDataFormat, int)} must be called first.
+ * </p>
+ */
+ public boolean isDataAvailable(int data_size);
+
+ /**
+ * Writes the given {@link AudioFrame}'s audio data, i.e. <code>dataSize</code> bytes of its direct ByteBuffer, to this sink.
+ * <p>
+ * The data must comply with the chosen {@link AudioDataFormat} as returned by {@link #initSink(AudioDataFormat, int)}.
+ * </p>
+ */
+ public void writeData(AudioFrame audioFrame);
+}
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/AudioSinkFactory.java b/src/jogl/classes/com/jogamp/opengl/util/av/AudioSinkFactory.java
new file mode 100644
index 000000000..40321fb6f
--- /dev/null
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/AudioSinkFactory.java
@@ -0,0 +1,65 @@
+/**
+ * Copyright 2013 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package com.jogamp.opengl.util.av;
+
+import jogamp.opengl.util.av.NullAudioSink;
+
+import com.jogamp.common.util.ReflectionUtil;
+
+public class AudioSinkFactory {
+ private static final String ALAudioSinkClazzName = "jogamp.opengl.openal.av.ALAudioSink";
+ private static final String JavaAudioSinkClazzName = "jogamp.opengl.util.av.JavaSoundAudioSink";
+
+ public static AudioSink createDefault() {
+ final ClassLoader cl = GLMediaPlayerFactory.class.getClassLoader();
+ AudioSink sink = create(cl, ALAudioSinkClazzName);
+ if( null == sink ) {
+ sink = create(cl, JavaAudioSinkClazzName);
+ }
+ if( null == sink ) {
+ sink = new NullAudioSink();
+ }
+ return sink;
+ }
+
+ public static AudioSink create(final ClassLoader cl, String implName) {
+ final AudioSink audioSink;
+ if(ReflectionUtil.isClassAvailable(implName, cl)){
+ try {
+ audioSink = (AudioSink) ReflectionUtil.createInstance(implName, cl);
+ if( audioSink.isInitialized() ) {
+ return audioSink;
+ }
+ } catch (Throwable t) {
+ if(AudioSink.DEBUG) { System.err.println("Caught "+t.getClass().getName()+": "+t.getMessage()); t.printStackTrace(); }
+ }
+ }
+ return null;
+ }
+
+}
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
index 3eca01986..1825dbd47 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
@@ -57,6 +57,14 @@ import com.jogamp.opengl.util.texture.TextureSequence;
* </ul>
* </p>
* <p>
+ * Implementations of this interface must implement:
+ * <pre>
+ * public static final boolean isAvailable();
+ * </pre>
+ * to be properly considered by {@link GLMediaPlayerFactory#create(ClassLoader, String)}
+ * and {@link GLMediaPlayerFactory#createDefault()}.
+ * </p>
+ * <p>
* Variable type, value range and dimension has been chosen to suit embedded CPUs
* and characteristics of audio and video streaming.
* Milliseconds of type integer with a maximum value of {@link Integer#MAX_VALUE}
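As a hedged sketch of the contract documented above: a hypothetical implementation (the class name and the availability probe are placeholders; the pattern mirrors AndroidGLMediaPlayerAPI14 further down) would expose the static query roughly like this, which GLMediaPlayerFactory.create(ClassLoader, String) then invokes reflectively under the method name "isAvailable".

    // Hypothetical skeleton; the actual GLMediaPlayer/GLMediaPlayerImpl methods are omitted for brevity.
    public class MyGLMediaPlayer /* extends GLMediaPlayerImpl */ {
        static final boolean available;
        static {
            boolean _avail = false;
            try {
                // Probe platform or native prerequisites here (placeholder check).
                _avail = true;
            } catch (Throwable t) {
                // leave _avail == false
            }
            available = _avail;
        }
        /** Queried reflectively by GLMediaPlayerFactory.create(ClassLoader, String). */
        public static final boolean isAvailable() { return available; }
    }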
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayerFactory.java b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayerFactory.java
index 6fcf20ed2..f09531f7f 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayerFactory.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayerFactory.java
@@ -38,18 +38,24 @@ public class GLMediaPlayerFactory {
private static final String FFMPEGMediaPlayerClazzName = "jogamp.opengl.util.av.impl.FFMPEGMediaPlayer";
private static final String isAvailableMethodName = "isAvailable";
- public static GLMediaPlayer create() {
+ public static GLMediaPlayer createDefault() {
final ClassLoader cl = GLMediaPlayerFactory.class.getClassLoader();
- if(Platform.OS_TYPE.equals(Platform.OSType.ANDROID)) {
- if(AndroidVersion.SDK_INT >= 14) {
- if(((Boolean)ReflectionUtil.callStaticMethod(AndroidGLMediaPlayerAPI14ClazzName, isAvailableMethodName, null, null, cl)).booleanValue()) {
- return (GLMediaPlayer) ReflectionUtil.createInstance(AndroidGLMediaPlayerAPI14ClazzName, cl);
- }
- }
+ GLMediaPlayer sink = create(cl, AndroidGLMediaPlayerAPI14ClazzName);
+ if( null == sink ) {
+ sink = create(cl, FFMPEGMediaPlayerClazzName);
}
- if(((Boolean)ReflectionUtil.callStaticMethod(FFMPEGMediaPlayerClazzName, isAvailableMethodName, null, null, cl)).booleanValue()) {
- return (GLMediaPlayer) ReflectionUtil.createInstance(FFMPEGMediaPlayerClazzName, cl);
+ if( null == sink ) {
+ sink = new NullGLMediaPlayer();
}
- return new NullGLMediaPlayer();
+ return sink;
+ }
+
+ public static GLMediaPlayer create(final ClassLoader cl, String implName) {
+ try {
+ if(((Boolean)ReflectionUtil.callStaticMethod(implName, isAvailableMethodName, null, null, cl)).booleanValue()) {
+ return (GLMediaPlayer) ReflectionUtil.createInstance(implName, cl);
+ }
+ } catch (Throwable t) { if(GLMediaPlayer.DEBUG) { System.err.println("Caught "+t.getClass().getName()+": "+t.getMessage()); t.printStackTrace(); } }
+ return null;
}
}
diff --git a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
index 23f9161d4..8356a2bae 100644
--- a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
+++ b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
@@ -32,6 +32,8 @@ import java.io.IOException;
import javax.media.opengl.GL;
import javax.media.opengl.GLES2;
+import com.jogamp.common.os.AndroidVersion;
+import com.jogamp.common.os.Platform;
import com.jogamp.opengl.util.texture.TextureSequence;
import jogamp.common.os.android.StaticContext;
@@ -60,7 +62,13 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
static final boolean available;
static {
- available = true; // default .. TODO: May restrict availability ?
+ boolean _avail = false;
+ if(Platform.OS_TYPE.equals(Platform.OSType.ANDROID)) {
+ if(AndroidVersion.SDK_INT >= 14) {
+ _avail = true;
+ }
+ }
+ available = _avail;
}
public static final boolean isAvailable() { return available; }
diff --git a/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java b/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
index 690948c5a..87c7b937a 100644
--- a/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
+++ b/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
@@ -1,176 +1,428 @@
+/**
+ * Copyright 2013 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
package jogamp.opengl.openal.av;
-import java.nio.Buffer;
-import java.nio.ByteBuffer;
-import java.lang.InterruptedException;
+import jogamp.opengl.util.av.SyncedRingbuffer;
-import jogamp.opengl.util.av.AudioSink;
-
-import com.jogamp.common.nio.Buffers;
-import com.jogamp.openal.*;
+import com.jogamp.openal.AL;
+import com.jogamp.openal.ALC;
+import com.jogamp.openal.ALCcontext;
+import com.jogamp.openal.ALCdevice;
+import com.jogamp.openal.ALFactory;
+import com.jogamp.opengl.util.av.AudioSink;
+/***
+ * OpenAL Audio Sink
+ */
public class ALAudioSink implements AudioSink {
- static ALC alc;
- static AL al;
- static ALCdevice device;
- static ALCcontext context;
-
- // AudioFormat parameters
- public static final int SAMPLE_RATE = 44100;
-
- // Chunk of audio processed at one time
- public static final int BUFFER_SIZE = 1000;
+ /** Chunk of audio processed at one time. FIXME: Parameterize .. */
+ public static final int BUFFER_SIZE = 4096;
public static final int SAMPLES_PER_BUFFER = BUFFER_SIZE / 2;
- // Sample time values
- public static final double SAMPLE_TIME_IN_SECS = 1.0 / SAMPLE_RATE;
- public static final double BUFFER_TIME_IN_SECS = SAMPLE_TIME_IN_SECS * SAMPLES_PER_BUFFER;
+ private static final ALC alc;
+ private static final AL al;
+ private static final boolean staticAvailable;
- private static int NUM_BUFFERS = 5;
- private static int bufferNumber = 0;
- private static int[] buffers = new int[NUM_BUFFERS];
- private static int[] source = new int[1];
- private static boolean initBuffer = true;
- private static int frequency = 44100;
- private static int format = AL.AL_FORMAT_STEREO16;
+ private String deviceSpecifier;
+ private ALCdevice device;
+ private ALCcontext context;
+
+ /** Sample period in seconds */
+ public float samplePeriod;
+ /** Buffer period in seconds */
+ public float bufferPeriod;
- private static boolean available = false;
+ static class ActiveBuffer {
+ ActiveBuffer(Integer name, int size) {
+ this.name = name;
+ this.size = size;
+ }
+ public final Integer name;
+ public final int size;
+ public String toString() { return "ABuffer[name "+name+", size "+size+"]"; }
+ }
+ int[] alBuffers = null;
+ private SyncedRingbuffer<Integer> alBufferAvail = null;
+ private SyncedRingbuffer<ActiveBuffer> alBufferPlaying = null;
+ private int alBufferBytesQueued = 0;
+
+ private int[] alSource = null;
+ private AudioDataFormat chosenFormat;
+ private int alFormat;
+ private boolean initialized;
+
static {
-
- boolean joalFound = false;
+ ALC _alc = null;
+ AL _al = null;
try {
- Class.forName("com.jogamp.openal.ALFactory");
- joalFound = true;
- } catch(ClassNotFoundException e){
- // Joal not found on classpath
- }
-
- if(joalFound) {
+ _alc = ALFactory.getALC();
+ _al = ALFactory.getAL();
+ } catch(Throwable t) {
+ if( DEBUG ) {
+ System.err.println("ALAudioSink: Catched "+t.getClass().getName()+": "+t.getMessage());
+ t.printStackTrace();
+ }
+ }
+ alc = _alc;
+ al = _al;
+ staticAvailable = null != alc && null != al;
+ }
+
+ public ALAudioSink() {
+ initialized = false;
+ chosenFormat = null;
- alc = ALFactory.getALC();
- String deviceSpecifier;
-
+ if( !staticAvailable ) {
+ return;
+ }
+
+ try {
// Get handle to default device.
device = alc.alcOpenDevice(null);
if (device == null) {
- throw new ALException("Error opening default OpenAL device");
+ throw new RuntimeException("ALAudioSink: Error opening default OpenAL device");
}
// Get the device specifier.
deviceSpecifier = alc.alcGetString(device, ALC.ALC_DEVICE_SPECIFIER);
if (deviceSpecifier == null) {
- throw new ALException("Error getting specifier for default OpenAL device");
+ throw new RuntimeException("ALAudioSink: Error getting specifier for default OpenAL device");
}
// Create audio context.
context = alc.alcCreateContext(device, null);
if (context == null) {
- throw new ALException("Error creating OpenAL context");
+ throw new RuntimeException("ALAudioSink: Error creating OpenAL context");
}
// Set active context.
alc.alcMakeContextCurrent(context);
// Check for an error.
- if (alc.alcGetError(device) != ALC.ALC_NO_ERROR) {
- throw new ALException("Error making OpenAL context current");
+ if ( alc.alcGetError(device) != ALC.ALC_NO_ERROR ) {
+ throw new RuntimeException("ALAudioSink: Error making OpenAL context current");
}
- al = ALFactory.getAL();
-
- // Allocate buffers
- al.alGenBuffers(NUM_BUFFERS, buffers, 0);
- al.alGenSources(1, source, 0);
+ // Create source
+ {
+ alSource = new int[1];
+ al.alGenSources(1, alSource, 0);
+ final int err = al.alGetError();
+ if( err != AL.AL_NO_ERROR ) {
+ alSource = null;
+ throw new RuntimeException("ALAudioSink: Error generating Source: 0x"+Integer.toHexString(err));
+ }
+ }
- if(al.alGetError() != AL.AL_NO_ERROR) {
- throw new ALException("Error generating :(");
- }
-
- System.out.println("OpenAL audio sink using device: " + deviceSpecifier);
- available = true;
+ if( DEBUG ) {
+ System.err.println("ALAudioSink: Using device: " + deviceSpecifier);
+ }
+ initialized = true;
+ return;
+ } catch ( Exception e ) {
+ if( DEBUG ) {
+ System.err.println(e.getMessage());
+ }
+ destroy();
}
}
@Override
- public boolean isDataAvailable(int data_size) {
- return true;
+ public String toString() {
+ final int alSrcName = null != alSource ? alSource[0] : 0;
+ final int alBuffersLen = null != alBuffers ? alBuffers.length : 0;
+ return "ALAudioSink[init "+initialized+", device "+deviceSpecifier+", ctx "+context+", alSource "+alSrcName+
+ ", chosen "+chosenFormat+", alFormat "+toHexString(alFormat)+
+ ", buffers[total "+alBuffersLen+", avail "+alBufferAvail.size()+", "+alBufferPlaying.getFreeSlots()+
+ ", queued[bufferCount "+alBufferPlaying.size()+", "+getQueuedTime() + " ms, " + alBufferBytesQueued+" bytes]";
}
@Override
- public void writeData(byte[] sampleData, int data_size) {
- // OpenAL consumes buffers in the background
- // we first need to initialize the OpenAL buffers then
- // start continous playback.
- alc.alcMakeContextCurrent(context);
- if(initBuffer) {
-
- ByteBuffer data = Buffers.newDirectByteBuffer(sampleData);
- al.alBufferData(buffers[bufferNumber], format, data, data_size, frequency);
- int error = al.alGetError();
- if(error != AL.AL_NO_ERROR) {
- System.out.println("bufferNumber"+bufferNumber+" Data "+sampleData+" size"+data_size);
- throw new ALException("Error loading :( error code: " + error);
+ public AudioDataFormat getPreferredFormat() {
+ return DefaultFormat;
+ }
+
+ @Override
+ public AudioDataFormat initSink(AudioDataFormat requestedFormat, int bufferCount) {
+ if( !staticAvailable ) {
+ return null;
+ }
+ samplePeriod = 1.0f / requestedFormat.sampleRate;
+ bufferPeriod = samplePeriod * SAMPLES_PER_BUFFER;
+ switch( requestedFormat.channelCount ) {
+ case 1: {
+ switch ( requestedFormat.sampleSize ) {
+ case 8:
+ alFormat = AL.AL_FORMAT_MONO8; break;
+ case 16:
+ alFormat = AL.AL_FORMAT_MONO16; break;
+ default:
+ return null;
+ }
+ } break;
+ case 2:
+ switch ( requestedFormat.sampleSize ) {
+ case 8:
+ alFormat = AL.AL_FORMAT_STEREO8; break;
+ case 16:
+ alFormat = AL.AL_FORMAT_STEREO16; break;
+ default:
+ return null;
+ }
+ }
+ // Allocate buffers
+ destroyBuffers();
+ {
+ alBuffers = new int[bufferCount];
+ al.alGenBuffers(bufferCount, alBuffers, 0);
+ final int err = al.alGetError();
+ if( err != AL.AL_NO_ERROR ) {
+ alBuffers = null;
+ throw new RuntimeException("ALAudioSink: Error generating Buffers: 0x"+Integer.toHexString(err));
}
-
- if(bufferNumber==NUM_BUFFERS-1){
- // all buffers queued
- al.alSourceQueueBuffers(source[0], NUM_BUFFERS, buffers, 0);
- // start playback
- al.alSourcePlay(source[0]);
- if(al.alGetError() != AL.AL_NO_ERROR) {
- throw new ALException("Error starting :(");
+ final Integer[] alBufferRingArray = new Integer[bufferCount];
+ for(int i=0; i<bufferCount; i++) {
+ alBufferRingArray[i] = Integer.valueOf(alBuffers[i]);
+ }
+ alBufferAvail = new SyncedRingbuffer<Integer>(alBufferRingArray, true /* full */);
+ alBufferPlaying = new SyncedRingbuffer<ActiveBuffer>(new ActiveBuffer[bufferCount], false /* full */);
+ }
+
+
+ chosenFormat = requestedFormat;
+ return chosenFormat;
+ }
+
+ private void destroyBuffers() {
+ if( !staticAvailable ) {
+ return;
+ }
+ if( null != alBuffers ) {
+ try {
+ al.alDeleteBuffers(alBufferAvail.capacity(), alBuffers, 0);
+ } catch (Throwable t) {
+ if( DEBUG ) {
+ System.err.println("Catched "+t.getClass().getName()+": "+t.getMessage());
+ t.printStackTrace();
}
- initBuffer=false;
}
-
- // update buffer number to fill
- bufferNumber=(bufferNumber+1)%NUM_BUFFERS;
- } else {
- // OpenAL is playing in the background.
- // one new frame with audio data is ready
-
- // first wait for openal to release one buffer
- int[] buffer=new int[1];
- int[] val=new int[1];
- do {
- al.alGetSourcei(source[0], AL.AL_BUFFERS_PROCESSED, val, 0);
- if(val[0] <= 0){
- try {
- Thread.sleep(1);
- } catch (InterruptedException e){
- }
+ alBufferAvail.clear(true);
+ alBufferAvail = null;
+ alBufferPlaying.clear(true);
+ alBufferPlaying = null;
+ alBufferBytesQueued = 0;
+ alBuffers = null;
+ }
+ }
+
+ @Override
+ public void destroy() {
+ initialized = false;
+ if( !staticAvailable ) {
+ return;
+ }
+ if( null != alSource ) {
+ try {
+ al.alDeleteSources(1, alSource, 0);
+ } catch (Throwable t) {
+ if( DEBUG ) {
+ System.err.println("Catched "+t.getClass().getName()+": "+t.getMessage());
+ t.printStackTrace();
}
- } while (val[0] <= 0);
-
- // fill and requeue the empty buffer
- al.alSourceUnqueueBuffers(source[0], 1, buffer , 0);
- Buffer data = Buffers.newDirectByteBuffer(sampleData);
- al.alBufferData(buffer[0], format, data, data_size, frequency);
- al.alSourceQueueBuffers(source[0], 1, buffer, 0);
- if(al.alGetError() != AL.AL_NO_ERROR) {
- throw new ALException("Error buffering :(");
}
+ alSource = null;
+ }
+
+ destroyBuffers();
- // Restart openal playback if needed
- al.alGetSourcei(source[0], AL.AL_SOURCE_STATE, val, 0);
- if(val[0] != al.AL_PLAYING) {
- al.alSourcePlay(source[0]);
+ if( null != context ) {
+ try {
+ alc.alcDestroyContext(context);
+ } catch (Throwable t) {
+ if( DEBUG ) {
+ System.err.println("Catched "+t.getClass().getName()+": "+t.getMessage());
+ t.printStackTrace();
+ }
+ }
+ context = null;
+ }
+ if( null != device ) {
+ try {
+ alc.alcCloseDevice(device);
+ } catch (Throwable t) {
+ if( DEBUG ) {
+ System.err.println("Catched "+t.getClass().getName()+": "+t.getMessage());
+ t.printStackTrace();
+ }
}
+ device = null;
}
+ chosenFormat = null;
+ }
+
+ @Override
+ public boolean isInitialized() {
+ return initialized;
}
+
+ private final void dequeueBuffer(boolean wait) {
+ int alErr = AL.AL_NO_ERROR;
+ final int[] val=new int[1];
+ do {
+ al.alGetSourcei(alSource[0], AL.AL_BUFFERS_PROCESSED, val, 0);
+ alErr = al.alGetError();
+ if( AL.AL_NO_ERROR != alErr ) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while quering processed buffers at source. "+this);
+ }
+ if( wait && val[0] <= 0 ) {
+ try {
+ Thread.sleep(1);
+ } catch (InterruptedException e){
+ }
+ }
+ } while (val[0] <= 0);
+ final int processedBuffers = val[0];
+ if( processedBuffers > 0 ) {
+ int[] buffers=new int[processedBuffers];
+ al.alSourceUnqueueBuffers(alSource[0], processedBuffers, buffers, 0);
+ alErr = al.alGetError();
+ if( AL.AL_NO_ERROR != alErr ) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while dequeueing "+processedBuffers+" processed buffers. "+this);
+ }
+ for ( int i=0; i<processedBuffers; i++ ) {
+ final ActiveBuffer releasedBuffer = alBufferPlaying.get(true /* clearRef */);
+ if( null == releasedBuffer ) {
+ throw new InternalError("Internal Error: "+this);
+ }
+ if( releasedBuffer.name.intValue() != buffers[i] ) {
+ throw new InternalError("Buffer name mismatch: dequeued: "+buffers[i]+", released "+releasedBuffer);
+ // System.err.println("XXX ["+i+"]: dequeued: "+buffers[i]+", released "+releasedBuffer);
+ }
+ alBufferBytesQueued -= releasedBuffer.size;
+ if( !alBufferAvail.put(releasedBuffer.name) ) {
+ throw new InternalError("Internal Error: "+this);
+ }
+ if( DEBUG ) {
+ System.err.println("ALAudioSink: Dequeued "+processedBuffers+", wait "+wait+", "+this);
+ }
+ }
+ }
+ }
+
+ private static final String toHexString(int v) {
+ return "0x"+Integer.toHexString(v);
+ }
+
@Override
- public int getDataAvailable() {
- int[] val=new int[1];
- al.alGetSourcei(source[0], AL.AL_BUFFERS_PROCESSED, val, 0);
- return (NUM_BUFFERS-val[0])*4096;
+ public void writeData(AudioFrame audioFrame) {
+ if( !initialized || null == chosenFormat ) {
+ return;
+ }
+ int alErr = AL.AL_NO_ERROR;
+
+ // OpenAL consumes buffers in the background
+ // we first need to initialize the OpenAL buffers then
+ // start continuous playback.
+ alc.alcMakeContextCurrent(context);
+ alErr = al.alGetError();
+ if(al.alGetError() != AL.AL_NO_ERROR) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while makeCurrent. "+this);
+ }
+
+ if( alBufferAvail.isEmpty() ) {
+ dequeueBuffer(true);
+ }
+
+ final Integer alBufferName = alBufferAvail.get(true /* clearRef */);
+ if( null == alBufferName ) {
+ throw new InternalError("Internal Error: "+this);
+ }
+ if( !alBufferPlaying.put( new ActiveBuffer(alBufferName, audioFrame.dataSize) ) ) {
+ throw new InternalError("Internal Error: "+this);
+ }
+ al.alBufferData(alBufferName.intValue(), alFormat, audioFrame.data, audioFrame.dataSize, chosenFormat.sampleRate);
+ final int[] alBufferNames = new int[] { alBufferName.intValue() };
+ al.alSourceQueueBuffers(alSource[0], 1, alBufferNames, 0);
+ alErr = al.alGetError();
+ if(al.alGetError() != AL.AL_NO_ERROR) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while queueing buffer "+toHexString(alBufferNames[0])+". "+this);
+ }
+ alBufferBytesQueued += audioFrame.dataSize;
+
+ // Restart openal playback if needed
+ {
+ int[] val = new int[1];
+ al.alGetSourcei(alSource[0], AL.AL_SOURCE_STATE, val, 0);
+ if(val[0] != AL.AL_PLAYING) {
+ if( DEBUG ) {
+ System.err.println("ALAudioSink: Start playing: "+this);
+ }
+ al.alSourcePlay(alSource[0]);
+ alErr = al.alGetError();
+ if(al.alGetError() != AL.AL_NO_ERROR) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while start playing. "+this);
+ }
+ }
+ }
}
@Override
- public boolean isAudioSinkAvailable() {
- return available;
+ public int getQueuedByteCount() {
+ if( !initialized || null == chosenFormat ) {
+ return 0;
+ }
+ return alBufferBytesQueued;
+ }
+
+ @Override
+ public int getQueuedTime() {
+ if( !initialized || null == chosenFormat ) {
+ return 0;
+ }
+ final int bps = chosenFormat.sampleSize / 8;
+ return alBufferBytesQueued / ( chosenFormat.channelCount * bps * ( chosenFormat.sampleRate / 1000 ) );
}
+
+ @Override
+ public int getWritableBufferCount() {
+ if( !initialized || null == chosenFormat ) {
+ return 0;
+ }
+ return alBufferPlaying.getFreeSlots();
+ }
+
+ @Override
+ public boolean isDataAvailable(int data_size) {
+ return initialized && null != chosenFormat;
+ }
+
}
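For reference, getQueuedTime() above derives milliseconds from the queued byte count and the chosen format's byte rate. A standalone sketch of that integer arithmetic, assuming the 44100 Hz / 16 bit / stereo DefaultFormat and a made-up queue fill:

    public class QueuedTimeSketch {
        public static void main(String[] args) {
            // Assumed values, matching AudioSink.DefaultFormat (PCM, 44100 Hz, 16 bit, stereo).
            final int sampleRate = 44100;    // Hz
            final int sampleSize = 16;       // bits per sample
            final int channelCount = 2;

            final int queuedBytes = 176400;  // placeholder for the sink's queued byte count

            final int bps = sampleSize / 8;  // bytes per sample: 2
            // bytes consumed per millisecond = channelCount * bps * samples per ms (integer math)
            final int queuedMs = queuedBytes / (channelCount * bps * (sampleRate / 1000));

            System.out.println(queuedMs + " ms queued"); // 176400 / (2 * 2 * 44) = 1002 ms
        }
    }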
diff --git a/src/jogl/classes/jogamp/opengl/util/av/AudioSink.java b/src/jogl/classes/jogamp/opengl/util/av/AudioSink.java
deleted file mode 100644
index 504e4b2db..000000000
--- a/src/jogl/classes/jogamp/opengl/util/av/AudioSink.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package jogamp.opengl.util.av;
-
-public interface AudioSink {
-
- boolean isAudioSinkAvailable();
-
- int getDataAvailable();
-
- boolean isDataAvailable(int data_size);
-
- void writeData(byte[] sampleData, int data_size);
-
-}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
index 27c926704..2ff91a3f6 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
@@ -72,7 +72,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected URLConnection urlConn = null;
- protected float playSpeed = 1.0f;
+ protected volatile float playSpeed = 1.0f;
/** Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
protected int width = 0;
@@ -215,7 +215,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
@Override
- public final synchronized float getPlaySpeed() {
+ public final float getPlaySpeed() {
return playSpeed;
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java b/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java
index 653a02111..a5fedce59 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java
@@ -2,39 +2,67 @@ package jogamp.opengl.util.av;
import java.util.Arrays;
-import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.SourceDataLine;
-public class JavaSoundAudioSink implements AudioSink {
+import com.jogamp.opengl.util.av.AudioSink;
- // AudioFormat parameters
- public static final int SAMPLE_RATE = 44100;
- private static final int SAMPLE_SIZE = 16;
- private static final int CHANNELS = 2;
- private static final boolean SIGNED = true;
- private static final boolean BIG_ENDIAN = false;
+/***
+ * JavaSound Audio Sink
+ * <p>
+ * FIXME: Parameterize .. all configs .. best via an init-method, passing requested
+ * audio capabilities
+ * </p>
+ */
+public class JavaSoundAudioSink implements AudioSink {
// Chunk of audio processed at one time
public static final int BUFFER_SIZE = 1000;
public static final int SAMPLES_PER_BUFFER = BUFFER_SIZE / 2;
+ private static final boolean staticAvailable;
// Sample time values
- public static final double SAMPLE_TIME_IN_SECS = 1.0 / SAMPLE_RATE;
- public static final double BUFFER_TIME_IN_SECS = SAMPLE_TIME_IN_SECS * SAMPLES_PER_BUFFER;
+ // public static final double SAMPLE_TIME_IN_SECS = 1.0 / DEFAULT_SAMPLE_RATE;
+ // public static final double BUFFER_TIME_IN_SECS = SAMPLE_TIME_IN_SECS * SAMPLES_PER_BUFFER;
- private static AudioFormat format;
- private static DataLine.Info info;
- private static SourceDataLine auline;
- private static int bufferCount;
- private static byte [] sampleData = new byte[BUFFER_SIZE];
-
- private static boolean available;
+ private javax.sound.sampled.AudioFormat format;
+ private DataLine.Info info;
+ private SourceDataLine auline;
+ private int bufferCount;
+ private byte [] sampleData = new byte[BUFFER_SIZE];
+ private boolean initialized = false;
+ private AudioDataFormat chosenFormat = null;
static {
- // Create the audio format we wish to use
- format = new AudioFormat(SAMPLE_RATE, SAMPLE_SIZE, CHANNELS, SIGNED, BIG_ENDIAN);
+ boolean ok = false;
+ try {
+ AudioSystem.getAudioFileTypes();
+ ok = true;
+ } catch (Throwable t) {
+
+ }
+ staticAvailable=ok;
+ }
+
+ @Override
+ public String toString() {
+ return "JavaSoundSink[init "+initialized+", dataLine "+info+", source "+auline+", bufferCount "+bufferCount+
+ ", chosen "+chosenFormat+", jsFormat "+format;
+ }
+
+ @Override
+ public AudioDataFormat getPreferredFormat() {
+ return DefaultFormat;
+ }
+
+ @Override
+ public AudioDataFormat initSink(AudioDataFormat requestedFormat, int bufferCount) {
+ if( !staticAvailable ) {
+ return null;
+ }
+ // Create the audio format we wish to use
+ format = new javax.sound.sampled.AudioFormat(requestedFormat.sampleRate, requestedFormat.sampleSize, requestedFormat.channelCount, requestedFormat.signed, !requestedFormat.littleEndian);
// Create dataline info object describing line format
info = new DataLine.Info(SourceDataLine.class, format);
@@ -47,33 +75,61 @@ public class JavaSoundAudioSink implements AudioSink {
auline.open(format);
auline.start();
System.out.println("JavaSound audio sink");
- available=true;
+ initialized=true;
+ chosenFormat = requestedFormat;
} catch (Exception e) {
- available=false;
+ initialized=false;
}
- }
+ return chosenFormat;
+ }
- public void writeData(byte[] sampleData, int data_size) {
- int written = 0;
- int len;
- while (data_size > 0) {
- len = auline.write(sampleData, written, data_size);
+ @Override
+ public boolean isInitialized() {
+ return initialized;
+ }
+
+ @Override
+ public void destroy() {
+ initialized = false;
+ chosenFormat = null;
+ // FIXME: complete code!
+ }
+
+ public void writeData(AudioFrame audioFrame) {
+ int data_size = audioFrame.dataSize;
+ final byte[] lala = new byte[data_size];
+ final int p = audioFrame.data.position();
+ audioFrame.data.get(lala, 0, data_size);
+ audioFrame.data.position(p);
+
+ int written = 0;
+ int len;
+ while (data_size > 0) {
+ // Nope: We don't make compromises for this crappy API !
+ len = auline.write(lala, written, data_size);
data_size -= len;
written += len;
}
}
- public int getDataAvailable() {
+ @Override
+ public int getQueuedByteCount() {
return auline.available();
}
- public boolean isDataAvailable(int data_size) {
- return auline.available()>=data_size;
+ @Override
+ public int getQueuedTime() {
+ return 0; // FIXME
}
+
@Override
- public boolean isAudioSinkAvailable() {
- return available;
+ public int getWritableBufferCount() {
+ return 1;
+ }
+
+ public boolean isDataAvailable(int data_size) {
+ return auline.available()>=data_size;
}
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java b/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java
index d92967849..cef1c3361 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java
@@ -1,23 +1,50 @@
package jogamp.opengl.util.av;
+
+import com.jogamp.opengl.util.av.AudioSink;
+
public class NullAudioSink implements AudioSink {
@Override
- public int getDataAvailable() {
- return 0;
+ public boolean isInitialized() {
+ return true;
}
@Override
- public boolean isDataAvailable(int data_size) {
- return false;
+ public AudioDataFormat getPreferredFormat() {
+ return DefaultFormat;
+ }
+
+ @Override
+ public AudioDataFormat initSink(AudioDataFormat requestedFormat, int bufferCount) {
+ return requestedFormat;
+ }
+
+ @Override
+ public void destroy() {
+ }
+
+ @Override
+ public int getQueuedByteCount() {
+ return 0;
+ }
+
+ @Override
+ public int getQueuedTime() {
+ return 0;
}
@Override
- public void writeData(byte[] sampleData, int data_size) {
+ public int getWritableBufferCount() {
+ return 1;
+ }
+
+ @Override
+ public boolean isDataAvailable(int data_size) {
+ return false;
}
@Override
- public boolean isAudioSinkAvailable() {
- return true;
+ public void writeData(AudioFrame audioFrame) {
}
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java b/src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java
new file mode 100644
index 000000000..5f5d69cf8
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java
@@ -0,0 +1,286 @@
+/**
+ * Copyright 2013 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+
+package jogamp.opengl.util.av;
+
+/**
+ * Simple synchronized ring buffer implementation.
+ * <p>
+ * Caller can choose whether to block until get / put is able to proceed or not.
+ * </p>
+ * <p>
+ * Caller can choose whether to pass an empty array and clear references at get,
+ * or using a preset array for circular access of same objects.
+ * </p>
+ * <p>
+ * Circular write position is equal to the read position if buffer is full or if buffer is empty.
+ * </p>
+ */
+public class SyncedRingbuffer<T> {
+
+ protected final Object sync = new Object();
+ protected final T[] array;
+ protected final int capacity;
+ protected int readPos;
+ protected int writePos;
+ protected int size;
+
+ public final String toString() {
+ return "SyncedRingbuffer<?>[filled "+size+" / "+capacity+", writePos "+writePos+", readPos "+readPos+"]";
+ }
+
+ /**
+ * Create instance w/ the given array and its capacity, e.g.:
+ * <pre>
+ * SyncedRingbuffer r = new SyncedRingbuffer<Integer>(new Integer[10]);
+ * </pre>
+ * <p>
+ * The array may either be clear, or preset w/ elements!
+ * </p>
+ * @param full if true, given array is assumed to be full, i.e. {@link #isFull()} will return true.
+ * @param array
+ */
+ public SyncedRingbuffer(T[] array, boolean full) {
+ this.array = array;
+ this.capacity = array.length;
+ clearImpl(false);
+ if(full) {
+ size = capacity;
+ }
+ }
+
+ public final int capacity() {
+ return capacity;
+ }
+
+ /**
+ * Resets all ring buffer pointers to zero.
+ * <p>
+ * {@link #isEmpty()} will return <code>true</code> after calling this method.
+ * </p>
+ * <p>
+ * If <code>clearRefs</code> is true, all ring buffer slots will be set to <code>null</code>.
+ * </p>
+ * @param clearRefs if true, all ring buffer slots will be flushed, otherwise they remain intact.
+ */
+ public final void clear(boolean clearRefs) {
+ synchronized ( sync ) {
+ clearImpl(clearRefs);
+ }
+ }
+
+ private final void clearImpl(boolean clearRefs) {
+ readPos = 0;
+ writePos = 0;
+ size = 0;
+ if( clearRefs ) {
+ for(int i=0; i<capacity; i++) {
+ this.array[i] = null;
+ }
+ }
+ }
+
+ /** Returns the number of elements in this ring buffer. */
+ public final int size() {
+ synchronized ( sync ) {
+ return size;
+ }
+ }
+
+ /** Returns the number of free slots available to put. */
+ public final int getFreeSlots() {
+ synchronized ( sync ) {
+ return capacity - size;
+ }
+ }
+
+ /** Returns true if this ring buffer is empty, otherwise false. */
+ public final boolean isEmpty() {
+ synchronized ( sync ) {
+ return 0 == size;
+ }
+ }
+
+ /** Returns true if this ring buffer is full, otherwise false. */
+ public final boolean isFull() {
+ synchronized ( sync ) {
+ return capacity == size;
+ }
+ }
+
+ /**
+ * Returns the oldest put element if available, otherwise null.
+ * <p>
+ * Impl. returns the element at the current read position
+ * and advances the read position - if available.
+ * </p>
+ * <p>
+ * If <code>clearRef</code> is true, the returned ring buffer slot will be set to <code>null</code>.
+ * </p>
+ * <p>
+ * Method is non-blocking and returns immediately.
+ * </p>
+ * @param clearRef if true, the returned ring buffer slot will be flushed, otherwise it remains intact.
+ * @return the oldest put element if available, otherwise null.
+ */
+ public final T get(boolean clearRef) {
+ try {
+ return getImpl(clearRef, false, false);
+ } catch (InterruptedException ie) { throw new RuntimeException(ie); }
+ }
+
+ /**
+ * Returns the oldest put element.
+ * <p>
+ * Impl. returns the element at the current read position
+ * and advances the read position.
+ * </p>
+ * <p>
+ * If <code>clearRef</code> is true, the returned ring buffer slot will be set to <code>null</code>.
+ * </p>
+ * <p>
+ * Method blocks until an element becomes available via put.
+ * </p>
+ * @param clearRef if true, the returned ring buffer slot will be flushed, otherwise it remains intact.
+ * @return the oldest put element
+ * @throws InterruptedException
+ */
+ public final T getBlocking(boolean clearRef) throws InterruptedException {
+ return getImpl(clearRef, true, false);
+ }
+
+ public final T peek() throws InterruptedException {
+ return getImpl(false, false, true);
+ }
+ public final T peekBlocking() throws InterruptedException {
+ return getImpl(false, true, true);
+ }
+
+ private final T getImpl(boolean clearRef, boolean blocking, boolean peek) throws InterruptedException {
+ synchronized ( sync ) {
+ if( 0 == size ) {
+ if( blocking ) {
+ while( 0 == size ) {
+ sync.wait();
+ }
+ } else {
+ return null;
+ }
+ }
+ final T r = array[readPos];
+ if( !peek ) {
+ if( clearRef ) {
+ array[readPos] = null;
+ }
+ readPos = (readPos + 1) % capacity;
+ size--;
+ sync.notifyAll(); // notify waiting putter
+ }
+ return r;
+ }
+ }
+
+ /**
+ * Puts the element <code>e</code> at the current write position
+ * and advances the write position.
+ * <p>
+ * Returns true if successful, otherwise false in case buffer is full.
+ * </p>
+ * <p>
+ * Method is non-blocking and returns immediately.
+ * </p>
+ */
+ public final boolean put(T e) {
+ try {
+ return putImpl(e, false, false);
+ } catch (InterruptedException ie) { throw new RuntimeException(ie); }
+ }
+
+ /**
+ * Puts the element <code>e</code> at the current write position
+ * and advances the write position.
+ * <p>
+ * Method blocks until a free slot becomes available via get.
+ * </p>
+ * @throws InterruptedException
+ */
+ public final void putBlocking(T e) throws InterruptedException {
+ if( !putImpl(e, false, true) ) {
+ throw new InternalError("Blocking put failed: "+this);
+ }
+ }
+
+ /**
+ * Keeps the element at the current write position intact
+ * and advances the write position.
+ * <p>
+ * Returns true if successful, otherwise false in case buffer is full.
+ * </p>
+ * <p>
+ * If <code>blocking</code> is true, method blocks until a free slot becomes available via get.
+ * </p>
+ * @param blocking if true, wait until a free slot becomes available via get.
+ * @throws InterruptedException
+ */
+ public final boolean putSame(boolean blocking) throws InterruptedException {
+ return putImpl(null, true, blocking);
+ }
+
+ private final boolean putImpl(T e, boolean sameRef, boolean blocking) throws InterruptedException {
+ synchronized ( sync ) {
+ if( capacity <= size ) {
+ if( blocking ) {
+ while( capacity <= size ) {
+ sync.wait();
+ }
+ } else {
+ return false;
+ }
+ }
+ if( !sameRef ) {
+ array[ writePos ] = e;
+ }
+ writePos = (writePos + 1) % capacity;
+ size++;
+ sync.notifyAll(); // notify waiting getter
+ return true;
+ }
+ }
+
+ public final void waitForFreeSlots(int count) throws InterruptedException {
+ synchronized ( sync ) {
+ if( capacity - size < count ) {
+ while( capacity - size < count ) {
+ System.err.println("XXXX AAA XXX");
+ sync.wait();
+ }
+ }
+ }
+ }
+
+}
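A minimal producer/consumer sketch of the SyncedRingbuffer above (hypothetical driver class; the capacity of 8 and the Integer payload are arbitrary), mirroring how the blocking put/get pair is meant to be used, e.g. by the FFMPEGMediaPlayer audio path:

    import jogamp.opengl.util.av.SyncedRingbuffer;

    public class RingbufferUsageSketch {
        public static void main(String[] args) throws InterruptedException {
            // Empty ring buffer with 8 slots; slot references are cleared on get.
            final SyncedRingbuffer<Integer> ring =
                new SyncedRingbuffer<Integer>(new Integer[8], false /* full */);

            final Thread producer = new Thread() {
                public void run() {
                    try {
                        for (int i = 0; i < 32; i++) {
                            ring.putBlocking(Integer.valueOf(i)); // blocks while the buffer is full
                        }
                    } catch (InterruptedException e) {
                        // ignore in this sketch
                    }
                }
            };
            producer.start();

            for (int i = 0; i < 32; i++) {
                final Integer v = ring.getBlocking(true /* clearRef */); // blocks while the buffer is empty
                System.out.println("got " + v);
            }
            producer.join();
        }
    }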
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
index 33b5b3b20..65b867ba1 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
@@ -38,20 +38,17 @@ import javax.media.opengl.GL;
import javax.media.opengl.GL2ES2;
import javax.media.opengl.GLException;
-import java.util.Arrays;
-import java.util.Queue;
-
-import com.jogamp.common.util.ReflectionUtil;
import com.jogamp.common.util.VersionNumber;
import com.jogamp.gluegen.runtime.ProcAddressTable;
import com.jogamp.opengl.util.GLPixelStorageModes;
-import com.jogamp.opengl.util.av.GLMediaPlayerFactory;
+import com.jogamp.opengl.util.av.AudioSink;
+import com.jogamp.opengl.util.av.AudioSinkFactory;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureSequence;
import jogamp.opengl.GLContextImpl;
-import jogamp.opengl.util.av.AudioSink;
import jogamp.opengl.util.av.EGLMediaPlayerImpl;
+import jogamp.opengl.util.av.SyncedRingbuffer;
/***
* Implementation utilizes <a href="http://libav.org/">Libav</a>
@@ -111,13 +108,10 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
private static final int TEMP_BUFFER_COUNT = 20;
// Instance data
- private static AudioSink audioSink;
- private static int maxAvailableAudio;
-
public static final VersionNumber avUtilVersion;
public static final VersionNumber avFormatVersion;
public static final VersionNumber avCodecVersion;
- static boolean available;
+ static final boolean available;
static {
if(FFMPEGDynamicLibraryBundleInfo.initSingleton()) {
@@ -129,24 +123,6 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
System.err.println("LIB_AV Codec : "+avCodecVersion);
initIDs0();
available = true;
- final ClassLoader cl = GLMediaPlayerFactory.class.getClassLoader();
-
- if(ReflectionUtil.isClassAvailable("com.jogamp.openal.ALFactory", cl)){
- // Only instance ALAudioSink if JOAL is found on the classpath.
- audioSink = (AudioSink) ReflectionUtil.createInstance("jogamp.opengl.openal.av.ALAudioSink", cl);
- if(!audioSink.isAudioSinkAvailable()){
- // Failed to initialize OpenAL.
- audioSink=null;
- }
- }
- if(audioSink==null) {
- audioSink = (AudioSink) ReflectionUtil.createInstance("jogamp.opengl.util.av.JavaSoundAudioSink", cl);
- if(!audioSink.isAudioSinkAvailable()) {
- audioSink = (AudioSink) ReflectionUtil.createInstance("jogamp.opengl.util.av.NullAudioSink", cl);
- }
- }
- maxAvailableAudio = audioSink.getDataAvailable();
-
} else {
avUtilVersion = null;
avFormatVersion = null;
@@ -163,6 +139,10 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
( vers >> 0 ) & 0xFF );
}
+ //
+ // Video
+ //
+
protected long moviePtr = 0;
protected long procAddrGLTexSubImage2D = 0;
protected EGLMediaPlayerImpl.EGLTextureFrame lastTex = null;
@@ -176,17 +156,29 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
protected int texWidth, texHeight; // overall (stuffing planes in one texture)
protected ByteBuffer texCopy;
+ //
+ // Audio
+ //
+
+ protected final int AudioFrameCount = 8;
+ protected final AudioSink audioSink;
+ protected final int maxAvailableAudio;
+ protected AudioSink.AudioDataFormat chosenAudioFormat;
+ protected final SyncedRingbuffer<AudioSink.AudioFrame> audioFramesBuffer = new SyncedRingbuffer<AudioSink.AudioFrame>(new AudioSink.AudioFrame[AudioFrameCount], false /* full */);
+
public FFMPEGMediaPlayer() {
super(TextureType.GL, false);
if(!available) {
throw new RuntimeException("FFMPEGMediaPlayer not available");
}
setTextureCount(1);
- moviePtr = createInstance0(true);
+ moviePtr = createInstance0(DEBUG);
if(0==moviePtr) {
throw new GLException("Couldn't create FFMPEGInstance");
}
psm = new GLPixelStorageModes();
+ audioSink = AudioSinkFactory.createDefault();
+ maxAvailableAudio = audioSink.getQueuedByteCount();
}
@Override
@@ -221,9 +213,11 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
}
final String urlS=urlConn.getURL().toExternalForm();
- System.out.println("setURL: p1 "+this);
- setStream0(moviePtr, urlS, -1, -1);
- System.out.println("setURL: p2 "+this);
+ chosenAudioFormat = audioSink.initSink(audioSink.getPreferredFormat(), AudioFrameCount);
+ System.err.println("setURL: p1 "+this);
+ setStream0(moviePtr, urlS, -1, -1, AudioFrameCount);
+ System.err.println("setURL: p2 "+this);
+
int tf, tif=GL.GL_RGBA; // texture format and internal format
switch(vBytesPerPixelPerPlane) {
case 1:
@@ -264,74 +258,103 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
}
- private class AudioFrame {
- final byte[] sampleData;
- final int data_size;
- final int audio_pts;
- AudioFrame(byte[] sampleData, int data_size, int audio_pts) {
- this.sampleData=sampleData;
- this.data_size=data_size;
- this.audio_pts=audio_pts;
+ private final void pushSound(ByteBuffer sampleData, int data_size, int audio_pts) {
+ if( audioPusher != null && audioPusher.isRunning() ) {
+ try {
+ audioFramesBuffer.putBlocking(new AudioSink.AudioFrame(sampleData, data_size, audio_pts));
+ } catch (InterruptedException e) {
+ e.printStackTrace(); // oops
+ }
+ if( null != audioPusher ) {
+ audioPusher.pushOne();
+ }
}
}
- static final Queue<AudioFrame> audioFrameBuffer = new java.util.LinkedList<AudioFrame>();
-
- private void updateSound(byte[] sampleData, int data_size, int audio_pts) {
-/*
- // Visualize incomming data
- int c=0;
- for(byte b: sampleData){
- if(b<0) {
- System.out.print(" ");
- } else if(b<64) {
- System.out.print("_");
- } else if(b < 128) {
- System.out.print("-");
- } else if(b == 128) {
- System.out.print("=");
- } else if(b < 256-64) {
- System.out.print("\"");
- } else {
- System.out.print("'");
+ class AudioPusher extends Thread {
+ volatile boolean shallStop = false;
+ volatile boolean isBlocked = false;
+
+ AudioPusher() {
+ setDaemon(true);
+ }
+ public void requestStop() {
+ shallStop = true;
+ if( isBlocked ) {
+ // interrupt();
}
-
- c++;
- if(c>=40)
- break;
}
- System.out.println("jA");
-*/
-
- //TODO reduce GC
- audioFrameBuffer.add(new AudioFrame(sampleData, data_size, audio_pts));
- pumpAudio();
- }
-
- private void pumpAudio() {
- if(audioSink.getDataAvailable()==maxAvailableAudio){
- System.out.println("warning: audio buffer underrun");
+ public boolean isRunning() { return !shallStop; }
+
+ public void run() {
+ setName(getName()+"-AudioPusher_"+AudioPusherInstanceId);
+ AudioPusherInstanceId++;
+
+ while( !shallStop ){
+ pushOne();
+ }
}
- while(audioFrameBuffer.peek()!=null){
- AudioFrame a = audioFrameBuffer.peek();
-
- // poor mans audio sync .. TODO: off thread
- final long now = System.currentTimeMillis();
- final long now_d = now - lastAudioTime;
- final long pts_d = a.audio_pts - lastAudioPTS;
- final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
-
- System.err.println("s: pts-a "+a.audio_pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
- lastAudioTime = now;
- if( (dt<audio_dt_d ) && audioSink.isDataAvailable(a.data_size)) {
- audioFrameBuffer.poll(); /* remove first item from the queue */
- audioSink.writeData(a.sampleData, a.data_size);
- lastAudioPTS=a.audio_pts;
- } else {
- break;
+ public void pushOne() {
+ final AudioSink.AudioFrame audioFrame;
+ try {
+ isBlocked = true;
+ audioFrame = audioFramesBuffer.getBlocking(true /* clearRef */);
+ } catch (InterruptedException e) {
+ if( !shallStop ) {
+ e.printStackTrace(); // oops
+ }
+ shallStop = true;
+ return;
+ }
+ isBlocked = false;
+
+ if( null != audioFrame ) {
+ // poor mans audio sync ..
+ final long now = System.currentTimeMillis();
+ final long now_d = now - lastAudioTime;
+ final long pts_d = audioFrame.audioPTS - lastAudioPTS;
+ final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
+ final boolean sleep = dt > audio_dt_d && !shallStop;
+ final long sleepP = dt - ( audio_dt_d / 2 );
+ if(DEBUG) {
+ final int qAT = audioSink.getQueuedTime();
+ System.err.println("s: pts-a "+audioFrame.audioPTS+", qAT "+qAT+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt+", sleep "+sleep+", sleepP "+sleepP+" ms");
+ }
+ if( sleep ) {
+ try {
+ isBlocked = true;
+ Thread.sleep( sleepP );
+ } catch (InterruptedException e) {
+ e.printStackTrace(); // oops
+ }
+ isBlocked = false;
+ lastAudioTime = System.currentTimeMillis();
+ } else {
+ lastAudioTime = now;
+ }
+ if( !shallStop && audioSink.isDataAvailable(audioFrame.dataSize) ) {
+ audioSink.writeData(audioFrame);
+ lastAudioPTS=audioFrame.audioPTS;
+ }
}
}
}
+
+ static int AudioPusherInstanceId = 0;
+ private AudioPusher audioPusher = null;
+
+ private final void stopAudioPusher() {
+ if( null != audioPusher ) {
+ audioPusher.requestStop();
+ audioPusher = null;
+ }
+ audioFramesBuffer.clear(true);
+ }
+ private final void startAudioPusher() {
+ stopAudioPusher();
+ audioPusher = new AudioPusher();
+ // audioPusher.start();
+ }
private void updateAttributes2(int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
int lSz0, int lSz1, int lSz2,
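
The hunk above replaces the old per-call byte[] queue with a bounded frame buffer (audioFramesBuffer) that is filled from the native decoder callback via pushSound(..) and drained by a dedicated AudioPusher daemon thread (still driven synchronously through pushOne() here, since the threaded start is commented out). The sketch below shows the same producer/consumer hand-off with a plain java.util.concurrent.ArrayBlockingQueue standing in for the new SyncedRingbuffer; the Frame type, the capacity of 8 and the writeToSink(..) hook are placeholders, not JOGL API.

    import java.nio.ByteBuffer;
    import java.util.concurrent.ArrayBlockingQueue;
    import java.util.concurrent.BlockingQueue;

    /** Hand-off sketch only: ArrayBlockingQueue stands in for the new SyncedRingbuffer. */
    class AudioHandOffSketch {
        /** Stand-in for AudioSink.AudioFrame: sample data, size in bytes, PTS in ms. */
        static final class Frame {
            final ByteBuffer data; final int size; final int pts;
            Frame(final ByteBuffer data, final int size, final int pts) {
                this.data = data; this.size = size; this.pts = pts;
            }
        }

        // Bounded buffer: the producer blocks when all slots are taken (back-pressure).
        private final BlockingQueue<Frame> frames = new ArrayBlockingQueue<Frame>(8);
        private volatile boolean shallStop = false;

        // Producer side, called once per decoded audio frame (cf. pushSound(..) above).
        void pushSound(final ByteBuffer sampleData, final int dataSize, final int audioPTS)
                throws InterruptedException {
            frames.put(new Frame(sampleData, dataSize, audioPTS));
        }

        // Consumer side: one daemon thread drains frames into the audio sink (cf. AudioPusher).
        final Thread pusher = new Thread() {
            @Override
            public void run() {
                try {
                    while (!shallStop) {
                        writeToSink(frames.take());   // blocks until a frame is queued
                    }
                } catch (final InterruptedException e) {
                    // interrupted while blocked: treat as a stop request
                }
            }
        };
        { pusher.setDaemon(true); }

        void requestStop() {
            shallStop = true;
            pusher.interrupt();   // wake the consumer if it is blocked in take()
            frames.clear();       // cf. audioFramesBuffer.clear(true) in stopAudioPusher()
        }

        void writeToSink(final Frame f) { /* hand the frame to the audio device here */ }
    }
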
@@ -448,6 +471,7 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
if(0==moviePtr) {
return false;
}
+ startAudioPusher();
return true;
}
@@ -457,6 +481,7 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
if(0==moviePtr) {
return false;
}
+ stopAudioPusher();
return true;
}
@@ -466,6 +491,7 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
if(0==moviePtr) {
return false;
}
+ stopAudioPusher();
return true;
}
@@ -475,12 +501,13 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
if(0==moviePtr) {
throw new GLException("FFMPEG native instance null");
}
+ stopAudioPusher();
int pts0 = getVideoPTS0(moviePtr);
int pts1 = seek0(moviePtr, msec);
System.err.println("Seek: "+pts0+" -> "+msec+" : "+pts1);
- audioFrameBuffer.clear();
lastAudioPTS=pts1;
lastVideoPTS=pts1;
+ startAudioPusher();
return pts1;
}
@@ -509,6 +536,12 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
tex.enable(gl);
tex.bind(gl);
+ try {
+ audioFramesBuffer.waitForFreeSlots(2);
+ } catch (InterruptedException e) {
+ e.printStackTrace(); // oops
+ }
+
/* try decode 10 packets to find one containing video
(res == 2) */
int res = 0;
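
Before decoding further packets, the texture update now blocks until at least two slots of the audio frame buffer are free (audioFramesBuffer.waitForFreeSlots(2)), so the decoder cannot race ahead of the sink and recycle frames that have not been consumed yet. A minimal semaphore-based sketch of that back-pressure, assuming a ring buffer with a fixed number of slots:

    import java.util.concurrent.Semaphore;

    /** Back-pressure sketch, assuming a ring buffer with SLOTS slots. */
    class FreeSlotSketch {
        static final int SLOTS = 8;                         // assumed capacity
        private final Semaphore freeSlots = new Semaphore(SLOTS);

        /** Block until at least n slots are free, without claiming them (cf. waitForFreeSlots(2)). */
        void waitForFreeSlots(final int n) throws InterruptedException {
            freeSlots.acquire(n);   // wait until n permits are available ...
            freeSlots.release(n);   // ... then hand them back; the later put() claims one
        }
        /** Producer claims one slot per queued frame. */
        void onPut() throws InterruptedException { freeSlots.acquire(); }
        /** Consumer frees the slot once the frame has been written to the sink. */
        void onTake() { freeSlots.release(); }
    }
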
@@ -529,27 +562,27 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
final long pts_d = pts - lastAudioPTS - 444; /* hack 444 == play video 444ms ahead of audio */
final long dt = Math.min(47, (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ) ;
//final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
- lastVideoTime = now;
- System.err.println("s: pts-v "+pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
-
- if(dt>video_dt_d && dt<1000 && audioSink.getDataAvailable()<maxAvailableAudio-10000) {
+ final boolean sleep = dt>video_dt_d && dt<1000 && audioSink.getQueuedByteCount()<maxAvailableAudio-10000;
+ final long sleepP = dt-video_dt_d;
+ if(DEBUG) {
+ final int qAT = audioSink.getQueuedTime();
+ System.err.println("s: pts-v "+pts+", qAT "+qAT+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt+", sleep "+sleep+", sleepP "+sleepP+" ms");
+ }
+ // ?? Maybe use audioSink.getQueuedTime();
+ if( sleep ) {
try {
- Thread.sleep(dt-video_dt_d);
+ Thread.sleep(sleepP);
} catch (InterruptedException e) { }
- } /* else if(0>pts_d) {
- System.err.println("s: pts-v "+pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
- } */
+ lastVideoTime = System.currentTimeMillis();
+ } else {
+ lastVideoTime = now;
+ }
}
- pumpAudio();
lastVideoPTS = pts;
}
return lastTex;
}
- private void consumeAudio(int len) {
-
- }
-
private static native int getAvUtilVersion0();
private static native int getAvFormatVersion0();
private static native int getAvCodecVersion0();
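
Both the AudioPusher above and this video path share the same software clock: compare how far the stream PTS advanced against how much wall-clock time elapsed, scale by the play speed, and sleep if the result exceeds the per-frame threshold. A worked sketch of the video-side decision; the 444 ms audio look-ahead is taken from the hunk above, while videoFrameDurationMs stands in for video_dt_d, and the queued-bytes guard and the 47 ms clamp are omitted for brevity:

    /** Sketch of the "poor man's" A/V sync decision used in the video path above. */
    class SyncSketch {
        static final long VIDEO_AHEAD_MS = 444;   // video is scheduled ~444 ms ahead of audio
        long lastVideoTime;                       // wall clock of the previous video frame, ms
        long lastAudioPTS;                        // PTS of the last audio frame written to the sink, ms

        /** Returns how long to sleep before showing a video frame with the given PTS, or 0. */
        long videoSleepMillis(final int videoPTS, final float playSpeed, final long videoFrameDurationMs) {
            final long now   = System.currentTimeMillis();
            final long now_d = now - lastVideoTime;                       // elapsed wall-clock time
            final long pts_d = videoPTS - lastAudioPTS - VIDEO_AHEAD_MS;  // elapsed stream time
            final long dt    = (long) ((pts_d - now_d) / playSpeed);      // how far ahead we are
            return (dt > videoFrameDurationMs && dt < 1000) ? dt - videoFrameDurationMs : 0;
        }
    }
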
@@ -557,7 +590,7 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
private native long createInstance0(boolean verbose);
private native void destroyInstance0(long moviePtr);
- private native void setStream0(long moviePtr, String url, int vid, int aid);
+ private native void setStream0(long moviePtr, String url, int vid, int aid, int audioFrameCount);
private native int getVideoPTS0(long moviePtr);
diff --git a/src/jogl/native/libav/ffmpeg_tool.h b/src/jogl/native/libav/ffmpeg_tool.h
index 3181a8a8f..5560b8617 100644
--- a/src/jogl/native/libav/ffmpeg_tool.h
+++ b/src/jogl/native/libav/ffmpeg_tool.h
@@ -82,7 +82,9 @@ typedef struct {
AVStream* pAStream;
AVCodecContext* pACodecCtx;
AVCodec* pACodec;
- AVFrame* pAFrame;
+ AVFrame** pAFrames;
+ int32_t aFrameCount;
+ int32_t aFrameCurrent;
int32_t aSampleRate;
int32_t aChannels;
int32_t aFrameSize;
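
The single pAFrame becomes a small pool of AVFrames plus a round-robin index, presumably so that a frame whose samples have just been handed to Java (as a direct ByteBuffer, see the .c changes below) is not immediately overwritten by the next decode call. The same structure in a Java sketch, with byte[] slots standing in for the native frames:

    /** Round-robin pool sketch; byte[] slots stand in for the native AVFrames. */
    class FramePoolSketch {
        private final byte[][] slots;   // one decode target per slot, cf. pAV->pAFrames
        private int current = 0;        // cf. pAV->aFrameCurrent

        FramePoolSketch(final int frameCount, final int frameBytes) {
            slots = new byte[frameCount][frameBytes];   // cf. sp_avcodec_alloc_frame() per slot
        }
        /** Returns the next slot to decode into and advances the index round-robin. */
        byte[] next() {
            final byte[] s = slots[current];
            current = (current + 1) % slots.length;
            return s;
        }
    }
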
diff --git a/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c b/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c
index d93caa30b..623bdcac7 100644
--- a/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c
+++ b/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c
@@ -38,7 +38,7 @@ typedef void (APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLi
static const char * const ClazzNameFFMPEGMediaPlayer = "jogamp/opengl/util/av/impl/FFMPEGMediaPlayer";
static jclass ffmpegMediaPlayerClazz = NULL;
-static jmethodID jni_mid_updateSound = NULL;
+static jmethodID jni_mid_pushSound = NULL;
static jmethodID jni_mid_updateAttributes1 = NULL;
static jmethodID jni_mid_updateAttributes2 = NULL;
@@ -192,41 +192,6 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGDynamicLibraryB
return JNI_TRUE;
}
-static void _updateSound(JNIEnv *env, jobject instance, int8_t *data, int32_t data_size, int32_t aPTS) {
- if(NULL!=env) {
- jbyteArray jbArray = (*env)->NewByteArray(env, data_size);
- if (jbArray == NULL) {
- fprintf(stderr, "FFMPEGMediaPlayer out of memory at native code _updateSound");
- return; /* out of memory error thrown */
- }
-
-/*
- // Visualize sample waveform
- int i;
- for(i=0;i<40;i++){
- int8_t b = data[i];
- if(b<0) {
- printf(" ");
- } else if(b<64) {
- printf("_");
- } else if(b < 128) {
- printf("-");
- } else if(b == 128) {
- printf("=");
- } else if(b < 256-64) {
- printf("\"");
- } else {
- printf("'");
- }
- }
- printf("nA\n");
-*/
-
- (*env)->SetByteArrayRegion(env, jbArray, 0, data_size, data);
- (*env)->CallVoidMethod(env, instance, jni_mid_updateSound, jbArray, data_size, aPTS);
- }
-}
-
static void _updateJavaAttributes(JNIEnv *env, jobject instance, FFMPEGToolBasicAV_t* pAV)
{
// int shallBeDetached = 0;
@@ -277,9 +242,12 @@ static void freeInstance(FFMPEGToolBasicAV_t* pAV) {
sp_av_free(pAV->pVFrame);
pAV->pVFrame = NULL;
}
- if(NULL != pAV->pAFrame) {
- sp_av_free(pAV->pAFrame);
- pAV->pAFrame = NULL;
+ if(NULL != pAV->pAFrames) {
+ for(i=0; i<pAV->aFrameCount; i++) {
+ sp_av_free(pAV->pAFrames[i]);
+ }
+ free(pAV->pAFrames);
+ pAV->pAFrames = NULL;
}
// Close the video file
@@ -370,11 +338,11 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_ini
JoglCommon_FatalError(env, "JOGL FFMPEG: can't use %s", ClazzNameFFMPEGMediaPlayer);
}
- jni_mid_updateSound = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateSound", "([BII)V");
+ jni_mid_pushSound = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "pushSound", "(Ljava/nio/ByteBuffer;II)V");
jni_mid_updateAttributes1 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes", "(IIIIIFIILjava/lang/String;Ljava/lang/String;)V");
jni_mid_updateAttributes2 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes2", "(IIIIIIIIII)V");
- if(jni_mid_updateSound == NULL ||
+ if(jni_mid_pushSound == NULL ||
jni_mid_updateAttributes1 == NULL ||
jni_mid_updateAttributes2 == NULL) {
return JNI_FALSE;
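
The method-ID lookup now binds to the new Java callback. For reference, the JNI descriptor "(Ljava/nio/ByteBuffer;II)V" resolves to a void method taking a java.nio.ByteBuffer and two ints, i.e. a shape like the sketch below; the real target is the pushSound(..) callback shown in the Java hunk above.

    import java.nio.ByteBuffer;

    /** The descriptor "(Ljava/nio/ByteBuffer;II)V" binds to a method of this shape. */
    class PushSoundSignatureSketch {
        // L<class>; = object parameter (java.nio.ByteBuffer), I = int, trailing V = void return.
        void pushSound(final ByteBuffer sampleData, final int dataSize, final int audioPTS) { /* ... */ }
    }
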
@@ -415,7 +383,7 @@ JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_destroy
}
JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setStream0
- (JNIEnv *env, jobject instance, jlong ptr, jstring jURL, jint vid, jint aid)
+ (JNIEnv *env, jobject instance, jlong ptr, jstring jURL, jint vid, jint aid, jint audioFrameCount)
{
int res, i;
jboolean iscopy;
@@ -525,11 +493,17 @@ JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setStre
pAV->aChannels = pAV->pACodecCtx->channels;
pAV->aFrameSize = pAV->pACodecCtx->frame_size;
pAV->aSampleFmt = pAV->pACodecCtx->sample_fmt;
- pAV->pAFrame=sp_avcodec_alloc_frame();
- if(pAV->pAFrame==NULL) {
- JoglCommon_throwNewRuntimeException(env, "Couldn't alloc audio frame");
- return;
+
+ pAV->aFrameCount = audioFrameCount;
+ pAV->pAFrames = calloc(audioFrameCount, sizeof(AVFrame*));
+ for(i=0; i<pAV->aFrameCount; i++) {
+ pAV->pAFrames[i]=sp_avcodec_alloc_frame();
+ if(pAV->pAFrames[i]==NULL) {
+ JoglCommon_throwNewRuntimeException(env, "Couldn't alloc audio frame %d / %d", i, audioFrameCount);
+ return;
+ }
}
+ pAV->aFrameCurrent = 0;
}
if(0<=pAV->vid) {
@@ -622,11 +596,12 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
if(sp_av_read_frame(pAV->pFormatCtx, &packet)>=0) {
if(packet.stream_index==pAV->aid) {
// Decode audio frame
- if(NULL == pAV->pAFrame) {
+ if(NULL == pAV->pAFrames) { // no audio registered
sp_av_free_packet(&packet);
return res;
}
-
+ AVFrame* pAFrameCurrent = pAV->pAFrames[pAV->aFrameCurrent];
+ pAV->aFrameCurrent = ( pAV->aFrameCurrent + 1 ) % pAV->aFrameCount ;
int new_packet = 1;
int len1;
int flush_complete = 0;
@@ -636,7 +611,7 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
break;
}
if(HAS_FUNC(sp_avcodec_decode_audio4)) {
- len1 = sp_avcodec_decode_audio4(pAV->pACodecCtx, pAV->pAFrame, &frameFinished, &packet);
+ len1 = sp_avcodec_decode_audio4(pAV->pACodecCtx, pAFrameCurrent, &frameFinished, &packet);
} else {
#if 0
len1 = sp_avcodec_decode_audio3(pAV->pACodecCtx, int16_t *samples, int *frame_size_ptr, &frameFinished, &packet);
@@ -664,11 +639,11 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
if(HAS_FUNC(sp_av_samples_get_buffer_size)) {
data_size = sp_av_samples_get_buffer_size(NULL /* linesize, may be NULL */,
pAV->aChannels,
- pAV->pAFrame->nb_samples,
- pAV->pAFrame->format,
+ pAFrameCurrent->nb_samples,
+ pAFrameCurrent->format,
1 /* align */);
}
- int32_t pts = (int64_t) ( pAV->pAFrame->pkt_pts * (int64_t) 1000 * (int64_t) pAV->pAStream->time_base.num )
+ int32_t pts = (int64_t) ( pAFrameCurrent->pkt_pts * (int64_t) 1000 * (int64_t) pAV->pAStream->time_base.num )
/ (int64_t) pAV->pAStream->time_base.den;
#if 0
printf("channels %d sample_rate %d \n", pAV->aChannels , pAV->aSampleRate);
@@ -679,10 +654,10 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
if( pAV->verbose ) {
printf("A pts %d - %d\n", pts, pAV->aPTS);
}
- // TODO: Wrap audio buffer data in a com.jogamp.openal.sound3d.Buffer or similar
- // and hand it over to the user using a suitable API.
- // TODO: OR send the audio buffer data down to sound card directly using JOAL.
- _updateSound(env, instance, pAV->pAFrame->data[0], data_size, pAV->aPTS);
+ if( NULL != env ) {
+ jobject jSampleData = (*env)->NewDirectByteBuffer(env, pAFrameCurrent->data[0], data_size);
+ (*env)->CallVoidMethod(env, instance, jni_mid_pushSound, jSampleData, data_size, pAV->aPTS);
+ }
res = 1;
}
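
Instead of allocating and filling a fresh byte[] per audio frame, the native side now wraps the decoded samples in a direct ByteBuffer (NewDirectByteBuffer over pAFrameCurrent->data[0]) and passes it straight to pushSound(..). Such a buffer aliases decoder-owned memory and stays valid only until that AVFrame slot is reused, which is what the frame pool and the waitForFreeSlots(2) back-pressure guard against. A hedged Java-side sketch of how a consumer should treat such a buffer; the names are illustrative:

    import java.nio.ByteBuffer;

    /** Sketch of consuming a direct ByteBuffer that wraps decoder-owned native memory. */
    class DirectBufferConsumerSketch {
        /** Called from native code with a buffer aliasing the current AVFrame's data[0]. */
        void pushSound(final ByteBuffer sampleData, final int dataSize, final int audioPTS) {
            // The buffer is only valid until the decoder reuses this frame slot, so either
            // queue it and guarantee the slot is not recycled before it is consumed
            // (what the frame ring buffer above does), or copy the samples out:
            final byte[] copy = new byte[dataSize];
            sampleData.get(copy, 0, dataSize);
            // ... hand 'copy' (or the still-valid direct buffer) to the audio sink.
        }
    }
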
diff --git a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieCube.java b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieCube.java
index ad096c7a6..921710fed 100644
--- a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieCube.java
+++ b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieCube.java
@@ -138,7 +138,7 @@ public class MovieCube implements GLEventListener, GLMediaEventListener {
GL2ES2 gl = drawable.getGL().getGL2ES2();
System.err.println(JoglVersion.getGLInfo(gl, null));
- mPlayer = GLMediaPlayerFactory.create();
+ mPlayer = GLMediaPlayerFactory.createDefault();
mPlayer.addEventListener(this);
cube = new TextureSequenceCubeES2(mPlayer, false, zoom0, rotx, roty);
diff --git a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSimple.java b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSimple.java
index 5bf3145d0..7e0dcd909 100644
--- a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSimple.java
+++ b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSimple.java
@@ -151,7 +151,7 @@ public class MovieSimple implements GLEventListener, GLMediaEventListener {
mPlayerScaleOrig = false;
mPlayerShared = false;
mPlayerExternal = false;
- mPlayer = GLMediaPlayerFactory.create();
+ mPlayer = GLMediaPlayerFactory.createDefault();
mPlayer.addEventListener(this);
this.stream = stream;
System.out.println("pC.1 "+mPlayer);