summaryrefslogtreecommitdiffstats
path: root/src/jogl/classes/jogamp
diff options
context:
space:
mode:
Diffstat (limited to 'src/jogl/classes/jogamp')
-rw-r--r--src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java10
-rw-r--r--src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java484
-rw-r--r--src/jogl/classes/jogamp/opengl/util/av/AudioSink.java13
-rw-r--r--src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java4
-rw-r--r--src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java120
-rw-r--r--src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java41
-rw-r--r--src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java286
-rw-r--r--src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java245
8 files changed, 926 insertions, 277 deletions
diff --git a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
index 23f9161d4..8356a2bae 100644
--- a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
+++ b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
@@ -32,6 +32,8 @@ import java.io.IOException;
import javax.media.opengl.GL;
import javax.media.opengl.GLES2;
+import com.jogamp.common.os.AndroidVersion;
+import com.jogamp.common.os.Platform;
import com.jogamp.opengl.util.texture.TextureSequence;
import jogamp.common.os.android.StaticContext;
@@ -60,7 +62,13 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
static final boolean available;
static {
- available = true; // default .. TODO: May restrict availability ?
+ boolean _avail = false;
+ if(Platform.OS_TYPE.equals(Platform.OSType.ANDROID)) {
+ if(AndroidVersion.SDK_INT >= 14) {
+ _avail = true;
+ }
+ }
+ available = _avail;
}
public static final boolean isAvailable() { return available; }
diff --git a/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java b/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
index 690948c5a..87c7b937a 100644
--- a/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
+++ b/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
@@ -1,176 +1,428 @@
+/**
+ * Copyright 2013 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
package jogamp.opengl.openal.av;
-import java.nio.Buffer;
-import java.nio.ByteBuffer;
-import java.lang.InterruptedException;
+import jogamp.opengl.util.av.SyncedRingbuffer;
-import jogamp.opengl.util.av.AudioSink;
-
-import com.jogamp.common.nio.Buffers;
-import com.jogamp.openal.*;
+import com.jogamp.openal.AL;
+import com.jogamp.openal.ALC;
+import com.jogamp.openal.ALCcontext;
+import com.jogamp.openal.ALCdevice;
+import com.jogamp.openal.ALFactory;
+import com.jogamp.opengl.util.av.AudioSink;
+/***
+ * OpenAL Audio Sink
+ */
public class ALAudioSink implements AudioSink {
- static ALC alc;
- static AL al;
- static ALCdevice device;
- static ALCcontext context;
-
- // AudioFormat parameters
- public static final int SAMPLE_RATE = 44100;
-
- // Chunk of audio processed at one time
- public static final int BUFFER_SIZE = 1000;
+ /** Chunk of audio processed at one time. FIXME: Parameterize .. */
+ public static final int BUFFER_SIZE = 4096;
public static final int SAMPLES_PER_BUFFER = BUFFER_SIZE / 2;
- // Sample time values
- public static final double SAMPLE_TIME_IN_SECS = 1.0 / SAMPLE_RATE;
- public static final double BUFFER_TIME_IN_SECS = SAMPLE_TIME_IN_SECS * SAMPLES_PER_BUFFER;
+ private static final ALC alc;
+ private static final AL al;
+ private static final boolean staticAvailable;
- private static int NUM_BUFFERS = 5;
- private static int bufferNumber = 0;
- private static int[] buffers = new int[NUM_BUFFERS];
- private static int[] source = new int[1];
- private static boolean initBuffer = true;
- private static int frequency = 44100;
- private static int format = AL.AL_FORMAT_STEREO16;
+ private String deviceSpecifier;
+ private ALCdevice device;
+ private ALCcontext context;
+
+ /** Sample period in seconds */
+ public float samplePeriod;
+ /** Buffer period in seconds */
+ public float bufferPeriod;
- private static boolean available = false;
+ static class ActiveBuffer {
+ ActiveBuffer(Integer name, int size) {
+ this.name = name;
+ this.size = size;
+ }
+ public final Integer name;
+ public final int size;
+ public String toString() { return "ABuffer[name "+name+", size "+size+"]"; }
+ }
+ int[] alBuffers = null;
+ private SyncedRingbuffer<Integer> alBufferAvail = null;
+ private SyncedRingbuffer<ActiveBuffer> alBufferPlaying = null;
+ private int alBufferBytesQueued = 0;
+
+ private int[] alSource = null;
+ private AudioDataFormat chosenFormat;
+ private int alFormat;
+ private boolean initialized;
+
static {
-
- boolean joalFound = false;
+ ALC _alc = null;
+ AL _al = null;
try {
- Class.forName("com.jogamp.openal.ALFactory");
- joalFound = true;
- } catch(ClassNotFoundException e){
- // Joal not found on classpath
- }
-
- if(joalFound) {
+ _alc = ALFactory.getALC();
+ _al = ALFactory.getAL();
+ } catch(Throwable t) {
+ if( DEBUG ) {
+ System.err.println("ALAudioSink: Catched "+t.getClass().getName()+": "+t.getMessage());
+ t.printStackTrace();
+ }
+ }
+ alc = _alc;
+ al = _al;
+ staticAvailable = null != alc && null != al;
+ }
+
+ public ALAudioSink() {
+ initialized = false;
+ chosenFormat = null;
- alc = ALFactory.getALC();
- String deviceSpecifier;
-
+ if( !staticAvailable ) {
+ return;
+ }
+
+ try {
// Get handle to default device.
device = alc.alcOpenDevice(null);
if (device == null) {
- throw new ALException("Error opening default OpenAL device");
+ throw new RuntimeException("ALAudioSink: Error opening default OpenAL device");
}
// Get the device specifier.
deviceSpecifier = alc.alcGetString(device, ALC.ALC_DEVICE_SPECIFIER);
if (deviceSpecifier == null) {
- throw new ALException("Error getting specifier for default OpenAL device");
+ throw new RuntimeException("ALAudioSink: Error getting specifier for default OpenAL device");
}
// Create audio context.
context = alc.alcCreateContext(device, null);
if (context == null) {
- throw new ALException("Error creating OpenAL context");
+ throw new RuntimeException("ALAudioSink: Error creating OpenAL context");
}
// Set active context.
alc.alcMakeContextCurrent(context);
// Check for an error.
- if (alc.alcGetError(device) != ALC.ALC_NO_ERROR) {
- throw new ALException("Error making OpenAL context current");
+ if ( alc.alcGetError(device) != ALC.ALC_NO_ERROR ) {
+ throw new RuntimeException("ALAudioSink: Error making OpenAL context current");
}
- al = ALFactory.getAL();
-
- // Allocate buffers
- al.alGenBuffers(NUM_BUFFERS, buffers, 0);
- al.alGenSources(1, source, 0);
+ // Create source
+ {
+ alSource = new int[1];
+ al.alGenSources(1, alSource, 0);
+ final int err = al.alGetError();
+ if( err != AL.AL_NO_ERROR ) {
+ alSource = null;
+ throw new RuntimeException("ALAudioSink: Error generating Source: 0x"+Integer.toHexString(err));
+ }
+ }
- if(al.alGetError() != AL.AL_NO_ERROR) {
- throw new ALException("Error generating :(");
- }
-
- System.out.println("OpenAL audio sink using device: " + deviceSpecifier);
- available = true;
+ if( DEBUG ) {
+ System.err.println("ALAudioSink: Using device: " + deviceSpecifier);
+ }
+ initialized = true;
+ return;
+ } catch ( Exception e ) {
+ if( DEBUG ) {
+ System.err.println(e.getMessage());
+ }
+ destroy();
}
}
@Override
- public boolean isDataAvailable(int data_size) {
- return true;
+ public String toString() {
+ final int alSrcName = null != alSource ? alSource[0] : 0;
+ final int alBuffersLen = null != alBuffers ? alBuffers.length : 0;
+ return "ALAudioSink[init "+initialized+", device "+deviceSpecifier+", ctx "+context+", alSource "+alSrcName+
+ ", chosen "+chosenFormat+", alFormat "+toHexString(alFormat)+
+ ", buffers[total "+alBuffersLen+", avail "+alBufferAvail.size()+", "+alBufferPlaying.getFreeSlots()+
+ ", queued[bufferCount "+alBufferPlaying.size()+", "+getQueuedTime() + " ms, " + alBufferBytesQueued+" bytes]";
}
@Override
- public void writeData(byte[] sampleData, int data_size) {
- // OpenAL consumes buffers in the background
- // we first need to initialize the OpenAL buffers then
- // start continous playback.
- alc.alcMakeContextCurrent(context);
- if(initBuffer) {
-
- ByteBuffer data = Buffers.newDirectByteBuffer(sampleData);
- al.alBufferData(buffers[bufferNumber], format, data, data_size, frequency);
- int error = al.alGetError();
- if(error != AL.AL_NO_ERROR) {
- System.out.println("bufferNumber"+bufferNumber+" Data "+sampleData+" size"+data_size);
- throw new ALException("Error loading :( error code: " + error);
+ public AudioDataFormat getPreferredFormat() {
+ return DefaultFormat;
+ }
+
+ @Override
+ public AudioDataFormat initSink(AudioDataFormat requestedFormat, int bufferCount) {
+ if( !staticAvailable ) {
+ return null;
+ }
+ samplePeriod = 1.0f / requestedFormat.sampleRate;
+ bufferPeriod = samplePeriod * SAMPLES_PER_BUFFER;
+ switch( requestedFormat.channelCount ) {
+ case 1: {
+ switch ( requestedFormat.sampleSize ) {
+ case 8:
+ alFormat = AL.AL_FORMAT_MONO8; break;
+ case 16:
+ alFormat = AL.AL_FORMAT_MONO16; break;
+ default:
+ return null;
+ }
+ } break;
+ case 2:
+ switch ( requestedFormat.sampleSize ) {
+ case 8:
+ alFormat = AL.AL_FORMAT_STEREO8; break;
+ case 16:
+ alFormat = AL.AL_FORMAT_STEREO16; break;
+ default:
+ return null;
+ }
+ }
+ // Allocate buffers
+ destroyBuffers();
+ {
+ alBuffers = new int[bufferCount];
+ al.alGenBuffers(bufferCount, alBuffers, 0);
+ final int err = al.alGetError();
+ if( err != AL.AL_NO_ERROR ) {
+ alBuffers = null;
+ throw new RuntimeException("ALAudioSink: Error generating Buffers: 0x"+Integer.toHexString(err));
}
-
- if(bufferNumber==NUM_BUFFERS-1){
- // all buffers queued
- al.alSourceQueueBuffers(source[0], NUM_BUFFERS, buffers, 0);
- // start playback
- al.alSourcePlay(source[0]);
- if(al.alGetError() != AL.AL_NO_ERROR) {
- throw new ALException("Error starting :(");
+ final Integer[] alBufferRingArray = new Integer[bufferCount];
+ for(int i=0; i<bufferCount; i++) {
+ alBufferRingArray[i] = Integer.valueOf(alBuffers[i]);
+ }
+ alBufferAvail = new SyncedRingbuffer<Integer>(alBufferRingArray, true /* full */);
+ alBufferPlaying = new SyncedRingbuffer<ActiveBuffer>(new ActiveBuffer[bufferCount], false /* full */);
+ }
+
+
+ chosenFormat = requestedFormat;
+ return chosenFormat;
+ }
+
+ private void destroyBuffers() {
+ if( !staticAvailable ) {
+ return;
+ }
+ if( null != alBuffers ) {
+ try {
+ al.alDeleteBuffers(alBufferAvail.capacity(), alBuffers, 0);
+ } catch (Throwable t) {
+ if( DEBUG ) {
+ System.err.println("Catched "+t.getClass().getName()+": "+t.getMessage());
+ t.printStackTrace();
}
- initBuffer=false;
}
-
- // update buffer number to fill
- bufferNumber=(bufferNumber+1)%NUM_BUFFERS;
- } else {
- // OpenAL is playing in the background.
- // one new frame with audio data is ready
-
- // first wait for openal to release one buffer
- int[] buffer=new int[1];
- int[] val=new int[1];
- do {
- al.alGetSourcei(source[0], AL.AL_BUFFERS_PROCESSED, val, 0);
- if(val[0] <= 0){
- try {
- Thread.sleep(1);
- } catch (InterruptedException e){
- }
+ alBufferAvail.clear(true);
+ alBufferAvail = null;
+ alBufferPlaying.clear(true);
+ alBufferPlaying = null;
+ alBufferBytesQueued = 0;
+ alBuffers = null;
+ }
+ }
+
+ @Override
+ public void destroy() {
+ initialized = false;
+ if( !staticAvailable ) {
+ return;
+ }
+ if( null != alSource ) {
+ try {
+ al.alDeleteSources(1, alSource, 0);
+ } catch (Throwable t) {
+ if( DEBUG ) {
+ System.err.println("Catched "+t.getClass().getName()+": "+t.getMessage());
+ t.printStackTrace();
}
- } while (val[0] <= 0);
-
- // fill and requeue the empty buffer
- al.alSourceUnqueueBuffers(source[0], 1, buffer , 0);
- Buffer data = Buffers.newDirectByteBuffer(sampleData);
- al.alBufferData(buffer[0], format, data, data_size, frequency);
- al.alSourceQueueBuffers(source[0], 1, buffer, 0);
- if(al.alGetError() != AL.AL_NO_ERROR) {
- throw new ALException("Error buffering :(");
}
+ alSource = null;
+ }
+
+ destroyBuffers();
- // Restart openal playback if needed
- al.alGetSourcei(source[0], AL.AL_SOURCE_STATE, val, 0);
- if(val[0] != al.AL_PLAYING) {
- al.alSourcePlay(source[0]);
+ if( null != context ) {
+ try {
+ alc.alcDestroyContext(context);
+ } catch (Throwable t) {
+ if( DEBUG ) {
+ System.err.println("Catched "+t.getClass().getName()+": "+t.getMessage());
+ t.printStackTrace();
+ }
+ }
+ context = null;
+ }
+ if( null != device ) {
+ try {
+ alc.alcCloseDevice(device);
+ } catch (Throwable t) {
+ if( DEBUG ) {
+ System.err.println("Catched "+t.getClass().getName()+": "+t.getMessage());
+ t.printStackTrace();
+ }
}
+ device = null;
}
+ chosenFormat = null;
+ }
+
+ @Override
+ public boolean isInitialized() {
+ return initialized;
}
+
+ private final void dequeueBuffer(boolean wait) {
+ int alErr = AL.AL_NO_ERROR;
+ final int[] val=new int[1];
+ do {
+ al.alGetSourcei(alSource[0], AL.AL_BUFFERS_PROCESSED, val, 0);
+ alErr = al.alGetError();
+ if( AL.AL_NO_ERROR != alErr ) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while quering processed buffers at source. "+this);
+ }
+ if( wait && val[0] <= 0 ) {
+ try {
+ Thread.sleep(1);
+ } catch (InterruptedException e){
+ }
+ }
+ } while (val[0] <= 0);
+ final int processedBuffers = val[0];
+ if( processedBuffers > 0 ) {
+ int[] buffers=new int[processedBuffers];
+ al.alSourceUnqueueBuffers(alSource[0], processedBuffers, buffers, 0);
+ alErr = al.alGetError();
+ if( AL.AL_NO_ERROR != alErr ) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while dequeueing "+processedBuffers+" processed buffers. "+this);
+ }
+ for ( int i=0; i<processedBuffers; i++ ) {
+ final ActiveBuffer releasedBuffer = alBufferPlaying.get(true /* clearRef */);
+ if( null == releasedBuffer ) {
+ throw new InternalError("Internal Error: "+this);
+ }
+ if( releasedBuffer.name.intValue() != buffers[i] ) {
+ throw new InternalError("Buffer name mismatch: dequeued: "+buffers[i]+", released "+releasedBuffer);
+ // System.err.println("XXX ["+i+"]: dequeued: "+buffers[i]+", released "+releasedBuffer);
+ }
+ alBufferBytesQueued -= releasedBuffer.size;
+ if( !alBufferAvail.put(releasedBuffer.name) ) {
+ throw new InternalError("Internal Error: "+this);
+ }
+ if( DEBUG ) {
+ System.err.println("ALAudioSink: Dequeued "+processedBuffers+", wait "+wait+", "+this);
+ }
+ }
+ }
+ }
+
+ private static final String toHexString(int v) {
+ return "0x"+Integer.toHexString(v);
+ }
+
@Override
- public int getDataAvailable() {
- int[] val=new int[1];
- al.alGetSourcei(source[0], AL.AL_BUFFERS_PROCESSED, val, 0);
- return (NUM_BUFFERS-val[0])*4096;
+ public void writeData(AudioFrame audioFrame) {
+ if( !initialized || null == chosenFormat ) {
+ return;
+ }
+ int alErr = AL.AL_NO_ERROR;
+
+ // OpenAL consumes buffers in the background
+ // we first need to initialize the OpenAL buffers then
+ // start continuous playback.
+ alc.alcMakeContextCurrent(context);
+ alErr = al.alGetError();
+ if(al.alGetError() != AL.AL_NO_ERROR) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while makeCurrent. "+this);
+ }
+
+ if( alBufferAvail.isEmpty() ) {
+ dequeueBuffer(true);
+ }
+
+ final Integer alBufferName = alBufferAvail.get(true /* clearRef */);
+ if( null == alBufferName ) {
+ throw new InternalError("Internal Error: "+this);
+ }
+ if( !alBufferPlaying.put( new ActiveBuffer(alBufferName, audioFrame.dataSize) ) ) {
+ throw new InternalError("Internal Error: "+this);
+ }
+ al.alBufferData(alBufferName.intValue(), alFormat, audioFrame.data, audioFrame.dataSize, chosenFormat.sampleRate);
+ final int[] alBufferNames = new int[] { alBufferName.intValue() };
+ al.alSourceQueueBuffers(alSource[0], 1, alBufferNames, 0);
+ alErr = al.alGetError();
+ if(al.alGetError() != AL.AL_NO_ERROR) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while queueing buffer "+toHexString(alBufferNames[0])+". "+this);
+ }
+ alBufferBytesQueued += audioFrame.dataSize;
+
+ // Restart openal playback if needed
+ {
+ int[] val = new int[1];
+ al.alGetSourcei(alSource[0], AL.AL_SOURCE_STATE, val, 0);
+ if(val[0] != AL.AL_PLAYING) {
+ if( DEBUG ) {
+ System.err.println("ALAudioSink: Start playing: "+this);
+ }
+ al.alSourcePlay(alSource[0]);
+ alErr = al.alGetError();
+ if(al.alGetError() != AL.AL_NO_ERROR) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while start playing. "+this);
+ }
+ }
+ }
}
@Override
- public boolean isAudioSinkAvailable() {
- return available;
+ public int getQueuedByteCount() {
+ if( !initialized || null == chosenFormat ) {
+ return 0;
+ }
+ return alBufferBytesQueued;
+ }
+
+ @Override
+ public int getQueuedTime() {
+ if( !initialized || null == chosenFormat ) {
+ return 0;
+ }
+ final int bps = chosenFormat.sampleSize / 8;
+ return alBufferBytesQueued / ( chosenFormat.channelCount * bps * ( chosenFormat.sampleRate / 1000 ) );
}
+
+ @Override
+ public int getWritableBufferCount() {
+ if( !initialized || null == chosenFormat ) {
+ return 0;
+ }
+ return alBufferPlaying.getFreeSlots();
+ }
+
+ @Override
+ public boolean isDataAvailable(int data_size) {
+ return initialized && null != chosenFormat;
+ }
+
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/AudioSink.java b/src/jogl/classes/jogamp/opengl/util/av/AudioSink.java
deleted file mode 100644
index 504e4b2db..000000000
--- a/src/jogl/classes/jogamp/opengl/util/av/AudioSink.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package jogamp.opengl.util.av;
-
-public interface AudioSink {
-
- boolean isAudioSinkAvailable();
-
- int getDataAvailable();
-
- boolean isDataAvailable(int data_size);
-
- void writeData(byte[] sampleData, int data_size);
-
-}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
index 27c926704..2ff91a3f6 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
@@ -72,7 +72,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected URLConnection urlConn = null;
- protected float playSpeed = 1.0f;
+ protected volatile float playSpeed = 1.0f;
/** Shall be set by the {@link #initGLStreamImpl(GL, int[])} method implementation. */
protected int width = 0;
@@ -215,7 +215,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
@Override
- public final synchronized float getPlaySpeed() {
+ public final float getPlaySpeed() {
return playSpeed;
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java b/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java
index 653a02111..a5fedce59 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java
@@ -2,39 +2,67 @@ package jogamp.opengl.util.av;
import java.util.Arrays;
-import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.SourceDataLine;
-public class JavaSoundAudioSink implements AudioSink {
+import com.jogamp.opengl.util.av.AudioSink;
- // AudioFormat parameters
- public static final int SAMPLE_RATE = 44100;
- private static final int SAMPLE_SIZE = 16;
- private static final int CHANNELS = 2;
- private static final boolean SIGNED = true;
- private static final boolean BIG_ENDIAN = false;
+/***
+ * JavaSound Audio Sink
+ * <p>
+ * FIXME: Parameterize .. all configs .. best via an init-method, passing requested
+ * audio capabilities
+ * </p>
+ */
+public class JavaSoundAudioSink implements AudioSink {
// Chunk of audio processed at one time
public static final int BUFFER_SIZE = 1000;
public static final int SAMPLES_PER_BUFFER = BUFFER_SIZE / 2;
+ private static final boolean staticAvailable;
// Sample time values
- public static final double SAMPLE_TIME_IN_SECS = 1.0 / SAMPLE_RATE;
- public static final double BUFFER_TIME_IN_SECS = SAMPLE_TIME_IN_SECS * SAMPLES_PER_BUFFER;
+ // public static final double SAMPLE_TIME_IN_SECS = 1.0 / DEFAULT_SAMPLE_RATE;
+ // public static final double BUFFER_TIME_IN_SECS = SAMPLE_TIME_IN_SECS * SAMPLES_PER_BUFFER;
- private static AudioFormat format;
- private static DataLine.Info info;
- private static SourceDataLine auline;
- private static int bufferCount;
- private static byte [] sampleData = new byte[BUFFER_SIZE];
-
- private static boolean available;
+ private javax.sound.sampled.AudioFormat format;
+ private DataLine.Info info;
+ private SourceDataLine auline;
+ private int bufferCount;
+ private byte [] sampleData = new byte[BUFFER_SIZE];
+ private boolean initialized = false;
+ private AudioDataFormat chosenFormat = null;
static {
- // Create the audio format we wish to use
- format = new AudioFormat(SAMPLE_RATE, SAMPLE_SIZE, CHANNELS, SIGNED, BIG_ENDIAN);
+ boolean ok = false;
+ try {
+ AudioSystem.getAudioFileTypes();
+ ok = true;
+ } catch (Throwable t) {
+
+ }
+ staticAvailable=ok;
+ }
+
+ @Override
+ public String toString() {
+ return "JavaSoundSink[init "+initialized+", dataLine "+info+", source "+auline+", bufferCount "+bufferCount+
+ ", chosen "+chosenFormat+", jsFormat "+format;
+ }
+
+ @Override
+ public AudioDataFormat getPreferredFormat() {
+ return DefaultFormat;
+ }
+
+ @Override
+ public AudioDataFormat initSink(AudioDataFormat requestedFormat, int bufferCount) {
+ if( !staticAvailable ) {
+ return null;
+ }
+ // Create the audio format we wish to use
+ format = new javax.sound.sampled.AudioFormat(requestedFormat.sampleRate, requestedFormat.sampleSize, requestedFormat.channelCount, requestedFormat.signed, !requestedFormat.littleEndian);
// Create dataline info object describing line format
info = new DataLine.Info(SourceDataLine.class, format);
@@ -47,33 +75,61 @@ public class JavaSoundAudioSink implements AudioSink {
auline.open(format);
auline.start();
System.out.println("JavaSound audio sink");
- available=true;
+ initialized=true;
+ chosenFormat = requestedFormat;
} catch (Exception e) {
- available=false;
+ initialized=false;
}
- }
+ return chosenFormat;
+ }
- public void writeData(byte[] sampleData, int data_size) {
- int written = 0;
- int len;
- while (data_size > 0) {
- len = auline.write(sampleData, written, data_size);
+ @Override
+ public boolean isInitialized() {
+ return initialized;
+ }
+
+ @Override
+ public void destroy() {
+ initialized = false;
+ chosenFormat = null;
+ // FIXME: complete code!
+ }
+
+ public void writeData(AudioFrame audioFrame) {
+ int data_size = audioFrame.dataSize;
+ final byte[] lala = new byte[data_size];
+ final int p = audioFrame.data.position();
+ audioFrame.data.get(lala, 0, data_size);
+ audioFrame.data.position(p);
+
+ int written = 0;
+ int len;
+ while (data_size > 0) {
+ // Nope: We don't make compromises for this crappy API !
+ len = auline.write(lala, written, data_size);
data_size -= len;
written += len;
}
}
- public int getDataAvailable() {
+ @Override
+ public int getQueuedByteCount() {
return auline.available();
}
- public boolean isDataAvailable(int data_size) {
- return auline.available()>=data_size;
+ @Override
+ public int getQueuedTime() {
+ return 0; // FIXME
}
+
@Override
- public boolean isAudioSinkAvailable() {
- return available;
+ public int getWritableBufferCount() {
+ return 1;
+ }
+
+ public boolean isDataAvailable(int data_size) {
+ return auline.available()>=data_size;
}
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java b/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java
index d92967849..cef1c3361 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java
@@ -1,23 +1,50 @@
package jogamp.opengl.util.av;
+
+import com.jogamp.opengl.util.av.AudioSink;
+
public class NullAudioSink implements AudioSink {
@Override
- public int getDataAvailable() {
- return 0;
+ public boolean isInitialized() {
+ return true;
}
@Override
- public boolean isDataAvailable(int data_size) {
- return false;
+ public AudioDataFormat getPreferredFormat() {
+ return DefaultFormat;
+ }
+
+ @Override
+ public AudioDataFormat initSink(AudioDataFormat requestedFormat, int bufferCount) {
+ return requestedFormat;
+ }
+
+ @Override
+ public void destroy() {
+ }
+
+ @Override
+ public int getQueuedByteCount() {
+ return 0;
+ }
+
+ @Override
+ public int getQueuedTime() {
+ return 0;
}
@Override
- public void writeData(byte[] sampleData, int data_size) {
+ public int getWritableBufferCount() {
+ return 1;
+ }
+
+ @Override
+ public boolean isDataAvailable(int data_size) {
+ return false;
}
@Override
- public boolean isAudioSinkAvailable() {
- return true;
+ public void writeData(AudioFrame audioFrame) {
}
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java b/src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java
new file mode 100644
index 000000000..5f5d69cf8
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/av/SyncedRingbuffer.java
@@ -0,0 +1,286 @@
+/**
+ * Copyright 2013 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+
+package jogamp.opengl.util.av;
+
+/**
+ * Simple synchronized ring buffer implementation.
+ * <p>
+ * Caller can choose whether to block until get / put is able to proceed or not.
+ * </p>
+ * <p>
+ * Caller can choose whether to pass an empty array and clear references at get,
+ * or using a preset array for circular access of same objects.
+ * </p>
+ * <p>
+ * Circular write position is equal to the read position if buffer is full or if buffer is empty.
+ * </p>
+ */
+public class SyncedRingbuffer<T> {
+
+ protected final Object sync = new Object();
+ protected final T[] array;
+ protected final int capacity;
+ protected int readPos;
+ protected int writePos;
+ protected int size;
+
+ public final String toString() {
+ return "SyncedRingbuffer<?>[filled "+size+" / "+capacity+", writePos "+writePos+", readPos "+readPos+"]";
+ }
+
+ /**
+ * Create instance w/ the given array and its capacity, e.g.:
+ * <pre>
+ * SyncedRingbuffer r = new SyncedRingbuffer<Integer>(new Integer[10]);
+ * </pre>
+ * <p>
+ * The array may either be clear, or preset w/ elements!
+ * </p>
+ * @param full if true, given array is assumed to be full, i.e. {@link #isFull()} will return true.
+ * @param array
+ */
+ public SyncedRingbuffer(T[] array, boolean full) {
+ this.array = array;
+ this.capacity = array.length;
+ clearImpl(false);
+ if(full) {
+ size = capacity;
+ }
+ }
+
+ public final int capacity() {
+ return capacity;
+ }
+
+ /**
+ * Resets all ring buffer pointers to zero.
+ * <p>
+ * {@link #isEmpty()} will return <code>true</code> after calling this method.
+ * </p>
+ * <p>
+ * If <code>clearRefs</code> is true, all ring buffer slots will be set to <code>null</code>.
+ * </p>
+ * @param clearRefs if true, all ring buffer slots will be flushed, otherwise they remain intact.
+ */
+ public final void clear(boolean clearRefs) {
+ synchronized ( sync ) {
+ clearImpl(clearRefs);
+ }
+ }
+
+ private final void clearImpl(boolean clearRefs) {
+ readPos = 0;
+ writePos = 0;
+ size = 0;
+ if( clearRefs ) {
+ for(int i=0; i<capacity; i++) {
+ this.array[i] = null;
+ }
+ }
+ }
+
+ /** Returns the number of elements in this ring buffer. */
+ public final int size() {
+ synchronized ( sync ) {
+ return size;
+ }
+ }
+
+ /** Returns the number of free slots available to put. */
+ public final int getFreeSlots() {
+ synchronized ( sync ) {
+ return capacity - size;
+ }
+ }
+
+ /** Returns true if this ring buffer is empty, otherwise false. */
+ public final boolean isEmpty() {
+ synchronized ( sync ) {
+ return 0 == size;
+ }
+ }
+
+ /** Returns true if this ring buffer is full, otherwise false. */
+ public final boolean isFull() {
+ synchronized ( sync ) {
+ return capacity == size;
+ }
+ }
+
+ /**
+ * Returns the oldest put element if available, otherwise null.
+ * <p>
+ * Impl. returns the element at the current read position
+ * and advances the read position - if available.
+ * </p>
+ * <p>
+ * If <code>clearRef</code> is true, the returned ring buffer slot will be set to <code>null</code>.
+ * </p>
+ * <p>
+ * Method is non-blocking and returns immediately.
+ * </p>
+ * @param clearRef if true, the returned ring buffer slot will be flushed, otherwise it remains intact.
+ * @return the oldest put element if available, otherwise null.
+ */
+ public final T get(boolean clearRef) {
+ try {
+ return getImpl(clearRef, false, false);
+ } catch (InterruptedException ie) { throw new RuntimeException(ie); }
+ }
+
+ /**
+ * Returns the oldest put element.
+ * <p>
+ * Impl. returns the element at the current read position
+ * and advances the read position.
+ * </p>
+ * <p>
+ * If <code>clearRef</code> is true, the returned ring buffer slot will be set to <code>null</code>.
+ * </p>
+ * <p>
+ * Method blocks until an element becomes available via put.
+ * </p>
+ * @param clearRef if true, the returned ring buffer slot will be flushed, otherwise it remains intact.
+ * @return the oldest put element
+ * @throws InterruptedException
+ */
+ public final T getBlocking(boolean clearRef) throws InterruptedException {
+ return getImpl(clearRef, true, false);
+ }
+
+ public final T peek() throws InterruptedException {
+ return getImpl(false, false, true);
+ }
+ public final T peekBlocking() throws InterruptedException {
+ return getImpl(false, true, true);
+ }
+
+ private final T getImpl(boolean clearRef, boolean blocking, boolean peek) throws InterruptedException {
+ synchronized ( sync ) {
+ if( 0 == size ) {
+ if( blocking ) {
+ while( 0 == size ) {
+ sync.wait();
+ }
+ } else {
+ return null;
+ }
+ }
+ final T r = array[readPos];
+ if( !peek ) {
+ if( clearRef ) {
+ array[readPos] = null;
+ }
+ readPos = (readPos + 1) % capacity;
+ size--;
+ sync.notifyAll(); // notify waiting putter
+ }
+ return r;
+ }
+ }
+
+ /**
+ * Puts the element <code>e</code> at the current write position
+ * and advances the write position.
+ * <p>
+ * Returns true if successful, otherwise false in case buffer is full.
+ * </p>
+ * <p>
+ * Method is non-blocking and returns immediately.
+ * </p>
+ */
+ public final boolean put(T e) {
+ try {
+ return putImpl(e, false, false);
+ } catch (InterruptedException ie) { throw new RuntimeException(ie); }
+ }
+
+ /**
+ * Puts the element <code>e</code> at the current write position
+ * and advances the write position.
+ * <p>
+ * Method blocks until a free slot becomes available via get.
+ * </p>
+ * @throws InterruptedException
+ */
+ public final void putBlocking(T e) throws InterruptedException {
+ if( !putImpl(e, false, true) ) {
+ throw new InternalError("Blocking put failed: "+this);
+ }
+ }
+
+ /**
+ * Keeps the element at the current write position intact
+ * and advances the write position.
+ * <p>
+ * Returns true if successful, otherwise false in case buffer is full.
+ * </p>
+ * <p>
+ * If <code>blocking</code> is true, method blocks until a free slot becomes available via get.
+ * </p>
+ * @param blocking if true, wait until a free slot becomes available via get.
+ * @throws InterruptedException
+ */
+ public final boolean putSame(boolean blocking) throws InterruptedException {
+ return putImpl(null, true, blocking);
+ }
+
+ private final boolean putImpl(T e, boolean sameRef, boolean blocking) throws InterruptedException {
+ synchronized ( sync ) {
+ if( capacity <= size ) {
+ if( blocking ) {
+ while( capacity <= size ) {
+ sync.wait();
+ }
+ } else {
+ return false;
+ }
+ }
+ if( !sameRef ) {
+ array[ writePos ] = e;
+ }
+ writePos = (writePos + 1) % capacity;
+ size++;
+ sync.notifyAll(); // notify waiting getter
+ return true;
+ }
+ }
+
+ public final void waitForFreeSlots(int count) throws InterruptedException {
+ synchronized ( sync ) {
+ if( capacity - size < count ) {
+ while( capacity - size < count ) {
+ System.err.println("XXXX AAA XXX");
+ sync.wait();
+ }
+ }
+ }
+ }
+
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
index 33b5b3b20..65b867ba1 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
@@ -38,20 +38,17 @@ import javax.media.opengl.GL;
import javax.media.opengl.GL2ES2;
import javax.media.opengl.GLException;
-import java.util.Arrays;
-import java.util.Queue;
-
-import com.jogamp.common.util.ReflectionUtil;
import com.jogamp.common.util.VersionNumber;
import com.jogamp.gluegen.runtime.ProcAddressTable;
import com.jogamp.opengl.util.GLPixelStorageModes;
-import com.jogamp.opengl.util.av.GLMediaPlayerFactory;
+import com.jogamp.opengl.util.av.AudioSink;
+import com.jogamp.opengl.util.av.AudioSinkFactory;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureSequence;
import jogamp.opengl.GLContextImpl;
-import jogamp.opengl.util.av.AudioSink;
import jogamp.opengl.util.av.EGLMediaPlayerImpl;
+import jogamp.opengl.util.av.SyncedRingbuffer;
/***
* Implementation utilizes <a href="http://libav.org/">Libav</a>
@@ -111,13 +108,10 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
private static final int TEMP_BUFFER_COUNT = 20;
// Instance data
- private static AudioSink audioSink;
- private static int maxAvailableAudio;
-
public static final VersionNumber avUtilVersion;
public static final VersionNumber avFormatVersion;
public static final VersionNumber avCodecVersion;
- static boolean available;
+ static final boolean available;
static {
if(FFMPEGDynamicLibraryBundleInfo.initSingleton()) {
@@ -129,24 +123,6 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
System.err.println("LIB_AV Codec : "+avCodecVersion);
initIDs0();
available = true;
- final ClassLoader cl = GLMediaPlayerFactory.class.getClassLoader();
-
- if(ReflectionUtil.isClassAvailable("com.jogamp.openal.ALFactory", cl)){
- // Only instance ALAudioSink if JOAL is found on the classpath.
- audioSink = (AudioSink) ReflectionUtil.createInstance("jogamp.opengl.openal.av.ALAudioSink", cl);
- if(!audioSink.isAudioSinkAvailable()){
- // Failed to initialize OpenAL.
- audioSink=null;
- }
- }
- if(audioSink==null) {
- audioSink = (AudioSink) ReflectionUtil.createInstance("jogamp.opengl.util.av.JavaSoundAudioSink", cl);
- if(!audioSink.isAudioSinkAvailable()) {
- audioSink = (AudioSink) ReflectionUtil.createInstance("jogamp.opengl.util.av.NullAudioSink", cl);
- }
- }
- maxAvailableAudio = audioSink.getDataAvailable();
-
} else {
avUtilVersion = null;
avFormatVersion = null;
@@ -163,6 +139,10 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
( vers >> 0 ) & 0xFF );
}
+ //
+ // Video
+ //
+
protected long moviePtr = 0;
protected long procAddrGLTexSubImage2D = 0;
protected EGLMediaPlayerImpl.EGLTextureFrame lastTex = null;
@@ -176,17 +156,29 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
protected int texWidth, texHeight; // overall (stuffing planes in one texture)
protected ByteBuffer texCopy;
+ //
+ // Audio
+ //
+
+ protected final int AudioFrameCount = 8;
+ protected final AudioSink audioSink;
+ protected final int maxAvailableAudio;
+ protected AudioSink.AudioDataFormat chosenAudioFormat;
+ protected final SyncedRingbuffer<AudioSink.AudioFrame> audioFramesBuffer = new SyncedRingbuffer<AudioSink.AudioFrame>(new AudioSink.AudioFrame[AudioFrameCount], false /* full */);
+
public FFMPEGMediaPlayer() {
super(TextureType.GL, false);
if(!available) {
throw new RuntimeException("FFMPEGMediaPlayer not available");
}
setTextureCount(1);
- moviePtr = createInstance0(true);
+ moviePtr = createInstance0(DEBUG);
if(0==moviePtr) {
throw new GLException("Couldn't create FFMPEGInstance");
}
psm = new GLPixelStorageModes();
+ audioSink = AudioSinkFactory.createDefault();
+ maxAvailableAudio = audioSink.getQueuedByteCount();
}
@Override
@@ -221,9 +213,11 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
}
final String urlS=urlConn.getURL().toExternalForm();
- System.out.println("setURL: p1 "+this);
- setStream0(moviePtr, urlS, -1, -1);
- System.out.println("setURL: p2 "+this);
+ chosenAudioFormat = audioSink.initSink(audioSink.getPreferredFormat(), AudioFrameCount);
+ System.err.println("setURL: p1 "+this);
+ setStream0(moviePtr, urlS, -1, -1, AudioFrameCount);
+ System.err.println("setURL: p2 "+this);
+
int tf, tif=GL.GL_RGBA; // texture format and internal format
switch(vBytesPerPixelPerPlane) {
case 1:
@@ -264,74 +258,103 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
}
- private class AudioFrame {
- final byte[] sampleData;
- final int data_size;
- final int audio_pts;
- AudioFrame(byte[] sampleData, int data_size, int audio_pts) {
- this.sampleData=sampleData;
- this.data_size=data_size;
- this.audio_pts=audio_pts;
+    /**
+     * Enqueues one decoded audio frame for the AudioPusher.
+     * Presumably invoked from the native FFmpeg decode path — TODO confirm,
+     * the caller is not visible in this file section.
+     */
+    private final void pushSound(ByteBuffer sampleData, int data_size, int audio_pts) {
+        // Only queue while a pusher exists and has not been asked to stop.
+        if( audioPusher != null && audioPusher.isRunning() ) {
+            try {
+                audioFramesBuffer.putBlocking(new AudioSink.AudioFrame(sampleData, data_size, audio_pts));
+            } catch (InterruptedException e) {
+                // NOTE(review): interrupt is swallowed and the thread's interrupt
+                // status is not restored (Thread.currentThread().interrupt()).
+                e.printStackTrace(); // oops
+            }
+            // NOTE(review): audioPusher is re-checked because stopAudioPusher()
+            // may null it concurrently with this call — presumably intentional,
+            // but the field is not volatile; confirm visibility guarantees.
+            if( null != audioPusher ) {
+                audioPusher.pushOne();
+            }
+        }
     }
- static final Queue<AudioFrame> audioFrameBuffer = new java.util.LinkedList<AudioFrame>();
-
- private void updateSound(byte[] sampleData, int data_size, int audio_pts) {
-/*
- // Visualize incomming data
- int c=0;
- for(byte b: sampleData){
- if(b<0) {
- System.out.print(" ");
- } else if(b<64) {
- System.out.print("_");
- } else if(b < 128) {
- System.out.print("-");
- } else if(b == 128) {
- System.out.print("=");
- } else if(b < 256-64) {
- System.out.print("\"");
- } else {
- System.out.print("'");
+ class AudioPusher extends Thread {
+ volatile boolean shallStop = false;
+ volatile boolean isBlocked = false;
+
+ AudioPusher() {
+ setDaemon(true);
+ }
+ public void requestStop() {
+ shallStop = true;
+ if( isBlocked ) {
+ // interrupt();
}
-
- c++;
- if(c>=40)
- break;
}
- System.out.println("jA");
-*/
-
- //TODO reduce GC
- audioFrameBuffer.add(new AudioFrame(sampleData, data_size, audio_pts));
- pumpAudio();
- }
-
- private void pumpAudio() {
- if(audioSink.getDataAvailable()==maxAvailableAudio){
- System.out.println("warning: audio buffer underrun");
+ public boolean isRunning() { return !shallStop; }
+
+        // Thread body: names the thread with a per-instance id, then pumps
+        // queued audio frames until requestStop() flips shallStop.
+        public void run() {
+            // NOTE(review): AudioPusherInstanceId is a static counter read and
+            // incremented without synchronization — ids may collide if two
+            // pushers are started concurrently; confirm single-threaded startup.
+            setName(getName()+"-AudioPusher_"+AudioPusherInstanceId);
+            AudioPusherInstanceId++;
+
+            while( !shallStop ){
+                pushOne();
+            }
+        }
- while(audioFrameBuffer.peek()!=null){
- AudioFrame a = audioFrameBuffer.peek();
-
- // poor mans audio sync .. TODO: off thread
- final long now = System.currentTimeMillis();
- final long now_d = now - lastAudioTime;
- final long pts_d = a.audio_pts - lastAudioPTS;
- final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
-
- System.err.println("s: pts-a "+a.audio_pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
- lastAudioTime = now;
- if( (dt<audio_dt_d ) && audioSink.isDataAvailable(a.data_size)) {
- audioFrameBuffer.poll(); /* remove first item from the queue */
- audioSink.writeData(a.sampleData, a.data_size);
- lastAudioPTS=a.audio_pts;
- } else {
- break;
+ public void pushOne() {
+ final AudioSink.AudioFrame audioFrame;
+ try {
+ isBlocked = true;
+ audioFrame = audioFramesBuffer.getBlocking(true /* clearRef */);
+ } catch (InterruptedException e) {
+ if( !shallStop ) {
+ e.printStackTrace(); // oops
+ }
+ shallStop = true;
+ return;
+ }
+ isBlocked = false;
+
+ if( null != audioFrame ) {
+ // poor mans audio sync ..
+ final long now = System.currentTimeMillis();
+ final long now_d = now - lastAudioTime;
+ final long pts_d = audioFrame.audioPTS - lastAudioPTS;
+ final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
+ final boolean sleep = dt > audio_dt_d && !shallStop;
+ final long sleepP = dt - ( audio_dt_d / 2 );
+ if(DEBUG) {
+ final int qAT = audioSink.getQueuedTime();
+ System.err.println("s: pts-a "+audioFrame.audioPTS+", qAT "+qAT+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt+", sleep "+sleep+", sleepP "+sleepP+" ms");
+ }
+ if( sleep ) {
+ try {
+ isBlocked = true;
+ Thread.sleep( sleepP );
+ } catch (InterruptedException e) {
+ e.printStackTrace(); // oops
+ }
+ isBlocked = false;
+ lastAudioTime = System.currentTimeMillis();
+ } else {
+ lastAudioTime = now;
+ }
+ if( !shallStop && audioSink.isDataAvailable(audioFrame.dataSize) ) {
+ audioSink.writeData(audioFrame);
+ lastAudioPTS=audioFrame.audioPTS;
+ }
}
}
}
+
+ static int AudioPusherInstanceId = 0;
+ private AudioPusher audioPusher = null;
+
+    // Requests the current pusher to stop and drops the reference, then clears
+    // any queued frames (also waking blocked putters/getters via clear(true)).
+    // NOTE(review): the pusher thread is not joined and requestStop() has its
+    // interrupt() commented out — a pusher blocked in getBlocking() may linger
+    // until the next frame arrives; confirm this is acceptable.
+    private final void stopAudioPusher() {
+        if( null != audioPusher ) {
+            audioPusher.requestStop();
+            audioPusher = null;
+        }
+        audioFramesBuffer.clear(true);
+    }
+    // Replaces any existing pusher with a fresh one.
+    // NOTE(review): audioPusher.start() is deliberately commented out, so the
+    // pusher thread never runs; pushOne() is instead driven synchronously from
+    // pushSound(). Looks like work-in-progress — confirm intended mode.
+    private final void startAudioPusher() {
+        stopAudioPusher();
+        audioPusher = new AudioPusher();
+        // audioPusher.start();
+    }
private void updateAttributes2(int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
int lSz0, int lSz1, int lSz2,
@@ -448,6 +471,7 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
if(0==moviePtr) {
return false;
}
+ startAudioPusher();
return true;
}
@@ -457,6 +481,7 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
if(0==moviePtr) {
return false;
}
+ stopAudioPusher();
return true;
}
@@ -466,6 +491,7 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
if(0==moviePtr) {
return false;
}
+ stopAudioPusher();
return true;
}
@@ -475,12 +501,13 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
if(0==moviePtr) {
throw new GLException("FFMPEG native instance null");
}
+ stopAudioPusher();
int pts0 = getVideoPTS0(moviePtr);
int pts1 = seek0(moviePtr, msec);
System.err.println("Seek: "+pts0+" -> "+msec+" : "+pts1);
- audioFrameBuffer.clear();
lastAudioPTS=pts1;
lastVideoPTS=pts1;
+ startAudioPusher();
return pts1;
}
@@ -509,6 +536,12 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
tex.enable(gl);
tex.bind(gl);
+ try {
+ audioFramesBuffer.waitForFreeSlots(2);
+ } catch (InterruptedException e) {
+ e.printStackTrace(); // oops
+ }
+
/* try decode 10 packets to find one containing video
(res == 2) */
int res = 0;
@@ -529,27 +562,27 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
final long pts_d = pts - lastAudioPTS - 444; /* hack 444 == play video 444ms ahead of audio */
final long dt = Math.min(47, (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ) ;
//final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
- lastVideoTime = now;
- System.err.println("s: pts-v "+pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
-
- if(dt>video_dt_d && dt<1000 && audioSink.getDataAvailable()<maxAvailableAudio-10000) {
+ final boolean sleep = dt>video_dt_d && dt<1000 && audioSink.getQueuedByteCount()<maxAvailableAudio-10000;
+ final long sleepP = dt-video_dt_d;
+ if(DEBUG) {
+ final int qAT = audioSink.getQueuedTime();
+ System.err.println("s: pts-v "+pts+", qAT "+qAT+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt+", sleep "+sleep+", sleepP "+sleepP+" ms");
+ }
+ // ?? Maybe use audioSink.getQueuedTime();
+ if( sleep ) {
try {
- Thread.sleep(dt-video_dt_d);
+ Thread.sleep(sleepP);
} catch (InterruptedException e) { }
- } /* else if(0>pts_d) {
- System.err.println("s: pts-v "+pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
- } */
+ lastVideoTime = System.currentTimeMillis();
+ } else {
+ lastVideoTime = now;
+ }
}
- pumpAudio();
lastVideoPTS = pts;
}
return lastTex;
}
- private void consumeAudio(int len) {
-
- }
-
private static native int getAvUtilVersion0();
private static native int getAvFormatVersion0();
private static native int getAvCodecVersion0();
@@ -557,7 +590,7 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
private native long createInstance0(boolean verbose);
private native void destroyInstance0(long moviePtr);
- private native void setStream0(long moviePtr, String url, int vid, int aid);
+ private native void setStream0(long moviePtr, String url, int vid, int aid, int audioFrameCount);
private native int getVideoPTS0(long moviePtr);