summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorSven Gothel <sgothel@jausoft.com>2013-08-14 06:43:42 +0200
committerSven Gothel <sgothel@jausoft.com>2013-08-14 06:43:42 +0200
commitbc3776633ccad81199a96ff8116195133d862395 (patch)
treec5edc7fc7eb8d5ffb3def1efffea44af96f8515a
parentf53b7713e5eb58a86faf0db06db8be35cfa413d9 (diff)
GLMediaPlayer Multithreaded Decoding: AudioSink (Part-2) - WIP
- AudioSink.AudioDataFormat - add fixedP (fixed-point or floating-point) - AudioSink - rename 'buffer count' to 'frame count' - add setPlaySpeed(..) - add isPlaying() - add play() - add pause() - add flush() - add: getFrameCount(), getQueuedFrameCount(), getFreeFrameCount(), getEnqueuedFrameCount(), - rename: writeData() -> enqueueData(..) - ALAudioSink - multithreaded usage - make ALCcontext current per thread, now required for multithreaded use Use RecursiveLock encapsulating the ALCcontext's makeCurrent/release/destroy, since the native operations seem to be buggy. NOTE: Think about adding these general methods to ALCcontext - implement new methods -
-rw-r--r--src/jogl/classes/com/jogamp/opengl/util/av/AudioSink.java117
-rw-r--r--src/jogl/classes/com/jogamp/opengl/util/av/AudioSinkFactory.java5
-rw-r--r--src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java520
-rw-r--r--src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java88
-rw-r--r--src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java67
5 files changed, 601 insertions, 196 deletions
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/AudioSink.java b/src/jogl/classes/com/jogamp/opengl/util/av/AudioSink.java
index 5caeb969a..ba785ac31 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/AudioSink.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/AudioSink.java
@@ -41,12 +41,13 @@ public interface AudioSink {
* Specifies the audio data format.
*/
public static class AudioDataFormat {
- public AudioDataFormat(AudioDataType dataType, int sampleRate, int sampleSize, int channelCount, boolean signed, boolean littleEndian) {
+ public AudioDataFormat(AudioDataType dataType, int sampleRate, int sampleSize, int channelCount, boolean signed, boolean fixedP, boolean littleEndian) {
this.dataType = dataType;
this.sampleRate = sampleRate;
this.sampleSize = sampleSize;
this.channelCount = channelCount;
this.signed = signed;
+ this.fixedP = fixedP;
this.littleEndian = littleEndian;
}
/** Audio data type. */
@@ -58,30 +59,32 @@ public interface AudioSink {
/** Number of channels. */
public final int channelCount;
public final boolean signed;
+ /** Fixed or floating point values. Floating point 'float' has {@link #sampleSize} 32, 'double' has {@link #sampleSize} 64. */
+ public final boolean fixedP;
public final boolean littleEndian;
public String toString() {
return "AudioDataFormat[type "+dataType+", sampleRate "+sampleRate+", sampleSize "+sampleSize+", channelCount "+channelCount+
- ", signed "+signed+", "+(littleEndian?"little":"big")+"endian]"; }
+ ", signed "+signed+", fixedP "+fixedP+", "+(littleEndian?"little":"big")+"endian]"; }
}
- /** Default {@link AudioDataFormat}, [type PCM, sampleRate 44100, sampleSize 16, channelCount 2, signed, littleEndian]. */
- public static final AudioDataFormat DefaultFormat = new AudioDataFormat(AudioDataType.PCM, 44100, 16, 2, true /* signed */, true /* littleEndian */);
+ /** Default {@link AudioDataFormat}, [type PCM, sampleRate 44100, sampleSize 16, channelCount 2, signed, fixedP, littleEndian]. */
+ public static final AudioDataFormat DefaultFormat = new AudioDataFormat(AudioDataType.PCM, 44100, 16, 2, true /* signed */, true /* fixed point */, true /* littleEndian */);
public static class AudioFrame {
public final ByteBuffer data;
public final int dataSize;
- public final int audioPTS;
+ public final int pts;
- public AudioFrame(ByteBuffer data, int dataSize, int audioPTS) {
+ public AudioFrame(ByteBuffer data, int dataSize, int pts) {
if( dataSize > data.remaining() ) {
throw new IllegalArgumentException("Give size "+dataSize+" exceeds remaining bytes in ls "+data+". "+this);
}
this.data=data;
this.dataSize=dataSize;
- this.audioPTS=audioPTS;
+ this.pts=pts;
}
- public String toString() { return "AudioFrame[apts "+audioPTS+", data "+data+", payloadSize "+dataSize+"]"; }
+ public String toString() { return "AudioFrame[apts "+pts+", data "+data+", payloadSize "+dataSize+"]"; }
}
/**
@@ -94,6 +97,19 @@ public interface AudioSink {
*/
public boolean isInitialized();
+ /** Returns the playback speed. */
+ public float getPlaySpeed();
+
+ /**
+ * Sets the playback speed.
+ * <p>
+ * Play speed is set to <i>normal</i>, i.e. <code>1.0f</code>
+ * if <code> abs(1.0f - rate) < 0.01f</code> to simplify test.
+ * </p>
+ * @return true if successful, otherwise false, i.e. due to unsupported value range of implementation.
+ */
+ public boolean setPlaySpeed(float s);
+
/**
* Returns the preferred {@link AudioDataFormat} by this sink.
* <p>
@@ -117,52 +133,101 @@ public interface AudioSink {
* The {@link #DefaultFormat} <i>should be</i> supported by all implementations.
* </p>
* @param requestedFormat the requested {@link AudioDataFormat}.
- * @param bufferCount number of buffers for sink
+ * @param frameCount number of frames to queue in this sink
* @return if successful the chosen AudioDataFormat based on the <code>requestedFormat</code> and this sinks capabilities, otherwise <code>null</code>.
*/
- public AudioDataFormat initSink(AudioDataFormat requestedFormat, int bufferCount);
-
+ public AudioDataFormat initSink(AudioDataFormat requestedFormat, int frameCount);
+
+ /**
+ * Returns true, if {@link #play()} has been requested <i>and</i> the sink is still playing,
+ * otherwise false.
+ */
+ public boolean isPlaying();
+
+ /**
+ * Play buffers queued via {@link #enqueueData(AudioFrame)} from current internal position.
+ * If no buffers are yet queued or the queue runs empty, playback is being continued when buffers are enqueued later on.
+ * @see #enqueueData(AudioFrame)
+ * @see #pause()
+ */
+ public void play();
+
+ /**
+ * Pause playing buffers while keeping enqueued data incl. its internal position.
+ * @see #play()
+ * @see #flush()
+ * @see #enqueueData(AudioFrame)
+ */
+ public void pause();
+
+ /**
+ * Flush all queued buffers, implies {@link #pause()}.
+ * <p>
+ * {@link #initSink(AudioDataFormat, int)} must be called first.
+ * </p>
+ * @see #play()
+ * @see #pause()
+ * @see #enqueueData(AudioFrame)
+ */
+ public void flush();
/** Destroys this instance, i.e. closes all streams and devices allocated. */
public void destroy();
/**
- * Returns the number of bytes queued for playing.
+ * Returns the number of allocated buffers as requested by
+ * {@link #initSink(AudioDataFormat, int)}.
+ */
+ public int getFrameCount();
+
+ /** @return the current enqueued frames count since {@link #initSink(AudioDataFormat, int)}. */
+ public int getEnqueuedFrameCount();
+
+ /**
+ * Returns the current number of frames queued for playing.
+ * <p>
+ * {@link #initSink(AudioDataFormat, int)} must be called first.
+ * </p>
+ */
+ public int getQueuedFrameCount();
+
+ /**
+ * Returns the current number of bytes queued for playing.
* <p>
- * {@link #initSink(AudioDataFormat)} must be called first.
+ * {@link #initSink(AudioDataFormat, int)} must be called first.
* </p>
*/
public int getQueuedByteCount();
/**
- * Returns the queued buffer time in milliseconds for playing.
+ * Returns the current queued frame time in milliseconds for playing.
* <p>
- * {@link #initSink(AudioDataFormat)} must be called first.
+ * {@link #initSink(AudioDataFormat, int)} must be called first.
* </p>
*/
public int getQueuedTime();
/**
- * Returns the number of buffers in the sink available for writing.
- * <p>
- * {@link #initSink(AudioDataFormat)} must be called first.
- * </p>
+ * Return the current audio presentation timestamp (PTS) in milliseconds.
*/
- public int getWritableBufferCount();
+ public int getPTS();
/**
- * Returns true if data is available to be written in the sink.
+ * Returns the current number of frames in the sink available for writing.
* <p>
- * {@link #initSink(AudioDataFormat)} must be called first.
+ * {@link #initSink(AudioDataFormat, int)} must be called first.
* </p>
*/
- public boolean isDataAvailable(int data_size);
-
+ public int getFreeFrameCount();
+
/**
- * Writes the remaining bytes of the given direct ByteBuffer to this sink.
+ * Enqueue the remaining bytes of the given {@link AudioFrame}'s direct ByteBuffer to this sink.
* <p>
* The data must comply with the chosen {@link AudioDataFormat} as returned by {@link #initSink(AudioDataFormat)}.
* </p>
+ * <p>
+ * {@link #initSink(AudioDataFormat, int)} must be called first.
+ * </p>
*/
- public void writeData(AudioFrame audioFrame);
+ public void enqueueData(AudioFrame audioFrame);
}
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/AudioSinkFactory.java b/src/jogl/classes/com/jogamp/opengl/util/av/AudioSinkFactory.java
index 40321fb6f..a6a14f7dd 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/AudioSinkFactory.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/AudioSinkFactory.java
@@ -42,10 +42,13 @@ public class AudioSinkFactory {
sink = create(cl, JavaAudioSinkClazzName);
}
if( null == sink ) {
- sink = new NullAudioSink();
+ sink = createNull();
}
return sink;
}
+ public static AudioSink createNull() {
+ return new NullAudioSink();
+ }
public static AudioSink create(final ClassLoader cl, String implName) {
final AudioSink audioSink;
diff --git a/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java b/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
index 87c7b937a..5783c32f1 100644
--- a/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
+++ b/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
@@ -30,6 +30,8 @@ package jogamp.opengl.openal.av;
import jogamp.opengl.util.av.SyncedRingbuffer;
+import com.jogamp.common.util.locks.LockFactory;
+import com.jogamp.common.util.locks.RecursiveLock;
import com.jogamp.openal.AL;
import com.jogamp.openal.ALC;
import com.jogamp.openal.ALCcontext;
@@ -53,31 +55,40 @@ public class ALAudioSink implements AudioSink {
private String deviceSpecifier;
private ALCdevice device;
private ALCcontext context;
+ // private static final ThreadLocal<GLContext> currentContext = new ThreadLocal<GLContext>();
+ protected final RecursiveLock lock = LockFactory.createRecursiveLock();
/** Sample period in seconds */
public float samplePeriod;
- /** Buffer period in seconds */
- public float bufferPeriod;
+
+ /** Playback speed, range [0.5 - 2.0], default 1.0. */
+ public float playSpeed;
static class ActiveBuffer {
- ActiveBuffer(Integer name, int size) {
+ ActiveBuffer(Integer name, int pts, int size) {
this.name = name;
+ this.pts = pts;
this.size = size;
}
public final Integer name;
+ public final int pts;
public final int size;
- public String toString() { return "ABuffer[name "+name+", size "+size+"]"; }
+ public String toString() { return "ABuffer[name "+name+", pts "+pts+", size "+size+"]"; }
}
int[] alBuffers = null;
private SyncedRingbuffer<Integer> alBufferAvail = null;
private SyncedRingbuffer<ActiveBuffer> alBufferPlaying = null;
- private int alBufferBytesQueued = 0;
+ private volatile int alBufferBytesQueued = 0;
+ private volatile int ptsPlaying = 0;
+ private volatile int enqueuedFrameCount;
private int[] alSource = null;
private AudioDataFormat chosenFormat;
private int alFormat;
private boolean initialized;
+
+ private volatile boolean playRequested = false;
static {
ALC _alc = null;
@@ -123,29 +134,31 @@ public class ALAudioSink implements AudioSink {
throw new RuntimeException("ALAudioSink: Error creating OpenAL context");
}
- // Set active context.
- alc.alcMakeContextCurrent(context);
-
- // Check for an error.
- if ( alc.alcGetError(device) != ALC.ALC_NO_ERROR ) {
- throw new RuntimeException("ALAudioSink: Error making OpenAL context current");
- }
-
- // Create source
- {
- alSource = new int[1];
- al.alGenSources(1, alSource, 0);
- final int err = al.alGetError();
- if( err != AL.AL_NO_ERROR ) {
- alSource = null;
- throw new RuntimeException("ALAudioSink: Error generating Source: 0x"+Integer.toHexString(err));
- }
- }
-
- if( DEBUG ) {
- System.err.println("ALAudioSink: Using device: " + deviceSpecifier);
+ lockContext();
+ try {
+ // Check for an error.
+ if ( alc.alcGetError(device) != ALC.ALC_NO_ERROR ) {
+ throw new RuntimeException("ALAudioSink: Error making OpenAL context current");
+ }
+
+ // Create source
+ {
+ alSource = new int[1];
+ al.alGenSources(1, alSource, 0);
+ final int err = al.alGetError();
+ if( err != AL.AL_NO_ERROR ) {
+ alSource = null;
+ throw new RuntimeException("ALAudioSink: Error generating Source: 0x"+Integer.toHexString(err));
+ }
+ }
+
+ if( DEBUG ) {
+ System.err.println("ALAudioSink: Using device: " + deviceSpecifier);
+ }
+ initialized = true;
+ } finally {
+ unlockContext();
}
- initialized = true;
return;
} catch ( Exception e ) {
if( DEBUG ) {
@@ -155,28 +168,70 @@ public class ALAudioSink implements AudioSink {
}
}
+ private final void lockContext() {
+ lock.lock();
+ alc.alcMakeContextCurrent(context);
+ }
+ private final void unlockContext() {
+ alc.alcMakeContextCurrent(null);
+ lock.unlock();
+ }
+ private final void destroyContext() {
+ lock.lock();
+ try {
+ if( null != context ) {
+ try {
+ alc.alcDestroyContext(context);
+ } catch (Throwable t) {
+ if( DEBUG ) {
+ System.err.println("Catched "+t.getClass().getName()+": "+t.getMessage());
+ t.printStackTrace();
+ }
+ }
+ context = null;
+ }
+ // unroll lock !
+ while(lock.getHoldCount() > 1) {
+ lock.unlock();
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+
@Override
- public String toString() {
+ public final String toString() {
final int alSrcName = null != alSource ? alSource[0] : 0;
final int alBuffersLen = null != alBuffers ? alBuffers.length : 0;
- return "ALAudioSink[init "+initialized+", device "+deviceSpecifier+", ctx "+context+", alSource "+alSrcName+
+ final int ctxHash = context != null ? context.hashCode() : 0;
+ return "ALAudioSink[init "+initialized+", playRequested "+playRequested+", device "+deviceSpecifier+", ctx "+toHexString(ctxHash)+", alSource "+alSrcName+
", chosen "+chosenFormat+", alFormat "+toHexString(alFormat)+
- ", buffers[total "+alBuffersLen+", avail "+alBufferAvail.size()+", "+alBufferPlaying.getFreeSlots()+
- ", queued[bufferCount "+alBufferPlaying.size()+", "+getQueuedTime() + " ms, " + alBufferBytesQueued+" bytes]";
+ ", playSpeed "+playSpeed+", buffers[total "+alBuffersLen+", avail "+alBufferAvail.size()+", "+
+ "queued["+alBufferPlaying.size()+", apts "+ptsPlaying+", "+getQueuedTime() + " ms, " + alBufferBytesQueued+" bytes]";
+ }
+ public final String getPerfString() {
+ final int alBuffersLen = null != alBuffers ? alBuffers.length : 0;
+ return "Play [buffer "+alBufferPlaying.size()+"/"+alBuffersLen+", apts "+ptsPlaying+", "+getQueuedTime() + " ms, " + alBufferBytesQueued+" bytes]";
}
@Override
- public AudioDataFormat getPreferredFormat() {
+ public final AudioDataFormat getPreferredFormat() {
return DefaultFormat;
}
@Override
- public AudioDataFormat initSink(AudioDataFormat requestedFormat, int bufferCount) {
+ public final AudioDataFormat initSink(AudioDataFormat requestedFormat, int frameCount) {
if( !staticAvailable ) {
return null;
}
+ if( !requestedFormat.fixedP ||
+ !requestedFormat.littleEndian ||
+ ( 1 != requestedFormat.channelCount && requestedFormat.channelCount != 2 ) ||
+ ( 8 != requestedFormat.sampleSize && requestedFormat.sampleSize != 16 )
+ ) {
+ return null; // not supported w/ OpenAL
+ }
samplePeriod = 1.0f / requestedFormat.sampleRate;
- bufferPeriod = samplePeriod * SAMPLES_PER_BUFFER;
switch( requestedFormat.channelCount ) {
case 1: {
switch ( requestedFormat.sampleSize ) {
@@ -184,8 +239,6 @@ public class ALAudioSink implements AudioSink {
alFormat = AL.AL_FORMAT_MONO8; break;
case 16:
alFormat = AL.AL_FORMAT_MONO16; break;
- default:
- return null;
}
} break;
case 2:
@@ -194,29 +247,31 @@ public class ALAudioSink implements AudioSink {
alFormat = AL.AL_FORMAT_STEREO8; break;
case 16:
alFormat = AL.AL_FORMAT_STEREO16; break;
- default:
- return null;
}
}
- // Allocate buffers
- destroyBuffers();
- {
- alBuffers = new int[bufferCount];
- al.alGenBuffers(bufferCount, alBuffers, 0);
- final int err = al.alGetError();
- if( err != AL.AL_NO_ERROR ) {
- alBuffers = null;
- throw new RuntimeException("ALAudioSink: Error generating Buffers: 0x"+Integer.toHexString(err));
- }
- final Integer[] alBufferRingArray = new Integer[bufferCount];
- for(int i=0; i<bufferCount; i++) {
- alBufferRingArray[i] = Integer.valueOf(alBuffers[i]);
+ lockContext();
+ try {
+ // Allocate buffers
+ destroyBuffers();
+ {
+ alBuffers = new int[frameCount];
+ al.alGenBuffers(frameCount, alBuffers, 0);
+ final int err = al.alGetError();
+ if( err != AL.AL_NO_ERROR ) {
+ alBuffers = null;
+ throw new RuntimeException("ALAudioSink: Error generating Buffers: 0x"+Integer.toHexString(err));
+ }
+ final Integer[] alBufferRingArray = new Integer[frameCount];
+ for(int i=0; i<frameCount; i++) {
+ alBufferRingArray[i] = Integer.valueOf(alBuffers[i]);
+ }
+ alBufferAvail = new SyncedRingbuffer<Integer>(alBufferRingArray, true /* full */);
+ alBufferPlaying = new SyncedRingbuffer<ActiveBuffer>(new ActiveBuffer[frameCount], false /* full */);
}
- alBufferAvail = new SyncedRingbuffer<Integer>(alBufferRingArray, true /* full */);
- alBufferPlaying = new SyncedRingbuffer<ActiveBuffer>(new ActiveBuffer[bufferCount], false /* full */);
+ } finally {
+ unlockContext();
}
-
chosenFormat = requestedFormat;
return chosenFormat;
}
@@ -244,35 +299,31 @@ public class ALAudioSink implements AudioSink {
}
@Override
- public void destroy() {
+ public final void destroy() {
initialized = false;
if( !staticAvailable ) {
return;
}
- if( null != alSource ) {
- try {
- al.alDeleteSources(1, alSource, 0);
- } catch (Throwable t) {
- if( DEBUG ) {
- System.err.println("Catched "+t.getClass().getName()+": "+t.getMessage());
- t.printStackTrace();
- }
- }
- alSource = null;
- }
-
- destroyBuffers();
-
if( null != context ) {
- try {
- alc.alcDestroyContext(context);
- } catch (Throwable t) {
- if( DEBUG ) {
- System.err.println("Catched "+t.getClass().getName()+": "+t.getMessage());
- t.printStackTrace();
+ lockContext();
+ }
+ try {
+ stopImpl();
+ if( null != alSource ) {
+ try {
+ al.alDeleteSources(1, alSource, 0);
+ } catch (Throwable t) {
+ if( DEBUG ) {
+ System.err.println("Catched "+t.getClass().getName()+": "+t.getMessage());
+ t.printStackTrace();
+ }
}
+ alSource = null;
}
- context = null;
+
+ destroyBuffers();
+ } finally {
+ destroyContext();
}
if( null != device ) {
try {
@@ -289,36 +340,56 @@ public class ALAudioSink implements AudioSink {
}
@Override
- public boolean isInitialized() {
+ public final boolean isInitialized() {
return initialized;
}
- private final void dequeueBuffer(boolean wait) {
+ private final int dequeueBuffer(boolean all, boolean wait) {
+ if( !lock.isLocked() ) {
+ throw new InternalError("XXX");
+ }
int alErr = AL.AL_NO_ERROR;
- final int[] val=new int[1];
- do {
- al.alGetSourcei(alSource[0], AL.AL_BUFFERS_PROCESSED, val, 0);
- alErr = al.alGetError();
- if( AL.AL_NO_ERROR != alErr ) {
- throw new RuntimeException("ALError "+toHexString(alErr)+" while quering processed buffers at source. "+this);
- }
- if( wait && val[0] <= 0 ) {
- try {
- Thread.sleep(1);
- } catch (InterruptedException e){
+ final int releaseBufferCount;
+ if( all ) {
+ releaseBufferCount = alBufferPlaying.size();
+ } else if( alBufferBytesQueued > 0 ) {
+ final int[] val=new int[1];
+ int i=0;
+ do {
+ al.alGetSourcei(alSource[0], AL.AL_BUFFERS_PROCESSED, val, 0);
+ alErr = al.alGetError();
+ if( AL.AL_NO_ERROR != alErr ) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while quering processed buffers at source. "+this);
}
- }
- } while (val[0] <= 0);
- final int processedBuffers = val[0];
+ if( wait && val[0] <= 0 ) {
+ i++;
+ // clip wait at 60Hz - min 1ms
+ final int sleep = Math.max(1, Math.min(15, getQueuedTimeImpl( alBufferBytesQueued / alBufferPlaying.size() ) ));
+ if( DEBUG ) {
+ System.err.println(getThreadName()+": ALAudioSink: Dequeue.wait["+i+"]: sleep "+sleep+" ms, playImpl "+isPlayingImpl1()+", processed "+val[0]+", "+this);
+ }
+ unlockContext();
+ try {
+ Thread.sleep(sleep);
+ } catch (InterruptedException e) {
+ } finally {
+ lockContext();
+ }
+ }
+ } while ( wait && val[0] <= 0 && alBufferBytesQueued > 0 );
+ releaseBufferCount = val[0];
+ } else {
+ releaseBufferCount = 0;
+ }
- if( processedBuffers > 0 ) {
- int[] buffers=new int[processedBuffers];
- al.alSourceUnqueueBuffers(alSource[0], processedBuffers, buffers, 0);
+ if( releaseBufferCount > 0 ) {
+ int[] buffers=new int[releaseBufferCount];
+ al.alSourceUnqueueBuffers(alSource[0], releaseBufferCount, buffers, 0);
alErr = al.alGetError();
if( AL.AL_NO_ERROR != alErr ) {
- throw new RuntimeException("ALError "+toHexString(alErr)+" while dequeueing "+processedBuffers+" processed buffers. "+this);
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while dequeueing "+releaseBufferCount+" buffers. "+this);
}
- for ( int i=0; i<processedBuffers; i++ ) {
+ for ( int i=0; i<releaseBufferCount; i++ ) {
final ActiveBuffer releasedBuffer = alBufferPlaying.get(true /* clearRef */);
if( null == releasedBuffer ) {
throw new InternalError("Internal Error: "+this);
@@ -331,19 +402,16 @@ public class ALAudioSink implements AudioSink {
if( !alBufferAvail.put(releasedBuffer.name) ) {
throw new InternalError("Internal Error: "+this);
}
- if( DEBUG ) {
- System.err.println("ALAudioSink: Dequeued "+processedBuffers+", wait "+wait+", "+this);
- }
}
}
+ return releaseBufferCount;
}
- private static final String toHexString(int v) {
- return "0x"+Integer.toHexString(v);
- }
+ private static final String toHexString(int v) { return "0x"+Integer.toHexString(v); }
+ private static final String getThreadName() { return Thread.currentThread().getName(); }
@Override
- public void writeData(AudioFrame audioFrame) {
+ public final void enqueueData(AudioFrame audioFrame) {
if( !initialized || null == chosenFormat ) {
return;
}
@@ -352,77 +420,237 @@ public class ALAudioSink implements AudioSink {
// OpenAL consumes buffers in the background
// we first need to initialize the OpenAL buffers then
// start continuous playback.
- alc.alcMakeContextCurrent(context);
- alErr = al.alGetError();
- if(al.alGetError() != AL.AL_NO_ERROR) {
- throw new RuntimeException("ALError "+toHexString(alErr)+" while makeCurrent. "+this);
+ lockContext();
+ try {
+ alErr = al.alGetError();
+ if(al.alGetError() != AL.AL_NO_ERROR) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while makeCurrent. "+this);
+ }
+
+ if( isPlayingImpl0() ) { // dequeue only possible if playing ..
+ final boolean wait = alBufferAvail.size() <= 1;
+ if( DEBUG ) {
+ System.err.println(getThreadName()+": ALAudioSink: Dequeue: playImpl "+isPlayingImpl1()+", wait "+wait+", "+this);
+ }
+ final int dequeuedBufferCount = dequeueBuffer( false /* all */, wait );
+ final ActiveBuffer currentBuffer = alBufferPlaying.peek();
+ ptsPlaying = null != currentBuffer ? currentBuffer.pts : audioFrame.pts;
+ if( DEBUG ) {
+ System.err.println(getThreadName()+": ALAudioSink: Write "+audioFrame.pts+", "+getQueuedTimeImpl(audioFrame.dataSize)+" ms, dequeued "+dequeuedBufferCount+", wait "+wait+", "+getPerfString());
+ }
+ }
+
+ final Integer alBufferName = alBufferAvail.get(true /* clearRef */);
+ if( null == alBufferName ) {
+ throw new InternalError("Internal Error: "+this);
+ }
+ if( !alBufferPlaying.put( new ActiveBuffer(alBufferName, audioFrame.pts, audioFrame.dataSize) ) ) {
+ throw new InternalError("Internal Error: "+this);
+ }
+ al.alBufferData(alBufferName.intValue(), alFormat, audioFrame.data, audioFrame.dataSize, chosenFormat.sampleRate);
+ final int[] alBufferNames = new int[] { alBufferName.intValue() };
+ al.alSourceQueueBuffers(alSource[0], 1, alBufferNames, 0);
+ alErr = al.alGetError();
+ if(al.alGetError() != AL.AL_NO_ERROR) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while queueing buffer "+toHexString(alBufferNames[0])+". "+this);
+ }
+ alBufferBytesQueued += audioFrame.dataSize;
+ enqueuedFrameCount++;
+
+ playImpl(); // continue playing, fixes issue where we ran out of enqueued data!
+ } finally {
+ unlockContext();
}
-
- if( alBufferAvail.isEmpty() ) {
- dequeueBuffer(true);
+ }
+
+ @Override
+ public final boolean isPlaying() {
+ if( !initialized || null == chosenFormat ) {
+ return false;
}
-
- final Integer alBufferName = alBufferAvail.get(true /* clearRef */);
- if( null == alBufferName ) {
- throw new InternalError("Internal Error: "+this);
+ if( playRequested ) {
+ lockContext();
+ try {
+ return isPlayingImpl0();
+ } finally {
+ unlockContext();
+ }
+ } else {
+ return false;
}
- if( !alBufferPlaying.put( new ActiveBuffer(alBufferName, audioFrame.dataSize) ) ) {
- throw new InternalError("Internal Error: "+this);
+ }
+ private final boolean isPlayingImpl0() {
+ if( playRequested ) {
+ return isPlayingImpl1();
+ } else {
+ return false;
}
- al.alBufferData(alBufferName.intValue(), alFormat, audioFrame.data, audioFrame.dataSize, chosenFormat.sampleRate);
- final int[] alBufferNames = new int[] { alBufferName.intValue() };
- al.alSourceQueueBuffers(alSource[0], 1, alBufferNames, 0);
- alErr = al.alGetError();
+ }
+ private final boolean isPlayingImpl1() {
+ final int[] val = new int[1];
+ al.alGetSourcei(alSource[0], AL.AL_SOURCE_STATE, val, 0);
+ final int alErr = al.alGetError();
if(al.alGetError() != AL.AL_NO_ERROR) {
- throw new RuntimeException("ALError "+toHexString(alErr)+" while queueing buffer "+toHexString(alBufferNames[0])+". "+this);
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while querying isPlaying. "+this);
}
- alBufferBytesQueued += audioFrame.dataSize;
-
- // Restart openal playback if needed
- {
- int[] val = new int[1];
- al.alGetSourcei(alSource[0], AL.AL_SOURCE_STATE, val, 0);
- if(val[0] != AL.AL_PLAYING) {
+ return val[0] == AL.AL_PLAYING;
+ }
+
+ @Override
+ public final void play() {
+ if( !initialized || null == chosenFormat ) {
+ return;
+ }
+ playRequested = true;
+ lockContext();
+ try {
+ playImpl();
+ if( DEBUG ) {
+ System.err.println(getThreadName()+": ALAudioSink: PLAY playImpl "+isPlayingImpl1()+", "+this);
+ }
+ } finally {
+ unlockContext();
+ }
+ }
+ private final void playImpl() {
+ if( playRequested && !isPlayingImpl1() ) {
+ al.alSourcePlay(alSource[0]);
+ final int alErr = al.alGetError();
+ if(al.alGetError() != AL.AL_NO_ERROR) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while start playing. "+this);
+ }
+ }
+ }
+
+ @Override
+ public final void pause() {
+ if( !initialized || null == chosenFormat ) {
+ return;
+ }
+ if( playRequested ) {
+ lockContext();
+ try {
+ pauseImpl();
if( DEBUG ) {
- System.err.println("ALAudioSink: Start playing: "+this);
- }
- al.alSourcePlay(alSource[0]);
- alErr = al.alGetError();
- if(al.alGetError() != AL.AL_NO_ERROR) {
- throw new RuntimeException("ALError "+toHexString(alErr)+" while start playing. "+this);
- }
+ System.err.println(getThreadName()+": ALAudioSink: PAUSE playImpl "+isPlayingImpl1()+", "+this);
+ }
+ } finally {
+ unlockContext();
}
}
}
-
+ private final void pauseImpl() {
+ if( isPlayingImpl0() ) {
+ playRequested = false;
+ al.alSourcePause(alSource[0]);
+ final int alErr = al.alGetError();
+ if(al.alGetError() != AL.AL_NO_ERROR) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while pausing. "+this);
+ }
+ }
+ }
+ private final void stopImpl() {
+ if( isPlayingImpl0() ) {
+ playRequested = false;
+ al.alSourceStop(alSource[0]);
+ final int alErr = al.alGetError();
+ if(al.alGetError() != AL.AL_NO_ERROR) {
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while pausing. "+this);
+ }
+ }
+ }
+
+ @Override
+ public final float getPlaySpeed() { return playSpeed; }
+
+ @Override
+ public final boolean setPlaySpeed(float rate) {
+ if( !initialized || null == chosenFormat ) {
+ return false;
+ }
+ lockContext();
+ try {
+ if( Math.abs(1.0f - rate) < 0.01f ) {
+ rate = 1.0f;
+ }
+ if( 0.5f <= rate && rate <= 2.0f ) { // OpenAL limits
+ playSpeed = rate;
+ al.alSourcef(alSource[0], AL.AL_PITCH, playSpeed);
+ return true;
+ }
+ } finally {
+ unlockContext();
+ }
+ return false;
+ }
+
+ @Override
+ public final void flush() {
+ if( !initialized || null == chosenFormat ) {
+ return;
+ }
+ lockContext();
+ try {
+ // pauseImpl();
+ stopImpl();
+ dequeueBuffer( true /* all */, false /* wait */ );
+ if( alBuffers.length != alBufferAvail.size() || alBufferPlaying.size() != 0 ) {
+ throw new InternalError("XXX: "+this);
+ }
+ if( DEBUG ) {
+ System.err.println(getThreadName()+": ALAudioSink: FLUSH playImpl "+isPlayingImpl1()+", "+this);
+ }
+ } finally {
+ unlockContext();
+ }
+ }
+
@Override
- public int getQueuedByteCount() {
+ public final int getEnqueuedFrameCount() {
+ return enqueuedFrameCount;
+ }
+
+ @Override
+ public final int getFrameCount() {
+ return null != alBuffers ? alBuffers.length : 0;
+ }
+
+ @Override
+ public final int getQueuedFrameCount() {
if( !initialized || null == chosenFormat ) {
return 0;
}
- return alBufferBytesQueued;
+ return alBufferPlaying.size();
}
@Override
- public int getQueuedTime() {
+ public final int getFreeFrameCount() {
if( !initialized || null == chosenFormat ) {
return 0;
}
- final int bps = chosenFormat.sampleSize / 8;
- return alBufferBytesQueued / ( chosenFormat.channelCount * bps * ( chosenFormat.sampleRate / 1000 ) );
+ return alBufferAvail.size();
}
@Override
- public int getWritableBufferCount() {
+ public final int getQueuedByteCount() {
if( !initialized || null == chosenFormat ) {
return 0;
}
- return alBufferPlaying.getFreeSlots();
+ return alBufferBytesQueued;
}
@Override
- public boolean isDataAvailable(int data_size) {
- return initialized && null != chosenFormat;
+ public final int getQueuedTime() {
+ if( !initialized || null == chosenFormat ) {
+ return 0;
+ }
+ return getQueuedTimeImpl(alBufferBytesQueued);
+ }
+ private final int getQueuedTimeImpl(int byteCount) {
+ final int bytesPerSample = chosenFormat.sampleSize >>> 3; // /8
+ return byteCount / ( chosenFormat.channelCount * bytesPerSample * ( chosenFormat.sampleRate / 1000 ) );
}
+ @Override
+ public final int getPTS() { return ptsPlaying; }
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java b/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java
index a5fedce59..e96bb6a50 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/JavaSoundAudioSink.java
@@ -34,6 +34,8 @@ public class JavaSoundAudioSink implements AudioSink {
private boolean initialized = false;
private AudioDataFormat chosenFormat = null;
+ private volatile boolean playRequested = false;
+
static {
boolean ok = false;
try {
@@ -52,12 +54,20 @@ public class JavaSoundAudioSink implements AudioSink {
}
@Override
+ public final float getPlaySpeed() { return 1.0f; } // FIXME
+
+ @Override
+ public final boolean setPlaySpeed(float rate) {
+ return false; // FIXME
+ }
+
+ @Override
public AudioDataFormat getPreferredFormat() {
return DefaultFormat;
}
@Override
- public AudioDataFormat initSink(AudioDataFormat requestedFormat, int bufferCount) {
+ public AudioDataFormat initSink(AudioDataFormat requestedFormat, int frameCount) {
if( !staticAvailable ) {
return null;
}
@@ -84,6 +94,56 @@ public class JavaSoundAudioSink implements AudioSink {
}
@Override
+ public boolean isPlaying() {
+ return playRequested && auline.isRunning();
+ }
+
+ @Override
+ public void play() {
+ if( null != auline ) {
+ playRequested = true;
+ playImpl();
+ }
+ }
+ private void playImpl() {
+ if( playRequested && !auline.isRunning() ) {
+ auline.start();
+ }
+ }
+
+ @Override
+ public void pause() {
+ if( null != auline ) {
+ playRequested = false;
+ auline.stop();
+ }
+ }
+
+ @Override
+ public void flush() {
+ if( null != auline ) {
+ playRequested = false;
+ auline.stop();
+ auline.flush();
+ }
+ }
+
+ @Override
+ public final int getEnqueuedFrameCount() {
+ return 0; // FIXME
+ }
+
+ @Override
+ public int getFrameCount() {
+ return 1;
+ }
+
+ @Override
+ public int getQueuedFrameCount() {
+ return 0;
+ }
+
+ @Override
public boolean isInitialized() {
return initialized;
}
@@ -95,7 +155,7 @@ public class JavaSoundAudioSink implements AudioSink {
// FIXME: complete code!
}
- public void writeData(AudioFrame audioFrame) {
+ public void enqueueData(AudioFrame audioFrame) {
int data_size = audioFrame.dataSize;
final byte[] lala = new byte[data_size];
final int p = audioFrame.data.position();
@@ -109,27 +169,29 @@ public class JavaSoundAudioSink implements AudioSink {
len = auline.write(lala, written, data_size);
data_size -= len;
written += len;
- }
+ }
+ playImpl();
}
-
+
@Override
public int getQueuedByteCount() {
- return auline.available();
+ return auline.getBufferSize() - auline.available();
}
@Override
- public int getQueuedTime() {
- return 0; // FIXME
+ public int getFreeFrameCount() {
+ return auline.available();
}
-
@Override
- public int getWritableBufferCount() {
- return 1;
+ public int getQueuedTime() {
+ return getQueuedTimeImpl( getQueuedByteCount() );
}
-
- public boolean isDataAvailable(int data_size) {
- return auline.available()>=data_size;
+ private final int getQueuedTimeImpl(int byteCount) {
+ final int bytesPerSample = chosenFormat.sampleSize >>> 3; // /8
+ return byteCount / ( chosenFormat.channelCount * bytesPerSample * ( chosenFormat.sampleRate / 1000 ) );
}
+ @Override
+ public final int getPTS() { return 0; } // FIXME
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java b/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java
index cef1c3361..c7fecae0b 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/NullAudioSink.java
@@ -10,41 +10,88 @@ public class NullAudioSink implements AudioSink {
return true;
}
+ private volatile float playSpeed = 1.0f;
+ private volatile boolean playRequested = false;
+
+ @Override
+ public final float getPlaySpeed() { return playSpeed; }
+
+ @Override
+ public final boolean setPlaySpeed(float rate) {
+ if( Math.abs(1.0f - rate) < 0.01f ) {
+ rate = 1.0f;
+ }
+ playSpeed = rate;
+ return true;
+ }
+
@Override
public AudioDataFormat getPreferredFormat() {
return DefaultFormat;
}
@Override
- public AudioDataFormat initSink(AudioDataFormat requestedFormat, int bufferCount) {
+ public AudioDataFormat initSink(AudioDataFormat requestedFormat, int frameCount) {
return requestedFormat;
}
@Override
+ public boolean isPlaying() {
+ return playRequested;
+ }
+
+ @Override
+ public void play() {
+ playRequested = true;
+ }
+
+ @Override
+ public void pause() {
+ playRequested = false;
+ }
+
+ @Override
+ public void flush() {
+ }
+
+ @Override
public void destroy() {
}
@Override
- public int getQueuedByteCount() {
+ public final int getEnqueuedFrameCount() {
return 0;
}
@Override
- public int getQueuedTime() {
+ public int getFrameCount() {
return 0;
}
-
+
@Override
- public int getWritableBufferCount() {
- return 1;
+ public int getQueuedFrameCount() {
+ return 0;
}
@Override
- public boolean isDataAvailable(int data_size) {
- return false;
+ public int getQueuedByteCount() {
+ return 0;
}
-
+
@Override
- public void writeData(AudioFrame audioFrame) {
+ public int getQueuedTime() {
+ return 0;
}
+
+ @Override
+ public final int getPTS() { return 0; }
+
+ @Override
+ public int getFreeFrameCount() {
+ return 1;
+ }
+
+ @Override
+ public void enqueueData(AudioFrame audioFrame) {
+ }
}