Diffstat (limited to 'src/jogl')
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java   |   5
-rw-r--r--  src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java       | 240
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java   | 279
-rw-r--r--  src/jogl/native/libav/ffmpeg_impl_template.c                    |  23
4 files changed, 328 insertions(+), 219 deletions(-)
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
index b0a645cbb..22a5cfb32 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
@@ -358,7 +358,10 @@ public interface GLMediaPlayer extends TextureSequence {
public void initStream(URI streamLoc, int vid, int aid, int textureCount) throws IllegalStateException, IllegalArgumentException;
/**
- * Returns the {@link StreamException} caught in the decoder thread, or <code>null</code>.
+ * Returns the {@link StreamException} caught in the decoder thread, or <code>null</code> if none occurred.
+ * <p>
+ * Method clears the cached {@link StreamException}, hence an immediate subsequent call will return <code>null</code>.
+ * </p>
* @see GLMediaEventListener#EVENT_CHANGE_ERR
* @see StreamException
*/
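
Since the getter now clears its cache, callers must treat it as one-shot. A minimal polling sketch (the player variable and surrounding code are hypothetical; only getStreamException() and StreamException come from this patch):

```java
// Poll once and keep the result - the cached exception is cleared on read.
final StreamException streamErr = player.getStreamException();
if( null != streamErr ) {
    streamErr.printStackTrace();
    // An immediate second call here would return null.
}
```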
diff --git a/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java b/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
index ecb6b60e8..1229eb7b8 100644
--- a/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
+++ b/src/jogl/classes/jogamp/opengl/openal/av/ALAudioSink.java
@@ -3,14 +3,14 @@
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
- *
+ *
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
- *
+ *
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
- *
+ *
* THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
@@ -20,7 +20,7 @@
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *
+ *
* The views and conclusions contained in the software and documentation are those of the
* authors and should not be interpreted as representing official policies, either expressed
* or implied, of JogAmp Community.
@@ -53,22 +53,22 @@ public class ALAudioSink implements AudioSink {
private static final ALC alc;
private static final AL al;
private static final ALExt alExt;
- private static final boolean staticAvailable;
-
+ private static final boolean staticAvailable;
+
private String deviceSpecifier;
private ALCdevice device;
private boolean hasSOFTBufferSamples;
- private AudioFormat preferredAudioFormat;
+ private AudioFormat preferredAudioFormat;
private ALCcontext context;
private final RecursiveLock lock = LockFactory.createRecursiveLock();
/** Playback speed, range [0.5 - 2.0], default 1.0. */
private float playSpeed;
private float volume = 1.0f;
-
+
static class ALAudioFrame extends AudioFrame {
private final int alBuffer;
-
+
ALAudioFrame(int alBuffer) {
this.alBuffer = alBuffer;
}
@@ -76,20 +76,20 @@ public class ALAudioSink implements AudioSink {
super(pts, duration, dataSize);
this.alBuffer = alBuffer;
}
-
+
/** Get this frame's OpenAL buffer name */
public final int getALBuffer() { return alBuffer; }
-
- public String toString() {
+
+ public String toString() {
return "ALAudioFrame[pts " + pts + " ms, l " + duration + " ms, " + byteSize + " bytes, buffer "+alBuffer+"]";
}
}
-
+
// private ALAudioFrame[] alFrames = null;
private int[] alBufferNames = null;
private int frameGrowAmount = 0;
private int frameLimit = 0;
-
+
private Ringbuffer<ALAudioFrame> alFramesAvail = null;
private Ringbuffer<ALAudioFrame> alFramesPlaying = null;
private volatile int alBufferBytesQueued = 0;
@@ -102,7 +102,7 @@ public class ALAudioSink implements AudioSink {
private int alSampleType;
private int alFormat;
private boolean initialized;
-
+
private volatile boolean playRequested = false;
static {
@@ -110,7 +110,7 @@ public class ALAudioSink implements AudioSink {
AL _al = null;
ALExt _alExt = null;
try {
- _alc = ALFactory.getALC();
+ _alc = ALFactory.getALC();
_al = ALFactory.getAL();
_alExt = ALFactory.getALExt();
} catch(Throwable t) {
@@ -124,41 +124,41 @@ public class ALAudioSink implements AudioSink {
alExt = _alExt;
staticAvailable = null != alc && null != al && null != alExt;
}
-
+
public ALAudioSink() {
initialized = false;
chosenFormat = null;
-
+
if( !staticAvailable ) {
return;
}
-
+
try {
// Get handle to default device.
device = alc.alcOpenDevice(null);
if (device == null) {
throw new RuntimeException("ALAudioSink: Error opening default OpenAL device");
}
-
+
// Get the device specifier.
deviceSpecifier = alc.alcGetString(device, ALC.ALC_DEVICE_SPECIFIER);
if (deviceSpecifier == null) {
throw new RuntimeException("ALAudioSink: Error getting specifier for default OpenAL device");
}
-
+
// Create audio context.
context = alc.alcCreateContext(device, null);
if (context == null) {
throw new RuntimeException("ALAudioSink: Error creating OpenAL context for "+deviceSpecifier);
}
-
+
lockContext();
try {
// Check for an error.
if ( alc.alcGetError(device) != ALC.ALC_NO_ERROR ) {
throw new RuntimeException("ALAudioSink: Error making OpenAL context current");
}
-
+
hasSOFTBufferSamples = al.alIsExtensionPresent(AL_SOFT_buffer_samples);
preferredAudioFormat = queryPreferredAudioFormat();
if( DEBUG ) {
@@ -168,7 +168,7 @@ public class ALAudioSink implements AudioSink {
System.out.println("ALAudioSink: hasSOFTBufferSamples "+hasSOFTBufferSamples);
System.out.println("ALAudioSink: preferredAudioFormat "+preferredAudioFormat);
}
-
+
// Create source
{
alSource = new int[1];
@@ -177,10 +177,10 @@ public class ALAudioSink implements AudioSink {
if( err != AL.AL_NO_ERROR ) {
alSource = null;
throw new RuntimeException("ALAudioSink: Error generating Source: 0x"+Integer.toHexString(err));
- }
+ }
}
-
- if( DEBUG ) {
+
+ if( DEBUG ) {
System.err.println("ALAudioSink: Using device: " + deviceSpecifier);
}
initialized = true;
@@ -195,7 +195,7 @@ public class ALAudioSink implements AudioSink {
destroy();
}
}
-
+
private final AudioFormat queryPreferredAudioFormat() {
int sampleRate = DefaultFormat.sampleRate;
final int[] value = new int[1];
@@ -205,7 +205,7 @@ public class ALAudioSink implements AudioSink {
}
return new AudioFormat(sampleRate, DefaultFormat.sampleSize, DefaultFormat.channelCount, DefaultFormat.signed, DefaultFormat.fixedP, DefaultFormat.planar, DefaultFormat.littleEndian);
}
-
+
private final void lockContext() {
lock.lock();
alc.alcMakeContextCurrent(context);
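
lockContext() couples a recursive lock with alcMakeContextCurrent(); every AL-touching method in this sink wraps its body in the same shape, as the hunks below show. Condensed:

```java
// Recurring pattern in ALAudioSink: make the OpenAL context current
// under the recursive lock, do the AL work, release in a finally block.
lockContext();
try {
    // ... alSource*/alBuffer* calls against the now-current context ...
} finally {
    unlockContext(); // resets the current context and unlocks
}
```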
@@ -236,12 +236,12 @@ public class ALAudioSink implements AudioSink {
lock.unlock();
}
}
-
+
@Override
public final String toString() {
final int alSrcName = null != alSource ? alSource[0] : 0;
final int alBuffersLen = null != alBufferNames ? alBufferNames.length : 0;
- final int ctxHash = context != null ? context.hashCode() : 0;
+ final int ctxHash = context != null ? context.hashCode() : 0;
return "ALAudioSink[init "+initialized+", playRequested "+playRequested+", device "+deviceSpecifier+", ctx "+toHexString(ctxHash)+", alSource "+alSrcName+
", chosen "+chosenFormat+
", al[chan "+ALHelpers.alChannelLayoutName(alChannelLayout)+", type "+ALHelpers.alSampleTypeName(alSampleType)+
@@ -250,12 +250,12 @@ public class ALAudioSink implements AudioSink {
"queued["+alFramesPlaying.size()+", apts "+getPTS()+", "+getQueuedTime() + " ms, " + alBufferBytesQueued+" bytes], "+
"queue[g "+frameGrowAmount+", l "+frameLimit+"]";
}
-
+
public final String getPerfString() {
final int alBuffersLen = null != alBufferNames ? alBufferNames.length : 0;
return "Play [buffer "+alFramesPlaying.size()+"/"+alBuffersLen+", apts "+getPTS()+", "+getQueuedTime() + " ms, " + alBufferBytesQueued+" bytes]";
}
-
+
@Override
public final AudioFormat getPreferredFormat() {
if( !staticAvailable ) {
@@ -263,7 +263,7 @@ public class ALAudioSink implements AudioSink {
}
return preferredAudioFormat;
}
-
+
@Override
public final int getMaxSupportedChannels() {
if( !staticAvailable ) {
@@ -271,14 +271,14 @@ public class ALAudioSink implements AudioSink {
}
return hasSOFTBufferSamples ? 8 : 2;
}
-
+
@Override
public final boolean isSupported(AudioFormat format) {
if( !staticAvailable ) {
return false;
}
if( format.planar || !format.littleEndian ) {
- // FIXME big-endian supported w/ SOFT where it's native format!
+ // FIXME big-endian supported w/ SOFT where it's native format!
return false;
}
final int alChannelLayout = ALHelpers.getDefaultALChannelLayout(format.channelCount);
@@ -286,7 +286,7 @@ public class ALAudioSink implements AudioSink {
final int alSampleType = ALHelpers.getALSampleType(format.sampleSize, format.signed, format.fixedP);
if( AL.AL_NONE != alSampleType ) {
lockContext();
- try {
+ try {
final int alFormat = ALHelpers.getALFormat(alChannelLayout, alSampleType, hasSOFTBufferSamples, al, alExt);
return AL.AL_NONE != alFormat;
} finally {
@@ -296,7 +296,7 @@ public class ALAudioSink implements AudioSink {
}
return false;
}
-
+
@Override
public final boolean init(AudioFormat requestedFormat, float frameDuration, int initialQueueSize, int queueGrowAmount, int queueLimit) {
if( !staticAvailable ) {
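
A usage sketch for the probe above, reusing the AudioFormat constructor order visible in queryPreferredAudioFormat() (the audioSink variable and the 44.1 kHz values are illustrative):

```java
// Probe for 16-bit signed, interleaved, little-endian stereo PCM at 44.1 kHz.
final AudioFormat cdFormat = new AudioFormat(44100, 16, 2,
        true  /* signed */, true  /* fixedP */,
        false /* planar */, true  /* littleEndian */);
final boolean ok = audioSink.isSupported(cdFormat);
// Planar or big-endian formats are rejected up front (see FIXME above).
```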
@@ -318,10 +318,10 @@ public class ALAudioSink implements AudioSink {
// Allocate buffers
destroyBuffers();
{
- final float useFrameDuration = frameDuration > 1f ? frameDuration : AudioSink.DefaultFrameDuration;
+ final float useFrameDuration = frameDuration > 1f ? frameDuration : AudioSink.DefaultFrameDuration;
final int initialFrameCount = requestedFormat.getFrameCount(
initialQueueSize > 0 ? initialQueueSize : AudioSink.DefaultInitialQueueSize, useFrameDuration);
- // frameDuration, int initialQueueSize, int queueGrowAmount, int queueLimit) {
+ // frameDuration, int initialQueueSize, int queueGrowAmount, int queueLimit) {
alBufferNames = new int[initialFrameCount];
al.alGenBuffers(initialFrameCount, alBufferNames, 0);
final int err = al.alGetError();
@@ -333,7 +333,7 @@ public class ALAudioSink implements AudioSink {
for(int i=0; i<initialFrameCount; i++) {
alFrames[i] = new ALAudioFrame(alBufferNames[i]);
}
-
+
alFramesAvail = new LFRingbuffer<ALAudioFrame>(alFrames);
alFramesPlaying = new LFRingbuffer<ALAudioFrame>(ALAudioFrame[].class, initialFrameCount);
this.frameGrowAmount = requestedFormat.getFrameCount(
@@ -344,11 +344,11 @@ public class ALAudioSink implements AudioSink {
} finally {
unlockContext();
}
-
+
chosenFormat = requestedFormat;
return true;
}
-
+
private static int[] concat(int[] first, int[] second) {
final int[] result = Arrays.copyOf(first, first.length + second.length);
System.arraycopy(second, 0, result, first.length, second.length);
@@ -360,7 +360,7 @@ public class ALAudioSink implements AudioSink {
System.arraycopy(second, 0, result, first.length, second.length);
return result;
} */
-
+
private boolean growBuffers() {
if( !alFramesAvail.isEmpty() || !alFramesPlaying.isFull() ) {
throw new InternalError("Buffers: Avail is !empty "+alFramesAvail+" or Playing is !full "+alFramesPlaying);
@@ -371,7 +371,7 @@ public class ALAudioSink implements AudioSink {
}
return false;
}
-
+
final int[] newALBufferNames = new int[frameGrowAmount];
al.alGenBuffers(frameGrowAmount, newALBufferNames, 0);
final int err = al.alGetError();
@@ -382,7 +382,7 @@ public class ALAudioSink implements AudioSink {
return false;
}
alBufferNames = concat(alBufferNames, newALBufferNames);
-
+
final ALAudioFrame[] newALBuffers = new ALAudioFrame[frameGrowAmount];
for(int i=0; i<frameGrowAmount; i++) {
newALBuffers[i] = new ALAudioFrame(newALBufferNames[i]);
@@ -399,7 +399,7 @@ public class ALAudioSink implements AudioSink {
}
return true;
}
-
+
private void destroyBuffers() {
if( !staticAvailable ) {
return;
@@ -422,7 +422,7 @@ public class ALAudioSink implements AudioSink {
alBufferNames = null;
}
}
-
+
@Override
public final void destroy() {
initialized = false;
@@ -445,7 +445,7 @@ public class ALAudioSink implements AudioSink {
}
alSource = null;
}
-
+
destroyBuffers();
} finally {
destroyContext();
@@ -459,22 +459,20 @@ public class ALAudioSink implements AudioSink {
t.printStackTrace();
}
}
- device = null;
+ device = null;
}
chosenFormat = null;
}
-
+
@Override
public final boolean isInitialized() {
return initialized;
}
-
- private final int dequeueBuffer(boolean flush, boolean wait) {
+
+ private final int dequeueBuffer(boolean wait) {
int alErr = AL.AL_NO_ERROR;
final int releaseBufferCount;
- if( flush ) {
- releaseBufferCount = alFramesPlaying.size();
- } else if( alBufferBytesQueued > 0 ) {
+ if( alBufferBytesQueued > 0 ) {
final int releaseBufferLimes = Math.max(1, alFramesPlaying.size() / 4 );
final int[] val=new int[1];
int i=0;
@@ -490,8 +488,8 @@ public class ALAudioSink implements AudioSink {
final int avgBufferDura = chosenFormat.getBytesDuration( alBufferBytesQueued / alFramesPlaying.size() );
final int sleep = Math.max(2, Math.min(100, releaseBufferLimes * avgBufferDura));
if( DEBUG || true ) {
- System.err.println(getThreadName()+": ALAudioSink: Dequeue.wait["+i+"]: avgBufferDura "+avgBufferDura+", releaseBufferLimes "+releaseBufferLimes+", sleep "+sleep+" ms, playImpl "+isPlayingImpl1()+", processed "+val[0]+", "+this);
- }
+ System.err.println(getThreadName()+": ALAudioSink: Dequeue.wait["+i+"]: avgBufferDura "+avgBufferDura+", releaseBufferLimes "+releaseBufferLimes+", sleep "+sleep+" ms, playImpl "+(AL.AL_PLAYING == getSourceState())+", processed "+val[0]+", "+this);
+ }
unlockContext();
try {
Thread.sleep( sleep - 1 );
@@ -511,7 +509,7 @@ public class ALAudioSink implements AudioSink {
al.alSourceUnqueueBuffers(alSource[0], releaseBufferCount, buffers, 0);
alErr = al.alGetError();
if( AL.AL_NO_ERROR != alErr ) {
- throw new RuntimeException("ALError "+toHexString(alErr)+" while dequeueing "+releaseBufferCount+" buffers. "+this);
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while dequeueing "+releaseBufferCount+" buffers. "+this);
}
for ( int i=0; i<releaseBufferCount; i++ ) {
final ALAudioFrame releasedBuffer = alFramesPlaying.get();
@@ -520,7 +518,7 @@ public class ALAudioSink implements AudioSink {
}
if( releasedBuffer.alBuffer != buffers[i] ) {
alFramesAvail.dump(System.err, "Avail-deq02-post");
- alFramesPlaying.dump(System.err, "Playi-deq02-post");
+ alFramesPlaying.dump(System.err, "Playi-deq02-post");
throw new InternalError("Buffer name mismatch: dequeued: "+buffers[i]+", released "+releasedBuffer+", "+this);
}
alBufferBytesQueued -= releasedBuffer.getByteSize();
@@ -528,17 +526,27 @@ public class ALAudioSink implements AudioSink {
throw new InternalError("Internal Error: "+this);
}
}
- if( flush && ( !alFramesAvail.isFull() || !alFramesPlaying.isEmpty() ) ) {
- alFramesAvail.dump(System.err, "Avail-deq03-post");
- alFramesPlaying.dump(System.err, "Playi-deq03-post");
- throw new InternalError("Flush failure: "+this);
- }
}
return releaseBufferCount;
}
-
+ private final void dequeueForceAll() {
+ while ( !alFramesPlaying.isEmpty() ) {
+ final ALAudioFrame releasedBuffer = alFramesPlaying.get();
+ if( null == releasedBuffer ) {
+ throw new InternalError("Internal Error: "+this);
+ }
+ alBufferBytesQueued -= releasedBuffer.getByteSize();
+ if( !alFramesAvail.put(releasedBuffer) ) {
+ throw new InternalError("Internal Error: "+this);
+ }
+ }
+ if( 0 != alBufferBytesQueued ) {
+ throw new InternalError("Internal Error: "+this);
+ }
+ }
+
private final int dequeueBuffer(boolean wait, int inPTS, int inDuration) {
- final int dequeuedBufferCount = dequeueBuffer( false /* flush */, wait );
+ final int dequeuedBufferCount = dequeueBuffer( wait );
final ALAudioFrame currentBuffer = alFramesPlaying.peek();
if( null != currentBuffer ) {
playingPTS = currentBuffer.getPTS();
@@ -552,12 +560,12 @@ public class ALAudioSink implements AudioSink {
}
return dequeuedBufferCount;
}
-
+
@Override
public final AudioFrame enqueueData(AudioDataFrame audioDataFrame) {
return enqueueData(audioDataFrame.getPTS(), audioDataFrame.getData(), audioDataFrame.getByteSize());
}
-
+
@Override
public final AudioFrame enqueueData(int pts, ByteBuffer bytes, int byteCount) {
if( !initialized || null == chosenFormat ) {
@@ -565,7 +573,7 @@ public class ALAudioSink implements AudioSink {
}
final ALAudioFrame alFrame;
int alErr = AL.AL_NO_ERROR;
-
+
// OpenAL consumes buffers in the background
// we first need to initialize the OpenAL buffers then
// start continuous playback.
@@ -575,7 +583,7 @@ public class ALAudioSink implements AudioSink {
if(al.alGetError() != AL.AL_NO_ERROR) {
throw new RuntimeException("ALError "+toHexString(alErr)+" while makeCurrent. "+this);
}
-
+
final int duration = chosenFormat.getBytesDuration(byteCount);
final boolean dequeueDone;
if( alFramesAvail.isEmpty() ) {
@@ -592,7 +600,7 @@ public class ALAudioSink implements AudioSink {
final boolean wait = isPlayingImpl0() && alFramesAvail.isEmpty(); // possible if grow failed or already exceeds its limit!
dequeueBuffer(wait, pts, duration);
}
-
+
alFrame = alFramesAvail.get();
if( null == alFrame ) {
alFramesAvail.dump(System.err, "Avail");
@@ -612,7 +620,7 @@ public class ALAudioSink implements AudioSink {
} else {
al.alBufferData(alFrame.alBuffer, alFormat, bytes, byteCount, chosenFormat.sampleRate);
}
-
+
final int[] alBufferNames = new int[] { alFrame.alBuffer };
al.alSourceQueueBuffers(alSource[0], 1, alBufferNames, 0);
alErr = al.alGetError();
@@ -621,7 +629,7 @@ public class ALAudioSink implements AudioSink {
}
alBufferBytesQueued += byteCount;
enqueuedFrameCount++;
-
+
playImpl(); // continue playing, fixes issue where we ran out of enqueued data!
} finally {
unlockContext();
@@ -640,28 +648,28 @@ public class ALAudioSink implements AudioSink {
return isPlayingImpl0();
} finally {
unlockContext();
- }
+ }
} else {
return false;
}
}
private final boolean isPlayingImpl0() {
if( playRequested ) {
- return isPlayingImpl1();
+ return AL.AL_PLAYING == getSourceState();
} else {
return false;
}
}
- private final boolean isPlayingImpl1() {
+ private final int getSourceState() {
final int[] val = new int[1];
al.alGetSourcei(alSource[0], AL.AL_SOURCE_STATE, val, 0);
final int alErr = al.alGetError();
if(al.alGetError() != AL.AL_NO_ERROR) {
- throw new RuntimeException("ALError "+toHexString(alErr)+" while querying isPlaying. "+this);
+ throw new RuntimeException("ALError "+toHexString(alErr)+" while querying SOURCE_STATE. "+this);
}
- return val[0] == AL.AL_PLAYING;
+ return val[0];
}
-
+
@Override
public final void play() {
if( !initialized || null == chosenFormat ) {
@@ -672,22 +680,22 @@ public class ALAudioSink implements AudioSink {
try {
playImpl();
if( DEBUG ) {
- System.err.println(getThreadName()+": ALAudioSink: PLAY playImpl "+isPlayingImpl1()+", "+this);
- }
+ System.err.println(getThreadName()+": ALAudioSink: PLAY playImpl "+(AL.AL_PLAYING == getSourceState())+", "+this);
+ }
} finally {
unlockContext();
- }
+ }
}
private final void playImpl() {
- if( playRequested && !isPlayingImpl1() ) {
+ if( playRequested && AL.AL_PLAYING != getSourceState() ) {
al.alSourcePlay(alSource[0]);
final int alErr = al.alGetError();
if(al.alGetError() != AL.AL_NO_ERROR) {
throw new RuntimeException("ALError "+toHexString(alErr)+" while start playing. "+this);
}
- }
+ }
}
-
+
@Override
public final void pause() {
if( !initialized || null == chosenFormat ) {
@@ -698,8 +706,8 @@ public class ALAudioSink implements AudioSink {
try {
pauseImpl();
if( DEBUG ) {
- System.err.println(getThreadName()+": ALAudioSink: PAUSE playImpl "+isPlayingImpl1()+", "+this);
- }
+ System.err.println(getThreadName()+": ALAudioSink: PAUSE playImpl "+(AL.AL_PLAYING == getSourceState())+", "+this);
+ }
} finally {
unlockContext();
}
@@ -716,7 +724,7 @@ public class ALAudioSink implements AudioSink {
}
}
private final void stopImpl() {
- if( isPlayingImpl0() ) {
+ if( AL.AL_STOPPED != getSourceState() ) {
playRequested = false;
al.alSourceStop(alSource[0]);
final int alErr = al.alGetError();
@@ -725,12 +733,12 @@ public class ALAudioSink implements AudioSink {
}
}
}
-
+
@Override
public final float getPlaySpeed() { return playSpeed; }
-
+
@Override
- public final boolean setPlaySpeed(float rate) {
+ public final boolean setPlaySpeed(float rate) {
if( !initialized || null == chosenFormat ) {
return false;
}
@@ -739,22 +747,22 @@ public class ALAudioSink implements AudioSink {
if( Math.abs(1.0f - rate) < 0.01f ) {
rate = 1.0f;
}
- if( 0.5f <= rate && rate <= 2.0f ) { // OpenAL limits
+ if( 0.5f <= rate && rate <= 2.0f ) { // OpenAL limits
playSpeed = rate;
al.alSourcef(alSource[0], AL.AL_PITCH, playSpeed);
return true;
- }
+ }
} finally {
unlockContext();
}
- return false;
+ return false;
}
-
+
@Override
public final float getVolume() {
- return volume;
+ return volume;
}
-
+
@Override
public final boolean setVolume(float v) {
if( !initialized || null == chosenFormat ) {
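
The snapping and range check above yield the following observable behavior (illustrative values; sink is assumed to be an initialized ALAudioSink):

```java
sink.setPlaySpeed(1.005f); // within +-0.01 of 1.0: snapped to 1.0f, returns true
sink.setPlaySpeed(1.5f);   // inside OpenAL's [0.5, 2.0] limit: AL_PITCH = 1.5
sink.setPlaySpeed(3.0f);   // outside the limit: no change, returns false
```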
@@ -767,17 +775,17 @@ public class ALAudioSink implements AudioSink {
} else if( Math.abs(1.0f - v) < 0.01f ) {
v = 1.0f;
}
- if( 0.0f <= v && v <= 1.0f ) { // OpenAL limits
+ if( 0.0f <= v && v <= 1.0f ) { // OpenAL limits
volume = v;
al.alSourcef(alSource[0], AL.AL_GAIN, v);
return true;
- }
+ }
} finally {
unlockContext();
}
- return false;
+ return false;
}
-
+
@Override
public final void flush() {
if( !initialized || null == chosenFormat ) {
@@ -787,28 +795,30 @@ public class ALAudioSink implements AudioSink {
try {
// pauseImpl();
stopImpl();
- dequeueBuffer( true /* flush */, false /* wait */ );
+ al.alSourcei(alSource[0], AL.AL_BUFFER, 0); // explicit force zero buffer!
+ dequeueBuffer( false /* wait */ );
+ dequeueForceAll();
if( alBufferNames.length != alFramesAvail.size() || alFramesPlaying.size() != 0 ) {
throw new InternalError("XXX: "+this);
}
if( DEBUG ) {
- System.err.println(getThreadName()+": ALAudioSink: FLUSH playImpl "+isPlayingImpl1()+", "+this);
- }
+ System.err.println(getThreadName()+": ALAudioSink: FLUSH playImpl "+(AL.AL_PLAYING == getSourceState())+", "+this);
+ }
} finally {
unlockContext();
- }
+ }
}
-
+
@Override
public final int getEnqueuedFrameCount() {
return enqueuedFrameCount;
}
-
+
@Override
public final int getFrameCount() {
return null != alBufferNames ? alBufferNames.length : 0;
}
-
+
@Override
public final int getQueuedFrameCount() {
if( !initialized || null == chosenFormat ) {
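
The flush path now runs three explicit steps instead of the removed flush mode of dequeueBuffer(). Condensed, with all names taken from the hunks above:

```java
stopImpl();                                  // stop the source first
al.alSourcei(alSource[0], AL.AL_BUFFER, 0);  // force-detach every queued buffer
dequeueBuffer( false /* wait */ );           // reclaim buffers AL reports processed
dequeueForceAll();                           // recycle frames still marked playing
// Post-condition: alFramesAvail full, alFramesPlaying empty,
// alBufferBytesQueued == 0 - otherwise an InternalError is raised.
```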
@@ -816,7 +826,7 @@ public class ALAudioSink implements AudioSink {
}
return alFramesPlaying.size();
}
-
+
@Override
public final int getFreeFrameCount() {
if( !initialized || null == chosenFormat ) {
@@ -824,7 +834,7 @@ public class ALAudioSink implements AudioSink {
}
return alFramesAvail.size();
}
-
+
@Override
public final int getQueuedByteCount() {
if( !initialized || null == chosenFormat ) {
@@ -832,7 +842,7 @@ public class ALAudioSink implements AudioSink {
}
return alBufferBytesQueued;
}
-
+
@Override
public final int getQueuedTime() {
if( !initialized || null == chosenFormat ) {
@@ -840,10 +850,10 @@ public class ALAudioSink implements AudioSink {
}
return chosenFormat.getBytesDuration(alBufferBytesQueued);
}
-
+
@Override
public final int getPTS() { return playingPTS; }
-
+
private static final String toHexString(int v) { return "0x"+Integer.toHexString(v); }
- private static final String getThreadName() { return Thread.currentThread().getName(); }
+ private static final String getThreadName() { return Thread.currentThread().getName(); }
}
diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
index 86e19c920..0de308cd1 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
@@ -138,6 +138,18 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected int displayedFrameCount = 0;
protected volatile int video_pts_last = 0;
+ /**
+ * Help detect EOS, limit is {@link #MAX_FRAMELESS_MS_UNTIL_EOS}.
+ * To be used either by getNextTexture(..) or StreamWorker for audio-only.
+ */
+ private int nullFrameCount = 0;
+ private int maxNullFrameCountUntilEOS = 0;
+ /**
+ * Help detect EOS, limit {@value} milliseconds without a valid frame.
+ */
+ private static final int MAX_FRAMELESS_MS_UNTIL_EOS = 5000;
+ private static final int MAX_FRAMELESS_UNTIL_EOS_DEFAULT = MAX_FRAMELESS_MS_UNTIL_EOS / 30; // default value assuming 30fps
+
/** See {@link #getAudioSink()}. Set by implementation if used from within {@link #initStreamImpl(int, int)}! */
protected AudioSink audioSink = null;
protected boolean audioSinkPlaySpeedSet = false;
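
Worked numbers for the heuristic, derived from the constants above and the fps handling in updateAttributes() further below:

```java
// Known fps, e.g. 30: frame_duration = 1000f/30 = 33.33 ms, (int) -> 33,
// so maxNullFrameCountUntilEOS = 5000 / 33 = 151 frameless polls until EOS.
// Unknown fps (fps == 0): the static default applies,
// MAX_FRAMELESS_UNTIL_EOS_DEFAULT = 5000 / 30 = 166.
```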
@@ -380,12 +392,20 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
if( null != streamWorker ) {
streamWorker.doPause();
}
+ // Adjust target ..
+ if( msec >= duration ) {
+ msec = duration - (int)Math.floor(frame_duration);
+ } else if( msec < 0 ) {
+ msec = 0;
+ }
pts1 = seekImpl(msec);
resetAVPTSAndFlush();
if( null != audioSink && State.Playing == _state ) {
audioSink.play(); // cont. w/ new data
}
- System.err.println("SEEK XXX: "+getPerfString());
+ if(DEBUG) {
+ System.err.println("Seek("+msec+"): "+getPerfString());
+ }
if( null != streamWorker ) {
streamWorker.doResume();
}
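
With the new clamping, out-of-range targets are folded back into the stream before seekImpl() runs. Illustrative numbers, assuming duration = 10000 ms and frame_duration ≈ 33.3 ms:

```java
// msec >= duration -> duration - floor(frame_duration): seek(12000) -> seekImpl(9967)
// msec <  0        -> 0:                                seek(-50)   -> seekImpl(0)
// otherwise        -> passed through unchanged
```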
@@ -507,6 +527,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
decodedFrameCount = 0;
presentedFrameCount = 0;
displayedFrameCount = 0;
+ nullFrameCount = 0;
+ maxNullFrameCountUntilEOS = MAX_FRAMELESS_UNTIL_EOS_DEFAULT;
this.streamLoc = streamLoc;
// Pre-parse for camera-input scheme
@@ -528,20 +550,17 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
this.vid = vid;
this.aid = aid;
if ( this.streamLoc != null ) {
- if( TEXTURE_COUNT_MIN < textureCount ) {
- streamWorker = new StreamWorker();
- } else {
- new Thread() {
- public void run() {
- try {
- initStreamImpl(vid, aid);
- } catch (Throwable t) {
- streamErr = new StreamException(t.getClass().getSimpleName()+" while initializing: "+GLMediaPlayerImpl.this.toString(), t);
- changeState(GLMediaEventListener.EVENT_CHANGE_ERR, GLMediaPlayer.State.Uninitialized);
- } // also initializes width, height, .. etc
- }
- }.start();
- }
+ new Thread() {
+ public void run() {
+ try {
+ // StreamWorker may be used, see API-doc of StreamWorker
+ initStreamImpl(vid, aid);
+ } catch (Throwable t) {
+ streamErr = new StreamException(t.getClass().getSimpleName()+" while initializing: "+GLMediaPlayerImpl.this.toString(), t);
+ changeState(GLMediaEventListener.EVENT_CHANGE_ERR, GLMediaPlayer.State.Uninitialized);
+ } // also initializes width, height, .. etc
+ }
+ }.start();
}
}
}
@@ -746,6 +765,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
protected TextureFrame cachedFrame = null;
protected long lastTimeMillis = 0;
+ private final boolean[] stGotVFrame = { false };
+
@Override
public final TextureFrame getNextTexture(GL gl) throws IllegalStateException {
synchronized( stateLock ) {
@@ -753,12 +774,9 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
throw new IllegalStateException("Instance not paused or playing: "+this);
}
if(State.Playing == state) {
- TextureFrame nextFrame = null;
boolean dropFrame = false;
try {
do {
- final long currentTimeMillis;
- final boolean playCached = null != cachedFrame;
final boolean droppedFrame;
if( dropFrame ) {
presentedFrameCount--;
@@ -767,24 +785,69 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
} else {
droppedFrame = false;
}
+ final boolean playCached = null != cachedFrame;
+ final int video_pts;
+ final boolean hasVideoFrame;
+ TextureFrame nextFrame;
if( playCached ) {
nextFrame = cachedFrame;
cachedFrame = null;
presentedFrameCount--;
- } else if( STREAM_ID_NONE != vid ) {
- if( null != videoFramesDecoded ) { // single threaded ? TEXTURE_COUNT_MIN == textureCount
+ video_pts = nextFrame.getPTS();
+ hasVideoFrame = true;
+ } else {
+ if( null != videoFramesDecoded ) {
+ // multi-threaded and video available
nextFrame = videoFramesDecoded.get();
+ if( null != nextFrame ) {
+ video_pts = nextFrame.getPTS();
+ hasVideoFrame = true;
+ } else {
+ video_pts = TimeFrameI.INVALID_PTS;
+ hasVideoFrame = false;
+ }
} else {
- nextFrame = getNextSingleThreaded(gl, lastFrame);
+ // single-threaded or audio-only
+ video_pts = getNextSingleThreaded(gl, lastFrame, stGotVFrame);
+ nextFrame = lastFrame;
+ hasVideoFrame = stGotVFrame[0];
}
}
- currentTimeMillis = Platform.currentTimeMillis();
- if( null != nextFrame ) {
- presentedFrameCount++;
- final int video_pts = nextFrame.getPTS();
- if( video_pts == TimeFrameI.END_OF_STREAM_PTS ) {
- pauseImpl(true, GLMediaEventListener.EVENT_CHANGE_EOS);
- } else if( video_pts != TimeFrameI.INVALID_PTS ) {
+ final long currentTimeMillis = Platform.currentTimeMillis();
+
+ if( TimeFrameI.END_OF_STREAM_PTS == video_pts ||
+ ( duration > 0 && duration <= video_pts ) || maxNullFrameCountUntilEOS <= nullFrameCount )
+ {
+ // EOS
+ if( DEBUG ) {
+ System.err.println( "AV-EOS (getNextTexture): EOS_PTS "+(TimeFrameI.END_OF_STREAM_PTS == video_pts)+", "+this);
+ }
+ pauseImpl(true, GLMediaEventListener.EVENT_CHANGE_EOS);
+
+ } else if( TimeFrameI.INVALID_PTS == video_pts ) { // no audio or video frame
+ if( null == videoFramesDecoded || !videoFramesDecoded.isEmpty() ) {
+ nullFrameCount++;
+ }
+ if( DEBUG ) {
+ final int audio_pts = getAudioPTSImpl();
+ final int audio_scr = (int) ( ( currentTimeMillis - audio_scr_t0 ) * playSpeed );
+ final int d_apts;
+ if( audio_pts != TimeFrameI.INVALID_PTS ) {
+ d_apts = audio_pts - audio_scr;
+ } else {
+ d_apts = 0;
+ }
+ final int video_scr = video_scr_pts + (int) ( ( currentTimeMillis - video_scr_t0 ) * playSpeed );
+ final int d_vpts = video_pts - video_scr;
+ System.err.println( "AV~: dT "+(currentTimeMillis-lastTimeMillis)+", nullFrames "+nullFrameCount+
+ getPerfStringImpl( video_scr, video_pts, d_vpts, audio_scr, audio_pts, d_apts, 0 ) + ", droppedFrame "+droppedFrame);
+ }
+ } else { // valid pts: has audio or video frame
+ nullFrameCount=0;
+
+ if( hasVideoFrame ) { // has video frame
+ presentedFrameCount++;
+
final int audio_pts = getAudioPTSImpl();
final int audio_scr = (int) ( ( currentTimeMillis - audio_scr_t0 ) * playSpeed );
final int d_apts;
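
The deltas computed here read as "how far each stream runs ahead of its system clock reference (SCR)". A worked example at playSpeed 1.0 with illustrative values:

```java
// audio_scr = (now - audio_scr_t0) * playSpeed; 2000 ms after the epoch,
// audio_pts = 1950 gives d_apts = 1950 - 2000 = -50 ms (audio lags by 50 ms).
// Analogously video_scr = video_scr_pts + (now - video_scr_t0) * playSpeed,
// and d_vpts = video_pts - video_scr feeds the drop/repeat decision below.
```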
@@ -838,29 +901,16 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
", avg dpy-fps "+avg_dpy_duration+" ms/f, maxD "+maxVideoDelay+" ms, "+_nextFrame+", playCached " + playCached + ", dropFrame "+dropFrame);
}
}
- } else if( DEBUG ) {
- System.err.println("Invalid PTS: "+nextFrame);
- }
- if( null != nextFrame && null != videoFramesFree ) {
- // Had frame and not single threaded ? (TEXTURE_COUNT_MIN < textureCount)
- final TextureFrame _lastFrame = lastFrame;
- lastFrame = nextFrame;
+ } // has video frame
+ } // has audio or video frame
+
+ if( null != videoFramesFree && null != nextFrame ) {
+ // Had frame and not single threaded ? (TEXTURE_COUNT_MIN < textureCount)
+ final TextureFrame _lastFrame = lastFrame;
+ lastFrame = nextFrame;
+ if( null != _lastFrame ) {
videoFramesFree.putBlocking(_lastFrame);
}
- } else if( DEBUG ) {
- final int video_pts = lastFrame.getPTS();
- final int audio_pts = getAudioPTSImpl();
- final int audio_scr = (int) ( ( currentTimeMillis - audio_scr_t0 ) * playSpeed );
- final int d_apts;
- if( audio_pts != TimeFrameI.INVALID_PTS ) {
- d_apts = audio_pts - audio_scr;
- } else {
- d_apts = 0;
- }
- final int video_scr = video_scr_pts + (int) ( ( currentTimeMillis - video_scr_t0 ) * playSpeed );
- final int d_vpts = video_pts - video_scr;
- System.err.println( "AV~: dT "+(currentTimeMillis-lastTimeMillis)+", "+
- getPerfStringImpl( video_scr, video_pts, d_vpts, audio_scr, audio_pts, d_apts, 0 ) + ", droppedFrame "+droppedFrame);
}
lastTimeMillis = currentTimeMillis;
} while( dropFrame );
@@ -897,24 +947,24 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
*/
protected abstract int getNextTextureImpl(GL gl, TextureFrame nextFrame);
- protected final TextureFrame getNextSingleThreaded(final GL gl, final TextureFrame nextFrame) throws InterruptedException {
+ protected final int getNextSingleThreaded(final GL gl, final TextureFrame nextFrame, boolean[] gotVFrame) throws InterruptedException {
+ final int pts;
if( STREAM_ID_NONE != vid ) {
preNextTextureImpl(gl);
- final int vPTS = getNextTextureImpl(gl, nextFrame);
+ pts = getNextTextureImpl(gl, nextFrame);
postNextTextureImpl(gl);
- if( TimeFrameI.INVALID_PTS != vPTS ) {
+ if( TimeFrameI.INVALID_PTS != pts ) {
newFrameAvailable(nextFrame, Platform.currentTimeMillis());
- return nextFrame;
+ gotVFrame[0] = true;
+ } else {
+ gotVFrame[0] = false;
}
} else {
// audio only
- final int vPTS = getNextTextureImpl(null, null);
- if( TimeFrameI.INVALID_PTS != vPTS && TimeFrameI.END_OF_STREAM_PTS == vPTS ) {
- // state transition incl. notification
- pauseImpl(true, GLMediaEventListener.EVENT_CHANGE_EOS);
- }
+ pts = getNextTextureImpl(null, null);
+ gotVFrame[0] = false;
}
- return null;
+ return pts;
}
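
On the caller side, the new out-parameter contract mirrors the stGotVFrame usage in getNextTexture(..) above; sketched:

```java
final boolean[] gotVFrame = { false };
final int pts = getNextSingleThreaded(gl, lastFrame, gotVFrame);
if( gotVFrame[0] ) {
    // pts stamps a fresh video frame, already decoded into lastFrame
} else {
    // audio-only or no frame: pts may be INVALID_PTS or END_OF_STREAM_PTS;
    // EOS handling now happens centrally in the caller.
}
```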
@@ -962,6 +1012,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
}
private void resetAVPTS() {
+ nullFrameCount = 0;
presentedFrameCount = 0;
displayedFrameCount = 0;
decodedFrameCount = 0;
@@ -984,6 +1035,11 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
}
+ /**
+ * After {@link GLMediaPlayerImpl#initStreamImpl(int, int) initStreamImpl(..)} is completed via
+ * {@link GLMediaPlayerImpl#updateAttributes(int, int, int, int, int, int, int, float, int, int, int, String, String) updateAttributes(..)},
+ * the latter decides whether StreamWorker is being used.
+ */
class StreamWorker extends Thread {
private volatile boolean isRunning = false;
private volatile boolean isActive = false;
@@ -998,14 +1054,23 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
/**
* Starts this daemon thread,
- * which initializes the stream first via {@link GLMediaPlayerImpl#initStreamImpl(int, int)} first.
* <p>
- * After stream initialization, this thread pauses!
+ * This thread pauses after it's started!
* </p>
**/
StreamWorker() {
setDaemon(true);
- start();
+ synchronized(this) {
+ start();
+ while( !isRunning ) {
+ this.notifyAll(); // wake-up startup-block
+ try {
+ this.wait(); // wait until started
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
}
private void makeCurrent(GLContext ctx) {
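
The constructor now performs a wait/notify startup handshake with run(), replacing the in-thread stream initialization that a later hunk removes. Reduced to its two halves:

```java
// Caller thread (constructor):
synchronized( this ) {
    start();
    while( !isRunning ) {
        this.notifyAll();    // wake run() in case it is already waiting
        try { this.wait(); } // block until run() signals startup
        catch (InterruptedException e) { e.printStackTrace(); }
    }
}
// Worker thread (top of run()):
synchronized ( this ) {
    isRunning = true;
    this.notifyAll();        // wake-up ctor()
}
```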
@@ -1073,7 +1138,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
shallPause = false;
if( Thread.currentThread() != this ) {
while( !isActive ) {
- this.notify(); // wake-up pause-block
+ this.notifyAll(); // wake-up pause-block
try {
this.wait(); // wait until resumed
} catch (InterruptedException e) {
@@ -1091,7 +1156,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
this.interrupt();
}
while( isRunning ) {
- this.notify(); // wake-up pause-block (opt)
+ this.notifyAll(); // wake-up pause-block (opt)
try {
this.wait(); // wait until stopped
} catch (InterruptedException e) {
@@ -1111,17 +1176,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
synchronized ( this ) {
isRunning = true;
- try {
- isBlocked = true;
- initStreamImpl(vid, aid);
- isBlocked = false;
- } catch (Throwable t) {
- streamErr = new StreamException(t.getClass().getSimpleName()+" while initializing: "+GLMediaPlayerImpl.this.toString(), t);
- isBlocked = false;
- isRunning = false;
- changeState(GLMediaEventListener.EVENT_CHANGE_ERR, GLMediaPlayer.State.Uninitialized);
- return; // end of thread!
- } // also initializes width, height, .. etc
+ this.notifyAll(); // wake-up ctor()
}
while( !shallStop ){
@@ -1133,7 +1188,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
while( shallPause && !shallStop ) {
isActive = false;
- this.notify(); // wake-up doPause()
+ this.notifyAll(); // wake-up doPause()
try {
this.wait(); // wait until resumed
} catch (InterruptedException e) {
@@ -1147,7 +1202,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
preNextTextureImpl(sharedGLCtx.getGL());
}
isActive = true;
- this.notify(); // wake-up doResume()
+ this.notifyAll(); // wake-up doResume()
}
}
if( !sharedGLCtxCurrent && null != sharedGLCtx ) {
@@ -1177,6 +1232,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
isBlocked = false;
final int vPTS = getNextTextureImpl(gl, nextFrame);
+ boolean audioEOS = false;
if( TimeFrameI.INVALID_PTS != vPTS ) {
if( null != nextFrame ) {
if( STREAM_WORKER_DELAY > 0 ) {
@@ -1189,13 +1245,30 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
nextFrame = null;
} else {
// audio only
- if( TimeFrameI.END_OF_STREAM_PTS == vPTS ) {
- // state transition incl. notification
- shallPause = true;
- isActive = false;
- pauseImpl(true, GLMediaEventListener.EVENT_CHANGE_EOS);
+ if( TimeFrameI.END_OF_STREAM_PTS == vPTS || ( duration > 0 && duration < vPTS ) ) {
+ audioEOS = true;
+ } else {
+ nullFrameCount = 0;
}
}
+ } else if( null == nextFrame ) {
+ // audio only
+ audioEOS = maxNullFrameCountUntilEOS <= nullFrameCount;
+ if( null == audioSink || 0 == audioSink.getEnqueuedFrameCount() ) {
+ nullFrameCount++;
+ }
+ }
+ if( audioEOS ) {
+ // state transition incl. notification
+ synchronized ( this ) {
+ shallPause = true;
+ isActive = false;
+ this.notifyAll(); // wake-up potential do*()
+ }
+ if( DEBUG ) {
+ System.err.println( "AV-EOS (StreamWorker): EOS_PTS "+(TimeFrameI.END_OF_STREAM_PTS == vPTS)+", "+GLMediaPlayerImpl.this);
+ }
+ pauseImpl(true, GLMediaEventListener.EVENT_CHANGE_EOS);
}
} catch (InterruptedException e) {
isBlocked = false;
@@ -1215,8 +1288,11 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
t.printStackTrace();
}
// state transition incl. notification
- shallPause = true;
- isActive = false;
+ synchronized ( this ) {
+ shallPause = true;
+ isActive = false;
+ this.notifyAll(); // wake-up potential do*()
+ }
pauseImpl(true, GLMediaEventListener.EVENT_CHANGE_ERR);
}
}
@@ -1229,12 +1305,12 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
destroySharedGL();
isRunning = false;
isActive = false;
- this.notify(); // wake-up doStop()
+ this.notifyAll(); // wake-up doStop()
}
}
}
static int StreamWorkerInstanceId = 0;
- private StreamWorker streamWorker = null;
+ private volatile StreamWorker streamWorker = null;
private volatile StreamException streamErr = null;
protected final int addStateEventMask(int event_mask, State newState) {
@@ -1280,7 +1356,9 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
int bps_video, int bps_audio, float fps,
int videoFrames, int audioFrames, int duration, String vcodec, String acodec) {
int event_mask = 0;
- if( state == State.Uninitialized ) {
+ final boolean wasUninitialized = state == State.Uninitialized;
+
+ if( wasUninitialized ) {
event_mask |= GLMediaEventListener.EVENT_CHANGE_INIT;
state = State.Initialized;
}
@@ -1306,7 +1384,13 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
if( this.fps != fps ) {
event_mask |= GLMediaEventListener.EVENT_CHANGE_FPS;
this.fps = fps;
- this.frame_duration = 1000f / fps;
+ if( 0 != fps ) {
+ this.frame_duration = 1000f / fps;
+ this.maxNullFrameCountUntilEOS = MAX_FRAMELESS_MS_UNTIL_EOS / (int)this.frame_duration;
+ } else {
+ this.frame_duration = 0;
+ this.maxNullFrameCountUntilEOS = MAX_FRAMELESS_UNTIL_EOS_DEFAULT;
+ }
}
if( this.bps_stream != bps_stream || this.bps_video != bps_video || this.bps_audio != bps_audio ) {
event_mask |= GLMediaEventListener.EVENT_CHANGE_BPS;
@@ -1331,6 +1415,17 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
if(0==event_mask) {
return;
}
+ if( wasUninitialized ) {
+ if( null != streamWorker ) {
+ throw new InternalError("XXX: StreamWorker not null - "+this);
+ }
+ if( TEXTURE_COUNT_MIN < textureCount || STREAM_ID_NONE == vid ) { // Enable StreamWorker for 'audio only' as well (Bug 918).
+ streamWorker = new StreamWorker();
+ }
+ if( DEBUG ) {
+ System.err.println("XXX Initialize @ updateAttributes: "+this);
+ }
+ }
attributesUpdated(event_mask);
}
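
The worker-creation decision, now made exactly once after initialization, can be summarized as:

```java
// StreamWorker creation in updateAttributes() (Bug 918):
//   TEXTURE_COUNT_MIN < textureCount -> worker decodes video off-thread
//   STREAM_ID_NONE == vid            -> audio-only: worker drives the AudioSink
//                                       and the frameless-EOS detection
//   otherwise                        -> no worker; the caller's GL thread
//                                       decodes via getNextTexture(..)
```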
@@ -1426,9 +1521,9 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
final int decVideoFrames = null != videoFramesDecoded ? videoFramesDecoded.size() : 0;
final int video_scr = video_scr_pts + (int) ( ( Platform.currentTimeMillis() - video_scr_t0 ) * playSpeed );
final String camPath = null != cameraPath ? ", camera: "+cameraPath : "";
- return "GLMediaPlayer["+state+", vSCR "+video_scr+", frames[p "+presentedFrameCount+", d "+decodedFrameCount+", t "+videoFrames+" ("+tt+" s)], "+
- "speed "+playSpeed+", "+bps_stream+" bps, "+
- "Texture[count "+textureCount+", free "+freeVideoFrames+", dec "+decVideoFrames+", tagt "+toHexString(textureTarget)+", ifmt "+toHexString(textureInternalFormat)+", fmt "+toHexString(textureFormat)+", type "+toHexString(textureType)+"], "+
+ return "GLMediaPlayer["+state+", vSCR "+video_scr+", frames[p "+presentedFrameCount+", d "+decodedFrameCount+", t "+videoFrames+" ("+tt+" s), z "+nullFrameCount+" / "+maxNullFrameCountUntilEOS+"], "+
+ "speed "+playSpeed+", "+bps_stream+" bps, hasSW "+(null!=streamWorker)+
+ ", Texture[count "+textureCount+", free "+freeVideoFrames+", dec "+decVideoFrames+", tagt "+toHexString(textureTarget)+", ifmt "+toHexString(textureInternalFormat)+", fmt "+toHexString(textureFormat)+", type "+toHexString(textureType)+"], "+
"Video[id "+vid+", <"+vcodec+">, "+width+"x"+height+", glOrient "+isInGLOrientation+", "+fps+" fps, "+frame_duration+" fdur, "+bps_video+" bps], "+
"Audio[id "+aid+", <"+acodec+">, "+bps_audio+" bps, "+audioFrames+" frames], uri "+loc+camPath+"]";
}
@@ -1462,7 +1557,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
freeVideoFrames = 0;
decVideoFrames = 0;
}
- return state+", frames[(p "+presentedFrameCount+", d "+decodedFrameCount+") / "+videoFrames+", "+tt+" s], "+
+ return state+", frames[(p "+presentedFrameCount+", d "+decodedFrameCount+") / "+videoFrames+", "+tt+" s, z "+nullFrameCount+" / "+maxNullFrameCountUntilEOS+"], "+
"speed " + playSpeed+", dAV "+( d_vpts - d_apts )+", vSCR "+video_scr+", vpts "+video_pts+", dSCR["+d_vpts+", avrg "+video_dpts_avg_diff+"], "+
"aSCR "+audio_scr+", apts "+audio_pts+" ( "+d_apts+" ), "+audioSinkInfo+
", Texture[count "+textureCount+", free "+freeVideoFrames+", dec "+decVideoFrames+"]";
diff --git a/src/jogl/native/libav/ffmpeg_impl_template.c b/src/jogl/native/libav/ffmpeg_impl_template.c
index 24fddd2c0..44acfe46a 100644
--- a/src/jogl/native/libav/ffmpeg_impl_template.c
+++ b/src/jogl/native/libav/ffmpeg_impl_template.c
@@ -846,14 +846,9 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
pAV->frames_audio = pAV->pAStream->nb_frames;
pAV->aSinkSupport = _isAudioFormatSupported(env, pAV->ffmpegMediaPlayer, pAV->aSampleFmt, pAV->aSampleRate, pAV->aChannels);
if( pAV->verbose ) {
- fprintf(stderr, "A channels %d [l %"PRId64"], sample_rate %d, frame_size %d, frame_number %d, [afps %f, rfps %f, cfps %f, sfps %f], nb_frames %"PRId64", [maxChan %d, prefRate %d, req_chan_layout %"PRId64", req_chan %d], sink-support %d \n",
+ fprintf(stderr, "A channels %d [l %"PRId64"], sample_rate %d, frame_size %d, frame_number %d, [afps %f, cfps %f, sfps %f], nb_frames %"PRId64", [maxChan %d, prefRate %d, req_chan_layout %"PRId64", req_chan %d], sink-support %d \n",
pAV->aChannels, pAV->pACodecCtx->channel_layout, pAV->aSampleRate, pAV->aFrameSize, pAV->pACodecCtx->frame_number,
my_av_q2f(pAV->pAStream->avg_frame_rate),
- #if LIBAVCODEC_VERSION_MAJOR < 55
- my_av_q2f(pAV->pVStream->r_frame_rate),
- #else
- 0.0f,
- #endif
my_av_q2f_r(pAV->pAStream->codec->time_base),
my_av_q2f_r(pAV->pAStream->time_base),
pAV->pAStream->nb_frames,
@@ -1149,6 +1144,11 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0)
pkt_odata = packet.data;
pkt_osize = packet.size;
if( AVERROR_EOF == avRes || ( pAV->pFormatCtx->pb && pAV->pFormatCtx->pb->eof_reached ) ) {
+ if( pAV->verbose ) {
+ fprintf(stderr, "EOS: avRes[res %d, eos %d], pb-EOS %d\n",
+ avRes, AVERROR_EOF == avRes,
+ ( pAV->pFormatCtx->pb && pAV->pFormatCtx->pb->eof_reached ) );
+ }
resPTS = END_OF_STREAM_PTS;
} else if( 0 <= avRes ) {
if( pAV->verbose ) {
@@ -1480,15 +1480,16 @@ JNIEXPORT jint JNICALL FF_FUNC(seek0)
(JNIEnv *env, jobject instance, jlong ptr, jint pos1)
{
const FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr));
- const int64_t pos0 = pAV->vPTS;
- int64_t pts0;
+ int64_t pos0, pts0;
int streamID;
AVRational time_base;
if( pAV->vid >= 0 ) {
+ pos0 = pAV->vPTS;
streamID = pAV->vid;
time_base = pAV->pVStream->time_base;
pts0 = pAV->pVFrame->pkt_pts;
} else if( pAV->aid >= 0 ) {
+ pos0 = pAV->aPTS;
streamID = pAV->aid;
time_base = pAV->pAStream->time_base;
pts0 = pAV->pAFrames[pAV->aFrameCurrent]->pkt_pts;
@@ -1498,16 +1499,16 @@ JNIEXPORT jint JNICALL FF_FUNC(seek0)
int64_t pts1 = (int64_t) (pos1 * (int64_t) time_base.den)
/ (1000 * (int64_t) time_base.num);
if(pAV->verbose) {
- fprintf(stderr, "SEEK: vid %d, aid %d, pos1 %d, pts: %"PRId64" -> %"PRId64"\n", pAV->vid, pAV->aid, pos1, pts0, pts1);
+ fprintf(stderr, "SEEK: vid %d, aid %d, pos0 %d, pos1 %d, pts: %"PRId64" -> %"PRId64"\n", pAV->vid, pAV->aid, pos0, pos1, pts0, pts1);
}
int flags = 0;
if(pos1 < pos0) {
flags |= AVSEEK_FLAG_BACKWARD;
}
- int res;
+ int res = -2;
if(HAS_FUNC(sp_av_seek_frame)) {
if(pAV->verbose) {
- fprintf(stderr, "SEEK.0: pre : s %"PRId64" / %"PRId64" -> t %d / %"PRId64"\n", pos0, pts0, pos1, pts1);
+ fprintf(stderr, "SEEK.0: pre : s %d / %"PRId64" -> t %d / %"PRId64"\n", pos0, pts0, pos1, pts1);
}
sp_av_seek_frame(pAV->pFormatCtx, streamID, pts1, flags);
} else if(HAS_FUNC(sp_avformat_seek_file)) {
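
For reference, the millisecond-to-stream-time conversion used by seek0 above, worked through with an assumed 1/90000 time base:

```java
// pts1 = (pos1 * time_base.den) / (1000 * time_base.num)
// e.g. pos1 = 2500 ms, time_base = 1/90000:
//      pts1 = 2500 * 90000 / (1000 * 1) = 225000 stream ticks
```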