author     Xerxes Rånby <[email protected]>    2013-06-19 15:29:37 +0200
committer  Xerxes Rånby <[email protected]>    2013-06-19 15:29:37 +0200
commit     2f85c5c0fa7518287d2584dc0a586064559c6827 (patch)
tree       5aa049338e81731fbfce71445276aeb0147c0d9d /src
parent     5b77c7f345d1e36c152d70982db0e022be6cfa2a (diff)
parent     32880bae630a43540fbe36fe2c787f2fdd5d2380 (diff)
Merge remote-tracking branch 'xranby/github/FFMPEGMediaPlayer-audio' into FFMPEGMediaPlayer
Diffstat (limited to 'src')
3 files changed, 273 insertions, 33 deletions
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
index 883c13f51..0971ac78e 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
@@ -57,7 +57,7 @@ import com.jogamp.common.util.RunnableExecutor;
 class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
     private static List<String> glueLibNames = new ArrayList<String>(); // none
 
-    private static final int symbolCount = 31;
+    private static final int symbolCount = 32;
     private static String[] symbolNames = {
          "avcodec_version",
          "avformat_version",
@@ -80,7 +80,8 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
          // libavutil
          "av_pix_fmt_descriptors",
          "av_free",
-/* 18 */ "av_get_bits_per_pixel",
+         "av_get_bits_per_pixel",
+/* 19 */ "av_samples_get_buffer_size",
 
          // libavformat
          "avformat_alloc_context",
@@ -95,7 +96,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
          "avformat_network_init",        // 53.13.0 (opt)
          "avformat_network_deinit",      // 53.13.0 (opt)
          "avformat_find_stream_info",    // 53.3.0  (opt)
-/* 29 */ "av_find_stream_info",
+/* 32 */ "av_find_stream_info",
     };
 
     // alternate symbol names
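The newly registered av_samples_get_buffer_size symbol reports how many bytes one decoded audio frame occupies. For the packed, 16-bit signed layout this patch works with elsewhere, that size reduces to a simple product. The helper below is a hypothetical Java sketch of that arithmetic, not part of the patch; it ignores planar sample formats and alignment padding:

    // Hypothetical helper: byte size of one interleaved 16-bit PCM frame,
    // i.e. what av_samples_get_buffer_size(NULL, ch, n, AV_SAMPLE_FMT_S16, 1) returns.
    static int interleavedS16BufferSize(int channels, int nbSamples) {
        final int bytesPerSample = 2; // 16-bit signed PCM
        return nbSamples * channels * bytesPerSample;
    }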
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
index 4be2bcb58..68914639d 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
@@ -36,6 +36,10 @@ import javax.media.opengl.GL;
 import javax.media.opengl.GL2ES2;
 import javax.media.opengl.GLException;
 
+import java.util.Arrays;
+import java.util.Queue;
+import javax.sound.sampled.*;
+
 import com.jogamp.common.util.VersionNumber;
 import com.jogamp.gluegen.runtime.ProcAddressTable;
 import com.jogamp.opengl.util.GLPixelStorageModes;
@@ -101,10 +105,38 @@ import jogamp.opengl.util.av.EGLMediaPlayerImpl;
  * </ul>
  */
 public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
+
+    // Count of zeroed buffers to return before switching to real sample provider
+    private static final int TEMP_BUFFER_COUNT = 20;
+
+    // AudioFormat parameters
+    public static final int SAMPLE_RATE = 44100;
+    private static final int SAMPLE_SIZE = 16;
+    private static final int CHANNELS = 2;
+    private static final boolean SIGNED = true;
+    private static final boolean BIG_ENDIAN = false;
+
+    // Chunk of audio processed at one time
+    public static final int BUFFER_SIZE = 1000;
+    public static final int SAMPLES_PER_BUFFER = BUFFER_SIZE / 2;
+
+    // Sample time values
+    public static final double SAMPLE_TIME_IN_SECS = 1.0 / SAMPLE_RATE;
+    public static final double BUFFER_TIME_IN_SECS = SAMPLE_TIME_IN_SECS * SAMPLES_PER_BUFFER;
+
+    // Instance data
+    private static AudioFormat format;
+    private static DataLine.Info info;
+    private static SourceDataLine auline;
+    private static int bufferCount;
+    private static byte [] sampleData = new byte[BUFFER_SIZE];
+
+    private static int maxAvailableAudio;
+
     public static final VersionNumber avUtilVersion;
     public static final VersionNumber avFormatVersion;
     public static final VersionNumber avCodecVersion;
-    static final boolean available;
+    static boolean available;
 
     static {
         if(FFMPEGDynamicLibraryBundleInfo.initSingleton()) {
@@ -114,7 +146,29 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
             System.err.println("LIB_AV Util : "+avUtilVersion);
             System.err.println("LIB_AV Format: "+avFormatVersion);
             System.err.println("LIB_AV Codec : "+avCodecVersion);
-            available = initIDs0();
+            if(initIDs0()) {
+                // init audio
+                // Create the audio format we wish to use
+                format = new AudioFormat(SAMPLE_RATE, SAMPLE_SIZE, CHANNELS, SIGNED, BIG_ENDIAN);
+
+                // Create dataline info object describing line format
+                info = new DataLine.Info(SourceDataLine.class, format);
+
+                // Clear buffer initially
+                Arrays.fill(sampleData, (byte) 0);
+                try{
+                    // Get line to write data to
+                    auline = (SourceDataLine) AudioSystem.getLine(info);
+                    auline.open(format);
+                    auline.start();
+                    maxAvailableAudio = auline.available();
+                    available = true;
+                } catch (LineUnavailableException e){
+                    maxAvailableAudio = 0;
+                    available = false;
+                }
+            }
+
         } else {
             avUtilVersion = null;
             avFormatVersion = null;
@@ -213,6 +267,83 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
             throw new InternalError("Unknown ProcAddressTable: "+pt.getClass().getName()+" of "+ctx.getClass().getName());
         }
     }
+
+    private class AudioFrame {
+        final byte[] sampleData;
+        final int data_size;
+        final int audio_pts;
+        AudioFrame(byte[] sampleData, int data_size, int audio_pts) {
+            this.sampleData=sampleData;
+            this.data_size=data_size;
+            this.audio_pts=audio_pts;
+        }
+    }
+
+    static final Queue<AudioFrame> audioFrameBuffer = new java.util.LinkedList<AudioFrame>();
+
+    private void updateSound(byte[] sampleData, int data_size, int audio_pts) {
+/*
+        // Visualize incomming data
+        int c=0;
+        for(byte b: sampleData){
+            if(b<0) {
+                System.out.print(" ");
+            } else if(b<64) {
+                System.out.print("_");
+            } else if(b < 128) {
+                System.out.print("-");
+            } else if(b == 128) {
+                System.out.print("=");
+            } else if(b < 256-64) {
+                System.out.print("\"");
+            } else {
+                System.out.print("'");
+            }
+
+            c++;
+            if(c>=40)
+                break;
+        }
+        System.out.println("jA");
+*/
+
+        //TODO reduce GC
+        audioFrameBuffer.add(new AudioFrame(sampleData, data_size, audio_pts));
+        pumpAudio();
+    }
+
+    private void pumpAudio() {
+        if(auline.available()==maxAvailableAudio){
+            System.out.println("warning: audio buffer underrun");
+        }
+        while(audioFrameBuffer.peek()!=null){
+            AudioFrame a = audioFrameBuffer.peek();
+
+            // poor mans audio sync .. TODO: off thread
+            final long now = System.currentTimeMillis();
+            final long now_d = now - lastAudioTime;
+            final long pts_d = a.audio_pts - lastAudioPTS;
+            final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
+
+            System.err.println("s: pts-a "+a.audio_pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
+            lastAudioTime = now;
+            if( (dt<audio_dt_d ) && auline.available()>a.data_size ) {
+                audioFrameBuffer.poll(); /* remove first item from the queue */
+                int written = 0;
+                int len;
+                int data_size = a.data_size;
+                while (data_size > 0) {
+                    len = auline.write(a.sampleData, written, data_size);
+                    data_size -= len;
+                    written += len;
+                }
+                lastAudioPTS=a.audio_pts;
+            } else {
+                break;
+            }
+        }
+    }
+
     private void updateAttributes2(int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane,
                                    int lSz0, int lSz1, int lSz2,
                                    int tWd0, int tWd1, int tWd2) {
@@ -358,6 +489,9 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
         int pts0 = getVideoPTS0(moviePtr);
         int pts1 = seek0(moviePtr, msec);
         System.err.println("Seek: "+pts0+" -> "+msec+" : "+pts1);
+        audioFrameBuffer.clear();
+        lastAudioPTS=pts1;
+        lastVideoPTS=pts1;
         return pts1;
     }
 
@@ -366,9 +500,12 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
         return lastTex;
     }
 
+    private long lastAudioTime = 0;
+    private int lastAudioPTS = 0;
+    private static final int audio_dt_d = 400;
     private long lastVideoTime = 0;
     private int lastVideoPTS = 0;
-    private static final int dt_d = 9;
+    private static final int video_dt_d = 9;
 
     @Override
     protected TextureSequence.TextureFrame getNextTextureImpl(GL gl, boolean blocking) {
@@ -382,7 +519,15 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
                 gl.glActiveTexture(GL.GL_TEXTURE0+getTextureUnit());
                 tex.enable(gl);
                 tex.bind(gl);
-                readNextPacket0(moviePtr, procAddrGLTexSubImage2D, textureTarget, textureFormat, textureType);
+
+                /* try decode 10 packets to find one containing video
+                   (res == 2) */
+                int res = 0;
+                int retry = 10;
+                while(res!=2 && retry >= 0) {
+                    res = readNextPacket0(moviePtr, procAddrGLTexSubImage2D, textureTarget, textureFormat, textureType);
+                    retry--;
+                }
             } finally {
                 psm.restore(gl);
             }
@@ -390,19 +535,23 @@ public class FFMPEGMediaPlayer extends EGLMediaPlayerImpl {
             if(blocking) {
                 // poor mans video sync .. TODO: off thread 'readNextPackage0(..)' on shared GLContext and multi textures/unit!
                 final long now = System.currentTimeMillis();
-                final long now_d = now - lastVideoTime;
-                final long pts_d = pts - lastVideoPTS;
-                final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
+                // Try sync video to audio
+                final long now_d = now - lastAudioTime;
+                final long pts_d = pts - lastAudioPTS - 444; /* hack 444 == play video 444ms ahead of audio */
+                final long dt = Math.min(47, (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ) ;
+                //final long dt = (long) ( (float) ( pts_d - now_d ) / getPlaySpeed() ) ;
                 lastVideoTime = now;
-                // System.err.println("s: pts-v "+pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
-                if(dt>dt_d) {
+                System.err.println("s: pts-v "+pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
+
+                if(dt>video_dt_d && dt<1000 && auline.available()<maxAvailableAudio-10000) {
                     try {
-                        Thread.sleep(dt-dt_d);
+                        Thread.sleep(dt-video_dt_d);
                     } catch (InterruptedException e) { }
                 } /* else if(0>pts_d) {
                     System.err.println("s: pts-v "+pts+", pts-d "+pts_d+", now_d "+now_d+", dt "+dt);
                 } */
             }
+            pumpAudio();
             lastVideoPTS = pts;
         }
         return lastTex;
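On the Java side the patch queues decoded frames and plays them through a javax.sound.sampled SourceDataLine. The class below is a minimal, self-contained sketch of that playback path using the same format parameters (44100 Hz, 16-bit signed, stereo, little-endian); the class name and the generated sine-tone payload are illustrative only, since the real player writes decoded FFmpeg samples from pumpAudio() instead:

    import javax.sound.sampled.AudioFormat;
    import javax.sound.sampled.AudioSystem;
    import javax.sound.sampled.DataLine;
    import javax.sound.sampled.LineUnavailableException;
    import javax.sound.sampled.SourceDataLine;

    public class SourceDataLineSketch {
        public static void main(String[] args) throws LineUnavailableException {
            // Same parameters as the patch: 44.1 kHz, 16-bit signed, stereo, little-endian.
            final AudioFormat format = new AudioFormat(44100, 16, 2, true, false);
            final DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);

            final SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
            line.open(format);
            line.start();

            // Illustrative payload: one second of a 440 Hz tone.
            final byte[] data = new byte[44100 * 2 * 2];
            for (int i = 0; i < 44100; i++) {
                final short s = (short) (Math.sin(2 * Math.PI * 440 * i / 44100.0) * 8000);
                data[4 * i]     = (byte) (s & 0xff);        // left, low byte (little-endian)
                data[4 * i + 1] = (byte) ((s >> 8) & 0xff); // left, high byte
                data[4 * i + 2] = data[4 * i];              // right channel copies left
                data[4 * i + 3] = data[4 * i + 1];
            }

            // Write loop that tolerates partial writes, as pumpAudio() does.
            int written = 0;
            while (written < data.length) {
                written += line.write(data, written, data.length - written);
            }
            line.drain();
            line.close();
        }
    }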
diff --git a/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c b/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c
index 28e9e852c..d93caa30b 100644
--- a/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c
+++ b/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c
@@ -38,6 +38,7 @@ typedef void (APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLi
 static const char * const ClazzNameFFMPEGMediaPlayer = "jogamp/opengl/util/av/impl/FFMPEGMediaPlayer";
 
 static jclass ffmpegMediaPlayerClazz = NULL;
+static jmethodID jni_mid_updateSound = NULL;
 static jmethodID jni_mid_updateAttributes1 = NULL;
 static jmethodID jni_mid_updateAttributes2 = NULL;
 
@@ -84,10 +85,12 @@ static AVCODEC_DECODE_VIDEO2 sp_avcodec_decode_video2; // 52.23.0
 // libavutil
 typedef void (APIENTRYP AV_FREE)(void *ptr);
 typedef int (APIENTRYP AV_GET_BITS_PER_PIXEL)(const AVPixFmtDescriptor *pixdesc);
+typedef int (APIENTRYP AV_SAMPLES_GET_BUFFER_SIZE)(int *linesize, int nb_channels, int nb_samples, enum AVSampleFormat sample_fmt, int align);
 static const AVPixFmtDescriptor* sp_av_pix_fmt_descriptors;
 static AV_FREE sp_av_free;
 static AV_GET_BITS_PER_PIXEL sp_av_get_bits_per_pixel;
-// count: 18
+static AV_SAMPLES_GET_BUFFER_SIZE sp_av_samples_get_buffer_size;
+// count: 19
 
 // libavformat
 typedef AVFormatContext *(APIENTRYP AVFORMAT_ALLOC_CONTEXT)(void);
@@ -117,9 +120,9 @@ static AVFORMAT_NETWORK_INIT sp_avformat_network_init; // 53.13.0
 static AVFORMAT_NETWORK_DEINIT sp_avformat_network_deinit; // 53.13.0
 static AVFORMAT_FIND_STREAM_INFO sp_avformat_find_stream_info; // 53.3.0
 static AV_FIND_STREAM_INFO sp_av_find_stream_info;
-// count: 31
+// count: 32
 
-#define SYMBOL_COUNT 31
+#define SYMBOL_COUNT 32
 
 JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGDynamicLibraryBundleInfo_initSymbols0
   (JNIEnv *env, jclass clazz, jobject jSymbols, jint count)
@@ -159,7 +162,8 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGDynamicLibraryB
     sp_av_pix_fmt_descriptors = (const AVPixFmtDescriptor*) (intptr_t) symbols[i++];
     sp_av_free = (AV_FREE) (intptr_t) symbols[i++];
     sp_av_get_bits_per_pixel = (AV_GET_BITS_PER_PIXEL) (intptr_t) symbols[i++];
-    // count: 18
+    sp_av_samples_get_buffer_size = (AV_SAMPLES_GET_BUFFER_SIZE) (intptr_t) symbols[i++];
+    // count: 19
 
     sp_avformat_alloc_context = (AVFORMAT_ALLOC_CONTEXT) (intptr_t) symbols[i++];;
     sp_avformat_free_context = (AVFORMAT_FREE_CONTEXT) (intptr_t) symbols[i++];
@@ -174,7 +178,7 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGDynamicLibraryB
     sp_avformat_network_deinit = (AVFORMAT_NETWORK_DEINIT) (intptr_t) symbols[i++];
     sp_avformat_find_stream_info = (AVFORMAT_FIND_STREAM_INFO) (intptr_t) symbols[i++];
     sp_av_find_stream_info = (AV_FIND_STREAM_INFO) (intptr_t) symbols[i++];
-    // count: 31
+    // count: 32
 
     (*env)->ReleasePrimitiveArrayCritical(env, jSymbols, symbols, 0);
 
@@ -188,6 +192,41 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGDynamicLibraryB
     return JNI_TRUE;
 }
 
+static void _updateSound(JNIEnv *env, jobject instance, int8_t *data, int32_t data_size, int32_t aPTS) {
+    if(NULL!=env) {
+        jbyteArray jbArray = (*env)->NewByteArray(env, data_size);
+        if (jbArray == NULL) {
+            fprintf(stderr, "FFMPEGMediaPlayer out of memory at native code _updateSound");
+            return; /* out of memory error thrown */
+        }
+
+/*
+        // Visualize sample waveform
+        int i;
+        for(i=0;i<40;i++){
+            int8_t b = data[i];
+            if(b<0) {
+                printf(" ");
+            } else if(b<64) {
+                printf("_");
+            } else if(b < 128) {
+                printf("-");
+            } else if(b == 128) {
+                printf("=");
+            } else if(b < 256-64) {
+                printf("\"");
+            } else {
+                printf("'");
+            }
+        }
+        printf("nA\n");
+*/
+
+        (*env)->SetByteArrayRegion(env, jbArray, 0, data_size, data);
+        (*env)->CallVoidMethod(env, instance, jni_mid_updateSound, jbArray, data_size, aPTS);
+    }
+}
+
 static void _updateJavaAttributes(JNIEnv *env, jobject instance, FFMPEGToolBasicAV_t* pAV)
 {
     // int shallBeDetached = 0;
@@ -331,10 +370,12 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_ini
         JoglCommon_FatalError(env, "JOGL FFMPEG: can't use %s", ClazzNameFFMPEGMediaPlayer);
     }
 
+    jni_mid_updateSound = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateSound", "([BII)V");
    jni_mid_updateAttributes1 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes", "(IIIIIFIILjava/lang/String;Ljava/lang/String;)V");
    jni_mid_updateAttributes2 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes2", "(IIIIIIIIII)V");
 
-    if(jni_mid_updateAttributes1 == NULL ||
+    if(jni_mid_updateSound == NULL ||
+       jni_mid_updateAttributes1 == NULL ||
        jni_mid_updateAttributes2 == NULL) {
         return JNI_FALSE;
     }
@@ -579,7 +620,6 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
     int frameFinished;
 
     if(sp_av_read_frame(pAV->pFormatCtx, &packet)>=0) {
-        /**
         if(packet.stream_index==pAV->aid) {
             // Decode audio frame
             if(NULL == pAV->pAFrame) {
@@ -590,16 +630,19 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
             int new_packet = 1;
             int len1;
             int flush_complete = 0;
-            int data_size = 0;
             while (packet.size > 0 || (!packet.data && new_packet)) {
                 new_packet = 0;
                 if (flush_complete) {
                     break;
                 }
                 if(HAS_FUNC(sp_avcodec_decode_audio4)) {
-                    len1 = sp_avcodec_decode_audio4(pAV->pVCodecCtx, pAV->pAFrame, &frameFinished, &packet);
+                    len1 = sp_avcodec_decode_audio4(pAV->pACodecCtx, pAV->pAFrame, &frameFinished, &packet);
                 } else {
-                    len1 = sp_avcodec_decode_audio3(pAV->pVCodecCtx, int16_t *samples, int *frame_size_ptr, &frameFinished, &packet);
+                    #if 0
+                    len1 = sp_avcodec_decode_audio3(pAV->pACodecCtx, int16_t *samples, int *frame_size_ptr, &frameFinished, &packet);
+                    #endif
+                    JoglCommon_throwNewRuntimeException(env, "Unimplemented: FFMPEGMediaPlayer sp_avcodec_decode_audio3 fallback");
+                    return 0;
                 }
                 if (len1 < 0) {
                     // if error, we skip the frame
@@ -611,28 +654,73 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
                 if (!frameFinished) {
                     // stop sending empty packets if the decoder is finished
-                    if (!packet.data && pAV->pVCodecCtx->codec->capabilities & CODEC_CAP_DELAY) {
+                    if (!packet.data && pAV->pACodecCtx->codec->capabilities & CODEC_CAP_DELAY) {
                         flush_complete = 1;
                     }
                     continue;
                 }
-                int32_t pts = pAV->pAFrame->pkt_pts * my_av_q2i32(1000, pAV->pAStream->time_base);
-                pAV->aPTS += ( data_size * 1000 ) / (2 * pAV->pVCodecCtx->channels * pAV->pVCodecCtx->sample_rate);
-                printf("A pts %d - %d\n", pts, pAV->aPTS);
+                int data_size = 0;
+                if(HAS_FUNC(sp_av_samples_get_buffer_size)) {
+                    data_size = sp_av_samples_get_buffer_size(NULL /* linesize, may be NULL */,
+                                                              pAV->aChannels,
+                                                              pAV->pAFrame->nb_samples,
+                                                              pAV->pAFrame->format,
+                                                              1 /* align */);
+                }
+                int32_t pts = (int64_t) ( pAV->pAFrame->pkt_pts * (int64_t) 1000 * (int64_t) pAV->pAStream->time_base.num )
+                              / (int64_t) pAV->pAStream->time_base.den;
+                #if 0
+                printf("channels %d sample_rate %d \n", pAV->aChannels , pAV->aSampleRate);
+                printf("data %d \n", pAV->aFrameSize);
+                #endif
+                pAV->aPTS += (int64_t) ( data_size * (int64_t) 1000 )
+                             / (int64_t) (2 * (int64_t) pAV->aChannels * (int64_t) pAV->aSampleRate);
+                if( pAV->verbose ) {
+                    printf("A pts %d - %d\n", pts, pAV->aPTS);
+                }
+                // TODO: Wrap audio buffer data in a com.jogamp.openal.sound3d.Buffer or similar
+                //       and hand it over to the user using a suitable API.
+                // TODO: OR send the audio buffer data down to sound card directly using JOAL.
+                _updateSound(env, instance, pAV->pAFrame->data[0], data_size, pAV->aPTS);
+                res = 1;
             }
-        } else */ if(packet.stream_index==pAV->vid) {
+        } else if(packet.stream_index==pAV->vid) {
             // Decode video frame
             if(NULL == pAV->pVFrame) {
                 sp_av_free_packet(&packet);
                 return res;
             }
-            sp_avcodec_decode_video2(pAV->pVCodecCtx, pAV->pVFrame, &frameFinished, &packet);
-            // Did we get a video frame?
-            if(frameFinished)
-            {
+            int new_packet = 1;
+            int len1;
+            int flush_complete = 0;
+            while (packet.size > 0 || (!packet.data && new_packet)) {
+
+                new_packet = 0;
+                if (flush_complete) {
+                    break;
+                }
+
+                len1 = sp_avcodec_decode_video2(pAV->pVCodecCtx, pAV->pVFrame, &frameFinished, &packet);
+
+                if (len1 < 0) {
+                    // if error, we skip the frame
+                    packet.size = 0;
+                    break;
+                }
+                packet.data += len1;
+                packet.size -= len1;
+
+                if (!frameFinished) {
+                    // stop sending empty packets if the decoder is finished
+                    if (!packet.data && pAV->pVCodecCtx->codec->capabilities & CODEC_CAP_DELAY) {
+                        flush_complete = 1;
+                    }
+                    continue;
+                }
+                res = 2;
+
                 // FIXME: Libav Binary compatibility! JAU01
                 const AVRational time_base = pAV->pVStream->time_base;
@@ -678,7 +766,9 @@ JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNex
         }
 
         // Free the packet that was allocated by av_read_frame
-        sp_av_free_packet(&packet);
+        // This code cause a double free and have been commented out.
+        // TODO: check what release the packets memory.
+        // sp_av_free_packet(&packet);
     }
     return res;
 }
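In the native decode path each audio packet's pkt_pts is rescaled from stream time_base units to milliseconds, and aPTS then advances by the duration of the decoded buffer, derived from its byte size assuming 16-bit interleaved PCM. A hypothetical Java rendering of the same arithmetic (method names are illustrative, not part of the patch):

    // Rescale a packet PTS expressed in stream time_base units to milliseconds.
    static long ptsToMillis(long pktPts, int timeBaseNum, int timeBaseDen) {
        return (pktPts * 1000L * timeBaseNum) / timeBaseDen;
    }

    // Milliseconds of audio covered by dataSize bytes of 16-bit interleaved PCM:
    // 2 bytes per sample, 'channels' samples per frame, 'sampleRate' frames per second.
    static long bufferMillis(long dataSize, int channels, int sampleRate) {
        return (dataSize * 1000L) / (2L * channels * sampleRate);
    }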