diff options
author | Sven Gothel <[email protected]> | 2014-02-23 14:51:06 +0100 |
---|---|---|
committer | Sven Gothel <[email protected]> | 2014-02-23 14:51:06 +0100 |
commit | 3352601e0860584509adf2b76f993d03893ded4b (patch) | |
tree | 974fccc8c0eb2f5ad9d4ffd741dfc35869ed67b5 /src/jogl/native | |
parent | f51933f0ebe9ae030c26c066e59a728ce08b8559 (diff) | |
parent | c67de337a8aaf52e36104c3f13e273aa19d21f1f (diff) |
Merge branch 'master' into stash_glyphcache
Conflicts:
make/scripts/tests.sh
src/jogl/classes/com/jogamp/graph/curve/OutlineShape.java
src/jogl/classes/com/jogamp/graph/curve/Region.java
src/jogl/classes/com/jogamp/graph/curve/opengl/GLRegion.java
src/jogl/classes/com/jogamp/graph/curve/opengl/RegionRenderer.java
src/jogl/classes/com/jogamp/graph/curve/opengl/Renderer.java
src/jogl/classes/com/jogamp/graph/curve/opengl/TextRenderer.java
src/jogl/classes/com/jogamp/graph/font/Font.java
src/jogl/classes/com/jogamp/opengl/math/VectorUtil.java
src/jogl/classes/jogamp/graph/curve/text/GlyphShape.java
src/jogl/classes/jogamp/graph/curve/text/GlyphString.java
src/jogl/classes/jogamp/graph/font/typecast/TypecastFont.java
src/jogl/classes/jogamp/graph/font/typecast/TypecastGlyph.java
src/jogl/classes/jogamp/graph/font/typecast/TypecastRenderer.java
Diffstat (limited to 'src/jogl/native')
23 files changed, 3460 insertions, 1473 deletions
diff --git a/src/jogl/native/GLContext.c b/src/jogl/native/GLContext.c new file mode 100644 index 000000000..9be9f82af --- /dev/null +++ b/src/jogl/native/GLContext.c @@ -0,0 +1,46 @@ + +#include "jogamp_opengl_GLContextImpl.h" +#include "JoglCommon.h" + +#include <assert.h> +#include <KHR/khrplatform.h> + +/* + * Class: jogamp_opengl_GLContextImpl + * Method: glGetStringInt + * Signature: (IJ)Ljava/lang/String; + */ +JNIEXPORT jstring JNICALL +Java_jogamp_opengl_GLContextImpl_glGetStringInt(JNIEnv *env, jclass _unused, jint name, jlong procAddress) { + typedef const khronos_uint8_t * (KHRONOS_APIENTRY*_local_PFNGLGETSTRINGPROC)(unsigned int name); + _local_PFNGLGETSTRINGPROC ptr_glGetString; + const khronos_uint8_t * _res; + ptr_glGetString = (_local_PFNGLGETSTRINGPROC) (intptr_t) procAddress; + assert(ptr_glGetString != NULL); + _res = (* ptr_glGetString) ((unsigned int) name); + if (NULL == _res) return NULL; + return (*env)->NewStringUTF(env, (const char *)_res); +} + +/* + * Class: jogamp_opengl_GLContextImpl + * Method: glGetIntegervInt + * Signature: (ILjava/lang/Object;I)V + */ +JNIEXPORT void JNICALL +Java_jogamp_opengl_GLContextImpl_glGetIntegervInt(JNIEnv *env, jclass _unused, jint pname, jobject params, jint params_byte_offset, jlong procAddress) { + typedef void (KHRONOS_APIENTRY*_local_PFNGLGETINTEGERVPROC)(unsigned int pname, int * params); + + _local_PFNGLGETINTEGERVPROC ptr_glGetIntegerv; + int * _params_ptr = NULL; + if ( NULL != params ) { + _params_ptr = (int *) (((char*) (*env)->GetPrimitiveArrayCritical(env, params, NULL) ) + params_byte_offset); + } + ptr_glGetIntegerv = (_local_PFNGLGETINTEGERVPROC) (intptr_t) procAddress; + assert(ptr_glGetIntegerv != NULL); + (* ptr_glGetIntegerv) ((unsigned int) pname, (int *) _params_ptr); + if ( NULL != params ) { + (*env)->ReleasePrimitiveArrayCritical(env, params, _params_ptr, 0); + } +} + diff --git a/src/jogl/native/GLDebugMessageHandler.c b/src/jogl/native/GLDebugMessageHandler.c index 
fea9d90ce..0aa7a01e7 100644 --- a/src/jogl/native/GLDebugMessageHandler.c +++ b/src/jogl/native/GLDebugMessageHandler.c @@ -21,11 +21,11 @@ static jmethodID glDebugMessageARB = NULL; // int source, int type, int id, int severity, String msg static jmethodID glDebugMessageAMD = NULL; // int id, int category, int severity, String msg -typedef void (GLAPIENTRY* _local_PFNGLDEBUGMESSAGECALLBACKARBPROC) (GLDEBUGPROCARB callback, const GLvoid *userParam); -typedef void (GLAPIENTRY* _local_GLDEBUGPROCARB)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,GLvoid *userParam); +typedef void (APIENTRY* _local_PFNGLDEBUGMESSAGECALLBACKARBPROC) (GLDEBUGPROCARB callback, const GLvoid *userParam); +typedef void (APIENTRY* _local_GLDEBUGPROCARB)(GLenum source,GLenum type,GLuint id,GLenum severity,GLsizei length,const GLchar *message,GLvoid *userParam); -typedef void (GLAPIENTRY* _local_PFNGLDEBUGMESSAGECALLBACKAMDPROC) (GLDEBUGPROCAMD callback, const GLvoid *userParam); -typedef void (GLAPIENTRY* _local_GLDEBUGPROCAMD)(GLuint id,GLenum category,GLenum severity,GLsizei length,const GLchar *message,GLvoid *userParam); +typedef void (APIENTRY* _local_PFNGLDEBUGMESSAGECALLBACKAMDPROC) (GLDEBUGPROCAMD callback, const GLvoid *userParam); +typedef void (APIENTRY* _local_GLDEBUGPROCAMD)(GLuint id,GLenum category,GLenum severity,GLsizei length,const GLchar *message,GLvoid *userParam); /* * Class: jogamp_opengl_GLDebugMessageHandler @@ -49,8 +49,6 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_GLDebugMessageHandler_initIDs0 } typedef struct { - JavaVM *vm; - int version; jobject obj; int extType; } DebugHandlerType; @@ -60,39 +58,21 @@ typedef struct { static void GLDebugMessageARBCallback(GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar *message, GLvoid *userParam) { DebugHandlerType * handle = (DebugHandlerType*) (intptr_t) userParam; - JavaVM *vm = handle->vm; - int version = handle->version; jobject obj = 
handle->obj; - JNIEnv *curEnv = NULL; - JNIEnv *newEnv = NULL; - int envRes ; - DBG_PRINT("GLDebugMessageARBCallback: 00 - %s, vm %p, version 0x%X, jobject %p, extType %d\n", - message, handle->vm, handle->version, (void*)handle->obj, handle->extType); - - // retrieve this thread's JNIEnv curEnv - or detect it's detached - envRes = (*vm)->GetEnv(vm, (void **) &curEnv, version) ; - DBG_PRINT("GLDebugMessageARBCallback: 01 - JVM Env: curEnv %p, res 0x%X\n", curEnv, envRes); - if( JNI_EDETACHED == envRes ) { - // detached thread - attach to JVM - if( JNI_OK != ( envRes = (*vm)->AttachCurrentThread(vm, (void**) &newEnv, NULL) ) ) { - fprintf(stderr, "GLDebugMessageARBCallback: can't attach thread: %d\n", envRes); - return; - } - curEnv = newEnv; - DBG_PRINT("GLDebugMessageARBCallback: 02 - attached .. \n"); - } else if( JNI_OK != envRes ) { - // oops .. - fprintf(stderr, "GLDebugMessageARBCallback: can't GetEnv: %d\n", envRes); + JNIEnv *env = NULL; + int shallBeDetached ; + DBG_PRINT("GLDebugMessageARBCallback: 00 - %s, jobject %p, extType %d\n", message, (void*)handle->obj, handle->extType); + + env = JoglCommon_GetJNIEnv (1 /* asDaemon */, &shallBeDetached); + if( NULL == env ) { + DBG_PRINT("GLDebugMessageARBCallback: Null JNIEnv\n"); return; } - (*curEnv)->CallVoidMethod(curEnv, obj, glDebugMessageARB, + (*env)->CallVoidMethod(env, obj, glDebugMessageARB, (jint) source, (jint) type, (jint) id, (jint) severity, - (*curEnv)->NewStringUTF(curEnv, message)); - if( NULL != newEnv ) { - // detached attached thread - (*vm)->DetachCurrentThread(vm); - DBG_PRINT("GLDebugMessageARBCallback: 04 - detached .. 
\n"); - } + (*env)->NewStringUTF(env, message)); + // detaching thread not required - daemon + // JoglCommon_ReleaseJNIEnv(shallBeDetached); DBG_PRINT("GLDebugMessageARBCallback: 0X\n"); /** * On Java 32bit on 64bit Windows and w/ GL_DEBUG_OUTPUT_SYNCHRONOUS_ARB disables, @@ -104,39 +84,21 @@ static void GLDebugMessageARBCallback(GLenum source, GLenum type, GLuint id, GLe static void GLDebugMessageAMDCallback(GLuint id, GLenum category, GLenum severity, GLsizei length, const GLchar *message, GLvoid *userParam) { DebugHandlerType * handle = (DebugHandlerType*) (intptr_t) userParam; - JavaVM *vm = handle->vm; - int version = handle->version; jobject obj = handle->obj; - JNIEnv *curEnv = NULL; - JNIEnv *newEnv = NULL; - int envRes ; - DBG_PRINT("GLDebugMessageAMDCallback: 00 - %s, vm %p, version 0x%X, jobject %p, extType %d\n", - message, handle->vm, handle->version, (void*)handle->obj, handle->extType); - - // retrieve this thread's JNIEnv curEnv - or detect it's detached - envRes = (*vm)->GetEnv(vm, (void **) &curEnv, version) ; - DBG_PRINT("GLDebugMessageAMDCallback: 01 - JVM Env: curEnv %p, res 0x%X\n", curEnv, envRes); - if( JNI_EDETACHED == envRes ) { - // detached thread - attach to JVM - if( JNI_OK != ( envRes = (*vm)->AttachCurrentThread(vm, (void**) &newEnv, NULL) ) ) { - fprintf(stderr, "GLDebugMessageAMDCallback: can't attach thread: %d\n", envRes); - return; - } - curEnv = newEnv; - DBG_PRINT("GLDebugMessageAMDCallback: 02 - attached .. \n"); - } else if( JNI_OK != envRes ) { - // oops .. 
- fprintf(stderr, "GLDebugMessageAMDCallback: can't GetEnv: %d\n", envRes); + JNIEnv *env = NULL; + int shallBeDetached ; + DBG_PRINT("GLDebugMessageAMDCallback: 00 - %s, jobject %p, extType %d\n", message, (void*)handle->obj, handle->extType); + + env = JoglCommon_GetJNIEnv (1 /* asDaemon */, &shallBeDetached); + if( NULL == env ) { + DBG_PRINT("GLDebugMessageARBCallback: Null JNIEnv\n"); return; } - (*curEnv)->CallVoidMethod(curEnv, obj, glDebugMessageAMD, + (*env)->CallVoidMethod(env, obj, glDebugMessageAMD, (jint) id, (jint) category, (jint) severity, - (*curEnv)->NewStringUTF(curEnv, message)); - if( NULL != newEnv ) { - // detached attached thread - (*vm)->DetachCurrentThread(vm); - DBG_PRINT("GLDebugMessageAMDCallback: 04 - detached .. \n"); - } + (*env)->NewStringUTF(env, message)); + // detached attached thread not required - daemon + // JoglCommon_ReleaseJNIEnv(shallBeDetached); DBG_PRINT("GLDebugMessageAMDCallback: 0X\n"); /** * On Java 32bit on 64bit Windows, @@ -153,18 +115,10 @@ static void GLDebugMessageAMDCallback(GLuint id, GLenum category, GLenum severit JNIEXPORT jlong JNICALL Java_jogamp_opengl_GLDebugMessageHandler_register0 (JNIEnv *env, jobject obj, jlong procAddress, jint extType) { - JavaVM *vm; DebugHandlerType * handle = malloc(sizeof(DebugHandlerType)); - if(0 != (*env)->GetJavaVM(env, &vm)) { - vm = NULL; - JoglCommon_throwNewRuntimeException(env, "GetJavaVM failed"); - } - handle->vm = vm; - handle->version = (*env)->GetVersion(env); handle->obj = (*env)->NewGlobalRef(env, obj); handle->extType = extType; - DBG_PRINT("GLDebugMessageHandler.register0: vm %p, version 0x%X, jobject %p, extType %d\n", - handle->vm, handle->version, (void*)handle->obj, handle->extType); + DBG_PRINT("GLDebugMessageHandler.register0: jobject %p, extType %d\n", (void*)handle->obj, handle->extType); if(jogamp_opengl_GLDebugMessageHandler_EXT_ARB == extType) { _local_PFNGLDEBUGMESSAGECALLBACKARBPROC ptr_glDebugMessageCallbackARB; @@ -191,8 +145,7 @@ JNIEXPORT 
void JNICALL Java_jogamp_opengl_GLDebugMessageHandler_unregister0 { DebugHandlerType * handle = (DebugHandlerType*) (intptr_t) jhandle; - DBG_PRINT("GLDebugMessageHandler.unregister0: vm %p, version 0x%X, jobject %p, extType %d\n", - handle->vm, handle->version, (void*)handle->obj, handle->extType); + DBG_PRINT("GLDebugMessageHandler.unregister0: jobject %p, extType %d\n", (void*)handle->obj, handle->extType); if(JNI_FALSE == (*env)->IsSameObject(env, obj, handle->obj)) { JoglCommon_throwNewRuntimeException(env, "wrong handle (obj doesn't match)"); diff --git a/src/jogl/native/JoglCommon.c b/src/jogl/native/JoglCommon.c index d9f5edd49..e9984ada2 100644 --- a/src/jogl/native/JoglCommon.c +++ b/src/jogl/native/JoglCommon.c @@ -1,5 +1,4 @@ -#include "jogamp_opengl_GLContextImpl.h" #include "JoglCommon.h" #include <assert.h> @@ -12,42 +11,38 @@ static JavaVM *_jvmHandle = NULL; static int _jvmVersion = 0; void JoglCommon_init(JNIEnv *env) { - if(NULL==runtimeExceptionClz) { + if(NULL==_jvmHandle) { + if(0 != (*env)->GetJavaVM(env, &_jvmHandle)) { + JoglCommon_FatalError(env, "JOGL: Can't fetch JavaVM handle"); + } else { + _jvmVersion = (*env)->GetVersion(env); + } jclass c = (*env)->FindClass(env, ClazzNameRuntimeException); if(NULL==c) { - JoglCommon_FatalError(env, "JOGL: can't find %s", ClazzNameRuntimeException); + JoglCommon_FatalError(env, "JOGL: Can't find %s", ClazzNameRuntimeException); } runtimeExceptionClz = (jclass)(*env)->NewGlobalRef(env, c); (*env)->DeleteLocalRef(env, c); if(NULL==runtimeExceptionClz) { - JoglCommon_FatalError(env, "JOGL: can't use %s", ClazzNameRuntimeException); + JoglCommon_FatalError(env, "JOGL: Can't use %s", ClazzNameRuntimeException); } } - if(0 != (*env)->GetJavaVM(env, &_jvmHandle)) { - JoglCommon_FatalError(env, "JOGL: can't fetch JavaVM handle"); - } else { - _jvmVersion = (*env)->GetVersion(env); - } } void JoglCommon_FatalError(JNIEnv *env, const char* msg, ...) 
{ char buffer[512]; va_list ap; - int shallBeDetached = 0; - if(NULL == env) { - env = JoglCommon_GetJNIEnv (&shallBeDetached); - } + if( NULL != msg ) { + va_start(ap, msg); + vsnprintf(buffer, sizeof(buffer), msg, ap); + va_end(ap); - va_start(ap, msg); - vsnprintf(buffer, sizeof(buffer), msg, ap); - va_end(ap); - - fprintf(stderr, "%s\n", buffer); - if(NULL != env) { - (*env)->FatalError(env, buffer); - JoglCommon_ReleaseJNIEnv (shallBeDetached); + fprintf(stderr, "%s\n", buffer); + if(NULL != env) { + (*env)->FatalError(env, buffer); + } } } @@ -55,48 +50,42 @@ void JoglCommon_throwNewRuntimeException(JNIEnv *env, const char* msg, ...) { char buffer[512]; va_list ap; - int shallBeDetached = 0; - if(NULL == env) { - env = JoglCommon_GetJNIEnv (&shallBeDetached); + if(NULL==_jvmHandle) { + JoglCommon_FatalError(env, "JOGL: NULL JVM handle, call JoglCommon_init 1st\n"); + return; } - va_start(ap, msg); - vsnprintf(buffer, sizeof(buffer), msg, ap); - va_end(ap); + if( NULL != msg ) { + va_start(ap, msg); + vsnprintf(buffer, sizeof(buffer), msg, ap); + va_end(ap); - if(NULL != env) { - (*env)->ThrowNew(env, runtimeExceptionClz, buffer); - JoglCommon_ReleaseJNIEnv (shallBeDetached); + if(NULL != env) { + (*env)->ThrowNew(env, runtimeExceptionClz, buffer); + } } } -JavaVM *JoglCommon_GetJVMHandle() { - return _jvmHandle; -} - -int JoglCommon_GetJVMVersion() { - return _jvmVersion; -} - jchar* JoglCommon_GetNullTerminatedStringChars(JNIEnv* env, jstring str) { jchar* strChars = NULL; - strChars = calloc((*env)->GetStringLength(env, str) + 1, sizeof(jchar)); - if (strChars != NULL) { - (*env)->GetStringRegion(env, str, 0, (*env)->GetStringLength(env, str), strChars); + if( NULL != env && 0 != str ) { + strChars = calloc((*env)->GetStringLength(env, str) + 1, sizeof(jchar)); + if (strChars != NULL) { + (*env)->GetStringRegion(env, str, 0, (*env)->GetStringLength(env, str), strChars); + } } return strChars; } -JNIEnv* JoglCommon_GetJNIEnv (int * shallBeDetached) -{ 
+JNIEnv* JoglCommon_GetJNIEnv (int asDaemon, int * shallBeDetached) { JNIEnv* curEnv = NULL; JNIEnv* newEnv = NULL; int envRes; - if(NULL == _jvmHandle) { - fprintf(stderr, "JOGL: No JavaVM handle registered, call JoglCommon_init(..) 1st"); + if(NULL==_jvmHandle) { + fprintf(stderr, "JOGL GetJNIEnv: NULL JVM handle, call JoglCommon_init 1st\n"); return NULL; } @@ -104,18 +93,23 @@ JNIEnv* JoglCommon_GetJNIEnv (int * shallBeDetached) envRes = (*_jvmHandle)->GetEnv(_jvmHandle, (void **) &curEnv, _jvmVersion) ; if( JNI_EDETACHED == envRes ) { // detached thread - attach to JVM - if( JNI_OK != ( envRes = (*_jvmHandle)->AttachCurrentThread(_jvmHandle, (void**) &newEnv, NULL) ) ) { - fprintf(stderr, "JNIEnv: can't attach thread: %d\n", envRes); + if( asDaemon ) { + envRes = (*_jvmHandle)->AttachCurrentThreadAsDaemon(_jvmHandle, (void**) &newEnv, NULL); + } else { + envRes = (*_jvmHandle)->AttachCurrentThread(_jvmHandle, (void**) &newEnv, NULL); + } + if( JNI_OK != envRes ) { + fprintf(stderr, "JOGL GetJNIEnv: Can't attach thread: %d\n", envRes); return NULL; } curEnv = newEnv; } else if( JNI_OK != envRes ) { // oops .. - fprintf(stderr, "can't GetEnv: %d\n", envRes); + fprintf(stderr, "JOGL GetJNIEnv: Can't GetEnv: %d\n", envRes); return NULL; } if (curEnv==NULL) { - fprintf(stderr, "env is NULL\n"); + fprintf(stderr, "JOGL GetJNIEnv: env is NULL\n"); return NULL; } *shallBeDetached = NULL != newEnv; @@ -124,28 +118,9 @@ JNIEnv* JoglCommon_GetJNIEnv (int * shallBeDetached) void JoglCommon_ReleaseJNIEnv (int shallBeDetached) { if(NULL == _jvmHandle) { - fprintf(stderr, "JOGL: No JavaVM handle registered, call JoglCommon_init(..) 1st"); - } - - if(shallBeDetached) { + fprintf(stderr, "JOGL ReleaseJNIEnv: No JavaVM handle registered, call JoglCommon_init(..) 
1st"); + } else if(shallBeDetached) { (*_jvmHandle)->DetachCurrentThread(_jvmHandle); } } -/* - * Class: jogamp_opengl_GLContextImpl - * Method: glGetStringInt - * Signature: (IJ)Ljava/lang/String; - */ -JNIEXPORT jstring JNICALL -Java_jogamp_opengl_GLContextImpl_glGetStringInt(JNIEnv *env, jclass _unused, jint name, jlong procAddress) { - typedef const khronos_uint8_t * (KHRONOS_APIENTRY*_local_PFNGLGETSTRINGPROC)(unsigned int name); - _local_PFNGLGETSTRINGPROC ptr_glGetString; - const khronos_uint8_t * _res; - ptr_glGetString = (_local_PFNGLGETSTRINGPROC) (intptr_t) procAddress; - assert(ptr_glGetString != NULL); - _res = (* ptr_glGetString) ((unsigned int) name); - if (NULL == _res) return NULL; - return (*env)->NewStringUTF(env, _res); -} - diff --git a/src/jogl/native/JoglCommon.h b/src/jogl/native/JoglCommon.h index 023b4be03..2aeaf7d1d 100644 --- a/src/jogl/native/JoglCommon.h +++ b/src/jogl/native/JoglCommon.h @@ -1,3 +1,30 @@ +/** + * Copyright 2011 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. + */ #ifndef JOGL_COMMON_H #define JOGL_COMMON_H 1 @@ -7,29 +34,25 @@ void JoglCommon_init(JNIEnv *env); -/** Set by JoglCommon_init */ -JavaVM *JoglCommon_GetJVMHandle(); - -/** Set by JoglCommon_init */ -int JoglCommon_GetJVMVersion(); - jchar* JoglCommon_GetNullTerminatedStringChars(JNIEnv* env, jstring str); -/** env may be NULL, in which case JoglCommon_GetJNIEnv() is being used. */ void JoglCommon_FatalError(JNIEnv *env, const char* msg, ...); - -/** env may be NULL, in which case JoglCommon_GetJNIEnv() is being used. */ void JoglCommon_throwNewRuntimeException(JNIEnv *env, const char* msg, ...); /** * - * 1) Store jvmHandle and jvmVersion is done by 'JoglCommon_init(JNIEnv*)' - * and internally used by 'JoglCommon_GetJNIEnv(..)' and 'JoglCommon_ReleaseJNIEnv(..)'. 
+ * 1) Init static jvmHandle, jvmVersion and clazz references + * from an early initialization call w/ valid 'JNIEnv * env' + + JoglCommon_init(env); + * * 2) Use current thread JNIEnv or attach current thread to JVM, generating new JNIEnv * + + int asDaemon = 0; int shallBeDetached = 0; - JNIEnv* env = NewtCommon_GetJNIEnv(&shallBeDetached); + JNIEnv* env = JoglCommon_GetJNIEnv(asDaemon, &shallBeDetached); if(NULL==env) { DBG_PRINT("drawRect: null JNIEnv\n"); return; @@ -41,11 +64,13 @@ void JoglCommon_throwNewRuntimeException(JNIEnv *env, const char* msg, ...); .. your JNIEnv code here .. * - * 4) Detach thread from JVM, if required + * 4) Detach thread from JVM if required, i.e. not attached as daemon! + * Not recommended for recurring _daemon_ threads (performance) * - JoglCommon_ReleaseJNIEnv (shallBeDetached); + JoglCommon_ReleaseJNIEnv(shallBeDetached); */ -JNIEnv* JoglCommon_GetJNIEnv (int * shallBeDetached); +JNIEnv* JoglCommon_GetJNIEnv (int asDaemon, int * shallBeDetached); + void JoglCommon_ReleaseJNIEnv (int shallBeDetached); #endif diff --git a/src/jogl/native/libav/ffmpeg_dshow.c b/src/jogl/native/libav/ffmpeg_dshow.c new file mode 100644 index 000000000..4f1523a81 --- /dev/null +++ b/src/jogl/native/libav/ffmpeg_dshow.c @@ -0,0 +1,215 @@ +/** + * Copyright 2013 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. + */ + +#include "ffmpeg_dshow.h" + +#ifdef _WIN32 + +#include <stdio.h> +#include <string.h> + +// dshow includes strsafe.h, hence tchar.h cannot be used +// include strsafe.h here for documentation +// #include <tchar.h> +#include <strsafe.h> +#include <dshow.h> + +static HRESULT EnumerateDevices(REFGUID category, IEnumMoniker **ppEnum) +{ + // Create the System Device Enumerator. + ICreateDevEnum *pDevEnum; + void *pv = NULL; + + HRESULT hr = CoCreateInstance(&CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, &IID_ICreateDevEnum, (void**)&pDevEnum); + + if (SUCCEEDED(hr)) { + // Create an enumerator for the category. + hr = pDevEnum->lpVtbl->CreateClassEnumerator(pDevEnum, category, ppEnum,0); + if (hr == S_FALSE) + { + hr = VFW_E_NOT_FOUND; // The category is empty. Treat as an error. 
+ } + pDevEnum->lpVtbl->Release(pDevEnum); + } + return hr; +} + +static void getBSTRChars(BSTR bstr, char *pDest, int destLen) { + + #ifdef UNICODE + // _sntprintf(pDest, destLen, _T("%s"), bstr); + StringCbPrintfW(pDest, destLen, L"%s", bstr); + #else + // _sntprintf(pDest, destLen, _T("%S"), bstr); + StringCchPrintfA(pDest, destLen, "%S", bstr); + #endif +} + + +static int GetDeviceInformation(IEnumMoniker *pEnum, int verbose, int devIdx, + char *pDescr, int descrSize, + char *pName, int nameSize, + char *pPath, int pathSize, int *pWaveID) { + IMoniker *pMoniker = NULL; + int i=0; + int res = devIdx >= 0 ? -1 : 0; + + if( NULL != pDescr ) { + *pDescr=0; + } + if( NULL != pName ) { + *pName=0; + } + if( NULL != pPath ) { + *pPath=0; + } + if( NULL != pWaveID ) { + *pWaveID=0; + } + + while (pEnum->lpVtbl->Next(pEnum, 1, &pMoniker, NULL) == S_OK) { + IPropertyBag *pPropBag; + HRESULT hr; + + hr = pMoniker->lpVtbl->BindToStorage(pMoniker, 0, 0, &IID_IPropertyBag, (void**)&pPropBag); + if (FAILED(hr)) { + if( verbose ) { + fprintf(stderr, "DShowParser: Dev[%d]: bind failed ...\n", i); + } + pMoniker->lpVtbl->Release(pMoniker); + continue; + } + VARIANT var; + VariantInit(&var); + + // Get description or friendly name. 
+ hr = pPropBag->lpVtbl->Read(pPropBag, L"Description", &var, 0); + if (SUCCEEDED(hr)) { + if( i == devIdx && NULL != pDescr ) { + res = 0; + getBSTRChars(var.bstrVal, pDescr, descrSize); + } + if( verbose ) { + fprintf(stderr, "DShowParser: Dev[%d]: Descr %S\n", i, var.bstrVal); + } + VariantClear(&var); + } else if( verbose ) { + fprintf(stderr, "DShowParser: Dev[%d]: cannot read Descr..\n", i); + } + + hr = pPropBag->lpVtbl->Read(pPropBag, L"FriendlyName", &var, 0); + if (SUCCEEDED(hr)) { + if( i == devIdx && NULL != pName ) { + res = 0; + getBSTRChars(var.bstrVal, pName, nameSize); + } + if( verbose ) { + fprintf(stderr, "DShowParser: Dev[%d]: Name %S\n", i, var.bstrVal); + } + VariantClear(&var); + } else if( verbose ) { + fprintf(stderr, "DShowParser: Dev[%d]: cannot read Name..\n", i); + } + + hr = pPropBag->lpVtbl->Write(pPropBag, L"FriendlyName", &var); + + // WaveInID applies only to audio capture devices. + hr = pPropBag->lpVtbl->Read(pPropBag, L"WaveInID", &var, 0); + if (SUCCEEDED(hr)) { + if( i == devIdx && NULL != pWaveID ) { + res = 0; + *pWaveID=(int)var.lVal; + } + if( verbose ) { + fprintf(stderr, "DShowParser: Dev[%d]: WaveInID %d\n", i, var.lVal); + } + VariantClear(&var); + } + + hr = pPropBag->lpVtbl->Read(pPropBag, L"DevicePath", &var, 0); + if (SUCCEEDED(hr)) { + if( i == devIdx && NULL != pPath ) { + res = 0; + getBSTRChars(var.bstrVal, pPath, pathSize); + } + if( verbose ) { + fprintf(stderr, "DShowParser: Dev[%d]: Path %S\n", i, var.bstrVal); + } + VariantClear(&var); + } + + pPropBag->lpVtbl->Release(pPropBag); + pMoniker->lpVtbl->Release(pMoniker); + + if( devIdx >= 0 && i == devIdx ) { + break; // done! 
+ } + i++; + } + return res; +} + +int findDShowVideoDevice(char * dest, int destSize, int devIdx, int verbose) { + int res = -1; + + HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED); + if (SUCCEEDED(hr)) { + IEnumMoniker *pEnum; + + hr = EnumerateDevices(&CLSID_VideoInputDeviceCategory, &pEnum); + if (SUCCEEDED(hr)) { + res = GetDeviceInformation(pEnum, verbose, devIdx, NULL /* pDescr */, 0, dest, destSize, NULL /* pPath */, 0, NULL /* pWaveID */); + pEnum->lpVtbl->Release(pEnum); + if( verbose ) { + fprintf(stderr, "DShowParser: Get VideoInputDevice: res %d, '%s'\n", res, dest); + } + } else if( verbose ) { + fprintf(stderr, "DShowParser: Get VideoInputDevice failed\n"); + } + /** + hr = EnumerateDevices(&CLSID_AudioInputDeviceCategory, &pEnum); + if (SUCCEEDED(hr)) { + res = GetDeviceInformation(pEnum, verbose, devIdx, NULL, 0, NULL, 0, NULL, 0, NULL); + pEnum->lpVtbl->Release(pEnum); + } else if( verbose ) { + fprintf(stderr, "DShowParser: Get AudioInputDevice failed\n"); + } */ + CoUninitialize(); + } else if( verbose ) { + fprintf(stderr, "DShowParser: CoInit failed\n"); + } + return res; +} + +#else + +int findDShowVideoDevice(char * dest, int destSize, int devIdx, int verbose) { + return -1; +} + +#endif diff --git a/src/jogl/native/libav/ffmpeg_dshow.h b/src/jogl/native/libav/ffmpeg_dshow.h new file mode 100644 index 000000000..44524bf4b --- /dev/null +++ b/src/jogl/native/libav/ffmpeg_dshow.h @@ -0,0 +1,47 @@ +/** + * Copyright 2013 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. 
Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. + */ + +#ifndef _FFMPEG_DSHOW_H +#define _FFMPEG_DSHOW_H + +#ifdef _WIN32 + +#include <windows.h> + +#endif // _WIN32 + +#include <gluegen_stdint.h> +#include <gluegen_inttypes.h> +#include <gluegen_stddef.h> +#include <gluegen_stdint.h> + +extern int findDShowVideoDevice(char * dest, int destSize, int devIdx, int verbose); + + +#endif // _FFMPEG_DSHOW_H + diff --git a/src/jogl/native/libav/ffmpeg_impl_template.c b/src/jogl/native/libav/ffmpeg_impl_template.c new file mode 100644 index 000000000..a7e9f4857 --- /dev/null +++ b/src/jogl/native/libav/ffmpeg_impl_template.c @@ -0,0 +1,1611 @@ +/** + * Copyright 2012 JogAmp Community. All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. 
 */

// #define FF_FUNC(METHOD) Java_jogamp_opengl_util_av_impl_FFMPEGv08 ## METHOD

#include "JoglCommon.h"
#include "ffmpeg_tool.h"
#include "ffmpeg_static.h"
#include "ffmpeg_dshow.h"

#include "libavutil/pixdesc.h"
#include "libavutil/samplefmt.h"
#if LIBAVUTIL_VERSION_MAJOR < 53
    #include "libavutil/audioconvert.h"
    // 52: #include "libavutil/channel_layout.h"
#endif

#include <GL/gl.h>

// True if the dynamically resolved function pointer 'f' is available.
#define HAS_FUNC(f) (NULL!=(f))

/*
 * FFMPEG symbol table:
 * All libav*/ /* libsw* entry points are resolved at runtime (see initSymbols0)
 * and stored in the 'sp_*' static function pointers below, so this binding
 * works against multiple FFMPEG/Libav versions without link-time dependency.
 * The '// count: N' markers track the running total of resolved symbols and
 * must stay in sync with SYMBOL_COUNT and the Java-side symbol list.
 */

typedef unsigned (APIENTRYP AVUTIL_VERSION)(void);
typedef unsigned (APIENTRYP AVFORMAT_VERSION)(void);
typedef unsigned (APIENTRYP AVCODEC_VERSION)(void);
typedef unsigned (APIENTRYP AVRESAMPLE_VERSION)(void);
typedef unsigned (APIENTRYP SWRESAMPLE_VERSION)(void);

static AVUTIL_VERSION sp_avutil_version;
static AVFORMAT_VERSION sp_avformat_version;
static AVCODEC_VERSION sp_avcodec_version;
static AVRESAMPLE_VERSION sp_avresample_version;
static SWRESAMPLE_VERSION sp_swresample_version;
// count: 5

// libavcodec
typedef int (APIENTRYP AVCODEC_REGISTER_ALL)(void);
typedef int (APIENTRYP AVCODEC_CLOSE)(AVCodecContext *avctx);
typedef void (APIENTRYP AVCODEC_STRING)(char *buf, int buf_size, AVCodecContext *enc, int encode);
typedef AVCodec *(APIENTRYP AVCODEC_FIND_DECODER)(int avCodecID);    // lavc 53: 'enum CodecID id', lavc 54: 'enum AVCodecID id'
typedef int (APIENTRYP AVCODEC_OPEN2)(AVCodecContext *avctx, AVCodec *codec, AVDictionary **options);                          // 53.6.0
typedef AVFrame *(APIENTRYP AVCODEC_ALLOC_FRAME)(void);
typedef void (APIENTRYP AVCODEC_GET_FRAME_DEFAULTS)(AVFrame *frame);
typedef void (APIENTRYP AVCODEC_FREE_FRAME)(AVFrame **frame);
typedef int (APIENTRYP AVCODEC_DEFAULT_GET_BUFFER)(AVCodecContext *s, AVFrame *pic); // <= 54 (opt), else AVCODEC_DEFAULT_GET_BUFFER2
typedef void (APIENTRYP AVCODEC_DEFAULT_RELEASE_BUFFER)(AVCodecContext *s, AVFrame *pic); // <= 54 (opt), else AV_FRAME_UNREF
typedef int (APIENTRYP AVCODEC_DEFAULT_GET_BUFFER2)(AVCodecContext *s, AVFrame *frame, int flags); // 55. (opt)
typedef int (APIENTRYP AVCODEC_GET_EDGE_WIDTH)();
typedef int (APIENTRYP AV_IMAGE_FILL_LINESIZES)(int linesizes[4], int pix_fmt, int width); // lavu 51: 'enum PixelFormat pix_fmt', lavu 53: 'enum AVPixelFormat pix_fmt'
typedef void (APIENTRYP AVCODEC_ALIGN_DIMENSIONS)(AVCodecContext *s, int *width, int *height);
typedef void (APIENTRYP AVCODEC_ALIGN_DIMENSIONS2)(AVCodecContext *s, int *width, int *height, int linesize_align[AV_NUM_DATA_POINTERS]);
typedef void (APIENTRYP AVCODEC_FLUSH_BUFFERS)(AVCodecContext *avctx);
typedef void (APIENTRYP AV_INIT_PACKET)(AVPacket *pkt);
typedef int (APIENTRYP AV_NEW_PACKET)(AVPacket *pkt, int size);
typedef void (APIENTRYP AV_DESTRUCT_PACKET)(AVPacket *pkt);
typedef void (APIENTRYP AV_FREE_PACKET)(AVPacket *pkt);
typedef int (APIENTRYP AVCODEC_DECODE_AUDIO4)(AVCodecContext *avctx, AVFrame *frame, int *got_frame_ptr, AVPacket *avpkt); // 53.25.0
typedef int (APIENTRYP AVCODEC_DECODE_VIDEO2)(AVCodecContext *avctx, AVFrame *picture, int *got_picture_ptr, AVPacket *avpkt); // 52.23.0

static AVCODEC_REGISTER_ALL sp_avcodec_register_all;
static AVCODEC_CLOSE sp_avcodec_close;
static AVCODEC_STRING sp_avcodec_string;
static AVCODEC_FIND_DECODER sp_avcodec_find_decoder;
static AVCODEC_OPEN2 sp_avcodec_open2;                    // 53.6.0
static AVCODEC_ALLOC_FRAME sp_avcodec_alloc_frame;
static AVCODEC_GET_FRAME_DEFAULTS sp_avcodec_get_frame_defaults;
static AVCODEC_FREE_FRAME sp_avcodec_free_frame;
static AVCODEC_DEFAULT_GET_BUFFER sp_avcodec_default_get_buffer; // <= 54 (opt), else sp_avcodec_default_get_buffer2
static AVCODEC_DEFAULT_RELEASE_BUFFER sp_avcodec_default_release_buffer; // <= 54 (opt), else sp_av_frame_unref
static AVCODEC_DEFAULT_GET_BUFFER2 sp_avcodec_default_get_buffer2; // 55. (opt)
static AVCODEC_GET_EDGE_WIDTH sp_avcodec_get_edge_width;
static AV_IMAGE_FILL_LINESIZES sp_av_image_fill_linesizes;
static AVCODEC_ALIGN_DIMENSIONS sp_avcodec_align_dimensions;
static AVCODEC_ALIGN_DIMENSIONS2 sp_avcodec_align_dimensions2;
static AVCODEC_FLUSH_BUFFERS sp_avcodec_flush_buffers;
static AV_INIT_PACKET sp_av_init_packet;
static AV_NEW_PACKET sp_av_new_packet;
static AV_DESTRUCT_PACKET sp_av_destruct_packet;
static AV_FREE_PACKET sp_av_free_packet;
static AVCODEC_DECODE_AUDIO4 sp_avcodec_decode_audio4;    // 53.25.0
static AVCODEC_DECODE_VIDEO2 sp_avcodec_decode_video2;    // 52.23.0
// count: 27

// libavutil
typedef void (APIENTRYP AV_FRAME_UNREF)(AVFrame *frame);
typedef void* (APIENTRYP AV_REALLOC)(void *ptr, size_t size);
typedef void (APIENTRYP AV_FREE)(void *ptr);
typedef int (APIENTRYP AV_GET_BITS_PER_PIXEL)(const AVPixFmtDescriptor *pixdesc);
typedef int (APIENTRYP AV_SAMPLES_GET_BUFFER_SIZE)(int *linesize, int nb_channels, int nb_samples, enum AVSampleFormat sample_fmt, int align);
typedef int (APIENTRYP AV_GET_BYTES_PER_SAMPLE)(enum AVSampleFormat sample_fmt);
typedef int (APIENTRYP AV_OPT_SET_INT)(void *obj, const char *name, int64_t val, int search_flags);
typedef AVDictionaryEntry* (APIENTRYP AV_DICT_GET)(AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags);
typedef int (APIENTRYP AV_DICT_COUNT)(AVDictionary *m);
typedef int (APIENTRYP AV_DICT_SET)(AVDictionary **pm, const char *key, const char *value, int flags);
typedef void (APIENTRYP AV_DICT_FREE)(AVDictionary **m);

// NOTE(review): sp_av_pix_fmt_descriptors is a data symbol (table), not a
// function pointer, unlike every other sp_* entry here.
static const AVPixFmtDescriptor* sp_av_pix_fmt_descriptors;
static AV_FRAME_UNREF sp_av_frame_unref;
static AV_REALLOC sp_av_realloc;
static AV_FREE sp_av_free;
static AV_GET_BITS_PER_PIXEL sp_av_get_bits_per_pixel;
static AV_SAMPLES_GET_BUFFER_SIZE sp_av_samples_get_buffer_size;
static AV_GET_BYTES_PER_SAMPLE sp_av_get_bytes_per_sample;
static AV_OPT_SET_INT sp_av_opt_set_int;
static AV_DICT_GET sp_av_dict_get;
static AV_DICT_COUNT sp_av_dict_count;
static AV_DICT_SET sp_av_dict_set;
static AV_DICT_FREE sp_av_dict_free;
// count: 39

// libavformat
typedef AVFormatContext *(APIENTRYP AVFORMAT_ALLOC_CONTEXT)(void);
typedef void (APIENTRYP AVFORMAT_FREE_CONTEXT)(AVFormatContext *s);  // 52.96.0
typedef void (APIENTRYP AVFORMAT_CLOSE_INPUT)(AVFormatContext **s);  // 53.17.0
typedef void (APIENTRYP AV_REGISTER_ALL)(void);
typedef AVInputFormat *(APIENTRYP AV_FIND_INPUT_FORMAT)(const char *short_name);
typedef int (APIENTRYP AVFORMAT_OPEN_INPUT)(AVFormatContext **ps, const char *filename, AVInputFormat *fmt, AVDictionary **options);
typedef void (APIENTRYP AV_DUMP_FORMAT)(AVFormatContext *ic, int index, const char *url, int is_output);
typedef int (APIENTRYP AV_READ_FRAME)(AVFormatContext *s, AVPacket *pkt);
typedef int (APIENTRYP AV_SEEK_FRAME)(AVFormatContext *s, int stream_index, int64_t timestamp, int flags);
typedef int (APIENTRYP AVFORMAT_SEEK_FILE)(AVFormatContext *s, int stream_index, int64_t min_ts, int64_t ts, int64_t max_ts, int flags);
typedef int (APIENTRYP AV_READ_PLAY)(AVFormatContext *s);
typedef int (APIENTRYP AV_READ_PAUSE)(AVFormatContext *s);
typedef int (APIENTRYP AVFORMAT_NETWORK_INIT)(void);                 // 53.13.0
typedef int (APIENTRYP AVFORMAT_NETWORK_DEINIT)(void);               // 53.13.0
typedef int (APIENTRYP AVFORMAT_FIND_STREAM_INFO)(AVFormatContext *ic, AVDictionary **options);    // 53.3.0

static AVFORMAT_ALLOC_CONTEXT sp_avformat_alloc_context;
static AVFORMAT_FREE_CONTEXT sp_avformat_free_context;   // 52.96.0 (not used, only for outfile cts)
static AVFORMAT_CLOSE_INPUT sp_avformat_close_input;     // 53.17.0
static AV_REGISTER_ALL sp_av_register_all;
static AV_FIND_INPUT_FORMAT sp_av_find_input_format;
static AVFORMAT_OPEN_INPUT sp_avformat_open_input;
static AV_DUMP_FORMAT sp_av_dump_format;
static AV_READ_FRAME sp_av_read_frame;
static AV_SEEK_FRAME sp_av_seek_frame;
static AVFORMAT_SEEK_FILE sp_avformat_seek_file;
static AV_READ_PLAY sp_av_read_play;
static AV_READ_PAUSE sp_av_read_pause;
static AVFORMAT_NETWORK_INIT sp_avformat_network_init;     // 53.13.0
static AVFORMAT_NETWORK_DEINIT sp_avformat_network_deinit; // 53.13.0
static AVFORMAT_FIND_STREAM_INFO sp_avformat_find_stream_info; // 53.3.0
// count: 54

// libavdevice [53.0.0]
typedef int (APIENTRYP AVDEVICE_REGISTER_ALL)(void);
static AVDEVICE_REGISTER_ALL sp_avdevice_register_all;
// count: 55

// libavresample [1.0.1]
typedef AVAudioResampleContext* (APIENTRYP AVRESAMPLE_ALLOC_CONTEXT)(void);  // 1.0.1
typedef int (APIENTRYP AVRESAMPLE_OPEN)(AVAudioResampleContext *avr);  // 1.0.1
typedef void (APIENTRYP AVRESAMPLE_CLOSE)(AVAudioResampleContext *avr);  // 1.0.1
typedef void (APIENTRYP AVRESAMPLE_FREE)(AVAudioResampleContext **avr);  // 1.0.1
typedef int (APIENTRYP AVRESAMPLE_CONVERT)(AVAudioResampleContext *avr, uint8_t **output,
                                           int out_plane_size, int out_samples, uint8_t **input,
                                           int in_plane_size, int in_samples);  // 1.0.1
static AVRESAMPLE_ALLOC_CONTEXT sp_avresample_alloc_context;
static AVRESAMPLE_OPEN sp_avresample_open;
static AVRESAMPLE_CLOSE sp_avresample_close;
static AVRESAMPLE_FREE sp_avresample_free;
static AVRESAMPLE_CONVERT sp_avresample_convert;
// count: 60

// libswresample [1...]
typedef int (APIENTRYP AV_OPT_SET_SAMPLE_FMT)(void *obj, const char *name, enum AVSampleFormat fmt, int search_flags); // actually lavu .. but exist only w/ swresample!
+typedef struct SwrContext *(APIENTRYP SWR_ALLOC)(void); +typedef int (APIENTRYP SWR_INIT)(struct SwrContext *s); +typedef void (APIENTRYP SWR_FREE)(struct SwrContext **s); +typedef int (APIENTRYP SWR_CONVERT)(struct SwrContext *s, uint8_t **out, int out_count, const uint8_t **in , int in_count); + +static AV_OPT_SET_SAMPLE_FMT sp_av_opt_set_sample_fmt; +static SWR_ALLOC sp_swr_alloc; +static SWR_INIT sp_swr_init; +static SWR_FREE sp_swr_free; +static SWR_CONVERT sp_swr_convert; +// count: 65 + +// We use JNI Monitor Locking, since this removes the need +// to statically link-in pthreads on window .. +// #define USE_PTHREAD_LOCKING 1 +// +#define USE_JNI_LOCKING 1 + +#if defined (USE_PTHREAD_LOCKING) + #include <pthread.h> + #warning USE LOCKING PTHREAD + static pthread_mutex_t mutex_avcodec_openclose; + #define MY_MUTEX_LOCK(e,s) pthread_mutex_lock(&(s)) + #define MY_MUTEX_UNLOCK(e,s) pthread_mutex_unlock(&(s)) +#elif defined (USE_JNI_LOCKING) + static jobject mutex_avcodec_openclose; + #define MY_MUTEX_LOCK(e,s) (*e)->MonitorEnter(e, s) + #define MY_MUTEX_UNLOCK(e,s) (*e)->MonitorExit(e, s) +#else + #warning USE LOCKING NONE + #define MY_MUTEX_LOCK(e,s) + #define MY_MUTEX_UNLOCK(e,s) +#endif + +#define SYMBOL_COUNT 65 + +JNIEXPORT jboolean JNICALL FF_FUNC(initSymbols0) + (JNIEnv *env, jobject instance, jobject jmutex_avcodec_openclose, jobject jSymbols, jint count) +{ + #ifdef USE_PTHREAD_LOCKING + pthread_mutexattr_t renderLockAttr; + #endif + int64_t* symbols; // jlong -> int64_t -> intptr_t -> FUNC_PTR + int i; + + if(SYMBOL_COUNT != count) { + fprintf(stderr, "FFMPEGNatives.initSymbols0: Wrong symbol count: Expected %d, Is %d\n", + SYMBOL_COUNT, count); + return JNI_FALSE; + } + JoglCommon_init(env); + + i = 0; + symbols = (int64_t *) (*env)->GetPrimitiveArrayCritical(env, jSymbols, NULL); + + sp_avutil_version = (AVUTIL_VERSION) (intptr_t) symbols[i++]; + sp_avformat_version = (AVFORMAT_VERSION) (intptr_t) symbols[i++]; + sp_avcodec_version = 
(AVCODEC_VERSION) (intptr_t) symbols[i++]; + sp_avresample_version = (AVRESAMPLE_VERSION) (intptr_t) symbols[i++]; + sp_swresample_version = (SWRESAMPLE_VERSION) (intptr_t) symbols[i++]; + + sp_avcodec_register_all = (AVCODEC_REGISTER_ALL) (intptr_t) symbols[i++]; + sp_avcodec_close = (AVCODEC_CLOSE) (intptr_t) symbols[i++]; + sp_avcodec_string = (AVCODEC_STRING) (intptr_t) symbols[i++]; + sp_avcodec_find_decoder = (AVCODEC_FIND_DECODER) (intptr_t) symbols[i++]; + sp_avcodec_open2 = (AVCODEC_OPEN2) (intptr_t) symbols[i++]; + sp_avcodec_alloc_frame = (AVCODEC_ALLOC_FRAME) (intptr_t) symbols[i++]; + sp_avcodec_get_frame_defaults = (AVCODEC_GET_FRAME_DEFAULTS) (intptr_t) symbols[i++]; + sp_avcodec_free_frame = (AVCODEC_FREE_FRAME) (intptr_t) symbols[i++]; + sp_avcodec_default_get_buffer = (AVCODEC_DEFAULT_GET_BUFFER) (intptr_t) symbols[i++]; + sp_avcodec_default_release_buffer = (AVCODEC_DEFAULT_RELEASE_BUFFER) (intptr_t) symbols[i++]; + sp_avcodec_default_get_buffer2 = (AVCODEC_DEFAULT_GET_BUFFER2) (intptr_t) symbols[i++]; + sp_avcodec_get_edge_width = (AVCODEC_GET_EDGE_WIDTH) (intptr_t) symbols[i++]; + sp_av_image_fill_linesizes = (AV_IMAGE_FILL_LINESIZES) (intptr_t) symbols[i++]; + sp_avcodec_align_dimensions = (AVCODEC_ALIGN_DIMENSIONS) (intptr_t) symbols[i++]; + sp_avcodec_align_dimensions2 = (AVCODEC_ALIGN_DIMENSIONS2) (intptr_t) symbols[i++]; + sp_avcodec_flush_buffers = (AVCODEC_FLUSH_BUFFERS) (intptr_t) symbols[i++]; + sp_av_init_packet = (AV_INIT_PACKET) (intptr_t) symbols[i++]; + sp_av_new_packet = (AV_NEW_PACKET) (intptr_t) symbols[i++]; + sp_av_destruct_packet = (AV_DESTRUCT_PACKET) (intptr_t) symbols[i++]; + sp_av_free_packet = (AV_FREE_PACKET) (intptr_t) symbols[i++]; + sp_avcodec_decode_audio4 = (AVCODEC_DECODE_AUDIO4) (intptr_t) symbols[i++]; + sp_avcodec_decode_video2 = (AVCODEC_DECODE_VIDEO2) (intptr_t) symbols[i++]; + + sp_av_pix_fmt_descriptors = (const AVPixFmtDescriptor*) (intptr_t) symbols[i++]; + sp_av_frame_unref = (AV_FRAME_UNREF) (intptr_t) 
symbols[i++]; + sp_av_realloc = (AV_REALLOC) (intptr_t) symbols[i++]; + sp_av_free = (AV_FREE) (intptr_t) symbols[i++]; + sp_av_get_bits_per_pixel = (AV_GET_BITS_PER_PIXEL) (intptr_t) symbols[i++]; + sp_av_samples_get_buffer_size = (AV_SAMPLES_GET_BUFFER_SIZE) (intptr_t) symbols[i++]; + sp_av_get_bytes_per_sample = (AV_GET_BYTES_PER_SAMPLE) (intptr_t) symbols[i++]; + sp_av_opt_set_int = (AV_OPT_SET_INT) (intptr_t) symbols[i++]; + sp_av_dict_get = (AV_DICT_GET) (intptr_t) symbols[i++]; + sp_av_dict_count = (AV_DICT_COUNT) (intptr_t) symbols[i++]; + sp_av_dict_set = (AV_DICT_SET) (intptr_t) symbols[i++]; + sp_av_dict_free = (AV_DICT_FREE) (intptr_t) symbols[i++]; + + sp_avformat_alloc_context = (AVFORMAT_ALLOC_CONTEXT) (intptr_t) symbols[i++];; + sp_avformat_free_context = (AVFORMAT_FREE_CONTEXT) (intptr_t) symbols[i++]; + sp_avformat_close_input = (AVFORMAT_CLOSE_INPUT) (intptr_t) symbols[i++]; + sp_av_register_all = (AV_REGISTER_ALL) (intptr_t) symbols[i++]; + sp_av_find_input_format = (AV_FIND_INPUT_FORMAT) (intptr_t) symbols[i++]; + sp_avformat_open_input = (AVFORMAT_OPEN_INPUT) (intptr_t) symbols[i++]; + sp_av_dump_format = (AV_DUMP_FORMAT) (intptr_t) symbols[i++]; + sp_av_read_frame = (AV_READ_FRAME) (intptr_t) symbols[i++]; + sp_av_seek_frame = (AV_SEEK_FRAME) (intptr_t) symbols[i++]; + sp_avformat_seek_file = (AVFORMAT_SEEK_FILE) (intptr_t) symbols[i++]; + sp_av_read_play = (AV_READ_PLAY) (intptr_t) symbols[i++]; + sp_av_read_pause = (AV_READ_PAUSE) (intptr_t) symbols[i++]; + sp_avformat_network_init = (AVFORMAT_NETWORK_INIT) (intptr_t) symbols[i++]; + sp_avformat_network_deinit = (AVFORMAT_NETWORK_DEINIT) (intptr_t) symbols[i++]; + sp_avformat_find_stream_info = (AVFORMAT_FIND_STREAM_INFO) (intptr_t) symbols[i++]; + + sp_avdevice_register_all = (AVDEVICE_REGISTER_ALL) (intptr_t) symbols[i++]; + + sp_avresample_alloc_context = (AVRESAMPLE_ALLOC_CONTEXT) (intptr_t) symbols[i++]; + sp_avresample_open = (AVRESAMPLE_OPEN) (intptr_t) symbols[i++]; + 
sp_avresample_close = (AVRESAMPLE_CLOSE) (intptr_t) symbols[i++]; + sp_avresample_free = (AVRESAMPLE_FREE) (intptr_t) symbols[i++]; + sp_avresample_convert = (AVRESAMPLE_CONVERT) (intptr_t) symbols[i++]; + + sp_av_opt_set_sample_fmt = (AV_OPT_SET_SAMPLE_FMT) (intptr_t) symbols[i++]; + sp_swr_alloc = (SWR_ALLOC) (intptr_t) symbols[i++]; + sp_swr_init = (SWR_INIT) (intptr_t) symbols[i++]; + sp_swr_free = (SWR_FREE) (intptr_t) symbols[i++]; + sp_swr_convert = (SWR_CONVERT) (intptr_t) symbols[i++]; + + (*env)->ReleasePrimitiveArrayCritical(env, jSymbols, symbols, 0); + + if(SYMBOL_COUNT != i) { + // boom + fprintf(stderr, "FFMPEGNatives.initSymbols0: Wrong symbol assignment count: Expected %d, Is %d\n", + SYMBOL_COUNT, i); + return JNI_FALSE; + } + + #if LIBAVCODEC_VERSION_MAJOR >= 55 + if(!HAS_FUNC(sp_avcodec_default_get_buffer2) || + !HAS_FUNC(sp_av_frame_unref) ) { + fprintf(stderr, "avcodec >= 55: avcodec_default_get_buffer2 %p, av_frame_unref %p\n", + sp_avcodec_default_get_buffer2, sp_av_frame_unref); + return JNI_FALSE; + } + #else + if(!HAS_FUNC(sp_avcodec_default_get_buffer) || + !HAS_FUNC(sp_avcodec_default_release_buffer)) { + fprintf(stderr, "avcodec < 55: avcodec_default_get_buffer %p, sp_avcodec_default_release_buffer %p\n", + sp_avcodec_default_get_buffer2, sp_avcodec_default_release_buffer); + return JNI_FALSE; + } + #endif + + #if defined (USE_PTHREAD_LOCKING) + pthread_mutexattr_init(&renderLockAttr); + pthread_mutexattr_settype(&renderLockAttr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&mutex_avcodec_openclose, &renderLockAttr); // recursive + #elif defined (USE_JNI_LOCKING) + mutex_avcodec_openclose = (*env)->NewGlobalRef(env, jmutex_avcodec_openclose); + #endif + + /** At static destroy: Never + #if defined (USE_PTHREAD_LOCKING) + pthread_mutex_unlock(&mutex_avcodec_openclose); + pthread_mutex_destroy(&mutex_avcodec_openclose); + #elif defined (USE_JNI_LOCKING) + (*env)->DeleteGlobalRef(env, mutex_avcodec_openclose); + #endif + */ + + return 
JNI_TRUE; +} + +static int _isAudioFormatSupported(JNIEnv *env, jobject ffmpegMediaPlayer, enum AVSampleFormat aSampleFmt, int32_t aSampleRate, int32_t aChannels) { + return JNI_TRUE == (*env)->CallBooleanMethod(env, ffmpegMediaPlayer, ffmpeg_jni_mid_isAudioFormatSupported, aSampleFmt, aSampleRate, aChannels); +} +static void _updateJavaAttributes(JNIEnv *env, FFMPEGToolBasicAV_t* pAV) { + if(NULL!=env) { + (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, ffmpeg_jni_mid_setupFFAttributes, + pAV->vid, pAV->vPixFmt, pAV->vBufferPlanes, + pAV->vBitsPerPixel, pAV->vBytesPerPixelPerPlane, + pAV->vTexWidth[0], pAV->vTexWidth[1], pAV->vTexWidth[2], + pAV->vWidth, pAV->vHeight, + pAV->aid, pAV->aSampleFmtOut, pAV->aSampleRateOut, pAV->aChannelsOut, pAV->aFrameSize); + (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, ffmpeg_jni_mid_updateAttributes, + pAV->vid, pAV->aid, + pAV->vWidth, pAV->vHeight, + pAV->bps_stream, pAV->bps_video, pAV->bps_audio, + pAV->fps, pAV->frames_video, pAV->frames_audio, pAV->duration, + (*env)->NewStringUTF(env, pAV->vcodec), + (*env)->NewStringUTF(env, pAV->acodec) ); + } +} +static void _setIsGLOriented(JNIEnv *env, FFMPEGToolBasicAV_t* pAV) { + if(NULL!=env) { + (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, ffmpeg_jni_mid_setIsGLOriented, pAV->vFlipped); + } +} + +static void freeInstance(JNIEnv *env, FFMPEGToolBasicAV_t* pAV) { + int i; + if(NULL != pAV) { + // Close the A resampler + if( NULL != pAV->avResampleCtx ) { + sp_avresample_free(&pAV->avResampleCtx); + pAV->avResampleCtx = NULL; + } + if( NULL != pAV->swResampleCtx ) { + sp_swr_free(&pAV->swResampleCtx); + pAV->swResampleCtx = NULL; + } + if( NULL != pAV->aResampleBuffer ) { + sp_av_free(pAV->aResampleBuffer); + pAV->aResampleBuffer = NULL; + } + + MY_MUTEX_LOCK(env, mutex_avcodec_openclose); + { + // Close the V codec + if(NULL != pAV->pVCodecCtx) { + sp_avcodec_close(pAV->pVCodecCtx); + pAV->pVCodecCtx = NULL; + } + pAV->pVCodec=NULL; + + // Close the A codec + 
if(NULL != pAV->pACodecCtx) { + sp_avcodec_close(pAV->pACodecCtx); + pAV->pACodecCtx = NULL; + } + pAV->pACodec=NULL; + } + MY_MUTEX_UNLOCK(env, mutex_avcodec_openclose); + + // Close the frames + if(NULL != pAV->pVFrame) { + if(HAS_FUNC(sp_avcodec_free_frame)) { + sp_avcodec_free_frame(&pAV->pVFrame); + } else { + sp_av_free(pAV->pVFrame); + } + pAV->pVFrame = NULL; + } + if(NULL != pAV->pANIOBuffers) { + for(i=0; i<pAV->aFrameCount; i++) { + NIOBuffer_t * pNIOBuffer = &pAV->pANIOBuffers[i]; + if( NULL != pNIOBuffer->nioRef ) { + if(pAV->verbose) { + fprintf(stderr, "A NIO: Free.X ptr %p / ref %p, %d bytes\n", + pNIOBuffer->origPtr, pNIOBuffer->nioRef, pNIOBuffer->size); + } + (*env)->DeleteGlobalRef(env, pNIOBuffer->nioRef); + } + } + free(pAV->pANIOBuffers); + pAV->pANIOBuffers = NULL; + } + if(NULL != pAV->pAFrames) { + for(i=0; i<pAV->aFrameCount; i++) { + if(HAS_FUNC(sp_avcodec_free_frame)) { + sp_avcodec_free_frame(&pAV->pAFrames[i]); + } else { + sp_av_free(pAV->pAFrames[i]); + } + } + free(pAV->pAFrames); + pAV->pAFrames = NULL; + } + + // Close the video file + if(NULL != pAV->pFormatCtx) { + sp_avformat_close_input(&pAV->pFormatCtx); + // Only for output files! 
+ // sp_avformat_free_context(pAV->pFormatCtx); + pAV->pFormatCtx = NULL; + } + if( NULL != pAV->ffmpegMediaPlayer ) { + (*env)->DeleteGlobalRef(env, pAV->ffmpegMediaPlayer); + pAV->ffmpegMediaPlayer = NULL; + } + + free(pAV); + } +} + +static int my_getPlaneCount(AVPixFmtDescriptor *pDesc) { + int i, p=-1; + for(i=pDesc->nb_components-1; i>=0; i--) { + int p0 = pDesc->comp[i].plane; + if( p < p0 ) { + p = p0; + } + } + return p+1; +} + +#if 0 +static int my_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt) { + return sp_av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL; +} + +static enum PixelFormat my_get_format(struct AVCodecContext *s, const enum PixelFormat * fmt) { + int i=0; + enum PixelFormat f0, fR = PIX_FMT_NONE; + char buf[256]; + + fprintf(stderr, "get_format ****\n"); + while (fmt[i] != PIX_FMT_NONE /* && ff_is_hwaccel_pix_fmt(fmt[i]) */) { + f0 = fmt[i]; + if(fR==PIX_FMT_NONE && !my_is_hwaccel_pix_fmt(f0)) { + fR = f0; + } + sp_av_get_pix_fmt_string(buf, sizeof(buf), f0); + fprintf(stderr, "get_format %d: %d - %s - %s\n", i, f0, sp_av_get_pix_fmt_name(f0), buf); + ++i; + } + fprintf(stderr, "get_format %d - %s *** \n", fR, sp_av_get_pix_fmt_name(fR)); + fflush(NULL); + return fR; +} +#endif + +JNIEXPORT jint JNICALL FF_FUNC(getAvUtilMajorVersionCC0) + (JNIEnv *env, jobject instance) { + return (jint) LIBAVUTIL_VERSION_MAJOR; +} + +JNIEXPORT jint JNICALL FF_FUNC(getAvFormatMajorVersionCC0) + (JNIEnv *env, jobject instance) { + return (jint) LIBAVFORMAT_VERSION_MAJOR; +} + +JNIEXPORT jint JNICALL FF_FUNC(getAvCodecMajorVersionCC0) + (JNIEnv *env, jobject instance) { + return (jint) LIBAVCODEC_VERSION_MAJOR; +} + +JNIEXPORT jint JNICALL FF_FUNC(getAvResampleMajorVersionCC0) + (JNIEnv *env, jobject instance) { + return (jint) LIBAVRESAMPLE_VERSION_MAJOR; +} + +JNIEXPORT jint JNICALL FF_FUNC(getSwResampleMajorVersionCC0) + (JNIEnv *env, jobject instance) { + return (jint) LIBSWRESAMPLE_VERSION_MAJOR; +} + +JNIEXPORT jlong JNICALL FF_FUNC(createInstance0) 
+ (JNIEnv *env, jobject instance, jobject ffmpegMediaPlayer, jboolean verbose) +{ + FFMPEGToolBasicAV_t * pAV = calloc(1, sizeof(FFMPEGToolBasicAV_t)); + if(NULL==pAV) { + JoglCommon_throwNewRuntimeException(env, "Couldn't alloc instance"); + return 0; + } + pAV->avcodecVersion = sp_avcodec_version(); + pAV->avformatVersion = sp_avformat_version(); + pAV->avutilVersion = sp_avutil_version(); + if(HAS_FUNC(sp_avresample_version)) { + pAV->avresampleVersion = sp_avresample_version(); + } else { + pAV->avresampleVersion = 0; + } + if(HAS_FUNC(sp_swresample_version)) { + pAV->swresampleVersion = sp_swresample_version(); + } else { + pAV->swresampleVersion = 0; + } + + #if LIBAVCODEC_VERSION_MAJOR >= 55 + // TODO: We keep code on using 1 a/v frame per decoding cycle now. + // This is compatible w/ OpenAL's alBufferData(..) + // and w/ OpenGL's texture update command, both copy data immediatly. + // pAV->useRefCountedFrames = AV_HAS_API_REFCOUNTED_FRAMES(pAV); + pAV->useRefCountedFrames = 0; + #else + pAV->useRefCountedFrames = 0; + #endif + + pAV->ffmpegMediaPlayer = (*env)->NewGlobalRef(env, ffmpegMediaPlayer); + pAV->verbose = verbose; + pAV->vid=AV_STREAM_ID_AUTO; + pAV->aid=AV_STREAM_ID_AUTO; + + if(pAV->verbose) { + fprintf(stderr, "Info: Use avresample %d, swresample %d, device %d, refCount %d\n", + AV_HAS_API_AVRESAMPLE(pAV), AV_HAS_API_SWRESAMPLE(pAV), HAS_FUNC(sp_avdevice_register_all), pAV->useRefCountedFrames); + } + return (jlong) (intptr_t) pAV; +} + +JNIEXPORT void JNICALL FF_FUNC(destroyInstance0) + (JNIEnv *env, jobject instance, jlong ptr) +{ + FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr)); + if (pAV != NULL) { + // stop assumed .. 
+ freeInstance(env, pAV); + } +} + +static uint64_t getDefaultAudioChannelLayout(int channelCount) { + switch(channelCount) { + case 1: return AV_CH_LAYOUT_MONO; + case 2: return AV_CH_LAYOUT_STEREO; + case 3: return AV_CH_LAYOUT_SURROUND; + case 4: return AV_CH_LAYOUT_QUAD; + case 5: return AV_CH_LAYOUT_5POINT0; + case 6: return AV_CH_LAYOUT_5POINT1; + case 7: return AV_CH_LAYOUT_6POINT1; + case 8: return AV_CH_LAYOUT_7POINT1; + default: return AV_CH_LAYOUT_NATIVE; + } +} + +static void initPTSStats(PTSStats *ptsStats); +static int64_t evalPTS(PTSStats *ptsStats, int64_t inPTS, int64_t inDTS); + +static AVInputFormat* tryAVInputFormat(const char * name, int verbose) { + AVInputFormat* inFmt = sp_av_find_input_format(name); + if( verbose) { + if ( NULL == inFmt ) { + fprintf(stderr, "Warning: Could not find input format '%s'\n", name); + } else { + fprintf(stderr, "Info: Found input format '%s'\n", name); + } + } + return inFmt; +} +static const char * inFmtNames[] = { + "video4linux2", // linux + "video4linux", // linux (old) + "dshow", // windows + "vfwcap", // windows (old) + "mpg", + "yuv2", + "mjpeg", + "avi", + "wmv", + "libx264", + "h264", + "mpegts" +}; +static AVInputFormat* findAVInputFormat(int verbose) { + AVInputFormat* inFmt = NULL; + const char *inFmtName; + int i=0; + do { + inFmtName = inFmtNames[i++]; + if( NULL == inFmtName ) { + break; + } + inFmt = tryAVInputFormat(inFmtName, verbose); + } while ( NULL == inFmt ); + return inFmt; +} + +#if 0 +static void getAlignedLinesizes(AVCodecContext *avctx, int linesize[/*4*/]) { + int stride_align[AV_NUM_DATA_POINTERS]; + int w = avctx->width; + int h = avctx->height; + int unaligned; + int i; + + sp_avcodec_align_dimensions2(avctx, &w, &h, stride_align); + + if (!(avctx->flags & CODEC_FLAG_EMU_EDGE)) { + int edge_width = sp_avcodec_get_edge_width(); + w += edge_width * 2; + h += edge_width * 2; + } + + do { + // Get alignment for all planes (-> YUVP .. 
etc) + sp_av_image_fill_linesizes(linesize, avctx->pix_fmt, w); + // increase alignment of w for next try (rhs gives the lowest bit set in w) + w += w & ~(w - 1); + + unaligned = 0; + for (i = 0; i < 4; i++) + unaligned |= linesize[i] % stride_align[i]; + } while (unaligned); +} +#endif + +JNIEXPORT void JNICALL FF_FUNC(setStream0) + (JNIEnv *env, jobject instance, jlong ptr, jstring jURL, jboolean jIsCameraInput, + jint vid, jstring jSizeS, jint vWidth, jint vHeight, jint vRate, + jint aid, jint aMaxChannelCount, jint aPrefSampleRate) +{ + char cameraName[256]; + int res, i; + jboolean iscopy; + FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)(intptr_t)ptr; + + if (pAV == NULL) { + JoglCommon_throwNewRuntimeException(env, "NULL AV ptr"); + return; + } + + // Register all formats and codecs + sp_avcodec_register_all(); + if( jIsCameraInput && HAS_FUNC(sp_avdevice_register_all) ) { + sp_avdevice_register_all(); + } + sp_av_register_all(); + // Network too .. + if(HAS_FUNC(sp_avformat_network_init)) { + sp_avformat_network_init(); + } + + pAV->pFormatCtx = sp_avformat_alloc_context(); + + const char *urlPath = (*env)->GetStringUTFChars(env, jURL, &iscopy); + const char *filename = urlPath; // allow changing path for camera .. 
+ + // Open video file + AVDictionary *inOpts = NULL; + AVInputFormat* inFmt = NULL; + if( jIsCameraInput ) { + char buffer[256]; + inFmt = findAVInputFormat(pAV->verbose); + if( NULL == inFmt ) { + JoglCommon_throwNewRuntimeException(env, "Couldn't find input format for camera: %s", urlPath); + (*env)->ReleaseStringChars(env, jURL, (const jchar *)urlPath); + return; + } + if(pAV->verbose) { + fprintf(stderr, "Camera: Format: %s (%s)\n", inFmt->long_name, inFmt->name); + } + if( 0 == strncmp(inFmt->name, "dshow", 255) ) { + int devIdx = atoi(urlPath); + strncpy(cameraName, "video=", sizeof(cameraName)); + res = findDShowVideoDevice(cameraName+6, sizeof(cameraName)-6, devIdx, pAV->verbose); + if( 0 == res ) { + if(pAV->verbose) { + fprintf(stderr, "Camera %d found: %s\n", devIdx, cameraName); + } + filename = cameraName; + } else if(pAV->verbose) { + fprintf(stderr, "Camera %d not found\n", devIdx); + } + } + + const char *sizeS = NULL != jSizeS ? (*env)->GetStringUTFChars(env, jSizeS, &iscopy) : NULL; + int hasSize = 0; + if( NULL != sizeS ) { + snprintf(buffer, sizeof(buffer), "%s", sizeS); + (*env)->ReleaseStringChars(env, jSizeS, (const jchar *)sizeS); + hasSize = 1; + } else if( vWidth > 0 && vHeight > 0 ) { + snprintf(buffer, sizeof(buffer), "%dx%d", vWidth, vHeight); + hasSize = 1; + } + if( hasSize ) { + if(pAV->verbose) { + fprintf(stderr, "Camera: Size: %s\n", buffer); + } + sp_av_dict_set(&inOpts, "video_size", buffer, 0); + } + if( vRate > 0 ) { + snprintf(buffer, sizeof(buffer), "%d", vRate); + if(pAV->verbose) { + fprintf(stderr, "Camera: FPS: %s\n", buffer); + } + sp_av_dict_set(&inOpts, "framerate", buffer, 0); + } + } + + MY_MUTEX_LOCK(env, mutex_avcodec_openclose); + { + res = sp_avformat_open_input(&pAV->pFormatCtx, filename, inFmt, NULL != inOpts ? 
&inOpts : NULL); + if( NULL != inOpts ) { + sp_av_dict_free(&inOpts); + } + if(res != 0) { + MY_MUTEX_UNLOCK(env, mutex_avcodec_openclose); + JoglCommon_throwNewRuntimeException(env, "Couldn't open URI: %s [%dx%d @ %d hz], err %d", filename, vWidth, vHeight, vRate, res); + (*env)->ReleaseStringChars(env, jURL, (const jchar *)urlPath); + return; + } + + // Retrieve detailed stream information + if(sp_avformat_find_stream_info(pAV->pFormatCtx, NULL)<0) { + MY_MUTEX_UNLOCK(env, mutex_avcodec_openclose); + (*env)->ReleaseStringChars(env, jURL, (const jchar *)urlPath); + JoglCommon_throwNewRuntimeException(env, "Couldn't find stream information"); + return; + } + } + MY_MUTEX_UNLOCK(env, mutex_avcodec_openclose); + + if(pAV->verbose) { + // Dump information about file onto standard error + sp_av_dump_format(pAV->pFormatCtx, 0, filename, JNI_FALSE); + } + (*env)->ReleaseStringChars(env, jURL, (const jchar *)urlPath); + + // FIXME: Libav Binary compatibility! JAU01 + if (pAV->pFormatCtx->duration != AV_NOPTS_VALUE) { + pAV->duration = pAV->pFormatCtx->duration / AV_TIME_BASE_MSEC; + } + if (pAV->pFormatCtx->start_time != AV_NOPTS_VALUE) { + pAV->start_time = pAV->pFormatCtx->start_time / AV_TIME_BASE_MSEC; + } + if (pAV->pFormatCtx->bit_rate) { + pAV->bps_stream = pAV->pFormatCtx->bit_rate; + } + + if(pAV->verbose) { + fprintf(stderr, "Streams: %d, req vid %d aid %d\n", pAV->pFormatCtx->nb_streams, vid, aid); + } + + // Find the first audio and video stream, or the one matching vid + // FIXME: Libav Binary compatibility! 
JAU01 + for(i=0; ( AV_STREAM_ID_AUTO==pAV->aid || AV_STREAM_ID_AUTO==pAV->vid ) && i<pAV->pFormatCtx->nb_streams; i++) { + AVStream *st = pAV->pFormatCtx->streams[i]; + if(pAV->verbose) { + fprintf(stderr, "Stream: %d: is-video %d, is-audio %d\n", i, (AVMEDIA_TYPE_VIDEO == st->codec->codec_type), AVMEDIA_TYPE_AUDIO == st->codec->codec_type); + } + if(AVMEDIA_TYPE_VIDEO == st->codec->codec_type) { + if(AV_STREAM_ID_AUTO==pAV->vid && (AV_STREAM_ID_AUTO==vid || vid == i) ) { + pAV->pVStream = st; + pAV->vid=i; + } + } else if(AVMEDIA_TYPE_AUDIO == st->codec->codec_type) { + if(AV_STREAM_ID_AUTO==pAV->aid && (AV_STREAM_ID_AUTO==aid || aid == i) ) { + pAV->pAStream = st; + pAV->aid=i; + } + } + } + if( AV_STREAM_ID_AUTO == pAV->aid ) { + pAV->aid = AV_STREAM_ID_NONE; + } + if( AV_STREAM_ID_AUTO == pAV->vid ) { + pAV->vid = AV_STREAM_ID_NONE; + } + + if( pAV->verbose ) { + fprintf(stderr, "Found vid %d, aid %d\n", pAV->vid, pAV->aid); + } + + if(0<=pAV->aid) { + AVFrame * pAFrame0 = sp_avcodec_alloc_frame(); + if( NULL == pAFrame0 ) { + JoglCommon_throwNewRuntimeException(env, "Couldn't alloc 1st audio frame\n"); + return; + } + + // Get a pointer to the codec context for the audio stream + // FIXME: Libav Binary compatibility! JAU01 + pAV->pACodecCtx=pAV->pAStream->codec; + + // FIXME: Libav Binary compatibility! JAU01 + if (pAV->pACodecCtx->bit_rate) { + pAV->bps_audio = pAV->pACodecCtx->bit_rate; + } + + // Customize .. + // pAV->pACodecCtx->thread_count=2; + // pAV->pACodecCtx->thread_type=FF_THREAD_FRAME|FF_THREAD_SLICE; // Decode more than one frame at once + pAV->pACodecCtx->thread_count=0; + pAV->pACodecCtx->thread_type=0; + pAV->pACodecCtx->workaround_bugs=FF_BUG_AUTODETECT; + pAV->pACodecCtx->skip_frame=AVDISCARD_DEFAULT; + + // Note: OpenAL well supports n-channel by now (SOFT), + // however - AFAIK AV_SAMPLE_FMT_S16 would allow no conversion! 
+ pAV->pACodecCtx->request_sample_fmt=AV_SAMPLE_FMT_S16; + if( 1 <= aMaxChannelCount && aMaxChannelCount <= 2 ) { + pAV->pACodecCtx->request_channel_layout=getDefaultAudioChannelLayout(aMaxChannelCount); + #if LIBAVCODEC_VERSION_MAJOR < 54 + /** Until 55.0.0, but stopped working w/ 54 already :( */ + pAV->pACodecCtx->request_channels=aMaxChannelCount; + #endif + } + pAV->pACodecCtx->skip_frame=AVDISCARD_DEFAULT; + + sp_avcodec_string(pAV->acodec, sizeof(pAV->acodec), pAV->pACodecCtx, 0); + + // Find the decoder for the audio stream + pAV->pACodec=sp_avcodec_find_decoder(pAV->pACodecCtx->codec_id); + if(pAV->pACodec==NULL) { + JoglCommon_throwNewRuntimeException(env, "Couldn't find audio codec %d, %s", pAV->pACodecCtx->codec_id, pAV->acodec); + return; + } + + // Open codec + MY_MUTEX_LOCK(env, mutex_avcodec_openclose); + { + #if LIBAVCODEC_VERSION_MAJOR >= 55 + pAV->pACodecCtx->refcounted_frames = pAV->useRefCountedFrames; + #endif + res = sp_avcodec_open2(pAV->pACodecCtx, pAV->pACodec, NULL); + } + MY_MUTEX_UNLOCK(env, mutex_avcodec_openclose); + if(res<0) { + JoglCommon_throwNewRuntimeException(env, "Couldn't open audio codec %d, %s", pAV->pACodecCtx->codec_id, pAV->acodec); + return; + } + if (!pAV->pACodecCtx->channel_layout) { + pAV->pACodecCtx->channel_layout = getDefaultAudioChannelLayout(pAV->pACodecCtx->channels); + } + if (!pAV->pACodecCtx->channel_layout) { + JoglCommon_throwNewRuntimeException(env, "Couldn't determine channel layout of %d channels\n", pAV->pACodecCtx->channels); + return; + } + pAV->aSampleRate = pAV->pACodecCtx->sample_rate; + pAV->aChannels = pAV->pACodecCtx->channels; + pAV->aFrameSize = pAV->pACodecCtx->frame_size; // in samples per channel! 
+ pAV->aSampleFmt = pAV->pACodecCtx->sample_fmt; + pAV->frames_audio = pAV->pAStream->nb_frames; + pAV->aSinkSupport = _isAudioFormatSupported(env, pAV->ffmpegMediaPlayer, pAV->aSampleFmt, pAV->aSampleRate, pAV->aChannels); + if( pAV->verbose ) { + fprintf(stderr, "A channels %d [l %"PRId64"], sample_rate %d, frame_size %d, frame_number %d, [afps %f, cfps %f, sfps %f], nb_frames %"PRId64", [maxChan %d, prefRate %d, req_chan_layout %"PRId64", req_chan %d], sink-support %d \n", + pAV->aChannels, pAV->pACodecCtx->channel_layout, pAV->aSampleRate, pAV->aFrameSize, pAV->pACodecCtx->frame_number, + my_av_q2f(pAV->pAStream->avg_frame_rate), + my_av_q2f_r(pAV->pAStream->codec->time_base), + my_av_q2f_r(pAV->pAStream->time_base), + pAV->pAStream->nb_frames, + aMaxChannelCount, aPrefSampleRate, pAV->pACodecCtx->request_channel_layout, + #if LIBAVCODEC_VERSION_MAJOR < 54 + pAV->pACodecCtx->request_channels, + #else + 0, + #endif + pAV->aSinkSupport); + } + + // default + pAV->aSampleFmtOut = pAV->aSampleFmt; + pAV->aChannelsOut = pAV->aChannels; + pAV->aSampleRateOut = pAV->aSampleRate; + + if( ( AV_HAS_API_AVRESAMPLE(pAV) || AV_HAS_API_SWRESAMPLE(pAV) ) && + ( pAV->aSampleFmt != AV_SAMPLE_FMT_S16 || + ( 0 != aPrefSampleRate && pAV->aSampleRate != aPrefSampleRate ) || + !pAV->aSinkSupport ) ) { + + if( 0 == aPrefSampleRate ) { + aPrefSampleRate = pAV->aSampleRate; + } + int32_t aSinkSupport = 0; + enum AVSampleFormat aSampleFmtOut = AV_SAMPLE_FMT_S16; + int32_t aChannelsOut; + int32_t aSampleRateOut; + int32_t minChannelCount = MIN_INT(aMaxChannelCount,pAV->pACodecCtx->channels); + + if( _isAudioFormatSupported(env, pAV->ffmpegMediaPlayer, aSampleFmtOut, aPrefSampleRate, pAV->pACodecCtx->channels) ) { + aChannelsOut = pAV->pACodecCtx->channels; + aSampleRateOut = aPrefSampleRate; + aSinkSupport = 1; + } else if( _isAudioFormatSupported(env, pAV->ffmpegMediaPlayer, aSampleFmtOut, aPrefSampleRate, minChannelCount) ) { + aChannelsOut = minChannelCount; + aSampleRateOut = 
aPrefSampleRate; + aSinkSupport = 1; + } + + if( aSinkSupport ) { + if( AV_HAS_API_AVRESAMPLE(pAV) ) { + pAV->avResampleCtx = sp_avresample_alloc_context(); + sp_av_opt_set_int(pAV->avResampleCtx, "in_channel_layout", pAV->pACodecCtx->channel_layout, 0); + sp_av_opt_set_int(pAV->avResampleCtx, "out_channel_layout", getDefaultAudioChannelLayout(aChannelsOut), 0); + sp_av_opt_set_int(pAV->avResampleCtx, "in_sample_rate", pAV->aSampleRate, 0); + sp_av_opt_set_int(pAV->avResampleCtx, "out_sample_rate", aSampleRateOut, 0); + sp_av_opt_set_int(pAV->avResampleCtx, "in_sample_fmt", pAV->aSampleFmt, 0); + sp_av_opt_set_int(pAV->avResampleCtx, "out_sample_fmt", aSampleFmtOut, 0); + + if ( sp_avresample_open(pAV->avResampleCtx) < 0 ) { + sp_avresample_free(&pAV->avResampleCtx); + pAV->avResampleCtx = NULL; + fprintf(stderr, "error initializing avresample ctx\n"); + } else { + // OK + pAV->aSampleFmtOut = aSampleFmtOut; + pAV->aChannelsOut = aChannelsOut; + pAV->aSampleRateOut = aSampleRateOut; + pAV->aSinkSupport = 1; + } + } else if( AV_HAS_API_SWRESAMPLE(pAV) ) { + pAV->swResampleCtx = sp_swr_alloc(); + sp_av_opt_set_int(pAV->swResampleCtx, "in_channel_layout", pAV->pACodecCtx->channel_layout, 0); + sp_av_opt_set_int(pAV->swResampleCtx, "out_channel_layout", getDefaultAudioChannelLayout(aChannelsOut), 0); + sp_av_opt_set_int(pAV->swResampleCtx, "in_sample_rate", pAV->aSampleRate, 0); + sp_av_opt_set_int(pAV->swResampleCtx, "out_sample_rate", aSampleRateOut, 0); + sp_av_opt_set_sample_fmt(pAV->swResampleCtx, "in_sample_fmt", pAV->aSampleFmt, 0); + sp_av_opt_set_sample_fmt(pAV->swResampleCtx, "out_sample_fmt", aSampleFmtOut, 0); + + if ( sp_swr_init(pAV->swResampleCtx) < 0 ) { + sp_swr_free(&pAV->swResampleCtx); + pAV->swResampleCtx = NULL; + fprintf(stderr, "error initializing swresample ctx\n"); + } else { + // OK + pAV->aSampleFmtOut = aSampleFmtOut; + pAV->aChannelsOut = aChannelsOut; + pAV->aSampleRateOut = aSampleRateOut; + pAV->aSinkSupport = 1; + } + } + } + } + + // 
Allocate audio frames + // FIXME: Libav Binary compatibility! JAU01 + pAV->aFrameCount = 1; + pAV->pANIOBuffers = calloc(pAV->aFrameCount, sizeof(NIOBuffer_t)); + pAV->pAFrames = calloc(pAV->aFrameCount, sizeof(AVFrame*)); + pAV->pAFrames[0] = pAFrame0; + for(i=1; i<pAV->aFrameCount; i++) { + pAV->pAFrames[i] = sp_avcodec_alloc_frame(); + if( NULL == pAV->pAFrames[i] ) { + JoglCommon_throwNewRuntimeException(env, "Couldn't alloc audio frame %d / %d", i, pAV->aFrameCount); + return; + } + } + pAV->aFrameCurrent = 0; + } + + if(0<=pAV->vid) { + // Get a pointer to the codec context for the video stream + // FIXME: Libav Binary compatibility! JAU01 + pAV->pVCodecCtx=pAV->pVStream->codec; + #if 0 + pAV->pVCodecCtx->get_format = my_get_format; + #endif + + if (pAV->pVCodecCtx->bit_rate) { + // FIXME: Libav Binary compatibility! JAU01 + pAV->bps_video = pAV->pVCodecCtx->bit_rate; + } + + // Customize .. + // pAV->pVCodecCtx->thread_count=2; + // pAV->pVCodecCtx->thread_type=FF_THREAD_FRAME|FF_THREAD_SLICE; // Decode more than one frame at once + pAV->pVCodecCtx->thread_count=0; + pAV->pVCodecCtx->thread_type=0; + pAV->pVCodecCtx->workaround_bugs=FF_BUG_AUTODETECT; + pAV->pVCodecCtx->skip_frame=AVDISCARD_DEFAULT; + + sp_avcodec_string(pAV->vcodec, sizeof(pAV->vcodec), pAV->pVCodecCtx, 0); + + // Find the decoder for the video stream + pAV->pVCodec=sp_avcodec_find_decoder(pAV->pVCodecCtx->codec_id); + if(pAV->pVCodec==NULL) { + JoglCommon_throwNewRuntimeException(env, "Couldn't find video codec %d, %s", pAV->pVCodecCtx->codec_id, pAV->vcodec); + return; + } + + // Open codec + MY_MUTEX_LOCK(env, mutex_avcodec_openclose); + { + #if LIBAVCODEC_VERSION_MAJOR >= 55 + pAV->pVCodecCtx->refcounted_frames = pAV->useRefCountedFrames; + #endif + res = sp_avcodec_open2(pAV->pVCodecCtx, pAV->pVCodec, NULL); + } + MY_MUTEX_UNLOCK(env, mutex_avcodec_openclose); + if(res<0) { + JoglCommon_throwNewRuntimeException(env, "Couldn't open video codec %d, %s", pAV->pVCodecCtx->codec_id, 
pAV->vcodec); + return; + } + + // Hack to correct wrong frame rates that seem to be generated by some codecs + // FIXME: Libav Binary compatibility! JAU01 + if(pAV->pVCodecCtx->time_base.num>1000 && pAV->pVCodecCtx->time_base.den==1) { + pAV->pVCodecCtx->time_base.den=1000; + } + // FIXME: Libav Binary compatibility! JAU01 + if( pAV->pVStream->avg_frame_rate.den && pAV->pVStream->avg_frame_rate.num ) { + pAV->fps = my_av_q2f(pAV->pVStream->avg_frame_rate); + #if LIBAVCODEC_VERSION_MAJOR < 55 + } else if( pAV->pVStream->r_frame_rate.den && pAV->pVStream->r_frame_rate.num ) { + pAV->fps = my_av_q2f(pAV->pVStream->r_frame_rate); + #endif + } else if( pAV->pVStream->codec->time_base.den && pAV->pVStream->codec->time_base.num ) { + pAV->fps = my_av_q2f_r(pAV->pVStream->codec->time_base); + } else if( pAV->pVStream->time_base.den && pAV->pVStream->time_base.num ) { + pAV->fps = my_av_q2f_r(pAV->pVStream->time_base); + } else { + pAV->fps = 0.0f; // duh! + } + pAV->frames_video = pAV->pVStream->nb_frames; + + // Allocate video frame + // FIXME: Libav Binary compatibility! 
JAU01 + pAV->vWidth = pAV->pVCodecCtx->width; + pAV->vHeight = pAV->pVCodecCtx->height; + pAV->vPixFmt = pAV->pVCodecCtx->pix_fmt; + pAV->vFlipped = JNI_FALSE; + { + AVPixFmtDescriptor pixDesc = sp_av_pix_fmt_descriptors[pAV->vPixFmt]; + pAV->vBitsPerPixel = sp_av_get_bits_per_pixel(&pixDesc); + pAV->vBufferPlanes = my_getPlaneCount(&pixDesc); + } + + if( pAV->verbose ) { + fprintf(stderr, "V frame_size %d, frame_number %d, [afps %f, rfps %f, cfps %f, sfps %f] -> %f fps, nb_frames %"PRId64", size %dx%d, fmt 0x%X, bpp %d, planes %d, codecCaps 0x%X\n", + pAV->pVCodecCtx->frame_size, pAV->pVCodecCtx->frame_number, + my_av_q2f(pAV->pVStream->avg_frame_rate), + #if LIBAVCODEC_VERSION_MAJOR < 55 + my_av_q2f(pAV->pVStream->r_frame_rate), + #else + 0.0f, + #endif + my_av_q2f_r(pAV->pVStream->codec->time_base), + my_av_q2f_r(pAV->pVStream->time_base), + pAV->fps, + pAV->pVStream->nb_frames, + pAV->vWidth, pAV->vHeight, pAV->vPixFmt, pAV->vBitsPerPixel, pAV->vBufferPlanes, pAV->pVCodecCtx->codec->capabilities); + } + + pAV->pVFrame=sp_avcodec_alloc_frame(); + if( pAV->pVFrame == NULL ) { + JoglCommon_throwNewRuntimeException(env, "Couldn't alloc video frame"); + return; + } + // Min. requirement for 'get_buffer2' ! 
+ pAV->pVFrame->width = pAV->pVCodecCtx->width; + pAV->pVFrame->height = pAV->pVCodecCtx->height; + pAV->pVFrame->format = pAV->pVCodecCtx->pix_fmt; + #if LIBAVCODEC_VERSION_MAJOR >= 55 + res = sp_avcodec_default_get_buffer2(pAV->pVCodecCtx, pAV->pVFrame, 0); + #else + res = sp_avcodec_default_get_buffer(pAV->pVCodecCtx, pAV->pVFrame); + #endif + if(0!=res) { + JoglCommon_throwNewRuntimeException(env, "Couldn't peek video buffer dimension"); + return; + } + { + const int32_t bytesPerPixel = ( pAV->vBitsPerPixel + 7 ) / 8 ; + if(1 == pAV->vBufferPlanes) { + pAV->vBytesPerPixelPerPlane = bytesPerPixel; + } else { + pAV->vBytesPerPixelPerPlane = 1; + } + int32_t vLinesize[4]; + if( pAV->vBufferPlanes > 1 ) { + #if 0 + getAlignedLinesizes(pAV->pVCodecCtx, vLinesize); + for(i=0; i<pAV->vBufferPlanes; i++) { + // FIXME: Libav Binary compatibility! JAU01 + pAV->vTexWidth[i] = vLinesize[i] / pAV->vBytesPerPixelPerPlane ; + } + #else + for(i=0; i<pAV->vBufferPlanes; i++) { + // FIXME: Libav Binary compatibility! JAU01 + vLinesize[i] = pAV->pVFrame->linesize[i]; + pAV->vTexWidth[i] = vLinesize[i] / pAV->vBytesPerPixelPerPlane ; + } + #endif + } else { + vLinesize[0] = pAV->pVFrame->linesize[0]; + if( pAV->vPixFmt == PIX_FMT_YUYV422 ) { + // Stuff 2x 16bpp (YUYV) into one RGBA pixel! 
pAV->vTexWidth[0] = pAV->pVCodecCtx->width / 2;
} else {
    pAV->vTexWidth[0] = pAV->pVCodecCtx->width;
}
}
if( pAV->verbose ) {
    // Dump per-plane texture geometry derived above.
    for(i=0; i<pAV->vBufferPlanes; i++) {
        fprintf(stderr, "Video: P[%d]: %d texw * %d bytesPP -> %d line\n", i, pAV->vTexWidth[i], pAV->vBytesPerPixelPerPlane, vLinesize[i]);
    }
}
}
// Release the probe frame buffer acquired above; lavc >= 55 uses refcounted frames.
#if LIBAVCODEC_VERSION_MAJOR >= 55
sp_av_frame_unref(pAV->pVFrame);
#else
sp_avcodec_default_release_buffer(pAV->pVCodecCtx, pAV->pVFrame);
#endif
}
pAV->vPTS=0;
pAV->aPTS=0;
initPTSStats(&pAV->vPTSStats);
initPTSStats(&pAV->aPTSStats);
_updateJavaAttributes(env, pAV);
}

/**
 * Stores the GL function pointers (glTexSubImage2D, glGetError, glFlush, glFinish),
 * passed from Java as raw proc addresses, into the per-instance state so that
 * readNextPacket0() can upload decoded video planes directly into a texture.
 * NOTE(review): PFNGLFLUSH / PFNGLFINISH (no ...PROC suffix) are presumably
 * project-local typedefs — confirm against the accompanying header.
 */
JNIEXPORT void JNICALL FF_FUNC(setGLFuncs0)
  (JNIEnv *env, jobject instance, jlong ptr, jlong jProcAddrGLTexSubImage2D, jlong jProcAddrGLGetError, jlong jProcAddrGLFlush, jlong jProcAddrGLFinish)
{
    FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr));
    pAV->procAddrGLTexSubImage2D = (PFNGLTEXSUBIMAGE2DPROC) (intptr_t)jProcAddrGLTexSubImage2D;
    pAV->procAddrGLGetError = (PFNGLGETERRORPROC) (intptr_t)jProcAddrGLGetError;
    pAV->procAddrGLFlush = (PFNGLFLUSH) (intptr_t)jProcAddrGLFlush;
    pAV->procAddrGLFinish = (PFNGLFINISH) (intptr_t)jProcAddrGLFinish;
}

/* Debug tracing for the texture sub-image uploads; compiled out by default (#if 0). */
#if 0
#define DBG_TEXSUBIMG2D_a(c,p,w1,w2,h,i) fprintf(stderr, "TexSubImage2D.%c offset %d / %d, size %d x %d, ", c, (w1*p->pVCodecCtx->width)/w2, p->pVCodecCtx->height/h, p->vTexWidth[i], p->pVCodecCtx->height/h)
#define DBG_TEXSUBIMG2D_b(p) fprintf(stderr, "err 0x%X\n", pAV->procAddrGLGetError())
#else
#define DBG_TEXSUBIMG2D_a(c,p,w1,w2,h,i)
#define DBG_TEXSUBIMG2D_b(p)
#endif

/**
 * Reads and decodes the next packet of the opened stream.
 * Audio frames are pushed to the Java FFMPEGMediaPlayer via pushSound();
 * video frames are uploaded into the bound texture via glTexSubImage2D.
 * Returns the PTS (ms) of a decoded video frame, END_OF_STREAM_PTS at EOS,
 * or INVALID_PTS otherwise.
 */
JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0)
  (JNIEnv *env, jobject instance, jlong ptr, jint texTarget, jint texFmt, jint texType)
{
    FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr));

    AVPacket packet;
    jint resPTS = INVALID_PTS;
    uint8_t * pkt_odata;
    int pkt_osize;

    packet.data = NULL; // minimum
    packet.size = 0; // requirement
    sp_av_init_packet(&packet);

    const int avRes = sp_av_read_frame(pAV->pFormatCtx, &packet);
    // Remember original data/size: the decode loops below advance packet.data,
    // but av_free_packet() must see the original pointer.
    pkt_odata = packet.data;
    pkt_osize = packet.size;
    // EOS is signalled either by the read result or by the byte-IO context.
    if( AVERROR_EOF == avRes || ( pAV->pFormatCtx->pb && pAV->pFormatCtx->pb->eof_reached ) ) {
        if( pAV->verbose ) {
            fprintf(stderr, "EOS: avRes[res %d, eos %d], pb-EOS %d\n",
                avRes, AVERROR_EOF == avRes,
                ( pAV->pFormatCtx->pb && pAV->pFormatCtx->pb->eof_reached ) );
        }
        resPTS = END_OF_STREAM_PTS;
    } else if( 0 <= avRes ) {
        if( pAV->verbose ) {
            fprintf(stderr, "P: ptr %p, size %d\n", packet.data, packet.size);
        }
        if(packet.stream_index==pAV->aid) {
            // Decode audio frame
            if(NULL == pAV->pAFrames) { // no audio registered
                sp_av_free_packet(&packet);
                return INVALID_PTS;
            }
            int frameCount;
            int flush_complete = 0;
            // One packet may contain several audio frames; loop until consumed.
            // The "|| 0 == frameCount" guarantees one pass for flush packets (size 0).
            for ( frameCount=0; 0 < packet.size || 0 == frameCount; frameCount++ ) {
                int frameDecoded;
                int len1;
                AVFrame* pAFrameCurrent = pAV->pAFrames[pAV->aFrameCurrent];
                if( pAV->useRefCountedFrames ) {
                    // Rotate through the frame ring so Java may still hold older frames.
                    sp_av_frame_unref(pAFrameCurrent);
                    pAV->aFrameCurrent = ( pAV->aFrameCurrent + 1 ) % pAV->aFrameCount ;
                }
                sp_avcodec_get_frame_defaults(pAFrameCurrent);

                if (flush_complete) {
                    break;
                }
                len1 = sp_avcodec_decode_audio4(pAV->pACodecCtx, pAFrameCurrent, &frameDecoded, &packet);
                if (len1 < 0) {
                    // if error, we skip the frame
                    packet.size = 0;
                    break;
                }
                // Advance past the bytes the decoder consumed.
                packet.data += len1;
                packet.size -= len1;

                if (!frameDecoded) {
                    // stop sending empty packets if the decoder is finished
                    if (!packet.data && pAV->pACodecCtx->codec->capabilities & CODEC_CAP_DELAY) {
                        flush_complete = 1;
                    }
                    continue;
                }

                // Byte size of the decoded samples (all channels, align 1).
                int32_t data_size = 0;
                if(HAS_FUNC(sp_av_samples_get_buffer_size)) {
                    data_size = sp_av_samples_get_buffer_size(NULL /* linesize, may be NULL */,
                        pAV->aChannels,
                        pAFrameCurrent->nb_samples,
                        pAFrameCurrent->format,
                        1 /* align */);
                }
                #if 0
                fprintf(stderr, "channels %d sample_rate %d \n", pAV->aChannels , pAV->aSampleRate);
                fprintf(stderr, "data %d 
\n", pAV->aFrameSize);
                #endif

                const AVRational time_base = pAV->pAStream->time_base;
                const int64_t pkt_pts = pAFrameCurrent->pkt_pts;
                if( 0 == frameCount && AV_NOPTS_VALUE != pkt_pts ) { // 1st frame only, discard invalid PTS ..
                    pAV->aPTS = my_av_q2i32( pkt_pts * 1000, time_base);
                } else { // subsequent frames or invalid PTS ..
                    // Derive PTS advance from the decoded byte count:
                    // bytes / (channels * bytesPerSample * samplesPerMs) = ms.
                    const int32_t bytesPerSample = sp_av_get_bytes_per_sample( pAV->pACodecCtx->sample_fmt );
                    pAV->aPTS += data_size / ( pAV->aChannels * bytesPerSample * ( pAV->aSampleRate / 1000 ) );
                }
                if( pAV->verbose ) {
                    int32_t aDTS = my_av_q2i32( pAFrameCurrent->pkt_dts * 1000, time_base);

                    fprintf(stderr, "A pts %d [pkt_pts %"PRId64"], dts %d [pkt_dts %"PRId64"], f# %d, aFrame %d/%d %p, dataPtr %p, dataSize %d\n",
                        pAV->aPTS, pkt_pts, aDTS, pAFrameCurrent->pkt_dts, frameCount,
                        pAV->aFrameCurrent, pAV->aFrameCount, pAFrameCurrent, pAFrameCurrent->data[0], data_size);
                }
                if( NULL != env ) {
                    void* data_ptr = pAFrameCurrent->data[0]; // default

                    // If a resampler was configured (format/rate/channel conversion
                    // towards S16 sink format), convert into the shared resample buffer.
                    if( NULL != pAV->avResampleCtx || NULL != pAV->swResampleCtx ) {
                        uint8_t *tmp_out;
                        int out_samples=-1, out_size, out_linesize;
                        int osize = sp_av_get_bytes_per_sample( pAV->aSampleFmtOut );
                        int nb_samples = pAFrameCurrent->nb_samples;

                        out_size = sp_av_samples_get_buffer_size(&out_linesize,
                            pAV->aChannelsOut,
                            nb_samples,
                            pAV->aSampleFmtOut, 0 /* align */);

                        // Grow (or reuse) the persistent resample buffer.
                        tmp_out = sp_av_realloc(pAV->aResampleBuffer, out_size);
                        if (!tmp_out) {
                            JoglCommon_throwNewRuntimeException(env, "Couldn't alloc resample buffer of size %d", out_size);
                            return INVALID_PTS;
                        }
                        pAV->aResampleBuffer = tmp_out;

                        if( NULL != pAV->avResampleCtx ) {
                            out_samples = sp_avresample_convert(pAV->avResampleCtx,
                                &pAV->aResampleBuffer,
                                out_linesize, nb_samples,
                                pAFrameCurrent->data,
                                pAFrameCurrent->linesize[0],
                                pAFrameCurrent->nb_samples);
                        } else if( NULL != pAV->swResampleCtx ) {
                            out_samples = sp_swr_convert(pAV->swResampleCtx,
                                &pAV->aResampleBuffer, nb_samples,
                                (const 
uint8_t **)pAFrameCurrent->data, pAFrameCurrent->nb_samples);
                        }
                        if (out_samples < 0) {
                            JoglCommon_throwNewRuntimeException(env, "avresample_convert() failed");
                            return INVALID_PTS;
                        }
                        data_size = out_samples * osize * pAV->aChannelsOut;
                        data_ptr = tmp_out;
                    }
                    // Cache a direct NIO ByteBuffer per ring slot; re-wrap only when the
                    // backing pointer moved or the payload grew beyond the cached capacity.
                    NIOBuffer_t * pNIOBufferCurrent = &pAV->pANIOBuffers[pAV->aFrameCurrent];
                    int newNIO = NULL == pNIOBufferCurrent->nioRef;
                    if( !newNIO && ( data_ptr != pNIOBufferCurrent->origPtr || data_size > pNIOBufferCurrent->size ) ) {
                        if(pAV->verbose) {
                            fprintf(stderr, "A NIO: Free.0 ptr %p / ref %p, %d bytes\n",
                                pNIOBufferCurrent->origPtr, pNIOBufferCurrent->nioRef, pNIOBufferCurrent->size);
                        }
                        // Drop the stale global ref before wrapping the new region.
                        (*env)->DeleteGlobalRef(env, pNIOBufferCurrent->nioRef);
                        newNIO = 1;
                    }
                    if( newNIO ) {
                        jobject jSampleData = (*env)->NewDirectByteBuffer(env, data_ptr, data_size);
                        pNIOBufferCurrent->nioRef = (*env)->NewGlobalRef(env, jSampleData);
                        pNIOBufferCurrent->origPtr = data_ptr;
                        pNIOBufferCurrent->size = data_size;
                        (*env)->DeleteLocalRef(env, jSampleData);
                        if(pAV->verbose) {
                            fprintf(stderr, "A NIO: Alloc ptr %p / ref %p, %d bytes\n",
                                pNIOBufferCurrent->origPtr, pNIOBufferCurrent->nioRef, pNIOBufferCurrent->size);
                        }
                    }
                    // Hand the samples to Java: FFMPEGMediaPlayer.pushSound(ByteBuffer, size, pts).
                    (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, ffmpeg_jni_mid_pushSound, pNIOBufferCurrent->nioRef, data_size, pAV->aPTS);
                }
            }
        } else if(packet.stream_index==pAV->vid) {
            // Decode video frame
            if(NULL == pAV->pVFrame) {
                sp_av_free_packet(&packet);
                return INVALID_PTS;
            }
            int frameCount;
            int flush_complete = 0;
            // Same multi-frame-per-packet loop as for audio above.
            for ( frameCount=0; 0 < packet.size || 0 == frameCount; frameCount++ ) {
                int frameDecoded;
                int len1;
                sp_avcodec_get_frame_defaults(pAV->pVFrame);
                if (flush_complete) {
                    break;
                }
                len1 = sp_avcodec_decode_video2(pAV->pVCodecCtx, pAV->pVFrame, &frameDecoded, &packet);
                if (len1 < 0) {
                    // if error, we skip the frame
                    packet.size = 0;
                    break;
                }
                packet.data += len1;
                packet.size -= len1;

                if (!frameDecoded) {
                    // stop 
sending empty packets if the decoder is finished
                    if (!packet.data && pAV->pVCodecCtx->codec->capabilities & CODEC_CAP_DELAY) {
                        flush_complete = 1;
                    }
                    continue;
                }

                // FIXME: Libav Binary compatibility! JAU01
                const AVRational time_base = pAV->pVStream->time_base;
                const int64_t pkt_pts = pAV->pVFrame->pkt_pts;
                const int64_t pkt_dts = pAV->pVFrame->pkt_dts;
                // evalPTS() picks PTS or DTS based on which monotonicity error count is lower.
                const int64_t fix_pts = evalPTS(&pAV->vPTSStats, pkt_pts, pkt_dts);
                if( AV_NOPTS_VALUE != fix_pts ) { // discard invalid PTS ..
                    pAV->vPTS = my_av_q2i32( fix_pts * 1000, time_base);
                }
                if( pAV->verbose ) {
                    const int32_t vPTS = AV_NOPTS_VALUE != pkt_pts ? my_av_q2i32( pkt_pts * 1000, time_base) : 0;
                    const int32_t vDTS = AV_NOPTS_VALUE != pkt_dts ? my_av_q2i32( pkt_dts * 1000, time_base) : 0;

                    // Frame delay / repeat diagnostics (repeat_pict > 0 flags repeated fields).
                    const double frame_delay_d = av_q2d(pAV->pVCodecCtx->time_base);
                    const double frame_repeat_d = pAV->pVFrame->repeat_pict * (frame_delay_d * 0.5);

                    const int32_t frame_delay_i = my_av_q2i32(1000, pAV->pVCodecCtx->time_base);
                    const int32_t frame_repeat_i = pAV->pVFrame->repeat_pict * (frame_delay_i / 2);

                    const char * warn = frame_repeat_i > 0 ? "REPEAT" : "NORMAL" ;

                    fprintf(stderr, "V fix_pts %d, pts %d [pkt_pts %"PRId64"], dts %d [pkt_dts %"PRId64"], time d(%lf s + r %lf = %lf s), i(%d ms + r %d = %d ms) - %s - f# %d, dec %d, data %p, lsz %d\n",
                        pAV->vPTS, vPTS, pkt_pts, vDTS, pkt_dts,
                        frame_delay_d, frame_repeat_d, (frame_delay_d + frame_repeat_d),
                        frame_delay_i, frame_repeat_i, (frame_delay_i + frame_repeat_i), warn, frameCount,
                        len1, pAV->pVFrame->data[0], pAV->pVFrame->linesize[0]);
                    // fflush(NULL);
                }
                // A zero linesize means no picture data was produced; skip upload.
                if( 0 == pAV->pVFrame->linesize[0] ) {
                    if( pAV->useRefCountedFrames ) {
                        sp_av_frame_unref(pAV->pVFrame);
                    }
                    continue;
                }
                resPTS = pAV->vPTS; // Video Frame!
                int p_offset[] = { 0, 0, 0, 0 };
                // Negative linesize: planes are stored bottom-up; start each plane
                // at its last row and tell Java the image is GL-oriented.
                if( pAV->pVFrame->linesize[0] < 0 ) {
                    if( JNI_FALSE == pAV->vFlipped ) {
                        pAV->vFlipped = JNI_TRUE;
                        _setIsGLOriented(env, pAV);
                    }

                    // image bottom-up
                    int h_1 = pAV->pVCodecCtx->height - 1;
                    p_offset[0] = pAV->pVFrame->linesize[0] * h_1;
                    if( pAV->vBufferPlanes > 1 ) {
                        p_offset[1] = pAV->pVFrame->linesize[1] * h_1;
                    }
                    if( pAV->vBufferPlanes > 2 ) {
                        p_offset[2] = pAV->pVFrame->linesize[2] * h_1;
                    }
                    /**
                    if( pAV->vBufferPlanes > 3 ) {
                        p_offset[3] = pAV->pVFrame->linesize[3] * h_1;
                    } */
                } else if( JNI_TRUE == pAV->vFlipped ) {
                    pAV->vFlipped = JNI_FALSE;
                    _setIsGLOriented(env, pAV);
                }

                // 1st plane or complete packed frame
                // FIXME: Libav Binary compatibility! JAU01
                DBG_TEXSUBIMG2D_a('Y',pAV,1,1,1,0);
                pAV->procAddrGLTexSubImage2D(texTarget, 0,
                    0, 0,
                    pAV->vTexWidth[0], pAV->pVCodecCtx->height,
                    texFmt, texType, pAV->pVFrame->data[0] + p_offset[0]);
                DBG_TEXSUBIMG2D_b(pAV);

                // Planar YUV: U/V planes are packed beside/below the Y plane
                // inside one larger texture (offsets in texels below).
                if( pAV->vPixFmt == PIX_FMT_YUV420P || pAV->vPixFmt == PIX_FMT_YUVJ420P ) {
                    // U plane
                    // FIXME: Libav Binary compatibility! JAU01
                    DBG_TEXSUBIMG2D_a('U',pAV,1,1,2,1);
                    pAV->procAddrGLTexSubImage2D(texTarget, 0,
                        pAV->pVCodecCtx->width, 0,
                        pAV->vTexWidth[1], pAV->pVCodecCtx->height/2,
                        texFmt, texType, pAV->pVFrame->data[1] + p_offset[1]);
                    DBG_TEXSUBIMG2D_b(pAV);
                    // V plane
                    // FIXME: Libav Binary compatibility! JAU01
                    DBG_TEXSUBIMG2D_a('V',pAV,1,1,2,2);
                    pAV->procAddrGLTexSubImage2D(texTarget, 0,
                        pAV->pVCodecCtx->width, pAV->pVCodecCtx->height/2,
                        pAV->vTexWidth[2], pAV->pVCodecCtx->height/2,
                        texFmt, texType, pAV->pVFrame->data[2] + p_offset[2]);
                    DBG_TEXSUBIMG2D_b(pAV);
                } else if( pAV->vPixFmt == PIX_FMT_YUV422P || pAV->vPixFmt == PIX_FMT_YUVJ422P ) {
                    // U plane
                    // FIXME: Libav Binary compatibility!
JAU01
                    DBG_TEXSUBIMG2D_a('U',pAV,1,1,1,1);
                    pAV->procAddrGLTexSubImage2D(texTarget, 0,
                        pAV->pVCodecCtx->width, 0,
                        pAV->vTexWidth[1], pAV->pVCodecCtx->height,
                        texFmt, texType, pAV->pVFrame->data[1] + p_offset[1]);
                    DBG_TEXSUBIMG2D_b(pAV);
                    // V plane
                    // FIXME: Libav Binary compatibility! JAU01
                    DBG_TEXSUBIMG2D_a('V',pAV,3,2,1,1);
                    pAV->procAddrGLTexSubImage2D(texTarget, 0,
                        pAV->pVCodecCtx->width+pAV->pVCodecCtx->width/2, 0,
                        pAV->vTexWidth[2], pAV->pVCodecCtx->height,
                        texFmt, texType, pAV->pVFrame->data[2] + p_offset[2]);
                    DBG_TEXSUBIMG2D_b(pAV);
                } // FIXME: Add more planar formats !

                // Ensure the upload completed before the frame buffer is reused below.
                pAV->procAddrGLFinish();
                //pAV->procAddrGLFlush();
                if( pAV->useRefCountedFrames ) {
                    sp_av_frame_unref(pAV->pVFrame);
                }
            }
        }
        // restore orig pointer and size values, we may have moved along within packet
        packet.data = pkt_odata;
        packet.size = pkt_osize;
        sp_av_free_packet(&packet);
    }
    return resPTS;
}

/** Resets a PTS statistics record to its neutral state (no errors, minimal last values). */
static void initPTSStats(PTSStats *ptsStats) {
    ptsStats->ptsError = 0;
    ptsStats->dtsError = 0;
    ptsStats->ptsLast = INT64_MIN;
    ptsStats->dtsLast = INT64_MIN;
}
/**
 * Chooses the more trustworthy timestamp of inPTS / inDTS.
 * Each non-monotonic value increments that stream's error counter;
 * PTS is preferred while its error count does not exceed the DTS error count,
 * otherwise DTS is returned. May return AV_NOPTS_VALUE if both are invalid.
 */
static int64_t evalPTS(PTSStats *ptsStats, int64_t inPTS, int64_t inDTS) {
    int64_t resPTS = AV_NOPTS_VALUE;

    if ( inDTS != AV_NOPTS_VALUE ) {
        ptsStats->dtsError += inDTS <= ptsStats->dtsLast;
        ptsStats->dtsLast = inDTS;
    }
    if ( inPTS != AV_NOPTS_VALUE ) {
        ptsStats->ptsError += inPTS <= ptsStats->ptsLast;
        ptsStats->ptsLast = inPTS;
    }
    if ( inPTS != AV_NOPTS_VALUE &&
         ( ptsStats->ptsError<=ptsStats->dtsError || inDTS == AV_NOPTS_VALUE ) ) {
        resPTS = inPTS;
    } else {
        resPTS = inDTS;
    }
    return resPTS;
}


/** Resumes demuxing/network playback; returns the av_read_play() result. */
JNIEXPORT jint JNICALL FF_FUNC(play0)
  (JNIEnv *env, jobject instance, jlong ptr)
{
    FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr));
    return sp_av_read_play(pAV->pFormatCtx);
}
/** Pauses demuxing/network playback; returns the av_read_pause() result. */
JNIEXPORT jint JNICALL FF_FUNC(pause0)
  (JNIEnv *env, jobject instance, jlong ptr)
{
    FFMPEGToolBasicAV_t 
*)((void *)((intptr_t)ptr)); + return sp_av_read_pause(pAV->pFormatCtx); +} + +JNIEXPORT jint JNICALL FF_FUNC(seek0) + (JNIEnv *env, jobject instance, jlong ptr, jint pos1) +{ + const FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr)); + int64_t pos0, pts0; + int streamID; + AVRational time_base; + if( pAV->vid >= 0 ) { + pos0 = pAV->vPTS; + streamID = pAV->vid; + time_base = pAV->pVStream->time_base; + pts0 = pAV->pVFrame->pkt_pts; + } else if( pAV->aid >= 0 ) { + pos0 = pAV->aPTS; + streamID = pAV->aid; + time_base = pAV->pAStream->time_base; + pts0 = pAV->pAFrames[pAV->aFrameCurrent]->pkt_pts; + } else { + return pAV->vPTS; + } + int64_t pts1 = (int64_t) (pos1 * (int64_t) time_base.den) + / (1000 * (int64_t) time_base.num); + if(pAV->verbose) { + fprintf(stderr, "SEEK: vid %d, aid %d, pos0 %"PRId64", pos1 %d, pts: %"PRId64" -> %"PRId64"\n", pAV->vid, pAV->aid, pos0, pos1, pts0, pts1); + } + int flags = 0; + if(pos1 < pos0) { + flags |= AVSEEK_FLAG_BACKWARD; + } + int res = -2; + if(HAS_FUNC(sp_av_seek_frame)) { + if(pAV->verbose) { + fprintf(stderr, "SEEK.0: pre : s %"PRId64" / %"PRId64" -> t %d / %"PRId64"\n", pos0, pts0, pos1, pts1); + } + sp_av_seek_frame(pAV->pFormatCtx, streamID, pts1, flags); + } else if(HAS_FUNC(sp_avformat_seek_file)) { + int64_t ptsD = pts1 - pts0; + int64_t seek_min = ptsD > 0 ? pts1 - ptsD : INT64_MIN; + int64_t seek_max = ptsD < 0 ? pts1 - ptsD : INT64_MAX; + if(pAV->verbose) { + fprintf(stderr, "SEEK.1: pre : s %"PRId64" / %"PRId64" -> t %d / %"PRId64" [%"PRId64" .. %"PRId64"]\n", + pos0, pts0, pos1, pts1, seek_min, seek_max); + } + res = sp_avformat_seek_file(pAV->pFormatCtx, -1, seek_min, pts1, seek_max, flags); + } + if(NULL != pAV->pVCodecCtx) { + sp_avcodec_flush_buffers( pAV->pVCodecCtx ); + } + if(NULL != pAV->pACodecCtx) { + sp_avcodec_flush_buffers( pAV->pACodecCtx ); + } + const jint rPTS = my_av_q2i32( ( pAV->vid >= 0 ? 
pAV->pVFrame->pkt_pts : pAV->pAFrames[pAV->aFrameCurrent]->pkt_pts ) * 1000, time_base); + if(pAV->verbose) { + fprintf(stderr, "SEEK: post : res %d, u %d\n", res, rPTS); + } + return rPTS; +} + +JNIEXPORT jint JNICALL FF_FUNC(getVideoPTS0) + (JNIEnv *env, jobject instance, jlong ptr) +{ + FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr)); + return pAV->vPTS; +} + +JNIEXPORT jint JNICALL FF_FUNC(getAudioPTS0) + (JNIEnv *env, jobject instance, jlong ptr) +{ + FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr)); + return pAV->aPTS; +} + diff --git a/src/jogl/native/libav/ffmpeg_lavc53_lavf53_lavu51.c b/src/jogl/native/libav/ffmpeg_lavc53_lavf53_lavu51.c new file mode 100644 index 000000000..edce2ba1d --- /dev/null +++ b/src/jogl/native/libav/ffmpeg_lavc53_lavf53_lavu51.c @@ -0,0 +1,33 @@ +/** + * Copyright 2013 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. + */ + +#include "jogamp_opengl_util_av_impl_FFMPEGv08Natives.h" + +#define FF_FUNC(METHOD) Java_jogamp_opengl_util_av_impl_FFMPEGv08Natives_ ## METHOD + +#include "ffmpeg_impl_template.c" diff --git a/src/jogl/native/libav/ffmpeg_lavc54_lavf54_lavu52_lavr01.c b/src/jogl/native/libav/ffmpeg_lavc54_lavf54_lavu52_lavr01.c new file mode 100644 index 000000000..651a64976 --- /dev/null +++ b/src/jogl/native/libav/ffmpeg_lavc54_lavf54_lavu52_lavr01.c @@ -0,0 +1,33 @@ +/** + * Copyright 2013 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. + */ + +#include "jogamp_opengl_util_av_impl_FFMPEGv09Natives.h" + +#define FF_FUNC(METHOD) Java_jogamp_opengl_util_av_impl_FFMPEGv09Natives_ ## METHOD + +#include "ffmpeg_impl_template.c" diff --git a/src/jogl/native/libav/ffmpeg_lavc55_lavf55_lavu52_lavr01.c b/src/jogl/native/libav/ffmpeg_lavc55_lavf55_lavu52_lavr01.c new file mode 100644 index 000000000..277100398 --- /dev/null +++ b/src/jogl/native/libav/ffmpeg_lavc55_lavf55_lavu52_lavr01.c @@ -0,0 +1,33 @@ +/** + * Copyright 2013 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. 
Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. + */ + +#include "jogamp_opengl_util_av_impl_FFMPEGv10Natives.h" + +#define FF_FUNC(METHOD) Java_jogamp_opengl_util_av_impl_FFMPEGv10Natives_ ## METHOD + +#include "ffmpeg_impl_template.c" diff --git a/src/jogl/native/libav/ffmpeg_static.c b/src/jogl/native/libav/ffmpeg_static.c new file mode 100644 index 000000000..c8af59540 --- /dev/null +++ b/src/jogl/native/libav/ffmpeg_static.c @@ -0,0 +1,91 @@ +/** + * Copyright 2013 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. 
Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. 
+ */ + +#include "ffmpeg_static.h" + +#include "JoglCommon.h" + +#include <GL/gl-platform.h> + +static const char * const ClazzNameFFMPEGMediaPlayer = "jogamp/opengl/util/av/impl/FFMPEGMediaPlayer"; + +static jclass ffmpegMediaPlayerClazz = NULL; +jmethodID ffmpeg_jni_mid_pushSound = NULL; +jmethodID ffmpeg_jni_mid_updateAttributes = NULL; +jmethodID ffmpeg_jni_mid_setIsGLOriented = NULL; +jmethodID ffmpeg_jni_mid_setupFFAttributes = NULL; +jmethodID ffmpeg_jni_mid_isAudioFormatSupported = NULL; + +typedef unsigned (APIENTRYP AV_GET_VERSION)(void); + +JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGStaticNatives_initIDs0 + (JNIEnv *env, jclass clazz) +{ + jboolean res = JNI_TRUE; + JoglCommon_init(env); + + jclass c; + if (ffmpegMediaPlayerClazz != NULL) { + return JNI_FALSE; + } + + c = (*env)->FindClass(env, ClazzNameFFMPEGMediaPlayer); + if(NULL==c) { + JoglCommon_FatalError(env, "JOGL FFMPEG: can't find %s", ClazzNameFFMPEGMediaPlayer); + } + ffmpegMediaPlayerClazz = (jclass)(*env)->NewGlobalRef(env, c); + (*env)->DeleteLocalRef(env, c); + if(NULL==ffmpegMediaPlayerClazz) { + JoglCommon_FatalError(env, "JOGL FFMPEG: can't use %s", ClazzNameFFMPEGMediaPlayer); + } + + ffmpeg_jni_mid_pushSound = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "pushSound", "(Ljava/nio/ByteBuffer;II)V"); + ffmpeg_jni_mid_updateAttributes = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes", "(IIIIIIIFIIILjava/lang/String;Ljava/lang/String;)V"); + ffmpeg_jni_mid_setIsGLOriented = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "setIsGLOriented", "(Z)V"); + ffmpeg_jni_mid_setupFFAttributes = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "setupFFAttributes", "(IIIIIIIIIIIIIII)V"); + ffmpeg_jni_mid_isAudioFormatSupported = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "isAudioFormatSupported", "(III)Z"); + + if(ffmpeg_jni_mid_pushSound == NULL || + ffmpeg_jni_mid_updateAttributes == NULL || + ffmpeg_jni_mid_setIsGLOriented == NULL || + 
ffmpeg_jni_mid_setupFFAttributes == NULL || + ffmpeg_jni_mid_isAudioFormatSupported == NULL) { + return JNI_FALSE; + } + return res; +} + +JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGStaticNatives_getAvVersion0 + (JNIEnv *env, jclass clazz, jlong func) { + if( 0 != func ) { + return (jint) ((AV_GET_VERSION) (intptr_t) func)(); + } else { + return 0; + } +} + diff --git a/src/jogl/native/libav/ffmpeg_static.h b/src/jogl/native/libav/ffmpeg_static.h new file mode 100644 index 000000000..ab4db2506 --- /dev/null +++ b/src/jogl/native/libav/ffmpeg_static.h @@ -0,0 +1,50 @@ +/** + * Copyright 2013 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. + */ + +#ifndef _FFMPEG_STATIC_H +#define _FFMPEG_STATIC_H + +#ifdef _WIN32 + #include <windows.h> +#endif + +#include <gluegen_stdint.h> +#include <gluegen_inttypes.h> +#include <gluegen_stddef.h> +#include <gluegen_stdint.h> + +#include "jogamp_opengl_util_av_impl_FFMPEGStaticNatives.h" + +extern jmethodID ffmpeg_jni_mid_pushSound; +extern jmethodID ffmpeg_jni_mid_updateAttributes; +extern jmethodID ffmpeg_jni_mid_setIsGLOriented; +extern jmethodID ffmpeg_jni_mid_setupFFAttributes; +extern jmethodID ffmpeg_jni_mid_isAudioFormatSupported; + +#endif /* _FFMPEG_STATIC_H */ + diff --git a/src/jogl/native/libav/ffmpeg_tool.h b/src/jogl/native/libav/ffmpeg_tool.h index 3181a8a8f..136be2ecc 100644 --- a/src/jogl/native/libav/ffmpeg_tool.h +++ b/src/jogl/native/libav/ffmpeg_tool.h @@ -41,28 +41,112 @@ #include <gluegen_stddef.h> #include <gluegen_stdint.h> -#include <libavcodec/avcodec.h> -#include <libavformat/avformat.h> +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" +#include "libavutil/avutil.h" +#if LIBAVCODEC_VERSION_MAJOR >= 54 + #include "libavresample/avresample.h" + #include "libswresample/swresample.h" +#endif + +#ifndef LIBAVRESAMPLE_VERSION_MAJOR +#define LIBAVRESAMPLE_VERSION_MAJOR -1 +// Opaque +typedef void* AVAudioResampleContext; +#endif +#ifndef LIBSWRESAMPLE_VERSION_MAJOR +#define LIBSWRESAMPLE_VERSION_MAJOR -1 +// Opaque +typedef struct SwrContext SwrContext; +#endif #include <stdarg.h> #include <stdio.h> #include <stdlib.h> +#include <GL/gl.h> + +typedef void (APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const GLvoid *pixels); +typedef GLenum (APIENTRYP PFNGLGETERRORPROC) (void); +typedef void 
(APIENTRYP PFNGLFLUSH) (void); +typedef void (APIENTRYP PFNGLFINISH) (void); + /** * AV_TIME_BASE 1000000 */ #define AV_TIME_BASE_MSEC (AV_TIME_BASE/1000) +#define AV_VERSION_MAJOR(i) ( ( i >> 16 ) & 0xFF ) +#define AV_VERSION_MINOR(i) ( ( i >> 8 ) & 0xFF ) +#define AV_VERSION_SUB(i) ( ( i >> 0 ) & 0xFF ) + +/** Sync w/ GLMediaPlayer.STREAM_ID_NONE */ +#define AV_STREAM_ID_NONE -2 + +/** Sync w/ GLMediaPlayer.STREAM_ID_AUTO */ +#define AV_STREAM_ID_AUTO -1 + +/** Default number of audio frames per video frame. Sync w/ FFMPEGMediaPlayer.AV_DEFAULT_AFRAMES. */ +#define AV_DEFAULT_AFRAMES 8 + +/** Constant PTS marking an invalid PTS, i.e. Integer.MIN_VALUE == 0x80000000 == {@value}. Sync w/ TimeFrameI.INVALID_PTS */ +#define INVALID_PTS 0x80000000 + +/** Constant PTS marking the end of the stream, i.e. Integer.MIN_VALUE - 1 == 0x7FFFFFFF == {@value}. Sync w/ TimeFrameI.END_OF_STREAM_PTS */ +#define END_OF_STREAM_PTS 0x7FFFFFFF + +/** Since 54.0.0.1 */ +#define AV_HAS_API_AVRESAMPLE(pAV) ( ( LIBAVRESAMPLE_VERSION_MAJOR >= 0 ) && ( pAV->avresampleVersion != 0 ) ) + +/** Since 55.0.0.1 */ +#define AV_HAS_API_SWRESAMPLE(pAV) ( ( LIBSWRESAMPLE_VERSION_MAJOR >= 0 ) && ( pAV->swresampleVersion != 0 ) ) + +#define MAX_INT(a,b) ( (a >= b) ? a : b ) +#define MIN_INT(a,b) ( (a <= b) ? 
a : b ) + static inline float my_av_q2f(AVRational a){ - return a.num / (float) a.den; + return (float)a.num / (float)a.den; +} +static inline float my_av_q2f_r(AVRational a){ + return (float)a.den / (float)a.num; } -static inline int32_t my_av_q2i32(int32_t snum, AVRational a){ - return (snum * a.num) / a.den; +static inline int32_t my_av_q2i32(int64_t snum, AVRational a){ + return (int32_t) ( ( snum * (int64_t) a.num ) / (int64_t)a.den ); } +static inline int my_align(int v, int a){ + return ( v + a - 1 ) & ~( a - 1 ); +} + +typedef struct { + void *origPtr; + jobject nioRef; + int32_t size; +} NIOBuffer_t; + +typedef struct { + int64_t ptsError; // Number of backward PTS values (earlier than last PTS, excluding AV_NOPTS_VALUE) + int64_t dtsError; // Number of backward DTS values (earlier than last PTS, excluding AV_NOPTS_VALUE) + int64_t ptsLast; // PTS of the last frame + int64_t dtsLast; // DTS of the last frame +} PTSStats; typedef struct { + jobject ffmpegMediaPlayer; int32_t verbose; + uint32_t avcodecVersion; + uint32_t avformatVersion; + uint32_t avutilVersion; + uint32_t avresampleVersion; + uint32_t swresampleVersion; + + int32_t useRefCountedFrames; + + PFNGLTEXSUBIMAGE2DPROC procAddrGLTexSubImage2D; + PFNGLGETERRORPROC procAddrGLGetError; + PFNGLFLUSH procAddrGLFlush; + PFNGLFINISH procAddrGLFinish; + AVFormatContext* pFormatCtx; int32_t vid; AVStream* pVStream; @@ -74,26 +158,40 @@ typedef struct { uint32_t vBytesPerPixelPerPlane; enum PixelFormat vPixFmt; // native decoder fmt int32_t vPTS; // msec - overall last video PTS - int32_t vLinesize[3]; // decoded video linesize in bytes for each plane - int32_t vTexWidth[3]; // decoded video tex width in bytes for each plane - + PTSStats vPTSStats; + int32_t vTexWidth[4]; // decoded video tex width in bytes for each plane (max 4) + int32_t vWidth; + int32_t vHeight; + jboolean vFlipped; // false: !GL-Orientation, true: GL-Orientation int32_t aid; AVStream* pAStream; AVCodecContext* pACodecCtx; AVCodec* 
pACodec; - AVFrame* pAFrame; + AVFrame** pAFrames; + NIOBuffer_t* pANIOBuffers; + int32_t aFrameCount; + int32_t aFrameCurrent; + int32_t aFrameSize; // in samples per channel! + enum AVSampleFormat aSampleFmt; // native decoder fmt int32_t aSampleRate; int32_t aChannels; - int32_t aFrameSize; - enum AVSampleFormat aSampleFmt; // native decoder fmt + int32_t aSinkSupport; // supported by AudioSink + AVAudioResampleContext* avResampleCtx; + struct SwrContext* swResampleCtx; + uint8_t* aResampleBuffer; + enum AVSampleFormat aSampleFmtOut; // out fmt + int32_t aChannelsOut; + int32_t aSampleRateOut; int32_t aPTS; // msec - overall last audio PTS + PTSStats aPTSStats; float fps; // frames per seconds int32_t bps_stream; // bits per seconds int32_t bps_video; // bits per seconds int32_t bps_audio; // bits per seconds - int32_t totalFrames; + int32_t frames_video; + int32_t frames_audio; int32_t duration; // msec int32_t start_time; // msec diff --git a/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c b/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c deleted file mode 100644 index 647de15ef..000000000 --- a/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.c +++ /dev/null @@ -1,717 +0,0 @@ -/** - * Copyright 2012 JogAmp Community. All rights reserved. - * - * Redistribution and use in source and binary forms, with or without modification, are - * permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this list of - * conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, this list - * of conditions and the following disclaimer in the documentation and/or other materials - * provided with the distribution. 
- * - * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED - * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND - * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR - * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON - * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF - * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - * - * The views and conclusions contained in the software and documentation are those of the - * authors and should not be interpreted as representing official policies, either expressed - * or implied, of JogAmp Community. - */ - -#include "jogamp_opengl_util_av_impl_FFMPEGMediaPlayer.h" - -#include "JoglCommon.h" -#include "ffmpeg_tool.h" -#include <libavutil/pixdesc.h> -#include <GL/gl.h> - -typedef void (APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const GLvoid *pixels); - -static const char * const ClazzNameFFMPEGMediaPlayer = "jogamp/opengl/util/av/impl/FFMPEGMediaPlayer"; - -static jclass ffmpegMediaPlayerClazz = NULL; -static jmethodID jni_mid_updateAttributes1 = NULL; -static jmethodID jni_mid_updateAttributes2 = NULL; - -#define HAS_FUNC(f) (NULL!=(f)) - -typedef unsigned (APIENTRYP AVCODEC_VERSION)(void); -typedef unsigned (APIENTRYP AVUTIL_VERSION)(void); -typedef unsigned (APIENTRYP AVFORMAT_VERSION)(void); - -static AVCODEC_VERSION sp_avcodec_version; -static AVFORMAT_VERSION sp_avformat_version; -static AVUTIL_VERSION sp_avutil_version; -// count: 3 - - -// libavcodec -typedef int 
(APIENTRYP AVCODEC_CLOSE)(AVCodecContext *avctx); -typedef void (APIENTRYP AVCODEC_STRING)(char *buf, int buf_size, AVCodecContext *enc, int encode); -typedef AVCodec *(APIENTRYP AVCODEC_FIND_DECODER)(enum CodecID id); -typedef int (APIENTRYP AVCODEC_OPEN2)(AVCodecContext *avctx, AVCodec *codec, AVDictionary **options); // 53.6.0 -typedef int (APIENTRYP AVCODEC_OPEN)(AVCodecContext *avctx, AVCodec *codec); -typedef AVFrame *(APIENTRYP AVCODEC_ALLOC_FRAME)(void); -typedef int (APIENTRYP AVCODEC_DEFAULT_GET_BUFFER)(AVCodecContext *s, AVFrame *pic); -typedef void (APIENTRYP AVCODEC_DEFAULT_RELEASE_BUFFER)(AVCodecContext *s, AVFrame *pic); -typedef void (APIENTRYP AV_FREE_PACKET)(AVPacket *pkt); -typedef int (APIENTRYP AVCODEC_DECODE_AUDIO4)(AVCodecContext *avctx, AVFrame *frame, int *got_frame_ptr, AVPacket *avpkt); // 53.25.0 -typedef int (APIENTRYP AVCODEC_DECODE_AUDIO3)(AVCodecContext *avctx, int16_t *samples, int *frame_size_ptr, AVPacket *avpkt); // 52.23.0 -typedef int (APIENTRYP AVCODEC_DECODE_VIDEO2)(AVCodecContext *avctx, AVFrame *picture, int *got_picture_ptr, AVPacket *avpkt); // 52.23.0 - -static AVCODEC_CLOSE sp_avcodec_close; -static AVCODEC_STRING sp_avcodec_string; -static AVCODEC_FIND_DECODER sp_avcodec_find_decoder; -static AVCODEC_OPEN2 sp_avcodec_open2; // 53.6.0 -static AVCODEC_OPEN sp_avcodec_open; -static AVCODEC_ALLOC_FRAME sp_avcodec_alloc_frame; -static AVCODEC_DEFAULT_GET_BUFFER sp_avcodec_default_get_buffer; -static AVCODEC_DEFAULT_RELEASE_BUFFER sp_avcodec_default_release_buffer; -static AV_FREE_PACKET sp_av_free_packet; -static AVCODEC_DECODE_AUDIO4 sp_avcodec_decode_audio4; // 53.25.0 -static AVCODEC_DECODE_AUDIO3 sp_avcodec_decode_audio3; // 52.23.0 -static AVCODEC_DECODE_VIDEO2 sp_avcodec_decode_video2; // 52.23.0 -// count: 15 - -// libavutil -typedef void (APIENTRYP AV_FREE)(void *ptr); -typedef int (APIENTRYP AV_GET_BITS_PER_PIXEL)(const AVPixFmtDescriptor *pixdesc); -static const AVPixFmtDescriptor* sp_av_pix_fmt_descriptors; 
-static AV_FREE sp_av_free; -static AV_GET_BITS_PER_PIXEL sp_av_get_bits_per_pixel; -// count: 18 - -// libavformat -typedef AVFormatContext *(APIENTRYP AVFORMAT_ALLOC_CONTEXT)(void); -typedef void (APIENTRYP AVFORMAT_FREE_CONTEXT)(AVFormatContext *s); // 52.96.0 -typedef void (APIENTRYP AVFORMAT_CLOSE_INPUT)(AVFormatContext **s); // 53.17.0 -typedef void (APIENTRYP AV_CLOSE_INPUT_FILE)(AVFormatContext *s); -typedef void (APIENTRYP AV_REGISTER_ALL)(void); -typedef int (APIENTRYP AVFORMAT_OPEN_INPUT)(AVFormatContext **ps, const char *filename, AVInputFormat *fmt, AVDictionary **options); -typedef void (APIENTRYP AV_DUMP_FORMAT)(AVFormatContext *ic, int index, const char *url, int is_output); -typedef int (APIENTRYP AV_READ_FRAME)(AVFormatContext *s, AVPacket *pkt); -typedef int (APIENTRYP AV_SEEK_FRAME)(AVFormatContext *s, int stream_index, int64_t timestamp, int flags); -typedef int (APIENTRYP AVFORMAT_NETWORK_INIT)(void); // 53.13.0 -typedef int (APIENTRYP AVFORMAT_NETWORK_DEINIT)(void); // 53.13.0 -typedef int (APIENTRYP AVFORMAT_FIND_STREAM_INFO)(AVFormatContext *ic, AVDictionary **options); // 53.3.0 -typedef int (APIENTRYP AV_FIND_STREAM_INFO)(AVFormatContext *ic); - -static AVFORMAT_ALLOC_CONTEXT sp_avformat_alloc_context; -static AVFORMAT_FREE_CONTEXT sp_avformat_free_context; // 52.96.0 -static AVFORMAT_CLOSE_INPUT sp_avformat_close_input; // 53.17.0 -static AV_CLOSE_INPUT_FILE sp_av_close_input_file; -static AV_REGISTER_ALL sp_av_register_all; -static AVFORMAT_OPEN_INPUT sp_avformat_open_input; -static AV_DUMP_FORMAT sp_av_dump_format; -static AV_READ_FRAME sp_av_read_frame; -static AV_SEEK_FRAME sp_av_seek_frame; -static AVFORMAT_NETWORK_INIT sp_avformat_network_init; // 53.13.0 -static AVFORMAT_NETWORK_DEINIT sp_avformat_network_deinit; // 53.13.0 -static AVFORMAT_FIND_STREAM_INFO sp_avformat_find_stream_info; // 53.3.0 -static AV_FIND_STREAM_INFO sp_av_find_stream_info; -// count: 31 - -#define SYMBOL_COUNT 31 - -JNIEXPORT jboolean JNICALL 
Java_jogamp_opengl_util_av_impl_FFMPEGDynamicLibraryBundleInfo_initSymbols0 - (JNIEnv *env, jclass clazz, jobject jSymbols, jint count) -{ - int64_t* symbols; // jlong -> int64_t -> intptr_t -> FUNC_PTR - int i; - - if(SYMBOL_COUNT != count) { - fprintf(stderr, "FFMPEGDynamicLibraryBundleInfo.initSymbols0: Wrong symbol count: Expected %d, Is %d\n", - SYMBOL_COUNT, count); - return JNI_FALSE; - } - JoglCommon_init(env); - - i = 0; - symbols = (int64_t *) (*env)->GetPrimitiveArrayCritical(env, jSymbols, NULL); - - sp_avcodec_version = (AVCODEC_VERSION) (intptr_t) symbols[i++]; - sp_avformat_version = (AVFORMAT_VERSION) (intptr_t) symbols[i++]; - sp_avutil_version = (AVUTIL_VERSION) (intptr_t) symbols[i++]; - // count: 3 - - sp_avcodec_close = (AVCODEC_CLOSE) (intptr_t) symbols[i++]; - sp_avcodec_string = (AVCODEC_STRING) (intptr_t) symbols[i++]; - sp_avcodec_find_decoder = (AVCODEC_FIND_DECODER) (intptr_t) symbols[i++]; - sp_avcodec_open2 = (AVCODEC_OPEN2) (intptr_t) symbols[i++]; - sp_avcodec_open = (AVCODEC_OPEN) (intptr_t) symbols[i++]; - sp_avcodec_alloc_frame = (AVCODEC_ALLOC_FRAME) (intptr_t) symbols[i++]; - sp_avcodec_default_get_buffer = (AVCODEC_DEFAULT_GET_BUFFER) (intptr_t) symbols[i++]; - sp_avcodec_default_release_buffer = (AVCODEC_DEFAULT_RELEASE_BUFFER) (intptr_t) symbols[i++]; - sp_av_free_packet = (AV_FREE_PACKET) (intptr_t) symbols[i++]; - sp_avcodec_decode_audio4 = (AVCODEC_DECODE_AUDIO4) (intptr_t) symbols[i++]; - sp_avcodec_decode_audio3 = (AVCODEC_DECODE_AUDIO3) (intptr_t) symbols[i++]; - sp_avcodec_decode_video2 = (AVCODEC_DECODE_VIDEO2) (intptr_t) symbols[i++]; - // count: 15 - - sp_av_pix_fmt_descriptors = (const AVPixFmtDescriptor*) (intptr_t) symbols[i++]; - sp_av_free = (AV_FREE) (intptr_t) symbols[i++]; - sp_av_get_bits_per_pixel = (AV_GET_BITS_PER_PIXEL) (intptr_t) symbols[i++]; - // count: 18 - - sp_avformat_alloc_context = (AVFORMAT_ALLOC_CONTEXT) (intptr_t) symbols[i++];; - sp_avformat_free_context = (AVFORMAT_FREE_CONTEXT) (intptr_t) 
symbols[i++]; - sp_avformat_close_input = (AVFORMAT_CLOSE_INPUT) (intptr_t) symbols[i++]; - sp_av_close_input_file = (AV_CLOSE_INPUT_FILE) (intptr_t) symbols[i++]; - sp_av_register_all = (AV_REGISTER_ALL) (intptr_t) symbols[i++]; - sp_avformat_open_input = (AVFORMAT_OPEN_INPUT) (intptr_t) symbols[i++]; - sp_av_dump_format = (AV_DUMP_FORMAT) (intptr_t) symbols[i++]; - sp_av_read_frame = (AV_READ_FRAME) (intptr_t) symbols[i++]; - sp_av_seek_frame = (AV_SEEK_FRAME) (intptr_t) symbols[i++]; - sp_avformat_network_init = (AVFORMAT_NETWORK_INIT) (intptr_t) symbols[i++]; - sp_avformat_network_deinit = (AVFORMAT_NETWORK_DEINIT) (intptr_t) symbols[i++]; - sp_avformat_find_stream_info = (AVFORMAT_FIND_STREAM_INFO) (intptr_t) symbols[i++]; - sp_av_find_stream_info = (AV_FIND_STREAM_INFO) (intptr_t) symbols[i++]; - // count: 31 - - (*env)->ReleasePrimitiveArrayCritical(env, jSymbols, symbols, 0); - - if(SYMBOL_COUNT != i) { - // boom - fprintf(stderr, "FFMPEGDynamicLibraryBundleInfo.initSymbols0: Wrong symbol assignment count: Expected %d, Is %d\n", - SYMBOL_COUNT, i); - return JNI_FALSE; - } - - return JNI_TRUE; -} - -static void _updateJavaAttributes(JNIEnv *env, jobject instance, FFMPEGToolBasicAV_t* pAV) -{ - // int shallBeDetached = 0; - // JNIEnv * env = JoglCommon_GetJNIEnv (&shallBeDetached); - if(NULL!=env) { - int32_t w, h; - if( NULL != pAV->pVCodecCtx ) { - // FIXME: Libav Binary compatibility! 
JAU01 - w = pAV->pVCodecCtx->width; h = pAV->pVCodecCtx->height; - } else { - w = 0; h = 0; - } - - (*env)->CallVoidMethod(env, instance, jni_mid_updateAttributes1, - w, h, - pAV->bps_stream, pAV->bps_video, pAV->bps_audio, - pAV->fps, (int32_t)((pAV->duration/1000)*pAV->fps), pAV->duration, - (*env)->NewStringUTF(env, pAV->vcodec), - (*env)->NewStringUTF(env, pAV->acodec) ); - (*env)->CallVoidMethod(env, instance, jni_mid_updateAttributes2, - pAV->vPixFmt, pAV->vBufferPlanes, - pAV->vBitsPerPixel, pAV->vBytesPerPixelPerPlane, - pAV->vLinesize[0], pAV->vLinesize[1], pAV->vLinesize[2], - pAV->vTexWidth[0], pAV->vTexWidth[1], pAV->vTexWidth[2]); - // JoglCommon_ReleaseJNIEnv (shallBeDetached); - } -} - -static void freeInstance(FFMPEGToolBasicAV_t* pAV) { - int i; - if(NULL != pAV) { - // Close the V codec - if(NULL != pAV->pVCodecCtx) { - sp_avcodec_close(pAV->pVCodecCtx); - pAV->pVCodecCtx = NULL; - } - pAV->pVCodec=NULL; - - // Close the A codec - if(NULL != pAV->pACodecCtx) { - sp_avcodec_close(pAV->pACodecCtx); - pAV->pACodecCtx = NULL; - } - pAV->pACodec=NULL; - - // Close the frames - if(NULL != pAV->pVFrame) { - sp_av_free(pAV->pVFrame); - pAV->pVFrame = NULL; - } - if(NULL != pAV->pAFrame) { - sp_av_free(pAV->pAFrame); - pAV->pAFrame = NULL; - } - - // Close the video file - if(NULL != pAV->pFormatCtx) { - if(HAS_FUNC(sp_avformat_close_input)) { - sp_avformat_close_input(&pAV->pFormatCtx); - } else { - sp_av_close_input_file(pAV->pFormatCtx); - if(HAS_FUNC(sp_avformat_free_context)) { - sp_avformat_free_context(pAV->pFormatCtx); - } - } - pAV->pFormatCtx = NULL; - } - free(pAV); - } -} - -static int my_getPlaneCount(AVPixFmtDescriptor *pDesc) { - int i, p=-1; - for(i=pDesc->nb_components-1; i>=0; i--) { - int p0 = pDesc->comp[i].plane; - if( p < p0 ) { - p = p0; - } - } - return p+1; -} - -static int my_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt) { - return sp_av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL; -} - -#if 0 -static enum PixelFormat 
my_get_format(struct AVCodecContext *s, const enum PixelFormat * fmt) { - int i=0; - enum PixelFormat f0, fR = PIX_FMT_NONE; - char buf[256]; - - fprintf(stderr, "get_format ****\n"); - while (fmt[i] != PIX_FMT_NONE /* && ff_is_hwaccel_pix_fmt(fmt[i]) */) { - f0 = fmt[i]; - if(fR==PIX_FMT_NONE && !my_is_hwaccel_pix_fmt(f0)) { - fR = f0; - } - sp_av_get_pix_fmt_string(buf, sizeof(buf), f0); - fprintf(stderr, "get_format %d: %d - %s - %s\n", i, f0, sp_av_get_pix_fmt_name(f0), buf); - ++i; - } - fprintf(stderr, "get_format %d - %s *** \n", fR, sp_av_get_pix_fmt_name(fR)); - fflush(NULL); - return fR; -} -#endif - -JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_getAvUtilVersion0 - (JNIEnv *env, jclass clazz) { - return (jint) sp_avutil_version(); -} - -JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_getAvFormatVersion0 - (JNIEnv *env, jclass clazz) { - return (jint) sp_avformat_version(); -} - -JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_getAvCodecVersion0 - (JNIEnv *env, jclass clazz) { - return (jint) sp_avcodec_version(); -} - -JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_initIDs0 - (JNIEnv *env, jclass clazz) -{ - JoglCommon_init(env); - - jclass c; - if (ffmpegMediaPlayerClazz != NULL) { - return; - } - - c = (*env)->FindClass(env, ClazzNameFFMPEGMediaPlayer); - if(NULL==c) { - JoglCommon_FatalError(env, "JOGL FFMPEG: can't find %s", ClazzNameFFMPEGMediaPlayer); - } - ffmpegMediaPlayerClazz = (jclass)(*env)->NewGlobalRef(env, c); - (*env)->DeleteLocalRef(env, c); - if(NULL==ffmpegMediaPlayerClazz) { - JoglCommon_FatalError(env, "JOGL FFMPEG: can't use %s", ClazzNameFFMPEGMediaPlayer); - } - - jni_mid_updateAttributes1 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes", "(IIIIIFIILjava/lang/String;Ljava/lang/String;)V"); - jni_mid_updateAttributes2 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes2", "(IIIIIIIIII)V"); 
- - if(jni_mid_updateAttributes1 == NULL || - jni_mid_updateAttributes2 == NULL) { - return JNI_FALSE; - } - return JNI_TRUE; -} - -JNIEXPORT jlong JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_createInstance0 - (JNIEnv *env, jobject instance, jboolean verbose) -{ - FFMPEGToolBasicAV_t * pAV = calloc(1, sizeof(FFMPEGToolBasicAV_t)); - if(NULL==pAV) { - JoglCommon_throwNewRuntimeException(env, "Couldn't alloc instance"); - return 0; - } - // Register all formats and codecs - sp_av_register_all(); - // Network too .. - if(HAS_FUNC(sp_avformat_network_init)) { - sp_avformat_network_init(); - } - - pAV->verbose = verbose; - pAV->vid=-1; - pAV->aid=-1; - - return (jlong) (intptr_t) pAV; -} - -JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_destroyInstance0 - (JNIEnv *env, jobject instance, jlong ptr) -{ - FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr)); - if (pAV != NULL) { - // stop assumed .. - freeInstance(pAV); - } -} - -JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_setStream0 - (JNIEnv *env, jobject instance, jlong ptr, jstring jURL, jint vid, jint aid) -{ - int res, i; - jboolean iscopy; - FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)(intptr_t)ptr; - - if (pAV == NULL) { - JoglCommon_throwNewRuntimeException(env, "NULL AV ptr"); - return; - } - - pAV->pFormatCtx = sp_avformat_alloc_context(); - - // Open video file - const char *urlPath = (*env)->GetStringUTFChars(env, jURL, &iscopy); - res = sp_avformat_open_input(&pAV->pFormatCtx, urlPath, NULL, NULL); - if(res != 0) { - (*env)->ReleaseStringChars(env, jURL, (const jchar *)urlPath); - JoglCommon_throwNewRuntimeException(env, "Couldn't open URL"); - return; - } - - // Retrieve detailed stream information - if(HAS_FUNC(sp_avformat_find_stream_info)) { - if(sp_avformat_find_stream_info(pAV->pFormatCtx, NULL)<0) { - (*env)->ReleaseStringChars(env, jURL, (const jchar *)urlPath); - JoglCommon_throwNewRuntimeException(env, 
"Couldn't find stream information"); - return; - } - } else { - if(sp_av_find_stream_info(pAV->pFormatCtx)<0) { - (*env)->ReleaseStringChars(env, jURL, (const jchar *)urlPath); - JoglCommon_throwNewRuntimeException(env, "Couldn't find stream information"); - return; - } - } - - if(pAV->verbose) { - // Dump information about file onto standard error - sp_av_dump_format(pAV->pFormatCtx, 0, urlPath, JNI_FALSE); - } - (*env)->ReleaseStringChars(env, jURL, (const jchar *)urlPath); - // FIXME: Libav Binary compatibility! JAU01 - if (pAV->pFormatCtx->duration != AV_NOPTS_VALUE) { - pAV->duration = pAV->pFormatCtx->duration / AV_TIME_BASE_MSEC; - } - if (pAV->pFormatCtx->start_time != AV_NOPTS_VALUE) { - pAV->start_time = pAV->pFormatCtx->start_time / AV_TIME_BASE_MSEC; - } - if (pAV->pFormatCtx->bit_rate) { - pAV->bps_stream = pAV->pFormatCtx->bit_rate; - } - - fprintf(stderr, "Streams: %d\n", pAV->pFormatCtx->nb_streams); // JAU - - // Find the first audio and video stream, or the one matching vid - // FIXME: Libav Binary compatibility! JAU01 - for(i=0; ( -1==pAV->aid || -1==pAV->vid ) && i<pAV->pFormatCtx->nb_streams; i++) { - AVStream *st = pAV->pFormatCtx->streams[i]; - fprintf(stderr, "Stream: %d: is-video %d, is-audio %d\n", i, (AVMEDIA_TYPE_VIDEO == st->codec->codec_type), AVMEDIA_TYPE_AUDIO == st->codec->codec_type); // JAU - if(AVMEDIA_TYPE_VIDEO == st->codec->codec_type) { - if(-1==pAV->vid && (-1==vid || vid == i) ) { - pAV->pVStream = st; - pAV->vid=i; - } - } else if(AVMEDIA_TYPE_AUDIO == st->codec->codec_type) { - if(-1==pAV->aid && (-1==aid || aid == i) ) { - pAV->pAStream = st; - pAV->aid=i; - } - } - } - - fprintf(stderr, "Found vid %d, aid %d\n", pAV->vid, pAV->aid); // JAU - - if(0<=pAV->aid) { - // Get a pointer to the codec context for the audio stream - // FIXME: Libav Binary compatibility! JAU01 - pAV->pACodecCtx=pAV->pAStream->codec; - - // FIXME: Libav Binary compatibility! 
JAU01 - if (pAV->pACodecCtx->bit_rate) { - pAV->bps_audio = pAV->pACodecCtx->bit_rate; - } - sp_avcodec_string(pAV->acodec, sizeof(pAV->acodec), pAV->pACodecCtx, 0); - - // Find the decoder for the audio stream - pAV->pACodec=sp_avcodec_find_decoder(pAV->pACodecCtx->codec_id); - if(pAV->pACodec==NULL) { - JoglCommon_throwNewRuntimeException(env, "Couldn't find audio codec %d, %s", pAV->pACodecCtx->codec_id, pAV->acodec); - return; - } - - // Open codec - if(HAS_FUNC(sp_avcodec_open2)) { - res = sp_avcodec_open2(pAV->pACodecCtx, pAV->pACodec, NULL); - } else { - res = sp_avcodec_open(pAV->pACodecCtx, pAV->pACodec); - } - if(res<0) { - JoglCommon_throwNewRuntimeException(env, "Couldn't open audio codec %d, %s", pAV->pACodecCtx->codec_id, pAV->acodec); - return; - } - - // Allocate audio frames - // FIXME: Libav Binary compatibility! JAU01 - pAV->aSampleRate = pAV->pACodecCtx->sample_rate; - pAV->aChannels = pAV->pACodecCtx->channels; - pAV->aFrameSize = pAV->pACodecCtx->frame_size; - pAV->aSampleFmt = pAV->pACodecCtx->sample_fmt; - pAV->pAFrame=sp_avcodec_alloc_frame(); - if(pAV->pAFrame==NULL) { - JoglCommon_throwNewRuntimeException(env, "Couldn't alloc audio frame"); - return; - } - } - - if(0<=pAV->vid) { - // Get a pointer to the codec context for the video stream - // FIXME: Libav Binary compatibility! JAU01 - pAV->pVCodecCtx=pAV->pVStream->codec; - #if 0 - pAV->pVCodecCtx->get_format = my_get_format; - #endif - - if (pAV->pVCodecCtx->bit_rate) { - // FIXME: Libav Binary compatibility! 
JAU01 - pAV->bps_video = pAV->pVCodecCtx->bit_rate; - } - sp_avcodec_string(pAV->vcodec, sizeof(pAV->vcodec), pAV->pVCodecCtx, 0); - - // Find the decoder for the video stream - pAV->pVCodec=sp_avcodec_find_decoder(pAV->pVCodecCtx->codec_id); - if(pAV->pVCodec==NULL) { - JoglCommon_throwNewRuntimeException(env, "Couldn't find video codec %d, %s", pAV->pVCodecCtx->codec_id, pAV->vcodec); - return; - } - - // Open codec - if(HAS_FUNC(sp_avcodec_open2)) { - res = sp_avcodec_open2(pAV->pVCodecCtx, pAV->pVCodec, NULL); - } else { - res = sp_avcodec_open(pAV->pVCodecCtx, pAV->pVCodec); - } - if(res<0) { - JoglCommon_throwNewRuntimeException(env, "Couldn't open video codec %d, %s", pAV->pVCodecCtx->codec_id, pAV->vcodec); - return; - } - - // Hack to correct wrong frame rates that seem to be generated by some codecs - // FIXME: Libav Binary compatibility! JAU01 - if(pAV->pVCodecCtx->time_base.num>1000 && pAV->pVCodecCtx->time_base.den==1) { - pAV->pVCodecCtx->time_base.den=1000; - } - // FIXME: Libav Binary compatibility! JAU01 - pAV->fps = my_av_q2f(pAV->pVStream->avg_frame_rate); - - // Allocate video frames - // FIXME: Libav Binary compatibility! JAU01 - pAV->vPixFmt = pAV->pVCodecCtx->pix_fmt; - { - AVPixFmtDescriptor pixDesc = sp_av_pix_fmt_descriptors[pAV->vPixFmt]; - pAV->vBitsPerPixel = sp_av_get_bits_per_pixel(&pixDesc); - pAV->vBufferPlanes = my_getPlaneCount(&pixDesc); - } - pAV->pVFrame=sp_avcodec_alloc_frame(); - if( pAV->pVFrame == NULL ) { - JoglCommon_throwNewRuntimeException(env, "Couldn't alloc video frame"); - return; - } - res = sp_avcodec_default_get_buffer(pAV->pVCodecCtx, pAV->pVFrame); - if(0==res) { - const int32_t bytesPerPixel = ( pAV->vBitsPerPixel + 7 ) / 8 ; - if(1 == pAV->vBufferPlanes) { - pAV->vBytesPerPixelPerPlane = bytesPerPixel; - } else { - pAV->vBytesPerPixelPerPlane = 1; - } - for(i=0; i<3; i++) { - // FIXME: Libav Binary compatibility! 
JAU01 - pAV->vLinesize[i] = pAV->pVFrame->linesize[i]; - pAV->vTexWidth[i] = pAV->vLinesize[i] / pAV->vBytesPerPixelPerPlane ; - } - sp_avcodec_default_release_buffer(pAV->pVCodecCtx, pAV->pVFrame); - } else { - JoglCommon_throwNewRuntimeException(env, "Couldn't peek video buffer dimension"); - return; - } - } - pAV->vPTS=0; - pAV->aPTS=0; - _updateJavaAttributes(env, instance, pAV); -} - -JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_readNextPacket0 - (JNIEnv *env, jobject instance, jlong ptr, jlong jProcAddrGLTexSubImage2D, jint texTarget, jint texFmt, jint texType) -{ - FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr)); - PFNGLTEXSUBIMAGE2DPROC procAddrGLTexSubImage2D = (PFNGLTEXSUBIMAGE2DPROC) jProcAddrGLTexSubImage2D; - - jint res = 0; // 1 - audio, 2 - video - AVPacket packet; - int frameFinished; - - if(sp_av_read_frame(pAV->pFormatCtx, &packet)>=0) { - /** - if(packet.stream_index==pAV->aid) { - // Decode audio frame - if(NULL == pAV->pAFrame) { - sp_av_free_packet(&packet); - return res; - } - - int new_packet = 1; - int len1; - int flush_complete = 0; - int data_size = 0; - while (packet.size > 0 || (!packet.data && new_packet)) { - new_packet = 0; - if (flush_complete) { - break; - } - if(HAS_FUNC(sp_avcodec_decode_audio4)) { - len1 = sp_avcodec_decode_audio4(pAV->pVCodecCtx, pAV->pAFrame, &frameFinished, &packet); - } else { - len1 = sp_avcodec_decode_audio3(pAV->pVCodecCtx, int16_t *samples, int *frame_size_ptr, &frameFinished, &packet); - } - if (len1 < 0) { - // if error, we skip the frame - packet.size = 0; - break; - } - packet.data += len1; - packet.size -= len1; - - if (!frameFinished) { - // stop sending empty packets if the decoder is finished - if (!packet.data && pAV->pVCodecCtx->codec->capabilities & CODEC_CAP_DELAY) { - flush_complete = 1; - } - continue; - } - - int32_t pts = pAV->pAFrame->pkt_pts * my_av_q2i32(1000, pAV->pAStream->time_base); - pAV->aPTS += ( data_size * 1000 ) / (2 * 
pAV->pVCodecCtx->channels * pAV->pVCodecCtx->sample_rate); - printf("A pts %d - %d\n", pts, pAV->aPTS); - res = 1; - } - } else */ if(packet.stream_index==pAV->vid) { - // Decode video frame - if(NULL == pAV->pVFrame) { - sp_av_free_packet(&packet); - return res; - } - sp_avcodec_decode_video2(pAV->pVCodecCtx, pAV->pVFrame, &frameFinished, &packet); - - // Did we get a video frame? - if(frameFinished) - { - res = 2; - // FIXME: Libav Binary compatibility! JAU01 - const AVRational time_base = pAV->pVStream->time_base; - const int64_t pts = pAV->pVFrame->pkt_pts; - if(AV_NOPTS_VALUE != pts) { // discard invalid PTS .. - pAV->vPTS = pts * my_av_q2i32(1000, time_base); - - #if 0 - printf("PTS %d = %ld * ( ( 1000 * %ld ) / %ld ) '1000 * time_base', time_base = %lf\n", - pAV->vPTS, pAV->pVFrame->pkt_pts, time_base.num, time_base.den, (time_base.num/(double)time_base.den)); - #endif - } - - #if 0 - printf("tex2D codec %dx%d - frame %dx%d - width %d tex / %d linesize, pixfmt 0x%X, texType 0x%x, texTarget 0x%x\n", - pAV->pVCodecCtx->width, pAV->pVCodecCtx->height, - pAV->pVFrame->width, pAV->pVFrame->height, pAV->vTexWidth[0], pAV->pVFrame->linesize[0], - texFmt, texType, texTarget); - #endif - - // 1st plane or complete packed frame - // FIXME: Libav Binary compatibility! JAU01 - procAddrGLTexSubImage2D(texTarget, 0, - 0, 0, - pAV->vTexWidth[0], pAV->pVCodecCtx->height, - texFmt, texType, pAV->pVFrame->data[0]); - - if(pAV->vPixFmt == PIX_FMT_YUV420P) { - // U plane - // FIXME: Libav Binary compatibility! JAU01 - procAddrGLTexSubImage2D(texTarget, 0, - pAV->pVCodecCtx->width, 0, - pAV->vTexWidth[1], pAV->pVCodecCtx->height/2, - texFmt, texType, pAV->pVFrame->data[1]); - // V plane - // FIXME: Libav Binary compatibility! JAU01 - procAddrGLTexSubImage2D(texTarget, 0, - pAV->pVCodecCtx->width, pAV->pVCodecCtx->height/2, - pAV->vTexWidth[2], pAV->pVCodecCtx->height/2, - texFmt, texType, pAV->pVFrame->data[2]); - } // FIXME: Add more planar formats ! 
- } - } - - // Free the packet that was allocated by av_read_frame - sp_av_free_packet(&packet); - } - return res; -} - -JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_seek0 - (JNIEnv *env, jobject instance, jlong ptr, jint pos1) -{ - FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr)); - int64_t pos0 = pAV->vPTS; - int64_t pts0 = pAV->pVFrame->pkt_pts; - int64_t pts1 = (int64_t) pos1 / my_av_q2i32(1000, pAV->pVStream->time_base); - int flags = 0; - if(pos1 < pos0) { - flags |= AVSEEK_FLAG_BACKWARD; - } - fprintf(stderr, "SEEK: pre : u %d, p %d -> u %d, p %d\n", pos0, pts0, pos1, pts1); - sp_av_seek_frame(pAV->pFormatCtx, pAV->vid, pts1, flags); - pAV->vPTS = pAV->pVFrame->pkt_pts * my_av_q2i32(1000, pAV->pVStream->time_base); - fprintf(stderr, "SEEK: post : u %d, p %d\n", pAV->vPTS, pAV->pVFrame->pkt_pts); - return pAV->vPTS; -} - -JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_getVideoPTS0 - (JNIEnv *env, jobject instance, jlong ptr) -{ - FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr)); - return pAV->vPTS; -} - -JNIEXPORT jint JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGMediaPlayer_getAudioPTS0 - (JNIEnv *env, jobject instance, jlong ptr) -{ - FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr)); - return pAV->aPTS; -} - diff --git a/src/jogl/native/macosx/MacOSXCustomCGLCode.c b/src/jogl/native/macosx/MacOSXCustomCGLCode.c index f8b7a800f..75896c3a2 100644 --- a/src/jogl/native/macosx/MacOSXCustomCGLCode.c +++ b/src/jogl/native/macosx/MacOSXCustomCGLCode.c @@ -2,7 +2,7 @@ #include <assert.h> -#include </usr/include/machine/types.h> +#include <machine/types.h> #include "macosx-window-system.h" void CGLQueryPixelFormat(CGLPixelFormatObj fmt, int* iattrs, int niattrs, int* ivalues) { diff --git a/src/jogl/native/macosx/MacOSXWindowSystemInterface-calayer.m b/src/jogl/native/macosx/MacOSXWindowSystemInterface-calayer.m new file mode 100644 
index 000000000..7ce8c58cf --- /dev/null +++ b/src/jogl/native/macosx/MacOSXWindowSystemInterface-calayer.m @@ -0,0 +1,966 @@ +#import "MacOSXWindowSystemInterface.h" +#import <QuartzCore/QuartzCore.h> +#import <pthread.h> +#import "NativeWindowProtocols.h" +#include "timespec.h" + +#import <OpenGL/glext.h> + +/** + * Partial include of gl3.h - which we can only expect and use + * in case of a GL3 core context at runtime. + * Otherwise we would need to have 2 modules, one including GL2 + * and one inclusing GL3 headers. + */ +#ifndef GL_ARB_vertex_array_object +#define GL_VERTEX_ARRAY_BINDING 0x85B5 +extern void glBindVertexArray (GLuint array); +extern void glDeleteVertexArrays (GLsizei n, const GLuint *arrays); +extern void glGenVertexArrays (GLsizei n, GLuint *arrays); +extern GLboolean glIsVertexArray (GLuint array); +#endif + +// +// CADisplayLink only available on iOS >= 3.1, sad, since it's convenient. +// Use CVDisplayLink otherwise. +// +// #define HAS_CADisplayLink 1 +// + +// lock/sync debug output +// +// #define DBG_SYNC 1 +// +#ifdef DBG_SYNC + // #define SYNC_PRINT(...) NSLog(@ ## __VA_ARGS__) + #define SYNC_PRINT(...) fprintf(stderr, __VA_ARGS__); fflush(stderr) +#else + #define SYNC_PRINT(...) +#endif + +// fps debug output +// +// #define DBG_PERF 1 + +// #define DBG_LIFECYCLE 1 + +/** + * Capture setView(NULL), which produces a 'invalid drawable' message + * + * Also track lifecycle via DBG_PRINT messages, if VERBOSE is enabled! 
+ */ +@interface MyNSOpenGLContext: NSOpenGLContext +{ +} +- (id)initWithFormat:(NSOpenGLPixelFormat *)format shareContext:(NSOpenGLContext *)share; +- (void)setView:(NSView *)view; +- (void)update; +#ifdef DBG_LIFECYCLE +- (id)retain; +- (oneway void)release; +#endif +- (void)dealloc; + +@end + +@implementation MyNSOpenGLContext + +- (id)initWithFormat:(NSOpenGLPixelFormat *)format shareContext:(NSOpenGLContext *)share +{ + DBG_PRINT("MyNSOpenGLContext::initWithFormat.0: format %p, share %p\n", format, share); + MyNSOpenGLContext * o = [super initWithFormat:format shareContext:share]; + DBG_PRINT("MyNSOpenGLContext::initWithFormat.X: new %p\n", o); + return o; +} + +- (void)setView:(NSView *)view +{ + DBG_PRINT("MyNSOpenGLContext::setView: this.0 %p, view %p\n", self, view); + // NSLog(@"MyNSOpenGLContext::setView: %@",[NSThread callStackSymbols]); + if(NULL != view) { + [super setView:view]; + } else { + [self clearDrawable]; + } + DBG_PRINT("MyNSOpenGLContext::setView.X\n"); +} + +- (void)update +{ + DBG_PRINT("MyNSOpenGLContext::update: this.0 %p, view %p\n", self, [self view]); + [super update]; + DBG_PRINT("MyNSOpenGLContext::update.X\n"); +} + +#ifdef DBG_LIFECYCLE + +- (id)retain +{ + DBG_PRINT("MyNSOpenGLContext::retain.0: %p (refcnt %d)\n", self, (int)[self retainCount]); + // NSLog(@"MyNSOpenGLContext::retain: %@",[NSThread callStackSymbols]); + id o = [super retain]; + DBG_PRINT("MyNSOpenGLContext::retain.X: %p (refcnt %d)\n", o, (int)[o retainCount]); + return o; +} + +- (oneway void)release +{ + DBG_PRINT("MyNSOpenGLContext::release.0: %p (refcnt %d)\n", self, (int)[self retainCount]); + [super release]; + // DBG_PRINT("MyNSOpenGLContext::release.X: %p (refcnt %d)\n", self, (int)[self retainCount]); +} + +#endif + +#ifdef VERBOSE_ON + #define CGLRETAINCOUNT(c) (NULL!=c?(int)CGLGetContextRetainCount(c):-1) +#else + #define CGLRETAINCOUNT(c) +#endif + +- (void)dealloc +{ + /** + * The explicit CGLContext destruction below + * ensures that it is not left 
behind. + * Tests show that w/o these gymnastics, the CGLContext is not freed immediately after calling dealloc. + * The retain, release and dealloc ensures [super dealloc] won't message 'invalid context'. + */ + CGLContextObj cglCtx = [self CGLContextObj]; + + DBG_PRINT("MyNSOpenGLContext::dealloc.0 %p (refcnt %d) - CGL-Ctx %p\n", self, (int)[self retainCount], cglCtx, CGLRETAINCOUNT(cglCtx)); + // NSLog(@"MyNSOpenGLContext::dealloc: %@",[NSThread callStackSymbols]); + + [self clearDrawable]; + DBG_PRINT("MyNSOpenGLContext::dealloc.1 %d\n", CGLRETAINCOUNT(cglCtx)); + if( NULL != cglCtx ) { + CGLRetainContext( cglCtx ); + DBG_PRINT("MyNSOpenGLContext::dealloc.2 %d\n", CGLRETAINCOUNT(cglCtx)); + } + [super dealloc]; + DBG_PRINT("MyNSOpenGLContext::dealloc.3 %d\n", CGLRETAINCOUNT(cglCtx)); + if( NULL != cglCtx ) { + CGLReleaseContext( cglCtx ); + DBG_PRINT("MyNSOpenGLContext::dealloc.4 %d\n", CGLRETAINCOUNT(cglCtx)); + CGLDestroyContext( cglCtx ); + DBG_PRINT("MyNSOpenGLContext::dealloc.5 %d\n", CGLRETAINCOUNT(cglCtx)); + } + DBG_PRINT("MyNSOpenGLContext::dealloc.X\n"); +} + +@end + +@interface MyNSOpenGLLayer: NSOpenGLLayer <NWDedicatedFrame> +{ +@private + GLfloat gl_texCoords[8]; + NSOpenGLContext* glContext; + Bool isGLEnabled; + +@protected + GLuint gl3ShaderProgramName; + GLuint vboBufVert; + GLuint vboBufTexCoord; + GLint vertAttrLoc; + GLint texCoordAttrLoc; + NSOpenGLPixelFormat* parentPixelFmt; + int texWidth; + int texHeight; + volatile Bool dedicatedFrameSet; + volatile CGRect dedicatedFrame; + volatile NSOpenGLPixelBuffer* pbuffer; + volatile GLuint textureID; + volatile NSOpenGLPixelBuffer* newPBuffer; +#ifdef HAS_CADisplayLink + CADisplayLink* displayLink; +#else + CVDisplayLinkRef displayLink; +#endif + int tc; + struct timespec tStart; +@public + struct timespec lastWaitTime; + GLint swapInterval; + GLint swapIntervalCounter; + pthread_mutex_t renderLock; + pthread_cond_t renderSignal; + volatile Bool shallDraw; +} + +- (id) setupWithContext: 
(NSOpenGLContext*) parentCtx + gl3ShaderProgramName: (GLuint) gl3ShaderProgramName + pixelFormat: (NSOpenGLPixelFormat*) pfmt + pbuffer: (NSOpenGLPixelBuffer*) p + texIDArg: (GLuint) texID + opaque: (Bool) opaque + texWidth: (int) texWidth + texHeight: (int) texHeight; + +- (void)releaseLayer; +- (void)deallocPBuffer; +- (void)disableAnimation; +- (void)pauseAnimation:(Bool)pause; +- (void)setSwapInterval:(int)interval; +- (void)tick; +- (void)waitUntilRenderSignal: (long) to_micros; +- (Bool)isGLSourceValid; + +- (void) setGLEnabled: (Bool) enable; +- (Bool) validateTexSize: (CGRect) lRect; +- (void) setTextureID: (int) _texID; + +- (Bool) isSamePBuffer: (NSOpenGLPixelBuffer*) p; +- (void) setNewPBuffer: (NSOpenGLPixelBuffer*)p; +- (void) applyNewPBuffer; + +- (void)setDedicatedFrame:(CGRect)frame quirks:(int)quirks; // @NWDedicatedFrame +- (void) setFrame:(CGRect) frame; +- (id<CAAction>)actionForKey:(NSString *)key ; +- (NSOpenGLPixelFormat *)openGLPixelFormatForDisplayMask:(uint32_t)mask; +- (NSOpenGLContext *)openGLContextForPixelFormat:(NSOpenGLPixelFormat *)pixelFormat; +- (BOOL)canDrawInOpenGLContext:(NSOpenGLContext *)context pixelFormat:(NSOpenGLPixelFormat *)pixelFormat + forLayerTime:(CFTimeInterval)timeInterval displayTime:(const CVTimeStamp *)timeStamp; +- (void)drawInOpenGLContext:(NSOpenGLContext *)context pixelFormat:(NSOpenGLPixelFormat *)pixelFormat + forLayerTime:(CFTimeInterval)timeInterval displayTime:(const CVTimeStamp *)timeStamp; + +#ifdef DBG_LIFECYCLE +- (id)retain; +- (oneway void)release; +#endif +- (void)dealloc; + +@end + +#ifndef HAS_CADisplayLink + +static CVReturn renderMyNSOpenGLLayer(CVDisplayLinkRef displayLink, + const CVTimeStamp *inNow, + const CVTimeStamp *inOutputTime, + CVOptionFlags flagsIn, + CVOptionFlags *flagsOut, + void *displayLinkContext) +{ + NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + MyNSOpenGLLayer* l = (MyNSOpenGLLayer*)displayLinkContext; + pthread_mutex_lock(&l->renderLock); + if( 0 < 
l->swapInterval ) { + l->swapIntervalCounter++; + if( l->swapIntervalCounter >= l->swapInterval ) { + SYNC_PRINT("<S %d/%d>", (int)l->swapIntervalCounter, l->swapInterval); + l->swapIntervalCounter = 0; + pthread_cond_signal(&l->renderSignal); // wake up vsync + } + } + pthread_mutex_unlock(&l->renderLock); + [pool release]; + return kCVReturnSuccess; +} + +#endif + +static const GLfloat gl_verts[] = { + -1.0, -1.0, + -1.0, 1.0, + 1.0, 1.0, + 1.0, -1.0 +}; + +@implementation MyNSOpenGLLayer + +- (id) setupWithContext: (NSOpenGLContext*) parentCtx + gl3ShaderProgramName: (GLuint) _gl3ShaderProgramName + pixelFormat: (NSOpenGLPixelFormat*) _parentPixelFmt + pbuffer: (NSOpenGLPixelBuffer*) p + texIDArg: (GLuint) texID + opaque: (Bool) opaque + texWidth: (int) _texWidth + texHeight: (int) _texHeight; +{ + pthread_mutexattr_t renderLockAttr; + pthread_mutexattr_init(&renderLockAttr); + pthread_mutexattr_settype(&renderLockAttr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&renderLock, &renderLockAttr); // recursive + pthread_cond_init(&renderSignal, NULL); // no attribute + + { + int i; + for(i=0; i<8; i++) { + gl_texCoords[i] = 0.0f; + } + } + parentPixelFmt = [_parentPixelFmt retain]; // until destruction + glContext = [[MyNSOpenGLContext alloc] initWithFormat:parentPixelFmt shareContext:parentCtx]; + gl3ShaderProgramName = _gl3ShaderProgramName; + vboBufVert = 0; + vboBufTexCoord = 0; + vertAttrLoc = 0; + texCoordAttrLoc = 0; + swapInterval = 1; // defaults to on (as w/ new GL profiles) + swapIntervalCounter = 0; + timespec_now(&lastWaitTime); + shallDraw = NO; + isGLEnabled = YES; + dedicatedFrameSet = NO; + dedicatedFrame = CGRectMake(0, 0, _texWidth, _texHeight); + [self validateTexSize: dedicatedFrame]; + [self setTextureID: texID]; + + newPBuffer = NULL; + pbuffer = p; + if(NULL != pbuffer) { + [pbuffer retain]; + } + + { + // no animations for add/remove/swap sublayers etc + // doesn't work: [self removeAnimationForKey: kCAOnOrderIn, kCAOnOrderOut, 
kCATransition] + [self removeAllAnimations]; + } + + // instantiate a deactivated displayLink +#ifdef HAS_CADisplayLink + displayLink = [[CVDisplayLink displayLinkWithTarget:self selector:@selector(setNeedsDisplay)] retain]; +#else + CVReturn cvres; + { + int allDisplaysMask = 0; + int virtualScreen, accelerated, displayMask; + for (virtualScreen = 0; virtualScreen < [parentPixelFmt numberOfVirtualScreens]; virtualScreen++) { + [parentPixelFmt getValues:&displayMask forAttribute:NSOpenGLPFAScreenMask forVirtualScreen:virtualScreen]; + [parentPixelFmt getValues:&accelerated forAttribute:NSOpenGLPFAAccelerated forVirtualScreen:virtualScreen]; + if (accelerated) { + allDisplaysMask |= displayMask; + } + } + cvres = CVDisplayLinkCreateWithOpenGLDisplayMask(allDisplaysMask, &displayLink); + if(kCVReturnSuccess != cvres) { + DBG_PRINT("MyNSOpenGLLayer::init %p, CVDisplayLinkCreateWithOpenGLDisplayMask %X failed: %d\n", self, allDisplaysMask, cvres); + displayLink = NULL; + } + } + if(NULL != displayLink) { + CVReturn cvres; + DBG_PRINT("MyNSOpenGLLayer::openGLContextForPixelFormat.1: setup DisplayLink %p\n", displayLink); + cvres = CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(displayLink, [glContext CGLContextObj], [parentPixelFmt CGLPixelFormatObj]); + if(kCVReturnSuccess != cvres) { + DBG_PRINT("MyNSOpenGLLayer::init %p, CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext failed: %d\n", self, cvres); + } + } + if(NULL != displayLink) { + cvres = CVDisplayLinkSetOutputCallback(displayLink, renderMyNSOpenGLLayer, self); + if(kCVReturnSuccess != cvres) { + DBG_PRINT("MyNSOpenGLLayer::init %p, CVDisplayLinkSetOutputCallback failed: %d\n", self, cvres); + displayLink = NULL; + } + } +#endif + [self pauseAnimation: YES]; + + [self removeAllAnimations]; + [self setAutoresizingMask: (kCALayerWidthSizable|kCALayerHeightSizable)]; + [self setNeedsDisplayOnBoundsChange: YES]; + + [self setOpaque: opaque ? 
YES : NO]; + +#ifdef VERBOSE_ON + CGRect lRect = [self bounds]; + if(NULL != pbuffer) { + DBG_PRINT("MyNSOpenGLLayer::init (pbuffer) %p, pctx %p, pfmt %p, pbuffer %p, ctx %p, opaque %d, pbuffer %dx%d -> tex %dx%d, bounds: %lf/%lf %lfx%lf, displayLink %p (refcnt %d)\n", + self, parentCtx, parentPixelFmt, pbuffer, glContext, opaque, [pbuffer pixelsWide], [pbuffer pixelsHigh], texWidth, texHeight, + lRect.origin.x, lRect.origin.y, lRect.size.width, lRect.size.height, displayLink, (int)[self retainCount]); + } else { + DBG_PRINT("MyNSOpenGLLayer::init (texture) %p, pctx %p, pfmt %p, ctx %p, opaque %d, tex[id %d, %dx%d], bounds: %lf/%lf %lfx%lf, displayLink %p (refcnt %d)\n", + self, parentCtx, parentPixelFmt, glContext, opaque, (int)textureID, texWidth, texHeight, + lRect.origin.x, lRect.origin.y, lRect.size.width, lRect.size.height, displayLink, (int)[self retainCount]); + } +#endif + return self; +} + +- (void) setGLEnabled: (Bool) enable +{ + DBG_PRINT("MyNSOpenGLLayer::setGLEnabled: %p, %d -> %d\n", self, (int)isGLEnabled, (int)enable); + isGLEnabled = enable; +} + +- (Bool) validateTexSize: (CGRect) lRect +{ + const int lRectW = (int) (lRect.size.width + 0.5f); + const int lRectH = (int) (lRect.size.height + 0.5f); + Bool changed; + + if( lRectH != texHeight || lRectW != texWidth ) { + texWidth = lRectW; + texHeight = lRectH; + changed = YES; + + GLfloat texCoordWidth, texCoordHeight; + if(NULL != pbuffer) { + GLenum textureTarget = [pbuffer textureTarget] ; + GLsizei pwidth = [pbuffer pixelsWide]; + GLsizei pheight = [pbuffer pixelsHigh]; + if( GL_TEXTURE_2D == textureTarget ) { + texCoordWidth = (GLfloat)pwidth /(GLfloat)texWidth ; + texCoordHeight = (GLfloat)pheight/(GLfloat)texHeight ; + } else { + texCoordWidth = pwidth; + texCoordHeight = pheight; + } + } else { + texCoordWidth = (GLfloat)1.0f; + texCoordHeight = (GLfloat)1.0f; + } + gl_texCoords[3] = texCoordHeight; + gl_texCoords[5] = texCoordHeight; + gl_texCoords[4] = texCoordWidth; + gl_texCoords[6] = 
texCoordWidth; + #ifdef VERBOSE_ON + DBG_PRINT("MyNSOpenGLLayer::validateTexSize %p -> tex %dx%d, bounds: %lf/%lf %lfx%lf (%dx%d), dedicatedFrame set:%d %lf/%lf %lfx%lf\n", + self, texWidth, texHeight, + lRect.origin.x, lRect.origin.y, lRect.size.width, lRect.size.height, lRectW, lRectH, + dedicatedFrameSet, dedicatedFrame.origin.x, dedicatedFrame.origin.y, dedicatedFrame.size.width, dedicatedFrame.size.height); + #endif + } else { + changed = NO; + } + return changed; +} + +- (void) setTextureID: (int) _texID +{ + textureID = _texID; +} + +- (Bool) isSamePBuffer: (NSOpenGLPixelBuffer*) p +{ + return pbuffer == p || newPBuffer == p; +} + +- (void)setNewPBuffer: (NSOpenGLPixelBuffer*)p +{ + SYNC_PRINT("<NP-S %p -> %p>", pbuffer, p); + newPBuffer = p; + [newPBuffer retain]; +} + +- (void) applyNewPBuffer +{ + if( NULL != newPBuffer ) { // volatile OK + SYNC_PRINT("<NP-A %p -> %p>", pbuffer, newPBuffer); + + if( 0 != textureID ) { + glDeleteTextures(1, (GLuint *)&textureID); + [self setTextureID: 0]; + } + [pbuffer release]; + + pbuffer = newPBuffer; + newPBuffer = NULL; + } +} + +- (void)deallocPBuffer +{ + if(NULL != pbuffer) { + NSOpenGLContext* context = [self openGLContext]; + if(NULL!=context) { + [context makeCurrentContext]; + + DBG_PRINT("MyNSOpenGLLayer::deallocPBuffer (with ctx) %p (refcnt %d) - context %p, pbuffer %p, texID %d\n", self, (int)[self retainCount], context, pbuffer, (int)textureID); + + if( 0 != textureID ) { + glDeleteTextures(1, (GLuint *)&textureID); + [self setTextureID: 0]; + } + if(NULL != pbuffer) { + [pbuffer release]; + pbuffer = NULL; + } + if(NULL != newPBuffer) { + [newPBuffer release]; + newPBuffer = NULL; + } + + [context clearDrawable]; + } else { + DBG_PRINT("MyNSOpenGLLayer::deallocPBuffer (w/o ctx) %p (refcnt %d) - context %p, pbuffer %p, texID %d\n", self, (int)[self retainCount], context, pbuffer, (int)textureID); + } + } +} + +- (void)disableAnimation +{ + DBG_PRINT("MyNSOpenGLLayer::disableAnimation.0: %p (refcnt %d) - 
displayLink %p\n", self, (int)[self retainCount], displayLink); + [self setAsynchronous: NO]; + if(NULL != displayLink) { +#ifdef HAS_CADisplayLink + [displayLink setPaused: YES]; + [displayLink release]; +#else + CVDisplayLinkStop(displayLink); + CVDisplayLinkRelease(displayLink); +#endif + displayLink = NULL; + } + DBG_PRINT("MyNSOpenGLLayer::disableAnimation.X: %p (refcnt %d) - displayLink %p\n", self, (int)[self retainCount], displayLink); +} + +- (void)releaseLayer +{ + DBG_PRINT("MyNSOpenGLLayer::releaseLayer.0: %p (refcnt %d)\n", self, (int)[self retainCount]); + [self setGLEnabled: NO]; + [self disableAnimation]; + pthread_mutex_lock(&renderLock); + [self deallocPBuffer]; + if( NULL != glContext ) { + [glContext release]; + glContext = NULL; + } + if( NULL != parentPixelFmt ) { + [parentPixelFmt release]; + parentPixelFmt = NULL; + } + pthread_mutex_unlock(&renderLock); + [self release]; + DBG_PRINT("MyNSOpenGLLayer::releaseLayer.X: %p\n", self); +} + +#ifdef DBG_LIFECYCLE + +- (id)retain +{ + DBG_PRINT("MyNSOpenGLLayer::retain.0: %p (refcnt %d)\n", self, (int)[self retainCount]); + // NSLog(@"MyNSOpenGLLayer::retain: %@",[NSThread callStackSymbols]); + id o = [super retain]; + DBG_PRINT("MyNSOpenGLLayer::retain.X: %p (refcnt %d)\n", o, (int)[o retainCount]); + return o; +} + +- (oneway void)release +{ + DBG_PRINT("MyNSOpenGLLayer::release.0: %p (refcnt %d)\n", self, (int)[self retainCount]); + // NSLog(@"MyNSOpenGLLayer::release: %@",[NSThread callStackSymbols]); + [super release]; + // DBG_PRINT("MyNSOpenGLLayer::release.X: %p (refcnt %d)\n", self, (int)[self retainCount]); +} + +#endif + +- (void)dealloc +{ + DBG_PRINT("MyNSOpenGLLayer::dealloc.0 %p (refcnt %d)\n", self, (int)[self retainCount]); + // NSLog(@"MyNSOpenGLLayer::dealloc: %@",[NSThread callStackSymbols]); + [self disableAnimation]; + pthread_mutex_lock(&renderLock); + [self deallocPBuffer]; + pthread_mutex_unlock(&renderLock); + pthread_cond_destroy(&renderSignal); + 
pthread_mutex_destroy(&renderLock); + [super dealloc]; + // DBG_PRINT("MyNSOpenGLLayer::dealloc.X %p\n", self); +} + +- (Bool)isGLSourceValid +{ + return NULL != pbuffer || NULL != newPBuffer || 0 != textureID ; +} + +// @NWDedicatedFrame +- (void)setDedicatedFrame:(CGRect)dFrame quirks:(int)quirks { + CGRect lRect = [self frame]; + Bool dedicatedFramePosSet = 0 != ( NW_DEDICATEDFRAME_QUIRK_POSITION & quirks ); + Bool dedicatedFrameSizeSet = 0 != ( NW_DEDICATEDFRAME_QUIRK_SIZE & quirks ); + Bool dedicatedLayoutSet = 0 != ( NW_DEDICATEDFRAME_QUIRK_LAYOUT & quirks ); + dedicatedFrameSet = dedicatedFramePosSet || dedicatedFrameSizeSet || dedicatedLayoutSet; + dedicatedFrame = dFrame; + + DBG_PRINT("MyNSOpenGLLayer::setDedicatedFrame: Quirks [%d, pos %d, size %d, lout %d], %p, texSize %dx%d, %lf/%lf %lfx%lf -> %lf/%lf %lfx%lf\n", + quirks, dedicatedFramePosSet, dedicatedFrameSizeSet, dedicatedLayoutSet, self, texWidth, texHeight, + lRect.origin.x, lRect.origin.y, lRect.size.width, lRect.size.height, + dFrame.origin.x, dFrame.origin.y, dFrame.size.width, dFrame.size.height); + (void)lRect; // silence + + if( dedicatedFrameSet ) { + [super setFrame: dedicatedFrame]; + } +} + +- (void) setFrame:(CGRect) frame { + if( dedicatedFrameSet ) { + [super setFrame: dedicatedFrame]; + } else { + [super setFrame: frame]; + } +} + +- (id<CAAction>)actionForKey:(NSString *)key +{ + DBG_PRINT("MyNSOpenGLLayer::actionForKey.0 %p key %s -> NIL\n", self, [key UTF8String]); + return nil; + // return [super actionForKey: key]; +} + +- (NSOpenGLPixelFormat *)openGLPixelFormatForDisplayMask:(uint32_t)mask +{ + DBG_PRINT("MyNSOpenGLLayer::openGLPixelFormatForDisplayMask: %p (refcnt %d) - parent-pfmt %p -> new-pfmt %p\n", + self, (int)[self retainCount], parentPixelFmt, parentPixelFmt); + // We simply take over ownership of parent PixelFormat until releaseLayer.. 
+ return parentPixelFmt; +} + +- (NSOpenGLContext *)openGLContextForPixelFormat:(NSOpenGLPixelFormat *)pixelFormat +{ + DBG_PRINT("MyNSOpenGLLayer::openGLContextForPixelFormat.0: %p (refcnt %d) - pfmt %p, ctx %p, DisplayLink %p\n", + self, (int)[self retainCount], pixelFormat, glContext, displayLink); + return glContext; +} + +- (BOOL)canDrawInOpenGLContext:(NSOpenGLContext *)context pixelFormat:(NSOpenGLPixelFormat *)pixelFormat + forLayerTime:(CFTimeInterval)timeInterval displayTime:(const CVTimeStamp *)timeStamp +{ + SYNC_PRINT("<? %d, %d>", (int)shallDraw, (int)isGLEnabled); + return shallDraw && isGLEnabled; +} + +- (void)drawInOpenGLContext:(NSOpenGLContext *)context pixelFormat:(NSOpenGLPixelFormat *)pixelFormat + forLayerTime:(CFTimeInterval)timeInterval displayTime:(const CVTimeStamp *)timeStamp +{ + pthread_mutex_unlock(&renderLock); + SYNC_PRINT("<* "); + // NSLog(@"MyNSOpenGLLayer::DRAW: %@",[NSThread callStackSymbols]); + + if( isGLEnabled && shallDraw && ( NULL != pbuffer || NULL != newPBuffer || 0 != textureID ) ) { + [context makeCurrentContext]; + + if( NULL != newPBuffer ) { // volatile OK + [self applyNewPBuffer]; + } + + GLenum textureTarget; + + Bool texSizeChanged = [self validateTexSize: ( dedicatedFrameSet ? 
dedicatedFrame : [self bounds] ) ]; + if( texSizeChanged ) { + [context update]; + } + + if( NULL != pbuffer ) { + if( texSizeChanged && 0 != textureID ) { + glDeleteTextures(1, (GLuint *)&textureID); + [self setTextureID: 0]; + } + textureTarget = [pbuffer textureTarget]; + if( 0 != gl3ShaderProgramName ) { + glUseProgram(gl3ShaderProgramName); + glActiveTexture(GL_TEXTURE0); + } + if( 0 == textureID ) { + glGenTextures(1, (GLuint *)&textureID); + glBindTexture(textureTarget, textureID); + glTexParameteri(textureTarget, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + glTexParameteri(textureTarget, GL_TEXTURE_MAG_FILTER, GL_NEAREST); + glTexParameteri(textureTarget, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(textureTarget, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + } else { + glBindTexture(textureTarget, textureID); + } + [context setTextureImageToPixelBuffer: (NSOpenGLPixelBuffer*) pbuffer colorBuffer: GL_FRONT]; + } else { + if( 0 != gl3ShaderProgramName ) { + glUseProgram(gl3ShaderProgramName); + glActiveTexture(GL_TEXTURE0); + } + textureTarget = GL_TEXTURE_2D; + glBindTexture(textureTarget, textureID); + } + SYNC_PRINT(" %d gl3Prog %d/%d*>", (int)textureID, (int)gl3ShaderProgramName, (int)glIsProgram (gl3ShaderProgramName)); + + if( 0 == vboBufVert ) { // Once: Init Data and Bind to Pointer + if( 0 != gl3ShaderProgramName ) { + // Install default VAO as required by GL 3.2 core! 
+ GLuint vaoBuf = 0; + glGenVertexArrays(1, &vaoBuf); + glBindVertexArray(vaoBuf); + + // Set texture-unit 0 + GLint texUnitLoc = glGetUniformLocation (gl3ShaderProgramName, "mgl_Texture0"); + glUniform1i (texUnitLoc, 0); + } + glGenBuffers( 1, &vboBufVert ); + glBindBuffer( GL_ARRAY_BUFFER, vboBufVert ); + glBufferData( GL_ARRAY_BUFFER, 4 * 2 * sizeof(GLfloat), gl_verts, GL_STATIC_DRAW); + if( 0 != gl3ShaderProgramName ) { + vertAttrLoc = glGetAttribLocation( gl3ShaderProgramName, "mgl_Vertex" ); + glVertexAttribPointer( vertAttrLoc, 2, GL_FLOAT, GL_FALSE, 0, NULL ); + } else { + glVertexPointer(2, GL_FLOAT, 0, NULL); + } + + glGenBuffers( 1, &vboBufTexCoord ); + glBindBuffer( GL_ARRAY_BUFFER, vboBufTexCoord ); + glBufferData( GL_ARRAY_BUFFER, 4 * 2 * sizeof(GLfloat), gl_texCoords, GL_STATIC_DRAW); + if( 0 != gl3ShaderProgramName ) { + texCoordAttrLoc = glGetAttribLocation( gl3ShaderProgramName, "mgl_MultiTexCoord" ); + glVertexAttribPointer( texCoordAttrLoc, 2, GL_FLOAT, GL_FALSE, 0, NULL ); + } else { + glTexCoordPointer(2, GL_FLOAT, 0, NULL); + } + } + if( texSizeChanged ) { + glBindBuffer( GL_ARRAY_BUFFER, vboBufTexCoord ); + glBufferSubData( GL_ARRAY_BUFFER, 0, 4 * 2 * sizeof(GLfloat), gl_texCoords); + if( 0 != gl3ShaderProgramName ) { + glVertexAttribPointer( texCoordAttrLoc, 2, GL_FLOAT, GL_FALSE, 0, NULL ); + } else { + glTexCoordPointer(2, GL_FLOAT, 0, NULL); + } + } + if( 0 != gl3ShaderProgramName ) { + glEnableVertexAttribArray( vertAttrLoc ); + glEnableVertexAttribArray( texCoordAttrLoc ); + } else { + glEnable(textureTarget); + + glEnableClientState(GL_VERTEX_ARRAY); + glEnableClientState(GL_TEXTURE_COORD_ARRAY); + } + + glDrawArrays(GL_TRIANGLE_FAN, 0, 4); + + if( 0 != gl3ShaderProgramName ) { + glDisableVertexAttribArray( vertAttrLoc ); + glDisableVertexAttribArray( texCoordAttrLoc ); + glUseProgram(0); + } else { + glDisableClientState(GL_VERTEX_ARRAY); + glDisableClientState(GL_TEXTURE_COORD_ARRAY); + + glDisable(textureTarget); + } + + 
glBindTexture(textureTarget, 0); + + [context clearDrawable]; + + [super drawInOpenGLContext: context pixelFormat: pixelFormat forLayerTime: timeInterval displayTime: timeStamp]; + + } else { + // glClear(GL_COLOR_BUFFER_BIT); + // glBlitFramebuffer(0, 0, texWidth, texHeight, + // 0, 0, texWidth, texHeight, + // GL_COLOR_BUFFER_BIT, GL_NEAREST); + SYNC_PRINT(" 0*>"); + } + + #ifdef DBG_PERF + [self tick]; + #endif + shallDraw = NO; + + if( 0 >= swapInterval ) { + pthread_cond_signal(&renderSignal); // wake up !vsync + SYNC_PRINT("<s>"); + } + SYNC_PRINT("<$>\n"); + pthread_mutex_unlock(&renderLock); +} + +- (void)pauseAnimation:(Bool)pause +{ + DBG_PRINT("MyNSOpenGLLayer::pauseAnimation: %d\n", (int)pause); + [self setAsynchronous: NO]; + if(pause) { + if(NULL != displayLink) { + #ifdef HAS_CADisplayLink + [displayLink setPaused: YES]; + #else + CVDisplayLinkStop(displayLink); + #endif + } + } else { + if(NULL != displayLink) { + #ifdef HAS_CADisplayLink + [displayLink setPaused: NO]; + [displayLink setFrameInterval: swapInterval]; + #else + CVDisplayLinkStart(displayLink); + #endif + } + } + tc = 0; + timespec_now(&tStart); +} + +- (void)setSwapInterval:(int)interval +{ + /** + * v-sync doesn't works w/ NSOpenGLLayer's context .. well :( + * Using CVDisplayLink .. see setSwapInterval() below. 
+ * + GLint si; + [context getValues: &si forParameter: NSOpenGLCPSwapInterval]; + if(si != swapInterval) { + DBG_PRINT("MyNSOpenGLLayer::drawInOpenGLContext %p setSwapInterval: %d -> %d\n", self, si, swapInterval); + [context setValues: &swapInterval forParameter: NSOpenGLCPSwapInterval]; + } + */ + + pthread_mutex_lock(&renderLock); + DBG_PRINT("MyNSOpenGLLayer::setSwapInterval.0: %d - displayLink %p\n", interval, displayLink); + swapInterval = interval; + swapIntervalCounter = 0; + pthread_mutex_unlock(&renderLock); + + if(0 < swapInterval) { + [self pauseAnimation: NO]; + } else { + [self pauseAnimation: YES]; + } + DBG_PRINT("MyNSOpenGLLayer::setSwapInterval.X: %d\n", interval); +} + +-(void)tick +{ + tc++; + if(tc%60==0) { + struct timespec t1, td; + timespec_now(&t1); + timespec_subtract(&td, &t1, &tStart); + long td_ms = timespec_milliseconds(&td); + fprintf(stderr, "NSOpenGLLayer: %ld ms / %d frames, %ld ms / frame, %f fps\n", + td_ms, tc, td_ms/tc, (tc * 1000.0) / (float)td_ms ); + fflush(NULL); + } +} + +- (void)waitUntilRenderSignal: (long) to_micros +{ + struct timespec t0, to_until; + BOOL tooLate; + int wr; + if( 0 >= to_micros ) { + to_micros = 16666 + 1000; // defaults to 1/60s + 1ms + NSLog(@"MyNSOpenGLContext::waitUntilRenderSignal: to_micros was zero, using defaults"); + } + pthread_mutex_lock(&renderLock); + timespec_now(&t0); + to_until = lastWaitTime; + timespec_addmicros(&to_until, to_micros); + tooLate = timespec_compare(&to_until, &t0) < 0; + #ifdef DBG_SYNC + struct timespec td_until; + timespec_subtract(&td_until, &to_until, &t0); + SYNC_PRINT("{W %ld ms, to %ld ms, late %d", to_micros/1000, timespec_milliseconds(&td_until), tooLate); + #endif + if( 0 < swapInterval ) { + if( tooLate ) { + // adjust! 
+ to_until = t0; + timespec_addmicros(&to_until, to_micros); + } + wr = pthread_cond_timedwait(&renderSignal, &renderLock, &to_until); + #ifdef DBG_SYNC + struct timespec t1, td, td2; + timespec_now(&t1); + timespec_subtract(&td, &t1, &t0); + timespec_subtract(&td2, &t1, &lastWaitTime); + fprintf(stderr, "(%ld) / (%ld) ms", timespec_milliseconds(&td), timespec_milliseconds(&td2)); + #endif + } + SYNC_PRINT("-%d-%d}\n", shallDraw, wr); + timespec_now(&lastWaitTime); + pthread_mutex_unlock(&renderLock); +} + +@end + +NSOpenGLLayer* createNSOpenGLLayer(NSOpenGLContext* ctx, int gl3ShaderProgramName, NSOpenGLPixelFormat* fmt, NSOpenGLPixelBuffer* p, uint32_t texID, Bool opaque, int texWidth, int texHeight) { + return [[[MyNSOpenGLLayer alloc] init] setupWithContext:ctx gl3ShaderProgramName: (GLuint)gl3ShaderProgramName pixelFormat: fmt pbuffer: p texIDArg: (GLuint)texID + opaque: opaque texWidth: texWidth texHeight: texHeight]; +} + +void setNSOpenGLLayerEnabled(NSOpenGLLayer* layer, Bool enable) { + NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + MyNSOpenGLLayer* l = (MyNSOpenGLLayer*) layer; + [l setGLEnabled: enable]; + [pool release]; +} + +void setNSOpenGLLayerSwapInterval(NSOpenGLLayer* layer, int interval) { + NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + MyNSOpenGLLayer* l = (MyNSOpenGLLayer*) layer; + [l setSwapInterval: interval]; + [pool release]; +} + +void waitUntilNSOpenGLLayerIsReady(NSOpenGLLayer* layer, long to_micros) { + NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + MyNSOpenGLLayer* l = (MyNSOpenGLLayer*) layer; + [l waitUntilRenderSignal: to_micros]; + [pool release]; +} + +void setNSOpenGLLayerNeedsDisplayFBO(NSOpenGLLayer* layer, uint32_t texID) { + NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + MyNSOpenGLLayer* l = (MyNSOpenGLLayer*) layer; + Bool shallDraw; + + // volatile OK + [l setTextureID: texID]; + shallDraw = [l isGLSourceValid]; + l->shallDraw = shallDraw; + + SYNC_PRINT("<! 
T %d>", (int)shallDraw); + if(shallDraw) { + if ( [NSThread isMainThread] == YES ) { + [l setNeedsDisplay]; + } else { + // don't wait - using doublebuffering + [l performSelectorOnMainThread:@selector(setNeedsDisplay) withObject:nil waitUntilDone:NO]; + } + } + // DBG_PRINT("MyNSOpenGLLayer::setNSOpenGLLayerNeedsDisplay %p\n", l); + [pool release]; +} + +void setNSOpenGLLayerNeedsDisplayPBuffer(NSOpenGLLayer* layer, NSOpenGLPixelBuffer* p) { + NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + MyNSOpenGLLayer* l = (MyNSOpenGLLayer*) layer; + Bool shallDraw; + + if( NO == [l isSamePBuffer: p] ) { + [l setNewPBuffer: p]; + } + + // volatile OK + shallDraw = [l isGLSourceValid]; + l->shallDraw = shallDraw; + + SYNC_PRINT("<! T %d>", (int)shallDraw); + if(shallDraw) { + if ( [NSThread isMainThread] == YES ) { + [l setNeedsDisplay]; + } else { + // don't wait - using doublebuffering + [l performSelectorOnMainThread:@selector(setNeedsDisplay) withObject:nil waitUntilDone:NO]; + } + } + // DBG_PRINT("MyNSOpenGLLayer::setNSOpenGLLayerNeedsDisplay %p\n", l); + [pool release]; +} + +void releaseNSOpenGLLayer(NSOpenGLLayer* layer) { + NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + MyNSOpenGLLayer* l = (MyNSOpenGLLayer*) layer; + + [CATransaction begin]; + [CATransaction setValue:(id)kCFBooleanTrue forKey:kCATransactionDisableActions]; + + DBG_PRINT("MyNSOpenGLLayer::releaseNSOpenGLLayer.0: %p\n", l); + [l releaseLayer]; + DBG_PRINT("MyNSOpenGLLayer::releaseNSOpenGLLayer.X: %p\n", l); + + [CATransaction commit]; + + [pool release]; +} + diff --git a/src/jogl/native/macosx/MacOSXWindowSystemInterface-pbuffer.m b/src/jogl/native/macosx/MacOSXWindowSystemInterface-pbuffer.m deleted file mode 100644 index 66bd10f89..000000000 --- a/src/jogl/native/macosx/MacOSXWindowSystemInterface-pbuffer.m +++ /dev/null @@ -1,494 +0,0 @@ -#import "MacOSXWindowSystemInterface.h" -#import <QuartzCore/QuartzCore.h> -#import <pthread.h> -#include "timespec.h" - -// -// 
CADisplayLink only available on iOS >= 3.1, sad, since it's convenient. -// Use CVDisplayLink otherwise. -// -// #define HAS_CADisplayLink 1 -// - -// lock/sync debug output -// -// #define DBG_SYNC 1 -// -#ifdef DBG_SYNC - // #define SYNC_PRINT(...) NSLog(@ ## __VA_ARGS__) - #define SYNC_PRINT(...) fprintf(stderr, __VA_ARGS__); fflush(stderr) -#else - #define SYNC_PRINT(...) -#endif - -// fps debug output -// -// #define DBG_PERF 1 - -@interface MyNSOpenGLLayer: NSOpenGLLayer -{ -@protected - NSOpenGLPixelBuffer* pbuffer; - int texWidth; - int texHeight; - GLuint textureID; - GLint swapInterval; - GLint swapIntervalCounter; - struct timespec lastWaitTime; -#ifdef HAS_CADisplayLink - CADisplayLink* displayLink; -#else - CVDisplayLinkRef displayLink; -#endif - int tc; - struct timespec t0; -@public - pthread_mutex_t renderLock; - pthread_cond_t renderSignal; - BOOL shallDraw; -} - -- (id) setupWithContext: (NSOpenGLContext*) ctx - pixelFormat: (NSOpenGLPixelFormat*) pfmt - pbuffer: (NSOpenGLPixelBuffer*) p - opaque: (Bool) opaque - texWidth: (int) texWidth - texHeight: (int) texHeight; - -- (void)deallocTex; -- (void)disableAnimation; -- (void)releaseLayer; -- (void)dealloc; -- (int)getSwapInterval; -- (void)setSwapInterval:(int)interval; -- (void)tick; - -@end - -#ifndef HAS_CADisplayLink - -static CVReturn renderMyNSOpenGLLayer(CVDisplayLinkRef displayLink, - const CVTimeStamp *inNow, - const CVTimeStamp *inOutputTime, - CVOptionFlags flagsIn, - CVOptionFlags *flagsOut, - void *displayLinkContext) -{ - NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; - MyNSOpenGLLayer* l = (MyNSOpenGLLayer*)displayLinkContext; - pthread_mutex_lock(&l->renderLock); - #ifdef DBG_PERF - [l tick]; - #endif - if(0 < l->swapInterval) { - l->swapIntervalCounter++; - if(l->swapIntervalCounter>=l->swapInterval) { - l->swapIntervalCounter = 0; - pthread_cond_signal(&l->renderSignal); - SYNC_PRINT("S"); - } - } - pthread_mutex_unlock(&l->renderLock); - [pool release]; - return 
kCVReturnSuccess; -} - -#endif - -@implementation MyNSOpenGLLayer - -- (id) setupWithContext: (NSOpenGLContext*) _ctx - pixelFormat: (NSOpenGLPixelFormat*) _fmt - pbuffer: (NSOpenGLPixelBuffer*) p - opaque: (Bool) opaque - texWidth: (int) _texWidth - texHeight: (int) _texHeight; -{ - pthread_mutexattr_t renderLockAttr; - pthread_mutexattr_init(&renderLockAttr); - pthread_mutexattr_settype(&renderLockAttr, PTHREAD_MUTEX_RECURSIVE); - pthread_mutex_init(&renderLock, &renderLockAttr); // recursive - pthread_cond_init(&renderSignal, NULL); // no attribute - - textureID = 0; - swapInterval = 1; // defaults to on (as w/ new GL profiles) - swapIntervalCounter = 0; - timespec_now(&lastWaitTime); - shallDraw = NO; - texWidth = _texWidth; - texHeight = _texHeight; - pbuffer = p; - [pbuffer retain]; - - { - CGRect lRect = CGRectMake(0, 0, texWidth, texHeight); - [self setFrame:lRect]; - - // no animations for add/remove/swap sublayers etc - // doesn't work: [self removeAnimationForKey: kCAOnOrderIn, kCAOnOrderOut, kCATransition] - [self removeAllAnimations]; - } - - // instantiate a deactivated displayLink -#ifdef HAS_CADisplayLink - displayLink = [[CVDisplayLink displayLinkWithTarget:self selector:@selector(setNeedsDisplay)] retain]; - [displayLink setPaused: YES]; -#else - CVReturn cvres; - { - int allDisplaysMask = 0; - int virtualScreen, accelerated, displayMask; - for (virtualScreen = 0; virtualScreen < [_fmt numberOfVirtualScreens]; virtualScreen++) { - [_fmt getValues:&displayMask forAttribute:NSOpenGLPFAScreenMask forVirtualScreen:virtualScreen]; - [_fmt getValues:&accelerated forAttribute:NSOpenGLPFAAccelerated forVirtualScreen:virtualScreen]; - if (accelerated) { - allDisplaysMask |= displayMask; - } - } - cvres = CVDisplayLinkCreateWithOpenGLDisplayMask(allDisplaysMask, &displayLink); - if(kCVReturnSuccess != cvres) { - DBG_PRINT("MyNSOpenGLLayer::init %p, CVDisplayLinkCreateWithOpenGLDisplayMask %X failed: %d\n", self, allDisplaysMask, cvres); - displayLink = 
NULL; - } - } - if(NULL != displayLink) { - cvres = CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(displayLink, [_ctx CGLContextObj], [_fmt CGLPixelFormatObj]); - if(kCVReturnSuccess != cvres) { - DBG_PRINT("MyNSOpenGLLayer::init %p, CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext failed: %d\n", self, cvres); - displayLink = NULL; - } - } - if(NULL != displayLink) { - cvres = CVDisplayLinkSetOutputCallback(displayLink, renderMyNSOpenGLLayer, self); - if(kCVReturnSuccess != cvres) { - DBG_PRINT("MyNSOpenGLLayer::init %p, CVDisplayLinkSetOutputCallback failed: %d\n", self, cvres); - displayLink = NULL; - } - } - if(NULL != displayLink) { - CVDisplayLinkStop(displayLink); - } -#endif - [self setAsynchronous: YES]; - - [self setNeedsDisplayOnBoundsChange: YES]; // FIXME: learn how to recreate on size change! - - [self setOpaque: opaque ? YES : NO]; - - CGRect lRect = [self frame]; - DBG_PRINT("MyNSOpenGLLayer::init %p, ctx %p, pfmt %p, pbuffer %p, opaque %d, pbuffer %dx%d -> tex %dx%d, frame: %lf/%lf %lfx%lf (refcnt %d)\n", - self, _ctx, _fmt, pbuffer, opaque, [pbuffer pixelsWide], [pbuffer pixelsHigh], texWidth, texHeight, - lRect.origin.x, lRect.origin.y, lRect.size.width, lRect.size.height, (int)[self retainCount]); - return self; -} - -- (void)disableAnimation -{ - DBG_PRINT("MyNSOpenGLLayer::disableAnimation: %p (refcnt %d) - displayLink %p\n", self, (int)[self retainCount], displayLink); - pthread_mutex_lock(&renderLock); - [self setAsynchronous: NO]; - if(NULL != displayLink) { -#ifdef HAS_CADisplayLink - [displayLink setPaused: YES]; - [displayLink release]; -#else - if(NULL!=displayLink) { - CVDisplayLinkStop(displayLink); - CVDisplayLinkRelease(displayLink); - } -#endif - displayLink = NULL; - } - pthread_mutex_unlock(&renderLock); -} - -- (void)deallocTex -{ - pthread_mutex_lock(&renderLock); - NSOpenGLContext* context = [self openGLContext]; - DBG_PRINT("MyNSOpenGLLayer::deallocTex %p (refcnt %d) - context %p, pbuffer %p\n", self, (int)[self 
retainCount], context, pbuffer); - if(NULL != pbuffer) { - if(NULL!=context) { - [context makeCurrentContext]; - if(0 != textureID) { - glDeleteTextures(1, &textureID); - textureID = 0; - } - [context clearDrawable]; - } - [pbuffer release]; - pbuffer = NULL; - } - pthread_mutex_unlock(&renderLock); -} - -- (void)releaseLayer -{ - DBG_PRINT("MyNSOpenGLLayer::releaseLayer.0: %p (refcnt %d)\n", self, (int)[self retainCount]); - pthread_mutex_lock(&renderLock); - [self disableAnimation]; - [self deallocTex]; - [self release]; - DBG_PRINT("MyNSOpenGLLayer::releaseLayer.X: %p (refcnt %d)\n", self, (int)[self retainCount]); - pthread_mutex_unlock(&renderLock); -} - -- (void)dealloc -{ - DBG_PRINT("MyNSOpenGLLayer::dealloc.0 %p (refcnt %d)\n", self, (int)[self retainCount]); - // NSLog(@"MyNSOpenGLLayer::dealloc: %@",[NSThread callStackSymbols]); - - [self disableAnimation]; - [self deallocTex]; - pthread_cond_destroy(&renderSignal); - pthread_mutex_destroy(&renderLock); - [super dealloc]; - DBG_PRINT("MyNSOpenGLLayer::dealloc.X %p\n", self); -} - -- (BOOL)canDrawInOpenGLContext:(NSOpenGLContext *)context pixelFormat:(NSOpenGLPixelFormat *)pixelFormat - forLayerTime:(CFTimeInterval)timeInterval displayTime:(const CVTimeStamp *)timeStamp -{ - // assume both methods 'canDrawInOpenGLContext' and 'drawInOpenGLContext' - // are called from the same thread subsequently - pthread_mutex_lock(&renderLock); - Bool res = NULL != pbuffer && YES == shallDraw; - if(!res) { - SYNC_PRINT("0"); - pthread_mutex_unlock(&renderLock); - } else { - SYNC_PRINT("1"); - } - return res; -} - -- (void)drawInOpenGLContext:(NSOpenGLContext *)context pixelFormat:(NSOpenGLPixelFormat *)pixelFormat - forLayerTime:(CFTimeInterval)timeInterval displayTime:(const CVTimeStamp *)timeStamp -{ - [context makeCurrentContext]; - - /** - * v-sync doesn't works w/ NSOpenGLLayer's context .. well :( - * Using CVDisplayLink .. see setSwapInterval() below. 
- * - if(0 <= swapInterval) { - GLint si; - [context getValues: &si forParameter: NSOpenGLCPSwapInterval]; - if(si != swapInterval) { - DBG_PRINT("MyNSOpenGLLayer::drawInOpenGLContext %p setSwapInterval: %d -> %d\n", self, si, swapInterval); - [context setValues: &swapInterval forParameter: NSOpenGLCPSwapInterval]; - } - } */ - GLenum textureTarget = [pbuffer textureTarget]; - GLfloat tWidth, tHeight; - { - GLsizei pwidth = [pbuffer pixelsWide]; - GLsizei pheight = [pbuffer pixelsHigh]; - tWidth = textureTarget == GL_TEXTURE_2D ? (GLfloat)pwidth /(GLfloat)texWidth : pwidth; - tHeight = textureTarget == GL_TEXTURE_2D ? (GLfloat)pheight/(GLfloat)texHeight : pheight; - } - Bool texCreated = 0 == textureID; - - if(texCreated) { - glGenTextures(1, &textureID); - DBG_PRINT("MyNSOpenGLLayer::drawInOpenGLContext %p, ctx %p, pfmt %p tex %dx%d -> %fx%f 0x%X: creating texID 0x%X\n", - self, context, pixelFormat, texWidth, texHeight, tWidth, tHeight, textureTarget, textureID); - - CGRect lRect = [self frame]; - DBG_PRINT("MyNSOpenGLLayer::drawInOpenGLContext %p frame0: %lf/%lf %lfx%lf\n", - self, lRect.origin.x, lRect.origin.y, lRect.size.width, lRect.size.height); - } - - glBindTexture(textureTarget, textureID); - - /** - if(texCreated) { - // proper tex size setup - glTexImage2D(textureTarget, 0, GL_RGB, texWidth, texHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL); - } */ - - [context setTextureImageToPixelBuffer: pbuffer colorBuffer: GL_FRONT]; - - glTexParameteri(textureTarget, GL_TEXTURE_MIN_FILTER, GL_NEAREST); - glTexParameteri(textureTarget, GL_TEXTURE_MAG_FILTER, GL_NEAREST); - glTexParameteri(textureTarget, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - glTexParameteri(textureTarget, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - - glEnable(textureTarget); - - static GLfloat verts[] = { - -1.0, -1.0, - -1.0, 1.0, - 1.0, 1.0, - 1.0, -1.0 - }; - - GLfloat tex[] = { - 0.0, 0.0, - 0.0, tHeight, - tWidth, tHeight, - tWidth, 0.0 - }; - - glEnableClientState(GL_VERTEX_ARRAY); - 
glEnableClientState(GL_TEXTURE_COORD_ARRAY); - glVertexPointer(2, GL_FLOAT, 0, verts); - glTexCoordPointer(2, GL_FLOAT, 0, tex); - - glDrawArrays(GL_QUADS, 0, 4); - - glDisableClientState(GL_VERTEX_ARRAY); - glDisableClientState(GL_TEXTURE_COORD_ARRAY); - - glDisable(textureTarget); - glBindTexture(textureTarget, 0); - - [super drawInOpenGLContext: context pixelFormat: pixelFormat forLayerTime: timeInterval displayTime: timeStamp]; - shallDraw = NO; - if(0 >= swapInterval) { - pthread_cond_signal(&renderSignal); // just to wake up - SYNC_PRINT("s"); - } - SYNC_PRINT("$"); - pthread_mutex_unlock(&renderLock); -} - -- (int)getSwapInterval -{ - return swapInterval; -} - -- (void)setSwapInterval:(int)interval -{ - DBG_PRINT("MyNSOpenGLLayer::setSwapInterval: %d\n", interval); - swapInterval = interval; - swapIntervalCounter = 0; - if(0 < swapInterval) { - tc = 0; - timespec_now(&t0); - - [self setAsynchronous: NO]; - #ifdef HAS_CADisplayLink - [displayLink setPaused: NO]; - [displayLink setFrameInterval: interval]; - #else - if(NULL!=displayLink) { - CVDisplayLinkStart(displayLink); - // FIXME: doesn't support interval .. - } - #endif - } else { - #ifdef HAS_CADisplayLink - [displayLink setPaused: YES]; - #else - if(NULL!=displayLink) { - CVDisplayLinkStop(displayLink); - } - #endif - [self setAsynchronous: YES]; - } -} - --(void)tick -{ - tc++; - if(tc%60==0) { - struct timespec t1, td; - timespec_now(&t1); - timespec_subtract(&td, &t1, &t0); - long td_ms = timespec_milliseconds(&td); - fprintf(stderr, "NSOpenGLLayer: %ld ms / %d frames, %ld ms / frame, %f fps\n", - td_ms, tc, td_ms/tc, (tc * 1000.0) / (float)td_ms ); - fflush(NULL); - } -} - -@end - -NSOpenGLLayer* createNSOpenGLLayer(NSOpenGLContext* ctx, NSOpenGLPixelFormat* fmt, NSOpenGLPixelBuffer* p, Bool opaque, int texWidth, int texHeight) { - // This simply crashes after dealloc() has been called .. ie. ref-count -> 0 too early ? 
- // However using alloc/init, actual dealloc happens at JAWT destruction, hence too later IMHO. - // return [[MyNSOpenGLLayer layer] setupWithContext:ctx pixelFormat: fmt pbuffer: p opaque: opaque texWidth: texWidth texHeight: texHeight]; - - return [[[MyNSOpenGLLayer alloc] init] setupWithContext:ctx pixelFormat: fmt pbuffer: p opaque: opaque texWidth: texWidth texHeight: texHeight]; -} - -void setNSOpenGLLayerSwapInterval(NSOpenGLLayer* layer, int interval) { - MyNSOpenGLLayer* l = (MyNSOpenGLLayer*) layer; - pthread_mutex_lock(&l->renderLock); - [l setSwapInterval: interval]; - pthread_mutex_unlock(&l->renderLock); -} - -void waitUntilNSOpenGLLayerIsReady(NSOpenGLLayer* layer, long to_micros) { - MyNSOpenGLLayer* l = (MyNSOpenGLLayer*) layer; - BOOL ready = NO; - int wr = 0; - pthread_mutex_lock(&l->renderLock); - SYNC_PRINT("{"); - do { - if([l getSwapInterval] <= 0) { - ready = !l->shallDraw; - } - if(NO == ready) { - if(0 < to_micros) { - #ifdef DBG_SYNC - struct timespec t0, t1, td, td2; - timespec_now(&t0); - #endif - struct timespec to_abs = l->lastWaitTime; - timespec_addmicros(&to_abs, to_micros); - #ifdef DBG_SYNC - timespec_subtract(&td, &to_abs, &t0); - fprintf(stderr, "(%ld) / ", timespec_milliseconds(&td)); - #endif - wr = pthread_cond_timedwait(&l->renderSignal, &l->renderLock, &to_abs); - #ifdef DBG_SYNC - timespec_now(&t1); - timespec_subtract(&td, &t1, &t0); - timespec_subtract(&td2, &t1, &l->lastWaitTime); - fprintf(stderr, "(%ld) / (%ld) ms", timespec_milliseconds(&td), timespec_milliseconds(&td2)); - #endif - } else { - pthread_cond_wait (&l->renderSignal, &l->renderLock); - } - ready = !l->shallDraw; - } - } while (NO == ready && 0 == wr) ; - SYNC_PRINT("-%d}", ready); - timespec_now(&l->lastWaitTime); - pthread_mutex_unlock(&l->renderLock); -} - -void setNSOpenGLLayerNeedsDisplay(NSOpenGLLayer* layer) { - MyNSOpenGLLayer* l = (MyNSOpenGLLayer*) layer; - NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; - 
pthread_mutex_lock(&l->renderLock); - l->shallDraw = YES; - if ( [NSThread isMainThread] == YES ) { - [l setNeedsDisplay]; - } else { - // can't wait, otherwise we may deadlock AWT - [l performSelectorOnMainThread:@selector(setNeedsDisplay) withObject:nil waitUntilDone:NO]; - } - SYNC_PRINT("."); - pthread_mutex_unlock(&l->renderLock); - // DBG_PRINT("MyNSOpenGLLayer::setNSOpenGLLayerNeedsDisplay %p\n", l); - [pool release]; -} - -void releaseNSOpenGLLayer(NSOpenGLLayer* layer) { - MyNSOpenGLLayer* l = (MyNSOpenGLLayer*) layer; - NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; - DBG_PRINT("MyNSOpenGLLayer::releaseNSOpenGLLayer.0: %p\n", l); - - if ( [NSThread isMainThread] == YES ) { - [l releaseLayer]; - } else { - [l performSelectorOnMainThread:@selector(releaseLayer) withObject:nil waitUntilDone:NO]; - } - - DBG_PRINT("MyNSOpenGLLayer::releaseNSOpenGLLayer.X: %p\n", l); - [pool release]; -} - diff --git a/src/jogl/native/macosx/MacOSXWindowSystemInterface.m b/src/jogl/native/macosx/MacOSXWindowSystemInterface.m index 8ac9f4700..d4d3ddad9 100644 --- a/src/jogl/native/macosx/MacOSXWindowSystemInterface.m +++ b/src/jogl/native/macosx/MacOSXWindowSystemInterface.m @@ -446,7 +446,7 @@ NSOpenGLPixelFormat* createPixelFormat(int* iattrs, int niattrs, int* ivalues) { NSOpenGLPixelFormat* fmt = [[NSOpenGLPixelFormat alloc] initWithAttributes:attribs]; // if(fmt == nil) { fallback to a [NSOpenGLView defaultPixelFormat] crashed (SIGSEGV) on 10.6.7/NV } - DBG_PRINT("createPixelFormat.X: pfmt %p\n", fmt); + DBG_PRINT("\ncreatePixelFormat.X: pfmt %p\n", fmt); [pool release]; return fmt; @@ -501,9 +501,31 @@ NSView* getNSView(NSOpenGLContext* ctx) { return view; } +static Bool lockViewIfReady(NSView *view) { + Bool viewReady = false; + + if (view != nil) { + if ([view lockFocusIfCanDraw] == NO) { + DBG_PRINT("lockViewIfReady.1 [view lockFocusIfCanDraw] failed\n"); + } else { + NSRect frame = [view frame]; + if ((frame.size.width == 0) || (frame.size.height == 0)) { 
+ [view unlockFocus]; + DBG_PRINT("lockViewIfReady.2 view.frame size %dx%d\n", (int)frame.size.width, (int)frame.size.height); + } else { + DBG_PRINT("lockViewIfReady.X ready and locked\n"); + viewReady = true; + } + } + } else { + DBG_PRINT("lockViewIfReady.3 nil view\n"); + } + return viewReady; +} + NSOpenGLContext* createContext(NSOpenGLContext* share, NSView* view, - Bool isBackingLayerView, + Bool incompleteView, NSOpenGLPixelFormat* fmt, Bool opaque, int* viewNotReady) @@ -512,56 +534,32 @@ NSOpenGLContext* createContext(NSOpenGLContext* share, getRendererInfo(); - DBG_PRINT("createContext.0: share %p, view %p, isBackingLayer %d, pixfmt %p, opaque %d\n", - share, view, (int)isBackingLayerView, fmt, opaque); - - if (view != NULL) { - Bool viewReady = true; + DBG_PRINT("createContext.0: share %p, view %p, incompleteView %d, pixfmt %p, opaque %d\n", + share, view, (int)incompleteView, fmt, opaque); - if(!isBackingLayerView) { - if ([view lockFocusIfCanDraw] == NO) { - DBG_PRINT("createContext.1 [view lockFocusIfCanDraw] failed\n"); - viewReady = false; - } - } - if(viewReady) { - NSRect frame = [view frame]; - if ((frame.size.width == 0) || (frame.size.height == 0)) { - if(!isBackingLayerView) { - [view unlockFocus]; - } - DBG_PRINT("createContext.2 view.frame size %dx%d\n", (int)frame.size.width, (int)frame.size.height); - viewReady = false; - } - } + Bool viewReadyAndLocked = incompleteView ? 
false : lockViewIfReady(view); - if (!viewReady) - { - if (viewNotReady != NULL) - { - *viewNotReady = 1; - } + if (nil != viewNotReady) { + *viewNotReady = 1; + } - // the view is not ready yet - DBG_PRINT("createContext.X: view not ready yet\n"); - [pool release]; - return NULL; - } + if (nil != view && !incompleteView && !viewReadyAndLocked) { + DBG_PRINT("createContext.X: Assumed complete view not ready yet\n"); + [pool release]; + return NULL; } - + NSOpenGLContext* ctx = [[NSOpenGLContext alloc] initWithFormat:fmt shareContext:share]; - if (ctx != nil) { - if (view != nil) { + if ( nil != ctx && nil != view ) { if(!opaque) { GLint zeroOpacity = 0; [ctx setValues:&zeroOpacity forParameter:NSOpenGLCPSurfaceOpacity]; } - [ctx setView:view]; - if(!isBackingLayerView) { + if( viewReadyAndLocked ) { + [ctx setView:view]; [view unlockFocus]; } - } } DBG_PRINT("createContext.X: ctx: %p\n", ctx); @@ -569,6 +567,32 @@ NSOpenGLContext* createContext(NSOpenGLContext* share, return ctx; } +void setContextView(NSOpenGLContext* ctx, NSView* view) { + NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + if ( nil != ctx ) { + if ( nil != view ) { + Bool viewReadyAndLocked = lockViewIfReady(view); + DBG_PRINT("setContextView.0: ctx %p, view %p: setView: %d\n", ctx, view, viewReadyAndLocked); + if( viewReadyAndLocked ) { + [ctx setView:view]; + [view unlockFocus]; + } + } + DBG_PRINT("setContextView.X\n"); + } + [pool release]; +} + +void clearDrawable(NSOpenGLContext* ctx) { + NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + if ( nil != ctx ) { + DBG_PRINT("clearDrawable.0: %p\n", ctx); + [ctx clearDrawable]; + DBG_PRINT("clearDrawable.X\n"); + } + [pool release]; +} + Bool makeCurrentContext(NSOpenGLContext* ctx) { #if 0 // we issue the CGL Lock from Java upfront! 
@@ -628,7 +652,12 @@ void setContextOpacity(NSOpenGLContext* ctx, int opacity) { void updateContext(NSOpenGLContext* ctx) { NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; - [ctx update]; + NSView *nsView = [ctx view]; + if(NULL != nsView) { + DBG_PRINT("updateContext.0: ctx %p, ctx.view %p\n", ctx, nsView); + [ctx update]; + DBG_PRINT("updateContext.X\n"); + } [pool release]; } @@ -638,7 +667,10 @@ void copyContext(NSOpenGLContext* dest, NSOpenGLContext* src, int mask) { void* updateContextRegister(NSOpenGLContext* ctx, NSView* view) { NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + + DBG_PRINT("updateContextRegister.0: ctx %p, view %p\n", ctx, view); ContextUpdater *contextUpdater = [[ContextUpdater alloc] initWithContext: ctx view: view]; + DBG_PRINT("updateContextRegister.X: ctxupd %p\n", contextUpdater); [pool release]; return contextUpdater; } @@ -658,44 +690,62 @@ void updateContextUnregister(void* updater) { ContextUpdater *contextUpdater = (ContextUpdater *)updater; NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + DBG_PRINT("updateContextUnregister.0: ctxupd %p\n", contextUpdater); [contextUpdater release]; + DBG_PRINT("updateContextUnregister.X\n"); [pool release]; } NSOpenGLPixelBuffer* createPBuffer(int renderTarget, int internalFormat, int width, int height) { NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + DBG_PRINT("createPBuffer.0: renderTarget 0x%x, ifmt 0x%x, %dx%d: \n", renderTarget, internalFormat, width, height); NSOpenGLPixelBuffer* pBuffer = [[NSOpenGLPixelBuffer alloc] initWithTextureTarget:renderTarget textureInternalFormat:internalFormat textureMaxMipMapLevel:0 pixelsWide:width pixelsHigh:height]; + DBG_PRINT("createPBuffer.X: res %p\n", pBuffer); [pool release]; return pBuffer; } Bool destroyPBuffer(NSOpenGLPixelBuffer* pBuffer) { NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + DBG_PRINT("destroyPBuffer.0: pbuffer %p\n", pBuffer); [pBuffer release]; + 
DBG_PRINT("destroyPBuffer.X\n"); [pool release]; return true; } void setContextPBuffer(NSOpenGLContext* ctx, NSOpenGLPixelBuffer* pBuffer) { NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + DBG_PRINT("setContextPBuffer.0: ctx %p, pbuffer %p\n", ctx, pBuffer); [ctx setPixelBuffer: pBuffer cubeMapFace: 0 mipMapLevel: 0 currentVirtualScreen: [ctx currentVirtualScreen]]; + DBG_PRINT("setContextPBuffer.X\n"); [pool release]; } void setContextTextureImageToPBuffer(NSOpenGLContext* ctx, NSOpenGLPixelBuffer* pBuffer, GLenum colorBuffer) { NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + DBG_PRINT("setContextTextureImageToPBuffer.0: ctx %p, pbuffer %p, colorBuffer 0x%x\n", ctx, pBuffer, (int)colorBuffer); [ctx setTextureImageToPixelBuffer: pBuffer colorBuffer: colorBuffer]; + DBG_PRINT("setContextTextureImageToPBuffer.X\n"); [pool release]; } +Bool isNSOpenGLPixelBuffer(uint64_t object) { + NSObject *nsObj = (NSObject*) (intptr_t) object; + DBG_PRINT("isNSOpenGLPixelBuffer.0: obj %p\n", object); + Bool res = [nsObj isKindOfClass:[NSOpenGLPixelBuffer class]]; + DBG_PRINT("isNSOpenGLPixelBuffer.X: res %d\n", (int)res); + return res; +} + #include <mach-o/dyld.h> Bool imagesInitialized = false; static char libGLStr[] = "/System/Library/Frameworks/OpenGL.framework/Libraries/libGL.dylib"; @@ -746,38 +796,3 @@ void resetGammaRamp() { CGDisplayRestoreColorSyncSettings(); } -/*** - * The following static functions are copied out of NEWT's OSX impl. <src/newt/native/MacWindow.m> - * May need to push code to NativeWindow, to remove duplication. 
- */ -static NSScreen * NewtScreen_getNSScreenByIndex(int screen_idx) { - NSArray *screens = [NSScreen screens]; - if(screen_idx<0) screen_idx=0; - if(screen_idx>=[screens count]) screen_idx=0; - return (NSScreen *) [screens objectAtIndex: screen_idx]; -} -static CGDirectDisplayID NewtScreen_getCGDirectDisplayIDByNSScreen(NSScreen *screen) { - // Mind: typedef uint32_t CGDirectDisplayID; - however, we assume it's 64bit on 64bit ?! - NSDictionary * dict = [screen deviceDescription]; - NSNumber * val = (NSNumber *) [dict objectForKey: @"NSScreenNumber"]; - // [NSNumber integerValue] returns NSInteger which is 32 or 64 bit native size - return (CGDirectDisplayID) [val integerValue]; -} -static long GetDictionaryLong(CFDictionaryRef theDict, const void* key) -{ - long value = 0; - CFNumberRef numRef; - numRef = (CFNumberRef)CFDictionaryGetValue(theDict, key); - if (numRef != NULL) - CFNumberGetValue(numRef, kCFNumberLongType, &value); - return value; -} -#define CGDDGetModeRefreshRate(mode) GetDictionaryLong((mode), kCGDisplayRefreshRate) - -int getScreenRefreshRate(int scrn_idx) { - NSScreen *screen = NewtScreen_getNSScreenByIndex(scrn_idx); - CGDirectDisplayID display = NewtScreen_getCGDirectDisplayIDByNSScreen(screen); - CFDictionaryRef mode = CGDisplayCurrentMode(display); - return CGDDGetModeRefreshRate(mode); -} - diff --git a/src/jogl/native/openmax/jogamp_opengl_util_av_impl_OMXGLMediaPlayer.c b/src/jogl/native/openmax/jogamp_opengl_util_av_impl_OMXGLMediaPlayer.c index 964ac64a7..3166306ba 100644 --- a/src/jogl/native/openmax/jogamp_opengl_util_av_impl_OMXGLMediaPlayer.c +++ b/src/jogl/native/openmax/jogamp_opengl_util_av_impl_OMXGLMediaPlayer.c @@ -67,7 +67,7 @@ void OMXInstance_UpdateJavaAttributes(OMXToolBasicAV_t *pAV) return; } int shallBeDetached = 0; - JNIEnv * env = JoglCommon_GetJNIEnv (&shallBeDetached); + JNIEnv * env = JoglCommon_GetJNIEnv (1 /* daemon */, &shallBeDetached); if(NULL!=env) { (*env)->CallVoidMethod(env, (jobject)pAV->jni_instance, 
jni_mid_updateAttributes, pAV->width, pAV->height, @@ -75,7 +75,8 @@ void OMXInstance_UpdateJavaAttributes(OMXToolBasicAV_t *pAV) pAV->framerate, (uint32_t)(pAV->length*pAV->framerate), pAV->length, (*env)->NewStringUTF(env, pAV->videoCodec), (*env)->NewStringUTF(env, pAV->audioCodec) ); - JoglCommon_ReleaseJNIEnv (shallBeDetached); + // detaching thread not required - daemon + // JoglCommon_ReleaseJNIEnv(shallBeDetached); } } @@ -107,11 +108,11 @@ JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_OMXGLMediaPlayer__1setStr } JNIEXPORT void JNICALL Java_jogamp_opengl_util_av_impl_OMXGLMediaPlayer__1setStreamEGLImageTexture2D - (JNIEnv *env, jobject instance, jlong ptr, jint i, jint tex, jlong image, jlong sync) + (JNIEnv *env, jobject instance, jlong ptr, jint tex, jlong image, jlong sync) { OMXToolBasicAV_t *pOMXAV = (OMXToolBasicAV_t *)((void *)((intptr_t)ptr)); if (pOMXAV != NULL) { - OMXToolBasicAV_SetStreamEGLImageTexture2D( pOMXAV, i, (GLuint) tex, + OMXToolBasicAV_SetStreamEGLImageTexture2D( pOMXAV, (GLuint) tex, (EGLImageKHR)(intptr_t)image, (EGLSyncKHR)(intptr_t)sync); } diff --git a/src/jogl/native/openmax/omx_tool.c b/src/jogl/native/openmax/omx_tool.c index 16c43f6a8..35d5cfa58 100644 --- a/src/jogl/native/openmax/omx_tool.c +++ b/src/jogl/native/openmax/omx_tool.c @@ -1049,21 +1049,17 @@ void OMXToolBasicAV_SetStream(OMXToolBasicAV_t * pOMXAV, int vBufferNum, const K DBG_PRINT( "SetStream X\n"); } -void OMXToolBasicAV_SetStreamEGLImageTexture2D(OMXToolBasicAV_t * pOMXAV, KDint i, GLuint tex, EGLImageKHR image, EGLSyncKHR sync) +void OMXToolBasicAV_SetStreamEGLImageTexture2D(OMXToolBasicAV_t * pOMXAV, GLuint tex, EGLImageKHR image, EGLSyncKHR sync) { if(NULL==pOMXAV) { JoglCommon_throwNewRuntimeException(0, "OMX instance null\n"); return; } - DBG_PRINT( "SetStreamEGLImg %p #%d/%d t:%d i:%p s:%p..\n", pOMXAV, i, pOMXAV->vBufferNum, tex, image, sync); - if(i<0||i>=pOMXAV->vBufferNum) { - JoglCommon_throwNewRuntimeException(0, "Buffer index out of 
range: %d\n", i); - return; - } + DBG_PRINT( "SetStreamEGLImg %p count %d t:%d i:%p s:%p..\n", pOMXAV, pOMXAV->vBufferNum, tex, image, sync); kdThreadMutexLock(pOMXAV->mutex); { - OMXToolImageBuffer_t *pBuf = &pOMXAV->buffers[i]; + OMXToolImageBuffer_t *pBuf = &pOMXAV->buffers[0]; // FIXME: Move all sync/buffer handling to Java - Already done -> GLMediaPlayerImpl pBuf->tex=tex; pBuf->image=image; pBuf->sync=sync; @@ -1644,7 +1640,7 @@ int ModuleTest() printf("6 eglGetError: 0x%x\n", eglGetError()); - if(OMXToolBasicAV_SetStreamEGLImageTexture2D(pOMXAV, i, tex, image, sync)) { + if(OMXToolBasicAV_SetStreamEGLImageTexture2D(pOMXAV, tex, image, sync)) { return -1; } } diff --git a/src/jogl/native/openmax/omx_tool.h b/src/jogl/native/openmax/omx_tool.h index 414befca0..022c56cf9 100644 --- a/src/jogl/native/openmax/omx_tool.h +++ b/src/jogl/native/openmax/omx_tool.h @@ -28,6 +28,7 @@ #include <stdlib.h> #include <string.h> +// FIXME: Move all sync/buffer handling to Java - Already done -> GLMediaPlayerImpl #define EGLIMAGE_MAX_BUFFERS 4 extern int USE_OPENGL; @@ -90,6 +91,7 @@ typedef struct { KDThreadMutex * mutex; KDThreadSem * flushSem; + // FIXME: Move all sync/buffer handling to Java - Already done -> GLMediaPlayerImpl OMXToolImageBuffer_t buffers[EGLIMAGE_MAX_BUFFERS]; int vBufferNum; int glPos; @@ -118,7 +120,7 @@ KDint OMXToolBasicAV_SetState(OMXToolBasicAV_t * pOMXAV, OMX_STATETYPE state, KD // OMXToolBasicAV_t * OMXToolBasicAV_CreateInstance(EGLDisplay dpy); // #1 void OMXToolBasicAV_SetStream(OMXToolBasicAV_t * pOMXAV, int vBufferNum, const KDchar * stream); // #2 -void OMXToolBasicAV_SetStreamEGLImageTexture2D(OMXToolBasicAV_t * pOMXAV, KDint i, GLuint tex, EGLImageKHR image, EGLSyncKHR sync); // #3 +void OMXToolBasicAV_SetStreamEGLImageTexture2D(OMXToolBasicAV_t * pOMXAV, GLuint tex, EGLImageKHR image, EGLSyncKHR sync); // #3 void OMXToolBasicAV_ActivateStream(OMXToolBasicAV_t * pOMXAV); // #4 void OMXToolBasicAV_AttachVideoRenderer(OMXToolBasicAV_t * 
pOMXAV); // Stop, DetachVideoRenderer, SetEGLImageTexture2D .. before .. diff --git a/src/jogl/native/timespec.c b/src/jogl/native/timespec.c index 50f0ca8c5..a69f4635e 100644 --- a/src/jogl/native/timespec.c +++ b/src/jogl/native/timespec.c @@ -75,3 +75,8 @@ long timespec_milliseconds(struct timespec *a) { return a->tv_sec*1000 + a->tv_nsec/1000000; } + +long timespec_microseconds(struct timespec *a) +{ + return a->tv_sec*1000000 + a->tv_nsec/1000; +} diff --git a/src/jogl/native/timespec.h b/src/jogl/native/timespec.h index f900bfa16..a621562b9 100644 --- a/src/jogl/native/timespec.h +++ b/src/jogl/native/timespec.h @@ -17,4 +17,7 @@ void timespec_subtract(struct timespec *r, struct timespec *a, struct timespec * /** convert the timespec into milliseconds (may overflow) */ long timespec_milliseconds(struct timespec *a); +/** convert the timespec into microseconds (may overflow) */ +long timespec_microseconds(struct timespec *a); + #endif /* _timespec_h */ |