author | Sven Gothel <[email protected]> | 2013-08-27 19:21:17 +0200
committer | Sven Gothel <[email protected]> | 2013-08-27 19:21:17 +0200
commit | 554ec0576432194f050191bdf248a1462d542a6d (patch)
tree | c8f096f0cca0c07ae23d9d2cd81f91bd9334210f /src
parent | 0de489019085246abb437454e9ac8fd6fc238692 (diff)
GLMediaPlayer: Add camera input / FFMPEG: Fix 'av_packet' leak and add missing symbol 'av_realloc'.
- Add camera input
  - Use a URI with scheme 'camera' to request camera input,
    with the URI host naming the camera id,
    e.g. 'camera://0' for the 1st camera (see the sketch after this list).
  - AndroidGLMediaPlayerAPI14: via 'Camera'
  - FFMPEG*: via libavdevice, using the device name and input format
  - TODO: add controls to manipulate the camera, if available
- FFMPEG*
  - Add symbols
    - avcodec_register_all
    - av_realloc (was missing)
    - avdevice_register_all
  - Load libavdevice (optional)
  - Camera:
    - Use <ID> on Windows and /dev/video<ID> on other OSes
    - Simply find the input format in native code
  - Support YUYV422 (used by video4linux2, etc.)
    - Stuff 2x 16bpp (YUYV) into one RGBA pixel!
    - Add a texture format for 16bpp
    - Add a texture lookup shader (see the YUYV sketch below the diffstat)
- Fix av_packet leak in readNextImpl(..)
  - Restore the original pointer and size values, since we may have
    advanced within the packet, then call av_free_packet().
- Use a Null AudioSink if the audio id is NONE
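
To make the new scheme concrete, below is a minimal sketch (not part of the patch; the class and variable names are illustrative only) of how a 'camera' URI is interpreted: the scheme selects camera input, the URI host carries the camera id, and FFMPEGMediaPlayer later maps that id to an OS-specific device name as the diff shows.

```java
import java.net.URI;
import java.net.URISyntaxException;

// Illustrative sketch only; CameraUriSketch is not part of this patch.
public class CameraUriSketch {
    // Mirrors GLMediaPlayer.CameraInputScheme introduced by this commit.
    static final String CAMERA_SCHEME = "camera";

    public static void main(String[] args) throws URISyntaxException {
        final URI streamLoc = new URI("camera://0"); // 1st camera device

        // Pre-parse as GLMediaPlayerImpl.initStream(..) does: the URI host is the camera id.
        final String cameraHostPart =
                CAMERA_SCHEME.equals(streamLoc.getScheme()) ? streamLoc.getHost() : null;

        if (null != cameraHostPart) {
            // FFMPEGMediaPlayer then maps the id to an OS-specific device name
            // (the real code switches on Platform.OS_TYPE):
            final String linuxDevice   = "/dev/video" + cameraHostPart; // video4linux2
            final String windowsDevice = cameraHostPart;                // e.g. vfwcap index
            System.err.println("camera id " + cameraHostPart
                    + ", linux " + linuxDevice + ", windows " + windowsDevice);
        } else {
            System.err.println("regular stream: " + streamLoc);
        }
    }
}
```

On Android the same host part is parsed as an integer and passed to Camera.open(cameraId) instead, as the AndroidGLMediaPlayerAPI14 hunk below shows.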
Diffstat (limited to 'src')
13 files changed, 566 insertions, 139 deletions
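
As an aside on the YUYV422 path mentioned in the commit message: the decoded frame is uploaded as an RGBA texture of half the video width, so one RGBA texel carries a full Y0 Cb Y1 Cr macro-pixel, and the new texture lookup shader picks Y0 or Y1 per output column. The Java sketch below (hypothetical class and method names, not part of the patch) replays the shader's arithmetic on the CPU, using the same constants as the GLSL string added to FFMPEGMediaPlayer.

```java
// Illustrative sketch only; mirrors the YUYV422 lookup shader added in FFMPEGMediaPlayer.
public class Yuyv422Sketch {
    /**
     * Convert one output pixel of a packed YUYV macro-pixel (Y0 Cb Y1 Cr, stored as one
     * RGBA texel) to RGB. 'evenColumn' selects Y0 or Y1, roughly matching the
     * mix(y1, y2, mod(gl_FragCoord.x, 2)) selection in the shader.
     * All values are normalized to [0..1]; the result is not clamped.
     */
    static float[] yuyvToRgb(final float y0, final float u0, final float y1, final float v0,
                             final boolean evenColumn) {
        float y = evenColumn ? y0 : y1;   // luma sample for this output column
        final float u = u0 - 0.5f;        // center chroma
        final float v = v0 - 0.5f;
        y = 1.1643f * (y - 0.0625f);      // video-range luma expansion, same constant as shader
        final float r = y + 1.5958f  * v;
        final float g = y - 0.39173f * u - 0.81290f * v;
        final float b = y + 2.017f   * u;
        return new float[] { r, g, b };
    }

    public static void main(final String[] args) {
        // Mid-grey luma with neutral chroma should map to roughly mid-grey RGB.
        final float[] rgb = yuyvToRgb(0.5f, 0.5f, 0.5f, 0.5f, true);
        System.err.printf("r %.3f, g %.3f, b %.3f%n", rgb[0], rgb[1], rgb[2]);
    }
}
```

The half-width packing is also why the native code below sets vTexWidth[0] to width/2 for PIX_FMT_YUYV422.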
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java index 6235bdeb0..0feca9f45 100644 --- a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java +++ b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java @@ -47,6 +47,9 @@ import com.jogamp.opengl.util.TimeFrameI; * Audio and video streams can be selected or muted via {@link #initStream(URI, int, int, int)} * using the appropriate <a href="#streamIDs">stream id</a>'s. * </p> + * <p> + * Camera input can be selected using the {@link #CameraInputScheme} URI. + * </p> * * <a name="streamworker"><h5><i>StreamWorker</i> Decoding Thread</h5></a> * <p> @@ -190,6 +193,18 @@ public interface GLMediaPlayer extends TextureSequence { /** Constant {@value} for <i>auto</i> or <i>unspecified</i>. See <a href="#streamIDs">Audio and video Stream IDs</a>. */ public static final int STREAM_ID_AUTO = -1; + /** + * {@link URI#getScheme() URI scheme} name {@value} for camera input. E.g. <code>camera://0</code> + * for the 1st camera device. + * <p> + * Note: the {@link URI#getHost() URI host} is being used to identify the camera: + * <pre> + * camera://<id> + * </pre> + * </p> + */ + public static final String CameraInputScheme = "camera"; + /** Maximum video frame async of {@value} milliseconds. */ public static final int MAXIMUM_VIDEO_ASYNC = 22; diff --git a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java index 39489cff4..056998c0c 100644 --- a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java +++ b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java @@ -28,6 +28,7 @@ package jogamp.opengl.android.av; import java.io.IOException; +import java.util.List; import javax.media.opengl.GL; import javax.media.opengl.GLES2; @@ -45,6 +46,7 @@ import jogamp.opengl.util.av.GLMediaPlayerImpl; import android.graphics.SurfaceTexture; import android.graphics.SurfaceTexture.OnFrameAvailableListener; +import android.hardware.Camera; import android.media.MediaPlayer; import android.media.MediaPlayer.OnCompletionListener; import android.net.Uri; @@ -85,6 +87,8 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { public static final boolean isAvailable() { return available; } private MediaPlayer mp; + private Camera cam; + private long playStart = 0; private volatile boolean updateSurface = false; private Object updateSurfaceLock = new Object(); private SurfaceTextureFrame singleSTexFrame = null; @@ -104,7 +108,6 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { throw new RuntimeException("AndroidGLMediaPlayerAPI14 not available"); } this.setTextureTarget(GLES2.GL_TEXTURE_EXTERNAL_OES); - mp = new MediaPlayer(); } @Override @@ -130,10 +133,11 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { @Override protected final boolean playImpl() { + playStart = Platform.currentTimeMillis(); if(null != mp) { try { mp.start(); - eos = false; + eos = false; mp.setOnCompletionListener(onCompletionListener); return true; } catch (IllegalStateException ise) { @@ -141,6 +145,17 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { ise.printStackTrace(); } } + } else if( null != cam ) { + try { + if( sTexFrameAttached ) { + cam.startPreview(); + } + return true; + } catch (IllegalStateException ise) { + if(DEBUG) { + ise.printStackTrace(); + } + } } return false; } @@ -157,6 +172,16 
@@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { ise.printStackTrace(); } } + } else if( null != cam ) { + wakeUp(false); + try { + cam.stopPreview(); + return true; + } catch (IllegalStateException ise) { + if(DEBUG) { + ise.printStackTrace(); + } + } } return false; } @@ -196,6 +221,18 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { mp.release(); mp = null; } + if( null != cam ) { + wakeUp(false); + try { + cam.stopPreview(); + } catch (IllegalStateException ise) { + if(DEBUG) { + ise.printStackTrace(); + } + } + cam.release(); + cam = null; + } } public static class SurfaceTextureFrame extends TextureSequence.TextureFrame { @@ -212,7 +249,27 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { @Override protected final void initStreamImpl(int vid, int aid) throws IOException { - if(null!=mp && null!=streamLoc) { + + if( null == streamLoc ) { + return; + } + if( null == mp && null == cam ) { + if( null == cameraHostPart ) { + mp = new MediaPlayer(); + } else { + int cameraId = 0; + try { + cameraId = Integer.valueOf(cameraHostPart); + } catch (NumberFormatException nfe) {} + if( 0 <= cameraId && cameraId < Camera.getNumberOfCameras() ) { + cam = Camera.open(cameraId); + } else { + cam = Camera.open(); + } + } + } + + if(null!=mp) { if( GLMediaPlayer.STREAM_ID_NONE == aid ) { mp.setVolume(0f, 0f); // FIXME: Disable audio handling @@ -240,8 +297,31 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { mp.getVideoWidth(), mp.getVideoHeight(), 0, 0, 0, 0f, 0, 0, mp.getDuration(), icodec, icodec); + } else if( null != cam ) { + final String icodec = "android"; + final int[] fpsRange = { 0, 0 }; + final Camera.Parameters p = cam.getParameters(); + p.getPreviewFpsRange(fpsRange); + final Camera.Size size = p.getPreviewSize(); + if( DEBUG ) { + final int picFmt = p.getPictureFormat(); + final Camera.Size prefSize = p.getPreferredPreviewSizeForVideo(); + System.err.println("MediaPlayer.Camera: fps "+fpsRange[0]+".."+fpsRange[1]+", size[pref "+camSz2Str(prefSize)+", cur "+camSz2Str(size)+"], fmt "+picFmt); + List<Camera.Size> supSizes = p.getSupportedVideoSizes(); + for(int i=0; i<supSizes.size(); i++) { + System.err.println("size #"+i+": "+camSz2Str(supSizes.get(i))); + } + } + updateAttributes(0 /* fake */, GLMediaPlayer.STREAM_ID_NONE, + size.width, size.height, + 0, 0, 0, + fpsRange[1]/1000f, + 0, 0, 0, icodec, icodec); } } + private static String camSz2Str(Camera.Size csize) { + return csize.width+"x"+csize.height; + } @Override protected final void initGLImpl(GL gl) throws IOException, GLException { // NOP @@ -261,7 +341,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { @Override protected final int getNextTextureImpl(GL gl, TextureFrame nextFrame) { int pts = TimeFrameI.INVALID_PTS; - if(null != mp) { + if(null != mp || null != cam) { final SurfaceTextureFrame sTexFrame = (SurfaceTextureFrame) nextFrame; final SurfaceTexture surfTex = sTexFrame.surfaceTex; if( sTexFrame != singleSTexFrame ) { @@ -269,12 +349,25 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { } if( !sTexFrameAttached ) { sTexFrameAttached = true; - final Surface surface = new Surface(sTexFrame.surfaceTex); - mp.setSurface(surface); - surface.release(); + final Surface surface; + if( null != mp ) { + surface = new Surface(sTexFrame.surfaceTex); + mp.setSurface(surface); + } else { + surface = null; + try { + cam.setPreviewTexture(sTexFrame.surfaceTex); + cam.startPreview(); + } catch (IOException ioe) { + 
throw new RuntimeException("MediaPlayer failed to process stream <"+streamLoc.toString()+">: "+ioe.getMessage(), ioe); + } + } + if( null != surface ) { + surface.release(); + } surfTex.setOnFrameAvailableListener(onFrameAvailableListener); } - if( eos || !mp.isPlaying() ) { + if( eos || (null != mp && !mp.isPlaying() ) ) { eos = true; pts = TimeFrameI.END_OF_STREAM_PTS; } else { @@ -297,7 +390,11 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { if(update) { surfTex.updateTexImage(); // nextFrame.setPTS( (int) ( nextSTex.getTimestamp() / 1000000L ) ); // nano -9 -> milli -3 - pts = mp.getCurrentPosition(); + if( null != mp ) { + pts = mp.getCurrentPosition(); + } else { + pts = (int) ( Platform.currentTimeMillis() - playStart ); + } // stex.getTransformMatrix(atex.getSTMatrix()); } } diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java index 73d5e7748..5286c86b8 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java +++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java @@ -85,7 +85,14 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { protected int[] texMinMagFilter = { GL.GL_NEAREST, GL.GL_NEAREST }; protected int[] texWrapST = { GL.GL_CLAMP_TO_EDGE, GL.GL_CLAMP_TO_EDGE }; + /** User requested URI stream location. */ protected URI streamLoc = null; + /** + * In case {@link #streamLoc} is a {@link GLMediaPlayer#CameraInputScheme}, + * {@link #cameraHostPart} holds the URI's path portion + * as parsed in {@link #initStream(URI, int, int, int)}. + */ + protected String cameraHostPart = null; protected volatile float playSpeed = 1.0f; protected float audioVolume = 1.0f; @@ -463,6 +470,15 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { presentedFrameCount = 0; displayedFrameCount = 0; this.streamLoc = streamLoc; + + // Pre-parse for camera-input scheme + final String streamLocScheme = streamLoc.getScheme(); + if( null != streamLocScheme && streamLocScheme.equals(CameraInputScheme) ) { + cameraHostPart = streamLoc.getHost(); + } else { + cameraHostPart = null; + } + this.vid = vid; this.aid = aid; if (this.streamLoc != null) { @@ -598,7 +614,9 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { { final int err = gl.glGetError(); if( GL.GL_NO_ERROR != err ) { - throw new RuntimeException("Couldn't create TexImage2D RGBA "+tWidth+"x"+tHeight+", err "+toHexString(err)); + throw new RuntimeException("Couldn't create TexImage2D RGBA "+tWidth+"x"+tHeight+", target "+toHexString(textureTarget)+ + ", ifmt "+toHexString(GL.GL_RGBA)+", fmt "+toHexString(textureFormat)+", type "+toHexString(textureType)+ + ", err "+toHexString(err)); } } if(DEBUG) { @@ -1303,12 +1321,13 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { final String loc = ( null != streamLoc ) ? streamLoc.toString() : "<undefined stream>" ; final int freeVideoFrames = null != videoFramesFree ? videoFramesFree.size() : 0; final int decVideoFrames = null != videoFramesDecoded ? videoFramesDecoded.size() : 0; - final int video_scr = video_scr_pts + (int) ( ( Platform.currentTimeMillis() - video_scr_t0 ) * playSpeed ); + final int video_scr = video_scr_pts + (int) ( ( Platform.currentTimeMillis() - video_scr_t0 ) * playSpeed ); + final String camPath = null != cameraHostPart ? 
", camera: "+cameraHostPart : ""; return "GLMediaPlayer["+state+", vSCR "+video_scr+", frames[p "+presentedFrameCount+", d "+decodedFrameCount+", t "+videoFrames+" ("+tt+" s)], "+ "speed "+playSpeed+", "+bps_stream+" bps, "+ "Texture[count "+textureCount+", free "+freeVideoFrames+", dec "+decVideoFrames+", target "+toHexString(textureTarget)+", format "+toHexString(textureFormat)+", type "+toHexString(textureType)+"], "+ "Video[id "+vid+", <"+vcodec+">, "+width+"x"+height+", "+fps+" fps, "+frame_duration+" fdur, "+bps_video+" bps], "+ - "Audio[id "+aid+", <"+acodec+">, "+bps_audio+" bps, "+audioFrames+" frames], uri "+loc+"]"; + "Audio[id "+aid+", <"+acodec+">, "+bps_audio+" bps, "+audioFrames+" frames], uri "+loc+camPath+"]"; } @Override diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java index ab16c5e5e..040d152f5 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java +++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java @@ -60,7 +60,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo { private static final List<String> glueLibNames = new ArrayList<String>(); // none - private static final int symbolCount = 51; + private static final int symbolCount = 54; private static final String[] symbolNames = { "avcodec_version", "avformat_version", @@ -68,6 +68,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo { /* 4 */ "avresample_version", // libavcodec + "avcodec_register_all", "avcodec_close", "avcodec_string", "avcodec_find_decoder", @@ -85,16 +86,17 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo { "av_free_packet", "avcodec_decode_audio4", // 53.25.0 (opt) "avcodec_decode_audio3", // 52.23.0 -/* 22 */ "avcodec_decode_video2", // 52.23.0 +/* 23 */ "avcodec_decode_video2", // 52.23.0 // libavutil "av_pix_fmt_descriptors", "av_frame_unref", // 55.0.0 (opt) + "av_realloc", "av_free", "av_get_bits_per_pixel", "av_samples_get_buffer_size", "av_get_bytes_per_sample", // 51.4.0 -/* 29 */ "av_opt_set_int", // 51.12.0 +/* 31 */ "av_opt_set_int", // 51.12.0 // libavformat "avformat_alloc_context", @@ -113,14 +115,17 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo { "avformat_network_init", // 53.13.0 (opt) "avformat_network_deinit", // 53.13.0 (opt) "avformat_find_stream_info", // 53.3.0 (opt) -/* 46 */ "av_find_stream_info", +/* 48 */ "av_find_stream_info", + // libavdevice +/* 49 */ "avdevice_register_all", // ??? + // libavresample "avresample_alloc_context", // 1.0.1 "avresample_open", "avresample_close", "avresample_free", -/* 51 */ "avresample_convert" +/* 54 */ "avresample_convert" }; // alternate symbol names @@ -139,7 +144,8 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo { "avformat_seek_file", // ??? 
(opt) "avcodec_free_frame", // 54.28.0 (opt) "av_frame_unref", // 55.0.0 (opt) - + // libavdevice + "avdevice_register_all", // 53.0.0 (opt) // libavresample "avresample_version", // 1.0.1 "avresample_alloc_context", // 1.0.1 @@ -153,6 +159,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo { private static final boolean ready; private static final boolean libsLoaded; private static final boolean avresampleLoaded; // optional + private static final boolean avdeviceLoaded; // optional static final VersionNumber avCodecVersion; static final VersionNumber avFormatVersion; static final VersionNumber avUtilVersion; @@ -164,14 +171,16 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo { GLProfile.initSingleton(); boolean _ready = false; boolean[] _libsLoaded= { false }; + boolean[] _avdeviceLoaded= { false }; boolean[] _avresampleLoaded= { false }; VersionNumber[] _versions = new VersionNumber[4]; try { - _ready = initSymbols(_libsLoaded, _avresampleLoaded, _versions); + _ready = initSymbols(_libsLoaded, _avdeviceLoaded, _avresampleLoaded, _versions); } catch (Throwable t) { t.printStackTrace(); } libsLoaded = _libsLoaded[0]; + avdeviceLoaded = _avdeviceLoaded[0]; avresampleLoaded = _avresampleLoaded[0]; avCodecVersion = _versions[0]; avFormatVersion = _versions[1]; @@ -205,11 +214,13 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo { } static boolean libsLoaded() { return libsLoaded; } + static boolean avDeviceLoaded() { return avdeviceLoaded; } static boolean avResampleLoaded() { return avresampleLoaded; } static FFMPEGNatives getNatives() { return natives; } static boolean initSingleton() { return ready; } - private static final boolean initSymbols(boolean[] libsLoaded, boolean[] avresampleLoaded, VersionNumber[] versions) { + private static final boolean initSymbols(boolean[] libsLoaded, boolean[] avdeviceLoaded, boolean[] avresampleLoaded, + VersionNumber[] versions) { libsLoaded[0] = false; final DynamicLibraryBundle dl = AccessController.doPrivileged(new PrivilegedAction<DynamicLibraryBundle>() { public DynamicLibraryBundle run() { @@ -221,7 +232,8 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo { if(!avutilLoaded || !avformatLoaded || !avcodecLoaded) { throw new RuntimeException("FFMPEG Tool library incomplete: [ avutil "+avutilLoaded+", avformat "+avformatLoaded+", avcodec "+avcodecLoaded+"]"); } - avresampleLoaded[0] = dl.isToolLibLoaded(3); + avdeviceLoaded[0] = dl.isToolLibLoaded(3); + avresampleLoaded[0] = dl.isToolLibLoaded(4); libsLoaded[0] = true; if(symbolNames.length != symbolCount) { @@ -361,6 +373,16 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo { avcodec.add("avcodec-52"); // 0.7 libsList.add(avcodec); + final List<String> avdevice = new ArrayList<String>(); + avdevice.add("avdevice"); // default + + avdevice.add("libavdevice.so.54"); // dummy future proof + avdevice.add("libavdevice.so.53"); // 8 && 9 + + avdevice.add("avdevice-54"); // dummy future proof + avdevice.add("avdevice-53"); // 8 && 9 + libsList.add(avdevice); + final List<String> avresample = new ArrayList<String>(); avresample.add("avresample"); // default diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java index bcf4994b5..f46c5900c 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java +++ 
b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java @@ -129,7 +129,8 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { System.err.println("LIB_AV Util : "+FFMPEGDynamicLibraryBundleInfo.avUtilVersion+" [cc "+avUtilMajorVersionCC+"]"); System.err.println("LIB_AV Format : "+FFMPEGDynamicLibraryBundleInfo.avFormatVersion+" [cc "+avFormatMajorVersionCC+"]"); System.err.println("LIB_AV Codec : "+FFMPEGDynamicLibraryBundleInfo.avCodecVersion+" [cc "+avCodecMajorVersionCC+"]"); - System.err.println("LIB_AV Resample: "+FFMPEGDynamicLibraryBundleInfo.avResampleVersion+" [cc "+avResampleMajorVersionCC+"]"); + System.err.println("LIB_AV Device : [loaded "+FFMPEGDynamicLibraryBundleInfo.avDeviceLoaded()+"]"); + System.err.println("LIB_AV Resample: "+FFMPEGDynamicLibraryBundleInfo.avResampleVersion+" [cc "+avResampleMajorVersionCC+", loaded "+FFMPEGDynamicLibraryBundleInfo.avResampleLoaded()+"]"); libAVVersionGood = avUtilMajorVersionCC == FFMPEGDynamicLibraryBundleInfo.avUtilVersion.getMajor() && avFormatMajorVersionCC == FFMPEGDynamicLibraryBundleInfo.avFormatVersion.getMajor() && avCodecMajorVersionCC == FFMPEGDynamicLibraryBundleInfo.avCodecVersion.getMajor() && @@ -160,6 +161,8 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { // Video // + private String texLookupFuncName = "ffmpegTexture2D"; + private boolean usesTexLookupShader = false; private PixelFormat vPixelFmt = null; private int vPlanes = 0; private int vBitsPerPixel = 0; @@ -205,8 +208,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { } } - public static final String dev_video = "/dev/video"; - private static final int dev_video_len = dev_video.length(); + public static final String dev_video_linux = "/dev/video"; @Override protected final void initStreamImpl(int vid, int aid) throws IOException { @@ -229,40 +231,34 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { System.err.println("initStream: p2 preferred "+preferredAudioFormat+", "+this); } - final int streamLocSLen = streamLocS.length(); - final String inFormat; + final boolean isCameraInput = null != cameraHostPart; final String resStreamLocS; - if( streamLocSLen == dev_video_len + 1 && streamLocS.startsWith(dev_video) ) { - final int index = Integer.valueOf( streamLocS.substring(streamLocSLen-1) ).intValue(); + if( isCameraInput ) { switch(Platform.OS_TYPE) { - case ANDROID: - // ?? - case FREEBSD: - case HPUX: - case LINUX: - case SUNOS: - resStreamLocS = streamLocS; - inFormat = "video4linux2"; - break; - case WINDOWS: - resStreamLocS = String.valueOf(index); - inFormat = "vfwcap"; - break; - case MACOS: - case OPENKODE: - default: - resStreamLocS = streamLocS; - inFormat = null; - break; + case ANDROID: + // ?? + case FREEBSD: + case HPUX: + case LINUX: + case SUNOS: + resStreamLocS = dev_video_linux + cameraHostPart; + break; + case WINDOWS: + resStreamLocS = cameraHostPart; + break; + case MACOS: + case OPENKODE: + default: + resStreamLocS = streamLocS; // FIXME: ?? + break; } } else { resStreamLocS = streamLocS; - inFormat = null; } final int aMaxChannelCount = audioSink.getMaxSupportedChannels(); final int aPrefSampleRate = preferredAudioFormat.sampleRate; // setStream(..) issues updateAttributes*(..), and defines avChosenAudioFormat, vid, aid, .. 
etc - natives.setStream0(moviePtr, resStreamLocS, inFormat, vid, aid, aMaxChannelCount, aPrefSampleRate); + natives.setStream0(moviePtr, resStreamLocS, isCameraInput, vid, aid, aMaxChannelCount, aPrefSampleRate); } @Override @@ -300,6 +296,10 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { System.err.println("initGL: p3 avChosen "+avChosenAudioFormat); } + if( STREAM_ID_NONE == aid ) { + audioSink.destroy(); + audioSink = AudioSinkFactory.createNull(); + } final boolean audioSinkOK = audioSink.init(avChosenAudioFormat, frameDuration, AudioSink.DefaultInitialQueueSize, AudioSink.DefaultQueueGrowAmount, audioQueueLimit); if( !audioSinkOK ) { System.err.println("AudioSink "+audioSink.getClass().getName()+" does not support "+avChosenAudioFormat+", using Null"); @@ -314,6 +314,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { if( null != gl ) { int tf, tif=GL.GL_RGBA; // texture format and internal format + int tt = GL.GL_UNSIGNED_BYTE; switch(vBytesPerPixelPerPlane) { case 1: if( gl.isGL3ES3() ) { @@ -324,12 +325,20 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { tf = GL2ES2.GL_ALPHA; tif=GL2ES2.GL_ALPHA; singleTexComp = "a"; } break; - case 3: tf = GL2ES2.GL_RGB; tif=GL.GL_RGB; break; - case 4: tf = GL2ES2.GL_RGBA; tif=GL.GL_RGBA; break; + + case 2: if( vPixelFmt == PixelFormat.YUYV422 ) { + // YUYV422: // < packed YUV 4:2:2, 2x 16bpp, Y0 Cb Y1 Cr + // Stuffed into RGBA half width texture + tf = GL2ES2.GL_RGBA; tif=GL2ES2.GL_RGBA; break; + } else { + tf = GL2ES2.GL_RG; tif=GL2ES2.GL_RG; break; + } + case 3: tf = GL2ES2.GL_RGB; tif=GL.GL_RGB; break; + case 4: tf = GL2ES2.GL_RGBA; tif=GL.GL_RGBA; break; default: throw new RuntimeException("Unsupported bytes-per-pixel / plane "+vBytesPerPixelPerPlane); } setTextureFormat(tif, tf); - setTextureType(GL.GL_UNSIGNED_BYTE); + setTextureType(tt); } } @Override @@ -435,7 +444,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { */ void updateAttributes2(int pixFmt, int planes, int bitsPerPixel, int bytesPerPixelPerPlane, int lSz0, int lSz1, int lSz2, - int tWd0, int tWd1, int tWd2, int tH, + int tWd0, int tWd1, int tWd2, int vW, int vH, int audioSampleFmt, int audioSampleRate, int audioChannels, int audioSamplesPerFrameAndChannel) { vPixelFmt = PixelFormat.valueOf(pixFmt); @@ -446,23 +455,28 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { vTexWidth[0] = tWd0; vTexWidth[1] = tWd1; vTexWidth[2] = tWd2; switch(vPixelFmt) { - case YUV420P: + case YUV420P: // < planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples) + usesTexLookupShader = true; // YUV420P: Adding U+V on right side of fixed height texture, // since width is already aligned by decoder. // Y=w*h, Y=w/2*h/2, U=w/2*h/2 // w*h + 2 ( w/2 * h/2 ) // w*h + w*h/2 // 2*w/2 * h - texWidth = vTexWidth[0] + vTexWidth[1]; texHeight = tH; + texWidth = vTexWidth[0] + vTexWidth[1]; texHeight = vH; + break; + case YUYV422: // < packed YUV 4:2:2, 2x 16bpp, Y0 Cb Y1 Cr - stuffed into RGBA half width texture + usesTexLookupShader = true; + texWidth = vTexWidth[0]; texHeight = vH; break; - // case PIX_FMT_YUYV422: case RGB24: case BGR24: case ARGB: case RGBA: case ABGR: case BGRA: - texWidth = vTexWidth[0]; texHeight = tH; + usesTexLookupShader = false; + texWidth = vTexWidth[0]; texHeight = vH; break; default: // FIXME: Add more formats ! 
throw new RuntimeException("Unsupported pixelformat: "+vPixelFmt); @@ -474,7 +488,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { if(DEBUG) { System.err.println("audio: fmt "+aSampleFmt+", "+avChosenAudioFormat+", aFrameSize/fc "+audioSamplesPerFrameAndChannel); - System.err.println("video: fmt "+vPixelFmt+", planes "+vPlanes+", bpp "+vBitsPerPixel+"/"+vBytesPerPixelPerPlane); + System.err.println("video: fmt "+vW+"x"+vH+", "+vPixelFmt+", planes "+vPlanes+", bpp "+vBitsPerPixel+"/"+vBytesPerPixelPerPlane+", usesTexLookupShader "+usesTexLookupShader); for(int i=0; i<3; i++) { System.err.println("video: "+i+": "+vTexWidth[i]+"/"+vLinesize[i]); } @@ -494,15 +508,14 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { if(State.Uninitialized == state) { throw new IllegalStateException("Instance not initialized: "+this); } - if(PixelFormat.YUV420P == vPixelFmt) { + if( usesTexLookupShader ) { if(null != desiredFuncName && desiredFuncName.length()>0) { - textureLookupFunctionName = desiredFuncName; + texLookupFuncName = desiredFuncName; } - return textureLookupFunctionName; + return texLookupFuncName; } return super.getTextureLookupFunctionName(desiredFuncName); } - private String textureLookupFunctionName = "ffmpegTexture2D"; /** * {@inheritDoc} @@ -515,11 +528,14 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { if(State.Uninitialized == state) { throw new IllegalStateException("Instance not initialized: "+this); } + if( !usesTexLookupShader ) { + return super.getTextureLookupFragmentShaderImpl(); + } final float tc_w_1 = (float)getWidth() / (float)texWidth; switch(vPixelFmt) { - case YUV420P: + case YUV420P: // < planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples) return - "vec4 "+textureLookupFunctionName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+ + "vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+ " vec2 u_off = vec2("+tc_w_1+", 0.0);\n"+ " vec2 v_off = vec2("+tc_w_1+", 0.5);\n"+ " vec2 tc_half = texCoord*0.5;\n"+ @@ -536,8 +552,30 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { " return vec4(r, g, b, 1);\n"+ "}\n" ; + case YUYV422: // < packed YUV 4:2:2, 2 x 16bpp, [Y0 Cb] [Y1 Cr] + // Stuffed into RGBA half width texture + return + "vec4 "+texLookupFuncName+"(in "+getTextureSampler2DType()+" image, in vec2 texCoord) {\n"+ + " "+ + " float y1,u,y2,v,y,r,g,b;\n"+ + " vec2 tc_halfw = vec2(texCoord.x*0.5, texCoord.y);\n"+ + " vec4 yuyv = texture2D(image, tc_halfw).rgba;\n"+ + " y1 = yuyv.r;\n"+ + " u = yuyv.g;\n"+ + " y2 = yuyv.b;\n"+ + " v = yuyv.a;\n"+ + " y = mix( y1, y2, mod(gl_FragCoord.x, 2) ); /* avoid branching! */\n"+ + " y = 1.1643*(y-0.0625);\n"+ + " u = u-0.5;\n"+ + " v = v-0.5;\n"+ + " r = y+1.5958*v;\n"+ + " g = y-0.39173*u-0.81290*v;\n"+ + " b = y+2.017*u;\n"+ + " return vec4(r, g, b, 1);\n"+ + "}\n" + ; default: // FIXME: Add more formats ! 
- return super.getTextureLookupFragmentShaderImpl(); + throw new InternalError("Add proper mapping of: vPixelFmt "+vPixelFmt+", usesTexLookupShader "+usesTexLookupShader); } } diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java index 89cefbf17..9dd1ac74a 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java +++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java @@ -59,7 +59,7 @@ interface FFMPEGNatives { * @param aPrefChannelCount * @param aPrefSampleRate */ - void setStream0(long moviePtr, String url, String inFormat, int vid, int aid, int aMaxChannelCount, int aPrefSampleRate); + void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, int aid, int aMaxChannelCount, int aPrefSampleRate); void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish); int getVideoPTS0(long moviePtr); diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv08Natives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv08Natives.java index 1c0f66e0c..3b2567655 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv08Natives.java +++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv08Natives.java @@ -53,7 +53,7 @@ class FFMPEGv08Natives implements FFMPEGNatives { public native void destroyInstance0(long moviePtr); @Override - public native void setStream0(long moviePtr, String url, String inFormat, int vid, int aid, int aMaxChannelCount, int aPrefSampleRate); + public native void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, int aid, int aMaxChannelCount, int aPrefSampleRate); @Override public native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish); diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv09Natives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv09Natives.java index 0bc26923c..6c56d3ccb 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv09Natives.java +++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv09Natives.java @@ -53,7 +53,7 @@ class FFMPEGv09Natives implements FFMPEGNatives { public native void destroyInstance0(long moviePtr); @Override - public native void setStream0(long moviePtr, String url, String inFormat, int vid, int aid, int aMaxChannelCount, int aPrefSampleRate); + public native void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, int aid, int aMaxChannelCount, int aPrefSampleRate); @Override public native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish); diff --git a/src/jogl/native/libav/ffmpeg_tool.h b/src/jogl/native/libav/ffmpeg_tool.h index d62cff60f..90d795b91 100644 --- a/src/jogl/native/libav/ffmpeg_tool.h +++ b/src/jogl/native/libav/ffmpeg_tool.h @@ -154,7 +154,8 @@ typedef struct { PTSStats vPTSStats; int32_t vLinesize[3]; // decoded video linesize in bytes for each plane int32_t vTexWidth[3]; // decoded video tex width in bytes for each plane - + int32_t vWidth; + int32_t vHeight; int32_t aid; AVStream* pAStream; diff --git a/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGvXXNatives.c b/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGvXXNatives.c index 54615c4b9..aaa26cfb1 100644 --- a/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGvXXNatives.c +++ 
b/src/jogl/native/libav/jogamp_opengl_util_av_impl_FFMPEGvXXNatives.c @@ -62,6 +62,7 @@ static AVRESAMPLE_VERSION sp_avresample_version; // count: 4 // libavcodec +typedef int (APIENTRYP AVCODEC_REGISTER_ALL)(void); typedef int (APIENTRYP AVCODEC_CLOSE)(AVCodecContext *avctx); typedef void (APIENTRYP AVCODEC_STRING)(char *buf, int buf_size, AVCodecContext *enc, int encode); typedef AVCodec *(APIENTRYP AVCODEC_FIND_DECODER)(enum CodecID id); @@ -81,6 +82,7 @@ typedef int (APIENTRYP AVCODEC_DECODE_AUDIO4)(AVCodecContext *avctx, AVFrame *fr typedef int (APIENTRYP AVCODEC_DECODE_AUDIO3)(AVCodecContext *avctx, int16_t *samples, int *frame_size_ptr, AVPacket *avpkt); // 52.23.0 typedef int (APIENTRYP AVCODEC_DECODE_VIDEO2)(AVCodecContext *avctx, AVFrame *picture, int *got_picture_ptr, AVPacket *avpkt); // 52.23.0 +static AVCODEC_REGISTER_ALL sp_avcodec_register_all; static AVCODEC_CLOSE sp_avcodec_close; static AVCODEC_STRING sp_avcodec_string; static AVCODEC_FIND_DECODER sp_avcodec_find_decoder; @@ -99,10 +101,11 @@ static AV_FREE_PACKET sp_av_free_packet; static AVCODEC_DECODE_AUDIO4 sp_avcodec_decode_audio4; // 53.25.0 static AVCODEC_DECODE_AUDIO3 sp_avcodec_decode_audio3; // 52.23.0 static AVCODEC_DECODE_VIDEO2 sp_avcodec_decode_video2; // 52.23.0 -// count: 22 +// count: 23 // libavutil typedef void (APIENTRYP AV_FRAME_UNREF)(AVFrame *frame); +typedef void* (APIENTRYP AV_REALLOC)(void *ptr, size_t size); typedef void (APIENTRYP AV_FREE)(void *ptr); typedef int (APIENTRYP AV_GET_BITS_PER_PIXEL)(const AVPixFmtDescriptor *pixdesc); typedef int (APIENTRYP AV_SAMPLES_GET_BUFFER_SIZE)(int *linesize, int nb_channels, int nb_samples, enum AVSampleFormat sample_fmt, int align); @@ -110,12 +113,13 @@ typedef int (APIENTRYP AV_GET_BYTES_PER_SAMPLE)(enum AVSampleFormat sample_fmt); typedef int (APIENTRYP AV_OPT_SET_INT)(void *obj, const char *name, int64_t val, int search_flags); static const AVPixFmtDescriptor* sp_av_pix_fmt_descriptors; static AV_FRAME_UNREF sp_av_frame_unref; +static AV_REALLOC sp_av_realloc; static AV_FREE sp_av_free; static AV_GET_BITS_PER_PIXEL sp_av_get_bits_per_pixel; static AV_SAMPLES_GET_BUFFER_SIZE sp_av_samples_get_buffer_size; static AV_GET_BYTES_PER_SAMPLE sp_av_get_bytes_per_sample; static AV_OPT_SET_INT sp_av_opt_set_int; -// count: 28 +// count: 30 // libavformat typedef AVFormatContext *(APIENTRYP AVFORMAT_ALLOC_CONTEXT)(void); @@ -153,7 +157,12 @@ static AVFORMAT_NETWORK_INIT sp_avformat_network_init; // 53.13.0 static AVFORMAT_NETWORK_DEINIT sp_avformat_network_deinit; // 53.13.0 static AVFORMAT_FIND_STREAM_INFO sp_avformat_find_stream_info; // 53.3.0 static AV_FIND_STREAM_INFO sp_av_find_stream_info; -// count: 46 +// count: 47 + +// libavdevice [53.0.0] +typedef int (APIENTRYP AVDEVICE_REGISTER_ALL)(void); +static AVDEVICE_REGISTER_ALL sp_avdevice_register_all; +// count: 49 // libavresample [1.0.1] typedef AVAudioResampleContext* (APIENTRYP AVRESAMPLE_ALLOC_CONTEXT)(void); // 1.0.1 @@ -168,9 +177,9 @@ static AVRESAMPLE_OPEN sp_avresample_open; static AVRESAMPLE_CLOSE sp_avresample_close; static AVRESAMPLE_FREE sp_avresample_free; static AVRESAMPLE_CONVERT sp_avresample_convert; -// count: 51 +// count: 54 -#define SYMBOL_COUNT 51 +#define SYMBOL_COUNT 54 JNIEXPORT jboolean JNICALL FF_FUNC(initSymbols0) (JNIEnv *env, jobject instance, jobject jSymbols, jint count) @@ -194,6 +203,7 @@ JNIEXPORT jboolean JNICALL FF_FUNC(initSymbols0) sp_avresample_version = (AVRESAMPLE_VERSION) (intptr_t) symbols[i++]; // count: 4 + sp_avcodec_register_all = (AVCODEC_REGISTER_ALL) 
(intptr_t) symbols[i++]; sp_avcodec_close = (AVCODEC_CLOSE) (intptr_t) symbols[i++]; sp_avcodec_string = (AVCODEC_STRING) (intptr_t) symbols[i++]; sp_avcodec_find_decoder = (AVCODEC_FIND_DECODER) (intptr_t) symbols[i++]; @@ -212,16 +222,17 @@ JNIEXPORT jboolean JNICALL FF_FUNC(initSymbols0) sp_avcodec_decode_audio4 = (AVCODEC_DECODE_AUDIO4) (intptr_t) symbols[i++]; sp_avcodec_decode_audio3 = (AVCODEC_DECODE_AUDIO3) (intptr_t) symbols[i++]; sp_avcodec_decode_video2 = (AVCODEC_DECODE_VIDEO2) (intptr_t) symbols[i++]; - // count: 22 + // count: 23 sp_av_pix_fmt_descriptors = (const AVPixFmtDescriptor*) (intptr_t) symbols[i++]; sp_av_frame_unref = (AV_FRAME_UNREF) (intptr_t) symbols[i++]; + sp_av_realloc = (AV_REALLOC) (intptr_t) symbols[i++]; sp_av_free = (AV_FREE) (intptr_t) symbols[i++]; sp_av_get_bits_per_pixel = (AV_GET_BITS_PER_PIXEL) (intptr_t) symbols[i++]; sp_av_samples_get_buffer_size = (AV_SAMPLES_GET_BUFFER_SIZE) (intptr_t) symbols[i++]; sp_av_get_bytes_per_sample = (AV_GET_BYTES_PER_SAMPLE) (intptr_t) symbols[i++]; sp_av_opt_set_int = (AV_OPT_SET_INT) (intptr_t) symbols[i++]; - // count: 29 + // count: 31 sp_avformat_alloc_context = (AVFORMAT_ALLOC_CONTEXT) (intptr_t) symbols[i++];; sp_avformat_free_context = (AVFORMAT_FREE_CONTEXT) (intptr_t) symbols[i++]; @@ -240,14 +251,17 @@ JNIEXPORT jboolean JNICALL FF_FUNC(initSymbols0) sp_avformat_network_deinit = (AVFORMAT_NETWORK_DEINIT) (intptr_t) symbols[i++]; sp_avformat_find_stream_info = (AVFORMAT_FIND_STREAM_INFO) (intptr_t) symbols[i++]; sp_av_find_stream_info = (AV_FIND_STREAM_INFO) (intptr_t) symbols[i++]; - // count: 46 + // count: 48 + + sp_avdevice_register_all = (AVDEVICE_REGISTER_ALL) (intptr_t) symbols[i++]; + // count: 49 sp_avresample_alloc_context = (AVRESAMPLE_ALLOC_CONTEXT) (intptr_t) symbols[i++]; sp_avresample_open = (AVRESAMPLE_OPEN) (intptr_t) symbols[i++]; sp_avresample_close = (AVRESAMPLE_CLOSE) (intptr_t) symbols[i++]; sp_avresample_free = (AVRESAMPLE_FREE) (intptr_t) symbols[i++]; sp_avresample_convert = (AVRESAMPLE_CONVERT) (intptr_t) symbols[i++]; - // count: 51 + // count: 54 (*env)->ReleasePrimitiveArrayCritical(env, jSymbols, symbols, 0); @@ -270,23 +284,16 @@ static void _updateJavaAttributes(JNIEnv *env, jobject instance, FFMPEGToolBasic // int shallBeDetached = 0; // JNIEnv * env = JoglCommon_GetJNIEnv (&shallBeDetached); if(NULL!=env) { - int32_t w, h; - if( NULL != pAV->pVCodecCtx ) { - // FIXME: Libav Binary compatibility! 
JAU01 - w = pAV->pVCodecCtx->width; h = pAV->pVCodecCtx->height; - } else { - w = 0; h = 0; - } - (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, jni_mid_updateAttributes2, pAV->vPixFmt, pAV->vBufferPlanes, pAV->vBitsPerPixel, pAV->vBytesPerPixelPerPlane, pAV->vLinesize[0], pAV->vLinesize[1], pAV->vLinesize[2], - pAV->vTexWidth[0], pAV->vTexWidth[1], pAV->vTexWidth[2], h, + pAV->vTexWidth[0], pAV->vTexWidth[1], pAV->vTexWidth[2], + pAV->vWidth, pAV->vHeight, pAV->aSampleFmtOut, pAV->aSampleRateOut, pAV->aChannelsOut, pAV->aFrameSize); (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, jni_mid_updateAttributes1, pAV->vid, pAV->aid, - w, h, + pAV->vWidth, pAV->vHeight, pAV->bps_stream, pAV->bps_video, pAV->bps_audio, pAV->fps, pAV->frames_video, pAV->frames_audio, pAV->duration, (*env)->NewStringUTF(env, pAV->vcodec), @@ -455,7 +462,7 @@ JNIEXPORT jboolean JNICALL FF_FUNC(initIDs0) jni_mid_pushSound = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "pushSound", "(Ljava/nio/ByteBuffer;II)V"); jni_mid_updateAttributes1 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes", "(IIIIIIIFIIILjava/lang/String;Ljava/lang/String;)V"); - jni_mid_updateAttributes2 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes2", "(IIIIIIIIIIIIIII)V"); + jni_mid_updateAttributes2 = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes2", "(IIIIIIIIIIIIIIII)V"); jni_mid_isAudioFormatSupported = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "isAudioFormatSupported", "(III)Z"); if(jni_mid_pushSound == NULL || @@ -495,6 +502,10 @@ JNIEXPORT jlong JNICALL FF_FUNC(createInstance0) #endif // Register all formats and codecs + sp_avcodec_register_all(); + if(HAS_FUNC(sp_avdevice_register_all)) { + sp_avdevice_register_all(); + } sp_av_register_all(); // Network too .. if(HAS_FUNC(sp_avformat_network_init)) { @@ -536,8 +547,47 @@ static uint64_t getDefaultAudioChannelLayout(int channelCount) { static void initPTSStats(PTSStats *ptsStats); static int64_t evalPTS(PTSStats *ptsStats, int64_t inPTS, int64_t inDTS); +static AVInputFormat* tryAVInputFormat(const char * name, int verbose) { + AVInputFormat* inFmt = sp_av_find_input_format(name); + if( verbose) { + if ( inFmt == NULL ) { + fprintf(stderr, "Warning: Could not find input format '%s'\n", name); + } else { + fprintf(stderr, "Info: Found input format '%s'\n", name); + } + } + return inFmt; +} +static const char * inFmtNames[] = { + "video4linux2", + "video4linux", + "vfwcap", + "dshow", + "mpg", + "yuv2", + "mjpeg", + "avi", + "wmv", + "libx264", + "h264", + "mpegts" +}; +static AVInputFormat* findAVInputFormat(int verbose) { + AVInputFormat* inFmt = NULL; + const char *inFmtName; + int i=0; + do { + inFmtName = inFmtNames[i++]; + if( NULL == inFmtName ) { + break; + } + inFmt = tryAVInputFormat(inFmtName, verbose); + } while ( NULL == inFmt ); + return inFmt; +} + JNIEXPORT void JNICALL FF_FUNC(setStream0) - (JNIEnv *env, jobject instance, jlong ptr, jstring jURL, jstring jInFmtStr, jint vid, jint aid, + (JNIEnv *env, jobject instance, jlong ptr, jstring jURL, jboolean jIsCameraInput, jint vid, jint aid, jint aMaxChannelCount, jint aPrefSampleRate) { int res, i; @@ -552,15 +602,8 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0) pAV->pFormatCtx = sp_avformat_alloc_context(); // Open video file - AVInputFormat *inFmt = NULL; - const char *inFmtStr = NULL != jInFmtStr ? 
(*env)->GetStringUTFChars(env, jInFmtStr, &iscopy) : NULL; - if( NULL != inFmtStr ) { - inFmt = sp_av_find_input_format(inFmtStr); - if( NULL == inFmt ) { - fprintf(stderr, "Warning: Could not find input format '%s'\n", inFmtStr); - } - (*env)->ReleaseStringChars(env, jInFmtStr, (const jchar *)inFmtStr); - } + AVInputFormat* inFmt = jIsCameraInput ? findAVInputFormat(pAV->verbose) : NULL; + const char *urlPath = (*env)->GetStringUTFChars(env, jURL, &iscopy); res = sp_avformat_open_input(&pAV->pFormatCtx, urlPath, inFmt, NULL); if(res != 0) { @@ -838,22 +881,26 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0) } pAV->frames_video = pAV->pVStream->nb_frames; - if( pAV->verbose ) { - fprintf(stderr, "V frame_size %d, frame_number %d, r_frame_rate %f %d/%d, avg_frame_rate %f %d/%d, nb_frames %d, \n", - pAV->pVCodecCtx->frame_size, pAV->pVCodecCtx->frame_number, - my_av_q2f(pAV->pVStream->r_frame_rate), pAV->pVStream->r_frame_rate.num, pAV->pVStream->r_frame_rate.den, - my_av_q2f(pAV->pVStream->avg_frame_rate), pAV->pVStream->avg_frame_rate.num, pAV->pVStream->avg_frame_rate.den, - pAV->pVStream->nb_frames); - } - // Allocate video frame // FIXME: Libav Binary compatibility! JAU01 + pAV->vWidth = pAV->pVCodecCtx->width; + pAV->vHeight = pAV->pVCodecCtx->height; pAV->vPixFmt = pAV->pVCodecCtx->pix_fmt; { AVPixFmtDescriptor pixDesc = sp_av_pix_fmt_descriptors[pAV->vPixFmt]; pAV->vBitsPerPixel = sp_av_get_bits_per_pixel(&pixDesc); pAV->vBufferPlanes = my_getPlaneCount(&pixDesc); } + + if( pAV->verbose ) { + fprintf(stderr, "V frame_size %d, frame_number %d, r_frame_rate %f %d/%d, avg_frame_rate %f %d/%d, nb_frames %d, size %dx%d, fmt 0x%X, bpp %d, planes %d\n", + pAV->pVCodecCtx->frame_size, pAV->pVCodecCtx->frame_number, + my_av_q2f(pAV->pVStream->r_frame_rate), pAV->pVStream->r_frame_rate.num, pAV->pVStream->r_frame_rate.den, + my_av_q2f(pAV->pVStream->avg_frame_rate), pAV->pVStream->avg_frame_rate.num, pAV->pVStream->avg_frame_rate.den, + pAV->pVStream->nb_frames, + pAV->vWidth, pAV->vHeight, pAV->vPixFmt, pAV->vBitsPerPixel, pAV->vBufferPlanes); + } + pAV->pVFrame=sp_avcodec_alloc_frame(); if( pAV->pVFrame == NULL ) { JoglCommon_throwNewRuntimeException(env, "Couldn't alloc video frame"); @@ -867,10 +914,20 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0) } else { pAV->vBytesPerPixelPerPlane = 1; } - for(i=0; i<3; i++) { - // FIXME: Libav Binary compatibility! JAU01 - pAV->vLinesize[i] = pAV->pVFrame->linesize[i]; - pAV->vTexWidth[i] = pAV->vLinesize[i] / pAV->vBytesPerPixelPerPlane ; + if( pAV->vBufferPlanes > 1 ) { + for(i=0; i<3; i++) { + // FIXME: Libav Binary compatibility! JAU01 + pAV->vLinesize[i] = pAV->pVFrame->linesize[i]; + pAV->vTexWidth[i] = pAV->vLinesize[i] / pAV->vBytesPerPixelPerPlane ; + } + } else { + pAV->vLinesize[0] = pAV->pVCodecCtx->width * pAV->vBytesPerPixelPerPlane; + if( pAV->vPixFmt == PIX_FMT_YUYV422 ) { + // Stuff 2x 16bpp (YUYV) into one RGBA pixel! 
+ pAV->vTexWidth[0] = pAV->pVCodecCtx->width / 2; + } else { + pAV->vTexWidth[0] = pAV->pVCodecCtx->width; + } } sp_avcodec_default_release_buffer(pAV->pVCodecCtx, pAV->pVFrame); } else { @@ -896,10 +953,10 @@ JNIEXPORT void JNICALL FF_FUNC(setGLFuncs0) } #if 0 -#define DBG_TEXSUBIMG2D_a(c,p,i) fprintf(stderr, "TexSubImage2D.%c offset %d / %d, size %d x %d, ", c, p->pVCodecCtx->width, p->pVCodecCtx->height/2, p->vTexWidth[i], p->pVCodecCtx->height/2) +#define DBG_TEXSUBIMG2D_a(c,p,d,i) fprintf(stderr, "TexSubImage2D.%c offset %d / %d, size %d x %d, ", c, p->pVCodecCtx->width, p->pVCodecCtx->height/d, p->vTexWidth[i], p->pVCodecCtx->height/d) #define DBG_TEXSUBIMG2D_b(p) fprintf(stderr, "err 0x%X\n", pAV->procAddrGLGetError()) #else -#define DBG_TEXSUBIMG2D_a(c,p,i) +#define DBG_TEXSUBIMG2D_a(c,p,d,i) #define DBG_TEXSUBIMG2D_b(p) #endif @@ -911,17 +968,22 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0) AVPacket packet; int frameDecoded; jint resPTS = INVALID_PTS; + uint8_t * pkt_odata; + int pkt_osize; + packet.data = NULL; // minimum + packet.size = 0; // requirement sp_av_init_packet(&packet); const int avRes = sp_av_read_frame(pAV->pFormatCtx, &packet); + pkt_odata = packet.data; + pkt_osize = packet.size; if( AVERROR_EOF == avRes || ( pAV->pFormatCtx->pb && pAV->pFormatCtx->pb->eof_reached ) ) { resPTS = END_OF_STREAM_PTS; } else if( 0 <= avRes ) { - /** if( pAV->verbose ) { fprintf(stderr, "P: ptr %p, size %d\n", packet.data, packet.size); - } */ + } if(packet.stream_index==pAV->aid) { // Decode audio frame if(NULL == pAV->pAFrames) { // no audio registered @@ -932,9 +994,6 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0) int flush_complete = 0; for ( frameCount=0; 0 < packet.size || 0 == frameCount; frameCount++ ) { int len1; - if (flush_complete) { - break; - } NIOBuffer_t * pNIOBufferCurrent = &pAV->pANIOBuffers[pAV->aFrameCurrent]; AVFrame* pAFrameCurrent = pAV->pAFrames[pAV->aFrameCurrent]; if( pAV->useRefCountedFrames ) { @@ -942,6 +1001,10 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0) pAV->aFrameCurrent = ( pAV->aFrameCurrent + 1 ) % pAV->aFrameCount ; } sp_avcodec_get_frame_defaults(pAFrameCurrent); + + if (flush_complete) { + break; + } if(HAS_FUNC(sp_avcodec_decode_audio4)) { len1 = sp_avcodec_decode_audio4(pAV->pACodecCtx, pAFrameCurrent, &frameDecoded, &packet); } else { @@ -1013,7 +1076,7 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0) nb_samples, pAV->aSampleFmtOut, 0 /* align */); - tmp_out = av_realloc(pAV->aResampleBuffer, out_size); + tmp_out = sp_av_realloc(pAV->aResampleBuffer, out_size); if (!tmp_out) { JoglCommon_throwNewRuntimeException(env, "Couldn't alloc resample buffer of size %d", out_size); return; @@ -1066,10 +1129,10 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0) int flush_complete = 0; for ( frameCount=0; 0 < packet.size || 0 == frameCount; frameCount++ ) { int len1; + sp_avcodec_get_frame_defaults(pAV->pVFrame); if (flush_complete) { break; } - sp_avcodec_get_frame_defaults(pAV->pVFrame); len1 = sp_avcodec_decode_video2(pAV->pVCodecCtx, pAV->pVFrame, &frameDecoded, &packet); if (len1 < 0) { // if error, we skip the frame @@ -1116,7 +1179,7 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0) // 1st plane or complete packed frame // FIXME: Libav Binary compatibility! 
JAU01 - DBG_TEXSUBIMG2D_a('Y',pAV,0); + DBG_TEXSUBIMG2D_a('Y',pAV,1,0); pAV->procAddrGLTexSubImage2D(texTarget, 0, 0, 0, pAV->vTexWidth[0], pAV->pVCodecCtx->height, @@ -1126,7 +1189,7 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0) if(pAV->vPixFmt == PIX_FMT_YUV420P) { // U plane // FIXME: Libav Binary compatibility! JAU01 - DBG_TEXSUBIMG2D_a('U',pAV,1); + DBG_TEXSUBIMG2D_a('U',pAV,2,1); pAV->procAddrGLTexSubImage2D(texTarget, 0, pAV->pVCodecCtx->width, 0, pAV->vTexWidth[1], pAV->pVCodecCtx->height/2, @@ -1134,7 +1197,7 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0) DBG_TEXSUBIMG2D_b(pAV); // V plane // FIXME: Libav Binary compatibility! JAU01 - DBG_TEXSUBIMG2D_a('V',pAV,2); + DBG_TEXSUBIMG2D_a('V',pAV,2,2); pAV->procAddrGLTexSubImage2D(texTarget, 0, pAV->pVCodecCtx->width, pAV->pVCodecCtx->height/2, pAV->vTexWidth[2], pAV->pVCodecCtx->height/2, @@ -1148,11 +1211,10 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0) } } } - - // Free the packet that was allocated by av_read_frame - // This code cause a double free and have been commented out. - // TODO: check what release the packets memory. - // sp_av_free_packet(&packet); + // restore orig pointer and size values, we may have moved along within packet + packet.data = pkt_odata; + packet.size = pkt_osize; + sp_av_free_packet(&packet); } return resPTS; } diff --git a/src/test/com/jogamp/opengl/test/android/MovieCubeActivityLauncher1a.java b/src/test/com/jogamp/opengl/test/android/MovieCubeActivityLauncher1a.java new file mode 100644 index 000000000..87e8d7ecc --- /dev/null +++ b/src/test/com/jogamp/opengl/test/android/MovieCubeActivityLauncher1a.java @@ -0,0 +1,87 @@ +/** + * Copyright 2012 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. 
+ */ +package com.jogamp.opengl.test.android; + +import java.util.Arrays; +import java.util.List; + +import com.jogamp.opengl.test.android.LauncherUtil.OrderedProperties; + +public class MovieCubeActivityLauncher1a extends LauncherUtil.BaseActivityLauncher { + + static String demo = "com.jogamp.opengl.test.android.MovieCubeActivity0"; + static String[] sys_pkgs = new String[] { "com.jogamp.common", "javax.media.opengl" }; + static String[] usr_pkgs = new String[] { "com.jogamp.opengl.test" }; + + @Override + public void init() { + final OrderedProperties props = getProperties(); + props.setProperty("jnlp.media0_url2", "camera://0"); + props.setProperty("jnlp.media0_url1", "http://download.blender.org/peach/bigbuckbunny_movies/BigBuckBunny_320x180.mp4"); + props.setProperty("jnlp.media0_url0", "file:///mnt/sdcard/Movies/BigBuckBunny_320x180.mp4"); + props.setProperty("jnlp.media1_url0", "http://archive.org/download/ElephantsDream/ed_1024_512kb.mp4"); + // props.setProperty("jogamp.debug.JNILibLoader", "true"); + // props.setProperty("jogamp.debug.NativeLibrary", "true"); + // props.setProperty("jogamp.debug.NativeLibrary.Lookup", "true"); + // props.setProperty("jogamp.debug.IOUtil", "true"); + // props.setProperty("jogamp.debug.Lock", "true"); + // props.setProperty("jogamp.debug.Lock.TraceLock", "true"); + // props.setProperty("nativewindow.debug", "all"); + // props.setProperty("nativewindow.debug.GraphicsConfiguration", "true"); + // props.setProperty("jogl.debug", "all"); + // props.setProperty("jogl.debug.GLProfile", "true"); + // props.setProperty("jogl.debug.GLDrawable", "true"); + // props.setProperty("jogl.debug.GLContext", "true"); + props.setProperty("jogl.debug.GLSLCode", "true"); + props.setProperty("jogl.debug.GLMediaPlayer", "true"); + // props.setProperty("jogl.debug.CapabilitiesChooser", "true"); + // props.setProperty("jogl.debug.GLSLState", "true"); + // props.setProperty("jogl.debug.DebugGL", "true"); + // props.setProperty("jogl.debug.TraceGL", "true"); + // props.setProperty("newt.debug", "all"); + // props.setProperty("newt.debug.Window", "true"); + // props.setProperty("newt.debug.Window.MouseEvent", "true"); + // props.setProperty("newt.debug.Window.KeyEvent", "true"); + // props.setProperty("jogamp.debug.IOUtil", "true"); + } + + @Override + public String getActivityName() { + return demo; + } + + @Override + public List<String> getSysPackages() { + return Arrays.asList(sys_pkgs); + } + + @Override + public List<String> getUsrPackages() { + return Arrays.asList(usr_pkgs); + } +} diff --git a/src/test/com/jogamp/opengl/test/android/MovieCubeActivityLauncher1b.java b/src/test/com/jogamp/opengl/test/android/MovieCubeActivityLauncher1b.java new file mode 100644 index 000000000..f7a896a90 --- /dev/null +++ b/src/test/com/jogamp/opengl/test/android/MovieCubeActivityLauncher1b.java @@ -0,0 +1,87 @@ +/** + * Copyright 2012 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. + */ +package com.jogamp.opengl.test.android; + +import java.util.Arrays; +import java.util.List; + +import com.jogamp.opengl.test.android.LauncherUtil.OrderedProperties; + +public class MovieCubeActivityLauncher1b extends LauncherUtil.BaseActivityLauncher { + + static String demo = "com.jogamp.opengl.test.android.MovieCubeActivity0"; + static String[] sys_pkgs = new String[] { "com.jogamp.common", "javax.media.opengl" }; + static String[] usr_pkgs = new String[] { "com.jogamp.opengl.test" }; + + @Override + public void init() { + final OrderedProperties props = getProperties(); + props.setProperty("jnlp.media0_url2", "camera://1"); + props.setProperty("jnlp.media0_url1", "http://download.blender.org/peach/bigbuckbunny_movies/BigBuckBunny_320x180.mp4"); + props.setProperty("jnlp.media0_url0", "file:///mnt/sdcard/Movies/BigBuckBunny_320x180.mp4"); + props.setProperty("jnlp.media1_url0", "http://archive.org/download/ElephantsDream/ed_1024_512kb.mp4"); + // props.setProperty("jogamp.debug.JNILibLoader", "true"); + // props.setProperty("jogamp.debug.NativeLibrary", "true"); + // props.setProperty("jogamp.debug.NativeLibrary.Lookup", "true"); + // props.setProperty("jogamp.debug.IOUtil", "true"); + // props.setProperty("jogamp.debug.Lock", "true"); + // props.setProperty("jogamp.debug.Lock.TraceLock", "true"); + // props.setProperty("nativewindow.debug", "all"); + // props.setProperty("nativewindow.debug.GraphicsConfiguration", "true"); + // props.setProperty("jogl.debug", "all"); + // props.setProperty("jogl.debug.GLProfile", "true"); + // props.setProperty("jogl.debug.GLDrawable", "true"); + // props.setProperty("jogl.debug.GLContext", "true"); + props.setProperty("jogl.debug.GLSLCode", "true"); + props.setProperty("jogl.debug.GLMediaPlayer", "true"); + // props.setProperty("jogl.debug.CapabilitiesChooser", "true"); + // props.setProperty("jogl.debug.GLSLState", "true"); + // props.setProperty("jogl.debug.DebugGL", "true"); + // props.setProperty("jogl.debug.TraceGL", "true"); + // props.setProperty("newt.debug", "all"); + // props.setProperty("newt.debug.Window", "true"); + // props.setProperty("newt.debug.Window.MouseEvent", "true"); + // props.setProperty("newt.debug.Window.KeyEvent", "true"); + // props.setProperty("jogamp.debug.IOUtil", "true"); + } + + @Override + public String getActivityName() { + return demo; + } + + @Override + public List<String> getSysPackages() { + return Arrays.asList(sys_pkgs); + } + + @Override + public List<String> getUsrPackages() { + return Arrays.asList(usr_pkgs); + } +} diff 
--git a/src/test/com/jogamp/opengl/test/android/MovieSimpleActivityLauncher00a.java b/src/test/com/jogamp/opengl/test/android/MovieSimpleActivityLauncher00c.java index e70e48ca3..2254b649a 100644 --- a/src/test/com/jogamp/opengl/test/android/MovieSimpleActivityLauncher00a.java +++ b/src/test/com/jogamp/opengl/test/android/MovieSimpleActivityLauncher00c.java @@ -32,34 +32,33 @@ import java.util.List; import com.jogamp.opengl.test.android.LauncherUtil.OrderedProperties; -public class MovieSimpleActivityLauncher00a extends LauncherUtil.BaseActivityLauncher { +public class MovieSimpleActivityLauncher00c extends LauncherUtil.BaseActivityLauncher { - static String demo = "com.jogamp.opengl.test.android.MovieSimpleActivity0"; + static String demo = "com.jogamp.opengl.test.android.MovieSimpleActivity1"; static String[] sys_pkgs = new String[] { "com.jogamp.common", "javax.media.opengl" }; static String[] usr_pkgs = new String[] { "com.jogamp.opengl.test" }; @Override public void init() { final OrderedProperties props = getProperties(); - props.setProperty("jnlp.mplayer.nozoom", "true"); + props.setProperty("jnlp.mplayer.nozoom", "false"); props.setProperty("jnlp.mplayer.hud", "false"); props.setProperty("jnlp.mplayer.hud.shared", "false"); - // props.setProperty("jnlp.media0_url2", "http://download.blender.org/peach/bigbuckbunny_movies/BigBuckBunny_640x360.m4v"); - props.setProperty("jnlp.media0_url2", "http://video.webmfiles.org/big-buck-bunny_trailer.webm"); - props.setProperty("jnlp.media0_url1", "http://download.blender.org/peach/bigbuckbunny_movies/BigBuckBunny_320x180.mp4"); + props.setProperty("jnlp.media0_url2", "camera://0"); + props.setProperty("jnlp.media0_url1", "http://video.webmfiles.org/big-buck-bunny_trailer.webm"); props.setProperty("jnlp.media0_url0", "file:///mnt/sdcard/Movies/BigBuckBunny_320x180.mp4"); - props.setProperty("jnlp.media1_url0", "http://archive.org/download/ElephantsDream/ed_1024_512kb.mp4"); // props.setProperty("jogamp.debug.JNILibLoader", "true"); // props.setProperty("jogamp.debug.NativeLibrary", "true"); // props.setProperty("jogamp.debug.NativeLibrary.Lookup", "true"); // props.setProperty("jogamp.debug.IOUtil", "true"); // props.setProperty("nativewindow.debug", "all"); - props.setProperty("nativewindow.debug.GraphicsConfiguration", "true"); + // props.setProperty("nativewindow.debug.GraphicsConfiguration", "true"); // props.setProperty("jogl.debug", "all"); // props.setProperty("jogl.debug.GLProfile", "true"); - props.setProperty("jogl.debug.GLDrawable", "true"); - props.setProperty("jogl.debug.GLContext", "true"); + // props.setProperty("jogl.debug.GLDrawable", "true"); + // props.setProperty("jogl.debug.GLContext", "true"); props.setProperty("jogl.debug.GLSLCode", "true"); + props.setProperty("jogl.debug.GLMediaPlayer", "true"); // props.setProperty("jogl.debug.CapabilitiesChooser", "true"); // props.setProperty("jogl.debug.GLSLState", "true"); // props.setProperty("jogl.debug.DebugGL", "true"); @@ -68,7 +67,7 @@ public class MovieSimpleActivityLauncher00a extends LauncherUtil.BaseActivityLau // props.setProperty("newt.debug.Window", "true"); // props.setProperty("newt.debug.Window.MouseEvent", "true"); // props.setProperty("newt.debug.Window.KeyEvent", "true"); - props.setProperty("jogamp.debug.IOUtil", "true"); + // props.setProperty("jogamp.debug.IOUtil", "true"); } @Override |