Diffstat (limited to 'src/jogl')
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java | 41
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/SubBitmapEvent.java | 9
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/SubEmptyEvent.java | 9
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/SubTextEvent.java | 243
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/SubTextEventLine.java | 142
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/av/SubtitleEvent.java | 31
-rw-r--r--  src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java | 8
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java | 219
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java | 7
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java | 83
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java | 56
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java | 14
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGStaticNatives.java | 2
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0400Natives.java | 9
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0500Natives.java | 9
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0600Natives.java | 9
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java | 7
-rw-r--r--  src/jogl/native/libav/ffmpeg_impl_template.c | 509
-rw-r--r--  src/jogl/native/libav/ffmpeg_static.c | 2
-rw-r--r--  src/jogl/native/libav/ffmpeg_tool.h | 41
20 files changed, 962 insertions, 488 deletions
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
index 2767ea7ef..4011bddcb 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java
@@ -40,6 +40,7 @@ import com.jogamp.common.av.AudioSink;
import com.jogamp.common.av.PTS;
import com.jogamp.common.av.TimeFrameI;
import com.jogamp.common.net.Uri;
+import com.jogamp.math.Vec4f;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureSequence;
@@ -286,7 +287,7 @@ public interface GLMediaPlayer extends TextureSequence {
}
@Override
public String toString() {
- return String.format("%02d: [%s .. %s] %s", id, PTS.millisToTimeStr(start), PTS.millisToTimeStr(end), title);
+ return String.format("%02d: [%s .. %s] %s", id, PTS.toTimeStr(start), PTS.toTimeStr(end), title);
}
}
@@ -326,17 +327,19 @@ public interface GLMediaPlayer extends TextureSequence {
/** Attribute change bits */
public static enum Bit {
/** State changed to {@link State#Initialized}. See <a href="#lifecycle">Lifecycle</a>.*/
- Init ( 1<<0 ),
+ Init ( 1<<0 ),
/** State changed to {@link State#Uninitialized}. See <a href="#lifecycle">Lifecycle</a>.*/
Uninit ( 1<<1 ),
/** State changed to {@link State#Playing}. See <a href="#lifecycle">Lifecycle</a>.*/
- Play ( 1<<2 ),
+ Play ( 1<<2 ),
/** State changed to {@link State#Paused}. See <a href="#lifecycle">Lifecycle</a>.*/
Pause ( 1<<3 ),
+ /** Time position has changed, e.g. via {@link GLMediaPlayer#seek(int)}.*/
+ Seek ( 1<<4 ),
/** End of stream reached. See <a href("#lifecycle">Lifecycle</a>.*/
- EOS ( 1<<4 ),
+ EOS ( 1<<5 ),
/** An error occurred, e.g. during off-thread initialization. See {@link StreamException} and <a href("#lifecycle">Lifecycle</a>. */
- Error ( 1<<5 ),
+ Error ( 1<<6 ),
/** Stream video id change. */
VID ( 1<<16 ),
@@ -352,8 +355,14 @@ public interface GLMediaPlayer extends TextureSequence {
BPS ( 1<<21 ),
/** Stream length change. */
Length ( 1<<22 ),
- /** Stream codec change. */
- Codec ( 1<<23 );
+ /** Audio, video or subtitle stream codec change. */
+ Codec ( 1<<23 ),
+ /** Audio stream codec change. */
+ ACodec ( 1<<24 ),
+ /** Video stream codec change. */
+ VCodec ( 1<<25 ),
+ /** Subtitle stream codec change. */
+ SCodec ( 1<<26 );
Bit(final int v) { value = v; }
public final int value;
@@ -833,18 +842,36 @@ public interface GLMediaPlayer extends TextureSequence {
/**
* <i>Warning:</i> Optional information, may not be supported by implementation.
+ * @return the {@link CodecID} of the video stream, if available
+ */
+ public CodecID getVideoCodecID();
+
+ /**
+ * <i>Warning:</i> Optional information, may not be supported by implementation.
* @return the codec of the video stream, if available
*/
public String getVideoCodec();
/**
* <i>Warning:</i> Optional information, may not be supported by implementation.
+ * @return the {@link CodecID} of the audio stream, if available
+ */
+ public CodecID getAudioCodecID();
+
+ /**
+ * <i>Warning:</i> Optional information, may not be supported by implementation.
* @return the codec of the audio stream, if available
*/
public String getAudioCodec();
/**
* <i>Warning:</i> Optional information, may not be supported by implementation.
+ * @return the {@link CodecID} of the subtitle stream, if available
+ */
+ public CodecID getSubtitleCodecID();
+
+ /**
+ * <i>Warning:</i> Optional information, may not be supported by implementation.
* @return the codec of the subtitle stream, if available
*/
public String getSubtitleCodec();
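
For context (not part of the patch), a minimal sketch of how a client could react to the new Seek and per-stream codec bits together with the new CodecID getters. It assumes the GLMediaEventListener callbacks, EventMask.isSet(Bit) and addEventListener(..) as declared elsewhere in JOGL, and an already created 'player' instance:

    // Sketch only: 'player' is an initialized GLMediaPlayer instance.
    player.addEventListener(new GLMediaEventListener() {
        @Override
        public void newFrameAvailable(final GLMediaPlayer mp, final TextureSequence.TextureFrame frame, final long when) { }
        @Override
        public void attributesChanged(final GLMediaPlayer mp, final GLMediaPlayer.EventMask eventMask, final long when) {
            if( eventMask.isSet(GLMediaPlayer.EventMask.Bit.Seek) ) {
                System.err.println("seeked, video PTS now "+mp.getVideoPTS()+" ms");
            }
            if( eventMask.isSet(GLMediaPlayer.EventMask.Bit.SCodec) ) {
                // new per-stream codec getters added by this change-set
                System.err.println("subtitle codec: "+mp.getSubtitleCodecID()+" / "+mp.getSubtitleCodec());
            }
        }
    });
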
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/SubBitmapEvent.java b/src/jogl/classes/com/jogamp/opengl/util/av/SubBitmapEvent.java
index 68f25d046..0032aeffc 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/SubBitmapEvent.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/SubBitmapEvent.java
@@ -61,20 +61,13 @@ public class SubBitmapEvent extends SubtitleEvent {
* @param owner {@link Texture} owner code-stub to release the texture
*/
public SubBitmapEvent(final CodecID codec, final Vec2i pos, final Vec2i dim, final Texture tex, final int pts_start, final int pts_end, final TextureOwner owner) {
- super(codec, pts_start, pts_end);
+ super(SubtitleEvent.Type.Bitmap, codec, pts_start, pts_end);
position = pos;
dimension = dim;
texture = tex;
this.owner = owner;
}
- @Override
- public final boolean isTextASS() { return false; }
- @Override
- public final boolean isBitmap() { return true; }
- @Override
- public final boolean isEmpty() { return false; }
-
/**
* {@inheritDoc}
* <p>
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/SubEmptyEvent.java b/src/jogl/classes/com/jogamp/opengl/util/av/SubEmptyEvent.java
index c49558c57..d6796dc4d 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/SubEmptyEvent.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/SubEmptyEvent.java
@@ -34,17 +34,10 @@ public class SubEmptyEvent extends SubtitleEvent {
* Empty event ctor
*/
public SubEmptyEvent(final int pts_start, final int pts_end) {
- super(CodecID.NONE, pts_start, pts_end);
+ super(SubtitleEvent.Type.Empty, CodecID.NONE, pts_start, pts_end);
}
@Override
- public final boolean isTextASS() { return false; }
- @Override
- public final boolean isBitmap() { return false; }
- @Override
- public final boolean isEmpty() { return true; }
-
- @Override
public void release() {} // nothing to be released back to the owner
@Override
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/SubTextEvent.java b/src/jogl/classes/com/jogamp/opengl/util/av/SubTextEvent.java
new file mode 100644
index 000000000..d699d9300
--- /dev/null
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/SubTextEvent.java
@@ -0,0 +1,243 @@
+/**
+ * Copyright 2024 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package com.jogamp.opengl.util.av;
+
+import java.time.format.DateTimeParseException;
+
+import com.jogamp.common.av.PTS;
+
+/**
+ * Text Event Line including ASS/SAA of {@link SubtitleEvent}
+ * <p>
+ * See http://www.tcax.org/docs/ass-specs.htm
+ * </p>
+ */
+public class SubTextEvent extends SubtitleEvent {
+ /** Text formatting */
+ public enum TextFormat {
+ /** Multiple ASS formats may be passed, see {@link ASSType}. */
+ ASS,
+ /** Just plain text */
+ TEXT,
+ };
+ /** ASS Formatting Type */
+ public enum ASSType {
+ /**
+ * ASS dialogue-line output w/ start and end (Given by FFmpeg 4.*)
+ * <pre>
+ 0 1 2 3 4 5 6 7 8 9
+ Marked, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
+
+ 'Dialogue: 0,0:02:02.15,0:02:02.16,Default,,0,0,0,,trying to force him to travel to that'
+ * </pre>
+ */
+ DIALOGUE,
+ /**
+ * FFMpeg ASS event-line output w/o start, end (Given by FFmpeg 5.*, 6.*, ..)
+ * <pre>
+ 0 1 2 3 4 5 6 7 8
+ Seq, Layer, Style, Name, MarginL, MarginR, MarginV, Effect, TEXT
+ * </pre>
+ */
+ EVENT,
+ /** Just plain text */
+ TEXT
+ }
+ /** {@link TextFormat} of this text subtitle event. */
+ public final TextFormat textFormat;
+ /** {@link ASSType} sub-type */
+ public final ASSType assType;
+ /** Start time in milliseconds, or -1. */
+ public final int start;
+ /** End time in milliseconds, or -1. */
+ public final int end;
+ public final String style;
+
+ public final int seqnr;
+ public final int layer;
+
+ public final String name;
+ public final String effect;
+ /** Actual subtitle text */
+ public final String text;
+ /** Number of lines of {@link #text}, i.e. occurrence of {@code \n} + 1. */
+ public final int lines;
+
+ private static boolean DEBUG = false;
+
+ /**
+ * ASS/SAA Event Line ctor
+ * @param codec the {@link CodecID}
+ * @param fmt input format of {@code ass}, currently only {@link SubTextEvent.TextFormat#ASS} and {@link SubTextEvent.TextFormat#TEXT} is supported
+ * @param ass ASS/SAA compatible event line according to {@link ASSType}
+ * @param pts_start pts start in ms, provided for {@link SubTextEvent.TextFormat#ASS} and {@link SubTextEvent.TextFormat#TEXT}
+ * @param pts_end pts end in ms, provided for {@link SubTextEvent.TextFormat#ASS} and {@link SubTextEvent.TextFormat#TEXT}
+ */
+ public SubTextEvent(final CodecID codec, final TextFormat fmt, final String ass, final int pts_start, final int pts_end) {
+ super(SubtitleEvent.Type.Text, codec, pts_start, pts_end);
+ this.textFormat = fmt;
+ ASSType assType = ASSType.TEXT;
+ int start = -1;
+ int end = -1;
+ int seqnr = 0;
+ int layer = 0;
+ String style = "Default";
+ String name = "";
+ String effect = "";
+ String text = "";
+ boolean done = false;
+ if( TextFormat.ASS == fmt ) {
+ final int len = null != ass ? ass.length() : 0;
+ {
+ // ASSType.DIALOGUE
+ int part = 0;
+ for(int i=0; 10 > part && len > i; ) {
+ if( 9 == part ) {
+ text = ass.substring(i);
+ done = true;
+ assType = ASSType.DIALOGUE;
+ } else {
+ final int j = ass.indexOf(',', i);
+ if( 0 > j ) {
+ break;
+ }
+ final String v = ass.substring(i, j);
+ try {
+ switch(part) {
+ case 1:
+ start = PTS.toMillis(v, true);
+ break;
+ case 2:
+ end = PTS.toMillis(v, true);
+ break;
+ case 3:
+ style = v;
+ break;
+ case 4:
+ name = v;
+ break;
+ case 8:
+ effect = v;
+ break;
+ }
+ } catch(final DateTimeParseException pe) {
+ if( DEBUG ) {
+ System.err.println("ASS.DIALG parsing error of part "+part+" '"+v+"' of '"+ass+"'");
+ }
+ break;
+ }
+ i = j + 1;
+ }
+ ++part;
+ }
+ }
+ if( !done ) {
+ // ASSType.EVENT
+ int part = 0;
+ for(int i=0; 9 > part && len > i; ) {
+ if( 8 == part ) {
+ text = ass.substring(i);
+ done = true;
+ assType = ASSType.EVENT;
+ } else {
+ final int j = ass.indexOf(',', i);
+ if( 0 > j ) {
+ break;
+ }
+ final String v = ass.substring(i, j);
+ try {
+ switch(part) {
+ case 0:
+ seqnr = Integer.valueOf(v);
+ break;
+ case 1:
+ layer = Integer.valueOf(v);
+ break;
+ case 2:
+ style = v;
+ break;
+ case 3:
+ name = v;
+ break;
+ case 7:
+ effect = v;
+ break;
+ }
+ } catch(final NumberFormatException nfe) {
+ if( DEBUG ) {
+ System.err.println("ASS.EVENT parsing error of part "+part+" '"+v+"' of '"+ass+"'");
+ }
+ break;
+ }
+ i = j + 1;
+ }
+ ++part;
+ }
+ }
+ }
+ if( !done && TextFormat.TEXT == fmt ) {
+ text = ass;
+ done = true;
+ assType = ASSType.TEXT;
+ }
+ this.assType = assType;
+ this.start = start;
+ this.end = end;
+ this.seqnr = seqnr;
+ this.layer = layer;
+ this.style = style;
+ this.name = name;
+ this.effect = effect;
+ this.text = text.replace("\\N", "\n");
+ {
+ final int len = this.text.length();
+ int lc = 1;
+ for(int i=0; len > i; ) {
+ final int j = this.text.indexOf("\n", i);
+ if( 0 > j ) {
+ break;
+ }
+ ++lc;
+ i = j + 1;
+ }
+ this.lines = lc;
+ }
+ }
+
+ @Override
+ public void release() {} // nothing to be released back to the owner
+
+ @Override
+ public String toString() {
+ final String start_s = 0 <= start ? PTS.toTimeStr(start, true) : "undef";
+ final String end_s = 0 <= end ? PTS.toTimeStr(end, true) : "undef";
+ final String fms_s = TextFormat.ASS == textFormat ? "ASS("+assType+")" : textFormat.toString();
+ return getStartString()+", "+fms_s+", #"+seqnr+", l_"+layer+
+ ", ["+start_s+".."+end_s+"], style "+style+", name '"+name+"', effect '"+effect+"': '"+text+"' ("+lines+")]";
+ }
+}
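
As a usage note (not part of the patch), the DIALOGUE example from the javadoc above can be fed straight to the new constructor. CodecID.ASS is assumed here and the pts arguments are placeholders:

    // Sketch only: CodecID.ASS is an assumed constant - pass whatever CodecID the demuxer reports.
    final String dialogue = "Dialogue: 0,0:02:02.15,0:02:02.16,Default,,0,0,0,,trying to force him to travel to that";
    final SubTextEvent ev = new SubTextEvent(CodecID.ASS, SubTextEvent.TextFormat.ASS,
                                             dialogue, /* pts_start */ 0, /* pts_end */ 5000);
    // ev.assType == ASSType.DIALOGUE, ev.start/ev.end are parsed from the embedded timestamps,
    // ev.text == "trying to force him to travel to that", ev.lines == 1
    System.err.println(ev);
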
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/SubTextEventLine.java b/src/jogl/classes/com/jogamp/opengl/util/av/SubTextEventLine.java
deleted file mode 100644
index c867dea9c..000000000
--- a/src/jogl/classes/com/jogamp/opengl/util/av/SubTextEventLine.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/**
- * Copyright 2024 JogAmp Community. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are
- * permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice, this list of
- * conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice, this list
- * of conditions and the following disclaimer in the documentation and/or other materials
- * provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
- * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
- * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
- * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
- * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *
- * The views and conclusions contained in the software and documentation are those of the
- * authors and should not be interpreted as representing official policies, either expressed
- * or implied, of JogAmp Community.
- */
-package com.jogamp.opengl.util.av;
-
-/**
- * Text Event Line including ASS/SAA of {@link SubtitleEvent}
- * <p>
- * See http://www.tcax.org/docs/ass-specs.htm
- * </p>
- */
-public class SubTextEventLine extends SubtitleEvent {
- public enum Format {
- /** Denoting {@link SubASSEventLine} using FFMpeg output w/o start, end:
- * <pre>
- 0 1 2 3 4 5 6 7 8
- Seq, Layer, Style, Name, MarginL, MarginR, MarginV, Effect, TEXT
- * </pre>
- */
- ASS_FFMPEG,
- /** Denoting {@link SubASSEventLine}, just the plain text part */
- TEXT,
- };
- /** {@link Format} of this text subtitle event. */
- public final Format format;
- public final int seqnr;
- public final int layer;
- public final String style;
- public final String name;
- /** Actual subtitle text */
- public final String text;
- /** Number of lines of {@link #text}, i.e. occurrence of {@code \n} + 1. */
- public final int lines;
-
- /**
- * ASS/SAA Event Line ctor
- * @param codec the {@link CodecID}
- * @param fmt input format of {@code ass}, currently only {@link SubTextEventLine.Format#ASS_FFMPEG} and {@link SubTextEventLine.Format#TEXT} is supported
- * @param ass ASS/SAA compatible event line according to {@code fmt}
- * @param pts_start pts start in ms, provided for {@link SubTextEventLine.Format#ASS_FFMPEG} and {@link SubTextEventLine.Format#TEXT}
- * @param pts_end pts end in ms, provided for {@link SubTextEventLine.Format#ASS_FFMPEG} and {@link SubTextEventLine.Format#TEXT}
- */
- public SubTextEventLine(final CodecID codec, final Format fmt, final String ass, final int pts_start, final int pts_end) {
- super(codec, pts_start, pts_end);
- this.format = fmt;
- int seqnr = 0;
- int layer = 0;
- String style = "Default";
- String name = "";
- String text = "";
- if( Format.ASS_FFMPEG == fmt ) {
- final int len = null != ass ? ass.length() : 0;
- int part = 0;
- for(int i=0; 9 > part && len > i; ) {
- if( 8 == part ) {
- text = ass.substring(i);
- } else {
- final int j = ass.indexOf(',', i);
- if( 0 > j ) {
- break;
- }
- final String v = ass.substring(i, j);
- switch(part) {
- case 0:
- seqnr = Integer.valueOf(v);
- break;
- case 1:
- layer = Integer.valueOf(v);
- break;
- case 2:
- style = v;
- break;
- case 3:
- name = v;
- break;
- }
- i = j + 1;
- }
- ++part;
- }
- } else if( Format.TEXT == fmt ) {
- text = ass;
- }
- this.seqnr = seqnr;
- this.layer = layer;
- this.style = style;
- this.name = name;
- this.text = text.replace("\\N", "\n");
- {
- final int len = this.text.length();
- int lc = 1;
- for(int i=0; len > i; ) {
- final int j = this.text.indexOf("\n", i);
- if( 0 > j ) {
- break;
- }
- ++lc;
- i = j + 1;
- }
- this.lines = lc;
- }
- }
-
- @Override
- public final boolean isTextASS() { return true; }
- @Override
- public final boolean isBitmap() { return false; }
- @Override
- public final boolean isEmpty() { return false; }
-
- @Override
- public void release() {} // nothing to be released back to the owner
-
- @Override
- public String toString() {
- return getStartString()+", "+format+", #"+seqnr+", l_"+layer+", style "+style+", name '"+name+"': '"+text+"' ("+lines+")]";
- }
-}
diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/SubtitleEvent.java b/src/jogl/classes/com/jogamp/opengl/util/av/SubtitleEvent.java
index f24246a70..e75fff3f8 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/av/SubtitleEvent.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/av/SubtitleEvent.java
@@ -1,5 +1,7 @@
package com.jogamp.opengl.util.av;
+import com.jogamp.common.av.PTS;
+
/**
* Generic subtitle event
* <p>
@@ -8,12 +10,23 @@ package com.jogamp.opengl.util.av;
* </p>
*/
public abstract class SubtitleEvent {
+ /** {@link SubtitleEvent} Implementation Type */
+ public enum Type {
+ /** {@link SubTextEvent} */
+ Text,
+ /** {@link SubBitmapEvent} */
+ Bitmap,
+ /** {@link SubEmptyEvent} */
+ Empty
+ };
+ /** Implementation {@link Type} of this instance. */
+ public final Type type;
/** {@link CodecID} of this subtitle event. */
public final CodecID codec;
- /** PTS start time to start showing this subtitle event. */
+ /** PTS start time in milliseconds to start showing this subtitle event. */
public final int pts_start;
/**
- * PTS start time to end showing this subtitle event.
+ * PTS end time in milliseconds to end showing this subtitle event.
* <p>
* {@link SubBitmapEvent} often (e.g. {@link CodecID#HDMV_PGS}) have an infinite end-time, i.e. ({@link Integer#MAX_VALUE},
* and shall be overwritten by the next one or {@link SubEmptyEvent}.
@@ -22,7 +35,8 @@ public abstract class SubtitleEvent {
*/
public final int pts_end;
- public SubtitleEvent(final CodecID codec, final int pts_start, final int pts_end) {
+ public SubtitleEvent(final Type type, final CodecID codec, final int pts_start, final int pts_end) {
+ this.type = type;
this.codec = codec;
this.pts_start = pts_start;
this.pts_end = pts_end;
@@ -36,16 +50,11 @@ public abstract class SubtitleEvent {
/** See {@link #pts_end}. */
public final boolean isEndDefined() { return pts_end < Integer.MAX_VALUE; }
- /** Returns {@code true} if Text/ASS/SAA subtitle type, o.e. {@link SubTextEvent}. */
- public abstract boolean isTextASS();
- /** Returns {@code true} if bitmap subtitle type, o.e. {@link SubBitmapEvent}. */
- public abstract boolean isBitmap();
- /** Returns {@code true} if empty subtitle type, o.e. {@link SubEmptyEvent}. */
- public abstract boolean isEmpty();
-
public final String getStartString() {
final boolean ied = isEndDefined();
- return "Sub["+codec+", ["+pts_start+".."+(ied?pts_end:"undef")+"] "+(ied?getDuration():"undef")+" ms";
+ final String pts_start_s = 0 <= pts_start ? PTS.toTimeStr(pts_start, true) : "undef";
+ final String pts_end_s = 0 <= pts_end && ied ? PTS.toTimeStr(pts_end, true) : "undef";
+ return "Sub[codec "+codec+", type "+type+", ["+pts_start_s+".."+pts_end_s+"] "+(ied?getDuration():"undef")+" ms";
}
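
A consumer-side sketch (not part of the patch) of dispatching on the new type field, which replaces the removed isTextASS()/isBitmap()/isEmpty() queries; drawText(..), drawBitmap(..) and clearSubtitle() are hypothetical renderer hooks:

    // Sketch only, e.g. inside a SubtitleEventListener implementation.
    public void run(final SubtitleEvent e) {
        switch( e.type ) {
            case Text: {
                final SubTextEvent te = (SubTextEvent)e;
                drawText(te.text, te.lines);                        // hypothetical
                break;
            }
            case Bitmap: {
                final SubBitmapEvent be = (SubBitmapEvent)e;
                drawBitmap(be.texture, be.position, be.dimension);  // hypothetical
                break;
            }
            case Empty:
            default:
                clearSubtitle();                                    // hypothetical
                break;
        }
        // Later, once the event is no longer displayed, call e.release() so a
        // SubBitmapEvent can hand its Texture back to the owning GLMediaPlayer.
    }
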
diff --git a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
index c4d1ee78f..0924bb67e 100644
--- a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
+++ b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java
@@ -341,7 +341,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
r_aids, r_alangs, r_aid,
new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE,
mp.getVideoWidth(), mp.getVideoHeight(), 0, 0, 0, 0f, 0, 0, mp.getDuration(),
- icodec, icodec, null);
+ icodec, icodec, null, -1, -1, -1);
/**
mp.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
@@ -379,7 +379,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE,
new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE,
size.width, size.height, 0, 0, 0, fpsRange[1]/1000f, 0, 0, 0,
- icodec, icodec, null);
+ icodec, icodec, null, -1, -1, -1);
}
}
private static String camSz2Str(final Camera.Size csize) {
@@ -406,10 +406,10 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl {
}
@Override
- protected final int getNextTextureImpl(final GL gl, final TextureFrame nextFrame) throws InterruptedException {
+ protected final int getNextTextureImpl(final GL gl, final TextureFrame vFrame, Texture sTex, boolean[] sTexUsed) throws InterruptedException {
int pts = TimeFrameI.INVALID_PTS;
if(null != mp || null != cam) {
- final SurfaceTextureFrame sTexFrame = null != nextFrame ? (SurfaceTextureFrame) nextFrame : singleSTexFrame;
+ final SurfaceTextureFrame sTexFrame = null != vFrame ? (SurfaceTextureFrame) vFrame : singleSTexFrame;
final SurfaceTexture surfTex = sTexFrame.surfaceTex;
if( !sTexFrameAttached ) {
sTexFrameAttached = true;
diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
index 219cfb3e5..a52efe2ac 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
@@ -64,13 +64,15 @@ import com.jogamp.common.util.Ringbuffer;
import com.jogamp.common.util.TSPrinter;
import com.jogamp.common.util.WorkerThread;
import com.jogamp.math.FloatUtil;
+import com.jogamp.math.Vec2i;
+import com.jogamp.math.Vec4f;
import com.jogamp.opengl.GLExtensions;
import com.jogamp.opengl.util.av.SubtitleEventListener;
+import com.jogamp.opengl.util.av.CodecID;
import com.jogamp.opengl.util.av.GLMediaPlayer;
-import com.jogamp.opengl.util.av.SubASSEventLine;
+import com.jogamp.opengl.util.av.SubTextEvent;
import com.jogamp.opengl.util.av.SubEmptyEvent;
-import com.jogamp.opengl.util.av.SubTextureEvent;
-import com.jogamp.opengl.util.av.SubtitleEvent;
+import com.jogamp.opengl.util.av.SubBitmapEvent;
import com.jogamp.opengl.util.glsl.ShaderCode;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureData;
@@ -178,6 +180,12 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
/** In ms. Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private int duration = 0;
/** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
+ private CodecID acodecID = CodecID.NONE;
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
+ private CodecID vcodecID = CodecID.NONE;
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
+ private CodecID scodecID = CodecID.NONE;
+ /** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private String acodec = unknown;
/** Shall be set by the {@link #initStreamImpl(int, int, int)} method implementation. */
private String vcodec = unknown;
@@ -238,6 +246,11 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
private Ringbuffer<TextureFrame> videoFramesFree = null;
private Ringbuffer<TextureFrame> videoFramesDecoded = null;
private volatile TextureFrame lastFrame = null;
+ private Texture[] subTexOrig = null;
+ private Ringbuffer<Texture> subTexFree = null;
+ private static final int SUB_TEX_IMAGES_MIN = TEXTURE_COUNT_MIN + 1;
+ private static final boolean subDEBUG = true;
+
/**
* @see #isGLOriented()
*/
@@ -387,12 +400,12 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
public final PTS getPTS() { return av_scr_cpy; }
@Override
- public final int getVideoPTS() { return video_pts_last.get(Clock.currentMillis()); }
+ public final int getVideoPTS() { return video_pts_last.getCurrent(); }
@Override
public final int getAudioPTS() {
if( State.Uninitialized != state && null != audioSink ) {
- return audioSink.getPTS().get(Clock.currentMillis());
+ return audioSink.getPTS().getCurrent();
}
return 0;
}
@@ -746,9 +759,9 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
/**
* Implementation shall set the following set of data here
- * @param sid TODO
* @see #vid
* @see #aid
+ * @see #sid
* @see #width
* @see #height
* @see #fps
@@ -821,11 +834,20 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
videoFramesDecoded = new LFRingbuffer<TextureFrame>(TextureFrame[].class, textureCount);
lastFrame = videoFramesFree.getBlocking();
}
+ if( STREAM_ID_NONE != sid ) {
+ subTexOrig = createSubTextures(gl, Math.max(SUB_TEX_IMAGES_MIN, textureCount)); // minimum 2 textures
+ subTexFree = new LFRingbuffer<Texture>(subTexOrig);
+ } else {
+ subTexOrig = null;
+ subTexFree = null;
+ }
} else {
videoFramesOrig = null;
videoFramesFree = null;
videoFramesDecoded = null;
lastFrame = null;
+ subTexOrig = null;
+ subTexFree = null;
}
if( null == streamWorker &&
( TEXTURE_COUNT_MIN < textureCount || STREAM_ID_NONE == vid ) ) // Enable StreamWorker for 'audio only' as well (Bug 918).
@@ -848,6 +870,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
videoFramesFree = null;
videoFramesDecoded = null;
lastFrame = null;
+ subTexOrig = null;
+ subTexFree = null;
} else {
// Using a dummy test frame
width = TestTexture.singleton.getWidth();
@@ -865,6 +889,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
videoFramesDecoded = new LFRingbuffer<TextureFrame>(TextureFrame[].class, textureCount);
lastFrame = videoFramesFree.getBlocking( );
}
+ subTexOrig = null;
+ subTexFree = null;
// changeState(0, State.Paused);
}
} catch (final Throwable t) {
@@ -924,6 +950,20 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
return texFrames;
}
+ protected Texture[] createSubTextures(final GL gl, final int count) {
+ final int[] texNames = new int[count];
+ gl.glGenTextures(count, texNames, 0);
+ final int err = gl.glGetError();
+ if( GL.GL_NO_ERROR != err ) {
+ throw new RuntimeException("TextureNames creation failed (num: "+count+"): err "+toHexString(err));
+ }
+ final Texture[] textures = new Texture[count];
+ for(int i=0; i<count; i++) {
+ textures[i] = new Texture(texNames[i], true /* ownsTextureID */,
+ textureTarget, 1, 1, 1, 1, true);
+ }
+ return textures;
+ }
private static class TestTexture {
private static final TextureData singleton;
@@ -968,7 +1008,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE,
new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE,
TestTexture.singleton.getWidth(), TestTexture.singleton.getHeight(), 0, 0, 0, _fps, _totalFrames, 0, _duration,
- "png-static", null, null);
+ "png-static", null, null, CodecID.toFFmpeg(CodecID.PNG), -1, -1);
}
protected abstract TextureFrame createTexImage(GL gl, int texName);
@@ -1063,11 +1103,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
return lastFrame;
}
- private final void removeAllTextureFrames(final GL gl) {
- final TextureFrame[] texFrames = videoFramesOrig;
- videoFramesOrig = null;
- videoFramesFree = null;
- videoFramesDecoded = null;
+ private final void destroyTexFrames(final GL gl, final TextureFrame[] texFrames) {
if( null != texFrames ) {
for(int i=0; i<texFrames.length; i++) {
final TextureFrame frame = texFrames[i];
@@ -1083,6 +1119,36 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
}
}
+ private final void destroyTextures(final GL gl, final Texture[] textures) {
+ if( null != textures ) {
+ for(int i=0; i<textures.length; i++) {
+ final Texture tex = textures[i];
+ if(null != tex) {
+ if( null != gl ) {
+ tex.destroy(gl);
+ }
+ textures[i] = null;
+ }
+ if( DEBUG ) {
+ logout.println(Thread.currentThread().getName()+"> Clear Texture["+i+"]: "+tex+" -> null");
+ }
+ }
+ }
+ }
+ private final void removeAllTextureFrames(final GL gl) {
+ destroyTexFrames(gl, videoFramesOrig);
+ videoFramesOrig = null;
+ videoFramesFree = null;
+ videoFramesDecoded = null;
+ lastFrame = null;
+ cachedFrame = null;
+ if( subDEBUG ) {
+ System.err.println("GLMediaPlayer: removeAllTextureFrames: subTexFree "+subTexFree);
+ }
+ destroyTextures(gl, subTexOrig); // can crash, if event obj w/ texture-copy still in use
+ subTexOrig = null;
+ subTexFree = null;
+ }
private TextureFrame cachedFrame = null;
private long lastMillis = 0;
@@ -1357,35 +1423,47 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
* Audio frames shall be ignored, if {@link #getAID()} is {@link #STREAM_ID_NONE}.
* </p>
* <p>
+ * Subtitle frames shall be ignored, if {@link #getSID()} is {@link #STREAM_ID_NONE}.
+ * </p>
+ * <p>
* Method may be invoked on the <a href="#streamworker"><i>StreamWorker</i> decoding thread</a>.
* </p>
* <p>
* Implementation shall care of OpenGL synchronization as required, e.g. glFinish()/glFlush()!
* </p>
* @param gl valid and current GL instance, shall be <code>null</code> for audio only.
- * @param nextFrame the {@link TextureFrame} to store the video PTS and texture data,
- * shall be <code>null</code> for audio only.
+ * @param vFrame next video {@link TextureFrame} to store the video PTS and texture data,
+ * shall be <code>null</code> for audio only.
+ * @param sTex {@link Texture} instance as bitmap subtitle target element.
+ * May be {@code null} for no desired bitmap subtitle.
+ * @param sTexUsed Result value. If the {@link Texture} {@code sTex} is used and {@link #pushSubtitleTex(Object, int, int, int, int, int, int, int, int, int, int)},
+ * {@code true} must be written into {@code sTexUsed}
* @return the last processed video PTS value, maybe {@link TimeFrameI#INVALID_PTS} if video frame is invalid or n/a.
* Will be {@link TimeFrameI#END_OF_STREAM_PTS} if end of stream reached.
* @throws InterruptedException if waiting for next frame fails
*/
- protected abstract int getNextTextureImpl(GL gl, TextureFrame nextFrame) throws InterruptedException;
+ protected abstract int getNextTextureImpl(GL gl, TextureFrame vFrame, Texture sTex, boolean[] sTexUsed) throws InterruptedException;
- protected final int getNextSingleThreaded(final GL gl, final TextureFrame nextFrame, final boolean[] gotVFrame) throws InterruptedException {
+ protected final int getNextSingleThreaded(final GL gl, final TextureFrame nextVFrame, final boolean[] gotVFrame) throws InterruptedException {
final int pts;
if( STREAM_ID_NONE != vid ) {
preNextTextureImpl(gl);
- pts = getNextTextureImpl(gl, nextFrame);
+ final boolean[] sTexUsed = { false };
+ final Texture subTex = ( null != subTexFree && STREAM_ID_NONE != sid ) ? subTexFree.get() : null;
+ pts = getNextTextureImpl(gl, nextVFrame, subTex, sTexUsed);
postNextTextureImpl(gl);
+ if( null != subTex && !sTexUsed[0] ) {
+ subTexFree.putBlocking(subTex); // return unused
+ }
if( TimeFrameI.INVALID_PTS != pts ) {
- newFrameAvailable(nextFrame, Clock.currentMillis());
+ newFrameAvailable(nextVFrame, Clock.currentMillis());
gotVFrame[0] = true;
} else {
gotVFrame[0] = false;
}
} else {
// audio only
- pts = getNextTextureImpl(null, null);
+ pts = getNextTextureImpl(null, null, null, null);
gotVFrame[0] = false;
}
return pts;
@@ -1603,6 +1681,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
private final WorkerThread.Callback action = (final WorkerThread self) -> {
final GL gl;
TextureFrame nextFrame = null;
+ final boolean[] subTexUsed = { false };
+ Texture subTex = null;
try {
if( STREAM_ID_NONE != vid ) {
nextFrame = videoFramesFree.getBlocking();
@@ -1611,7 +1691,17 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
} else {
gl = null;
}
- final int vPTS = getNextTextureImpl(gl, nextFrame);
+ if( null != gl && STREAM_ID_NONE != sid && null != subTexFree ) {
+ subTex = subTexFree.getBlocking();
+ }
+ final int vPTS = getNextTextureImpl(gl, nextFrame, subTex, subTexUsed);
+ if( null != subTex ) {
+ if( !subTexUsed[0] ) {
+ subTexFree.putBlocking(subTex);// return unused
+ } else if( subDEBUG ) {
+ System.err.println("GLMediaPlayer: Consumed SubTex: sid "+sid+", free "+subTexFree+", subTex "+subTex);
+ }
+ }
boolean audioEOS = false;
if( TimeFrameI.INVALID_PTS != vPTS ) {
if( null != nextFrame ) {
@@ -1652,7 +1742,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
} finally {
if( null != nextFrame ) { // put back
- videoFramesFree.put(nextFrame);
+ videoFramesFree.putBlocking(nextFrame);
}
}
};
@@ -1688,19 +1778,28 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
protected final void pushSubtitleText(final String text, final int start_display_pts, final int end_display_pts) {
if( null != subEventListener ) {
- subEventListener.run( new SubASSEventLine(SubtitleEvent.Format.ASS_TEXT, text, start_display_pts, end_display_pts) );
+ subEventListener.run( new SubTextEvent(this.scodecID, SubTextEvent.TextFormat.TEXT, text, start_display_pts, end_display_pts) );
}
}
protected final void pushSubtitleASS(final String ass, final int start_display_pts, final int end_display_pts) {
if( null != subEventListener ) {
- subEventListener.run( new SubASSEventLine(SubtitleEvent.Format.ASS_FFMPEG, ass, start_display_pts, end_display_pts) );
+ subEventListener.run( new SubTextEvent(this.scodecID, SubTextEvent.TextFormat.ASS, ass, start_display_pts, end_display_pts) );
}
}
- private final SubTextureEvent.TextureOwner subTexRelease = new SubTextureEvent.TextureOwner() {
+ /** {@link GLMediaPlayerImpl#pushSubtitleTex(Object, int, int, int, int, int, int, int, int, int)} */
+ private final SubBitmapEvent.TextureOwner subTexRelease = new SubBitmapEvent.TextureOwner() {
@Override
- public void release(final Texture tex) {
- if( null != subTexFree && null != tex ) { // put back
- subTexFree.put(tex);
+ public void release(final Texture subTex) {
+ if( null != subTexFree && null != subTex ) {
+ // return unused
+ try {
+ subTexFree.putBlocking(subTex);
+ if( subDEBUG ) {
+ System.err.println("GLMediaPlayer: Released SubTex: sid "+sid+", free "+subTexFree+", subTex "+subTex);
+ }
+ } catch (final InterruptedException e) {
+ throw new InternalError("GLMediaPlayer.SubTexRelease: Release failed, all full: sid "+sid+", free "+subTexFree+", subTex "+subTex+", "+GLMediaPlayerImpl.this, e);
+ }
}
}
@@ -1709,15 +1808,15 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
final int x, final int y, final int width, final int height,
final int start_display_pts, final int end_display_pts)
{
+ final Texture subTex = (Texture)texObj;
+ if( null != subTex ) {
+ subTex.set(texWidth, texHeight, width, height);
+ }
if( null != subEventListener ) {
- final Texture tex = (Texture)texObj;
- if( null != tex ) {
- tex.set(texWidth, texHeight, width, height);
- }
- subEventListener.run( new SubTextureEvent(new Vec2i(x, y), new Vec2i(width, height), tex,
- start_display_pts, end_display_pts, subTexRelease) );
+ subEventListener.run( new SubBitmapEvent(this.scodecID, new Vec2i(x, y), new Vec2i(width, height),
+ subTex, start_display_pts, end_display_pts, subTexRelease) );
} else {
- subTexRelease.release((Texture)texObj); // release right away
+ subTexRelease.release(subTex); // release right away
}
}
@@ -1779,13 +1878,15 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
* </p>
*/
protected final void updateAttributes(final String title,
- final int[] v_streams, final String[] v_langs,
- int vid, final int[] a_streams, final String[] a_langs,
- int aid, final int[] s_streams, final String[] s_langs,
- int sid, final int width, final int height,
+ final int[] v_streams, final String[] v_langs, int vid,
+ final int[] a_streams, final String[] a_langs, int aid,
+ final int[] s_streams, final String[] s_langs, int sid,
+ final int width, final int height,
final int bps_stream, final int bps_video, final int bps_audio,
final float fps, final int videoFrames, final int audioFrames, final int duration,
- final String vcodec, final String acodec, final String scodec) {
+ final String vcodec, final String acodec, final String scodec,
+ final int ffmpegVCodecID, final int ffmpegACodecID, final int ffmpegSCodecID)
+ {
final GLMediaPlayer.EventMask eventMask = new GLMediaPlayer.EventMask();
final boolean wasUninitialized = state == State.Uninitialized;
@@ -1878,16 +1979,37 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
if( (null!=acodec && acodec.length()>0 && !this.acodec.equals(acodec)) ) {
eventMask.setBit(GLMediaPlayer.EventMask.Bit.Codec);
+ eventMask.setBit(GLMediaPlayer.EventMask.Bit.ACodec);
this.acodec = acodec;
}
if( (null!=vcodec && vcodec.length()>0 && !this.vcodec.equals(vcodec)) ) {
eventMask.setBit(GLMediaPlayer.EventMask.Bit.Codec);
+ eventMask.setBit(GLMediaPlayer.EventMask.Bit.VCodec);
this.vcodec = vcodec;
}
if( (null!=scodec && scodec.length()>0 && !this.scodec.equals(scodec)) ) {
eventMask.setBit(GLMediaPlayer.EventMask.Bit.Codec);
+ eventMask.setBit(GLMediaPlayer.EventMask.Bit.SCodec);
this.scodec = scodec;
}
+ final CodecID acodecID = CodecID.fromFFmpeg(ffmpegACodecID);
+ final CodecID vcodecID = CodecID.fromFFmpeg(ffmpegVCodecID);
+ final CodecID scodecID = CodecID.fromFFmpeg(ffmpegSCodecID);
+ if( (0<ffmpegACodecID && CodecID.isAudioCodec(acodecID, true) && this.acodecID != acodecID) ) {
+ eventMask.setBit(GLMediaPlayer.EventMask.Bit.Codec);
+ eventMask.setBit(GLMediaPlayer.EventMask.Bit.ACodec);
+ this.acodecID = acodecID;
+ }
+ if( (0<ffmpegVCodecID && CodecID.isVideoCodec(vcodecID) && this.vcodecID != vcodecID) ) {
+ eventMask.setBit(GLMediaPlayer.EventMask.Bit.Codec);
+ eventMask.setBit(GLMediaPlayer.EventMask.Bit.VCodec);
+ this.vcodecID = vcodecID;
+ }
+ if( (0<ffmpegSCodecID && CodecID.isSubtitleCodec(scodecID) && this.scodecID != scodecID) ) {
+ eventMask.setBit(GLMediaPlayer.EventMask.Bit.Codec);
+ eventMask.setBit(GLMediaPlayer.EventMask.Bit.SCodec);
+ this.scodecID = scodecID;
+ }
if( eventMask.isZero() ) {
return;
}
@@ -2007,6 +2129,15 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
}
@Override
+ public final CodecID getVideoCodecID() { return vcodecID; }
+
+ @Override
+ public final CodecID getAudioCodecID() { return acodecID; }
+
+ @Override
+ public CodecID getSubtitleCodecID() { return scodecID; }
+
+ @Override
public final String getVideoCodec() { return vcodec; }
@Override
@@ -2069,18 +2200,18 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer {
@Override
public final String toString() {
- final String tt = PTS.millisToTimeStr(getDuration());
+ final String tt = PTS.toTimeStr(getDuration());
final String loc = ( null != streamLoc ) ? streamLoc.toString() : "<undefined stream>" ;
final int freeVideoFrames = null != videoFramesFree ? videoFramesFree.size() : 0;
final int decVideoFrames = null != videoFramesDecoded ? videoFramesDecoded.size() : 0;
- final int video_scr_ms = av_scr.get(Clock.currentMillis());
+ final int video_scr_ms = av_scr.getCurrent();
final String camPath = null != cameraPath ? ", camera: "+cameraPath : "";
return getClass().getSimpleName()+"["+state+", vSCR "+video_scr_ms+", "+getChapters().length+" chapters, duration "+tt+", frames[p "+presentedFrameCount+", d "+decodedFrameCount+", t "+videoFrames+", z "+nullFrameCount+" / "+maxNullFrameCountUntilEOS+"], "+
"speed "+playSpeed+", "+bps_stream+" bps, hasSW "+(null!=streamWorker)+
", Texture[count "+textureCount+", free "+freeVideoFrames+", dec "+decVideoFrames+", tagt "+toHexString(textureTarget)+", ifmt "+toHexString(textureInternalFormat)+", fmt "+toHexString(textureFormat)+", type "+toHexString(textureType)+"], "+
- "Video[id "+vid+"/"+Arrays.toString(v_streams)+"/"+Arrays.toString(v_langs)+", <"+vcodec+">, "+width+"x"+height+", glOrient "+isInGLOrientation+", "+fps+" fps, "+frame_duration+" fdur, "+bps_video+" bps], "+
- "Audio[id "+aid+"/"+Arrays.toString(a_streams)+"/"+Arrays.toString(a_langs)+", <"+acodec+">, "+bps_audio+" bps, "+audioFrames+" frames], "+
- "Subs[id "+sid+"/"+Arrays.toString(s_streams)+"/"+Arrays.toString(s_langs)+", <"+scodec+">], uri "+loc+camPath+"]";
+ "Video[id "+vid+"/"+Arrays.toString(v_streams)+"/"+Arrays.toString(v_langs)+", "+vcodecID+"/'"+vcodec+"', "+width+"x"+height+", glOrient "+isInGLOrientation+", "+fps+" fps, "+frame_duration+" fdur, "+bps_video+" bps], "+
+ "Audio[id "+aid+"/"+Arrays.toString(a_streams)+"/"+Arrays.toString(a_langs)+", "+acodecID+"/'"+acodec+"', "+bps_audio+" bps, "+audioFrames+" frames], "+
+ "Subs[id "+sid+"/"+Arrays.toString(s_streams)+"/"+Arrays.toString(s_langs)+", "+scodecID+"/'"+scodec+"'], uri "+loc+camPath+"]";
}
@Override
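
For orientation (not part of the patch), the ownership contract of the widened getNextTextureImpl(..) signature looks roughly like this on the implementation side; decodeNextVideoFrame(..) and decodeBitmapSubtitleInto(..) are hypothetical stand-ins for the FFmpeg-backed decode steps:

    // Sketch only: the helpers below do not exist; they stand in for the actual decoder.
    @Override
    protected int getNextTextureImpl(final GL gl, final TextureFrame vFrame,
                                     final Texture sTex, final boolean[] sTexUsed) throws InterruptedException {
        final int vPTS = decodeNextVideoFrame(gl, vFrame);        // vFrame is null for audio-only playback
        if( null != sTex && decodeBitmapSubtitleInto(sTex) ) {    // bitmap subtitle uploaded into the borrowed sTex?
            sTexUsed[0] = true;   // sTex now travels with the emitted SubBitmapEvent; the caller must not
                                  // return it to subTexFree - TextureOwner.release(..) will do that later.
        }
        return vPTS;              // if sTexUsed[0] stays false, the caller puts sTex back into the free ring
    }
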
diff --git a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
index 44031372f..b8394dace 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java
@@ -39,6 +39,7 @@ import com.jogamp.common.av.PTS;
import com.jogamp.common.nio.Buffers;
import com.jogamp.common.os.Clock;
import com.jogamp.common.util.IOUtil;
+import com.jogamp.opengl.util.av.CodecID;
import com.jogamp.opengl.util.av.GLMediaPlayer;
import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureData;
@@ -91,9 +92,9 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl {
}
@Override
- protected final int getNextTextureImpl(final GL gl, final TextureFrame nextFrame) {
+ protected final int getNextTextureImpl(final GL gl, final TextureFrame vFrame, final Texture sTex, final boolean[] sTexUsed) {
final int pts = getLastBufferedAudioPTS();
- nextFrame.setPTS( pts );
+ vFrame.setPTS( pts );
return pts;
}
@@ -154,7 +155,7 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl {
new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE,
new int[0], new String[0], GLMediaPlayer.STREAM_ID_NONE,
texData.getWidth(), texData.getHeight(), 0, 0, 0, _fps, _totalFrames, 0, _duration,
- "png-static", null, null);
+ "png-static", null, null, CodecID.toFFmpeg(CodecID.PNG), -1, -1);
}
@Override
protected final void initGLImpl(final GL gl) throws IOException, GLException {
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
index cfe0f72af..e33822697 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGDynamicLibraryBundleInfo.java
@@ -52,14 +52,15 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
private static final List<String> glueLibNames = new ArrayList<String>(); // none
- private static final int symbolCount = 63;
+ private static final int symbolCount = 67;
private static final String[] symbolNames = {
"avutil_version",
"avformat_version",
"avcodec_version",
"avdevice_version", // (opt)
"swresample_version",
- /* 5 */
+ "swscale_version", // (opt)
+ /* 6 */
// libavcodec
"avcodec_close",
@@ -82,7 +83,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
"avcodec_receive_frame", // 57
"avcodec_decode_subtitle2", // 52.23.0
"avsubtitle_free", // 52.82.0
- /* +20 = 25 */
+ /* +20 = 26 */
// libavutil
"av_pix_fmt_desc_get", // >= lavu 51.45
@@ -102,7 +103,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
"av_channel_layout_uninit", // >= 59 (opt)
"av_channel_layout_describe", // >= 59 (opt)
"av_opt_set_chlayout", // >= 59
- /* +17 = 42 */
+ /* +17 = 43 */
// libavformat
"avformat_alloc_context",
@@ -119,11 +120,11 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
"avformat_network_init", // 53.13.0 (opt)
"avformat_network_deinit", // 53.13.0 (opt)
"avformat_find_stream_info", // 53.3.0 (opt)
- /* +14 = 56 */
+ /* +14 = 57 */
// libavdevice
"avdevice_register_all", // supported in all versions (opt)
- /* +1 = 57 */
+ /* +1 = 58 */
// libswresample
"av_opt_set_sample_fmt", // actually lavu .. but exist only w/ swresample!
@@ -132,7 +133,13 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
"swr_free",
"swr_convert",
"swr_get_out_samples",
- /* +6 = 63 */
+ /* +6 = 64 */
+
+ // libswscale
+ "sws_getCachedContext", // opt
+ "sws_scale", // opt
+ "sws_freeContext", // opt
+ /* +3 = 67 */
};
// optional symbol names
@@ -158,15 +165,22 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
"swr_free",
"swr_convert",
"swr_get_out_samples",
+
+ // libswscale
+ "swscale_version", // opt
+ "sws_getCachedContext", // opt
+ "sws_scale", // opt
+ "sws_freeContext", // opt
};
- /** 5: util, format, codec, device, swresample */
- private static final int LIB_COUNT = 5;
+ /** 6: util, format, codec, device, swresample, swscale */
+ private static final int LIB_COUNT = 6;
private static final int LIB_IDX_UTI = 0;
private static final int LIB_IDX_FMT = 1;
private static final int LIB_IDX_COD = 2;
private static final int LIB_IDX_DEV = 3;
private static final int LIB_IDX_SWR = 4;
+ private static final int LIB_IDX_SWS = 5;
/** util, format, codec, device, swresample */
private static final boolean[] libLoaded = new boolean[LIB_COUNT];
@@ -200,6 +214,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
static final VersionedLib avCodec;
static final VersionedLib avDevice;
static final VersionedLib swResample;
+ static final VersionedLib swScale;
private static final FFMPEGNatives natives;
private static final PrivilegedAction<DynamicLibraryBundle> privInitSymbolsAction = new PrivilegedAction<DynamicLibraryBundle>() {
@@ -226,7 +241,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
} };
/**
- * @param versions 5: util, format, codec, device, swresample
+ * @param versions 6: util, format, codec, device, swresample, swscale
* @return
*/
private static final boolean initSymbols(final VersionNumber[] versions, final List<NativeLibrary> nativeLibs) {
@@ -271,7 +286,11 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
versions[LIB_IDX_DEV] = new VersionNumber(0, 0, 0);
}
versions[LIB_IDX_SWR] = FFMPEGStaticNatives.getAVVersion(FFMPEGStaticNatives.getAvVersion0(symbolAddr[LIB_IDX_SWR]));
-
+ if( 0 != symbolAddr[LIB_IDX_SWS] ) {
+ versions[LIB_IDX_SWS] = FFMPEGStaticNatives.getAVVersion(FFMPEGStaticNatives.getAvVersion0(symbolAddr[LIB_IDX_SWS]));
+ } else {
+ versions[LIB_IDX_SWS] = new VersionNumber(0, 0, 0);
+ }
return res;
}
@@ -280,7 +299,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
GLProfile.initSingleton();
boolean _ready = false;
- /** 5: util, format, codec, device, swresample */
+ /** 6: util, format, codec, device, swresample, swscale */
final VersionNumber[] _versions = new VersionNumber[LIB_COUNT];
final List<NativeLibrary> _nativeLibs = new ArrayList<NativeLibrary>();
try {
@@ -294,6 +313,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
avCodec = new VersionedLib(_nativeLibs.get(LIB_IDX_COD), _versions[LIB_IDX_COD]);
avDevice = new VersionedLib(_nativeLibs.get(LIB_IDX_DEV), _versions[LIB_IDX_DEV]);
swResample = new VersionedLib(_nativeLibs.get(LIB_IDX_SWR), _versions[LIB_IDX_SWR]);
+ swScale = new VersionedLib(_nativeLibs.get(LIB_IDX_SWS), _versions[LIB_IDX_SWS]);
if(!libsCFUSLoaded) {
String missing = "";
if( !libLoaded[LIB_IDX_COD] ) {
@@ -321,13 +341,20 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
final int avCodecMajor = avCodec.version.getMajor();
final int avDeviceMajor = avDevice.version.getMajor();
final int swResampleMajor = swResample.version.getMajor();
- if( avCodecMajor == 58 && avFormatMajor == 58 && ( avDeviceMajor == 58 || avDeviceMajor == 0 ) && avUtilMajor == 56 && swResampleMajor == 3) {
+ final int swScaleMajor = swScale.version.getMajor();
+ if( avCodecMajor == 58 && avFormatMajor == 58 && ( avDeviceMajor == 58 || avDeviceMajor == 0 ) && avUtilMajor == 56 &&
+ swResampleMajor == 3 && ( swScaleMajor == 5 || swScaleMajor == 0 ) )
+ {
// Exact match: ffmpeg 4.x.y
natives = new FFMPEGv0400Natives();
- } else if( avCodecMajor == 59 && avFormatMajor == 59 && ( avDeviceMajor == 59 || avDeviceMajor == 0 ) && avUtilMajor == 57 && swResampleMajor == 4) {
+ } else if( avCodecMajor == 59 && avFormatMajor == 59 && ( avDeviceMajor == 59 || avDeviceMajor == 0 ) && avUtilMajor == 57 &&
+ swResampleMajor == 4 && ( swScaleMajor == 6 || swScaleMajor == 0 ) )
+ {
// Exact match: ffmpeg 5.x.y
natives = new FFMPEGv0500Natives();
- } else if( avCodecMajor == 60 && avFormatMajor == 60 && ( avDeviceMajor == 60 || avDeviceMajor == 0 ) && avUtilMajor == 58 && swResampleMajor == 4) {
+ } else if( avCodecMajor == 60 && avFormatMajor == 60 && ( avDeviceMajor == 60 || avDeviceMajor == 0 ) && avUtilMajor == 58 &&
+ swResampleMajor == 4 && ( swScaleMajor == 7 || swScaleMajor == 0 ) )
+ {
// Exact match: ffmpeg 6.x.y
natives = new FFMPEGv0600Natives();
} else {
@@ -351,6 +378,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
static boolean libsLoaded() { return libsCFUSLoaded; }
static boolean avDeviceLoaded() { return libLoaded[LIB_IDX_DEV]; }
static boolean swResampleLoaded() { return libLoaded[LIB_IDX_SWR]; }
+ static boolean swScaleLoaded() { return libLoaded[LIB_IDX_SWS]; }
static FFMPEGNatives getNatives() { return natives; }
static boolean initSingleton() { return ready; }
@@ -390,7 +418,7 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
public final List<List<String>> getToolLibNames() {
final List<List<String>> libsList = new ArrayList<List<String>>();
- // 5: util, format, codec, device, swresample
+ // 6: util, format, codec, device, swresample, swscale
final List<String> avutil = new ArrayList<String>();
if( FFMPEGMediaPlayer.PREFER_SYSTEM_LIBS ) {
@@ -490,12 +518,31 @@ class FFMPEGDynamicLibraryBundleInfo implements DynamicLibraryBundleInfo {
}
libsList.add(swresample);
+ final List<String> swscale = new ArrayList<String>();
+ if( FFMPEGMediaPlayer.PREFER_SYSTEM_LIBS ) {
+ swscale.add("swscale"); // system default
+ } else {
+ swscale.add("internal_swscale");// internal
+ }
+ swscale.add("libswscale.so.7"); // ffmpeg 6.[0-x]
+ swscale.add("libswscale.so.6"); // ffmpeg 5.[0-x]
+ swscale.add("libswscale.so.5"); // ffmpeg 4.[0-x] (Debian-11)
+
+ swscale.add("swscale-7"); // ffmpeg 6.[0-x]
+ swscale.add("swscale-6"); // ffmpeg 5.[0-x]
+ swscale.add("swscale-5"); // ffmpeg 4.[0-x]
+ if( FFMPEGMediaPlayer.PREFER_SYSTEM_LIBS ) {
+ swscale.add("internal_swscale");// internal
+ } else {
+ swscale.add("swscale"); // system default
+ }
+ libsList.add(swscale);
return libsList;
}
@Override
public List<String> getSymbolForToolLibPath() {
- // 5: util, format, codec, device, swresample
- return Arrays.asList("av_free", "av_read_frame", "avcodec_close", "avdevice_register_all", "swr_convert");
+ // 6: util, format, codec, device, swresample, swscale
+ return Arrays.asList("av_free", "av_read_frame", "avcodec_close", "avdevice_register_all", "swr_convert", "swscale_version");
}
@Override
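
Since swscale stays optional, a probe along these lines (not part of the patch, and only illustrative because the class and these helpers are package-private) shows whether bitmap'ed subtitles can be expected to work:

    // Sketch only: assumes package-private access to FFMPEGDynamicLibraryBundleInfo.
    if( FFMPEGDynamicLibraryBundleInfo.libsLoaded() && FFMPEGDynamicLibraryBundleInfo.swScaleLoaded() ) {
        System.err.println("swscale "+FFMPEGDynamicLibraryBundleInfo.swScale.version+" loaded: bitmap'ed subtitles available");
    } else {
        System.err.println("swscale not loaded: bitmap'ed subtitles (e.g. CodecID.HDMV_PGS) unavailable");
    }
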
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
index e6784273e..974bdc10b 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java
@@ -30,7 +30,6 @@ package jogamp.opengl.util.av.impl;
import java.io.IOException;
import java.io.PrintStream;
-import java.nio.ByteBuffer;
import java.security.PrivilegedAction;
import com.jogamp.opengl.GL;
@@ -41,21 +40,19 @@ import com.jogamp.common.av.AudioFormat;
import com.jogamp.common.av.AudioSink;
import com.jogamp.common.av.AudioSinkFactory;
import com.jogamp.common.av.TimeFrameI;
-import com.jogamp.common.os.Clock;
import com.jogamp.common.util.IOUtil;
import com.jogamp.common.util.PropertyAccess;
import com.jogamp.common.util.SecurityUtil;
import com.jogamp.gluegen.runtime.ProcAddressTable;
import com.jogamp.opengl.util.GLPixelStorageModes;
-import com.jogamp.opengl.util.av.ASSEventLine;
import com.jogamp.opengl.util.av.GLMediaPlayer;
+import com.jogamp.opengl.util.av.VideoPixelFormat;
import com.jogamp.opengl.util.texture.Texture;
import jogamp.common.os.PlatformPropsImpl;
import jogamp.opengl.GLContextImpl;
import jogamp.opengl.util.av.AudioSampleFormat;
import jogamp.opengl.util.av.GLMediaPlayerImpl;
-import jogamp.opengl.util.av.VideoPixelFormat;
import jogamp.opengl.util.av.impl.FFMPEGDynamicLibraryBundleInfo.VersionedLib;
/***
@@ -98,6 +95,7 @@ import jogamp.opengl.util.av.impl.FFMPEGDynamicLibraryBundleInfo.VersionedLib;
* <li>avutil</li>
* <li>avdevice (optional for video input devices)</li>
* <li>swresample</li>
+ * <li>swscale (optional for bitmap'ed subtitles)</li>
* </ul>
* </p>
*
@@ -105,10 +103,10 @@ import jogamp.opengl.util.av.impl.FFMPEGDynamicLibraryBundleInfo.VersionedLib;
* <p>
* Currently we are binary compatible with the following major versions:
* <table border="1">
- * <tr><th>ffmpeg</th><th>avcodec</th><th>avformat</th><th>avdevice</th><th>avutil</th><th>swresample</th> <th>FFMPEG* class</th></tr>
- * <tr><td>4</td> <td>58</td> <td>58</td> <td>58</td> <td>56</td> <td>03</td> <td>FFMPEGv0400</td></tr>
- * <tr><td>5</td> <td>59</td> <td>59</td> <td>59</td> <td>57</td> <td>04</td> <td>FFMPEGv0500</td></tr>
- * <tr><td>6</td> <td>60</td> <td>60</td> <td>60</td> <td>58</td> <td>04</td> <td>FFMPEGv0600</td></tr>
+ * <tr><th>ffmpeg</th><th>avcodec</th><th>avformat</th><th>avdevice</th><th>avutil</th><th>swresample</th><th>swscale</th> <th>FFMPEG* class</th></tr>
+ * <tr><td>4</td> <td>58</td> <td>58</td> <td>58</td> <td>56</td> <td>03</td> <td>05</td> <td>FFMPEGv0400</td></tr>
+ * <tr><td>5</td> <td>59</td> <td>59</td> <td>59</td> <td>57</td> <td>04</td> <td>06</td> <td>FFMPEGv0500</td></tr>
+ * <tr><td>6</td> <td>60</td> <td>60</td> <td>60</td> <td>58</td> <td>04</td> <td>07</td> <td>FFMPEGv0600</td></tr>
* </table>
* </p>
* <p>
@@ -203,6 +201,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
private static final int avCodecMajorVersionCC;
private static final int avDeviceMajorVersionCC;
private static final int swResampleMajorVersionCC;
+ private static final int swScaleMajorVersionCC;
private static final boolean available;
static {
@@ -220,18 +219,21 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
avUtilMajorVersionCC = natives.getAvUtilMajorVersionCC0();
avDeviceMajorVersionCC = natives.getAvDeviceMajorVersionCC0();
swResampleMajorVersionCC = natives.getSwResampleMajorVersionCC0();
+ swScaleMajorVersionCC = natives.getSwScaleMajorVersionCC0();
} else {
avUtilMajorVersionCC = 0;
avFormatMajorVersionCC = 0;
avCodecMajorVersionCC = 0;
avDeviceMajorVersionCC = 0;
swResampleMajorVersionCC = 0;
+ swScaleMajorVersionCC = 0;
}
final VersionedLib avCodec = FFMPEGDynamicLibraryBundleInfo.avCodec;
final VersionedLib avFormat = FFMPEGDynamicLibraryBundleInfo.avFormat;
final VersionedLib avUtil = FFMPEGDynamicLibraryBundleInfo.avUtil;
final VersionedLib avDevice = FFMPEGDynamicLibraryBundleInfo.avDevice;
final VersionedLib swResample = FFMPEGDynamicLibraryBundleInfo.swResample;
+ final VersionedLib swScale = FFMPEGDynamicLibraryBundleInfo.swScale;
// final boolean avDeviceLoaded = FFMPEGDynamicLibraryBundleInfo.avDeviceLoaded();
// final boolean swResampleLoaded = FFMPEGDynamicLibraryBundleInfo.swResampleLoaded();
final int avCodecMajor = avCodec.version.getMajor();
@@ -239,11 +241,13 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
final int avUtilMajor = avUtil.version.getMajor();
final int avDeviceMajor = avDevice.version.getMajor();
final int swResampleMajor = swResample.version.getMajor();
+ final int swScaleMajor = swScale.version.getMajor();
libAVVersionGood = avCodecMajorVersionCC == avCodecMajor &&
avFormatMajorVersionCC == avFormatMajor &&
avUtilMajorVersionCC == avUtilMajor &&
( avDeviceMajorVersionCC == avDeviceMajor || 0 == avDeviceMajor ) &&
- swResampleMajorVersionCC == swResampleMajor;
+ swResampleMajorVersionCC == swResampleMajor &&
+ ( swScaleMajorVersionCC == swScaleMajor || 0 == swScaleMajor );
if( !libAVVersionGood ) {
System.err.println("FFmpeg Not Matching Compile-Time / Runtime Major-Version");
}
@@ -257,6 +261,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
avCodecMajorVersionCC = 0;
avDeviceMajorVersionCC = 0;
swResampleMajorVersionCC = 0;
+ swScaleMajorVersionCC = 0;
libAVVersionGood = false;
}
available = libAVGood && libAVVersionGood && null != natives;
@@ -270,6 +275,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
out.println("FFmpeg Util : "+FFMPEGDynamicLibraryBundleInfo.avUtil+" [cc "+avUtilMajorVersionCC+"]");
out.println("FFmpeg Device : "+FFMPEGDynamicLibraryBundleInfo.avDevice+" [cc "+avDeviceMajorVersionCC+", loaded "+FFMPEGDynamicLibraryBundleInfo.avDeviceLoaded()+"]");
out.println("FFmpeg Resample: "+FFMPEGDynamicLibraryBundleInfo.swResample+" [cc "+swResampleMajorVersionCC+", loaded "+FFMPEGDynamicLibraryBundleInfo.swResampleLoaded()+"]");
+ out.println("FFmpeg Scale : "+FFMPEGDynamicLibraryBundleInfo.swScale+" [cc "+swScaleMajorVersionCC+", loaded "+FFMPEGDynamicLibraryBundleInfo.swScaleLoaded()+"]");
out.println("FFmpeg Class : "+(null!= natives ? natives.getClass().getSimpleName() : "n/a"));
}
@Override
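
For clarity, the version gate extended above treats avdevice and swscale as optional libraries. A minimal Java sketch of the same rule (illustrative only, not the patch itself):

    // Sketch of the major-version gate above: a library passes if its runtime
    // major version equals the compile-time major version, or - for the
    // optional avdevice/swscale bundles - if it is simply absent (major == 0).
    final class VersionGate {
        static boolean matches(final int ccMajor, final int rtMajor, final boolean optional) {
            return ccMajor == rtMajor || ( optional && 0 == rtMajor );
        }
        static boolean libAVVersionGood(final int[] cc, final int[] rt) {
            // index order: 0 codec, 1 format, 2 util, 3 device (optional), 4 swresample, 5 swscale (optional)
            return matches(cc[0], rt[0], false) && matches(cc[1], rt[1], false) &&
                   matches(cc[2], rt[2], false) && matches(cc[3], rt[3], true)  &&
                   matches(cc[4], rt[4], false) && matches(cc[5], rt[5], true);
        }
    }
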
@@ -447,6 +453,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
@Override
public Object run() {
final ProcAddressTable pt = ctx.getGLProcAddressTable();
+ final long procAddrGLTexImage2D = pt.getAddressFor("glTexImage2D");
final long procAddrGLTexSubImage2D = pt.getAddressFor("glTexSubImage2D");
final long procAddrGLGetError = pt.getAddressFor("glGetError");
final long procAddrGLFlush = pt.getAddressFor("glFlush");
@@ -458,7 +465,9 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
procAddrGLEnable = 0;
}
final long procAddrGLBindTexture = pt.getAddressFor("glBindTexture");
- natives.setGLFuncs0(moviePtr, procAddrGLTexSubImage2D, procAddrGLGetError, procAddrGLFlush, procAddrGLFinish, procAddrGLEnable, procAddrGLBindTexture);
+ natives.setGLFuncs0(moviePtr, procAddrGLTexImage2D, procAddrGLTexSubImage2D,
+ procAddrGLGetError, procAddrGLFlush, procAddrGLFinish,
+ procAddrGLEnable, procAddrGLBindTexture, gl.isNPOTTextureAvailable());
return null;
} } );
audioQueueSize = AudioSink.DefaultQueueSizeWithVideo;
@@ -983,7 +992,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
}
@Override
- protected final int getNextTextureImpl(final GL gl, final TextureFrame nextFrame) {
+ protected final int getNextTextureImpl(final GL gl, final TextureFrame vFrame, final Texture sTex, final boolean[] sTexUsed) {
if(0==moviePtr) {
throw new GLException("FFMPEG native instance null");
}
@@ -994,15 +1003,32 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl {
// final Texture tex = nextFrame.getTexture();
// tex.enable(gl);
// tex.bind(gl);
- vTexID = nextFrame.getTexture().getTextureObject();
+ vTexID = vFrame.getTexture().getTextureObject();
}
/** Try decode up to 10 packets to find one containing video. */
for(int i=0; TimeFrameI.INVALID_PTS == vPTS && 10 > i; i++) {
- vPTS = natives.readNextPacket0(moviePtr, getTextureTarget(), vTexID, getTextureFormat(), getTextureType(), GL.GL_TEXTURE_2D, 0);
+ int sTexID = 0; // invalid
+ int sTexWidth = 0;
+ int sTexHeight = 0;
+ if( null != gl && !sTexUsed[0] ) {
+ // glEnable() and glBindTexture() are performed in native readNextPacket0()
+ // final Texture tex = nextFrame.getTexture();
+ // tex.enable(gl);
+ // tex.bind(gl);
+ vTexID = vFrame.getTexture().getTextureObject();
+ if( null != sTex ) {
+ sTexID = sTex.getTextureObject();
+ // FIXME: Disabled in native code, buggy on AMD GPU corrupting texture content
+ sTexWidth = sTex.getWidth();
+ sTexHeight = sTex.getHeight();
+ }
+ }
+ vPTS = natives.readNextPacket0(moviePtr, getTextureTarget(), vTexID, getTextureFormat(), getTextureType(),
+ GL.GL_TEXTURE_2D, sTexID, sTexWidth, sTexHeight, sTex, sTexUsed);
}
- if( null != nextFrame ) {
- nextFrame.setPTS(vPTS);
+ if( null != vFrame ) {
+ vFrame.setPTS(vPTS);
}
return vPTS;
}
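
The reworked getNextTextureImpl(..) above introduces a boolean[1] out-parameter that reports whether the passed subtitle texture was (re)defined by the native decoder. A hypothetical caller-side sketch of that contract (the Decoder interface and all names are illustrative stand-ins, not from this change):

    // Hypothetical sketch of the boolean[1] out-parameter contract:
    // index 0 is set to true by the callee iff the subtitle texture was used.
    final class SubTexUsageSketch {
        interface Decoder { // stand-in for the protected getNextTextureImpl(..)
            int nextTexture(Object vFrame, Object sTex, boolean[] sTexUsed);
        }
        static int pump(final Decoder d, final Object vFrame, final Object sTex) {
            final boolean[] sTexUsed = { false };
            final int vPTS = d.nextTexture(vFrame, sTex, sTexUsed);
            if( sTexUsed[0] ) {
                // sTex now presumably carries the scaled RGBA bitmap subtitle and
                // should not be re-passed or reused until the subtitle event ends.
            }
            return vPTS;
        }
    }
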
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java
index c28028bde..2fe78cbc6 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGNatives.java
@@ -1,5 +1,5 @@
/**
- * Copyright 2013-2023 JogAmp Community. All rights reserved.
+ * Copyright 2013-2024 JogAmp Community. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@@ -28,6 +28,7 @@
package jogamp.opengl.util.av.impl;
import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;
+import jogamp.opengl.util.av.GLMediaPlayerImpl;
/* pp */ abstract class FFMPEGNatives {
@@ -42,6 +43,7 @@ import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;
abstract int getAvCodecMajorVersionCC0();
abstract int getAvDeviceMajorVersionCC0();
abstract int getSwResampleMajorVersionCC0();
+ abstract int getSwScaleMajorVersionCC0();
abstract long createInstance0(FFMPEGMediaPlayer upstream, boolean verbose);
abstract void destroyInstance0(long moviePtr);
@@ -67,8 +69,8 @@ import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;
int aid, int aMaxChannelCount, int aPrefSampleRate,
int sid);
- abstract void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish,
- long procAddrGLEnable, long procAddrGLBindTexture);
+ abstract void setGLFuncs0(long moviePtr, long procAddrGLTexImage2D, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush,
+ long procAddrGLFinish, long procAddrGLEnable, long procAddrGLBindTexture, boolean hasNPOT);
abstract int getVideoPTS0(long moviePtr);
@@ -89,9 +91,13 @@ import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;
* @param vTexType video texture data type
* @param sTexTarget subtitle texture target
* @param sTexID subtitle texture ID/name
+ * @param sTexWidthPre current width of the pre-allocated subtitle texture; it may be increased by the native code, with the new size notified via {@link GLMediaPlayerImpl#pushSubtitleTex(Object, int, int, int, int, int, int, int, int, int)}
+ * @param sTexHeightPre current height of the pre-allocated subtitle texture; it may be increased by the native code, with the new size notified via {@link GLMediaPlayerImpl#pushSubtitleTex(Object, int, int, int, int, int, int, int, int, int)}
+ * @param sTexObj subtitle texture object to be passed back to the caller
+ * @param sTexUsed result array; {@code true} must be written into index 0 if {@code sTexObj} has been used
* @return resulting current video PTS, or {@link TextureFrame#INVALID_PTS}
*/
- abstract int readNextPacket0(long moviePtr, int vTexTarget, int vTexID, int vTexFmt, int vTexType, int sTexTarget, int sTexID);
+ abstract int readNextPacket0(long moviePtr, int vTexTarget, int vTexID, int vTexFmt, int vTexType, int sTexTarget, int sTexID, int sTexWidthPre, int sTexHeightPre, Object sTexObj, boolean[] sTexUsed);
abstract int play0(long moviePtr);
abstract int pause0(long moviePtr);
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGStaticNatives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGStaticNatives.java
index 65a7e3618..854de7ad7 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGStaticNatives.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGStaticNatives.java
@@ -1,5 +1,5 @@
/**
- * Copyright 2013 JogAmp Community. All rights reserved.
+ * Copyright 2013-2024 JogAmp Community. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0400Natives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0400Natives.java
index 8a567fa94..a7e6de270 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0400Natives.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0400Natives.java
@@ -1,5 +1,5 @@
/**
- * Copyright 2015-2023 JogAmp Community. All rights reserved.
+ * Copyright 2015-2024 JogAmp Community. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@@ -47,6 +47,9 @@ class FFMPEGv0400Natives extends FFMPEGNatives {
native int getSwResampleMajorVersionCC0();
@Override
+ native int getSwScaleMajorVersionCC0();
+
+ @Override
native long createInstance0(FFMPEGMediaPlayer upstream, boolean verbose);
@Override
@@ -56,7 +59,7 @@ class FFMPEGv0400Natives extends FFMPEGNatives {
native void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, String sizes, int vWidth, int vHeight, int vRate, int aid, int aMaxChannelCount, int aPrefSampleRate, int sid);
@Override
- native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish, long procAddrGLEnable, long procAddrGLBindTexture);
+ native void setGLFuncs0(long moviePtr, long procAddrGLTexImage2D, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish, long procAddrGLEnable, long procAddrGLBindTexture, boolean hasNPOT);
@Override
native int getVideoPTS0(long moviePtr);
@@ -65,7 +68,7 @@ class FFMPEGv0400Natives extends FFMPEGNatives {
native int getAudioPTS0(long moviePtr);
@Override
- native int readNextPacket0(long moviePtr, int vTexTarget, int vTexID, int vTexFmt, int vTexType, int sTexTarget, int sTexID);
+ native int readNextPacket0(long moviePtr, int vTexTarget, int vTexID, int vTexFmt, int vTexType, int sTexTarget, int sTexID, int sTexWidthPre, int sTexHeightPre, Object sTexObj, boolean[] sTexUsed);
@Override
native int play0(long moviePtr);
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0500Natives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0500Natives.java
index 09ae6b7b7..7268b0627 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0500Natives.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0500Natives.java
@@ -1,5 +1,5 @@
/**
- * Copyright 2015-2023 JogAmp Community. All rights reserved.
+ * Copyright 2015-2024 JogAmp Community. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@@ -47,6 +47,9 @@ class FFMPEGv0500Natives extends FFMPEGNatives {
native int getSwResampleMajorVersionCC0();
@Override
+ native int getSwScaleMajorVersionCC0();
+
+ @Override
native long createInstance0(FFMPEGMediaPlayer upstream, boolean verbose);
@Override
@@ -56,7 +59,7 @@ class FFMPEGv0500Natives extends FFMPEGNatives {
native void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, String sizes, int vWidth, int vHeight, int vRate, int aid, int aMaxChannelCount, int aPrefSampleRate, int sid);
@Override
- native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish, long procAddrGLEnable, long procAddrGLBindTexture);
+ native void setGLFuncs0(long moviePtr, long procAddrGLTexImage2D, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish, long procAddrGLEnable, long procAddrGLBindTexture, boolean hasNPOT);
@Override
native int getVideoPTS0(long moviePtr);
@@ -65,7 +68,7 @@ class FFMPEGv0500Natives extends FFMPEGNatives {
native int getAudioPTS0(long moviePtr);
@Override
- native int readNextPacket0(long moviePtr, int vTexTarget, int vTexID, int vTexFmt, int vTexType, int sTexTarget, int sTexID);
+ native int readNextPacket0(long moviePtr, int vTexTarget, int vTexID, int vTexFmt, int vTexType, int sTexTarget, int sTexID, int sTexWidthPre, int sTexHeightPre, Object sTexObj, boolean[] sTexUsed);
@Override
native int play0(long moviePtr);
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0600Natives.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0600Natives.java
index 663e9cbd0..a87c98dbc 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0600Natives.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGv0600Natives.java
@@ -1,5 +1,5 @@
/**
- * Copyright 2015-2023 JogAmp Community. All rights reserved.
+ * Copyright 2015-2024 JogAmp Community. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@@ -47,6 +47,9 @@ class FFMPEGv0600Natives extends FFMPEGNatives {
native int getSwResampleMajorVersionCC0();
@Override
+ native int getSwScaleMajorVersionCC0();
+
+ @Override
native long createInstance0(FFMPEGMediaPlayer upstream, boolean verbose);
@Override
@@ -56,7 +59,7 @@ class FFMPEGv0600Natives extends FFMPEGNatives {
native void setStream0(long moviePtr, String url, boolean isCameraInput, int vid, String sizes, int vWidth, int vHeight, int vRate, int aid, int aMaxChannelCount, int aPrefSampleRate, int sid);
@Override
- native void setGLFuncs0(long moviePtr, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish, long procAddrGLEnable, long procAddrGLBindTexture);
+ native void setGLFuncs0(long moviePtr, long procAddrGLTexImage2D, long procAddrGLTexSubImage2D, long procAddrGLGetError, long procAddrGLFlush, long procAddrGLFinish, long procAddrGLEnable, long procAddrGLBindTexture, boolean hasNPOT);
@Override
native int getVideoPTS0(long moviePtr);
@@ -65,7 +68,7 @@ class FFMPEGv0600Natives extends FFMPEGNatives {
native int getAudioPTS0(long moviePtr);
@Override
- native int readNextPacket0(long moviePtr, int vTexTarget, int vTexID, int vTexFmt, int vTexType, int sTexTarget, int sTexID);
+ native int readNextPacket0(long moviePtr, int vTexTarget, int vTexID, int vTexFmt, int vTexType, int sTexTarget, int sTexID, int sTexWidthPre, int sTexHeightPre, Object sTexObj, boolean[] sTexUsed);
@Override
native int play0(long moviePtr);
diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
index 24b1ad4a9..f9786a5ab 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java
@@ -1,5 +1,5 @@
/**
- * Copyright 2012 JogAmp Community. All rights reserved.
+ * Copyright 2012-2024 JogAmp Community. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@@ -38,6 +38,7 @@ import com.jogamp.opengl.GLException;
import com.jogamp.opengl.egl.EGL;
import com.jogamp.opengl.util.av.GLMediaPlayer.State;
+import com.jogamp.opengl.util.texture.Texture;
import com.jogamp.opengl.util.texture.TextureSequence;
import jogamp.opengl.util.av.EGLMediaPlayerImpl;
@@ -105,7 +106,7 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
}
@Override
- protected void initStreamImpl(final int vid, final int aid, int sid) throws IOException {
+ protected void initStreamImpl(final int vid, final int aid, final int sid) throws IOException {
if(0==moviePtr) {
throw new GLException("OMX native instance null");
}
@@ -192,7 +193,7 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl {
}
@Override
- protected int getNextTextureImpl(final GL gl, final TextureFrame nextFrame) {
+ protected int getNextTextureImpl(final GL gl, final TextureFrame vFrame, final Texture sTex, final boolean[] sTexUsed) {
if(0==moviePtr) {
throw new GLException("OMX native instance null");
}
diff --git a/src/jogl/native/libav/ffmpeg_impl_template.c b/src/jogl/native/libav/ffmpeg_impl_template.c
index 61aa798a0..d807682b6 100644
--- a/src/jogl/native/libav/ffmpeg_impl_template.c
+++ b/src/jogl/native/libav/ffmpeg_impl_template.c
@@ -42,13 +42,15 @@ typedef unsigned (APIENTRYP AVFORMAT_VERSION)(void);
typedef unsigned (APIENTRYP AVCODEC_VERSION)(void);
typedef unsigned (APIENTRYP AVDEVICE_VERSION)(void);
typedef unsigned (APIENTRYP SWRESAMPLE_VERSION)(void);
+typedef unsigned (APIENTRYP SWSCALE_VERSION)(void);
static AVUTIL_VERSION sp_avutil_version;
static AVFORMAT_VERSION sp_avformat_version;
static AVCODEC_VERSION sp_avcodec_version;
static AVDEVICE_VERSION sp_avdevice_version;
static SWRESAMPLE_VERSION sp_swresample_version;
-// count: 5
+static SWSCALE_VERSION sp_swscale_version;
+// count: 6
// libavcodec
typedef int (APIENTRYP AVCODEC_CLOSE)(AVCodecContext *avctx);
@@ -93,7 +95,7 @@ static AVCODEC_SEND_PACKET sp_avcodec_send_packet; // 57
static AVCODEC_RECEIVE_FRAME sp_avcodec_receive_frame; // 57
static AVCODEC_DECODE_SUBTITLE2 sp_avcodec_decode_subtitle2; // 52.23
static AV_SUBTITLE_FREE sp_avsubtitle_free; // 52.82
-// count: +20 = 25
+// count: +20 = 26
// libavutil
typedef AVPixFmtDescriptor* (APIENTRYP AV_PIX_FMT_DESC_GET)(enum AVPixelFormat pix_fmt); // lavu >= 51.45; lavu 51: 'enum PixelFormat pix_fmt', lavu 53: 'enum AVPixelFormat pix_fmt'
@@ -131,7 +133,7 @@ static AV_CHANNEL_LAYOUT_DEFAULT sp_av_channel_layout_default; // >= 59
static AV_CHANNEL_LAYOUT_UNINIT sp_av_channel_layout_uninit; // >= 59
static AV_CHANNEL_LAYOUT_DESCRIBE sp_av_channel_layout_describe; // >= 59
static AV_OPT_SET_CHLAYOUT sp_av_opt_set_chlayout; // >= 59
-// count: +17 = 42
+// count: +17 = 43
// libavformat
typedef AVFormatContext *(APIENTRYP AVFORMAT_ALLOC_CONTEXT)(void);
@@ -163,12 +165,12 @@ static AV_READ_PAUSE sp_av_read_pause;
static AVFORMAT_NETWORK_INIT sp_avformat_network_init; // 53.13.0
static AVFORMAT_NETWORK_DEINIT sp_avformat_network_deinit; // 53.13.0
static AVFORMAT_FIND_STREAM_INFO sp_avformat_find_stream_info; // 53.3.0
-// count: +14 = 56
+// count: +14 = 57
// libavdevice [53.0.0]
typedef int (APIENTRYP AVDEVICE_REGISTER_ALL)(void);
static AVDEVICE_REGISTER_ALL sp_avdevice_register_all;
-// count: +1 = 57
+// count: +1 = 58
// libswresample [1...]
typedef int (APIENTRYP AV_OPT_SET_SAMPLE_FMT)(void *obj, const char *name, enum AVSampleFormat fmt, int search_flags); // actually lavu .. but exist only w/ swresample!
@@ -184,7 +186,20 @@ static SWR_INIT sp_swr_init;
static SWR_FREE sp_swr_free;
static SWR_CONVERT sp_swr_convert;
static SWR_GET_OUT_SAMPLES sp_swr_get_out_samples;
-// count: +6 = 66
+// count: +6 = 64
+
+
+typedef struct SwsContext *(APIENTRYP SWS_GETCACHEDCONTEXT)(struct SwsContext *context, int srcW, int srcH, enum AVPixelFormat srcFormat,
+ int dstW, int dstH, enum AVPixelFormat dstFormat, int flags,
+ SwsFilter *srcFilter, SwsFilter *dstFilter, const double *param);
+typedef int (APIENTRYP SWS_SCALE)(struct SwsContext *c, const uint8_t * const srcSlice[], const int srcStride[],
+ int srcSliceY, int srcSliceH, uint8_t *const dst[], const int dstStride[]);
+typedef void (APIENTRYP SWS_FREECONTEXT)(struct SwsContext* swsContext);
+
+static SWS_GETCACHEDCONTEXT sp_sws_getCachedContext;
+static SWS_SCALE sp_sws_scale;
+static SWS_FREECONTEXT sp_sws_freeContext;
+// count: +3 = 67
static const char * const ClazzNameString = "java/lang/String";
@@ -210,7 +225,7 @@ static const char * const ClazzNameString = "java/lang/String";
#define MY_MUTEX_UNLOCK(e,s)
#endif
-#define SYMBOL_COUNT 63
+#define SYMBOL_COUNT 67
JNIEXPORT jboolean JNICALL FF_FUNC(initSymbols0)
(JNIEnv *env, jobject instance, jobject jmutex_avcodec_openclose, jobject jSymbols, jint count)
@@ -236,6 +251,7 @@ JNIEXPORT jboolean JNICALL FF_FUNC(initSymbols0)
sp_avcodec_version = (AVCODEC_VERSION) (intptr_t) symbols[i++];
sp_avdevice_version = (AVDEVICE_VERSION) (intptr_t) symbols[i++];
sp_swresample_version = (SWRESAMPLE_VERSION) (intptr_t) symbols[i++];
+ sp_swscale_version = (SWSCALE_VERSION) (intptr_t) symbols[i++];
sp_avcodec_close = (AVCODEC_CLOSE) (intptr_t) symbols[i++];
sp_avcodec_string = (AVCODEC_STRING) (intptr_t) symbols[i++];
@@ -300,6 +316,10 @@ JNIEXPORT jboolean JNICALL FF_FUNC(initSymbols0)
sp_swr_convert = (SWR_CONVERT) (intptr_t) symbols[i++];
sp_swr_get_out_samples = (SWR_GET_OUT_SAMPLES) (intptr_t) symbols[i++];
+ sp_sws_getCachedContext = (SWS_GETCACHEDCONTEXT) (intptr_t) symbols[i++];
+ sp_sws_scale = (SWS_SCALE) (intptr_t) symbols[i++];
+ sp_sws_freeContext = (SWS_FREECONTEXT) (intptr_t) symbols[i++];
+
(*env)->ReleasePrimitiveArrayCritical(env, jSymbols, symbols, 0);
if(SYMBOL_COUNT != i) {
@@ -463,9 +483,11 @@ static void _updateJavaAttributes(JNIEnv *env, FFMPEGToolBasicAV_t* pAV) {
pAV->vWidth, pAV->vHeight,
pAV->bps_stream, pAV->bps_video, pAV->bps_audio,
pAV->fps, pAV->frames_video, pAV->frames_audio, pAV->duration,
- (*env)->NewStringUTF(env, pAV->vcodec),
- (*env)->NewStringUTF(env, pAV->acodec),
- (*env)->NewStringUTF(env, pAV->scodec));
+ (*env)->NewStringUTF(env, pAV->vCodecStr),
+ (*env)->NewStringUTF(env, pAV->aCodecStr),
+ (*env)->NewStringUTF(env, pAV->sCodecStr),
+ pAV->vCodecID, pAV->aCodecID, pAV->sCodecID
+ );
JoglCommon_ExceptionCheck1_throwNewRuntimeException(env, "FFmpeg: Exception occured at updateAttributes(..)");
}
}
@@ -505,6 +527,17 @@ static void freeInstance(JNIEnv *env, FFMPEGToolBasicAV_t* pAV) {
pAV->pSCodecCtx = NULL;
}
pAV->pSCodec=NULL;
+ if( AV_HAS_API_SWSCALE(pAV) ) {
+ if( NULL != pAV->sScaleCtx ) {
+ sp_sws_freeContext(pAV->sScaleCtx);
+ }
+ }
+ pAV->sScaleCtx = NULL;
+ if( NULL != pAV->sPixels ) {
+ free( pAV->sPixels );
+ }
+ pAV->sPixels = NULL;
+ pAV->sPixelsSize = 0;
// Close the video file
if(NULL != pAV->pFormatCtx) {
@@ -627,6 +660,11 @@ JNIEXPORT jint JNICALL FF_FUNC(getSwResampleMajorVersionCC0)
return (jint) LIBSWRESAMPLE_VERSION_MAJOR;
}
+JNIEXPORT jint JNICALL FF_FUNC(getSwScaleMajorVersionCC0)
+ (JNIEnv *env, jobject instance) {
+ return (jint) LIBSWSCALE_VERSION_MAJOR;
+}
+
JNIEXPORT jlong JNICALL FF_FUNC(createInstance0)
(JNIEnv *env, jobject instance, jobject ffmpegMediaPlayer, jboolean verbose)
{
@@ -649,6 +687,12 @@ JNIEXPORT jlong JNICALL FF_FUNC(createInstance0)
} else {
pAV->swresampleVersion = 0;
}
+ if( HAS_FUNC(sp_swscale_version) ) {
+ pAV->swscaleVersion = sp_swscale_version();
+ } else {
+ pAV->swscaleVersion = 0;
+ }
+
// NOTE: We keep code on using 1 a/v frame per decoding cycle now.
// This is compatible w/ OpenAL's alBufferData(..)
@@ -672,8 +716,8 @@ JNIEXPORT jlong JNICALL FF_FUNC(createInstance0)
}
if(pAV->verbose) {
- fprintf(stderr, "Info: Has swresample %d, device %d\n",
- AV_HAS_API_SWRESAMPLE(pAV), HAS_FUNC(sp_avdevice_register_all));
+ fprintf(stderr, "Info: Has device %d, swresample %d, swscale %d, \n",
+ HAS_FUNC(sp_avdevice_register_all), AV_HAS_API_SWRESAMPLE(pAV), AV_HAS_API_SWSCALE(pAV));
}
return (jlong) (intptr_t) pAV;
}
@@ -803,6 +847,90 @@ static int64_t getFrameNum(const AVCodecContext *avctx) {
}
#endif
+static int createOpenedAVCodecContext(JNIEnv* env, FFMPEGToolBasicAV_t *pAV, int32_t id,
+ AVStream* pStream,
+ AVCodecParameters** ppCodecPar,
+ AVCodec** ppCodec,
+ char* codecName, int codecNameSize,
+ AVCodecContext** ppCodecCtx)
+{
+ *ppCodecPar = pStream->codecpar;
+ AVCodecParameters* pCodecPar = *ppCodecPar;
+
+ if(AVMEDIA_TYPE_VIDEO == pCodecPar->codec_type) {
+ if (pCodecPar->bit_rate) {
+ // FIXME: Libav Binary compatibility! JAU01
+ pAV->bps_video = pCodecPar->bit_rate;
+ }
+ } else if(AVMEDIA_TYPE_AUDIO == pCodecPar->codec_type) {
+ // FIXME: Libav Binary compatibility! JAU01
+ if (pCodecPar->bit_rate) {
+ pAV->bps_audio = pCodecPar->bit_rate;
+ }
+ }
+
+ // Find the decoder for the stream
+ *ppCodec=sp_avcodec_find_decoder(pCodecPar->codec_id);
+ if(*ppCodec==NULL) {
+ JoglCommon_throwNewRuntimeException(env, "Couldn't find codec for codec_id %d", pCodecPar->codec_id);
+ return -1;
+ }
+ AVCodec* pCodec = *ppCodec;
+
+ // Allocate the decoder context for the stream
+ *ppCodecCtx = sp_avcodec_alloc_context3(pCodec);
+ if(*ppCodecCtx==NULL) {
+ JoglCommon_throwNewRuntimeException(env, "Couldn't allocate decoder context for codec_id %d", pCodecPar->codec_id);
+ return -1;
+ }
+ AVCodecContext* pCodecCtx = *ppCodecCtx;
+
+ int res = sp_avcodec_parameters_to_context(pCodecCtx, pCodecPar);
+ if(res<0) {
+ JoglCommon_throwNewRuntimeException(env, "Couldn't copy codec-par to context");
+ return -1;
+ }
+ pCodecCtx->pkt_timebase = pStream->time_base;
+
+ // Customize ..
+ // pCodecCtx->thread_count=2;
+ // pCodecCtx->thread_type=FF_THREAD_FRAME|FF_THREAD_SLICE; // Decode more than one frame at once
+ pCodecCtx->thread_count=0;
+ pCodecCtx->thread_type=0;
+ pCodecCtx->workaround_bugs=FF_BUG_AUTODETECT;
+ pCodecCtx->skip_frame=AVDISCARD_DEFAULT;
+
+ if(AVMEDIA_TYPE_AUDIO == pCodecPar->codec_type) {
+ // Note: OpenAL well supports n-channel by now (SOFT),
+ // however - AFAIK AV_SAMPLE_FMT_S16 would allow no conversion!
+ pCodecCtx->request_sample_fmt=AV_SAMPLE_FMT_S16;
+ }
+
+ sp_avcodec_string(codecName, codecNameSize, pCodecCtx, 0);
+
+ // Open codec
+ MY_MUTEX_LOCK(env, mutex_avcodec_openclose);
+ {
+ res = sp_avcodec_open2(pCodecCtx, pCodec, NULL);
+ }
+ MY_MUTEX_UNLOCK(env, mutex_avcodec_openclose);
+ if( 0 > res ) {
+ return res; // error
+ }
+ // OK
+ if(pAV->verbose) {
+ fprintf(stderr, "CreateOpenCtx: Stream[%d]: Ctx Time Base: %d/%d\n", id, pCodecCtx->time_base.num, pCodecCtx->time_base.den);
+ }
+ if(AVMEDIA_TYPE_VIDEO == pCodecPar->codec_type || AVMEDIA_TYPE_SUBTITLE == pCodecPar->codec_type) {
+ // Hack to correct wrong frame rates that seem to be generated by some codecs
+ // FIXME: Libav Binary compatibility! JAU01
+ if(pCodecCtx->time_base.num>1000 && pCodecCtx->time_base.den==1) {
+ pCodecCtx->time_base.den=1000;
+ fprintf(stderr, "CreateOpenCtx: Stream[%d]: Ctx Time Base: FIX %d/%d\n", id, pCodecCtx->time_base.num, pCodecCtx->time_base.den);
+ }
+ }
+ return 0;
+}
JNIEXPORT void JNICALL FF_FUNC(setStream0)
(JNIEnv *env, jobject instance, jlong ptr, jstring jURL, jboolean jIsCameraInput,
jint vid, jstring jSizeS, jint vWidth, jint vHeight, jint vRate,
@@ -1007,59 +1135,14 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
return;
}
- // Get a pointer to the codec context for the audio stream
- // FIXME: Libav Binary compatibility! JAU01
- pAV->pACodecPar=pAV->pAStream->codecpar;
-
- // FIXME: Libav Binary compatibility! JAU01
- if (pAV->pACodecPar->bit_rate) {
- pAV->bps_audio = pAV->pACodecPar->bit_rate;
- }
-
- // Find the decoder for the audio stream
- pAV->pACodec=sp_avcodec_find_decoder(pAV->pACodecPar->codec_id);
- if(pAV->pACodec==NULL) {
- JoglCommon_throwNewRuntimeException(env, "Couldn't find audio codec for codec_id %d", pAV->pACodecPar->codec_id);
- return;
- }
-
- // Allocate the decoder context for the audio stream
- pAV->pACodecCtx = sp_avcodec_alloc_context3(pAV->pACodec);
- if(pAV->pACodecCtx==NULL) {
- JoglCommon_throwNewRuntimeException(env, "Couldn't allocate audio decoder context for codec_id %d", pAV->pACodecPar->codec_id);
- return;
- }
- res = sp_avcodec_parameters_to_context(pAV->pACodecCtx, pAV->pACodecPar);
- if(res<0) {
- JoglCommon_throwNewRuntimeException(env, "Couldn't copy audio codec-par to context");
+ res = createOpenedAVCodecContext(env, pAV, pAV->aid, pAV->pAStream, &pAV->pACodecPar, &pAV->pACodec,
+ pAV->aCodecStr, sizeof(pAV->aCodecStr), &pAV->pACodecCtx);
+ if( res ) {
+ JoglCommon_throwNewRuntimeException(env, "Couldn't open audio codec %d, %s", pAV->pACodecCtx->codec_id, pAV->aCodecStr);
return;
}
+ pAV->aCodecID = pAV->pACodecCtx->codec_id;
- // Customize ..
- pAV->pACodecCtx->pkt_timebase = pAV->pAStream->time_base;
- // pAV->pACodecCtx->thread_count=2;
- // pAV->pACodecCtx->thread_type=FF_THREAD_FRAME|FF_THREAD_SLICE; // Decode more than one frame at once
- pAV->pACodecCtx->thread_count=0;
- pAV->pACodecCtx->thread_type=0;
- pAV->pACodecCtx->workaround_bugs=FF_BUG_AUTODETECT;
- pAV->pACodecCtx->skip_frame=AVDISCARD_DEFAULT;
-
- // Note: OpenAL well supports n-channel by now (SOFT),
- // however - AFAIK AV_SAMPLE_FMT_S16 would allow no conversion!
- pAV->pACodecCtx->request_sample_fmt=AV_SAMPLE_FMT_S16;
-
- sp_avcodec_string(pAV->acodec, sizeof(pAV->acodec), pAV->pACodecCtx, 0);
-
- // Open codec
- MY_MUTEX_LOCK(env, mutex_avcodec_openclose);
- {
- res = sp_avcodec_open2(pAV->pACodecCtx, pAV->pACodec, NULL);
- }
- MY_MUTEX_UNLOCK(env, mutex_avcodec_openclose);
- if(res<0) {
- JoglCommon_throwNewRuntimeException(env, "Couldn't open audio codec %d, %s", pAV->pACodecCtx->codec_id, pAV->acodec);
- return;
- }
// try to shape audio channel-layout on fixed audio channel-count
#if LIBAVCODEC_VERSION_MAJOR < 59
pAV->aChannels = pAV->pACodecCtx->channels;
@@ -1105,7 +1188,6 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
pAV->aSampleFmtOut = pAV->aSampleFmt;
pAV->aChannelsOut = pAV->aChannels;
pAV->aSampleRateOut = pAV->aSampleRate;
-
if( ( AV_HAS_API_SWRESAMPLE(pAV) ) &&
( pAV->aSampleFmt != AV_SAMPLE_FMT_S16 ||
( 0 != aPrefSampleRate && pAV->aSampleRate != aPrefSampleRate ) ||
@@ -1196,63 +1278,14 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
}
if(0<=pAV->vid) {
- // Get a pointer to the codec context for the video stream
- // FIXME: Libav Binary compatibility! JAU01
- pAV->pVCodecPar = pAV->pVStream->codecpar;
- #if 0
- pAV->pVCodecCtx->get_format = my_get_format;
- #endif
-
- if (pAV->pVCodecPar->bit_rate) {
- // FIXME: Libav Binary compatibility! JAU01
- pAV->bps_video = pAV->pVCodecPar->bit_rate;
- }
-
- // Find the decoder for the video stream
- pAV->pVCodec=sp_avcodec_find_decoder(pAV->pVCodecPar->codec_id);
- if(pAV->pVCodec==NULL) {
- JoglCommon_throwNewRuntimeException(env, "Couldn't find video codec for codec_id %d", pAV->pVCodecPar->codec_id);
+ res = createOpenedAVCodecContext(env, pAV, pAV->vid, pAV->pVStream, &pAV->pVCodecPar, &pAV->pVCodec,
+ pAV->vCodecStr, sizeof(pAV->vCodecStr), &pAV->pVCodecCtx);
+ if( res ) {
+ JoglCommon_throwNewRuntimeException(env, "Couldn't open video codec %d, %s", pAV->pVCodecCtx->codec_id, pAV->vCodecStr);
return;
}
+ pAV->vCodecID = pAV->pVCodecCtx->codec_id;
- // Allocate the decoder context for the video stream
- pAV->pVCodecCtx = sp_avcodec_alloc_context3(pAV->pVCodec);
- if(pAV->pVCodecCtx==NULL) {
- JoglCommon_throwNewRuntimeException(env, "Couldn't allocate video decoder context for codec_id %d", pAV->pVCodecPar->codec_id);
- return;
- }
- res = sp_avcodec_parameters_to_context(pAV->pVCodecCtx, pAV->pVCodecPar);
- if(res<0) {
- JoglCommon_throwNewRuntimeException(env, "Couldn't copy video codec-par to context");
- return;
- }
- // Customize ..
- pAV->pVCodecCtx->pkt_timebase = pAV->pVStream->time_base;
- // pAV->pVCodecCtx->thread_count=2;
- // pAV->pVCodecCtx->thread_type=FF_THREAD_FRAME|FF_THREAD_SLICE; // Decode more than one frame at once
- pAV->pVCodecCtx->thread_count=0;
- pAV->pVCodecCtx->thread_type=0;
- pAV->pVCodecCtx->workaround_bugs=FF_BUG_AUTODETECT;
- pAV->pVCodecCtx->skip_frame=AVDISCARD_DEFAULT;
-
- sp_avcodec_string(pAV->vcodec, sizeof(pAV->vcodec), pAV->pVCodecCtx, 0);
-
- // Open codec
- MY_MUTEX_LOCK(env, mutex_avcodec_openclose);
- {
- res = sp_avcodec_open2(pAV->pVCodecCtx, pAV->pVCodec, NULL);
- }
- MY_MUTEX_UNLOCK(env, mutex_avcodec_openclose);
- if(res<0) {
- JoglCommon_throwNewRuntimeException(env, "Couldn't open video codec %d, %s", pAV->pVCodecCtx->codec_id, pAV->vcodec);
- return;
- }
-
- // Hack to correct wrong frame rates that seem to be generated by some codecs
- // FIXME: Libav Binary compatibility! JAU01
- if(pAV->pVCodecCtx->time_base.num>1000 && pAV->pVCodecCtx->time_base.den==1) {
- pAV->pVCodecCtx->time_base.den=1000;
- }
// FIXME: Libav Binary compatibility! JAU01
if( pAV->pVStream->avg_frame_rate.den && pAV->pVStream->avg_frame_rate.num ) {
pAV->fps = my_av_q2f(pAV->pVStream->avg_frame_rate);
@@ -1335,50 +1368,13 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
}
if(0<=pAV->sid) {
- // Get a pointer to the codec context for the video stream
- // FIXME: Libav Binary compatibility! JAU01
- pAV->pSCodecPar = pAV->pSStream->codecpar;
- #if 0
- pAV->pSCodecCtx->get_format = my_get_format;
- #endif
-
- // Find the decoder for the video stream
- pAV->pSCodec=sp_avcodec_find_decoder(pAV->pSCodecPar->codec_id);
- if(pAV->pSCodec==NULL) {
- JoglCommon_throwNewRuntimeException(env, "Couldn't find subtitle codec for codec_id %d", pAV->pSCodecPar->codec_id);
- return;
- }
-
- // Allocate the decoder context for the video stream
- pAV->pSCodecCtx = sp_avcodec_alloc_context3(pAV->pSCodec);
- if(pAV->pSCodecCtx==NULL) {
- JoglCommon_throwNewRuntimeException(env, "Couldn't allocate subtitle decoder context for codec_id %d", pAV->pSCodecPar->codec_id);
- return;
- }
- res = sp_avcodec_parameters_to_context(pAV->pSCodecCtx, pAV->pSCodecPar);
- if(res<0) {
- JoglCommon_throwNewRuntimeException(env, "Couldn't copy video codec-par to context");
- return;
- }
- // Customize ..
- pAV->pSCodecCtx->pkt_timebase = pAV->pSStream->time_base;
- pAV->pVCodecCtx->thread_count=0;
- pAV->pVCodecCtx->thread_type=0;
- pAV->pVCodecCtx->workaround_bugs=FF_BUG_AUTODETECT;
- pAV->pVCodecCtx->skip_frame=AVDISCARD_DEFAULT;
-
- sp_avcodec_string(pAV->scodec, sizeof(pAV->scodec), pAV->pSCodecCtx, 0);
-
- // Open codec
- MY_MUTEX_LOCK(env, mutex_avcodec_openclose);
- {
- res = sp_avcodec_open2(pAV->pSCodecCtx, pAV->pSCodec, NULL);
- }
- MY_MUTEX_UNLOCK(env, mutex_avcodec_openclose);
- if(res<0) {
- JoglCommon_throwNewRuntimeException(env, "Couldn't open subtitle codec %d, %s", pAV->pSCodecCtx->codec_id, pAV->scodec);
+ res = createOpenedAVCodecContext(env, pAV, pAV->sid, pAV->pSStream, &pAV->pSCodecPar, &pAV->pSCodec,
+ pAV->sCodecStr, sizeof(pAV->sCodecStr), &pAV->pSCodecCtx);
+ if( res ) {
+ JoglCommon_throwNewRuntimeException(env, "Couldn't open subtitle codec %d, %s", pAV->pSCodecCtx->codec_id, pAV->sCodecStr);
return;
}
+ pAV->sCodecID = pAV->pSCodecCtx->codec_id;
}
pAV->vPTS=0;
pAV->aPTS=0;
@@ -1390,16 +1386,18 @@ JNIEXPORT void JNICALL FF_FUNC(setStream0)
}
JNIEXPORT void JNICALL FF_FUNC(setGLFuncs0)
- (JNIEnv *env, jobject instance, jlong ptr, jlong jProcAddrGLTexSubImage2D, jlong jProcAddrGLGetError, jlong jProcAddrGLFlush, jlong jProcAddrGLFinish,
- jlong jProcAddrGLEnable, jlong jProcAddrGLBindTexture)
+ (JNIEnv *env, jobject instance, jlong ptr, jlong jProcAddrGLTexImage2D, jlong jProcAddrGLTexSubImage2D, jlong jProcAddrGLGetError, jlong jProcAddrGLFlush, jlong jProcAddrGLFinish,
+ jlong jProcAddrGLEnable, jlong jProcAddrGLBindTexture, jboolean jHasNPOT)
{
FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr));
+ pAV->procAddrGLTexImage2D = (PFNGLTEXIMAGE2DPROC) (intptr_t)jProcAddrGLTexImage2D;
pAV->procAddrGLTexSubImage2D = (PFNGLTEXSUBIMAGE2DPROC) (intptr_t)jProcAddrGLTexSubImage2D;
pAV->procAddrGLGetError = (PFNGLGETERRORPROC) (intptr_t)jProcAddrGLGetError;
pAV->procAddrGLFlush = (PFNGLFLUSH) (intptr_t)jProcAddrGLFlush;
pAV->procAddrGLFinish = (PFNGLFINISH) (intptr_t)jProcAddrGLFinish;
pAV->procAddrGLEnable = (PFNGLENABLE) (intptr_t)jProcAddrGLEnable;
pAV->procAddrGLBindTexture = (PFNGLBINDTEXTURE) (intptr_t)jProcAddrGLBindTexture;
+ pAV->hasNPOT = jHasNPOT == JNI_TRUE;
}
#if 0
@@ -1412,7 +1410,8 @@ JNIEXPORT void JNICALL FF_FUNC(setGLFuncs0)
JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0)
(JNIEnv *env, jobject instance, jlong ptr, jint vTexTarget, jint vTexID, jint vTexFmt, jint vTexType,
- jint sTexTarget, jint sTexID)
+ jint sTexTarget, jint sTexID, jint sTexWidthPre, jint sTexHeightPre,
+ jobject sTexObj, jbooleanArray sTexUsed)
{
FFMPEGToolBasicAV_t *pAV = (FFMPEGToolBasicAV_t *)((void *)((intptr_t)ptr));
if( 0 == pAV->ready ) {
@@ -1803,11 +1802,11 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0)
if( got_sub && pAV->packet->data ) {
got_sub2 = 1; // OK
} else {
- // !got_sub && data: EAGAIN
- // !got_sub && !data: EOF
- // got_sub && !data: pending
if( pAV->verbose ) {
- fprintf(stderr, "S-P: EAGAIN, EOF or Pending\n");
+ const int isEAGAIN = !got_sub && pAV->packet->data;
+ const int isEOF = !got_sub && !pAV->packet->data;
+ const int isPending = got_sub && !pAV->packet->data;
+ fprintf(stderr, "S-P: EAGAIN %d, EOF %d or Pending %d\n", isEAGAIN, isEOF, isPending);
}
}
}
@@ -1816,61 +1815,159 @@ JNIEXPORT jint JNICALL FF_FUNC(readNextPacket0)
if( AV_NOPTS_VALUE != sub.pts ) {
sPTS = my_av_q2i32( sub.pts * 1000, AV_TIME_BASE_Q);
sStart = sPTS + sub.start_display_time;
- sEnd = sPTS + sub.end_display_time;
+ if( sub.end_display_time < UINT32_MAX - (uint32_t)sPTS ) {
+ sEnd = sPTS + sub.end_display_time;
+ } else {
+ sEnd = INT32_MAX;
+ }
+ }
+ // Aggregated texture over all AVSubtitleRect
+ int subMinX=INT_MAX, subMinY=INT_MAX, subWidth=0, subHeight=0;
+ int subTextCount=0, subASSCount=0, subImgCount=0;
+ for(unsigned int sub_idx=0; sub_idx<sub.num_rects; ++sub_idx) {
+ AVSubtitleRect* r = sub.rects[sub_idx];
+ if( SUBTITLE_TEXT == r->type && NULL != r->text ) {
+ ++subTextCount;
+ } else if( SUBTITLE_ASS == r->type && NULL != r->ass ) {
+ ++subASSCount;
+ } else if( SUBTITLE_BITMAP == r->type ) {
+ ++subImgCount;
+ const int x = my_clip(r->x, 0, pAV->vWidth );
+ const int y = my_clip(r->y, 0, pAV->vHeight);
+ subMinX = my_min(subMinX, x);
+ subMinY = my_min(subMinY, y);
+ subWidth = my_max(subWidth, my_clip(r->w, 0, pAV->vWidth ));
+ subHeight = my_max(subHeight, my_clip(r->h, 0, pAV->vHeight));
+ }
+ }
+ if( 0 == subImgCount ) {
+ subMinX = 0;
+ subMinY = 0;
+ }
+ const GLenum texIFmt = GL_RGBA;
+ const GLenum texType = GL_UNSIGNED_BYTE;
+ int32_t texWidth=0, texHeight=0;
+ if( AV_HAS_API_SWSCALE(pAV) && 0 != sTexID && subWidth > 0 && subHeight > 0 ) {
+ if( !pAV->hasNPOT ) {
+ texWidth = (int32_t)roundToPowerOf2((uint32_t)subWidth);
+ texHeight = (int32_t)roundToPowerOf2((uint32_t)subHeight);
+ } else {
+ texWidth = subWidth;
+ texHeight = subHeight;
+ }
+ if( texWidth > 0 && texHeight > 0) {
+ // New RGBA Packed texture allocation
+ const size_t sPixelsSize = subWidth * subHeight * 4;
+ if( NULL == pAV->sPixels || ( NULL != pAV->sPixels && pAV->sPixelsSize < sPixelsSize ) ) {
+ // new-alloc or realloc
+ if( NULL != pAV->sPixels ) {
+ free( pAV->sPixels );
+ pAV->sPixels = NULL;
+ pAV->sPixelsSize = 0;
+ }
+ pAV->sPixels = malloc( sPixelsSize );
+ pAV->sPixelsSize = sPixelsSize;
+ }
+
+ if( NULL != pAV->procAddrGLEnable ) {
+ pAV->procAddrGLEnable(sTexTarget);
+ }
+ pAV->procAddrGLBindTexture(sTexTarget, sTexID);
+
+ if( 0 && texWidth <= sTexWidthPre && texHeight <= sTexHeightPre ) {
+ // Buggy on AMD GPU w/ shared ctx, hence disabled!
+ // Shows mangled texture content ...
+ //
+ // keep pre-alloc texture
+ texWidth = sTexWidthPre;
+ texHeight = sTexHeightPre;
+ } else {
+ pAV->procAddrGLTexImage2D(
+ sTexTarget, // target
+ 0, // level
+ texIFmt, // internal format
+ texWidth, // width
+ texHeight, // height
+ 0, // border
+ texIFmt, texType,
+ NULL); // pixels -- will be provided later per sub_rect
+ }
+ }
+ }
+ if( pAV->verbose ) {
+ const int empty = 0 == sub.num_rects;
+ fprintf(stderr, "Sub[START, empty %d, pts[%"PRId64" [%"PRIu32"..%"PRIu32"], %d [%d..%d]]: count[text %d, ass %d, img %d], tex %d/%d, all %d/%d %dx%d, tex %dx%d, vid %dx%d\n",
+ empty, sub.pts, sub.start_display_time, sub.end_display_time, sPTS, sStart, sEnd,
+ subTextCount, subASSCount, subImgCount,
+ sTexID, (NULL != sTexObj),
+ subMinX, subMinY, subWidth, subHeight, texWidth, texHeight,
+ pAV->vWidth, pAV->vHeight);
}
- for(unsigned int i=0; i<sub.num_rects; ++i) {
- AVSubtitleRect* r = sub.rects[i];
+ if( 0 == sub.num_rects ) {
+ (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, ffmpeg_jni_mid_pushSubtitleEmpty, sStart, sEnd);
+ JoglCommon_ExceptionCheck1_throwNewRuntimeException(env, "FFmpeg: Exception occured at pushSubtitleEmpty(..)");
+ }
+ for(unsigned int sub_idx=0; sub_idx<sub.num_rects; ++sub_idx) {
+ AVSubtitleRect* r = sub.rects[sub_idx];
if( SUBTITLE_TEXT == r->type && NULL != r->text ) {
if( pAV->verbose ) {
- fprintf(stderr, "S[f %d, i %d, pts %d[%d..%d]]: %s\n", (int)r->type, i, r->text, sPTS, sStart, sEnd);
+ fprintf(stderr, "Sub[TEXT, f %d, i %d, pts %d[%d..%d]]: %s\n", (int)r->type, sub_idx, sPTS, sStart, sEnd, r->text);
}
- (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, ffmpeg_jni_mid_pushSubtitleText, (*env)->NewStringUTF(env, r->text), sPTS, sStart, sEnd);
+ (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, ffmpeg_jni_mid_pushSubtitleText, (*env)->NewStringUTF(env, r->text), sStart, sEnd);
JoglCommon_ExceptionCheck1_throwNewRuntimeException(env, "FFmpeg: Exception occured at pushSubtitleText(..)");
} else if( SUBTITLE_ASS == r->type && NULL != r->ass ) {
if( pAV->verbose ) {
- fprintf(stderr, "S[f %d, i %d, pts %d[%d..%d]]: %s\n", (int)r->type, i, r->ass, sPTS, sStart, sEnd);
+ fprintf(stderr, "Sub[ASS, f %d, i %d, pts %d[%d..%d]]: %s\n", (int)r->type, sub_idx, sPTS, sStart, sEnd, r->ass);
}
- (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, ffmpeg_jni_mid_pushSubtitleASS, (*env)->NewStringUTF(env, r->ass), sPTS, sStart, sEnd);
+ (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, ffmpeg_jni_mid_pushSubtitleASS, (*env)->NewStringUTF(env, r->ass), sStart, sEnd);
JoglCommon_ExceptionCheck1_throwNewRuntimeException(env, "FFmpeg: Exception occured at pushSubtitleASS(..)");
- } else {
- /**
- *
- * - AV_PIX_FMT_PAL8 8 bits with AV_PIX_FMT_RGB32 palette
- *
- * - SUBTITLE_BITMAP images are special in the sense that they
- * are like PAL8 images. first pointer to data, second to
- * palette. This makes the size calculation match this.
- * size_t buf_size = src_rect->type == SUBTITLE_BITMAP && j == 1 ? AVPALETTE_SIZE : src_rect->h * src_rect->linesize[j];
- * - linesize[0] > 0
- * - linesize[1] == 0 -> AVPALETTE_SIZE
- */
+ } else if( AV_HAS_API_SWSCALE(pAV) && SUBTITLE_BITMAP == r->type ) {
if( pAV->verbose ) {
- int hasText = NULL != r->text;
- int hasASS = NULL != r->ass;
- fprintf(stderr, "S[f %d, i %d, pts[%d [%d..%d]]: text %d, ass %d, %d/%d %dx%d c %d, lsz[%d, %d, %d, %d], data[%d, %d, %d, %d]\n",
- (int)r->type, i, sPTS, sStart, sEnd, hasText, hasASS,
- r->x, r->y, r->w, r->h, r->nb_colors,
+ fprintf(stderr, "Sub[IMG, f %d, i %d, pts[%"PRId64" [%"PRIu32"..%"PRIu32"], %d [%d..%d]]: tex %d/%d, r %d/%d %dx%d, all %d/%d %dx%d, tex %dx%d, vid %dx%d, c %d, lsz[%d, %d, %d, %d], data[%d, %d, %d, %d]\n",
+ (int)r->type, sub_idx, sub.pts, sub.start_display_time, sub.end_display_time, sPTS, sStart, sEnd, sTexID, (NULL != sTexObj),
+ r->x, r->y, r->w, r->h, subMinX, subMinY, subWidth, subHeight, texWidth, texHeight,
+ pAV->vWidth, pAV->vHeight,
+ r->nb_colors,
r->linesize[0], r->linesize[1], r->linesize[2], r->linesize[3],
NULL != r->data[0], NULL != r->data[1], NULL != r->data[2], NULL != r->data[3]);
}
- if( 0 != sTexID ) {
- if( NULL != pAV->procAddrGLEnable ) {
- pAV->procAddrGLEnable(sTexTarget);
+ int32_t x, y, width, height;
+ {
+ x = my_clip(r->x, 0, pAV->vWidth );
+ y = my_clip(r->y, 0, pAV->vHeight);
+ width = my_clip(r->w, 0, pAV->vWidth );
+ height = my_clip(r->h, 0, pAV->vHeight);
+ }
+ if( texWidth > 0 && texHeight > 0) {
+ pAV->sScaleCtx = sp_sws_getCachedContext(pAV->sScaleCtx,
+ width, height, AV_PIX_FMT_PAL8,
+ width, height, AV_PIX_FMT_RGBA, 0, NULL, NULL, NULL);
+ if ( NULL == pAV->sScaleCtx ) {
+ fprintf(stderr, "S-P: Failed to init scale ctx\n");
+ } else {
+ int stride[/*4*/] = { width*4, 0, 0, 0 };
+ uint8_t* pixels[/*4*/] = { pAV->sPixels, NULL, NULL, NULL };
+ sp_sws_scale(pAV->sScaleCtx, (const uint8_t * const *)r->data, r->linesize, 0, height,
+ pixels, stride);
+
+ pAV->procAddrGLTexSubImage2D(sTexTarget, 0,
+ x-subMinX, y-subMinY,
+ width, height,
+ texIFmt, texType, pixels[0]);
+ // pAV->procAddrGLFinish(); // No sync required and too expensive for multiple player
+ pAV->procAddrGLFlush(); // No sync required, but be nice
}
- pAV->procAddrGLBindTexture(sTexTarget, sTexID);
-
- const GLenum texIFmt = GL_RGBA;
- const GLenum texType = GL_UNSIGNED_BYTE;
-
- // pAV->procAddrGLTexSubImage2D(sTexTarget, 0,
- // 0, 0,
- // pAV->vTexWidth[0], pAV->pVCodecCtx->height,
- // texIFmt, texType, pAV->pVFrame->data[0] + p_offset[0]);
}
- (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, ffmpeg_jni_mid_pushSubtitleTex,
- sTexID, r->x, r->y, r->w, r->h, sPTS, sStart, sEnd);
- JoglCommon_ExceptionCheck1_throwNewRuntimeException(env, "FFmpeg: Exception occured at pushSubtitleTex(..)");
- }
+ } // bitmap sub-rect, only avail with swscale
+ } // for all sub-rects
+ if( AV_HAS_API_SWSCALE(pAV) && 0 < subImgCount && texWidth > 0 && texHeight > 0) {
+ if( NULL != sTexUsed ) {
+ jboolean sTexUsedVal[] = { JNI_TRUE };
+ (*env)->SetBooleanArrayRegion(env, sTexUsed, 0, 1, sTexUsedVal);
+ }
+ (*env)->CallVoidMethod(env, pAV->ffmpegMediaPlayer, ffmpeg_jni_mid_pushSubtitleTex,
+ sTexObj, sTexID, (jint)texWidth, (jint)texHeight, subMinX, subMinY, subWidth, subHeight, sStart, sEnd);
+ JoglCommon_ExceptionCheck1_throwNewRuntimeException(env, "FFmpeg: Exception occured at pushSubtitleTex(..)");
}
pAV->sPTS = sPTS;
}
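
For reference, the bitmap-subtitle path above aggregates all SUBTITLE_BITMAP rects into one region: the smallest clipped x/y origin and the largest clipped width/height, which is later reported via pushSubtitleTex (and rounded up to a power of two when NPOT textures are unavailable, see the helpers in ffmpeg_tool.h below). An illustrative Java re-statement of that aggregation (not part of the patch):

    // Illustrative re-statement of the native bitmap sub-rect aggregation;
    // rects are {x, y, w, h} in pixels, clipped to the video dimensions.
    final class SubRectAggregate {
        /** Returns {minX, minY, width, height} as used for the subtitle texture region. */
        static int[] aggregate(final int[][] rects, final int vWidth, final int vHeight) {
            int minX = Integer.MAX_VALUE, minY = Integer.MAX_VALUE, width = 0, height = 0;
            for (final int[] r : rects) {
                minX   = Math.min(minX,   clip(r[0], 0, vWidth));
                minY   = Math.min(minY,   clip(r[1], 0, vHeight));
                width  = Math.max(width,  clip(r[2], 0, vWidth));
                height = Math.max(height, clip(r[3], 0, vHeight));
            }
            if( 0 == rects.length ) { minX = 0; minY = 0; }
            return new int[] { minX, minY, width, height };
        }
        static int clip(final int v, final int min, final int max) { return Math.min(max, Math.max(min, v)); }
    }
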
diff --git a/src/jogl/native/libav/ffmpeg_static.c b/src/jogl/native/libav/ffmpeg_static.c
index a8db0652c..54f33ded0 100644
--- a/src/jogl/native/libav/ffmpeg_static.c
+++ b/src/jogl/native/libav/ffmpeg_static.c
@@ -74,7 +74,7 @@ JNIEXPORT jboolean JNICALL Java_jogamp_opengl_util_av_impl_FFMPEGStaticNatives_i
ffmpeg_jni_mid_pushSubtitleTex = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "pushSubtitleTex", "(Ljava/lang/Object;IIIIIIIII)V");
ffmpeg_jni_mid_pushSubtitleEmpty = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "pushSubtitleEmpty", "(II)V");
ffmpeg_jni_mid_updateAttributes = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "updateAttributes",
- "(Ljava/lang/String;[I[Ljava/lang/String;I[I[Ljava/lang/String;I[I[Ljava/lang/String;IIIIIIFIIILjava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
+ "(Ljava/lang/String;[I[Ljava/lang/String;I[I[Ljava/lang/String;I[I[Ljava/lang/String;IIIIIIFIIILjava/lang/String;Ljava/lang/String;Ljava/lang/String;III)V");
ffmpeg_jni_mid_setIsGLOriented = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "setIsGLOriented", "(Z)V");
ffmpeg_jni_mid_setupFFAttributes = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "setupFFAttributes", "(IIIIIIIIIIIIIII)V");
ffmpeg_jni_mid_isAudioFormatSupported = (*env)->GetMethodID(env, ffmpegMediaPlayerClazz, "isAudioFormatSupported", "(III)Z");
diff --git a/src/jogl/native/libav/ffmpeg_tool.h b/src/jogl/native/libav/ffmpeg_tool.h
index be186d818..a8a9266cb 100644
--- a/src/jogl/native/libav/ffmpeg_tool.h
+++ b/src/jogl/native/libav/ffmpeg_tool.h
@@ -48,6 +48,7 @@
#include "libavutil/pixdesc.h"
#include "libavutil/samplefmt.h"
#include "libswresample/swresample.h"
+#include "libswscale/swscale.h"
#ifndef LIBSWRESAMPLE_VERSION_MAJOR
#define LIBSWRESAMPLE_VERSION_MAJOR -1
@@ -62,6 +63,7 @@ typedef struct SwrContext SwrContext;
#include <GL/gl.h>
typedef void (APIENTRYP PFNGLTEXSUBIMAGE2DPROC) (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const GLvoid *pixels);
+typedef void (APIENTRYP PFNGLTEXIMAGE2DPROC) (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const GLvoid *pixels);
typedef GLenum (APIENTRYP PFNGLGETERRORPROC) (void);
typedef void (APIENTRYP PFNGLFLUSH) (void);
typedef void (APIENTRYP PFNGLFINISH) (void);
@@ -102,9 +104,26 @@ typedef void (APIENTRYP PFNGLACTIVETEXTURE) (GLenum texture);
/** Since 55.0.0.1 */
#define AV_HAS_API_SWRESAMPLE(pAV) ( ( LIBSWRESAMPLE_VERSION_MAJOR >= 0 ) && ( pAV->swresampleVersion != 0 ) )
+#define AV_HAS_API_SWSCALE(pAV) ( ( LIBSWSCALE_VERSION_MAJOR >= 0 ) && ( pAV->swscaleVersion != 0 ) )
+
#define MAX_INT(a,b) ( (a >= b) ? a : b )
#define MIN_INT(a,b) ( (a <= b) ? a : b )
+static inline int isPowerOf2(uint32_t n) { return n && !(n & (n - 1)); }
+static inline int nextPowerOf2(uint32_t n) {
+ n--;
+ n |= n >> 1;
+ n |= n >> 2;
+ n |= n >> 4;
+ n |= n >> 8;
+ n |= n >> 16;
+ n++;
+ return (n == 0) ? 1 : n; // guard the n == 0 edge case, which would otherwise yield 0 (not a power of 2)
+}
+static inline int roundToPowerOf2(uint32_t n) {
+ return isPowerOf2(n) ? n : nextPowerOf2(n);
+}
+
static inline float my_av_q2f(AVRational a){
return (float)a.num / (float)a.den;
}
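
The power-of-two helpers added above size the subtitle texture on GPUs without NPOT support. A small illustrative Java equivalent of the same bit-smearing technique (not part of the patch):

    // Java equivalent of isPowerOf2()/nextPowerOf2()/roundToPowerOf2() above.
    final class Pow2 {
        static boolean isPowerOf2(final int n) { return n > 0 && 0 == (n & (n - 1)); }
        static int nextPowerOf2(int n) {
            n--;                       // so that exact powers of two are not doubled
            n |= n >> 1;  n |= n >> 2; // smear the highest set bit downwards ...
            n |= n >> 4;  n |= n >> 8;
            n |= n >> 16;
            n++;                       // ... then step up to the next power of two
            return 0 == n ? 1 : n;     // guard n == 0, which would otherwise yield 0
        }
        static int roundToPowerOf2(final int n) { return isPowerOf2(n) ? n : nextPowerOf2(n); }
        public static void main(final String[] args) {
            System.out.println(roundToPowerOf2(33)); // 64
            System.out.println(roundToPowerOf2(64)); // 64
        }
    }
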
@@ -118,6 +137,12 @@ static inline int my_align(int v, int a){
return ( v + a - 1 ) & ~( a - 1 );
}
+static inline int32_t my_min(int a, int b) { return a < b ? a : b; }
+static inline int32_t my_max(int a, int b) { return a > b ? a : b; }
+static inline int32_t my_clip(int a, int min, int max) {
+ return my_min(max, my_max(min, a));
+}
+
#if LIBAVCODEC_VERSION_MAJOR < 59
typedef void* AVChannelLayoutPtr;
#else
@@ -148,19 +173,24 @@ typedef struct {
uint32_t avutilVersion;
uint32_t avdeviceVersion;
uint32_t swresampleVersion;
+ uint32_t swscaleVersion;
PFNGLTEXSUBIMAGE2DPROC procAddrGLTexSubImage2D;
+ PFNGLTEXIMAGE2DPROC procAddrGLTexImage2D;
PFNGLGETERRORPROC procAddrGLGetError;
PFNGLFLUSH procAddrGLFlush;
PFNGLFINISH procAddrGLFinish;
PFNGLENABLE procAddrGLEnable;
PFNGLBINDTEXTURE procAddrGLBindTexture;
+ int32_t hasNPOT;
AVPacket* packet;
AVFormatContext* pFormatCtx;
uint32_t v_stream_count;
int32_t v_streams[MAX_STREAM_COUNT];
int32_t vid;
+ int32_t vCodecID;
+ char vCodecStr[64];
AVStream* pVStream;
AVCodecParameters* pVCodecPar;
AVCodecContext* pVCodecCtx;
@@ -180,6 +210,8 @@ typedef struct {
uint32_t a_stream_count;
int32_t a_streams[MAX_STREAM_COUNT];
int32_t aid;
+ int32_t aCodecID;
+ char aCodecStr[64];
AVStream* pAStream;
AVCodecParameters* pACodecPar;
AVCodecContext* pACodecCtx;
@@ -204,11 +236,16 @@ typedef struct {
uint32_t s_stream_count;
int32_t s_streams[MAX_STREAM_COUNT];
int32_t sid;
+ int32_t sCodecID;
+ char sCodecStr[64];
AVStream* pSStream;
AVCodecParameters* pSCodecPar;
AVCodecContext* pSCodecCtx;
AVCodec* pSCodec;
int32_t sPTS; // msec - overall last subtitle PTS
+ struct SwsContext *sScaleCtx;
+ void* sPixels;
+ size_t sPixelsSize;
float fps; // frames per seconds
int32_t bps_stream; // bits per seconds
@@ -219,10 +256,6 @@ typedef struct {
int32_t duration; // msec
int32_t start_time; // msec
- char acodec[64];
- char vcodec[64];
- char scodec[64];
-
int32_t ready;
} FFMPEGToolBasicAV_t ;