Diffstat (limited to 'gst/avi')
-rw-r--r--  gst/avi/Makefile.am         45
-rw-r--r--  gst/avi/Makefile.in        863
-rw-r--r--  gst/avi/README              72
-rw-r--r--  gst/avi/avi-ids.h           78
-rw-r--r--  gst/avi/gstavi.c            59
-rw-r--r--  gst/avi/gstavidemux.c     6273
-rw-r--r--  gst/avi/gstavidemux.h      243
-rw-r--r--  gst/avi/gstavimux.c       2180
-rw-r--r--  gst/avi/gstavimux.h        197
-rw-r--r--  gst/avi/gstavisubtitle.c   383
-rw-r--r--  gst/avi/gstavisubtitle.h    39
11 files changed, 10432 insertions, 0 deletions
diff --git a/gst/avi/Makefile.am b/gst/avi/Makefile.am
new file mode 100644
index 0000000..aa75eb4
--- /dev/null
+++ b/gst/avi/Makefile.am
@@ -0,0 +1,45 @@
+plugin_LTLIBRARIES = libgstavi.la
+
+libgstavi_la_SOURCES = \
+ gstavi.c \
+ gstavimux.c \
+ gstavidemux.c \
+ gstavisubtitle.c
+
+noinst_HEADERS = \
+ avi-ids.h \
+ gstavimux.h \
+ gstavidemux.h \
+ gstavisubtitle.h
+
+libgstavi_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
+libgstavi_la_LIBADD = \
+ $(GST_PLUGINS_BASE_LIBS) \
+ $(GST_BASE_LIBS) \
+ $(GST_LIBS) \
+ -lgstriff-@GST_MAJORMINOR@ \
+ -lgstaudio-@GST_MAJORMINOR@ \
+ -lgsttag-@GST_MAJORMINOR@
+
+if USE_DIVX_DRM
+libgstavi_la_CFLAGS += -DDIVX_DRM
+endif
+
+libgstavi_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
+libgstavi_la_LIBTOOLFLAGS = --tag=disable-static
+
+EXTRA_DIST = README
+
+Android.mk: Makefile.am $(BUILT_SOURCES)
+ androgenizer \
+ -:PROJECT libgstavi -:SHARED libgstavi \
+ -:TAGS eng debug \
+ -:REL_TOP $(top_srcdir) -:ABS_TOP $(abs_top_srcdir) \
+ -:SOURCES $(libgstavi_la_SOURCES) \
+ -:CFLAGS $(DEFS) $(DEFAULT_INCLUDES) $(libgstavi_la_CFLAGS) \
+ -:LDFLAGS $(libgstavi_la_LDFLAGS) \
+ $(libgstavi_la_LIBADD) \
+ -ldl \
+ -:PASSTHROUGH LOCAL_ARM_MODE:=arm \
+ LOCAL_MODULE_PATH:='$$(TARGET_OUT)/lib/gstreamer-0.10' \
+ > $@
diff --git a/gst/avi/Makefile.in b/gst/avi/Makefile.in
new file mode 100644
index 0000000..45af373
--- /dev/null
+++ b/gst/avi/Makefile.in
@@ -0,0 +1,863 @@
+# Makefile.in generated by automake 1.11.3 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software
+# Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkglibexecdir = $(libexecdir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+subdir = gst/avi
+DIST_COMMON = README $(noinst_HEADERS) $(srcdir)/Makefile.am \
+ $(srcdir)/Makefile.in
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/common/m4/as-ac-expand.m4 \
+ $(top_srcdir)/common/m4/as-auto-alt.m4 \
+ $(top_srcdir)/common/m4/as-compiler-flag.m4 \
+ $(top_srcdir)/common/m4/as-gcc-inline-assembly.m4 \
+ $(top_srcdir)/common/m4/as-objc.m4 \
+ $(top_srcdir)/common/m4/as-python.m4 \
+ $(top_srcdir)/common/m4/as-scrub-include.m4 \
+ $(top_srcdir)/common/m4/as-version.m4 \
+ $(top_srcdir)/common/m4/ax_create_stdint_h.m4 \
+ $(top_srcdir)/common/m4/gst-arch.m4 \
+ $(top_srcdir)/common/m4/gst-args.m4 \
+ $(top_srcdir)/common/m4/gst-check.m4 \
+ $(top_srcdir)/common/m4/gst-default.m4 \
+ $(top_srcdir)/common/m4/gst-dowhile.m4 \
+ $(top_srcdir)/common/m4/gst-error.m4 \
+ $(top_srcdir)/common/m4/gst-feature.m4 \
+ $(top_srcdir)/common/m4/gst-gettext.m4 \
+ $(top_srcdir)/common/m4/gst-glib2.m4 \
+ $(top_srcdir)/common/m4/gst-package-release-datetime.m4 \
+ $(top_srcdir)/common/m4/gst-platform.m4 \
+ $(top_srcdir)/common/m4/gst-plugin-docs.m4 \
+ $(top_srcdir)/common/m4/gst-plugindir.m4 \
+ $(top_srcdir)/common/m4/gst-x11.m4 \
+ $(top_srcdir)/common/m4/gst.m4 \
+ $(top_srcdir)/common/m4/gtk-doc.m4 \
+ $(top_srcdir)/common/m4/orc.m4 $(top_srcdir)/common/m4/pkg.m4 \
+ $(top_srcdir)/m4/aalib.m4 $(top_srcdir)/m4/esd.m4 \
+ $(top_srcdir)/m4/gconf-2.m4 $(top_srcdir)/m4/gettext.m4 \
+ $(top_srcdir)/m4/gst-fionread.m4 $(top_srcdir)/m4/iconv.m4 \
+ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \
+ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \
+ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \
+ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \
+ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \
+ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \
+ $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+ $(ACLOCAL_M4)
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = $(top_builddir)/config.h
+CONFIG_CLEAN_FILES =
+CONFIG_CLEAN_VPATH_FILES =
+am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
+am__vpath_adj = case $$p in \
+ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
+ *) f=$$p;; \
+ esac;
+am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
+am__install_max = 40
+am__nobase_strip_setup = \
+ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
+am__nobase_strip = \
+ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
+am__nobase_list = $(am__nobase_strip_setup); \
+ for p in $$list; do echo "$$p $$p"; done | \
+ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
+ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
+ if (++n[$$2] == $(am__install_max)) \
+ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
+ END { for (dir in files) print dir, files[dir] }'
+am__base_list = \
+ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
+ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
+am__uninstall_files_from_dir = { \
+ test -z "$$files" \
+ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
+ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \
+ $(am__cd) "$$dir" && rm -f $$files; }; \
+ }
+am__installdirs = "$(DESTDIR)$(plugindir)"
+LTLIBRARIES = $(plugin_LTLIBRARIES)
+am__DEPENDENCIES_1 =
+libgstavi_la_DEPENDENCIES = $(am__DEPENDENCIES_1) \
+ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1)
+am_libgstavi_la_OBJECTS = libgstavi_la-gstavi.lo \
+ libgstavi_la-gstavimux.lo libgstavi_la-gstavidemux.lo \
+ libgstavi_la-gstavisubtitle.lo
+libgstavi_la_OBJECTS = $(am_libgstavi_la_OBJECTS)
+AM_V_lt = $(am__v_lt_@AM_V@)
+am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
+am__v_lt_0 = --silent
+libgstavi_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC \
+ $(libgstavi_la_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link \
+ $(CCLD) $(libgstavi_la_CFLAGS) $(CFLAGS) \
+ $(libgstavi_la_LDFLAGS) $(LDFLAGS) -o $@
+DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)
+depcomp = $(SHELL) $(top_srcdir)/depcomp
+am__depfiles_maybe = depfiles
+am__mv = mv -f
+COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
+ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \
+ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
+ $(AM_CFLAGS) $(CFLAGS)
+AM_V_CC = $(am__v_CC_@AM_V@)
+am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@)
+am__v_CC_0 = @echo " CC " $@;
+AM_V_at = $(am__v_at_@AM_V@)
+am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
+am__v_at_0 = @
+CCLD = $(CC)
+LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
+ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+ $(AM_LDFLAGS) $(LDFLAGS) -o $@
+AM_V_CCLD = $(am__v_CCLD_@AM_V@)
+am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@)
+am__v_CCLD_0 = @echo " CCLD " $@;
+AM_V_GEN = $(am__v_GEN_@AM_V@)
+am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
+am__v_GEN_0 = @echo " GEN " $@;
+SOURCES = $(libgstavi_la_SOURCES)
+DIST_SOURCES = $(libgstavi_la_SOURCES)
+HEADERS = $(noinst_HEADERS)
+ETAGS = etags
+CTAGS = ctags
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+AALIB_CFLAGS = @AALIB_CFLAGS@
+AALIB_CONFIG = @AALIB_CONFIG@
+AALIB_LIBS = @AALIB_LIBS@
+ACLOCAL = @ACLOCAL@
+ACLOCAL_AMFLAGS = @ACLOCAL_AMFLAGS@
+AMTAR = @AMTAR@
+AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
+ANNODEX_CFLAGS = @ANNODEX_CFLAGS@
+ANNODEX_LIBS = @ANNODEX_LIBS@
+AR = @AR@
+AS = @AS@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+BZ2_LIBS = @BZ2_LIBS@
+CAIRO_CFLAGS = @CAIRO_CFLAGS@
+CAIRO_GOBJECT_CFLAGS = @CAIRO_GOBJECT_CFLAGS@
+CAIRO_GOBJECT_LIBS = @CAIRO_GOBJECT_LIBS@
+CAIRO_LIBS = @CAIRO_LIBS@
+CC = @CC@
+CCAS = @CCAS@
+CCASDEPMODE = @CCASDEPMODE@
+CCASFLAGS = @CCASFLAGS@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CXX = @CXX@
+CXXCPP = @CXXCPP@
+CXXDEPMODE = @CXXDEPMODE@
+CXXFLAGS = @CXXFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFAULT_AUDIOSINK = @DEFAULT_AUDIOSINK@
+DEFAULT_AUDIOSRC = @DEFAULT_AUDIOSRC@
+DEFAULT_VIDEOSINK = @DEFAULT_VIDEOSINK@
+DEFAULT_VIDEOSRC = @DEFAULT_VIDEOSRC@
+DEFAULT_VISUALIZER = @DEFAULT_VISUALIZER@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DEPRECATED_CFLAGS = @DEPRECATED_CFLAGS@
+DIRECTSOUND_CFLAGS = @DIRECTSOUND_CFLAGS@
+DIRECTSOUND_LDFLAGS = @DIRECTSOUND_LDFLAGS@
+DIRECTSOUND_LIBS = @DIRECTSOUND_LIBS@
+DLLTOOL = @DLLTOOL@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+DV1394_CFLAGS = @DV1394_CFLAGS@
+DV1394_LIBS = @DV1394_LIBS@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+ERROR_CFLAGS = @ERROR_CFLAGS@
+ERROR_CXXFLAGS = @ERROR_CXXFLAGS@
+ESD_CFLAGS = @ESD_CFLAGS@
+ESD_CONFIG = @ESD_CONFIG@
+ESD_LIBS = @ESD_LIBS@
+EXEEXT = @EXEEXT@
+FFLAGS = @FFLAGS@
+FGREP = @FGREP@
+FLAC_CFLAGS = @FLAC_CFLAGS@
+FLAC_LIBS = @FLAC_LIBS@
+GCONFTOOL = @GCONFTOOL@
+GCONF_CFLAGS = @GCONF_CFLAGS@
+GCONF_LIBS = @GCONF_LIBS@
+GCONF_SCHEMA_CONFIG_SOURCE = @GCONF_SCHEMA_CONFIG_SOURCE@
+GCONF_SCHEMA_FILE_DIR = @GCONF_SCHEMA_FILE_DIR@
+GCOV = @GCOV@
+GCOV_CFLAGS = @GCOV_CFLAGS@
+GCOV_LIBS = @GCOV_LIBS@
+GDK_PIXBUF_CFLAGS = @GDK_PIXBUF_CFLAGS@
+GDK_PIXBUF_LIBS = @GDK_PIXBUF_LIBS@
+GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@
+GETTEXT_PACKAGE = @GETTEXT_PACKAGE@
+GIO_CFLAGS = @GIO_CFLAGS@
+GIO_LIBS = @GIO_LIBS@
+GLIB_CFLAGS = @GLIB_CFLAGS@
+GLIB_EXTRA_CFLAGS = @GLIB_EXTRA_CFLAGS@
+GLIB_LIBS = @GLIB_LIBS@
+GLIB_PREFIX = @GLIB_PREFIX@
+GLIB_REQ = @GLIB_REQ@
+GMSGFMT = @GMSGFMT@
+GMSGFMT_015 = @GMSGFMT_015@
+GREP = @GREP@
+GSTPB_PLUGINS_DIR = @GSTPB_PLUGINS_DIR@
+GSTPB_PREFIX = @GSTPB_PREFIX@
+GST_ALL_LDFLAGS = @GST_ALL_LDFLAGS@
+GST_BASE_CFLAGS = @GST_BASE_CFLAGS@
+GST_BASE_LIBS = @GST_BASE_LIBS@
+GST_CFLAGS = @GST_CFLAGS@
+GST_CHECK_CFLAGS = @GST_CHECK_CFLAGS@
+GST_CHECK_LIBS = @GST_CHECK_LIBS@
+GST_CONTROLLER_CFLAGS = @GST_CONTROLLER_CFLAGS@
+GST_CONTROLLER_LIBS = @GST_CONTROLLER_LIBS@
+GST_CXXFLAGS = @GST_CXXFLAGS@
+GST_GDP_CFLAGS = @GST_GDP_CFLAGS@
+GST_GDP_LIBS = @GST_GDP_LIBS@
+GST_LEVEL_DEFAULT = @GST_LEVEL_DEFAULT@
+GST_LIBS = @GST_LIBS@
+GST_LICENSE = @GST_LICENSE@
+GST_LT_LDFLAGS = @GST_LT_LDFLAGS@
+GST_MAJORMINOR = @GST_MAJORMINOR@
+GST_OPTION_CFLAGS = @GST_OPTION_CFLAGS@
+GST_OPTION_CXXFLAGS = @GST_OPTION_CXXFLAGS@
+GST_PACKAGE_NAME = @GST_PACKAGE_NAME@
+GST_PACKAGE_ORIGIN = @GST_PACKAGE_ORIGIN@
+GST_PLUGINS_ALL = @GST_PLUGINS_ALL@
+GST_PLUGINS_BASE_CFLAGS = @GST_PLUGINS_BASE_CFLAGS@
+GST_PLUGINS_BASE_DIR = @GST_PLUGINS_BASE_DIR@
+GST_PLUGINS_BASE_LIBS = @GST_PLUGINS_BASE_LIBS@
+GST_PLUGINS_DIR = @GST_PLUGINS_DIR@
+GST_PLUGINS_SELECTED = @GST_PLUGINS_SELECTED@
+GST_PLUGIN_LDFLAGS = @GST_PLUGIN_LDFLAGS@
+GST_PREFIX = @GST_PREFIX@
+GST_TOOLS_DIR = @GST_TOOLS_DIR@
+GTKDOC_CHECK = @GTKDOC_CHECK@
+GTK_CFLAGS = @GTK_CFLAGS@
+GTK_LIBS = @GTK_LIBS@
+GTK_X11_CFLAGS = @GTK_X11_CFLAGS@
+GTK_X11_LIBS = @GTK_X11_LIBS@
+GUDEV_CFLAGS = @GUDEV_CFLAGS@
+GUDEV_LIBS = @GUDEV_LIBS@
+HAL_CFLAGS = @HAL_CFLAGS@
+HAL_LIBS = @HAL_LIBS@
+HAVE_AVC1394 = @HAVE_AVC1394@
+HAVE_BZ2 = @HAVE_BZ2@
+HAVE_CXX = @HAVE_CXX@
+HAVE_DIRECTSOUND = @HAVE_DIRECTSOUND@
+HAVE_GCONFTOOL = @HAVE_GCONFTOOL@
+HAVE_ROM1394 = @HAVE_ROM1394@
+HAVE_SPEEX = @HAVE_SPEEX@
+HAVE_X = @HAVE_X@
+HAVE_XSHM = @HAVE_XSHM@
+HAVE_ZLIB = @HAVE_ZLIB@
+HTML_DIR = @HTML_DIR@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+INTLLIBS = @INTLLIBS@
+INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@
+JACK_0_120_1_CFLAGS = @JACK_0_120_1_CFLAGS@
+JACK_0_120_1_LIBS = @JACK_0_120_1_LIBS@
+JACK_1_9_7_CFLAGS = @JACK_1_9_7_CFLAGS@
+JACK_1_9_7_LIBS = @JACK_1_9_7_LIBS@
+JACK_CFLAGS = @JACK_CFLAGS@
+JACK_LIBS = @JACK_LIBS@
+JPEG_LIBS = @JPEG_LIBS@
+LD = @LD@
+LDFLAGS = @LDFLAGS@
+LIBCACA_CFLAGS = @LIBCACA_CFLAGS@
+LIBCACA_LIBS = @LIBCACA_LIBS@
+LIBDV_CFLAGS = @LIBDV_CFLAGS@
+LIBDV_LIBS = @LIBDV_LIBS@
+LIBICONV = @LIBICONV@
+LIBIEC61883_CFLAGS = @LIBIEC61883_CFLAGS@
+LIBIEC61883_LIBS = @LIBIEC61883_LIBS@
+LIBINTL = @LIBINTL@
+LIBM = @LIBM@
+LIBOBJS = @LIBOBJS@
+LIBPNG_CFLAGS = @LIBPNG_CFLAGS@
+LIBPNG_LIBS = @LIBPNG_LIBS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIBV4L2_CFLAGS = @LIBV4L2_CFLAGS@
+LIBV4L2_LIBS = @LIBV4L2_LIBS@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LOCALEDIR = @LOCALEDIR@
+LTLIBICONV = @LTLIBICONV@
+LTLIBINTL = @LTLIBINTL@
+LTLIBOBJS = @LTLIBOBJS@
+MAINT = @MAINT@
+MAKEINFO = @MAKEINFO@
+MANIFEST_TOOL = @MANIFEST_TOOL@
+MKDIR_P = @MKDIR_P@
+MSGFMT = @MSGFMT@
+MSGFMT_015 = @MSGFMT_015@
+MSGMERGE = @MSGMERGE@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJC = @OBJC@
+OBJCDEPMODE = @OBJCDEPMODE@
+OBJC_LDFLAGS = @OBJC_LDFLAGS@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+ORCC = @ORCC@
+ORCC_FLAGS = @ORCC_FLAGS@
+ORC_CFLAGS = @ORC_CFLAGS@
+ORC_LIBS = @ORC_LIBS@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PACKAGE_VERSION_MAJOR = @PACKAGE_VERSION_MAJOR@
+PACKAGE_VERSION_MICRO = @PACKAGE_VERSION_MICRO@
+PACKAGE_VERSION_MINOR = @PACKAGE_VERSION_MINOR@
+PACKAGE_VERSION_NANO = @PACKAGE_VERSION_NANO@
+PACKAGE_VERSION_RELEASE = @PACKAGE_VERSION_RELEASE@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+PKG_CONFIG = @PKG_CONFIG@
+PLUGINDIR = @PLUGINDIR@
+POSUB = @POSUB@
+PROFILE_CFLAGS = @PROFILE_CFLAGS@
+PULSE_0_9_20_CFLAGS = @PULSE_0_9_20_CFLAGS@
+PULSE_0_9_20_LIBS = @PULSE_0_9_20_LIBS@
+PULSE_1_0_CFLAGS = @PULSE_1_0_CFLAGS@
+PULSE_1_0_LIBS = @PULSE_1_0_LIBS@
+PULSE_CFLAGS = @PULSE_CFLAGS@
+PULSE_LIBS = @PULSE_LIBS@
+PYTHON = @PYTHON@
+PYTHON_EXEC_PREFIX = @PYTHON_EXEC_PREFIX@
+PYTHON_PLATFORM = @PYTHON_PLATFORM@
+PYTHON_PREFIX = @PYTHON_PREFIX@
+PYTHON_VERSION = @PYTHON_VERSION@
+RANLIB = @RANLIB@
+RAW1394_CFLAGS = @RAW1394_CFLAGS@
+RAW1394_LIBS = @RAW1394_LIBS@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+SHOUT2_CFLAGS = @SHOUT2_CFLAGS@
+SHOUT2_LIBS = @SHOUT2_LIBS@
+SOUP_CFLAGS = @SOUP_CFLAGS@
+SOUP_LIBS = @SOUP_LIBS@
+SPEEX_CFLAGS = @SPEEX_CFLAGS@
+SPEEX_LIBS = @SPEEX_LIBS@
+STRIP = @STRIP@
+TAGLIB_CFLAGS = @TAGLIB_CFLAGS@
+TAGLIB_CXXFLAGS = @TAGLIB_CXXFLAGS@
+TAGLIB_LIBS = @TAGLIB_LIBS@
+USE_NLS = @USE_NLS@
+VALGRIND_CFLAGS = @VALGRIND_CFLAGS@
+VALGRIND_LIBS = @VALGRIND_LIBS@
+VALGRIND_PATH = @VALGRIND_PATH@
+VERSION = @VERSION@
+WARNING_CFLAGS = @WARNING_CFLAGS@
+WARNING_CXXFLAGS = @WARNING_CXXFLAGS@
+WAVPACK_CFLAGS = @WAVPACK_CFLAGS@
+WAVPACK_LIBS = @WAVPACK_LIBS@
+WIN32_LIBS = @WIN32_LIBS@
+XDAMAGE_CFLAGS = @XDAMAGE_CFLAGS@
+XDAMAGE_LIBS = @XDAMAGE_LIBS@
+XFIXES_CFLAGS = @XFIXES_CFLAGS@
+XFIXES_LIBS = @XFIXES_LIBS@
+XGETTEXT = @XGETTEXT@
+XGETTEXT_015 = @XGETTEXT_015@
+XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@
+XMKMF = @XMKMF@
+XSHM_LIBS = @XSHM_LIBS@
+XVIDEO_LIBS = @XVIDEO_LIBS@
+X_CFLAGS = @X_CFLAGS@
+X_EXTRA_LIBS = @X_EXTRA_LIBS@
+X_LIBS = @X_LIBS@
+X_PRE_LIBS = @X_PRE_LIBS@
+ZLIB_LIBS = @ZLIB_LIBS@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_AR = @ac_ct_AR@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_CXX = @ac_ct_CXX@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+ac_ct_OBJC = @ac_ct_OBJC@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+pkgpyexecdir = @pkgpyexecdir@
+pkgpythondir = @pkgpythondir@
+plugindir = @plugindir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+pyexecdir = @pyexecdir@
+pythondir = @pythondir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+plugin_LTLIBRARIES = libgstavi.la
+libgstavi_la_SOURCES = \
+ gstavi.c \
+ gstavimux.c \
+ gstavidemux.c \
+ gstavisubtitle.c
+
+noinst_HEADERS = \
+ avi-ids.h \
+ gstavimux.h \
+ gstavidemux.h \
+ gstavisubtitle.h
+
+libgstavi_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
+libgstavi_la_LIBADD = \
+ $(GST_PLUGINS_BASE_LIBS) \
+ $(GST_BASE_LIBS) \
+ $(GST_LIBS) \
+ -lgstriff-@GST_MAJORMINOR@ \
+ -lgstaudio-@GST_MAJORMINOR@ \
+ -lgsttag-@GST_MAJORMINOR@
+
+libgstavi_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
+libgstavi_la_LIBTOOLFLAGS = --tag=disable-static
+EXTRA_DIST = README
+all: all-am
+
+.SUFFIXES:
+.SUFFIXES: .c .lo .o .obj
+$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(am__configure_deps)
+ @for dep in $?; do \
+ case '$(am__configure_deps)' in \
+ *$$dep*) \
+ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
+ && { if test -f $@; then exit 0; else break; fi; }; \
+ exit 1;; \
+ esac; \
+ done; \
+ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu gst/avi/Makefile'; \
+ $(am__cd) $(top_srcdir) && \
+ $(AUTOMAKE) --gnu gst/avi/Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+ @case '$?' in \
+ *config.status*) \
+ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+ *) \
+ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
+ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
+ esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps)
+ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
+ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(am__aclocal_m4_deps):
+install-pluginLTLIBRARIES: $(plugin_LTLIBRARIES)
+ @$(NORMAL_INSTALL)
+ test -z "$(plugindir)" || $(MKDIR_P) "$(DESTDIR)$(plugindir)"
+ @list='$(plugin_LTLIBRARIES)'; test -n "$(plugindir)" || list=; \
+ list2=; for p in $$list; do \
+ if test -f $$p; then \
+ list2="$$list2 $$p"; \
+ else :; fi; \
+ done; \
+ test -z "$$list2" || { \
+ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(plugindir)'"; \
+ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(plugindir)"; \
+ }
+
+uninstall-pluginLTLIBRARIES:
+ @$(NORMAL_UNINSTALL)
+ @list='$(plugin_LTLIBRARIES)'; test -n "$(plugindir)" || list=; \
+ for p in $$list; do \
+ $(am__strip_dir) \
+ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(plugindir)/$$f'"; \
+ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(plugindir)/$$f"; \
+ done
+
+clean-pluginLTLIBRARIES:
+ -test -z "$(plugin_LTLIBRARIES)" || rm -f $(plugin_LTLIBRARIES)
+ @list='$(plugin_LTLIBRARIES)'; for p in $$list; do \
+ dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \
+ test "$$dir" != "$$p" || dir=.; \
+ echo "rm -f \"$${dir}/so_locations\""; \
+ rm -f "$${dir}/so_locations"; \
+ done
+libgstavi.la: $(libgstavi_la_OBJECTS) $(libgstavi_la_DEPENDENCIES) $(EXTRA_libgstavi_la_DEPENDENCIES)
+ $(AM_V_CCLD)$(libgstavi_la_LINK) -rpath $(plugindir) $(libgstavi_la_OBJECTS) $(libgstavi_la_LIBADD) $(LIBS)
+
+mostlyclean-compile:
+ -rm -f *.$(OBJEXT)
+
+distclean-compile:
+ -rm -f *.tab.c
+
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libgstavi_la-gstavi.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libgstavi_la-gstavidemux.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libgstavi_la-gstavimux.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libgstavi_la-gstavisubtitle.Plo@am__quote@
+
+.c.o:
+@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c $<
+
+.c.obj:
+@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
+@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c `$(CYGPATH_W) '$<'`
+
+.c.lo:
+@am__fastdepCC_TRUE@ $(AM_V_CC)$(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LTCOMPILE) -c -o $@ $<
+
+libgstavi_la-gstavi.lo: gstavi.c
+@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(libgstavi_la_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libgstavi_la_CFLAGS) $(CFLAGS) -MT libgstavi_la-gstavi.lo -MD -MP -MF $(DEPDIR)/libgstavi_la-gstavi.Tpo -c -o libgstavi_la-gstavi.lo `test -f 'gstavi.c' || echo '$(srcdir)/'`gstavi.c
+@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libgstavi_la-gstavi.Tpo $(DEPDIR)/libgstavi_la-gstavi.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='gstavi.c' object='libgstavi_la-gstavi.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(libgstavi_la_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libgstavi_la_CFLAGS) $(CFLAGS) -c -o libgstavi_la-gstavi.lo `test -f 'gstavi.c' || echo '$(srcdir)/'`gstavi.c
+
+libgstavi_la-gstavimux.lo: gstavimux.c
+@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(libgstavi_la_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libgstavi_la_CFLAGS) $(CFLAGS) -MT libgstavi_la-gstavimux.lo -MD -MP -MF $(DEPDIR)/libgstavi_la-gstavimux.Tpo -c -o libgstavi_la-gstavimux.lo `test -f 'gstavimux.c' || echo '$(srcdir)/'`gstavimux.c
+@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libgstavi_la-gstavimux.Tpo $(DEPDIR)/libgstavi_la-gstavimux.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='gstavimux.c' object='libgstavi_la-gstavimux.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(libgstavi_la_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libgstavi_la_CFLAGS) $(CFLAGS) -c -o libgstavi_la-gstavimux.lo `test -f 'gstavimux.c' || echo '$(srcdir)/'`gstavimux.c
+
+libgstavi_la-gstavidemux.lo: gstavidemux.c
+@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(libgstavi_la_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libgstavi_la_CFLAGS) $(CFLAGS) -MT libgstavi_la-gstavidemux.lo -MD -MP -MF $(DEPDIR)/libgstavi_la-gstavidemux.Tpo -c -o libgstavi_la-gstavidemux.lo `test -f 'gstavidemux.c' || echo '$(srcdir)/'`gstavidemux.c
+@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libgstavi_la-gstavidemux.Tpo $(DEPDIR)/libgstavi_la-gstavidemux.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='gstavidemux.c' object='libgstavi_la-gstavidemux.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(libgstavi_la_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libgstavi_la_CFLAGS) $(CFLAGS) -c -o libgstavi_la-gstavidemux.lo `test -f 'gstavidemux.c' || echo '$(srcdir)/'`gstavidemux.c
+
+libgstavi_la-gstavisubtitle.lo: gstavisubtitle.c
+@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(libgstavi_la_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libgstavi_la_CFLAGS) $(CFLAGS) -MT libgstavi_la-gstavisubtitle.lo -MD -MP -MF $(DEPDIR)/libgstavi_la-gstavisubtitle.Tpo -c -o libgstavi_la-gstavisubtitle.lo `test -f 'gstavisubtitle.c' || echo '$(srcdir)/'`gstavisubtitle.c
+@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libgstavi_la-gstavisubtitle.Tpo $(DEPDIR)/libgstavi_la-gstavisubtitle.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='gstavisubtitle.c' object='libgstavi_la-gstavisubtitle.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(libgstavi_la_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libgstavi_la_CFLAGS) $(CFLAGS) -c -o libgstavi_la-gstavisubtitle.lo `test -f 'gstavisubtitle.c' || echo '$(srcdir)/'`gstavisubtitle.c
+
+mostlyclean-libtool:
+ -rm -f *.lo
+
+clean-libtool:
+ -rm -rf .libs _libs
+
+ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
+ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | \
+ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \
+ END { if (nonempty) { for (i in files) print i; }; }'`; \
+ mkid -fID $$unique
+tags: TAGS
+
+TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \
+ $(TAGS_FILES) $(LISP)
+ set x; \
+ here=`pwd`; \
+ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | \
+ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \
+ END { if (nonempty) { for (i in files) print i; }; }'`; \
+ shift; \
+ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
+ test -n "$$unique" || unique=$$empty_fix; \
+ if test $$# -gt 0; then \
+ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+ "$$@" $$unique; \
+ else \
+ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+ $$unique; \
+ fi; \
+ fi
+ctags: CTAGS
+CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \
+ $(TAGS_FILES) $(LISP)
+ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | \
+ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \
+ END { if (nonempty) { for (i in files) print i; }; }'`; \
+ test -z "$(CTAGS_ARGS)$$unique" \
+ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+ $$unique
+
+GTAGS:
+ here=`$(am__cd) $(top_builddir) && pwd` \
+ && $(am__cd) $(top_srcdir) \
+ && gtags -i $(GTAGS_ARGS) "$$here"
+
+distclean-tags:
+ -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(DISTFILES)
+ @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+ list='$(DISTFILES)'; \
+ dist_files=`for file in $$list; do echo $$file; done | \
+ sed -e "s|^$$srcdirstrip/||;t" \
+ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+ case $$dist_files in \
+ */*) $(MKDIR_P) `echo "$$dist_files" | \
+ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+ sort -u` ;; \
+ esac; \
+ for file in $$dist_files; do \
+ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+ if test -d $$d/$$file; then \
+ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+ if test -d "$(distdir)/$$file"; then \
+ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+ fi; \
+ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
+ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+ fi; \
+ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
+ else \
+ test -f "$(distdir)/$$file" \
+ || cp -p $$d/$$file "$(distdir)/$$file" \
+ || exit 1; \
+ fi; \
+ done
+check-am: all-am
+check: check-am
+all-am: Makefile $(LTLIBRARIES) $(HEADERS)
+installdirs:
+ for dir in "$(DESTDIR)$(plugindir)"; do \
+ test -z "$$dir" || $(MKDIR_P) "$$dir"; \
+ done
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+ @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+ if test -z '$(STRIP)'; then \
+ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+ install; \
+ else \
+ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
+ fi
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+ -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+ -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
+
+maintainer-clean-generic:
+ @echo "This command is intended for maintainers to use"
+ @echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-generic clean-libtool clean-pluginLTLIBRARIES \
+ mostlyclean-am
+
+distclean: distclean-am
+ -rm -rf ./$(DEPDIR)
+ -rm -f Makefile
+distclean-am: clean-am distclean-compile distclean-generic \
+ distclean-tags
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+html-am:
+
+info: info-am
+
+info-am:
+
+install-data-am: install-pluginLTLIBRARIES
+
+install-dvi: install-dvi-am
+
+install-dvi-am:
+
+install-exec-am:
+
+install-html: install-html-am
+
+install-html-am:
+
+install-info: install-info-am
+
+install-info-am:
+
+install-man:
+
+install-pdf: install-pdf-am
+
+install-pdf-am:
+
+install-ps: install-ps-am
+
+install-ps-am:
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+ -rm -rf ./$(DEPDIR)
+ -rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-compile mostlyclean-generic \
+ mostlyclean-libtool
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am: uninstall-pluginLTLIBRARIES
+
+.MAKE: install-am install-strip
+
+.PHONY: CTAGS GTAGS all all-am check check-am clean clean-generic \
+ clean-libtool clean-pluginLTLIBRARIES ctags distclean \
+ distclean-compile distclean-generic distclean-libtool \
+ distclean-tags distdir dvi dvi-am html html-am info info-am \
+ install install-am install-data install-data-am install-dvi \
+ install-dvi-am install-exec install-exec-am install-html \
+ install-html-am install-info install-info-am install-man \
+ install-pdf install-pdf-am install-pluginLTLIBRARIES \
+ install-ps install-ps-am install-strip installcheck \
+ installcheck-am installdirs maintainer-clean \
+ maintainer-clean-generic mostlyclean mostlyclean-compile \
+ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
+ tags uninstall uninstall-am uninstall-pluginLTLIBRARIES
+
+
+Android.mk: Makefile.am $(BUILT_SOURCES)
+ androgenizer \
+ -:PROJECT libgstavi -:SHARED libgstavi \
+ -:TAGS eng debug \
+ -:REL_TOP $(top_srcdir) -:ABS_TOP $(abs_top_srcdir) \
+ -:SOURCES $(libgstavi_la_SOURCES) \
+ -:CFLAGS $(DEFS) $(DEFAULT_INCLUDES) $(libgstavi_la_CFLAGS) \
+ -:LDFLAGS $(libgstavi_la_LDFLAGS) \
+ $(libgstavi_la_LIBADD) \
+ -ldl \
+ -:PASSTHROUGH LOCAL_ARM_MODE:=arm \
+ LOCAL_MODULE_PATH:='$$(TARGET_OUT)/lib/gstreamer-0.10' \
+ > $@
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/gst/avi/README b/gst/avi/README
new file mode 100644
index 0000000..69a08a9
--- /dev/null
+++ b/gst/avi/README
@@ -0,0 +1,72 @@
+The avi decoder plugins
+-----------------------
+
+The avi decoder consists of a set of gstreamer plugins:
+
+ - demuxer (avidemux)
+ - avi to gstreamer type converter (avitypes)
+ - Windows DLL wrappers.
+
+The avidecoder element uses the above plugins to perform the avi
+decoding. It is constructed as a custom bin which initially only has
+the demuxer element in it. The demuxer has a set of padtemplates for
+raw audio and video.
+
+ (------------------------------------)
+ ! avidecoder !
+ ! (video/raw)...
+ ! (----------) !
+ ! ! demuxer (video/x-msvideo, auds)..
+ ! ! ! !
+ ! -src ! !
+ ! / ! (video/x-msvideo, vids)..
+ - src ! ! !
+ ! (----------) (audio/raw)...
+ ! !
+ (------------------------------------)
+
+The demuxer has a set of padtemplates for the raw avi header properties.
+
+The avi decoder will act on the new_pad signal of the demuxer element
+and will attach an avitypes plugin to the new pad. Caps negotiation will
+convert the raw avi caps to the gstreamer caps. If the src pad of the
+avitypes plugin is compatible with the avidecoder padtemplate, the
+avitypes pad is ghosted to the avidecoder bin; this is the case where no
+codec is needed (for raw PCM samples, for example).
+
+When the avitypes caps are not compatible with one of the avidecoder
+templates, a static autoplugger is used to find an element to connect
+the demuxer's pad to the decoder's padtemplate.
+
+When no element could be found, a windec plugin is attached to the
+demuxer's pad and the avitypes plugin is removed from the decoder.
+
+
+example:
+--------
+
+ An avidecoder that has a video pad (decoded with Windows DLLs) and an
+ audio pad (raw PCM).
+
+ (----------------------------------------------------------------)
+ ! avidecoder (--------) (------) !
+ ! !avitypes! !windec! /-- (video/raw)
+ ! (----------) /-sink src--sink src ----- !
+ ! !demuxer (video/x-msvideo, ! ! ! !
+ ! ! ! auds).. (--------) (------) !
+ ! -sink ! (--------) !
+ ! / ! (video/x-..,!avitypes! !
+ -sink ! ! vids).. ! ! !
+ ! (----------) \-sink src -------------------- (audio/raw)
+ ! (--------) !
+ (----------------------------------------------------------------)
+
+
+
+TODO
+----
+
+Automatically generate the padtemplates from all possible avi types
+found in the registry.
+
+
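For context, the dynamic-pad mechanism the README describes is what an application reacts to when it uses the demuxer directly. Below is a minimal sketch, assuming plain GStreamer 0.10 core API and the "pad-added" signal (the later name for new_pad); the filesrc/decodebin element names and the single downstream link are illustrative assumptions, not part of this commit:

  #include <gst/gst.h>

  /* called whenever avidemux exposes a new audio_NN/video_NN source pad */
  static void
  on_pad_added (GstElement * demux, GstPad * pad, gpointer user_data)
  {
    GstElement *downstream = GST_ELEMENT (user_data);   /* e.g. a decodebin */
    GstPad *sinkpad = gst_element_get_static_pad (downstream, "sink");

    /* link only once; a real application would inspect the pad caps first */
    if (sinkpad != NULL && !gst_pad_is_linked (sinkpad))
      gst_pad_link (pad, sinkpad);
    if (sinkpad != NULL)
      gst_object_unref (sinkpad);
  }

  int
  main (int argc, char **argv)
  {
    GstElement *pipeline, *src, *demux, *dec;

    gst_init (&argc, &argv);

    pipeline = gst_pipeline_new ("avi-player");
    src = gst_element_factory_make ("filesrc", NULL);
    demux = gst_element_factory_make ("avidemux", NULL);
    dec = gst_element_factory_make ("decodebin", NULL);

    g_object_set (src, "location", "test.avi", NULL);
    gst_bin_add_many (GST_BIN (pipeline), src, demux, dec, NULL);
    gst_element_link (src, demux);            /* static link */
    g_signal_connect (demux, "pad-added",     /* dynamic pads appear while parsing */
        G_CALLBACK (on_pad_added), dec);

    gst_element_set_state (pipeline, GST_STATE_PLAYING);
    /* run a main loop, watch the bus for EOS/errors, then shut down */
    return 0;
  }
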
diff --git a/gst/avi/avi-ids.h b/gst/avi/avi-ids.h
new file mode 100644
index 0000000..7b724df
--- /dev/null
+++ b/gst/avi/avi-ids.h
@@ -0,0 +1,78 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_AVI_H__
+#define __GST_AVI_H__
+
+#include <gst/gst.h>
+
+typedef struct _gst_riff_avih {
+ guint32 us_frame; /* microsec per frame */
+ guint32 max_bps; /* byte/s overall */
+ guint32 pad_gran; /* pad_granularity */
+ guint32 flags;
+/* flags values */
+#define GST_RIFF_AVIH_HASINDEX 0x00000010 /* has idx1 chunk */
+#define GST_RIFF_AVIH_MUSTUSEINDEX 0x00000020 /* must use idx1 chunk to determine order */
+#define GST_RIFF_AVIH_ISINTERLEAVED 0x00000100 /* AVI file is interleaved */
+#define GST_RIFF_AVIH_TRUSTCKTYPE 0x00000800 /* Use CKType to find key frames */
+#define GST_RIFF_AVIH_WASCAPTUREFILE 0x00010000 /* specially allocated; used for capturing real-time video */
+#define GST_RIFF_AVIH_COPYRIGHTED 0x00020000 /* contains copyrighted data */
+ guint32 tot_frames; /* # of frames (all) */
+ guint32 init_frames; /* initial frames (???) */
+ guint32 streams;
+ guint32 bufsize; /* suggested buffer size */
+ guint32 width;
+ guint32 height;
+ guint32 scale;
+ guint32 rate;
+ guint32 start;
+ guint32 length;
+} gst_riff_avih;
+
+/* vprp (video properties) ODML header */
+/* see ODML spec for some/more explanation */
+#define GST_RIFF_TAG_vprp GST_MAKE_FOURCC ('v','p','r','p')
+#define GST_RIFF_VPRP_VIDEO_FIELDS (2)
+
+typedef struct _gst_riff_vprp_video_field_desc {
+ guint32 compressed_bm_height;
+ guint32 compressed_bm_width;
+ guint32 valid_bm_height;
+ guint32 valid_bm_width;
+ guint32 valid_bm_x_offset;
+ guint32 valid_bm_y_offset;
+ guint32 video_x_t_offset;
+ guint32 video_y_start;
+} gst_riff_vprp_video_field_desc;
+
+typedef struct _gst_riff_vprp {
+ guint32 format_token; /* whether fields defined by standard */
+ guint32 standard; /* video display standard, UNKNOWN, PAL, etc */
+ guint32 vert_rate; /* vertical refresh rate */
+ guint32 hor_t_total; /* width */
+ guint32 vert_lines; /* height */
+ guint32 aspect; /* aspect ratio high word:low word */
+ guint32 width; /* active width */
+ guint32 height; /* active height */
+ guint32 fields; /* field count */
+ gst_riff_vprp_video_field_desc field_info[GST_RIFF_VPRP_VIDEO_FIELDS];
+} gst_riff_vprp;
+
+#endif /* __GST_AVI_H__ */
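As an illustration of how the avih flag bits above are meant to be read, here is a small hedged sketch (a hypothetical helper, not part of this header or of the demuxer) that reports a few properties of an already-parsed gst_riff_avih:

  #include <gst/gst.h>
  #include "avi-ids.h"

  /* hypothetical helper: dump a few properties of a parsed 'avih' chunk */
  static void
  print_avih_info (const gst_riff_avih * avih)
  {
    /* us_frame stores microseconds per frame, so invert it for fps */
    gdouble fps = avih->us_frame ? 1000000.0 / avih->us_frame : 0.0;

    g_print ("%u x %u, %u stream(s), ~%.2f fps\n",
        (guint) avih->width, (guint) avih->height, (guint) avih->streams, fps);

    if (avih->flags & GST_RIFF_AVIH_HASINDEX)
      g_print ("  carries an idx1 index chunk\n");
    if (avih->flags & GST_RIFF_AVIH_MUSTUSEINDEX)
      g_print ("  idx1 must be used to determine frame order\n");
    if (avih->flags & GST_RIFF_AVIH_ISINTERLEAVED)
      g_print ("  file is interleaved\n");
  }
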
diff --git a/gst/avi/gstavi.c b/gst/avi/gstavi.c
new file mode 100644
index 0000000..38ef650
--- /dev/null
+++ b/gst/avi/gstavi.c
@@ -0,0 +1,59 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@temple-baptist.com>
+ *
+ * gstavi.c: plugin registering
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gst/gst-i18n-plugin.h"
+
+#include "gstavidemux.h"
+#include "gstavimux.h"
+#include "gstavisubtitle.h"
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gst_riff_init ();
+
+#ifdef ENABLE_NLS
+ setlocale (LC_ALL, "");
+ bindtextdomain (GETTEXT_PACKAGE, LOCALEDIR);
+ bind_textdomain_codeset (GETTEXT_PACKAGE, "UTF-8");
+#endif /* ENABLE_NLS */
+
+ if (!gst_element_register (plugin, "avidemux", GST_RANK_PRIMARY,
+ GST_TYPE_AVI_DEMUX) ||
+ !gst_element_register (plugin, "avimux", GST_RANK_PRIMARY,
+ GST_TYPE_AVI_MUX) ||
+ !gst_element_register (plugin, "avisubtitle", GST_RANK_PRIMARY,
+ GST_TYPE_AVI_SUBTITLE)) {
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "avi",
+ "AVI stream handling",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/avi/gstavidemux.c b/gst/avi/gstavidemux.c
new file mode 100644
index 0000000..6671b0d
--- /dev/null
+++ b/gst/avi/gstavidemux.c
@@ -0,0 +1,6273 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@temple-baptist.com>
+ * Copyright (C) <2006> Nokia Corporation (contact <stefan.kost@nokia.com>)
+ * Copyright (C) <2009-2010> STEricsson <benjamin.gaignard@stericsson.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+/* Element-Checklist-Version: 5 */
+
+/**
+ * SECTION:element-avidemux
+ *
+ * Demuxes an .avi file into raw or compressed audio and/or video streams.
+ *
+ * This element supports both push and pull-based scheduling, depending on the
+ * capabilities of the upstream elements.
+ *
+ * <refsect2>
+ * <title>Example launch line</title>
+ * |[
+ * gst-launch filesrc location=test.avi ! avidemux name=demux demux.audio_00 ! decodebin ! audioconvert ! audioresample ! autoaudiosink demux.video_00 ! queue ! decodebin ! ffmpegcolorspace ! videoscale ! autovideosink
+ * ]| Play (parse and decode) an .avi file and try to output it to
+ * an automatically detected soundcard and videosink. If the AVI file contains
+ * compressed audio or video data, this will only work if you have the
+ * right decoder elements/plugins installed.
+ * </refsect2>
+ *
+ * Last reviewed on 2006-12-29 (0.10.6)
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
+ * with newer GLib versions (>= 2.31.0) */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
+#include <string.h>
+#include <stdio.h>
+
+#ifdef DIVX_DRM /* need to check to use same define */
+#include <stdlib.h>
+#include <dlfcn.h>
+#endif
+
+#include "gst/riff/riff-media.h"
+#include "gstavidemux.h"
+#include "avi-ids.h"
+#include <gst/gst-i18n-plugin.h>
+#include <gst/base/gstadapter.h>
+
+
+#define DIV_ROUND_UP(s,v) (((s) + ((v)-1)) / (v))
+
+#define GST_AVI_KEYFRAME 1
+#ifdef AVIDEMUX_MODIFICATION
+#define GST_AVI_NON_KEYFRAME 0
+#endif
+#define ENTRY_IS_KEYFRAME(e) ((e)->flags == GST_AVI_KEYFRAME)
+#define ENTRY_SET_KEYFRAME(e) ((e)->flags = GST_AVI_KEYFRAME)
+#define ENTRY_UNSET_KEYFRAME(e) ((e)->flags = 0)
+
+
+GST_DEBUG_CATEGORY_STATIC (avidemux_debug);
+#define GST_CAT_DEFAULT avidemux_debug
+
+static GstStaticPadTemplate sink_templ = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-msvideo")
+ );
+
+#ifdef AVIDEMUX_MODIFICATION
+/* Modification: Frame type enumerations used to generate the index-table */
+typedef enum
+{
+ H264_NUT_UNKNOWN = 0,
+ H264_NUT_SLICE = 1,
+ H264_NUT_DPA = 2,
+ H264_NUT_DPB = 3,
+ H264_NUT_DPC = 4,
+ H264_NUT_IDR = 5,
+ H264_NUT_SEI = 6,
+ H264_NUT_SPS = 7,
+ H264_NUT_PPS = 8,
+ H264_NUT_AUD = 9,
+ H264_NUT_EOSEQ = 10,
+ H264_NUT_EOSTREAM = 11,
+ H264_NUT_FILL = 12,
+ H264_NUT_MIXED = 24,
+} eH264NalType;
+#endif
+
+static void gst_avi_demux_base_init (GstAviDemuxClass * klass);
+static void gst_avi_demux_class_init (GstAviDemuxClass * klass);
+static void gst_avi_demux_init (GstAviDemux * avi);
+static void gst_avi_demux_finalize (GObject * object);
+
+static void gst_avi_demux_reset (GstAviDemux * avi);
+
+#if 0
+static const GstEventMask *gst_avi_demux_get_event_mask (GstPad * pad);
+#endif
+static gboolean gst_avi_demux_handle_src_event (GstPad * pad, GstEvent * event);
+static gboolean gst_avi_demux_handle_sink_event (GstPad * pad,
+ GstEvent * event);
+static gboolean gst_avi_demux_push_event (GstAviDemux * avi, GstEvent * event);
+
+#if 0
+static const GstFormat *gst_avi_demux_get_src_formats (GstPad * pad);
+#endif
+static const GstQueryType *gst_avi_demux_get_src_query_types (GstPad * pad);
+static gboolean gst_avi_demux_handle_src_query (GstPad * pad, GstQuery * query);
+static gboolean gst_avi_demux_src_convert (GstPad * pad, GstFormat src_format,
+ gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
+
+static gboolean gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment);
+static gboolean gst_avi_demux_handle_seek (GstAviDemux * avi, GstPad * pad,
+ GstEvent * event);
+static gboolean gst_avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad,
+ GstEvent * event);
+static void gst_avi_demux_loop (GstPad * pad);
+static gboolean gst_avi_demux_sink_activate (GstPad * sinkpad);
+static gboolean gst_avi_demux_sink_activate_pull (GstPad * sinkpad,
+ gboolean active);
+static gboolean gst_avi_demux_activate_push (GstPad * pad, gboolean active);
+static GstFlowReturn gst_avi_demux_chain (GstPad * pad, GstBuffer * buf);
+
+static void gst_avi_demux_set_index (GstElement * element, GstIndex * index);
+static GstIndex *gst_avi_demux_get_index (GstElement * element);
+static GstStateChangeReturn gst_avi_demux_change_state (GstElement * element,
+ GstStateChange transition);
+static void gst_avi_demux_calculate_durations_from_index (GstAviDemux * avi);
+static void gst_avi_demux_get_buffer_info (GstAviDemux * avi,
+ GstAviStream * stream, guint entry_n, GstClockTime * timestamp,
+ GstClockTime * ts_end, guint64 * offset, guint64 * offset_end);
+
+static void gst_avi_demux_parse_idit (GstAviDemux * avi, GstBuffer * buf);
+#ifdef AVIDEMUX_MODIFICATION
+/*Modification: Added function to find out the frame_type for index-table generation */
+static int
+gst_avi_demux_find_frame_type (GstAviStream *stream, GstBuffer *buf, int *frame_type);
+static void gst_avidemux_forward_trickplay (GstAviDemux * avi, GstAviStream * stream, guint64 *timestamp);
+static void gst_avidemux_backward_trickplay (GstAviDemux * avi, GstAviStream * stream, guint64 *timestamp);
+static GstFlowReturn gst_avidemux_seek_to_previous_keyframe (GstAviDemux *avi);
+#endif
+
+static GstElementClass *parent_class = NULL;
+
+#ifdef DIVX_DRM
+
+typedef enum drmErrorCodes
+{
+ DRM_SUCCESS = 0,
+ DRM_NOT_AUTHORIZED,
+ DRM_NOT_REGISTERED,
+ DRM_RENTAL_EXPIRED,
+ DRM_GENERAL_ERROR,
+ DRM_NEVER_REGISTERED,
+} drmErrorCodes_t;
+
+
+#define DIVX_SDK_PLUGIN_NAME "libmm_divxsdk.so"
+
+static gboolean init_divx_plugin (GstAviDemux * avi)
+{
+ char *error;
+ avi->divx_handle = dlopen (DIVX_SDK_PLUGIN_NAME, RTLD_LAZY);
+ if (!avi->divx_handle) {
+ GST_ERROR ("dlopen failed [%s]", dlerror());
+ return FALSE;
+ }
+ GST_DEBUG("dlopen success");
+
+ avi->divx_init = dlsym (avi->divx_handle, "divx_init");
+ if ((error = dlerror()) != NULL) {
+ GST_ERROR ("[%s][%d]", __func__, __LINE__);
+ goto DL_ERROR;
+ }
+
+ avi->divx_commit = dlsym (avi->divx_handle, "divx_commit");
+ if ((error = dlerror()) != NULL) {
+ GST_ERROR ("[%s][%d] %p", __func__, __LINE__, avi->divx_commit);
+ goto DL_ERROR;
+ }
+
+ avi->divx_decrypt_audio = dlsym (avi->divx_handle, "divx_decrypt_audio");
+ GST_ERROR ("[%s][%d] %p", __func__, __LINE__, avi->divx_decrypt_audio);
+ if ((error = dlerror()) != NULL) {
+ goto DL_ERROR;
+ }
+
+ avi->divx_prepare_video_bitstream = dlsym (avi->divx_handle, "divx_prepare_video_bitstream");
+ if ((error = dlerror()) != NULL) {
+ GST_ERROR ("[%s][%d]", __func__, __LINE__);
+ goto DL_ERROR;
+ }
+
+ avi->divx_finalize = dlsym (avi->divx_handle, "divx_finalize");
+ if ((error = dlerror()) != NULL) {
+ GST_ERROR ("[%s][%d]", __func__, __LINE__);
+ goto DL_ERROR;
+ }
+
+ return TRUE;
+
+DL_ERROR:
+ GST_ERROR ("error : %s", error);
+ dlclose(avi->divx_handle);
+ avi->divx_handle = NULL;
+ return FALSE;
+}
+
+
+/* ---------------------------------------------------- DIVX DRM Code : Start -----------------------------------------------------------------*/
+static gboolean
+gst_avi_demux_init_divx_drm (GstAviDemux * avi, uint8_t* drm_info)
+{
+ int error_code = 0;
+
+ if (init_divx_plugin (avi) == FALSE) {
+ GST_ERROR_OBJECT (avi, "Loading plugin failed....");
+ return FALSE;
+ }
+
+ avi->drmContext = avi->divx_init (drm_info, &error_code);
+ if (avi->drmContext) {
+ GST_DEBUG_OBJECT (avi,"%s init success: drmContext = %p\n", __func__, avi->drmContext);
+ } else {
+ GST_ERROR_OBJECT (avi,"%s failed to init... error code = %d \n", __func__, error_code);
+ return FALSE;
+ }
+
+ error_code = avi->divx_commit (avi->drmContext);
+ if (error_code == DRM_SUCCESS) {
+ GST_DEBUG_OBJECT (avi,"%s commit success: drmContext = %p\n", __func__, avi->drmContext);
+ } else {
+ GST_ERROR_OBJECT (avi,"%s failed to commit... error code = %d \n", __func__, error_code);
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+static gboolean
+gst_avi_demux_send_divx_tag (GstAviDemux * avi)
+{
+ gboolean ret = 0;
+ GstTagList *tags = NULL;
+ GST_ERROR_OBJECT (avi, "*********** posting divx drm tags!!!!!!!!!!!!!!!!!!");
+ tags = gst_tag_list_new_full ("drm_divx", "1", NULL);
+ if (tags) {
+ ret = gst_avi_demux_push_event (avi, gst_event_new_tag (tags) );
+ GST_ERROR_OBJECT (avi, "*********** posting tags returns [%d] !!!!!!!!!!!!!!!!!!", ret);
+ }
+ return ret;
+}
+/* ---------------------------------------------------- DIVX DRM Code : End -----------------------------------------------------------------*/
+#endif // DIVX_DRM
+/* GObject methods */
+
+GType
+gst_avi_demux_get_type (void)
+{
+ static GType avi_demux_type = 0;
+
+ if (!avi_demux_type) {
+ static const GTypeInfo avi_demux_info = {
+ sizeof (GstAviDemuxClass),
+ (GBaseInitFunc) gst_avi_demux_base_init,
+ NULL,
+ (GClassInitFunc) gst_avi_demux_class_init,
+ NULL,
+ NULL,
+ sizeof (GstAviDemux),
+ 0,
+ (GInstanceInitFunc) gst_avi_demux_init,
+ };
+
+ avi_demux_type =
+ g_type_register_static (GST_TYPE_ELEMENT,
+ "GstAviDemux", &avi_demux_info, 0);
+ }
+
+ return avi_demux_type;
+}
+
+static void
+gst_avi_demux_base_init (GstAviDemuxClass * klass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstPadTemplate *videosrctempl, *audiosrctempl, *subsrctempl;
+ GstCaps *audcaps, *vidcaps, *subcaps;
+
+ audcaps = gst_riff_create_audio_template_caps ();
+ gst_caps_append (audcaps, gst_caps_new_simple ("audio/x-avi-unknown", NULL));
+ audiosrctempl = gst_pad_template_new ("audio_%02d",
+ GST_PAD_SRC, GST_PAD_SOMETIMES, audcaps);
+
+ vidcaps = gst_riff_create_video_template_caps ();
+ gst_caps_append (vidcaps, gst_riff_create_iavs_template_caps ());
+ gst_caps_append (vidcaps, gst_caps_new_simple ("video/x-avi-unknown", NULL));
+ videosrctempl = gst_pad_template_new ("video_%02d",
+ GST_PAD_SRC, GST_PAD_SOMETIMES, vidcaps);
+
+ subcaps = gst_caps_new_simple ("application/x-subtitle-avi", NULL);
+ subsrctempl = gst_pad_template_new ("subtitle_%02d",
+ GST_PAD_SRC, GST_PAD_SOMETIMES, subcaps);
+ gst_element_class_add_pad_template (element_class, audiosrctempl);
+ gst_element_class_add_pad_template (element_class, videosrctempl);
+ gst_element_class_add_pad_template (element_class, subsrctempl);
+ gst_element_class_add_static_pad_template (element_class, &sink_templ);
+ gst_object_unref (audiosrctempl);
+ gst_object_unref (videosrctempl);
+ gst_object_unref (subsrctempl);
+ gst_element_class_set_details_simple (element_class, "Avi demuxer",
+ "Codec/Demuxer",
+ "Demultiplex an avi file into audio and video",
+ "Erik Walthinsen <omega@cse.ogi.edu>, "
+ "Wim Taymans <wim.taymans@chello.be>, "
+ "Thijs Vermeir <thijsvermeir@gmail.com>");
+}
+
+static void
+gst_avi_demux_class_init (GstAviDemuxClass * klass)
+{
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (avidemux_debug, "avidemux",
+ 0, "Demuxer for AVI streams");
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->finalize = gst_avi_demux_finalize;
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_avi_demux_change_state);
+
+ gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_avi_demux_set_index);
+ gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_avi_demux_get_index);
+
+#ifdef DIVX_DRM
+ gst_tag_register ("drm_divx", GST_TAG_FLAG_META,
+ G_TYPE_STRING,
+ _("DRM DivX"),
+ _("a tag that is specific to DivX DRM File"),
+ NULL);
+#endif
+}
+
+static void
+gst_avi_demux_init (GstAviDemux * avi)
+{
+ avi->sinkpad = gst_pad_new_from_static_template (&sink_templ, "sink");
+ gst_pad_set_activate_function (avi->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_sink_activate));
+ gst_pad_set_activatepull_function (avi->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_sink_activate_pull));
+ gst_pad_set_activatepush_function (avi->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_activate_push));
+ gst_pad_set_chain_function (avi->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_chain));
+ gst_pad_set_event_function (avi->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_handle_sink_event));
+ gst_element_add_pad (GST_ELEMENT_CAST (avi), avi->sinkpad);
+
+ avi->adapter = gst_adapter_new ();
+
+ gst_avi_demux_reset (avi);
+}
+
+static void
+gst_avi_demux_finalize (GObject * object)
+{
+ GstAviDemux *avi = GST_AVI_DEMUX (object);
+
+ GST_DEBUG ("AVI: finalize");
+
+ g_object_unref (avi->adapter);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_avi_demux_reset_stream (GstAviDemux * avi, GstAviStream * stream)
+{
+ g_free (stream->strh);
+ g_free (stream->strf.data);
+ g_free (stream->name);
+ g_free (stream->index);
+ g_free (stream->indexes);
+#ifdef AVIDEMUX_MODIFICATION
+ if (stream->trickplay_info)
+ g_free (stream->trickplay_info);
+#endif
+
+ if (stream->initdata)
+ gst_buffer_unref (stream->initdata);
+ if (stream->extradata)
+ gst_buffer_unref (stream->extradata);
+ if (stream->pad) {
+ if (stream->exposed) {
+ gst_pad_set_active (stream->pad, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (avi), stream->pad);
+ } else
+ gst_object_unref (stream->pad);
+ }
+ if (stream->taglist) {
+ gst_tag_list_free (stream->taglist);
+ stream->taglist = NULL;
+ }
+ memset (stream, 0, sizeof (GstAviStream));
+}
+
+static void
+gst_avi_demux_reset (GstAviDemux * avi)
+{
+ gint i;
+
+ GST_DEBUG ("AVI: reset");
+
+ for (i = 0; i < avi->num_streams; i++)
+ gst_avi_demux_reset_stream (avi, &avi->stream[i]);
+
+ avi->header_state = GST_AVI_DEMUX_HEADER_TAG_LIST;
+ avi->num_streams = 0;
+ avi->num_v_streams = 0;
+ avi->num_a_streams = 0;
+ avi->num_t_streams = 0;
+ avi->main_stream = -1;
+
+ avi->state = GST_AVI_DEMUX_START;
+ avi->offset = 0;
+ avi->building_index = FALSE;
+
+ avi->index_offset = 0;
+ g_free (avi->avih);
+ avi->avih = NULL;
+
+ if (avi->element_index)
+ gst_object_unref (avi->element_index);
+ avi->element_index = NULL;
+
+ if (avi->close_seg_event) {
+ gst_event_unref (avi->close_seg_event);
+ avi->close_seg_event = NULL;
+ }
+ if (avi->seg_event) {
+ gst_event_unref (avi->seg_event);
+ avi->seg_event = NULL;
+ }
+ if (avi->seek_event) {
+ gst_event_unref (avi->seek_event);
+ avi->seek_event = NULL;
+ }
+
+ if (avi->globaltags)
+ gst_tag_list_free (avi->globaltags);
+ avi->globaltags = NULL;
+
+ avi->got_tags = TRUE; /* we always want to push global tags */
+ avi->have_eos = FALSE;
+ avi->seekable = TRUE;
+
+ gst_adapter_clear (avi->adapter);
+
+ gst_segment_init (&avi->segment, GST_FORMAT_TIME);
+
+#ifdef DIVX_DRM
+ if (avi->drmContext) {
+ avi->divx_finalize (avi->drmContext);
+ free (avi->drmContext);
+ avi->drmContext = NULL;
+ }
+
+ if (avi->divx_handle) {
+ dlclose (avi->divx_handle);
+ avi->divx_handle = NULL;
+ }
+#endif
+}
+
+
+/* GstElement methods */
+
+#if 0
+static const GstFormat *
+gst_avi_demux_get_src_formats (GstPad * pad)
+{
+ GstAviStream *stream = gst_pad_get_element_private (pad);
+
+ static const GstFormat src_a_formats[] = {
+ GST_FORMAT_TIME,
+ GST_FORMAT_BYTES,
+ GST_FORMAT_DEFAULT,
+ 0
+ };
+ static const GstFormat src_v_formats[] = {
+ GST_FORMAT_TIME,
+ GST_FORMAT_DEFAULT,
+ 0
+ };
+
+ return (stream->strh->type == GST_RIFF_FCC_auds ?
+ src_a_formats : src_v_formats);
+}
+#endif
+
+/* assumes stream->strf.auds->av_bps != 0 */
+static inline GstClockTime
+avi_stream_convert_bytes_to_time_unchecked (GstAviStream * stream,
+ guint64 bytes)
+{
+ return gst_util_uint64_scale_int (bytes, GST_SECOND,
+ stream->strf.auds->av_bps);
+}
+
+static inline guint64
+avi_stream_convert_time_to_bytes_unchecked (GstAviStream * stream,
+ GstClockTime time)
+{
+ return gst_util_uint64_scale_int (time, stream->strf.auds->av_bps,
+ GST_SECOND);
+}
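+
+/* Example: for 44.1 kHz, 16-bit stereo PCM, av_bps is 176400 bytes per
+ * second, so 88200 bytes correspond to 0.5 s and vice versa. */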
+
+/* assumes stream->strh->rate != 0 */
+static inline GstClockTime
+avi_stream_convert_frames_to_time_unchecked (GstAviStream * stream,
+ guint64 frames)
+{
+ return gst_util_uint64_scale (frames, stream->strh->scale * GST_SECOND,
+ stream->strh->rate);
+}
+
+static inline guint64
+avi_stream_convert_time_to_frames_unchecked (GstAviStream * stream,
+ GstClockTime time)
+{
+ return gst_util_uint64_scale (time, stream->strh->rate,
+ stream->strh->scale * GST_SECOND);
+}
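+
+/* Example: a 25 fps video stream typically has strh->rate = 25 and
+ * strh->scale = 1, so frame 50 converts to 50 * 1 * GST_SECOND / 25 = 2 s,
+ * and 2 s converts back to (2 * GST_SECOND) * 25 / (1 * GST_SECOND) = 50. */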
+
+static gboolean
+gst_avi_demux_src_convert (GstPad * pad,
+ GstFormat src_format,
+ gint64 src_value, GstFormat * dest_format, gint64 * dest_value)
+{
+ GstAviStream *stream = gst_pad_get_element_private (pad);
+ gboolean res = TRUE;
+
+ GST_LOG_OBJECT (pad,
+ "Received src_format:%s, src_value:%" G_GUINT64_FORMAT
+ ", dest_format:%s", gst_format_get_name (src_format), src_value,
+ gst_format_get_name (*dest_format));
+
+ if (G_UNLIKELY (src_format == *dest_format)) {
+ *dest_value = src_value;
+ goto done;
+ }
+ if (G_UNLIKELY (!stream->strh || !stream->strf.data)) {
+ res = FALSE;
+ goto done;
+ }
+ if (G_UNLIKELY (stream->strh->type == GST_RIFF_FCC_vids &&
+ (src_format == GST_FORMAT_BYTES
+ || *dest_format == GST_FORMAT_BYTES))) {
+ res = FALSE;
+ goto done;
+ }
+
+ switch (src_format) {
+ case GST_FORMAT_TIME:
+ switch (*dest_format) {
+ case GST_FORMAT_BYTES:
+ *dest_value = gst_util_uint64_scale_int (src_value,
+ stream->strf.auds->av_bps, GST_SECOND);
+ break;
+ case GST_FORMAT_DEFAULT:
+ *dest_value =
+ gst_util_uint64_scale_round (src_value, stream->strh->rate,
+ stream->strh->scale * GST_SECOND);
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ case GST_FORMAT_BYTES:
+ switch (*dest_format) {
+ case GST_FORMAT_TIME:
+ if (stream->strf.auds->av_bps != 0) {
+ *dest_value = avi_stream_convert_bytes_to_time_unchecked (stream,
+ src_value);
+ } else
+ res = FALSE;
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ case GST_FORMAT_DEFAULT:
+ switch (*dest_format) {
+ case GST_FORMAT_TIME:
+ *dest_value =
+ avi_stream_convert_frames_to_time_unchecked (stream, src_value);
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ default:
+ res = FALSE;
+ }
+
+done:
+ GST_LOG_OBJECT (pad,
+ "Returning res:%d dest_format:%s dest_value:%" G_GUINT64_FORMAT, res,
+ gst_format_get_name (*dest_format), *dest_value);
+ return res;
+}
+
+static const GstQueryType *
+gst_avi_demux_get_src_query_types (GstPad * pad)
+{
+ static const GstQueryType src_types[] = {
+ GST_QUERY_POSITION,
+ GST_QUERY_DURATION,
+ GST_QUERY_SEEKING,
+ GST_QUERY_CONVERT,
+ 0
+ };
+
+ return src_types;
+}
+
+static gboolean
+gst_avi_demux_handle_src_query (GstPad * pad, GstQuery * query)
+{
+ gboolean res = TRUE;
+ GstAviDemux *avi = GST_AVI_DEMUX (gst_pad_get_parent (pad));
+
+ GstAviStream *stream = gst_pad_get_element_private (pad);
+
+ if (!stream->strh || !stream->strf.data)
+ return gst_pad_query_default (pad, query);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_POSITION:{
+ gint64 pos = 0;
+
+ GST_DEBUG ("pos query for stream %u: frames %u, bytes %u",
+ stream->num, stream->current_entry, stream->current_total);
+
+ /* FIXME, this looks clumsy */
+ if (stream->strh->type == GST_RIFF_FCC_auds) {
+ if (stream->is_vbr) {
+ /* VBR */
+ pos = gst_util_uint64_scale ((gint64) stream->current_entry *
+ stream->strh->scale, GST_SECOND, (guint64) stream->strh->rate);
+ GST_DEBUG_OBJECT (avi, "VBR convert frame %u, time %"
+ GST_TIME_FORMAT, stream->current_entry, GST_TIME_ARGS (pos));
+ } else if (stream->strf.auds->av_bps != 0) {
+ /* CBR */
+ pos = gst_util_uint64_scale (stream->current_total, GST_SECOND,
+ (guint64) stream->strf.auds->av_bps);
+ GST_DEBUG_OBJECT (avi,
+ "CBR convert bytes %u, time %" GST_TIME_FORMAT,
+ stream->current_total, GST_TIME_ARGS (pos));
+ } else if (stream->idx_n != 0 && stream->total_bytes != 0) {
+ /* calculate timestamps based on percentage of length */
+ guint64 xlen = avi->avih->us_frame *
+ avi->avih->tot_frames * GST_USECOND;
+
+ if (stream->is_vbr) {
+ pos = gst_util_uint64_scale (xlen, stream->current_entry,
+ stream->idx_n);
+ GST_DEBUG_OBJECT (avi, "VBR perc convert frame %u, time %"
+ GST_TIME_FORMAT, stream->current_entry, GST_TIME_ARGS (pos));
+ } else {
+ pos = gst_util_uint64_scale (xlen, stream->current_total,
+ stream->total_bytes);
+ GST_DEBUG_OBJECT (avi,
+ "CBR perc convert bytes %u, time %" GST_TIME_FORMAT,
+ stream->current_total, GST_TIME_ARGS (pos));
+ }
+ } else {
+ /* we don't know */
+ res = FALSE;
+ }
+ } else {
+ if (stream->strh->rate != 0) {
+ pos = gst_util_uint64_scale ((guint64) stream->current_entry *
+ stream->strh->scale, GST_SECOND, (guint64) stream->strh->rate);
+ } else {
+ pos = stream->current_entry * avi->avih->us_frame * GST_USECOND;
+ }
+ }
+ if (res) {
+ GST_DEBUG ("pos query : %" GST_TIME_FORMAT, GST_TIME_ARGS (pos));
+ gst_query_set_position (query, GST_FORMAT_TIME, pos);
+ } else
+ GST_WARNING ("pos query failed");
+ break;
+ }
+ case GST_QUERY_DURATION:
+ {
+ GstFormat fmt;
+ GstClockTime duration;
+
+ /* only act on audio or video streams */
+ if (stream->strh->type != GST_RIFF_FCC_auds &&
+ stream->strh->type != GST_RIFF_FCC_vids) {
+ res = FALSE;
+ break;
+ }
+
+ /* take stream duration, fall back to avih duration */
+ if ((duration = stream->duration) == -1)
+ duration = avi->duration;
+
+ gst_query_parse_duration (query, &fmt, NULL);
+
+ switch (fmt) {
+ case GST_FORMAT_TIME:
+ gst_query_set_duration (query, fmt, duration);
+ break;
+ case GST_FORMAT_DEFAULT:
+ {
+ gint64 dur;
+ GST_DEBUG_OBJECT (query, "total frames is %" G_GUINT32_FORMAT,
+ stream->idx_n);
+
+ if (stream->idx_n > 0)
+ gst_query_set_duration (query, fmt, stream->idx_n);
+ else if (gst_pad_query_convert (pad, GST_FORMAT_TIME,
+ duration, &fmt, &dur))
+ gst_query_set_duration (query, fmt, dur);
+ break;
+ }
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ }
+ case GST_QUERY_SEEKING:{
+ GstFormat fmt;
+
+ gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+ if (fmt == GST_FORMAT_TIME) {
+ gboolean seekable = TRUE;
+
+ if (avi->streaming) {
+ seekable = avi->seekable;
+ }
+
+ gst_query_set_seeking (query, GST_FORMAT_TIME, seekable,
+ 0, stream->duration);
+ res = TRUE;
+ }
+ break;
+ }
+ case GST_QUERY_CONVERT:{
+ GstFormat src_fmt, dest_fmt;
+ gint64 src_val, dest_val;
+
+ gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
+ if ((res = gst_avi_demux_src_convert (pad, src_fmt, src_val, &dest_fmt,
+ &dest_val)))
+ gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+ else
+ res = gst_pad_query_default (pad, query);
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, query);
+ break;
+ }
+
+ gst_object_unref (avi);
+ return res;
+}
+
+#if 0
+static const GstEventMask *
+gst_avi_demux_get_event_mask (GstPad * pad)
+{
+ static const GstEventMask masks[] = {
+ {GST_EVENT_SEEK, GST_SEEK_METHOD_SET | GST_SEEK_FLAG_KEY_UNIT},
+ {0,}
+ };
+
+ return masks;
+}
+#endif
+
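+/* For each stream, look up the element-index entry before or after the given
+ * byte offset. When searching forward the stream's current entry/total are
+ * updated so the streaming thread can resume there; when searching backward
+ * the lowest entry offset found is returned. */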
+static guint64
+gst_avi_demux_seek_streams (GstAviDemux * avi, guint64 offset, gboolean before)
+{
+ GstAviStream *stream;
+ GstIndexEntry *entry;
+ gint i;
+ gint64 val, min = offset;
+
+ for (i = 0; i < avi->num_streams; i++) {
+ stream = &avi->stream[i];
+
+ entry = gst_index_get_assoc_entry (avi->element_index, stream->index_id,
+ before ? GST_INDEX_LOOKUP_BEFORE : GST_INDEX_LOOKUP_AFTER,
+ GST_ASSOCIATION_FLAG_NONE, GST_FORMAT_BYTES, offset);
+
+ if (before) {
+ if (entry) {
+ gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &val);
+ GST_DEBUG_OBJECT (avi, "stream %d, previous entry at %"
+ G_GUINT64_FORMAT, i, val);
+ if (val < min)
+ min = val;
+ }
+ continue;
+ }
+
+ if (!entry) {
+ GST_DEBUG_OBJECT (avi, "no position for stream %d, assuming at start", i);
+ stream->current_entry = 0;
+ stream->current_total = 0;
+ continue;
+ }
+
+ gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &val);
+ GST_DEBUG_OBJECT (avi, "stream %d, next entry at %" G_GUINT64_FORMAT,
+ i, val);
+
+ gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &val);
+ stream->current_total = val;
+ gst_index_entry_assoc_map (entry, GST_FORMAT_DEFAULT, &val);
+ stream->current_entry = val;
+ }
+
+ return min;
+}
+
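+/* comparison helper for gst_util_array_binary_search (): the per-stream
+ * index is sorted by byte offset, so this orders entries by their offset
+ * relative to the offset being searched for */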
+static gint
+gst_avi_demux_index_entry_offset_search (GstAviIndexEntry * entry,
+ guint64 * offset)
+{
+ if (entry->offset < *offset)
+ return -1;
+ else if (entry->offset > *offset)
+ return 1;
+ return 0;
+}
+
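+/* same as gst_avi_demux_seek_streams () above, but using the index we parsed
+ * ourselves instead of the element index */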
+static guint64
+gst_avi_demux_seek_streams_index (GstAviDemux * avi, guint64 offset,
+ gboolean before)
+{
+ GstAviStream *stream;
+ GstAviIndexEntry *entry;
+ gint i;
+ gint64 val, min = offset;
+ guint index = 0;
+
+ for (i = 0; i < avi->num_streams; i++) {
+ stream = &avi->stream[i];
+
+ /* compensate for chunk header */
+ offset += 8;
+ entry =
+ gst_util_array_binary_search (stream->index, stream->idx_n,
+ sizeof (GstAviIndexEntry),
+ (GCompareDataFunc) gst_avi_demux_index_entry_offset_search,
+ before ? GST_SEARCH_MODE_BEFORE : GST_SEARCH_MODE_AFTER, &offset, NULL);
+ offset -= 8;
+
+ if (entry)
+ index = entry - stream->index;
+
+ if (before) {
+ if (entry) {
+ val = stream->index[index].offset;
+ GST_DEBUG_OBJECT (avi,
+ "stream %d, previous entry at %" G_GUINT64_FORMAT, i, val);
+ if (val < min)
+ min = val;
+ }
+ continue;
+ }
+
+ if (!entry) {
+ GST_DEBUG_OBJECT (avi, "no position for stream %d, assuming at start", i);
+ stream->current_entry = 0;
+ stream->current_total = 0;
+ continue;
+ }
+
+ val = stream->index[index].offset - 8;
+ GST_DEBUG_OBJECT (avi, "stream %d, next entry at %" G_GUINT64_FORMAT, i,
+ val);
+
+ stream->current_total = stream->index[index].total;
+ stream->current_entry = index;
+ }
+
+ return min;
+}
+
+#define GST_AVI_SEEK_PUSH_DISPLACE (4 * GST_SECOND)
+
+static gboolean
+gst_avi_demux_handle_sink_event (GstPad * pad, GstEvent * event)
+{
+ gboolean res = TRUE;
+ GstAviDemux *avi = GST_AVI_DEMUX (gst_pad_get_parent (pad));
+
+ GST_DEBUG_OBJECT (avi,
+ "have event type %s: %p on sink pad", GST_EVENT_TYPE_NAME (event), event);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_NEWSEGMENT:
+ {
+ GstFormat format;
+ gdouble rate, arate;
+ gint64 start, stop, time, offset = 0;
+ gboolean update;
+ GstSegment segment;
+
+ /* some debug output */
+ gst_segment_init (&segment, GST_FORMAT_UNDEFINED);
+ gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
+ &start, &stop, &time);
+ gst_segment_set_newsegment_full (&segment, update, rate, arate, format,
+ start, stop, time);
+ GST_DEBUG_OBJECT (avi,
+ "received format %d newsegment %" GST_SEGMENT_FORMAT, format,
+ &segment);
+
+ /* chain will send initial newsegment after pads have been added */
+ if (avi->state != GST_AVI_DEMUX_MOVI) {
+ GST_DEBUG_OBJECT (avi, "still starting, eating event");
+ goto exit;
+ }
+
+ /* we only expect a BYTE segment, e.g. following a seek */
+ if (format != GST_FORMAT_BYTES) {
+ GST_DEBUG_OBJECT (avi, "unsupported segment format, ignoring");
+ goto exit;
+ }
+
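+ /* map the byte position of this segment back to a timestamp using whatever
+ * index information we have, so that a TIME newsegment can be sent
+ * downstream */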
+ if (avi->have_index) {
+ GstAviIndexEntry *entry;
+ guint i = 0, index = 0, k = 0;
+ GstAviStream *stream;
+
+ /* compensate chunk header, stored index offset points after header */
+ start += 8;
+ /* find which stream we're on */
+ do {
+ stream = &avi->stream[i];
+
+ /* find the index for start bytes offset */
+ entry = gst_util_array_binary_search (stream->index,
+ stream->idx_n, sizeof (GstAviIndexEntry),
+ (GCompareDataFunc) gst_avi_demux_index_entry_offset_search,
+ GST_SEARCH_MODE_AFTER, &start, NULL);
+
+ if (entry == NULL)
+ continue;
+ index = entry - stream->index;
+
+ /* we are on the stream with a chunk start offset closest to start */
+ if (!offset || stream->index[index].offset < offset) {
+ offset = stream->index[index].offset;
+ k = i;
+ }
+ /* exact match needs no further searching */
+ if (stream->index[index].offset == start)
+ break;
+ } while (++i < avi->num_streams);
+ start -= 8;
+ offset -= 8;
+ stream = &avi->stream[k];
+
+ /* so we have no idea what is to come, or where we are */
+ if (!offset) {
+ GST_WARNING_OBJECT (avi, "insufficient index data, forcing EOS");
+ goto eos;
+ }
+
+ /* get the ts corresponding to start offset bytes for the stream */
+ gst_avi_demux_get_buffer_info (avi, stream, index,
+ (GstClockTime *) & time, NULL, NULL, NULL);
+ } else if (avi->element_index) {
+ GstIndexEntry *entry;
+
+ /* Let's check if we have an index entry for this position */
+ entry = gst_index_get_assoc_entry (avi->element_index, avi->index_id,
+ GST_INDEX_LOOKUP_AFTER, GST_ASSOCIATION_FLAG_NONE,
+ GST_FORMAT_BYTES, start);
+
+ /* we can not go where we have not yet been before ... */
+ if (!entry) {
+ GST_WARNING_OBJECT (avi, "insufficient index data, forcing EOS");
+ goto eos;
+ }
+
+ gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &time);
+ gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &offset);
+ } else {
+ GST_WARNING_OBJECT (avi, "no index data, forcing EOS");
+ goto eos;
+ }
+
+ stop = GST_CLOCK_TIME_NONE;
+
+ /* set up segment and send downstream */
+ gst_segment_set_newsegment_full (&avi->segment, update, rate, arate,
+ GST_FORMAT_TIME, time, stop, time);
+ GST_DEBUG_OBJECT (avi, "Pushing newseg update %d, rate %g, "
+ "applied rate %g, format %d, start %" G_GINT64_FORMAT ", "
+ "stop %" G_GINT64_FORMAT, update, rate, arate, GST_FORMAT_TIME,
+ time, stop);
+ gst_avi_demux_push_event (avi,
+ gst_event_new_new_segment_full (update, rate, arate, GST_FORMAT_TIME,
+ time, stop, time));
+
+ GST_DEBUG_OBJECT (avi, "next chunk expected at %" G_GINT64_FORMAT, start);
+
+ /* adjust state for streaming thread accordingly */
+ if (avi->have_index)
+ gst_avi_demux_seek_streams_index (avi, offset, FALSE);
+ else
+ gst_avi_demux_seek_streams (avi, offset, FALSE);
+
+ /* set up streaming thread */
+ g_assert (offset >= start);
+ avi->offset = start;
+ avi->todrop = offset - start;
+
+ exit:
+ gst_event_unref (event);
+ res = TRUE;
+ break;
+ eos:
+ /* set up for EOS */
+ avi->have_eos = TRUE;
+ goto exit;
+ }
+ case GST_EVENT_EOS:
+ {
+ if (avi->state != GST_AVI_DEMUX_MOVI) {
+ gst_event_unref (event);
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX,
+ (NULL), ("got eos and didn't receive a complete header object"));
+ } else if (!gst_avi_demux_push_event (avi, event)) {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX,
+ (NULL), ("got eos but no streams (yet)"));
+ }
+ break;
+ }
+ case GST_EVENT_FLUSH_STOP:
+ {
+ gint i;
+
+ gst_adapter_clear (avi->adapter);
+ avi->have_eos = FALSE;
+ for (i = 0; i < avi->num_streams; i++) {
+ avi->stream[i].last_flow = GST_FLOW_OK;
+ avi->stream[i].discont = TRUE;
+ }
+ /* fall through to default case so that the event gets passed downstream */
+ }
+ default:
+ res = gst_pad_event_default (pad, event);
+ break;
+ }
+
+ gst_object_unref (avi);
+
+ return res;
+}
+
+static gboolean
+gst_avi_demux_handle_src_event (GstPad * pad, GstEvent * event)
+{
+ gboolean res = TRUE;
+ GstAviDemux *avi = GST_AVI_DEMUX (gst_pad_get_parent (pad));
+
+ GST_DEBUG_OBJECT (avi,
+ "have event type %s: %p on src pad", GST_EVENT_TYPE_NAME (event), event);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ if (!avi->streaming) {
+ res = gst_avi_demux_handle_seek (avi, pad, event);
+ } else {
+ res = gst_avi_demux_handle_seek_push (avi, pad, event);
+ }
+ gst_event_unref (event);
+ break;
+ case GST_EVENT_QOS:
+ case GST_EVENT_NAVIGATION:
+ res = FALSE;
+ gst_event_unref (event);
+ break;
+ default:
+ res = gst_pad_event_default (pad, event);
+ break;
+ }
+
+ gst_object_unref (avi);
+
+ return res;
+}
+
+/* streaming helper (push) */
+
+/*
+ * gst_avi_demux_peek_chunk_info:
+ * @avi: Avi object
+ * @tag: holder for tag
+ * @size: holder for tag size
+ *
+ * Peek next chunk info (tag and size)
+ *
+ * Returns: TRUE when the chunk info (tag and size) could be peeked
+ */
+static gboolean
+gst_avi_demux_peek_chunk_info (GstAviDemux * avi, guint32 * tag, guint32 * size)
+{
+ const guint8 *data = NULL;
+
+ if (gst_adapter_available (avi->adapter) < 8)
+ return FALSE;
+
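+ /* a RIFF chunk starts with a 4-byte FOURCC tag followed by a 4-byte
+ * little-endian payload size; the payload itself is padded to an even
+ * number of bytes */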
+ data = gst_adapter_peek (avi->adapter, 8);
+ *tag = GST_READ_UINT32_LE (data);
+ *size = GST_READ_UINT32_LE (data + 4);
+
+ return TRUE;
+}
+
+/*
+ * gst_avi_demux_peek_chunk:
+ * @avi: Avi object
+ * @tag: holder for tag
+ * @size: holder for tag size
+ *
+ * Peek enough data for one full chunk
+ *
+ * Returns: %TRUE when the data for one whole chunk is available
+ */
+static gboolean
+gst_avi_demux_peek_chunk (GstAviDemux * avi, guint32 * tag, guint32 * size)
+{
+ guint32 peek_size = 0;
+ gint available;
+
+ if (!gst_avi_demux_peek_chunk_info (avi, tag, size))
+ goto peek_failed;
+
+ /* a size of 0 would give an empty data buffer, which would surprise most
+ * callers, and a huge size is not worth trying to squeeze into the adapter,
+ * so we throw a poor man's exception, which the caller can catch if it
+ * really wants to handle a 0-size chunk */
+ if (!(*size) || (*size) >= (1 << 30))
+ goto strange_size;
+
+ peek_size = (*size + 1) & ~1;
+ available = gst_adapter_available (avi->adapter);
+
+ GST_DEBUG_OBJECT (avi,
+ "Need to peek chunk of %d bytes to read chunk %" GST_FOURCC_FORMAT
+ ", %d bytes available", *size, GST_FOURCC_ARGS (*tag), available);
+
+ if (available < (8 + peek_size))
+ goto need_more;
+
+ return TRUE;
+
+ /* ERRORS */
+peek_failed:
+ {
+ GST_INFO_OBJECT (avi, "Failed to peek");
+ return FALSE;
+ }
+strange_size:
+ {
+ GST_INFO_OBJECT (avi,
+ "Invalid/unexpected chunk size %d for tag %" GST_FOURCC_FORMAT, *size,
+ GST_FOURCC_ARGS (*tag));
+ /* chain should give up */
+ avi->abort_buffering = TRUE;
+ return FALSE;
+ }
+need_more:
+ {
+ GST_INFO_OBJECT (avi, "need more %d < %" G_GUINT32_FORMAT,
+ available, 8 + peek_size);
+ return FALSE;
+ }
+}
+
+/* AVI init */
+
+/*
+ * gst_avi_demux_parse_file_header:
+ * @element: caller element (used for errors/debug).
+ * @buf: input data to be used for parsing.
+ *
+ * "Open" a RIFF/AVI file. The buffer should be at least 12
+ * bytes long. Takes ownership of @buf.
+ *
+ * Returns: TRUE if the file is a RIFF/AVI file, FALSE otherwise.
+ * Throws an error, caller should error out (fatal).
+ */
+static gboolean
+gst_avi_demux_parse_file_header (GstElement * element, GstBuffer * buf)
+{
+ guint32 doctype;
+ GstClockTime stamp;
+
+ stamp = gst_util_get_timestamp ();
+
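+ /* a RIFF/AVI file starts with 'RIFF', a 32-bit little-endian file size and
+ * the doctype FOURCC, which is 'AVI ' for AVI files */
+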
+ /* riff_parse posts an error */
+ if (!gst_riff_parse_file_header (element, buf, &doctype))
+ return FALSE;
+
+ if (doctype != GST_RIFF_RIFF_AVI)
+ goto not_avi;
+
+ stamp = gst_util_get_timestamp () - stamp;
+ GST_DEBUG_OBJECT (element, "header parsing took %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stamp));
+
+ return TRUE;
+
+ /* ERRORS */
+not_avi:
+ {
+ GST_ELEMENT_ERROR (element, STREAM, WRONG_TYPE, (NULL),
+ ("File is not an AVI file: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (doctype)));
+ return FALSE;
+ }
+}
+
+/*
+ * Read AVI file tag when streaming
+ */
+static GstFlowReturn
+gst_avi_demux_stream_init_push (GstAviDemux * avi)
+{
+ if (gst_adapter_available (avi->adapter) >= 12) {
+ GstBuffer *tmp;
+
+ tmp = gst_adapter_take_buffer (avi->adapter, 12);
+
+ GST_DEBUG ("Parsing avi header");
+ if (!gst_avi_demux_parse_file_header (GST_ELEMENT_CAST (avi), tmp)) {
+ return GST_FLOW_ERROR;
+ }
+ GST_DEBUG ("header ok");
+ avi->offset += 12;
+
+ avi->state = GST_AVI_DEMUX_HEADER;
+ }
+ return GST_FLOW_OK;
+}
+
+/*
+ * Read AVI file tag
+ */
+static GstFlowReturn
+gst_avi_demux_stream_init_pull (GstAviDemux * avi)
+{
+ GstFlowReturn res;
+ GstBuffer *buf = NULL;
+
+ res = gst_pad_pull_range (avi->sinkpad, avi->offset, 12, &buf);
+ if (res != GST_FLOW_OK)
+ return res;
+ else if (!gst_avi_demux_parse_file_header (GST_ELEMENT_CAST (avi), buf))
+ goto wrong_header;
+
+ avi->offset += 12;
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+wrong_header:
+ {
+ GST_DEBUG_OBJECT (avi, "error parsing file header");
+ return GST_FLOW_ERROR;
+ }
+}
+
+/* AVI header handling */
+/*
+ * gst_avi_demux_parse_avih:
+ * @avi: caller element (used for errors/debug).
+ * @buf: input data to be used for parsing.
+ * @avih: pointer to structure (filled in by function) containing
+ * stream information (such as flags, number of streams, etc.).
+ *
+ * Read 'avih' header. Discards buffer after use.
+ *
+ * Returns: TRUE on success, FALSE otherwise. Throws an error if
+ * the header is invalid. The caller should error out
+ * (fatal).
+ */
+static gboolean
+gst_avi_demux_parse_avih (GstAviDemux * avi,
+ GstBuffer * buf, gst_riff_avih ** _avih)
+{
+ gst_riff_avih *avih;
+
+ if (buf == NULL)
+ goto no_buffer;
+
+ if (GST_BUFFER_SIZE (buf) < sizeof (gst_riff_avih))
+ goto avih_too_small;
+
+ avih = g_memdup (GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+
+#if (G_BYTE_ORDER == G_BIG_ENDIAN)
+ avih->us_frame = GUINT32_FROM_LE (avih->us_frame);
+ avih->max_bps = GUINT32_FROM_LE (avih->max_bps);
+ avih->pad_gran = GUINT32_FROM_LE (avih->pad_gran);
+ avih->flags = GUINT32_FROM_LE (avih->flags);
+ avih->tot_frames = GUINT32_FROM_LE (avih->tot_frames);
+ avih->init_frames = GUINT32_FROM_LE (avih->init_frames);
+ avih->streams = GUINT32_FROM_LE (avih->streams);
+ avih->bufsize = GUINT32_FROM_LE (avih->bufsize);
+ avih->width = GUINT32_FROM_LE (avih->width);
+ avih->height = GUINT32_FROM_LE (avih->height);
+ avih->scale = GUINT32_FROM_LE (avih->scale);
+ avih->rate = GUINT32_FROM_LE (avih->rate);
+ avih->start = GUINT32_FROM_LE (avih->start);
+ avih->length = GUINT32_FROM_LE (avih->length);
+#endif
+
+ /* debug stuff */
+ GST_INFO_OBJECT (avi, "avih tag found:");
+ GST_INFO_OBJECT (avi, " us_frame %u", avih->us_frame);
+ GST_INFO_OBJECT (avi, " max_bps %u", avih->max_bps);
+ GST_INFO_OBJECT (avi, " pad_gran %u", avih->pad_gran);
+ GST_INFO_OBJECT (avi, " flags 0x%08x", avih->flags);
+ GST_INFO_OBJECT (avi, " tot_frames %u", avih->tot_frames);
+ GST_INFO_OBJECT (avi, " init_frames %u", avih->init_frames);
+ GST_INFO_OBJECT (avi, " streams %u", avih->streams);
+ GST_INFO_OBJECT (avi, " bufsize %u", avih->bufsize);
+ GST_INFO_OBJECT (avi, " width %u", avih->width);
+ GST_INFO_OBJECT (avi, " height %u", avih->height);
+ GST_INFO_OBJECT (avi, " scale %u", avih->scale);
+ GST_INFO_OBJECT (avi, " rate %u", avih->rate);
+ GST_INFO_OBJECT (avi, " start %u", avih->start);
+ GST_INFO_OBJECT (avi, " length %u", avih->length);
+
+ *_avih = avih;
+ gst_buffer_unref (buf);
+
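+ /* us_frame is the duration of one frame in microseconds, so multiplying by
+ * tot_frames and by 1000 yields the total duration in nanoseconds, i.e. in
+ * GstClockTime units */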
+ if (avih->us_frame != 0 && avih->tot_frames != 0)
+ avi->duration =
+ (guint64) avih->us_frame * (guint64) avih->tot_frames * 1000;
+ else
+ avi->duration = GST_CLOCK_TIME_NONE;
+
+ GST_INFO_OBJECT (avi, " header duration %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (avi->duration));
+
+ return TRUE;
+
+ /* ERRORS */
+no_buffer:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("No buffer"));
+ return FALSE;
+ }
+avih_too_small:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Too small avih (%d available, %d needed)",
+ GST_BUFFER_SIZE (buf), (int) sizeof (gst_riff_avih)));
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+}
+
+/*
+ * gst_avi_demux_parse_superindex:
+ * @avi: caller element (used for debugging/errors).
+ * @buf: input data to use for parsing.
+ * @locations: locations in the file (byte-offsets) that contain
+ * the actual indexes (see gst_avi_demux_parse_subindex()).
+ * The array ends with GST_BUFFER_OFFSET_NONE.
+ *
+ * Reads superindex (openDML-2 spec stuff) from the provided data.
+ *
+ * Returns: TRUE on success, FALSE otherwise. Indexes should be skipped
+ * on error, but they are not fatal.
+ */
+static gboolean
+gst_avi_demux_parse_superindex (GstAviDemux * avi,
+ GstBuffer * buf, guint64 ** _indexes)
+{
+ guint8 *data;
+ guint16 bpe = 16;
+ guint32 num, i;
+ guint64 *indexes;
+ guint size;
+
+ *_indexes = NULL;
+
+ size = buf ? GST_BUFFER_SIZE (buf) : 0;
+ if (size < 24)
+ goto too_small;
+
+ data = GST_BUFFER_DATA (buf);
+
+ /* check type of index. The opendml2 specs state that
+ * there should be 4 dwords per array entry. Type can be
+ * either frame or field (and we don't care). */
+ if (GST_READ_UINT16_LE (data) != 4 ||
+ (data[2] & 0xfe) != 0x0 || data[3] != 0x0) {
+ GST_WARNING_OBJECT (avi,
+ "Superindex for stream has unexpected "
+ "size_entry %d (bytes) or flags 0x%02x/0x%02x",
+ GST_READ_UINT16_LE (data), data[2], data[3]);
+ bpe = GST_READ_UINT16_LE (data) * 4;
+ }
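+
+ /* per the OpenDML spec the superindex payload is a 24-byte header (entry
+ * size in 32-bit words, sub type, type, entry count at offset 4, chunk id,
+ * reserved words) followed by 16-byte entries of 64-bit offset, 32-bit size
+ * and 32-bit duration; only the offsets are used here */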
+ num = GST_READ_UINT32_LE (&data[4]);
+
+ GST_DEBUG_OBJECT (avi, "got %d indexes", num);
+
+ /* this can't work out well ... */
+ if (num > G_MAXUINT32 >> 1 || bpe < 8) {
+ goto invalid_params;
+ }
+
+ indexes = g_new (guint64, num + 1);
+ for (i = 0; i < num; i++) {
+ if (size < 24 + bpe * (i + 1))
+ break;
+ indexes[i] = GST_READ_UINT64_LE (&data[24 + bpe * i]);
+ GST_DEBUG_OBJECT (avi, "index %d at %" G_GUINT64_FORMAT, i, indexes[i]);
+ }
+ indexes[i] = GST_BUFFER_OFFSET_NONE;
+ *_indexes = indexes;
+
+ gst_buffer_unref (buf);
+
+ return TRUE;
+
+ /* ERRORS */
+too_small:
+ {
+ GST_ERROR_OBJECT (avi,
+ "Not enough data to parse superindex (%d available, 24 needed)", size);
+ if (buf)
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+invalid_params:
+ {
+ GST_ERROR_OBJECT (avi, "invalid index parameters (num = %d, bpe = %d)",
+ num, bpe);
+ if (buf)
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+}
+
+/* add an entry to the index of a stream. @num should be an estimate of the
+ * total amount of index entries for all streams and is used to dynamically
+ * allocate memory for the index entries. */
+static inline gboolean
+gst_avi_demux_add_index (GstAviDemux * avi, GstAviStream * stream,
+ guint num, GstAviIndexEntry * entry)
+{
+ /* ensure index memory */
+ if (G_UNLIKELY (stream->idx_n >= stream->idx_max)) {
+ guint idx_max = stream->idx_max;
+ GstAviIndexEntry *new_idx;
+
+ /* we need to make some more room */
+ if (idx_max == 0) {
+ /* initial size guess, assume each stream has an equal amount of entries,
+ * overshoot with at least 8K */
+ idx_max = (num / avi->num_streams) + (8192 / sizeof (GstAviIndexEntry));
+ } else {
+ idx_max += 8192 / sizeof (GstAviIndexEntry);
+ GST_DEBUG_OBJECT (avi, "expanded index from %u to %u",
+ stream->idx_max, idx_max);
+ }
+ new_idx = g_try_renew (GstAviIndexEntry, stream->index, idx_max);
+ /* out of memory, if this fails stream->index is untouched. */
+ if (G_UNLIKELY (!new_idx))
+ return FALSE;
+ /* use new index */
+ stream->index = new_idx;
+ stream->idx_max = idx_max;
+ }
+
+ /* update entry total and stream stats. The entry total can be converted to
+ * the timestamp of the entry easily. */
+ if (stream->strh->type == GST_RIFF_FCC_auds) {
+ gint blockalign;
+
+ if (stream->is_vbr) {
+ entry->total = stream->total_blocks;
+ } else {
+ entry->total = stream->total_bytes;
+ }
+ blockalign = stream->strf.auds->blockalign;
+ if (blockalign > 0)
+ stream->total_blocks += DIV_ROUND_UP (entry->size, blockalign);
+ else
+ stream->total_blocks++;
+ } else {
+ if (stream->is_vbr) {
+ entry->total = stream->idx_n;
+ } else {
+ entry->total = stream->total_bytes;
+ }
+ }
+ stream->total_bytes += entry->size;
+ if (ENTRY_IS_KEYFRAME (entry))
+ stream->n_keyframes++;
+
+ /* and add */
+ GST_LOG_OBJECT (avi,
+ "Adding stream %u, index entry %d, kf %d, size %u "
+ ", offset %" G_GUINT64_FORMAT ", total %" G_GUINT64_FORMAT, stream->num,
+ stream->idx_n, ENTRY_IS_KEYFRAME (entry), entry->size, entry->offset,
+ entry->total);
+ stream->index[stream->idx_n++] = *entry;
+
+ return TRUE;
+}
+
+/* given @entry_n in @stream, calculate info such as timestamps and
+ * offsets for the entry. */
+static void
+gst_avi_demux_get_buffer_info (GstAviDemux * avi, GstAviStream * stream,
+ guint entry_n, GstClockTime * timestamp, GstClockTime * ts_end,
+ guint64 * offset, guint64 * offset_end)
+{
+ GstAviIndexEntry *entry;
+
+ entry = &stream->index[entry_n];
+
+ if (stream->is_vbr) {
+ /* VBR stream next timestamp */
+ if (stream->strh->type == GST_RIFF_FCC_auds) {
+ if (timestamp)
+ *timestamp =
+ avi_stream_convert_frames_to_time_unchecked (stream, entry->total);
+ if (ts_end)
+ *ts_end = avi_stream_convert_frames_to_time_unchecked (stream,
+ entry->total + 1);
+ } else {
+ if (timestamp)
+ *timestamp =
+ avi_stream_convert_frames_to_time_unchecked (stream, entry_n);
+ if (ts_end)
+ *ts_end = avi_stream_convert_frames_to_time_unchecked (stream,
+ entry_n + 1);
+ }
+ } else if (stream->strh->type == GST_RIFF_FCC_auds) {
+ /* constant rate stream */
+ if (timestamp)
+ *timestamp =
+ avi_stream_convert_bytes_to_time_unchecked (stream, entry->total);
+ if (ts_end)
+ *ts_end = avi_stream_convert_bytes_to_time_unchecked (stream,
+ entry->total + entry->size);
+ }
+ if (stream->strh->type == GST_RIFF_FCC_vids) {
+ /* video offsets are the frame number */
+ if (offset)
+ *offset = entry_n;
+ if (offset_end)
+ *offset_end = entry_n + 1;
+ } else {
+ /* no offsets for audio */
+ if (offset)
+ *offset = -1;
+ if (offset_end)
+ *offset_end = -1;
+ }
+}
+
+/* collect and debug stats about the indexes for all streams.
+ * This method is also responsible for filling in the stream duration
+ * as measured by the amount of index entries.
+ *
+ * Returns TRUE if the index is not empty, else FALSE */
+static gboolean
+gst_avi_demux_do_index_stats (GstAviDemux * avi)
+{
+ guint total_idx = 0;
+ guint i;
+#ifndef GST_DISABLE_GST_DEBUG
+ guint total_max = 0;
+#endif
+
+ /* get stream stats now */
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *stream;
+
+ if (G_UNLIKELY (!(stream = &avi->stream[i])))
+ continue;
+ if (G_UNLIKELY (!stream->strh))
+ continue;
+ if (G_UNLIKELY (!stream->index || stream->idx_n == 0))
+ continue;
+
+ /* we're interested in the end_ts of the last entry, which is the total
+ * duration of this stream */
+ gst_avi_demux_get_buffer_info (avi, stream, stream->idx_n - 1,
+ NULL, &stream->idx_duration, NULL, NULL);
+
+ total_idx += stream->idx_n;
+#ifndef GST_DISABLE_GST_DEBUG
+ total_max += stream->idx_max;
+#endif
+ GST_INFO_OBJECT (avi, "Stream %d, dur %" GST_TIME_FORMAT ", %6u entries, "
+ "%5u keyframes, entry size = %2u, total size = %10u, allocated %10u",
+ i, GST_TIME_ARGS (stream->idx_duration), stream->idx_n,
+ stream->n_keyframes, (guint) sizeof (GstAviIndexEntry),
+ (guint) (stream->idx_n * sizeof (GstAviIndexEntry)),
+ (guint) (stream->idx_max * sizeof (GstAviIndexEntry)));
+ }
+ total_idx *= sizeof (GstAviIndexEntry);
+#ifndef GST_DISABLE_GST_DEBUG
+ total_max *= sizeof (GstAviIndexEntry);
+#endif
+ GST_INFO_OBJECT (avi, "%u bytes for index vs %u ideally, %u wasted",
+ total_max, total_idx, total_max - total_idx);
+
+ if (total_idx == 0) {
+ GST_WARNING_OBJECT (avi, "Index is empty !");
+ return FALSE;
+ }
+ return TRUE;
+}
+
+/*
+ * gst_avi_demux_parse_subindex:
+ * @avi: Avi object
+ * @buf: input data to use for parsing.
+ * @stream: stream context.
+ * @entries_list: a list (returned by the function) containing all the
+ * indexes parsed in this specific subindex. The first
+ * entry is also a pointer to allocated memory that needs
+ * to be freed. May be NULL if no supported indexes were
+ * found.
+ *
+ * Reads a subindex (openDML-2 spec stuff) from the provided data.
+ * The buffer should contain a GST_RIFF_TAG_ix?? chunk.
+ *
+ * Returns: TRUE on success, FALSE otherwise. Errors are fatal, we
+ * throw an error, caller should bail out asap.
+ */
+static gboolean
+gst_avi_demux_parse_subindex (GstAviDemux * avi, GstAviStream * stream,
+ GstBuffer * buf)
+{
+ guint8 *data;
+ guint16 bpe;
+ guint32 num, i;
+ guint64 baseoff;
+ guint size;
+
+ if (!buf)
+ return TRUE;
+
+ size = GST_BUFFER_SIZE (buf);
+
+ /* check size */
+ if (size < 24)
+ goto too_small;
+
+ data = GST_BUFFER_DATA (buf);
+
+ /* We don't support index-data yet */
+ if (data[3] & 0x80)
+ goto not_implemented;
+
+ /* check type of index. The opendml2 specs state that
+ * there should be 4 dwords per array entry. Type can be
+ * either frame or field (and we don't care). */
+ bpe = (data[2] & 0x01) ? 12 : 8;
+ if (GST_READ_UINT16_LE (data) != bpe / 4 ||
+ (data[2] & 0xfe) != 0x0 || data[3] != 0x1) {
+ GST_WARNING_OBJECT (avi,
+ "Superindex for stream %d has unexpected "
+ "size_entry %d (bytes) or flags 0x%02x/0x%02x",
+ stream->num, GST_READ_UINT16_LE (data), data[2], data[3]);
+ bpe = GST_READ_UINT16_LE (data) * 4;
+ }
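+
+ /* per the OpenDML spec a standard 'ix##' index has a 24-byte header with
+ * the entry count at offset 4 and a 64-bit base offset at offset 12,
+ * followed by 8-byte entries (12 bytes for field indexes) holding an offset
+ * relative to the base and a size whose top bit marks non-keyframes */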
+ num = GST_READ_UINT32_LE (&data[4]);
+ baseoff = GST_READ_UINT64_LE (&data[12]);
+
+ /* If there's nothing, just return ! */
+ if (num == 0)
+ goto empty_index;
+
+ GST_INFO_OBJECT (avi, "Parsing subindex, nr_entries = %6d", num);
+
+ for (i = 0; i < num; i++) {
+ GstAviIndexEntry entry;
+
+ if (size < 24 + bpe * (i + 1))
+ break;
+
+ /* fill in offset and size. size contains the keyframe flag in the
+ * upper bit */
+ entry.offset = baseoff + GST_READ_UINT32_LE (&data[24 + bpe * i]);
+ entry.size = GST_READ_UINT32_LE (&data[24 + bpe * i + 4]);
+ /* handle flags */
+ if (stream->strh->type == GST_RIFF_FCC_auds) {
+ /* all audio frames are keyframes */
+ ENTRY_SET_KEYFRAME (&entry);
+ } else {
+ /* else read flags */
+ entry.flags = (entry.size & 0x80000000) ? 0 : GST_AVI_KEYFRAME;
+ }
+ entry.size &= ~0x80000000;
+
+ /* and add */
+ if (G_UNLIKELY (!gst_avi_demux_add_index (avi, stream, num, &entry)))
+ goto out_of_mem;
+ }
+ gst_buffer_unref (buf);
+
+ return TRUE;
+
+ /* ERRORS */
+too_small:
+ {
+ GST_ERROR_OBJECT (avi,
+ "Not enough data to parse subindex (%d available, 24 needed)", size);
+ gst_buffer_unref (buf);
+ return TRUE; /* continue */
+ }
+not_implemented:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, NOT_IMPLEMENTED, (NULL),
+ ("Subindex-is-data is not implemented"));
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+empty_index:
+ {
+ GST_DEBUG_OBJECT (avi, "the index is empty");
+ gst_buffer_unref (buf);
+ return TRUE;
+ }
+out_of_mem:
+ {
+ GST_ELEMENT_ERROR (avi, RESOURCE, NO_SPACE_LEFT, (NULL),
+ ("Cannot allocate memory for %u*%u=%u bytes",
+ (guint) sizeof (GstAviIndexEntry), num,
+ (guint) sizeof (GstAviIndexEntry) * num));
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+}
+
+/*
+ * Create and push a flushing seek event upstream
+ */
+static gboolean
+perform_seek_to_offset (GstAviDemux * demux, guint64 offset)
+{
+ GstEvent *event;
+ gboolean res = 0;
+
+ GST_DEBUG_OBJECT (demux, "Seeking to %" G_GUINT64_FORMAT, offset);
+
+ event =
+ gst_event_new_seek (1.0, GST_FORMAT_BYTES,
+ GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET, offset,
+ GST_SEEK_TYPE_NONE, -1);
+
+ res = gst_pad_push_event (demux->sinkpad, event);
+
+ if (res)
+ demux->offset = offset;
+ return res;
+}
+
+/*
+ * Read AVI index when streaming
+ */
+static gboolean
+gst_avi_demux_read_subindexes_push (GstAviDemux * avi)
+{
+ guint32 tag = 0, size;
+ GstBuffer *buf = NULL;
+ guint odml_stream;
+
+ GST_DEBUG_OBJECT (avi, "read subindexes for %d streams", avi->num_streams);
+
+ if (avi->odml_subidxs[avi->odml_subidx] != avi->offset)
+ return FALSE;
+
+ if (!gst_avi_demux_peek_chunk (avi, &tag, &size))
+ return TRUE;
+
+ /* this is the ODML chunk we expect */
+ odml_stream = avi->odml_stream;
+
+ if ((tag != GST_MAKE_FOURCC ('i', 'x', '0' + odml_stream / 10,
+ '0' + odml_stream % 10)) &&
+ (tag != GST_MAKE_FOURCC ('0' + odml_stream / 10,
+ '0' + odml_stream % 10, 'i', 'x'))) {
+ GST_WARNING_OBJECT (avi, "Not an ix## chunk (%" GST_FOURCC_FORMAT ")",
+ GST_FOURCC_ARGS (tag));
+ return FALSE;
+ }
+
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ /* flush chunk header so we get just the 'size' payload data */
+ gst_adapter_flush (avi->adapter, 8);
+ buf = gst_adapter_take_buffer (avi->adapter, size);
+
+ if (!gst_avi_demux_parse_subindex (avi, &avi->stream[odml_stream], buf))
+ return FALSE;
+
+ /* we parsed the index, go to next subindex */
+ avi->odml_subidx++;
+
+ if (avi->odml_subidxs[avi->odml_subidx] == GST_BUFFER_OFFSET_NONE) {
+ /* we reached the end of the indexes for this stream, move to the next
+ * stream to handle the first index */
+ avi->odml_stream++;
+ avi->odml_subidx = 0;
+
+ if (avi->odml_stream < avi->num_streams) {
+ /* there are more indexes */
+ avi->odml_subidxs = avi->stream[avi->odml_stream].indexes;
+ } else {
+ /* we're done, get stream stats now */
+ avi->have_index = gst_avi_demux_do_index_stats (avi);
+
+ return TRUE;
+ }
+ }
+
+ /* seek to next index */
+ return perform_seek_to_offset (avi, avi->odml_subidxs[avi->odml_subidx]);
+}
+
+/*
+ * Read AVI index
+ */
+static void
+gst_avi_demux_read_subindexes_pull (GstAviDemux * avi)
+{
+ guint32 tag;
+ GstBuffer *buf;
+ gint i, n;
+
+ GST_DEBUG_OBJECT (avi, "read subindexes for %d streams", avi->num_streams);
+
+ for (n = 0; n < avi->num_streams; n++) {
+ GstAviStream *stream = &avi->stream[n];
+
+ if (stream->indexes == NULL)
+ continue;
+
+ for (i = 0; stream->indexes[i] != GST_BUFFER_OFFSET_NONE; i++) {
+ if (gst_riff_read_chunk (GST_ELEMENT_CAST (avi), avi->sinkpad,
+ &stream->indexes[i], &tag, &buf) != GST_FLOW_OK)
+ continue;
+ else if ((tag != GST_MAKE_FOURCC ('i', 'x', '0' + stream->num / 10,
+ '0' + stream->num % 10)) &&
+ (tag != GST_MAKE_FOURCC ('0' + stream->num / 10,
+ '0' + stream->num % 10, 'i', 'x'))) {
+ /* Some ODML files (created by god knows what muxer) have a ##ix format
+ * instead of the 'official' ix##. They are still valid though. */
+ GST_WARNING_OBJECT (avi, "Not an ix## chunk (%" GST_FOURCC_FORMAT ")",
+ GST_FOURCC_ARGS (tag));
+ gst_buffer_unref (buf);
+ continue;
+ }
+
+ if (!gst_avi_demux_parse_subindex (avi, stream, buf))
+ continue;
+ }
+
+ g_free (stream->indexes);
+ stream->indexes = NULL;
+ }
+ /* get stream stats now */
+ avi->have_index = gst_avi_demux_do_index_stats (avi);
+}
+
+/*
+ * gst_avi_demux_riff_parse_vprp:
+ * @element: caller element (used for debugging/error).
+ * @buf: input data to be used for parsing, stripped from header.
+ * @vprp: a pointer (returned by this function) to a filled-in vprp
+ * structure. Caller should free it.
+ *
+ * Parses a video stream's vprp. This function takes ownership of @buf.
+ *
+ * Returns: TRUE if parsing succeeded, otherwise FALSE. The stream
+ * should be skipped on error, but it is not fatal.
+ */
+static gboolean
+gst_avi_demux_riff_parse_vprp (GstElement * element,
+ GstBuffer * buf, gst_riff_vprp ** _vprp)
+{
+ gst_riff_vprp *vprp;
+ gint k;
+
+ g_return_val_if_fail (buf != NULL, FALSE);
+ g_return_val_if_fail (_vprp != NULL, FALSE);
+
+ if (GST_BUFFER_SIZE (buf) < G_STRUCT_OFFSET (gst_riff_vprp, field_info))
+ goto too_small;
+
+ vprp = g_memdup (GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+
+#if (G_BYTE_ORDER == G_BIG_ENDIAN)
+ vprp->format_token = GUINT32_FROM_LE (vprp->format_token);
+ vprp->standard = GUINT32_FROM_LE (vprp->standard);
+ vprp->vert_rate = GUINT32_FROM_LE (vprp->vert_rate);
+ vprp->hor_t_total = GUINT32_FROM_LE (vprp->hor_t_total);
+ vprp->vert_lines = GUINT32_FROM_LE (vprp->vert_lines);
+ vprp->aspect = GUINT32_FROM_LE (vprp->aspect);
+ vprp->width = GUINT32_FROM_LE (vprp->width);
+ vprp->height = GUINT32_FROM_LE (vprp->height);
+ vprp->fields = GUINT32_FROM_LE (vprp->fields);
+#endif
+
+ /* size checking */
+ /* calculate fields based on size */
+ k = (GST_BUFFER_SIZE (buf) - G_STRUCT_OFFSET (gst_riff_vprp, field_info)) /
+ vprp->fields;
+ if (vprp->fields > k) {
+ GST_WARNING_OBJECT (element,
+ "vprp header indicated %d fields, only %d available", vprp->fields, k);
+ vprp->fields = k;
+ }
+ if (vprp->fields > GST_RIFF_VPRP_VIDEO_FIELDS) {
+ GST_WARNING_OBJECT (element,
+ "vprp header indicated %d fields, at most %d supported", vprp->fields,
+ GST_RIFF_VPRP_VIDEO_FIELDS);
+ vprp->fields = GST_RIFF_VPRP_VIDEO_FIELDS;
+ }
+#if (G_BYTE_ORDER == G_BIG_ENDIAN)
+ for (k = 0; k < vprp->fields; k++) {
+ gst_riff_vprp_video_field_desc *fd;
+
+ fd = &vprp->field_info[k];
+ fd->compressed_bm_height = GUINT32_FROM_LE (fd->compressed_bm_height);
+ fd->compressed_bm_width = GUINT32_FROM_LE (fd->compressed_bm_width);
+ fd->valid_bm_height = GUINT32_FROM_LE (fd->valid_bm_height);
+ fd->valid_bm_width = GUINT16_FROM_LE (fd->valid_bm_width);
+ fd->valid_bm_x_offset = GUINT16_FROM_LE (fd->valid_bm_x_offset);
+ fd->valid_bm_y_offset = GUINT32_FROM_LE (fd->valid_bm_y_offset);
+ fd->video_x_t_offset = GUINT32_FROM_LE (fd->video_x_t_offset);
+ fd->video_y_start = GUINT32_FROM_LE (fd->video_y_start);
+ }
+#endif
+
+ /* debug */
+ GST_INFO_OBJECT (element, "vprp tag found in context vids:");
+ GST_INFO_OBJECT (element, " format_token %d", vprp->format_token);
+ GST_INFO_OBJECT (element, " standard %d", vprp->standard);
+ GST_INFO_OBJECT (element, " vert_rate %d", vprp->vert_rate);
+ GST_INFO_OBJECT (element, " hor_t_total %d", vprp->hor_t_total);
+ GST_INFO_OBJECT (element, " vert_lines %d", vprp->vert_lines);
+ GST_INFO_OBJECT (element, " aspect %d:%d", vprp->aspect >> 16,
+ vprp->aspect & 0xffff);
+ GST_INFO_OBJECT (element, " width %d", vprp->width);
+ GST_INFO_OBJECT (element, " height %d", vprp->height);
+ GST_INFO_OBJECT (element, " fields %d", vprp->fields);
+ for (k = 0; k < vprp->fields; k++) {
+ gst_riff_vprp_video_field_desc *fd;
+
+ fd = &(vprp->field_info[k]);
+ GST_INFO_OBJECT (element, " field %u description:", k);
+ GST_INFO_OBJECT (element, " compressed_bm_height %d",
+ fd->compressed_bm_height);
+ GST_INFO_OBJECT (element, " compressed_bm_width %d",
+ fd->compressed_bm_width);
+ GST_INFO_OBJECT (element, " valid_bm_height %d",
+ fd->valid_bm_height);
+ GST_INFO_OBJECT (element, " valid_bm_width %d", fd->valid_bm_width);
+ GST_INFO_OBJECT (element, " valid_bm_x_offset %d",
+ fd->valid_bm_x_offset);
+ GST_INFO_OBJECT (element, " valid_bm_y_offset %d",
+ fd->valid_bm_y_offset);
+ GST_INFO_OBJECT (element, " video_x_t_offset %d",
+ fd->video_x_t_offset);
+ GST_INFO_OBJECT (element, " video_y_start %d", fd->video_y_start);
+ }
+
+ gst_buffer_unref (buf);
+
+ *_vprp = vprp;
+
+ return TRUE;
+
+ /* ERRORS */
+too_small:
+ {
+ GST_ERROR_OBJECT (element,
+ "Too small vprp (%d available, at least %d needed)",
+ GST_BUFFER_SIZE (buf),
+ (int) G_STRUCT_OFFSET (gst_riff_vprp, field_info));
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+}
+
+static void
+gst_avi_demux_expose_streams (GstAviDemux * avi, gboolean force)
+{
+ guint i;
+
+ GST_DEBUG_OBJECT (avi, "force : %d", force);
+
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *stream = &avi->stream[i];
+
+ if (force || stream->idx_n != 0) {
+ GST_LOG_OBJECT (avi, "Added pad %s with caps %" GST_PTR_FORMAT,
+ GST_PAD_NAME (stream->pad), GST_PAD_CAPS (stream->pad));
+ gst_element_add_pad ((GstElement *) avi, stream->pad);
+
+ if (avi->element_index)
+ gst_index_get_writer_id (avi->element_index,
+ GST_OBJECT_CAST (stream->pad), &stream->index_id);
+
+ stream->exposed = TRUE;
+ if (avi->main_stream == -1)
+ avi->main_stream = i;
+ } else {
+ GST_WARNING_OBJECT (avi, "Stream #%d doesn't have any entry, removing it",
+ i);
+ gst_avi_demux_reset_stream (avi, stream);
+ }
+ }
+}
+
+
+#ifdef DIVX_DRM
+typedef struct _gst_riff_strd {
+ guint32 version;
+ guint32 drm_size;
+} gst_riff_strd;
+
+
+static gboolean
+gst_riff_parse_strd (GstAviDemux * avi,
+ GstBuffer * buf)
+{
+ g_return_val_if_fail (buf != NULL, FALSE);
+
+ if (GST_BUFFER_SIZE (buf) < sizeof (gst_riff_strd))
+ goto too_small;
+
+ GST_DEBUG_OBJECT (avi, " version %d", ((gst_riff_strd*)GST_BUFFER_DATA(buf))->version);
+ GST_DEBUG_OBJECT (avi, " drm_size %d", ((gst_riff_strd*)GST_BUFFER_DATA(buf))->drm_size);
+
+ return gst_avi_demux_init_divx_drm (avi, GST_BUFFER_DATA(buf)+sizeof(gst_riff_strd));
+
+ /* ERRORS */
+too_small:
+ {
+ GST_ERROR_OBJECT (avi,
+ "Too small strh (%d available, %d needed)",
+ GST_BUFFER_SIZE (buf), (int) sizeof (gst_riff_strd));
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+}
+#endif // DIVX_DRM
+
+
+/* buf contains LIST chunk data, and will be padded to even size,
+ * since some buggy files do not account for the padding of chunks
+ * within a LIST in the size of the LIST */
+static inline void
+gst_avi_demux_roundup_list (GstAviDemux * avi, GstBuffer ** buf)
+{
+ gint size = GST_BUFFER_SIZE (*buf);
+
+ if (G_UNLIKELY (size & 1)) {
+ GstBuffer *obuf;
+
+ GST_DEBUG_OBJECT (avi, "rounding up dubious list size %d", size);
+ obuf = gst_buffer_new_and_alloc (size + 1);
+ memcpy (GST_BUFFER_DATA (obuf), GST_BUFFER_DATA (*buf), size);
+ /* assume 0 padding, at least makes outcome deterministic */
+ (GST_BUFFER_DATA (obuf))[size] = 0;
+ gst_buffer_replace (buf, obuf);
+ }
+}
+
+/*
+ * gst_avi_demux_parse_stream:
+ * @avi: calling element (used for debugging/errors).
+ * @buf: input buffer used to parse the stream.
+ *
+ * Parses all subchunks in a strl chunk (which defines a single
+ * stream). Discards the buffer after use. This function will
+ * increment the stream counter internally.
+ *
+ * Returns: whether the stream was identified successfully.
+ * Errors are not fatal, but they do indicate that the
+ * stream was skipped.
+ */
+static gboolean
+gst_avi_demux_parse_stream (GstAviDemux * avi, GstBuffer * buf)
+{
+ GstAviStream *stream;
+ GstElementClass *klass;
+ GstPadTemplate *templ;
+ GstBuffer *sub = NULL;
+ guint offset = 4;
+ guint32 tag = 0;
+ gchar *codec_name = NULL, *padname = NULL;
+ const gchar *tag_name;
+ GstCaps *caps = NULL;
+ GstPad *pad;
+ GstElement *element;
+ gboolean got_strh = FALSE, got_strf = FALSE, got_vprp = FALSE;
+ gst_riff_vprp *vprp = NULL;
+
+ element = GST_ELEMENT_CAST (avi);
+
+ GST_DEBUG_OBJECT (avi, "Parsing stream");
+
+ gst_avi_demux_roundup_list (avi, &buf);
+
+ if (avi->num_streams >= GST_AVI_DEMUX_MAX_STREAMS) {
+ GST_WARNING_OBJECT (avi,
+ "maximum no of streams (%d) exceeded, ignoring stream",
+ GST_AVI_DEMUX_MAX_STREAMS);
+ gst_buffer_unref (buf);
+ /* not a fatal error, let's say */
+ return TRUE;
+ }
+
+ stream = &avi->stream[avi->num_streams];
+
+ /* initial settings */
+ stream->idx_duration = GST_CLOCK_TIME_NONE;
+ stream->hdr_duration = GST_CLOCK_TIME_NONE;
+ stream->duration = GST_CLOCK_TIME_NONE;
+
+ while (gst_riff_parse_chunk (element, buf, &offset, &tag, &sub)) {
+ /* sub can be NULL if the chunk is empty */
+ if (sub == NULL) {
+ GST_DEBUG_OBJECT (avi, "ignoring empty chunk %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ continue;
+ }
+ switch (tag) {
+ case GST_RIFF_TAG_strh:
+ {
+ gst_riff_strh *strh;
+
+ if (got_strh) {
+ GST_WARNING_OBJECT (avi, "Ignoring additional strh chunk");
+ break;
+ }
+ if (!gst_riff_parse_strh (element, sub, &stream->strh)) {
+ /* ownership given away */
+ sub = NULL;
+ GST_WARNING_OBJECT (avi, "Failed to parse strh chunk");
+ goto fail;
+ }
+ sub = NULL;
+ strh = stream->strh;
+ /* sanity check; stream header frame rate matches global header
+ * frame duration */
+ if (stream->strh->type == GST_RIFF_FCC_vids) {
+ GstClockTime s_dur;
+ GstClockTime h_dur = avi->avih->us_frame * GST_USECOND;
+
+ s_dur = gst_util_uint64_scale (GST_SECOND, strh->scale, strh->rate);
+ GST_DEBUG_OBJECT (avi, "verifying stream framerate %d/%d, "
+ "frame duration = %d ms", strh->rate, strh->scale,
+ (gint) (s_dur / GST_MSECOND));
+ if (h_dur > (10 * GST_MSECOND) && (s_dur > 10 * h_dur)) {
+ strh->rate = GST_SECOND / GST_USECOND;
+ strh->scale = h_dur / GST_USECOND;
+ GST_DEBUG_OBJECT (avi, "correcting stream framerate to %d/%d",
+ strh->rate, strh->scale);
+ }
+ }
+ /* determine duration as indicated by header */
+ stream->hdr_duration = gst_util_uint64_scale ((guint64) strh->length *
+ strh->scale, GST_SECOND, (guint64) strh->rate);
+ GST_INFO ("Stream duration according to header: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stream->hdr_duration));
+ if (stream->hdr_duration == 0)
+ stream->hdr_duration = GST_CLOCK_TIME_NONE;
+
+ got_strh = TRUE;
+ break;
+ }
+ case GST_RIFF_TAG_strf:
+ {
+ gboolean res = FALSE;
+
+ if (got_strf) {
+ GST_WARNING_OBJECT (avi, "Ignoring additional strf chunk");
+ break;
+ }
+ if (!got_strh) {
+ GST_ERROR_OBJECT (avi, "Found strf chunk before strh chunk");
+ goto fail;
+ }
+ switch (stream->strh->type) {
+ case GST_RIFF_FCC_vids:
+ stream->is_vbr = TRUE;
+ res = gst_riff_parse_strf_vids (element, sub,
+ &stream->strf.vids, &stream->extradata);
+ sub = NULL;
+ GST_DEBUG_OBJECT (element, "marking video as VBR, res %d", res);
+ break;
+ case GST_RIFF_FCC_auds:
+ res =
+ gst_riff_parse_strf_auds (element, sub, &stream->strf.auds,
+ &stream->extradata);
+ sub = NULL;
+ if (!res)
+ break;
+ stream->is_vbr = (stream->strh->samplesize == 0)
+ && stream->strh->scale > 1
+ && stream->strf.auds->blockalign != 1;
+ GST_DEBUG_OBJECT (element, "marking audio as VBR:%d, res %d",
+ stream->is_vbr, res);
+ /* we need these or we have no way to come up with timestamps */
+ if ((!stream->is_vbr && !stream->strf.auds->av_bps) ||
+ (stream->is_vbr && (!stream->strh->scale ||
+ !stream->strh->rate))) {
+ GST_WARNING_OBJECT (element,
+ "invalid audio header, ignoring stream");
+ goto fail;
+ }
+ /* some more sanity checks */
+ if (stream->is_vbr) {
+ if (stream->strf.auds->blockalign <= 4) {
+ /* that would mean (too) many frames per chunk,
+ * so not likely set as expected */
+ GST_DEBUG_OBJECT (element,
+ "suspicious blockalign %d for VBR audio; "
+ "overriding to 1 frame per chunk",
+ stream->strf.auds->blockalign);
+ /* this should top any likely value */
+ stream->strf.auds->blockalign = (1 << 12);
+ }
+ }
+ break;
+ case GST_RIFF_FCC_iavs:
+ stream->is_vbr = TRUE;
+ res = gst_riff_parse_strf_iavs (element, sub,
+ &stream->strf.iavs, &stream->extradata);
+ sub = NULL;
+ GST_DEBUG_OBJECT (element, "marking iavs as VBR, res %d", res);
+ break;
+ case GST_RIFF_FCC_txts:
+ /* nothing to parse here */
+ stream->is_vbr = (stream->strh->samplesize == 0)
+ && (stream->strh->scale > 1);
+ res = TRUE;
+ break;
+ default:
+ GST_ERROR_OBJECT (avi,
+ "Don´t know how to handle stream type %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (stream->strh->type));
+ break;
+ }
+ if (sub) {
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ if (!res)
+ goto fail;
+ got_strf = TRUE;
+ break;
+ }
+ case GST_RIFF_TAG_vprp:
+ {
+ if (got_vprp) {
+ GST_WARNING_OBJECT (avi, "Ignoring additional vprp chunk");
+ break;
+ }
+ if (!got_strh) {
+ GST_ERROR_OBJECT (avi, "Found vprp chunk before strh chunk");
+ goto fail;
+ }
+ if (!got_strf) {
+ GST_ERROR_OBJECT (avi, "Found vprp chunk before strf chunk");
+ goto fail;
+ }
+
+ if (!gst_avi_demux_riff_parse_vprp (element, sub, &vprp)) {
+ GST_WARNING_OBJECT (avi, "Failed to parse vprp chunk");
+ /* not considered fatal */
+ g_free (vprp);
+ vprp = NULL;
+ } else
+ got_vprp = TRUE;
+ sub = NULL;
+ break;
+ }
+ case GST_RIFF_TAG_strd:
+#ifdef DIVX_DRM
+ GST_DEBUG_OBJECT (avi, "******************* strd tag found:");
+ if (gst_riff_parse_strd (avi, sub) == FALSE) {
+ GST_ELEMENT_ERROR(avi, STREAM, DECRYPT,
+ ("DivX initialization failed"),
+ ("gst_avi_demux_init_divx_drm() failed") );
+ return FALSE;
+ }
+#endif
+
+ if (stream->initdata)
+ gst_buffer_unref (stream->initdata);
+ stream->initdata = sub;
+ sub = NULL;
+ break;
+ case GST_RIFF_TAG_strn:
+ g_free (stream->name);
+ if (sub != NULL) {
+ stream->name =
+ g_strndup ((gchar *) GST_BUFFER_DATA (sub),
+ (gsize) GST_BUFFER_SIZE (sub));
+ gst_buffer_unref (sub);
+ sub = NULL;
+ } else {
+ stream->name = g_strdup ("");
+ }
+ GST_DEBUG_OBJECT (avi, "stream name: %s", stream->name);
+ break;
+ case GST_RIFF_IDIT:
+ gst_avi_demux_parse_idit (avi, sub);
+ break;
+ default:
+ if (tag == GST_MAKE_FOURCC ('i', 'n', 'd', 'x') ||
+ tag == GST_MAKE_FOURCC ('i', 'x', '0' + avi->num_streams / 10,
+ '0' + avi->num_streams % 10)) {
+ g_free (stream->indexes);
+ gst_avi_demux_parse_superindex (avi, sub, &stream->indexes);
+ stream->superindex = TRUE;
+ sub = NULL;
+ break;
+ }
+ GST_WARNING_OBJECT (avi,
+ "Unknown stream header tag %" GST_FOURCC_FORMAT ", ignoring",
+ GST_FOURCC_ARGS (tag));
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ break;
+ }
+ if (sub != NULL) {
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ }
+
+ if (!got_strh) {
+ GST_WARNING_OBJECT (avi, "Failed to find strh chunk");
+ goto fail;
+ }
+
+ if (!got_strf) {
+ GST_WARNING_OBJECT (avi, "Failed to find strf chunk");
+ goto fail;
+ }
+
+ /* get class to figure out the template */
+ klass = GST_ELEMENT_GET_CLASS (avi);
+
+ /* we now have all info, let's set up a pad and a caps and be done */
+ /* create stream name + pad */
+ switch (stream->strh->type) {
+ case GST_RIFF_FCC_vids:{
+ guint32 fourcc;
+
+ fourcc = (stream->strf.vids->compression) ?
+ stream->strf.vids->compression : stream->strh->fcc_handler;
+ padname = g_strdup_printf ("video_%02d", avi->num_v_streams);
+ templ = gst_element_class_get_pad_template (klass, "video_%02d");
+ caps = gst_riff_create_video_caps (fourcc, stream->strh,
+ stream->strf.vids, stream->extradata, stream->initdata, &codec_name);
+ if (!caps) {
+ caps = gst_caps_new_simple ("video/x-avi-unknown", "fourcc",
+ GST_TYPE_FOURCC, fourcc, NULL);
+ } else if (got_vprp && vprp) {
+ guint32 aspect_n, aspect_d;
+ gint n, d;
+
+ aspect_n = vprp->aspect >> 16;
+ aspect_d = vprp->aspect & 0xffff;
+ /* calculate the pixel aspect ratio using w/h and aspect ratio */
+ n = aspect_n * stream->strf.vids->height;
+ d = aspect_d * stream->strf.vids->width;
+ if (n && d)
+ gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
+ n, d, NULL);
+ /* very local, not needed elsewhere */
+ g_free (vprp);
+ vprp = NULL;
+ }
+ tag_name = GST_TAG_VIDEO_CODEC;
+ avi->num_v_streams++;
+ break;
+ }
+ case GST_RIFF_FCC_auds:{
+ padname = g_strdup_printf ("audio_%02d", avi->num_a_streams);
+ templ = gst_element_class_get_pad_template (klass, "audio_%02d");
+ caps = gst_riff_create_audio_caps (stream->strf.auds->format,
+ stream->strh, stream->strf.auds, stream->extradata,
+ stream->initdata, &codec_name);
+ if (!caps) {
+ caps = gst_caps_new_simple ("audio/x-avi-unknown", "codec_id",
+ G_TYPE_INT, stream->strf.auds->format, NULL);
+ }
+ tag_name = GST_TAG_AUDIO_CODEC;
+ avi->num_a_streams++;
+ break;
+ }
+ case GST_RIFF_FCC_iavs:{
+ guint32 fourcc = stream->strh->fcc_handler;
+
+ padname = g_strdup_printf ("video_%02d", avi->num_v_streams);
+ templ = gst_element_class_get_pad_template (klass, "video_%02d");
+ caps = gst_riff_create_iavs_caps (fourcc, stream->strh,
+ stream->strf.iavs, stream->extradata, stream->initdata, &codec_name);
+ if (!caps) {
+ caps = gst_caps_new_simple ("video/x-avi-unknown", "fourcc",
+ GST_TYPE_FOURCC, fourcc, NULL);
+ }
+ tag_name = GST_TAG_VIDEO_CODEC;
+ avi->num_v_streams++;
+ break;
+ }
+ case GST_RIFF_FCC_txts:{
+ padname = g_strdup_printf ("subtitle_%02d", avi->num_t_streams);
+ templ = gst_element_class_get_pad_template (klass, "subtitle_%02d");
+ caps = gst_caps_new_simple ("application/x-subtitle-avi", NULL);
+ tag_name = NULL;
+ avi->num_t_streams++;
+ break;
+ }
+ default:
+ g_return_val_if_reached (FALSE);
+ }
+
+ /* no caps means no stream */
+ if (!caps) {
+ GST_ERROR_OBJECT (element, "Did not find caps for stream %s", padname);
+ goto fail;
+ }
+
+ GST_DEBUG_OBJECT (element, "codec-name=%s",
+ (codec_name ? codec_name : "NULL"));
+ GST_DEBUG_OBJECT (element, "caps=%" GST_PTR_FORMAT, caps);
+
+ /* set proper settings and add it */
+ if (stream->pad)
+ gst_object_unref (stream->pad);
+ pad = stream->pad = gst_pad_new_from_template (templ, padname);
+ g_free (padname);
+
+ gst_pad_use_fixed_caps (pad);
+#if 0
+ gst_pad_set_formats_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_get_src_formats));
+ gst_pad_set_event_mask_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_get_event_mask));
+#endif
+ gst_pad_set_event_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_handle_src_event));
+ gst_pad_set_query_type_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_get_src_query_types));
+ gst_pad_set_query_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_handle_src_query));
+#if 0
+ gst_pad_set_convert_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_src_convert));
+#endif
+
+ stream->num = avi->num_streams;
+
+ stream->start_entry = 0;
+ stream->step_entry = 0;
+ stream->stop_entry = 0;
+
+ stream->current_entry = -1;
+ stream->current_total = 0;
+
+ stream->last_flow = GST_FLOW_OK;
+ stream->discont = TRUE;
+
+ stream->total_bytes = 0;
+ stream->total_blocks = 0;
+ stream->n_keyframes = 0;
+
+ stream->idx_n = 0;
+ stream->idx_max = 0;
+
+ gst_pad_set_element_private (pad, stream);
+ avi->num_streams++;
+
+#ifdef AVIDEMUX_MODIFICATION
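+  /* per-stream bookkeeping used by the AVIDEMUX_MODIFICATION trick-play path:
+   * previous/next keyframe indices and the duration gap between them */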
+ stream->trickplay_info = g_new0 (TrickPlayInfo, 1);
+ stream->trickplay_info->prev_kidx = 0;
+ stream->trickplay_info->next_kidx = 0;
+ stream->trickplay_info->kidxs_dur_diff = 0;
+#endif
+ gst_pad_set_caps (pad, caps);
+ gst_pad_set_active (pad, TRUE);
+ gst_caps_unref (caps);
+
+ /* make tags */
+ if (codec_name) {
+ if (!stream->taglist)
+ stream->taglist = gst_tag_list_new ();
+
+ avi->got_tags = TRUE;
+
+ gst_tag_list_add (stream->taglist, GST_TAG_MERGE_APPEND, tag_name,
+ codec_name, NULL);
+ g_free (codec_name);
+ }
+
+ gst_buffer_unref (buf);
+
+ return TRUE;
+
+ /* ERRORS */
+fail:
+ {
+ /* unref any mem that may be in use */
+ if (buf)
+ gst_buffer_unref (buf);
+ if (sub)
+ gst_buffer_unref (sub);
+ g_free (vprp);
+ g_free (codec_name);
+ gst_avi_demux_reset_stream (avi, stream);
+ avi->num_streams++;
+ return FALSE;
+ }
+}
+
+/*
+ * gst_avi_demux_parse_odml:
+ * @avi: calling element (used for debug/error).
+ * @buf: input buffer to be used for parsing.
+ *
+ * Read an openDML-2.0 extension header. Fills in the frame number
+ * in the avi demuxer object when reading succeeds.
+ */
+static void
+gst_avi_demux_parse_odml (GstAviDemux * avi, GstBuffer * buf)
+{
+ guint32 tag = 0;
+ guint offset = 4;
+ GstBuffer *sub = NULL;
+
+ while (gst_riff_parse_chunk (GST_ELEMENT_CAST (avi), buf, &offset, &tag,
+ &sub)) {
+ switch (tag) {
+ case GST_RIFF_TAG_dmlh:{
+ gst_riff_dmlh dmlh, *_dmlh;
+ guint size;
+
+ /* sub == NULL is possible and means an empty buffer */
+ size = sub ? GST_BUFFER_SIZE (sub) : 0;
+
+ /* check size */
+ if (size < sizeof (gst_riff_dmlh)) {
+ GST_ERROR_OBJECT (avi,
+ "DMLH entry is too small (%d bytes, %d needed)",
+ size, (int) sizeof (gst_riff_dmlh));
+ goto next;
+ }
+ _dmlh = (gst_riff_dmlh *) GST_BUFFER_DATA (sub);
+ dmlh.totalframes = GST_READ_UINT32_LE (&_dmlh->totalframes);
+
+ GST_INFO_OBJECT (avi, "dmlh tag found: totalframes: %u",
+ dmlh.totalframes);
+
+ avi->avih->tot_frames = dmlh.totalframes;
+ goto next;
+ }
+
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Unknown tag %" GST_FOURCC_FORMAT " in ODML header",
+ GST_FOURCC_ARGS (tag));
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ next:
+ /* skip and move to next chunk */
+ if (sub) {
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ break;
+ }
+ }
+ if (buf)
+ gst_buffer_unref (buf);
+}
+
+/* Index helper */
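+/* position just past the final index entry, used as the stop entry when
+ * playing forwards */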
+static guint
+gst_avi_demux_index_last (GstAviDemux * avi, GstAviStream * stream)
+{
+ return stream->idx_n;
+}
+
+/* find a previous entry in the index with the given flags */
+static guint
+gst_avi_demux_index_prev (GstAviDemux * avi, GstAviStream * stream,
+ guint last, gboolean keyframe)
+{
+ GstAviIndexEntry *entry;
+ guint i;
+
+ for (i = last; i > 0; i--) {
+ entry = &stream->index[i - 1];
+ if (!keyframe || ENTRY_IS_KEYFRAME (entry)) {
+ return i - 1;
+ }
+ }
+ return 0;
+}
+
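+/* find the next entry in the index with the given flags, or the last entry
+ * if none follows */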
+static guint
+gst_avi_demux_index_next (GstAviDemux * avi, GstAviStream * stream,
+ guint last, gboolean keyframe)
+{
+ GstAviIndexEntry *entry;
+ gint i;
+
+ for (i = last + 1; i < stream->idx_n; i++) {
+ entry = &stream->index[i];
+ if (!keyframe || ENTRY_IS_KEYFRAME (entry)) {
+ return i;
+ }
+ }
+ return stream->idx_n - 1;
+}
+
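+/* compare helper for gst_util_array_binary_search(): orders index entries by
+ * their accumulated total (bytes for constant-rate streams, frames/blocks for
+ * VBR audio) */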
+static guint
+gst_avi_demux_index_entry_search (GstAviIndexEntry * entry, guint64 * total)
+{
+ if (entry->total < *total)
+ return -1;
+ else if (entry->total > *total)
+ return 1;
+ return 0;
+}
+
+/*
+ * gst_avi_demux_index_for_time:
+ * @avi: Avi object
+ * @stream: the stream
+ * @time: a time position
+ *
+ * Finds the index entry whose time is less than or equal to the requested time.
+ * Try to avoid binary search when we can convert the time to an index
+ * position directly (for example for video frames with a fixed duration).
+ *
+ * Returns: the found position in the index.
+ */
+static guint
+gst_avi_demux_index_for_time (GstAviDemux * avi,
+ GstAviStream * stream, guint64 time)
+{
+ guint index = -1;
+ guint64 total = 0;
+
+ GST_LOG_OBJECT (avi, "search time:%" GST_TIME_FORMAT, GST_TIME_ARGS (time));
+
+ /* easy (and common) cases */
+ if (time == 0 || stream->idx_n == 0)
+ return 0;
+ if (time >= stream->idx_duration)
+ return stream->idx_n - 1;
+
+ /* figure out where we need to go. For that we convert the time to an
+ * index entry or we convert it to a total and then do a binary search. */
+ if (stream->is_vbr) {
+ /* VBR stream next timestamp */
+ if (stream->strh->type == GST_RIFF_FCC_auds) {
+ total = avi_stream_convert_time_to_frames_unchecked (stream, time);
+ } else {
+ index = avi_stream_convert_time_to_frames_unchecked (stream, time);
+ }
+ } else {
+ /* constant rate stream */
+ total = avi_stream_convert_time_to_bytes_unchecked (stream, time);
+ }
+
+ if (index == -1) {
+ GstAviIndexEntry *entry;
+
+ /* no index, find index with binary search on total */
+ GST_LOG_OBJECT (avi, "binary search for entry with total %"
+ G_GUINT64_FORMAT, total);
+
+ entry = gst_util_array_binary_search (stream->index,
+ stream->idx_n, sizeof (GstAviIndexEntry),
+ (GCompareDataFunc) gst_avi_demux_index_entry_search,
+ GST_SEARCH_MODE_BEFORE, &total, NULL);
+
+ if (entry == NULL) {
+ GST_LOG_OBJECT (avi, "not found, assume index 0");
+ index = 0;
+ } else {
+ index = entry - stream->index;
+ GST_LOG_OBJECT (avi, "found at %u", index);
+ }
+ } else {
+ GST_LOG_OBJECT (avi, "converted time to index %u", index);
+ }
+
+ return index;
+}
+
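+/* map a chunk id such as '00dc' or '01wb' to the stream it belongs to; the
+ * first two characters of the fourcc encode the stream number */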
+static inline GstAviStream *
+gst_avi_demux_stream_for_id (GstAviDemux * avi, guint32 id)
+{
+ guint stream_nr;
+ GstAviStream *stream;
+
+ /* get the stream for this entry */
+ stream_nr = CHUNKID_TO_STREAMNR (id);
+ if (G_UNLIKELY (stream_nr >= avi->num_streams)) {
+ GST_WARNING_OBJECT (avi, "invalid stream nr %d", stream_nr);
+ return NULL;
+ }
+ stream = &avi->stream[stream_nr];
+ if (G_UNLIKELY (!stream->strh)) {
+ GST_WARNING_OBJECT (avi, "Unhandled stream %d, skipping", stream_nr);
+ return NULL;
+ }
+ return stream;
+}
+
+/*
+ * gst_avi_demux_parse_index:
+ * @avi: calling element (used for debugging/errors).
+ * @buf: buffer containing the full index.
+ *
+ * Read index entries from the provided buffer.
+ * The buffer should contain a GST_RIFF_TAG_idx1 chunk.
+ */
+static gboolean
+gst_avi_demux_parse_index (GstAviDemux * avi, GstBuffer * buf)
+{
+ guint8 *data;
+ guint size;
+ guint i, num, n;
+ gst_riff_index_entry *index;
+ GstClockTime stamp;
+ GstAviStream *stream;
+ GstAviIndexEntry entry = {0};
+ guint32 id;
+
+ if (!buf)
+ return FALSE;
+
+ data = GST_BUFFER_DATA (buf);
+ size = GST_BUFFER_SIZE (buf);
+
+ stamp = gst_util_get_timestamp ();
+
+ /* see how many items in the index */
+ num = size / sizeof (gst_riff_index_entry);
+ if (num == 0)
+ goto empty_list;
+
+ GST_INFO_OBJECT (avi, "Parsing index, nr_entries = %6d", num);
+
+ index = (gst_riff_index_entry *) data;
+
+ /* figure out if the index is 0 based or relative to the MOVI start */
+ entry.offset = GST_READ_UINT32_LE (&index[0].offset);
+ if (entry.offset < avi->offset) {
+ avi->index_offset = avi->offset + 8;
+ GST_DEBUG ("index_offset = %" G_GUINT64_FORMAT, avi->index_offset);
+ } else {
+ avi->index_offset = 0;
+ GST_DEBUG ("index is 0 based");
+ }
+
+ for (i = 0, n = 0; i < num; i++) {
+ id = GST_READ_UINT32_LE (&index[i].id);
+ entry.offset = GST_READ_UINT32_LE (&index[i].offset);
+
+ /* some sanity checks */
+ if (G_UNLIKELY (id == GST_RIFF_rec || id == 0 ||
+ (entry.offset == 0 && n > 0)))
+ continue;
+
+#ifdef DIVX_DRM /* FIXME: make sure this matches the define set by the build */
+    if (id == GST_MAKE_FOURCC ('0', '0', 'd', 'd')) {
+      GST_DEBUG ("Skipping encrypted data chunk");
+      continue;
+    }
+#endif
+
+ /* get the stream for this entry */
+ stream = gst_avi_demux_stream_for_id (avi, id);
+ if (G_UNLIKELY (!stream))
+ continue;
+
+ /* handle offset and size */
+ entry.offset += avi->index_offset + 8;
+ entry.size = GST_READ_UINT32_LE (&index[i].size);
+
+ /* handle flags */
+ if (stream->strh->type == GST_RIFF_FCC_auds) {
+ /* all audio frames are keyframes */
+ ENTRY_SET_KEYFRAME (&entry);
+ } else {
+ guint32 flags;
+ /* else read flags */
+ flags = GST_READ_UINT32_LE (&index[i].flags);
+ if (flags & GST_RIFF_IF_KEYFRAME) {
+ ENTRY_SET_KEYFRAME (&entry);
+ } else {
+ ENTRY_UNSET_KEYFRAME (&entry);
+ }
+ }
+
+ /* and add */
+ if (G_UNLIKELY (!gst_avi_demux_add_index (avi, stream, num, &entry)))
+ goto out_of_mem;
+
+ n++;
+ }
+ gst_buffer_unref (buf);
+
+ /* get stream stats now */
+ avi->have_index = gst_avi_demux_do_index_stats (avi);
+
+ stamp = gst_util_get_timestamp () - stamp;
+ GST_DEBUG_OBJECT (avi, "index parsing took %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stamp));
+
+ return TRUE;
+
+ /* ERRORS */
+empty_list:
+ {
+ GST_DEBUG_OBJECT (avi, "empty index");
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+out_of_mem:
+ {
+ GST_ELEMENT_ERROR (avi, RESOURCE, NO_SPACE_LEFT, (NULL),
+ ("Cannot allocate memory for %u*%u=%u bytes",
+ (guint) sizeof (GstAviIndexEntry), num,
+ (guint) sizeof (GstAviIndexEntry) * num));
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+}
+
+/*
+ * gst_avi_demux_stream_index:
+ * @avi: avi demuxer object.
+ *
+ * Seeks to index and reads it.
+ */
+static void
+gst_avi_demux_stream_index (GstAviDemux * avi)
+{
+ GstFlowReturn res;
+ guint64 offset = avi->offset;
+ GstBuffer *buf;
+ guint32 tag;
+ guint32 size;
+
+ GST_DEBUG ("demux stream index at offset %" G_GUINT64_FORMAT, offset);
+
+ /* get chunk information */
+ res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
+ if (res != GST_FLOW_OK)
+ goto pull_failed;
+ else if (GST_BUFFER_SIZE (buf) < 8)
+ goto too_small;
+
+  /* check tag first before blindly trying to read 'size' bytes */
+ tag = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
+ size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf) + 4);
+ if (tag == GST_RIFF_TAG_LIST) {
+ /* this is the movi tag */
+ GST_DEBUG_OBJECT (avi, "skip LIST chunk, size %" G_GUINT32_FORMAT,
+ (8 + GST_ROUND_UP_2 (size)));
+ offset += 8 + GST_ROUND_UP_2 (size);
+ gst_buffer_unref (buf);
+ res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
+ if (res != GST_FLOW_OK)
+ goto pull_failed;
+ else if (GST_BUFFER_SIZE (buf) < 8)
+ goto too_small;
+ tag = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf));
+ size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buf) + 4);
+ }
+
+ if (tag != GST_RIFF_TAG_idx1)
+ goto no_index;
+ if (!size)
+ goto zero_index;
+
+ gst_buffer_unref (buf);
+
+ GST_DEBUG ("index found at offset %" G_GUINT64_FORMAT, offset);
+
+ /* read chunk, advance offset */
+ if (gst_riff_read_chunk (GST_ELEMENT_CAST (avi),
+ avi->sinkpad, &offset, &tag, &buf) != GST_FLOW_OK)
+ return;
+
+ GST_DEBUG ("will parse index chunk size %u for tag %"
+ GST_FOURCC_FORMAT, GST_BUFFER_SIZE (buf), GST_FOURCC_ARGS (tag));
+
+ gst_avi_demux_parse_index (avi, buf);
+
+#ifndef GST_DISABLE_GST_DEBUG
+ /* debug our indexes */
+ {
+ gint i;
+ GstAviStream *stream;
+
+ for (i = 0; i < avi->num_streams; i++) {
+ stream = &avi->stream[i];
+ GST_DEBUG_OBJECT (avi, "stream %u: %u frames, %" G_GINT64_FORMAT " bytes",
+ i, stream->idx_n, stream->total_bytes);
+ }
+ }
+#endif
+ return;
+
+ /* ERRORS */
+pull_failed:
+ {
+ GST_DEBUG_OBJECT (avi,
+ "pull range failed: pos=%" G_GUINT64_FORMAT " size=8", offset);
+ return;
+ }
+too_small:
+ {
+ GST_DEBUG_OBJECT (avi, "Buffer is too small");
+ gst_buffer_unref (buf);
+ return;
+ }
+no_index:
+ {
+ GST_WARNING_OBJECT (avi,
+ "No index data (idx1) after movi chunk, but %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ gst_buffer_unref (buf);
+ return;
+ }
+zero_index:
+ {
+ GST_WARNING_OBJECT (avi, "Empty index data (idx1) after movi chunk");
+ gst_buffer_unref (buf);
+ return;
+ }
+}
+
+/*
+ * gst_avi_demux_stream_index_push:
+ * @avi: avi demuxer object.
+ *
+ * Read index.
+ */
+static void
+gst_avi_demux_stream_index_push (GstAviDemux * avi)
+{
+ guint64 offset = avi->idx1_offset;
+ GstBuffer *buf;
+ guint32 tag;
+ guint32 size;
+
+ GST_DEBUG ("demux stream index at offset %" G_GUINT64_FORMAT, offset);
+
+ /* get chunk information */
+ if (!gst_avi_demux_peek_chunk (avi, &tag, &size))
+ return;
+
+ /* check tag first before blindly trying to read 'size' bytes */
+ if (tag == GST_RIFF_TAG_LIST) {
+ /* this is the movi tag */
+ GST_DEBUG_OBJECT (avi, "skip LIST chunk, size %" G_GUINT32_FORMAT,
+ (8 + GST_ROUND_UP_2 (size)));
+ avi->idx1_offset = offset + 8 + GST_ROUND_UP_2 (size);
+ /* issue seek to allow chain function to handle it and return! */
+ perform_seek_to_offset (avi, avi->idx1_offset);
+ return;
+ }
+
+ if (tag != GST_RIFF_TAG_idx1)
+ goto no_index;
+
+ GST_DEBUG ("index found at offset %" G_GUINT64_FORMAT, offset);
+
+ /* flush chunk header */
+ gst_adapter_flush (avi->adapter, 8);
+ /* read chunk payload */
+ buf = gst_adapter_take_buffer (avi->adapter, size);
+ if (!buf)
+ goto pull_failed;
+ /* advance offset */
+ offset += 8 + GST_ROUND_UP_2 (size);
+
+ GST_DEBUG ("will parse index chunk size %u for tag %"
+ GST_FOURCC_FORMAT, GST_BUFFER_SIZE (buf), GST_FOURCC_ARGS (tag));
+
+ avi->offset = avi->first_movi_offset;
+ gst_avi_demux_parse_index (avi, buf);
+
+#ifndef GST_DISABLE_GST_DEBUG
+ /* debug our indexes */
+ {
+ gint i;
+ GstAviStream *stream;
+
+ for (i = 0; i < avi->num_streams; i++) {
+ stream = &avi->stream[i];
+ GST_DEBUG_OBJECT (avi, "stream %u: %u frames, %" G_GINT64_FORMAT " bytes",
+ i, stream->idx_n, stream->total_bytes);
+ }
+ }
+#endif
+ return;
+
+ /* ERRORS */
+pull_failed:
+ {
+ GST_DEBUG_OBJECT (avi,
+ "taking data from adapter failed: pos=%" G_GUINT64_FORMAT " size=%u",
+ offset, size);
+ return;
+ }
+no_index:
+ {
+ GST_WARNING_OBJECT (avi,
+ "No index data (idx1) after movi chunk, but %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ return;
+ }
+}
+
+/*
+ * gst_avi_demux_peek_tag:
+ *
+ * Returns the tag and size of the next chunk
+ */
+static GstFlowReturn
+gst_avi_demux_peek_tag (GstAviDemux * avi, guint64 offset, guint32 * tag,
+ guint * size)
+{
+ GstFlowReturn res = GST_FLOW_OK;
+ GstBuffer *buf = NULL;
+ guint bufsize;
+ guint8 *bufdata;
+
+ res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
+ if (res != GST_FLOW_OK)
+ goto pull_failed;
+
+ bufsize = GST_BUFFER_SIZE (buf);
+ if (bufsize != 8)
+ goto wrong_size;
+
+ bufdata = GST_BUFFER_DATA (buf);
+
+ *tag = GST_READ_UINT32_LE (bufdata);
+ *size = GST_READ_UINT32_LE (bufdata + 4);
+
+ GST_LOG_OBJECT (avi, "Tag[%" GST_FOURCC_FORMAT "] (size:%d) %"
+ G_GINT64_FORMAT " -- %" G_GINT64_FORMAT, GST_FOURCC_ARGS (*tag),
+ *size, offset + 8, offset + 8 + (gint64) * size);
+
+done:
+ gst_buffer_unref (buf);
+
+ return res;
+
+ /* ERRORS */
+pull_failed:
+ {
+    GST_DEBUG_OBJECT (avi, "pull_range returned %s", gst_flow_get_name (res));
+ return res;
+ }
+wrong_size:
+ {
+    GST_DEBUG_OBJECT (avi, "got %d bytes instead of the expected 8 bytes", bufsize);
+ res = GST_FLOW_ERROR;
+ goto done;
+ }
+}
+
+/*
+ * gst_avi_demux_next_data_buffer:
+ *
+ * Returns the offset and size of the next buffer
+ * Position is the position of the buffer (after tag and size)
+ */
+static GstFlowReturn
+gst_avi_demux_next_data_buffer (GstAviDemux * avi, guint64 * offset,
+ guint32 * tag, guint * size)
+{
+ guint64 off = *offset;
+ guint _size = 0;
+ GstFlowReturn res;
+
+ do {
+ res = gst_avi_demux_peek_tag (avi, off, tag, &_size);
+ if (res != GST_FLOW_OK)
+ break;
+ if (*tag == GST_RIFF_TAG_LIST || *tag == GST_RIFF_TAG_RIFF)
+ off += 8 + 4; /* skip tag + size + subtag */
+ else {
+ *offset = off + 8;
+ *size = _size;
+ break;
+ }
+ } while (TRUE);
+
+ return res;
+}
+
+/*
+ * gst_avi_demux_stream_scan:
+ * @avi: calling element (used for debugging/errors).
+ *
+ * Scan the file for all chunks to "create" a new index.
+ * pull-range based
+ */
+static gboolean
+gst_avi_demux_stream_scan (GstAviDemux * avi)
+{
+ GstFlowReturn res;
+ GstAviStream *stream;
+ GstFormat format;
+ guint64 pos = 0;
+ guint64 length;
+ gint64 tmplength;
+ guint32 tag = 0;
+ guint num;
+
+ /* FIXME:
+ * - implement non-seekable source support.
+ */
+ GST_DEBUG_OBJECT (avi, "Creating index");
+
+ /* get the size of the file */
+ format = GST_FORMAT_BYTES;
+ if (!gst_pad_query_peer_duration (avi->sinkpad, &format, &tmplength))
+ return FALSE;
+ length = tmplength;
+
+ /* guess the total amount of entries we expect */
+ num = 16000;
+
+ while (TRUE) {
+ GstAviIndexEntry entry;
+ guint size = 0;
+#ifdef AVIDEMUX_MODIFICATION
+ gint frame_type = GST_AVI_KEYFRAME;
+#endif
+
+ /* start reading data buffers to find the id and offset */
+ res = gst_avi_demux_next_data_buffer (avi, &pos, &tag, &size);
+ if (G_UNLIKELY (res != GST_FLOW_OK))
+ break;
+
+ /* get stream */
+ stream = gst_avi_demux_stream_for_id (avi, tag);
+ if (G_UNLIKELY (!stream))
+ goto next;
+#ifdef AVIDEMUX_MODIFICATION
+    /* build the index with real keyframe information by inspecting each video chunk */
+ if (stream->strh->type == GST_RIFF_FCC_vids) {
+ GstBuffer *buf = NULL;
+ int ret = -1;
+
+      res = gst_pad_pull_range (avi->sinkpad, pos, size, &buf);
+      if (res != GST_FLOW_OK) {
+        /* no buffer was returned, nothing to unref */
+        GST_ERROR ("pull_range failed");
+        break;
+      }
+      ret = gst_avi_demux_find_frame_type (stream, buf, &frame_type);
+      gst_buffer_unref (buf);
+      if (ret == -1)
+        break;
+ }
+ entry.flags = frame_type;
+#else
+ /* we can't figure out the keyframes, assume they all are */
+ entry.flags = GST_AVI_KEYFRAME;
+#endif
+ entry.offset = pos;
+ entry.size = size;
+
+ /* and add to the index of this stream */
+ if (G_UNLIKELY (!gst_avi_demux_add_index (avi, stream, num, &entry)))
+ goto out_of_mem;
+
+ next:
+ /* update position */
+ pos += GST_ROUND_UP_2 (size);
+ if (G_UNLIKELY (pos > length)) {
+ GST_WARNING_OBJECT (avi,
+ "Stopping index lookup since we are further than EOF");
+ break;
+ }
+ }
+
+ /* collect stats */
+ avi->have_index = gst_avi_demux_do_index_stats (avi);
+
+ return TRUE;
+
+ /* ERRORS */
+out_of_mem:
+ {
+ GST_ELEMENT_ERROR (avi, RESOURCE, NO_SPACE_LEFT, (NULL),
+ ("Cannot allocate memory for %u*%u=%u bytes",
+ (guint) sizeof (GstAviIndexEntry), num,
+ (guint) sizeof (GstAviIndexEntry) * num));
+ return FALSE;
+ }
+}
+
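+/*
+ * gst_avi_demux_calculate_durations_from_index:
+ * @avi: avi demuxer object.
+ *
+ * Pick a duration for each stream, preferring the duration derived from the
+ * index and falling back to the header (strh) duration, then set the longest
+ * one as the total segment duration.
+ */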
+static void
+gst_avi_demux_calculate_durations_from_index (GstAviDemux * avi)
+{
+ guint i;
+ GstClockTime total;
+ GstAviStream *stream;
+
+ total = GST_CLOCK_TIME_NONE;
+
+ /* all streams start at a timestamp 0 */
+ for (i = 0; i < avi->num_streams; i++) {
+ GstClockTime duration, hduration;
+ gst_riff_strh *strh;
+
+ stream = &avi->stream[i];
+ if (G_UNLIKELY (!stream || !stream->idx_n || !(strh = stream->strh)))
+ continue;
+
+ /* get header duration for the stream */
+ hduration = stream->hdr_duration;
+ /* index duration calculated during parsing */
+ duration = stream->idx_duration;
+
+ /* now pick a good duration */
+ if (GST_CLOCK_TIME_IS_VALID (duration)) {
+ /* index gave valid duration, use that */
+ GST_INFO ("Stream %p duration according to index: %" GST_TIME_FORMAT,
+ stream, GST_TIME_ARGS (duration));
+ } else {
+ /* fall back to header info to calculate a duration */
+ duration = hduration;
+ }
+ GST_INFO ("Setting duration of stream #%d to %" GST_TIME_FORMAT,
+ i, GST_TIME_ARGS (duration));
+ /* set duration for the stream */
+ stream->duration = duration;
+
+ /* find total duration */
+ if (total == GST_CLOCK_TIME_NONE ||
+ (GST_CLOCK_TIME_IS_VALID (duration) && duration > total))
+ total = duration;
+ }
+
+ if (GST_CLOCK_TIME_IS_VALID (total) && (total > 0)) {
+ /* now update the duration for those streams where we had none */
+ for (i = 0; i < avi->num_streams; i++) {
+ stream = &avi->stream[i];
+
+ if (!GST_CLOCK_TIME_IS_VALID (stream->duration)
+ || stream->duration == 0) {
+ stream->duration = total;
+
+ GST_INFO ("Stream %p duration according to total: %" GST_TIME_FORMAT,
+ stream, GST_TIME_ARGS (total));
+ }
+ }
+ }
+
+ /* and set the total duration in the segment. */
+ GST_INFO ("Setting total duration to: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (total));
+
+ gst_segment_set_duration (&avi->segment, GST_FORMAT_TIME, total);
+}
+
+#ifdef AVIDEMUX_MODIFICATION
+static void
+gst_avi_demux_calculate_durations_from_strh (GstAviDemux * avi)
+{
+ guint i;
+ GstClockTime total;
+ GstAviStream *stream;
+
+ total = GST_CLOCK_TIME_NONE;
+
+ /* all streams start at a timestamp 0 */
+ for (i = 0; i < avi->num_streams; i++) {
+ GstClockTime hduration;
+ gst_riff_strh *strh;
+
+ stream = &avi->stream[i];
+
+ if (G_UNLIKELY (!stream || !(strh = stream->strh)))
+ continue;
+
+ /* get header duration for the stream */
+ hduration = stream->hdr_duration;
+
+ /* check duration */
+ if (GST_CLOCK_TIME_IS_VALID (hduration)) {
+ GST_INFO ("Stream %p duration according to strh: %" GST_TIME_FORMAT,
+ stream, GST_TIME_ARGS (hduration));
+ }
+
+ GST_INFO ("Setting duration of stream #%d to %" GST_TIME_FORMAT,
+ i, GST_TIME_ARGS (hduration));
+
+ /* set duration for the stream */
+ stream->duration = hduration;
+
+ /* find total duration */
+ if (total == GST_CLOCK_TIME_NONE ||
+ (GST_CLOCK_TIME_IS_VALID (hduration) && hduration > total))
+ total = hduration;
+ }
+
+ if (GST_CLOCK_TIME_IS_VALID (total) && (total > 0)) {
+ /* now update the duration for those streams where we had none */
+ for (i = 0; i < avi->num_streams; i++) {
+ stream = &avi->stream[i];
+
+ if (!GST_CLOCK_TIME_IS_VALID (stream->duration)
+ || stream->duration == 0) {
+ stream->duration = total;
+
+ GST_INFO ("Stream %p duration according to total: %" GST_TIME_FORMAT,
+ stream, GST_TIME_ARGS (total));
+ }
+ }
+ }
+
+ /* and set the total duration in the segment. */
+ GST_INFO ("Setting total duration to: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (total));
+
+ gst_segment_set_duration (&avi->segment, GST_FORMAT_TIME, total);
+}
+#endif
+
+/* returns FALSE if there are no pads to deliver event to,
+ * otherwise TRUE (whatever the outcome of event sending),
+ * takes ownership of the event. */
+static gboolean
+gst_avi_demux_push_event (GstAviDemux * avi, GstEvent * event)
+{
+ gboolean result = FALSE;
+ gint i;
+
+ GST_DEBUG_OBJECT (avi, "sending %s event to %d streams",
+ GST_EVENT_TYPE_NAME (event), avi->num_streams);
+
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *stream = &avi->stream[i];
+
+ if (stream->pad) {
+ result = TRUE;
+ gst_pad_push_event (stream->pad, gst_event_ref (event));
+ }
+ }
+ gst_event_unref (event);
+ return result;
+}
+
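+/* query upstream for BYTES seekability and size; when the size is unknown the
+ * source is treated as not seekable in practice */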
+static void
+gst_avi_demux_check_seekability (GstAviDemux * avi)
+{
+ GstQuery *query;
+ gboolean seekable = FALSE;
+ gint64 start = -1, stop = -1;
+
+ query = gst_query_new_seeking (GST_FORMAT_BYTES);
+ if (!gst_pad_peer_query (avi->sinkpad, query)) {
+ GST_DEBUG_OBJECT (avi, "seeking query failed");
+ goto done;
+ }
+
+ gst_query_parse_seeking (query, NULL, &seekable, &start, &stop);
+
+ /* try harder to query upstream size if we didn't get it the first time */
+ if (seekable && stop == -1) {
+ GstFormat fmt = GST_FORMAT_BYTES;
+
+ GST_DEBUG_OBJECT (avi, "doing duration query to fix up unset stop");
+ gst_pad_query_peer_duration (avi->sinkpad, &fmt, &stop);
+ }
+
+ /* if upstream doesn't know the size, it's likely that it's not seekable in
+ * practice even if it technically may be seekable */
+ if (seekable && (start != 0 || stop <= start)) {
+ GST_DEBUG_OBJECT (avi, "seekable but unknown start/stop -> disable");
+ seekable = FALSE;
+ }
+
+done:
+ GST_INFO_OBJECT (avi, "seekable: %d (%" G_GUINT64_FORMAT " - %"
+ G_GUINT64_FORMAT ")", seekable, start, stop);
+ avi->seekable = seekable;
+
+ gst_query_unref (query);
+}
+
+/*
+ * Read AVI headers when streaming
+ */
+static GstFlowReturn
+gst_avi_demux_stream_header_push (GstAviDemux * avi)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint32 tag = 0;
+ guint32 ltag = 0;
+ guint32 size = 0;
+ const guint8 *data;
+ GstBuffer *buf = NULL, *sub = NULL;
+ guint offset = 4;
+ gint64 stop;
+ gint i;
+ GstTagList *tags = NULL;
+
+ GST_DEBUG ("Reading and parsing avi headers: %d", avi->header_state);
+
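+  /* two-step push-mode header parse: first the hdrl LIST with the stream
+   * headers, then skip/collect INFO and junk chunks until the movi LIST */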
+ switch (avi->header_state) {
+ case GST_AVI_DEMUX_HEADER_TAG_LIST:
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size)) {
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ if (tag != GST_RIFF_TAG_LIST)
+ goto header_no_list;
+
+ gst_adapter_flush (avi->adapter, 8);
+ /* Find the 'hdrl' LIST tag */
+ GST_DEBUG ("Reading %d bytes", size);
+ buf = gst_adapter_take_buffer (avi->adapter, size);
+
+ if (GST_READ_UINT32_LE (GST_BUFFER_DATA (buf)) != GST_RIFF_LIST_hdrl)
+ goto header_no_hdrl;
+
+ /* mind padding */
+ if (size & 1)
+ gst_adapter_flush (avi->adapter, 1);
+
+ GST_DEBUG ("'hdrl' LIST tag found. Parsing next chunk");
+
+ gst_avi_demux_roundup_list (avi, &buf);
+
+ /* the hdrl starts with a 'avih' header */
+ if (!gst_riff_parse_chunk (GST_ELEMENT_CAST (avi), buf, &offset, &tag,
+ &sub))
+ goto header_no_avih;
+
+ if (tag != GST_RIFF_TAG_avih)
+ goto header_no_avih;
+
+ if (!gst_avi_demux_parse_avih (avi, sub, &avi->avih))
+ goto header_wrong_avih;
+
+        GST_DEBUG_OBJECT (avi, "AVI header ok, reading elements from header");
+
+ /* now, read the elements from the header until the end */
+ while (gst_riff_parse_chunk (GST_ELEMENT_CAST (avi), buf, &offset, &tag,
+ &sub)) {
+ /* sub can be NULL on empty tags */
+ if (!sub)
+ continue;
+
+ switch (tag) {
+ case GST_RIFF_TAG_LIST:
+ if (GST_BUFFER_SIZE (sub) < 4)
+ goto next;
+
+ switch (GST_READ_UINT32_LE (GST_BUFFER_DATA (sub))) {
+ case GST_RIFF_LIST_strl:
+ if (!(gst_avi_demux_parse_stream (avi, sub))) {
+ sub = NULL;
+ GST_ELEMENT_WARNING (avi, STREAM, DEMUX, (NULL),
+ ("failed to parse stream, ignoring"));
+ goto next;
+ }
+ sub = NULL;
+ goto next;
+ case GST_RIFF_LIST_odml:
+ gst_avi_demux_parse_odml (avi, sub);
+ sub = NULL;
+ break;
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Unknown list %" GST_FOURCC_FORMAT " in AVI header",
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (GST_BUFFER_DATA
+ (sub))));
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ goto next;
+ }
+ break;
+ case GST_RIFF_IDIT:
+ gst_avi_demux_parse_idit (avi, sub);
+ goto next;
+ default:
+ GST_WARNING_OBJECT (avi,
+                "Unknown tag at offset %d: %" GST_FOURCC_FORMAT " in AVI header",
+ offset, GST_FOURCC_ARGS (tag));
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ next:
+ /* move to next chunk */
+ if (sub)
+ gst_buffer_unref (sub);
+ sub = NULL;
+ break;
+ }
+ }
+ gst_buffer_unref (buf);
+ GST_DEBUG ("elements parsed");
+
+ /* check parsed streams */
+ if (avi->num_streams == 0) {
+ goto no_streams;
+ } else if (avi->num_streams != avi->avih->streams) {
+ GST_WARNING_OBJECT (avi,
+ "Stream header mentioned %d streams, but %d available",
+ avi->avih->streams, avi->num_streams);
+ }
+ GST_DEBUG ("Get junk and info next");
+ avi->header_state = GST_AVI_DEMUX_HEADER_INFO;
+ } else {
+ /* Need more data */
+ return ret;
+ }
+      /* fall-through */
+ case GST_AVI_DEMUX_HEADER_INFO:
+ GST_DEBUG_OBJECT (avi, "skipping junk between header and data ...");
+ while (TRUE) {
+ if (gst_adapter_available (avi->adapter) < 12)
+ return GST_FLOW_OK;
+
+ data = gst_adapter_peek (avi->adapter, 12);
+ tag = GST_READ_UINT32_LE (data);
+ size = GST_READ_UINT32_LE (data + 4);
+ ltag = GST_READ_UINT32_LE (data + 8);
+
+ if (tag == GST_RIFF_TAG_LIST) {
+ switch (ltag) {
+ case GST_RIFF_LIST_movi:
+ gst_adapter_flush (avi->adapter, 12);
+ if (!avi->first_movi_offset)
+ avi->first_movi_offset = avi->offset;
+ avi->offset += 12;
+ avi->idx1_offset = avi->offset + size - 4;
+ goto skipping_done;
+ case GST_RIFF_LIST_INFO:
+ GST_DEBUG ("Found INFO chunk");
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size)) {
+ GST_DEBUG ("got size %d", size);
+ avi->offset += 12;
+ gst_adapter_flush (avi->adapter, 12);
+ if (size > 4) {
+ buf = gst_adapter_take_buffer (avi->adapter, size - 4);
+ /* mind padding */
+ if (size & 1)
+ gst_adapter_flush (avi->adapter, 1);
+ gst_riff_parse_info (GST_ELEMENT_CAST (avi), buf, &tags);
+ if (tags) {
+ if (avi->globaltags) {
+ gst_tag_list_insert (avi->globaltags, tags,
+ GST_TAG_MERGE_REPLACE);
+ } else {
+ avi->globaltags = tags;
+ }
+ }
+ tags = NULL;
+ gst_buffer_unref (buf);
+
+ avi->offset += GST_ROUND_UP_2 (size) - 4;
+ } else {
+ GST_DEBUG ("skipping INFO LIST prefix");
+ }
+ } else {
+ /* Need more data */
+ return GST_FLOW_OK;
+ }
+ break;
+ default:
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size)) {
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ // ??? goto iterate; ???
+ } else {
+ /* Need more data */
+ return GST_FLOW_OK;
+ }
+ break;
+ }
+ } else {
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size)) {
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ //goto iterate;
+ } else {
+ /* Need more data */
+ return GST_FLOW_OK;
+ }
+ }
+ }
+ break;
+ default:
+ GST_WARNING ("unhandled header state: %d", avi->header_state);
+ break;
+ }
+skipping_done:
+
+ GST_DEBUG_OBJECT (avi, "skipping done ... (streams=%u, stream[0].indexes=%p)",
+ avi->num_streams, avi->stream[0].indexes);
+
+ GST_DEBUG ("Found movi chunk. Starting to stream data");
+ avi->state = GST_AVI_DEMUX_MOVI;
+
+#ifdef AVIDEMUX_MODIFICATION
+  /* no indexes in push mode, but the duration can still be derived from the strh chunk */
+ gst_avi_demux_calculate_durations_from_strh (avi);
+#else
+ /* no indexes in push mode, but it still sets some variables */
+ gst_avi_demux_calculate_durations_from_index (avi);
+#endif
+
+ gst_avi_demux_expose_streams (avi, TRUE);
+
+ /* prepare all streams for index 0 */
+ for (i = 0; i < avi->num_streams; i++)
+ avi->stream[i].current_entry = 0;
+
+ /* create initial NEWSEGMENT event */
+ if ((stop = avi->segment.stop) == GST_CLOCK_TIME_NONE)
+ stop = avi->segment.duration;
+
+ GST_DEBUG_OBJECT (avi, "segment stop %" G_GINT64_FORMAT, stop);
+
+ if (avi->seg_event)
+ gst_event_unref (avi->seg_event);
+ avi->seg_event = gst_event_new_new_segment_full
+ (FALSE, avi->segment.rate, avi->segment.applied_rate, GST_FORMAT_TIME,
+ avi->segment.start, stop, avi->segment.time);
+
+ gst_avi_demux_check_seekability (avi);
+
+ /* at this point we know all the streams and we can signal the no more
+ * pads signal */
+ GST_DEBUG_OBJECT (avi, "signaling no more pads");
+ gst_element_no_more_pads (GST_ELEMENT_CAST (avi));
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+no_streams:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("No streams found"));
+ return GST_FLOW_ERROR;
+ }
+header_no_list:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no LIST at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ return GST_FLOW_ERROR;
+ }
+header_no_hdrl:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no hdrl at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+header_no_avih:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no avih at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ if (sub)
+ gst_buffer_unref (sub);
+
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+header_wrong_avih:
+ {
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+}
+
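+/* add both a GST_TAG_DATE and a GST_TAG_DATE_TIME to the global tags, refusing
+ * obviously bogus dates */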
+static void
+gst_avi_demux_add_date_tag (GstAviDemux * avi, gint y, gint m, gint d,
+ gint h, gint min, gint s)
+{
+ GDate *date;
+ GstDateTime *dt;
+
+ date = g_date_new_dmy (d, m, y);
+ if (!g_date_valid (date)) {
+ /* bogus date */
+ GST_WARNING_OBJECT (avi, "Refusing to add invalid date %d-%d-%d", y, m, d);
+ g_date_free (date);
+ return;
+ }
+
+ dt = gst_date_time_new_local_time (y, m, d, h, min, s);
+
+ if (avi->globaltags == NULL)
+ avi->globaltags = gst_tag_list_new ();
+
+ gst_tag_list_add (avi->globaltags, GST_TAG_MERGE_REPLACE, GST_TAG_DATE, date,
+ NULL);
+ g_date_free (date);
+ if (dt) {
+ gst_tag_list_add (avi->globaltags, GST_TAG_MERGE_REPLACE, GST_TAG_DATE_TIME,
+ dt, NULL);
+ gst_date_time_unref (dt);
+ }
+}
+
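+/* parse the numeric IDIT variants, e.g. "2005:08:17 11:42:43" or the
+ * "YYYY/MM/DD/ HH:MM" form written by some cameras */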
+static void
+gst_avi_demux_parse_idit_nums_only (GstAviDemux * avi, gchar * data)
+{
+ gint y, m, d;
+ gint hr = 0, min = 0, sec = 0;
+ gint ret;
+
+ GST_DEBUG ("data : '%s'", data);
+
+ ret = sscanf (data, "%d:%d:%d %d:%d:%d", &y, &m, &d, &hr, &min, &sec);
+ if (ret < 3) {
+ /* Attempt YYYY/MM/DD/ HH:MM variant (found in CASIO cameras) */
+ ret = sscanf (data, "%04d/%02d/%02d/ %d:%d", &y, &m, &d, &hr, &min);
+ if (ret < 3) {
+ GST_WARNING_OBJECT (avi, "Failed to parse IDIT tag");
+ return;
+ }
+ }
+ gst_avi_demux_add_date_tag (avi, y, m, d, hr, min, sec);
+}
+
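+/* map an English three-letter month abbreviation to its number (1-12),
+ * 0 if unknown */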
+static gint
+get_month_num (gchar * data, guint size)
+{
+ if (g_ascii_strncasecmp (data, "jan", 3) == 0) {
+ return 1;
+ } else if (g_ascii_strncasecmp (data, "feb", 3) == 0) {
+ return 2;
+ } else if (g_ascii_strncasecmp (data, "mar", 3) == 0) {
+ return 3;
+ } else if (g_ascii_strncasecmp (data, "apr", 3) == 0) {
+ return 4;
+ } else if (g_ascii_strncasecmp (data, "may", 3) == 0) {
+ return 5;
+ } else if (g_ascii_strncasecmp (data, "jun", 3) == 0) {
+ return 6;
+ } else if (g_ascii_strncasecmp (data, "jul", 3) == 0) {
+ return 7;
+ } else if (g_ascii_strncasecmp (data, "aug", 3) == 0) {
+ return 8;
+ } else if (g_ascii_strncasecmp (data, "sep", 3) == 0) {
+ return 9;
+ } else if (g_ascii_strncasecmp (data, "oct", 3) == 0) {
+ return 10;
+ } else if (g_ascii_strncasecmp (data, "nov", 3) == 0) {
+ return 11;
+ } else if (g_ascii_strncasecmp (data, "dec", 3) == 0) {
+ return 12;
+ }
+
+ return 0;
+}
+
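+/* parse the textual IDIT variant, e.g. "THU OCT 26 16:46:04 2006" */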
+static void
+gst_avi_demux_parse_idit_text (GstAviDemux * avi, gchar * data)
+{
+ gint year, month, day;
+ gint hour, min, sec;
+ gint ret;
+ gchar weekday[4];
+ gchar monthstr[4];
+
+ ret = sscanf (data, "%3s %3s %d %d:%d:%d %d", weekday, monthstr, &day, &hour,
+ &min, &sec, &year);
+ if (ret != 7) {
+ GST_WARNING_OBJECT (avi, "Failed to parse IDIT tag");
+ return;
+ }
+ month = get_month_num (monthstr, strlen (monthstr));
+ gst_avi_demux_add_date_tag (avi, year, month, day, hour, min, sec);
+}
+
+static void
+gst_avi_demux_parse_idit (GstAviDemux * avi, GstBuffer * buf)
+{
+ gchar *data = (gchar *) GST_BUFFER_DATA (buf);
+ guint size = GST_BUFFER_SIZE (buf);
+ gchar *safedata = NULL;
+
+ /*
+ * According to:
+ * http://www.eden-foundation.org/products/code/film_date_stamp/index.html
+ *
+ * This tag could be in one of the below formats
+ * 2005:08:17 11:42:43
+ * THU OCT 26 16:46:04 2006
+ * Mon Mar 3 09:44:56 2008
+ *
+ * FIXME: Our date tag doesn't include hours
+ */
+
+  /* skip any initial whitespace */
+ while (size > 0 && g_ascii_isspace (data[0])) {
+ data++;
+ size--;
+ }
+
+ if (size == 0) {
+ goto non_parsable;
+ }
+
+ /* make a safe copy to add a \0 to the end of the string */
+ safedata = g_strndup (data, size);
+
+ /* test if the first char is a alpha or a number */
+ if (g_ascii_isdigit (data[0])) {
+ gst_avi_demux_parse_idit_nums_only (avi, safedata);
+ g_free (safedata);
+ return;
+ } else if (g_ascii_isalpha (data[0])) {
+ gst_avi_demux_parse_idit_text (avi, safedata);
+ g_free (safedata);
+ return;
+ }
+
+ g_free (safedata);
+
+non_parsable:
+ GST_WARNING_OBJECT (avi, "IDIT tag has no parsable info");
+}
+
+/*
+ * Read full AVI headers.
+ */
+static GstFlowReturn
+gst_avi_demux_stream_header_pull (GstAviDemux * avi)
+{
+ GstFlowReturn res;
+ GstBuffer *buf, *sub = NULL;
+ guint32 tag;
+ guint offset = 4;
+ gint64 stop;
+ GstElement *element = GST_ELEMENT_CAST (avi);
+ GstClockTime stamp;
+ GstTagList *tags = NULL;
+
+ stamp = gst_util_get_timestamp ();
+
+ /* the header consists of a 'hdrl' LIST tag */
+ res = gst_riff_read_chunk (element, avi->sinkpad, &avi->offset, &tag, &buf);
+ if (res != GST_FLOW_OK)
+ goto pull_range_failed;
+ else if (tag != GST_RIFF_TAG_LIST)
+ goto no_list;
+ else if (GST_BUFFER_SIZE (buf) < 4)
+ goto no_header;
+
+ GST_DEBUG_OBJECT (avi, "parsing headers");
+
+ /* Find the 'hdrl' LIST tag */
+ while (GST_READ_UINT32_LE (GST_BUFFER_DATA (buf)) != GST_RIFF_LIST_hdrl) {
+ GST_LOG_OBJECT (avi, "buffer contains %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (GST_BUFFER_DATA (buf))));
+
+ /* Eat up */
+ gst_buffer_unref (buf);
+
+ /* read new chunk */
+ res = gst_riff_read_chunk (element, avi->sinkpad, &avi->offset, &tag, &buf);
+ if (res != GST_FLOW_OK)
+ goto pull_range_failed;
+ else if (tag != GST_RIFF_TAG_LIST)
+ goto no_list;
+ else if (GST_BUFFER_SIZE (buf) < 4)
+ goto no_header;
+ }
+
+ GST_DEBUG_OBJECT (avi, "hdrl LIST tag found");
+
+ gst_avi_demux_roundup_list (avi, &buf);
+
+ /* the hdrl starts with a 'avih' header */
+ if (!gst_riff_parse_chunk (element, buf, &offset, &tag, &sub))
+ goto no_avih;
+ else if (tag != GST_RIFF_TAG_avih)
+ goto no_avih;
+ else if (!gst_avi_demux_parse_avih (avi, sub, &avi->avih))
+ goto invalid_avih;
+
+ GST_DEBUG_OBJECT (avi, "AVI header ok, reading elements from header");
+
+ /* now, read the elements from the header until the end */
+ while (gst_riff_parse_chunk (element, buf, &offset, &tag, &sub)) {
+ /* sub can be NULL on empty tags */
+ if (!sub)
+ continue;
+
+ switch (tag) {
+ case GST_RIFF_TAG_LIST:
+ {
+ guint8 *data;
+ guint32 fourcc;
+
+ if (GST_BUFFER_SIZE (sub) < 4)
+ goto next;
+
+ data = GST_BUFFER_DATA (sub);
+ fourcc = GST_READ_UINT32_LE (data);
+
+ switch (fourcc) {
+ case GST_RIFF_LIST_strl:
+ if (!(gst_avi_demux_parse_stream (avi, sub))) {
+ GST_ELEMENT_WARNING (avi, STREAM, DEMUX, (NULL),
+ ("failed to parse stream, ignoring"));
+ sub = NULL;
+#ifdef DIVX_DRM
+ goto invalid_stream;
+#endif
+ }
+ sub = NULL;
+ goto next;
+ case GST_RIFF_LIST_odml:
+ gst_avi_demux_parse_odml (avi, sub);
+ sub = NULL;
+ break;
+ case GST_RIFF_LIST_INFO:
+ GST_BUFFER_DATA (sub) = data + 4;
+ GST_BUFFER_SIZE (sub) -= 4;
+ gst_riff_parse_info (element, sub, &tags);
+ if (tags) {
+ if (avi->globaltags) {
+ gst_tag_list_insert (avi->globaltags, tags,
+ GST_TAG_MERGE_REPLACE);
+ } else {
+ avi->globaltags = tags;
+ }
+ }
+ tags = NULL;
+ break;
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Unknown list %" GST_FOURCC_FORMAT " in AVI header",
+ GST_FOURCC_ARGS (fourcc));
+ GST_MEMDUMP_OBJECT (avi, "Unknown list", GST_BUFFER_DATA (sub),
+ GST_BUFFER_SIZE (sub));
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ goto next;
+ }
+ break;
+ }
+ case GST_RIFF_IDIT:
+ gst_avi_demux_parse_idit (avi, sub);
+ goto next;
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Unknown tag %" GST_FOURCC_FORMAT " in AVI header at off %d",
+ GST_FOURCC_ARGS (tag), offset);
+ GST_MEMDUMP_OBJECT (avi, "Unknown tag", GST_BUFFER_DATA (sub),
+ GST_BUFFER_SIZE (sub));
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ next:
+ if (sub)
+ gst_buffer_unref (sub);
+ sub = NULL;
+ break;
+ }
+ }
+ gst_buffer_unref (buf);
+ GST_DEBUG ("elements parsed");
+
+ /* check parsed streams */
+ if (avi->num_streams == 0)
+ goto no_streams;
+ else if (avi->num_streams != avi->avih->streams) {
+ GST_WARNING_OBJECT (avi,
+ "Stream header mentioned %d streams, but %d available",
+ avi->avih->streams, avi->num_streams);
+ }
+
+ GST_DEBUG_OBJECT (avi, "skipping junk between header and data, offset=%"
+ G_GUINT64_FORMAT, avi->offset);
+
+ /* Now, find the data (i.e. skip all junk between header and data) */
+ do {
+ guint size;
+ guint8 *data;
+ guint32 tag, ltag;
+
+ res = gst_pad_pull_range (avi->sinkpad, avi->offset, 12, &buf);
+ if (res != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (avi, "pull_range failure while looking for tags");
+ goto pull_range_failed;
+ } else if (GST_BUFFER_SIZE (buf) < 12) {
+ GST_DEBUG_OBJECT (avi, "got %d bytes which is less than 12 bytes",
+ GST_BUFFER_SIZE (buf));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+
+ data = GST_BUFFER_DATA (buf);
+
+ tag = GST_READ_UINT32_LE (data);
+ size = GST_READ_UINT32_LE (data + 4);
+ ltag = GST_READ_UINT32_LE (data + 8);
+
+ GST_DEBUG ("tag %" GST_FOURCC_FORMAT ", size %u",
+ GST_FOURCC_ARGS (tag), size);
+ GST_MEMDUMP ("Tag content", data, GST_BUFFER_SIZE (buf));
+ gst_buffer_unref (buf);
+
+ switch (tag) {
+ case GST_RIFF_TAG_LIST:{
+ switch (ltag) {
+ case GST_RIFF_LIST_movi:
+ GST_DEBUG_OBJECT (avi,
+ "Reached the 'movi' tag, we're done with skipping");
+ goto skipping_done;
+ case GST_RIFF_LIST_INFO:
+ res =
+ gst_riff_read_chunk (element, avi->sinkpad, &avi->offset, &tag,
+ &buf);
+ if (res != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (avi, "couldn't read INFO chunk");
+ goto pull_range_failed;
+ }
+ GST_DEBUG ("got size %u", GST_BUFFER_SIZE (buf));
+ if (size < 4) {
+ GST_DEBUG ("skipping INFO LIST prefix");
+ avi->offset += (4 - GST_ROUND_UP_2 (size));
+ gst_buffer_unref (buf);
+ continue;
+ }
+
+ sub = gst_buffer_create_sub (buf, 4, GST_BUFFER_SIZE (buf) - 4);
+ gst_riff_parse_info (element, sub, &tags);
+ if (tags) {
+ if (avi->globaltags) {
+ gst_tag_list_insert (avi->globaltags, tags,
+ GST_TAG_MERGE_REPLACE);
+ } else {
+ avi->globaltags = tags;
+ }
+ }
+ tags = NULL;
+ if (sub) {
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ gst_buffer_unref (buf);
+ /* gst_riff_read_chunk() has already advanced avi->offset */
+ break;
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Skipping unknown list tag %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (ltag));
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ break;
+ }
+ }
+ break;
+ default:
+ GST_WARNING_OBJECT (avi, "Skipping unknown tag %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ /* Fall-through */
+ case GST_MAKE_FOURCC ('J', 'U', 'N', 'Q'):
+ case GST_MAKE_FOURCC ('J', 'U', 'N', 'K'):
+ /* Only get buffer for debugging if the memdump is needed */
+ if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= 9) {
+ res = gst_pad_pull_range (avi->sinkpad, avi->offset, size, &buf);
+ if (res != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (avi, "couldn't read INFO chunk");
+ goto pull_range_failed;
+ }
+ GST_MEMDUMP ("Junk", GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+ gst_buffer_unref (buf);
+ }
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ break;
+ }
+ } while (1);
+skipping_done:
+
+ GST_DEBUG_OBJECT (avi, "skipping done ... (streams=%u, stream[0].indexes=%p)",
+ avi->num_streams, avi->stream[0].indexes);
+
+ /* create or read stream index (for seeking) */
+ if (avi->stream[0].indexes != NULL) {
+ /* we read a super index already (gst_avi_demux_parse_superindex() ) */
+ gst_avi_demux_read_subindexes_pull (avi);
+ }
+ if (!avi->have_index) {
+ if (avi->avih->flags & GST_RIFF_AVIH_HASINDEX)
+ gst_avi_demux_stream_index (avi);
+
+ /* still no index, scan */
+ if (!avi->have_index) {
+ gst_avi_demux_stream_scan (avi);
+
+ /* still no index.. this is a fatal error for now.
+ * FIXME, we should switch to plain push mode without seeking
+ * instead of failing. */
+ if (!avi->have_index)
+ goto no_index;
+ }
+ }
+ /* use the indexes now to construct nice durations */
+ gst_avi_demux_calculate_durations_from_index (avi);
+
+ gst_avi_demux_expose_streams (avi, FALSE);
+
+ /* create initial NEWSEGMENT event */
+ if ((stop = avi->segment.stop) == GST_CLOCK_TIME_NONE)
+ stop = avi->segment.duration;
+
+ GST_DEBUG_OBJECT (avi, "segment stop %" G_GINT64_FORMAT, stop);
+
+ /* do initial seek to the default segment values */
+ gst_avi_demux_do_seek (avi, &avi->segment);
+
+ /* prepare initial segment */
+ if (avi->seg_event)
+ gst_event_unref (avi->seg_event);
+ avi->seg_event = gst_event_new_new_segment_full
+ (FALSE, avi->segment.rate, avi->segment.applied_rate, GST_FORMAT_TIME,
+ avi->segment.start, stop, avi->segment.time);
+
+ stamp = gst_util_get_timestamp () - stamp;
+ GST_DEBUG_OBJECT (avi, "pulling header took %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stamp));
+
+ /* at this point we know all the streams and we can signal the no more
+ * pads signal */
+ GST_DEBUG_OBJECT (avi, "signaling no more pads");
+ gst_element_no_more_pads (GST_ELEMENT_CAST (avi));
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+no_list:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no LIST at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+no_header:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no hdrl at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+no_avih:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no avih at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ if (sub)
+ gst_buffer_unref (sub);
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+invalid_avih:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (cannot parse avih at start)"));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+no_streams:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("No streams found"));
+ return GST_FLOW_ERROR;
+ }
+no_index:
+ {
+    GST_WARNING ("file has no index or the index is too big");
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Could not get/create index"));
+ return GST_FLOW_ERROR;
+ }
+pull_range_failed:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("pull_range flow reading header: %s", gst_flow_get_name (res)));
+ return GST_FLOW_ERROR;
+ }
+#ifdef DIVX_DRM
+invalid_stream:
+ {
+ gst_buffer_unref(buf);
+ return GST_FLOW_ERROR;
+ }
+#endif
+}
+
+/* move a stream to @index */
+static void
+gst_avi_demux_move_stream (GstAviDemux * avi, GstAviStream * stream,
+ GstSegment * segment, guint index)
+{
+ GST_DEBUG_OBJECT (avi, "Move stream %d to %u", stream->num, index);
+
+ if (segment->rate < 0.0) {
+ guint next_key;
+    /* Because we don't know the frame order we need to push from the previous
+     * keyframe to the next keyframe. A smart decoder downstream will notice
+     * that too many encoded frames were sent and return UNEXPECTED once enough
+     * decoded frames are available to fill the segment. */
+#ifdef AVIDEMUX_MODIFICATION
+ next_key = gst_avi_demux_index_for_time (avi, stream, avi->seek_kf_offset);
+#else
+ next_key = gst_avi_demux_index_next (avi, stream, index, TRUE);
+#endif
+
+    /* FIXME, we go back to 0, we should look at segment.start. We will however
+     * stop earlier when we see a timestamp < segment.start */
+ stream->start_entry = 0;
+ stream->step_entry = index;
+ stream->current_entry = index;
+ stream->stop_entry = next_key;
+
+ GST_DEBUG_OBJECT (avi, "reverse seek: start %u, step %u, stop %u",
+ stream->start_entry, stream->step_entry, stream->stop_entry);
+ } else {
+ stream->start_entry = index;
+ stream->step_entry = index;
+ stream->stop_entry = gst_avi_demux_index_last (avi, stream);
+ }
+ if (stream->current_entry != index) {
+ GST_DEBUG_OBJECT (avi, "Move DISCONT from %u to %u",
+ stream->current_entry, index);
+ stream->current_entry = index;
+ stream->discont = TRUE;
+ }
+
+ /* update the buffer info */
+ gst_avi_demux_get_buffer_info (avi, stream, index,
+ &stream->current_timestamp, &stream->current_ts_end,
+ &stream->current_offset, &stream->current_offset_end);
+
+ GST_DEBUG_OBJECT (avi, "Moved to %u, ts %" GST_TIME_FORMAT
+ ", ts_end %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
+ ", off_end %" G_GUINT64_FORMAT, index,
+ GST_TIME_ARGS (stream->current_timestamp),
+ GST_TIME_ARGS (stream->current_ts_end), stream->current_offset,
+ stream->current_offset_end);
+
+ GST_DEBUG_OBJECT (avi, "Seeking to offset %" G_GUINT64_FORMAT,
+ stream->index[index].offset);
+}
+
+/*
+ * Do the actual seeking.
+ */
+static gboolean
+gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment)
+{
+ GstClockTime seek_time;
+ gboolean keyframe;
+ guint i, index;
+ GstAviStream *stream;
+
+ seek_time = segment->last_stop;
+
+#ifdef AVIDEMUX_MODIFICATION
+ avi->seek_kf_offset = seek_time;
+#endif
+
+ keyframe = !!(segment->flags & GST_SEEK_FLAG_KEY_UNIT);
+
+ GST_DEBUG_OBJECT (avi, "seek to: %" GST_TIME_FORMAT
+ " keyframe seeking:%d", GST_TIME_ARGS (seek_time), keyframe);
+
+ /* FIXME, this code assumes the main stream with keyframes is stream 0,
+ * which is mostly correct... */
+ stream = &avi->stream[avi->main_stream];
+
+ /* get the entry index for the requested position */
+ index = gst_avi_demux_index_for_time (avi, stream, seek_time);
+ GST_DEBUG_OBJECT (avi, "Got entry %u", index);
+
+
+#ifdef AVIDEMUX_MODIFICATION
+ if(segment->rate < 0.0 && index) {
+    /* If the entry is already a keyframe, step back one entry so that the
+     * previous keyframe is fetched for video; this avoids pushing out-of-segment
+     * data when the seek position lands exactly on a keyframe. */
+ if (ENTRY_IS_KEYFRAME (&stream->index[index])) {
+ index--;
+ }
+ }
+#endif
+
+
+ /* check if we are already on a keyframe */
+ if (!ENTRY_IS_KEYFRAME (&stream->index[index])) {
+ GST_DEBUG_OBJECT (avi, "not keyframe, searching back");
+ /* now go to the previous keyframe, this is where we should start
+ * decoding from. */
+ index = gst_avi_demux_index_prev (avi, stream, index, TRUE);
+ GST_DEBUG_OBJECT (avi, "previous keyframe at %u", index);
+ }
+
+ /* move the main stream to this position */
+ gst_avi_demux_move_stream (avi, stream, segment, index);
+
+ if (keyframe) {
+ /* when seeking to a keyframe, we update the result seek time
+ * to the time of the keyframe. */
+ seek_time = stream->current_timestamp;
+ GST_DEBUG_OBJECT (avi, "keyframe adjusted to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (seek_time));
+ }
+
+ /* the seek time is also the last_stop and stream time when going
+ * forwards */
+ segment->last_stop = seek_time;
+
+#ifdef AVIDEMUX_MODIFICATION
+  /* initialize trick-play rate parameters */
+  stream->trickplay_info->prev_kidx = 0;
+  stream->trickplay_info->next_kidx = 0;
+  stream->trickplay_info->kidxs_dur_diff = 0;
+ stream->trickplay_info->start_pos = segment->last_stop;
+ /* Adjust seek_time to video keyframe's timestamp so that audio can align to that position */
+ if(segment->rate < 0.0)
+ seek_time = stream->current_timestamp;
+#else
+ if (segment->rate > 0.0)
+ segment->time = seek_time;
+#endif
+
+ /* now set DISCONT and align the other streams */
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *ostream;
+
+ ostream = &avi->stream[i];
+ if ((ostream == stream) || (ostream->index == NULL))
+ continue;
+
+ /* get the entry index for the requested position */
+ index = gst_avi_demux_index_for_time (avi, ostream, seek_time);
+
+ /* move to previous keyframe */
+ if (!ENTRY_IS_KEYFRAME (&ostream->index[index]))
+ index = gst_avi_demux_index_prev (avi, ostream, index, TRUE);
+
+ gst_avi_demux_move_stream (avi, ostream, segment, index);
+ }
+ GST_DEBUG_OBJECT (avi, "done seek to: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (seek_time));
+
+ return TRUE;
+}
+
+/*
+ * Handle seek event in pull mode.
+ */
+static gboolean
+gst_avi_demux_handle_seek (GstAviDemux * avi, GstPad * pad, GstEvent * event)
+{
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type;
+ gint64 cur = 0, stop;
+ gboolean flush;
+ gboolean update;
+ GstSegment seeksegment = { 0, };
+ gint i;
+
+ if (event) {
+ GST_DEBUG_OBJECT (avi, "doing seek with event");
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &cur_type, &cur, &stop_type, &stop);
+
+ /* we have to have a format as the segment format. Try to convert
+ * if not. */
+ if (format != GST_FORMAT_TIME) {
+ GstFormat fmt = GST_FORMAT_TIME;
+ gboolean res = TRUE;
+
+ if (cur_type != GST_SEEK_TYPE_NONE)
+ res = gst_pad_query_convert (pad, format, cur, &fmt, &cur);
+ if (res && stop_type != GST_SEEK_TYPE_NONE)
+ res = gst_pad_query_convert (pad, format, stop, &fmt, &stop);
+ if (!res)
+ goto no_format;
+
+ format = fmt;
+ }
+ GST_DEBUG_OBJECT (avi,
+ "seek requested: rate %g cur %" GST_TIME_FORMAT " stop %"
+ GST_TIME_FORMAT, rate, GST_TIME_ARGS (cur), GST_TIME_ARGS (stop));
+ /* FIXME: can we do anything with rate!=1.0 */
+ } else {
+ GST_DEBUG_OBJECT (avi, "doing seek without event");
+ flags = 0;
+ rate = 1.0;
+ }
+
+ /* save flush flag */
+ flush = flags & GST_SEEK_FLAG_FLUSH;
+
+ if (flush) {
+ GstEvent *fevent = gst_event_new_flush_start ();
+
+ /* for a flushing seek, we send a flush_start on all pads. This will
+ * eventually stop streaming with a WRONG_STATE. We can thus eventually
+ * take the STREAM_LOCK. */
+ GST_DEBUG_OBJECT (avi, "sending flush start");
+ gst_avi_demux_push_event (avi, gst_event_ref (fevent));
+ gst_pad_push_event (avi->sinkpad, fevent);
+ } else {
+ /* a non-flushing seek, we PAUSE the task so that we can take the
+ * STREAM_LOCK */
+ GST_DEBUG_OBJECT (avi, "non flushing seek, pausing task");
+ gst_pad_pause_task (avi->sinkpad);
+ }
+
+ /* wait for streaming to stop */
+ GST_DEBUG_OBJECT (avi, "wait for streaming to stop");
+ GST_PAD_STREAM_LOCK (avi->sinkpad);
+
+ /* copy segment, we need this because we still need the old
+ * segment when we close the current segment. */
+ memcpy (&seeksegment, &avi->segment, sizeof (GstSegment));
+
+ if (event) {
+ GST_DEBUG_OBJECT (avi, "configuring seek");
+ gst_segment_set_seek (&seeksegment, rate, format, flags,
+ cur_type, cur, stop_type, stop, &update);
+ }
+
+#ifdef AVIDEMUX_MODIFICATION
+ if (cur != GST_CLOCK_TIME_NONE)
+ gst_segment_set_last_stop (&seeksegment, GST_FORMAT_TIME, cur);
+#endif
+
+ /* do the seek, seeksegment.last_stop contains the new position, this
+ * actually never fails. */
+ gst_avi_demux_do_seek (avi, &seeksegment);
+
+ gst_event_replace (&avi->close_seg_event, NULL);
+ if (flush) {
+ GstEvent *fevent = gst_event_new_flush_stop ();
+
+ GST_DEBUG_OBJECT (avi, "sending flush stop");
+ gst_avi_demux_push_event (avi, gst_event_ref (fevent));
+ gst_pad_push_event (avi->sinkpad, fevent);
+ } else if (avi->segment_running) {
+ /* we are running the current segment and doing a non-flushing seek,
+ * close the segment first based on the last_stop. */
+ GST_DEBUG_OBJECT (avi, "closing running segment %" G_GINT64_FORMAT
+ " to %" G_GINT64_FORMAT, avi->segment.start, avi->segment.last_stop);
+ avi->close_seg_event = gst_event_new_new_segment_full (TRUE,
+ avi->segment.rate, avi->segment.applied_rate, avi->segment.format,
+ avi->segment.start, avi->segment.last_stop, avi->segment.time);
+ }
+
+ /* now update the real segment info */
+ memcpy (&avi->segment, &seeksegment, sizeof (GstSegment));
+
+ /* post the SEGMENT_START message when we do segmented playback */
+ if (avi->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ gst_element_post_message (GST_ELEMENT_CAST (avi),
+ gst_message_new_segment_start (GST_OBJECT_CAST (avi),
+ avi->segment.format, avi->segment.last_stop));
+ }
+
+ /* prepare for streaming again */
+ if ((stop = avi->segment.stop) == GST_CLOCK_TIME_NONE)
+ stop = avi->segment.duration;
+
+ /* queue the segment event for the streaming thread. */
+ if (avi->seg_event)
+ gst_event_unref (avi->seg_event);
+ if (avi->segment.rate > 0.0) {
+ /* forwards goes from last_stop to stop */
+ avi->seg_event = gst_event_new_new_segment_full (FALSE,
+ avi->segment.rate, avi->segment.applied_rate, avi->segment.format,
+ avi->segment.last_stop, stop, avi->segment.time);
+ } else {
+#ifdef AVIDEMUX_MODIFICATION
+ avi->segment.start = 0;
+ avi->segment.time = 0;
+#endif
+ /* reverse goes from start to last_stop */
+ avi->seg_event = gst_event_new_new_segment_full (FALSE,
+ avi->segment.rate, avi->segment.applied_rate, avi->segment.format,
+ avi->segment.start, avi->segment.last_stop, avi->segment.time);
+ }
+
+ if (!avi->streaming) {
+ avi->segment_running = TRUE;
+ gst_pad_start_task (avi->sinkpad, (GstTaskFunction) gst_avi_demux_loop,
+ avi->sinkpad);
+ }
+ /* reset the last flow and mark discont, seek is always DISCONT */
+ for (i = 0; i < avi->num_streams; i++) {
+ GST_DEBUG_OBJECT (avi, "marking DISCONT");
+ avi->stream[i].last_flow = GST_FLOW_OK;
+ avi->stream[i].discont = TRUE;
+ }
+ GST_PAD_STREAM_UNLOCK (avi->sinkpad);
+
+ return TRUE;
+
+ /* ERRORS */
+no_format:
+ {
+ GST_DEBUG_OBJECT (avi, "unsupported format given, seek aborted.");
+ return FALSE;
+ }
+}
+
+/*
+ * Handle seek event in push mode.
+ */
+static gboolean
+avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad, GstEvent * event)
+{
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type;
+ gint64 cur, stop;
+ gboolean keyframe;
+ GstAviStream *stream;
+ guint index;
+ guint n, str_num;
+ guint64 min_offset;
+ GstSegment seeksegment;
+ gboolean update;
+
+ /* check we have the index */
+ if (!avi->have_index) {
+ GST_DEBUG_OBJECT (avi, "no seek index built, seek aborted.");
+ return FALSE;
+ } else {
+ GST_DEBUG_OBJECT (avi, "doing push-based seek with event");
+ }
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &cur_type, &cur, &stop_type, &stop);
+
+ if (format != GST_FORMAT_TIME) {
+ GstFormat fmt = GST_FORMAT_TIME;
+ gboolean res = TRUE;
+
+ if (cur_type != GST_SEEK_TYPE_NONE)
+ res = gst_pad_query_convert (pad, format, cur, &fmt, &cur);
+ if (res && stop_type != GST_SEEK_TYPE_NONE)
+ res = gst_pad_query_convert (pad, format, stop, &fmt, &stop);
+ if (!res) {
+ GST_DEBUG_OBJECT (avi, "unsupported format given, seek aborted.");
+ return FALSE;
+ }
+
+ format = fmt;
+ }
+
+ /* let gst_segment handle any tricky stuff */
+ GST_DEBUG_OBJECT (avi, "configuring seek");
+ memcpy (&seeksegment, &avi->segment, sizeof (GstSegment));
+ gst_segment_set_seek (&seeksegment, rate, format, flags,
+ cur_type, cur, stop_type, stop, &update);
+
+ keyframe = !!(flags & GST_SEEK_FLAG_KEY_UNIT);
+ cur = seeksegment.last_stop;
+
+ GST_DEBUG_OBJECT (avi,
+ "Seek requested: ts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT
+ ", kf %u, rate %lf", GST_TIME_ARGS (cur), GST_TIME_ARGS (stop), keyframe,
+ rate);
+
+ if (rate < 0) {
+ GST_DEBUG_OBJECT (avi, "negative rate seek not supported in push mode");
+ return FALSE;
+ }
+
+ /* FIXME, this code assumes the main stream with keyframes is stream 0,
+ * which is mostly correct... */
+ str_num = avi->main_stream;
+ stream = &avi->stream[str_num];
+
+ /* get the entry index for the requested position */
+ index = gst_avi_demux_index_for_time (avi, stream, cur);
+ GST_DEBUG_OBJECT (avi, "str %u: Found entry %u for %" GST_TIME_FORMAT,
+ str_num, index, GST_TIME_ARGS (cur));
+
+ /* check if we are already on a keyframe */
+ if (!ENTRY_IS_KEYFRAME (&stream->index[index])) {
+ GST_DEBUG_OBJECT (avi, "Entry is not a keyframe - searching back");
+ /* now go to the previous keyframe, this is where we should start
+ * decoding from. */
+ index = gst_avi_demux_index_prev (avi, stream, index, TRUE);
+ GST_DEBUG_OBJECT (avi, "Found previous keyframe at %u", index);
+ }
+
+ gst_avi_demux_get_buffer_info (avi, stream, index,
+ &stream->current_timestamp, &stream->current_ts_end,
+ &stream->current_offset, &stream->current_offset_end);
+
+ /* re-use cur to be the timestamp of the seek as it _will_ be */
+ cur = stream->current_timestamp;
+
+ min_offset = stream->index[index].offset;
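+  /* stream index offsets point at the chunk data; subtract 8 to remember the
+   * position of the chunk header */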
+ avi->seek_kf_offset = min_offset - 8;
+
+ GST_DEBUG_OBJECT (avi,
+ "Seek to: ts %" GST_TIME_FORMAT " (on str %u, idx %u, offset %"
+ G_GUINT64_FORMAT ")", GST_TIME_ARGS (stream->current_timestamp), str_num,
+ index, min_offset);
+
+ for (n = 0; n < avi->num_streams; n++) {
+ GstAviStream *str = &avi->stream[n];
+ guint idx;
+
+ if (n == avi->main_stream)
+ continue;
+
+ /* get the entry index for the requested position */
+ idx = gst_avi_demux_index_for_time (avi, str, cur);
+ GST_DEBUG_OBJECT (avi, "str %u: Found entry %u for %" GST_TIME_FORMAT, n,
+ idx, GST_TIME_ARGS (cur));
+
+ /* check if we are already on a keyframe */
+ if (!ENTRY_IS_KEYFRAME (&str->index[idx])) {
+ GST_DEBUG_OBJECT (avi, "Entry is not a keyframe - searching back");
+ /* now go to the previous keyframe, this is where we should start
+ * decoding from. */
+ idx = gst_avi_demux_index_prev (avi, str, idx, TRUE);
+ GST_DEBUG_OBJECT (avi, "Found previous keyframe at %u", idx);
+ }
+
+ gst_avi_demux_get_buffer_info (avi, str, idx,
+ &str->current_timestamp, &str->current_ts_end,
+ &str->current_offset, &str->current_offset_end);
+
+ if (str->index[idx].offset < min_offset) {
+ min_offset = str->index[idx].offset;
+ GST_DEBUG_OBJECT (avi,
+ "Found an earlier offset at %" G_GUINT64_FORMAT ", str %u",
+ min_offset, n);
+ str_num = n;
+ stream = str;
+ index = idx;
+ }
+ }
+
+ GST_DEBUG_OBJECT (avi,
+ "Seek performed: str %u, offset %" G_GUINT64_FORMAT ", idx %u, ts %"
+ GST_TIME_FORMAT ", ts_end %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
+ ", off_end %" G_GUINT64_FORMAT, str_num, min_offset, index,
+ GST_TIME_ARGS (stream->current_timestamp),
+ GST_TIME_ARGS (stream->current_ts_end), stream->current_offset,
+ stream->current_offset_end);
+
+ /* index data refers to data, not chunk header (for pull mode convenience) */
+ min_offset -= 8;
+ GST_DEBUG_OBJECT (avi, "seeking to chunk at offset %" G_GUINT64_FORMAT,
+ min_offset);
+
+ if (!perform_seek_to_offset (avi, min_offset)) {
+ GST_DEBUG_OBJECT (avi, "seek event failed!");
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+/*
+ * Decide whether we can perform the seek right away, or whether we have to let
+ * the chain function build the seek indexes first and handle the seek later.
+ */
+static gboolean
+gst_avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad,
+ GstEvent * event)
+{
+ /* check for having parsed index already */
+ if (!avi->have_index) {
+ guint64 offset = 0;
+ gboolean building_index;
+
+ GST_OBJECT_LOCK (avi);
+ /* handle the seek event in the chain function */
+ avi->state = GST_AVI_DEMUX_SEEK;
+
+ /* copy the event */
+ if (avi->seek_event)
+ gst_event_unref (avi->seek_event);
+ avi->seek_event = gst_event_ref (event);
+
+    /* set the building_index flag so that only one thread can set up the
+     * structures for index seeking. */
+ building_index = avi->building_index;
+ if (!building_index) {
+ avi->building_index = TRUE;
+ if (avi->stream[0].indexes) {
+ avi->odml_stream = 0;
+ avi->odml_subidxs = avi->stream[avi->odml_stream].indexes;
+ offset = avi->odml_subidxs[0];
+ } else {
+ offset = avi->idx1_offset;
+ }
+ }
+ GST_OBJECT_UNLOCK (avi);
+
+ if (!building_index) {
+ /* seek to the first subindex or legacy index */
+ GST_INFO_OBJECT (avi,
+ "Seeking to legacy index/first subindex at %" G_GUINT64_FORMAT,
+ offset);
+ return perform_seek_to_offset (avi, offset);
+ }
+
+ /* FIXME: we have to always return true so that we don't block the seek
+ * thread.
+ * Note: maybe it is OK to return true if we're still building the index */
+ return TRUE;
+ }
+
+ return avi_demux_handle_seek_push (avi, pad, event);
+}
+
+/*
+ * Helper for gst_avi_demux_invert()
+ */
+static inline void
+swap_line (guint8 * d1, guint8 * d2, guint8 * tmp, gint bytes)
+{
+ memcpy (tmp, d1, bytes);
+ memcpy (d1, d2, bytes);
+ memcpy (d2, tmp, bytes);
+}
+
+
+#define gst_avi_demux_is_uncompressed(fourcc) \
+ (fourcc && \
+ (fourcc == GST_RIFF_DIB || \
+ fourcc == GST_RIFF_rgb || \
+ fourcc == GST_RIFF_RGB || fourcc == GST_RIFF_RAW))
+
+/*
+ * Invert DIB buffers... Takes existing buffer and
+ * returns either the buffer or a new one (with old
+ * one dereferenced).
+ * FIXME: can't we preallocate tmp? and remember stride, bpp?
+ */
+static GstBuffer *
+gst_avi_demux_invert (GstAviStream * stream, GstBuffer * buf)
+{
+ GstStructure *s;
+ gint y, w, h;
+ gint bpp, stride;
+ guint8 *tmp = NULL;
+
+ if (stream->strh->type != GST_RIFF_FCC_vids)
+ return buf;
+
+ if (!gst_avi_demux_is_uncompressed (stream->strh->fcc_handler)) {
+    return buf;   /* Ignore non-DIB buffers */
+ }
+
+ s = gst_caps_get_structure (GST_PAD_CAPS (stream->pad), 0);
+ if (!gst_structure_get_int (s, "bpp", &bpp)) {
+ GST_WARNING ("Failed to retrieve depth from caps");
+ return buf;
+ }
+
+ if (stream->strf.vids == NULL) {
+ GST_WARNING ("Failed to retrieve vids for stream");
+ return buf;
+ }
+
+ h = stream->strf.vids->height;
+ w = stream->strf.vids->width;
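+  /* DIB image rows are padded to a 4-byte boundary */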
+ stride = GST_ROUND_UP_4 (w * (bpp / 8));
+
+ buf = gst_buffer_make_writable (buf);
+ if (GST_BUFFER_SIZE (buf) < (stride * h)) {
+ GST_WARNING ("Buffer is smaller than reported Width x Height x Depth");
+ return buf;
+ }
+
+ tmp = g_malloc (stride);
+
+ for (y = 0; y < h / 2; y++) {
+ swap_line (GST_BUFFER_DATA (buf) + stride * y,
+ GST_BUFFER_DATA (buf) + stride * (h - 1 - y), tmp, stride);
+ }
+
+ g_free (tmp);
+
+ return buf;
+}
+
+static void
+gst_avi_demux_add_assoc (GstAviDemux * avi, GstAviStream * stream,
+ GstClockTime timestamp, guint64 offset, gboolean keyframe)
+{
+ /* do not add indefinitely for open-ended streaming */
+ if (G_UNLIKELY (avi->element_index && avi->seekable)) {
+ GST_LOG_OBJECT (avi, "adding association %" GST_TIME_FORMAT "-> %"
+ G_GUINT64_FORMAT, GST_TIME_ARGS (timestamp), offset);
+ gst_index_add_association (avi->element_index, avi->index_id,
+ keyframe ? GST_ASSOCIATION_FLAG_KEY_UNIT :
+ GST_ASSOCIATION_FLAG_DELTA_UNIT, GST_FORMAT_TIME, timestamp,
+ GST_FORMAT_BYTES, offset, NULL);
+ /* current_entry is DEFAULT (frame #) */
+ gst_index_add_association (avi->element_index, stream->index_id,
+ keyframe ? GST_ASSOCIATION_FLAG_KEY_UNIT :
+ GST_ASSOCIATION_FLAG_DELTA_UNIT, GST_FORMAT_TIME, timestamp,
+ GST_FORMAT_BYTES, offset, GST_FORMAT_DEFAULT, stream->current_entry,
+ NULL);
+ }
+}
+
+/*
+ * Returns the aggregated GstFlowReturn.
+ */
+static GstFlowReturn
+gst_avi_demux_combine_flows (GstAviDemux * avi, GstAviStream * stream,
+ GstFlowReturn ret)
+{
+ guint i;
+ gboolean unexpected = FALSE, not_linked = TRUE;
+
+ /* store the value */
+ stream->last_flow = ret;
+
+  /* any flow return other than eos (unexpected) or not-linked can be returned right away */
+ if (G_LIKELY (ret != GST_FLOW_UNEXPECTED && ret != GST_FLOW_NOT_LINKED))
+ goto done;
+
+ /* only return NOT_LINKED if all other pads returned NOT_LINKED */
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *ostream = &avi->stream[i];
+
+ ret = ostream->last_flow;
+ /* no unexpected or unlinked, return */
+ if (G_LIKELY (ret != GST_FLOW_UNEXPECTED && ret != GST_FLOW_NOT_LINKED))
+ goto done;
+
+ /* we check to see if we have at least 1 unexpected or all unlinked */
+ unexpected |= (ret == GST_FLOW_UNEXPECTED);
+ not_linked &= (ret == GST_FLOW_NOT_LINKED);
+ }
+  /* when we get here, all streams are either unlinked or unexpected */
+ if (not_linked)
+ ret = GST_FLOW_NOT_LINKED;
+ else if (unexpected)
+ ret = GST_FLOW_UNEXPECTED;
+done:
+ GST_LOG_OBJECT (avi, "combined %s to return %s",
+ gst_flow_get_name (stream->last_flow), gst_flow_get_name (ret));
+ return ret;
+}
+
+/* move @stream to the next position in its index */
+static GstFlowReturn
+gst_avi_demux_advance (GstAviDemux * avi, GstAviStream * stream,
+ GstFlowReturn ret)
+{
+ guint old_entry, new_entry;
+
+ old_entry = stream->current_entry;
+ /* move forwards */
+ new_entry = old_entry + 1;
+
+ /* see if we reached the end */
+ if (new_entry >= stream->stop_entry) {
+ if (avi->segment.rate < 0.0) {
+
+#ifdef AVIDEMUX_MODIFICATION
+ GST_DEBUG_OBJECT (avi, "backward reached stop %u", stream->stop_entry);
+ goto eos;
+#else
+ if (stream->step_entry == stream->start_entry) {
+ /* we stepped all the way to the start, eos */
+ GST_DEBUG_OBJECT (avi, "reverse reached start %u", stream->start_entry);
+ goto eos;
+ }
+ /* backwards, stop becomes step, find a new step */
+ stream->stop_entry = stream->step_entry;
+ stream->step_entry = gst_avi_demux_index_prev (avi, stream,
+ stream->stop_entry, TRUE);
+
+ GST_DEBUG_OBJECT (avi,
+ "reverse playback jump: start %u, step %u, stop %u",
+ stream->start_entry, stream->step_entry, stream->stop_entry);
+
+ /* and start from the previous keyframe now */
+ new_entry = stream->step_entry;
+#endif
+ } else {
+ /* EOS */
+ GST_DEBUG_OBJECT (avi, "forward reached stop %u", stream->stop_entry);
+ goto eos;
+ }
+ }
+
+ if (new_entry != old_entry) {
+ stream->current_entry = new_entry;
+ stream->current_total = stream->index[new_entry].total;
+
+ if (new_entry == old_entry + 1) {
+ GST_DEBUG_OBJECT (avi, "moved forwards from %u to %u",
+ old_entry, new_entry);
+ /* we simply moved one step forwards, reuse current info */
+ stream->current_timestamp = stream->current_ts_end;
+ stream->current_offset = stream->current_offset_end;
+ gst_avi_demux_get_buffer_info (avi, stream, new_entry,
+ NULL, &stream->current_ts_end, NULL, &stream->current_offset_end);
+ } else {
+ /* we moved DISCONT, full update */
+ gst_avi_demux_get_buffer_info (avi, stream, new_entry,
+ &stream->current_timestamp, &stream->current_ts_end,
+ &stream->current_offset, &stream->current_offset_end);
+ /* and MARK discont for this stream */
+ stream->last_flow = GST_FLOW_OK;
+ stream->discont = TRUE;
+ GST_DEBUG_OBJECT (avi, "Moved from %u to %u, ts %" GST_TIME_FORMAT
+ ", ts_end %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
+ ", off_end %" G_GUINT64_FORMAT, old_entry, new_entry,
+ GST_TIME_ARGS (stream->current_timestamp),
+ GST_TIME_ARGS (stream->current_ts_end), stream->current_offset,
+ stream->current_offset_end);
+ }
+ }
+ return ret;
+
+ /* ERROR */
+eos:
+ {
+ GST_DEBUG_OBJECT (avi, "we are EOS");
+ /* setting current_timestamp to -1 marks EOS */
+ stream->current_timestamp = -1;
+ return GST_FLOW_UNEXPECTED;
+ }
+}
+
+/* find the stream with the lowest current position when going forwards or with
+ * the highest position when going backwards, this is the stream
+ * we should push from next */
+static gint
+gst_avi_demux_find_next (GstAviDemux * avi, gfloat rate)
+{
+ guint64 min_time, max_time;
+ guint stream_num, i;
+
+ max_time = 0;
+ min_time = G_MAXUINT64;
+ stream_num = -1;
+
+ for (i = 0; i < avi->num_streams; i++) {
+ guint64 position;
+ GstAviStream *stream;
+
+ stream = &avi->stream[i];
+
+ /* ignore streams that finished */
+ if (stream->last_flow == GST_FLOW_UNEXPECTED)
+ continue;
+
+ position = stream->current_timestamp;
+
+ /* position of -1 is EOS */
+ if (position != -1) {
+#ifdef AVIDEMUX_MODIFICATION
+ if (position < min_time) {
+ min_time = position;
+ stream_num = i;
+ }
+#else
+ if (rate > 0.0 && position < min_time) {
+ min_time = position;
+ stream_num = i;
+ } else if (rate < 0.0 && position >= max_time) {
+ max_time = position;
+ stream_num = i;
+ }
+#endif
+ }
+ }
+ return stream_num;
+}
+
+static GstFlowReturn
+gst_avi_demux_loop_data (GstAviDemux * avi)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint stream_num;
+ GstAviStream *stream;
+ gboolean processed = FALSE;
+ GstBuffer *buf;
+ guint64 offset, size;
+ GstClockTime timestamp, duration;
+ guint64 out_offset, out_offset_end;
+ gboolean keyframe;
+ GstAviIndexEntry *entry;
+
+ do {
+ stream_num = gst_avi_demux_find_next (avi, avi->segment.rate);
+
+ /* all are EOS */
+ if (G_UNLIKELY (stream_num == -1)) {
+ GST_DEBUG_OBJECT (avi, "all streams are EOS");
+ goto eos;
+ }
+
+ /* we have the stream now */
+ stream = &avi->stream[stream_num];
+
+ /* skip streams without pads */
+ if (!stream->pad) {
+ GST_DEBUG_OBJECT (avi, "skipping entry from stream %d without pad",
+ stream_num);
+ goto next;
+ }
+
+ /* get the timing info for the entry */
+ timestamp = stream->current_timestamp;
+ duration = stream->current_ts_end - timestamp;
+ out_offset = stream->current_offset;
+ out_offset_end = stream->current_offset_end;
+
+ /* get the entry data info */
+ entry = &stream->index[stream->current_entry];
+ offset = entry->offset;
+ size = entry->size;
+ keyframe = ENTRY_IS_KEYFRAME (entry);
+
+
+#ifdef AVIDEMUX_MODIFICATION
+    /* Forward/backward trickplay adjustments for the video stream */
+    if (avi->segment.rate > 1.0 && stream->strh->type == GST_RIFF_FCC_vids) {
+      gst_avidemux_forward_trickplay (avi, stream, &timestamp);
+    } else if (avi->segment.rate < 0.0 && stream->strh->type == GST_RIFF_FCC_vids) {
+ gst_avidemux_backward_trickplay (avi, stream, &timestamp);
+ }
+#endif
+
+ /* skip empty entries */
+ if (size == 0) {
+ GST_DEBUG_OBJECT (avi, "Skipping entry %u (%" G_GUINT64_FORMAT ", %p)",
+ stream->current_entry, size, stream->pad);
+ goto next;
+ }
+
+ if (avi->segment.rate > 0.0) {
+      /* only check this for forwards playback for now */
+#ifdef AVIDEMUX_MODIFICATION
+ if (GST_CLOCK_TIME_IS_VALID (avi->segment.stop)
+ && (timestamp > avi->segment.stop)) {
+#else
+ if (keyframe && GST_CLOCK_TIME_IS_VALID (avi->segment.stop)
+ && (timestamp > avi->segment.stop)) {
+#endif
+ goto eos_stop;
+ }
+ }
+
+ GST_LOG ("reading buffer (size=%" G_GUINT64_FORMAT "), stream %d, pos %"
+ G_GUINT64_FORMAT " (0x%" G_GINT64_MODIFIER "x), kf %d", size,
+ stream_num, offset, offset, keyframe);
+
+ /* FIXME, check large chunks and cut them up */
+
+ /* pull in the data */
+ ret = gst_pad_pull_range (avi->sinkpad, offset, size, &buf);
+ if (ret != GST_FLOW_OK)
+ goto pull_failed;
+
+ /* check for short buffers, this is EOS as well */
+ if (GST_BUFFER_SIZE (buf) < size)
+ goto short_buffer;
+
+ /* invert the picture if needed */
+ buf = gst_avi_demux_invert (stream, buf);
+
+ /* mark non-keyframes */
+ if (keyframe)
+ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+ else
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ GST_BUFFER_TIMESTAMP (buf) = timestamp;
+ GST_BUFFER_DURATION (buf) = duration;
+ GST_BUFFER_OFFSET (buf) = out_offset;
+ GST_BUFFER_OFFSET_END (buf) = out_offset_end;
+
+ /* mark discont when pending */
+ if (stream->discont) {
+ GST_DEBUG_OBJECT (avi, "setting DISCONT flag");
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+ stream->discont = FALSE;
+ }
+
+ gst_avi_demux_add_assoc (avi, stream, timestamp, offset, keyframe);
+
+ gst_buffer_set_caps (buf, GST_PAD_CAPS (stream->pad));
+
+ /* update current position in the segment */
+ gst_segment_set_last_stop (&avi->segment, GST_FORMAT_TIME, timestamp);
+#ifdef AVIDEMUX_MODIFICATION
+ GST_DEBUG_OBJECT (avi, " %s : Pushing buffer of size %u, ts %"
+ GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
+ ", off_end %" G_GUINT64_FORMAT,
+ stream_num ? "Audio" : "Video", GST_BUFFER_SIZE (buf), GST_TIME_ARGS (timestamp),
+ GST_TIME_ARGS (duration), out_offset, out_offset_end);
+#else
+ GST_DEBUG_OBJECT (avi, "Pushing buffer of size %u, ts %"
+ GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
+ ", off_end %" G_GUINT64_FORMAT,
+ GST_BUFFER_SIZE (buf), GST_TIME_ARGS (timestamp),
+ GST_TIME_ARGS (duration), out_offset, out_offset_end);
+#endif
+
+#ifdef DIVX_DRM
+
+#define CHUNK_ID_LEN 4
+#define CHUNK_SIZE_LEN 4
+#define DD_CHUNK_DATA_LEN 10
+#define DD_CHUNK_TOTAL_LEN (CHUNK_ID_LEN + CHUNK_SIZE_LEN + DD_CHUNK_DATA_LEN)
+
+    if (avi->drmContext) {
+      /* this is a DRM-protected (DivX DRM) file */
+ GstBuffer* encrypted_buf = NULL;
+
+ if (stream->strh->type == GST_RIFF_FCC_auds) { /* Audio Stream */
+ if (DRM_SUCCESS == avi->divx_decrypt_audio (avi->drmContext, GST_BUFFER_DATA(buf), GST_BUFFER_SIZE(buf))) {
+ GST_DEBUG_OBJECT (avi, "drmDecryptAudio() Success : buffer = %d", GST_BUFFER_SIZE(buf));
+ } else {
+ GST_ERROR_OBJECT (avi, "drmDecryptAudio () Failed : buffer = %d", GST_BUFFER_SIZE(buf));
+ }
+ ret = gst_pad_push (stream->pad, buf);
+
+ } else if (stream->strh->type == GST_RIFF_FCC_vids) { /* Video Stream */
+
+ /* Read previous dd chunk */
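+        /* offset points at the video chunk's data; step back over its 8-byte
+         * chunk header plus the DD chunk (4-byte id + 4-byte size + 10 data
+         * bytes) that is assumed to precede it */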
+ GstBuffer* dd_chunk_buf = NULL;
+ if (GST_FLOW_OK != gst_pad_pull_range (avi->sinkpad,
+ offset-(CHUNK_ID_LEN+CHUNK_SIZE_LEN+DD_CHUNK_TOTAL_LEN),
+ DD_CHUNK_TOTAL_LEN, &dd_chunk_buf)) {
+ GST_ERROR_OBJECT (avi, "pull range failed");
+ } else {
+ guint8 tempBuffer[256] = { 0, };
+ guint32 tempBufferLength = 0;
+ int ret;
+
+ ret = avi->divx_prepare_video_bitstream (avi->drmContext,
+ GST_BUFFER_DATA(dd_chunk_buf)+(CHUNK_ID_LEN+CHUNK_SIZE_LEN),
+ DD_CHUNK_DATA_LEN,
+ tempBuffer,
+ &tempBufferLength );
+
+ if (ret == DRM_SUCCESS) {
+          /* Create a new buffer and copy the retrieved tempBuffer followed by the original buffer into it */
+ encrypted_buf = gst_buffer_new_and_alloc (GST_BUFFER_SIZE(buf)+tempBufferLength);
+ if (encrypted_buf) {
+            /* FIXME: the buffer merging code could be improved */
+ memcpy (GST_BUFFER_DATA(encrypted_buf), tempBuffer, tempBufferLength);
+ memcpy (GST_BUFFER_DATA(encrypted_buf)+tempBufferLength, GST_BUFFER_DATA(buf), GST_BUFFER_SIZE(buf));
+ gst_buffer_copy_metadata (encrypted_buf, buf, GST_BUFFER_COPY_ALL);
+
+            /* release buf because we will push encrypted_buf instead of buf */
+ gst_buffer_unref (buf);
+ } else {
+            GST_ERROR_OBJECT (avi, "gst_buffer_new_and_alloc() failed");
+ }
+ } else {
+          GST_ERROR_OBJECT (avi, "divx_prepare_video_bitstream failed, ret = [%d]", ret);
+ }
+ }
+
+ /* Release DD-chunk Buffer */
+ if (dd_chunk_buf)
+ gst_buffer_unref (dd_chunk_buf);
+
+      /* Push encrypted_buf if it is valid, otherwise push the original buffer */
+ if (encrypted_buf)
+ ret = gst_pad_push (stream->pad, encrypted_buf);
+ else
+ ret = gst_pad_push (stream->pad, buf);
+ }
+ } else {
+      /* This is a normal (non-DRM) file */
+ ret = gst_pad_push (stream->pad, buf);
+ }
+#else
+ ret = gst_pad_push (stream->pad, buf);
+#endif /* DIVX_DRM */
+
+ /* mark as processed, we increment the frame and byte counters then
+ * leave the while loop and return the GstFlowReturn */
+ processed = TRUE;
+
+ if (avi->segment.rate < 0) {
+ if (timestamp > avi->segment.stop && ret == GST_FLOW_UNEXPECTED) {
+ /* In reverse playback we can get a GST_FLOW_UNEXPECTED when
+ * we are at the end of the segment, so we just need to jump
+ * back to the previous section. */
+ GST_DEBUG_OBJECT (avi, "downstream has reached end of segment");
+ ret = GST_FLOW_OK;
+ }
+ }
+ next:
+ /* move to next item */
+ ret = gst_avi_demux_advance (avi, stream, ret);
+
+ /* combine flows */
+ ret = gst_avi_demux_combine_flows (avi, stream, ret);
+ } while (!processed);
+
+beach:
+ return ret;
+
+ /* special cases */
+eos:
+ {
+ GST_DEBUG_OBJECT (avi, "No samples left for any streams - EOS");
+ ret = GST_FLOW_UNEXPECTED;
+ goto beach;
+ }
+eos_stop:
+ {
+ GST_LOG_OBJECT (avi, "Found keyframe after segment,"
+ " setting EOS (%" GST_TIME_FORMAT " > %" GST_TIME_FORMAT ")",
+ GST_TIME_ARGS (timestamp), GST_TIME_ARGS (avi->segment.stop));
+ ret = GST_FLOW_UNEXPECTED;
+ /* move to next stream */
+ goto next;
+ }
+pull_failed:
+ {
+ GST_DEBUG_OBJECT (avi, "pull range failed: pos=%" G_GUINT64_FORMAT
+ " size=%" G_GUINT64_FORMAT, offset, size);
+ goto beach;
+ }
+short_buffer:
+ {
+ GST_WARNING_OBJECT (avi, "Short read at offset %" G_GUINT64_FORMAT
+ ", only got %d/%" G_GUINT64_FORMAT " bytes (truncated file?)", offset,
+ GST_BUFFER_SIZE (buf), size);
+ gst_buffer_unref (buf);
+ ret = GST_FLOW_UNEXPECTED;
+ goto beach;
+ }
+}
+
+/*
+ * Read data. If we have an index it delegates to
+ * gst_avi_demux_process_next_entry().
+ */
+static GstFlowReturn
+gst_avi_demux_stream_data (GstAviDemux * avi)
+{
+ guint32 tag = 0;
+ guint32 size = 0;
+ gint stream_nr = 0;
+ GstFlowReturn res = GST_FLOW_OK;
+ GstFormat format = GST_FORMAT_TIME;
+
+ if (G_UNLIKELY (avi->have_eos)) {
+ /* Clean adapter, we're done */
+ gst_adapter_clear (avi->adapter);
+ return GST_FLOW_UNEXPECTED;
+ }
+
+ if (G_UNLIKELY (avi->todrop)) {
+ guint drop;
+
+ if ((drop = gst_adapter_available (avi->adapter))) {
+ if (drop > avi->todrop)
+ drop = avi->todrop;
+ GST_DEBUG_OBJECT (avi, "Dropping %d bytes", drop);
+ gst_adapter_flush (avi->adapter, drop);
+ avi->todrop -= drop;
+ avi->offset += drop;
+ }
+ }
+
+  /* Iterate until we need more data, so the adapter won't grow too much */
+ while (1) {
+ if (G_UNLIKELY (!gst_avi_demux_peek_chunk_info (avi, &tag, &size))) {
+ return GST_FLOW_OK;
+ }
+
+ GST_DEBUG ("Trying chunk (%" GST_FOURCC_FORMAT "), size %d",
+ GST_FOURCC_ARGS (tag), size);
+
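+    /* AVI stream data chunks are named with two leading ASCII decimal digits
+     * giving the stream number, e.g. '00dc' for video or '01wb' for audio */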
+ if (G_LIKELY ((tag & 0xff) >= '0' && (tag & 0xff) <= '9' &&
+ ((tag >> 8) & 0xff) >= '0' && ((tag >> 8) & 0xff) <= '9')) {
+ GST_LOG ("Chunk ok");
+ } else if ((tag & 0xffff) == (('x' << 8) | 'i')) {
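+      /* OpenDML sub-index chunks are named 'ix##' */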
+ GST_DEBUG ("Found sub-index tag");
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size) || size == 0) {
+ /* accept 0 size buffer here */
+ avi->abort_buffering = FALSE;
+ GST_DEBUG (" skipping %d bytes for now", size);
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ }
+ return GST_FLOW_OK;
+ } else if (tag == GST_RIFF_TAG_RIFF) {
+ /* RIFF tags can appear in ODML files, just jump over them */
+ if (gst_adapter_available (avi->adapter) >= 12) {
+ GST_DEBUG ("Found RIFF tag, skipping RIFF header");
+ gst_adapter_flush (avi->adapter, 12);
+ continue;
+ }
+ return GST_FLOW_OK;
+ } else if (tag == GST_RIFF_TAG_idx1) {
+ GST_DEBUG ("Found index tag");
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size) || size == 0) {
+ /* accept 0 size buffer here */
+ avi->abort_buffering = FALSE;
+ GST_DEBUG (" skipping %d bytes for now", size);
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ }
+ return GST_FLOW_OK;
+ } else if (tag == GST_RIFF_TAG_LIST) {
+ /* movi chunks might be grouped in rec list */
+ if (gst_adapter_available (avi->adapter) >= 12) {
+ GST_DEBUG ("Found LIST tag, skipping LIST header");
+ gst_adapter_flush (avi->adapter, 12);
+ continue;
+ }
+ return GST_FLOW_OK;
+ } else if (tag == GST_RIFF_TAG_JUNK || tag == GST_RIFF_TAG_JUNQ) {
+ /* rec list might contain JUNK chunks */
+ GST_DEBUG ("Found JUNK tag");
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size) || size == 0) {
+ /* accept 0 size buffer here */
+ avi->abort_buffering = FALSE;
+ GST_DEBUG (" skipping %d bytes for now", size);
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ }
+ return GST_FLOW_OK;
+ } else {
+ GST_DEBUG ("No more stream chunks, send EOS");
+ avi->have_eos = TRUE;
+ return GST_FLOW_UNEXPECTED;
+ }
+
+ if (G_UNLIKELY (!gst_avi_demux_peek_chunk (avi, &tag, &size))) {
+ /* supposedly one hopes to catch a nicer chunk later on ... */
+ /* FIXME ?? give up here rather than possibly ending up going
+ * through the whole file */
+ if (avi->abort_buffering) {
+ avi->abort_buffering = FALSE;
+ if (size) {
+ gst_adapter_flush (avi->adapter, 8);
+ return GST_FLOW_OK;
+ }
+ } else {
+ return GST_FLOW_OK;
+ }
+ }
+ GST_DEBUG ("chunk ID %" GST_FOURCC_FORMAT ", size %u",
+ GST_FOURCC_ARGS (tag), size);
+
+ stream_nr = CHUNKID_TO_STREAMNR (tag);
+
+ if (G_UNLIKELY (stream_nr < 0 || stream_nr >= avi->num_streams)) {
+ /* recoverable */
+ GST_WARNING ("Invalid stream ID %d (%" GST_FOURCC_FORMAT ")",
+ stream_nr, GST_FOURCC_ARGS (tag));
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ } else {
+ GstAviStream *stream;
+ GstClockTime next_ts = 0;
+ GstBuffer *buf = NULL;
+ guint64 offset;
+ gboolean saw_desired_kf = stream_nr != avi->main_stream
+ || avi->offset >= avi->seek_kf_offset;
+
+ if (stream_nr == avi->main_stream && avi->offset == avi->seek_kf_offset) {
+ GST_DEBUG_OBJECT (avi, "Desired keyframe reached");
+ avi->seek_kf_offset = 0;
+ }
+
+ if (saw_desired_kf) {
+ gst_adapter_flush (avi->adapter, 8);
+ /* get buffer */
+ if (size) {
+ buf = gst_adapter_take_buffer (avi->adapter, GST_ROUND_UP_2 (size));
+ /* patch the size */
+ GST_BUFFER_SIZE (buf) = size;
+ } else {
+ buf = NULL;
+ }
+ } else {
+ GST_DEBUG_OBJECT (avi,
+ "Desired keyframe not yet reached, flushing chunk");
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ }
+
+ offset = avi->offset;
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+
+ stream = &avi->stream[stream_nr];
+
+ /* set delay (if any)
+ if (stream->strh->init_frames == stream->current_frame &&
+ stream->delay == 0)
+ stream->delay = next_ts;
+ */
+
+ /* parsing of corresponding header may have failed */
+ if (G_UNLIKELY (!stream->pad)) {
+ GST_WARNING_OBJECT (avi, "no pad for stream ID %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ if (buf)
+ gst_buffer_unref (buf);
+ } else {
+ /* get time of this buffer */
+ gst_pad_query_position (stream->pad, &format, (gint64 *) & next_ts);
+ if (G_UNLIKELY (format != GST_FORMAT_TIME))
+ goto wrong_format;
+
+ gst_avi_demux_add_assoc (avi, stream, next_ts, offset, FALSE);
+
+ /* increment our positions */
+ stream->current_entry++;
+ stream->current_total += size;
+
+ /* update current position in the segment */
+ gst_segment_set_last_stop (&avi->segment, GST_FORMAT_TIME, next_ts);
+
+ if (saw_desired_kf && buf) {
+ GstClockTime dur_ts = 0;
+
+ /* invert the picture if needed */
+ buf = gst_avi_demux_invert (stream, buf);
+
+ gst_pad_query_position (stream->pad, &format, (gint64 *) & dur_ts);
+ if (G_UNLIKELY (format != GST_FORMAT_TIME))
+ goto wrong_format;
+
+ GST_BUFFER_TIMESTAMP (buf) = next_ts;
+ GST_BUFFER_DURATION (buf) = dur_ts - next_ts;
+ if (stream->strh->type == GST_RIFF_FCC_vids) {
+ GST_BUFFER_OFFSET (buf) = stream->current_entry - 1;
+ GST_BUFFER_OFFSET_END (buf) = stream->current_entry;
+ } else {
+ GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
+ GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
+ }
+
+ gst_buffer_set_caps (buf, GST_PAD_CAPS (stream->pad));
+ GST_DEBUG_OBJECT (avi,
+ "Pushing buffer with time=%" GST_TIME_FORMAT ", duration %"
+ GST_TIME_FORMAT ", offset %" G_GUINT64_FORMAT
+ " and size %d over pad %s", GST_TIME_ARGS (next_ts),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
+ GST_BUFFER_OFFSET (buf), size, GST_PAD_NAME (stream->pad));
+
+ /* mark discont when pending */
+ if (G_UNLIKELY (stream->discont)) {
+ GST_DEBUG_OBJECT (avi, "Setting DISCONT");
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+ stream->discont = FALSE;
+ }
+ res = gst_pad_push (stream->pad, buf);
+ buf = NULL;
+
+ /* combine flows */
+ res = gst_avi_demux_combine_flows (avi, stream, res);
+ if (G_UNLIKELY (res != GST_FLOW_OK)) {
+ GST_DEBUG ("Push failed; %s", gst_flow_get_name (res));
+ return res;
+ }
+ }
+ }
+ }
+ }
+
+done:
+ return res;
+
+ /* ERRORS */
+wrong_format:
+ {
+ GST_DEBUG_OBJECT (avi, "format %s != GST_FORMAT_TIME",
+ gst_format_get_name (format));
+ res = GST_FLOW_ERROR;
+ goto done;
+ }
+}
+
+/*
+ * Send pending tags.
+ */
+static void
+push_tag_lists (GstAviDemux * avi)
+{
+ guint i;
+ GstTagList *tags;
+
+ if (!avi->got_tags)
+ return;
+
+ GST_DEBUG_OBJECT (avi, "Pushing pending tag lists");
+
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *stream = &avi->stream[i];
+ GstPad *pad = stream->pad;
+
+ tags = stream->taglist;
+
+ if (pad && tags) {
+ GST_DEBUG_OBJECT (pad, "Tags: %" GST_PTR_FORMAT, tags);
+
+ gst_element_found_tags_for_pad (GST_ELEMENT_CAST (avi), pad, tags);
+ stream->taglist = NULL;
+ }
+ }
+
+ if (!(tags = avi->globaltags))
+ tags = gst_tag_list_new ();
+
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_CONTAINER_FORMAT, "AVI", NULL);
+
+ GST_DEBUG_OBJECT (avi, "Global tags: %" GST_PTR_FORMAT, tags);
+ gst_element_found_tags (GST_ELEMENT_CAST (avi), tags);
+ avi->globaltags = NULL;
+ avi->got_tags = FALSE;
+}
+
+static void
+gst_avi_demux_loop (GstPad * pad)
+{
+ GstFlowReturn res;
+ GstAviDemux *avi = GST_AVI_DEMUX (GST_PAD_PARENT (pad));
+
+ switch (avi->state) {
+ case GST_AVI_DEMUX_START:
+ res = gst_avi_demux_stream_init_pull (avi);
+ if (G_UNLIKELY (res != GST_FLOW_OK)) {
+ GST_WARNING ("stream_init flow: %s", gst_flow_get_name (res));
+ goto pause;
+ }
+ avi->state = GST_AVI_DEMUX_HEADER;
+ /* fall-through */
+ case GST_AVI_DEMUX_HEADER:
+ res = gst_avi_demux_stream_header_pull (avi);
+ if (G_UNLIKELY (res != GST_FLOW_OK)) {
+ GST_WARNING ("stream_header flow: %s", gst_flow_get_name (res));
+ goto pause;
+ }
+#ifdef DIVX_DRM
+      /* Send a tag to the decoder so it knows this is a DivX DRM file */
+ if (avi->drmContext)
+ gst_avi_demux_send_divx_tag (avi);
+#endif
+
+ avi->state = GST_AVI_DEMUX_MOVI;
+ break;
+ case GST_AVI_DEMUX_MOVI:
+ if (G_UNLIKELY (avi->close_seg_event)) {
+ gst_avi_demux_push_event (avi, avi->close_seg_event);
+ avi->close_seg_event = NULL;
+ }
+ if (G_UNLIKELY (avi->seg_event)) {
+ gst_avi_demux_push_event (avi, avi->seg_event);
+ avi->seg_event = NULL;
+ }
+ if (G_UNLIKELY (avi->got_tags)) {
+ push_tag_lists (avi);
+ }
+ /* process each index entry in turn */
+ res = gst_avi_demux_loop_data (avi);
+
+#ifdef AVIDEMUX_MODIFICATION
+ if (avi->segment.rate < 0.0 && res == GST_FLOW_UNEXPECTED) {
+ GST_DEBUG_OBJECT(avi, "Seeking to previous keyframe");
+ res = gst_avidemux_seek_to_previous_keyframe (avi);
+ }
+#endif
+
+ /* pause when error */
+ if (G_UNLIKELY (res != GST_FLOW_OK)) {
+ GST_INFO ("stream_movi flow: %s", gst_flow_get_name (res));
+ goto pause;
+ }
+ break;
+ default:
+ GST_ERROR_OBJECT (avi, "unknown state %d", avi->state);
+ res = GST_FLOW_ERROR;
+ goto pause;
+ }
+
+ return;
+
+ /* ERRORS */
+pause:{
+
+ gboolean push_eos = FALSE;
+ GST_LOG_OBJECT (avi, "pausing task, reason %s", gst_flow_get_name (res));
+ avi->segment_running = FALSE;
+ gst_pad_pause_task (avi->sinkpad);
+
+
+ if (res == GST_FLOW_UNEXPECTED) {
+ /* handle end-of-stream/segment */
+ if (avi->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ gint64 stop;
+
+ if ((stop = avi->segment.stop) == -1)
+ stop = avi->segment.duration;
+
+ GST_INFO_OBJECT (avi, "sending segment_done");
+
+#ifdef AVIDEMUX_MODIFICATION
+ if (avi->segment.rate >= 0) {
+ /* Sending segment done at the end of segment */
+ gst_element_post_message(GST_ELEMENT_CAST (avi),
+ gst_message_new_segment_done (GST_OBJECT_CAST (avi), GST_FORMAT_TIME, stop));
+ } else {
+ /* Sending segment done at the start of segment */
+ gst_element_post_message(GST_ELEMENT_CAST (avi),
+ gst_message_new_segment_done (GST_OBJECT_CAST (avi), GST_FORMAT_TIME, avi->segment.start));
+ }
+#else
+ gst_element_post_message
+ (GST_ELEMENT_CAST (avi),
+ gst_message_new_segment_done (GST_OBJECT_CAST (avi),
+ GST_FORMAT_TIME, stop));
+#endif
+ } else {
+ push_eos = TRUE;
+ }
+ } else if (res == GST_FLOW_NOT_LINKED || res < GST_FLOW_UNEXPECTED) {
+ /* for fatal errors we post an error message, wrong-state is
+ * not fatal because it happens due to flushes and only means
+ * that we should stop now. */
+ GST_ELEMENT_ERROR (avi, STREAM, FAILED,
+ (_("Internal data stream error.")),
+ ("streaming stopped, reason %s", gst_flow_get_name (res)));
+ push_eos = TRUE;
+ }
+ if (push_eos) {
+ GST_INFO_OBJECT (avi, "sending eos");
+ if (!gst_avi_demux_push_event (avi, gst_event_new_eos ()) &&
+ (res == GST_FLOW_UNEXPECTED)) {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX,
+ (NULL), ("got eos but no streams (yet)"));
+ }
+ }
+ }
+}
+
+
+static GstFlowReturn
+gst_avi_demux_chain (GstPad * pad, GstBuffer * buf)
+{
+ GstFlowReturn res;
+ GstAviDemux *avi = GST_AVI_DEMUX (GST_PAD_PARENT (pad));
+ gint i;
+
+ if (GST_BUFFER_IS_DISCONT (buf)) {
+ GST_DEBUG_OBJECT (avi, "got DISCONT");
+ gst_adapter_clear (avi->adapter);
+ /* mark all streams DISCONT */
+ for (i = 0; i < avi->num_streams; i++)
+ avi->stream[i].discont = TRUE;
+ }
+
+ GST_DEBUG ("Store %d bytes in adapter", GST_BUFFER_SIZE (buf));
+ gst_adapter_push (avi->adapter, buf);
+
+ switch (avi->state) {
+ case GST_AVI_DEMUX_START:
+ if ((res = gst_avi_demux_stream_init_push (avi)) != GST_FLOW_OK) {
+ GST_WARNING ("stream_init flow: %s", gst_flow_get_name (res));
+ break;
+ }
+ break;
+ case GST_AVI_DEMUX_HEADER:
+ if ((res = gst_avi_demux_stream_header_push (avi)) != GST_FLOW_OK) {
+ GST_WARNING ("stream_header flow: %s", gst_flow_get_name (res));
+ break;
+ }
+ break;
+ case GST_AVI_DEMUX_MOVI:
+ if (G_UNLIKELY (avi->close_seg_event)) {
+ gst_avi_demux_push_event (avi, avi->close_seg_event);
+ avi->close_seg_event = NULL;
+ }
+ if (G_UNLIKELY (avi->seg_event)) {
+ gst_avi_demux_push_event (avi, avi->seg_event);
+ avi->seg_event = NULL;
+ }
+ if (G_UNLIKELY (avi->got_tags)) {
+ push_tag_lists (avi);
+ }
+ res = gst_avi_demux_stream_data (avi);
+ break;
+ case GST_AVI_DEMUX_SEEK:
+ {
+ GstEvent *event;
+
+ res = GST_FLOW_OK;
+
+ /* obtain and parse indexes */
+ if (avi->stream[0].indexes && !gst_avi_demux_read_subindexes_push (avi))
+ /* seek in subindex read function failed */
+ goto index_failed;
+
+ if (!avi->stream[0].indexes && !avi->have_index
+ && avi->avih->flags & GST_RIFF_AVIH_HASINDEX)
+ gst_avi_demux_stream_index_push (avi);
+
+ if (avi->have_index) {
+ /* use the indexes now to construct nice durations */
+ gst_avi_demux_calculate_durations_from_index (avi);
+ } else {
+ /* still parsing indexes */
+ break;
+ }
+
+ GST_OBJECT_LOCK (avi);
+ event = avi->seek_event;
+ avi->seek_event = NULL;
+ GST_OBJECT_UNLOCK (avi);
+
+ /* calculate and perform seek */
+ if (!avi_demux_handle_seek_push (avi, avi->sinkpad, event))
+ goto seek_failed;
+
+ gst_event_unref (event);
+ avi->state = GST_AVI_DEMUX_MOVI;
+ break;
+ }
+ default:
+ GST_ELEMENT_ERROR (avi, STREAM, FAILED, (NULL),
+ ("Illegal internal state"));
+ res = GST_FLOW_ERROR;
+ break;
+ }
+
+ GST_DEBUG_OBJECT (avi, "state: %d res:%s", avi->state,
+ gst_flow_get_name (res));
+
+ if (G_UNLIKELY (avi->abort_buffering))
+ goto abort_buffering;
+
+ return res;
+
+ /* ERRORS */
+index_failed:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("failed to read indexes"));
+ return GST_FLOW_ERROR;
+ }
+seek_failed:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("push mode seek failed"));
+ return GST_FLOW_ERROR;
+ }
+abort_buffering:
+ {
+ avi->abort_buffering = FALSE;
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("unhandled buffer size"));
+ return GST_FLOW_ERROR;
+ }
+}
+
+static gboolean
+gst_avi_demux_sink_activate (GstPad * sinkpad)
+{
+ if (gst_pad_check_pull_range (sinkpad)) {
+ GST_DEBUG ("going to pull mode");
+ return gst_pad_activate_pull (sinkpad, TRUE);
+ } else {
+ GST_DEBUG ("going to push (streaming) mode");
+ return gst_pad_activate_push (sinkpad, TRUE);
+ }
+}
+
+static gboolean
+gst_avi_demux_sink_activate_pull (GstPad * sinkpad, gboolean active)
+{
+ GstAviDemux *avi = GST_AVI_DEMUX (GST_OBJECT_PARENT (sinkpad));
+
+ if (active) {
+ avi->segment_running = TRUE;
+ avi->streaming = FALSE;
+ return gst_pad_start_task (sinkpad, (GstTaskFunction) gst_avi_demux_loop,
+ sinkpad);
+ } else {
+ avi->segment_running = FALSE;
+ return gst_pad_stop_task (sinkpad);
+ }
+}
+
+static gboolean
+gst_avi_demux_activate_push (GstPad * pad, gboolean active)
+{
+ GstAviDemux *avi = GST_AVI_DEMUX (GST_OBJECT_PARENT (pad));
+
+ if (active) {
+ GST_DEBUG ("avi: activating push/chain function");
+ avi->streaming = TRUE;
+#if 0
+ /* create index for some push based seeking if not provided */
+ GST_OBJECT_LOCK (avi);
+ if (!avi->element_index) {
+ GST_DEBUG_OBJECT (avi, "creating index");
+ avi->element_index = gst_index_factory_make ("memindex");
+ }
+ GST_OBJECT_UNLOCK (avi);
+ /* object lock might be taken again */
+ gst_index_get_writer_id (avi->element_index, GST_OBJECT_CAST (avi),
+ &avi->index_id);
+#endif
+ } else {
+ GST_DEBUG ("avi: deactivating push/chain function");
+ }
+
+ return TRUE;
+}
+
+static void
+gst_avi_demux_set_index (GstElement * element, GstIndex * index)
+{
+ GstAviDemux *avi = GST_AVI_DEMUX (element);
+
+ GST_OBJECT_LOCK (avi);
+ if (avi->element_index)
+ gst_object_unref (avi->element_index);
+ if (index) {
+ avi->element_index = gst_object_ref (index);
+ } else {
+ avi->element_index = NULL;
+ }
+ GST_OBJECT_UNLOCK (avi);
+ /* object lock might be taken again */
+ if (index)
+ gst_index_get_writer_id (index, GST_OBJECT_CAST (element), &avi->index_id);
+ GST_DEBUG_OBJECT (avi, "Set index %" GST_PTR_FORMAT, avi->element_index);
+}
+
+static GstIndex *
+gst_avi_demux_get_index (GstElement * element)
+{
+ GstIndex *result = NULL;
+ GstAviDemux *avi = GST_AVI_DEMUX (element);
+
+ GST_OBJECT_LOCK (avi);
+ if (avi->element_index)
+ result = gst_object_ref (avi->element_index);
+ GST_OBJECT_UNLOCK (avi);
+
+ GST_DEBUG_OBJECT (avi, "Returning index %" GST_PTR_FORMAT, result);
+
+ return result;
+}
+
+static GstStateChangeReturn
+gst_avi_demux_change_state (GstElement * element, GstStateChange transition)
+{
+ GstStateChangeReturn ret;
+ GstAviDemux *avi = GST_AVI_DEMUX (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ avi->streaming = FALSE;
+ gst_segment_init (&avi->segment, GST_FORMAT_TIME);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ goto done;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ avi->have_index = FALSE;
+ gst_avi_demux_reset (avi);
+ break;
+ default:
+ break;
+ }
+
+done:
+ return ret;
+}
+#ifdef AVIDEMUX_MODIFICATION
+/* Modification: helper added to determine the frame type (key/non-key) for index-table generation */
+
+static int
+gst_avi_demux_find_frame_type (GstAviStream *stream, GstBuffer *buf, int *frame_type)
+{
+ unsigned char *buff = NULL;
+ unsigned int buff_len = 0;
+
+ if (!stream || !buf || !frame_type) {
+ GST_ERROR ("Invalid arguments..");
+ return -1;
+ }
+
+ buff = GST_BUFFER_DATA (buf);
+ buff_len = GST_BUFFER_SIZE (buf);
+
+ if ((NULL == buff) || buff_len < 5) {
+ GST_ERROR ("Invalid buffer...");
+ return -1;
+ }
+
+ switch (stream->strh->fcc_handler) {
+ /* mpeg stream parsing case */
+ case GST_MAKE_FOURCC ('X', 'V', 'I', 'D'):
+ case GST_MAKE_FOURCC ('x', 'v', 'i', 'd'):
+ case GST_MAKE_FOURCC ('D', 'X', '5', '0'):
+ case GST_MAKE_FOURCC ('d', 'i', 'v', 'x'):
+ case GST_MAKE_FOURCC ('D', 'I', 'V', 'X'):
+ case GST_MAKE_FOURCC ('B', 'L', 'Z', '0'):
+ case GST_MAKE_FOURCC ('F', 'M', 'P', '4'):
+ case GST_MAKE_FOURCC ('U', 'M', 'P', '4'):
+ case GST_MAKE_FOURCC ('F', 'F', 'D', 'S'):
+ case GST_MAKE_FOURCC ('M', 'P', 'E', 'G'):
+ case GST_MAKE_FOURCC ('M', 'P', 'G', 'I'):
+ case GST_MAKE_FOURCC ('m', 'p', 'g', '1'):
+ case GST_MAKE_FOURCC ('M', 'P', 'G', '1'):
+ case GST_MAKE_FOURCC ('P', 'I', 'M', '1'):
+ case GST_MAKE_FOURCC ('M', 'P', 'G', '2'):
+ case GST_MAKE_FOURCC ('m', 'p', 'g', '2'):
+ case GST_MAKE_FOURCC ('P', 'I', 'M', '2'):
+ case GST_MAKE_FOURCC ('D', 'V', 'R', ' '): {
+ int idx = 0;
+ gboolean found_vop_code = FALSE;
+
+ for (idx=0; idx< (buff_len-4); idx++) {
+        /* Find the VOP start code (0x000001B6), which should be present in every frame */
+ if (buff[idx] == 0x00 && buff[idx+1] == 0x00 && buff[idx+2] == 0x01 && buff[idx+3] == 0xB6) {
+ GST_DEBUG ("Found VOP start code...");
+ found_vop_code = TRUE;
+ break;
+ }
+ }
+
+ if (!found_vop_code) {
+        GST_ERROR ("Invalid input stream: no VOP start code found");
+ return -1;
+ }
+
+ if ((buff[idx] == 0x00) && (buff[idx+1] == 0x00) && (buff[idx+2] == 0x01)) {
+ if(buff[idx+3] == 0xB6) {
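+          /* vop_coding_type is the two most significant bits of the byte
+           * following the VOP start code: 00 = I-VOP (keyframe) */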
+ switch (buff[idx+4] & 0xC0) {
+ case 0x00:
+ GST_DEBUG ("Found Key-Frame");
+ *frame_type = GST_AVI_KEYFRAME;
+ break;
+ default:
+ GST_DEBUG ("Found Non-Key frame.. value = %x", buff[idx+4]);
+ *frame_type = GST_AVI_NON_KEYFRAME;
+ break;
+ }
+ }
+ }
+ }
+ break;
+ case GST_MAKE_FOURCC ('H', '2', '6', '3'):
+ case GST_MAKE_FOURCC ('h', '2', '6', '3'):
+ case GST_MAKE_FOURCC ('i', '2', '6', '3'):
+ case GST_MAKE_FOURCC ('U', '2', '6', '3'):
+ case GST_MAKE_FOURCC ('v', 'i', 'v', '1'):
+ case GST_MAKE_FOURCC ('T', '2', '6', '3'): {
+      /* FIXME: H.263 frame parsing is yet to be done. */
+ *frame_type = GST_AVI_KEYFRAME;
+ }
+ break;
+ case GST_MAKE_FOURCC ('X', '2', '6', '4'):
+ case GST_MAKE_FOURCC ('x', '2', '6', '4'):
+ case GST_MAKE_FOURCC ('H', '2', '6', '4'):
+ case GST_MAKE_FOURCC ('h', '2', '6', '4'):
+ case GST_MAKE_FOURCC ('a', 'v', 'c', '1'):
+ case GST_MAKE_FOURCC ('A', 'V', 'C', '1'): {
+ gint idx = 0;
+ gint nalu_type = H264_NUT_UNKNOWN;
+
+ /* H264 Frame Parsing */
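+      /* Scan for Annex B start codes (00 00 01 or 00 00 00 01); the NAL unit
+       * type is the low 5 bits of the byte that follows, and an IDR slice
+       * marks a keyframe */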
+ do {
+ if (buff[idx+0] == 0x00 &&
+ buff[idx+1] == 0x00 &&
+ ((buff [idx+2] == 0x01) || ((buff [idx+2] == 0x00) && (buff [idx+3] == 0x01)))) {
+
+ if (buff [idx+2] == 0x01) {
+ nalu_type = buff[idx +3] & 0x1f;
+ } else if ((buff [idx+2] == 0x00) && (buff [idx+3] == 0x01)) {
+ nalu_type = buff[idx +4] & 0x1f;
+ idx++;
+ }
+
+ if ((nalu_type == H264_NUT_SPS) ||
+ (nalu_type == H264_NUT_PPS) ||
+ (nalu_type == H264_NUT_SEI) ||
+ (nalu_type == H264_NUT_AUD)) {
+ GST_LOG ("Skipping NALU SPS/PPS/SEI/AUD...");
+ } else if (nalu_type == H264_NUT_IDR) {
+ GST_DEBUG ("Found KEY frame...\n");
+ *frame_type = GST_AVI_KEYFRAME;
+ break;
+ } else if ((nalu_type == H264_NUT_SLICE) ||
+ (nalu_type == H264_NUT_DPA) ||
+ (nalu_type == H264_NUT_DPB) ||
+ (nalu_type == H264_NUT_DPC) ||
+ (nalu_type == H264_NUT_EOSEQ) ||
+ (nalu_type == H264_NUT_EOSTREAM)) {
+ *frame_type = GST_AVI_NON_KEYFRAME;
+ break;
+ } else {
+            GST_DEBUG ("Unknown NALU type %d, marking as non-keyframe...", nalu_type);
+ *frame_type = GST_AVI_NON_KEYFRAME;
+ break;
+ }
+ }
+ idx++;
+ }while (idx < (buff_len - 4));
+ }
+ break;
+ default:
+      /* default: treat all frames as key frames */
+ *frame_type = GST_AVI_KEYFRAME;
+ break;
+ }
+
+ return 0;
+
+}
+
+static void gst_avidemux_forward_trickplay (GstAviDemux * avi, GstAviStream * stream, guint64 *timestamp)
+{
+ guint32 nsamples = 0; /* Number of samples between two consecutive keyframes */
+ guint64 next_kindex_timestamp;
+ guint64 prev_kindex_timestamp;
+
+ if (*timestamp < stream->trickplay_info->start_pos) {
+    GST_LOG_OBJECT (avi, "Received an already-shown sample... not applying trickplay algorithm");
+ return;
+ }
+
+ if (stream->trickplay_info->next_kidx == 0) {
+ stream->trickplay_info->next_kidx = stream->trickplay_info->prev_kidx = stream->current_entry;
+
+ /* while loop to handle multiple consecutive key frames */
+ while(1) {
+      if ((stream->trickplay_info->next_kidx + 1) >= stream->idx_n) {
+ GST_DEBUG_OBJECT(avi,"eos");
+ break;
+ }
+
+      /* find the next key frame */
+      stream->trickplay_info->next_kidx = gst_avi_demux_index_next (avi, stream, stream->trickplay_info->next_kidx + 1, TRUE);
+
+      /* based on the number of samples between key frames and the playback rate, decide how many frames to drop */
+ GST_DEBUG_OBJECT (avi, "current index :%d, next key index : %d", stream->current_entry, stream->trickplay_info->next_kidx);
+
+      /* number of samples between the previous and next key frames */
+ nsamples = stream->trickplay_info->next_kidx - stream->trickplay_info->prev_kidx;
+
+      /* timestamps of the next and previous key frames */
+ next_kindex_timestamp = avi_stream_convert_frames_to_time_unchecked (stream, stream->trickplay_info->next_kidx);
+ prev_kindex_timestamp = avi_stream_convert_frames_to_time_unchecked (stream, stream->trickplay_info->prev_kidx);
+
+      /* average per-sample duration between the key frames */
+ stream->trickplay_info->kidxs_dur_diff = (next_kindex_timestamp - prev_kindex_timestamp)/nsamples;
+
+ stream->trickplay_info->show_samples = nsamples / avi->segment.rate;
+
+ GST_DEBUG_OBJECT (avi, " duration between keyframes:%"GST_TIME_FORMAT, GST_TIME_ARGS(stream->trickplay_info->kidxs_dur_diff));
+
+ if(stream->trickplay_info->show_samples) {
+ GST_DEBUG_OBJECT (avi, "samples to display between two key frames = %d",
+ stream->trickplay_info->show_samples);
+        /* found the number of samples to show between the key frames */
+ *timestamp = avi_stream_convert_frames_to_time_unchecked (stream, stream->current_entry);
+ break;
+      } else if ((!stream->trickplay_info->show_samples) &&
+          (stream->trickplay_info->next_kidx >= (stream->idx_n - 1))) {
+        /* nothing left to show before the final key frame, so jump straight to it */
+        stream->current_entry = stream->trickplay_info->next_kidx - 1; /* -1 because gst_avi_demux_advance() will increment */
+ stream->trickplay_info->next_kidx = 0;
+ break;
+ }
+ }
+ stream->discont = TRUE;
+ } else {
+ stream->trickplay_info->show_samples--;
+ prev_kindex_timestamp = avi_stream_convert_frames_to_time_unchecked (stream, stream->trickplay_info->prev_kidx);
+ *timestamp = prev_kindex_timestamp +
+ (stream->current_entry - stream->trickplay_info->prev_kidx) * avi->segment.rate * stream->trickplay_info->kidxs_dur_diff;
+
+ if (stream->trickplay_info->show_samples == 0) {
+      /* all samples to be shown between the two key frames have been shown */
+      GST_DEBUG_OBJECT (avi, "reached end of keyframe interval... jumping to next key index = %d", stream->trickplay_info->next_kidx);
+      stream->current_entry = stream->trickplay_info->next_kidx - 1; /* -1 because gst_avi_demux_advance() will increment */
+ stream->trickplay_info->next_kidx = 0;
+ stream->discont = TRUE;
+ }
+ }
+}
+
+static void
+gst_avidemux_backward_trickplay (GstAviDemux * avi, GstAviStream * stream, guint64 *timestamp)
+{
+ int index;
+
+ /* backward trick play */
+ GST_DEBUG_OBJECT (avi, "backward trickplay start");
+  index = gst_avi_demux_index_for_time (avi, stream, avi->seek_kf_offset);
+ gst_avi_demux_move_stream (avi, stream, &avi->segment, index);
+}
+
+static GstFlowReturn
+gst_avidemux_seek_to_previous_keyframe (GstAviDemux *avi)
+{
+ guint index;
+ GstAviStream *stream;
+ GstClockTime seek_time;
+ int i;
+ guint64 time_position;
+ guint64 duration;
+ gdouble minusone = -1;
+
+
+ /* FIXME, this code assumes the main stream with keyframes is stream 0,
+ * which is mostly correct... */
+ stream = &avi->stream[avi->main_stream];
+
+  if (stream->current_entry <= stream->start_entry) {
+    /* The video stream reached the start of the clip. Stop seeking to previous
+       keyframes and send a newsegment from the _loop function so that normal playback resumes */
+ goto eos;
+ }
+
+ index = stream->current_entry;
+ time_position = avi_stream_convert_frames_to_time_unchecked (stream, index);
+  duration = stream->current_ts_end - time_position;
+
+  if ((time_position - (minusone * avi->segment.rate) * duration) > 0)
+    time_position -= (minusone * avi->segment.rate) * duration;
+  else
+    time_position = 0;
+
+ avi->seek_kf_offset = time_position;
+
+ GST_DEBUG_OBJECT (avi, " seek_kf_offset after:%"GST_TIME_FORMAT, GST_TIME_ARGS(avi->seek_kf_offset));
+
+  index = gst_avi_demux_index_for_time (avi, stream, time_position);
+
+ index = gst_avi_demux_index_prev (avi, stream, index, TRUE);
+
+ gst_avi_demux_move_stream (avi, stream, &avi->segment, index);
+
+ seek_time = avi_stream_convert_frames_to_time_unchecked (stream, index);
+ GST_DEBUG_OBJECT (avi, " seek_time is :%"GST_TIME_FORMAT, GST_TIME_ARGS(seek_time));
+
+
+ stream->last_flow = GST_FLOW_OK;
+ stream->discont = TRUE;
+
+  /* Align the other streams */
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *ostream;
+
+ ostream = &avi->stream[i];
+ if ((ostream == stream) || (ostream->index == NULL))
+ continue;
+
+ /* get the entry index for the requested position */
+ index = gst_avi_demux_index_for_time (avi, ostream, seek_time);
+
+ gst_avi_demux_move_stream (avi, ostream, &avi->segment, index);
+
+ ostream->last_flow = GST_FLOW_OK;
+ ostream->discont = TRUE;
+ }
+ return GST_FLOW_OK;
+eos:
+ return GST_FLOW_UNEXPECTED;
+
+}
+
+#endif
+
diff --git a/gst/avi/gstavidemux.h b/gst/avi/gstavidemux.h
new file mode 100644
index 0000000..174d8a8
--- /dev/null
+++ b/gst/avi/gstavidemux.h
@@ -0,0 +1,243 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2006> Nokia Corporation (contact <stefan.kost@nokia.com>)
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_AVI_DEMUX_H__
+#define __GST_AVI_DEMUX_H__
+
+#include <gst/gst.h>
+
+#include "avi-ids.h"
+#include "gst/riff/riff-ids.h"
+#include "gst/riff/riff-read.h"
+#include <gst/base/gstadapter.h>
+
+#ifdef DIVX_DRM /* need to check using same define */
+#include <stdint.h>
+#endif
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AVI_DEMUX \
+ (gst_avi_demux_get_type ())
+#define GST_AVI_DEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_AVI_DEMUX, GstAviDemux))
+#define GST_AVI_DEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AVI_DEMUX, GstAviDemuxClass))
+#define GST_IS_AVI_DEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AVI_DEMUX))
+#define GST_IS_AVI_DEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_AVI_DEMUX))
+
+#define GST_AVI_DEMUX_MAX_STREAMS 16
+
+#define AVIDEMUX_MODIFICATION
+
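+/* AVI data chunk ids encode the stream number as two leading decimal digits,
+ * e.g. '00dc', '01wb' */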
+#define CHUNKID_TO_STREAMNR(chunkid) \
+ ((((chunkid) & 0xff) - '0') * 10 + \
+ (((chunkid) >> 8) & 0xff) - '0')
+
+
+/* new index entries 24 bytes */
+typedef struct {
+ guint32 flags;
+ guint32 size; /* bytes of the data */
+ guint64 offset; /* data offset in file */
+ guint64 total; /* total bytes before */
+} GstAviIndexEntry;
+
+#ifdef AVIDEMUX_MODIFICATION
+typedef struct _TrickPlayInfo TrickPlayInfo;
+
+struct _TrickPlayInfo {
+
+  gint32 next_kidx;        /* next key index */
+  gint32 prev_kidx;        /* previous key index */
+  guint64 kidxs_dur_diff;  /* duration between two consecutive key frames */
+  gint32 show_samples;     /* samples to show between two consecutive key frames */
+ guint64 start_pos; /* trickplay start position */
+};
+#endif
+
+typedef struct {
+ /* index of this streamcontext */
+ guint num;
+
+  /* pad */
+ GstPad *pad;
+ gboolean exposed;
+
+ /* stream info and headers */
+ gst_riff_strh *strh;
+ union {
+ gst_riff_strf_vids *vids;
+ gst_riff_strf_auds *auds;
+ gst_riff_strf_iavs *iavs;
+ gpointer data;
+ } strf;
+ GstBuffer *extradata, *initdata;
+ gchar *name;
+
+ /* the start/step/stop entries */
+ guint start_entry;
+ guint step_entry;
+ guint stop_entry;
+
+#ifdef AVIDEMUX_MODIFICATION
+ TrickPlayInfo *trickplay_info;
+#endif
+
+ /* current index entry */
+ guint current_entry;
+ /* position (byte, frame, time) for current_entry */
+ guint current_total;
+ GstClockTime current_timestamp;
+ GstClockTime current_ts_end;
+ guint64 current_offset;
+ guint64 current_offset_end;
+
+ GstFlowReturn last_flow;
+ gboolean discont;
+
+ /* stream length */
+ guint64 total_bytes;
+ guint32 total_blocks;
+ guint n_keyframes;
+ /* stream length according to index */
+ GstClockTime idx_duration;
+ /* stream length according to header */
+ GstClockTime hdr_duration;
+ /* stream length based on header/index */
+ GstClockTime duration;
+
+ /* VBR indicator */
+ gboolean is_vbr;
+
+ /* openDML support (for files >4GB) */
+ gboolean superindex;
+ guint64 *indexes;
+
+ /* new indexes */
+ GstAviIndexEntry *index; /* array with index entries */
+ guint idx_n; /* number of entries */
+ guint idx_max; /* max allocated size of entries */
+
+ GstTagList *taglist;
+
+ gint index_id;
+} GstAviStream;
+
+typedef enum {
+ GST_AVI_DEMUX_START,
+ GST_AVI_DEMUX_HEADER,
+ GST_AVI_DEMUX_MOVI,
+ GST_AVI_DEMUX_SEEK,
+} GstAviDemuxState;
+
+typedef enum {
+ GST_AVI_DEMUX_HEADER_TAG_LIST,
+ GST_AVI_DEMUX_HEADER_AVIH,
+ GST_AVI_DEMUX_HEADER_ELEMENTS,
+ GST_AVI_DEMUX_HEADER_INFO,
+ GST_AVI_DEMUX_HEADER_JUNK,
+ GST_AVI_DEMUX_HEADER_DATA
+} GstAviDemuxHeaderState;
+
+typedef struct _GstAviDemux {
+ GstElement parent;
+
+ /* pads */
+ GstPad *sinkpad;
+
+ /* AVI decoding state */
+ GstAviDemuxState state;
+ GstAviDemuxHeaderState header_state;
+ guint64 offset;
+ gboolean abort_buffering;
+
+ /* when we loaded the indexes */
+ gboolean have_index;
+ /* index offset in the file */
+ guint64 index_offset;
+
+ /* streams */
+ GstAviStream stream[GST_AVI_DEMUX_MAX_STREAMS];
+ guint num_streams;
+ guint num_v_streams;
+ guint num_a_streams;
+ guint num_t_streams; /* subtitle text streams */
+
+ guint main_stream; /* used for seeking */
+
+ /* for streaming mode */
+ gboolean streaming;
+ gboolean have_eos;
+ GstAdapter *adapter;
+ guint todrop;
+
+ /* some stream info for length */
+ gst_riff_avih *avih;
+ GstClockTime duration;
+
+ /* segment in TIME */
+ GstSegment segment;
+ gboolean segment_running;
+
+ /* pending tags/events */
+ GstEvent *seg_event;
+ GstEvent *close_seg_event;
+ GstTagList *globaltags;
+ gboolean got_tags;
+
+ /* gst index support */
+ GstIndex *element_index;
+ gint index_id;
+ gboolean seekable;
+
+#ifdef DIVX_DRM
+ uint8_t* drmContext;
+ void *divx_handle;
+ uint8_t* (*divx_init) (uint8_t*, int*);
+ int (*divx_commit) (uint8_t *);
+ int (*divx_decrypt_audio) (uint8_t *, uint8_t *, uint32_t);
+ int (*divx_prepare_video_bitstream) (uint8_t *, uint8_t * , uint32_t , uint8_t * , uint32_t * );
+ int (*divx_finalize) (uint8_t *);
+#endif
+
+ guint64 first_movi_offset;
+ guint64 idx1_offset; /* offset in file of list/chunk after movi */
+ GstEvent *seek_event;
+
+ gboolean building_index;
+ guint odml_stream;
+ guint odml_subidx;
+ guint64 *odml_subidxs;
+
+ guint64 seek_kf_offset; /* offset of the keyframe to which we want to seek */
+} GstAviDemux;
+
+typedef struct _GstAviDemuxClass {
+ GstElementClass parent_class;
+} GstAviDemuxClass;
+
+GType gst_avi_demux_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_AVI_DEMUX_H__ */
diff --git a/gst/avi/gstavimux.c b/gst/avi/gstavimux.c
new file mode 100644
index 0000000..fe50cef
--- /dev/null
+++ b/gst/avi/gstavimux.c
@@ -0,0 +1,2180 @@
+/* AVI muxer plugin for GStreamer
+ * Copyright (C) 2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * (C) 2006 Mark Nauwelaerts <manauw@skynet.be>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/* based on:
+ * - the old avimuxer (by Wim Taymans)
+ * - xawtv's aviwriter (by Gerd Knorr)
+ * - mjpegtools' avilib (by Rainer Johanni)
+ * - openDML large-AVI docs
+ */
+
+/**
+ * SECTION:element-avimux
+ *
+ * Muxes raw or compressed audio and/or video streams into an AVI file.
+ *
+ * <refsect2>
+ * <title>Example launch lines</title>
+ * <para>(write everything in one line, without the backslash characters)</para>
+ * |[
+ * gst-launch videotestsrc num-buffers=250 \
+ * ! 'video/x-raw-yuv,format=(fourcc)I420,width=320,height=240,framerate=(fraction)25/1' \
+ * ! queue ! mux. \
+ * audiotestsrc num-buffers=440 ! audioconvert \
+ * ! 'audio/x-raw-int,rate=44100,channels=2' ! queue ! mux. \
+ * avimux name=mux ! filesink location=test.avi
+ * ]| This will create an .AVI file containing an uncompressed video stream
+ * with a test picture and an uncompressed audio stream containing a
+ * test sound.
+ * |[
+ * gst-launch videotestsrc num-buffers=250 \
+ * ! 'video/x-raw-yuv,format=(fourcc)I420,width=320,height=240,framerate=(fraction)25/1' \
+ * ! xvidenc ! queue ! mux. \
+ * audiotestsrc num-buffers=440 ! audioconvert ! 'audio/x-raw-int,rate=44100,channels=2' \
+ * ! lame ! queue ! mux. \
+ * avimux name=mux ! filesink location=test.avi
+ * ]| This will create an .AVI file containing the same test video and sound
+ * as above, except that both streams will be compressed this time. This will
+ * only work if you have the necessary encoder elements installed, of course.
+ * </refsect2>
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gst/gst-i18n-plugin.h"
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <gst/video/video.h>
+#include <gst/base/gstbytewriter.h>
+
+#include "gstavimux.h"
+
+GST_DEBUG_CATEGORY_STATIC (avimux_debug);
+#define GST_CAT_DEFAULT avimux_debug
+
+enum
+{
+ ARG_0,
+ ARG_BIGFILE
+};
+
+#define DEFAULT_BIGFILE TRUE
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-msvideo")
+ );
+
+static GstStaticPadTemplate video_sink_factory =
+ GST_STATIC_PAD_TEMPLATE ("video_%d",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("video/x-raw-yuv, "
+ "format = (fourcc) { YUY2, I420 }, "
+ "width = (int) [ 16, 4096 ], "
+ "height = (int) [ 16, 4096 ], "
+ "framerate = (fraction) [ 0, MAX ]; "
+ "image/jpeg, "
+ "width = (int) [ 16, 4096 ], "
+ "height = (int) [ 16, 4096 ], "
+ "framerate = (fraction) [ 0, MAX ]; "
+ "video/x-divx, "
+ "width = (int) [ 16, 4096 ], "
+ "height = (int) [ 16, 4096 ], "
+ "framerate = (fraction) [ 0, MAX ], "
+ "divxversion = (int) [ 3, 5 ]; "
+ "video/x-xvid, "
+ "width = (int) [ 16, 4096 ], "
+ "height = (int) [ 16, 4096 ], "
+ "framerate = (fraction) [ 0, MAX ]; "
+ "video/x-3ivx, "
+ "width = (int) [ 16, 4096 ], "
+ "height = (int) [ 16, 4096 ], "
+ "framerate = (fraction) [ 0, MAX ]; "
+ "video/x-msmpeg, "
+ "width = (int) [ 16, 4096 ], "
+ "height = (int) [ 16, 4096 ], "
+ "framerate = (fraction) [ 0, MAX ], "
+ "msmpegversion = (int) [ 41, 43 ]; "
+ "video/mpeg, "
+ "width = (int) [ 16, 4096 ], "
+ "height = (int) [ 16, 4096 ], "
+ "framerate = (fraction) [ 0, MAX ], "
+ "mpegversion = (int) { 1, 2, 4}, "
+ "systemstream = (boolean) FALSE; "
+ "video/x-h263, "
+ "width = (int) [ 16, 4096 ], "
+ "height = (int) [ 16, 4096 ], "
+ "framerate = (fraction) [ 0, MAX ]; "
+ "video/x-h264, "
+ "stream-format = (string) byte-stream, "
+ "alignment = (string) au, "
+ "width = (int) [ 16, 4096 ], "
+ "height = (int) [ 16, 4096 ], "
+ "framerate = (fraction) [ 0, MAX ]; "
+ "video/x-dv, "
+ "width = (int) 720, "
+ "height = (int) { 576, 480 }, "
+ "framerate = (fraction) [ 0, MAX ], "
+ "systemstream = (boolean) FALSE; "
+ "video/x-huffyuv, "
+ "width = (int) [ 16, 4096 ], "
+ "height = (int) [ 16, 4096 ], " "framerate = (fraction) [ 0, MAX ];"
+ "video/x-wmv, "
+ "width = (int) [ 16, 4096 ], "
+ "height = (int) [ 16, 4096 ], " "framerate = (fraction) [ 0, MAX ], "
+ "wmvversion = (int) [ 1, 3];"
+ "image/x-jpc, "
+ "width = (int) [ 1, 2147483647 ], "
+ "height = (int) [ 1, 2147483647 ], "
+ "framerate = (fraction) [ 0, MAX ];"
+ "video/x-vp8, "
+ "width = (int) [ 1, 2147483647 ], "
+ "height = (int) [ 1, 2147483647 ], "
+ "framerate = (fraction) [ 0, MAX ]")
+ );
+
+static GstStaticPadTemplate audio_sink_factory =
+ GST_STATIC_PAD_TEMPLATE ("audio_%d",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("audio/x-raw-int, "
+ "endianness = (int) LITTLE_ENDIAN, "
+ "signed = (boolean) { TRUE, FALSE }, "
+ "width = (int) { 8, 16 }, "
+ "depth = (int) { 8, 16 }, "
+ "rate = (int) [ 1000, 96000 ], "
+ "channels = (int) [ 1, 2 ]; "
+ "audio/mpeg, "
+ "mpegversion = (int) 1, "
+ "layer = (int) [ 1, 3 ], "
+ "rate = (int) [ 1000, 96000 ], " "channels = (int) [ 1, 2 ]; "
+ "audio/mpeg, "
+ "mpegversion = (int) 4, "
+ "stream-format = (string) raw, "
+ "rate = (int) [ 1000, 96000 ], " "channels = (int) [ 1, 2 ]; "
+/*#if 0 VC6 doesn't support #if here ...
+ "audio/x-vorbis, "
+ "rate = (int) [ 1000, 96000 ], " "channels = (int) [ 1, 2 ]; "
+#endif*/
+ "audio/x-ac3, "
+ "rate = (int) [ 1000, 96000 ], " "channels = (int) [ 1, 2 ]; "
+ "audio/x-alaw, "
+ "rate = (int) [ 1000, 48000 ], " "channels = (int) [ 1, 2 ]; "
+ "audio/x-mulaw, "
+ "rate = (int) [ 1000, 48000 ], " "channels = (int) [ 1, 2 ]; "
+ "audio/x-wma, "
+ "rate = (int) [ 1000, 96000 ], " "channels = (int) [ 1, 2 ], "
+ "wmaversion = (int) [ 1, 2 ] ")
+ );
+
+static void gst_avi_mux_base_init (gpointer g_class);
+static void gst_avi_mux_class_init (GstAviMuxClass * klass);
+static void gst_avi_mux_init (GstAviMux * avimux);
+static void gst_avi_mux_pad_reset (GstAviPad * avipad, gboolean free);
+
+static GstFlowReturn gst_avi_mux_collect_pads (GstCollectPads * pads,
+ GstAviMux * avimux);
+static gboolean gst_avi_mux_handle_event (GstPad * pad, GstEvent * event);
+static GstPad *gst_avi_mux_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name);
+static void gst_avi_mux_release_pad (GstElement * element, GstPad * pad);
+static void gst_avi_mux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_avi_mux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+static GstStateChangeReturn gst_avi_mux_change_state (GstElement * element,
+ GstStateChange transition);
+
+static GstElementClass *parent_class = NULL;
+
+GType
+gst_avi_mux_get_type (void)
+{
+ static GType avimux_type = 0;
+
+ if (!avimux_type) {
+ static const GTypeInfo avimux_info = {
+ sizeof (GstAviMuxClass),
+ gst_avi_mux_base_init,
+ NULL,
+ (GClassInitFunc) gst_avi_mux_class_init,
+ NULL,
+ NULL,
+ sizeof (GstAviMux),
+ 0,
+ (GInstanceInitFunc) gst_avi_mux_init,
+ };
+ static const GInterfaceInfo tag_setter_info = {
+ NULL,
+ NULL,
+ NULL
+ };
+
+ avimux_type =
+ g_type_register_static (GST_TYPE_ELEMENT, "GstAviMux", &avimux_info, 0);
+ g_type_add_interface_static (avimux_type, GST_TYPE_TAG_SETTER,
+ &tag_setter_info);
+ }
+ return avimux_type;
+}
+
+static void
+gst_avi_mux_base_init (gpointer g_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+ gst_element_class_add_static_pad_template (element_class, &src_factory);
+ gst_element_class_add_static_pad_template (element_class,
+ &audio_sink_factory);
+ gst_element_class_add_static_pad_template (element_class,
+ &video_sink_factory);
+
+ gst_element_class_set_details_simple (element_class, "Avi muxer",
+ "Codec/Muxer",
+ "Muxes audio and video into an avi stream",
+ "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
+
+ GST_DEBUG_CATEGORY_INIT (avimux_debug, "avimux", 0, "Muxer for AVI streams");
+}
+
+static void
+gst_avi_mux_finalize (GObject * object)
+{
+ GstAviMux *mux = GST_AVI_MUX (object);
+ GSList *node;
+
+ /* completely free each sinkpad */
+ node = mux->sinkpads;
+ while (node) {
+ GstAviPad *avipad = (GstAviPad *) node->data;
+
+ node = node->next;
+
+ gst_avi_mux_pad_reset (avipad, TRUE);
+ g_free (avipad);
+ }
+ g_slist_free (mux->sinkpads);
+ mux->sinkpads = NULL;
+
+ g_free (mux->idx);
+ mux->idx = NULL;
+
+ gst_object_unref (mux->collect);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_avi_mux_class_init (GstAviMuxClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->get_property = gst_avi_mux_get_property;
+ gobject_class->set_property = gst_avi_mux_set_property;
+ gobject_class->finalize = gst_avi_mux_finalize;
+
+ g_object_class_install_property (gobject_class, ARG_BIGFILE,
+ g_param_spec_boolean ("bigfile", "Bigfile Support (>2GB)",
+ "Support for openDML-2.0 (big) AVI files", DEFAULT_BIGFILE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gstelement_class->request_new_pad =
+ GST_DEBUG_FUNCPTR (gst_avi_mux_request_new_pad);
+ gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_avi_mux_release_pad);
+ gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_avi_mux_change_state);
+}
+
+/* reset pad to initial state
+ * free - if TRUE, release all data, not only the stream-related parts */
+static void
+gst_avi_mux_pad_reset (GstAviPad * avipad, gboolean free)
+{
+ /* generic part */
+ memset (&(avipad->hdr), 0, sizeof (gst_riff_strh));
+
+ memset (&(avipad->idx[0]), 0, sizeof (avipad->idx));
+
+ if (free) {
+ g_free (avipad->tag);
+ avipad->tag = NULL;
+ g_free (avipad->idx_tag);
+ avipad->idx_tag = NULL;
+ }
+
+ if (avipad->is_video) {
+ GstAviVideoPad *vidpad = (GstAviVideoPad *) avipad;
+
+ avipad->hdr.type = GST_MAKE_FOURCC ('v', 'i', 'd', 's');
+ if (vidpad->vids_codec_data) {
+ gst_buffer_unref (vidpad->vids_codec_data);
+ vidpad->vids_codec_data = NULL;
+ }
+
+ if (vidpad->prepend_buffer) {
+ gst_buffer_unref (vidpad->prepend_buffer);
+ vidpad->prepend_buffer = NULL;
+ }
+
+ memset (&(vidpad->vids), 0, sizeof (gst_riff_strf_vids));
+ memset (&(vidpad->vprp), 0, sizeof (gst_riff_vprp));
+ } else {
+ GstAviAudioPad *audpad = (GstAviAudioPad *) avipad;
+
+ audpad->samples = 0;
+
+ avipad->hdr.type = GST_MAKE_FOURCC ('a', 'u', 'd', 's');
+ if (audpad->auds_codec_data) {
+ gst_buffer_unref (audpad->auds_codec_data);
+ audpad->auds_codec_data = NULL;
+ }
+
+ memset (&(audpad->auds), 0, sizeof (gst_riff_strf_auds));
+ }
+}
+
+static void
+gst_avi_mux_reset (GstAviMux * avimux)
+{
+ GSList *node, *newlist = NULL;
+
+ /* free and reset each sinkpad */
+ node = avimux->sinkpads;
+ while (node) {
+ GstAviPad *avipad = (GstAviPad *) node->data;
+
+ node = node->next;
+
+ gst_avi_mux_pad_reset (avipad, FALSE);
+ /* if this pad has collectdata, keep it, otherwise dump it completely */
+ if (avipad->collect)
+ newlist = g_slist_append (newlist, avipad);
+ else {
+ gst_avi_mux_pad_reset (avipad, TRUE);
+ g_free (avipad);
+ }
+ }
+
+ /* free the old list of sinkpads, only keep the real collecting ones */
+ g_slist_free (avimux->sinkpads);
+ avimux->sinkpads = newlist;
+
+ /* avi data */
+ avimux->num_frames = 0;
+ memset (&(avimux->avi_hdr), 0, sizeof (gst_riff_avih));
+ avimux->avi_hdr.max_bps = 10000000;
+ avimux->codec_data_size = 0;
+
+ if (avimux->tags_snap) {
+ gst_tag_list_free (avimux->tags_snap);
+ avimux->tags_snap = NULL;
+ }
+
+ g_free (avimux->idx);
+ avimux->idx = NULL;
+
+ /* state info */
+ avimux->write_header = TRUE;
+
+ /* tags */
+ gst_tag_setter_reset_tags (GST_TAG_SETTER (avimux));
+}
+
+static void
+gst_avi_mux_init (GstAviMux * avimux)
+{
+ avimux->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
+ gst_pad_use_fixed_caps (avimux->srcpad);
+ gst_element_add_pad (GST_ELEMENT (avimux), avimux->srcpad);
+
+ /* property */
+ avimux->enable_large_avi = DEFAULT_BIGFILE;
+
+ avimux->collect = gst_collect_pads_new ();
+ gst_collect_pads_set_function (avimux->collect,
+ (GstCollectPadsFunction) (GST_DEBUG_FUNCPTR (gst_avi_mux_collect_pads)),
+ avimux);
+
+ /* set to clean state */
+ gst_avi_mux_reset (avimux);
+}
+
+static gboolean
+gst_avi_mux_vidsink_set_caps (GstPad * pad, GstCaps * vscaps)
+{
+ GstAviMux *avimux;
+ GstAviVideoPad *avipad;
+ GstAviCollectData *collect_pad;
+ GstStructure *structure;
+ const gchar *mimetype;
+ const GValue *fps, *par;
+ const GValue *codec_data;
+ gint width, height;
+ gint par_n, par_d;
+ gboolean codec_data_in_headers = TRUE;
+
+ avimux = GST_AVI_MUX (gst_pad_get_parent (pad));
+
+ /* find stream data */
+ collect_pad = (GstAviCollectData *) gst_pad_get_element_private (pad);
+ g_assert (collect_pad);
+ avipad = (GstAviVideoPad *) collect_pad->avipad;
+ g_assert (avipad);
+ g_assert (avipad->parent.is_video);
+ g_assert (avipad->parent.hdr.type == GST_MAKE_FOURCC ('v', 'i', 'd', 's'));
+
+ GST_DEBUG_OBJECT (avimux, "%s:%s, caps=%" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), vscaps);
+
+ structure = gst_caps_get_structure (vscaps, 0);
+ mimetype = gst_structure_get_name (structure);
+
+ /* global */
+ avipad->vids.size = sizeof (gst_riff_strf_vids);
+ avipad->vids.planes = 1;
+ if (!gst_structure_get_int (structure, "width", &width) ||
+ !gst_structure_get_int (structure, "height", &height)) {
+ goto refuse_caps;
+ }
+
+ avipad->vids.width = width;
+ avipad->vids.height = height;
+
+ fps = gst_structure_get_value (structure, "framerate");
+ if (fps == NULL || !GST_VALUE_HOLDS_FRACTION (fps))
+ goto refuse_caps;
+
+ avipad->parent.hdr.rate = gst_value_get_fraction_numerator (fps);
+ avipad->parent.hdr.scale = gst_value_get_fraction_denominator (fps);
+
+ /* (pixel) aspect ratio data, if any */
+ par = gst_structure_get_value (structure, "pixel-aspect-ratio");
+ /* only use video properties header if there is non-trivial aspect info */
+ if (par && GST_VALUE_HOLDS_FRACTION (par) &&
+ ((par_n = gst_value_get_fraction_numerator (par)) !=
+ (par_d = gst_value_get_fraction_denominator (par)))) {
+ GValue to_ratio = { 0, };
+ guint ratio_n, ratio_d;
+
+ /* some fraction voodoo to obtain simplest possible ratio */
+ g_value_init (&to_ratio, GST_TYPE_FRACTION);
+ gst_value_set_fraction (&to_ratio, width * par_n, height * par_d);
+ ratio_n = gst_value_get_fraction_numerator (&to_ratio);
+ ratio_d = gst_value_get_fraction_denominator (&to_ratio);
+ GST_DEBUG_OBJECT (avimux, "generating vprp data with aspect ratio %d/%d",
+ ratio_n, ratio_d);
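+    /* e.g. 720x576 video with a 16/15 pixel-aspect-ratio reduces to a 4:3 display ratio */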
+ /* simply fill in */
+ avipad->vprp.vert_rate = avipad->parent.hdr.rate / avipad->parent.hdr.scale;
+ avipad->vprp.hor_t_total = width;
+ avipad->vprp.vert_lines = height;
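+    /* the vprp aspect field packs the ratio with the numerator in the high
+     * 16 bits and the denominator in the low 16 bits */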
+ avipad->vprp.aspect = (ratio_n) << 16 | (ratio_d & 0xffff);
+ avipad->vprp.width = width;
+ avipad->vprp.height = height;
+ avipad->vprp.fields = 1;
+ avipad->vprp.field_info[0].compressed_bm_height = height;
+ avipad->vprp.field_info[0].compressed_bm_width = width;
+ avipad->vprp.field_info[0].valid_bm_height = height;
+ avipad->vprp.field_info[0].valid_bm_width = width;
+ }
+
+ if (!strcmp (mimetype, "video/x-raw-yuv")) {
+ guint32 format;
+
+ gst_structure_get_fourcc (structure, "format", &format);
+ avipad->vids.compression = format;
+ switch (format) {
+ case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
+ avipad->vids.bit_cnt = 16;
+ break;
+ case GST_MAKE_FOURCC ('I', '4', '2', '0'):
+ avipad->vids.bit_cnt = 12;
+ break;
+ }
+ } else {
+ avipad->vids.bit_cnt = 24;
+ avipad->vids.compression = 0;
+
+ /* find format */
+ if (!strcmp (mimetype, "video/x-huffyuv")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('H', 'F', 'Y', 'U');
+ } else if (!strcmp (mimetype, "image/jpeg")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'J', 'P', 'G');
+ } else if (!strcmp (mimetype, "video/x-divx")) {
+ gint divxversion;
+
+ gst_structure_get_int (structure, "divxversion", &divxversion);
+ switch (divxversion) {
+ case 3:
+ avipad->vids.compression = GST_MAKE_FOURCC ('D', 'I', 'V', '3');
+ break;
+ case 4:
+ avipad->vids.compression = GST_MAKE_FOURCC ('D', 'I', 'V', 'X');
+ break;
+ case 5:
+ avipad->vids.compression = GST_MAKE_FOURCC ('D', 'X', '5', '0');
+ break;
+ }
+ } else if (!strcmp (mimetype, "video/x-xvid")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('X', 'V', 'I', 'D');
+ } else if (!strcmp (mimetype, "video/x-3ivx")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('3', 'I', 'V', '2');
+ } else if (gst_structure_has_name (structure, "video/x-msmpeg")) {
+ gint msmpegversion;
+
+ gst_structure_get_int (structure, "msmpegversion", &msmpegversion);
+ switch (msmpegversion) {
+ case 41:
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'P', 'G', '4');
+ break;
+ case 42:
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'P', '4', '2');
+ break;
+ case 43:
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'P', '4', '3');
+ break;
+ default:
+ GST_INFO ("unhandled msmpegversion : %d, fall back to fourcc=MPEG",
+ msmpegversion);
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'P', 'E', 'G');
+ break;
+ }
+ } else if (!strcmp (mimetype, "video/x-dv")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('D', 'V', 'S', 'D');
+ } else if (!strcmp (mimetype, "video/x-h263")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('H', '2', '6', '3');
+ } else if (!strcmp (mimetype, "video/x-h264")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('H', '2', '6', '4');
+ } else if (!strcmp (mimetype, "video/mpeg")) {
+ gint mpegversion;
+
+ gst_structure_get_int (structure, "mpegversion", &mpegversion);
+
+ switch (mpegversion) {
+ case 2:
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'P', 'G', '2');
+ break;
+ case 4:
+ /* mplayer/ffmpeg might not work with DIVX, but with FMP4 */
+ avipad->vids.compression = GST_MAKE_FOURCC ('D', 'I', 'V', 'X');
+
+ /* DIVX/XVID in AVI store the codec_data chunk as part of the
+ first data buffer. So for this case, we prepend the codec_data
+ blob (if any) to that first buffer */
+ codec_data_in_headers = FALSE;
+ break;
+ default:
+ GST_INFO ("unhandled mpegversion : %d, fall back to fourcc=MPEG",
+ mpegversion);
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'P', 'E', 'G');
+ break;
+ }
+ } else if (!strcmp (mimetype, "video/x-wmv")) {
+ gint wmvversion;
+
+ if (gst_structure_get_int (structure, "wmvversion", &wmvversion)) {
+ switch (wmvversion) {
+ case 1:
+ avipad->vids.compression = GST_MAKE_FOURCC ('W', 'M', 'V', '1');
+ break;
+ case 2:
+ avipad->vids.compression = GST_MAKE_FOURCC ('W', 'M', 'V', '2');
+ break;
+ case 3:
+            avipad->vids.compression = GST_MAKE_FOURCC ('W', 'M', 'V', '3');
+            break;
+          default:
+ break;
+ }
+ }
+ } else if (!strcmp (mimetype, "image/x-jpc")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'J', '2', 'C');
+ } else if (!strcmp (mimetype, "video/x-vp8")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('V', 'P', '8', '0');
+ }
+
+ if (!avipad->vids.compression)
+ goto refuse_caps;
+ }
+
+ /* codec initialization data, if any */
+ codec_data = gst_structure_get_value (structure, "codec_data");
+ if (codec_data) {
+ if (codec_data_in_headers) {
+ avipad->vids_codec_data = gst_value_get_buffer (codec_data);
+ gst_buffer_ref (avipad->vids_codec_data);
+ /* keep global track of size */
+ avimux->codec_data_size += GST_BUFFER_SIZE (avipad->vids_codec_data);
+ } else {
+ avipad->prepend_buffer =
+ gst_buffer_ref (gst_value_get_buffer (codec_data));
+ }
+ }
+
+ avipad->parent.hdr.fcc_handler = avipad->vids.compression;
+ avipad->vids.image_size = avipad->vids.height * avipad->vids.width;
+ /* hm, maybe why avi only handles one stream well ... */
+ avimux->avi_hdr.width = avipad->vids.width;
+ avimux->avi_hdr.height = avipad->vids.height;
+ avimux->avi_hdr.us_frame = 1000000. * avipad->parent.hdr.scale /
+ avipad->parent.hdr.rate;
+
+ gst_object_unref (avimux);
+ return TRUE;
+
+refuse_caps:
+ {
+ GST_WARNING_OBJECT (avimux, "refused caps %" GST_PTR_FORMAT, vscaps);
+ gst_object_unref (avimux);
+ return FALSE;
+ }
+}
+
+static GstFlowReturn
+gst_avi_mux_audsink_scan_mpeg_audio (GstAviMux * avimux, GstAviPad * avipad,
+ GstBuffer * buffer)
+{
+ guint8 *data;
+ guint size;
+ guint spf;
+ guint32 header;
+ gulong layer;
+ gulong version;
+ gint lsf, mpg25;
+
+ data = GST_BUFFER_DATA (buffer);
+ size = GST_BUFFER_SIZE (buffer);
+
+ if (size < 4)
+ goto not_parsed;
+
+ header = GST_READ_UINT32_BE (data);
+
+ if ((header & 0xffe00000) != 0xffe00000)
+ goto not_parsed;
+
+ /* thanks go to mp3parse */
+ if (header & (1 << 20)) {
+ lsf = (header & (1 << 19)) ? 0 : 1;
+ mpg25 = 0;
+ } else {
+ lsf = 1;
+ mpg25 = 1;
+ }
+
+ version = 1 + lsf + mpg25;
+ layer = 4 - ((header >> 17) & 0x3);
+
+ /* see http://www.codeproject.com/audio/MPEGAudioInfo.asp */
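+  /* samples per frame: Layer I uses 384, Layer II 1152, and Layer III
+   * 1152 for MPEG-1 but 576 for MPEG-2/2.5 */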
+ if (layer == 1)
+ spf = 384;
+ else if (layer == 2)
+ spf = 1152;
+ else if (version == 1) {
+ spf = 1152;
+ } else {
+ /* MPEG-2 or "2.5" */
+ spf = 576;
+ }
+
+ if (G_UNLIKELY (avipad->hdr.scale <= 1))
+ avipad->hdr.scale = spf;
+ else if (G_UNLIKELY (avipad->hdr.scale != spf)) {
+ GST_WARNING_OBJECT (avimux, "input mpeg audio has varying frame size");
+ goto cbr_fallback;
+ }
+
+ return GST_FLOW_OK;
+
+ /* EXITS */
+not_parsed:
+ {
+ GST_WARNING_OBJECT (avimux, "input mpeg audio is not parsed");
+ /* fall-through */
+ }
+cbr_fallback:
+ {
+ GST_WARNING_OBJECT (avimux, "falling back to CBR muxing");
+ avipad->hdr.scale = 1;
+ /* no need to check further */
+ avipad->hook = NULL;
+ return GST_FLOW_OK;
+ }
+}
+
+static void
+gst_avi_mux_audsink_set_fields (GstAviMux * avimux, GstAviAudioPad * avipad)
+{
+ if (avipad->parent.hdr.scale > 1) {
+ /* vbr case: fixed duration per frame/chunk */
+ avipad->parent.hdr.rate = avipad->auds.rate;
+ avipad->parent.hdr.samplesize = 0;
+ /* FIXME ?? some rumours say this should be largest audio chunk size */
+ avipad->auds.blockalign = avipad->parent.hdr.scale;
+ } else {
+ /* by spec, hdr.rate is av_bps related, is calculated that way in stop_file,
+ * and reduces to sample rate in PCM like cases */
+ avipad->parent.hdr.rate = avipad->auds.av_bps / avipad->auds.blockalign;
+ avipad->parent.hdr.samplesize = avipad->auds.blockalign;
+ avipad->parent.hdr.scale = 1;
+ }
+}
+
+static gboolean
+gst_avi_mux_audsink_set_caps (GstPad * pad, GstCaps * vscaps)
+{
+ GstAviMux *avimux;
+ GstAviAudioPad *avipad;
+ GstAviCollectData *collect_pad;
+ GstStructure *structure;
+ const gchar *mimetype;
+ const GValue *codec_data;
+ gint channels, rate;
+
+ avimux = GST_AVI_MUX (gst_pad_get_parent (pad));
+
+ /* find stream data */
+ collect_pad = (GstAviCollectData *) gst_pad_get_element_private (pad);
+ g_assert (collect_pad);
+ avipad = (GstAviAudioPad *) collect_pad->avipad;
+ g_assert (avipad);
+ g_assert (!avipad->parent.is_video);
+ g_assert (avipad->parent.hdr.type == GST_MAKE_FOURCC ('a', 'u', 'd', 's'));
+
+ GST_DEBUG_OBJECT (avimux, "%s:%s, caps=%" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), vscaps);
+
+ structure = gst_caps_get_structure (vscaps, 0);
+ mimetype = gst_structure_get_name (structure);
+
+ /* we want these for all */
+ if (!gst_structure_get_int (structure, "channels", &channels) ||
+ !gst_structure_get_int (structure, "rate", &rate)) {
+ goto refuse_caps;
+ }
+
+ avipad->auds.channels = channels;
+ avipad->auds.rate = rate;
+
+ /* codec initialization data, if any */
+ codec_data = gst_structure_get_value (structure, "codec_data");
+ if (codec_data) {
+ avipad->auds_codec_data = gst_value_get_buffer (codec_data);
+ gst_buffer_ref (avipad->auds_codec_data);
+ /* keep global track of size */
+ avimux->codec_data_size += GST_BUFFER_SIZE (avipad->auds_codec_data);
+ }
+
+ if (!strcmp (mimetype, "audio/x-raw-int")) {
+ gint width, depth;
+ gboolean signedness;
+
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_PCM;
+
+ if (!gst_structure_get_int (structure, "width", &width) ||
+ !gst_structure_get_int (structure, "depth", &depth) ||
+ !gst_structure_get_boolean (structure, "signed", &signedness)) {
+ GST_DEBUG_OBJECT (avimux,
+ "broken caps, width/depth/signed field missing");
+ goto refuse_caps;
+ }
+
+ /* no clear place to put different values for these while keeping to spec */
+ if (width != depth) {
+ GST_DEBUG_OBJECT (avimux, "width must be same as depth!");
+ goto refuse_caps;
+ }
+
+ /* because that's the way the caps will be recreated from riff data */
+ if ((width == 8 && signedness) || (width == 16 && !signedness)) {
+ GST_DEBUG_OBJECT (avimux,
+ "8-bit PCM must be unsigned, 16-bit PCM signed");
+ goto refuse_caps;
+ }
+
+ avipad->auds.blockalign = width;
+ avipad->auds.size = (width == 8) ? 8 : depth;
+
+ /* set some more info straight */
+ avipad->auds.blockalign /= 8;
+ avipad->auds.blockalign *= avipad->auds.channels;
+ avipad->auds.av_bps = avipad->auds.blockalign * avipad->auds.rate;
+ } else {
+ avipad->auds.format = 0;
+ /* set some defaults */
+ avipad->auds.blockalign = 1;
+ avipad->auds.av_bps = 0;
+ avipad->auds.size = 16;
+
+ if (!strcmp (mimetype, "audio/mpeg")) {
+ gint mpegversion;
+
+ gst_structure_get_int (structure, "mpegversion", &mpegversion);
+ switch (mpegversion) {
+ case 1:{
+ gint layer = 3;
+ gboolean parsed = FALSE;
+
+ gst_structure_get_int (structure, "layer", &layer);
+ gst_structure_get_boolean (structure, "parsed", &parsed);
+ switch (layer) {
+ case 3:
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_MPEGL3;
+ break;
+ case 1:
+ case 2:
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_MPEGL12;
+ break;
+ }
+ if (parsed) {
+ /* treat as VBR, should also cover CBR case;
+ * setup hook to parse frame header and determine spf */
+ avipad->parent.hook = gst_avi_mux_audsink_scan_mpeg_audio;
+ } else {
+ GST_WARNING_OBJECT (avimux, "unparsed MPEG audio input (?), "
+ "doing CBR muxing");
+ }
+ break;
+ }
+ case 4:
+ {
+ GstBuffer *codec_data_buf = avipad->auds_codec_data;
+ const gchar *stream_format;
+ guint codec;
+
+ stream_format = gst_structure_get_string (structure, "stream-format");
+ if (stream_format) {
+ if (strcmp (stream_format, "raw") != 0) {
+ GST_WARNING_OBJECT (avimux, "AAC's stream format '%s' is not "
+ "supported, please use 'raw'", stream_format);
+ break;
+ }
+ } else {
+ GST_WARNING_OBJECT (avimux, "AAC's stream-format not specified, "
+ "assuming 'raw'");
+ }
+
+ /* vbr case needs some special handling */
+ if (!codec_data_buf || GST_BUFFER_SIZE (codec_data_buf) < 2) {
+ GST_WARNING_OBJECT (avimux, "no (valid) codec_data for AAC audio");
+ break;
+ }
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_AAC;
+ /* need to determine frame length */
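+        /* the frameLengthFlag sits in bit 2 of the first two AudioSpecificConfig
+         * bytes (for non-escaped sampling rates): 960 samples per frame if set,
+         * 1024 otherwise */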
+ codec = GST_READ_UINT16_BE (GST_BUFFER_DATA (codec_data_buf));
+ avipad->parent.hdr.scale = (codec & 0x4) ? 960 : 1024;
+ break;
+ }
+ }
+ } else if (!strcmp (mimetype, "audio/x-vorbis")) {
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_VORBIS3;
+ } else if (!strcmp (mimetype, "audio/x-ac3")) {
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_A52;
+ } else if (!strcmp (mimetype, "audio/x-alaw")) {
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_ALAW;
+ avipad->auds.size = 8;
+ avipad->auds.blockalign = avipad->auds.channels;
+ avipad->auds.av_bps = avipad->auds.blockalign * avipad->auds.rate;
+ } else if (!strcmp (mimetype, "audio/x-mulaw")) {
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_MULAW;
+ avipad->auds.size = 8;
+ avipad->auds.blockalign = avipad->auds.channels;
+ avipad->auds.av_bps = avipad->auds.blockalign * avipad->auds.rate;
+ } else if (!strcmp (mimetype, "audio/x-wma")) {
+ gint version;
+ gint bitrate;
+ gint block_align;
+
+ if (gst_structure_get_int (structure, "wmaversion", &version)) {
+ switch (version) {
+ case 1:
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_WMAV1;
+ break;
+ case 2:
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_WMAV2;
+ break;
+ default:
+ break;
+ }
+ }
+
+ if (avipad->auds.format != 0) {
+ if (gst_structure_get_int (structure, "block_align", &block_align)) {
+ avipad->auds.blockalign = block_align;
+ }
+ if (gst_structure_get_int (structure, "bitrate", &bitrate)) {
+ avipad->auds.av_bps = bitrate / 8;
+ }
+ }
+ }
+ }
+
+ if (!avipad->auds.format)
+ goto refuse_caps;
+
+ avipad->parent.hdr.fcc_handler = avipad->auds.format;
+ gst_avi_mux_audsink_set_fields (avimux, avipad);
+
+ gst_object_unref (avimux);
+ return TRUE;
+
+refuse_caps:
+ {
+ GST_WARNING_OBJECT (avimux, "refused caps %" GST_PTR_FORMAT, vscaps);
+ gst_object_unref (avimux);
+ return FALSE;
+ }
+}
+
+
+static GstPad *
+gst_avi_mux_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * req_name)
+{
+ GstAviMux *avimux;
+ GstPad *newpad;
+ GstAviPad *avipad;
+ GstElementClass *klass;
+ gchar *name = NULL;
+ const gchar *pad_name = NULL;
+ GstPadSetCapsFunction setcapsfunc = NULL;
+ gint pad_id;
+
+ g_return_val_if_fail (templ != NULL, NULL);
+
+ if (templ->direction != GST_PAD_SINK)
+ goto wrong_direction;
+
+ g_return_val_if_fail (GST_IS_AVI_MUX (element), NULL);
+ avimux = GST_AVI_MUX (element);
+
+ if (!avimux->write_header)
+ goto too_late;
+
+ klass = GST_ELEMENT_GET_CLASS (element);
+
+ /* FIXME-0.11: use %d instead of %02d for pad_names */
+
+ if (templ == gst_element_class_get_pad_template (klass, "audio_%d")) {
+ /* don't mix named and unnamed pads, if the pad already exists we fail when
+ * trying to add it */
+ if (req_name != NULL && sscanf (req_name, "audio_%02d", &pad_id) == 1) {
+ pad_name = req_name;
+ } else {
+ name = g_strdup_printf ("audio_%02d", avimux->audio_pads++);
+ pad_name = name;
+ }
+ setcapsfunc = GST_DEBUG_FUNCPTR (gst_avi_mux_audsink_set_caps);
+
+ /* init pad specific data */
+ avipad = g_malloc0 (sizeof (GstAviAudioPad));
+ avipad->is_video = FALSE;
+ avipad->hdr.type = GST_MAKE_FOURCC ('a', 'u', 'd', 's');
+ /* audio goes last */
+ avimux->sinkpads = g_slist_append (avimux->sinkpads, avipad);
+ } else if (templ == gst_element_class_get_pad_template (klass, "video_%d")) {
+    /* though streams are pretty generic and relatively self-contained,
+     * some video info goes into the single avi header (and therefore the mux
+     * struct), so video is restricted to one stream */
+ if (avimux->video_pads > 0)
+ goto too_many_video_pads;
+
+ /* setup pad */
+ pad_name = "video_00";
+ avimux->video_pads++;
+ setcapsfunc = GST_DEBUG_FUNCPTR (gst_avi_mux_vidsink_set_caps);
+
+ /* init pad specific data */
+ avipad = g_malloc0 (sizeof (GstAviVideoPad));
+ avipad->is_video = TRUE;
+ avipad->hdr.type = GST_MAKE_FOURCC ('v', 'i', 'd', 's');
+ /* video goes first */
+ avimux->sinkpads = g_slist_prepend (avimux->sinkpads, avipad);
+ } else
+ goto wrong_template;
+
+ newpad = gst_pad_new_from_template (templ, pad_name);
+ gst_pad_set_setcaps_function (newpad, setcapsfunc);
+
+ g_free (name);
+
+ avipad->collect = gst_collect_pads_add_pad (avimux->collect,
+ newpad, sizeof (GstAviCollectData));
+ ((GstAviCollectData *) (avipad->collect))->avipad = avipad;
+ /* FIXME: hacked way to override/extend the event function of
+ * GstCollectPads; because it sets its own event function giving the
+ * element no access to events */
+ avimux->collect_event = (GstPadEventFunction) GST_PAD_EVENTFUNC (newpad);
+ gst_pad_set_event_function (newpad,
+ GST_DEBUG_FUNCPTR (gst_avi_mux_handle_event));
+
+ if (!gst_element_add_pad (element, newpad))
+ goto pad_add_failed;
+
+ GST_DEBUG_OBJECT (newpad, "Added new request pad");
+
+ return newpad;
+
+ /* ERRORS */
+wrong_direction:
+ {
+ g_warning ("avimux: request pad that is not a SINK pad\n");
+ return NULL;
+ }
+too_late:
+ {
+ g_warning ("avimux: request pad cannot be added after streaming started\n");
+ return NULL;
+ }
+wrong_template:
+ {
+ g_warning ("avimux: this is not our template!\n");
+ return NULL;
+ }
+too_many_video_pads:
+ {
+ GST_WARNING_OBJECT (avimux, "Can only have one video stream");
+ return NULL;
+ }
+pad_add_failed:
+ {
+ GST_WARNING_OBJECT (avimux, "Adding the new pad '%s' failed", pad_name);
+ gst_object_unref (newpad);
+ return NULL;
+ }
+}
+
+static void
+gst_avi_mux_release_pad (GstElement * element, GstPad * pad)
+{
+ GstAviMux *avimux = GST_AVI_MUX (element);
+ GSList *node;
+
+ node = avimux->sinkpads;
+ while (node) {
+ GstAviPad *avipad = (GstAviPad *) node->data;
+
+ if (avipad->collect->pad == pad) {
+      /* pad count should not be adjusted,
+       * as it also represents the number of streams present */
+ avipad->collect = NULL;
+ GST_DEBUG_OBJECT (avimux, "removed pad '%s'", GST_PAD_NAME (pad));
+ gst_collect_pads_remove_pad (avimux->collect, pad);
+ gst_element_remove_pad (element, pad);
+ /* if not started yet, we can remove any sign this pad ever existed */
+ /* in this case _start will take care of the real pad count */
+ if (avimux->write_header) {
+ avimux->sinkpads = g_slist_remove (avimux->sinkpads, avipad);
+ gst_avi_mux_pad_reset (avipad, TRUE);
+ g_free (avipad);
+ }
+ return;
+ }
+
+ node = node->next;
+ }
+
+ g_warning ("Unknown pad %s", GST_PAD_NAME (pad));
+}
+
+static inline guint
+gst_avi_mux_start_chunk (GstByteWriter * bw, const gchar * tag, guint32 fourcc)
+{
+ guint chunk_offset;
+
+ if (tag)
+ gst_byte_writer_put_data (bw, (const guint8 *) tag, 4);
+ else
+ gst_byte_writer_put_uint32_le (bw, fourcc);
+
+ chunk_offset = gst_byte_writer_get_pos (bw);
+ /* real chunk size comes later */
+ gst_byte_writer_put_uint32_le (bw, 0);
+
+ return chunk_offset;
+}
+
+static inline void
+gst_avi_mux_end_chunk (GstByteWriter * bw, guint chunk_offset)
+{
+ guint size;
+
+ size = gst_byte_writer_get_pos (bw);
+
+ gst_byte_writer_set_pos (bw, chunk_offset);
+ gst_byte_writer_put_uint32_le (bw, size - chunk_offset - 4);
+ gst_byte_writer_set_pos (bw, size);
+
+ /* arrange for even padding */
+ if (size & 1)
+ gst_byte_writer_put_uint8 (bw, 0);
+}
+
+/* maybe some of these functions should be moved to riff.h? */
+
+static void
+gst_avi_mux_write_tag (const GstTagList * list, const gchar * tag,
+ gpointer data)
+{
+ const struct
+ {
+ guint32 fcc;
+ const gchar *tag;
+ } rifftags[] = {
+ {
+ GST_RIFF_INFO_IARL, GST_TAG_LOCATION}, {
+ GST_RIFF_INFO_IART, GST_TAG_ARTIST}, {
+ GST_RIFF_INFO_ICMT, GST_TAG_COMMENT}, {
+ GST_RIFF_INFO_ICOP, GST_TAG_COPYRIGHT}, {
+ GST_RIFF_INFO_ICRD, GST_TAG_DATE}, {
+ GST_RIFF_INFO_IGNR, GST_TAG_GENRE}, {
+ GST_RIFF_INFO_IKEY, GST_TAG_KEYWORDS}, {
+ GST_RIFF_INFO_INAM, GST_TAG_TITLE}, {
+ GST_RIFF_INFO_ISFT, GST_TAG_ENCODER}, {
+ GST_RIFF_INFO_ISRC, GST_TAG_ISRC}, {
+ 0, NULL}
+ };
+ gint n;
+ gchar *str;
+ GstByteWriter *bw = data;
+ guint chunk;
+
+ for (n = 0; rifftags[n].fcc != 0; n++) {
+ if (!strcmp (rifftags[n].tag, tag) &&
+ gst_tag_list_get_string (list, tag, &str) && str) {
+ chunk = gst_avi_mux_start_chunk (bw, NULL, rifftags[n].fcc);
+ gst_byte_writer_put_string (bw, str);
+ gst_avi_mux_end_chunk (bw, chunk);
+ g_free (str);
+ break;
+ }
+ }
+}
+
+static GstBuffer *
+gst_avi_mux_riff_get_avi_header (GstAviMux * avimux)
+{
+ const GstTagList *tags;
+ GstBuffer *buffer;
+ gint size = 0;
+ GstByteWriter bw;
+ GSList *node;
+ guint avih, riff, hdrl;
+
+ GST_DEBUG_OBJECT (avimux, "creating avi header, data_size %u, idx_size %u",
+ avimux->data_size, avimux->idx_size);
+
+ if (avimux->tags_snap)
+ tags = avimux->tags_snap;
+ else {
+ /* need to make snapshot of current state of tags to ensure the same set
+ * is used next time around during header rewrite at the end */
+ tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (avimux));
+ if (tags)
+ tags = avimux->tags_snap = gst_tag_list_copy (tags);
+ }
+
+ gst_byte_writer_init_with_size (&bw, 1024, FALSE);
+
+ /* avi header metadata */
+ riff = gst_avi_mux_start_chunk (&bw, "RIFF", 0);
+ gst_byte_writer_put_data (&bw, (guint8 *) "AVI ", 4);
+ hdrl = gst_avi_mux_start_chunk (&bw, "LIST", 0);
+ gst_byte_writer_put_data (&bw, (guint8 *) "hdrl", 4);
+
+ avih = gst_avi_mux_start_chunk (&bw, "avih", 0);
+ /* the AVI header itself */
+ gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.us_frame);
+ gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.max_bps);
+ gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.pad_gran);
+ gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.flags);
+ gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.tot_frames);
+ gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.init_frames);
+ gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.streams);
+ gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.bufsize);
+ gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.width);
+ gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.height);
+ gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.scale);
+ gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.rate);
+ gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.start);
+ gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.length);
+ gst_avi_mux_end_chunk (&bw, avih);
+
+ /* stream data */
+ node = avimux->sinkpads;
+ while (node) {
+ GstAviPad *avipad = (GstAviPad *) node->data;
+ GstAviVideoPad *vidpad = (GstAviVideoPad *) avipad;
+ GstAviAudioPad *audpad = (GstAviAudioPad *) avipad;
+ gint codec_size = 0;
+ guint strh, strl, strf, indx;
+
+ /* stream list metadata */
+ strl = gst_avi_mux_start_chunk (&bw, "LIST", 0);
+ gst_byte_writer_put_data (&bw, (guint8 *) "strl", 4);
+
+ /* generic header */
+ strh = gst_avi_mux_start_chunk (&bw, "strh", 0);
+ gst_byte_writer_put_uint32_le (&bw, avipad->hdr.type);
+ gst_byte_writer_put_uint32_le (&bw, avipad->hdr.fcc_handler);
+ gst_byte_writer_put_uint32_le (&bw, avipad->hdr.flags);
+ gst_byte_writer_put_uint32_le (&bw, avipad->hdr.priority);
+ gst_byte_writer_put_uint32_le (&bw, avipad->hdr.init_frames);
+ gst_byte_writer_put_uint32_le (&bw, avipad->hdr.scale);
+ gst_byte_writer_put_uint32_le (&bw, avipad->hdr.rate);
+ gst_byte_writer_put_uint32_le (&bw, avipad->hdr.start);
+ gst_byte_writer_put_uint32_le (&bw, avipad->hdr.length);
+ gst_byte_writer_put_uint32_le (&bw, avipad->hdr.bufsize);
+ gst_byte_writer_put_uint32_le (&bw, avipad->hdr.quality);
+ gst_byte_writer_put_uint32_le (&bw, avipad->hdr.samplesize);
+ gst_byte_writer_put_uint16_le (&bw, 0);
+ gst_byte_writer_put_uint16_le (&bw, 0);
+ gst_byte_writer_put_uint16_le (&bw, 0);
+ gst_byte_writer_put_uint16_le (&bw, 0);
+ gst_avi_mux_end_chunk (&bw, strh);
+
+ if (avipad->is_video) {
+ codec_size = vidpad->vids_codec_data ?
+ GST_BUFFER_SIZE (vidpad->vids_codec_data) : 0;
+ /* the video header */
+ strf = gst_avi_mux_start_chunk (&bw, "strf", 0);
+ /* the actual header */
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vids.size + codec_size);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vids.width);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vids.height);
+ gst_byte_writer_put_uint16_le (&bw, vidpad->vids.planes);
+ gst_byte_writer_put_uint16_le (&bw, vidpad->vids.bit_cnt);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vids.compression);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vids.image_size);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vids.xpels_meter);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vids.ypels_meter);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vids.num_colors);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vids.imp_colors);
+ if (vidpad->vids_codec_data) {
+ gst_byte_writer_put_data (&bw,
+ GST_BUFFER_DATA (vidpad->vids_codec_data),
+ GST_BUFFER_SIZE (vidpad->vids_codec_data));
+ }
+ gst_avi_mux_end_chunk (&bw, strf);
+
+ /* add video property data, mainly for aspect ratio, if any */
+ if (vidpad->vprp.aspect) {
+ gint f;
+ guint vprp;
+
+ /* let's be on the safe side */
+ vidpad->vprp.fields = MIN (vidpad->vprp.fields,
+ GST_RIFF_VPRP_VIDEO_FIELDS);
+ /* the vprp header */
+ vprp = gst_avi_mux_start_chunk (&bw, "vprp", 0);
+ /* the actual data */
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.format_token);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.standard);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.vert_rate);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.hor_t_total);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.vert_lines);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.aspect);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.width);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.height);
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.fields);
+
+ for (f = 0; f < vidpad->vprp.fields; ++f) {
+ gst_riff_vprp_video_field_desc *fd;
+
+ fd = &(vidpad->vprp.field_info[f]);
+ gst_byte_writer_put_uint32_le (&bw, fd->compressed_bm_height);
+ gst_byte_writer_put_uint32_le (&bw, fd->compressed_bm_width);
+ gst_byte_writer_put_uint32_le (&bw, fd->valid_bm_height);
+ gst_byte_writer_put_uint32_le (&bw, fd->valid_bm_width);
+ gst_byte_writer_put_uint32_le (&bw, fd->valid_bm_x_offset);
+ gst_byte_writer_put_uint32_le (&bw, fd->valid_bm_y_offset);
+ gst_byte_writer_put_uint32_le (&bw, fd->video_x_t_offset);
+ gst_byte_writer_put_uint32_le (&bw, fd->video_y_start);
+ }
+ gst_avi_mux_end_chunk (&bw, vprp);
+ }
+ } else {
+ codec_size = audpad->auds_codec_data ?
+ GST_BUFFER_SIZE (audpad->auds_codec_data) : 0;
+ /* the audio header */
+ strf = gst_avi_mux_start_chunk (&bw, "strf", 0);
+ /* the actual header */
+ gst_byte_writer_put_uint16_le (&bw, audpad->auds.format);
+ gst_byte_writer_put_uint16_le (&bw, audpad->auds.channels);
+ gst_byte_writer_put_uint32_le (&bw, audpad->auds.rate);
+ gst_byte_writer_put_uint32_le (&bw, audpad->auds.av_bps);
+ gst_byte_writer_put_uint16_le (&bw, audpad->auds.blockalign);
+ gst_byte_writer_put_uint16_le (&bw, audpad->auds.size);
+ gst_byte_writer_put_uint16_le (&bw, codec_size);
+ if (audpad->auds_codec_data) {
+ gst_byte_writer_put_data (&bw,
+ GST_BUFFER_DATA (audpad->auds_codec_data),
+ GST_BUFFER_SIZE (audpad->auds_codec_data));
+ }
+ gst_avi_mux_end_chunk (&bw, strf);
+ }
+
+ /* odml superindex chunk */
+ if (avipad->idx_index > 0)
+ indx = gst_avi_mux_start_chunk (&bw, "indx", 0);
+ else
+ indx = gst_avi_mux_start_chunk (&bw, "JUNK", 0);
+ gst_byte_writer_put_uint16_le (&bw, 4); /* bytes per entry */
+ gst_byte_writer_put_uint8 (&bw, 0); /* index subtype */
+ gst_byte_writer_put_uint8 (&bw, GST_AVI_INDEX_OF_INDEXES); /* index type */
+ gst_byte_writer_put_uint32_le (&bw, avipad->idx_index); /* entries in use */
+ gst_byte_writer_put_data (&bw, (guint8 *) avipad->tag, 4); /* stream id */
+ gst_byte_writer_put_uint32_le (&bw, 0); /* reserved */
+ gst_byte_writer_put_uint32_le (&bw, 0); /* reserved */
+ gst_byte_writer_put_uint32_le (&bw, 0); /* reserved */
+ gst_byte_writer_put_data (&bw, (guint8 *) avipad->idx,
+ GST_AVI_SUPERINDEX_COUNT * sizeof (gst_avi_superindex_entry));
+ gst_avi_mux_end_chunk (&bw, indx);
+
+ /* end strl for this stream */
+ gst_avi_mux_end_chunk (&bw, strl);
+
+ node = node->next;
+ }
+
+ if (avimux->video_pads > 0) {
+ guint odml, dmlh;
+ /* odml header */
+ odml = gst_avi_mux_start_chunk (&bw, "LIST", 0);
+ gst_byte_writer_put_data (&bw, (guint8 *) "odml", 4);
+ dmlh = gst_avi_mux_start_chunk (&bw, "dmlh", 0);
+ gst_byte_writer_put_uint32_le (&bw, avimux->total_frames);
+ gst_avi_mux_end_chunk (&bw, dmlh);
+ gst_avi_mux_end_chunk (&bw, odml);
+ }
+
+ /* end hdrl */
+ gst_avi_mux_end_chunk (&bw, hdrl);
+
+ /* tags */
+ if (tags) {
+ guint info;
+
+ info = gst_avi_mux_start_chunk (&bw, "LIST", 0);
+ gst_byte_writer_put_data (&bw, (guint8 *) "INFO", 4);
+
+ gst_tag_list_foreach (tags, gst_avi_mux_write_tag, &bw);
+ if (info + 8 == gst_byte_writer_get_pos (&bw)) {
+      /* no tags written, remove the empty INFO LIST as it is useless
+       * and prevents playback in VLC */
+ gst_byte_writer_set_pos (&bw, info - 4);
+ } else {
+ gst_avi_mux_end_chunk (&bw, info);
+ }
+ }
+
+ /* pop RIFF */
+ gst_avi_mux_end_chunk (&bw, riff);
+
+ /* avi data header */
+ gst_byte_writer_put_data (&bw, (guint8 *) "LIST", 4);
+ gst_byte_writer_put_uint32_le (&bw, avimux->data_size);
+ gst_byte_writer_put_data (&bw, (guint8 *) "movi", 4);
+
+ /* now get the data */
+ buffer = gst_byte_writer_reset_and_get_buffer (&bw);
+
+ /* ... but RIFF includes more than just header */
+ size = GST_READ_UINT32_LE (GST_BUFFER_DATA (buffer) + 4);
+ size += 8 + avimux->data_size + avimux->idx_size;
+ GST_WRITE_UINT32_LE (GST_BUFFER_DATA (buffer) + 4, size);
+
+ GST_MEMDUMP_OBJECT (avimux, "avi header", GST_BUFFER_DATA (buffer),
+ GST_BUFFER_SIZE (buffer));
+
+ return buffer;
+}
+
+static GstBuffer *
+gst_avi_mux_riff_get_avix_header (guint32 datax_size)
+{
+ GstBuffer *buffer;
+ guint8 *buffdata;
+
+ buffer = gst_buffer_new_and_alloc (24);
+ buffdata = GST_BUFFER_DATA (buffer);
+
+ memcpy (buffdata + 0, "RIFF", 4);
+ GST_WRITE_UINT32_LE (buffdata + 4, datax_size + 3 * 4);
+ memcpy (buffdata + 8, "AVIX", 4);
+ memcpy (buffdata + 12, "LIST", 4);
+ GST_WRITE_UINT32_LE (buffdata + 16, datax_size);
+ memcpy (buffdata + 20, "movi", 4);
+
+ return buffer;
+}
+
+static inline GstBuffer *
+gst_avi_mux_riff_get_header (GstAviPad * avipad, guint32 video_frame_size)
+{
+ GstBuffer *buffer;
+ guint8 *buffdata;
+
+ buffer = gst_buffer_new_and_alloc (8);
+ buffdata = GST_BUFFER_DATA (buffer);
+ memcpy (buffdata + 0, avipad->tag, 4);
+ GST_WRITE_UINT32_LE (buffdata + 4, video_frame_size);
+
+ return buffer;
+}
+
+/* write an odml index chunk in the movi list */
+static GstFlowReturn
+gst_avi_mux_write_avix_index (GstAviMux * avimux, GstAviPad * avipad,
+ gchar * code, gchar * chunk, gst_avi_superindex_entry * super_index,
+ gint * super_index_count)
+{
+ GstFlowReturn res;
+ GstBuffer *buffer;
+ guint8 *buffdata, *data;
+ gst_riff_index_entry *entry;
+ gint i;
+ guint32 size, entry_count;
+ gboolean is_pcm = FALSE;
+ guint32 pcm_samples = 0;
+
+ /* check if it is pcm */
+ if (avipad && !avipad->is_video) {
+ GstAviAudioPad *audiopad = (GstAviAudioPad *) avipad;
+ if (audiopad->auds.format == GST_RIFF_WAVE_FORMAT_PCM) {
+ pcm_samples = audiopad->samples;
+ is_pcm = TRUE;
+ }
+ }
+
+ /* allocate the maximum possible */
+ buffer = gst_buffer_new_and_alloc (32 + 8 * avimux->idx_index);
+ buffdata = GST_BUFFER_DATA (buffer);
+
+ /* general index chunk info */
+ memcpy (buffdata + 0, chunk, 4); /* chunk id */
+ GST_WRITE_UINT32_LE (buffdata + 4, 0); /* chunk size; fill later */
+ GST_WRITE_UINT16_LE (buffdata + 8, 2); /* index entry is 2 words */
+ buffdata[10] = 0; /* index subtype */
+ buffdata[11] = GST_AVI_INDEX_OF_CHUNKS; /* index type: AVI_INDEX_OF_CHUNKS */
+ GST_WRITE_UINT32_LE (buffdata + 12, 0); /* entries in use; fill later */
+ memcpy (buffdata + 16, code, 4); /* stream to which index refers */
+ GST_WRITE_UINT64_LE (buffdata + 20, avimux->avix_start); /* base offset */
+ GST_WRITE_UINT32_LE (buffdata + 28, 0); /* reserved */
+ buffdata += 32;
+
+ /* now the actual index entries */
+ i = avimux->idx_index;
+ entry = avimux->idx;
+ while (i > 0) {
+ if (memcmp (&entry->id, code, 4) == 0) {
+ /* enter relative offset to the data (!) */
+ GST_WRITE_UINT32_LE (buffdata, GUINT32_FROM_LE (entry->offset) + 8);
+ /* msb is set if not (!) keyframe */
+ GST_WRITE_UINT32_LE (buffdata + 4, GUINT32_FROM_LE (entry->size)
+ | (GUINT32_FROM_LE (entry->flags)
+ & GST_RIFF_IF_KEYFRAME ? 0 : 1U << 31));
+ buffdata += 8;
+ }
+ i--;
+ entry++;
+ }
+
+ /* ok, now we know the size and no of entries, fill in where needed */
+ data = GST_BUFFER_DATA (buffer);
+ GST_BUFFER_SIZE (buffer) = size = buffdata - data;
+ GST_WRITE_UINT32_LE (data + 4, size - 8);
+ entry_count = (size - 32) / 8;
+ GST_WRITE_UINT32_LE (data + 12, entry_count);
+
+ /* decorate and send */
+ gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
+ if ((res = gst_pad_push (avimux->srcpad, buffer)) != GST_FLOW_OK)
+ return res;
+
+ /* keep track of this in superindex (if room) ... */
+ if (*super_index_count < GST_AVI_SUPERINDEX_COUNT) {
+ i = *super_index_count;
+ super_index[i].offset = GUINT64_TO_LE (avimux->total_data);
+ super_index[i].size = GUINT32_TO_LE (size);
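+    /* the superindex duration is counted in samples for PCM audio, in chunks otherwise */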
+ if (is_pcm) {
+ super_index[i].duration = GUINT32_TO_LE (pcm_samples);
+ } else {
+ super_index[i].duration = GUINT32_TO_LE (entry_count);
+ }
+ (*super_index_count)++;
+ } else
+ GST_WARNING_OBJECT (avimux, "No more room in superindex of stream %s",
+ code);
+
+ /* ... and in size */
+ avimux->total_data += size;
+ if (avimux->is_bigfile)
+ avimux->datax_size += size;
+ else
+ avimux->data_size += size;
+
+ return GST_FLOW_OK;
+}
+
+/* some other usable functions (thank you xawtv ;-) ) */
+
+static void
+gst_avi_mux_add_index (GstAviMux * avimux, GstAviPad * avipad, guint32 flags,
+ guint32 size)
+{
+ gchar *code = avipad->tag;
+ if (avimux->idx_index == avimux->idx_count) {
+ avimux->idx_count += 256;
+ avimux->idx =
+ g_realloc (avimux->idx,
+ avimux->idx_count * sizeof (gst_riff_index_entry));
+ }
+
+ /* in case of pcm audio, we need to count the number of samples for
+ * putting in the indx entries */
+ if (!avipad->is_video) {
+ GstAviAudioPad *audiopad = (GstAviAudioPad *) avipad;
+ if (audiopad->auds.format == GST_RIFF_WAVE_FORMAT_PCM) {
+ audiopad->samples += size / audiopad->auds.blockalign;
+ }
+ }
+
+ memcpy (&(avimux->idx[avimux->idx_index].id), code, 4);
+ avimux->idx[avimux->idx_index].flags = GUINT32_TO_LE (flags);
+ avimux->idx[avimux->idx_index].offset = GUINT32_TO_LE (avimux->idx_offset);
+ avimux->idx[avimux->idx_index].size = GUINT32_TO_LE (size);
+ avimux->idx_index++;
+}
+
+static GstFlowReturn
+gst_avi_mux_write_index (GstAviMux * avimux)
+{
+ GstFlowReturn res;
+ GstBuffer *buffer;
+ guint8 *buffdata;
+
+ buffer = gst_buffer_new_and_alloc (8);
+ buffdata = GST_BUFFER_DATA (buffer);
+ memcpy (buffdata + 0, "idx1", 4);
+ GST_WRITE_UINT32_LE (buffdata + 4,
+ avimux->idx_index * sizeof (gst_riff_index_entry));
+
+ gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
+ res = gst_pad_push (avimux->srcpad, buffer);
+ if (res != GST_FLOW_OK)
+ return res;
+
+ buffer = gst_buffer_new ();
+ GST_BUFFER_SIZE (buffer) = avimux->idx_index * sizeof (gst_riff_index_entry);
+ GST_BUFFER_DATA (buffer) = (guint8 *) avimux->idx;
+ GST_BUFFER_MALLOCDATA (buffer) = GST_BUFFER_DATA (buffer);
+ avimux->idx = NULL; /* will be free()'ed by gst_buffer_unref() */
+ avimux->total_data += GST_BUFFER_SIZE (buffer) + 8;
+
+ gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
+ res = gst_pad_push (avimux->srcpad, buffer);
+ if (res != GST_FLOW_OK)
+ return res;
+
+ avimux->idx_size += avimux->idx_index * sizeof (gst_riff_index_entry) + 8;
+
+ /* update header */
+ avimux->avi_hdr.flags |= GST_RIFF_AVIH_HASINDEX;
+ return GST_FLOW_OK;
+}
+
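+/* close the current RIFF chunk (write the odml index chunks and either the
+ * idx1 index or a corrected AVIX header) and, unless this is the last one,
+ * start a new OpenDML AVIX chunk so the file can grow past the classic AVI
+ * size limit */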
+static GstFlowReturn
+gst_avi_mux_bigfile (GstAviMux * avimux, gboolean last)
+{
+ GstFlowReturn res = GST_FLOW_OK;
+ GstBuffer *header;
+ GstEvent *event;
+ GSList *node;
+
+ /* first some odml standard index chunks in the movi list */
+ node = avimux->sinkpads;
+ while (node) {
+ GstAviPad *avipad = (GstAviPad *) node->data;
+
+ node = node->next;
+
+ res = gst_avi_mux_write_avix_index (avimux, avipad, avipad->tag,
+ avipad->idx_tag, avipad->idx, &avipad->idx_index);
+ if (res != GST_FLOW_OK)
+ return res;
+ }
+
+ if (avimux->is_bigfile) {
+ /* search back */
+ event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
+ avimux->avix_start, GST_CLOCK_TIME_NONE, avimux->avix_start);
+    /* assume the seek event succeeds */
+ gst_pad_push_event (avimux->srcpad, event);
+
+ /* rewrite AVIX header */
+ header = gst_avi_mux_riff_get_avix_header (avimux->datax_size);
+ gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
+ res = gst_pad_push (avimux->srcpad, header);
+
+ /* go back to current location, at least try */
+ event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
+ avimux->total_data, GST_CLOCK_TIME_NONE, avimux->total_data);
+ gst_pad_push_event (avimux->srcpad, event);
+
+ if (res != GST_FLOW_OK)
+ return res;
+ } else { /* write a standard index in the first riff chunk */
+ res = gst_avi_mux_write_index (avimux);
+ /* the index data/buffer is freed by pushing it */
+ avimux->idx_count = 0;
+ if (res != GST_FLOW_OK)
+ return res;
+ }
+
+ avimux->avix_start = avimux->total_data;
+
+ if (last)
+ return res;
+
+ avimux->is_bigfile = TRUE;
+ avimux->numx_frames = 0;
+ avimux->datax_size = 4; /* movi tag */
+ avimux->idx_index = 0;
+ node = avimux->sinkpads;
+ while (node) {
+ GstAviPad *avipad = (GstAviPad *) node->data;
+ node = node->next;
+ if (!avipad->is_video) {
+ GstAviAudioPad *audiopad = (GstAviAudioPad *) avipad;
+ audiopad->samples = 0;
+ }
+ }
+
+ header = gst_avi_mux_riff_get_avix_header (0);
+ avimux->total_data += GST_BUFFER_SIZE (header);
+ /* avix_start is used as base offset for the odml index chunk */
+ avimux->idx_offset = avimux->total_data - avimux->avix_start;
+ gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
+ return gst_pad_push (avimux->srcpad, header);
+}
+
+/* enough header blabla now, let's go on to actually writing the headers */
+
+static GstFlowReturn
+gst_avi_mux_start_file (GstAviMux * avimux)
+{
+ GstFlowReturn res;
+ GstBuffer *header;
+ GSList *node;
+ GstCaps *caps;
+
+ avimux->total_data = 0;
+ avimux->total_frames = 0;
+ avimux->data_size = 4; /* movi tag */
+ avimux->datax_size = 0;
+ avimux->num_frames = 0;
+ avimux->numx_frames = 0;
+ avimux->avix_start = 0;
+
+ avimux->idx_index = 0;
+ avimux->idx_offset = 0; /* see 10 lines below */
+ avimux->idx_size = 0;
+ avimux->idx_count = 0;
+ avimux->idx = NULL;
+
+ /* state */
+ avimux->write_header = FALSE;
+ avimux->restart = FALSE;
+
+ /* init streams, see what we've got */
+ node = avimux->sinkpads;
+ avimux->audio_pads = avimux->video_pads = 0;
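+  /* assign AVI chunk ids: NNdb for video frames, NNwb for audio data and
+   * ixNN for the matching odml index, where NN is the stream number */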
+ while (node) {
+ GstAviPad *avipad = (GstAviPad *) node->data;
+
+ node = node->next;
+
+ if (!avipad->is_video) {
+ /* audio stream numbers must start at 1 iff there is a video stream 0;
+ * request_pad inserts video pad at head of list, so this test suffices */
+ if (avimux->video_pads)
+ avimux->audio_pads++;
+ avipad->tag = g_strdup_printf ("%02uwb", avimux->audio_pads);
+ avipad->idx_tag = g_strdup_printf ("ix%02u", avimux->audio_pads);
+ if (!avimux->video_pads)
+ avimux->audio_pads++;
+ } else {
+ avipad->tag = g_strdup_printf ("%02udb", avimux->video_pads);
+ avipad->idx_tag = g_strdup_printf ("ix%02u", avimux->video_pads++);
+ }
+ }
+
+ caps = gst_caps_copy (gst_pad_get_pad_template_caps (avimux->srcpad));
+ gst_pad_set_caps (avimux->srcpad, caps);
+ gst_caps_unref (caps);
+
+ /* let downstream know we think in BYTES and expect to do seeking later on */
+ gst_pad_push_event (avimux->srcpad,
+ gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0));
+
+ /* header */
+ avimux->avi_hdr.streams = g_slist_length (avimux->sinkpads);
+ avimux->is_bigfile = FALSE;
+
+ header = gst_avi_mux_riff_get_avi_header (avimux);
+ avimux->total_data += GST_BUFFER_SIZE (header);
+
+ gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
+ res = gst_pad_push (avimux->srcpad, header);
+
+ avimux->idx_offset = avimux->total_data;
+
+ return res;
+}
+
+static GstFlowReturn
+gst_avi_mux_stop_file (GstAviMux * avimux)
+{
+ GstFlowReturn res = GST_FLOW_OK;
+ GstEvent *event;
+ GstBuffer *header;
+ GSList *node;
+
+ /* if bigfile, rewrite header, else write indexes */
+ /* don't bail out at once if error, still try to re-write header */
+ if (avimux->video_pads > 0) {
+ if (avimux->is_bigfile) {
+ res = gst_avi_mux_bigfile (avimux, TRUE);
+ } else {
+ res = gst_avi_mux_write_index (avimux);
+ }
+ }
+
+ /* we do our best to make it interleaved at least ... */
+ if (avimux->audio_pads > 0 && avimux->video_pads > 0)
+ avimux->avi_hdr.flags |= GST_RIFF_AVIH_ISINTERLEAVED;
+
+ /* set rate and everything having to do with that */
+ avimux->avi_hdr.max_bps = 0;
+ node = avimux->sinkpads;
+ while (node) {
+ GstAviPad *avipad = (GstAviPad *) node->data;
+
+ node = node->next;
+
+ if (!avipad->is_video) {
+ GstAviAudioPad *audpad = (GstAviAudioPad *) avipad;
+
+ /* calculate bps if needed */
+ if (!audpad->auds.av_bps) {
+ if (audpad->audio_time) {
+ audpad->auds.av_bps =
+ (GST_SECOND * audpad->audio_size) / audpad->audio_time;
+ /* round bps to nearest multiple of 8;
+ * which is much more likely to be the (cbr) bitrate in use;
+ * which in turn results in better timestamp calculation on playback */
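+          /* e.g. a measured 16003 bytes/sec ends up stored as 16000 */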
+ audpad->auds.av_bps = GST_ROUND_UP_8 (audpad->auds.av_bps - 4);
+ } else {
+ GST_ELEMENT_WARNING (avimux, STREAM, MUX,
+ (_("No or invalid input audio, AVI stream will be corrupt.")),
+ (NULL));
+ audpad->auds.av_bps = 0;
+ }
+ }
+ gst_avi_mux_audsink_set_fields (avimux, audpad);
+ avimux->avi_hdr.max_bps += audpad->auds.av_bps;
+ avipad->hdr.length = gst_util_uint64_scale (audpad->audio_time,
+ avipad->hdr.rate, avipad->hdr.scale * GST_SECOND);
+ } else {
+ GstAviVideoPad *vidpad = (GstAviVideoPad *) avipad;
+
+ avimux->avi_hdr.max_bps += ((vidpad->vids.bit_cnt + 7) / 8) *
+ (1000000. / avimux->avi_hdr.us_frame) * vidpad->vids.image_size;
+ avipad->hdr.length = avimux->total_frames;
+ }
+ }
+
+ /* statistics/total_frames/... */
+ avimux->avi_hdr.tot_frames = avimux->num_frames;
+
+ /* seek and rewrite the header */
+ header = gst_avi_mux_riff_get_avi_header (avimux);
+ event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
+ 0, GST_CLOCK_TIME_NONE, 0);
+ gst_pad_push_event (avimux->srcpad, event);
+
+ gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
+ /* the first error survives */
+ if (res == GST_FLOW_OK)
+ res = gst_pad_push (avimux->srcpad, header);
+ else
+ gst_pad_push (avimux->srcpad, header);
+
+ event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
+ avimux->total_data, GST_CLOCK_TIME_NONE, avimux->total_data);
+ gst_pad_push_event (avimux->srcpad, event);
+
+ avimux->write_header = TRUE;
+
+ return res;
+}
+
+static GstFlowReturn
+gst_avi_mux_restart_file (GstAviMux * avimux)
+{
+ GstFlowReturn res;
+
+ if ((res = gst_avi_mux_stop_file (avimux)) != GST_FLOW_OK)
+ return res;
+
+ gst_pad_push_event (avimux->srcpad, gst_event_new_eos ());
+
+ return gst_avi_mux_start_file (avimux);
+}
+
+/* handle events (search) */
+static gboolean
+gst_avi_mux_handle_event (GstPad * pad, GstEvent * event)
+{
+ GstAviMux *avimux;
+ gboolean ret;
+
+ avimux = GST_AVI_MUX (gst_pad_get_parent (pad));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_TAG:{
+ GstTagList *list;
+ GstTagSetter *setter = GST_TAG_SETTER (avimux);
+ const GstTagMergeMode mode = gst_tag_setter_get_tag_merge_mode (setter);
+
+ gst_event_parse_tag (event, &list);
+ gst_tag_setter_merge_tags (setter, list, mode);
+ break;
+ }
+ default:
+ break;
+ }
+
+ /* now GstCollectPads can take care of the rest, e.g. EOS */
+ ret = avimux->collect_event (pad, event);
+
+ gst_object_unref (avimux);
+
+ return ret;
+}
+
+/* send extra 'padding' data */
+static GstFlowReturn
+gst_avi_mux_send_pad_data (GstAviMux * avimux, gulong num_bytes)
+{
+ GstBuffer *buffer;
+
+ buffer = gst_buffer_new_and_alloc (num_bytes);
+ memset (GST_BUFFER_DATA (buffer), 0, num_bytes);
+ gst_buffer_set_caps (buffer, GST_PAD_CAPS (avimux->srcpad));
+ return gst_pad_push (avimux->srcpad, buffer);
+}
+
+/* mux one buffer from the given pad: write chunk header, data and padding */
+static GstFlowReturn
+gst_avi_mux_do_buffer (GstAviMux * avimux, GstAviPad * avipad)
+{
+ GstFlowReturn res;
+ GstBuffer *data, *header;
+ gulong total_size, pad_bytes = 0;
+ guint flags;
+
+ data = gst_collect_pads_pop (avimux->collect, avipad->collect);
+ /* convert the buffer timestamp to downstream running time */
+ data = gst_buffer_make_metadata_writable (data);
+ GST_BUFFER_TIMESTAMP (data) =
+ gst_segment_to_running_time (&avipad->collect->segment,
+ GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (data));
+
+ /* Prepend a special buffer to the first one for some formats */
+ if (avipad->is_video) {
+ GstAviVideoPad *vidpad = (GstAviVideoPad *) avipad;
+
+ if (vidpad->prepend_buffer) {
+ GstBuffer *newdata = gst_buffer_merge (vidpad->prepend_buffer, data);
+ gst_buffer_copy_metadata (newdata, data, GST_BUFFER_COPY_TIMESTAMPS);
+ gst_buffer_unref (data);
+ gst_buffer_unref (vidpad->prepend_buffer);
+
+ data = newdata;
+ vidpad->prepend_buffer = NULL;
+ }
+ }
+
+ if (avimux->restart) {
+ if ((res = gst_avi_mux_restart_file (avimux)) != GST_FLOW_OK)
+ return res;
+ }
+
+ /* do we need to restart the file or start the next AVIX chunk? */
+ if ((avimux->is_bigfile ? avimux->datax_size : avimux->data_size) +
+ GST_BUFFER_SIZE (data) > GST_AVI_MAX_SIZE) {
+ if (avimux->enable_large_avi) {
+ if ((res = gst_avi_mux_bigfile (avimux, FALSE)) != GST_FLOW_OK)
+ return res;
+ } else {
+ if ((res = gst_avi_mux_restart_file (avimux)) != GST_FLOW_OK)
+ return res;
+ }
+ }
+
+ /* get header and record some stats */
+ if (GST_BUFFER_SIZE (data) & 1) {
+ pad_bytes = 2 - (GST_BUFFER_SIZE (data) & 1);
+ }
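+ /* RIFF requires chunk payloads to be padded to an even size, so an
+ * odd-sized buffer gets one extra zero byte written after it */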
+ header = gst_avi_mux_riff_get_header (avipad, GST_BUFFER_SIZE (data));
+ total_size = GST_BUFFER_SIZE (header) + GST_BUFFER_SIZE (data) + pad_bytes;
+
+ if (avimux->is_bigfile) {
+ avimux->datax_size += total_size;
+ } else {
+ avimux->data_size += total_size;
+ }
+
+ if (G_UNLIKELY (avipad->hook))
+ avipad->hook (avimux, avipad, data);
+
+ /* the suggested buffer size is the max frame size */
+ if (avipad->hdr.bufsize < GST_BUFFER_SIZE (data))
+ avipad->hdr.bufsize = GST_BUFFER_SIZE (data);
+
+ if (avipad->is_video) {
+ avimux->total_frames++;
+
+ if (avimux->is_bigfile) {
+ avimux->numx_frames++;
+ } else {
+ avimux->num_frames++;
+ }
+
+ flags = 0x02;
+ if (!GST_BUFFER_FLAG_IS_SET (data, GST_BUFFER_FLAG_DELTA_UNIT))
+ flags |= 0x10;
+ } else {
+ GstAviAudioPad *audpad = (GstAviAudioPad *) avipad;
+
+ flags = 0;
+ audpad->audio_size += GST_BUFFER_SIZE (data);
+ audpad->audio_time += GST_BUFFER_DURATION (data);
+ }
+
+ gst_avi_mux_add_index (avimux, avipad, flags, GST_BUFFER_SIZE (data));
+
+ /* prepare buffers for sending */
+ gst_buffer_set_caps (header, GST_PAD_CAPS (avimux->srcpad));
+ data = gst_buffer_make_metadata_writable (data);
+ gst_buffer_set_caps (data, GST_PAD_CAPS (avimux->srcpad));
+
+ GST_LOG_OBJECT (avimux, "pushing buffers: head, data");
+
+ if ((res = gst_pad_push (avimux->srcpad, header)) != GST_FLOW_OK)
+ return res;
+ if ((res = gst_pad_push (avimux->srcpad, data)) != GST_FLOW_OK)
+ return res;
+
+ if (pad_bytes) {
+ if ((res = gst_avi_mux_send_pad_data (avimux, pad_bytes)) != GST_FLOW_OK)
+ return res;
+ }
+
+ /* if any push above fails, we're in trouble with file consistency anyway */
+ avimux->total_data += total_size;
+ avimux->idx_offset += total_size;
+
+ return res;
+}
+
+/* pick the oldest buffer from the pads and push it */
+static GstFlowReturn
+gst_avi_mux_do_one_buffer (GstAviMux * avimux)
+{
+ GstAviPad *avipad, *best_pad;
+ GSList *node;
+ GstBuffer *buffer;
+ GstClockTime time, best_time, delay;
+
+ node = avimux->sinkpads;
+ best_pad = NULL;
+ best_time = GST_CLOCK_TIME_NONE;
+ for (; node; node = node->next) {
+ avipad = (GstAviPad *) node->data;
+
+ if (!avipad->collect)
+ continue;
+
+ if (!avipad->hdr.fcc_handler)
+ goto not_negotiated;
+
+ buffer = gst_collect_pads_peek (avimux->collect, avipad->collect);
+ if (!buffer)
+ continue;
+ time = GST_BUFFER_TIMESTAMP (buffer);
+ gst_buffer_unref (buffer);
+
+ /* buffers with an invalid timestamp pass through without clipping */
+ if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (time))) {
+ time = gst_segment_to_running_time (&avipad->collect->segment,
+ GST_FORMAT_TIME, time);
+ if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (time))) {
+ GST_DEBUG_OBJECT (avimux, "clipping buffer on pad %s outside segment",
+ GST_PAD_NAME (avipad->collect->pad));
+ buffer = gst_collect_pads_pop (avimux->collect, avipad->collect);
+ gst_buffer_unref (buffer);
+ return GST_FLOW_OK;
+ }
+ }
+
+ delay = avipad->is_video ? GST_SECOND / 2 : 0;
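+ /* a video buffer's timestamp is effectively penalised by half a second
+ * below, so when audio and video buffers are close in time the audio
+ * buffer is muxed first */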
+
+ /* invalid timestamp buffers pass first,
+ * these are probably initialization buffers */
+ if (best_pad == NULL || !GST_CLOCK_TIME_IS_VALID (time)
+ || (GST_CLOCK_TIME_IS_VALID (best_time) && time + delay < best_time)) {
+ best_pad = avipad;
+ best_time = time + delay;
+ }
+ }
+
+ if (best_pad) {
+ GST_LOG_OBJECT (avimux, "selected pad %s with time %" GST_TIME_FORMAT,
+ GST_PAD_NAME (best_pad->collect->pad), GST_TIME_ARGS (best_time));
+
+ return gst_avi_mux_do_buffer (avimux, best_pad);
+ } else {
+ /* simply finish off the file and send EOS */
+ gst_avi_mux_stop_file (avimux);
+ gst_pad_push_event (avimux->srcpad, gst_event_new_eos ());
+ return GST_FLOW_UNEXPECTED;
+ }
+
+ /* ERRORS */
+not_negotiated:
+ {
+ GST_ELEMENT_ERROR (avimux, CORE, NEGOTIATION, (NULL),
+ ("pad %s not negotiated", GST_PAD_NAME (avipad->collect->pad)));
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+}
+
+static GstFlowReturn
+gst_avi_mux_collect_pads (GstCollectPads * pads, GstAviMux * avimux)
+{
+ GstFlowReturn res;
+
+ if (G_UNLIKELY (avimux->write_header)) {
+ if ((res = gst_avi_mux_start_file (avimux)) != GST_FLOW_OK)
+ return res;
+ }
+
+ return gst_avi_mux_do_one_buffer (avimux);
+}
+
+
+static void
+gst_avi_mux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstAviMux *avimux;
+
+ avimux = GST_AVI_MUX (object);
+
+ switch (prop_id) {
+ case ARG_BIGFILE:
+ g_value_set_boolean (value, avimux->enable_large_avi);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_avi_mux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstAviMux *avimux;
+
+ avimux = GST_AVI_MUX (object);
+
+ switch (prop_id) {
+ case ARG_BIGFILE:
+ avimux->enable_large_avi = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static GstStateChangeReturn
+gst_avi_mux_change_state (GstElement * element, GstStateChange transition)
+{
+ GstAviMux *avimux;
+ GstStateChangeReturn ret;
+
+ avimux = GST_AVI_MUX (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_collect_pads_start (avimux->collect);
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_collect_pads_stop (avimux->collect);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ goto done;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_avi_mux_reset (avimux);
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+
+done:
+ return ret;
+}
diff --git a/gst/avi/gstavimux.h b/gst/avi/gstavimux.h
new file mode 100644
index 0000000..0d90392
--- /dev/null
+++ b/gst/avi/gstavimux.h
@@ -0,0 +1,197 @@
+/* AVI muxer plugin for GStreamer
+ * Copyright (C) 2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#ifndef __GST_AVI_MUX_H__
+#define __GST_AVI_MUX_H__
+
+
+#include <gst/gst.h>
+#include <gst/base/gstcollectpads.h>
+#include <gst/riff/riff-ids.h>
+#include "avi-ids.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AVI_MUX \
+ (gst_avi_mux_get_type())
+#define GST_AVI_MUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AVI_MUX,GstAviMux))
+#define GST_AVI_MUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_AVI_MUX,GstAviMuxClass))
+#define GST_IS_AVI_MUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AVI_MUX))
+#define GST_IS_AVI_MUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AVI_MUX))
+
+#define GST_AVI_INDEX_OF_INDEXES 0
+#define GST_AVI_INDEX_OF_CHUNKS 1
+
+/* this allows indexing AVI files of up to 64GB */
+#define GST_AVI_SUPERINDEX_COUNT 32
+
+/* max size of the data in a single AVI/AVIX RIFF chunk (1GB) */
+#define GST_AVI_MAX_SIZE 0x40000000
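+/* once the current chunk would grow beyond this, the muxer either starts a
+ * new AVIX chunk (ODML, if enable_large_avi is set) or restarts the file
+ * with a fresh header */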
+
+typedef struct _gst_avi_superindex_entry {
+ guint64 offset;
+ guint32 size;
+ guint32 duration;
+} gst_avi_superindex_entry;
+
+typedef struct _gst_riff_strh_full {
+ gst_riff_strh parent;
+ /* rcFrame, RECT structure (struct of 4 shorts) */
+ gint16 left;
+ gint16 top;
+ gint16 right;
+ gint16 bottom;
+} gst_riff_strh_full;
+
+typedef struct _GstAviPad GstAviPad;
+typedef struct _GstAviMux GstAviMux;
+typedef struct _GstAviMuxClass GstAviMuxClass;
+
+typedef GstFlowReturn (*GstAviPadHook) (GstAviMux * avi, GstAviPad * avipad,
+ GstBuffer * buffer);
+
+struct _GstAviPad {
+ /* do not extend, link to it */
+ /* is NULL if original sink request pad has been removed */
+ GstCollectData *collect;
+
+ /* type */
+ gboolean is_video;
+ gboolean connected;
+
+ /* chunk tag */
+ gchar *tag;
+
+ /* stream header */
+ gst_riff_strh hdr;
+
+ /* odml super indexes */
+ gst_avi_superindex_entry idx[GST_AVI_SUPERINDEX_COUNT];
+ gint idx_index;
+ gchar *idx_tag;
+
+ /* stream specific hook */
+ GstAviPadHook hook;
+};
+
+typedef struct _GstAviVideoPad {
+ GstAviPad parent;
+
+ /* stream format */
+ gst_riff_strf_vids vids;
+ /* extra data */
+ GstBuffer *vids_codec_data;
+ /* ODML video properties */
+ gst_riff_vprp vprp;
+
+ GstBuffer *prepend_buffer;
+
+} GstAviVideoPad;
+
+typedef struct _GstAviAudioPad {
+ GstAviPad parent;
+
+ /* stream format */
+ gst_riff_strf_auds auds;
+ /* audio info for bps calculation */
+ guint32 audio_size;
+ guint64 audio_time;
+
+ /* counts the number of samples to put in the indx chunk; useful for raw
+ * audio, where there is usually more than one sample per GstBuffer */
+ gint samples;
+
+ /* extra data */
+ GstBuffer *auds_codec_data;
+} GstAviAudioPad;
+
+typedef struct _GstAviCollectData {
+ /* extend the CollectData */
+ GstCollectData collect;
+
+ GstAviPad *avipad;
+} GstAviCollectData;
+
+struct _GstAviMux {
+ GstElement element;
+
+ /* pads */
+ GstPad *srcpad;
+ /* sinkpads, video first */
+ GSList *sinkpads;
+ /* video restricted to 1 pad */
+ guint video_pads, audio_pads;
+ GstCollectPads *collect;
+ GstPadEventFunction collect_event;
+
+ /* the AVI header */
+ /* there is still some single-stream video data kept in the mux struct */
+ gst_riff_avih avi_hdr;
+ /* total number of (video) frames */
+ guint32 total_frames;
+ /* amount of total data (bytes) */
+ guint64 total_data;
+ /* amount of data (bytes) in the AVI/AVIX block;
+ * actually the movi list, so counted from and including the movi tag */
+ guint32 data_size, datax_size;
+ /* num (video) frames in the AVI/AVIX block */
+ guint32 num_frames, numx_frames;
+ /* size of hdrl list, including tag as usual */
+
+ /* total size of extra codec data */
+ guint32 codec_data_size;
+ /* state info */
+ gboolean write_header;
+ gboolean restart;
+
+ /* tags */
+ GstTagList *tags_snap;
+
+ /* information about the AVI index ('idx') */
+ gst_riff_index_entry *idx;
+ gint idx_index, idx_count;
+ /* offset of *chunk* (relative to a base offset); entered in the index */
+ guint32 idx_offset;
+ /* size of the idx1 chunk (note: including the chunk header and size bytes) */
+ guint32 idx_size;
+
+ /* are we a big file already? */
+ gboolean is_bigfile;
+ guint64 avix_start;
+
+ /* whether to use "large AVI files" or just stick to small indexed files */
+ gboolean enable_large_avi;
+};
+
+struct _GstAviMuxClass {
+ GstElementClass parent_class;
+};
+
+GType gst_avi_mux_get_type(void);
+
+G_END_DECLS
+
+
+#endif /* __GST_AVI_MUX_H__ */
diff --git a/gst/avi/gstavisubtitle.c b/gst/avi/gstavisubtitle.c
new file mode 100644
index 0000000..ad1cd90
--- /dev/null
+++ b/gst/avi/gstavisubtitle.c
@@ -0,0 +1,383 @@
+/* GStreamer AVI GAB2 subtitle parser
+ * Copyright (C) <2007> Thijs Vermeir <thijsvermeir@gmail.com>
+ * Copyright (C) <2007> Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:element-avisubtitle
+ *
+ * <refsect2>
+ * <para>
+ * Parses the subtitle stream from an avi file.
+ * </para>
+ * <title>Example launch line</title>
+ * <para>
+ * <programlisting>
+ * gst-launch filesrc location=subtitle.avi ! avidemux name=demux ! queue ! avisubtitle ! subparse ! textoverlay name=overlay ! ffmpegcolorspace ! autovideosink demux. ! queue ! decodebin ! overlay.
+ * </programlisting>
+ * This plays an avi file with a video and subtitle stream.
+ * </para>
+ * </refsect2>
+ *
+ * Last reviewed on 2008-02-01
+ */
+
+/* example of a subtitle chunk in an avi file
+ * 00000000: 47 41 42 32 00 02 00 10 00 00 00 45 00 6e 00 67 GAB2.......E.n.g
+ * 00000010: 00 6c 00 69 00 73 00 68 00 00 00 04 00 8e 00 00 .l.i.s.h........
+ * 00000020: 00 ef bb bf 31 0d 0a 30 30 3a 30 30 3a 30 30 2c ....1..00:00:00,
+ * 00000030: 31 30 30 20 2d 2d 3e 20 30 30 3a 30 30 3a 30 32 100 --> 00:00:02
+ * 00000040: 2c 30 30 30 0d 0a 3c 62 3e 41 6e 20 55 54 46 38 ,000..<b>An UTF8
+ * 00000050: 20 53 75 62 74 69 74 6c 65 20 77 69 74 68 20 42 Subtitle with B
+ * 00000060: 4f 4d 3c 2f 62 3e 0d 0a 0d 0a 32 0d 0a 30 30 3a OM</b>....2..00:
+ * 00000070: 30 30 3a 30 32 2c 31 30 30 20 2d 2d 3e 20 30 30 00:02,100 --> 00
+ * 00000080: 3a 30 30 3a 30 34 2c 30 30 30 0d 0a 53 6f 6d 65 :00:04,000..Some
+ * 00000090: 74 68 69 6e 67 20 6e 6f 6e 41 53 43 49 49 20 2d thing nonASCII -
+ * 000000a0: 20 c2 b5 c3 b6 c3 a4 c3 bc c3 9f 0d 0a 0d 0a ..............
+ */
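+
+/* GAB2 chunk layout, as parsed in gst_avi_subtitle_parse_gab2_chunk() below:
+ *    5 bytes   "GAB2\0" magic
+ *    2 bytes   0x0002 (little endian)
+ *    4 bytes   length of the stream name (little endian)
+ *    N bytes   stream name, UTF-16LE (e.g. "English" in the dump above)
+ *    2 bytes   0x0004 (little endian)
+ *    4 bytes   length of the srt/ssa file (little endian)
+ *    M bytes   the subtitle file itself, possibly followed by padding
+ */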
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include "gstavisubtitle.h"
+
+GST_DEBUG_CATEGORY_STATIC (avisubtitle_debug);
+#define GST_CAT_DEFAULT avisubtitle_debug
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-subtitle-avi")
+ );
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-subtitle")
+ );
+
+static void gst_avi_subtitle_title_tag (GstAviSubtitle * sub, gchar * title);
+static GstFlowReturn gst_avi_subtitle_chain (GstPad * pad, GstBuffer * buffer);
+static GstStateChangeReturn gst_avi_subtitle_change_state (GstElement * element,
+ GstStateChange transition);
+static gboolean gst_avi_subtitle_send_event (GstElement * element,
+ GstEvent * event);
+
+GST_BOILERPLATE (GstAviSubtitle, gst_avi_subtitle, GstElement,
+ GST_TYPE_ELEMENT);
+
+#define IS_BOM_UTF8(data) ((GST_READ_UINT32_BE(data) >> 8) == 0xEFBBBF)
+#define IS_BOM_UTF16_BE(data) (GST_READ_UINT16_BE(data) == 0xFEFF)
+#define IS_BOM_UTF16_LE(data) (GST_READ_UINT16_LE(data) == 0xFEFF)
+#define IS_BOM_UTF32_BE(data) (GST_READ_UINT32_BE(data) == 0xFEFF)
+#define IS_BOM_UTF32_LE(data) (GST_READ_UINT32_LE(data) == 0xFEFF)
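+
+/* for reference, the BOM byte sequences these macros match are:
+ * UTF-8: EF BB BF, UTF-16BE: FE FF, UTF-16LE: FF FE,
+ * UTF-32BE: 00 00 FE FF, UTF-32LE: FF FE 00 00.
+ * e.g. reading EF BB BF 31 as a big-endian 32-bit value gives 0xEFBBBF31,
+ * and shifting right by 8 leaves 0x00EFBBBF, which is what IS_BOM_UTF8
+ * compares against. */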
+
+static GstBuffer *
+gst_avi_subtitle_extract_file (GstAviSubtitle * sub, GstBuffer * buffer,
+ guint offset, guint len)
+{
+ const gchar *input_enc = NULL;
+ GstBuffer *ret = NULL;
+ gchar *data;
+
+ data = (gchar *) GST_BUFFER_DATA (buffer) + offset;
+
+ if (len >= (3 + 1) && IS_BOM_UTF8 (data) &&
+ g_utf8_validate (data + 3, len - 3, NULL)) {
+ ret = gst_buffer_create_sub (buffer, offset + 3, len - 3);
+ } else if (len >= 2 && IS_BOM_UTF16_BE (data)) {
+ input_enc = "UTF-16BE";
+ data += 2;
+ len -= 2;
+ } else if (len >= 2 && IS_BOM_UTF16_LE (data)) {
+ input_enc = "UTF-16LE";
+ data += 2;
+ len -= 2;
+ } else if (len >= 4 && IS_BOM_UTF32_BE (data)) {
+ input_enc = "UTF-32BE";
+ data += 4;
+ len -= 4;
+ } else if (len >= 4 && IS_BOM_UTF32_LE (data)) {
+ input_enc = "UTF-32LE";
+ data += 4;
+ len -= 4;
+ } else if (g_utf8_validate (data, len, NULL)) {
+ /* not specified, check if it's UTF-8 */
+ ret = gst_buffer_create_sub (buffer, offset, len);
+ } else {
+ /* we could fall back to gst_tag_freeform_to_utf8() here */
+ GST_WARNING_OBJECT (sub, "unspecified encoding, and not UTF-8");
+ return NULL;
+ }
+
+ g_return_val_if_fail (ret != NULL || input_enc != NULL, NULL);
+
+ if (input_enc) {
+ GError *err = NULL;
+ gchar *utf8;
+
+ GST_DEBUG_OBJECT (sub, "converting subtitles from %s to UTF-8", input_enc);
+ utf8 = g_convert (data, len, "UTF-8", input_enc, NULL, NULL, &err);
+
+ if (err != NULL) {
+ GST_WARNING_OBJECT (sub, "conversion to UTF-8 failed : %s", err->message);
+ g_error_free (err);
+ return NULL;
+ }
+
+ ret = gst_buffer_new ();
+ GST_BUFFER_DATA (ret) = (guint8 *) utf8;
+ GST_BUFFER_MALLOCDATA (ret) = (guint8 *) utf8;
+ GST_BUFFER_SIZE (ret) = strlen (utf8);
+ GST_BUFFER_OFFSET (ret) = 0;
+ }
+
+ GST_BUFFER_CAPS (ret) = gst_caps_new_simple ("application/x-subtitle", NULL);
+ return ret;
+}
+
+/**
+ * gst_avi_subtitle_title_tag:
+ * @sub: subtitle element
+ * @title: the title of this subtitle stream
+ *
+ * Sends a tag event on the srcpad of the @sub element with the title
+ * of the subtitle stream as GST_TAG_TITLE.
+ */
+static void
+gst_avi_subtitle_title_tag (GstAviSubtitle * sub, gchar * title)
+{
+ GstTagList *temp_list = gst_tag_list_new ();
+
+ gst_tag_list_add (temp_list, GST_TAG_MERGE_APPEND, GST_TAG_TITLE, title,
+ NULL);
+ gst_pad_push_event (sub->src, gst_event_new_tag (temp_list));
+}
+
+static GstFlowReturn
+gst_avi_subtitle_parse_gab2_chunk (GstAviSubtitle * sub, GstBuffer * buf)
+{
+ const guint8 *data;
+ gchar *name_utf8;
+ guint name_length;
+ guint file_length;
+ guint size;
+
+ data = GST_BUFFER_DATA (buf);
+ size = GST_BUFFER_SIZE (buf);
+
+ /* check the magic word "GAB2\0", and the next word must be 2 */
+ if (size < 12 || memcmp (data, "GAB2\0\2\0", 5 + 2) != 0)
+ goto wrong_magic_word;
+
+ /* read 'name' of subtitle */
+ name_length = GST_READ_UINT32_LE (data + 5 + 2);
+ GST_LOG_OBJECT (sub, "length of name: %u", name_length);
+ if (size <= 17 + name_length)
+ goto wrong_name_length;
+
+ name_utf8 = g_convert ((gchar *) data + 11, name_length, "UTF-8", "UTF-16LE",
+ NULL, NULL, NULL);
+
+ if (name_utf8) {
+ GST_LOG_OBJECT (sub, "subtitle name: %s", name_utf8);
+ gst_avi_subtitle_title_tag (sub, name_utf8);
+ g_free (name_utf8);
+ }
+
+ /* next word must be 4 */
+ if (GST_READ_UINT16_LE (data + 11 + name_length) != 0x4)
+ goto wrong_fixed_word_2;
+
+ file_length = GST_READ_UINT32_LE (data + 13 + name_length);
+ GST_LOG_OBJECT (sub, "length srt/ssa file: %u", file_length);
+
+ if (size < (17 + name_length + file_length))
+ goto wrong_total_length;
+
+ /* store this, so we can send it again after a seek; note that we shouldn't
+ * assume all the remaining data in the chunk is subtitle data, there may
+ * be padding at the end for some reason, so only parse file_length bytes */
+ sub->subfile =
+ gst_avi_subtitle_extract_file (sub, buf, 17 + name_length, file_length);
+
+ if (sub->subfile == NULL)
+ goto extract_failed;
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+wrong_magic_word:
+ {
+ GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL), ("Wrong magic word"));
+ return GST_FLOW_ERROR;
+ }
+wrong_name_length:
+ {
+ GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
+ ("name doesn't fit in buffer (%d < %d)", size, 17 + name_length));
+ return GST_FLOW_ERROR;
+ }
+wrong_fixed_word_2:
+ {
+ GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
+ ("wrong fixed word: expected %u, got %u", 4,
+ GST_READ_UINT16_LE (data + 11 + name_length)));
+ return GST_FLOW_ERROR;
+ }
+wrong_total_length:
+ {
+ GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
+ ("buffer size is wrong: need %d bytes, have %d bytes",
+ 17 + name_length + file_length, size));
+ return GST_FLOW_ERROR;
+ }
+extract_failed:
+ {
+ GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
+ ("could not extract subtitles"));
+ return GST_FLOW_ERROR;
+ }
+}
+
+static GstFlowReturn
+gst_avi_subtitle_chain (GstPad * pad, GstBuffer * buffer)
+{
+ GstAviSubtitle *sub = GST_AVI_SUBTITLE (GST_PAD_PARENT (pad));
+ GstFlowReturn ret;
+
+ if (sub->subfile != NULL) {
+ GST_WARNING_OBJECT (sub, "Got more buffers than expected, dropping");
+ ret = GST_FLOW_UNEXPECTED;
+ goto done;
+ }
+
+ /* we expect exactly one buffer with the whole srt/ssa file in it */
+ ret = gst_avi_subtitle_parse_gab2_chunk (sub, buffer);
+ if (ret != GST_FLOW_OK)
+ goto done;
+
+ /* now push the subtitle data downstream */
+ ret = gst_pad_push (sub->src, gst_buffer_ref (sub->subfile));
+
+done:
+
+ gst_buffer_unref (buffer);
+ return ret;
+}
+
+static gboolean
+gst_avi_subtitle_send_event (GstElement * element, GstEvent * event)
+{
+ GstAviSubtitle *avisubtitle = GST_AVI_SUBTITLE (element);
+ gboolean ret = FALSE;
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK) {
+ if (avisubtitle->subfile) {
+ if (gst_pad_push (avisubtitle->src,
+ gst_buffer_ref (avisubtitle->subfile)) == GST_FLOW_OK)
+ ret = TRUE;
+ }
+ }
+ gst_event_unref (event);
+ return ret;
+}
+
+static void
+gst_avi_subtitle_base_init (gpointer klass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (avisubtitle_debug, "avisubtitle", 0,
+ "parse avi subtitle stream");
+
+ gst_element_class_add_static_pad_template (element_class,
+ &sink_template);
+ gst_element_class_add_static_pad_template (element_class, &src_template);
+
+ gst_element_class_set_details_simple (element_class,
+ "Avi subtitle parser", "Codec/Parser/Subtitle",
+ "Parse avi subtitle stream", "Thijs Vermeir <thijsvermeir@gmail.com>");
+}
+
+static void
+gst_avi_subtitle_class_init (GstAviSubtitleClass * klass)
+{
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_avi_subtitle_change_state);
+ gstelement_class->send_event =
+ GST_DEBUG_FUNCPTR (gst_avi_subtitle_send_event);
+}
+
+static void
+gst_avi_subtitle_init (GstAviSubtitle * self, GstAviSubtitleClass * klass)
+{
+ GstCaps *caps;
+
+ self->src = gst_pad_new_from_static_template (&src_template, "src");
+ gst_element_add_pad (GST_ELEMENT (self), self->src);
+
+ self->sink = gst_pad_new_from_static_template (&sink_template, "sink");
+ gst_pad_set_chain_function (self->sink,
+ GST_DEBUG_FUNCPTR (gst_avi_subtitle_chain));
+
+ caps = gst_static_pad_template_get_caps (&src_template);
+ gst_pad_set_caps (self->src, caps);
+ gst_caps_unref (caps);
+
+ gst_pad_use_fixed_caps (self->src);
+ gst_element_add_pad (GST_ELEMENT (self), self->sink);
+
+ self->subfile = NULL;
+}
+
+static GstStateChangeReturn
+gst_avi_subtitle_change_state (GstElement * element, GstStateChange transition)
+{
+ GstStateChangeReturn ret;
+ GstAviSubtitle *sub = GST_AVI_SUBTITLE (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ return ret;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ if (sub->subfile) {
+ gst_buffer_unref (sub->subfile);
+ sub->subfile = NULL;
+ }
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
diff --git a/gst/avi/gstavisubtitle.h b/gst/avi/gstavisubtitle.h
new file mode 100644
index 0000000..0f8048e
--- /dev/null
+++ b/gst/avi/gstavisubtitle.h
@@ -0,0 +1,39 @@
+
+#ifndef __GSTAVISUBTITLE_H__
+#define __GSTAVISUBTITLE_H__
+
+#include <glib.h>
+#include <glib-object.h>
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstAviSubtitle GstAviSubtitle;
+typedef struct _GstAviSubtitleClass GstAviSubtitleClass;
+
+#define GST_TYPE_AVI_SUBTITLE (gst_avi_subtitle_get_type ())
+#define GST_AVI_SUBTITLE(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_AVI_SUBTITLE, GstAviSubtitle))
+#define GST_AVI_SUBTITLE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AVI_SUBTITLE, GstAviSubtitleClass))
+#define GST_IS_AVI_SUBTITLE(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AVI_SUBTITLE))
+#define GST_IS_AVI_SUBTITLE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_AVI_SUBTITLE))
+#define GST_AVI_SUBTITLE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_AVI_SUBTITLE, GstAviSubtitleClass))
+
+GType gst_avi_subtitle_get_type (void);
+
+struct _GstAviSubtitle
+{
+ GstElement parent;
+
+ GstPad *src;
+ GstPad *sink;
+
+ GstBuffer *subfile; /* the complete subtitle file in one buffer */
+};
+
+struct _GstAviSubtitleClass
+{
+ GstElementClass parent;
+};
+
+G_END_DECLS
+#endif