cedric pushed a commit to branch master.

http://git.enlightenment.org/core/efl.git/commit/?id=b8dc80c144fec54a521987535c57b995748ccece

commit b8dc80c144fec54a521987535c57b995748ccece
Author: Stefan Schmidt <[email protected]>
Date:   Wed Oct 30 17:09:20 2019 +0100

    emotion & evas: remove gstreamer 0.10 support
    
    We have had GStreamer 1.x support for a long time already and only kept
    the 0.10 code around as a fallback. By now Linux distributions have
    started to drop the GStreamer 0.10.x packages entirely, and upstream has
    not seen a release in 5 years. Time to remove it on our side as well.
    
    Signed-off-by: Stefan Schmidt <[email protected]>
    Reviewed-by: Cedric BAIL <[email protected]>
    Differential Revision: https://phab.enlightenment.org/D10779
---
 .ci/ci-configure.sh                               |    6 +-
 README                                            |    2 +-
 meson_options.txt                                 |    4 +-
 src/generic/evas/gst/main_0_10.c                  |  281 ---
 src/generic/evas/gst/meson.build                  |   10 +-
 src/lib/emotion/emotion_modules.c                 |    3 -
 src/modules/emotion/gstreamer/emotion_alloc.c     |   90 -
 src/modules/emotion/gstreamer/emotion_convert.c   |  251 ---
 src/modules/emotion/gstreamer/emotion_fakeeos.c   |   70 -
 src/modules/emotion/gstreamer/emotion_gstreamer.c | 2018 ---------------------
 src/modules/emotion/gstreamer/emotion_gstreamer.h |  352 ----
 src/modules/emotion/gstreamer/emotion_sink.c      | 1461 ---------------
 src/modules/emotion/gstreamer/meson.build         |   18 -
 src/modules/emotion/meson.build                   |    1 -
 14 files changed, 7 insertions(+), 4560 deletions(-)

diff --git a/.ci/ci-configure.sh b/.ci/ci-configure.sh
index 88c7f414e3..06478d1960 100755
--- a/.ci/ci-configure.sh
+++ b/.ci/ci-configure.sh
@@ -19,7 +19,7 @@ if [ "$DISTRO" != "" ] ; then
   # - RPM fusion repo for xine and libvlc
   ENABLED_LINUX_COPTS=" -Dfb=true -Dsdl=true -Dbuffer=true 
-Dbuild-id=travis-build \
   -Ddebug-threads=true -Dglib=true -Dg-mainloop=true -Dxpresent=true 
-Dxgesture=false -Dxinput22=true \
-  -Devas-loaders-disabler=json -Decore-imf-loaders-disabler= 
-Demotion-loaders-disabler=gstreamer,libvlc,xine \
+  -Devas-loaders-disabler=json -Decore-imf-loaders-disabler= 
-Demotion-loaders-disabler=libvlc,xine \
   -Demotion-generic-loaders-disabler=vlc -Dharfbuzz=true -Dpixman=true 
-Dhyphen=true \
   -Dvnc-server=true -Dbindings=luajit -Delogind=false -Dinstall-eo-files=true 
-Dphysics=true"
 
@@ -29,7 +29,7 @@ if [ "$DISTRO" != "" ] ; then
  -Dcrypto=gnutls -Dglib=false -Dgstreamer=false -Dsystemd=false -Dpulseaudio=false \
  -Dnetwork-backend=connman -Dxinput2=false -Dtslib=false \
  -Devas-loaders-disabler=gst,pdf,ps,raw,svg,xcf,bmp,dds,eet,generic,gif,ico,jp2k,json,pmaps,psd,tga,tgv,tiff,wbmp,webp,xpm \
-  -Decore-imf-loaders-disabler=xim,ibus,scim  -Demotion-loaders-disabler=gstreamer,gstreamer1,libvlc,xine \
+  -Decore-imf-loaders-disabler=xim,ibus,scim  -Demotion-loaders-disabler=gstreamer1,libvlc,xine \
  -Demotion-generic-loaders-disabler=vlc -Dfribidi=false -Dfontconfig=false \
  -Dedje-sound-and-video=false -Dembedded-lz4=false -Dlibmount=false -Dv4l2=false \
  -Delua=true -Dnls=false -Dbindings= -Dlua-interpreter=luajit -Dnative-arch-optimization=false"
@@ -109,6 +109,6 @@ else
  export PKG_CONFIG_PATH="/usr/local/opt/openssl/lib/pkgconfig:/usr/local/Cellar/libffi/$LIBFFI_VER/lib/pkgconfig"
   export CC="ccache gcc"
   travis_fold meson meson
-  mkdir build && meson build -Dopengl=full -Decore-imf-loaders-disabler=scim,ibus -Dx11=false -Davahi=false -Dbindings=luajit -Deeze=false -Dsystemd=false -Dnls=false -Dcocoa=true -Demotion-loaders-disabler=gstreamer,gstreamer1,libvlc,xine
+  mkdir build && meson build -Dopengl=full -Decore-imf-loaders-disabler=scim,ibus -Dx11=false -Davahi=false -Dbindings=luajit -Deeze=false -Dsystemd=false -Dnls=false -Dcocoa=true -Demotion-loaders-disabler=gstreamer1,libvlc,xine
   travis_endfold meson
 fi
diff --git a/README b/README
index 5fd277ede7..83ef671d29 100644
--- a/README
+++ b/README
@@ -426,7 +426,7 @@ Required by default:
   * bullet
   * libpng
   * libjpeg
-  * gstreamer (1.x, 0.10 support optional. Ensure all codecs you want are installed.)
+  * gstreamer (Ensure all codecs you want are installed.)
   * zlib
   * luajit (lua 5.1 or 5.2 support optional)
   * libtiff
diff --git a/meson_options.txt b/meson_options.txt
index 784bdf9a27..be7f8addad 100644
--- a/meson_options.txt
+++ b/meson_options.txt
@@ -209,8 +209,8 @@ option('ecore-imf-loaders-disabler',
 option('emotion-loaders-disabler',
   type : 'array',
   description : 'List of video back-ends to disable in efl',
-  choices : ['gstreamer', 'gstreamer1', 'libvlc', 'xine'],
-  value : ['gstreamer', 'libvlc', 'xine']
+  choices : ['gstreamer1', 'libvlc', 'xine'],
+  value : ['libvlc', 'xine']
 )
 
 option('emotion-generic-loaders-disabler',
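
With the 0.10 backend gone, emotion-loaders-disabler above is the only remaining switch for choosing video backends at build time; its default value now disables only libvlc and xine, so gstreamer1 stays enabled out of the box. A minimal sketch of the two common configurations, mirroring the CI invocations earlier in this patch (the "build" directory name is just what the CI script happens to use):

  # keep the GStreamer 1.x backend, disable libvlc and xine (matches the new default)
  meson build -Demotion-loaders-disabler=libvlc,xine

  # disable every emotion video backend, e.g. for a minimal build
  meson build -Demotion-loaders-disabler=gstreamer1,libvlc,xine
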
diff --git a/src/generic/evas/gst/main_0_10.c b/src/generic/evas/gst/main_0_10.c
deleted file mode 100644
index 8a197c4208..0000000000
--- a/src/generic/evas/gst/main_0_10.c
+++ /dev/null
@@ -1,281 +0,0 @@
-#ifdef HAVE_CONFIG_H
-# include <config.h>
-#endif
-
-#include <fcntl.h>
-#include <unistd.h>
-
-#include <gst/gst.h>
-
-#include <Eina.h>
-
-#include "shmfile.h"
-#include "timeout.h"
-
-#define DATA32  unsigned int
-
-//#define GST_DBG
-
-#ifdef GST_DBG
-#define D(fmt, args...) fprintf(stderr, fmt, ## args)
-#else
-#define D(fmt, args...)
-#endif
-
-#define CAPS "video/x-raw-rgb,bpp=(int)32,depth=(int)32,endianness=(int)4321,red_mask=(int)0x0000ff00, green_mask=(int)0x00ff0000, blue_mask=(int)0xff000000"
-
-static GstElement *pipeline = NULL;
-static GstElement *sink = NULL;
-static gint64      duration = -1;
-
-int   width = 0;
-int   height = 0;
-void *data = NULL;
-
-
-static Eina_Bool
-_gst_init(const char *filename)
-{
-   GstPad              *pad;
-   GstCaps             *caps;
-   GstStructure        *structure;
-   gchar               *descr;
-   gchar               *uri;
-   GError              *error = NULL;
-   GstFormat            format;
-   GstStateChangeReturn ret;
-//   int                  vidstr = 0;
-
-   if (!filename || !*filename)
-     return EINA_FALSE;
-
-   if (!gst_init_check(NULL, NULL, &error))
-     return EINA_FALSE;
-
-   if ((*filename == '/') || (*filename == '~'))
-     {
-        uri = g_filename_to_uri(filename, NULL, NULL);
-        if (!uri)
-          {
-             D("could not create new uri from %s", filename);
-             goto unref_pipeline;
-          }
-     }
-   else
-     uri = strdup(filename);
-
-   D("Setting file %s\n", uri);
-
-   descr = g_strdup_printf("uridecodebin uri=%s ! typefind ! ffmpegcolorspace ! "
-      " appsink name=sink caps=\"" CAPS "\"", uri);
-   pipeline = gst_parse_launch(descr, &error);
-   free(uri);
-
-   if (error != NULL)
-     {
-        D("could not construct pipeline: %s\n", error->message);
-        g_error_free (error);
-        goto gst_shutdown;
-     }
-/* needs gst 1.0+
- * also only works on playbin objects!!! this is a uridecodebin!
-   g_object_get(G_OBJECT(pipeline),
-                "n-video", &vidstr,
-                NULL);
-   if (vidstr <= 0)
-     {
-        D("no video stream\n");
-        goto gst_shutdown;
-     }
-*/
-   sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");
-
-   ret = gst_element_set_state (pipeline, GST_STATE_PAUSED);
-   switch (ret)
-     {
-     case GST_STATE_CHANGE_FAILURE:
-        D("failed to play the file\n");
-        goto unref_pipeline;
-     case GST_STATE_CHANGE_NO_PREROLL:
-        D("live sources not supported yet\n");
-        goto unref_pipeline;
-     default:
-        break;
-     }
-
-   ret = gst_element_get_state((pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
-   if (ret == GST_STATE_CHANGE_FAILURE)
-     {
-       D("could not complete pause\n");
-        goto unref_pipeline;
-     }
-
-   format = GST_FORMAT_TIME;
-   gst_element_query_duration (pipeline, &format, &duration);
-   if (duration == -1)
-     {
-       D("could not retrieve the duration, set it to 1s\n");
-        duration = 1 * GST_SECOND;
-     }
-
-   pad = gst_element_get_static_pad(sink, "sink");
-   if (!pad)
-     {
-       D("could not retrieve the sink pad\n");
-        goto unref_pipeline;
-     }
-
-   caps = gst_pad_get_negotiated_caps(pad);
-   if (!caps)
-     goto unref_pad;
-
-   structure = gst_caps_get_structure(caps, 0);
-
-   if (!gst_structure_get_int(structure, "width", &width))
-     goto unref_caps;
-   if (!gst_structure_get_int(structure, "height", &height))
-     goto unref_caps;
-
-   gst_caps_unref(caps);
-   gst_object_unref(pad);
-
-   return EINA_TRUE;
-
- unref_caps:
-   gst_caps_unref(caps);
- unref_pad:
-   gst_object_unref(pad);
- unref_pipeline:
-   gst_element_set_state (pipeline, GST_STATE_NULL);
-   gst_object_unref(pipeline);
- gst_shutdown:
-   gst_deinit();
-
-   return EINA_FALSE;
-}
-
-static void
-_gst_shutdown()
-{
-   gst_element_set_state (pipeline, GST_STATE_NULL);
-   gst_object_unref(pipeline);
-   gst_deinit();
-}
-
-static void
-_gst_load_image(int size_w EINA_UNUSED, int size_h EINA_UNUSED, double pos)
-{
-   GstBuffer *buffer;
-
-   D("load image\n");
-   if (pos >= 0.0)
-     gst_element_seek_simple(pipeline, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
-                             pos * 1000000000.0);
-   else
-     gst_element_seek_simple(pipeline, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
-                             duration / 2);
-   g_signal_emit_by_name(sink, "pull-preroll", &buffer, NULL);
-   D("load image : %p %d\n", GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
-
-   shm_alloc(width * height * sizeof(DATA32));
-   if (!shm_addr) return;
-   data = shm_addr;
-
-   memcpy(data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
-}
-
-int
-main(int argc, char **argv)
-{
-   char *file, *p;
-   int i, numonly;
-   int size_w = 0, size_h = 0;
-   int head_only = 0;
-   long long pos = -1.0;
-
-   if (argc < 2) return -1;
-   // file is ALWAYS first arg, other options come after
-   file = argv[1];
-   for (i = 2; i < argc; i++)
-     {
-        if      (!strcmp(argv[i], "-head"))
-           // asked to only load header, not body/data
-           head_only = 1;
-        else if (!strcmp(argv[i], "-key"))
-          {
-             i++;
-             numonly = 1;
-             for (p = argv[i]; *p; p++)
-               {
-                  if ((!*p < '0') || (*p > 9))
-                    {
-                       numonly = 0;
-                       break;
-                    }
-               }
-             if (numonly) pos = (double)(atoll(argv[i])) / 1000.0;
-             i++;
-          }
-        else if (!strcmp(argv[i], "-opt-scale-down-by"))
-          { // not used by ps loader
-             i++;
-             // int scale_down = atoi(argv[i]);
-          }
-        else if (!strcmp(argv[i], "-opt-dpi"))
-          {
-             i++;
-          }
-        else if (!strcmp(argv[i], "-opt-size"))
-          { // not used by ps loader
-             i++;
-             size_w = atoi(argv[i]);
-             i++;
-             size_h = atoi(argv[i]);
-          }
-     }
-
-   timeout_init(10);
-   
-   D("_gst_init_file\n");
-
-   if (!_gst_init(file))
-     return -1;
-   D("_gst_init done\n");
-
-   if (!head_only)
-     {
-        _gst_load_image(size_w, size_h, pos);
-     }
-
-   D("size...: %ix%i\n", width, height);
-   D("alpha..: 0\n");
-
-   printf("size %i %i\n", width, height);
-   printf("alpha 0\n");
-
-   if (!head_only)
-     {
-        if (shm_fd >= 0)
-          {
-             printf("shmfile %s\n", shmfile);
-          }
-        else
-          {
-             // could also to "tmpfile %s\n" like shmfile but just
-             // a mmaped tmp file on the system
-             printf("data\n");
-             if (fwrite(data, width * height * sizeof(DATA32), 1, stdout) != 1)
-               {
-                  shm_free();
-                  return -1;
-               }
-          }
-        shm_free();
-     }
-   else
-     printf("done\n");
-
-   _gst_shutdown();
-   fflush(stdout);
-   return 0;
-}
diff --git a/src/generic/evas/gst/meson.build b/src/generic/evas/gst/meson.build
index aa10f7e513..c324ef4702 100644
--- a/src/generic/evas/gst/meson.build
+++ b/src/generic/evas/gst/meson.build
@@ -1,11 +1,3 @@
-
-# gstreamer 0.1 support
-#generic_deps = [dependency('gstreamer')]
-#generic_src = files([
-#  'main_0_10.c'
-#])
-
-
 generic_src = files([
   'main.c'
 ])
@@ -14,4 +6,4 @@ generic_deps = []
 if get_option('gstreamer') == true
   generic_deps += dependency('gstreamer-1.0')
 endif
-generic_support = ['264','3g2','3gp','3gp2','3gpp','3gpp2','3p2','asf','avi','bdm','bdmv','clpi','cpi','dv','fla','flv','m1v','m2t','m2v','m4v','mkv','mov','mp2','mp2ts','mp4','mpe','mpeg','mpg','mpl','mpls','mts','mxf','nut','nuv','ogg','ogm','ogv','qt','rm','rmj','rmm','rms','rmx','rmvb','rv','swf','ts','webm','weba','wmv']
\ No newline at end of file
+generic_support = ['264','3g2','3gp','3gp2','3gpp','3gpp2','3p2','asf','avi','bdm','bdmv','clpi','cpi','dv','fla','flv','m1v','m2t','m2v','m4v','mkv','mov','mp2','mp2ts','mp4','mpe','mpeg','mpg','mpl','mpls','mts','mxf','nut','nuv','ogg','ogm','ogv','qt','rm','rmj','rmm','rms','rmx','rmvb','rv','swf','ts','webm','weba','wmv']
diff --git a/src/lib/emotion/emotion_modules.c b/src/lib/emotion/emotion_modules.c
index 22e6994e4b..c23d0bb69d 100644
--- a/src/lib/emotion/emotion_modules.c
+++ b/src/lib/emotion/emotion_modules.c
@@ -81,9 +81,6 @@ _emotion_modules_load(void)
              if (stat(buf, &st) == 0)
                {
                   const char *built_modules[] = {
-#ifdef EMOTION_BUILD_GSTREAMER
-                     "gstreamer",
-#endif
 #ifdef EMOTION_BUILD_GSTREAMER1
                      "gstreamer1",
 #endif
diff --git a/src/modules/emotion/gstreamer/emotion_alloc.c b/src/modules/emotion/gstreamer/emotion_alloc.c
deleted file mode 100644
index c4aae047b7..0000000000
--- a/src/modules/emotion/gstreamer/emotion_alloc.c
+++ /dev/null
@@ -1,90 +0,0 @@
-#ifdef HAVE_CONFIG_H
-# include "config.h"
-#endif
-
-#include <Eina.h>
-#include <Evas.h>
-#include <Ecore.h>
-
-#include <glib.h>
-#include <gst/gst.h>
-#include <gst/video/video.h>
-#include <gst/video/gstvideosink.h>
-
-#ifdef HAVE_ECORE_X
-# include <Ecore_X.h>
-# ifdef HAVE_XOVERLAY_H
-#  include <gst/interfaces/xoverlay.h>
-# endif
-#endif
-
-#include "Emotion.h"
-#include "emotion_gstreamer.h"
-
-Emotion_Gstreamer_Buffer *
-emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink,
-                              GstBuffer *buffer,
-                              Eina_Bool preroll)
-{
-   Emotion_Gstreamer_Buffer *send;
-
-   if (!sink->ev) return NULL;
-
-   send = malloc(sizeof (Emotion_Gstreamer_Buffer));
-   if (!send) return NULL;
-
-   send->sink = sink;
-   send->frame = gst_buffer_ref(buffer);
-   send->preroll = preroll;
-   send->force = EINA_FALSE;
-   sink->ev->out++;
-   send->ev = sink->ev;
-
-   return send;
-}
-
-void
-emotion_gstreamer_buffer_free(Emotion_Gstreamer_Buffer *send)
-{
-   send->ev->in++;
-
-   if (send->ev->in == send->ev->out
-       && send->ev->threads == NULL
-       && send->ev->delete_me)
-     send->ev->api->del(send->ev);
-
-   gst_buffer_unref(send->frame);
-   free(send);
-}
-
-Emotion_Gstreamer_Message *
-emotion_gstreamer_message_alloc(Emotion_Gstreamer_Video *ev,
-                               GstMessage *msg)
-{
-   Emotion_Gstreamer_Message *send;
-
-   if (!ev) return NULL;
-
-   send = malloc(sizeof (Emotion_Gstreamer_Message));
-   if (!send) return NULL;
-
-   ev->out++;
-   send->ev = ev;
-   send->msg = gst_message_ref(msg);
-
-   return send;
-}
-
-void
-emotion_gstreamer_message_free(Emotion_Gstreamer_Message *send)
-{
-   send->ev->in++;
-
-   if (send->ev->in == send->ev->out
-       && send->ev->threads == NULL
-       && send->ev->delete_me)
-     send->ev->api->del(send->ev);
-
-   gst_message_unref(send->msg);
-   free(send);
-}
diff --git a/src/modules/emotion/gstreamer/emotion_convert.c b/src/modules/emotion/gstreamer/emotion_convert.c
deleted file mode 100644
index 2664d28be6..0000000000
--- a/src/modules/emotion/gstreamer/emotion_convert.c
+++ /dev/null
@@ -1,251 +0,0 @@
-#ifdef HAVE_CONFIG_H
-# include "config.h"
-#endif
-
-#include <Eina.h>
-#include <Evas.h>
-
-#include <glib.h>
-#include <gst/gst.h>
-#include <gst/video/video.h>
-#include <gst/video/gstvideosink.h>
-
-#ifdef HAVE_ECORE_X
-# include <Ecore_X.h>
-# ifdef HAVE_XOVERLAY_H
-#  include <gst/interfaces/xoverlay.h>
-# endif
-#endif
-
-#include "Emotion.h"
-#include "emotion_gstreamer.h"
-
-static inline void
-_evas_video_bgrx_step(unsigned char *evas_data, const unsigned char *gst_data,
-                      unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height, unsigned int step)
-{
-   unsigned int x;
-   unsigned int y;
-
-   for (y = 0; y < output_height; ++y)
-     {
-        for (x = 0; x < w; x++)
-          {
-             evas_data[0] = gst_data[0];
-             evas_data[1] = gst_data[1];
-             evas_data[2] = gst_data[2];
-             evas_data[3] = 255;
-             gst_data += step;
-             evas_data += 4;
-          }
-     }
-}
-
-static void
-_evas_video_bgr(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
-{
-   _evas_video_bgrx_step(evas_data, gst_data, w, h, output_height, 3);
-}
-
-static void
-_evas_video_bgrx(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
-{
-   _evas_video_bgrx_step(evas_data, gst_data, w, h, output_height, 4);
-}
-
-static void
-_evas_video_bgra(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height)
-{
-   unsigned int x;
-   unsigned int y;
-
-   for (y = 0; y < output_height; ++y)
-     {
-        unsigned char alpha;
-
-        for (x = 0; x < w; ++x)
-          {
-             alpha = gst_data[3];
-             evas_data[0] = (gst_data[0] * alpha) / 255;
-             evas_data[1] = (gst_data[1] * alpha) / 255;
-             evas_data[2] = (gst_data[2] * alpha) / 255;
-             evas_data[3] = alpha;
-             gst_data += 4;
-             evas_data += 4;
-          }
-     }
-}
-
-static void
-_evas_video_i420(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
-{
-   const unsigned char **rows;
-   unsigned int i, j;
-   unsigned int rh;
-   unsigned int stride_y, stride_uv;
-
-   rh = output_height;
-
-   rows = (const unsigned char **)evas_data;
-
-   stride_y = GST_ROUND_UP_4(w);
-   stride_uv = GST_ROUND_UP_8(w) / 2;
-
-   for (i = 0; i < rh; i++)
-     rows[i] = &gst_data[i * stride_y];
-
-   for (j = 0; j < (rh / 2); j++, i++)
-     rows[i] = &gst_data[h * stride_y + j * stride_uv];
-
-   for (j = 0; j < (rh / 2); j++, i++)
-     rows[i] = &gst_data[h * stride_y +
-                        (rh / 2) * stride_uv +
-                        j * stride_uv];
-}
-
-static void
-_evas_video_yv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
-{
-   const unsigned char **rows;
-   unsigned int i, j;
-   unsigned int rh;
-   unsigned int stride_y, stride_uv;
-
-   rh = output_height;
-
-   rows = (const unsigned char **)evas_data;
-
-   stride_y = GST_ROUND_UP_4(w);
-   stride_uv = GST_ROUND_UP_8(w) / 2;
-
-   for (i = 0; i < rh; i++)
-     rows[i] = &gst_data[i * stride_y];
-
-   for (j = 0; j < (rh / 2); j++, i++)
-     rows[i] = &gst_data[h * stride_y +
-                        (rh / 2) * stride_uv +
-                        j * stride_uv];
-
-   for (j = 0; j < (rh / 2); j++, i++)
-     rows[i] = &gst_data[h * stride_y + j * stride_uv];
-}
-
-static void
-_evas_video_yuy2(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height)
-{
-   const unsigned char **rows;
-   unsigned int i;
-   unsigned int stride;
-
-   rows = (const unsigned char **)evas_data;
-
-   stride = GST_ROUND_UP_4(w * 2);
-
-   for (i = 0; i < output_height; i++)
-     rows[i] = &gst_data[i * stride];
-}
-
-static void
-_evas_video_nv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height)
-{
-   const unsigned char **rows;
-   unsigned int i, j;
-   unsigned int rh;
-
-   rh = output_height;
-
-   rows = (const unsigned char **)evas_data;
-
-   for (i = 0; i < rh; i++)
-     rows[i] = &gst_data[i * w];
-
-   for (j = 0; j < (rh / 2); j++, i++)
-     rows[i] = &gst_data[rh * w + j * w];
-}
-
-static void
-_evas_video_mt12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height EINA_UNUSED)
-{
-   const unsigned char **rows;
-   unsigned int i;
-   unsigned int j;
-
-   rows = (const unsigned char **)evas_data;
-
-   for (i = 0; i < (h / 32) / 2; i++)
-     rows[i] = &gst_data[i * w * 2 * 32];
-
-   if ((h / 32) % 2)
-     {
-        rows[i] = &gst_data[i * w * 2 * 32];
-        i++;
-     }
-
-   for (j = 0; j < ((h / 2) / 32) / 2; ++j, ++i)
-     rows[i] = &gst_data[h * w + j * (w / 2) * 2 * 16];
-}
-
-void
-_evas_video_st12_multiplane(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height EINA_UNUSED)
-{
-   const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) gst_data;
-   const unsigned char **rows;
-   unsigned int i;
-   unsigned int j;
-
-   rows = (const unsigned char **)evas_data;
-
-   for (i = 0; i < (h / 32) / 2; i++)
-     rows[i] = mp_buf->uaddr[0] + i * w * 2 * 32;
-   if ((h / 32) % 2)
-     {
-        rows[i] = mp_buf->uaddr[0] + i * w * 2 * 32;
-        i++;
-     }
-
-   for (j = 0; j < ((h / 2) / 16) / 2; j++, i++)
-     {
-       rows[i] = mp_buf->uaddr[1] + j * w * 2 * 16 * 2;
-     }
-   if (((h / 2) / 16) % 2)
-     rows[i] = mp_buf->uaddr[1] + j * w * 2 * 16 * 2;
-}
-
-void
-_evas_video_st12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w EINA_UNUSED, unsigned int h, unsigned int output_height EINA_UNUSED)
-{
-   const SCMN_IMGB *imgb = (const SCMN_IMGB *) gst_data;
-   const unsigned char **rows;
-   unsigned int i, j;
-
-   rows = (const unsigned char **)evas_data;
-
-   for (i = 0; i < (h / 32) / 2; i++)
-     rows[i] = imgb->uaddr[0] + i * imgb->stride[0] * 2 * 32;
-   if ((h / 32) % 2)
-     {
-        rows[i] = imgb->uaddr[0] + i * imgb->stride[0] * 2 * 32;
-        i++;
-     }
-
-   for (j = 0; j < (unsigned int) imgb->elevation[1] / 32 / 2; j++, i++)
-     rows[i] = imgb->uaddr[1] + j * imgb->stride[1] * 32 * 2;
-   if ((imgb->elevation[1] / 32) % 2)
-     rows[i++] = imgb->uaddr[1] + j * imgb->stride[1] * 32 * 2;
-}
-
-const ColorSpace_FourCC_Convertion colorspace_fourcc_convertion[] = {
-  { "I420", GST_MAKE_FOURCC('I', '4', '2', '0'), 
EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_i420, EINA_TRUE },
-  { "YV12", GST_MAKE_FOURCC('Y', 'V', '1', '2'), 
EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_yv12, EINA_TRUE },
-  { "YUY2", GST_MAKE_FOURCC('Y', 'U', 'Y', '2'), 
EVAS_COLORSPACE_YCBCR422601_PL, _evas_video_yuy2, EINA_FALSE },
-  { "NV12", GST_MAKE_FOURCC('N', 'V', '1', '2'), 
EVAS_COLORSPACE_YCBCR420NV12601_PL, _evas_video_nv12, EINA_TRUE },
-  { "TM12", GST_MAKE_FOURCC('T', 'M', '1', '2'), 
EVAS_COLORSPACE_YCBCR420TM12601_PL, _evas_video_mt12, EINA_TRUE },
-  { NULL, 0, 0, NULL, 0 }
-};
-
-const ColorSpace_Format_Convertion colorspace_format_convertion[] = {
-  { "BGR", GST_VIDEO_FORMAT_BGR, EVAS_COLORSPACE_ARGB8888, _evas_video_bgr },
-  { "BGRx", GST_VIDEO_FORMAT_BGRx, EVAS_COLORSPACE_ARGB8888, _evas_video_bgrx 
},
-  { "BGRA", GST_VIDEO_FORMAT_BGRA, EVAS_COLORSPACE_ARGB8888, _evas_video_bgra 
},
-  { NULL, 0, 0, NULL }
-};
diff --git a/src/modules/emotion/gstreamer/emotion_fakeeos.c b/src/modules/emotion/gstreamer/emotion_fakeeos.c
deleted file mode 100644
index fc6dc0f989..0000000000
--- a/src/modules/emotion/gstreamer/emotion_fakeeos.c
+++ /dev/null
@@ -1,70 +0,0 @@
-#ifdef HAVE_CONFIG_H
-# include "config.h"
-#endif
-
-#include <Eina.h>
-#include <Evas.h>
-
-#include <glib.h>
-#include <gst/gst.h>
-#include <gst/video/video.h>
-#include <gst/video/gstvideosink.h>
-
-#ifdef HAVE_ECORE_X
-# include <Ecore_X.h>
-# ifdef HAVE_XOVERLAY_H
-#  include <gst/interfaces/xoverlay.h>
-# endif
-#endif
-
-#include "Emotion.h"
-#include "emotion_gstreamer.h"
-
-typedef struct _FakeEOSBin
-{
-   GstBin parent;
-} FakeEOSBin;
-
-typedef struct _FakeEOSBinClass
-{
-   GstBinClass parent;
-} FakeEOSBinClass;
-
-GST_BOILERPLATE(FakeEOSBin, fakeeos_bin, GstBin,
-                GST_TYPE_BIN);
-
-static void
-fakeeos_bin_handle_message(GstBin * bin, GstMessage * message)
-{
-   /* FakeEOSBin *fakeeos = (FakeEOSBin *)(bin); */
-
-   switch (GST_MESSAGE_TYPE(message)) {
-    case GST_MESSAGE_EOS:
-       /* what to do here ? just returning at the moment */
-       return;
-    default:
-       break;
-   }
-
-   GST_BIN_CLASS(parent_class)->handle_message(bin, message);
-}
-
-static void
-fakeeos_bin_base_init(gpointer g_class EINA_UNUSED)
-{
-}
-
-static void
-fakeeos_bin_class_init(FakeEOSBinClass * klass)
-{
-   GstBinClass *gstbin_class = GST_BIN_CLASS(klass);
-
-   gstbin_class->handle_message =
-     GST_DEBUG_FUNCPTR (fakeeos_bin_handle_message);
-}
-
-static void
-fakeeos_bin_init(FakeEOSBin *src EINA_UNUSED,
-                 FakeEOSBinClass *klass EINA_UNUSED)
-{
-}
diff --git a/src/modules/emotion/gstreamer/emotion_gstreamer.c b/src/modules/emotion/gstreamer/emotion_gstreamer.c
deleted file mode 100644
index b37f5ce639..0000000000
--- a/src/modules/emotion/gstreamer/emotion_gstreamer.c
+++ /dev/null
@@ -1,2018 +0,0 @@
-#ifdef HAVE_CONFIG_H
-# include "config.h"
-#endif
-
-#include <unistd.h>
-#include <fcntl.h>
-
-#ifdef _WIN32
-# include <direct.h> /* getcwd */
-#endif
-
-#include <Eina.h>
-#include <Evas.h>
-#include <Ecore.h>
-
-#define HTTP_STREAM 0
-#define RTSP_STREAM 1
-#include <glib.h>
-#include <gst/gst.h>
-#include <glib-object.h>
-#include <gst/video/gstvideosink.h>
-#include <gst/video/video.h>
-
-// forcibly disable x overlay window.. broken badly.
-#undef HAVE_ECORE_X
-
-#ifdef HAVE_ECORE_X
-# include <Ecore_X.h>
-# ifdef HAVE_XOVERLAY_H
-#  include <gst/interfaces/xoverlay.h>
-# endif
-#endif
-
-#include "emotion_modules.h"
-#include "emotion_gstreamer.h"
-
-Eina_Bool window_manager_video = EINA_FALSE;
-int _emotion_gstreamer_log_domain = -1;
-Eina_Bool debug_fps = EINA_FALSE;
-Eina_Bool _ecore_x_available = EINA_FALSE;
-
-static Ecore_Idler *restart_idler;
-static int _emotion_init_count = 0;
-
-/* Callbacks to get the eos */
-static void _for_each_tag    (GstTagList const* list, gchar const* tag, void *data);
-static void _free_metadata   (Emotion_Gstreamer_Metadata *m);
-
-static GstBusSyncReply _eos_sync_fct(GstBus *bus,
-                                    GstMessage *message,
-                                    gpointer data);
-
-static Eina_Bool _em_restart_stream(void *data);
-
-/* Module interface */
-
-
-static int priority_overide = 0;
-
-static Emotion_Video_Stream *
-emotion_video_stream_new(Emotion_Gstreamer_Video *ev)
-{
-   Emotion_Video_Stream *vstream;
-
-   if (!ev) return NULL;
-
-   vstream = (Emotion_Video_Stream *)calloc(1, sizeof(Emotion_Video_Stream));
-   if (!vstream) return NULL;
-
-   ev->video_streams = eina_list_append(ev->video_streams, vstream);
-   return vstream;
-}
-
-static const char *
-emotion_visualization_element_name_get(Emotion_Vis visualisation)
-{
-   switch (visualisation)
-     {
-      case EMOTION_VIS_NONE:
-         return NULL;
-      case EMOTION_VIS_GOOM:
-         return "goom";
-      case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
-         return "libvisual_bumpscope";
-      case EMOTION_VIS_LIBVISUAL_CORONA:
-         return "libvisual_corona";
-      case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
-         return "libvisual_dancingparticles";
-      case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
-         return "libvisual_gdkpixbuf";
-      case EMOTION_VIS_LIBVISUAL_G_FORCE:
-         return "libvisual_G-Force";
-      case EMOTION_VIS_LIBVISUAL_GOOM:
-         return "libvisual_goom";
-      case EMOTION_VIS_LIBVISUAL_INFINITE:
-         return "libvisual_infinite";
-      case EMOTION_VIS_LIBVISUAL_JAKDAW:
-         return "libvisual_jakdaw";
-      case EMOTION_VIS_LIBVISUAL_JESS:
-         return "libvisual_jess";
-      case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
-         return "libvisual_lv_analyzer";
-      case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
-         return "libvisual_lv_flower";
-      case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
-         return "libvisual_lv_gltest";
-      case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
-         return "libvisual_lv_scope";
-      case EMOTION_VIS_LIBVISUAL_MADSPIN:
-         return "libvisual_madspin";
-      case EMOTION_VIS_LIBVISUAL_NEBULUS:
-         return "libvisual_nebulus";
-      case EMOTION_VIS_LIBVISUAL_OINKSIE:
-         return "libvisual_oinksie";
-      case EMOTION_VIS_LIBVISUAL_PLASMA:
-         return "libvisual_plazma";
-      default:
-         return "goom";
-     }
-}
-
-static void
-em_cleanup(Emotion_Gstreamer_Video *ev)
-{
-   Emotion_Audio_Stream *astream;
-   Emotion_Video_Stream *vstream;
-
-   if (ev->send)
-     {
-        emotion_gstreamer_buffer_free(ev->send);
-        ev->send = NULL;
-     }
-
-   if (ev->eos_bus)
-     {
-        gst_object_unref(GST_OBJECT(ev->eos_bus));
-        ev->eos_bus = NULL;
-     }
-
-   if (ev->metadata)
-     {
-        _free_metadata(ev->metadata);
-        ev->metadata = NULL;
-     }
-
-   if (ev->last_buffer)
-     {
-        gst_buffer_unref(ev->last_buffer);
-        ev->last_buffer = NULL;
-     }
-
-   if (!ev->stream)
-     {
-        evas_object_image_video_surface_set(emotion_object_image_get(ev->obj), NULL);
-        ev->stream = EINA_TRUE;
-     }
-
-   if (ev->pipeline)
-     {
-       gstreamer_video_sink_new(ev, ev->obj, NULL);
-
-       g_object_set(G_OBJECT(ev->esink), "ev", NULL, NULL);
-       g_object_set(G_OBJECT(ev->esink), "evas-object", NULL, NULL);
-       gst_element_set_state(ev->pipeline, GST_STATE_NULL);
-       gst_object_unref(ev->pipeline);
-
-       ev->pipeline = NULL;
-       ev->sink = NULL;
-
-       if (ev->eteepad) gst_object_unref(ev->eteepad);
-       ev->eteepad = NULL;
-       if (ev->xvteepad) gst_object_unref(ev->xvteepad);
-       ev->xvteepad = NULL;
-       if (ev->xvpad) gst_object_unref(ev->xvpad);
-       ev->xvpad = NULL;
-
-       ev->src_width = 0;
-       ev->src_height = 0;
-
-#ifdef HAVE_ECORE_X
-       INF("destroying window: %i", ev->win);
-       if (ev->win) ecore_x_window_free(ev->win);
-       ev->win = 0;
-#endif
-     }
-
-   if (restart_idler)
-     {
-        ecore_idler_del(restart_idler);
-        restart_idler = NULL;
-     }
-
-   EINA_LIST_FREE(ev->audio_streams, astream)
-     free(astream);
-   EINA_LIST_FREE(ev->video_streams, vstream)
-     free(vstream);
-}
-
-static void
-em_del(void *video)
-{
-   Emotion_Gstreamer_Video *ev = video;
-
-   if (ev->threads)
-     {
-        Ecore_Thread *t;
-
-        EINA_LIST_FREE(ev->threads, t)
-          ecore_thread_cancel(t);
-
-        ev->delete_me = EINA_TRUE;
-        return;
-     }
-
-   if (ev->in != ev->out)
-     {
-        ev->delete_me = EINA_TRUE;
-        return;
-     }
-
-   em_cleanup(ev);
-
-   free(ev);
-}
-
-static Eina_Bool
-em_file_open(void *video,
-             const char   *file)
-{
-   Emotion_Gstreamer_Video *ev = video;
-   Eina_Strbuf *sbuf = NULL;
-   const char *uri;
-
-   if (!file) return EINA_FALSE;
-   if (strstr(file, "://") == NULL)
-     {
-        sbuf = eina_strbuf_new();
-        eina_strbuf_append(sbuf, "file://");
-        if (strncmp(file, "./", 2) == 0)
-          file += 2;
-       if (strstr(file, ":/") != NULL)
-         { /* We absolutely need file:///C:/ under Windows, so adding it here */
-             eina_strbuf_append(sbuf, "/");
-         }
-       else if (*file != '/')
-          {
-             char tmp[PATH_MAX];
-
-             if (getcwd(tmp, PATH_MAX))
-               {
-                  eina_strbuf_append(sbuf, tmp);
-                  eina_strbuf_append(sbuf, "/");
-               }
-          }
-        eina_strbuf_append(sbuf, file);
-     }
-
-   ev->play_started = 0;
-   ev->pipeline_parsed = 0;
-
-   uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
-   DBG("setting file to '%s'", uri);
-   ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, uri);
-   if (sbuf) eina_strbuf_free(sbuf);
-
-   if (!ev->pipeline)
-     return EINA_FALSE;
-
-   ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
-   if (!ev->eos_bus)
-     {
-        ERR("could not get the bus");
-        return EINA_FALSE;
-     }
-
-   gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
-
-   ev->position = 0.0;
-
-   return 1;
-}
-
-static void
-em_file_close(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-   if (!ev)
-     return;
-
-   if (ev->threads)
-     {
-        Ecore_Thread *t;
-
-        EINA_LIST_FREE(ev->threads, t)
-          ecore_thread_cancel(t);
-     }
-
-   em_cleanup(ev);
-
-   ev->pipeline_parsed = EINA_FALSE;
-   ev->play_started = 0;
-}
-
-static void
-em_play(void   *video,
-        double  pos EINA_UNUSED)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-   if (!ev->pipeline) return;
-
-   if (ev->pipeline_parsed)
-     gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
-   ev->play = 1;
-   ev->play_started = 1;
-}
-
-static void
-em_stop(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (!ev->pipeline) return;
-
-   if (ev->pipeline_parsed)
-     gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
-   ev->play = 0;
-}
-
-static void
-em_size_get(void  *video,
-            int   *width,
-            int   *height)
-{
-   Emotion_Gstreamer_Video *ev;
-   Emotion_Video_Stream      *vstream;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
-     goto on_error;
-
-   vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
-   if (vstream)
-     {
-        if (width) *width = vstream->width;
-        if (height) *height = vstream->height;
-
-        return;
-     }
-
- on_error:
-   if (width) *width = 0;
-   if (height) *height = 0;
-}
-
-static void
-em_pos_set(void   *video,
-           double  pos)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (!ev->pipeline) return;
-
-   if (ev->play)
-     gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
-
-   gst_element_seek(ev->pipeline, 1.0,
-                          GST_FORMAT_TIME,
-                          GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
-                          GST_SEEK_TYPE_SET,
-                          (gint64)(pos * (double)GST_SECOND),
-                          GST_SEEK_TYPE_NONE, -1);
-
-   if (ev->play)
-     gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
-}
-
-/**
- * Returns stream duration in seconds
- */
-static double
-em_len_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-   Emotion_Video_Stream *vstream;
-   Emotion_Audio_Stream *astream;
-   Eina_List *l;
-   GstFormat fmt;
-   gint64 val;
-   gboolean ret;
-
-   ev = video;
-   fmt = GST_FORMAT_TIME;
-
-   if (!ev->pipeline) return 0.0;
-
-   ret = gst_element_query_duration(ev->pipeline, &fmt, &val);
-   if (!ret)
-     goto fallback;
-
-   if (fmt != GST_FORMAT_TIME)
-     {
-        DBG("requested duration in time, but got %s instead.",
-            gst_format_get_name(fmt));
-        goto fallback;
-     }
-
-   if (val <= 0.0)
-     goto fallback;
-
-   return GST_TIME_AS_SECONDS(val);
-
- fallback:
-   if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
-     return 0.0;
-
-   EINA_LIST_FOREACH(ev->audio_streams, l, astream)
-     if (astream->length_time >= 0)
-       return astream->length_time;
-
-   EINA_LIST_FOREACH(ev->video_streams, l, vstream)
-     if (vstream->length_time >= 0)
-       return vstream->length_time;
-
-   return 0.0;
-}
-
-static double
-em_buffer_size_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   GstQuery *query;
-   gboolean busy;
-   gint percent;
-
-   ev = video;
-
-   if (!ev->pipeline) return 0.0;
-
-   query = gst_query_new_buffering(GST_FORMAT_DEFAULT);
-   if (gst_element_query(ev->pipeline, query))
-     gst_query_parse_buffering_percent(query, &busy, &percent);
-   else
-     percent = 100;
-
-   gst_query_unref(query);
-   return ((float)(percent)) / 100.0;
-}
-
-static int
-em_fps_num_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-   Emotion_Video_Stream      *vstream;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
-     return 0;
-
-   vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
-   if (vstream)
-     return vstream->fps_num;
-
-   return 0;
-}
-
-static int
-em_fps_den_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-   Emotion_Video_Stream      *vstream;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
-     return 1;
-
-   vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
-   if (vstream)
-     return vstream->fps_den;
-
-   return 1;
-}
-
-static double
-em_fps_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-   Emotion_Video_Stream      *vstream;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
-     return 0.0;
-
-   vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
-   if (vstream)
-     return (double)vstream->fps_num / (double)vstream->fps_den;
-
-   return 0.0;
-}
-
-/**
- * Returns stream position in seconds
- */
-static double
-em_pos_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-   GstFormat fmt;
-   gint64 val;
-   gboolean ret;
-
-   ev = video;
-   fmt = GST_FORMAT_TIME;
-
-   if (!ev->pipeline) return 0.0;
-
-   ret = gst_element_query_position(ev->pipeline, &fmt, &val);
-   if (!ret)
-     return ev->position;
-
-   if (fmt != GST_FORMAT_TIME)
-     {
-        ERR("requested position in time, but got %s instead.",
-            gst_format_get_name(fmt));
-        return ev->position;
-     }
-
-   ev->position = GST_TIME_AS_SECONDS(val);
-   return ev->position;
-}
-
-static void
-em_vis_set(void *video,
-           Emotion_Vis vis)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   ev->vis = vis;
-}
-
-static Emotion_Vis
-em_vis_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   return ev->vis;
-}
-
-static Eina_Bool
-em_vis_supported(void *ef EINA_UNUSED, Emotion_Vis vis)
-{
-   const char *name;
-   GstElementFactory *factory;
-
-   if (vis == EMOTION_VIS_NONE)
-     return EINA_TRUE;
-
-   name = emotion_visualization_element_name_get(vis);
-   if (!name)
-     return EINA_FALSE;
-
-   factory = gst_element_factory_find(name);
-   if (!factory)
-     return EINA_FALSE;
-
-   gst_object_unref(factory);
-   return EINA_TRUE;
-}
-
-static double
-em_ratio_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   return ev->ratio;
-}
-
-static int
-em_video_handled(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
-
-   if (!eina_list_count(ev->video_streams))
-     return 0;
-
-   return 1;
-}
-
-static int
-em_audio_handled(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
-
-   if (!eina_list_count(ev->audio_streams))
-     return 0;
-
-   return 1;
-}
-
-static int
-em_seekable(void *video EINA_UNUSED)
-{
-   return 1;
-}
-
-static void
-em_frame_done(void *video EINA_UNUSED)
-{
-}
-
-static Emotion_Format
-em_format_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-   Emotion_Video_Stream    *vstream;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
-     return EMOTION_FORMAT_NONE;
-
-   vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
-   if (vstream)
-     {
-        switch (vstream->fourcc)
-          {
-           case GST_MAKE_FOURCC('I', '4', '2', '0'):
-              return EMOTION_FORMAT_I420;
-           case GST_MAKE_FOURCC('Y', 'V', '1', '2'):
-              return EMOTION_FORMAT_YV12;
-           case GST_MAKE_FOURCC('Y', 'U', 'Y', '2'):
-              return EMOTION_FORMAT_YUY2;
-           case GST_MAKE_FOURCC('A', 'R', 'G', 'B'):
-              return EMOTION_FORMAT_BGRA;
-           default:
-              return EMOTION_FORMAT_NONE;
-          }
-     }
-   return EMOTION_FORMAT_NONE;
-}
-
-static void
-em_video_data_size_get(void *video, int *w, int *h)
-{
-   Emotion_Gstreamer_Video *ev;
-   Emotion_Video_Stream    *vstream;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (ev->pipeline && (!ev->video_stream_nbr || !ev->video_streams))
-     if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
-       goto on_error;
-
-   vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
-   if (vstream)
-     {
-        *w = vstream->width;
-        *h = vstream->height;
-
-        return;
-     }
-
- on_error:
-   *w = 0;
-   *h = 0;
-}
-
-static int
-em_yuv_rows_get(void           *video EINA_UNUSED,
-                int             w EINA_UNUSED,
-                int             h EINA_UNUSED,
-                unsigned char **yrows EINA_UNUSED,
-                unsigned char **urows EINA_UNUSED,
-                unsigned char **vrows EINA_UNUSED)
-{
-   return 0;
-}
-
-static int
-em_bgra_data_get(void *video EINA_UNUSED, unsigned char **bgra_data EINA_UNUSED)
-{
-   return 0;
-}
-
-static void
-em_event_feed(void *video EINA_UNUSED, int event EINA_UNUSED)
-{
-}
-
-static void
-em_event_mouse_button_feed(void *video EINA_UNUSED, int button EINA_UNUSED, int x EINA_UNUSED, int y EINA_UNUSED)
-{
-}
-
-static void
-em_event_mouse_move_feed(void *video EINA_UNUSED, int x EINA_UNUSED, int y EINA_UNUSED)
-{
-}
-
-/* Video channels */
-static int
-em_video_channel_count(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
-
-   return eina_list_count(ev->video_streams);
-}
-
-static void
-em_video_channel_set(void *video EINA_UNUSED,
-                     int   channel EINA_UNUSED)
-{
-#if 0
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (channel < 0) channel = 0;
-#endif
-   /* FIXME: a faire... */
-}
-
-static int
-em_video_channel_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
-
-   return ev->video_stream_nbr;
-}
-
-static void
-em_video_subtitle_file_set(void *video EINA_UNUSED,
-                           const char *filepath EINA_UNUSED)
-{
-   DBG("video_subtitle_file_set not implemented for gstreamer yet.");
-}
-
-static const char *
-em_video_subtitle_file_get(void *video EINA_UNUSED)
-{
-   DBG("video_subtitle_file_get not implemented for gstreamer yet.");
-   return NULL;
-}
-
-static const char *
-em_video_channel_name_get(void *video EINA_UNUSED,
-                          int   channel EINA_UNUSED)
-{
-   return NULL;
-}
-
-static void
-em_video_channel_mute_set(void *video,
-                          int   mute)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   ev->video_mute = mute;
-}
-
-static int
-em_video_channel_mute_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   return ev->video_mute;
-}
-
-/* Audio channels */
-
-static int
-em_audio_channel_count(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
-
-   return eina_list_count(ev->audio_streams);
-}
-
-static void
-em_audio_channel_set(void *video EINA_UNUSED,
-                     int   channel EINA_UNUSED)
-{
-#if 0
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (channel < -1) channel = -1;
-#endif
-   /* FIXME: a faire... */
-}
-
-static int
-em_audio_channel_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
-
-   return ev->audio_stream_nbr;
-}
-
-static const char *
-em_audio_channel_name_get(void *video EINA_UNUSED,
-                          int   channel EINA_UNUSED)
-{
-   return NULL;
-}
-
-#define GST_PLAY_FLAG_AUDIO (1 << 1)
-
-static void
-em_audio_channel_mute_set(void *video,
-                          int   mute)
-{
-   /* NOTE: at first I wanted to completly shutdown the audio path on mute,
-      but that's not possible as the audio sink could be the clock source
-      for the pipeline (at least that's the case on some of the hardware
-      I have been tested emotion on.
-    */
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (!ev->pipeline) return;
-
-   ev->audio_mute = mute;
-
-   g_object_set(G_OBJECT(ev->pipeline), "mute", !!mute, NULL);
-}
-
-static int
-em_audio_channel_mute_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   return ev->audio_mute;
-}
-
-static void
-em_audio_channel_volume_set(void  *video,
-                            double vol)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (!ev->pipeline) return;
-
-   if (vol < 0.0)
-     vol = 0.0;
-   if (vol > 1.0)
-     vol = 1.0;
-   ev->volume = vol;
-   g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL);
-}
-
-static double
-em_audio_channel_volume_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   return ev->volume;
-}
-
-/* spu stuff */
-
-static int
-em_spu_channel_count(void *video EINA_UNUSED)
-{
-   return 0;
-}
-
-static void
-em_spu_channel_set(void *video EINA_UNUSED, int channel EINA_UNUSED)
-{
-}
-
-static int
-em_spu_channel_get(void *video EINA_UNUSED)
-{
-   return 1;
-}
-
-static const char *
-em_spu_channel_name_get(void *video EINA_UNUSED, int channel EINA_UNUSED)
-{
-   return NULL;
-}
-
-static void
-em_spu_channel_mute_set(void *video EINA_UNUSED, int mute EINA_UNUSED)
-{
-}
-
-static int
-em_spu_channel_mute_get(void *video EINA_UNUSED)
-{
-   return 0;
-}
-
-static int
-em_chapter_count(void *video EINA_UNUSED)
-{
-   return 0;
-}
-
-static void
-em_chapter_set(void *video EINA_UNUSED, int chapter EINA_UNUSED)
-{
-}
-
-static int
-em_chapter_get(void *video EINA_UNUSED)
-{
-   return 0;
-}
-
-static const char *
-em_chapter_name_get(void *video EINA_UNUSED, int chapter EINA_UNUSED)
-{
-   return NULL;
-}
-
-static void
-em_speed_set(void *video EINA_UNUSED, double speed EINA_UNUSED)
-{
-}
-
-static double
-em_speed_get(void *video EINA_UNUSED)
-{
-   return 1.0;
-}
-
-static int
-em_eject(void *video EINA_UNUSED)
-{
-   return 1;
-}
-
-static const char *
-em_meta_get(void *video, int meta)
-{
-   Emotion_Gstreamer_Video *ev;
-   const char *str = NULL;
-
-   ev = (Emotion_Gstreamer_Video *)video;
-
-   if (!ev || !ev->metadata) return NULL;
-   switch (meta)
-     {
-      case META_TRACK_TITLE:
-         str = ev->metadata->title;
-         break;
-      case META_TRACK_ARTIST:
-         str = ev->metadata->artist;
-         break;
-      case  META_TRACK_ALBUM:
-         str = ev->metadata->album;
-         break;
-      case META_TRACK_YEAR:
-         str = ev->metadata->year;
-         break;
-      case META_TRACK_GENRE:
-         str = ev->metadata->genre;
-         break;
-      case META_TRACK_COMMENT:
-         str = ev->metadata->comment;
-         break;
-      case META_TRACK_DISCID:
-         str = ev->metadata->disc_id;
-         break;
-      default:
-         break;
-     }
-
-   return str;
-}
-
-static void
-em_priority_set(void *video, Eina_Bool pri)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = video;
-   if (priority_overide > 3) return; /* If we failed to much to create that pipeline, let's don't wast our time anymore */
-
-   if (ev->priority != pri && ev->pipeline)
-     {
-        if (ev->threads)
-          {
-             Ecore_Thread *t;
-
-             EINA_LIST_FREE(ev->threads, t)
-                ecore_thread_cancel(t);
-          }
-        em_cleanup(ev);
-        restart_idler = ecore_idler_add(_em_restart_stream, ev);
-     }
-   ev->priority = pri;
-}
-
-static Eina_Bool
-em_priority_get(void *video)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = video;
-   return !ev->stream;
-}
-
-#ifdef HAVE_ECORE_X
-static Eina_Bool
-_ecore_event_x_destroy(void *data EINA_UNUSED, int type EINA_UNUSED, void *event EINA_UNUSED)
-{
-   Ecore_X_Event_Window_Destroy *ev = event;
-
-   INF("killed window: %x (%x).", ev->win, ev->event_win);
-
-   return EINA_TRUE;
-}
-
-static void
-gstreamer_ecore_x_check(void)
-{
-   Ecore_X_Window *roots;
-   int num;
-
-   ecore_event_handler_add(ECORE_X_EVENT_WINDOW_DESTROY, _ecore_event_x_destroy, NULL);
-
-   /* Check if the window manager is able to handle our special Xv window. */
-   roots = ecore_x_window_root_list(&num);
-   if (roots && num > 0)
-     {
-        Ecore_X_Window  win, twin;
-        int nwins;
-
-        nwins = ecore_x_window_prop_window_get(roots[0],
-                                               ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK,
-                                               &win, 1);
-        if (nwins > 0)
-          {
-             nwins = ecore_x_window_prop_window_get(win,
-                                                    ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK,
-                                                    &twin, 1);
-             if (nwins > 0 && twin == win)
-               {
-                  Ecore_X_Atom *supported;
-                  int supported_num;
-                  int i;
-
-                  if (ecore_x_netwm_supported_get(roots[0], &supported, &supported_num))
-                    {
-                       Eina_Bool parent = EINA_FALSE;
-                       Eina_Bool video_position = EINA_FALSE;
-
-                       for (i = 0; i < supported_num; ++i)
-                         {
-                            if (supported[i] == ECORE_X_ATOM_E_VIDEO_PARENT)
-                              parent = EINA_TRUE;
-                            else if (supported[i] == ECORE_X_ATOM_E_VIDEO_POSITION)
-                              video_position = EINA_TRUE;
-                            if (parent && video_position)
-                              break;
-                         }
-
-                       if (parent && video_position)
-                         {
-                            window_manager_video = EINA_TRUE;
-                         }
-                    }
-                  free(supported);
-               }
-          }
-     }
-   free(roots);
-}
-#endif
-
-static void *
-em_add(const Emotion_Engine *api,
-       Evas_Object *obj,
-       const Emotion_Module_Options *opt EINA_UNUSED)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = calloc(1, sizeof(Emotion_Gstreamer_Video));
-   EINA_SAFETY_ON_NULL_RETURN_VAL(ev, NULL);
-
-   ev->api = api;
-   ev->obj = obj;
-
-   /* Default values */
-   ev->ratio = 1.0;
-   ev->vis = EMOTION_VIS_NONE;
-   ev->volume = 0.8;
-   ev->play_started = 0;
-   ev->delete_me = EINA_FALSE;
-   ev->threads = NULL;
-
-   return ev;
-}
-
-static const Emotion_Engine em_engine =
-{
-   EMOTION_ENGINE_API_VERSION,
-   EMOTION_ENGINE_PRIORITY_DEFAULT,
-   "gstreamer",
-   em_add, /* add */
-   em_del, /* del */
-   em_file_open, /* file_open */
-   em_file_close, /* file_close */
-   em_play, /* play */
-   em_stop, /* stop */
-   em_size_get, /* size_get */
-   em_pos_set, /* pos_set */
-   em_len_get, /* len_get */
-   em_buffer_size_get, /* buffer_size_get */
-   em_fps_num_get, /* fps_num_get */
-   em_fps_den_get, /* fps_den_get */
-   em_fps_get, /* fps_get */
-   em_pos_get, /* pos_get */
-   em_vis_set, /* vis_set */
-   em_vis_get, /* vis_get */
-   em_vis_supported, /* vis_supported */
-   em_ratio_get, /* ratio_get */
-   em_video_handled, /* video_handled */
-   em_audio_handled, /* audio_handled */
-   em_seekable, /* seekable */
-   em_frame_done, /* frame_done */
-   em_format_get, /* format_get */
-   em_video_data_size_get, /* video_data_size_get */
-   em_yuv_rows_get, /* yuv_rows_get */
-   em_bgra_data_get, /* bgra_data_get */
-   em_event_feed, /* event_feed */
-   em_event_mouse_button_feed, /* event_mouse_button_feed */
-   em_event_mouse_move_feed, /* event_mouse_move_feed */
-   em_video_channel_count, /* video_channel_count */
-   em_video_channel_set, /* video_channel_set */
-   em_video_channel_get, /* video_channel_get */
-   em_video_subtitle_file_set, /* video_subtitle_file_set */
-   em_video_subtitle_file_get, /* video_subtitle_file_get */
-   em_video_channel_name_get, /* video_channel_name_get */
-   em_video_channel_mute_set, /* video_channel_mute_set */
-   em_video_channel_mute_get, /* video_channel_mute_get */
-   em_audio_channel_count, /* audio_channel_count */
-   em_audio_channel_set, /* audio_channel_set */
-   em_audio_channel_get, /* audio_channel_get */
-   em_audio_channel_name_get, /* audio_channel_name_get */
-   em_audio_channel_mute_set, /* audio_channel_mute_set */
-   em_audio_channel_mute_get, /* audio_channel_mute_get */
-   em_audio_channel_volume_set, /* audio_channel_volume_set */
-   em_audio_channel_volume_get, /* audio_channel_volume_get */
-   em_spu_channel_count, /* spu_channel_count */
-   em_spu_channel_set, /* spu_channel_set */
-   em_spu_channel_get, /* spu_channel_get */
-   em_spu_channel_name_get, /* spu_channel_name_get */
-   em_spu_channel_mute_set, /* spu_channel_mute_set */
-   em_spu_channel_mute_get, /* spu_channel_mute_get */
-   em_chapter_count, /* chapter_count */
-   em_chapter_set, /* chapter_set */
-   em_chapter_get, /* chapter_get */
-   em_chapter_name_get, /* chapter_name_get */
-   em_speed_set, /* speed_set */
-   em_speed_get, /* speed_get */
-   em_eject, /* eject */
-   em_meta_get, /* meta_get */
-   em_priority_set, /* priority_set */
-   em_priority_get, /* priority_get */
-   NULL /* em_meta_artwork_get */
-};
-
-Eina_Bool
-gstreamer_module_init(void)
-{
-   GError *error;
-
-   if (_emotion_init_count > 0)
-     {
-        _emotion_pending_ecore_begin();
-        return EINA_TRUE;
-     }
-
-   if (getenv("EMOTION_FPS_DEBUG")) debug_fps = EINA_TRUE;
-
-   eina_threads_init();
-   eina_log_threads_enable();
-   _emotion_gstreamer_log_domain = eina_log_domain_register
-     ("emotion-gstreamer", EINA_COLOR_LIGHTCYAN);
-   if (_emotion_gstreamer_log_domain < 0)
-     {
-        EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'");
-        return EINA_FALSE;
-     }
-
-   if (!gst_init_check(0, NULL, &error))
-     {
-        EINA_LOG_CRIT("Could not init GStreamer");
-        goto error_gst_init;
-     }
-
-#ifdef HAVE_ECORE_X
-   if (ecore_x_init(NULL) > 0)
-     {
-        _ecore_x_available = EINA_TRUE;
-        gstreamer_ecore_x_check();
-     }
-#endif
-
-   if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
-                                  "emotion-sink",
-                                  "video sink plugin for Emotion",
-                                  gstreamer_plugin_init,
-                                  VERSION,
-                                  "LGPL",
-                                  "Enlightenment",
-                                  PACKAGE,
-                                  "http://www.enlightenment.org/") == FALSE)
-     {
-        EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion.");
-        goto error_gst_plugin;
-     }
-
-   if (!_emotion_module_register(&em_engine))
-     {
-        ERR("Could not register module %p", &em_engine);
-        goto error_register;
-     }
-
-   _emotion_init_count = 1;
-   return EINA_TRUE;
-
- error_register:
- error_gst_plugin:
-#ifdef HAVE_ECORE_X
-   if (_ecore_x_available)
-     {
-        ecore_x_shutdown();
-        _ecore_x_available = EINA_FALSE;
-        window_manager_video = EINA_FALSE;
-     }
-#endif
-
-   gst_deinit();
-
- error_gst_init:
-   eina_log_domain_unregister(_emotion_gstreamer_log_domain);
-   _emotion_gstreamer_log_domain = -1;
-
-   return EINA_FALSE;
-}
-
-void
-gstreamer_module_shutdown(void)
-{
-   if (_emotion_init_count > 1)
-     {
-        _emotion_init_count--;
-        return;
-     }
-   else if (_emotion_init_count == 0)
-     {
-        EINA_LOG_ERR("too many gstreamer_module_shutdown()");
-        return;
-     }
-   _emotion_init_count = 0;
-
-   _emotion_module_unregister(&em_engine);
-
-#ifdef HAVE_ECORE_X
-   if (_ecore_x_available)
-     {
-        ecore_x_shutdown();
-        _ecore_x_available = EINA_FALSE;
-        window_manager_video = EINA_FALSE;
-     }
-#endif
-
-   eina_log_domain_unregister(_emotion_gstreamer_log_domain);
-   _emotion_gstreamer_log_domain = -1;
-
-   gst_deinit();
-}
-
-#ifndef EMOTION_STATIC_BUILD_GSTREAMER
-
-EINA_MODULE_INIT(gstreamer_module_init);
-EINA_MODULE_SHUTDOWN(gstreamer_module_shutdown);
-
-#endif
-
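
For reference, the two GStreamer calls the module init above relies on, gst_init_check() and gst_plugin_register_static(), kept the same signatures in GStreamer 1.x, so only the sink implementation itself needed porting. A minimal, self-contained sketch against 1.x (my_sink_plugin_init and the metadata strings are illustrative, not taken from this module):

    #include <gst/gst.h>

    static gboolean
    my_sink_plugin_init(GstPlugin *plugin)
    {
       (void)plugin;
       /* a real module would call gst_element_register() here */
       return TRUE;
    }

    int
    main(int argc, char **argv)
    {
       GError *error = NULL;

       if (!gst_init_check(&argc, &argv, &error))
         {
            g_printerr("GStreamer init failed: %s\n", error ? error->message : "unknown");
            g_clear_error(&error);
            return 1;
         }

       /* same argument list as the 0.10 call above */
       if (!gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
                                       "my-sink", "example static sink plugin",
                                       my_sink_plugin_init, "1.0", "LGPL",
                                       "example", "example", "http://example.org/"))
         {
            g_printerr("could not register static plugin\n");
            return 1;
         }

       gst_deinit();
       return 0;
    }
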
-static void
-_for_each_tag(GstTagList const* list,
-                    gchar const* tag,
-                    void *data)
-{
-   Emotion_Gstreamer_Video *ev;
-   int i;
-   int count;
-
-
-   ev = (Emotion_Gstreamer_Video*)data;
-
-   if (!ev || !ev->metadata) return;
-
-   count = gst_tag_list_get_tag_size(list, tag);
-
-   for (i = 0; i < count; i++)
-     {
-        if (!strcmp(tag, GST_TAG_TITLE))
-          {
-             char *str;
-             g_free(ev->metadata->title);
-             if (gst_tag_list_get_string(list, GST_TAG_TITLE, &str))
-               ev->metadata->title = str;
-             else
-               ev->metadata->title = NULL;
-             break;
-          }
-        if (!strcmp(tag, GST_TAG_ALBUM))
-          {
-             gchar *str;
-             g_free(ev->metadata->album);
-             if (gst_tag_list_get_string(list, GST_TAG_ALBUM, &str))
-               ev->metadata->album = str;
-             else
-               ev->metadata->album = NULL;
-             break;
-          }
-        if (!strcmp(tag, GST_TAG_ARTIST))
-          {
-             gchar *str;
-             g_free(ev->metadata->artist);
-             if (gst_tag_list_get_string(list, GST_TAG_ARTIST, &str))
-               ev->metadata->artist = str;
-             else
-               ev->metadata->artist = NULL;
-             break;
-          }
-        if (!strcmp(tag, GST_TAG_GENRE))
-          {
-             gchar *str;
-             g_free(ev->metadata->genre);
-             if (gst_tag_list_get_string(list, GST_TAG_GENRE, &str))
-               ev->metadata->genre = str;
-             else
-               ev->metadata->genre = NULL;
-             break;
-          }
-        if (!strcmp(tag, GST_TAG_COMMENT))
-          {
-             gchar *str;
-             g_free(ev->metadata->comment);
-             if (gst_tag_list_get_string(list, GST_TAG_COMMENT, &str))
-               ev->metadata->comment = str;
-             else
-               ev->metadata->comment = NULL;
-             break;
-          }
-        if (!strcmp(tag, GST_TAG_DATE))
-          {
-             gchar *str;
-             const GValue *date;
-             g_free(ev->metadata->year);
-             date = gst_tag_list_get_value_index(list, GST_TAG_DATE, 0);
-             if (date)
-               str = g_strdup_value_contents(date);
-             else
-               str = NULL;
-             ev->metadata->year = str;
-             break;
-          }
-
-        if (!strcmp(tag, GST_TAG_TRACK_NUMBER))
-          {
-             gchar *str;
-             const GValue *track;
-             g_free(ev->metadata->count);
-             track = gst_tag_list_get_value_index(list, GST_TAG_TRACK_NUMBER, 0);
-             if (track)
-               str = g_strdup_value_contents(track);
-             else
-               str = NULL;
-             ev->metadata->count = str;
-             break;
-          }
-
-#ifdef GST_TAG_CDDA_CDDB_DISCID
-        if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID))
-          {
-             gchar *str;
-             const GValue *discid;
-             g_free(ev->metadata->disc_id);
-             discid = gst_tag_list_get_value_index(list, GST_TAG_CDDA_CDDB_DISCID, 0);
-             if (discid)
-               str = g_strdup_value_contents(discid);
-             else
-               str = NULL;
-             ev->metadata->disc_id = str;
-             break;
-          }
-#endif
-     }
-
-}
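
The tag walking above ports to GStreamer 1.x almost unchanged; the only API difference is that tag lists became refcounted, so gst_tag_list_free() is replaced by gst_tag_list_unref(). A minimal sketch (the print-only callback is illustrative):

    #include <gst/gst.h>

    /* Print every tag that carries a string value; illustrative only. */
    static void
    print_tag(const GstTagList *list, const gchar *tag, gpointer user_data)
    {
       gchar *str = NULL;

       (void)user_data;
       if (gst_tag_list_get_string(list, tag, &str))
         {
            g_print("%s: %s\n", tag, str);
            g_free(str);
         }
    }

    static void
    dump_tags(GstMessage *msg)
    {
       GstTagList *tags = NULL;

       gst_message_parse_tag(msg, &tags);
       if (!tags) return;
       gst_tag_list_foreach(tags, print_tag, NULL);
       gst_tag_list_unref(tags); /* 1.x replacement for gst_tag_list_free() */
    }
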
-
-static void
-_free_metadata(Emotion_Gstreamer_Metadata *m)
-{
-  if (!m) return;
-
-  g_free(m->title);
-  g_free(m->album);
-  g_free(m->artist);
-  g_free(m->genre);
-  g_free(m->comment);
-  g_free(m->year);
-  g_free(m->count);
-  g_free(m->disc_id);
-
-  free(m);
-}
-
-static Eina_Bool
-_em_restart_stream(void *data)
-{
-   Emotion_Gstreamer_Video *ev;
-
-   ev = data;
-
-   ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, ev->uri);
-
-   if (ev->pipeline)
-     {
-        ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
-        if (!ev->eos_bus)
-          {
-             ERR("could not get the bus");
-             return EINA_FALSE;
-          }
-
-        gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
-     }
-
-   restart_idler = NULL;
-
-   return ECORE_CALLBACK_CANCEL;
-}
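
gst_bus_set_sync_handler() still exists in GStreamer 1.x but gained a GDestroyNotify parameter for the user data, so the restart path above needs one extra argument when ported. A minimal sketch (sync_handler is a stand-in for _eos_sync_fct):

    #include <gst/gst.h>

    static GstBusSyncReply
    sync_handler(GstBus *bus, GstMessage *msg, gpointer data)
    {
       (void)bus; (void)msg; (void)data;
       return GST_BUS_PASS; /* let the message reach the async handlers */
    }

    static void
    attach_sync_handler(GstPipeline *pipeline, gpointer user_data)
    {
       GstBus *bus = gst_pipeline_get_bus(pipeline);

       if (!bus) return;
       /* 1.x: the fourth argument frees user_data; NULL keeps the 0.10 behaviour */
       gst_bus_set_sync_handler(bus, sync_handler, user_data, NULL);
       gst_object_unref(bus);
    }
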
-
-static Eina_Bool
-_video_size_get(GstElement *elem, int *width, int *height)
-{
-   GstIterator *itr = NULL;
-   GstCaps *caps;
-   GstStructure *str;
-   gpointer pad;
-   Eina_Bool ret = EINA_FALSE;
-
-   itr = gst_element_iterate_src_pads(elem);
-   while(gst_iterator_next(itr, &pad) && !ret)
-     {
-        caps = gst_pad_get_caps(GST_PAD(pad));
-        str = gst_caps_get_structure(caps, 0);
-        if (g_strrstr(gst_structure_get_name(str), "video"))
-          {
-             if (gst_structure_get_int(str, "width", width) && 
gst_structure_get_int(str, "height", height))
-                ret = EINA_TRUE;
-          }
-        gst_caps_unref(caps);
-        gst_object_unref(pad);
-     }
-   gst_iterator_free(itr);
-
-   return ret;
-}
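
This helper is another spot that only builds against 0.10: gst_iterator_next() now fills a GValue rather than a raw pointer, and the negotiated caps of a pad come from gst_pad_get_current_caps() instead of gst_pad_get_caps(). A rough 1.x equivalent of the same probe (handling of GST_ITERATOR_RESYNC is omitted for brevity):

    #include <gst/gst.h>

    static gboolean
    video_size_get_1x(GstElement *elem, int *width, int *height)
    {
       GstIterator *it = gst_element_iterate_src_pads(elem);
       GValue item = G_VALUE_INIT;
       gboolean ret = FALSE;

       while (!ret && gst_iterator_next(it, &item) == GST_ITERATOR_OK)
         {
            GstPad *pad = g_value_get_object(&item);       /* borrowed from the GValue */
            GstCaps *caps = gst_pad_get_current_caps(pad); /* 1.x: currently negotiated caps */

            if (caps)
              {
                 GstStructure *s = gst_caps_get_structure(caps, 0);

                 if (g_strrstr(gst_structure_get_name(s), "video")
                     && gst_structure_get_int(s, "width", width)
                     && gst_structure_get_int(s, "height", height))
                   ret = TRUE;
                 gst_caps_unref(caps);
              }
            g_value_reset(&item);
         }
       g_value_unset(&item);
       gst_iterator_free(it);

       return ret;
    }
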
-
-static void
-_main_frame_resize(void *data)
-{
-   Emotion_Gstreamer_Video *ev = data;
-   double ratio;
-
-   ratio = (double)ev->src_width / (double)ev->src_height;
-   _emotion_frame_resize(ev->obj, ev->src_width, ev->src_height, ratio);
-   _emotion_pending_ecore_end();
-}
-
-static void
-_no_more_pads(GstElement *decodebin, gpointer data)
-{
-   GstIterator *itr = NULL;
-   gpointer elem;
-   Emotion_Gstreamer_Video *ev = data;
-
-   itr = gst_bin_iterate_elements(GST_BIN(decodebin));
-   while(gst_iterator_next(itr, &elem))
-     {
-        if(_video_size_get(GST_ELEMENT(elem), &ev->src_width, &ev->src_height))
-          {
-             _emotion_pending_ecore_begin();
-             ecore_main_loop_thread_safe_call_async(_main_frame_resize, ev);
-             gst_object_unref(elem);
-             break;
-          }
-        gst_object_unref(elem);
-     }
-   gst_iterator_free(itr);
-}
-
-static void
-_eos_main_fct(void *data)
-{
-   Emotion_Gstreamer_Message *send;
-   Emotion_Gstreamer_Video *ev;
-   GstMessage              *msg;
-
-   send = data;
-   ev = send->ev;
-   msg = send->msg;
-
-   if (ev->play_started && !ev->delete_me)
-     {
-        _emotion_playback_started(ev->obj);
-        ev->play_started = 0;
-     }
-
-   switch (GST_MESSAGE_TYPE(msg))
-     {
-      case GST_MESSAGE_EOS:
-         if (!ev->delete_me)
-           {
-              ev->play = 0;
-              _emotion_decode_stop(ev->obj);
-              _emotion_playback_finished(ev->obj);
-           }
-         break;
-      case GST_MESSAGE_TAG:
-         if (!ev->delete_me)
-           {
-              GstTagList *new_tags;
-              gst_message_parse_tag(msg, &new_tags);
-              if (new_tags)
-                {
-                   gst_tag_list_foreach(new_tags,
-                                        (GstTagForeachFunc)_for_each_tag,
-                                        ev);
-                   gst_tag_list_free(new_tags);
-                }
-           }
-         break;
-      case GST_MESSAGE_ASYNC_DONE:
-         if (!ev->delete_me) _emotion_seek_done(ev->obj);
-         break;
-      case GST_MESSAGE_STREAM_STATUS:
-         break;
-      case GST_MESSAGE_STATE_CHANGED:
-         if (!ev->delete_me)
-           {
-              if (!g_signal_handlers_disconnect_by_func(msg->src, _no_more_pads, ev))
-                 g_signal_connect(msg->src, "no-more-pads", G_CALLBACK(_no_more_pads), ev);
-           }
-         break;
-      case GST_MESSAGE_ERROR:
-         em_cleanup(ev);
-
-        if (ev->priority)
-          {
-            ERR("Switching back to canvas rendering.");
-            ev->priority = EINA_FALSE;
-            priority_overide++;
-
-            restart_idler = ecore_idler_add(_em_restart_stream, ev);
-          }
-         break;
-      default:
-         ERR("bus say: %s [%i - %s]",
-             GST_MESSAGE_SRC_NAME(msg),
-             GST_MESSAGE_TYPE(msg),
-            GST_MESSAGE_TYPE_NAME(msg));
-         break;
-     }
-
-   emotion_gstreamer_message_free(send);
-   _emotion_pending_ecore_end();
-}
-
-static GstBusSyncReply
-_eos_sync_fct(GstBus *bus EINA_UNUSED, GstMessage *msg, gpointer data)
-{
-   Emotion_Gstreamer_Video *ev = data;
-   Emotion_Gstreamer_Message *send;
-
-   switch (GST_MESSAGE_TYPE(msg))
-     {
-      case GST_MESSAGE_EOS:
-      case GST_MESSAGE_TAG:
-      case GST_MESSAGE_ASYNC_DONE:
-      case GST_MESSAGE_STREAM_STATUS:
-         INF("bus say: %s [%i - %s]",
-             GST_MESSAGE_SRC_NAME(msg),
-             GST_MESSAGE_TYPE(msg),
-            GST_MESSAGE_TYPE_NAME(msg));
-         send = emotion_gstreamer_message_alloc(ev, msg);
-
-        if (send)
-          {
-             _emotion_pending_ecore_begin();
-             ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
-          }
-
-         break;
-
-      case GST_MESSAGE_STATE_CHANGED:
-        {
-           GstState old_state, new_state;
-
-           gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
-           INF("Element %s changed state from %s to %s.",
-               GST_OBJECT_NAME(msg->src),
-               gst_element_state_get_name(old_state),
-               gst_element_state_get_name(new_state));
-
-           if (!strncmp(GST_OBJECT_NAME(msg->src), "decodebin", 9) && !strcmp(gst_element_state_get_name(new_state), "READY"))
-             {
-                send = emotion_gstreamer_message_alloc(ev, msg);
-
-                if (send)
-                  {
-                     _emotion_pending_ecore_begin();
-                     ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
-                  }
-             }
-           break;
-        }
-      case GST_MESSAGE_ERROR:
-       {
-           GError *error;
-           gchar *debug;
-
-          gst_message_parse_error(msg, &error, &debug);
-          ERR("ERROR from element %s: %s", GST_OBJECT_NAME(msg->src), 
error->message);
-          ERR("Debugging info: %s", (debug) ? debug : "none");
-          g_error_free(error);
-          g_free(debug);
-
-           if (strncmp(GST_OBJECT_NAME(msg->src), "xvimagesink", 11) == 0)
-             {
-                send = emotion_gstreamer_message_alloc(ev, msg);
-
-                if (send)
-                  {
-                     _emotion_pending_ecore_begin();
-                     ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
-                  }
-             }
-          break;
-       }
-      case GST_MESSAGE_WARNING:
-        {
-           GError *error;
-           gchar *debug;
-
-           gst_message_parse_warning(msg, &error, &debug);
-           WRN("WARNING from element %s: %s", GST_OBJECT_NAME(msg->src), 
error->message);
-           WRN("Debugging info: %s", (debug) ? debug : "none");
-           g_error_free(error);
-           g_free(debug);
-           break;
-        }
-      default:
-         WRN("bus say: %s [%i - %s]",
-             GST_MESSAGE_SRC_NAME(msg),
-             GST_MESSAGE_TYPE(msg),
-            GST_MESSAGE_TYPE_NAME(msg));
-         break;
-     }
-
-   gst_message_unref(msg);
-
-   return GST_BUS_DROP;
-}
-
-Eina_Bool
-_emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
-                                        Eina_Bool force)
-{
-   gboolean      mute = 0;
-   gdouble       vol = 0.0;
-   gboolean res;
-   int i;
-
-   if (ev->pipeline_parsed)
-     return EINA_TRUE;
-
-   if (force && ev->threads)
-     {
-        Ecore_Thread *t;
-
-        EINA_LIST_FREE(ev->threads, t)
-          ecore_thread_cancel(t);
-     }
-
-   if (ev->threads)
-     return EINA_FALSE;
-
-   res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
-   if (res == GST_STATE_CHANGE_NO_PREROLL)
-     {
-       gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
-
-       res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
-     }
-
-   /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
-   /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
-#if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
-   if (getuid() == geteuid())
-#endif
-     {
-        if (getenv("EMOTION_GSTREAMER_DOT"))
-          GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
-                                            GST_DEBUG_GRAPH_SHOW_ALL,
-                                            getenv("EMOTION_GSTREAMER_DOT"));
-     }
-
-   if (!(res == GST_STATE_CHANGE_SUCCESS
-         || res == GST_STATE_CHANGE_NO_PREROLL))
-     {
-        ERR("Unable to get GST_CLOCK_TIME_NONE.");
-        return EINA_FALSE;
-     }
-
-   g_object_get(G_OBJECT(ev->pipeline),
-                "n-audio", &ev->audio_stream_nbr,
-                "n-video", &ev->video_stream_nbr,
-                NULL);
-
-   if ((ev->video_stream_nbr == 0) && (ev->audio_stream_nbr == 0))
-     {
-        ERR("No audio nor video stream found");
-        return EINA_FALSE;
-     }
-
-   /* video stream */
-   for (i = 0; i < ev->video_stream_nbr; i++)
-     {
-        Emotion_Video_Stream *vstream;
-        GstPad       *pad = NULL;
-        GstCaps      *caps;
-        GstStructure *structure;
-        GstQuery     *query;
-        const GValue *val;
-        gchar        *str = NULL;
-        
-        gdouble length_time = 0.0;
-        gint width;
-        gint height;
-        gint fps_num;
-        gint fps_den;
-        guint32 fourcc = 0;
-
-        g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad);
-        if (!pad)
-          continue;
-
-        caps = gst_pad_get_negotiated_caps(pad);
-        if (!caps)
-          goto unref_pad_v;
-        structure = gst_caps_get_structure(caps, 0);
-        str = gst_caps_to_string(caps);
-
-        if (!gst_structure_get_int(structure, "width", &width))
-          goto unref_caps_v;
-        if (!gst_structure_get_int(structure, "height", &height))
-          goto unref_caps_v;
-        if (!gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_den))
-          goto unref_caps_v;
-
-        if (g_str_has_prefix(str, "video/x-raw-yuv"))
-          {
-             val = gst_structure_get_value(structure, "format");
-             fourcc = gst_value_get_fourcc(val);
-          }
-        else if (g_str_has_prefix(str, "video/x-raw-rgb"))
-          fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
-        else
-          goto unref_caps_v;
-
-        query = gst_query_new_duration(GST_FORMAT_TIME);
-        if (gst_pad_peer_query(pad, query))
-          {
-             gint64 t;
-
-             gst_query_parse_duration(query, NULL, &t);
-             length_time = (double)t / (double)GST_SECOND;
-          }
-        else
-          goto unref_query_v;
-
-        vstream = emotion_video_stream_new(ev);
-        if (!vstream) goto unref_query_v;
-
-        vstream->length_time = length_time;
-        vstream->width = width;
-        vstream->height = height;
-        vstream->fps_num = fps_num;
-        vstream->fps_den = fps_den;
-        vstream->fourcc = fourcc;
-        vstream->index = i;
-
-     unref_query_v:
-        gst_query_unref(query);
-     unref_caps_v:
-        gst_caps_unref(caps);
-        g_free(str);
-     unref_pad_v:
-        gst_object_unref(pad);
-     }
-
-   /* Audio streams */
-   for (i = 0; i < ev->audio_stream_nbr; i++)
-     {
-        Emotion_Audio_Stream *astream;
-        GstPad       *pad;
-        GstCaps      *caps;
-        GstStructure *structure;
-        GstQuery     *query;
-
-        gdouble length_time = 0.0;
-        gint channels;
-        gint samplerate;
-
-        g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad);
-        if (!pad)
-          continue;
-
-        caps = gst_pad_get_negotiated_caps(pad);
-        if (!caps)
-          goto unref_pad_a;
-        structure = gst_caps_get_structure(caps, 0);
-
-        if (!gst_structure_get_int(structure, "channels", &channels))
-          goto unref_caps_a;
-        if (!gst_structure_get_int(structure, "rate", &samplerate))
-          goto unref_caps_a;
-
-        query = gst_query_new_duration(GST_FORMAT_TIME);
-        if (gst_pad_peer_query(pad, query))
-          {
-             gint64 t;
-
-             gst_query_parse_duration(query, NULL, &t);
-             length_time = (double)t / (double)GST_SECOND;
-          }
-        else
-          goto unref_query_a;
-
-        astream = calloc(1, sizeof(Emotion_Audio_Stream));
-        if (!astream) continue;
-        ev->audio_streams = eina_list_append(ev->audio_streams, astream);
-
-        astream->length_time = length_time;
-        astream->channels = channels;
-        astream->samplerate = samplerate;
-
-     unref_query_a:
-        gst_query_unref(query);
-     unref_caps_a:
-        gst_caps_unref(caps);
-     unref_pad_a:
-        gst_object_unref(pad);
-     }
-
-   /* Visualization sink */
-   if (ev->video_stream_nbr == 0)
-     {
-        GstElement *vis = NULL;
-        Emotion_Video_Stream *vstream;
-        Emotion_Audio_Stream *astream;
-        gint flags;
-        const char *vis_name;
-
-        if (!(vis_name = emotion_visualization_element_name_get(ev->vis)))
-          {
-             WRN("pb vis name %d", ev->vis);
-             goto finalize;
-          }
-
-        astream = eina_list_data_get(ev->audio_streams);
-
-        vis = gst_element_factory_make(vis_name, "vissink");
-        vstream = emotion_video_stream_new(ev);
-        if (!vstream)
-          {
-             DBG("could not create visualization stream");
-             goto finalize;
-          }
-
-        vstream->length_time = astream->length_time;
-        vstream->width = 320;
-        vstream->height = 200;
-        vstream->fps_num = 25;
-        vstream->fps_den = 1;
-        vstream->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
-
-        g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
-        g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
-        flags |= 0x00000008;
-        g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL);
-     }
-
- finalize:
-
-   ev->video_stream_nbr = eina_list_count(ev->video_streams);
-   ev->audio_stream_nbr = eina_list_count(ev->audio_streams);
-
-   if (ev->video_stream_nbr == 1)
-     {
-       Emotion_Video_Stream *vstream;
-
-       vstream = eina_list_data_get(ev->video_streams);
-       ev->ratio = (double)vstream->width / (double)vstream->height;
-       _emotion_frame_resize(ev->obj, vstream->width, vstream->height, ev->ratio);
-     }
-
-   {
-     /* to recap: */
-     Emotion_Video_Stream *vstream;
-     Emotion_Audio_Stream *astream;
-
-     vstream = eina_list_data_get(ev->video_streams);
-     if (vstream)
-       {
-         DBG("video size=%dx%d, fps=%d/%d, "
-             "fourcc=%"GST_FOURCC_FORMAT", length=%"GST_TIME_FORMAT,
-             vstream->width, vstream->height, vstream->fps_num, vstream->fps_den,
-             GST_FOURCC_ARGS(vstream->fourcc),
-             GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND)));
-       }
-
-     astream = eina_list_data_get(ev->audio_streams);
-     if (astream)
-       {
-         DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT,
-             astream->channels, astream->samplerate,
-             GST_TIME_ARGS((guint64)(astream->length_time * GST_SECOND)));
-       }
-   }
-
-   if (ev->metadata)
-     _free_metadata(ev->metadata);
-   ev->metadata = calloc(1, sizeof(Emotion_Gstreamer_Metadata));
-
-   ev->pipeline_parsed = EINA_TRUE;
-
-   g_object_get(ev->pipeline, "volume", &vol, NULL);
-   g_object_get(ev->pipeline, "mute", &mute, NULL);
-   ev->volume = vol;
-   ev->audio_mute = mute;
-
-   if (ev->play_started)
-     {
-        _emotion_playback_started(ev->obj);
-        ev->play_started = 0;
-     }
-
-   _emotion_open_done(ev->obj);
-
-   return EINA_TRUE;
-}
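
Most of the parsing above is what tied this module to 0.10: raw video caps are now plain "video/x-raw" parsed through GstVideoInfo instead of fourccs, negotiated caps come from gst_pad_get_current_caps(), and duration queries pass the GstFormat by value. A minimal sketch of probing one playbin video pad the 1.x way (Video_Stream_Info is an illustrative stand-in for Emotion_Video_Stream):

    #include <gst/gst.h>
    #include <gst/video/video.h>

    /* Illustrative holder for the same fields the removed code collected. */
    typedef struct
    {
       int    width, height;
       int    fps_num, fps_den;
       double length_time;
    } Video_Stream_Info;

    static gboolean
    probe_video_pad_1x(GstElement *playbin, int index, Video_Stream_Info *out)
    {
       GstPad *pad = NULL;
       GstCaps *caps;
       GstVideoInfo info;
       gint64 duration = 0;
       gboolean ok = FALSE;

       g_signal_emit_by_name(playbin, "get-video-pad", index, &pad);
       if (!pad) return FALSE;

       caps = gst_pad_get_current_caps(pad);              /* was gst_pad_get_negotiated_caps() */
       if (caps && gst_video_info_from_caps(&info, caps)) /* replaces the fourcc parsing */
         {
            out->width = GST_VIDEO_INFO_WIDTH(&info);
            out->height = GST_VIDEO_INFO_HEIGHT(&info);
            out->fps_num = GST_VIDEO_INFO_FPS_N(&info);
            out->fps_den = GST_VIDEO_INFO_FPS_D(&info);
            ok = TRUE;
         }
       if (caps) gst_caps_unref(caps);

       /* convenience wrapper for the peer duration query used above */
       if (ok && gst_pad_peer_query_duration(pad, GST_FORMAT_TIME, &duration))
         out->length_time = (double)duration / (double)GST_SECOND;

       gst_object_unref(pad);
       return ok;
    }
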
diff --git a/src/modules/emotion/gstreamer/emotion_gstreamer.h b/src/modules/emotion/gstreamer/emotion_gstreamer.h
deleted file mode 100644
index 4b15ae5777..0000000000
--- a/src/modules/emotion/gstreamer/emotion_gstreamer.h
+++ /dev/null
@@ -1,352 +0,0 @@
-#ifndef __EMOTION_GSTREAMER_H__
-#define __EMOTION_GSTREAMER_H__
-
-#include "emotion_modules.h"
-
-typedef void (*Evas_Video_Convert_Cb)(unsigned char *evas_data,
-                                      const unsigned char *gst_data,
-                                      unsigned int w,
-                                      unsigned int h,
-                                      unsigned int output_height);
-
-typedef struct _EvasVideoSinkPrivate EvasVideoSinkPrivate;
-typedef struct _EvasVideoSink        EvasVideoSink;
-typedef struct _EvasVideoSinkClass   EvasVideoSinkClass;
-typedef struct _Emotion_Gstreamer_Video Emotion_Gstreamer_Video;
-typedef struct _Emotion_Audio_Stream Emotion_Audio_Stream;
-typedef struct _Emotion_Gstreamer_Metadata Emotion_Gstreamer_Metadata;
-typedef struct _Emotion_Gstreamer_Buffer Emotion_Gstreamer_Buffer;
-typedef struct _Emotion_Gstreamer_Message Emotion_Gstreamer_Message;
-typedef struct _Emotion_Video_Stream Emotion_Video_Stream;
-
-struct _Emotion_Video_Stream
-{
-   gdouble     length_time;
-   gint        width;
-   gint        height;
-   gint        fps_num;
-   gint        fps_den;
-   guint32     fourcc;
-   int         index;
-};
-
-struct _Emotion_Audio_Stream
-{
-   gdouble     length_time;
-   gint        channels;
-   gint        samplerate;
-};
-
-struct _Emotion_Gstreamer_Metadata
-{
-   char *title;
-   char *album;
-   char *artist;
-   char *genre;
-   char *comment;
-   char *year;
-   char *count;
-   char *disc_id;
-};
-
-struct _Emotion_Gstreamer_Video
-{
-   const Emotion_Engine *api;
-
-   /* Gstreamer elements */
-   GstElement       *pipeline;
-   GstElement       *sink;
-   GstElement       *esink;
-   GstElement       *xvsink;
-   GstElement       *tee;
-   GstElement       *convert;
-
-   GstPad           *eteepad;
-   GstPad           *xvteepad;
-   GstPad           *xvpad;
-   Eina_List        *threads;
-
-   /* eos */
-   GstBus           *eos_bus;
-
-   /* Streams */
-   Eina_List        *video_streams;
-   Eina_List        *audio_streams;
-
-   int               video_stream_nbr;
-   int               audio_stream_nbr;
-
-    /* We need to keep a copy of the last inserted buffer as evas doesn't copy YUV data around */
-   GstBuffer        *last_buffer;
-
-   /* Evas object */
-   Evas_Object      *obj;
-
-   /* Characteristics of stream */
-   double            position;
-   double            ratio;
-   double            volume;
-
-   volatile int      seek_to;
-   volatile int      get_poslen;
-
-   Emotion_Gstreamer_Metadata *metadata;
-
-#ifdef HAVE_ECORE_X
-   Ecore_X_Window    win;
-#endif
-
-   const char       *uri;
-
-   Emotion_Gstreamer_Buffer *send;
-
-   EvasVideoSinkPrivate *sink_data;
-
-   Emotion_Vis       vis;
-
-   int               in;
-   int               out;
-
-   int frames;
-   int flapse;
-   double rtime;
-   double rlapse;
-
-   struct
-   {
-      double         width;
-      double         height;
-   } fill;
-
-   Eina_Bool         play         : 1;
-   Eina_Bool         play_started : 1;
-   Eina_Bool         video_mute   : 1;
-   Eina_Bool         audio_mute   : 1;
-   Eina_Bool         pipeline_parsed : 1;
-   Eina_Bool         delete_me    : 1;
-   Eina_Bool         samsung      : 1;
-   Eina_Bool         kill_buffer  : 1;
-   Eina_Bool         stream       : 1;
-   Eina_Bool         priority     : 1;
-
-   int src_width;
-   int src_height;
-};
-
-struct _EvasVideoSink {
-    /*< private >*/
-    GstVideoSink parent;
-    EvasVideoSinkPrivate *priv;
-};
-
-struct _EvasVideoSinkClass {
-    /*< private >*/
-    GstVideoSinkClass parent_class;
-};
-
-struct _EvasVideoSinkPrivate {
-   EINA_REFCOUNT;
-
-   Evas_Object *o;
-
-   Emotion_Gstreamer_Video *ev;
-
-   Evas_Video_Convert_Cb func;
-
-   unsigned int width;
-   unsigned int height;
-   unsigned int source_height;
-   Evas_Colorspace eformat;
-
-   Eina_Lock m;
-   Eina_Condition c;
-
-   // If this is TRUE all processing should finish ASAP
-   // This is necessary because there could be a race between
-   // unlock() and render(), where unlock() wins, signals the
-   // GCond, then render() tries to render a frame although
-   // everything else isn't running anymore. This will lead
-   // to deadlocks because render() holds the stream lock.
-   //
-   // Protected by the buffer mutex
-   Eina_Bool unlocked : 1;
-   Eina_Bool samsung : 1; /** ST12 will only define a Samsung specific GstBuffer */
-};
-
-struct _Emotion_Gstreamer_Buffer
-{
-   Emotion_Gstreamer_Video *ev;
-   EvasVideoSinkPrivate *sink;
-
-   GstBuffer *frame;
-
-   Eina_Bool preroll : 1;
-   Eina_Bool force : 1;
-};
-
-struct _Emotion_Gstreamer_Message
-{
-   Emotion_Gstreamer_Video *ev;
-
-   GstMessage *msg;
-};
-
-extern Eina_Bool window_manager_video;
-extern Eina_Bool debug_fps;
-extern int _emotion_gstreamer_log_domain;
-extern Eina_Bool _ecore_x_available;
-
-#ifdef DBG
-#undef DBG
-#endif
-#define DBG(...) EINA_LOG_DOM_DBG(_emotion_gstreamer_log_domain, __VA_ARGS__)
-
-#ifdef INF
-#undef INF
-#endif
-#define INF(...) EINA_LOG_DOM_INFO(_emotion_gstreamer_log_domain, __VA_ARGS__)
-
-#ifdef WRN
-#undef WRN
-#endif
-#define WRN(...) EINA_LOG_DOM_WARN(_emotion_gstreamer_log_domain, __VA_ARGS__)
-
-#ifdef ERR
-#undef ERR
-#endif
-#define ERR(...) EINA_LOG_DOM_ERR(_emotion_gstreamer_log_domain, __VA_ARGS__)
-
-#ifdef CRI
-#undef CRI
-#endif
-#define CRI(...) EINA_LOG_DOM_CRIT(_emotion_gstreamer_log_domain, __VA_ARGS__)
-
-#define EVAS_TYPE_VIDEO_SINK evas_video_sink_get_type()
-
-GType fakeeos_bin_get_type(void);
-
-#define EVAS_VIDEO_SINK(obj) \
-    (G_TYPE_CHECK_INSTANCE_CAST((obj), \
-    EVAS_TYPE_VIDEO_SINK, EvasVideoSink))
-
-#define EVAS_VIDEO_SINK_CLASS(klass) \
-    (G_TYPE_CHECK_CLASS_CAST((klass), \
-    EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass))
-
-#define EVAS_IS_VIDEO_SINK(obj) \
-    (G_TYPE_CHECK_INSTANCE_TYPE((obj), \
-    EVAS_TYPE_VIDEO_SINK))
-
-#define EVAS_IS_VIDEO_SINK_CLASS(klass) \
-    (G_TYPE_CHECK_CLASS_TYPE((klass), \
-    EVAS_TYPE_VIDEO_SINK))
-
-#define EVAS_VIDEO_SINK_GET_CLASS(obj) \
-    (G_TYPE_INSTANCE_GET_CLASS((obj), \
-    EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass))
-
-#define GST_TYPE_FAKEEOS_BIN fakeeos_bin_get_type()
-
-GstElement *gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
-                                     Evas_Object *obj,
-                                     const char *uri);
-
-gboolean    gstreamer_plugin_init(GstPlugin *plugin);
-
-Emotion_Gstreamer_Buffer *emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink,
-                                                        GstBuffer *buffer,
-                                                         Eina_Bool preroll);
-void emotion_gstreamer_buffer_free(Emotion_Gstreamer_Buffer *send);
-
-Emotion_Gstreamer_Message *emotion_gstreamer_message_alloc(Emotion_Gstreamer_Video *ev,
-                                                           GstMessage *msg);
-void emotion_gstreamer_message_free(Emotion_Gstreamer_Message *send);
-Eina_Bool _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
-                                                  Eina_Bool force);
-
-typedef struct _ColorSpace_FourCC_Convertion ColorSpace_FourCC_Convertion;
-typedef struct _ColorSpace_Format_Convertion ColorSpace_Format_Convertion;
-
-struct _ColorSpace_FourCC_Convertion
-{
-   const char *name;
-   guint32 fourcc;
-   Evas_Colorspace eformat;
-   Evas_Video_Convert_Cb func;
-   Eina_Bool force_height;
-};
-
-struct _ColorSpace_Format_Convertion
-{
-   const char *name;
-   GstVideoFormat format;
-   Evas_Colorspace eformat;
-   Evas_Video_Convert_Cb func;
-};
-
-extern const ColorSpace_FourCC_Convertion colorspace_fourcc_convertion[];
-extern const ColorSpace_Format_Convertion colorspace_format_convertion[];
-
-/** Samsung specific infrastructure - do not touch, do not modify */
-#define MPLANE_IMGB_MAX_COUNT 4
-#define SCMN_IMGB_MAX_PLANE 4
-
-typedef struct _GstMultiPlaneImageBuffer GstMultiPlaneImageBuffer;
-typedef struct _SCMN_IMGB SCMN_IMGB;
-
-struct _GstMultiPlaneImageBuffer
-{
-   GstBuffer buffer;
-
-   /* width of each image plane */
-   gint      width[MPLANE_IMGB_MAX_COUNT];
-   /* height of each image plane */
-   gint      height[MPLANE_IMGB_MAX_COUNT];
-   /* stride of each image plane */
-   gint      stride[MPLANE_IMGB_MAX_COUNT];
-   /* elevation of each image plane */
-   gint      elevation[MPLANE_IMGB_MAX_COUNT];
-   /* user space address of each image plane */
-   guchar   *uaddr[MPLANE_IMGB_MAX_COUNT];
-   /* Index of real address of each image plane, if needs */
-   guchar   *index[MPLANE_IMGB_MAX_COUNT];
-   /* left position, if needs */
-   gint      x;
-   /* top position, if needs */
-   gint      y;
-   /* to align memory */
-   gint      __dummy2;
-   /* arbitrary data */
-   gint      data[16];
-};
-
-struct _SCMN_IMGB
-{
-   /* width of each image plane */
-   int      width[SCMN_IMGB_MAX_PLANE];
-   /* height of each image plane */
-   int      height[SCMN_IMGB_MAX_PLANE];
-   /* stride of each image plane */
-   int      stride[SCMN_IMGB_MAX_PLANE];
-   /* elevation of each image plane */
-   int      elevation[SCMN_IMGB_MAX_PLANE];
-   /* user space address of each image plane */
-   guchar  *uaddr[SCMN_IMGB_MAX_PLANE];
-   /* physical address of each image plane, if needs */
-   guchar  *p[SCMN_IMGB_MAX_PLANE];
-   /* color space type of image */
-   int      cs;
-   /* left position, if needs */
-   int      x;
-   /* top position, if needs */
-   int      y;
-   /* to align memory */
-   int      __dummy2;
-   /* arbitrary data */
-   int      data[16];
-};
-
-void _evas_video_st12_multiplane(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height EINA_UNUSED);
-void _evas_video_st12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w EINA_UNUSED, unsigned int h, unsigned int output_height EINA_UNUSED);
-
-#endif /* __EMOTION_GSTREAMER_H__ */
diff --git a/src/modules/emotion/gstreamer/emotion_sink.c b/src/modules/emotion/gstreamer/emotion_sink.c
deleted file mode 100644
index 63fcbeeea7..0000000000
--- a/src/modules/emotion/gstreamer/emotion_sink.c
+++ /dev/null
@@ -1,1461 +0,0 @@
-#ifdef HAVE_CONFIG_H
-# include "config.h"
-#endif
-
-#include <Eina.h>
-#include <Evas.h>
-#include <Ecore.h>
-
-#define HTTP_STREAM 0
-#define RTSP_STREAM 1
-#include <glib.h>
-#include <gst/gst.h>
-#include <glib-object.h>
-#include <gst/video/gstvideosink.h>
-#include <gst/video/video.h>
-
-// forcibly disable x overlay window.. broken badly.
-#undef HAVE_ECORE_X
-
-#ifdef HAVE_ECORE_X
-# include <Ecore_X.h>
-# include <Ecore_Evas.h>
-# ifdef HAVE_XOVERLAY_H
-#  include <gst/interfaces/xoverlay.h>
-# endif
-#endif
-
-#if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
-# include <unistd.h>
-# include <sys/types.h>
-#endif
-
-#include "emotion_modules.h"
-#include "emotion_gstreamer.h"
-
-static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
-                                                                   GST_PAD_SINK, GST_PAD_ALWAYS,
-                                                                   GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2, NV12, ST12, TM12 }") ";"
-                                                                                   GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));
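
The GST_VIDEO_CAPS_YUV/BGRx/BGR/BGRA helpers used in this template are gone in GStreamer 1.x, where raw video caps are built from a single "video/x-raw" string via GST_VIDEO_CAPS_MAKE. A sketch of an equivalent 1.x template (ST12/TM12 are Samsung-specific formats with no upstream 1.x name, so they are left out):

    #include <gst/gst.h>
    #include <gst/video/video.h>

    /* 1.x style: one caps string listing the supported format names */
    static GstStaticPadTemplate sinktemplate_1x = GST_STATIC_PAD_TEMPLATE("sink",
        GST_PAD_SINK, GST_PAD_ALWAYS,
        GST_STATIC_CAPS(GST_VIDEO_CAPS_MAKE("{ I420, YV12, YUY2, NV12, BGRx, BGR, BGRA }")));
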
-
-GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
-#define GST_CAT_DEFAULT evas_video_sink_debug
-
-enum {
-  REPAINT_REQUESTED,
-  LAST_SIGNAL
-};
-
-enum {
-  PROP_0,
-  PROP_EVAS_OBJECT,
-  PROP_WIDTH,
-  PROP_HEIGHT,
-  PROP_EV,
-  PROP_LAST
-};
-
-static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };
-
-#define _do_init(bla)                                   \
-  GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug,        \
-                          "emotion-sink",              \
-                          0,                            \
-                          "emotion video sink")
-
-GST_BOILERPLATE_FULL(EvasVideoSink,
-                     evas_video_sink,
-                     GstVideoSink,
-                     GST_TYPE_VIDEO_SINK,
-                     _do_init);
-
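
GST_BOILERPLATE_FULL() was removed in GStreamer 1.x along with the base_init step; the same type registration is done with the plain GObject macros. A minimal sketch (EvasVideoSink1x is an illustrative name to avoid clashing with the structs above):

    #include <gst/gst.h>
    #include <gst/video/gstvideosink.h>

    typedef struct { GstVideoSink parent; } EvasVideoSink1x;
    typedef struct { GstVideoSinkClass parent_class; } EvasVideoSink1xClass;

    GST_DEBUG_CATEGORY_STATIC(evas_video_sink_1x_debug);

    /* replaces GST_BOILERPLATE_FULL(); GObject has no base_init counterpart */
    G_DEFINE_TYPE(EvasVideoSink1x, evas_video_sink_1x, GST_TYPE_VIDEO_SINK)

    static void
    evas_video_sink_1x_class_init(EvasVideoSink1xClass *klass)
    {
       (void)klass;
       /* what _do_init did can simply run here; pad templates and element
        * metadata would also be registered from class_init in 1.x */
       GST_DEBUG_CATEGORY_INIT(evas_video_sink_1x_debug, "emotion-sink", 0,
                               "emotion video sink");
    }

    static void
    evas_video_sink_1x_init(EvasVideoSink1x *sink)
    {
       (void)sink; /* per-instance setup goes here */
    }
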
-
-static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
-static void evas_video_sink_main_render(void *data);
-static void evas_video_sink_samsung_main_render(void *data);
-
-static void
-evas_video_sink_base_init(gpointer g_class)
-{
-   GstElementClass* element_class;
-
-   element_class = GST_ELEMENT_CLASS(g_class);
-   gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
-   gst_element_class_set_details_simple(element_class, "Evas video sink",
-                                        "Sink/Video", "Sends video data from a 
GStreamer pipeline to an Evas object",
-                                        "Vincent Torri <[email protected]>");
-}
-
-static void
-evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass EINA_UNUSED)
-{
-   EvasVideoSinkPrivate* priv;
-
-   INF("sink init");
-   sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
-   priv->o = NULL;
-   priv->width = 0;
-   priv->height = 0;
-   priv->func = NULL;
-   priv->eformat = EVAS_COLORSPACE_ARGB8888;
-   priv->samsung = EINA_FALSE;
-   eina_lock_new(&priv->m);
-   eina_condition_new(&priv->c, &priv->m);
-   priv->unlocked = EINA_FALSE;
-}
-
-/**** Object methods ****/
-static void
-_cleanup_priv(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED)
-{
-   EvasVideoSinkPrivate* priv;
-
-   priv = data;
-
-   eina_lock_take(&priv->m);
-   if (priv->o == obj)
-     priv->o = NULL;
-   eina_lock_release(&priv->m);
-}
-
-static void
-evas_video_sink_set_property(GObject * object, guint prop_id,
-                             const GValue * value, GParamSpec * pspec)
-{
-   EvasVideoSink* sink;
-   EvasVideoSinkPrivate* priv;
-
-   sink = EVAS_VIDEO_SINK (object);
-   priv = sink->priv;
-
-   switch (prop_id) {
-    case PROP_EVAS_OBJECT:
-       eina_lock_take(&priv->m);
-       if (priv->o)
-         evas_object_event_callback_del(priv->o, EVAS_CALLBACK_DEL, _cleanup_priv);
-       priv->o = g_value_get_pointer (value);
-       INF("sink set Evas_Object %p.", priv->o);
-       if (priv->o)
-         evas_object_event_callback_add(priv->o, EVAS_CALLBACK_DEL, _cleanup_priv, priv);
-       eina_lock_release(&priv->m);
-       break;
-    case PROP_EV:
-       INF("sink set ev.");
-       eina_lock_take(&priv->m);
-       priv->ev = g_value_get_pointer (value);
-       if (priv->ev)
-         priv->ev->samsung = EINA_TRUE;
-       eina_lock_release(&priv->m);
-       break;
-    default:
-       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
-       ERR("invalid property");
-       break;
-   }
-}
-
-static void
-evas_video_sink_get_property(GObject * object, guint prop_id,
-                             GValue * value, GParamSpec * pspec)
-{
-   EvasVideoSink* sink;
-   EvasVideoSinkPrivate* priv;
-
-   sink = EVAS_VIDEO_SINK (object);
-   priv = sink->priv;
-
-   switch (prop_id) {
-    case PROP_EVAS_OBJECT:
-       INF("sink get property.");
-       eina_lock_take(&priv->m);
-       g_value_set_pointer(value, priv->o);
-       eina_lock_release(&priv->m);
-       break;
-    case PROP_WIDTH:
-       INF("sink get width.");
-       eina_lock_take(&priv->m);
-       g_value_set_int(value, priv->width);
-       eina_lock_release(&priv->m);
-       break;
-    case PROP_HEIGHT:
-       INF("sink get height.");
-       eina_lock_take(&priv->m);
-       g_value_set_int (value, priv->height);
-       eina_lock_release(&priv->m);
-       break;
-    case PROP_EV:
-       INF("sink get ev.");
-       eina_lock_take(&priv->m);
-       g_value_set_pointer (value, priv->ev);
-       eina_lock_release(&priv->m);
-       break;
-    default:
-       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
-       ERR("invalid property");
-       break;
-   }
-}
-
-static void
-evas_video_sink_dispose(GObject* object)
-{
-   EvasVideoSink* sink;
-   EvasVideoSinkPrivate* priv;
-
-   INF("dispose.");
-
-   sink = EVAS_VIDEO_SINK(object);
-   priv = sink->priv;
-
-   eina_lock_free(&priv->m);
-   eina_condition_free(&priv->c);
-
-   G_OBJECT_CLASS(parent_class)->dispose(object);
-}
-
-
-/**** BaseSink methods ****/
-
-gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
-{
-   EvasVideoSink* sink;
-   EvasVideoSinkPrivate* priv;
-   GstStructure *structure;
-   GstVideoFormat format;
-   guint32 fourcc;
-   unsigned int i;
-
-   sink = EVAS_VIDEO_SINK(bsink);
-   priv = sink->priv;
-
-   structure = gst_caps_get_structure(caps, 0);
-
-   if (gst_structure_get_int(structure, "width", (int*) &priv->width)
-       && gst_structure_get_int(structure, "height", (int*) &priv->height)
-       && gst_structure_get_fourcc(structure, "format", &fourcc))
-     {
-        priv->source_height = priv->height;
-
-        for (i = 0; colorspace_fourcc_convertion[i].name != NULL; ++i)
-          if (fourcc == colorspace_fourcc_convertion[i].fourcc)
-            {
-               DBG("Found '%s'", colorspace_fourcc_convertion[i].name);
-               priv->eformat = colorspace_fourcc_convertion[i].eformat;
-               priv->func = colorspace_fourcc_convertion[i].func;
-              if (colorspace_fourcc_convertion[i].force_height)
-                 {
-                    priv->height = (priv->height >> 1) << 1;
-                 }
-               if (priv->ev)
-                 priv->ev->kill_buffer = EINA_TRUE;
-               return TRUE;
-            }
-
-        if (fourcc == GST_MAKE_FOURCC('S', 'T', '1', '2'))
-          {
-             DBG("Found '%s'", "ST12");
-             priv->eformat = EVAS_COLORSPACE_YCBCR420TM12601_PL;
-             priv->samsung = EINA_TRUE;
-             priv->func = NULL;
-             if (priv->ev)
-               {
-                  priv->ev->samsung = EINA_TRUE;
-                  priv->ev->kill_buffer = EINA_TRUE;
-               }
-            return TRUE;
-          }
-     }
-
-   INF("fallback code !");
-   if (!gst_video_format_parse_caps(caps, &format, (int*) &priv->width, (int*) &priv->height))
-     {
-        ERR("Unable to parse caps.");
-        return FALSE;
-     }
-
-   priv->source_height = priv->height;
-
-   for (i = 0; colorspace_format_convertion[i].name != NULL; ++i)
-     if (format == colorspace_format_convertion[i].format)
-       {
-          DBG("Found '%s'", colorspace_format_convertion[i].name);
-          priv->eformat = colorspace_format_convertion[i].eformat;
-          priv->func = colorspace_format_convertion[i].func;
-          if (priv->ev)
-            priv->ev->kill_buffer = EINA_FALSE;
-          return TRUE;
-       }
-
-   ERR("unsupported : %d\n", format);
-   return FALSE;
-}
-
-static gboolean
-evas_video_sink_start(GstBaseSink* base_sink)
-{
-   EvasVideoSinkPrivate* priv;
-   gboolean res = TRUE;
-
-   INF("sink start");
-
-   priv = EVAS_VIDEO_SINK(base_sink)->priv;
-   eina_lock_take(&priv->m);
-   if (!priv->o)
-     res = FALSE;
-   else
-     priv->unlocked = EINA_FALSE;
-   eina_lock_release(&priv->m);
-   return res;
-}
-
-static gboolean
-evas_video_sink_stop(GstBaseSink* base_sink)
-{
-   EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
-
-   INF("sink stop");
-
-   unlock_buffer_mutex(priv);
-   return TRUE;
-}
-
-static gboolean
-evas_video_sink_unlock(GstBaseSink* object)
-{
-   EvasVideoSink* sink;
-
-   INF("sink unlock");
-
-   sink = EVAS_VIDEO_SINK(object);
-
-   unlock_buffer_mutex(sink->priv);
-
-   return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
-                                       (object), TRUE);
-}
-
-static gboolean
-evas_video_sink_unlock_stop(GstBaseSink* object)
-{
-   EvasVideoSink* sink;
-   EvasVideoSinkPrivate* priv;
-
-   sink = EVAS_VIDEO_SINK(object);
-   priv = sink->priv;
-
-   INF("sink unlock stop");
-
-   eina_lock_take(&priv->m);
-   priv->unlocked = FALSE;
-   eina_lock_release(&priv->m);
-
-   return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
-                                       (object), TRUE);
-}
-
-static GstFlowReturn
-evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
-{
-   Emotion_Gstreamer_Buffer *send;
-   EvasVideoSinkPrivate *priv;
-   EvasVideoSink *sink;
-
-   INF("sink preroll %p [%i]", GST_BUFFER_DATA(buffer), 
GST_BUFFER_SIZE(buffer));
-
-   sink = EVAS_VIDEO_SINK(bsink);
-   priv = sink->priv;
-
-   if (GST_BUFFER_SIZE(buffer) <= 0 && !priv->samsung)
-     {
-        WRN("empty buffer");
-        return GST_FLOW_OK;
-     }
-
-   send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
-
-   if (send)
-     {
-        if (priv->samsung)
-          {
-             if (!priv->func)
-               {
-                  GstStructure *structure;
-                  GstCaps *caps;
-                  gboolean is_multiplane = FALSE;
-
-                  caps = GST_BUFFER_CAPS(buffer);
-                  structure = gst_caps_get_structure (caps, 0);
-                  gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
-                 gst_caps_unref(caps);
-
-                  if (is_multiplane)
-                    priv->func = _evas_video_st12_multiplane;
-                  else
-                    priv->func = _evas_video_st12;
-               }
-             _emotion_pending_ecore_begin();
-             ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
-          }
-        else
-          {
-             _emotion_pending_ecore_begin();
-             ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
-          }
-     }
-
-   return GST_FLOW_OK;
-}
-
-static GstFlowReturn
-evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
-{
-   Emotion_Gstreamer_Buffer *send;
-   EvasVideoSinkPrivate *priv;
-   EvasVideoSink *sink;
-
-   INF("sink render %p", buffer);
-
-   sink = EVAS_VIDEO_SINK(bsink);
-   priv = sink->priv;
-
-   eina_lock_take(&priv->m);
-
-   if (priv->unlocked) {
-      ERR("LOCKED");
-      eina_lock_release(&priv->m);
-      return GST_FLOW_OK;
-   }
-
-   send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
-   if (!send) {
-      eina_lock_release(&priv->m);
-      return GST_FLOW_ERROR;
-   }
-
-   if (priv->samsung)
-     {
-        if (!priv->func)
-          {
-             GstStructure *structure;
-             GstCaps *caps;
-             gboolean is_multiplane = FALSE;
-
-             caps = GST_BUFFER_CAPS(buffer);
-             structure = gst_caps_get_structure (caps, 0);
-             gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
-            gst_caps_unref(caps);
-
-             if (is_multiplane)
-               priv->func = _evas_video_st12_multiplane;
-             else
-               priv->func = _evas_video_st12;
-          }
-        _emotion_pending_ecore_begin();
-        ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
-     }
-   else
-     {
-        _emotion_pending_ecore_begin();
-        ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
-     }
-
-   eina_condition_wait(&priv->c);
-   eina_lock_release(&priv->m);
-
-   return GST_FLOW_OK;
-}
-
-static void
-_update_emotion_fps(Emotion_Gstreamer_Video *ev)
-{
-   double tim;
-
-   if (!debug_fps) return;
-
-   tim = ecore_time_get();
-   ev->frames++;
-
-   if (ev->rlapse == 0.0)
-     {
-        ev->rlapse = tim;
-        ev->flapse = ev->frames;
-     }
-   else if ((tim - ev->rlapse) >= 0.5)
-     {
-        printf("FRAME: %i, FPS: %3.1f\n",
-               ev->frames,
-               (ev->frames - ev->flapse) / (tim - ev->rlapse));
-        ev->rlapse = tim;
-        ev->flapse = ev->frames;
-     }
-}
-
-static void
-evas_video_sink_samsung_main_render(void *data)
-{
-   Emotion_Gstreamer_Buffer *send;
-   Emotion_Video_Stream *vstream;
-   EvasVideoSinkPrivate *priv = NULL;
-   GstBuffer* buffer;
-   unsigned char *evas_data;
-   const guint8 *gst_data;
-   GstFormat fmt = GST_FORMAT_TIME;
-   gint64 pos;
-   Eina_Bool preroll = EINA_FALSE;
-   int stride, elevation;
-   Evas_Coord w, h;
-
-   send = data;
-
-   if (!send) goto exit_point;
-
-   priv = send->sink;
-   buffer = send->frame;
-   preroll = send->preroll;
-
-   /* frame after cleanup */
-   if (!preroll && !send->ev->last_buffer)
-     {
-        priv = NULL;
-        goto exit_point;
-     }
-
-   if (!priv || !priv->o || priv->unlocked)
-     goto exit_point;
-
-   if (send->ev->send)
-     {
-        emotion_gstreamer_buffer_free(send->ev->send);
-        send->ev->send = NULL;
-     }
-
-   if (!send->ev->stream && !send->force)
-     {
-        send->ev->send = send;
-        _emotion_frame_new(send->ev->obj);
-        goto exit_stream;
-     }
-
-   _emotion_gstreamer_video_pipeline_parse(send->ev, EINA_TRUE);
-
-   /* Getting stride to compute the right size and then fill the object properly */
-   /* Y => [0] and UV in [1] */
-   if (priv->func == _evas_video_st12_multiplane)
-     {
-        const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) buffer;
-
-        stride = mp_buf->stride[0];
-        elevation = mp_buf->elevation[0];
-        priv->width = mp_buf->width[0];
-        priv->height = mp_buf->height[0];
-
-        gst_data = (const guint8 *) mp_buf;
-     }
-   else
-     {
-        const SCMN_IMGB *imgb = (const SCMN_IMGB *) GST_BUFFER_MALLOCDATA(buffer);
-
-        stride = imgb->stride[0];
-        elevation = imgb->elevation[0];
-        priv->width = imgb->width[0];
-        priv->height = imgb->height[0];
-
-        gst_data = (const guint8 *) imgb;
-     }
-
-   evas_object_geometry_get(priv->o, NULL, NULL, &w, &h);
-
-   send->ev->fill.width = (double) stride / priv->width;
-   send->ev->fill.height = (double) elevation / priv->height;
-
-   evas_object_image_alpha_set(priv->o, 0);
-   evas_object_image_colorspace_set(priv->o, priv->eformat);
-   evas_object_image_size_set(priv->o, stride, elevation);
-
-   _update_emotion_fps(send->ev);
-
-   evas_data = evas_object_image_data_get(priv->o, 1);
-
-   if (priv->func)
-     priv->func(evas_data, gst_data, stride, elevation, elevation);
-   else
-     WRN("No way to decode colorspace '%x'!", priv->eformat);
-
-   evas_object_image_data_set(priv->o, evas_data);
-   evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
-   evas_object_image_pixels_dirty_set(priv->o, 0);
-
-   if (!preroll && send->ev->play_started)
-     {
-        _emotion_playback_started(send->ev->obj);
-        send->ev->play_started = 0;
-     }
-
-   if (!send->force)
-     {
-        _emotion_frame_new(send->ev->obj);
-     }
-
-   vstream = eina_list_nth(send->ev->video_streams, send->ev->video_stream_nbr - 1);
-
-   gst_element_query_position(send->ev->pipeline, &fmt, &pos);
-   send->ev->position = (double)pos / (double)GST_SECOND;
-
-   if (vstream)
-     {
-        vstream->width = priv->width;
-        vstream->height = priv->height;
-
-        _emotion_video_pos_update(send->ev->obj, send->ev->position, vstream->length_time);
-     }
-
-   send->ev->ratio = (double) priv->width / (double) priv->height;
-   _emotion_frame_refill(send->ev->obj, send->ev->fill.width, send->ev->fill.height);
-   _emotion_frame_resize(send->ev->obj, priv->width, priv->height, send->ev->ratio);
-
-   buffer = gst_buffer_ref(buffer);
-   if (send->ev->last_buffer) gst_buffer_unref(send->ev->last_buffer);
-   send->ev->last_buffer = buffer;
-
- exit_point:
-   if (send) emotion_gstreamer_buffer_free(send);
-
- exit_stream:
-   if (priv)
-     {
-        if (preroll || !priv->o)
-          {
-             _emotion_pending_ecore_end();
-             return;
-          }
-        
-        if (!priv->unlocked)
-          eina_condition_signal(&priv->c);
-     }
-   _emotion_pending_ecore_end();
-}
-
-static void
-evas_video_sink_main_render(void *data)
-{
-   Emotion_Gstreamer_Buffer *send;
-   Emotion_Gstreamer_Video *ev = NULL;
-   Emotion_Video_Stream *vstream;
-   EvasVideoSinkPrivate *priv = NULL;
-   GstBuffer *buffer;
-   unsigned char *evas_data;
-   GstFormat fmt = GST_FORMAT_TIME;
-   gint64 pos;
-   Eina_Bool preroll = EINA_FALSE;
-
-   send = data;
-
-   if (!send) goto exit_point;
-
-   priv = send->sink;
-   buffer = send->frame;
-   preroll = send->preroll;
-   ev = send->ev;
-
-   /* frame after cleanup */
-   if (!preroll && !ev->last_buffer)
-     {
-        priv = NULL;
-        goto exit_point;
-     }
-
-   if (!priv || !priv->o || priv->unlocked)
-     goto exit_point;
-
-   if (ev->send && send != ev->send)
-     {
-        emotion_gstreamer_buffer_free(ev->send);
-        ev->send = NULL;
-     }
-
-   if (!ev->stream && !send->force)
-     {
-        ev->send = send;
-        _emotion_frame_new(ev->obj);
-        evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
-        goto exit_stream;
-     }
-
-   _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
-
-   INF("sink main render [%i, %i] (source height: %i)", priv->width, 
priv->height, priv->source_height);
-
-   evas_object_image_alpha_set(priv->o, 0);
-   evas_object_image_colorspace_set(priv->o, priv->eformat);
-   evas_object_image_size_set(priv->o, priv->width, priv->height);
-
-   evas_data = evas_object_image_data_get(priv->o, 1);
-
-   if (priv->func)
-     priv->func(evas_data, GST_BUFFER_DATA(buffer), priv->width, priv->source_height, priv->height);
-   else
-     WRN("No way to decode colorspace '%x'!", priv->eformat);
-
-   evas_object_image_data_set(priv->o, evas_data);
-   evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
-   evas_object_image_pixels_dirty_set(priv->o, 0);
-
-   _update_emotion_fps(ev);
-
-   if (!preroll && ev->play_started)
-     {
-        _emotion_playback_started(ev->obj);
-        ev->play_started = 0;
-     }
-
-   if (!send->force)
-     {
-        _emotion_frame_new(ev->obj);
-     }
-
-   gst_element_query_position(ev->pipeline, &fmt, &pos);
-   ev->position = (double)pos / (double)GST_SECOND;
-
-   vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
-
-   if (vstream)
-     {
-       vstream->width = priv->width;
-       vstream->height = priv->height;
-       _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
-     }
-
-   ev->ratio = (double) priv->width / (double) priv->height;
-
-   _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);
-
-   buffer = gst_buffer_ref(buffer);
-   if (ev->last_buffer) gst_buffer_unref(ev->last_buffer);
-   ev->last_buffer = buffer;
-
- exit_point:
-   if (send) emotion_gstreamer_buffer_free(send);
-
- exit_stream:
-   if (priv)
-     {
-        if (preroll || !priv->o)
-          {
-             _emotion_pending_ecore_end();
-             return;
-          }
-        
-        if (!priv->unlocked)
-          eina_condition_signal(&priv->c);
-     }
-   _emotion_pending_ecore_end();
-}
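
Both render paths read pixels through GST_BUFFER_DATA()/GST_BUFFER_SIZE(), accessors that no longer exist in 1.x, where a buffer must be mapped before its bytes can be touched. A minimal sketch of the replacement access pattern (copy_buffer_bytes is illustrative, not part of this module):

    #include <string.h>
    #include <gst/gst.h>

    /* Read the raw bytes of a buffer the 1.x way; replaces GST_BUFFER_DATA/SIZE. */
    static gboolean
    copy_buffer_bytes(GstBuffer *buffer, guint8 *dst, gsize dst_size)
    {
       GstMapInfo map;
       gboolean ok = FALSE;

       if (!gst_buffer_map(buffer, &map, GST_MAP_READ))
         return FALSE;

       if (map.size <= dst_size)
         {
            memcpy(dst, map.data, map.size); /* map.data/map.size replace the old macros */
            ok = TRUE;
         }

       gst_buffer_unmap(buffer, &map);
       return ok;
    }
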
-
-static void
-unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
-{
-   priv->unlocked = EINA_TRUE;
-
-   eina_condition_signal(&priv->c);
-}
-
-static void
-marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value EINA_UNUSED,
-                         guint n_param_values, const GValue * param_values,
-                         gpointer invocation_hint EINA_UNUSED, gpointer marshal_data)
-{
-   typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
-   marshalfunc_VOID__MINIOBJECT callback;
-   GCClosure *cc;
-   gpointer data1, data2;
-
-   cc = (GCClosure *) closure;
-
-   g_return_if_fail(n_param_values == 2);
-
-   if (G_CCLOSURE_SWAP_DATA(closure)) {
-      data1 = closure->data;
-      data2 = g_value_peek_pointer(param_values + 0);
-   } else {
-      data1 = g_value_peek_pointer(param_values + 0);
-      data2 = closure->data;
-   }
-   callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);
-
-   callback(data1, gst_value_get_mini_object(param_values + 1), data2);
-}
-
-static void
-evas_video_sink_class_init(EvasVideoSinkClass* klass)
-{
-   GObjectClass* gobject_class;
-   GstBaseSinkClass* gstbase_sink_class;
-
-   gobject_class = G_OBJECT_CLASS(klass);
-   gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
-
-   g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
-
-   gobject_class->set_property = evas_video_sink_set_property;
-   gobject_class->get_property = evas_video_sink_get_property;
-
-   g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
-                                    g_param_spec_pointer ("evas-object", "Evas 
Object",
-                                                          "The Evas object 
where the display of the video will be done",
-                                                          G_PARAM_READWRITE));
-
-   g_object_class_install_property (gobject_class, PROP_WIDTH,
-                                    g_param_spec_int ("width", "Width",
-                                                      "The width of the video",
-                                                      0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
-
-   g_object_class_install_property (gobject_class, PROP_HEIGHT,
-                                    g_param_spec_int ("height", "Height",
-                                                      "The height of the 
video",
-                                                      0, 65536, 0, 
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
-   g_object_class_install_property (gobject_class, PROP_EV,
-                                    g_param_spec_pointer ("ev", 
"Emotion_Gstreamer_Video",
-                                                          "THe internal data 
of the emotion object",
-                                                          G_PARAM_READWRITE));
-
-   gobject_class->dispose = evas_video_sink_dispose;
-
-   gstbase_sink_class->set_caps = evas_video_sink_set_caps;
-   gstbase_sink_class->stop = evas_video_sink_stop;
-   gstbase_sink_class->start = evas_video_sink_start;
-   gstbase_sink_class->unlock = evas_video_sink_unlock;
-   gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
-   gstbase_sink_class->render = evas_video_sink_render;
-   gstbase_sink_class->preroll = evas_video_sink_preroll;
-
-   evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
-                                                             G_TYPE_FROM_CLASS(klass),
-                                                             (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
-                                                             0,
-                                                             0,
-                                                             0,
-                                                             marshal_VOID__MINIOBJECT,
-                                                             G_TYPE_NONE, 1, GST_TYPE_BUFFER);
-}
-
-gboolean
-gstreamer_plugin_init (GstPlugin * plugin)
-{
-   return gst_element_register (plugin,
-                                "emotion-sink",
-                                GST_RANK_NONE,
-                                EVAS_TYPE_VIDEO_SINK);
-}
-
-static void
-_emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
-{
-   Emotion_Gstreamer_Video *ev = data;
-   gboolean res;
-
-   if (ecore_thread_check(thread) || !ev->pipeline) return;
-
-   gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
-   res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
-   if (res == GST_STATE_CHANGE_NO_PREROLL)
-     {
-        gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
-       gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
-     }
-}
-
-static void
-_emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
-{
-   Emotion_Gstreamer_Video *ev = data;
-
-   ev->threads = eina_list_remove(ev->threads, thread);
-
-#if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
-   if (getuid() == geteuid())
-#endif
-     {
-        if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
-     }
-
-   if (ev->in == ev->out && ev->delete_me)
-     ev->api->del(ev);
-}
-
-static void
-_emotion_gstreamer_end(void *data, Ecore_Thread *thread)
-{
-   Emotion_Gstreamer_Video *ev = data;
-
-   ev->threads = eina_list_remove(ev->threads, thread);
-
-   if (ev->play)
-     {
-        gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
-        ev->play_started = 1;
-     }
-
-#if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
-   if (getuid() == geteuid())
-#endif
-     {
-        if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
-     }
-
-   if (ev->in == ev->out && ev->delete_me)
-     ev->api->del(ev);
-   else
-     _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
-}
-
-static void
-_video_resize(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED,
-              Evas_Coord w, Evas_Coord h)
-{
-#ifdef HAVE_ECORE_X
-   Emotion_Gstreamer_Video *ev = data;
-
-   ecore_x_window_resize(ev->win, w, h);
-   DBG("resize: %i, %i", w, h);
-#else   
-   if (data)
-     {
-        DBG("resize: %i, %i (fake)", w, h);
-     }
-#endif
-}
-
-static void
-_video_move(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED,
-            Evas_Coord x, Evas_Coord y)
-{
-#ifdef HAVE_ECORE_X
-   Emotion_Gstreamer_Video *ev = data;
-   unsigned int pos[2];
-
-   DBG("move: %i, %i", x, y);
-   pos[0] = x; pos[1] = y;
-   ecore_x_window_prop_card32_set(ev->win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
-#else   
-   if (data)
-     {
-        DBG("move: %i, %i (fake)", x, y);
-     }
-#endif
-}
-
-#if 0
-/* Much better idea to always feed the XvImageSink and let it handle optimizing the rendering as we do */
-static void
-_block_pad_unlink_cb(GstPad *pad, gboolean blocked, gpointer user_data)
-{
-   if (blocked)
-     {
-        Emotion_Gstreamer_Video *ev = user_data;
-        GstEvent *gev;
-
-        gst_pad_unlink(ev->xvteepad, ev->xvpad);
-        gev = gst_event_new_eos();
-        gst_pad_send_event(ev->xvpad, gev);
-        gst_pad_set_blocked_async(pad, FALSE, _block_pad_unlink_cb, NULL);
-     }
-}
-
-static void
-_block_pad_link_cb(GstPad *pad, gboolean blocked, gpointer user_data)
-{
-   if (blocked)
-     {
-        Emotion_Gstreamer_Video *ev = user_data;
-
-        gst_pad_link(ev->xvteepad, ev->xvpad);
-        if (ev->play)
-          gst_element_set_state(ev->xvsink, GST_STATE_PLAYING);
-        else
-          gst_element_set_state(ev->xvsink, GST_STATE_PAUSED);
-        gst_pad_set_blocked_async(pad, FALSE, _block_pad_link_cb, NULL);
-     }
-}
-#endif
-
-static void
-_video_show(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED)
-{
-#ifdef HAVE_ECORE_X
-   Emotion_Gstreamer_Video *ev = data;
-
-   DBG("show xv");
-   ecore_x_window_show(ev->win);
-#else
-   if (data)
-     {
-        DBG("show xv (fake)");
-     }
-#endif
-   /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_link_cb, ev); */
-}
-
-static void
-_video_hide(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED)
-{
-#ifdef HAVE_ECORE_X
-   Emotion_Gstreamer_Video *ev = data;
-
-   DBG("hide xv");
-   ecore_x_window_hide(ev->win);
-#else
-   if (data)
-     {
-        DBG("hide xv (fake)");
-     }
-#endif
-   /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_unlink_cb, ev); */
-}
-
-static void
-_video_update_pixels(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_Surface *surface EINA_UNUSED)
-{
-   Emotion_Gstreamer_Video *ev = data;
-   Emotion_Gstreamer_Buffer *send;
-   EvasVideoSinkPrivate *priv = NULL;
-
-   if (!ev->send) return;
-
-   send = ev->send;
-   priv = send->sink;
-   send->force = EINA_TRUE;
-   ev->send = NULL;
-
-   if (priv->samsung)
-     {
-        _emotion_pending_ecore_begin();
-        evas_video_sink_samsung_main_render(send);
-     }
-   else
-     {
-        _emotion_pending_ecore_begin();
-        evas_video_sink_main_render(send);
-     }
-}
-
-static void
-_image_resize(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED)
-{
-   Emotion_Gstreamer_Video *ev = data;
-   Evas_Coord width, height;
-   int image_area, src_area;
-   double ratio;
-
-   GstElementFactory *cfactory = NULL;
-   GstElement *convert = NULL, *filter = NULL, *queue = NULL;
-   GstPad *pad = NULL, *teepad = NULL;
-   GstCaps *caps = NULL;
-   Eina_List *l, *engines;
-   const char *ename, *engine = NULL;
-
-   evas_object_geometry_get(obj, NULL, NULL, &width, &height);
-   image_area = width * height;
-   src_area = ev->src_width * ev->src_height;
-   ratio = (double)image_area / (double)src_area;
-
-   // when an image is much smaller than original video size,
-   // add fimcconvert element to the pipeline
-   if (ratio < 0.8 && ev->stream && !ev->convert)
-     {
-        cfactory = gst_element_factory_find("fimcconvert");
-        if (!cfactory) return;
-
-        convert = gst_element_factory_create(cfactory, NULL);
-        if (!convert) return;
-
-        // add capsfilter to limit size and formats based on the backend
-        filter = gst_element_factory_make("capsfilter", "fimccapsfilter");
-        if (!filter)
-          {
-             gst_object_unref(convert);
-             return;
-          }
-
-        engines = evas_render_method_list();
-        EINA_LIST_FOREACH(engines, l, ename)
-          {
-             if (evas_render_method_lookup(ename) ==
-                 evas_output_method_get(evas_object_evas_get(obj)))
-               {
-                  engine = ename;
-                  break;
-               }
-          }
-
-        if (!engine) return;
-
-        if (strstr(engine, "software") != NULL)
-          {
-             caps = gst_caps_new_simple("video/x-raw-rgb",
-                                        "width", G_TYPE_INT, width,
-                                        "height", G_TYPE_INT, height,
-                                        NULL);
-          }
-        else if (strstr(engine, "gl") != NULL)
-          {
-             caps = gst_caps_new_simple("video/x-raw-yuv",
-                                        "width", G_TYPE_INT, width,
-                                        "height", G_TYPE_INT, height,
-                                        NULL);
-          }
-        g_object_set(G_OBJECT(filter), "caps", caps, NULL);
-        gst_caps_unref(caps);
-
-        // add new elements to the pipeline
-        queue = gst_bin_get_by_name(GST_BIN(ev->sink), "equeue");
-        gst_element_unlink(ev->tee, queue);
-        gst_element_release_request_pad(ev->tee, ev->eteepad);
-        gst_object_unref(ev->eteepad);
-
-        gst_bin_add_many(GST_BIN(ev->sink), convert, filter, NULL);
-        gst_element_link_many(ev->tee, convert, filter, queue, NULL);
-
-        pad = gst_element_get_pad(convert, "sink");
-        teepad = gst_element_get_request_pad(ev->tee, "src%d");
-        gst_pad_link(teepad, pad);
-        gst_object_unref(pad);
-
-        gst_element_sync_state_with_parent(convert);
-        gst_element_sync_state_with_parent(filter);
-
-        ev->eteepad = teepad;
-        ev->convert = convert;
-        evas_render_method_list_free(engines);
-
-        INF("add fimcconvert element. video size: %dx%d. emotion object size: %dx%d",
-            ev->src_width, ev->src_height, width, height);
-     }
-   // set size again to the capsfilter when the image is resized
-   else if (ev->convert)
-     {
-        filter = gst_bin_get_by_name(GST_BIN(ev->sink), "fimccapsfilter");
-
-        engines = evas_render_method_list();
-        EINA_LIST_FOREACH(engines, l, ename)
-          {
-             if (evas_render_method_lookup(ename) ==
-                 evas_output_method_get(evas_object_evas_get(obj)))
-               {
-                  engine = ename;
-                  break;
-               }
-          }
-
-        if (!engine) return;
-
-        if (strstr(engine, "software") != NULL)
-          {
-             caps = gst_caps_new_simple("video/x-raw-rgb",
-                                        "width", G_TYPE_INT, width,
-                                        "height", G_TYPE_INT, height,
-                                        NULL);
-          }
-        else if (strstr(engine, "gl") != NULL)
-          {
-             caps = gst_caps_new_simple("video/x-raw-yuv",
-                                        "width", G_TYPE_INT, width,
-                                        "height", G_TYPE_INT, height,
-                                        NULL);
-          }
-
-        g_object_set(G_OBJECT(filter), "caps", caps, NULL);
-        gst_caps_unref(caps);
-        evas_render_method_list_free(engines);
-
-        INF("set capsfilter size again. video size: %dx%d. emotion object size: %dx%d",
-            ev->src_width, ev->src_height, width, height);
-     }
-}
-
-GstElement *
-gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
-                        Evas_Object *o,
-                        const char *uri)
-{
-   GstElement *playbin;
-   GstElement *bin = NULL;
-   GstElement *esink = NULL;
-   GstElement *xvsink = NULL;
-   GstElement *tee = NULL;
-   GstElement *queue = NULL;
-   Evas_Object *obj;
-   GstPad *pad;
-   GstPad *teepad;
-   int flags;
-   const char *launch;
-#if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
-   const char *engine = NULL;
-   Eina_List *engines;
-#endif
-
-   obj = emotion_object_image_get(o);
-   if (!obj)
-     {
-//        ERR("Not Evas_Object specified");
-        return NULL;
-     }
-
-   if (!uri)
-     return NULL;
-
-   launch = emotion_webcam_custom_get(uri);
-   if (launch)
-     {
-        GError *error = NULL;
-
-        playbin = gst_parse_bin_from_description(launch, 1, &error);
-        if (!playbin)
-          {
-             ERR("Unable to setup command : '%s' got error '%s'.", launch, error->message);
-             g_error_free(error);
-             return NULL;
-          }
-        if (error)
-          {
-             WRN("got recoverable error '%s' for command : '%s'.", error->message, launch);
-             g_error_free(error);
-          }
-     }
-   else
-     {
-        playbin = gst_element_factory_make("playbin2", "playbin");
-        if (!playbin)
-          {
-             ERR("Unable to create 'playbin' GstElement.");
-             return NULL;
-          }
-     }
-
-   bin = gst_bin_new(NULL);
-   if (!bin)
-     {
-       ERR("Unable to create GstBin !");
-       goto unref_pipeline;
-     }
-
-   tee = gst_element_factory_make("tee", NULL);
-   if (!tee)
-     {
-       ERR("Unable to create 'tee' GstElement.");
-       goto unref_pipeline;
-     }
-
-#if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
-   if (window_manager_video)
-     {
-        Eina_List *l;
-        const char *ename;
-        
-        engines = evas_render_method_list();
-
-        EINA_LIST_FOREACH(engines, l, ename)
-          {
-             if (evas_render_method_lookup(ename) == 
-                 evas_output_method_get(evas_object_evas_get(obj)))
-               {
-                  engine = ename;
-                  break;
-               }
-          }
-
-       if (ev->priority && engine && strstr(engine, "_x11") != NULL)
-        {
-          Ecore_Evas *ee;
-          Evas_Coord x, y, w, h;
-          Ecore_X_Window win;
-          Ecore_X_Window parent;
-
-          evas_object_geometry_get(obj, &x, &y, &w, &h);
-
-          ee = ecore_evas_ecore_evas_get(evas_object_evas_get(obj));
-
-          if (w < 4) w = 4;
-          if (h < 2) h = 2;
-
-          /* Here we really need to have the help of the window manager, this code will change when we update E17. */
-          parent = (Ecore_X_Window) ecore_evas_window_get(ee);
-          DBG("parent: %x", parent);
-
-          win = ecore_x_window_new(0, x, y, w, h);
-          DBG("creating window: %x [%i, %i, %i, %i]", win, x, y, w, h);
-          if (win)
-            {
-              Ecore_X_Window_State state[] = { ECORE_X_WINDOW_STATE_SKIP_TASKBAR, ECORE_X_WINDOW_STATE_SKIP_PAGER };
-
-              ecore_x_netwm_window_state_set(win, state, 2);
-              ecore_x_window_hide(win);
-              xvsink = gst_element_factory_make("xvimagesink", NULL);
-              if (xvsink)
-                {
-                  unsigned int pos[2];
-
-#ifdef HAVE_X_OVERLAY_SET
-                  gst_x_overlay_set_window_handle(GST_X_OVERLAY(xvsink), win);
-#else
-                  gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(xvsink), win);
-#endif
-                  ev->win = win;
-
-                  ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_PARENT, &parent, 1);
-
-                  pos[0] = x; pos[1] = y;
-                  ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
-                }
-              else
-                {
-                  DBG("destroying win: %x", win);
-                  ecore_x_window_free(win);
-                }
-            }
-        }
-       evas_render_method_list_free(engines);
-     }
-#else
-//# warning "missing: ecore_x OR xoverlay"
-#endif
-
-   esink = gst_element_factory_make("emotion-sink", "sink");
-   if (!esink)
-     {
-        ERR("Unable to create 'emotion-sink' GstElement.");
-        goto unref_pipeline;
-     }
-
-   g_object_set(G_OBJECT(esink), "evas-object", obj, NULL);
-   g_object_set(G_OBJECT(esink), "ev", ev, NULL);
-
-   evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
-   evas_object_event_callback_add(obj, EVAS_CALLBACK_RESIZE, _image_resize, ev);
-
-   /* We need queue to force each video sink to be in its own thread */
-   queue = gst_element_factory_make("queue", "equeue");
-   if (!queue)
-     {
-        ERR("Unable to create 'queue' GstElement.");
-        goto unref_pipeline;
-     }
-
-   gst_bin_add_many(GST_BIN(bin), tee, queue, esink, NULL);
-   gst_element_link_many(queue, esink, NULL);
-
-   /* link both sink to GstTee */
-   pad = gst_element_get_pad(queue, "sink");
-   teepad = gst_element_get_request_pad(tee, "src%d");
-   gst_pad_link(teepad, pad);
-   gst_object_unref(pad);
-
-   ev->eteepad = teepad;
-
-   if (xvsink)
-     {
-        GstElement *fakeeos;
-
-        queue = gst_element_factory_make("queue", "xvqueue");
-        fakeeos = GST_ELEMENT(GST_BIN(g_object_new(GST_TYPE_FAKEEOS_BIN, "name", "eosbin", NULL)));
-        if (queue && fakeeos)
-          {
-             GstPad *queue_pad;
-
-             gst_bin_add_many(GST_BIN(bin), fakeeos, NULL);
-
-             gst_bin_add_many(GST_BIN(fakeeos), queue, xvsink, NULL);
-             gst_element_link_many(queue, xvsink, NULL);
-             queue_pad = gst_element_get_pad(queue, "sink");
-             gst_element_add_pad(fakeeos, gst_ghost_pad_new("sink", queue_pad));
-
-             pad = gst_element_get_pad(fakeeos, "sink");
-             teepad = gst_element_get_request_pad(tee, "src%d");
-             gst_pad_link(teepad, pad);
-
-             xvsink = fakeeos;
-
-             ev->xvteepad = teepad;
-             ev->xvpad = pad;
-         }
-       else
-         {
-             if (fakeeos) gst_object_unref(fakeeos);
-             if (queue) gst_object_unref(queue);
-             gst_object_unref(xvsink);
-             xvsink = NULL;
-         }
-     }
-
-   teepad = gst_element_get_pad(tee, "sink");
-   gst_element_add_pad(bin, gst_ghost_pad_new("sink", teepad));
-   gst_object_unref(teepad);
-
-#define GST_PLAY_FLAG_NATIVE_VIDEO  (1 << 6)
-#define GST_PLAY_FLAG_DOWNLOAD      (1 << 7)
-#define GST_PLAY_FLAG_AUDIO         (1 << 1)
-#define GST_PLAY_FLAG_NATIVE_AUDIO  (1 << 5)
-
-   if (launch)
-     {
-        g_object_set(G_OBJECT(playbin), "sink", bin, NULL);
-     }
-   else
-     {
-        g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
-        g_object_set(G_OBJECT(playbin), "flags", flags | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_DOWNLOAD | GST_PLAY_FLAG_NATIVE_AUDIO, NULL);
-        g_object_set(G_OBJECT(playbin), "video-sink", bin, NULL);
-        g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
-     }
-
-   evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
-
-   ev->stream = EINA_TRUE;
-
-   if (xvsink)
-     {
-        Evas_Video_Surface video;
-
-        video.version = EVAS_VIDEO_SURFACE_VERSION;
-        video.data = ev;
-        video.parent = NULL;
-        video.move = _video_move;
-        video.resize = _video_resize;
-        video.show = _video_show;
-        video.hide = _video_hide;
-        video.update_pixels = _video_update_pixels;
-
-        evas_object_image_video_surface_set(obj, &video);
-        ev->stream = EINA_FALSE;
-     }
-
-   eina_stringshare_replace(&ev->uri, uri);
-   ev->pipeline = playbin;
-   ev->sink = bin;
-   ev->esink = esink;
-   ev->xvsink = xvsink;
-   ev->tee = tee;
-   ev->threads = eina_list_append(ev->threads,
-                                  ecore_thread_run(_emotion_gstreamer_pause,
-                                                   _emotion_gstreamer_end,
-                                                   _emotion_gstreamer_cancel,
-                                                   ev));
-
-   /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
-   /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
-#if defined(HAVE_GETUID) && defined(HAVE_GETEUID)
-   if (getuid() == geteuid())
-#endif
-     {
-        if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
-     }
-
-   return playbin;
-
- unref_pipeline:
-   gst_object_unref(xvsink);
-   gst_object_unref(esink);
-   gst_object_unref(tee);
-   gst_object_unref(bin);
-   gst_object_unref(playbin);
-   return NULL;
-}
diff --git a/src/modules/emotion/gstreamer/meson.build b/src/modules/emotion/gstreamer/meson.build
deleted file mode 100644
index e3e69e5746..0000000000
--- a/src/modules/emotion/gstreamer/meson.build
+++ /dev/null
@@ -1,18 +0,0 @@
-generic_src = files([
-  'emotion_gstreamer.h',
-  'emotion_gstreamer.c',
-  'emotion_alloc.c',
-  'emotion_convert.c',
-  'emotion_sink.c'
-])
-
-generic_deps = [dependency('gstreamer-0.10')]
-
-shared_module(emotion_loader,
-    generic_src,
-    include_directories : config_dir,
-    dependencies: [eina, ecore_x, evas, emotion, generic_deps],
-    install: true,
-    install_dir : mod_install_dir,
-    c_args : package_c_args,
-)
diff --git a/src/modules/emotion/meson.build b/src/modules/emotion/meson.build
index 0090ea7492..e20729ad22 100644
--- a/src/modules/emotion/meson.build
+++ b/src/modules/emotion/meson.build
@@ -1,5 +1,4 @@
 emotion_loaders = [
-'gstreamer',
 'gstreamer1',
 'libvlc',
 'xine'
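
For comparison, the surviving gstreamer1 backend builds against GStreamer 1.x via the pkg-config module 'gstreamer-1.0'. Below is a minimal sketch of what such a module's build file could look like, patterned on the removed 0.10 meson.build above; the source list and dependencies of the actual src/modules/emotion/gstreamer1/meson.build may well differ, and the helper variables (emotion_loader, config_dir, mod_install_dir, package_c_args and the eina/evas/emotion dependency objects) are assumed to be provided by the surrounding build, as they were for the deleted file.

  # Sketch only: mirrors the structure of the deleted 0.10 build file,
  # switched over to the GStreamer 1.x pkg-config module.
  generic_src = files([
    'emotion_gstreamer.c'
  ])

  # 'gstreamer-1.0' is the pkg-config name for GStreamer 1.x
  generic_deps = [dependency('gstreamer-1.0')]

  shared_module(emotion_loader,
      generic_src,
      include_directories : config_dir,
      dependencies: [eina, evas, emotion, generic_deps],
      install: true,
      install_dir : mod_install_dir,
      c_args : package_c_args,
  )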
