Hi Thiago,
Yes, I did notice this problem and have corrected it. I am now creating
two separate pipelines, each with its own set of elements. Pausing
stream 1 works, but I cannot get the second stream to play for some
reason.
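Roughly, the swap in my timer callback looks like this (a simplified
sketch of what is in the attached file; the explicit
GST_STATE_CHANGE_FAILURE checks exist only in this sketch, the attached
code just prints the raw return values):

  /* masterpipe and slavepipe are the two global pipelines from the attached file */
  GstStateChangeReturn ret;

  ret = gst_element_set_state (masterpipe, GST_STATE_PAUSED);
  if (ret == GST_STATE_CHANGE_FAILURE)
    g_printerr ("Failed to pause the master pipeline\n");

  ret = gst_element_set_state (slavepipe, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE)
    g_printerr ("Failed to start the slave pipeline\n");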
I have attached the code with this email. This time, when I pause the
first stream and start playing the second, I get an error like "Cannot
play stream 16", which appears to come from the Freescale codec plugin.
Any clue as to why this is happening?
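In case it helps to narrow it down, I suppose the error branch in the
bus callback could print the debug string instead of freeing it right
away; a sketch of that change against bus_call in the attached file:

  case GST_MESSAGE_ERROR: {
    gchar *debug = NULL;
    GError *error = NULL;

    gst_message_parse_error (msg, &error, &debug);
    g_printerr ("Error: %s\n", error->message);
    if (debug)
      g_printerr ("Debug info: %s\n", debug);  /* extra detail from the failing element */
    g_free (debug);
    g_error_free (error);
    g_main_loop_quit (loop);
    break;
  }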
regards
Rajesh Marathe.
On 10/22/2009 05:52 PM, thiagossan...@gmail.com wrote:
On Tue, Oct 20, 2009 at 9:21 AM, Rajesh Marathe <rmara...@i-rode.com> wrote:
Hi,
I am new to GStreamer and started working with it very recently. While
learning, I wrote a program to play two streams alternately using
GStreamer. Details below:
Development environment:
-------------------------------------
LTIB and iMX27ADS board. Host PC has Redhat EL5.0.
Problem description:
-----------------------------
I have two MP4 files. My intention is to play the first file for a few
seconds, pause it, and play the second file to the end. After getting
the End-of-Stream message for the second file, resume and complete the
first file. My program looks like this:
int
main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstStateChangeReturn retval;

  /* Initialisation */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* Create gstreamer elements */
  masterpipe = GstCreatePipelineMaster ("master-player", argv[1]);
  slavepipe = GstCreatePipelineSlave ("slave-player", argv[2]);

  retval = gst_element_set_state (slavepipe, GST_STATE_PAUSED);
  retval = gst_element_set_state (masterpipe, GST_STATE_PLAYING);

  g_timeout_add (4000, (GSourceFunc) cb_print_position, masterpipe);
  g_main_loop_run (loop);
  ...
}
static GstElement *
GstCreatePipelineMaster (char *name, char *file)
{
  GstElement *pipeline;

  /* Create gstreamer elements */
  pipeline = gst_pipeline_new (name);
  source  = gst_element_factory_make ("filesrc", "file-source");
  demuxer = gst_element_factory_make ("mfw_mp4demuxer", "avi-demuxer");
  decvd   = gst_element_factory_make ("mfw_vpudecoder", "video-decoder");
  decad   = gst_element_factory_make ("mad", "mp3-decoder");
  vdsink  = gst_element_factory_make ("mfw_v4lsink", "video-sink");
  vdqueue = gst_element_factory_make ("queue", "video-queue");
  adqueue = gst_element_factory_make ("queue", "audio-queue");
  adsink  = gst_element_factory_make ("fakesink", "audio-sink");

  g_object_set (decvd, "codec-type", "std_mpeg4", NULL);

  if (!pipeline || !source || !demuxer || !decvd || !decad || !vdsink
      || !vdqueue || !adqueue || !adsink) {
    return NULL;
  }

  /* Set the input filename on the source element */
  g_object_set (G_OBJECT (source), "location", file, NULL);

  /* Add all elements to the pipeline:
     file-source | mp4-demuxer | video/audio decoders | sinks */
  gst_bin_add_many (GST_BIN (pipeline),
      source, demuxer, decvd, decad, /* adqueue, vdqueue, */ vdsink, adsink, NULL);

  /* Link the static pads; the demuxer pads are linked dynamically
     in the "pad-added" callback */
  gst_element_link (source, demuxer);
  gst_element_link (decvd, vdsink);
  // gst_element_link (vdqueue, vdsink);
  // gst_element_link (decad, adqueue);
  // gst_element_link (adqueue, adsink);
  gst_element_link (decad, adsink);

  g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), NULL);

  return pipeline;
}
static GstElement *
GstCreatePipelineSlave (char *name, char *file)
{
  GstElement *pipeline;

  /* Create gstreamer elements */
  pipeline = gst_pipeline_new (name);

  /* Set the input filename on the (shared) source element */
  g_object_set (G_OBJECT (source), "location", file, NULL);

  /* Add the same element instances that are already in the master pipeline */
  gst_bin_add_many (GST_BIN (pipeline),
      source, demuxer, decvd, decad, /* adqueue, vdqueue, */ vdsink, adsink, NULL);

  return pipeline;
}
Please note above that I am creating two pipelines, master and slave,
and adding the same elements to both bins. Is this okay to do?
An element can only have a single parent bin; it cannot be inside two
bins at the same time.
You would probably have noticed the problem by checking the result of
gst_bin_add*.
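For example, something along these lines would flag it (a rough sketch;
gst_bin_add () returns FALSE when the element cannot be added, e.g.
because it already has a parent):

  if (!gst_bin_add (GST_BIN (pipeline), source))
    g_printerr ("Could not add element to %s (does it already have a parent bin?)\n",
        GST_ELEMENT_NAME (pipeline));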
I do see the master playing, and after a few seconds the timer callback
is invoked, in which I move the master to the PAUSED state and the
slave to PLAYING. But the slave media does not play at all.
Can anybody let me know what is wrong above?
regards,
Rajesh Marathe.
--
Thiago Sousa Santos
#include <gst/gst.h>
#include <glib.h>
#include <string.h>
GstElement *masterpipe;
GstElement *slavepipe;
static GstElement * GstCreatePipelineMaster (char *name, char *file);
static GstElement * GstCreatePipelineSlave (char *name, char *file);
static GstElement *source, *demuxer, *vdqueue, *adqueue, *vdsink, *adsink, *decvd, *decad;
static GstElement *sources, *demuxers, *vdqueues, *adqueues, *vdsinks, *adsinks, *decvds, *decads;
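/* Timeout callback installed from main(): on its first (and only) run it
 * pauses the master pipeline and starts the slave one, then returns FALSE
 * so the timeout is removed. */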
static gboolean
cb_print_position (GstElement *pipeline)
{
static int count = 0;
GstStateChangeReturn retval;
#if 0
GstFormat fmt = GST_FORMAT_TIME;
gint64 pos, len;
if (gst_element_query_position (pipeline, &fmt, &pos)
&& gst_element_query_duration (pipeline, &fmt, &len)) {
g_print ("Time: %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
GST_TIME_ARGS (pos), GST_TIME_ARGS (len));
g_print ("\nPOS: %lld ", pos);
g_print ("\nPOS: %lld ", pos);
/* Set the pipeline to "paused" state*/
}
else g_print ("\n Returned false");
/* call me again */
#endif
if (count == 0) {
g_print ("\nNow pausing...Master");
g_print ("\nNow pausing...Master");
retval = gst_element_set_state (masterpipe, GST_STATE_PAUSED);
g_print ("\n ");
g_print ("\n retval = %d", retval);
retval = gst_element_set_state (slavepipe, GST_STATE_PLAYING);
g_print ("\n ");
g_print ("\n retval = %d", retval);
}
count ++;
return FALSE;
}
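/* pad-added handlers: link the demuxer's dynamically created pads to the
 * video/audio decoders of the corresponding pipeline (the _slave variant
 * uses the slave elements). */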
void on_pad_added_slave (GstElement *element, GstPad *pad)
{
g_debug ("Signal: pad-added-slave");
GstCaps *caps;
GstStructure *str;
caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);
if (g_strrstr (gst_structure_get_name (str), "video")) {
g_debug ("Linking video pad to dec_vd");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decvds, "sink");
g_assert (targetsink != NULL);
gst_pad_link (pad, targetsink);
gst_object_unref (targetsink);
}
if (g_strrstr (gst_structure_get_name (str), "audio")) {
g_debug ("Linking audio pad to dec_ad");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decads, "sink");
g_assert (targetsink != NULL);
gst_pad_link (pad, targetsink);
gst_object_unref (targetsink);
}
gst_caps_unref (caps);
}
void on_pad_added (GstElement *element, GstPad *pad)
{
g_debug ("Signal: pad-added");
GstCaps *caps;
GstStructure *str;
caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);
if (g_strrstr (gst_structure_get_name (str), "video")) {
g_debug ("Linking video pad to dec_vd");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decvd, "sink");
g_assert (targetsink != NULL);
gst_pad_link (pad, targetsink);
gst_object_unref (targetsink);
}
if (g_strrstr (gst_structure_get_name (str), "audio")) {
g_debug ("Linking audio pad to dec_ad");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decad, "sink");
g_assert (targetsink != NULL);
gst_pad_link (pad, targetsink);
gst_object_unref (targetsink);
}
gst_caps_unref (caps);
}
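/* Bus handler for the slave pipeline: when the slave reaches EOS, resume
 * the master pipeline. */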
static gboolean
bus_call_slave (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
GstStateChangeReturn retval;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of slave stream\n");
g_print ("End of slave stream\n");
retval = gst_element_set_state (masterpipe, GST_STATE_PLAYING);
g_print ("\n\n bus_call_slave: retval = %d", retval);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Slave Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
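/* Bus handler for the master pipeline: quits the main loop on EOS or
 * error, and pauses/resumes the pipeline around buffering messages. */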
static gboolean
bus_call (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_BUFFERING:{
gint percent;
g_print ("\nBuffering the stream from file");
g_print ("\nBuffering the stream from file");
gst_element_set_state (masterpipe, GST_STATE_PAUSED);
gst_message_parse_buffering (msg, &percent);
g_print (": %d\n", percent);
g_print (": %d\n", percent);
if (100 == percent) {
/* Set the pipeline to "playing" state*/
g_print ("\nNow playing...");
g_print ("\nNow playing...");
gst_element_set_state (masterpipe, GST_STATE_PLAYING);
}
break;
}
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_printerr ("Error: %s\n", error->message);
g_printerr ("Error: %s\n", error->message);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
g_print ("\n def error");
g_print ("\n def error");
g_print ("\n def error");
break;
}
return TRUE;
}
void GstCreateHandlers (GstElement *pipeline, GstBusFunc x, GMainLoop *loop)
{
GstBus *bus;
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, x, loop);
gst_object_unref (bus);
}
static GstElement * GstCreatePipelineSlave (char *name, char *file)
{
GstElement *pipeline;
g_print ("\n file = %s", file);
/* Create gstreamer elements */
pipeline = gst_pipeline_new (name);
sources = gst_element_factory_make ("filesrc","file-source-slave");
demuxers = gst_element_factory_make ("mfw_mp4demuxer","avi-demuxer-slave");
decvds = gst_element_factory_make ("mfw_vpudecoder", "video-decoder-slave");
decads = gst_element_factory_make ("mad", "mp3-decoder-slave");
vdsinks = gst_element_factory_make ("mfw_v4lsink", "video-sink-slave");
vdqueues = gst_element_factory_make ("queue", "video-queue-slave");
adqueues = gst_element_factory_make ("queue", "audio-queue-slave");
adsinks = gst_element_factory_make ("fakesink", "audio-sink-slave");
g_object_set (decvd, "codec-type", "std_mpeg4", NULL);
if (!pipeline || !sources || !demuxers || !decvds || !decads || !vdsinks || !vdqueues || !adqueues || !adsinks) {
g_printerr ("One element could not be created. Exiting.\n");
g_printerr ("One element could not be created. Exiting.\n");
g_printerr ("One element could not be created. Exiting.\n");
return NULL;
}
/* Set up the pipeline */
/* we set the input filename to the source element */
g_object_set (G_OBJECT (sources), "location", file, NULL);
/* we add all elements into the pipeline */
/* file-source | mp4-demuxer | video/audio decoders | sinks */
gst_bin_add_many (GST_BIN (pipeline),
sources, demuxers, decvds, decads,/* adqueue, vdqueue,*/ vdsinks, adsinks, NULL);
/* we link the elements together */
/* static links; the demuxer pads are linked dynamically in on_pad_added_slave */
gst_element_link (sources, demuxers);
gst_element_link (decvds, vdsinks);
// gst_element_link (vdqueue, vdsink);
// gst_element_link (decad, adqueue);
// gst_element_link (adqueue, adsink);
gst_element_link (decads, adsinks);
g_signal_connect (demuxers, "pad-added", G_CALLBACK (on_pad_added_slave), NULL);
return pipeline;
}
static GstElement * GstCreatePipelineMaster (char *name, char *file)
{
GstElement *pipeline;
g_print ("\n file = %s", file);
/* Create gstreamer elements */
pipeline = gst_pipeline_new (name);
source = gst_element_factory_make ("filesrc","file-source");
demuxer = gst_element_factory_make ("mfw_mp4demuxer","avi-demuxer");
decvd = gst_element_factory_make ("mfw_vpudecoder", "video-decoder");
decad = gst_element_factory_make ("mad", "mp3-decoder");
vdsink = gst_element_factory_make ("mfw_v4lsink", "video-sink");
vdqueue = gst_element_factory_make ("queue", "video-queue");
adqueue = gst_element_factory_make ("queue", "audio-queue");
adsink = gst_element_factory_make ("fakesink", "audio-sink");
g_object_set (decvd, "codec-type", "std_mpeg4", NULL);
if (!pipeline || !source || !demuxer || !decvd || !decad || !vdsink || !vdqueue || !adqueue || !adsink) {
g_printerr ("One element could not be created. Exiting.\n");
g_printerr ("One element could not be created. Exiting.\n");
g_printerr ("One element could not be created. Exiting.\n");
return NULL;
}
/* Set up the pipeline */
/* we set the input filename to the source element */
g_object_set (G_OBJECT (source), "location", file, NULL);
/* we add all elements into the pipeline */
/* file-source | mp4-demuxer | video/audio decoders | sinks */
gst_bin_add_many (GST_BIN (pipeline),
source, demuxer, decvd, decad,/* adqueue, vdqueue,*/ vdsink, adsink, NULL);
/* we link the elements together */
/* static links; the demuxer pads are linked dynamically in on_pad_added */
gst_element_link (source, demuxer);
gst_element_link (decvd, vdsink);
// gst_element_link (vdqueue, vdsink);
// gst_element_link (decad, adqueue);
// gst_element_link (adqueue, adsink);
gst_element_link (decad, adsink);
g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), NULL);
return pipeline;
}
int
main (int argc,
char *argv[])
{
GMainLoop *loop;
GstStateChangeReturn retval;
GstState state, pend;
/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Check input arguments */
if (argc != 3) {
g_printerr ("Usage: %s %s %s <Video H264 filename>\n", argv[0], argv[1], argv[2]);
return -1;
}
/* Create gstreamer elements */
masterpipe = GstCreatePipelineMaster ("master-player", argv[1]);
slavepipe = GstCreatePipelineSlave ("slave-player", argv[2]);
/* Note that the demuxer is linked to the decoders dynamically.
   The reason is that the MP4 file may contain various streams (for
   example audio and video). The source pad(s) will be created at run
   time by the demuxer, when it detects the number and type of streams.
   Therefore we connect a callback function which is executed when the
   "pad-added" signal is emitted. */
/* Set the initial pipeline states */
#if 0
GstCreateHandlers (masterpipe, bus_call, loop);
GstCreateHandlers (slavepipe, bus_call_slave, loop);
#endif
#if 0
retval = gst_element_set_state (slavepipe, GST_STATE_READY);
g_print ("\n 22 state-change retval = %d", retval);
retval = gst_element_set_state (slavepipe, GST_STATE_PAUSED);
g_print ("\n 33 state-change retval = %d", retval);
#endif
#if 0
retval = gst_element_set_state (masterpipe, GST_STATE_READY);
g_print ("\n 55 state-change retval = %d", retval);
retval = gst_element_set_state (masterpipe, GST_STATE_PAUSED);
g_print ("\n 66 state-change retval = %d", retval);
#endif
retval = gst_element_set_state (slavepipe, GST_STATE_PLAYING);
retval = gst_element_set_state (slavepipe, GST_STATE_PAUSED);
retval = gst_element_set_state (masterpipe, GST_STATE_PLAYING);
g_print ("\n 77 state-change retval = %d", retval);
retval = gst_element_get_state (masterpipe, &state, &pend,
GST_CLOCK_TIME_NONE);
g_print ("\n 66 state-change retval = %d state = %d pend = %d", retval, state, pend);
/* Iterate */
g_print ("Running...\n");
g_print ("Running...\n");
g_timeout_add (4000, (GSourceFunc) cb_print_position, masterpipe);
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (masterpipe, GST_STATE_NULL);
// gst_element_set_state (slavepipe, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (masterpipe));
// gst_object_unref (GST_OBJECT (slavepipe));
return 0;
}