Hi,

With reference to the snippet pasted below.

Video playback works OK (ish), but I don't get any audio. I have tried
fiddling with the buffer-mode, but that did not help.

I do get callbacks for both the video and audio dynamic pads. 

I am aware that I should probably check the caps in order to select the correct
depayloader (a rough sketch of what I mean is included after the pad-added
callback below), but for now I would be happy just to get A/V on an MPEG-2
program stream...

Any suggestions welcome :)

Best regards
Steve
static void rtp_pad_added_cb (GstElement * rtspsrc, GstPad * new_pad, GstElement * depay)
{
	GstPad           *sinkpad;
	GstPadLinkReturn  lres;
	gchar            *pad_name = gst_pad_get_name (new_pad); /* owned copy, so the g_free() below is correct */

	g_print ("new payload on pad: %s\n", pad_name);

	if (!strncmp (pad_name, "recv_rtp_src_0", 14)) {        /* video pad */
		sinkpad = gst_element_get_static_pad (depay, "sink");
		g_assert (sinkpad);
		lres = gst_pad_link (new_pad, sinkpad);
		gst_object_unref (sinkpad);
		g_assert (lres == GST_PAD_LINK_OK);
	} else if (!strncmp (pad_name, "recv_rtp_src_1", 14)) { /* audio pad */
		/* only the video depayloader is passed as user data, so the
		 * audio depayloader is taken from the global player state */
		sinkpad = gst_element_get_static_pad (media_player.audiodepay, "sink");
		g_assert (sinkpad);
		lres = gst_pad_link (new_pad, sinkpad);
		gst_object_unref (sinkpad);
		g_assert (lres == GST_PAD_LINK_OK);
	}
	g_free (pad_name);
}
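For what it's worth, the caps check I mentioned above would look roughly like the
sketch below. Untested; it assumes the GStreamer 1.0 API and that the whole player
struct is passed as user data instead of just the video depayloader (the callback
name and that wiring are mine, not in the code above):

/* Untested sketch: select the depayloader from the "media" field of the
 * RTP caps instead of relying on the pad name. Assumes GStreamer 1.0
 * (on 0.10 gst_pad_get_caps() would replace gst_pad_get_current_caps())
 * and that p_player is passed as the user data when connecting. */
static void rtp_pad_added_caps_cb (GstElement * rtspsrc, GstPad * new_pad, gpointer user_data)
{
	MEDIA_PLAYER_STATUS *p_player = user_data;
	GstCaps             *caps = gst_pad_get_current_caps (new_pad);
	GstStructure        *s;
	const gchar         *media;
	GstElement          *depay = NULL;
	GstPad              *sinkpad;

	if (caps == NULL)
		return;

	s     = gst_caps_get_structure (caps, 0);
	media = gst_structure_get_string (s, "media");

	if (!g_strcmp0 (media, "video"))
		depay = p_player->videodepay;
	else if (!g_strcmp0 (media, "audio"))
		depay = p_player->audiodepay;

	if (depay) {
		sinkpad = gst_element_get_static_pad (depay, "sink");
		if (gst_pad_link (new_pad, sinkpad) != GST_PAD_LINK_OK)
			g_printerr ("failed to link %s pad %s\n", media, GST_PAD_NAME (new_pad));
		gst_object_unref (sinkpad);
	}
	gst_caps_unref (caps);
}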

static int roll_rtsp_player (MEDIA_PLAYER_STATUS * p_player, const char *filename)
{
	gboolean res;

	/* the pipeline to hold everything */
	p_player->pipeline = gst_pipeline_new (NULL);
	g_assert (p_player->pipeline);

	/* the rtspsrc that handles the RTSP session (RTP and RTCP) for us */
	p_player->rtspsrc = gst_element_factory_make ("rtspsrc", "rtsp_src");
	g_assert (p_player->rtspsrc);
	g_object_set (p_player->rtspsrc, "location", filename, NULL);
	g_object_set (p_player->rtspsrc, "latency", LATENCY, NULL);
	g_object_set (p_player->rtspsrc, "buffer-mode", BUFFER_MODE, NULL);

	gst_bin_add (GST_BIN(p_player->pipeline), p_player->rtspsrc);

	/* the video depayloading, decoding and sink */
	p_player->videodepay = gst_element_factory_make (VIDEO_DEPAY, "videodepay");
	g_assert (p_player->videodepay);
	p_player->videodec = gst_element_factory_make (VIDEO_DEC, "videodec");
	g_assert (p_player->videodec);
	p_player->video_sink = gst_element_factory_make (VIDEO_SINK, "video_sink");
	g_assert (p_player->video_sink);
 
	gst_bin_add_many (GST_BIN (p_player->pipeline), p_player->videodepay, 
				   p_player->videodec, p_player->video_sink, NULL);
	res = gst_element_link_many (p_player->videodepay, p_player->videodec, 
				     p_player->video_sink, NULL);      
	g_assert (res == TRUE);
  
	/* the audio depayloading and sink */
	p_player->audiodepay = gst_element_factory_make (AUDIO_DEPAY, "audiodepay");
	g_assert (p_player->audiodepay);
	p_player->audio_sink = gst_element_factory_make (AUDIO_SINK, "audio_sink");
	g_assert (p_player->audio_sink);
 
	gst_bin_add_many (GST_BIN (p_player->pipeline), p_player->audiodepay, p_player->audio_sink, NULL);
	res = gst_element_link_many (p_player->audiodepay, p_player->audio_sink, NULL);      
	g_assert (res == TRUE);

	/* connect the pad-added signal so that we can link the dynamic pads;
	 * only the video depayloader fits in the user data, the audio
	 * depayloader is picked up from the global state in the callback */
	g_signal_connect (p_player->rtspsrc, "pad-added",
	                  G_CALLBACK (rtp_pad_added_cb), p_player->videodepay);

	/* set the pipeline to playing */
	g_print ("starting receiver pipeline\n");
	gst_element_set_state (p_player->pipeline, GST_STATE_PLAYING);

	/* we need to run a GLib main loop to get the messages */
	p_player->loop = g_main_loop_new (NULL, FALSE);
	g_main_loop_run (p_player->loop);

	g_print ("stopping receiver pipeline\n");
	gst_element_set_state (p_player->pipeline, GST_STATE_NULL);

	gst_object_unref (p_player->pipeline);
	return 0;
}
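
One thing I noticed while pasting this in: the comment about needing a main loop to
get the messages is a little optimistic, because I never actually attach a bus
watch, so ERROR/EOS messages go unseen. Something along these lines is what I plan
to add (untested sketch; bus_cb is not in the code above):

/* Untested sketch: a bus watch so ERROR/EOS messages from the pipeline
 * actually reach us while the main loop runs. bus_cb is not part of the
 * code above; the watch would be added in roll_rtsp_player() after the
 * loop is created and before g_main_loop_run(). */
static gboolean bus_cb (GstBus * bus, GstMessage * msg, gpointer user_data)
{
	MEDIA_PLAYER_STATUS *p_player = user_data;

	switch (GST_MESSAGE_TYPE (msg)) {
	case GST_MESSAGE_ERROR: {
		GError *err = NULL;
		gchar  *dbg = NULL;

		gst_message_parse_error (msg, &err, &dbg);
		g_printerr ("ERROR from %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
		g_error_free (err);
		g_free (dbg);
		g_main_loop_quit (p_player->loop);
		break;
	}
	case GST_MESSAGE_EOS:
		g_main_loop_quit (p_player->loop);
		break;
	default:
		break;
	}
	return TRUE;   /* keep the watch installed */
}

/* in roll_rtsp_player(), after g_main_loop_new() and before g_main_loop_run():
 *
 *	GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (p_player->pipeline));
 *	gst_bus_add_watch (bus, bus_cb, p_player);
 *	gst_object_unref (bus);
 */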