GStreamer RTSP application for audio and video
I was trying to develop an application for the pipeline:
gst-launch-1.0 rtspsrc location="rtsp://192.168.3.30:8554/rajvi" latency=0 name=demux demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert ! audioresample ! autoaudiosink demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! videoscale ! video/x-raw,width=176,height=144 ! ximagesink
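As a sanity check, the same launch string can first be handed to gst_parse_launch from code before building the graph element by element; a minimal sketch (same URL and caps as the command above, error handling kept short) could look like this:

#include <gst/gst.h>

int main(int argc, char *argv[])
{
    GError *error = NULL;
    gst_init(&argc, &argv);
    /* Build the identical pipeline from the launch string shown above. */
    GstElement *pipeline = gst_parse_launch(
        "rtspsrc location=rtsp://192.168.3.30:8554/rajvi latency=0 name=demux "
        "demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert ! "
        "audioresample ! autoaudiosink "
        "demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! "
        "videoscale ! video/x-raw,width=176,height=144 ! ximagesink", &error);
    if (pipeline == NULL) {
        g_printerr("Parse error: %s\n", error->message);
        g_clear_error(&error);
        return -1;
    }
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    GstBus *bus = gst_element_get_bus(pipeline);
    GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
        (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (msg != NULL)
        gst_message_unref(msg);
    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}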
Following is the code I have implemented:
#include <gst/gst.h>
static void onPadAdded(GstElement *element, GstPad *pad, gpointer data)
{
gchar *name;
name = gst_pad_get_name(pad);
g_print("A new pad %s was createdn", name);
GstCaps * p_caps = gst_pad_get_pad_template_caps (pad);
gchar * description = gst_caps_to_string(p_caps);
g_free(description);
GstElement *depay = GST_ELEMENT(data);
if(gst_element_link_pads(element, name, depay, "sink") == 0)
{
g_print("cb_new_rtspsrc_pad : failed to link elements n");
}
g_free(name);
}
int main(int argc, char *argv[]) {
GstElement *source, *video, *pipeline, *audioDepay, *audioQueue, *videoQueue,
*audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink, *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;
GstCaps *capsFilter;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
GstPad *sinkpad,*ghost_sinkpad;
gboolean link_ok;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create Elements */
pipeline = gst_pipeline_new("rtsp-pipeline");
source = gst_element_factory_make ("rtspsrc", "source");
/*audio bin*/
audioQueue = gst_element_factory_make ("queue", "audio-queue");
audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
audioParse = gst_element_factory_make ("aacparse", "audio-parser");
audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
audioConvert = gst_element_factory_make ("audioconvert", "aconv");
audioResample = gst_element_factory_make ("audioresample", "audio-resample");
audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");
if (!audioQueue || !audioDepay || !audioParse || !audioDecode || !audioConvert || !audioResample || !audioSink)
{
g_printerr("Cannot create audio elements\n");
return 0;
}
g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
g_object_set(source, "latency", 0, NULL);
g_signal_connect(G_OBJECT(source), "pad-added", G_CALLBACK(onPadAdded), audioDepay);
gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode,
audioConvert, audioResample, audioSink, NULL);
if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
{
g_printerr("Error linking fields ...1 n");
return 0;
}
video = gst_bin_new ("videobin");
videoQueue = gst_element_factory_make ("queue", "video-queue");
videoDepay= gst_element_factory_make ("rtph264depay", "video-depayer");
videoParser = gst_element_factory_make ("h264parse", "video-parser");
videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
videoConvert = gst_element_factory_make("videoconvert", "convert");
videoScale = gst_element_factory_make("videoscale", "video-scale");
videoSink = gst_element_factory_make("ximagesink", "video-sink");
capsFilter = gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 176,
"height", G_TYPE_INT, 144,
NULL);
if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
{
g_printerr("Cannot create video elements n");
return 0;
}
gst_bin_add_many(GST_BIN(video), videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale,
videoSink, NULL);
/* link through the 176x144 caps filter */
link_ok = gst_element_link_filtered(videoConvert, videoSink, capsFilter);
gst_caps_unref (capsFilter);
if (!link_ok) {
g_warning ("Failed to link element1 and element2!");
}
sinkpad = gst_element_get_static_pad (videoConvert, "sink");
ghost_sinkpad = gst_ghost_pad_new ("sink", sinkpad);
gst_pad_set_active (ghost_sinkpad, TRUE);
gst_element_add_pad (video, ghost_sinkpad);
if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoScale, NULL))
{
g_printerr("Error linking fields... 2 n");
return 0;
}
gst_bin_add_many (GST_BIN(pipeline), video,NULL);
/* Start playing */
gst_element_set_state ( pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Free resources */
if (msg != NULL)
gst_message_unref (msg);
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
I am getting an error when linking the audio and video bins into the pipeline.
If you put the video and audio elements all together in the pipeline bin, then you can do it. Figure out what the caps are for the video and audio streams and you should be able to link them; a small caps-printing helper is sketched below.
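To see what those caps actually contain, one option (an addition of mine, not part of the original answer) is to dump each new pad's caps from the pad-added callback. rtspsrc pads normally carry application/x-rtp caps whose media field is "audio" or "video", and whatever this prints is what you would substitute for the placeholder strings in the example below:

/* Hypothetical helper: print the caps of a newly added rtspsrc pad so you can
 * decide which branch (audio or video) it should be linked to. */
static void print_pad_caps(GstPad *pad)
{
    GstCaps *caps = gst_pad_get_current_caps(pad);   /* negotiated caps, if any */
    if (caps == NULL)
        caps = gst_pad_query_caps(pad, NULL);        /* otherwise query what the pad can do */
    gchar *description = gst_caps_to_string(caps);
    g_print("pad %s caps: %s\n", GST_PAD_NAME(pad), description);
    g_free(description);
    gst_caps_unref(caps);
}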
#include <gst/gst.h>
#include <string.h>
#include <iostream>

// ----------------------------------
// pad-added signal
// ----------------------------------
static void onPadAdded(GstElement* element, GstPad* pad, gpointer user_data)
{
gchar *name;
GstCaps * p_caps;
GstElement* nextElement = NULL;
GstElement* pipeline = (GstElement*)user_data;
name = gst_pad_get_name(pad);
g_print("A new pad %s was created\n", name);
p_caps = gst_pad_get_pad_template_caps(pad);
if (strstr(name, "[CAPS FOR VIDEO CONTAIN]") != NULL)
{
std::cout << std::endl << "------------------------ Video -------------------------------" << std::endl;
nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "video-depayer");
}
else if (strstr(name, "[CAPS FOR AUDIO CONTAIN]") != NULL)
{
std::cout << std::endl << "------------------------ Audio -------------------------------" << std::endl;
nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "audio-depayer");
}
if (nextElement != NULL)
{
if (!gst_element_link_filtered(element, nextElement, p_caps))
//if (!gst_element_link_pads_filtered(element, name, nextElement, "sink", p_caps))
{
std::cout << std::endl << "Failed to link video element to src to sink" << std::endl;
}
gst_object_unref(nextElement);
}
g_free(name);
gst_caps_unref(p_caps);
}
// ----------------------------------
// main
// ----------------------------------
int main(int argc, char *argv[])
{
GstElement *source, *pipeline, *audioDepay, *audioQueue, *videoQueue,
*audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink, *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;
GstCaps *capsFilter;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
gboolean link_ok;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init(&argc, &argv);
/* Create Elements */
pipeline = gst_pipeline_new("rtsp-pipeline");
source = gst_element_factory_make("rtspsrc", "source");
/*audio bin*/
audioQueue = gst_element_factory_make("queue", "audio-queue");
audioDepay = gst_element_factory_make("rtpmp4gdepay", "audio-depayer");
audioParse = gst_element_factory_make("aacparse", "audio-parser");
audioDecode = gst_element_factory_make("avdec_aac", "audio-decoder");
audioConvert = gst_element_factory_make("audioconvert", "aconv");
audioResample = gst_element_factory_make("audioresample", "audio-resample");
audioSink = gst_element_factory_make("autoaudiosink", "audiosink");
if (!audioQueue || !audioDepay || !audioParse || !audioDecode || !audioConvert || !audioResample || !audioSink)
{
g_printerr("Cannot create audio elements\n");
return 0;
}
g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
g_object_set(source, "latency", 0, NULL);
g_signal_connect(G_OBJECT(source), "pad-added", G_CALLBACK(onPadAdded), pipeline);
gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode,
audioConvert, audioResample, audioSink, NULL);
if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
{
g_printerr("Error linking fields ...1 n");
return 0;
}
videoQueue = gst_element_factory_make("queue", "video-queue");
videoDepay = gst_element_factory_make("rtph264depay", "video-depayer");
videoParser = gst_element_factory_make("h264parse", "video-parser");
videoDecode = gst_element_factory_make("omxh264dec", "video-decoder");
videoConvert = gst_element_factory_make("videoconvert", "convert");
videoScale = gst_element_factory_make("videoscale", "video-scale");
videoSink = gst_element_factory_make("ximagesink", "video-sink");
capsFilter = gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 176,
"height", G_TYPE_INT, 144,
NULL);
if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
{
g_printerr("Cannot create video elements n");
return 0;
}
gst_bin_add_many(GST_BIN(pipeline), videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale,
videoSink, NULL);
/* scale down to 176x144 before the sink */
link_ok = gst_element_link_filtered(videoScale, videoSink, capsFilter);
gst_caps_unref(capsFilter);
if (!link_ok) {
g_warning("Failed to link element1 and element2!");
}
if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale, NULL))
{
g_printerr("Error linking fields... 2 n");
return 0;
}
/* Start playing */
gst_element_set_state(pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus(pipeline);
msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,(GstMessageType)( GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
/* Free resources */
if (msg != NULL)
gst_message_unref(msg);
gst_object_unref(bus);
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);
return 0;
}
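A note on building: the example uses std::cout, so it needs to be compiled as C++. Assuming the GStreamer 1.0 development packages are installed, something along the lines of g++ rtsp_player.cpp -o rtsp_player $(pkg-config --cflags --libs gstreamer-1.0) should work (the file name is only an example).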