GStreamer RTSP audio and video application


I am trying to develop an application for this pipeline:

gst-launch-1.0 rtspsrc location="rtsp://192.168.3.30:8554/rajvi" latency=0 name=demux demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert ! audioresample ! autoaudiosink demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! videoscale ! video/x-raw,width=176,height=144 ! ximagesink
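(For comparison, the whole command line above can also be handed to gst_parse_launch; a minimal sketch with only basic error checking, not the manual graph construction I am after:)

    #include <gst/gst.h>

    /* Sketch: run the gst-launch pipeline string as-is via gst_parse_launch(). */
    int main(int argc, char *argv[])
    {
        GError *error = NULL;

        gst_init(&argc, &argv);

        GstElement *pipeline = gst_parse_launch(
            "rtspsrc location=\"rtsp://192.168.3.30:8554/rajvi\" latency=0 name=demux "
            "demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert "
            "! audioresample ! autoaudiosink "
            "demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert "
            "! videoscale ! video/x-raw,width=176,height=144 ! ximagesink",
            &error);
        if (pipeline == NULL) {
            g_printerr("Parse error: %s\n", error->message);
            g_clear_error(&error);
            return -1;
        }

        gst_element_set_state(pipeline, GST_STATE_PLAYING);

        /* Block until an error or end-of-stream message arrives on the bus. */
        GstBus *bus = gst_element_get_bus(pipeline);
        GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
            (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

        if (msg != NULL)
            gst_message_unref(msg);
        gst_object_unref(bus);
        gst_element_set_state(pipeline, GST_STATE_NULL);
        gst_object_unref(pipeline);
        return 0;
    }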

Below is the code I implemented:

#include <gst/gst.h>

static void onPadAdded(GstElement *element, GstPad *pad, gpointer data)
{
    gchar *name;

    name = gst_pad_get_name(pad);
    g_print("A new pad %s was created\n", name);
    GstCaps *p_caps = gst_pad_get_pad_template_caps(pad);

    gchar *description = gst_caps_to_string(p_caps);
    g_print("Pad template caps: %s\n", description);
    g_free(description);
    gst_caps_unref(p_caps);   /* gst_pad_get_pad_template_caps() returns a new reference */

    GstElement *depay = GST_ELEMENT(data);
    if(gst_element_link_pads(element, name, depay, "sink") == 0)
    {
            g_print("cb_new_rtspsrc_pad : failed to link elements \n");
    }

    g_free(name);
}

int main(int argc, char *argv[]) {
    GstElement *source, *videosink, *audio, *video, *convert, *pipeline, *audioDepay, *audioQueue, *videoQueue,
               *audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink, *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;
    GstCaps *capsFilter;
    GstBus *bus;
    GstMessage *msg;
    GstPad *pad;
    GstPad *sinkpad,*ghost_sinkpad;
    gboolean link_ok;
    GstStateChangeReturn ret;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);


    /* Create Elements */
    pipeline = gst_pipeline_new("rtsp-pipeline");
    source = gst_element_factory_make ("rtspsrc", "source");


    /*audio bin*/
    audioQueue = gst_element_factory_make ("queue", "audio-queue");
    audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
    audioParse = gst_element_factory_make ("aacparse", "audio-parser");
    audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
    audioConvert = gst_element_factory_make ("audioconvert", "aconv");
    audioResample = gst_element_factory_make ("audioresample", "audio-resample");
    audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");

    if (!audioQueue || !audioDepay || !audioParse || !audioDecode || !audioConvert || !audioResample || !audioSink)
    {
            g_printerr("Cannot create audio elements \n");
            return 0;
    }

    g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
    g_object_set(source, "latency", 0, NULL);

    g_signal_connect(G_OBJECT(source), "pad-added", G_CALLBACK(onPadAdded), audioDepay);

    gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode,
                    audioConvert, audioResample, audioSink, NULL);

    if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample,  audioSink, NULL))
    {
            g_printerr("Error linking fields ...1 \n");
            return 0;
    }

    video  = gst_bin_new ("videobin");
    videoQueue = gst_element_factory_make ("queue", "video-queue");
    videoDepay= gst_element_factory_make ("rtph264depay", "video-depayer");
    videoParser = gst_element_factory_make ("h264parse", "video-parser");
    videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
    videoConvert = gst_element_factory_make("videoconvert", "convert");
    videoScale = gst_element_factory_make("videoscale", "video-scale");
    videoSink = gst_element_factory_make("ximagesink", "video-sink");
    capsFilter = gst_caps_new_simple("video/x-raw",
                    "width", G_TYPE_INT, 176,
                    "height", G_TYPE_INT, 144,
                    NULL);

    if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
    {
            g_printerr("Cannot create video elements \n");
            return 0;
    }

    gst_bin_add_many(GST_BIN(video), videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale,
                    videoSink, NULL);
    /* cap the video branch output to 176x144 */
    link_ok = gst_element_link_filtered(videoConvert, videoSink, capsFilter);
    gst_caps_unref (capsFilter);
    if (!link_ok) {
            g_warning ("Failed to link element1 and element2!");
    }

    sinkpad = gst_element_get_static_pad (videoConvert, "sink");
    ghost_sinkpad = gst_ghost_pad_new ("sink", sinkpad);
    gst_pad_set_active (ghost_sinkpad, TRUE);
    gst_element_add_pad (video, ghost_sinkpad);

    if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoScale, NULL))
    {
            g_printerr("Error linking fields... 2 \n");
            return 0;
    }

    gst_bin_add_many (GST_BIN(pipeline), video,NULL);
  /* Start playing */
    gst_element_set_state ( pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

    /* Free resources */
    if (msg != NULL)
            gst_message_unref (msg);
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return 0;
}

I am getting an error linking pipeline -> audio -> video bin.

gstreamer rtsp-client
1 Answer

If you put both the video and the audio branch into the pipeline bin, you can do it this way. Figure out the caps of your video and audio streams and you should be able to link them.

// ----------------------------------
//  includes
// ----------------------------------

    #include <gst/gst.h>
    #include <iostream>   // std::cout
    #include <cstring>    // strstr / strcmp

// ----------------------------------
//  pad-added signal
// ----------------------------------

    static void onPadAdded(GstElement* element, GstPad* pad, gpointer user_data)
    {
      gchar *name;
      GstCaps *p_caps;
      GstElement *nextElement = NULL;   /* stays NULL if the pad is neither audio nor video */
      GstElement *pipeline = (GstElement*)user_data;
      name = gst_pad_get_name(pad);
      g_print("A new pad %s was created\n", name);
      p_caps = gst_pad_get_pad_template_caps(pad);

      if (strstr(name, "[CAPS FOR VIDEO CONTAIN]") != NULL)
      {
          std::cout << std::endl << "------------------------ Video -------------------------------" << std::endl;

        nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "video-queue");   /* head of the video branch (queue ! rtph264depay ! ...) */
      }

      else if (strstr(name, "[CAPS FOR AUDIO CONTAIN]") != NULL)
      {
        std::cout << std::endl << "------------------------ Audio -------------------------------" << std::endl;

        nextElement = gst_bin_get_by_name(GST_BIN(pipeline), "audio-queue");   /* head of the audio branch (queue ! rtpmp4gdepay ! ...) */

      }
      if (nextElement != NULL)
      {
        if (!gst_element_link_filtered(element, nextElement, p_caps))
        //if (!gst_element_link_pads_filtered(element, name, nextElement, "sink", p_caps))
        {
          std::cout << std::endl << "Failed to link video element to src to sink" << std::endl;
        }
        gst_object_unref(nextElement);
      }

      g_free(name);
      gst_caps_unref(p_caps);
    }
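The "[CAPS FOR VIDEO CONTAIN]" / "[CAPS FOR AUDIO CONTAIN]" placeholders have to be filled in for your stream. With rtspsrc the new pads normally carry application/x-rtp caps with a media field of "audio" or "video", so instead of matching substrings you can read that field directly. A minimal sketch, assuming exactly that caps layout (the helper name padIsVideo is made up here, it is not part of the answer):

    /* Sketch: TRUE if an rtspsrc pad carries the video stream, judged from its
     * current caps (application/x-rtp, media=(string)video). */
    static gboolean padIsVideo(GstPad *pad)
    {
      GstCaps *caps = gst_pad_get_current_caps(pad);
      if (caps == NULL)
        return FALSE;                               /* caps not negotiated yet */

      GstStructure *s = gst_caps_get_structure(caps, 0);
      const gchar *media = gst_structure_get_string(s, "media");
      gboolean is_video = (media != NULL && strcmp(media, "video") == 0);

      gst_caps_unref(caps);
      return is_video;
    }

Inside onPadAdded you would then route the pad to the video branch when padIsVideo(pad) returns TRUE and to the audio branch otherwise.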
// ----------------------------------
//  main
// ----------------------------------

    int main(int argc, char *argv[]) 
    {
    GstElement *source, *videosink, *audio,*convert, *pipeline, *audioDepay, *audioQueue, *videoQueue,
        *audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink, *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;
    GstCaps *capsFilter;
    GstBus *bus;
    GstMessage *msg;
    GstPad *pad;
    gboolean link_ok;
    GstStateChangeReturn ret;

    /* Initialize GStreamer */
    gst_init(&argc, &argv);


    /* Create Elements */
    pipeline = gst_pipeline_new("rtsp-pipeline");
    source = gst_element_factory_make("rtspsrc", "source");


    /*audio bin*/
    audioQueue = gst_element_factory_make("queue", "audio-queue");
    audioDepay = gst_element_factory_make("rtpmp4gdepay", "audio-depayer");
    audioParse = gst_element_factory_make("aacparse", "audio-parser");
    audioDecode = gst_element_factory_make("avdec_aac", "audio-decoder");
    audioConvert = gst_element_factory_make("audioconvert", "aconv");
    audioResample = gst_element_factory_make("audioresample", "audio-resample");
    audioSink = gst_element_factory_make("autoaudiosink", "audiosink");

    if (!audioQueue || !audioDepay || !audioParse || !audioDecode || !audioConvert || !audioResample || !audioSink)
    {
        g_printerr("Cannot create audio elements \n");
        return 0;
    }

    g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
    g_object_set(source, "latency", 0, NULL);

    g_signal_connect(G_OBJECT(source), "pad-added", G_CALLBACK(onPadAdded), pipeline);

    gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode,
        audioConvert, audioResample, audioSink, NULL);

    if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
    {
        g_printerr("Error linking fields ...1 \n");
        return 0;
    }

    videoQueue = gst_element_factory_make("queue", "video-queue");
    videoDepay = gst_element_factory_make("rtph264depay", "video-depayer");
    videoParser = gst_element_factory_make("h264parse", "video-parser");
    videoDecode = gst_element_factory_make("omxh264dec", "video-decoder");
    videoConvert = gst_element_factory_make("videoconvert", "convert");
    videoScale = gst_element_factory_make("videoscale", "video-scale");
    videoSink = gst_element_factory_make("ximagesink", "video-sink");
    capsFilter = gst_caps_new_simple("video/x-raw",
        "width", G_TYPE_INT, 176,
        "height", G_TYPE_INT, 144,
        NULL);

    if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
    {
        g_printerr("Cannot create video elements \n");
        return 0;
    }

    gst_bin_add_many(GST_BIN(pipeline), videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale,
        videoSink, NULL);
    /* scale down to 176x144 between videoscale and the sink, as in the gst-launch line */
    link_ok = gst_element_link_filtered(videoScale, videoSink, capsFilter);
    gst_caps_unref(capsFilter);
    if (!link_ok) {
        g_warning("Failed to link videoscale and ximagesink!");
    }

    if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale, NULL))
    {
        g_printerr("Error linking fields... 2 \n");
        return 0;
    }

    /* Start playing */
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus(pipeline);
    msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

    /* Free resources */
    if (msg != NULL)
        gst_message_unref(msg);
    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
    }
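One thing the program above never does is report what actually failed: it waits for GST_MESSAGE_ERROR or GST_MESSAGE_EOS and then just unrefs the message. A small sketch (not part of the original answer) that can replace the "if (msg != NULL) gst_message_unref(msg);" lines and print the error, which makes link and negotiation problems much easier to track down:

    /* Sketch: inspect the bus message before freeing it. */
    if (msg != NULL) {
        if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) {
            GError *err = NULL;
            gchar *debug_info = NULL;

            gst_message_parse_error(msg, &err, &debug_info);
            g_printerr("Error from %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
            g_printerr("Debug info: %s\n", debug_info ? debug_info : "none");
            g_error_free(err);
            g_free(debug_info);
        } else {
            g_print("End of stream\n");
        }
        gst_message_unref(msg);
    }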