Cannot connect to RTSP server


I wrote a class to represent my RTSP server. The implementation:

gstRtsp::gstRtsp()
{
    initializeGst();
    m_server = gst_rtsp_server_new ();
    gst_rtsp_server_set_service (m_server, "6666"); //set port number


    m_mounts = gst_rtsp_server_get_mount_points (m_server);
}

gstRtsp::~gstRtsp(){
  if (m_running) {
        gst_rtsp_mount_points_remove_factory(m_mounts, "/stream");
        g_source_remove(m_serverId);
  }
  g_object_unref(m_server);
  g_object_unref (m_mounts);

}
void gstRtsp::initializeGst(int debugLevel)
{
    if (!gst_is_initialized())
    {
        setenv("GST_DEBUG", ("*:" + std::to_string(debugLevel)).c_str(), 1);
        gst_init(nullptr, nullptr);
    }
}

void gstRtsp::rtspSendData(const uint8_t *data)
{
    mtx.lock();
    for (std::vector<RtspClient>::iterator i = rtspClients.begin(); i != rtspClients.end(); ) {
        RtspClient &client = *i;
        if (!GST_IS_ELEMENT(client.appsrc)) {
            /* the appsrc is gone, drop this client */
            i = rtspClients.erase(i);
            continue;
        }

        if (client.data_needed) {
            /* wrap the caller's raw RGB frame in a GstBuffer (no copy) */
            GstMemory *memory = gst_memory_new_wrapped((GstMemoryFlags)0, (gpointer) data,
                                                       m_width * m_height * 3, 0,
                                                       m_width * m_height * 3, NULL, NULL);
            GstBuffer *buffer = gst_buffer_new();
            gst_buffer_append_memory(buffer, memory);

            qint64 nowMs = QDateTime::currentDateTime().toMSecsSinceEpoch();
            if (client.timepoint == 0) {
                client.timepoint = nowMs;
            } else {
                client.timestamp = (nowMs - client.timepoint) * 1000000; /* ms -> ns */
            }

            GST_BUFFER_PTS (buffer) = client.timestamp;
            GST_BUFFER_DTS (buffer) = client.timestamp;
            GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, (int)m_frameRate);

            gst_app_src_push_buffer(GST_APP_SRC(client.appsrc), buffer);
        }
        ++i;
    }
    mtx.unlock();
}

/* called when we need to give data to appsrc */
static void rtspDataNeeded(GstElement * appsrc, guint unused, gstRtsp *instance)
{
    instance->mtx.lock();
    std::vector<gstRtsp::RtspClient>::iterator i = std::find_if(instance->rtspClients.begin(), instance->rtspClients.end(),
                                                                   [appsrc] (const gstRtsp::RtspClient &client) { return client.appsrc == appsrc; } );
    if (i != instance->rtspClients.end()) {
        (*i).data_needed = true;
    }
    instance->mtx.unlock();
}

/* called when a new media pipeline is constructed. We can query the pipeline and configure our appsrc */
static void rtspMediaConfigure(GstRTSPMediaFactory * , GstRTSPMedia * media, gstRtsp *instance)
{

    GstElement *element, *appsrc;

    /* get the element used for providing the streams of the media */
    element = gst_rtsp_media_get_element (media);

    /* get our appsrc, we named it 'mysrc' with the name property */
    appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "mysrc");

    /* this instructs appsrc that we will be dealing with timed buffer */
    gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");
    /* configure the caps of the video */
    g_object_set (G_OBJECT (appsrc), "caps",
                 gst_caps_new_simple ("video/x-raw",
                                     "format", G_TYPE_STRING, "RGB",
                                     "width", G_TYPE_INT, instance->width(),
                                     "height", G_TYPE_INT, instance->height(),
                                     "interlace-mode", G_TYPE_STRING, "progressive",
                                     "framerate", GST_TYPE_FRACTION, (int)(instance->frameRate()), 1,
                                     "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
                                     NULL), NULL);

    instance->mtx.lock();
    instance->rtspClients.push_back({appsrc});
    instance->mtx.unlock();

    /* install the callback that will be called when a buffer is needed */
    g_signal_connect (appsrc, "need-data", (GCallback) rtspDataNeeded, instance);
    gst_object_unref (appsrc);
    gst_object_unref (element);
}

void gstRtsp::startRtsp(int width, int height, int frameRate )
{
    m_frameRate = frameRate;
    m_width = width;
    m_height = height;

    /* make a media factory for a test stream. The default media factory can use
   * gst-launch syntax to create pipelines.
   * any launch line works as long as it contains elements named pay%d. Each
   * element with pay%d names will be a stream */
    GstRTSPMediaFactory *factory = gst_rtsp_media_factory_new ();


    std::string initializationString = "( appsrc name=mysrc ! videoconvert  ! avenc_h264_omx ! h264parse ! rtph264pay name=pay0 pt=96 )";
    gst_rtsp_media_factory_set_launch (factory, initializationString.c_str() );
    gst_rtsp_media_factory_set_shared (factory, TRUE);
    gst_rtsp_media_factory_set_eos_shutdown(factory, TRUE);



    /* notify when our media is ready. This is called whenever someone asks for
    * the media and a new pipeline with our appsrc is created */
    g_signal_connect (factory, "media-configure", (GCallback) rtspMediaConfigure, this);
    rtspCounter = 0;
    rtspCounterTimer = QDateTime::currentDateTime().toMSecsSinceEpoch();
    m_running = true;
    gst_rtsp_mount_points_add_factory (m_mounts, "/stream", factory);
    m_serverId = gst_rtsp_server_attach (m_server, NULL);

}

int gstRtsp::width(){
  return m_width;
}

int gstRtsp::height(){
  return m_height;
}

int gstRtsp::frameRate(){
  return m_frameRate;
}

To test this, I call rtspSendData() every 0.3 s with a 160x120x3 array that I fill (resolution 160x120, RGB); the feeding side is just a sleep loop. However, negotiation on the receiving end fails: the pipeline

gst-launch-1.0 rtspsrc location=rtsp://IP:6666/stream latency=100 ! rtph264depay ! avdec_h264 ! autovideosink

fails with "Error from element /GstPipeline:pipeline0/GstRTSPSrc:rtspsrc0: Unhandled error".
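The feeding loop itself is nothing fancy; a minimal sketch of it (the header name and frame contents are assumptions here, the real code just does the same fill-and-sleep) looks like this:

#include <chrono>
#include <cstdint>
#include <thread>
#include <vector>
#include <glib.h>
#include "gstRtsp.h"   // assumed header name for the class shown above

int main()
{
    gstRtsp server;
    server.startRtsp(160, 120, 9);   // 160x120 RGB; framerate matches the caps in the debug output below

    /* feed frames from a plain sleep loop in a worker thread */
    std::thread feeder([&server]() {
        std::vector<uint8_t> frame(160 * 120 * 3, 0x80);   // dummy grey RGB frame
        for (;;) {
            server.rtspSendData(frame.data());
            std::this_thread::sleep_for(std::chrono::milliseconds(300));
        }
    });

    /* the RTSP server itself is driven by the default GLib main loop */
    GMainLoop *loop = g_main_loop_new(NULL, FALSE);
    g_main_loop_run(loop);
    feeder.join();
    return 0;
}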

The debug output on the server side (an RPi Zero W) shows some warnings:

0:00:46.544840112  4180   0x7f4c00ef00 WARN            videoencoder gstvideoencoder.c:755:gst_video_encoder_setcaps:<avenc_h264_omx0> rejected caps video/x-raw, width=(int)160, height=(int)120, framerate=(fraction)9/1, pixel-aspect-ratio=(fraction)1/1, format=(string)I420, interlace-mode=(string)progressive
0:00:46.549196023  4180   0x7f4c00ef00 WARN            videoencoder gstvideoencoder.c:755:gst_video_encoder_setcaps:<avenc_h264_omx0> rejected caps video/x-raw, width=(int)160, height=(int)120, framerate=(fraction)9/1, pixel-aspect-ratio=(fraction)1/1, format=(string)I420, interlace-mode=(string)progressive
0:00:46.549330711  4180   0x7f4c00ef00 WARN                GST_PADS gstpad.c:4303:gst_pad_peer_query:<videoconvert0:src> could not send sticky events
0:01:06.517746667  4180   0x55c95532a0 WARN               rtspmedia rtsp-media.c:3576:wait_preroll: failed to preroll pipeline
0:01:06.517892501  4180   0x55c95532a0 WARN               rtspmedia rtsp-media.c:3946:gst_rtsp_media_prepare: failed to preroll pipeline
0:01:06.517987866  4180   0x55c95532a0 INFO               rtspmedia rtsp-media.c:4079:gst_rtsp_media_unprepare: unprepare media 0x7f540e01b0

and two errors:

0:01:06.549108304  4180   0x55c95532a0 ERROR             rtspclient rtsp-client.c:1087:find_media: client 0x55c9636140: can't prepare media
...
0:01:06.558417003  4180   0x55c95532a0 ERROR             rtspclient rtsp-client.c:3346:handle_describe_request: client 0x55c9636140: no media

What am I missing? As far as I understand, the pipelines on both ends start up fine, so some negotiation between them must be failing?

c++ raspberry-pi gstreamer rtsp-server
1 Answer

OK, so h264 encoding on the RPi apparently has some issues (https://github.com/aiortc/aiortc/issues/899; I get the same error with the v4l encoder, even with a bare

GST_DEBUG=2 gst-launch-1.0 videotestsrc ! v4l2h264enc ! fakesink

), so I switched to a JPEG encoder, removed the mutex for now (that certainly was not what was bothering me), and we are up and running!
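The replacement encoder can be sanity-checked in isolation the same way before wiring it into the factory, for example with something like:

GST_DEBUG=2 gst-launch-1.0 videotestsrc ! videoconvert ! v4l2jpegenc ! fakesink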


gstRtsp::gstRtsp()
{
    initializeGst();
    m_server = gst_rtsp_server_new ();
    gst_rtsp_server_set_service (m_server, "8554"); //set port number


    m_mounts = gst_rtsp_server_get_mount_points (m_server);
}

gstRtsp::~gstRtsp(){
  if (m_running) {
        gst_rtsp_mount_points_remove_factory(m_mounts, "/stream");
        g_source_remove(m_serverId);
  }
  g_object_unref(m_server);
  g_object_unref (m_mounts);

}
void gstRtsp::initializeGst(int debugLevel)
{
    if (!gst_is_initialized())
    {
        setenv("GST_DEBUG", ("*:" + std::to_string(debugLevel)).c_str(), 1);
        gst_init(nullptr, nullptr);
    }
}

void gstRtsp::rtspSendData(const uint8_t *data)
{
    for (std::vector<client_t>::iterator i = m_clients.begin(); i != m_clients.end(); ) {
        client_t &client = *i;
        if (!GST_IS_ELEMENT(client.appsrc)) {
            /* the appsrc is gone, drop this client */
            i = m_clients.erase(i);
            continue;
        }

        if (client.data_needed) {
            /* wrap the caller's raw RGB frame in a GstBuffer (no copy) */
            GstMemory *memory = gst_memory_new_wrapped((GstMemoryFlags)0, (gpointer) data,
                                                       m_width * m_height * 3, 0,
                                                       m_width * m_height * 3, NULL, NULL);
            GstBuffer *buffer = gst_buffer_new();
            gst_buffer_append_memory(buffer, memory);

            qint64 nowMs = QDateTime::currentDateTime().toMSecsSinceEpoch();
            if (client.timepoint == 0) {
                client.timepoint = nowMs;
            } else {
                client.timestamp = (nowMs - client.timepoint) * 1000000; /* ms -> ns */
            }

            GST_BUFFER_PTS (buffer) = client.timestamp;
            GST_BUFFER_DTS (buffer) = client.timestamp;
            GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, (int)m_frameRate);

            gst_app_src_push_buffer(GST_APP_SRC(client.appsrc), buffer);
        }
        ++i;
    }
}

/* called when we need to give data to appsrc */
static void rtspDataNeeded(GstElement * appsrc, guint/* unused*/, gstRtsp *instance)
{
    std::vector<gstRtsp::client_t>::iterator i = std::find_if(instance->m_clients.begin(), instance->m_clients.end(),
                                                                   [appsrc] (const gstRtsp::client_t &client) { return client.appsrc == appsrc; } );
    if (i != instance->m_clients.end()) {
        (*i).data_needed = true;
    }
}

/* called when a new media pipeline is constructed. We can query the pipeline and configure our appsrc */
static void rtspMediaConfigure(GstRTSPMediaFactory * , GstRTSPMedia * media, gstRtsp *instance)
{

    GstElement *element, *appsrc;

    /* get the element used for providing the streams of the media */
    element = gst_rtsp_media_get_element (media);

    /* get our appsrc, we named it 'mysrc' with the name property */
    appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "mysrc");

    /* this instructs appsrc that we will be dealing with timed buffer */
    gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");
    /* configure the caps of the video */
    g_object_set (G_OBJECT (appsrc), "caps",
                 gst_caps_new_simple ("video/x-raw",
                                     "format", G_TYPE_STRING, "RGB",
                                     "width", G_TYPE_INT, instance->width(),
                                     "height", G_TYPE_INT, instance->height(),
                                     "interlace-mode", G_TYPE_STRING, "progressive",
                                     "framerate", GST_TYPE_FRACTION, (int)(instance->frameRate()), 1,
                                     "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
                                     NULL), NULL);

    instance->m_clients.push_back({appsrc});

    /* install the callback that will be called when a buffer is needed */
    g_signal_connect (appsrc, "need-data", (GCallback) rtspDataNeeded, instance);
    gst_object_unref (appsrc);
    gst_object_unref (element);

}

void gstRtsp::startRtsp(int width, int height, int frameRate )
{
    m_frameRate = frameRate;
    m_width = width;
    m_height = height;

    /* make a media factory for a test stream. The default media factory can use
   * gst-launch syntax to create pipelines.
   * any launch line works as long as it contains elements named pay%d. Each
   * element with pay%d names will be a stream */
    GstRTSPMediaFactory *factory = gst_rtsp_media_factory_new ();


    gst_rtsp_media_factory_set_launch (factory, "( appsrc name=mysrc ! videoconvert ! v4l2jpegenc ! jpegparse ! rtpjpegpay name=pay0 pt=96 )" );
    gst_rtsp_media_factory_set_shared (factory, TRUE);
    gst_rtsp_media_factory_set_eos_shutdown(factory, TRUE);

    /* notify when our media is ready. This is called whenever someone asks for
    * the media and a new pipeline with our appsrc is created */
    g_signal_connect (factory, "media-configure", (GCallback) rtspMediaConfigure, this);

    m_running = true;
    gst_rtsp_mount_points_add_factory (m_mounts, "/stream", factory);
    m_serverId = gst_rtsp_server_attach (m_server, NULL);

}

int gstRtsp::width(){
  return m_width;
}

int gstRtsp::height(){
  return m_height;
}

int gstRtsp::frameRate(){
  return m_frameRate;
}
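
On the client side, a matching receive pipeline for the JPEG stream would be something along the lines of (IP is a placeholder):

gst-launch-1.0 rtspsrc location=rtsp://IP:8554/stream latency=100 ! rtpjpegdepay ! jpegdec ! autovideosink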

