无法使用 gstreamer c 中的输入选择器切换到其他摄像头

问题描述 投票:0回答:1

在代码中,正如你所看到的,我有两个输入管道 gst_element_link_many(v4l2src1,videoconvert1,capsfilter1,queue1,input_selector,NULL); gst_element_link_many(v4l2src2,videoconvert2,capsfilter2,queue2,input_selector,NULL);

所以最初第一个摄像机或 v4l2src1 正在运行,在摄像机运行后,如果在控制台中按 1 swicth_camera 函数被调用,但它不会切换到其他摄像机。 所以我想知道我们如何切换相机,我是 gstreaemer 的初学者,所以如果可能的话请给我 swicth_camera 的完整代码以在运行时切换相机。

我正在使用 gstreamer 1.16.3


#include <gst/gst.h>
#include <unistd.h>

static GMainLoop *loop;

GstElement *pipeline, *v4l2src1, *v4l2src2, *input_selector, *videoconvert1, *videoconvert2,   
    *x264enc, *h264parse, *flvmux, *rtmpsink,
    *alsasrc, *audioconvert, *avenc_aac, *aacparse, *capsfilter1, *capsfilter2, *queue1, *queue2;

void switch_camera(char *key) {
    gst_element_set_state(pipeline, GST_STATE_PAUSED);

    GstPad *new_active_pad = gst_element_get_static_pad(queue2, "src");
    if (new_active_pad) {
        g_print("selected pad");
    }

    GstPad *old_active_pad = NULL;
    g_object_get(G_OBJECT(input_selector), "active-pad", &old_active_pad, NULL);

    if (old_active_pad != NULL) {
        gst_object_unref(old_active_pad);
        g_print("\nunref the old active pad");
    } else {
        g_print("\nNot able to unref");
    }

    g_print("\nbefore\n");
    g_object_set(G_OBJECT(input_selector), "active-pad", new_active_pad, NULL);
    g_print("\nafter\n");

    gst_element_set_state(pipeline, GST_STATE_PLAYING);
}

gboolean check_keyboard_input(gpointer user_data) {
    char key;
    if (read(STDIN_FILENO, &key, 1) == 1) {
        if (key == '1') {
            switch_camera(&key);
        } else if (key == '2') {
            switch_camera(&key);
        }
    }
    return TRUE;
}

int main(int argc, char *argv[]) {
    gst_init(&argc, &argv);

    loop = g_main_loop_new(NULL, FALSE);

    pipeline = gst_pipeline_new("multi-camera-stream");
    v4l2src1 = gst_element_factory_make("v4l2src", "source1");

    v4l2src2 = gst_element_factory_make("v4l2src", "source2");
    input_selector = gst_element_factory_make("input-selector", "selector");
    videoconvert1 = gst_element_factory_make("videoconvert", "videoconvert1");
    videoconvert2 = gst_element_factory_make("videoconvert", "videoconvert2");
    x264enc = gst_element_factory_make("x264enc", "x264enc");
    h264parse = gst_element_factory_make("h264parse", "h264parse");
    flvmux = gst_element_factory_make("flvmux", "flvmux");
    rtmpsink = gst_element_factory_make("rtmpsink", "rtmpsink");
    alsasrc = gst_element_factory_make("alsasrc", "alsasrc");
    audioconvert = gst_element_factory_make("audioconvert", "audioconvert");
    avenc_aac = gst_element_factory_make("avenc_aac", "avenc_aac");
    aacparse = gst_element_factory_make("aacparse", "aacparse");
    capsfilter1 = gst_element_factory_make("capsfilter", "capsfilter1");
    capsfilter2 = gst_element_factory_make("capsfilter", "capsfilter2");
    GstElement *videoscale = gst_element_factory_make("videoscale", "videoscale");
    // Create queue elements for buffer management
    queue1 = gst_element_factory_make("queue", "q1");
    queue2 = gst_element_factory_make("queue", "q2");

    // Check if elements were created successfully
    if (!pipeline || !v4l2src1 || !v4l2src2 || !input_selector || !videoconvert1 || !videoconvert2 
    || !x264enc || !h264parse || !flvmux ||
        !rtmpsink || !alsasrc || !audioconvert || !avenc_aac || !aacparse || !capsfilter1 || 
    !capsfilter2 || !queue1 || !queue2) {
        g_printerr("Not all elements could be created.\n");
        return -1;
    }

    // Set the input source for video and audio
    g_object_set(G_OBJECT(v4l2src1), "device", "/dev/video2", NULL);
    g_object_set(G_OBJECT(v4l2src2), "device", "/dev/video4", NULL);
    g_object_set(G_OBJECT(alsasrc), "device", "hw:0", NULL);

    // Set the location for RTMP sink
    //g_object_set(G_OBJECT(rtmpsink), "location", "rtmp://a.rtmp.youtube.com/live2/h22t-x1zz- 
    1x6f-8mqp-cy2k live=1", NULL);
    g_object_set(G_OBJECT(rtmpsink), "location", 
    "rtmp://live.twitch.tv/app/live_995412749_duF1ntZdWZnLf4li3NcOSrD3MK0qJM live=1", NULL);
    g_object_set(G_OBJECT(x264enc), "bitrate", 4000, NULL);
    g_object_set(G_OBJECT(x264enc), "key-int-max", 30, NULL);
    g_object_set(G_OBJECT(x264enc), "speed-preset", 1, NULL);
    g_object_set(G_OBJECT(x264enc), "tune", 4, NULL);

    // Set audio-related properties
    g_object_set(G_OBJECT(avenc_aac), "bitrate", 128000, NULL);

    // Set video caps for the capsfilter
    GstCaps *caps1 = gst_caps_new_simple("video/x-raw",
                                        "format", G_TYPE_STRING, "I420",
                                        "framerate", GST_TYPE_FRACTION, 30, 1,
                                        "width", G_TYPE_INT, 640,
                                        "height", G_TYPE_INT, 480,
                                        NULL);

    // Set video caps for the capsfilter
    GstCaps *caps2 = gst_caps_new_simple("video/x-raw",
                                        "format", G_TYPE_STRING, "I420",
                                        "framerate", GST_TYPE_FRACTION, 30, 1,
                                        "width", G_TYPE_INT, 640,
                                        "height", G_TYPE_INT, 480,
                                        NULL);

    g_object_set(G_OBJECT(capsfilter1), "caps", caps1, NULL);
    g_object_set(G_OBJECT(capsfilter2), "caps", caps2, NULL);

    g_print("Running\n");

    // Create a GIO channel to watch for keyboard input
    GIOChannel *io_channel = g_io_channel_unix_new(STDIN_FILENO);
    g_io_add_watch(io_channel, G_IO_IN, (GIOFunc)check_keyboard_input, NULL);

    //gst_element_set_state(v4l2src1, GST_STATE_READY);

    gst_bin_add_many(GST_BIN(pipeline), v4l2src1, v4l2src2, input_selector, videoscale, 
    videoconvert1, videoconvert2, x264enc, h264parse, flvmux,
                     rtmpsink, alsasrc, audioconvert, avenc_aac, aacparse, capsfilter1, 
    capsfilter2, queue1, queue2, NULL);

    // Link v4l2src1 to input_selector
    gst_element_link_many(v4l2src1, videoconvert1, capsfilter1, queue1, input_selector, NULL);
    // Link v4l2src2 to input_selector
    gst_element_link_many(v4l2src2, videoconvert2, capsfilter2, queue2, input_selector, NULL);
    // Link input_selector to x264enc
    gst_element_link_many(input_selector, x264enc, h264parse, flvmux, rtmpsink, NULL);

    gst_element_link_many(alsasrc, audioconvert, avenc_aac, aacparse, flvmux, NULL);

    guint numPads;

    g_object_get(G_OBJECT(input_selector), "n-pads", &numPads, NULL);

    g_print("Number of sink pads of input selector: %u\n", numPads);

    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    g_main_loop_run(loop);

    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(pipeline));
    g_main_loop_unref(loop);
    g_io_channel_unref(io_channel);

    return 0;
}
gstreamer live-streaming gstreamer-1.0 multimedia
1个回答
0
投票

您必须使用选择器的水槽垫。 您可以尝试这个简化版本的代码:

#include <gst/gst.h>
#include <unistd.h>

static GMainLoop *loop;
static GstElement *pipeline, *input_selector;

void switch_camera(char *key) {
    GstPad *new_active_pad;
    switch(*key) {
    case '1':
    {
        new_active_pad = gst_element_get_static_pad(input_selector, "sink_0");
        break;
    }   
    case '2':
    {
        new_active_pad = gst_element_get_static_pad(input_selector, "sink_1");
        break;
    }
    default:
        return;
    }   
    
    gst_element_set_state(pipeline, GST_STATE_PAUSED);
    g_object_set(G_OBJECT(input_selector), "active-pad", new_active_pad, NULL);
    gst_object_unref(new_active_pad);
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
}

gboolean check_keyboard_input(gpointer user_data) {
    char key;
    if (read(STDIN_FILENO, &key, 1) == 1) {
        if (key == '1') {
            switch_camera(&key);
        } else if (key == '2') {
            switch_camera(&key);
        }
    }
    return TRUE;
}

int main(int argc, char *argv[]) {
    gst_init(&argc, &argv);

    loop = g_main_loop_new(NULL, FALSE);
    
    const char* pipelineStr = "input-selector name=selector ! videoconvert ! autovideosink    videotestsrc ! selector.sink_0     videotestsrc pattern=ball ! selector.sink_1";  
    pipeline = gst_parse_launch(pipelineStr, NULL);
    input_selector = gst_bin_get_by_name(GST_BIN(pipeline), "selector");

    // Create a GIO channel to watch for keyboard input
    GIOChannel *io_channel = g_io_channel_unix_new(STDIN_FILENO);
    g_io_add_watch(io_channel, G_IO_IN, (GIOFunc)check_keyboard_input, NULL);

    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    g_main_loop_run(loop);

    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(pipeline));
    g_main_loop_unref(loop);
    g_io_channel_unref(io_channel);

    return 0;
}

构建并运行时,在启动应用程序的终端中,只需键入 1 或 2,然后按 Enter,它应该切换到请求的输入。

© www.soinside.com 2019 - 2024. All rights reserved.