Gstreamer-我想在本地目录中存在的同一 src 文件中的音频和视频之间切换

问题描述 投票:0回答:1

我一直在尝试在视频和音频之间切换。要求:前3秒只播放视频(不播放音频),然后播放音频,直到eos结束音频,最后从第3秒开始恢复视频。为此,我尝试使用阀门元件关闭音频,然后关闭视频,但不知何故视频和音频同时播放。

#include <gst/gst.h>

/* Pipeline elements, shared between main() and the callbacks.
 * Topology: filesrc -> decodebin -> {queue -> valve -> sink} per stream. */
GstElement *pipeline, *source, *demux, *audio_queue, *video_queue, *audio_sink, *video_sink, *audio_valve, *video_valve;
/* NOTE(review): these pads are declared but never assigned anywhere in this
 * file — candidates for removal once confirmed unused by other code. */
GstPad *demux_audio_pad, *demux_video_pad, *demux_pad_template;

/* Bookkeeping flags mirroring the intended valve states; they are written by
 * switch_to_video() but never read back in the visible code. */
static gboolean audio_valve_open = TRUE;
static gboolean video_valve_open = FALSE;

/* "pad-added" handler for decodebin: inspects the new pad's caps and links
 * it to the matching branch (audio_queue or video_queue).
 *
 * Fixes over the original:
 *  - gst_pad_get_current_caps() can return NULL if caps are not yet
 *    negotiated; fall back to gst_pad_query_caps() before dereferencing.
 *  - the sink pad obtained from gst_element_get_static_pad() is a new
 *    reference and must be unreffed (was leaked on every call).
 *  - the audio link result is now checked, like the video one.
 */
static void pad_added_callback(GstElement *src, GstPad *new_pad, gpointer data) {
    GstPad *sink_pad = NULL;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type;
    GstPadLinkReturn ret;

    g_print("Received new pad '%s' from '%s':\n", GST_PAD_NAME(new_pad), GST_ELEMENT_NAME(src));

    /* Check the pad's type; current caps may be NULL before negotiation. */
    new_pad_caps = gst_pad_get_current_caps(new_pad);
    if (new_pad_caps == NULL)
        new_pad_caps = gst_pad_query_caps(new_pad, NULL);
    new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    new_pad_type = gst_structure_get_name(new_pad_struct);

    if (g_str_has_prefix(new_pad_type, "audio/x-raw")) {
        /* Audio branch */
        g_print("Audio pad added\n");
        sink_pad = gst_element_get_static_pad(audio_queue, "sink");
        ret = gst_pad_link(new_pad, sink_pad);
        if (ret != GST_PAD_LINK_OK) {
            g_error("Failed to link demux audio pad to audio queue pad. Error: %s",
                    gst_pad_link_get_name(ret));
        }
    } else if (g_str_has_prefix(new_pad_type, "video/x-raw")) {
        /* Video branch */
        g_print("Video pad added\n");
        sink_pad = gst_element_get_static_pad(video_queue, "sink");
        ret = gst_pad_link(new_pad, sink_pad);
        if (ret != GST_PAD_LINK_OK) {
            g_error("Failed to link demux video pad to video queue pad. Error: %s",
                    gst_pad_link_get_name(ret));
        }
    }

    /* Release the references taken above. */
    if (sink_pad != NULL)
        gst_object_unref(sink_pad);
    if (new_pad_caps != NULL)
        gst_caps_unref(new_pad_caps);
}

/* GSource callback (armed via g_timeout_add_seconds in main): mutes the
 * audio branch and un-mutes the video branch.
 *
 * Fix: the original set the valve elements' *state* to NULL/PLAYING, which
 * does not gate data flow (and forcing a child to NULL inside a PLAYING
 * pipeline desyncs it). The valve element is controlled through its "drop"
 * boolean property: drop=TRUE discards buffers, drop=FALSE passes them.
 *
 * Returns G_SOURCE_REMOVE so the timeout fires only once.
 */
static gboolean switch_to_video(gpointer data) {
    g_print("Switching to Video...\n");
    /* Close the audio valve: start dropping audio buffers. */
    g_object_set(audio_valve, "drop", TRUE, NULL);
    audio_valve_open = FALSE;
    /* Open the video valve: let video buffers through. */
    g_object_set(video_valve, "drop", FALSE, NULL);
    video_valve_open = TRUE;
    return G_SOURCE_REMOVE; /* one-shot timeout */
}

int main(int argc, char *argv[]) {
    GstBus *bus;
    GstMessage *msg;

    // Initialize GStreamer
    gst_init(&argc, &argv);

    // Create the pipeline
    pipeline = gst_pipeline_new("pipeline");

    // Create elements
    source = gst_element_factory_make("filesrc", "source");
    demux = gst_element_factory_make("decodebin", "demux");
    audio_queue = gst_element_factory_make("queue", "audio_queue");
    video_queue = gst_element_factory_make("queue", "video_queue");
    audio_sink = gst_element_factory_make("alsasink", "audio_sink");
    video_sink = gst_element_factory_make("autovideosink", "video_sink");
    audio_valve = gst_element_factory_make("valve", "audio_valve");
    video_valve = gst_element_factory_make("valve", "video_valve");

    // Check elements creation
    if (!pipeline || !source || !demux || !audio_queue || !video_queue || !audio_sink || !video_sink || !audio_valve || !video_valve) {
        g_error("Failed to create elements. Exiting.");
        return -1;
    }

    // Set the location of the file to play
    g_object_set(source, "location", "/home/lg/ml/new.webm", NULL);

    // Add elements to the pipeline
    gst_bin_add_many(GST_BIN(pipeline), source, demux, audio_queue, video_queue, audio_sink, video_sink, audio_valve, video_valve, NULL);

    // Link elements
    gst_element_link(source, demux);
    gst_element_link_many(audio_queue, audio_valve, audio_sink, NULL);
    gst_element_link_many(video_queue, video_valve, video_sink, NULL);

    // Connect pad-added signal
    g_signal_connect(demux, "pad-added", G_CALLBACK(pad_added_callback), NULL);

    // Start playing
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    // Wait for 5 seconds to switch to video
    g_timeout_add_seconds(5, switch_to_video, NULL);

    // Listen for messages on the pipeline's bus
    bus = gst_element_get_bus(pipeline);
    msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

    // Free resources
    gst_message_unref(msg);
    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);

    return 0;
}


这是用于解复用音频/视频并播放本地文件(/home/lg/ml/new.webm)的代码。运行后,视频和音频同时正常播放,阀门的切换逻辑完全没有生效。

gstreamer gstreamer-1.0
1个回答
0
投票

g_timeout_add_seconds 设计用于与 GMainLoop 一起使用。在您的情况下,您没有运行 GMainLoop,并且您的 switch_to_video 函数永远不会被调用。

在您的示例中,添加

g_main_loop_run(g_main_loop_new(NULL, FALSE));

放在 g_timeout_add_seconds 调用之后,定时器才会生效。

现在,在 switch_to_video() 函数中,您可能应该设置阀门的“drop”属性:

g_object_set(audio_valve, "drop", 1, NULL); //Start dropping frames

按照您的代码,这将在 5 秒后“关闭音频”。 您还可以使用 g_object_get 获取 drop 属性的当前值(以轻松打开和关闭)。

最后,您的代码将与您所描述的并不真正匹配。我将让您使用 drop 属性来实现您想要的效果。

© www.soinside.com 2019 - 2024. All rights reserved.