我正在尝试为 MP4 文件应用程序构建 GStreamer 管道。但是,我无法完成完整的管道。我在网上做了一些研究,但找到的一些答案已经过时了。
这是我尝试过的。
filesrc = gst_element_factory_make("filesrc", "input-file-source");
demux = gst_element_factory_make("qtdemux", "demux-input-video");
h264_decode = gst_element_factory_make("avdec_h264", "h-264-decode-video");
sink = gst_element_factory_make("xvimagesink", "video-sink");
然后我创建了一个 bin,用 gst_bin_add_many 将这些元素添加进去;用 gst_element_link_many 链接 filesrc 和 demux;再连接 pad-added 信号:g_signal_connect(demux, "pad-added", G_CALLBACK(pad_added_handler), demux); 最后把 pipeline 设置为运行状态。但是,我没有得到任何输出。
另外,我不想使用 playbin 或 decodebin,因为它们不允许自定义,而且那样我学不到如何为不同的文件格式手动构建管道。有人可以帮忙吗?
根据@SeB评论,我创建了一个管道并且正在运行。
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
/*
gst-launch-1.0 -v filesrc location=g51.mp4 ! qtdemux ! h264parse ! avdec_h264 ! queue ! videoconvert ! xvimagesink
*/
/* Handler for the pad-added signal */
static void pad_added_handler(GstElement *src, GstPad *pad, GstElement *data);
/*
 * Build and run a fixed MP4 (H.264) playback pipeline:
 *   filesrc -> qtdemux -> [pad-added] -> h264parse -> avdec_h264
 *           -> queue -> videoconvert -> xvimagesink
 *
 * qtdemux's source pads only appear at runtime, so filesrc..demux and
 * h264parse..sink are linked statically here, and the demuxer's video pad
 * is connected to h264parse in the pad-added handler.
 *
 * Returns 0 on clean EOS, -1 on any setup or runtime error.
 */
int main(int argc, char *argv[])
{
    GstElement *pipeline, *filesrc, *demux, *h264_parse, *h264_decode,
               *raw_queue, *raw_v_convert, *sink;
    GstStateChangeReturn ret;
    GstBus *bus;
    GstMessage *msg;
    gboolean terminate = FALSE;

    /* Initialize GStreamer (also strips GStreamer-specific options from argv). */
    gst_init(&argc, &argv);
    g_print("Gstreamer initialization complete\n");

    /* Fix: the original read argv[1] unconditionally and segfaulted when
     * started without a file argument. */
    if (argc < 2)
    {
        g_printerr("Usage: %s <mp4-file>\n", argv[0]);
        return -1;
    }

    filesrc       = gst_element_factory_make("filesrc", "input-file-source");
    demux         = gst_element_factory_make("qtdemux", "demux-input-video");
    h264_parse    = gst_element_factory_make("h264parse", "h264_parse");
    h264_decode   = gst_element_factory_make("avdec_h264", "h-264-decode-video");
    raw_queue     = gst_element_factory_make("queue", "raw-queue");
    raw_v_convert = gst_element_factory_make("videoconvert", "video-convert");
    sink          = gst_element_factory_make("xvimagesink", "video-sink");
    if (!filesrc || !demux || !h264_parse || !h264_decode ||
        !raw_queue || !raw_v_convert || !sink)
    {
        /* Fix: the original only printed and kept going, then crashed on
         * the NULL element(s) below. */
        g_printerr("Not all elements got created !\n");
        return -1;
    }

    g_object_set(G_OBJECT(filesrc), "location", argv[1], NULL);

    /* Create the empty pipeline. */
    pipeline = gst_pipeline_new("test-pipeline");
    if (!pipeline)
    {
        g_printerr("Pipeline bin didn't create successfully!\n");
        return -1;
    }

    /* Adding elements; the bin takes ownership of the elements. */
    gst_bin_add_many(GST_BIN(pipeline), filesrc, demux, h264_parse, h264_decode,
                     raw_queue, raw_v_convert, sink, NULL);

    /* Static links. The demux -> h264parse link is made later, in the
     * pad-added handler, once qtdemux exposes its video pad. */
    if (gst_element_link_many(filesrc, demux, NULL) != TRUE)
    {
        g_printerr("Pipeline linking-1 failed!!\n");
        gst_object_unref(pipeline);   /* Fix: original leaked the pipeline. */
        return -1;
    }
    if (gst_element_link_many(h264_parse, h264_decode, raw_queue,
                              raw_v_convert, sink, NULL) != TRUE)
    {
        g_printerr("Pipeline linking-2 failed!!\n");
        gst_object_unref(pipeline);
        return -1;
    }

    /* Connect to the pad-added signal; h264_parse is the link target. */
    g_signal_connect(demux, "pad-added", G_CALLBACK(pad_added_handler), h264_parse);

    ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    /* Block on the bus until an error or end-of-stream message arrives. */
    bus = gst_element_get_bus(pipeline);
    do
    {
        msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
                GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
        if (msg != NULL)
        {
            GError *err;
            gchar *debug_info;
            switch (GST_MESSAGE_TYPE(msg))
            {
            case GST_MESSAGE_ERROR:
                gst_message_parse_error(msg, &err, &debug_info);
                g_printerr("Error received from element %s: %s\n",
                           GST_OBJECT_NAME(msg->src), err->message);
                g_printerr("Debugging information: %s\n",
                           debug_info ? debug_info : "none");
                g_clear_error(&err);
                g_free(debug_info);
                terminate = TRUE;
                break;
            case GST_MESSAGE_EOS:
                g_print("End-Of-Stream reached.\n");
                terminate = TRUE;
                break;
            case GST_MESSAGE_STATE_CHANGED:
                /* Only report state changes of the pipeline itself; every
                 * element emits these too. */
                if (GST_MESSAGE_SRC(msg) == GST_OBJECT(pipeline))
                {
                    GstState old_state, new_state, pending_state;
                    gst_message_parse_state_changed(msg, &old_state, &new_state,
                                                    &pending_state);
                    g_print("Pipeline state changed from %s to %s:\n",
                            gst_element_state_get_name(old_state),
                            gst_element_state_get_name(new_state));
                }
                break;
            default:
                /* Unreachable: the filter above only admits the three types. */
                g_printerr("Unexpected message received.\n");
                break;
            }
            gst_message_unref(msg);
        }
    } while (!terminate);

    /* Free resources. */
    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}
/*
 * pad-added handler for qtdemux: link the demuxer's H.264 video pad to
 * h264parse's sink pad. Any other pad (audio, subtitles, ...) is ignored,
 * because this pipeline only has a video branch.
 *
 * src        - the demuxer that emitted the signal (for logging only)
 * new_pad    - the freshly exposed source pad
 * h264_parse - the h264parse element passed as user data at connect time
 */
static void pad_added_handler(GstElement *src, GstPad *new_pad, GstElement *h264_parse)
{
    GstPad *sink_pad = NULL;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;
    GstPadLinkReturn ret;

    g_print("Received new pad '%s' from '%s':\n", GST_PAD_NAME(new_pad),
            GST_ELEMENT_NAME(src));

    /* Fix: gst_pad_get_current_caps() returns NULL when caps are not yet
     * negotiated; the original then crashed in gst_caps_get_structure().
     * Fall back to the pad's allowed caps in that case. */
    new_pad_caps = gst_pad_get_current_caps(new_pad);
    if (new_pad_caps == NULL)
        new_pad_caps = gst_pad_query_caps(new_pad, NULL);
    new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    new_pad_type = gst_structure_get_name(new_pad_struct);

    if (g_str_has_prefix(new_pad_type, "video/x-h264"))
    {
        g_print("init video GstPad\n");
        sink_pad = gst_element_get_static_pad(h264_parse, "sink");
    }
    else
    {
        /* Fix: the original tried to link audio/mpeg pads to h264parse's
         * sink as well, which can never succeed (incompatible caps) and
         * only produced a spurious link-failure message. */
        g_print("It has type '%s' which is not raw audio or video. Ignoring.\n",
                new_pad_type);
        goto exit;
    }

    /* If the parser is already linked, we have nothing to do here. */
    if (gst_pad_is_linked(sink_pad))
    {
        g_print("We are already linked. Ignoring.\n");
        goto exit;
    }

    /* Attempt the link. */
    ret = gst_pad_link(new_pad, sink_pad);
    if (GST_PAD_LINK_FAILED(ret))
    {
        g_print("Type is '%s' but link failed for reason '%i'.\n",
                new_pad_type, ret);
    }
    else
    {
        g_print("Link succeeded (type '%s').\n", new_pad_type);
    }

exit:
    g_print ("Reached exit \n\n");
    /* Unreference the new pad's caps, if we got them. */
    if (new_pad_caps != NULL)
        gst_caps_unref(new_pad_caps);
    /* Fix: sink_pad is NULL on the "ignoring" path; unref'ing NULL raises
     * a GLib assertion warning. */
    if (sink_pad != NULL)
        gst_object_unref(sink_pad);
}