如何在 C 代码中实现 GStreamer tee
How to implement GStreamer tee in C code
我有以下工作流水线。它已经使用命令行工具 gst-launch-1.0 和函数 gst_parse_launch()
进行了测试,并且在这两种情况下都有效。
videotestsrc ! video/x-raw,width=640,height=480 ! videocrop left=80 right=80 ! tee name=t ! queue ! glupload ! glimagesink t. ! queue ! jpegenc ! avimux ! filesink location=output.avi
我尝试在代码中手动设置它,但我现在遇到以下错误(应用程序打开,但没有显示视频):
Error received from element videotestsrc0 : Internal data flow error.
Debugging information: gstbasesrc.c(2948): gst_base_src_loop ():
/GstPipeline:pipeline0/GstVideoTestSrc:videotestsrc0: streaming task
paused, reason not-negotiated (-4)
我在 Qt 应用程序中使用 GStreamer,glimagesink 将视频链接到 QML 类型。所有与 GStreamer 相关的代码都位于一个名为 GStreamer 的 GStreamer class 中。整个 cpp 文件发布在下面,以防问题出在我猜不到的地方。对于不相关的代码,我深表歉意。
static gboolean busCallback(GstBus *bus, GstMessage *message, gpointer data);
/// Constructs the GStreamer QML item; no pipeline is built until
/// createPipeline() is called.
GStreamer::GStreamer(QQuickItem *parent) : QQuickItem(parent)
{
    // Fixed typo in the log message ("GSteamer" -> "GStreamer").
    qDebug() << "Constructed GStreamer";
}
void GStreamer::createPipeline()
{
qDebug() << "Creating pipeline";
if(m_source.isEmpty()){
qDebug() << "Error: Missing source property for GStreamer component";
return;
}
if(m_videoItem.isEmpty()){
qDebug() << "Error: Missing videoItem property for GStreamer component";
return;
}
m_pipeline = gst_pipeline_new(NULL);
m_sink = NULL;
QByteArray ba = m_source.toLatin1();
m_src = gst_element_factory_make(ba.data(), NULL);
g_assert(m_src);
m_filter = gst_element_factory_make("capsfilter", "filter");
g_assert(m_filter);
g_object_set(G_OBJECT (m_filter), "caps", gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
NULL),
NULL);
m_convert = gst_element_factory_make("videoconvert", NULL);
g_assert(m_convert);
m_crop = gst_element_factory_make("videocrop", "crop");
g_assert(m_crop);
g_object_set(G_OBJECT (m_crop), "left", 80, "right", 80, NULL);
// Tee
m_tee = gst_element_factory_make("tee", "videotee");
g_assert(m_tee);
// Display queue
m_displayQueue = gst_element_factory_make("queue", "displayQueue");
g_assert(m_displayQueue);
m_upload = gst_element_factory_make("glupload", NULL);
g_assert(m_upload);
m_sink = gst_element_factory_make("qmlglsink", NULL);
g_assert(m_sink);
// Record queue
m_recordQueue = gst_element_factory_make("queue", "recordQueue");
g_assert(m_recordQueue);
m_encode = gst_element_factory_make("jpegenc", NULL);
g_assert(m_encode);
m_mux = gst_element_factory_make("avimux", NULL);
g_assert(m_mux);
m_filesink = gst_element_factory_make("filesink", NULL);
g_assert(m_filesink);
g_object_set(G_OBJECT(m_filesink), "location", "output.avi", NULL);
gst_bin_add_many(GST_BIN (m_pipeline), m_src, m_filter, m_convert, m_crop, m_upload, m_sink, NULL);
gst_bin_add_many(GST_BIN(m_pipeline), m_tee, m_displayQueue, m_recordQueue, m_encode, m_mux, m_filesink, NULL);
// If I only link this simple pipeline, it works fine
/*
if(!gst_element_link_many(m_src, m_filter, m_convert, m_crop, m_upload, m_sink, NULL)){
qDebug() << "Unable to link source";
}
*/
if(!gst_element_link_many(m_src, m_filter, m_convert, m_crop, m_tee, NULL)){
qDebug() << "Unable to link source";
}
if(!gst_element_link_many(m_displayQueue, m_upload, m_sink, NULL)){
qDebug() << "Unable to link display queue";
}
if(!gst_element_link_many(m_recordQueue, m_encode, m_mux, m_filesink, NULL)){
qDebug() << "Unable to link record queue";
}
GstPad *teeDisplayPad = gst_element_get_request_pad(m_tee, "src_%u");
GstPad *queueDisplayPad = gst_element_get_static_pad(m_displayQueue, "sink");
GstPad *teeRecordPad = gst_element_get_request_pad(m_tee, "src_%u");
GstPad *queueRecordPad = gst_element_get_static_pad(m_recordQueue, "sink");
if(gst_pad_link(teeDisplayPad, queueDisplayPad) != GST_PAD_LINK_OK){
qDebug() << "Unable to link display tee";
}
if(gst_pad_link(teeRecordPad, queueRecordPad) != GST_PAD_LINK_OK){
qDebug() << "Unable to link record tee";
}
//gst_object_unref(teeDisplayPad);
gst_object_unref(queueDisplayPad);
//gst_object_unref(teeRecordPad);
gst_object_unref(queueRecordPad);
QQuickItem *videoItem = window()->findChild<QQuickItem *> (m_videoItem);
g_object_set(m_sink, "widget", videoItem, NULL);
// This will call gst_element_set_state(m_pipeline, GST_STATE_PLAYING) when the window is ready
window()->scheduleRenderJob (new SetPlaying (m_pipeline), QQuickWindow::BeforeSynchronizingStage);
m_bus = gst_element_get_bus(m_pipeline);
gst_bus_add_watch(m_bus, busCallback, m_loop);
gst_object_unref(m_bus);
m_loop = g_main_loop_new(NULL, false);
g_main_loop_run(m_loop);
}
/// Bus watch: logs ERROR/EOS messages and quits the GMainLoop that was
/// passed as the watch's user data. Returns true to keep the watch alive.
static gboolean busCallback(GstBus *bus, GstMessage *message, gpointer data)
{
    qDebug() << "Callback function reached";
    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_ERROR: {
        // Braced scope so the locals can be zero-initialized without the
        // "jump over initialization" problem inside a switch statement.
        GError *error = NULL;
        gchar *debugInfo = NULL;
        gst_message_parse_error(message, &error, &debugInfo);
        qDebug() << "Error received from element"
                 << GST_OBJECT_NAME(message->src) << ":" << error->message;
        qDebug() << "Debugging information:" << (debugInfo ? debugInfo : "none");
        g_clear_error(&error);
        g_free(debugInfo);
        g_main_loop_quit(static_cast<GMainLoop *>(data));
        break;
    }
    case GST_MESSAGE_EOS:
        qDebug() << "End-Of-Stream reached.";
        g_main_loop_quit(static_cast<GMainLoop *>(data));
        break;
    default:
        qDebug() << "Unexpected message received.";
        break;
    }
    // Returning true keeps the bus watch installed.
    return true;
}
/**
The rest of the code is probably not relevant. It contains
only destructor and some getters and setters.
**/
/// Stops and releases the pipeline.
GStreamer::~GStreamer()
{
    // Do NOT unref m_bus here: createPipeline() already unreffed it right
    // after attaching the bus watch, so a second unref was a double-unref.
    // NOTE(review): if createPipeline() was never called, m_pipeline may be
    // uninitialized — initialize the members to nullptr in the header.
    gst_element_set_state(m_pipeline, GST_STATE_NULL);
    gst_object_unref(m_pipeline);
}
/// Returns the GStreamer element factory name set via the QML "source"
/// property.
QString GStreamer::source() const
{
    return this->m_source;
}
/// Stores the source element factory name; no-op when unchanged.
void GStreamer::setSource(const QString &source)
{
    if (m_source == source)
        return;
    m_source = source;
}
/// Returns the objectName of the QML item the video is rendered into.
QString GStreamer::videoItem() const
{
    return this->m_videoItem;
}
/// Stores the target QML item name; no-op when unchanged.
void GStreamer::setVideoItem(const QString &videoItem)
{
    if (m_videoItem == videoItem)
        return;
    m_videoItem = videoItem;
}
.h文件中定义了所有成员变量。
如果我不把 tee 元素加入 bin 并链接进管道,视频就会按预期显示在屏幕上。所以我猜是我把 tee 元素上的 pad(衬垫)处理错了。
我一直在关注 GStreamers 文档中的教程,所以我不明白为什么它不起作用。
希望有人能提供帮助。
好的,所以提供的 gst-launch 行和应用程序代码之间的区别是使用 qmlglsink
元素代替 glimagesink
。
问题是 qmlglsink 只接受 RGBA 格式的视频缓冲区,但 tee 另一个分支中的 jpegenc 并不接受 RGBA 格式。这会导致协商失败,因为 tee 的两个分支无法就一种共同的视频格式达成一致。
解决方法是在 jpegenc
之前添加一个 videoconvert
元素或在 qmlglsink
之前添加一个 glcolorconvert
元素,以便 tee
的两个分支都可以协商到相同的视频格式。
旁注:glimagesink
内部包含一个 glupload ! glcolorconvert ! actual-sink
,因此已经在转换视频格式。
我有以下工作流水线。它已经使用命令行工具 gst-launch-1.0 和函数 gst_parse_launch()
进行了测试,并且在这两种情况下都有效。
videotestsrc ! video/x-raw,width=640,height=480 ! videocrop left=80 right=80 ! tee name=t ! queue ! glupload ! glimagesink t. ! queue ! jpegenc ! avimux ! filesink location=output.avi
我尝试在代码中手动设置它,但我现在遇到以下错误(应用程序打开,但没有显示视频):
Error received from element videotestsrc0 : Internal data flow error.
Debugging information: gstbasesrc.c(2948): gst_base_src_loop (): /GstPipeline:pipeline0/GstVideoTestSrc:videotestsrc0: streaming task paused, reason not-negotiated (-4)
我在 Qt 应用程序中使用 GStreamer,glimagesink 将视频链接到 QML 类型。所有与 GStreamer 相关的代码都位于一个名为 GStreamer 的 GStreamer class 中。整个 cpp 文件发布在下面,以防问题出在我猜不到的地方。对于不相关的代码,我深表歉意。
static gboolean busCallback(GstBus *bus, GstMessage *message, gpointer data);
/// Constructs the GStreamer QML item; no pipeline is built until
/// createPipeline() is called.
GStreamer::GStreamer(QQuickItem *parent) : QQuickItem(parent)
{
    // Fixed typo in the log message ("GSteamer" -> "GStreamer").
    qDebug() << "Constructed GStreamer";
}
void GStreamer::createPipeline()
{
qDebug() << "Creating pipeline";
if(m_source.isEmpty()){
qDebug() << "Error: Missing source property for GStreamer component";
return;
}
if(m_videoItem.isEmpty()){
qDebug() << "Error: Missing videoItem property for GStreamer component";
return;
}
m_pipeline = gst_pipeline_new(NULL);
m_sink = NULL;
QByteArray ba = m_source.toLatin1();
m_src = gst_element_factory_make(ba.data(), NULL);
g_assert(m_src);
m_filter = gst_element_factory_make("capsfilter", "filter");
g_assert(m_filter);
g_object_set(G_OBJECT (m_filter), "caps", gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
NULL),
NULL);
m_convert = gst_element_factory_make("videoconvert", NULL);
g_assert(m_convert);
m_crop = gst_element_factory_make("videocrop", "crop");
g_assert(m_crop);
g_object_set(G_OBJECT (m_crop), "left", 80, "right", 80, NULL);
// Tee
m_tee = gst_element_factory_make("tee", "videotee");
g_assert(m_tee);
// Display queue
m_displayQueue = gst_element_factory_make("queue", "displayQueue");
g_assert(m_displayQueue);
m_upload = gst_element_factory_make("glupload", NULL);
g_assert(m_upload);
m_sink = gst_element_factory_make("qmlglsink", NULL);
g_assert(m_sink);
// Record queue
m_recordQueue = gst_element_factory_make("queue", "recordQueue");
g_assert(m_recordQueue);
m_encode = gst_element_factory_make("jpegenc", NULL);
g_assert(m_encode);
m_mux = gst_element_factory_make("avimux", NULL);
g_assert(m_mux);
m_filesink = gst_element_factory_make("filesink", NULL);
g_assert(m_filesink);
g_object_set(G_OBJECT(m_filesink), "location", "output.avi", NULL);
gst_bin_add_many(GST_BIN (m_pipeline), m_src, m_filter, m_convert, m_crop, m_upload, m_sink, NULL);
gst_bin_add_many(GST_BIN(m_pipeline), m_tee, m_displayQueue, m_recordQueue, m_encode, m_mux, m_filesink, NULL);
// If I only link this simple pipeline, it works fine
/*
if(!gst_element_link_many(m_src, m_filter, m_convert, m_crop, m_upload, m_sink, NULL)){
qDebug() << "Unable to link source";
}
*/
if(!gst_element_link_many(m_src, m_filter, m_convert, m_crop, m_tee, NULL)){
qDebug() << "Unable to link source";
}
if(!gst_element_link_many(m_displayQueue, m_upload, m_sink, NULL)){
qDebug() << "Unable to link display queue";
}
if(!gst_element_link_many(m_recordQueue, m_encode, m_mux, m_filesink, NULL)){
qDebug() << "Unable to link record queue";
}
GstPad *teeDisplayPad = gst_element_get_request_pad(m_tee, "src_%u");
GstPad *queueDisplayPad = gst_element_get_static_pad(m_displayQueue, "sink");
GstPad *teeRecordPad = gst_element_get_request_pad(m_tee, "src_%u");
GstPad *queueRecordPad = gst_element_get_static_pad(m_recordQueue, "sink");
if(gst_pad_link(teeDisplayPad, queueDisplayPad) != GST_PAD_LINK_OK){
qDebug() << "Unable to link display tee";
}
if(gst_pad_link(teeRecordPad, queueRecordPad) != GST_PAD_LINK_OK){
qDebug() << "Unable to link record tee";
}
//gst_object_unref(teeDisplayPad);
gst_object_unref(queueDisplayPad);
//gst_object_unref(teeRecordPad);
gst_object_unref(queueRecordPad);
QQuickItem *videoItem = window()->findChild<QQuickItem *> (m_videoItem);
g_object_set(m_sink, "widget", videoItem, NULL);
// This will call gst_element_set_state(m_pipeline, GST_STATE_PLAYING) when the window is ready
window()->scheduleRenderJob (new SetPlaying (m_pipeline), QQuickWindow::BeforeSynchronizingStage);
m_bus = gst_element_get_bus(m_pipeline);
gst_bus_add_watch(m_bus, busCallback, m_loop);
gst_object_unref(m_bus);
m_loop = g_main_loop_new(NULL, false);
g_main_loop_run(m_loop);
}
/// Bus watch: logs ERROR/EOS messages and quits the GMainLoop that was
/// passed as the watch's user data. Returns true to keep the watch alive.
static gboolean busCallback(GstBus *bus, GstMessage *message, gpointer data)
{
    qDebug() << "Callback function reached";
    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_ERROR: {
        // Braced scope so the locals can be zero-initialized without the
        // "jump over initialization" problem inside a switch statement.
        GError *error = NULL;
        gchar *debugInfo = NULL;
        gst_message_parse_error(message, &error, &debugInfo);
        qDebug() << "Error received from element"
                 << GST_OBJECT_NAME(message->src) << ":" << error->message;
        qDebug() << "Debugging information:" << (debugInfo ? debugInfo : "none");
        g_clear_error(&error);
        g_free(debugInfo);
        g_main_loop_quit(static_cast<GMainLoop *>(data));
        break;
    }
    case GST_MESSAGE_EOS:
        qDebug() << "End-Of-Stream reached.";
        g_main_loop_quit(static_cast<GMainLoop *>(data));
        break;
    default:
        qDebug() << "Unexpected message received.";
        break;
    }
    // Returning true keeps the bus watch installed.
    return true;
}
/**
The rest of the code is probably not relevant. It contains
only destructor and some getters and setters.
**/
/// Stops and releases the pipeline.
GStreamer::~GStreamer()
{
    // Do NOT unref m_bus here: createPipeline() already unreffed it right
    // after attaching the bus watch, so a second unref was a double-unref.
    // NOTE(review): if createPipeline() was never called, m_pipeline may be
    // uninitialized — initialize the members to nullptr in the header.
    gst_element_set_state(m_pipeline, GST_STATE_NULL);
    gst_object_unref(m_pipeline);
}
/// Returns the GStreamer element factory name set via the QML "source"
/// property.
QString GStreamer::source() const
{
    return this->m_source;
}
/// Stores the source element factory name; no-op when unchanged.
void GStreamer::setSource(const QString &source)
{
    if (m_source == source)
        return;
    m_source = source;
}
/// Returns the objectName of the QML item the video is rendered into.
QString GStreamer::videoItem() const
{
    return this->m_videoItem;
}
/// Stores the target QML item name; no-op when unchanged.
void GStreamer::setVideoItem(const QString &videoItem)
{
    if (m_videoItem == videoItem)
        return;
    m_videoItem = videoItem;
}
.h文件中定义了所有成员变量。
如果我不把 tee 元素加入 bin 并链接进管道,视频就会按预期显示在屏幕上。所以我猜是我把 tee 元素上的 pad(衬垫)处理错了。
我一直在关注 GStreamers 文档中的教程,所以我不明白为什么它不起作用。
希望有人能提供帮助。
好的,所以提供的 gst-launch 行和应用程序代码之间的区别是使用 qmlglsink
元素代替 glimagesink
。
问题是 qmlglsink 只接受 RGBA 格式的视频缓冲区,但 tee 另一个分支中的 jpegenc 并不接受 RGBA 格式。这会导致协商失败,因为 tee 的两个分支无法就一种共同的视频格式达成一致。
解决方法是在 jpegenc
之前添加一个 videoconvert
元素或在 qmlglsink
之前添加一个 glcolorconvert
元素,以便 tee
的两个分支都可以协商到相同的视频格式。
旁注:glimagesink
内部包含一个 glupload ! glcolorconvert ! actual-sink
,因此已经在转换视频格式。