上一章介绍使用QT播放GStreamer视频流 【QGroundControl二次开发】八. QT实现播放gstreamer视频。
这章介绍如何在原有基础上将视频流保存为文件,并自动分割成一个个规定大小(或时长)的小视频。
一. 思想
之前的文章展示了如何在QT中播放GST视频流,这章在原有的基础上增加了一部分代码。原理就是使用Gstreamer的tee为管道开一条支流。大致图示如下(增加一条保存分路):
二. 增加代码
2.1 创建文件分路所需元素
// Create the elements for the recording side-branch fed by the tee.
record_queue = gst_element_factory_make("queue", "record_queue"); // decouples recording from the display path
video_rate = gst_element_factory_make("videorate", "video_rate"); // normalizes the frame rate for the encoder
jpeg_enc = gst_element_factory_make("jpegenc", "jpeg_enc");       // encodes raw frames as JPEG
avi_mux = gst_element_factory_make("avimux", "avi_mux");          // muxes the JPEG stream into an AVI container
file_sink = gst_element_factory_make ("filesink", "file_sink");   // writes the muxed stream to disk
2.2 把这些保存视频要使用的元素放进管道。
// Add the recording-branch elements to the pipeline, then bring the queue's
// state in line with the (possibly already running) pipeline.
gst_bin_add_many(GST_BIN (pipeline), record_queue,video_rate,jpeg_enc,avi_mux,file_sink, NULL);
gst_element_sync_state_with_parent(record_queue);
// Link queue -> videorate -> jpegenc -> avimux -> filesink.
if(gst_element_link_many(record_queue,video_rate,jpeg_enc,avi_mux,file_sink, NULL) != TRUE){
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
// Output path for the recording.
// NOTE(review): `videofilename` is not declared in this excerpt — it must be
// defined elsewhere as a plain C string (g_object_set varargs require char*).
g_object_set (G_OBJECT(file_sink),"location",videofilename,NULL);
2.3 把支路和主路连接起来。
// Manually link the tee, whose src pads are "Request" pads (created on demand).
tee_record_pad = gst_element_get_request_pad (tee, "src_%u");
// NOTE(review): gst_pad_get_name() returns a newly allocated string that
// should be released with g_free(); it is leaked here.
g_print ("Obtained request pad %s for record branch.\n", gst_pad_get_name (tee_record_pad));
queue_record_pad = gst_element_get_static_pad (record_queue, "sink");
if (!queue_record_pad) g_printerr ("queue_record_pad not be get.\n");
// Link the tee's request pad to the recording queue's sink pad.
if (gst_pad_link(tee_record_pad, queue_record_pad) != GST_PAD_LINK_OK) {
g_printerr("Tee MP4 branch could not be linked.\n");
gst_object_unref(pipeline);
return -1;
}
// Queue limits: unlimited buffer count and time, capped at ~512 MB by size.
g_object_set (G_OBJECT(record_queue),"max-size-buffers",0,NULL);
g_object_set (G_OBJECT(record_queue),"max-size-time",0,NULL);
g_object_set (G_OBJECT(record_queue),"max-size-bytes",512000000,NULL);
// Drop the static-pad reference (the tee request pad is still held).
gst_object_unref (queue_record_pad);
最后保存为 MP4 后缀的文件。尝试 x264enc、mp4mux 时无法连接管道,但使用 jpeg_enc、avi_mux 可以正常保存并播放视频(注意:此时实际容器格式为 AVI,仅文件后缀为 .mp4)。
三. 改进
由于一直保存视频会导致文件特别大,需要将保存的视频按需自动分成一个一个的规定大小或时间的视频,就要用到splitmuxsink 元素。
splitmuxsink 是 GStreamer 中一个非常有用的元素,它可以在达到最大文件大小或时间阈值时,自动分割视频文件,且分割操作在视频关键帧边界进行,保证了视频片段的完整性。通俗讲splitmuxsink = muxer + sink。
默认情况下,splitmuxsink 使用 mp4mux 作为muxer和 filesink 作为sink,但也可以通过设置相应的属性来使用其他的muxer和sink。
// Elements for the recording branch, now terminated by splitmuxsink
// (splitmuxsink = muxer + sink, handling automatic file splitting).
record_queue = gst_element_factory_make("queue", "record_queue");
video_rate = gst_element_factory_make("videorate", "video_rate");
jpeg_enc = gst_element_factory_make("jpegenc", "jpeg_enc");
avi_mux = gst_element_factory_make("avimux", "avi_mux");
splitmuxsink= gst_element_factory_make("splitmuxsink", "splitmuxsink");
file_sink = gst_element_factory_make ("filesink", "file_sink");
// Configure splitmuxsink: override the default mp4mux/filesink with
// avimux/filesink, split every 90 seconds, and keep at most 20 files on disk
// (oldest files are removed first once the limit is reached).
// NOTE(review): `filename` must be a plain C string containing a pattern such
// as "video%02d.mp4" — %02d is replaced by the fragment index.
g_object_set(G_OBJECT(splitmuxsink),
"muxer", avi_mux,
"sink", file_sink,
"max-size-time", (guint64)90*GST_SECOND,
"max-files", 20,
"location",filename , NULL);
这段代码设置splitmuxsink参数,每段视频最长90秒,每次最多保存20个视频文件超出覆盖最旧的文件。max-size-time 和 max-size-bytes 属性分别用来设置文件的最大时间和大小限制。max-files 属性允许你指定要保留在磁盘上的最大文件数。当达到此最大值时,最旧的文件将开始被删除以为新文件腾出空间。
四. 效果
五. 完整代码:
#include <QApplication>
#include <QWidget>
#include <QtConcurrent/QtConcurrent>
#include <gst/gst.h>
#include <glib.h>
#include <gst/video/videooverlay.h>
#include "ui_mainwindow.h"
// Receives an RTP/H.264 stream over UDP, displays it in a Qt window, and
// simultaneously records it via a tee into 90-second AVI fragments using
// splitmuxsink (at most 20 files kept on disk).
int main(int argc, char *argv[]) {
    // Fragment file-name pattern for splitmuxsink; %02d becomes the fragment
    // index. Must be a plain C string: passing a QString object through
    // g_object_set()'s varargs is undefined behavior.
    // NOTE(review): avimux produces an AVI container, so a ".avi" suffix would
    // be more accurate than ".mp4" — kept as-is to preserve output file names.
    const char *filename = "video%02d.mp4";

    // Initialize the Qt application (owns the event loop and the video window).
    QApplication a(argc, argv);

    // Display-branch elements.
    GstElement *pipeline, *udpsrc, *capsfilter, *disp_queue, *rtph264depay, *h264parse, *avdec_h264, *videoconvert, *vsink;
    GstCaps *caps;
    // Recording-branch elements.
    GstElement *tee, *record_queue, *video_rate, *avi_mux, *jpeg_enc, *file_sink, *splitmuxsink;
    GstPad *tee_record_pad, *queue_record_pad;
    GstStateChangeReturn ret;

    // Create the render window and grab its native handle for the video overlay.
    QWidget *window = new QWidget();
    window->resize(1920, 1080);
    window->show();
    WId xwinid = window->winId();

    // Initialize GStreamer.
    gst_init(NULL, NULL);

    // Display path: udpsrc ! capsfilter ! rtph264depay ! h264parse !
    //               avdec_h264 ! videoconvert ! tee ! queue ! xvimagesink
    pipeline = gst_pipeline_new("my-pipeline");
    udpsrc = gst_element_factory_make("udpsrc", "udpsrc");
    capsfilter = gst_element_factory_make("capsfilter", "capsfilter");
    disp_queue = gst_element_factory_make("queue", "disp_queue");
    tee = gst_element_factory_make("tee", "tee");
    h264parse = gst_element_factory_make("h264parse", "h264parse");
    avdec_h264 = gst_element_factory_make("avdec_h264", "avdec_h264");
    rtph264depay = gst_element_factory_make("rtph264depay", "rtph264depay");
    videoconvert = gst_element_factory_make("videoconvert", "videoconvert");
    vsink = gst_element_factory_make("xvimagesink", "vsink"); // alternative: glimagesink

    // Recording path: tee ! queue ! videorate ! jpegenc ! splitmuxsink(avimux)
    record_queue = gst_element_factory_make("queue", "record_queue");
    video_rate = gst_element_factory_make("videorate", "video_rate");
    jpeg_enc = gst_element_factory_make("jpegenc", "jpeg_enc");
    avi_mux = gst_element_factory_make("avimux", "avi_mux");
    splitmuxsink = gst_element_factory_make("splitmuxsink", "splitmuxsink");
    file_sink = gst_element_factory_make("filesink", "file_sink");

    // Verify element creation BEFORE configuring anything; the original check
    // ran after configuration and omitted the recording-branch elements.
    if (!pipeline || !udpsrc || !capsfilter || !disp_queue || !rtph264depay ||
        !h264parse || !avdec_h264 || !videoconvert || !vsink || !tee ||
        !record_queue || !video_rate || !jpeg_enc || !avi_mux ||
        !splitmuxsink || !file_sink) {
        g_printerr("Failed to create elements. Exiting.\n");
        return -1;
    }

    // Configure splitmuxsink: fragments of at most 90 seconds, keep at most 20
    // files on disk (oldest fragments deleted first once the limit is reached).
    g_object_set(G_OBJECT(splitmuxsink),
                 "muxer", avi_mux,
                 "sink", file_sink,
                 "max-size-time", (guint64)90 * GST_SECOND,
                 "max-files", 20,
                 "location", filename, NULL);

    // Listen for the RTP stream on UDP port 25600 and constrain the caps to
    // RTP-wrapped H.264 video at a 90 kHz clock.
    g_object_set(udpsrc, "port", 25600, NULL);
    caps = gst_caps_new_simple("application/x-rtp",
                               "media", G_TYPE_STRING, "video",
                               "clock-rate", G_TYPE_INT, 90000,
                               "encoding-name", G_TYPE_STRING, "H264",
                               NULL);
    g_object_set(capsfilter, "caps", caps, NULL);
    gst_caps_unref(caps);

    // Build and link the display branch.
    gst_bin_add_many(GST_BIN(pipeline), udpsrc, capsfilter, disp_queue,
                     rtph264depay, h264parse, avdec_h264, videoconvert,
                     vsink, tee, NULL);
    if (gst_element_link_many(udpsrc, capsfilter, rtph264depay, h264parse,
                              avdec_h264, videoconvert, tee, disp_queue,
                              vsink, NULL) != TRUE) {
        g_printerr("Main elements could not be linked.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    // Build and link the recording branch; sync the queue's state with the
    // pipeline so it follows later state changes.
    gst_bin_add_many(GST_BIN(pipeline), record_queue, video_rate, jpeg_enc,
                     splitmuxsink, NULL);
    gst_element_sync_state_with_parent(record_queue);
    if (gst_element_link_many(record_queue, video_rate, jpeg_enc,
                              splitmuxsink, NULL) != TRUE) {
        g_printerr("Recording elements could not be linked.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    // Manually link the tee, whose src pads are "Request" pads.
    tee_record_pad = gst_element_get_request_pad(tee, "src_%u");
    gchar *pad_name = gst_pad_get_name(tee_record_pad); // allocated — must be freed
    g_print("Obtained request pad %s for record branch.\n", pad_name);
    g_free(pad_name);
    queue_record_pad = gst_element_get_static_pad(record_queue, "sink");
    if (!queue_record_pad) g_printerr("queue_record_pad could not be obtained.\n");
    if (gst_pad_link(tee_record_pad, queue_record_pad) != GST_PAD_LINK_OK) {
        g_printerr("Tee record branch could not be linked.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    // Queue limits: unlimited buffer count and time, capped at ~512 MB so a
    // stalled recording branch cannot grow without bound.
    g_object_set(G_OBJECT(record_queue), "max-size-buffers", 0, NULL);
    // "max-size-time" is a guint64 property: the cast is required because
    // g_object_set() collects varargs by the property's type, and passing a
    // plain int for a 64-bit property is undefined behavior.
    g_object_set(G_OBJECT(record_queue), "max-size-time", (guint64)0, NULL);
    g_object_set(G_OBJECT(record_queue), "max-size-bytes", 512000000, NULL);
    // Drop the static-pad reference (the tee request pad stays held until teardown).
    gst_object_unref(queue_record_pad);

    // Render the decoded video into the Qt widget.
    gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(vsink), xwinid);

    // Start the pipeline.
    ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Failed to set pipeline state to PLAYING. Exiting.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    // Run the Qt event loop until the application quits.
    auto res = a.exec();

    // Teardown: stop the pipeline, give the request pad back to the tee, then
    // drop our references. (The original called g_free() on an undeclared
    // `videofilename` variable here — a compile error.)
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_element_release_request_pad(tee, tee_record_pad);
    gst_object_unref(tee_record_pad);
    gst_object_unref(pipeline);
    return res;
}