本文讲述如何利用FFmpeg SDK与OpenCV 从RTSP流中获取图像(OpenCV MAT 对象格式)。
一,构造RTSP视频流
因为是在本机实验,所以我自己构造了一个RTSP流。如果你有现成的RTSP流也可以的。
实验用的源视频是黑神话·悟空的《云宫讯音》。
自己产生一个RTSP视频流需要一个RTSP流媒体服务器。目前常用的RTSP流媒体服务器有两个:ZLMediaKit和MediaMTX。ZLMediaKit需要自行编译(我已经编译好了),MediaMTX则下载即用。这里我选择了MediaMTX,因为它开箱即用,比较轻量化。
1,启动流媒体服务器
在MediaMTX的安装目录下启动即可。
2,启动FFmpeg推流
ffmpeg -re -stream_loop -1 -i "wukong.mp4" -vcodec copy -acodec copy -f rtsp -rtsp_transport tcp rtsp://192.168.76.189:8554/live/test
这是流的相关信息:
Input #0, rtsp, from 'rtsp://192.168.76.189:8554/live/test':
Metadata:
title : No Name
Duration: N/A, start: 0.000000, bitrate: N/A
Stream #0:0: Video: h264 (Main), yuv420p(tv, bt709, progressive), 1920x1080 [SAR 1:1 DAR 16:9], 30 fps, 30 tbr, 90k tbn
Stream #0:1: Audio: aac (LC), 44100 Hz, stereo, fltp
3,用FFplay播放一下
二,源代码
#include <stdio.h>
#include <thread>
#include <string>
//#include <io.h>
//#include <vector>
//#define __STDC_CONSTANT_MACROS
#include <iostream>
using namespace std;
extern "C"
{
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libavutil/time.h"
//#include "libavutil/log.h"
//#include "libavutil/mathematics.h"
#include "libswscale/swscale.h"
//#include "libavutil/imgutils.h"
}
#include <opencv2/opencv.hpp>
using namespace cv;
int main(int argc, char* argv[])
{
Mat pCvMat;
//const string sourceWindow = "test";
//namedWindow(sourceWindow,1);
//cout<<avcodec_version()<<endl;
//cout<<FFMPEG_VERSION<<endl;
unsigned codecVer = avcodec_version();
int ver_major, ver_minor, ver_micro;
ver_major = (codecVer >> 16) & 0xff;
ver_minor = (codecVer >> 8) & 0xff;
ver_micro = (codecVer) & 0xff;
printf("Current ffmpeg version is: ,avcodec version is: %d=%d.%d.%d\n",
codecVer, ver_major, ver_minor, ver_micro);
int startTime = 0; // 记录播放开始
int currentFrame = 0; // 当前帧序号
double fps = 0; // 帧率
double interval = 0; // 帧间隔
const AVInputFormat* p_ifmt_v = NULL;
// ffmpeg相关变量预先定义与分配
AVFormatContext* pAVFormatContext = 0; // ffmpeg的全局上下文,所有ffmpeg操作都需要
AVStream* pAVStream = 0; // ffmpeg流信息
AVCodecContext* pAVCodecContext = avcodec_alloc_context3(NULL); // ffmpeg编码上下文
const AVCodec* pAVCodec = 0; // ffmpeg编码器
AVPacket* pAVPacket = 0; // ffmpag单帧数据包
AVFrame* pAVFrame = 0; // ffmpeg单帧缓存
//AVFrame *pAVFrameBGR24 = 0; // ffmpeg单帧缓存转换颜色空间后的缓存
struct SwsContext* pSwsContext = 0; // ffmpeg编码数据格式转换
AVDictionary* pAVDictionary = 0; // ffmpeg数据字典,用于配置一些编码器属性等
//unsigned char * outBuffer = 0; // 解码后的数据存放缓存区
//const char* url_v = "video.sdp";
int ret = 0; // 函数执行结果
int videoIndex = -1; // 音频流所在的序号
//int numBytes = 0; // 解码后的数据长度
pAVFormatContext = avformat_alloc_context(); // 分配
//pAVFormatContext->flags |= AVFMT_NOFILE;
//添加白名单,这里很重要,如果不申请内存,在avformat_close_input中会宕
//pAVFormatContext->protocol_whitelist = (char*)av_malloc(sizeof("file,udp,rtp"));
//memcpy(pAVFormatContext->protocol_whitelist, "file,udp,rtp", sizeof("file,udp,rtp"));
//pAVPacket = av_packet_alloc(); // 分配
pAVPacket = (AVPacket*)av_malloc(sizeof(AVPacket));
pAVFrame = av_frame_alloc(); // 分配
//pAVFrameBGR24 = av_frame_alloc(); // 分配
p_ifmt_v = av_find_input_format("rtsp");
//if(!pAVFormatContext || !pAVPacket || !pAVFrame || !pAVFrameBGR24)
if (!pAVFormatContext || !pAVPacket || !pAVFrame)
{
cout << "Failed to alloc" << endl;
system("pause");
return -1;
}
// 步骤一:注册所有容器和编解码器(也可以只注册一类,如注册容器、注册编码器等)
//av_register_all();
avformat_network_init();
// 步骤二:打开文件(ffmpeg成功则返回0)
//std::string src = "rtsp://192.168.76.215/live/test";
std::string src = argv[1];
cout << "打开:" << src << endl;
AVDictionary* format_opts = NULL;
av_dict_set(&format_opts, "rtsp_transport", "tcp", 0); //设置推流的方式,默认udp。
ret = avformat_open_input(&pAVFormatContext, src.c_str(), p_ifmt_v, &format_opts);//nullptr
//ret = avformat_open_input(&pAVFormatContext, src.c_str(), p_ifmt_v, nullptr);
if (ret)
{
cout << "Failed" << endl;
system("pause");
return -1;
}
// 步骤三:探测流媒体信息
ret = avformat_find_stream_info(pAVFormatContext, 0);
if (ret < 0)
{
cout << "Failed to avformat_find_stream_info(pAVFormatContext, 0)" << endl;
system("pause");
return -1;
}
printf("******************\n");
av_dump_format(pAVFormatContext, 0, src.c_str(), 0);
printf("******************\n");
// 步骤四:提取流信息,提取视频信息
for (int index = 0; index < pAVFormatContext->nb_streams; index++)
{
if (pAVFormatContext->streams[index]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
{
cout << "流序号:" << index << "\n类型为:" << "AVMEDIA_TYPE_VIDEO" << endl;
//pAVCodecContext = pAVFormatContext->streams[index]->codecpar->;
avcodec_parameters_to_context(pAVCodecContext, pAVFormatContext->streams[index]->codecpar);
pAVStream = pAVFormatContext->streams[index];
videoIndex = index;
break;
}
}
if (videoIndex == -1 || !pAVCodecContext)
{
cout << "Failed to find video stream" << endl;
system("pause");
return -1;
}
// 步骤五:对找到的视频流寻解码器
pAVCodec = avcodec_find_decoder(pAVCodecContext->codec_id);
if (!pAVCodec)
{
cout << "Fialed to avcodec_find_decoder(pAVCodecContext->codec_id):"
<< pAVCodecContext->codec_id << endl;
system("pause");
return -1;
}
// 步骤六:打开解码器
// 设置缓存大小 1024000byte
av_dict_set(&pAVDictionary, "buffer_size", "8192000", 0);
// 设置超时时间 20s
av_dict_set(&pAVDictionary, "stimeout", "20000000", 0);
// 设置最大延时 3s
av_dict_set(&pAVDictionary, "max_delay", "30000000", 0);
// 设置打开方式 tcp/udp
av_dict_set(&pAVDictionary, "rtsp_transport", "tcp", 0);
//ret = avcodec_open2(pAVCodecContext, pAVCodec, &pAVDictionary);
ret = avcodec_open2(pAVCodecContext, pAVCodec, &pAVDictionary);
if (ret)
{
cout << "Failed to avcodec_open2(pAVCodecContext, pAVCodec, pAVDictionary)" << endl;
system("pause");
return -1;
}
// 显示视频相关的参数信息(编码上下文)
cout << "比特率:" << pAVCodecContext->bit_rate << endl;
cout << "宽高:" << pAVCodecContext->width << "x" << pAVCodecContext->height << endl;
cout << "格式:" << pAVCodecContext->pix_fmt << endl; // AV_PIX_FMT_YUV420P 0
cout << "帧率分母:" << pAVCodecContext->time_base.den << endl;
cout << "帧率分子:" << pAVCodecContext->time_base.num << endl;
cout << "帧率分母:" << pAVStream->avg_frame_rate.den << endl;
cout << "帧率分子:" << pAVStream->avg_frame_rate.num << endl;
cout << "总时长:" << pAVStream->duration / 10000.0 << "s" << endl;
cout << "总帧数:" << pAVStream->nb_frames << endl;
// 有总时长的时候计算帧率(较为准确)
// fps = pAVStream->nb_frames / (pAVStream->duration / 10000.0);
// interval = pAVStream->duration / 10.0 / pAVStream->nb_frames;
// 没有总时长的时候,使用分子和分母计算
fps = pAVStream->avg_frame_rate.num * 1.0f / pAVStream->avg_frame_rate.den;
interval = 1 * 1000 / fps;
cout << "平均帧率:" << fps << endl;
cout << "帧间隔:" << interval << "ms" << endl;
// 步骤七:对拿到的原始数据格式进行缩放转换为指定的格式高宽大小
pAVCodecContext->pix_fmt = (AVPixelFormat)AV_PIX_FMT_YUV420P;
pSwsContext = sws_getContext(pAVCodecContext->width,
pAVCodecContext->height,
pAVCodecContext->pix_fmt,
pAVCodecContext->width,
pAVCodecContext->height,
AV_PIX_FMT_BGR24,
SWS_FAST_BILINEAR,
0,
0,
0);
cout << "sws_getContext!" << endl;
pCvMat = Mat(pAVCodecContext->height, pAVCodecContext->width, CV_8UC3);
int cvLinesizes[1];
cvLinesizes[0] = pCvMat.step1();
// 步骤八:读取一帧数据的数据包
//av_free_packet(pAVPacket);
//av_packet_unref(pAVPacket);
int frame_count = 0;
//cout << "now av_read_frame!" << endl;
//namedWindow("img", 0);
while (av_read_frame(pAVFormatContext, pAVPacket) >= 0)
{
if (pAVPacket->stream_index == videoIndex)
{
//cout << "av_read_frame!" << endl;
// 步骤八:对读取的数据包进行解码
ret = avcodec_send_packet(pAVCodecContext, pAVPacket);
//ret = avcodec_decode_video2(pAVCodecContext, pAVFrame, &got_picture, pAVPacket);
if (ret)
{
cout << "Failed to avcodec_send_packet(pAVCodecContext, pAVPacket) ,ret ="
<< ret << endl;
break;
}
//cout << "avcodec_send_packet!" << endl;
av_frame_unref(pAVFrame);
while (!avcodec_receive_frame(pAVCodecContext, pAVFrame))
{
//pCvMat.data=NULL;
sws_scale(pSwsContext,
(const uint8_t* const*)pAVFrame->data,
pAVFrame->linesize,
0,
pAVCodecContext->height,
&pCvMat.data,
cvLinesizes);
//cout << "avcodec_receive_frame!" << endl;
frame_count++;
if (frame_count % int(25) == 0) {
//sprintf()
//char * temp;
//itoa(frame_count,temp,10);
//string sss=temp;
imwrite("./image/test_" + std::to_string(frame_count) + ".bmp", pCvMat);
//imshow("img", pCvMat);
cout << "SAVE: " << std::to_string(frame_count) << endl;
}
//threshold(pCvMat,pCvMat,128,255,THRESH_BINARY);
//imshow(sourceWindow,pCvMat);
//waitKey(1);
av_frame_unref(pAVFrame);
}
// 下一帧
currentFrame++;
while (av_gettime() - startTime < currentFrame * interval)
{
//Sleep(1);
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
//cout << "current:" << currentFrame <<"," << time << av_gettime()- startTime <<endl;
}
//av_free_packet(pAVPacket);
av_packet_unref(pAVPacket);
}
cout << "释放回收资源" << endl;
if (pSwsContext)
{
sws_freeContext(pSwsContext);
pSwsContext = 0;
cout << "sws_freeContext(pSwsContext)" << endl;
}
if (pAVFrame)
{
av_frame_free(&pAVFrame);
pAVFrame = 0;
cout << "av_frame_free(pAVFrame)" << endl;
}
if (pAVPacket)
{
//av_free_packet(pAVPacket);
pAVPacket = 0;
cout << "av_free_packet(pAVPacket)" << endl;
}
if (pAVCodecContext)
{
avcodec_free_context(&pAVCodecContext);
avcodec_close(pAVCodecContext);
pAVCodecContext = 0;
cout << "avcodec_close(pAVCodecContext);" << endl;
}
if (pAVFormatContext)
{
avformat_close_input(&pAVFormatContext);
avformat_free_context(pAVFormatContext);
pAVFormatContext = 0;
cout << "avformat_free_context(pAVFormatContext)" << endl;
}
pCvMat = NULL;
system("pause");
return 0;
}
三,运行一下
TestFFmpegOpencv_RTSP.exe rtsp://192.168.76.189:8554/live/test
四,结果
提取出的帧结果保存在image文件夹下: