1.概述
主要功能是通过live555 testRTSPClient 简单封装的rtsp客户端库,拉取RTSP流,然后通过3403的VDEC模块进行解码,送给NPU进行目标检测,输出到hdmi,这样保证了开发时没有sensor的情况下也可以识别其它摄像头的视频流;
2.如何搭建一个RTSPServer
2.1使用live555 mediaServer搭建rtspServer
#这里可以去http://live555.com/官网查看
wget http://live555.com/liveMedia/public/live.2024.10.31.tar.gz
tar xvzf live.2024.10.31.tar.gz
cd live
./genMakefiles linux-no-std-lib
make -j
cd mediaServer
./live555MediaServer
执行的效果如下,默认支持的文件list如下,只需要把对应类型的文件复制到mediaServer目录即可,可以用vlc测试是否正常
LIVE555 Media Server
version 1.13 (LIVE555 Streaming Media library version 2024.10.31).
Play streams from this server using the URL
rtsp://192.168.8.8:8554/<filename>
where <filename> is a file present in the current directory.
Each file's type is inferred from its name suffix:
".264" => a H.264 Video Elementary Stream file
".265" => a H.265 Video Elementary Stream file
".aac" => an AAC Audio (ADTS format) file
".ac3" => an AC-3 Audio file
".amr" => an AMR Audio file
".dv" => a DV Video file
".m4e" => a MPEG-4 Video Elementary Stream file
".mkv" => a Matroska audio+video+(optional)subtitles file
".mp3" => a MPEG-1 or 2 Audio file
".mpg" => a MPEG-1 or 2 Program Stream (audio+video) file
".ogg" or ".ogv" or ".opus" => an Ogg audio and/or video file
".ts" => a MPEG Transport Stream file
(a ".tsx" index file - if present - provides server 'trick play' support)
".vob" => a VOB (MPEG-2 video with AC-3 audio) file
".wav" => a WAV Audio file
".webm" => a WebM audio(Vorbis)+video(VP8) file
See http://www.live555.com/mediaServer/ for additional documentation.
(We use port 8000 for optional RTSP-over-HTTP tunneling).)
2.2 ffmpeg转264文件
如果是mp4文件可以用ffmpeg简单的提取264文件,命令如下
#sudo apt-get install ffmpeg
ffmpeg -i input.mp4 -an -codec:v copy output.264
3.封装RtspClient
代码参考live/testProgs/testRTSPClient.cpp
需要注意Nal头即可,不同RtspServer会有不同的发包方式
// live555 per-frame callback: invoked once for every complete frame/NALU the
// sink receives (see live/testProgs/testRTSPClient.cpp for the full sink).
void DummySink::afterGettingFrame(unsigned frameSize,
unsigned numTruncatedBytes,
struct timeval presentationTime,
unsigned /*durationInMicroseconds*/)
{
// frameSize: size in bytes of the frame just received
// u_int8_t *fReceiveBuffer; (sink member) holds the frame payload
// struct timeval presentationTime is the PTS of the current frame
}
完整代码参考开源仓库下面目录
//thridpart/live555/librtspclient.h
RTSPCLI_API int MyRTSP_Init(RTSP_Handle** handle);/* out: client handle; returns 0 on success, non-zero on failure */
RTSPCLI_API int MyRTSP_Deinit(RTSP_Handle* handle);/* release the RTSP client; takes the handle returned by MyRTSP_Init */
RTSPCLI_API int MyRTSP_OpenStream(RTSP_Handle* handle, const char* _url, EASY_RTP_CONNECT_TYPE _connType,int _reconn/* 1000 = persistent connection (auto-reconnect on network loss); any other value = number of connect attempts */);/* open the network stream */
RTSPCLI_API int MyRTSP_SetCallback(RTSP_Handle* handle,RTSPSourceCallBack _callback, void* userptr);/* set the frame-data callback */
RTSPCLI_API int MyRTSP_Run(RTSP_Handle* handle);
RTSPCLI_API int MyRTSP_CloseStream(RTSP_Handle* handle);/* close the network stream */
4.整合数据给VDEC
#include "librtspclient.h"
RTSP_Handle* hRTSPHandle_;          /* RTSP client handle (created in RtspStart) */
int RtspRunnig_ = 0;                /* worker-thread run flag (sic: "Runnig"); 1 while pulling */
pthread_t rtsp_thd_;                /* worker thread driving MyRTSP_Run */
void *pRtspFrame = NULL;            /* 1 MiB scratch buffer for one assembled access unit */
static char sps[32];                /* cached SPS NALU (with start code), prepended to each IDR */
static char pps[32];                /* cached PPS NALU (with start code), prepended to each IDR */
static int spslen = 0;              /* valid bytes in sps[] */
static int ppslen = 0;              /* valid bytes in pps[] */
static int initvpss = 0;            /* set once the VPSS group is created from the 1st decoded frame */
/*
 * Frame callback from the RTSP client (registered via MyRTSP_SetCallback).
 *
 * Rebuilds a complete Annex-B H.264 access unit for the VDEC module:
 *   I-frame: [startcode+SPS][startcode+PPS][startcode+IDR slice]
 *   P-frame: [startcode+P slice]
 * Each NALU delivered by the client already carries its 00 00 00 01 start
 * code, so only concatenation is needed here. Different RTSP servers
 * packetize differently — debug this assembly first if decode fails.
 *
 * Returns 0 on success, -1 when a NALU/frame had to be dropped.
 */
static int RTSPSourceCall(EASY_FRAME_INFO_T frameinfo, void* userdata)
{
    /* Capacity of pRtspFrame — must match the malloc() in RtspStart(). */
    const uint32_t bufcap = 1024u * 1024u;
    uint8_t *dst = (uint8_t *)pRtspFrame;   /* avoid arithmetic on void* (GNU extension) */

    if (dst == NULL) {
        return -1;  /* RtspStart() not called or its allocation failed */
    }

    if (frameinfo.NaluType == 0x07) {       /* SPS: cache for the next IDR */
        if ((uint32_t)frameinfo.framesize > sizeof(sps)) {
            printf("SPS too large (%u bytes), dropped\n", (unsigned)frameinfo.framesize);
            return -1;
        }
        memcpy(sps, frameinfo.framebuff, frameinfo.framesize);
        spslen = frameinfo.framesize;
        return 0;
    }
    if (frameinfo.NaluType == 0x08) {       /* PPS: cache for the next IDR */
        if ((uint32_t)frameinfo.framesize > sizeof(pps)) {
            printf("PPS too large (%u bytes), dropped\n", (unsigned)frameinfo.framesize);
            return -1;
        }
        memcpy(pps, frameinfo.framebuff, frameinfo.framesize);
        ppslen = frameinfo.framesize;
        return 0;
    }

    uint32_t len = 0;
    if (frameinfo.bIFrame) {
        /* Prepend cached SPS/PPS so the decoder can (re)sync on every IDR. */
        if ((uint32_t)spslen + (uint32_t)ppslen + (uint32_t)frameinfo.framesize > bufcap) {
            printf("I-frame too large (%u bytes), dropped\n", (unsigned)frameinfo.framesize);
            return -1;
        }
        memcpy(dst, sps, spslen);
        len += spslen;
        memcpy(dst + len, pps, ppslen);
        len += ppslen;
        memcpy(dst + len, frameinfo.framebuff, frameinfo.framesize);
        len += frameinfo.framesize;
    } else {
        if ((uint32_t)frameinfo.framesize > bufcap) {
            printf("P-frame too large (%u bytes), dropped\n", (unsigned)frameinfo.framesize);
            return -1;
        }
        memcpy(dst, frameinfo.framebuff, frameinfo.framesize);
        len = frameinfo.framesize;
    }

    /* Hand the assembled access unit to the hardware decoder. */
    ot_vdec_stream stream;
    ot_vdec_chn vdecchn = 0;
    td_s32 milli_sec = 40;
    ot_vpss_grp grp = 0;
    ss_mpi_sys_get_cur_pts(&stream.pts);
    stream.addr = pRtspFrame;
    stream.len = len;
    stream.end_of_frame = TD_TRUE;
    stream.end_of_stream = TD_FALSE;
    stream.need_display = TD_TRUE;
    ss_mpi_vdec_send_stream(vdecchn, &stream, -1);  /* -1: block until accepted */

    ot_video_frame_info frame_info;
    ot_vdec_supplement_info supplement;
    /* BUG FIX: original used frame_info even when get_frame failed, reading an
     * uninitialized struct. Assumes 0 == success, matching other ss_mpi_* calls
     * here — NOTE(review): confirm against the MPI headers. */
    if (ss_mpi_vdec_get_frame(vdecchn, &frame_info, &supplement, milli_sec) != 0) {
        return 0;  /* no decoded frame ready yet */
    }
    if (initvpss == 0 && frame_info.video_frame.width > 0) {
        /* Create the VPSS group lazily, once the first decoded frame reveals
         * the actual stream resolution. */
        ot_size in_size;
        in_size.width = frame_info.video_frame.width;
        in_size.height = frame_info.video_frame.height;
        /* BUG FIX: original format string "vpss init W: H:%d" had one
         * conversion for two arguments (undefined behavior). */
        printf("vpss init W:%d H:%d\n", in_size.width, in_size.height);
        sample_vio_start_vpss(grp, &in_size);
        initvpss = 1;
    }
    ss_mpi_vpss_send_frame(grp, &frame_info, milli_sec);
    ss_mpi_vdec_release_frame(vdecchn, &frame_info);
    return 0;
}
/* Worker thread entry: drives the RTSP client event loop until RtspStop()
 * clears the run flag, then announces completion and exits. */
void* RtspProcess(void* args) {
    for (;;) {
        if (!RtspRunnig_) {
            break;
        }
        MyRTSP_Run(hRTSPHandle_);
    }
    printf("Rtsp thread Finish\n");
    return NULL;
}
/*
 * Start pulling the given RTSP URL: allocate the frame-assembly buffer,
 * create the client, register the VDEC-feeding callback, open the stream
 * (RTP over TCP, no reconnect), and spawn the worker thread.
 *
 * BUG FIX: the original checked neither malloc(), MyRTSP_Init(), nor
 * pthread_create(); any failure led to NULL dereference or a dead pipeline.
 */
void RtspStart(const char* url)
{
    pRtspFrame = malloc(1024 * 1024);   /* one access unit; see RTSPSourceCall */
    if (pRtspFrame == NULL) {
        printf("RtspStart: out of memory\n");
        return;
    }
    if (MyRTSP_Init(&hRTSPHandle_) != 0) {   /* 0 == success per librtspclient.h */
        printf("RtspStart: MyRTSP_Init failed\n");
        free(pRtspFrame);
        pRtspFrame = NULL;
        return;
    }
    MyRTSP_SetCallback(hRTSPHandle_, RTSPSourceCall, NULL);
    MyRTSP_OpenStream(hRTSPHandle_, url, EASY_RTP_OVER_TCP, 0);
    RtspRunnig_ = 1;
    if (pthread_create(&rtsp_thd_, NULL, RtspProcess, NULL) != 0) {
        printf("RtspStart: pthread_create failed\n");
        RtspRunnig_ = 0;
    }
}
/*
 * Stop the RTSP pull thread and release resources.
 *
 * BUG FIX: the original called MyRTSP_Deinit() BEFORE pthread_join(), so the
 * worker could still be inside MyRTSP_Run() on a freed handle (use-after-free
 * race). Correct order: clear the run flag, close the stream (so a blocking
 * MyRTSP_Run() can return — NOTE(review): confirm CloseStream unblocks Run),
 * join the worker, then destroy the handle and free the buffer.
 */
void RtspStop()
{
    RtspRunnig_ = 0;                    /* ask the worker loop to exit */
    MyRTSP_CloseStream(hRTSPHandle_);   /* stop the stream / unblock the loop */
    pthread_join(rtsp_thd_, NULL);      /* no one touches the handle past here */
    MyRTSP_Deinit(hRTSPHandle_);
    free(pRtspFrame);
    pRtspFrame = NULL;
}
5.完整Demo
后端处理的pipeline参考,直接把vdec流送给vpss,后面npu的部分在以前yolov8_deepsort_mp4分支中查看即可
6.工程代码
6.1代码仓库地址
gitee仓库地址
6.2下载编译代码
git clone -b yolov8_deepsort_rtsp --depth=1 --single-branch https://gitee.com/apchy_ll/ss928_yolov5s.git
cd ss928_yolov5s
./build.sh
cp -rf output ~/work/nfs/3403/
6.3板端运行
./rundemo.sh rtsp://192.168.8.8:8554/output.264
7.教学视频
11 SS928 Yolov8检测RTSP流
8.谢谢
请多多支持!