// Summary: legacy FFmpeg-based decoder. av_register_all() must be called once (before any
// muxer/encoder use) — it is invoked from init(). avformat_open_input() reads the file/stream
// header into the AVFormatContext; passing NULL/0 for format and options lets libavformat
// auto-detect them. An interrupt callback (interrupt_cb) can be registered on the context to
// time out blocking network reads; that hookup is kept commented out in init().
//ffmpegDecode.h
#ifndef __FFMPEG_DECODE_H__
#define __FFMPEG_DECODE_H__
#include "global.h"
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
//图像转换结构需要引入的头文件
#include "libswscale/swscale.h"
};
classffmpegDecode
{
public:
ffmpegDecode(char * file =NULL);
~ffmpegDecode();
cv::Mat getDecodedFrame();
cv::Mat getLastFrame();
intreadOneFrame();
int getFrameIndex(){returnm_framIndex;};
private:
AVFrame *pAvFrame;
AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
inti;
intvideoindex;
intm_framIndex;
char *filepath;
intret, got_picture;
SwsContext *img_convert_ctx;
inty_size;
AVPacket *packet;
cv::Mat *pCvMat;
boolm_initResult;
boolinit();
boolopenDecode();
voidprepare();
void get(AVCodecContext *pCodecCtx, SwsContext *img_convert_ctx,AVFrame *pFrame);
public:
boolgetInitResult();
};
#endif
//ffmpegDecode.cpp
#include "ffmpegDecode.h"
#include <QDebug>
int time_out = 0;
int firsttimeplay = 1;

// AVFormatContext interrupt callback: polled by FFmpeg during blocking I/O.
// Counts polls; after 40 polls on the very first play it returns -1 once to
// abort the blocking operation (timeout), then always returns 0.
int interrupt_cb(void *ctx)
{
    ++time_out;
    if (time_out <= 40)
        return 0;

    time_out = 0;
    if (!firsttimeplay)
        return 0;

    firsttimeplay = 0;
    return -1; // signal FFmpeg to abort (timeout on first play)
}
// Releases everything acquired by init()/openDecode()/prepare().
// Fixes: pCvMat was released twice but never deleted (leak); the AVPacket
// struct itself (av_malloc'd in prepare) was never freed; pAvFrame and the
// SwsContext were leaked; no null guards, so a failed init() crashed here.
ffmpegDecode :: ~ffmpegDecode()
{
    if (pCvMat)
    {
        pCvMat->release();
        delete pCvMat;
        pCvMat = NULL;
    }
    if (packet)
    {
        av_free_packet(packet); // free packet payload
        av_free(packet);        // free the AVPacket struct allocated in prepare()
        packet = NULL;
    }
    if (pAvFrame)
    {
        av_frame_free(&pAvFrame);
    }
    if (img_convert_ctx)
    {
        sws_freeContext(img_convert_ctx);
        img_convert_ctx = NULL;
    }
    if (pCodecCtx)
    {
        avcodec_close(pCodecCtx);
    }
    if (pFormatCtx)
    {
        avformat_close_input(&pFormatCtx);
    }
}
// Constructs the decoder and immediately opens + prepares the input.
// Success is reported through getInitResult().
// Fixes: "newcv::Mat()" did not compile; assigning a string literal to a
// non-const char* is ill-formed in C++11+ (explicit cast added).
ffmpegDecode :: ffmpegDecode(char *file)
{
    firsttimeplay = 1;
    pAvFrame = NULL;
    pFormatCtx = NULL;
    pCodecCtx = NULL;
    pCodec = NULL;
    pCvMat = new cv::Mat();
    i = 0;
    videoindex = 0;
    m_framIndex = 0;
    ret = 0;
    got_picture = 0;
    img_convert_ctx = NULL;
    y_size = 0;
    packet = NULL;
    if (NULL == file)
    {
        // Default camera URL used when no input is supplied.
        filepath = (char *)"rtsp://admin:admin123@192.168.10.239:554";
    }
    else
    {
        // NOTE(review): only the pointer is stored — the caller must keep the
        // string alive for this object's lifetime.
        filepath = file;
    }
    m_initResult = false;
    if (init())
    {
        if (openDecode())
        {
            prepare();
            m_initResult = true;
        }
    }
}
boolffmpegDecode::getInitResult()
{
returnm_initResult;
}
boolffmpegDecode :: init()
{
printf("init start...
");
//ffmpeg注册复用器,编码器等的函数av_register_all()。
//该函数在所有基于ffmpeg的应用程序中几乎都是第一个被调用的。只有调用了该函数,才能使用复用器,编码器等。
//这里注册了所有的文件格式和编解码器的库,所以它们将被自动的使用在被打开的合适格式的文件上。注意你只需要调用 av_register_all()一次,因此我们在主函数main()中来调用它。如果你喜欢,也可以只注册特定的格式和编解码器,但是通常你没有必要这样做。
av_register_all();
avformat_network_init();
//pFormatCtx = avformat_alloc_context();
//pFormatCtx->interrupt_callback.callback = interrupt_cb;//--------注册回调函数
//pFormatCtx->interrupt_callback.opaque = pFormatCtx;
//打开视频文件,通过参数filepath来获得文件名。这个函数读取文件的头部并且把信息保存到我们给的AVFormatContext结构体中。
//最后2个参数用来指定特殊的文件格式,缓冲大小和格式参数,但如果把它们设置为空NULL或者0,libavformat将自动检测这些参数。
AVDictionary* options =NULL;
av_dict_set(&options, "rtsp_transport", "tcp", 0);
av_dict_set(&options, "stimeout", "2000000", 0); //设置超时断开连接时间,单位微秒
if(avformat_open_input(&pFormatCtx,filepath,NULL,&options)!=0)
{
printf("无法打开文件
");
return false;
}
//查找文件的流信息,avformat_open_input函数只是检测了文件的头部,接着要检查在文件中的流的信息
if(avformat_find_stream_info(pFormatCtx,&options)<0)
{
printf("Couldn't find stream information.
");
return false;
}
printf("init finished...
");
return true;
}
boolffmpegDecode :: openDecode()
{
printf("openDecode start...
");
//在库里面查找支持该格式的解码器
videoindex = -1;
for(i=0; i<pFormatCtx->nb_streams; i++)
{
if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
{
videoindex=i;
break;
}
}
if(videoindex==-1)
{
printf("Didn't find a video stream.
");
return false;
}
pCodecCtx=pFormatCtx->streams[videoindex]->codec;
//在库里面查找支持该格式的解码器
pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL)
{
printf("Codec not found.
");
return false;
}
//打开解码器
if(avcodec_open2(pCodecCtx, pCodec,NULL) < 0)
{
printf("Could not open codec.
");
return false;
}
printf("openDecode finished
");
return true;
}
voidffmpegDecode :: prepare()
{
//printf("prepare int
");
//分配一个帧指针,指向解码后的原始帧
pAvFrame=av_frame_alloc();
y_size = pCodecCtx->width * pCodecCtx->height;
//分配帧内存
packet=(AVPacket *)av_malloc(sizeof(AVPacket));
av_new_packet(packet, y_size);
//输出一下信息-----------------------------
printf("文件信息-----------------------------------------
");
av_dump_format(pFormatCtx,0,filepath,0);
//av_dump_format只是个调试函数,输出文件的音、视频流的基本信息了,帧率、分辨率、音频采样等等
//printf("prepare out
");
}
intffmpegDecode :: readOneFrame()
{
int result = 0;
pCvMat->release();
result =av_read_frame(pFormatCtx, packet);
returnresult;
}
// Reads one packet, decodes it if it belongs to the video stream, converts the
// picture to BGR and returns it. Returns an empty Mat when nothing was decoded.
// Fixes: the result of readOneFrame() was ignored, so on EOF/network error a
// stale packet was re-decoded; the packet was leaked on the decode-error
// return path; "returncv::Mat()" missing space (compile error).
cv::Mat ffmpegDecode :: getDecodedFrame()
{
    if (readOneFrame() < 0)
    {
        // Demux failed (EOF or I/O error) — nothing to decode.
        return cv::Mat();
    }
    if (packet->stream_index == videoindex)
    {
        // Decode one frame.
        ret = avcodec_decode_video2(pCodecCtx, pAvFrame, &got_picture, packet);
        if (ret < 0)
        {
            printf("解码错误\n");
            av_free_packet(packet); // fix: was leaked on this path
            return cv::Mat();
        }
        if (got_picture)
        {
            m_framIndex++;
            // Lazily create the cached YUV->BGR scaler from the codec parameters.
            if (img_convert_ctx == NULL)
            {
                img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                        pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
                        AV_PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
            }
            // Lazily (re)create the OpenCV output buffer.
            if (pCvMat->empty())
            {
                pCvMat->create(cv::Size(pCodecCtx->width, pCodecCtx->height), CV_8UC3);
            }
            if (img_convert_ctx != NULL)
            {
                get(pCodecCtx, img_convert_ctx, pAvFrame);
            }
        }
    }
    av_free_packet(packet);
    return *pCvMat;
}
// Re-decodes the packet currently held and returns the converted frame.
// Fix: a brand-new SwsContext was created (and leaked) on EVERY call,
// overwriting the cached member; now reuses the cached scaler like
// getDecodedFrame() does.
cv::Mat ffmpegDecode :: getLastFrame()
{
    ret = avcodec_decode_video2(pCodecCtx, pAvFrame, &got_picture, packet);
    if (got_picture)
    {
        // Create the YUV->BGR scaler once; reuse it afterwards.
        if (img_convert_ctx == NULL)
        {
            img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                    pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
                    AV_PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
        }
        if (img_convert_ctx != NULL)
        {
            get(pCodecCtx, img_convert_ctx, pAvFrame);
        }
    }
    return *pCvMat;
}
// Converts the decoded frame (pFrame) to BGR24 and copies it into *pCvMat.
// Fixes: "newuint8_t" missing space (compile error); av_free() on an AVFrame
// leaks its internals — av_frame_free() is the correct release call.
void ffmpegDecode :: get(AVCodecContext *pCodecCtx, SwsContext *img_convert_ctx, AVFrame *pFrame)
{
    if (pCvMat->empty())
    {
        pCvMat->create(cv::Size(pCodecCtx->width, pCodecCtx->height), CV_8UC3);
    }
    // Temporary BGR frame backed by a heap buffer.
    AVFrame *pFrameRGB = av_frame_alloc();
    int size = avpicture_get_size(AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height);
    uint8_t *out_bufferRGB = new uint8_t[size];
    avpicture_fill((AVPicture *)pFrameRGB, out_bufferRGB, AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height);
    // YUV -> BGR conversion.
    sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
              pFrameRGB->data, pFrameRGB->linesize);
    // avpicture_fill packs BGR24 tightly (linesize == width*3) and a freshly
    // created Mat is continuous, so a single memcpy is safe here.
    memcpy(pCvMat->data, out_bufferRGB, size);
    delete[] out_bufferRGB;
    av_frame_free(&pFrameRGB);
}
//crtspdecodethread.h
#ifndef CRTSPDECODETHREAD_H
#define CRTSPDECODETHREAD_H
#include <QThread>
#include <QMutex>
#include "ffmpegDecode.h"
// Worker thread that pulls frames from an RTSP camera (via ffmpegDecode) or a
// local USB camera (via OpenCV), emits each frame to the UI and queues every
// m_detectInterval-th frame for detection.
// Fix: the original declaration was whitespace-mangled ("publicQThread",
// "voidrun", "boolm_isExist", ...) and did not compile.
class CRTSPDecodeThread : public QThread
{
    Q_OBJECT
public:
    CRTSPDecodeThread(QObject *parent);
    ~CRTSPDecodeThread();
    // strURL: "USB" selects the local camera, otherwise an RTSP URL.
    void SetCameraParam(QString, int, int cameraID);
    void run();
signals:
    void SendVideoFrame(cv::Mat);
    void SendDetectFrame(cv::Mat);
private:
    ffmpegDecode *m_pVdoDecode;     // RTSP decoder (heap-allocated in SetCameraParam)
    bool m_isExist;                 // exit flag checked by run()
    unsigned long m_FrameCount;
    int m_detectInterval;           // queue every Nth frame for detection
    VideoCapture m_VideoCap;        // used only for the "USB" source
    QString m_cameraURL;
    QMutex m_Mutex;                 // guards g_OrgImgQueue pushes
    int m_cameraID;
    bool m_decodeInitResult;
};
#endif //CRTSPDECODETHREAD_H
//crtspdecodethread.cpp
#include "crtspdecodethread.h"
#include "ffmpegDecode.h"
#include <QDebug>
#include <QDateTime>
#include <queue>
#include <QMutexLocker>
// Fix: missing spaces ("boolg_ImportLib", "<ST_IMGINFO>g_OrgImgQueue") did not compile.
extern bool g_ImportLib;
// Frames handed off to the detection pipeline; guarded by m_Mutex in run().
std::queue<ST_IMGINFO> g_OrgImgQueue;
// Fix: m_pVdoDecode, m_decodeInitResult, m_FrameCount and m_detectInterval
// were left uninitialized — run() reads them and m_detectInterval is used as a
// modulus divisor.
CRTSPDecodeThread::CRTSPDecodeThread(QObject *parent) : QThread(parent)
{
    m_isExist = false;
    m_pVdoDecode = NULL;
    m_decodeInitResult = false;
    m_FrameCount = 0;
    m_detectInterval = 1; // safe non-zero default until SetCameraParam() runs
    m_cameraID = -1;
}
// Fix: m_isExist was set to true only AFTER wait(), so run()'s exit check
// never saw it while the thread was still alive; it must be raised before
// waiting. Shutdown otherwise relies on requestInterruption().
// TODO(review): m_pVdoDecode is never deleted anywhere — decoder leak; freeing
// it here needs the constructor to null-initialize the pointer first.
CRTSPDecodeThread::~CRTSPDecodeThread()
{
    m_isExist = true;
    requestInterruption();
    quit();
    wait();
}
// Configures the capture source. "USB" opens local camera 0; anything else is
// treated as an RTSP URL and handed to ffmpegDecode.
// Fixes: "intcameraID" missing space (compile error); strURL.toStdString()
// returned a TEMPORARY whose c_str() pointer dangled after this statement,
// while ffmpegDecode stores that pointer for its whole lifetime — a
// use-after-free. qstrdup() gives the decoder a stable copy (small deliberate
// leak, since ffmpegDecode never frees filepath).
void CRTSPDecodeThread::SetCameraParam(QString strURL, int iInterval, int cameraID)
{
    m_cameraID = cameraID;
    m_detectInterval = iInterval;
    m_cameraURL = strURL;
    if (m_cameraURL == "USB")
    {
        bool bRet = m_VideoCap.open(0);
        if (!bRet)
        {
            qDebug() << "打开USB摄像头失败...";
        }
    }
    else
    {
        m_pVdoDecode = new ffmpegDecode(qstrdup(strURL.toStdString().c_str()));
        m_decodeInitResult = m_pVdoDecode->getInitResult();
    }
}
voidCRTSPDecodeThread::run()
{
m_FrameCount = 0;
cv::Mat img;
unsigned long iRTSPOfflineTick =GetTickCount();
while(!isInterruptionRequested())
{
if(m_isExist)
{
break;
}
if(m_cameraURL == "USB")
{
m_VideoCap>>img;
}
else
{
if(m_decodeInitResult)
{
img =m_pVdoDecode->getDecodedFrame();
}
}
if(!img.empty())
{
m_FrameCount++;
//cvtColor(img, img, COLOR_BGR2RGB);
iRTSPOfflineTick =GetTickCount();
emit SendVideoFrame(img);
if(m_FrameCount % m_detectInterval == 0)
{
ST_IMGINFO imgInfo;
img.copyTo(imgInfo.img);
imgInfo.camera_id =m_cameraID;//m_pManager->GetCameraID();
QDateTime dtTime;
imgInfo.time = dtTime.currentDateTime().toString("yyyy-MM-dd HH:mm:ss");
QMutexLocker lock(&m_Mutex);
g_OrgImgQueue.push(imgInfo);
}
img.release();
}
else
{
qDebug()<<"获取原始视频帧失败...";
if( (GetTickCount() -iRTSPOfflineTick ) > 1000*15)
{
qDebug()<<"重新打开视频流...";
iRTSPOfflineTick =GetTickCount();
if(m_cameraURL == "USB")
{
bool bRet = m_VideoCap.open(0);
if(!bRet)
{
qDebug()<<"打开USB摄像头失败...";
}
}
else
{
deletem_pVdoDecode;
m_pVdoDecode =NULL;
m_pVdoDecode = new ffmpegDecode((char*)m_cameraURL.toStdString().c_str());
m_decodeInitResult = m_pVdoDecode->getInitResult();
}
}
}
}
}