获取摄像头demo
videodecodethread.cpp
#include "videodecodethread.h"
// Worker-thread constructor: prepares FFmpeg for device capture.
// The two registrations are process-wide and idempotent, so calling
// them from every instance is harmless.
VideodecodeThread::VideodecodeThread(QObject *parent)
    : QThread(parent)
{
    avformat_network_init();    // initialise network components (idempotent)
    avdevice_register_all();    // register capture devices such as dshow
}
// Releases every FFmpeg resource run() may have allocated.
// Fixes: avcodec_close() only closed the codec and leaked the context
// (avcodec_free_context() does both); av_free(pAvFrame) leaked the frame's
// internal buffers (av_frame_free() is the documented counterpart of
// av_frame_alloc()); pAVFrameRGB, pSwsCtx, buffer and options were never
// freed at all.
VideodecodeThread::~VideodecodeThread()
{
    if (pFormatCtx)
    {
        avformat_close_input(&pFormatCtx);   // also nulls pFormatCtx
    }
    if (packet)
    {
        av_packet_free(&packet);
    }
    if (pAvCodecCtx)
    {
        avcodec_free_context(&pAvCodecCtx);  // close + free the context
    }
    if (pAvFrame)
    {
        av_frame_free(&pAvFrame);            // frees frame-internal buffers too
    }
    if (pAVFrameRGB)
    {
        av_frame_free(&pAVFrameRGB);         // previously leaked
    }
    if (pSwsCtx)
    {
        sws_freeContext(pSwsCtx);            // previously leaked
        pSwsCtx = nullptr;
    }
    if (buffer)
    {
        av_free(buffer);                     // RGB pixel buffer, previously leaked
        buffer = nullptr;
    }
    if (options)
    {
        av_dict_free(&options);              // leftover open-options dict
    }
}
void VideodecodeThread::run()
{
fmt = av_find_input_format("dshow");
av_dict_set(&options, "video_size", "640*480", 0);
av_dict_set(&options, "framerate", "30", 0);
ret = avformat_open_input(&pFormatCtx, "video=ov9734_azurewave_camera", fmt, &options);
if (ret < 0)
{
qDebug() << "Couldn't open input stream." << ret;
return;
}
ret = avformat_find_stream_info(pFormatCtx, &options);
if(ret < 0)
{
qDebug()<< "Couldn't find stream information.";
return;
}
videoIndex = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &pAvCodec, 0);
if(videoIndex < 0)
{
qDebug()<< "Couldn't av_find_best_stream.";
return;
}
pAvCodec = avcodec_find_decoder(pFormatCtx->streams[videoIndex]->codecpar->codec_id);
if(!pAvCodec)
{
qDebug()<< "Couldn't avcodec_find_decoder.";
return;
}
qDebug()<<"pAVCodec->name:" << QString::fromStdString(pAvCodec->name);
if(pFormatCtx->streams[videoIndex]->avg_frame_rate.den != 0)
{
float fps_ = pFormatCtx->streams[videoIndex]->avg_frame_rate.num / pFormatCtx->streams[videoIndex]->avg_frame_rate.den;
qDebug() <<"fps:" << fps_;
}
int64_t video_length_sec_ = pFormatCtx->duration/AV_TIME_BASE;
qDebug() <<"video_length_sec_:" << video_length_sec_;
pAvCodecCtx = avcodec_alloc_context3(pAvCodec);
if(!pAvCodecCtx)
{
qDebug()<< "Couldn't avcodec_alloc_context3.";
return;
}
ret = avcodec_parameters_to_context(pAvCodecCtx, pFormatCtx->streams[videoIndex]->codecpar);
if(ret < 0)
{
qDebug()<< "Couldn't avcodec_parameters_to_context.";
return;
}
ret = avcodec_open2(pAvCodecCtx, pAvCodec, nullptr);
if(ret!=0)
{
qDebug("avcodec_open2 %d", ret);
return;
}
pAvFrame = av_frame_alloc();
pAVFrameRGB = av_frame_alloc();
pSwsCtx = sws_getContext(pAvCodecCtx->width, pAvCodecCtx->height, pAvCodecCtx->pix_fmt,
pAvCodecCtx->width, pAvCodecCtx->height, AV_PIX_FMT_RGB32,
SWS_BICUBIC, NULL, NULL, NULL);
m_size = av_image_get_buffer_size(AVPixelFormat(AV_PIX_FMT_RGB32), pAvCodecCtx->width, pAvCodecCtx->height, 1);
buffer = (uint8_t*)av_malloc(m_size);
//为已经分配的空间的结构体AVPicture挂上一段用于保存数据的空间
av_image_fill_arrays(pAVFrameRGB->data, pAVFrameRGB->linesize, buffer, AV_PIX_FMT_RGB32, pAvCodecCtx->width, pAvCodecCtx->height, 1);
packet = av_packet_alloc();
av_new_packet(packet, pAvCodecCtx->width * pAvCodecCtx->height);
while(runFlag && !av_read_frame(pFormatCtx, packet))
{
if (packet->stream_index == videoIndex)
{
//解码一帧视频数据
int iGotPic = avcodec_send_packet(pAvCodecCtx, packet);
if(iGotPic != 0)
{
qDebug("VideoIndex avcodec_send_packet error :%d", iGotPic);
continue;
}
iGotPic = avcodec_receive_frame(pAvCodecCtx, pAvFrame);
if(iGotPic == 0){
//转换像素
sws_scale(pSwsCtx, (uint8_t const * const *)pAvFrame->data, pAvFrame->linesize, 0,
pAvFrame->height, pAVFrameRGB->data, pAVFrameRGB->linesize);
QImage desImage = QImage((uchar*)buffer, pAvCodecCtx->width,pAvCodecCtx->height,
QImage::Format_RGB32); //RGB32
emit sigSendQImage(desImage);//得到图片的时候触发信号
byte = QByteArray((char*)pAvFrame->data);
videoQueue.push(byte);
videoCount++;
msleep(25);
}
}
av_packet_unref(packet);
}
}
// Allows (true) or stops (false) the decode loop in run().
// NOTE(review): runFlag is written here from the caller's thread and read
// in run(); it should be std::atomic<bool> (or lock-protected) to make the
// cross-thread handshake well-defined — confirm against the header.
void VideodecodeThread::setRunFlag(bool flag)
{
runFlag = flag;
}
videodecodethread.h
#ifndef VIDEODECODETHREAD_H
#define VIDEODECODETHREAD_H
#include <QDebug>
#include <QImage>
#include <QObject>
#include <QThread>

#include <atomic>
#include <mutex>

#include "SharedVariables.h"
extern "C" {
#include "libavdevice/avdevice.h" // 调用输入设备需要的头文件
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
#include "libavutil/pixfmt.h"
#include "libavutil/error.h"
#include "libswresample/swresample.h"
#include "libavfilter/avfilter.h"
}
class VideodecodeThread :public QThread
{
Q_OBJECT
public:
VideodecodeThread(QObject *parent = nullptr);
~VideodecodeThread();
void run() override;
void setRunFlag(bool flag);
signals:
void sigSendQImage(QImage);
private:
const AVInputFormat *fmt = nullptr;
AVFormatContext *pFormatCtx = nullptr;
AVDictionary *options = nullptr;
AVPacket *packet = nullptr;
AVFrame* pAvFrame = nullptr;
const AVCodec *pAvCodec = nullptr;
AVCodecContext *pAvCodecCtx = nullptr;
AVFrame* pAVFrameRGB = nullptr;
SwsContext* pSwsCtx = nullptr;
QByteArray byte;
int m_size = 0;
uint8_t* buffer = nullptr;
int ret = -1;
int videoIndex = -1;
bool runFlag = true;
int videoCount = 0;
};
#endif // VIDEODECODETHREAD_H
摄像头麦克风打开正常
目前复用还有问题继续修改