Converting Between cv::Mat and AVFrame

Published: January 10, 2024

Recently, while using FFmpeg to pull a fisheye camera's video stream for panoramic playback, I needed to convert between cv::Mat and AVFrame. I came across this material and am writing it down here for reference.

1. Converting OpenCV cv::Mat to FFmpeg AVFrame

Two approaches are shown below. The first uses libswscale to convert the BGR pixels into a YUV420P frame; the second allocates a BGR24 frame and copies the rows directly, with no pixel-format conversion.

#include <opencv2/opencv.hpp>

extern "C" {
#include <libavutil/frame.h>
#include <libswscale/swscale.h>
}

// Convert a BGR24 cv::Mat into a pre-allocated YUV420P AVFrame using libswscale.
void CvMatToAVFrame(const cv::Mat& input_mat, AVFrame* out_avframe)
{
    int image_width = input_mat.cols;
    int image_height = input_mat.rows;

    // Source stride in bytes (a single plane of packed BGR data).
    int cvLinesizes[1];
    cvLinesizes[0] = static_cast<int>(input_mat.step1());

    SwsContext* openCVBGRToAVFrameSwsContext = sws_getContext(
        image_width,
        image_height,
        AVPixelFormat::AV_PIX_FMT_BGR24,
        image_width,
        image_height,
        AVPixelFormat::AV_PIX_FMT_YUV420P,
        SWS_FAST_BILINEAR,
        nullptr, nullptr, nullptr
    );
    if (openCVBGRToAVFrameSwsContext == nullptr)
    {
        return;
    }

    // Convert the packed BGR source into the frame's Y, U and V planes.
    sws_scale(openCVBGRToAVFrameSwsContext,
        &input_mat.data,
        cvLinesizes,
        0,
        image_height,
        out_avframe->data,
        out_avframe->linesize);

    sws_freeContext(openCVBGRToAVFrameSwsContext);
    openCVBGRToAVFrameSwsContext = nullptr;
}
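
CvMatToAVFrame only fills a frame that the caller has already prepared; it does not set the frame's format, size, or buffers itself. A minimal sketch of the expected setup, mirroring the allocation done in the full example in section 3:

AVFrame* frame = av_frame_alloc();
frame->format = AV_PIX_FMT_YUV420P;
frame->width  = input_mat.cols;      // input_mat: the cv::Mat to convert
frame->height = input_mat.rows;
av_frame_get_buffer(frame, 0);       // allocate the Y, U and V planes
CvMatToAVFrame(input_mat, frame);
// ... use the frame ...
av_frame_free(&frame);

The second approach skips the YUV conversion: it allocates a BGR24 buffer inside the frame and copies the Mat's rows into it.
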
#include <opencv2/opencv.hpp>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/frame.h>
#include <libavutil/imgutils.h>
}

// Copy a BGR cv::Mat into a BGR24 AVFrame without any pixel-format conversion.
void convertMatToAVPicture(const cv::Mat& mat, AVFrame* frame)
{
    int width = mat.cols;
    int height = mat.rows;
    int channels = mat.channels();

    frame->width = width;
    frame->height = height;
    frame->format = AV_PIX_FMT_BGR24;
    enum AVPixelFormat pix_fmt = AV_PIX_FMT_BGR24;

    // Allocate the frame's pixel buffer (must later be released with av_freep(&frame->data[0])).
    int ret = av_image_alloc(frame->data, frame->linesize, width, height, pix_fmt, 32);
    if (ret < 0)
    {
        return;
    }

    // Copy the cv::Mat into the AVFrame row by row, respecting each side's stride.
    int bytes_per_row = width * channels;
    for (int row = 0; row < height; row++)
    {
        memcpy(frame->data[0] + row * frame->linesize[0], mat.data + row * mat.step, bytes_per_row);
    }
}
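
Note that av_image_alloc hands the frame a raw buffer that is not reference counted, so av_frame_free alone will not release the pixel data. A sketch of the matching cleanup, assuming a frame filled by convertMatToAVPicture and a cv::Mat named mat:

AVFrame* frame = av_frame_alloc();
convertMatToAVPicture(mat, frame);
// ... encode or otherwise use the BGR24 frame ...
av_freep(&frame->data[0]);   // release the buffer allocated by av_image_alloc
av_frame_free(&frame);       // release the AVFrame struct itself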

2. Converting FFmpeg AVFrame to OpenCV cv::Mat

// Convert a YUV420P AVFrame into a BGR24 cv::Mat using libswscale.
cv::Mat AVFrameToCvMat(AVFrame* input_avframe)
{
    int image_width = input_avframe->width;
    int image_height = input_avframe->height;

    cv::Mat resMat(image_height, image_width, CV_8UC3);

    // Destination stride in bytes (a single plane of packed BGR data).
    int cvLinesizes[1];
    cvLinesizes[0] = static_cast<int>(resMat.step1());

    SwsContext* avFrameToOpenCVBGRSwsContext = sws_getContext(
        image_width,
        image_height,
        AVPixelFormat::AV_PIX_FMT_YUV420P,
        image_width,
        image_height,
        AVPixelFormat::AV_PIX_FMT_BGR24,
        SWS_FAST_BILINEAR,
        nullptr, nullptr, nullptr
    );
    if (avFrameToOpenCVBGRSwsContext == nullptr)
    {
        return resMat;
    }

    // Convert the frame's Y, U and V planes into the packed BGR destination.
    sws_scale(avFrameToOpenCVBGRSwsContext,
        input_avframe->data,
        input_avframe->linesize,
        0,
        image_height,
        &resMat.data,
        cvLinesizes);

    sws_freeContext(avFrameToOpenCVBGRSwsContext);
    avFrameToOpenCVBGRSwsContext = nullptr;

    return resMat;
}
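
Both helpers above create and destroy a SwsContext on every call, which adds avoidable overhead when converting every frame of a live stream such as the fisheye feed mentioned at the top. One possible variant reuses the context via sws_getCachedContext; the function and variable names below are only illustrative:

// Reuses one SwsContext across calls; sws_getCachedContext returns the existing
// context unchanged as long as the conversion parameters stay the same.
static SwsContext* cachedYuvToBgrCtx = nullptr;

cv::Mat AVFrameToCvMatCached(AVFrame* input_avframe)
{
    cv::Mat resMat(input_avframe->height, input_avframe->width, CV_8UC3);
    int cvLinesizes[1] = { static_cast<int>(resMat.step1()) };

    cachedYuvToBgrCtx = sws_getCachedContext(cachedYuvToBgrCtx,
        input_avframe->width, input_avframe->height, AV_PIX_FMT_YUV420P,
        input_avframe->width, input_avframe->height, AV_PIX_FMT_BGR24,
        SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);

    sws_scale(cachedYuvToBgrCtx,
        input_avframe->data, input_avframe->linesize,
        0, input_avframe->height,
        &resMat.data, cvLinesizes);

    return resMat;   // call sws_freeContext(cachedYuvToBgrCtx) once at shutdown
}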

3. Usage Example

#include <iostream>

// ffmpeg
extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libswscale/swscale.h"
#include <libavutil/imgutils.h>
}

// opencv
#include "opencv2/opencv.hpp"

// CvMatToAVFrame() and AVFrameToCvMat() are the same functions defined in
// sections 1 and 2 above, so they are not repeated here.


int main()
{
    cv::Mat input_image = cv::imread("C:/Users/Administrator/Desktop/example.jpg");
    if (input_image.empty())
    {
        return -1;
    }

    AVFrame* avFrame = av_frame_alloc();
    avFrame->format = AVPixelFormat::AV_PIX_FMT_YUV420P;
    avFrame->width = input_image.cols;
    avFrame->height = input_image.rows;

    // Allocate the buffers for the YUV frame we are about to fill
    if (av_frame_get_buffer(avFrame, 0) < 0)
    {
        av_frame_free(&avFrame);
        avFrame = nullptr;
        return -1;
    }

    cv::imshow("before conversion", input_image);

    // Convert the OpenCV cv::Mat into the AVFrame
    CvMatToAVFrame(input_image, avFrame);

    // Convert the AVFrame back into an OpenCV cv::Mat
    cv::Mat out_avFrameToMat = AVFrameToCvMat(avFrame);

    cv::imshow("after conversion", out_avFrameToMat);

    cv::waitKey(0);
    cv::destroyAllWindows();

    // Free the frame
    if (avFrame != nullptr)
    {
        av_frame_free(&avFrame);
        avFrame = nullptr;
    }

    return 0;
}
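
As a closing note on the BGR24 path from section 1: a frame filled by convertMatToAVPicture can be turned back into a cv::Mat without swscale and without copying, by wrapping the frame's buffer directly. A sketch (WrapBgr24Frame is just an illustrative helper, and the Mat header is only valid while the frame's data stays alive):

// Wrap a BGR24 AVFrame as a cv::Mat header; no pixels are copied.
cv::Mat WrapBgr24Frame(AVFrame* frame)
{
    return cv::Mat(frame->height, frame->width, CV_8UC3,
                   frame->data[0], frame->linesize[0]);
}
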
Source: https://blog.csdn.net/T_T_T_T_/article/details/135507175