Encoding video from C++ using libavcodec and VAAPI

I am trying to encode video in H.264 with libavcodec (version 3.4.6). It works when I use the software encoder "libx264", but it does not work when I try to use the hardware encoder of my Intel CPU via VAAPI. Hardware encoding with ffmpeg via VAAPI does work from the command line (using the command from the linked page).
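
For reference, the exact command is not preserved in this copy of the post; a typical VAAPI encoding invocation (assuming the same render node as in the code below, with placeholder file names) looks roughly like this:

ffmpeg -vaapi_device /dev/dri/renderD128 -i input.mp4 -vf 'format=nv12,hwupload' -c:v h264_vaapi output.mkv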

Apparently there are no examples or tutorials on how to encode with VAAPI and libav*. I read through the related use cases (hardware decoding, software encoding, muxing) and tried to adapt them accordingly.

When I set up the VAAPI encoder, avcodec_open2() returns AVERROR(EINVAL) (-22) and prints the following error message to the console:

Mismatching AVCodecContext.pix_fmt and AVHWFramesContext.format

The failing call is at the end of Encoder::setupEncoder() in my code below. What am I missing?
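
Side note (not part of the original question): a raw return code such as -22 can be turned into a readable message with av_strerror(), e.g.:

    char errBuf[AV_ERROR_MAX_STRING_SIZE];
    int ret = avcodec_open2(m_encoder, videoCodec, nullptr);
    if(ret < 0) {
        av_strerror(ret, errBuf, sizeof(errBuf));               // maps -22 to "Invalid argument"
        fprintf(stderr, "avcodec_open2 failed: %s\n", errBuf);
    }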

Here is my code, split into three files:

encoder.h

#ifndef ENCODER_H
#define ENCODER_H
#include <cassert>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/opt.h>
#include <libavutil/hwcontext.h>
}

class Encoder
{
public:
    Encoder(const bool hwAccel);
    void addFrame(AVFrame* frame);
    void flush();

    static constexpr int s_width = 640;
    static constexpr int s_height = 480;
    static constexpr int s_fps = 25;
private:
    void setup();
    void setupEncoder();
    void encodeFrame(AVFrame* frame);

    // members
    int m_frameId = 1;
    const bool m_hardwareAcceleration = false;

    AVCodecContext* m_encoder = nullptr;
    AVFormatContext* m_muxer = nullptr;
    AVStream* m_avStream = nullptr;
    AVBufferRef* m_device = nullptr;

    AVFrame* m_hwFrame = nullptr;
};

#endif // ENCODER_H

encoder.h (variant that converts and uploads the frames through a libavfilter graph; the matching encoder.cpp follows further below)

#ifndef ENCODER_H
#define ENCODER_H
#include <cassert>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/opt.h>
#include <libavutil/hwcontext.h>

#include <libavfilter/avfilter.h>
#include <libavfilter/buffersink.h>
#include <libavfilter/buffersrc.h>
}

class Encoder
{
public:
    Encoder(const bool hwAccel);
    void addFrame(AVFrame* frame);
    void flush();

    static constexpr int s_width = 640;
    static constexpr int s_height = 480;
    static constexpr int s_fps = 25;
private:
    void setup();
    void setupEncoder();
    void initFilters();
    void initInputFilters(AVFilterInOut* inputs);
    void initOutputFilters(AVFilterInOut* outputs);
    void filterFrame(AVFrame* inFrame, AVFrame* outFrame);
    void encodeFrame(AVFrame* frame);

    // members
    int m_frameId = 1;
    const bool m_hardwareAcceleration = false;

    AVCodecContext* m_encoder = nullptr;
    AVFormatContext* m_muxer = nullptr;
    AVStream* m_avStream = nullptr;
    AVBufferRef* m_device = nullptr;

    AVFrame* m_hwFrame = nullptr;

    AVFilterGraph* m_filterGraph = nullptr;
    AVFilterContext* m_bufferSrc = nullptr;
    AVFilterContext* m_bufferSink = nullptr;
    AVFilterContext* m_formatFilter = nullptr;
};

#endif // ENCODER_H

encoder.cpp

#include "encoder.h"

extern "C" {

static enum AVPixelFormat get_vaapi_format(AVCodecContext*, const enum AVPixelFormat *pix_fmts)
{
    const enum AVPixelFormat *p;
    for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
        if (*p == AV_PIX_FMT_VAAPI)
            return *p;
    }
    fprintf(stderr, "Failed to get HW surface format.\n");
    return AV_PIX_FMT_NONE;
}

}

Encoder::Encoder(const bool hwAccel)
  : m_hardwareAcceleration(hwAccel)
{
    setup();
}
void Encoder::addFrame(AVFrame* frame)
{
    AVFrame* frameToEncode = frame;
    if(m_hardwareAcceleration) {
        assert(frame->format == AV_PIX_FMT_NV12);
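        // upload the NV12 software frame into the VAAPI hardware surface (m_hwFrame)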
        av_hwframe_transfer_data(m_hwFrame, frame, 0);
        assert(m_hwFrame->format == AV_PIX_FMT_VAAPI);
        frameToEncode = m_hwFrame;
    }

    frameToEncode->pts = m_frameId++;
    encodeFrame(frameToEncode);
}
void Encoder::flush()
{
    encodeFrame(nullptr);
    av_write_trailer(m_muxer);
}

void Encoder::setup()
{
    assert(avformat_alloc_output_context2(&m_muxer, nullptr, "matroska", nullptr) == 0);
    assert(m_muxer != nullptr);

    setupEncoder();

    m_avStream = avformat_new_stream(m_muxer, nullptr);
    assert(m_avStream != nullptr);
    m_avStream->id = m_muxer->nb_streams-1;
    m_avStream->time_base = m_encoder->time_base;

    // Some formats want stream headers to be separate.
    if(m_muxer->oformat->flags & AVFMT_GLOBALHEADER)
        m_encoder->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;

    assert(avcodec_parameters_from_context(m_avStream->codecpar, m_encoder) == 0);
    assert(avio_open(&m_muxer->pb, m_hardwareAcceleration? "hardware.mkv": "software.mkv", AVIO_FLAG_WRITE) == 0);
    assert(avformat_write_header(m_muxer, nullptr) == 0);
}
void Encoder::setupEncoder()
{
    const char* encoderName = m_hardwareAcceleration? "h264_vaapi": "libx264";
    AVCodec* videoCodec = avcodec_find_encoder_by_name(encoderName);
    m_encoder = avcodec_alloc_context3(videoCodec);
    m_encoder->bit_rate = s_width * s_height * s_fps * 2;
    m_encoder->width = s_width;
    m_encoder->height = s_height;
    m_encoder->time_base = (AVRational){1, s_fps};
    m_encoder->framerate = (AVRational){s_fps, 1};

    m_encoder->gop_size = s_fps;  // have at least 1 I-frame per second
    m_encoder->max_b_frames = 1;
    m_encoder->pix_fmt = AV_PIX_FMT_YUV420P;

    if(m_hardwareAcceleration) {
        m_encoder->pix_fmt = AV_PIX_FMT_VAAPI;
        m_encoder->get_format = get_vaapi_format;

        assert(av_hwdevice_ctx_create(&m_device, AV_HWDEVICE_TYPE_VAAPI, "/dev/dri/renderD128", nullptr, 0) == 0);

        AVHWDeviceContext* deviceCtx = (AVHWDeviceContext*) m_device->data;
        assert(deviceCtx->type == AV_HWDEVICE_TYPE_VAAPI);

        m_encoder->hw_device_ctx = av_hwframe_ctx_alloc(m_device);
        m_encoder->hw_frames_ctx = av_buffer_ref(m_device);
        m_hwFrame = av_frame_alloc();
        av_hwframe_get_buffer(m_encoder->hw_device_ctx, m_hwFrame, 0);
    }

    assert(avcodec_open2(m_encoder, videoCodec, nullptr) == 0);  // <-- returns -22 (EINVAL) for hardware encoder

    m_muxer->video_codec_id = videoCodec->id;
    m_muxer->video_codec = videoCodec;
}
void Encoder::encodeFrame(AVFrame* frame)
{
    assert(avcodec_send_frame(m_encoder, frame) == 0);

    AVPacket packet;
    av_init_packet(&packet);
    int ret = 0;
    while(ret >= 0) {
        ret = avcodec_receive_packet(m_encoder, &packet);
        if(ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            return;  // nothing to write
        }
        assert(ret >= 0);

        av_packet_rescale_ts(&packet, m_encoder->time_base, m_avStream->time_base);
        packet.stream_index = m_avStream->index;
        av_interleaved_write_frame(m_muxer, &packet);
        av_packet_unref(&packet);
    }
}

main.cpp

#include "encoder.h"

AVFrame* createFrame(const int format)
{
    AVFrame* frame = av_frame_alloc();
    frame->format = format;
    frame->width  = Encoder::s_width;
    frame->height = Encoder::s_height;
    assert(av_frame_get_buffer(frame, 0) == 0);
    assert(av_frame_make_writable(frame) == 0);

    // Y
    for(int y=0; y<frame->height; y++) {
        for(int x=0; x<frame->width; x++) {
            frame->data[0][y * frame->linesize[0] + x] = 0;
        }
    }

    // CbCr
    const int widthCbCr  = frame->width / 2;
    const int heightCbCr = frame->height / 2;

    if(format == AV_PIX_FMT_YUV420P) {
        for(int y=0; y<heightCbCr; y++) {
            for(int x=0; x<widthCbCr; x++) {
                frame->data[1][y * frame->linesize[1] + x] = 0;  // Cb
                frame->data[2][y * frame->linesize[2] + x] = 0;  // Cr
            }
        }
        return frame;
    }
    else if(format == AV_PIX_FMT_NV12) {
        // NV12 keeps Cb and Cr interleaved in a single half-height plane,
        // so each chroma row contains frame->width bytes.
        for(int y=0; y<heightCbCr; y++) {
            for(int x=0; x<frame->width; x++) {
                frame->data[1][y * frame->linesize[1] + x] = 0;
            }
        }
        return frame;
    }

    return nullptr;
}

int main()
{
    av_register_all();

    AVFrame* yuv420pFrame = createFrame(AV_PIX_FMT_YUV420P);
    AVFrame* nv12Frame = createFrame(AV_PIX_FMT_NV12);

    // works well
    Encoder softwareEncoder(false);
    for(int i=0; i<100; ++i)
        softwareEncoder.addFrame(yuv420pFrame);
    softwareEncoder.flush();

    // does not work
    Encoder hardwareEncoder(true);
    for(int i=0; i<100; ++i)
        hardwareEncoder.addFrame(nv12Frame);
    hardwareEncoder.flush();

    return 0;
}
#include "encoder.h"

extern "C" {

static enum AVPixelFormat get_vaapi_format(AVCodecContext*, const enum AVPixelFormat *pix_fmts)
{
    const enum AVPixelFormat *p;
    for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
        if (*p == AV_PIX_FMT_VAAPI)
            return *p;
    }
    fprintf(stderr, "Failed to get HW surface format.\n");
    return AV_PIX_FMT_NONE;
}

}

Encoder::Encoder(const bool hwAccel)
  : m_hardwareAcceleration(hwAccel)
{
    setup();
}
void Encoder::addFrame(AVFrame* frame)
{
    AVFrame* frameToEncode = frame;
    if(m_hardwareAcceleration) {
        filterFrame(frame, m_hwFrame);
        assert(m_hwFrame->format == AV_PIX_FMT_VAAPI);
        frameToEncode = m_hwFrame;
    }

    frameToEncode->pts = m_frameId++;
    encodeFrame(frameToEncode);
}
void Encoder::flush()
{
    encodeFrame(nullptr);
    av_write_trailer(m_muxer);
}

void Encoder::setup()
{
    assert(avformat_alloc_output_context2(&m_muxer, nullptr, "matroska", nullptr) == 0);
    assert(m_muxer != nullptr);

    setupEncoder();

    m_avStream = avformat_new_stream(m_muxer, nullptr);
    assert(m_avStream != nullptr);
    m_avStream->id = m_muxer->nb_streams-1;
    m_avStream->time_base = m_encoder->time_base;

    // Some formats want stream headers to be separate.
    if(m_muxer->oformat->flags & AVFMT_GLOBALHEADER)
        m_encoder->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;

    assert(avcodec_parameters_from_context(m_avStream->codecpar, m_encoder) == 0);
    assert(avio_open(&m_muxer->pb, m_hardwareAcceleration? "hardware.mkv": "software.mkv", AVIO_FLAG_WRITE) == 0);
    assert(avformat_write_header(m_muxer, nullptr) == 0);
}
void Encoder::setupEncoder()
{
    const char* encoderName = m_hardwareAcceleration? "h264_vaapi": "libx264";
    AVCodec* videoCodec = avcodec_find_encoder_by_name(encoderName);
    m_encoder = avcodec_alloc_context3(videoCodec);
    m_encoder->bit_rate = s_width * s_height * s_fps * 2;
    m_encoder->width = s_width;
    m_encoder->height = s_height;
    m_encoder->time_base = (AVRational){1, s_fps};
    m_encoder->framerate = (AVRational){s_fps, 1};

    m_encoder->gop_size = s_fps;  // have at least 1 I-frame per second
    m_encoder->max_b_frames = 1;
    m_encoder->pix_fmt = AV_PIX_FMT_YUV420P;

    if(m_hardwareAcceleration) {
        m_encoder->pix_fmt = AV_PIX_FMT_VAAPI;
        m_encoder->get_format = get_vaapi_format;

        assert(av_hwdevice_ctx_create(&m_device, AV_HWDEVICE_TYPE_VAAPI, "/dev/dri/renderD128", nullptr, 0) == 0);
        const AVHWDeviceContext* deviceCtx = (AVHWDeviceContext*) m_device->data;
        assert(deviceCtx->type == AV_HWDEVICE_TYPE_VAAPI);

        initFilters();

        m_encoder->hw_device_ctx = nullptr;
        m_encoder->hw_frames_ctx = av_buffer_ref(av_buffersink_get_hw_frames_ctx(m_bufferSink));
    }

    assert(avcodec_open2(m_encoder, videoCodec, nullptr) == 0);

    if(m_hardwareAcceleration) {
        m_encoder->hw_device_ctx = av_hwframe_ctx_alloc(m_device);
        m_hwFrame = av_frame_alloc();
        av_hwframe_get_buffer(m_encoder->hw_device_ctx, m_hwFrame, 0);
    }

    m_muxer->video_codec_id = videoCodec->id;
    m_muxer->video_codec = videoCodec;
}
void Encoder::initFilters()
{
    AVFilterInOut* inputs = nullptr;
    AVFilterInOut* outputs = nullptr;
    m_filterGraph = avfilter_graph_alloc();
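    // filter chain that converts the input frames to NV12 and uploads them to a VAAPI surface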
    assert(avfilter_graph_parse2(m_filterGraph, "format=nv12,hwupload", &inputs, &outputs) == 0);

    for(unsigned i=0; i<m_filterGraph->nb_filters; i++) {
        m_filterGraph->filters[i]->hw_device_ctx = av_buffer_ref(m_device);
        assert(m_filterGraph->filters[i]->hw_device_ctx != nullptr);
    }

    initInputFilters(inputs);
    initOutputFilters(outputs);

    assert(avfilter_graph_config(m_filterGraph, nullptr) == 0);
}
void Encoder::initInputFilters(AVFilterInOut* inputs)
{
    assert(inputs != nullptr);
    assert(inputs->next == nullptr);

    char args[512];
    snprintf(args, sizeof(args),
            "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
            s_width, s_height, AV_PIX_FMT_YUV420P,
            1, s_fps,
            1, 1);

    assert(avfilter_graph_create_filter(&m_bufferSrc, avfilter_get_by_name("buffer"), "in",
                                        args, nullptr, m_filterGraph) == 0);
    assert(avfilter_link(m_bufferSrc, 0, inputs->filter_ctx, inputs->pad_idx) == 0);
}
void Encoder::initOutputFilters(AVFilterInOut* outputs)
{
    assert(outputs != nullptr);
    assert(outputs->next == nullptr);

    assert(avfilter_graph_create_filter(&m_bufferSink, avfilter_get_by_name("buffersink"), "out",
                                       nullptr, nullptr, m_filterGraph) == 0);
    assert(avfilter_graph_create_filter(&m_formatFilter, avfilter_get_by_name("format"), "format",
                                       "vaapi_vld", nullptr, m_filterGraph) == 0);
    assert(avfilter_link(outputs->filter_ctx, outputs->pad_idx, m_formatFilter, 0) == 0);
    assert(avfilter_link(m_formatFilter, 0, m_bufferSink, 0) == 0);
}
void Encoder::filterFrame(AVFrame* inFrame, AVFrame* outFrame)
{
    assert(av_buffersrc_add_frame_flags(m_bufferSrc, inFrame, AV_BUFFERSRC_FLAG_KEEP_REF) == 0);
    assert(av_buffersink_get_frame(m_bufferSink, outFrame) == 0);
}
void Encoder::encodeFrame(AVFrame* frame)
{
    assert(avcodec_send_frame(m_encoder, frame) == 0);

    AVPacket packet;
    av_init_packet(&packet);
    int ret = 0;
    while(ret >= 0) {
        ret = avcodec_receive_packet(m_encoder, &packet);
        if(ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            return;  // nothing to write
        }
        assert(ret >= 0);

        av_packet_rescale_ts(&packet, m_encoder->time_base, m_avStream->time_base);
        packet.stream_index = m_avStream->index;
        av_interleaved_write_frame(m_muxer, &packet);
        av_packet_unref(&packet);
    }
}

The version of Encoder::setupEncoder() below incorporates the fixes (see the inline comments):

void Encoder::setupEncoder()
{
    const char* encoderName = m_hardwareAcceleration? "h264_vaapi": "libx264";
    AVCodec* videoCodec = avcodec_find_encoder_by_name(encoderName);
    m_encoder = avcodec_alloc_context3(videoCodec);
    m_encoder->bit_rate = s_width * s_height * s_fps * 2;
    m_encoder->width = s_width;
    m_encoder->height = s_height;
    m_encoder->time_base = (AVRational){1, s_fps};
    m_encoder->framerate = (AVRational){s_fps, 1};

    m_encoder->gop_size = s_fps;  // have at least 1 I-frame per second
    m_encoder->max_b_frames = 1;
    m_encoder->pix_fmt = AV_PIX_FMT_YUV420P;

    if(m_hardwareAcceleration) {
        m_encoder->pix_fmt = AV_PIX_FMT_VAAPI;
        m_encoder->get_format = get_vaapi_format;

        assert(av_hwdevice_ctx_create(&m_device, AV_HWDEVICE_TYPE_VAAPI, "/dev/dri/renderD128", nullptr, 0) == 0);

        AVHWDeviceContext* deviceCtx = (AVHWDeviceContext*) m_device->data;
        assert(deviceCtx->type == AV_HWDEVICE_TYPE_VAAPI);

        // Fix error: Mismatching AVCodecContext.pix_fmt and AVHWFramesContext.format
        // See doc/examples/vaapi_transcode.c "set_hwframe_ctx()"
        {
            AVBufferRef *hw_frames_ref;
            AVHWFramesContext *frames_ctx = NULL;

            assert((hw_frames_ref = av_hwframe_ctx_alloc(m_device)) != nullptr);
            frames_ctx = (AVHWFramesContext *)(hw_frames_ref->data);
            frames_ctx->format    = AV_PIX_FMT_VAAPI;
            frames_ctx->sw_format = AV_PIX_FMT_NV12;
            frames_ctx->width     = s_width;
            frames_ctx->height    = s_height;
            frames_ctx->initial_pool_size = 20;
            assert(av_hwframe_ctx_init(hw_frames_ref) == 0);
            m_encoder->hw_frames_ctx = av_buffer_ref(hw_frames_ref);
            assert(m_encoder->hw_frames_ctx != nullptr);
        }

        // Fix error: Driver does not support any RC mode compatible with selected options (supported modes: CQP).
        assert(av_opt_set(m_encoder->priv_data, "rc_mode", "CQP", AV_OPT_SEARCH_CHILDREN) == 0);

        // Fix warning, cosmetical only: No quality level set; using default (20).
        m_encoder->global_quality = 20;

        m_encoder->hw_device_ctx = av_hwframe_ctx_alloc(m_device);
        //m_encoder->hw_frames_ctx = av_buffer_ref(m_device);           // Fix: Not required, done by av_hwframe_ctx_alloc
        m_hwFrame = av_frame_alloc();
        av_hwframe_get_buffer(m_encoder->hw_frames_ctx, m_hwFrame, 0); // Fix: Must pass hw_frames_ctx, not m_encoder->hw_device_ctx
        assert(m_hwFrame != nullptr);
    }

    assert(avcodec_open2(m_encoder, videoCodec, nullptr) == 0);  // previously returned -22 (EINVAL) for the hardware encoder

    m_muxer->video_codec_id = videoCodec->id;
    m_muxer->video_codec = videoCodec;
}
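
None of the snippets above release their FFmpeg objects. A minimal cleanup sketch, assuming a destructor is added to the Encoder class (it is not part of the original code):

Encoder::~Encoder()
{
    av_frame_free(&m_hwFrame);
    avcodec_free_context(&m_encoder);
    if(m_muxer != nullptr) {
        avio_closep(&m_muxer->pb);            // close the file opened with avio_open()
        avformat_free_context(m_muxer);
    }
    avfilter_graph_free(&m_filterGraph);      // only used by the filter-based variant
    av_buffer_unref(&m_device);               // release the VAAPI device reference
}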