Warning: file_get_contents(/data/phpspider/zhask/data//catemap/4/video/2.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
使用FFmpeg库的Qt-H.264视频流_Qt_Video_Ffmpeg_Video Streaming_H.264 - Fatal编程技术网

使用FFmpeg库的Qt-H.264视频流

使用FFmpeg库的Qt-H.264视频流,qt,video,ffmpeg,video-streaming,h.264,Qt,Video,Ffmpeg,Video Streaming,H.264,我正在尝试在Qt小部件应用程序中获取我的IP摄像头流。首先,我连接到IP摄像机的UDP端口。IP摄像头正在传输H.264编码的视频。绑定套接字之后,在每个readyRead()信号上,我都用接收到的数据报填充缓冲区,以获得完整的帧 变量初始化: AVCodec *codec; AVCodecContext *codecCtx; AVFrame *frame; AVPacket packet; this->buffer.clear(); this->socket = new QUdpS

我正在尝试在Qt小部件应用程序中获取我的IP摄像头流。首先,我连接到IP摄像机的UDP端口。IP摄像头正在传输H.264编码的视频。绑定套接字之后,在每个readyRead()信号上,我都用接收到的数据报填充缓冲区,以获得完整的帧

变量初始化:

// Decoder state used by the streamer. NOTE(review): `codec`, `codecCtx`,
// `frame` and `packet` are also referenced from decode() via `this`, so these
// look like member variables re-declared/shadowed here — confirm against the
// class definition.
AVCodec *codec;
AVCodecContext *codecCtx;
AVFrame *frame;
AVPacket packet;
// Reset the NAL-unit accumulation buffer and open a UDP socket owned by this
// object (Qt parent-child ownership frees it with `this`).
this->buffer.clear();
this->socket = new QUdpSocket(this);

// Wire up socket lifecycle notifications. readyRead() is where incoming RTP
// datagrams are consumed.
QObject::connect(this->socket, &QUdpSocket::connected, this, &H264VideoStreamer::connected);
QObject::connect(this->socket, &QUdpSocket::disconnected, this, &H264VideoStreamer::disconnected);
QObject::connect(this->socket, &QUdpSocket::readyRead, this, &H264VideoStreamer::readyRead);
QObject::connect(this->socket, &QUdpSocket::hostFound, this, &H264VideoStreamer::hostFound);
// error() is overloaded in QAbstractSocket, so the old SIGNAL/SLOT string
// syntax is used here instead of a pointer-to-member connect.
QObject::connect(this->socket, SIGNAL(error(QAbstractSocket::SocketError)), this, SLOT(error(QAbstractSocket::SocketError)));
QObject::connect(this->socket, &QUdpSocket::stateChanged, this, &H264VideoStreamer::stateChanged);

// Register all codecs (required before FFmpeg 4.0; a no-op/removed in newer
// releases — this code targets the old avcodec_decode_video2-era API).
avcodec_register_all();

codec = avcodec_find_decoder(AV_CODEC_ID_H264);
if (!codec){
   qDebug() << "Codec not found";
   return;
}

codecCtx = avcodec_alloc_context3(codec);
if (!codecCtx){
    qDebug() << "Could not allocate video codec context";
    return;
}

// Allow feeding the decoder partial frames; H.264 advertises
// CODEC_CAP_TRUNCATED, so input need not be aligned on frame boundaries.
if (codec->capabilities & CODEC_CAP_TRUNCATED)
      codecCtx->flags |= CODEC_FLAG_TRUNCATED;

// Let the decoder handle packets containing multiple/partial NAL chunks.
codecCtx->flags2 |= CODEC_FLAG2_CHUNKS;

AVDictionary *dictionary = nullptr;

if (avcodec_open2(codecCtx, codec, &dictionary) < 0) {
    qDebug() << "Could not open codec";
    return;
}
AVCodec*编解码器;
AVCodecContext*codecCtx;
AVFrame*frame;
数据包;
这->buffer.clear();
this->socket=新的QUdpSocket(this);
QObject::connect(此->套接字,&QUdpSocket::connected,此,&H264VideoStreamer::connected);
QObject::connect(此->套接字,&QUdpSocket::断开连接,此,&H264视频流::断开连接);
QObject::connect(this->socket,&QUdpSocket::readyRead,this,&H264VideoStreamer::readyRead);
QObject::connect(this->socket,&QUdpSocket::hostFound,this,&H264VideoStreamer::hostFound);
QObject::connect(此->套接字,信号(错误(QAbstractSocket::SocketError)),此,插槽(错误(QAbstractSocket::SocketError));
QObject::connect(this->socket,&QUdpSocket::stateChanged,this,&H264VideoStreamer::stateChanged);
avcodec_寄存器_all();
codec=avcodec\u find\u解码器(AV\u codec\u ID\u H264);
如果(!编解码器){
qDebug()标志|=编解码器标志被截断;
codecCtx->flags2 |=CODEC_FLAG2_块;
AVDictionary*dictionary=nullptr;
if(avcodec_open2(codecCtx、codec和dictionary)<0){
qDebug()套接字->pendingDatagramSize());
QHostAddress发送者;
发送端口;
此->套接字->读取数据报(datagram.data()、datagram.size()、&sender和&senderPort);
QByteArray rtpHeader=数据报。左(12);
数据报。删除(0,12);
int nal\u unit\u type=数据报[0]&0x1F;
bool start=(数据报[1]&0x80)!=0;
int seqNo=rtpHeader[3]&0xFF;
qDebug()buffer.append(char(0x00));
this->buffer.append(char(0x01));
}
qDebug()buffer.append(数据报);
}
  • 数据报的前12个字节是RTP报头
  • 其他一切都是视频数据
  • 第一个视频数据字节的最后5位表示它是哪种NAL单元类型。我总是得到以下4个值中的一个(1个编码的非IDR片,5个编码的IDR片,7个SP,8个PPS)
  • 第2个视频数据字节中的第5位表示该数据报是否为帧中的起始数据
  • 所有视频数据都存储在从START开始的缓冲区中
  • 一旦新的帧到达-开始被设置,它将被解码并生成新的缓冲区
  • 解码帧的生成方式如下:

    00 00 01

    SPS

    00 00 01

    PPS

    00 00 01

    级联视频数据

  • 使用FFmpeg库中的avcodec_decode_video2()函数进行解码

    void H264VideoStreamer::decode() {
    
    av_init_packet(&packet);
    
    av_new_packet(&packet, this->buffer.size());
    memcpy(packet.data, this->buffer.data_ptr(), this->buffer.size());
    packet.size = this->buffer.size();    
    
    frame = av_frame_alloc();
    if(!frame){
        qDebug() << "Could not allocate video frame";
        return;
    }
    
    int got_frame = 1;
    
    int len = avcodec_decode_video2(codecCtx, frame, &got_frame, &packet);
    
    if (len < 0){
        qDebug() << "Error while encoding frame.";
        return;
    }
    
    //if(got_frame > 0){ // got_frame is always 0
    //    qDebug() << "Data decoded: " << frame->data[0];
    //}
    
    char * frameData = (char *) frame->data[0];
    QByteArray decodedFrame;
    decodedFrame.setRawData(frameData, len);
    
    qDebug() << "Data decoded: " << decodedFrame;
    
    av_frame_unref(frame);
    av_free_packet(&packet);
    
    emit imageReceived(decodedFrame);
    }
    
    void h264视频流::解码(){
    av_初始_数据包(&数据包);
    av_新建_数据包(&packet,this->buffer.size());
    memcpy(packet.data,this->buffer.data_ptr(),this->buffer.size());
    packet.size=this->buffer.size();
    frame=av_frame_alloc();
    如果(!帧){
    
    qDebug()手动渲染帧的整个过程可以留给另一个库。如果唯一的目的是一个Qt GUI和来自IP摄像头的实时提要,那么您可以使用libvlc库。您可以在这里找到一个示例:

    谢谢您的建议，但我想坚持使用FFmpeg库。是否可以使用libvlc获取udp流？我不确定。我想你可以，根据我粘贴的链接中的评论。如果你打开VLC客户端并进入Media->open network stream并将链接粘贴到那里，你可以自己检查。如果流启动，那么你可能也可以使用libvlc。是的，这是有意义的，因为VLC是基于libvlc的。好吧，感谢到目前为止的回答，如果我无法使用FFmpeg获取流，这将是我的备用计划，仍在等待回答。
    void H264VideoStreamer::decode() {
    
    av_init_packet(&packet);
    
    av_new_packet(&packet, this->buffer.size());
    memcpy(packet.data, this->buffer.data_ptr(), this->buffer.size());
    packet.size = this->buffer.size();    
    
    frame = av_frame_alloc();
    if(!frame){
        qDebug() << "Could not allocate video frame";
        return;
    }
    
    int got_frame = 1;
    
    int len = avcodec_decode_video2(codecCtx, frame, &got_frame, &packet);
    
    if (len < 0){
        qDebug() << "Error while encoding frame.";
        return;
    }
    
    //if(got_frame > 0){ // got_frame is always 0
    //    qDebug() << "Data decoded: " << frame->data[0];
    //}
    
    char * frameData = (char *) frame->data[0];
    QByteArray decodedFrame;
    decodedFrame.setRawData(frameData, len);
    
    qDebug() << "Data decoded: " << decodedFrame;
    
    av_frame_unref(frame);
    av_free_packet(&packet);
    
    emit imageReceived(decodedFrame);
    }