C++ Access violation reading location 0x000000148965F000

Tags: c++, c, ffmpeg

I am trying to encode BMP images that I grab from a buffer and store them as an H264 video. I am stuck with these errors, which appear randomly and repeatedly:

I am using Visual Studio 2012.

1) Access violation reading location 0x000000148965F000

2) Heap corruption

The debugger shows the error at this point:

    struct SwsContext* fooContext = sws_getContext(_imgWidth,_imgHeight,PIX_FMT_RGB32,c->width,c->height,PIX_FMT_YUV420P, SWS_FAST_BILINEAR,NULL,NULL,NULL);
    sws_scale(fooContext, inpic->data, inpic->linesize, 0, c->height, outpic->data, outpic->linesize);    // converting frame size and format
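
For context, sws_scale reads the source rows strictly according to the input format and dimensions declared in sws_getContext, so with PIX_FMT_RGB32 it expects 4 bytes per pixel behind inpic->data[0], regardless of how large that buffer actually is. Below is a minimal, self-contained sketch of the same RGB32-to-YUV420P conversion with matching buffer assumptions; it uses the same deprecated avpicture/avcodec_alloc_frame API as this question, and the function name convert_rgb32_to_yuv420p and its parameters are illustrative, not part of the original code:

    #include <libavcodec/avcodec.h>
    #include <libavutil/mem.h>
    #include <libswscale/swscale.h>

    // Illustrative sketch: convert one RGB32 frame to YUV420P.
    // "rgb32" must point to at least avpicture_get_size(PIX_FMT_RGB32, width, height)
    // bytes (4 * width * height), and "dst" must already be backed by a YUV420P
    // buffer, otherwise sws_scale reads/writes out of bounds.
    static int convert_rgb32_to_yuv420p(const uint8_t *rgb32, int width, int height,
                                        AVFrame *dst)
    {
        AVFrame *src = avcodec_alloc_frame();
        if (!src)
            return -1;
        avpicture_fill((AVPicture *)src, (uint8_t *)rgb32, PIX_FMT_RGB32, width, height);

        struct SwsContext *sws = sws_getContext(width, height, PIX_FMT_RGB32,
                                                width, height, PIX_FMT_YUV420P,
                                                SWS_FAST_BILINEAR, NULL, NULL, NULL);
        if (!sws) {
            av_free(src);
            return -1;
        }
        sws_scale(sws, (const uint8_t * const *)src->data, src->linesize,
                  0, height, dst->data, dst->linesize);

        sws_freeContext(sws);   // a SwsContext is released with sws_freeContext, not av_free
        av_free(src);
        return 0;
    }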
I guess the read violation is caused by values that have not been initialized, but I do not understand why. I have also attached part of the code below.

PagedImage *inImg = getUpdatedInputImage(0);
        ML_CHECK(inImg);
        ImageVector imgExt = inImg->getImageExtent();
        if ((imgExt.x == _imgWidth) && (imgExt.y == _imgHeight))
        {
            if (((imgExt.x % 4) == 0) && ((imgExt.y % 4) == 0))
            {
               _numFramesFld->setIntValue(_numFramesFld->getIntValue() + 1);
                MLFree(unicodeFilename);
                // configure header
                //BITMAPINFO bitmapInfo
                // read out input image and write output image into video
                // get input image as an array
                void* imgData = NULL;
                SubImageBox imageBox(imgExt); // get the whole image
                getTile(inImg, imageBox, MLuint8Type, &imgData);
                MLuint8* iData = (MLuint8*)imgData;
                // since we have only images with
                // a z-ext of 1, we can compute the c stride as follows
                int cStride = _imgWidth * _imgHeight;
                int offset  = 0;
                MLuint8 r=0, g=0, b=0;
                // pointer into the bitmap that is
                // used to write images into an video
                UCHAR* dst = (UCHAR*)_bits;
                for (int y = _imgHeight-1; y >= 0; y--)
                { // reversely scan the image. if y-rows of DIB are set in normal order, no compression will be available.
                    offset = _imgWidth * y;
                    for (int x = 0; x < _imgWidth; x++)
                    {
                        if (_isGreyValueImage)
                        {
                            r = iData[offset + x];
                            *dst++ = (UCHAR)r;
                            *dst++ = (UCHAR)r;
                            *dst++ = (UCHAR)r;
                        } 
                        else
                        {
                            b = iData[offset + x]; // windows bitmap need reverse order: bgr instead of rgb
                            g = iData[offset + x + cStride          ];
                            r = iData[offset + x + cStride + cStride];
                            *dst++ = (UCHAR)r;
                            *dst++ = (UCHAR)g;
                            *dst++ = (UCHAR)b;
                        }
                        // alpha channel in input image is ignored
                    }
                }
                outbuf_size = 100000 + c->width*c->height*(32>>3);      // allocate output buffer
                outbuf = static_cast<uint8_t *>(malloc(outbuf_size));
                fileName_ = (_outputFilenameFld->getStringValue()).c_str();
                FILE* f = fopen(fileName_,"wb");                    // opening video file for writing
                if(!f)
                {
                    _messageFld->setStringValue("Cannot open file");
                }
                else _messageFld->setStringValue("Opened video file for writing\n");

                //for(i=0;i<_numFramesFld->getIntValue();i++)
                //{
                    fflush(stdout);
                    int nbytes = avpicture_get_size(PIX_FMT_YUV420P, c->width, c->height);                                // allocating outbuffer
                    uint8_t* outbuffer = (uint8_t*)av_malloc(nbytes*sizeof(uint8_t));
                    AVFrame* inpic = avcodec_alloc_frame();                                                               // mandatory frame allocation
                    AVFrame* outpic = avcodec_alloc_frame();
                    //outpic->pts = (int64_t)((float)i * (1000.0/((float)(c->time_base.den))) * 90);                        // setting frame pts
                    avpicture_fill((AVPicture*)inpic,(uint8_t*)dst, PIX_FMT_RGB32, c->width, c->height);                            // fill image with input screenshot
                    avpicture_fill((AVPicture*)outpic, outbuffer, PIX_FMT_YUV420P, c->width, c->height);                  // clear output picture for buffer copy
                    av_image_alloc(outpic->data, outpic->linesize, c->width, c->height, c->pix_fmt, 1); 

                    inpic->data[0] += inpic->linesize[0]*(c->height-1);                                                   // flipping frame
                    inpic->linesize[0] = -inpic->linesize[0];                                                             // flipping frame

                    struct SwsContext* fooContext = sws_getContext(_imgWidth,_imgHeight,PIX_FMT_RGB32,c->width,c->height,PIX_FMT_YUV420P, SWS_FAST_BILINEAR,NULL,NULL,NULL);
                    sws_scale(fooContext, inpic->data, inpic->linesize, 0, c->height, outpic->data, outpic->linesize);    // converting frame size and format
                    out_size = avcodec_encode_video(c, outbuf, outbuf_size, outpic);                                      // encoding video
                    _messageFld->setStringValue("Encoding frame %3d (size=%5d)\n");
                     fwrite(outbuf, 1, out_size, f);
                     delete [] dst;                                                                                         // freeing memory
                    av_free(outbuffer);     
                    av_free(inpic);
                    av_free(outpic);
                    av_free(fooContext);
                    DeleteObject(_hbitmap);

                    for(int Z = 0; Z<out_size; i++)
                    {
                        fflush(stdout);
                        out_size = avcodec_encode_video(c, outbuf, outbuf_size, outpic);                                              // encode the delayed frames
                        fwrite(outbuf, 1, out_size, f);
                    }
                    //outbuf[0] = 0x00;
                    //outbuf[1] = 0x00;                                                                                               // add sequence end code to have a real mpeg file
                    //outbuf[2] = 0x01;
                    //outbuf[3] = 0xb7;
                    //fwrite(outbuf, 1, 4, f);
                    fclose(f);
                    avcodec_close(c);                                                                                               // freeing memory
                    free(outbuf);
                    av_free(c);
                    printf("Closed codec and Freed\n");
                }
            }
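
As an aside, the delayed-frame loop near the end of this code is usually written the way the old FFmpeg encoding example does it: keep calling avcodec_encode_video with a NULL picture until it returns 0, instead of re-encoding the same outpic while counting up to out_size. A hedged sketch of that pattern (the helper name flush_delayed_frames is illustrative):

    #include <stdio.h>
    #include <libavcodec/avcodec.h>

    // Sketch of the classic "flush delayed frames" pattern for the old
    // avcodec_encode_video() API: pass NULL as the picture until the encoder
    // has nothing left to emit, then write the sequence end code and close.
    static void flush_delayed_frames(AVCodecContext *c, uint8_t *outbuf,
                                     int outbuf_size, FILE *f)
    {
        int size;
        do {
            size = avcodec_encode_video(c, outbuf, outbuf_size, NULL); // NULL = no new frame
            if (size > 0)
                fwrite(outbuf, 1, size, f);
        } while (size > 0);
    }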
Access violations can be quite hard to debug. Since it is a read access violation, it is probably because somewhere you …
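
One generic way to narrow down a read access violation like this (a sketch, not a diagnosis of this specific code) is to compare the number of bytes actually allocated for a picture buffer with the number of bytes its declared pixel format implies, right before handing the pointer to avpicture_fill and sws_scale. The helper below is hypothetical and assumes the old enum PixelFormat / avpicture_get_size API used in the question:

    #include <assert.h>
    #include <stddef.h>
    #include <libavcodec/avcodec.h>

    // Hypothetical helper: verify that a raw buffer really holds as many bytes
    // as the pixel format and dimensions it is about to be wrapped as. If it is
    // smaller, every later read of the "missing" bytes (for example inside
    // sws_scale) is an out-of-bounds access that may or may not crash,
    // depending on how the heap happens to be laid out.
    static void check_picture_buffer(size_t allocated_bytes,
                                     enum PixelFormat fmt, int width, int height)
    {
        int needed = avpicture_get_size(fmt, width, height);
        assert(needed > 0 && allocated_bytes >= (size_t)needed);
    }

For example, calling check_picture_buffer(allocated, PIX_FMT_RGB32, c->width, c->height) before the first avpicture_fill either passes silently or points straight at a size mismatch.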