iOS: memory warning when creating an MP4 video from an array of images

I am trying to create a video file from a set of image files. The image file names are stored in an NSArray. When the number of images gets large (more than 80 or 100), I get memory warnings and sometimes the app crashes. Here is my code:

- (void)writeImageAsMovie:(NSArray *)images toPath:(NSString *)path size:(CGSize)size duration:(int)duration
{
    NSError *error = nil;

    videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                            fileType:AVFileTypeQuickTimeMovie
                                               error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput
                                       assetWriterInputWithMediaType:AVMediaTypeVideo
                                       outputSettings:videoSettings];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:nil];

    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];

    CVPixelBufferRef buffer = NULL;

    //convert uiimage to CGImage.

    //Write samples:
    for (int i=0; i<images.count ; i++) {

        UIImage *image = [UIImage imageWithData:[NSData dataWithContentsOfFile:[[images objectAtIndex:i] objectForKey:@"image"]]];
        int time = [[[images objectAtIndex:i] objectForKey:@"time"] intValue];
        buffer = [self pixelBufferFromCGImage:image.CGImage];
        while(! adaptor.assetWriterInput.readyForMoreMediaData );
        [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(time,1000)];
        image=nil;
    }

    while(!adaptor.assetWriterInput.readyForMoreMediaData);

    //Finish the session:
    [writerInput markAsFinished];

    [videoWriter finishWritingWithCompletionHandler:^(){
        NSLog (@"finished writing %d",images.count);
    }];

    NSLog(@"%d",[videoWriter status]);
    while([videoWriter status] != AVAssetWriterStatusFailed && [videoWriter status] != AVAssetWriterStatusCompleted) {
        NSLog(@"Status: %d", [videoWriter status]);
        sleep(1);
    }
    NSLog(@"%d",[videoWriter status]);
    NSString *tmpdir = NSTemporaryDirectory();
    NSString *mydir = [tmpdir stringByAppendingPathComponent:@"vidimages"];
    [[NSFileManager defaultManager] removeItemAtPath:mydir error:nil];

}

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    CGFloat screenWidth = [[UIScreen mainScreen] bounds].size.width;

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, screenWidth,
                                          screenWidth, kCVPixelFormatType_32BGRA, (__bridge CFDictionaryRef) options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context1 = CGBitmapContextCreate(pxdata, screenWidth,
                                                  screenWidth, 8, 4*screenWidth, rgbColorSpace,
                                                  kCGImageAlphaNoneSkipLast);
    NSParameterAssert(context1);
    CGContextConcatCTM(context1, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context1, CGRectMake(0, 0, screenWidth,
                                            screenWidth), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context1);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
It looks like you are allocating a pile of autoreleased UIImage and NSData objects in this loop:

for (int i=0; i<images.count ; i++) {

    UIImage *image = [UIImage imageWithData:[NSData dataWithContentsOfFile:[[images objectAtIndex:i] objectForKey:@"image"]]];
    int time = [[[images objectAtIndex:i] objectForKey:@"time"] intValue];
    buffer = [self pixelBufferFromCGImage:image.CGImage];
    while(! adaptor.assetWriterInput.readyForMoreMediaData );
    [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(time,1000)];
    image=nil;
}

You are using as much of the available memory as you possibly can, which is why the system warns you and eventually kills the app. This is not so much a bug in any single line as a limit of the system: the device cannot hold that many decoded images at once, and virtual memory might sometimes help but can also make the system even less stable (more frequent crashes). Wrapping the body of the loop in an @autoreleasepool drains those temporaries on every iteration instead of letting them pile up until the method returns:
for (int i=0; i<images.count ; i++) {
    @autoreleasepool {
        UIImage *image = [UIImage imageWithData:[NSData dataWithContentsOfFile:[[images objectAtIndex:i] objectForKey:@"image"]]];
        int time = [[[images objectAtIndex:i] objectForKey:@"time"] intValue];
        buffer = [self pixelBufferFromCGImage:image.CGImage];
        while(! adaptor.assetWriterInput.readyForMoreMediaData );
        [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(time,1000)];
        image=nil;
    }
}
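
One thing the @autoreleasepool alone does not cover, assuming pixelBufferFromCGImage: stays as posted above: the buffer it returns comes from CVPixelBufferCreate, a Core Foundation "Create" call that ARC does not manage, so every iteration also leaks a screen-sized BGRA pixel buffer. A minimal sketch of the same loop with an explicit CVPixelBufferRelease after the append:

for (int i=0; i<images.count ; i++) {
    @autoreleasepool {
        UIImage *image = [UIImage imageWithData:[NSData dataWithContentsOfFile:[[images objectAtIndex:i] objectForKey:@"image"]]];
        int time = [[[images objectAtIndex:i] objectForKey:@"time"] intValue];
        buffer = [self pixelBufferFromCGImage:image.CGImage];
        while(! adaptor.assetWriterInput.readyForMoreMediaData );
        [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(time,1000)];
        CVPixelBufferRelease(buffer);   // CVPixelBufferCreate follows the Create rule; ARC will not free this
        buffer = NULL;
        image = nil;
    }
}

With the pool and the release together, peak usage stays at roughly one decoded UIImage plus one pixel buffer at a time instead of growing with the number of frames.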
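
Separately, the while(!adaptor.assetWriterInput.readyForMoreMediaData); spin and the sleep(1) status loop keep the calling thread busy for the whole export. AVAssetWriterInput also offers a pull model, requestMediaDataWhenReadyOnQueue:usingBlock:, where the writer asks for frames as it can take them. The sketch below is only an illustration of how the poster's pieces could be rearranged under that model; the frameIndex counter and the serial writerQueue are made-up names, not part of the original code:

// Sketch only: feed frames when the writer asks for them instead of spinning.
__block NSUInteger frameIndex = 0;
dispatch_queue_t writerQueue = dispatch_queue_create("video.writer", DISPATCH_QUEUE_SERIAL);

[writerInput requestMediaDataWhenReadyOnQueue:writerQueue usingBlock:^{
    // AVFoundation calls this block whenever the input can accept more data.
    while (writerInput.isReadyForMoreMediaData) {
        if (frameIndex >= images.count) {
            [writerInput markAsFinished];
            [videoWriter finishWritingWithCompletionHandler:^{
                NSLog(@"finished writing %lu frames", (unsigned long)images.count);
            }];
            break;
        }
        @autoreleasepool {
            NSDictionary *frame = [images objectAtIndex:frameIndex];
            UIImage *image = [UIImage imageWithData:[NSData dataWithContentsOfFile:[frame objectForKey:@"image"]]];
            int time = [[frame objectForKey:@"time"] intValue];
            CVPixelBufferRef pixelBuffer = [self pixelBufferFromCGImage:image.CGImage];
            [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:CMTimeMake(time, 1000)];
            CVPixelBufferRelease(pixelBuffer);   // release each frame's buffer before the next one
            frameIndex++;
        }
    }
}];

Either way, the point of the answer stands: release each frame's temporary objects before moving on to the next frame, rather than at the end of the whole loop.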