iOS GPUImageFramebuffer over-release error

Tags: ios, objective-c, gpuimage

I built this code from the sample code in FilterShowcase -> UI elements, and it crashes consistently. The goal of the code is to overlay a stopwatch or countdown timer on top of the video while it is being recorded.

Here are the details of the exception:

*** Assertion failure in -[GPUImageFramebuffer unlock], /Users/itjunkii/Dropbox/Workspace-iOS/PocketWOD/Pods/GPUImage/framework/Source/GPUImageFramebuffer.m:269

2015-04-15 12:06:16.063 RSSReader[5416:730515] *** Terminating app due to uncaught exception 'NSInternalInconsistencyException', reason: 'Tried to overrelease a framebuffer, did you forget to call -useNextFrameForImageCapture before using -imageFromCurrentFramebuffer?'

*** First throw call stack:

(0x1852b42d8 0x196a800e4 0x1852b4198 0x186168ed4 0x100546220 0x100584cc0 0x100540510 0x100585550 0x10053df4c 0x100540558 0x10058c9b4 0x10058d2f0 0x10058deb0 0x101418fd4 0x101418f94 0x101423db8 0x10141c2c4 0x1014265d4 0x101428248 0x1972b122c 0x1972b0ef0)

libc++abi.dylib: terminating with uncaught exception of type NSException
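For context, the assertion message points at GPUImage's still-capture contract: a filter's output framebuffer goes back to the shared cache unless you opt in to keeping it before the frame is rendered. A minimal sketch of that documented usage (not code from this project) looks like:

    [filter useNextFrameForImageCapture];   // ask GPUImage to retain the next output framebuffer
    // ...let one more frame flow through the pipeline...
    UIImage *snapshot = [filter imageFromCurrentFramebuffer];

The code below never calls either of these methods, though, so here the message is presumably naming the most common cause of the same underlying lock-count underflow, which in this pipeline comes from the filter graph itself (see the comments at the end).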
Here is the code:

- (void)editVideo:(BOOL)isFront
{
    NSString *preset = AVCaptureSessionPreset1280x720;

    float presetW = 720;
    float presetH = 1280;

    NSString *device = [UIDevice currentDevice].platformString; // from a UIDevice hardware-identification category, not a system API
    //NSLog(@"device = %@", device);

    if ([device isEqualToString:@"iPhone 4S"] ||
        [device isEqualToString:@"iPhone 4"] ||
        [device isEqualToString:@"iPad mini 1G"]) {
        preset = AVCaptureSessionPreset640x480;
        presetW = 480.f;
        presetH = 640.f;
    } else {
        preset = AVCaptureSessionPreset1280x720;
        presetW = 720.f;
        presetH = 1280.f;
    }

    if (!isFront)
        videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:preset
                                                          cameraPosition:AVCaptureDevicePositionBack];
    else
        videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:preset
                                                          cameraPosition:AVCaptureDevicePositionFront];

    videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
    videoCamera.horizontallyMirrorFrontFacingCamera = NO;
    videoCamera.horizontallyMirrorRearFacingCamera = NO;
    //videoCamera.audioEncodingTarget = nil;

    filter = [[GPUImageBrightnessFilter alloc] init];
    [filter forceProcessingAtSizeRespectingAspectRatio:CGSizeMake(presetW, presetH)];

    GPUImageAlphaBlendFilter *blendFilter = [[GPUImageAlphaBlendFilter alloc] init];
    blendFilter.mix = 1.0;
    [blendFilter forceProcessingAtSizeRespectingAspectRatio:CGSizeMake(presetW, presetH)];

    UIView *contentView = [[UIView alloc] initWithFrame:CGRectMake(0,0,screenWidth,screenHeight-20)];
    contentView.backgroundColor = [UIColor clearColor];

    CGFloat vWidth = [[UIScreen mainScreen] bounds].size.width;
    CGFloat vHeight = [[UIScreen mainScreen] bounds].size.height;

    UIView *bg = [[UIView alloc] initWithFrame:CGRectMake(0,vHeight-100,vWidth,100)];
    bg.backgroundColor = [UIColor colorWithRed:0 green:0 blue:0 alpha:0.5];
    [contentView addSubview:bg];

    float bgHeight = bg.frame.size.height;
    timeLabel = [[UILabel alloc] initWithFrame:CGRectMake(0,vHeight-(bgHeight+10), vWidth, bgHeight)];
    timeLabel.font = [UIFont fontWithName:@"HelveticaNeue-Bold" size:68.0f];
    timeLabel.text = @"00:00.00";
    timeLabel.textAlignment = NSTextAlignmentCenter;
    timeLabel.textColor = [UIColor redColor];
    timeLabel.tag = 1;
    timeLabel.backgroundColor = [UIColor clearColor];
    [contentView addSubview:timeLabel];

    [contentView addSubview:cDownLabel];

    uiElementInput = [[GPUImageUIElement alloc] initWithView:contentView];

    [filter addTarget:blendFilter];
    [uiElementInput addTarget:blendFilter];

    [movieFile addTarget:filter];

    // Only rotate the video for display, leave orientation the same for recording
    GPUImageView *filterView = (GPUImageView *)vwVideo;
    //[filter addTarget:filterView];
    [blendFilter addTarget:filterView];

    [videoCamera addTarget:filter];

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Warc-retain-cycles"
#pragma clang diagnostic ignored "-Wgnu"
    __block BOOL isFirstRun = NO;
    __unsafe_unretained GPUImageUIElement *weakUIElementInput = uiElementInput;
    [filter setFrameProcessingCompletionBlock:^(GPUImageOutput * filter, CMTime frameTime){

        NSDate *currentDate = [NSDate date];
        NSTimeInterval timeInterval = [currentDate timeIntervalSinceDate:sTime];
        NSDate *timerDate = [NSDate dateWithTimeIntervalSince1970:timeInterval];

        NSDate *futureDate = [sTime dateByAddingTimeInterval:(minutes*60)];

        if (isCountdown) {
            cDownLabel.text = [NSString stringWithFormat:@"%i", countSeconds];
        }

        if (isRecording && !isTimeSet) {
            sTime = [NSDate date];
            isTimeSet = YES;
        }

        float elapsedTime = -[sTime timeIntervalSinceNow];

        if (isRecording && !isAmrap) {
            if (elapsedTime < 3600) {
                [dateFormatter setDateFormat:@"mm:ss.SS"];
            } else {
                [dateFormatter setDateFormat:@"HH:mm:ss.SS"];
                timeLabel.font = [UIFont fontWithName:@"HelveticaNeue-Bold" size:52.0f];
            }
            [dateFormatter setTimeZone:[NSTimeZone timeZoneForSecondsFromGMT:0.0]];
            NSString *timeString=[dateFormatter stringFromDate:timerDate];
            timeLabel.text = timeString;
        } else if (isRecording && isAmrap) {
            NSTimeInterval iv = [futureDate timeIntervalSinceNow];
            int h = iv / 3600;
            int m = (iv - h * 3600) / 60;
            int s = iv - h * 3600 - m * 60;
            if (h <= 0)
                timeLabel.text = [NSString stringWithFormat:@"%02d:%02d", m, s];
            else
                timeLabel.text = [NSString stringWithFormat:@"%02d:%02d:%02d", h, m, s];
            if (h + m + s <= 0 && isFirstRun) {
                [buzzerPlayer play];
                [self stopRecording:nil];
            }
            isFirstRun = YES;
        }
        // Redraw the overlay for this frame; GPUImageUIElement pulls a fresh
        // framebuffer from the shared cache on each -update.
        [weakUIElementInput update];
    }];
#pragma clang diagnostic pop

    NSDate *currentDate = [NSDate date];
    NSDateFormatter *dFormatter = [[NSDateFormatter alloc] init];
    [dFormatter setDateFormat:@"mmddyy"];
    [dFormatter setTimeZone:[NSTimeZone timeZoneForSecondsFromGMT:0.0]];
    fileNameString=[dFormatter stringFromDate:currentDate];

    pathToMovie = [NSHomeDirectory()
                   stringByAppendingPathComponent:[NSString stringWithFormat:@"Documents/%@.mov", fileNameString]];

    //NSLog(@"path = %@", pathToMovie);

    unlink([pathToMovie UTF8String]); // If a file already exists, AVAssetWriter won't let you record new frames, so delete the old movie
    movieURL = [NSURL fileURLWithPath:pathToMovie];

    NSDictionary* settings = [NSDictionary dictionaryWithObjectsAndKeys:
                              AVVideoCodecH264,AVVideoCodecKey,
                              [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:2000000],AVVideoAverageBitRateKey,
                               AVVideoProfileLevelH264High40,AVVideoProfileLevelKey,
                               [NSNumber numberWithInt:30],AVVideoMaxKeyFrameIntervalKey,nil],
                              AVVideoCompressionPropertiesKey,
                              [NSNumber numberWithFloat:presetW], AVVideoWidthKey,
                              [NSNumber numberWithFloat:presetH], AVVideoHeightKey, nil];

    //movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(720.0, 1280.0)];
    movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL
                                                           size:CGSizeMake(presetW, presetH)
                                                       fileType:AVFileTypeQuickTimeMovie
                                                 outputSettings:settings];

    movieWriter.encodingLiveVideo = YES;
    [blendFilter addTarget:movieWriter];
    videoCamera.audioEncodingTarget = movieWriter;

    [videoCamera startCameraCapture];
}
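As a first isolation step (the same test the first comment below suggests), it may help to route the blend output only to the on-screen view and leave the writer out entirely; if the assertion stops firing, the two-target fan-out from blendFilter is the trigger. A debugging sketch against the code above, not a fix:

    [blendFilter addTarget:filterView];
    // [blendFilter addTarget:movieWriter];            // temporarily disabled for the test
    // videoCamera.audioEncodingTarget = movieWriter;  // temporarily disabled for the test
    [videoCamera startCameraCapture];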
Comments:

- Does the crash still happen when you comment out the movieWriter section (i.e., when the output only goes to the GPUImageView)? Your blendFilter has two targets, and the framebuffer it provides may be getting unlocked by the first target.
- By the way, did anyone ever find a solution?
- No, @EJoshuaS. Here is what Brad Larson, the author of GPUImage, said about the issue: "The core problem is an issue with how I constructed the framebuffer reuse cache, and even if I work on it, it could take a while. I'm sorry I can't tell you anything better than that." I abandoned the project, hoping the Swift rewrite would be more stable.
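If the two-target fan-out is indeed the trigger, one workaround sometimes tried with GPUImage (an untested assumption here, not something confirmed in this thread) is to give each consumer its own upstream by inserting plain passthrough filters, so the view and the movie writer never unlock the same framebuffer:

    // The base GPUImageFilter uses a passthrough shader, so these branches are cheap.
    GPUImageFilter *viewBranch = [[GPUImageFilter alloc] init];
    GPUImageFilter *writerBranch = [[GPUImageFilter alloc] init];
    [blendFilter addTarget:viewBranch];
    [blendFilter addTarget:writerBranch];
    [viewBranch addTarget:filterView];
    [writerBranch addTarget:movieWriter];

Whether this sidesteps the framebuffer-cache bug Brad Larson describes above is not something this thread establishes.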