CIContext drawImage causes EXC_BAD_ACCESS - iOS 6

I'm trying to apply a simple Core Image filter to the live camera input. I think my code is OK, but using the `drawImage:inRect:fromRect:` method in the captureOutput method causes either an `EXC_BAD_ACCESS` or a `[__NSCFNumber drawImage:inRect:fromRect:]: unrecognized selector` error, which makes me think my context has been deallocated by the time I try to call drawImage. That doesn't make sense to me, since my `CIContext` is a class member.

The problem doesn't seem to come from OpenGL, because I tried with a plain context (not created from an `EAGLContext`) and I ran into the same issue.

I'm testing on an iPhone 5 with iOS 6, since the camera doesn't work in the simulator.

Can you help me? Thanks a lot for your time.

Here is my .h file:

<!-- language: objective-c -->

    //  CameraController.h

    #import <UIKit/UIKit.h>
    #import <OpenGLES/EAGL.h>
    #import <AVFoundation/AVFoundation.h>
    #import <GLKit/GLKit.h>
    #import <CoreMedia/CoreMedia.h>
    #import <CoreVideo/CoreVideo.h>
    #import <QuartzCore/QuartzCore.h>
    #import <CoreImage/CoreImage.h>
    #import <ImageIO/ImageIO.h>

    @interface CameraController : GLKViewController <AVCaptureVideoDataOutputSampleBufferDelegate>{

        AVCaptureSession *avCaptureSession;
        CIContext *coreImageContext;
        CIContext *ciTestContext;
        GLuint _renderBuffer;
        EAGLContext *glContext;
    }

    @end

And here is my .m file:

<!-- language: objective-c -->

    //  CameraController.m

    #import "CameraController.h"

    @interface CameraController ()

    @end

    @implementation CameraController

    - (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
    {
        self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
        if (self) {

        }
        return self;
    }

    - (void)viewDidLoad
    {
        [super viewDidLoad];

        // Initialize Open GL ES2 Context
        glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        if (!glContext) {
            NSLog(@"Failed to create ES context");
        }
        [EAGLContext setCurrentContext:nil];

        // Gets the GL View and sets the depth format to 24 bits, and the context of the view to be the Open GL context created above
        GLKView *view = (GLKView *)self.view;
        view.context = glContext;
        view.drawableDepthFormat = GLKViewDrawableDepthFormat24;

        // Creates CI Context from  EAGLContext
        NSMutableDictionary *options = [[NSMutableDictionary alloc] init];
        [options setObject: [NSNull null] forKey: kCIContextWorkingColorSpace];
        coreImageContext = [CIContext contextWithEAGLContext:glContext options:options];

        glGenRenderbuffers(1, &_renderBuffer);
        glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);

        // Initialize Video Capture Device
        NSError *error;
        AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];

        // Initialize Video Output object and set output settings
        AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];

        [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
        [dataOutput setVideoSettings:[NSDictionary  dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                                  forKey:(id)kCVPixelBufferPixelFormatTypeKey]];


        // Delegates the SampleBuffer to the current object which implements the AVCaptureVideoDataOutputSampleBufferDelegate interface via the captureOutput method
        [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

        // Initialize the capture session, add the input and output, and start running
        avCaptureSession = [[AVCaptureSession alloc] init];
        [avCaptureSession beginConfiguration];
        [avCaptureSession setSessionPreset:AVCaptureSessionPreset1280x720];
        [avCaptureSession addInput:input];
        [avCaptureSession addOutput:dataOutput];
        [avCaptureSession commitConfiguration];
        [avCaptureSession startRunning];


    }

    -(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {

        // Creates a CIImage from the sample buffer of the camera frame
        CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
        CIImage *inputImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];

        // Creates the relevant filter
        CIFilter *filter = [CIFilter filterWithName:@"CISepiaTone"];
        [filter setValue:inputImage forKey:kCIInputImageKey];
        [filter setValue:[NSNumber numberWithFloat:0.8f] forKey:@"inputIntensity"]; // note: the key is "inputIntensity", lowercase

        // Creates a reference to the output of the filter
        CIImage *result = [filter valueForKey:kCIOutputImageKey];

        // Draw to the context
        [coreImageContext drawImage:result inRect:[result extent] fromRect:[result extent]];

        [glContext presentRenderbuffer:GL_RENDERBUFFER];
    }

    - (void)didReceiveMemoryWarning
    {
        [super didReceiveMemoryWarning];
        // Dispose of any resources that can be recreated.
    }


    @end

In your viewDidLoad method, you have:

    coreImageContext = [CIContext contextWithEAGLContext:glContext options:options];

If you want to use `coreImageContext` in the captureOutput method, you need to retain it.
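
Under manual reference counting, a minimal sketch of that fix is to take ownership of the autoreleased context at the point where it is created (the question's code doesn't say whether ARC is enabled, so this assumes MRC):

<!-- language: objective-c -->

    // MRC: retain the autoreleased context so it survives past the
    // current autorelease pool drain and is still alive in captureOutput.
    coreImageContext = [[CIContext contextWithEAGLContext:glContext options:options] retain];

(Under ARC you cannot send `retain` explicitly; a strong property or instance variable does the equivalent, as discussed in the comments below.)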

OK thanks, it works now. Is it because captureOutput is an implementation of a protocol?

No, it's because the CIContext object returned by contextWithEAGLContext is an autoreleased object. By the time it reaches the captureOutput method, it has already been released.

Could you provide details on how you retained that object? I'm trying to implement something similar to yours and I'm running into the same problem of the object being released. I tried to retain the object by making the variable a strong, atomic property (in Objective-C with ARC enabled), but the app still throws the same…
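
For reference, here is roughly what the retention approach described in that last comment looks like; a minimal sketch assuming ARC, reusing the `coreImageContext` name from the question (the plain ivar in the @interface would be replaced by this property):

<!-- language: objective-c -->

    // CameraController.m
    @interface CameraController ()
    // A strong property retains the autoreleased CIContext for the
    // lifetime of the controller, so it is still valid in captureOutput.
    @property (strong, atomic) CIContext *coreImageContext;
    @end

    // In viewDidLoad:
    self.coreImageContext = [CIContext contextWithEAGLContext:glContext options:options];

Note that under ARC a plain instance variable is strong by default and would also retain the context, so the crash the last commenter still sees presumably has a different cause; the thread ends before it is resolved.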