
iOS: crash in an AVCapture-based viewController

Tags: ios, opengl-es, cadisplaylink, wait-fences

I am developing an application that does OpenGL video capture inside a UIViewController.

On the first initialization of that controller, I get the following message in the console:

wait_fences: failed to receive reply: 10004003
The message appears right after the viewController has been initialized.

After that, if I switch to another controller and then launch this controller again, the ViewController crashes with EXC_BAD_ACCESS on roughly 2 out of 4 launches.

The asm output is not much help:

0x32f56a34:  bne    0x32f56ad4               ; memmove$VARIANT$CortexA9 + 276
0x32f56a38:  subs   r2, r2, #60
0x32f56a3c:  blo    0x32f56a84               ; memmove$VARIANT$CortexA9 + 196
0x32f56a40:  tst    r0, #28
0x32f56a44:  beq    0x32f56a5c               ; memmove$VARIANT$CortexA9 + 156
0x32f56a48:  ldr    r3, [r1], #4
0x32f56a4c:  subs   r2, r2, #4
0x32f56a50:  str    r3, [r0], #4
0x32f56a54:  bhs    0x32f56a40               ; memmove$VARIANT$CortexA9 + 128
0x32f56a58:  b      0x32f56a84               ; memmove$VARIANT$CortexA9 + 196
0x32f56a5c:  push   {r5, r6, r8, r10}
0x32f56a60:  ldm    r1!, {r3, r4, r5, r6, r8, r9, r10, r12}
0x32f56a64:  subs   r2, r2, #64
Here are the relevant parts of the source code:

- (id)init
{

    self = [super init];

    if (self)
    {

        ADLog(@"AR_ogl_ViewController init");

        animating = FALSE;

        EAGLContext *aContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];

        if (!aContext)
        {
            aContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
        }

        if (!aContext)
        {
            ADLog(@"Failed to create ES context");
        }
        else if (![EAGLContext setCurrentContext:aContext])
        {   
            ADLog(@"Failed to set ES context current");
        }

        self.context = aContext;

        ADSafeRelease(aContext);


        [(AR_EAGLView *)self.view setContext:context];
        [(AR_EAGLView *)self.view setFramebuffer];

        [self createVBO];

        if ([context API] == kEAGLRenderingAPIOpenGLES2)
            [self loadShaders];

        animating               = FALSE;
        displayLinkSupported    = FALSE;
        animationFrameInterval  = 1;
        displayLink             = nil;
        animationTimer          = nil;

        // Use of CADisplayLink requires iOS version 3.1 or greater.
        // The NSTimer object is used as fallback when it isn't available.
        /*  NSString *reqSysVer = @"3.1";

        NSString *currSysVer = [[UIDevice currentDevice] systemVersion];

        if ([currSysVer compare:reqSysVer options:NSNumericSearch] != NSOrderedAscending)*/

        displayLinkSupported = TRUE;

        // start capture
        [self setupCaptureSession];

        // initialize
        frameData = 0;
        pTexData = (GLuint*)malloc(TEX_SIZE*TEX_SIZE*sizeof(GLuint));

        incrementNeedsVideoFrame();

        float x = 0.0f;
        float y = 0.0f;
        float z = 1.0f;
        float angle = 90.0 * PI_OVER_180;

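        // Axis-angle (Rodrigues) rotation matrix, built here for a
        // 90-degree rotation about the Z axis (0, 0, 1).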
        afIdentity[0]  = 1+(1-cos(angle))*(x*x-1);
        afIdentity[1]  = -z*sin(angle)+(1-cos(angle))*x*y;
        afIdentity[2]  = y*sin(angle)+(1-cos(angle))*x*z;
        afIdentity[3]  = 0;
        afIdentity[4]  = z*sin(angle)+(1-cos(angle))*x*y;
        afIdentity[5]  = 1+(1-cos(angle))*(y*y-1);
        afIdentity[6]  = -x*sin(angle)+(1-cos(angle))*y*z;
        afIdentity[7]  = 0;
        afIdentity[8]  = -y*sin(angle)+(1-cos(angle))*x*z;
        afIdentity[9]  = x*sin(angle)+(1-cos(angle))*y*z;
        afIdentity[10] = 1+(1-cos(angle))*(z*z-1);
        afIdentity[11] = 0;
        afIdentity[12] = 0;
        afIdentity[13] = 0;
        afIdentity[14] = 0;
        afIdentity[15] = 1;

        y = 1.0f;
        z = 0.0f;
        angle = 180.0 * PI_OVER_180;

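        // Same construction, now for a 180-degree rotation about the
        // Y axis (0, 1, 0); x is still 0 from above.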
        afIdentity2[0]  = 1+(1-cos(angle))*(x*x-1);
        afIdentity2[1]  = -z*sin(angle)+(1-cos(angle))*x*y;
        afIdentity2[2]  = y*sin(angle)+(1-cos(angle))*x*z;
        afIdentity2[3]  = 0;
        afIdentity2[4]  = z*sin(angle)+(1-cos(angle))*x*y;
        afIdentity2[5]  = 1+(1-cos(angle))*(y*y-1);
        afIdentity2[6]  = -x*sin(angle)+(1-cos(angle))*y*z;
        afIdentity2[7]  = 0;
        afIdentity2[8]  = -y*sin(angle)+(1-cos(angle))*x*z;
        afIdentity2[9]  = x*sin(angle)+(1-cos(angle))*y*z;
        afIdentity2[10] = 1+(1-cos(angle))*(z*z-1);
        afIdentity2[11] = 0;
        afIdentity2[12] = 0;
        afIdentity2[13] = 0;
        afIdentity2[14] = 0;
        afIdentity2[15] = 1;

    }

    ADLog(@"AR_ogl_ViewController init end");


    return self;

}

- (void)dealloc
{
    ADLog(@"dealloc");
    ADSafeRelease(session);

    //ADSafeRelease(input);
    //ADSafeRelease(output);

    if (program)
    {
        glDeleteProgram(program);
        program = 0;
    }

    if (m_ui32Vbo)
    {
        glDeleteBuffers(1, &m_ui32Vbo);
        m_ui32Vbo = 0;
    }

    if (frameData)
    {
        free(frameData);
        frameData = nil;
    }

    if (pTexData)
    {
        free(pTexData);
        pTexData = nil;
    }

    // Tear down context.
    if ([EAGLContext currentContext] == context)
        [EAGLContext setCurrentContext:nil];

    [context release];
    context = nil;

    [super dealloc];

}

- (void)viewDidAppear:(BOOL)animated
{
    ADLog(@"viewDidAppear");
    [super viewDidAppear:animated];
    [self startAnimation];

}

- (void)viewDidDisappear:(BOOL)animated
{
    [super viewDidDisappear:animated];
    [self stopAnimation];

}

- (void)viewDidUnload
{
    [super viewDidUnload];

    if (program)
    {
        glDeleteProgram(program);
        program = 0;
    }

    // Tear down context.
    if ([EAGLContext currentContext] == context)
        [EAGLContext setCurrentContext:nil];
    self.context = nil;
}



- (void)startAnimation
{

    ADLog(@"startAnimation");

    if (displayLink)
    {
        [displayLink invalidate];
        displayLink = nil;
    }

    if (animating == FALSE && displayLink == nil)
    {
        ADLog(@"generating displayLink");
        displayLink = [NSClassFromString(@"CADisplayLink") displayLinkWithTarget:self selector:@selector(drawFrame)];
        [displayLink setFrameInterval:animationFrameInterval];
        [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
        animating = TRUE;
    }

    ADLog(@"startAnimation end");

}

- (void)stopAnimation
{
    ADLog(@"stopAnimation");

    if (displayLink)
    {

        [displayLink invalidate];
        displayLink = nil;

        animating   = FALSE;
    }


}


// Create and configure a capture session and start it running
- (void)setupCaptureSession 
{
    NSError *error = nil;

    // Create the session
    session = [[AVCaptureSession alloc] init];

    // Configure the session to produce lower-resolution video frames, if your
    // processing algorithm can cope. We'll specify medium quality for the
    // chosen device.
    // Low   : 192 x 144
    // Medium: 480 x 320
    // High  : 1280 x 720
    session.sessionPreset = AVCaptureSessionPresetMedium;

    // Find a suitable AVCaptureDevice
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Create a device input with the device and add it to the session.
    input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];

    if (!input) 
    {
        // Handle the error appropriately.
    }

    [session addInput:input];

    // Create a VideoDataOutput and add it to the session
    output = [[AVCaptureVideoDataOutput alloc] init];

    [session addOutput:output];

    // Configure your output.
    CaptureQueue = dispatch_queue_create("CaptureQueue", NULL);
    [output setSampleBufferDelegate:self queue:CaptureQueue];
    dispatch_release(CaptureQueue);

    // Specify the pixel format
    output.videoSettings =
        [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                    forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    output.alwaysDiscardsLateVideoFrames = NO;

    //  If you wish to cap the frame rate to a known value, such as 15 fps, set 
    //  minFrameDuration.
    //  [output setVideoMinFrameDuration:CMTimeMake(1, 20)];

    // Start the session running to start the flow of data

    [session startRunning];

}



@end
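The excerpt never shows the sample-buffer callback that runs on CaptureQueue. For context, here is a hypothetical sketch of what such a callback typically looks like (pTexData and TEX_SIZE are the fields allocated in init above; the poster's actual implementation may differ):

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // The pixel buffer is only valid while locked, and only inside this
    // callback, so the frame has to be copied out for the GL thread.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    void   *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t  bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t  height      = CVPixelBufferGetHeight(imageBuffer);
    size_t  srcSize     = bytesPerRow * height;
    size_t  dstSize     = TEX_SIZE * TEX_SIZE * sizeof(GLuint);

    // A memmove into a freed or undersized destination here would crash
    // in memmove$VARIANT$CortexA9, as in the backtrace above.
    if (pTexData)
        memmove(pTexData, baseAddress, srcSize < dstSize ? srcSize : dstSize);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}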

Any suggestions for tracking down this crash?

You're setting up your view too early. Most of the code above should live in viewDidLoad or viewDidAppear:, not in init.

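A minimal sketch of that restructuring, assuming manual reference counting and reusing the names from the question (AR_EAGLView, createVBO, loadShaders, setupCaptureSession); this illustrates the advice rather than being a verified fix:

- (id)init
{
    self = [super init];
    if (self)
    {
        // Only cheap state here. Touching self.view (as the original init
        // does) forces loadView to run before the controller is ready.
        animating = FALSE;
        animationFrameInterval = 1;
    }
    return self;
}

- (void)viewDidLoad
{
    [super viewDidLoad];

    // The view exists now, so it is safe to wire up the GL context.
    EAGLContext *aContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    if (!aContext)
        aContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];

    self.context = aContext;
    [aContext release];

    [EAGLContext setCurrentContext:self.context];
    [(AR_EAGLView *)self.view setContext:self.context];
    [(AR_EAGLView *)self.view setFramebuffer];

    [self createVBO];
    if ([self.context API] == kEAGLRenderingAPIOpenGLES2)
        [self loadShaders];

    [self setupCaptureSession];
}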
Moving most of the initialization code to viewDidLoad removes the wait_fences log, but it does not fix the crash. I now crash (randomly) either in memmove$VARIANT$CortexA9 + 196 or on the capture-queue thread.

I notice you are not using accessors. The most common cause of crashes like this is direct ivar access; switch to accessors. Also, why are you using NSClassFromString(@"CADisplayLink")? The most suspicious call is dispatch_release(CaptureQueue): incorrect naming aside (variables should be camelCase, with no leading capital), you appear to be releasing your own ivar there.

I changed it to displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(drawFrame)]; (the NSClassFromString form was copy/pasted from a project on GitHub) and moved the dispatch_release to dealloc. Same problem.

But you did not switch to accessors. You are not retaining displayLink, and if setupCaptureSession can be called more than once you will run into trouble; again, accessors would prevent that. You also need to post the actual stack trace, not the assembler. And drawFrame is private: why do you believe it is fine to call it, and that it is safe to call at that point?
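
On the retain/accessor point, a minimal sketch of startAnimation and stopAnimation going through a retained property, again assuming manual reference counting. The display link retains its target, so invalidate must be called to break the cycle before releasing:

// In the @interface:
// @property (nonatomic, retain) CADisplayLink *displayLink;

- (void)startAnimation
{
    if (!animating)
    {
        // The accessor releases any previous link and retains the new one.
        self.displayLink = [CADisplayLink displayLinkWithTarget:self
                                                       selector:@selector(drawFrame)];
        [self.displayLink setFrameInterval:animationFrameInterval];
        [self.displayLink addToRunLoop:[NSRunLoop currentRunLoop]
                               forMode:NSDefaultRunLoopMode];
        animating = TRUE;
    }
}

- (void)stopAnimation
{
    if (animating)
    {
        [self.displayLink invalidate];  // removes it from the run loop, breaks the cycle
        self.displayLink = nil;         // releases our retain
        animating = FALSE;
    }
}

The same pattern guards setupCaptureSession: setting a retained session property through its accessor means a second call releases the old session instead of leaking it.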