iPhone: How can I get autofocus to work in a second AVCaptureSession without recreating the sessions?

When I create a second AVCaptureSession, autofocus stops working in the first one. The session created second is the one in which autofocus works; the session created first does not autofocus.

I would expect either session to autofocus when it is started after the other has been stopped, just as auto white balance and auto exposure work in both sessions. If you watch the log window while running the sample code below, you can see the key-value observing messages coming through; however, the focus-change messages never appear while the top session is running.
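
For context, this is the kind of focus configuration I would expect to be able to re-apply to the shared AVCaptureDevice when one session takes over. The sketch below only illustrates the API and is not part of the sample project: reassertContinuousAutofocus is a hypothetical helper name, and it assumes the device supports AVCaptureFocusModeContinuousAutoFocus.

- (void)reassertContinuousAutofocus
{
    // Hypothetical helper (not in the sample project below): lock the shared
    // device for configuration and re-apply continuous autofocus.
    NSError *error = nil;

    if ([_device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]
        && [_device lockForConfiguration:&error]) {
        [_device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
        [_device unlockForConfiguration];
    }
    else if (error) {
        NSLog(@"Could not lock the device for configuration: %@", [error localizedDescription]);
    }
}

If something like this turned out to be the answer, the natural call site would be at the end of switchSessions, right after startRunning on whichever session is taking over.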

Side note: unfortunately, there is a bug in a third-party library I'm using that prevents me from fully recreating the sessions when switching between them (it leaks its AVCaptureSessions, which eventually gets the app killed). The full story is that this library creates a capture session for me and exposes a public API to start and stop that session, and I want to create another session of my own. The code below demonstrates the problem without using the third-party library.

I created a test app with the code listed below, plus a XIB file that has two views, one above the other, and a button wired to the switchSessions method that demonstrates the problem.

This may be related to the issue described here, although that question doesn't mention two capture sessions.

Header file:

#import <UIKit/UIKit.h>

@class AVCaptureSession;
@class AVCaptureStillImageOutput;
@class AVCaptureVideoPreviewLayer;
@class AVCaptureDevice;
@class AVCaptureDeviceInput;

@interface AVCaptureSessionFocusBugViewController : UIViewController {

    IBOutlet UIView *_topView;
    IBOutlet UIView *_bottomView;

    AVCaptureDevice *_device;

    AVCaptureSession *_topSession;

    AVCaptureStillImageOutput *_outputTopSession;
    AVCaptureVideoPreviewLayer *_previewLayerTopSession;
    AVCaptureDeviceInput *_inputTopSession;

    AVCaptureSession *_bottomSession;

    AVCaptureStillImageOutput *_outputBottomSession;
    AVCaptureVideoPreviewLayer *_previewLayerBottomSession;
    AVCaptureDeviceInput *_inputBottomSession;
}

- (IBAction)switchSessions:(id)sender;

@end
Implementation file:

#import "AVCaptureSessionFocusBugViewController.h"
#import <AVFoundation/AVFoundation.h>

@interface AVCaptureSessionFocusBugViewController ()

- (void)setupCaptureSession:(AVCaptureSession **)session
                     output:(AVCaptureStillImageOutput **)output
               previewLayer:(AVCaptureVideoPreviewLayer **)previewLayer
                      input:(AVCaptureDeviceInput **)input
                       view:(UIView *)view;

- (void)tearDownSession:(AVCaptureSession **)session
                 output:(AVCaptureStillImageOutput **)output
           previewLayer:(AVCaptureVideoPreviewLayer **)previewLayer
                  input:(AVCaptureDeviceInput **)input
                   view:(UIView *)view;

@end

@implementation AVCaptureSessionFocusBugViewController

- (IBAction)switchSessions:(id)sender
{
    if ([_topSession isRunning]) {
        [_topSession stopRunning];
        [_bottomSession startRunning];
        NSLog(@"Bottom session now running.");
    }
    else {
        [_bottomSession stopRunning];
        [_topSession startRunning];
        NSLog(@"Top session now running.");
    }
}

- (void)observeValueForKeyPath:(NSString *)keyPath 
                      ofObject:(id)object 
                        change:(NSDictionary *)change 
                       context:(void *)context
{
    NSLog(@"Observed value for key at key path %@.", keyPath);
    // Enable to confirm that the focusMode is set correctly.
    //NSLog(@"Autofocus for the device is set to %d.", [_device focusMode]);
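    // Since NSKeyValueObservingOptionNew is passed when the observers are
    // registered, the new value is also available in the change dictionary,
    // e.g. [[change objectForKey:NSKeyValueChangeNewKey] boolValue].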
}

- (void)viewDidLoad {
    [super viewDidLoad];

    _device = [[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] retain];

    [self setupCaptureSession:&_topSession 
                       output:&_outputTopSession
                 previewLayer:&_previewLayerTopSession
                        input:&_inputTopSession
                         view:_topView];

    [self setupCaptureSession:&_bottomSession 
                       output:&_outputBottomSession
                 previewLayer:&_previewLayerBottomSession
                        input:&_inputBottomSession
                         view:_bottomView];

    // NB: We only need to observe one device, since the top and bottom sessions use the same device.
    [_device addObserver:self forKeyPath:@"adjustingFocus" options:NSKeyValueObservingOptionNew context:nil];
    [_device addObserver:self forKeyPath:@"adjustingExposure" options:NSKeyValueObservingOptionNew context:nil];
    [_device addObserver:self forKeyPath:@"adjustingWhiteBalance" options:NSKeyValueObservingOptionNew context:nil];

    [_topSession startRunning];
    NSLog(@"Starting top session.");
}


- (void)setupCaptureSession:(AVCaptureSession **)session
                     output:(AVCaptureStillImageOutput **)output
               previewLayer:(AVCaptureVideoPreviewLayer **)previewLayer
                      input:(AVCaptureDeviceInput **)input
                       view:(UIView *)view
{    
    *session = [[AVCaptureSession alloc] init];

    // Create the preview layer.
    *previewLayer = [[AVCaptureVideoPreviewLayer layerWithSession:*session] retain];

    [*previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];

    [*previewLayer setFrame:[view bounds]];

    [[view layer] addSublayer:*previewLayer];

    // Configure the inputs and outputs.
    [*session setSessionPreset:AVCaptureSessionPresetMedium];

    NSError *error = nil;

    *input = [[AVCaptureDeviceInput deviceInputWithDevice:_device error:&error] retain];

    if (!*input) {
        NSLog(@"Error creating input device:%@", [error localizedDescription]);
        return;
    }

    [*session addInput:*input];

    *output = [[AVCaptureStillImageOutput alloc] init];

    [*session addOutput:*output];

    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];

    [*output setOutputSettings:outputSettings];

    [outputSettings release];
}

- (void)viewDidUnload {
    [_topView release];
    _topView = nil;

    [_bottomView release];
    _bottomView = nil;

    // Remove the KVO observers before releasing the device.
    [_device removeObserver:self forKeyPath:@"adjustingFocus"];
    [_device removeObserver:self forKeyPath:@"adjustingExposure"];
    [_device removeObserver:self forKeyPath:@"adjustingWhiteBalance"];

    [_device release];
    _device = nil;

    [self tearDownSession:&_topSession
                   output:&_outputTopSession
             previewLayer:&_previewLayerTopSession
                    input:&_inputTopSession
                     view:_topView];

    [self tearDownSession:&_bottomSession
                   output:&_outputBottomSession
             previewLayer:&_previewLayerBottomSession
                    input:&_inputBottomSession
                     view:_bottomView];
}

- (void)tearDownSession:(AVCaptureSession **)session
                 output:(AVCaptureStillImageOutput **)output
           previewLayer:(AVCaptureVideoPreviewLayer **)previewLayer
                  input:(AVCaptureDeviceInput **)input
                   view:(UIView *)view
{
    if ([*session isRunning]) {
        [*session stopRunning];
    }

    [*session removeOutput:*output];

    [*output release];
    *output = nil;

    [*session removeInput:*input];

    [*input release];
    *input = nil;

    [*previewLayer removeFromSuperlayer];

    [*previewLayer release];
    *previewLayer = nil;

    [*session release];
    *session = nil;
}

@end