在iOS10中使用AVCapturePhotoOutput-NSGenericeException

在iOS10中使用AVCapturePhotoOutput-NSGenericeException,ios,nsexception,avcapture,Ios,Nsexception,Avcapture,我目前正试图弄清楚如何使用iOS 10的AVCapturePhotoOutput方法,但在这样做时遇到了麻烦。我觉得我即将把它做好,但仍然收到一个错误: 由于未捕获的异常“NSGenericeException”而终止应用程序,原因:“-[AVCapturePhotoOutput capturePhotoWithSettings:delegate:]没有活动和启用的视频连接” 我已尝试将这行代码放入AVCapturePhotoCaptureDelegate或didPressTakePhoto函数

我目前正试图弄清楚如何使用iOS 10的AVCapturePhotoOutput方法,但在这样做时遇到了麻烦。我觉得我即将把它做好,但仍然收到一个错误:

由于未捕获的异常“NSGenericeException”而终止应用程序,原因:“-[AVCapturePhotoOutput capturePhotoWithSettings:delegate:]没有活动和启用的视频连接”

我已尝试将这行代码放入AVCapturePhotoCaptureDelegate或didPressTakePhoto函数中:

if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
     videoConnection.videoOrientation = AVCaptureVideoOrientation.portrait;
     ...
}
以下是我目前掌握的代码:

import AVFoundation
import UIKit

class Camera: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate, AVCapturePhotoCaptureDelegate {

    @IBOutlet weak var cameraView: UIView!
    @IBOutlet weak var imageView: UIImageView!

    var captureSession : AVCaptureSession?
    var stillImageOutput : AVCapturePhotoOutput?
    var stillImageOutputSettings : AVCapturePhotoSettings?
    var previewLayer : AVCaptureVideoPreviewLayer?

    var didTakePhoto = Bool();

    override func viewDidLoad() {
        super.viewDidLoad()

        // Do any additional setup after loading the view.
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated);

        previewLayer?.frame = cameraView.bounds;
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated);

        captureSession = AVCaptureSession();
        captureSession?.sessionPreset = AVCaptureSessionPreset1920x1080;

        stillImageOutput = AVCapturePhotoOutput();

        let backCamera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo);

        do {
            let input = try AVCaptureDeviceInput(device: backCamera)

            if (captureSession?.canAddInput(input))! {
                captureSession?.addInput(input);

                if (captureSession?.canAddOutput(stillImageOutput))! {
                    captureSession?.canAddOutput(stillImageOutput);

                    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
                    previewLayer?.videoGravity = AVLayerVideoGravityResizeAspect;
                    previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portrait;
                    cameraView.layer.addSublayer(previewLayer!);
                    captureSession?.startRunning();
                }
            }
        } catch {
            print(error);
        }
    }

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        if let error = error {
            print(error.localizedDescription);
        }

        if let sampleBuffer = photoSampleBuffer, let previewBuffer = previewPhotoSampleBuffer, let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
            print(UIImage(data: dataImage)?.size as Any);

            let dataProvider = CGDataProvider(data: dataImage as CFData);
            let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent);
            let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right);

            self.imageView.image = image;
            self.imageView.isHidden = false;
        }
    }

    func didPressTakePhoto() {
            stillImageOutputSettings = AVCapturePhotoSettings();

            let previewPixelType = stillImageOutputSettings?.availablePreviewPhotoPixelFormatTypes.first!;
            let previewFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                                 kCVPixelBufferWidthKey as String: 160,
                                 kCVPixelBufferHeightKey as String: 160];
            stillImageOutputSettings?.previewPhotoFormat = previewFormat;

            stillImageOutput.capturePhoto(with: stillImageOutputSettings!, delegate: self);
    }

    func didPressTakeAnother() {
        if (didTakePhoto == true) {
            imageView.isHidden = true;
            didTakePhoto = false;
        } else {
            captureSession?.startRunning();
            didTakePhoto = true;
            didPressTakePhoto();
        }
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        didPressTakeAnother();
    }
}
有什么建议吗


提前谢谢

对于其他任何可能想弄明白这一点的人,以下资源帮助了我:

供参考和代码布局

新iOS10摄像头功能的实际实现和使用


对于其他任何可能试图解决这个问题的人,以下资源帮助了我:

供参考和代码布局

新iOS10摄像头功能的实际实现和使用

将AVCaptureSessionPreset1920x1080更改为AVCaptureSessionPresetHigh

尝试将AVCaptureSessionPreset1920x1080更改为AVCaptureSessionPresetHigh

试试看

代码错误

应该是哪一个

if (captureSession?.canAddOutput(stillImageOutput))!{
      captureSession?.addOutput(stillImageOutput)
}
代码错误

应该是哪一个

if (captureSession?.canAddOutput(stillImageOutput))!{
      captureSession?.addOutput(stillImageOutput)
}

你在模拟器上运行这个吗?不,在我的iPhone 6S+上运行它吗?你在模拟器上运行这个吗?不,在我的iPhone 6S上运行它+