iOS AVCaptureSession is really slow

I can't understand why my camera app is so slow at taking pictures with the AVFoundation framework.

Each capture takes about 2-3 seconds. I think it is a memory issue.

I am using Swift 1.1.

The code I am using is:

    import UIKit
    import AVFoundation

    class ViewController: UIViewController {
    let captureSession = AVCaptureSession()
    var previewLayer : AVCaptureVideoPreviewLayer?
    var captureDevice : AVCaptureDevice?
    var captureConnection: AVCaptureConnection?
    var stillImageOutput = AVCaptureStillImageOutput()
    let targetRegion = CALayer()
    var currentImage: UIImage?

    @IBOutlet weak var cameraView: UIImageView!
    @IBOutlet weak var imageDisplayed: UIImageView!


    override func viewDidLoad() {
        super.viewDidLoad()

        navigationController?.setNavigationBarHidden(true, animated: true)

        // Ask for the 1920x1080 preset and pick the back camera as the capture device.
        captureSession.sessionPreset = AVCaptureSessionPreset1920x1080
        let devices = AVCaptureDevice.devices()
        for device in devices {
            if device.hasMediaType(AVMediaTypeVideo) {
                if device.position == AVCaptureDevicePosition.Back {
                    captureDevice = device as? AVCaptureDevice
                }
            }
        }
        if captureDevice != nil {
            println("Device found")
            beginSession()
        }


    }


    // Attach the camera input, add the live preview layer and start the session.
    func beginSession() {

        var err: NSError? = nil
        captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))

        if err != nil {
            println("err \(err?.localizedDescription)")
            return
        }

        // Show the live preview inside cameraView's frame.
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.view.layer.addSublayer(previewLayer)

        previewLayer?.frame = cameraView.frame

        captureSetup()
        captureSession.startRunning()
    }

    // Configure a JPEG still-image output and remember the video connection that feeds it.
    func captureSetup() {

        let outputSetting = NSDictionary(dictionary: [AVVideoCodecKey: AVVideoCodecJPEG])
        self.stillImageOutput.outputSettings = outputSetting
        self.captureSession.addOutput(stillImageOutput)

        for connection: AVCaptureConnection in self.stillImageOutput.connections as [AVCaptureConnection] {

            for port:AVCaptureInputPort in connection.inputPorts! as [AVCaptureInputPort] {
                if port.mediaType == AVMediaTypeVideo {
                    captureConnection = connection as AVCaptureConnection
                    break
                }
            }
            if captureConnection != nil {
                break
            }
        }
    }


    // Counter used to number the saved files.
    var i = 0

    // Grab a still frame from the capture connection and save it as a PNG.
    func captureScene() {

        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), {
            if self.captureConnection != nil {
                self.stillImageOutput.captureStillImageAsynchronouslyFromConnection(self.captureConnection, completionHandler:{ (imageSampleBuffer:CMSampleBuffer!, _) -> Void in

                    // Turn the sample buffer into JPEG data, then into a UIImage.
                    let imageDataJpeg = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageSampleBuffer)
                    let pickedImage: UIImage = UIImage(data: imageDataJpeg)!

                    // Re-encode the image as PNG and write it to the Documents directory.
                    if let data = UIImagePNGRepresentation(pickedImage) {
                        let filename = self.getDocumentsDirectory().stringByAppendingPathComponent("\(self.i).png")
                        data.writeToFile(filename, atomically: true)
                        self.i++
                    }


                })
            }
        })

    }



    // Path of the app's Documents directory, where the captured images are saved.
    func getDocumentsDirectory() -> NSString {
        let paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
        let documentsDirectory: AnyObject = paths[0]


        return documentsDirectory as NSString
    }




    // Any touch on the screen triggers a capture.
    override func touchesBegan(touches: NSSet, withEvent event: UIEvent) {
        captureScene()
    }

}

Use background calls for the AVCapture-related methods, and the main thread when the UI changes. Can you provide an answer?
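
To make that split concrete, here is a minimal sketch using the same Swift 1.x-era GCD and AVFoundation APIs as the code above. The dedicated serial queue (sessionQueue) and the imageDisplayed.image update are illustrative assumptions added for the example, not part of the original question; error handling and the connection discovery from captureSetup() are omitted for brevity:

    // Assumed: a dedicated serial queue for all AVCaptureSession work
    // (passing nil for the attributes creates a serial queue).
    let sessionQueue = dispatch_queue_create("sessionQueue", nil)

    func beginSession() {
        // Session configuration and startRunning() can block, so keep them off the main thread.
        dispatch_async(sessionQueue, {
            var err: NSError? = nil
            self.captureSession.addInput(AVCaptureDeviceInput(device: self.captureDevice, error: &err))
            self.captureSession.addOutput(self.stillImageOutput)
            self.captureSession.startRunning()

            // Anything that touches the view hierarchy goes back to the main queue.
            dispatch_async(dispatch_get_main_queue(), {
                self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
                self.previewLayer?.frame = self.cameraView.frame
                self.view.layer.addSublayer(self.previewLayer)
            })
        })
    }

    func captureScene() {
        // The AVCapture-related call stays on the background queue...
        dispatch_async(sessionQueue, {
            // captureConnection is assumed to have been found elsewhere, as in captureSetup().
            if self.captureConnection == nil { return }

            self.stillImageOutput.captureStillImageAsynchronouslyFromConnection(self.captureConnection, completionHandler: { (imageSampleBuffer: CMSampleBuffer!, _) -> Void in
                let imageDataJpeg = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageSampleBuffer)
                let pickedImage = UIImage(data: imageDataJpeg)

                // ...and only the UI update hops back to the main queue.
                dispatch_async(dispatch_get_main_queue(), {
                    self.imageDisplayed.image = pickedImage
                })
            })
        })
    }

Using one serial queue for the session also keeps configuration, startRunning() and the capture calls from racing each other, which the global concurrent queue used in the original captureScene() does not guarantee.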