Swift runtime error: Multiple AVCaptureInputs are not supported

The camera torch switches on automatically as my code runs, but then the app crashes. It hangs on the launch screen with the torch still on.

The debugger breaks with signal SIGABRT and the green line points to AppDelegate.

I've pasted as little code as I could. If you can see the problem, tell me what else can be trimmed from this question and I'll cut it.

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

let captureSession = AVCaptureSession()
var captureDevice : AVCaptureDevice?
var validFrameCounter: Int = 0

// for sampling from the camera
enum CurrentState {
    case statePaused
    case stateSampling
}
var currentState = CurrentState.statePaused

override func viewDidLoad() {
    super.viewDidLoad()

    captureSession.sessionPreset = AVCaptureSessionPresetHigh

    let devices = AVCaptureDevice.devices()

    // Loop through all the capture devices on this phone
    for device in devices {
        // Make sure this particular device supports video
        if (device.hasMediaType(AVMediaTypeVideo)) {
            // Finally check the position and confirm we've got the back camera
            if(device.position == AVCaptureDevicePosition.Back) {
                captureDevice = device as? AVCaptureDevice
                if captureDevice != nil {
                    //println("Capture device found")
                    beginSession() // fatal error
                }
            }
        }
    }

}

// configure device for camera and focus mode


// start capturing frames
func beginSession() {

    // Create the AVCapture Session

    var err : NSError? = nil
    captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))

    if err != nil {
        println("error: \(err?.localizedDescription)")
    }

    // Automatic Switch ON torch mode
    if  captureDevice!.hasTorch {
        // lock your device for configuration
        captureDevice!.lockForConfiguration(nil)
        // check if your torchMode is on or off. If on turns it off otherwise turns it on
        captureDevice!.torchMode = captureDevice!.torchActive ? AVCaptureTorchMode.Off : AVCaptureTorchMode.On
        // sets the torch intensity to 100%
        captureDevice!.setTorchModeOnWithLevel(1.0, error: nil)
        // unlock your device
        captureDevice!.unlockForConfiguration()
    }

    // Create an AVCaptureInput with the camera device
    if let deviceInput = AVCaptureDeviceInput.deviceInputWithDevice(captureDevice, error: &err) as? AVCaptureInput {
        // Set the input device
        captureSession.addInput(deviceInput)
    }
    else {
        println("error: \(err?.localizedDescription)")
    }

    // Set the output
    var videoOutput : AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()

    // create a queue to run the capture on
    var captureQueue : dispatch_queue_t = dispatch_queue_create("captureQueue", nil)

    // setup ourself up as the capture delegate
    videoOutput.setSampleBufferDelegate(self, queue: captureQueue)

    // configure the pixel format
    videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_32BGRA)]
    // kCVPixelBufferPixelFormatTypeKey is a CFString btw.

    // set the minimum acceptable frame rate to 10 fps
    captureDevice!.activeVideoMinFrameDuration = CMTimeMake(1, 10)

    // and the size of the frames we want - we'll use the smallest frame size available
    captureSession.sessionPreset = AVCaptureSessionPresetLow

    // Add the output
    captureSession.addOutput(videoOutput)


    // Start the session
    captureSession.startRunning()


    func setState(state: CurrentState){
        switch state
        {
        case .statePaused:
            // is this correct code?
            UIApplication.sharedApplication().idleTimerDisabled = false
        case .stateSampling:
            // is this correct code?
            UIApplication.sharedApplication().idleTimerDisabled = true  // singletons

        }
    }

    // sampling from the camera
    currentState = CurrentState.stateSampling


    // stop the app from sleeping
    UIApplication.sharedApplication().idleTimerDisabled = true

    // update our UI on a timer every 0.1 seconds
    NSTimer.scheduledTimerWithTimeInterval(0.1, target: self, selector: Selector("update"), userInfo: nil, repeats: true)

    func stopCameraCapture() {
        captureSession.stopRunning()

    }


    // pragma mark Pause and Resume of detection
    func pause() {
        if currentState == CurrentState.statePaused {
            return
        }

        // switch off the torch
        if captureDevice!.isTorchModeSupported(AVCaptureTorchMode.On) {
            captureDevice!.lockForConfiguration(nil)
            captureDevice!.torchMode = AVCaptureTorchMode.Off
            captureDevice!.unlockForConfiguration()
        }
        currentState = CurrentState.statePaused
        // let the application go to sleep if the phone is idle
        UIApplication.sharedApplication().idleTimerDisabled = false
    }


    func resume() {
        if currentState != CurrentState.statePaused {
            return
        }

        // switch on the torch
        if captureDevice!.isTorchModeSupported(AVCaptureTorchMode.On) {
            captureDevice!.lockForConfiguration(nil)
            captureDevice!.torchMode = AVCaptureTorchMode.On
            captureDevice!.unlockForConfiguration()
        }
        currentState = CurrentState.stateSampling
        // stop the app from sleeping
        UIApplication.sharedApplication().idleTimerDisabled = true
  }

 }
}
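
For reference, a minimal sketch of the same input/output setup with the device input created once and every addInput/addOutput call guarded by canAddInput(_:) / canAddOutput(_:). It uses the same Swift 1.x era AVFoundation API as the code above; setupSession is just a placeholder name for the sketch.

import AVFoundation

// Sketch only: build the session with a single camera input.
// `session` and `device` are assumed to come from the caller.
func setupSession(session: AVCaptureSession, device: AVCaptureDevice) {
    var err: NSError? = nil

    // Create the device input exactly once...
    if let input = AVCaptureDeviceInput.deviceInputWithDevice(device, error: &err) as? AVCaptureDeviceInput {
        // ...and add it only if the session will accept it
        if session.canAddInput(input) {
            session.addInput(input)
        }
    } else {
        println("could not create input: \(err?.localizedDescription)")
    }

    // Apply the same guard to the output
    let videoOutput = AVCaptureVideoDataOutput()
    if session.canAddOutput(videoOutput) {
        session.addOutput(videoOutput)
    }

    session.startRunning()
}

As far as I understand, addInput(_:) raises an exception when the session cannot accept another input, which is what surfaces as the SIGABRT above, so the canAddInput(_:) check keeps a second add from aborting the app.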