Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/ios/106.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Ios 获取EXC#U断点错误,原因是';我不知道如何将CMSampleBuffer输入captureOutput函数_Ios_Xcode_Image_Buffer_Swift4 - Fatal编程技术网

Ios 获取EXC#U断点错误,原因是';我不知道如何将CMSampleBuffer输入captureOutput函数

Ios 获取EXC#U断点错误,原因是';我不知道如何将CMSampleBuffer输入captureOutput函数,ios,xcode,image,buffer,swift4,Ios,Xcode,Image,Buffer,Swift4,我已经看过了教程,现在我不断遇到EXC_断点错误,这是我的ViewController.swift(我的Main.storyboard是空的): 有人能告诉我如何给captureOutput函数加上feed以避免错误并使currentFrame变量充满image吗?我不明白CMSampleBuffer的工作方式,有人能解释一下吗 // // ViewController.swift // LiveDetector // // Created by 周昱先 on 2017/9/18. //

我已经看过了教程,现在我不断遇到EXC_断点错误,这是我的ViewController.swift(我的Main.storyboard是空的):

有人能告诉我如何给captureOutput函数加上feed以避免错误并使currentFrame变量充满image吗?我不明白CMSampleBuffer的工作方式,有人能解释一下吗

//
//  ViewController.swift
//  LiveDetector
//
//  Created by 周昱先 on 2017/9/18.
//  Copyright © 2017年 周昱先. All rights reserved.
//

import UIKit
import AVFoundation


class ViewController: UIViewController {

    var previewView : UIView!
    var boxView:UIView!

    //Camera Capture requiered properties
    var videoDataOutput: AVCaptureVideoDataOutput!
    var videoDataOutputQueue: DispatchQueue!
    var previewLayer:AVCaptureVideoPreviewLayer!
    var captureDevice : AVCaptureDevice!
    let session = AVCaptureSession()
    var currentFrame: CIImage!
    var done = false

    override func viewDidLoad() {
        super.viewDidLoad()
        previewView = UIView(frame: CGRect(x: 0, y: 0, width:  UIScreen.main.bounds.size.width, height: UIScreen.main.bounds.size.height))
        previewView.contentMode = UIViewContentMode.scaleAspectFit
        view.addSubview(previewView)

        //Add a box view
        //boxView = UIView(frame: CGRect(x: 0, y: 0, width: 300, height: 600))
        //boxView.backgroundColor = UIColor.green
        //boxView.alpha = 0.3
        //view.addSubview(boxView)

        self.setupAVCapture()
        let img1 = captureOutput(videoDataOutput, didOutputSampleBuffer: videoDataOutputQueue as! CMSampleBuffer!, from: videoDataOutput.connection(with: AVMediaType.video))
        //var img1:UIImage = convert(cmage: self.currentFrame)

    }

    override func viewWillAppear(_ animated: Bool) {
        if !done {
            session.startRunning()
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    override var shouldAutorotate: Bool {
        if (UIDevice.current.orientation == UIDeviceOrientation.landscapeLeft ||
            UIDevice.current.orientation == UIDeviceOrientation.landscapeRight ||
            UIDevice.current.orientation == UIDeviceOrientation.unknown) {
            return false
        }
        else {
            return true
        }
    }

}



// AVCaptureVideoDataOutputSampleBufferDelegate protocol and related methods
extension ViewController:  AVCaptureVideoDataOutputSampleBufferDelegate{
    func setupAVCapture(){
        session.sessionPreset = AVCaptureSession.Preset.vga640x480
        guard let device = AVCaptureDevice
            .default(.builtInWideAngleCamera,
                     for: AVMediaType.video,
                           position: .back) else{
                            return
        }
        captureDevice = device
        beginSession()
        done = true
    }

    func beginSession(){
        var err : NSError? = nil
        var deviceInput:AVCaptureDeviceInput?
        do {
            deviceInput = try AVCaptureDeviceInput(device: captureDevice)
        } catch let error as NSError {
            err = error
            deviceInput = nil
        }
        if err != nil {
            print("error: \(err?.localizedDescription)")
        }
        if self.session.canAddInput(deviceInput!){
            self.session.addInput(deviceInput!)
        }

        videoDataOutput = AVCaptureVideoDataOutput()
        videoDataOutput.alwaysDiscardsLateVideoFrames=true
        videoDataOutputQueue = DispatchQueue(label: "VideoDataOutputQueue")
        videoDataOutput.setSampleBufferDelegate(self, queue:self.videoDataOutputQueue)
        if session.canAddOutput(self.videoDataOutput){
            session.addOutput(self.videoDataOutput)
        }
        videoDataOutput.connection(with: AVMediaType.video)?.isEnabled = true

        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
        self.previewLayer.videoGravity = AVLayerVideoGravity.resizeAspect

        let rootLayer: CALayer = self.previewView.layer
        rootLayer.masksToBounds = true
        self.previewLayer.frame = rootLayer.bounds
        rootLayer.addSublayer(self.previewLayer)
        session.startRunning()
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        currentFrame =   self.convertImageFromCMSampleBufferRef(sampleBuffer)
        session.stopRunning()
    }


    // clean up AVCapture
    func stopCamera(){
        session.stopRunning()
        done = false
    }

    func convertImageFromCMSampleBufferRef(_ sampleBuffer:CMSampleBuffer) -> CIImage{
        let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
        let ciImage:CIImage = CIImage(cvImageBuffer: pixelBuffer)
        return ciImage
    }
    func convert(cmage:CIImage) -> UIImage
    {

        let context:CIContext = CIContext.init(options: nil)
        let cgImage:CGImage = context.createCGImage(cmage, from: cmage.extent)!
        let image:UIImage = UIImage.init(cgImage: cgImage)
        return image
    }
    /*
    @IBAction func sendToServer(_, sender:AnyObject){
        let image:UIImage = convert(cmage: self.currentFrame)
        let compressionRate:CGFloat = 1
        let img = UIImageJPEGRepresentation(image, compressionRate)
    }
    */


}