iOS custom barcode scanner, tap to focus

Tags: ios, swift, barcode-scanner

Please refer to the following question:

The link above is old and out of date. I tried to use the answers provided there, but they didn't work... Below is my attempt at them.

I need to be able to tap the screen while reading a barcode, so that the camera focuses on the object in view.

Here is my code attempt:

var captureDevice: AVCaptureDevice? //capture device Is this right?

override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    let screenSize = videoPreviewLayer!.bounds.size
    if let touchPoint = touches.first {
        let x = touchPoint.location(in: self.view).y / screenSize.height
        let y = 1.0 - touchPoint.location(in: self.view).x / screenSize.width
        let focusPoint = CGPoint(x: x, y: y)

        if let device = captureDevice {
            do {
                try device.lockForConfiguration()

                device.focusPointOfInterest = focusPoint
                //device.focusMode = .continuousAutoFocus
                device.focusMode = .autoFocus
                //device.focusMode = .locked
                device.exposurePointOfInterest = focusPoint
                device.exposureMode = AVCaptureDevice.ExposureMode.continuousAutoExposure
                device.unlockForConfiguration()
            }
            catch {
                // just ignore
            }
        }
    }
}
This code does not work as written; when I tap, no focusing happens.

Here is the rest of my camera code:

import UIKit
import AVFoundation


class BarcodeScanVC: UIViewController {

    struct GlobalVariable{
        static var senderTags = 0
    }



    var captureSession = AVCaptureSession()
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var qrCodeFrameView: UIView?
    var row = 0
    var senderTag = 0

    var waybillData: String = ""
    var diagnosticErrorCodeData: String = ""
    var hddSerialNumberData: String = ""


    var scanRectView: UIView?
    var delegate: BarcodeScanDelegate?
    var captureDevice: AVCaptureDevice?


    private let supportedCodeTypes = [AVMetadataObject.ObjectType.upce,
                                      AVMetadataObject.ObjectType.code39,
                                      AVMetadataObject.ObjectType.code39Mod43,
                                      AVMetadataObject.ObjectType.code93,
                                      AVMetadataObject.ObjectType.code128,
                                      AVMetadataObject.ObjectType.ean8,
                                      AVMetadataObject.ObjectType.ean13,
                                      AVMetadataObject.ObjectType.aztec,
                                      AVMetadataObject.ObjectType.pdf417,
                                      AVMetadataObject.ObjectType.itf14,
                                      AVMetadataObject.ObjectType.dataMatrix,
                                      AVMetadataObject.ObjectType.interleaved2of5,
                                      AVMetadataObject.ObjectType.qr]

    override func viewDidLoad() {
        super.viewDidLoad()

        // Get the back-facing camera for capturing videos
        //let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera], mediaType: AVMediaType.video, position: .back)
        let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back)

        guard let captureDevice = deviceDiscoverySession.devices.first else {
            print("Failed to get the camera device")
            return
        }

        do {
            // Get an instance of the AVCaptureDeviceInput class using the previous device object.
            let input = try AVCaptureDeviceInput(device: captureDevice)

            // Set the input device on the capture session.
            captureSession.addInput(input)

            // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
            let captureMetadataOutput = AVCaptureMetadataOutput()
            captureSession.addOutput(captureMetadataOutput)

            // Set delegate and use the default dispatch queue to execute the call back
            captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            captureMetadataOutput.metadataObjectTypes = supportedCodeTypes
            //            captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]

        } catch {
            // If any error occurs, simply print it out and don't continue any more.
            print(error)
            return
        }

        captureSession.commitConfiguration()
        // Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        videoPreviewLayer?.frame = view.layer.bounds
        //videoPreviewLayer?.frame

//        let height: CGFloat = ((videoPreviewLayer?.frame.size.width)!)/2
//        let width: CGFloat = ((videoPreviewLayer?.frame.size.width)!)/2

        let height: CGFloat = (view.frame.size.height)/2
        let width: CGFloat = (view.frame.size.width) - 200
        let path = UIBezierPath()
        //Corner1
        path.move(to: CGPoint(x: 5, y: 50))
        path.addLine(to: CGPoint(x: 5, y: 5))
        path.addLine(to: CGPoint(x: 50, y: 5))
        //Corner2
        path.move(to: CGPoint(x: height - 55, y: 5))
        path.addLine(to: CGPoint(x: height - 5, y: 5))
        path.addLine(to: CGPoint(x: height - 5, y: 55))
        //Corner3
        path.move(to: CGPoint(x: 5, y: width - 55))
        path.addLine(to: CGPoint(x: 5, y: width - 5))
        path.addLine(to: CGPoint(x: 55, y: width - 5))
        //Corner4 -bottom right
        path.move(to: CGPoint(x: height - 5, y: width - 55))
        path.addLine(to: CGPoint(x: height - 5, y: width - 5))
        path.addLine(to: CGPoint(x: height - 55, y: width - 5))
        let shape = CAShapeLayer()
        shape.path = path.cgPath
        shape.strokeColor = UIColor.white.cgColor
        shape.lineWidth = 5
        shape.frame.origin.x = 20
        shape.frame.origin.y = 180
        shape.fillColor = UIColor.clear.cgColor
        videoPreviewLayer?.addSublayer(shape)
        view.layer.addSublayer(videoPreviewLayer!)
        //videoPreviewLayer?.anchorPoint.centerXAnchor.constraint(equalTo: view.centerXAnchor).isActive = true
        //view.layer.addSublayer(scanRectView)

        // Start video capture.
        captureSession.startRunning()

        // Move the message label and top bar to the front
        //view.bringSubview(toFront: messageLabel)
        //view.bringSubview(toFront: topbar)

        // Initialize QR Code Frame to highlight the QR code
        qrCodeFrameView = UIView()

        if let qrCodeFrameView = qrCodeFrameView {
            qrCodeFrameView.layer.borderColor = UIColor.green.cgColor
            qrCodeFrameView.layer.borderWidth = 2
            view.addSubview(qrCodeFrameView)
            view.bringSubviewToFront(qrCodeFrameView)
        }
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        let screenSize = videoPreviewLayer!.bounds.size
        if let touchPoint = touches.first {
            let x = touchPoint.location(in: self.view).y / screenSize.height
            let y = 1.0 - touchPoint.location(in: self.view).x / screenSize.width
            let focusPoint = CGPoint(x: x, y: y)

            if let device = captureDevice {
                do {
                    try device.lockForConfiguration()

                    device.focusPointOfInterest = focusPoint
                    //device.focusMode = .continuousAutoFocus
                    device.focusMode = .autoFocus
                    //device.focusMode = .locked
                    device.exposurePointOfInterest = focusPoint
                    device.exposureMode = AVCaptureDevice.ExposureMode.continuousAutoExposure
                    device.unlockForConfiguration()
                }
                catch {
                    // just ignore
                }
            }
        }
    }


    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func launchApp(barcodeScan: String) {

//        if presentedViewController != nil {
//            return
//        }

        guard presentedViewController == nil else {
            return
        }


        let alertPrompt = UIAlertController(title: "Barcode Found", message: "\(barcodeScan)", preferredStyle: .actionSheet)
        let confirmAction = UIAlertAction(title: "Confirm", style: UIAlertAction.Style.default, handler: { (action) -> Void in

            if self.senderTag == 1 {
                GlobalVariable.senderTags = 1
                self.delegate?.didScan(barcodeData: barcodeScan)
                self.navigationController?.popViewController(animated: true)
            }
            if self.senderTag == 2 {
                GlobalVariable.senderTags = 2
                self.delegate?.didScan(barcodeData: barcodeScan)
                self.navigationController?.popViewController(animated: true)
            }
            if self.senderTag == 3 {
                GlobalVariable.senderTags = 3
                self.delegate?.didScan(barcodeData: barcodeScan)
                self.navigationController?.popViewController(animated: true)
            }
            if self.senderTag != 1 && self.senderTag != 2 && self.senderTag != 3  {
                let indexPath = IndexPath(row: self.row, section: 0)
                let cell: PartsOrderRequestTableCell = globalPartsOrderRequestTableVC?.tableView.cellForRow(at: indexPath) as! PartsOrderRequestTableCell
                cell.diagnosticCodeLabel.text = barcodeScan
                cell.diagnosticCodeLabel.endEditing(true)

                self.navigationController?.popViewController(animated: true)
                //return
            }
        })

        let cancelAction = UIAlertAction(title: "Cancel", style: UIAlertAction.Style.cancel, handler: nil)

        alertPrompt.addAction(confirmAction)
        alertPrompt.addAction(cancelAction)

        present(alertPrompt, animated: true, completion: nil)
    }

}

extension BarcodeScanVC: AVCaptureMetadataOutputObjectsDelegate {

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // Check if the metadataObjects array is not nil and it contains at least one object.
        if metadataObjects.count == 0 {
            qrCodeFrameView?.frame = CGRect.zero
            //messageLabel.text = "No QR code is detected"
            return
        }

        // Get the metadata object.
        let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject

        if supportedCodeTypes.contains(metadataObj.type) {
            // If the found metadata is equal to the QR code metadata (or barcode) then update the status label's text and set the bounds
            let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj)
            qrCodeFrameView?.frame = barCodeObject!.bounds

            if metadataObj.stringValue != nil {
                launchApp(barcodeScan: metadataObj.stringValue!)
                //messageLabel.text = metadataObj.stringValue
            }
        }
    }

    private func updatePreviewLayer(layer: AVCaptureConnection, orientation: AVCaptureVideoOrientation) {

        layer.videoOrientation = orientation

        videoPreviewLayer?.frame = self.view.bounds

    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()

        if let connection =  self.videoPreviewLayer?.connection  {

            let currentDevice: UIDevice = UIDevice.current

            let orientation: UIDeviceOrientation = currentDevice.orientation

            let previewLayerConnection : AVCaptureConnection = connection

            if previewLayerConnection.isVideoOrientationSupported {

                switch (orientation) {
                case .portrait: updatePreviewLayer(layer: previewLayerConnection, orientation: .portrait)

                    break

                case .landscapeRight: updatePreviewLayer(layer: previewLayerConnection, orientation: .landscapeLeft)

                    break

                case .landscapeLeft: updatePreviewLayer(layer: previewLayerConnection, orientation: .landscapeRight)

                    break

                case .portraitUpsideDown: updatePreviewLayer(layer: previewLayerConnection, orientation: .portraitUpsideDown)

                    break

                default: updatePreviewLayer(layer: previewLayerConnection, orientation: .portrait)

                    break
                }
            }
        }
    }
}
The problem is that the captureDevice property is never assigned, so in touchesBegan the unwrap

if let device = captureDevice

always fails and the focus code never runs. In viewDidLoad you wrote

guard let captureDevice = deviceDiscoverySession.devices.first else {
    print("Failed to get the camera device")
    return
}

which only creates a local constant that shadows the property; the property itself stays nil. Assign the property instead:

captureDevice = deviceDiscoverySession.devices.first

With that assignment in place, viewDidLoad becomes:
override func viewDidLoad() {
    super.viewDidLoad()

    // Get the back-facing camera for capturing videos
    //let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera], mediaType: AVMediaType.video, position: .back)
    let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back)

    captureDevice = deviceDiscoverySession.devices.first

    if let captureDevice = captureDevice {
        do {
            // Get an instance of the AVCaptureDeviceInput class using the previous device object.
            let input = try AVCaptureDeviceInput(device: captureDevice)

            // Set the input device on the capture session.
            captureSession.addInput(input)

            // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
            let captureMetadataOutput = AVCaptureMetadataOutput()
            captureSession.addOutput(captureMetadataOutput)

            // Set delegate and use the default dispatch queue to execute the call back
            captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            captureMetadataOutput.metadataObjectTypes = supportedCodeTypes
            //            captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]

        } catch {
            // If any error occurs, simply print it out and don't continue any more.
            print(error)
            return
        }
    }
    ..... Method cut short as there are no other changes.
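
Beyond assigning the property, a few extra guards make the tap-to-focus handler more robust. The sketch below is an illustrative variant, not the original code: it converts the tap through the preview layer with captureDevicePointConverted(fromLayerPoint:), which accounts for orientation and videoGravity instead of the manual x/y swap, and checks that the device actually supports a focus/exposure point of interest before setting one.

override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    guard let touch = touches.first,
          let previewLayer = videoPreviewLayer,
          let device = captureDevice else { return }

    // Convert the tap from view coordinates into the device's normalized
    // (0,0)-(1,1) point-of-interest space; this handles orientation and
    // videoGravity automatically.
    let layerPoint = touch.location(in: view)
    let devicePoint = previewLayer.captureDevicePointConverted(fromLayerPoint: layerPoint)

    do {
        try device.lockForConfiguration()
        defer { device.unlockForConfiguration() }

        if device.isFocusPointOfInterestSupported, device.isFocusModeSupported(.autoFocus) {
            device.focusPointOfInterest = devicePoint
            device.focusMode = .autoFocus
        }
        if device.isExposurePointOfInterestSupported, device.isExposureModeSupported(.continuousAutoExposure) {
            device.exposurePointOfInterest = devicePoint
            device.exposureMode = .continuousAutoExposure
        }
    } catch {
        print("Could not lock device for configuration: \(error)")
    }
}

Note that the point of interest must be set before the focus/exposure mode, and the device has to be locked for configuration around both, as above.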