iOS 如何修复使用前置摄像头拍摄时图像被镜像的问题,AVFoundation,Swift

Ios 如何修复使用前向摄像头后视镜拍摄图像图像,AVFoundation,Swift,ios,swift,view,camera,avfoundation,Ios,Swift,View,Camera,Avfoundation,如何修复使用前向摄像头镜像拍摄图像的问题? SnapChat似乎和WhatsApp和Instagram一样能解决这个问题,我该怎么办? 我真的很想找到解决这个问题的办法。。。真烦人…谢谢你 我见过,但它翻转了后摄像头和前摄像头的图像,这并不能真正解决任何人的问题。如果有人能帮我找出如何只翻转前摄像头图像或任何其他解决方案,这将是伟大的 import UIKit import AVFoundation @available(iOS 10.0, *) class CameraViewControl

如何修复使用前向摄像头镜像拍摄图像的问题? SnapChat似乎和WhatsApp和Instagram一样能解决这个问题,我该怎么办? 我真的很想找到解决这个问题的办法。。。真烦人…谢谢你

我见过,但它翻转了后摄像头和前摄像头的图像,这并不能真正解决任何人的问题。如果有人能帮我找出如何只翻转前摄像头图像或任何其他解决方案,这将是伟大的

import UIKit
import AVFoundation

@available(iOS 10.0, *)
class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    // MARK: - Outlets
    // NOTE(review): these two views were referenced but not declared in the
    // original listing — presumably storyboard outlets lost when the code was
    // pasted. Confirm the Interface Builder connections.
    @IBOutlet weak var containerView: UIView!
    @IBOutlet weak var DismissButton: UIButton!

    // MARK: - Capture state
    let photoSettings = AVCapturePhotoSettings()
    var audioPlayer = AVAudioPlayer()
    var captureSession = AVCaptureSession()
    var videoDeviceInput: AVCaptureDeviceInput!
    var previewLayer = AVCaptureVideoPreviewLayer()
    // true while the front camera is the active input (used nowhere else in the
    // original; kept for interface compatibility and now updated on switch).
    var frontCamera: Bool = false
    var captureDevice: AVCaptureDevice!
    // Set by the shutter button, consumed by the next delivered video frame.
    var takePhoto = false

    // MARK: - Lifecycle

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        prepareCamera()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        self.captureSession.stopRunning()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    // MARK: - Session setup

    /// Selects the default back wide-angle camera and starts the session.
    func prepareCamera() {
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto

        if let availableDevices = AVCaptureDeviceDiscoverySession(
                deviceTypes: [.builtInWideAngleCamera],
                mediaType: AVMediaTypeVideo,
                position: .back).devices,
           let device = availableDevices.first {
            captureDevice = device
            beginSession()
        }
    }

    /// Switches the session input between the front and back camera.
    /// - Parameter front: `true` selects the front camera, `false` the back one.
    func frontCamera(_ front: Bool) {
        captureSession.beginConfiguration()

        // BUG FIX: the original built a brand-new AVCaptureDeviceInput and
        // removed *that*, which never detaches the input actually attached to
        // the session. Remove the session's real inputs instead.
        if let inputs = captureSession.inputs as? [AVCaptureInput] {
            for input in inputs {
                captureSession.removeInput(input)
            }
        }

        let wantedPosition: AVCaptureDevicePosition = front ? .front : .back
        let devices = AVCaptureDevice.devices()

        for device in devices! {
            guard let device = device as? AVCaptureDevice,
                  device.hasMediaType(AVMediaTypeVideo),
                  device.position == wantedPosition else { continue }

            captureDevice = device
            do {
                try captureSession.addInput(AVCaptureDeviceInput(device: device))
            } catch {
                // Surface the real error instead of the original bare "Error".
                print("Could not add camera input: \(error)")
            }
            break
        }

        // Remember the active position so capture code can un-mirror correctly.
        frontCamera = front
        captureSession.commitConfiguration()
    }

    /// Attaches the current device input, the preview layer and the video
    /// data output, then starts the session.
    func beginSession() {
        // BUG FIX: the original `do` had no `catch` (compile error) and the
        // created input was never added to the session, so no frames flowed.
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            if captureSession.canAddInput(captureDeviceInput) {
                captureSession.addInput(captureDeviceInput)
                videoDeviceInput = captureDeviceInput
            }
        } catch {
            print("Could not create camera input: \(error)")
            return
        }

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            self.previewLayer = previewLayer
            containerView.layer.addSublayer(previewLayer)
            previewLayer.frame = self.view.layer.frame
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
            previewLayer.connection.videoOrientation = .portrait
        }

        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.videoSettings =
            [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
        dataOutput.alwaysDiscardsLateVideoFrames = true

        if captureSession.canAddOutput(dataOutput) {
            captureSession.addOutput(dataOutput)
            photoSettings.isHighResolutionPhotoEnabled = true
            photoSettings.isAutoStillImageStabilizationEnabled = true
        }

        captureSession.commitConfiguration()

        // Deliver frames off the main thread.
        let queue = DispatchQueue(label: "com.NightOut.captureQueue")
        dataOutput.setSampleBufferDelegate(self, queue: queue)

        captureSession.startRunning()
    }

    func stopCaptureSession() {
        self.captureSession.stopRunning()

        if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in inputs {
                self.captureSession.removeInput(input)
            }
        }
    }

    // MARK: - Capture

    /// Shutter button: flag the next frame for capture.
    /// BUG FIX: in the original this @IBAction was (by brace mismatch) nested
    /// inside beginSession(); it now lives at class scope.
    @IBAction func takePhoto(_ sender: Any) {
        takePhoto = true

        photoSettings.isHighResolutionPhotoEnabled = true
        photoSettings.isAutoStillImageStabilizationEnabled = true
    }

    /// Per-frame delegate callback; converts one frame to a UIImage when the
    /// shutter flag is set and presents the preview controller.
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        if takePhoto {
            takePhoto = false
            if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {
                let photoVC = UIStoryboard(name: "Main", bundle: nil)
                    .instantiateViewController(withIdentifier: "PhotoVC") as! PhotoPreviewViewController

                photoVC.takenPhoto = image

                // UIKit work must happen on the main queue; frames arrive on
                // the capture queue.
                DispatchQueue.main.async {
                    self.present(photoVC, animated: true, completion: {
                        self.stopCaptureSession()
                    })
                }
            }
        }
    }

    /// Converts a BGRA sample buffer to a UIImage, un-mirroring front-camera
    /// frames only.
    func getImageFromSampleBuffer(buffer: CMSampleBuffer) -> UIImage? {
        if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            let context = CIContext()

            let imageRect = CGRect(x: 0, y: 0,
                                   width: CVPixelBufferGetWidth(pixelBuffer),
                                   height: CVPixelBufferGetHeight(pixelBuffer))

            if let image = context.createCGImage(ciImage, from: imageRect) {
                // BUG FIX (the question's subject): the original applied
                // .leftMirrored unconditionally, mirroring back-camera shots
                // too. Only the front camera delivers mirrored frames, so
                // mirror-compensate for .front and just rotate (.right) for
                // the back camera — matching the accepted answer.
                let orientation: UIImageOrientation =
                    (captureDevice != nil && captureDevice.position == .front) ? .leftMirrored : .right
                return UIImage(cgImage: image,
                               scale: UIScreen.main.scale,
                               orientation: orientation)
            }
        }
        return nil
    }

    // MARK: - UI

    @IBAction func DismissButtonAction(_ sender: UIButton) {
        UIView.animate(withDuration: 0.1, animations: {
            self.DismissButton.transform = CGAffineTransform.identity.scaledBy(x: 0.8, y: 0.8)
        }, completion: { (finish) in
            UIView.animate(withDuration: 0.1, animations: {
                self.DismissButton.transform = CGAffineTransform.identity
            })
        })
        performSegue(withIdentifier: "Segue", sender: nil)
    }
}

我很久以前就做过这个。现在手边没有我的代码,但可以告诉你:对你的预览层应用一个水平镜像变换(水平翻转),它就会按你期望的方式显示。关于如何保存和导出翻转后的图像,网上可以找到许多示例。

我自己就知道了,以下是解决方案:

if captureDevice.position == AVCaptureDevicePosition.back {
    if let image = context.createCGImage(ciImage, from: imageRect) {
        return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
    }
}
                
if captureDevice.position == AVCaptureDevicePosition.front {
    if let image = context.createCGImage(ciImage, from: imageRect) {
        return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .leftMirrored)
    }
}

我建议使用
AVCaptureConnection
的本机
isVideoMirrored
属性,而不使用转换

以下是示例步骤:

  • 在自定义摄影机类中保存当前摄影机位置:
  • private var currentCameraPosition:AVCaptureDevice.Position=.back

  • 每次用户旋转相机时,更新
    currentCameraPosition

  • 最后,在您的
    capturePhoto()
    方法集
    isVideoMirrored
    中:

     guard let connection = stillImageOutput.connection(with: .video) else { return }
     connection.isVideoMirrored = currentCameraPosition == .front
    
  • 这将在使用前摄像头时镜像照片

    使用相同的方法镜像前置摄像头视频:

        guard let connection = videoCaptureOutput.connection(with: .video) else { return }
        connection.isVideoMirrored = currentCameraPosition == .front
       
    

    就这样

    可能重复我看到的..
    LeftMirrored
    对前摄像头起作用,但使后摄像头以您不希望的方式翻转图像@CleverError当我离开镜像层时,它也会镜像我的后摄像头图像…这不是一个好的解决方案,正确的解决方案是当它是captureOutput回调中的前摄像头时,将AVCaptureConnection对象的isVideoMirrored属性设置为true,在调用CMSampleBufferGetImageBuffer之前。@Boon我能更好地理解,你能把它写成解决方案吗?@RandomGeek在临时存储捕获的图像时,UIImageView是图像视图吗?非常适合我!