iOS Swift ImagePickerController: improving the editing mode


I have a UIImagePickerController with allowsEditing set to true. However, I would like the editing screen to offer more options than just the resize mode. How can I improve it?

You cannot improve the native image/video editing screen. If you need more options, you will have to build your own controller that takes the raw camera output and edits it.

I agree with @Lope; you can build your own by capturing the camera input yourself:

import UIKit
import AVFoundation
import AssetsLibrary

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    var SessionRunningAndDeviceAuthorizedContext = "SessionRunningAndDeviceAuthorizedContext"
    var CapturingStillImageContext = "CapturingStillImageContext"

    // Required method of AVCaptureFileOutputRecordingDelegate; this example only captures still images.
    public func fileOutput(_ captureOutput: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {

    }


    // MARK: property

    var sessionQueue: DispatchQueue!
    var session: AVCaptureSession?
    var videoDeviceInput: AVCaptureDeviceInput?
    @objc var stillImageOutput: AVCaptureStillImageOutput?

    var deviceAuthorized: Bool  = false
    var backgroundRecordId: UIBackgroundTaskIdentifier = UIBackgroundTaskInvalid
    var sessionRunningAndDeviceAuthorized: Bool {
        get {
            return (self.session?.isRunning ?? false) && self.deviceAuthorized
        }
    }

    var runtimeErrorHandlingObserver: AnyObject?
    var lockInterfaceRotation: Bool = false

    @IBOutlet weak var previewView: AVCamPreviewView!
    @IBOutlet weak var snapButton: UIButton!
    @IBOutlet weak var cameraButton: UIButton!

    var snappedimage = UIImage()

 override func viewDidLoad() {
        super.viewDidLoad()
 let session: AVCaptureSession = AVCaptureSession()
        session.sessionPreset = AVCaptureSession.Preset.photo
        self.session = session

        self.previewView.session = session

        self.checkDeviceAuthorizationStatus()

        let sessionQueue: DispatchQueue = DispatchQueue(label: "KehoeGames.whatever",attributes: [])

        self.sessionQueue = sessionQueue
        sessionQueue.async {
            self.backgroundRecordId = UIBackgroundTaskInvalid

            let videoDevice: AVCaptureDevice! = ViewController.deviceWithMediaType(AVMediaType.video.rawValue, preferringPosition: AVCaptureDevice.Position.back)
            var error: NSError? = nil


            var videoDeviceInput: AVCaptureDeviceInput?
            do {
                videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
            } catch let error1 as NSError {
                error = error1
                videoDeviceInput = nil
            } catch {
                fatalError()
            }

            if (error != nil) {
                print(error)
                let alert = UIAlertController(title: "Error", message: error!.localizedDescription
                    , preferredStyle: .alert)
                alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
                self.present(alert, animated: true, completion: nil)
            }

            if session.canAddInput(videoDeviceInput!){
                session.addInput(videoDeviceInput!)
                self.videoDeviceInput = videoDeviceInput

                DispatchQueue.main.async {


                    (self.previewView.layer as! AVCaptureVideoPreviewLayer).connection?.videoOrientation = .portrait
                    (self.previewView.layer as! AVCaptureVideoPreviewLayer).videoGravity = AVLayerVideoGravity.resizeAspectFill

                }
            }
            //Audio

            let stillImageOutput: AVCaptureStillImageOutput = AVCaptureStillImageOutput()
            if session.canAddOutput(stillImageOutput) {
                stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
                session.addOutput(stillImageOutput)

                self.stillImageOutput = stillImageOutput
            }
        }
}

override func viewWillAppear(_ animated: Bool) {
        self.sessionQueue.async {

            self.addObserver(self, forKeyPath: "sessionRunningAndDeviceAuthorized", options: [.old, .new], context: &self.SessionRunningAndDeviceAuthorizedContext)
            self.addObserver(self, forKeyPath: "stillImageOutput.capturingStillImage", options: [.old, .new], context: &self.CapturingStillImageContext)

            NotificationCenter.default.addObserver(self, selector: #selector(ViewController.subjectAreaDidChange(_:)), name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange, object: self.videoDeviceInput?.device)

            self.runtimeErrorHandlingObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVCaptureSessionRuntimeError, object: self.session, queue: nil) {
                (note: Notification?) in
                self.sessionQueue.async { [unowned self] in
                    if let sess = self.session {
                        sess.startRunning()
                    }

                }
            }
            self.session?.startRunning()
        }
    }

override func viewWillDisappear(_ animated: Bool) {

        self.sessionQueue.async {

            if let sess = self.session {
                sess.stopRunning()

                NotificationCenter.default.removeObserver(self, name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange, object: self.videoDeviceInput?.device)
                NotificationCenter.default.removeObserver(self.runtimeErrorHandlingObserver!)

                self.removeObserver(self, forKeyPath: "sessionRunningAndDeviceAuthorized", context: &self.SessionRunningAndDeviceAuthorizedContext)

                self.removeObserver(self, forKeyPath: "stillImageOutput.capturingStillImage", context: &self.CapturingStillImageContext)
            }
        }
    }

 override var shouldAutorotate : Bool {
        return !self.lockInterfaceRotation
    }

    override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {

        if context == &CapturingStillImageContext{
            let isCapturingStillImage: Bool = (change![NSKeyValueChangeKey.newKey]! as AnyObject).boolValue
            if isCapturingStillImage {
                self.runStillImageCaptureAnimation()
            }

        }else {
            return super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
        }
    }

@objc func subjectAreaDidChange(_ notification: Notification){
        let devicePoint: CGPoint = CGPoint(x: 0.5, y: 0.5)
        self.focusWithMode(AVCaptureDevice.FocusMode.continuousAutoFocus, exposureMode: AVCaptureDevice.ExposureMode.continuousAutoExposure, point: devicePoint, monitorSubjectAreaChange: false)
    }

    // MARK:  Custom Function

    func focusWithMode(_ focusMode:AVCaptureDevice.FocusMode, exposureMode:AVCaptureDevice.ExposureMode, point:CGPoint, monitorSubjectAreaChange:Bool){

        self.sessionQueue.async {
            let device: AVCaptureDevice! = self.videoDeviceInput!.device

            do {
                try device.lockForConfiguration()

                if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(focusMode){
                    device.focusMode = focusMode
                    device.focusPointOfInterest = point
                }
                if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode){
                    device.exposurePointOfInterest = point
                    device.exposureMode = exposureMode
                }
                device.isSubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange
                device.unlockForConfiguration()

            } catch {
                print(error)
            }
        }

    }

    class func setFlashMode(_ flashMode: AVCaptureDevice.FlashMode, device: AVCaptureDevice){

        if device.hasFlash && device.isFlashModeSupported(flashMode) {
            var error: NSError? = nil
            do {
                try device.lockForConfiguration()
                device.flashMode = flashMode
                device.unlockForConfiguration()

            } catch let error1 as NSError {
                error = error1
                print(error)
            }
        }
    }

    func runStillImageCaptureAnimation(){
        DispatchQueue.main.async {
            self.previewView.layer.opacity = 0.0
            UIView.animate(withDuration: 0.25, animations: {
                self.previewView.layer.opacity = 1.0
            })
        }
    }

    class func deviceWithMediaType(_ mediaType: String, preferringPosition:AVCaptureDevice.Position) -> AVCaptureDevice? {

        var devices = AVCaptureDevice.devices(for: AVMediaType(rawValue: mediaType));

        if (devices.isEmpty) {
            print("This device has no camera. Probably the simulator.")
            return nil
        } else {
            var captureDevice: AVCaptureDevice = devices[0]

            for device in devices {
                if device.position == preferringPosition {
                    captureDevice = device
                    break
                }
            }
            return captureDevice
        }
    }

 func checkDeviceAuthorizationStatus(){
        let mediaType:String = AVMediaType.video.rawValue;

        AVCaptureDevice.requestAccess(for: AVMediaType(rawValue: mediaType)) { (granted: Bool) in
            if granted {
                self.deviceAuthorized = true;
            } else {

                DispatchQueue.main.async {
                    let alert: UIAlertController = UIAlertController(
                        title: "AVCam",
                        message: "AVCam does not have permission to access camera",
                        preferredStyle: UIAlertControllerStyle.alert)
                    let action = UIAlertAction(title: "OK", style: .default) { _ in }
                    alert.addAction(action)
                    self.present(alert, animated: true, completion: nil)
                }
                self.deviceAuthorized = false;
            }
        }
    }

    var snaptrue = false

    @IBAction func snapStillImage(_ sender: AnyObject) {
        if snaptrue == false {
        snaptrue = true
        self.sessionQueue.async {
            // Update the orientation on the still image output video connection before capturing.

            let videoOrientation =  (self.previewView.layer as! AVCaptureVideoPreviewLayer).connection?.videoOrientation

            self.stillImageOutput!.connection(with: AVMediaType.video)?.videoOrientation = videoOrientation!

            // Flash set to Auto for Still Capture
            ViewController.setFlashMode(AVCaptureDevice.FlashMode.off, device: self.videoDeviceInput!.device)
            self.stillImageOutput!.captureStillImageAsynchronously(from: self.stillImageOutput!.connection(with:AVMediaType.video)!, completionHandler: { (imageDataSampleBuffer: CMSampleBuffer?, error: Error?) in
                if error == nil {
                    let data:Data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer!)!
                    //PNG
                    let imagesnap:UIImage = UIImage(data: data)!
                    self.snappedimage = imagesnap
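                    // `move` is assumed to be implemented elsewhere in this class (e.g. presenting the editing screen with `snappedimage`).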
                    Timer.scheduledTimer(timeInterval: 1.3, target: self, selector: #selector(self.move), userInfo: nil, repeats: false)
                } else {
                    print(error)
                }
            })
        }
        }
    }

 @IBAction func changeCamera(_ sender: AnyObject) {
        self.cameraButton.isEnabled = false
        self.snapButton.isEnabled = false

        self.sessionQueue.async {

            let currentVideoDevice:AVCaptureDevice = self.videoDeviceInput!.device
            let currentPosition: AVCaptureDevice.Position = currentVideoDevice.position
            var preferredPosition: AVCaptureDevice.Position = AVCaptureDevice.Position.unspecified

            switch currentPosition {
            case AVCaptureDevice.Position.front:
                preferredPosition = AVCaptureDevice.Position.back
            case AVCaptureDevice.Position.back:
                preferredPosition = AVCaptureDevice.Position.front
            case AVCaptureDevice.Position.unspecified:
                preferredPosition = AVCaptureDevice.Position.back

            }

            guard let device: AVCaptureDevice = ViewController.deviceWithMediaType(AVMediaType.video.rawValue, preferringPosition: preferredPosition) else {
                print("there is no AVCapture Device")
                return
            }

            var videoDeviceInput: AVCaptureDeviceInput?

            do {
                videoDeviceInput = try AVCaptureDeviceInput(device: device)
            } catch _ as NSError {
                videoDeviceInput = nil
            } catch {
                fatalError()
            }

            self.session!.beginConfiguration()

            self.session!.removeInput(self.videoDeviceInput!)

            if self.session!.canAddInput(videoDeviceInput!) {

                NotificationCenter.default.removeObserver(self, name:NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange, object:currentVideoDevice)

                ViewController.setFlashMode(AVCaptureDevice.FlashMode.auto, device: device)

                NotificationCenter.default.addObserver(self, selector: #selector(ViewController.subjectAreaDidChange(_:)), name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange, object: device)

                self.session!.addInput(videoDeviceInput!)
                self.videoDeviceInput = videoDeviceInput

            } else {
                self.session!.addInput(self.videoDeviceInput!)
            }

            self.session!.commitConfiguration()

            DispatchQueue.main.async {
                self.snapButton.isEnabled = true
                self.cameraButton.isEnabled = true
            }

        }
    }

    @IBAction func focusAndExposeTap(_ gestureRecognizer: UIGestureRecognizer) {
        let devicePoint: CGPoint = (self.previewView.layer as! AVCaptureVideoPreviewLayer).captureDevicePointConverted(fromLayerPoint: gestureRecognizer.location(in: gestureRecognizer.view))

        self.focusWithMode(AVCaptureDevice.FocusMode.autoFocus, exposureMode: AVCaptureDevice.ExposureMode.autoExpose, point: devicePoint, monitorSubjectAreaChange: true)
    }
}

And in AVCamPreviewView:

import Foundation
import UIKit
import AVFoundation


class AVCamPreviewView: UIView {

    var session: AVCaptureSession? {
        get {
            return (self.layer as! AVCaptureVideoPreviewLayer).session
        }
        set (session) {
            (self.layer as! AVCaptureVideoPreviewLayer).session = session
        }
    }

    override class var layerClass : AnyClass {
        return AVCaptureVideoPreviewLayer.self
    }
}
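
Once you have the captured UIImage (snappedimage above), you can edit it however you like before showing or saving it. As one illustration that is not part of the original answer, a simple Core Image pass might look like this; the function name and the CISepiaTone filter are just assumptions:

import UIKit
import CoreImage

// Illustrative only: apply a Core Image filter to the captured photo.
// The filter choice (CISepiaTone) and the function name are assumptions.
func applySepia(to image: UIImage, intensity: Double = 0.8) -> UIImage? {
    guard let ciImage = CIImage(image: image),
        let filter = CIFilter(name: "CISepiaTone") else { return nil }
    filter.setValue(ciImage, forKey: kCIInputImageKey)
    filter.setValue(intensity, forKey: kCIInputIntensityKey)

    let context = CIContext()
    guard let output = filter.outputImage,
        let cgImage = context.createCGImage(output, from: output.extent) else { return nil }
    return UIImage(cgImage: cgImage, scale: image.scale, orientation: image.imageOrientation)
}
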
Do you also need the option to pick photos that were already taken and are in the camera roll? - You just need to create a collection view and fetch all the images into it. Let me know.


Neil
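
If you also want that camera-roll option, a minimal sketch using the Photos framework could look like the following; the CameraRollViewController class, the "PhotoCell" reuse identifier and the thumbnail handling are illustrative assumptions, and you would still need to register the cell and add the photo-library usage description to Info.plist:

import UIKit
import Photos

// Illustrative sketch: fetch every photo in the library and show it in a collection view.
class CameraRollViewController: UICollectionViewController {

    private var assets = PHFetchResult<PHAsset>()
    private let imageManager = PHCachingImageManager()

    override func viewDidLoad() {
        super.viewDidLoad()
        PHPhotoLibrary.requestAuthorization { status in
            guard status == .authorized else { return }
            let options = PHFetchOptions()
            options.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
            self.assets = PHAsset.fetchAssets(with: .image, options: options)
            DispatchQueue.main.async { self.collectionView?.reloadData() }
        }
    }

    override func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
        return assets.count
    }

    override func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
        // "PhotoCell" must be registered in code or in the storyboard.
        let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "PhotoCell", for: indexPath)
        let asset = assets[indexPath.item]
        imageManager.requestImage(for: asset, targetSize: CGSize(width: 200, height: 200), contentMode: .aspectFill, options: nil) { image, _ in
            // Simplest possible thumbnail handling for the sketch.
            cell.backgroundView = UIImageView(image: image)
        }
        return cell
    }
}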

There are quite a few steps, and it depends on your app's requirements. Do you have a question about a specific step?

I don't know where to start; let's say I want to add filters. I mean, should I build a custom view controller? When the user presses the Take Photo button, it would redirect straight to that custom view controller, right?

Yes, you need to build a new view controller to filter the photo. When the user presses the take-photo button, open the image picker controller just as you do now, but with allowsEditing disabled. You get the user's photo through the delegate, then present your new custom VC and pass the photo to it. The user does the editing, and your new VC returns the edited photo to wherever you need it (probably back to where the take-photo button lives).
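
If it helps, here is a rough sketch of the flow described in these comments. The FilterEditorViewController and PhotoViewController names, the onFinish callback and the editing itself are hypothetical placeholders that you would implement yourself (for example on top of Core Image):

import UIKit

// Hypothetical editing screen; in a real app this is where the filters, cropping,
// drawing, etc. would live. Only a stub here so the sketch is self-contained.
class FilterEditorViewController: UIViewController {
    let image: UIImage
    var onFinish: ((UIImage) -> Void)?

    init(image: UIImage) {
        self.image = image
        super.init(nibName: nil, bundle: nil)
    }

    required init?(coder aDecoder: NSCoder) { fatalError("init(coder:) has not been implemented") }
}

// The flow from the comments: take the photo with UIImagePickerController
// (allowsEditing off), then hand it to the custom editor.
class PhotoViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    @IBAction func takePhotoTapped(_ sender: AnyObject) {
        let picker = UIImagePickerController()
        picker.sourceType = .camera
        picker.allowsEditing = false          // skip the native resize-only editor
        picker.delegate = self
        present(picker, animated: true, completion: nil)
    }

    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) {
        picker.dismiss(animated: true, completion: nil)
        guard let photo = info[UIImagePickerControllerOriginalImage] as? UIImage else { return }

        let editor = FilterEditorViewController(image: photo)
        editor.onFinish = { editedImage in
            // Use the edited photo here (display it, upload it, ...).
        }
        present(editor, animated: true, completion: nil)
    }

    func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
        picker.dismiss(animated: true, completion: nil)
    }
}

The key point is simply that the editing happens in your own view controller, so you can offer whatever options (filters, cropping, drawing) the native editor does not.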