Ios 如何使用接近传感器拍照?
启用接近传感器后,我无法使用后置摄像头拍摄照片。我不希望显示相机预览,只希望设备拍摄照片并在 imageView 中显示。接近传感器本身已经可以正常工作,我正在使用
imagePicker.takePicture()
在接近传感器启用时拍摄图像,但这似乎不起作用。无需用户输入,我可以使用什么方法/函数以编程方式拍摄照片
这是我目前的代码:
// Asker's original snippet (truncated by the page scrape — the class and the
// function below are cut off mid-body). A view controller that owns a
// UIImagePickerController and reacts to proximity-sensor notifications.
class ViewController: UIViewController, UINavigationControllerDelegate, UIImagePickerControllerDelegate {
// Destination for the captured photo.
@IBOutlet var imageView: UIImageView!
var imagePicker: UIImagePickerController!
//*The function in question*
// Notification handler for proximity-state changes; `notification.object`
// is the UIDevice whose `proximityState` flipped.
func proximityChanged(notification: NSNotification) {
let device = notification.object as? UIDevice
// proximityState == true means something is close to the sensor.
if device?.proximityState == true {
print("\(device) detected!")
如果使用
UIImagePickerController
拍摄照片时遇到问题,我建议使用AVFoundation
下面是一个工作示例。照片拍摄由接近传感器触发。
如果需要,可以添加预览
import UIKit
import AVFoundation
// Captures a still photo from the camera — without showing any preview —
// whenever the proximity sensor detects something close, and displays the
// result in `imageView`.
// NOTE(review): written against iOS 9-era Swift 2 APIs (AVCaptureStillImageOutput,
// NSNotificationCenter, AVCaptureSessionPresetPhoto); these were deprecated in
// iOS 10 in favour of AVCapturePhotoOutput — confirm the target SDK before reuse.
final class CaptureViewController: UIViewController {
// Destination for the captured photo.
@IBOutlet weak var imageView: UIImageView!
// Photo-quality preset requested for the capture session.
private static let captureSessionPreset = AVCaptureSessionPresetPhoto
private var captureSession: AVCaptureSession!
private var photoOutput: AVCaptureStillImageOutput!
// True only if the session, its input, and its output were all set up successfully;
// gates everything in viewWillAppear/viewDidDisappear.
private var initialized = false
override func viewDidLoad() {
super.viewDidLoad()
initialized = setupCaptureSession()
}
// Start the camera and subscribe to proximity changes while the view is visible.
override func viewWillAppear(animated: Bool) {
super.viewWillAppear(animated)
if initialized {
captureSession.startRunning()
// Enabling monitoring is what makes UIDevice post proximity notifications at all.
UIDevice.currentDevice().proximityMonitoringEnabled = true
NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(proximityStateDidChange), name: UIDeviceProximityStateDidChangeNotification, object: nil)
}
}
// Exact mirror of viewWillAppear: unsubscribe, disable monitoring, stop the session.
override func viewDidDisappear(animated: Bool) {
super.viewDidDisappear(animated)
if initialized {
NSNotificationCenter.defaultCenter().removeObserver(self, name: UIDeviceProximityStateDidChangeNotification, object: nil)
UIDevice.currentDevice().proximityMonitoringEnabled = false
captureSession.stopRunning()
}
}
// `dynamic` exposes the method to the Obj-C runtime so it can serve as the
// notification selector. Triggers a capture when something is near the sensor.
dynamic func proximityStateDidChange(notification: NSNotification) {
if UIDevice.currentDevice().proximityState {
captureImage()
}
}
// MARK: - Capture Image
// Asynchronously grabs one still frame from the photo output and shows it.
private func captureImage() {
if let c = findConnection() {
photoOutput.captureStillImageAsynchronouslyFromConnection(c) { sampleBuffer, error in
// NOTE(review): `error` is ignored — a failed capture is silently dropped.
if let jpeg = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer),
let image = UIImage(data: jpeg)
{
// The capture callback is not on the main queue; UIKit must be.
dispatch_async(dispatch_get_main_queue()) { [weak self] in
self?.imageView.image = image
}
}
}
}
}
// Returns the first output connection that carries a video input port, or nil.
private func findConnection() -> AVCaptureConnection? {
for c in photoOutput.connections {
let c = c as? AVCaptureConnection
for p in c?.inputPorts ?? [] {
if p.mediaType == AVMediaTypeVideo {
return c
}
}
}
return nil
}
// MARK: - Setup Capture Session
// Builds the session; false if the preset, the input, or the output is unavailable.
private func setupCaptureSession() -> Bool {
captureSession = AVCaptureSession()
if captureSession.canSetSessionPreset(CaptureViewController.captureSessionPreset) {
captureSession.sessionPreset = CaptureViewController.captureSessionPreset
if setupCaptureSessionInput() && setupCaptureSessionOutput() {
return true
}
}
return false
}
// Attaches the default video capture device (the camera) as the session input.
// NOTE(review): defaultDeviceWithMediaType picks the system default camera —
// confirm it selects the rear camera on the target device.
private func setupCaptureSessionInput() -> Bool {
if let captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo),
let captureDeviceInput = try? AVCaptureDeviceInput.init(device: captureDevice)
{
if captureSession.canAddInput(captureDeviceInput) {
captureSession.addInput(captureDeviceInput)
return true
}
}
return false
}
// Attaches a JPEG still-image output to the session.
private func setupCaptureSessionOutput() -> Bool {
photoOutput = AVCaptureStillImageOutput()
photoOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
if captureSession.canAddOutput(photoOutput) {
captureSession.addOutput(photoOutput)
return true
}
return false
}
}
评论区:
— 你能澄清一下吗:哪部分不起作用?是接近传感器触发了但没有拍到图片,还是根本没有触发,还是其他情况?
— 是的,接近传感器有触发,但没有拍摄图片。
— 不通过图像选择器、直接使用 AVFoundation 不是更好吗?
— 能更具体一些吗?我该怎么做?
— 请查看此链接以正确设置它。