Ios 如何使相机功能仅显示在一个选项卡栏项中?
我目前正在 Xcode 10 中创建一个 QR 扫描仪/阅读器应用程序。我有一个选项卡控制器（Tab Bar Controller），它有两个选项卡栏项：1) 主页（Home）；2) QR 扫描仪。虽然我的代码可以工作，我可以在 SecondViewController 中扫描二维码，但每当我切换回 FirstViewController 时，应用仍然会在我不想扫描时继续扫描二维码。如果您对如何解决这个问题有任何建议，我将不胜感激。第一视图控制器（FirstViewController）代码如下：
import UIKit
/// Home tab. No camera work happens here — the capture session lives
/// entirely in the scanner tab's controller.
class FirstViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
    }
}
import UIKit
import AVFoundation
/// QR-scanner tab.
///
/// Fix for the reported bug: the capture session is now a stored property
/// (not a `viewDidLoad` local), so it can be stopped when this tab goes off
/// screen. Previously the local session was started once and could never be
/// stopped, which is why scanning continued on the Home tab.
class SecondViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    /// Owned by the controller so viewWillAppear/viewDidDisappear can
    /// start and stop it.
    let session = AVCaptureSession()
    var video = AVCaptureVideoPreviewLayer()
    @IBOutlet weak var square: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Define capture device. Guard instead of force-unwrapping:
        // default(for:) returns nil on devices/simulators without a camera.
        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
            print(">>>>Error: no video capture device available")
            return
        }
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice)
            session.addInput(input)
        } catch {
            // Include the underlying error instead of a bare marker string.
            print(">>>>Error: \(error)")
            return
        }
        let output = AVCaptureMetadataOutput()
        session.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        // Must be set AFTER the output has been added to the session.
        output.metadataObjectTypes = [.qr]
        video = AVCaptureVideoPreviewLayer(session: session)
        video.frame = view.layer.bounds
        view.layer.addSublayer(video)
        view.bringSubviewToFront(square)
        // Note: startRunning() moved to viewWillAppear so scanning only
        // happens while this tab is visible.
    }

    /// Start scanning only while this tab is on screen.
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        session.startRunning()
    }

    /// Stop the camera when the user switches tabs, so the Home tab does
    /// not keep detecting QR codes.
    override func viewDidDisappear(_ animated: Bool) {
        super.viewDidDisappear(animated)
        session.stopRunning()
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // metadataObjects is a non-optional array — the original
        // `!= nil` check was always true; only emptiness matters.
        guard let object = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
              object.type == .qr else { return }
        let alert = UIAlertController(title: "QR code", message: object.stringValue, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
        present(alert, animated: true, completion: nil)
    }
}
/// Skeleton showing the fix: the session is a member of the class so the
/// appear/disappear lifecycle callbacks can start and stop it.
class SecondViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    let session = AVCaptureSession()

    override func viewDidLoad() {
        // Was missing: lifecycle overrides must call super.
        super.viewDidLoad()
        // any setup you may need
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        session.startRunning()
        print("Start running")
    }

    override func viewDidDisappear(_ animated: Bool) {
        super.viewDidDisappear(animated)
        session.stopRunning()
        print ("Stop running")
    }
}
第二视图控制器:
import UIKit
/// Home tab. No camera work happens here — the capture session lives
/// entirely in the scanner tab's controller.
class FirstViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
    }
}
import UIKit
import AVFoundation
/// QR-scanner tab.
///
/// Fix for the reported bug: the capture session is now a stored property
/// (not a `viewDidLoad` local), so it can be stopped when this tab goes off
/// screen. Previously the local session was started once and could never be
/// stopped, which is why scanning continued on the Home tab.
class SecondViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    /// Owned by the controller so viewWillAppear/viewDidDisappear can
    /// start and stop it.
    let session = AVCaptureSession()
    var video = AVCaptureVideoPreviewLayer()
    @IBOutlet weak var square: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Define capture device. Guard instead of force-unwrapping:
        // default(for:) returns nil on devices/simulators without a camera.
        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
            print(">>>>Error: no video capture device available")
            return
        }
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice)
            session.addInput(input)
        } catch {
            // Include the underlying error instead of a bare marker string.
            print(">>>>Error: \(error)")
            return
        }
        let output = AVCaptureMetadataOutput()
        session.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        // Must be set AFTER the output has been added to the session.
        output.metadataObjectTypes = [.qr]
        video = AVCaptureVideoPreviewLayer(session: session)
        video.frame = view.layer.bounds
        view.layer.addSublayer(video)
        view.bringSubviewToFront(square)
        // Note: startRunning() moved to viewWillAppear so scanning only
        // happens while this tab is visible.
    }

    /// Start scanning only while this tab is on screen.
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        session.startRunning()
    }

    /// Stop the camera when the user switches tabs, so the Home tab does
    /// not keep detecting QR codes.
    override func viewDidDisappear(_ animated: Bool) {
        super.viewDidDisappear(animated)
        session.stopRunning()
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // metadataObjects is a non-optional array — the original
        // `!= nil` check was always true; only emptiness matters.
        guard let object = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
              object.type == .qr else { return }
        let alert = UIAlertController(title: "QR code", message: object.stringValue, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
        present(alert, animated: true, completion: nil)
    }
}
/// Skeleton showing the fix: the session is a member of the class so the
/// appear/disappear lifecycle callbacks can start and stop it.
class SecondViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    let session = AVCaptureSession()

    override func viewDidLoad() {
        // Was missing: lifecycle overrides must call super.
        super.viewDidLoad()
        // any setup you may need
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        session.startRunning()
        print("Start running")
    }

    override func viewDidDisappear(_ animated: Bool) {
        super.viewDidDisappear(animated)
        session.stopRunning()
        print ("Stop running")
    }
}
您已启动
AVCaptureSession
会话,但未明确停止它。在控制器之间切换时,系统可能会将控制器保留在内存中
在离开控制器并进行清理之前,您需要执行session.stopRunning()
。例如，在 viewWillDisappear(_:) 或类似的生命周期方法中执行
Ref:您已经启动了AVCaptureSession
会话,但没有明确停止它。在控制器之间切换时,系统可能会将控制器保留在内存中
在离开控制器并进行清理之前,您需要执行session.stopRunning()
。例如，在 viewWillDisappear(_:) 或类似的生命周期方法中执行
Ref:我建议将会话作为成员变量添加到类中,而不仅仅是viewDidLoad()方法
现在，在 viewWillAppear() 方法中，您可以调用 session.startRunning()；在 viewWillDisappear() 方法中，您可以调用 session.stopRunning()
以下是一个基本布局:
/// Basic layout for the fix: keep the session as a member so it outlives
/// viewDidLoad and can be stopped when the tab is left.
class SecondViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    let session = AVCaptureSession()

    override func viewDidLoad() {
        super.viewDidLoad()
        // any setup you may need
    }

    // UIViewController has no parameterless viewWillAppear()/viewWillDisappear();
    // the overrides must take the `animated` parameter or they will not compile.
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        // ensure everything is setup correctly
        session.startRunning()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // anything before navigating away
        session.stopRunning()
    }
}
我建议将会话作为成员变量添加到类中,而不仅仅是viewDidLoad()方法
现在，在 viewWillAppear() 方法中，您可以调用 session.startRunning()；在 viewWillDisappear() 方法中，您可以调用 session.stopRunning()
以下是一个基本布局:
/// Basic layout for the fix: keep the session as a member so it outlives
/// viewDidLoad and can be stopped when the tab is left.
class SecondViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    let session = AVCaptureSession()

    override func viewDidLoad() {
        super.viewDidLoad()
        // any setup you may need
    }

    // UIViewController has no parameterless viewWillAppear()/viewWillDisappear();
    // the overrides must take the `animated` parameter or they will not compile.
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        // ensure everything is setup correctly
        session.startRunning()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // anything before navigating away
        session.stopRunning()
    }
}
谢谢大家的帮助。在查看代码后，我意识到我要做的是重写（override）viewWillAppear(_:) 和 viewDidDisappear(_:)
工作代码:
import UIKit
/// Home tab. No camera work happens here — the capture session lives
/// entirely in the scanner tab's controller.
class FirstViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
    }
}
import UIKit
import AVFoundation
/// QR-scanner tab.
///
/// Fix for the reported bug: the capture session is now a stored property
/// (not a `viewDidLoad` local), so it can be stopped when this tab goes off
/// screen. Previously the local session was started once and could never be
/// stopped, which is why scanning continued on the Home tab.
class SecondViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    /// Owned by the controller so viewWillAppear/viewDidDisappear can
    /// start and stop it.
    let session = AVCaptureSession()
    var video = AVCaptureVideoPreviewLayer()
    @IBOutlet weak var square: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Define capture device. Guard instead of force-unwrapping:
        // default(for:) returns nil on devices/simulators without a camera.
        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
            print(">>>>Error: no video capture device available")
            return
        }
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice)
            session.addInput(input)
        } catch {
            // Include the underlying error instead of a bare marker string.
            print(">>>>Error: \(error)")
            return
        }
        let output = AVCaptureMetadataOutput()
        session.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        // Must be set AFTER the output has been added to the session.
        output.metadataObjectTypes = [.qr]
        video = AVCaptureVideoPreviewLayer(session: session)
        video.frame = view.layer.bounds
        view.layer.addSublayer(video)
        view.bringSubviewToFront(square)
        // Note: startRunning() moved to viewWillAppear so scanning only
        // happens while this tab is visible.
    }

    /// Start scanning only while this tab is on screen.
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        session.startRunning()
    }

    /// Stop the camera when the user switches tabs, so the Home tab does
    /// not keep detecting QR codes.
    override func viewDidDisappear(_ animated: Bool) {
        super.viewDidDisappear(animated)
        session.stopRunning()
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // metadataObjects is a non-optional array — the original
        // `!= nil` check was always true; only emptiness matters.
        guard let object = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
              object.type == .qr else { return }
        let alert = UIAlertController(title: "QR code", message: object.stringValue, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
        present(alert, animated: true, completion: nil)
    }
}
/// Skeleton showing the fix: the session is a member of the class so the
/// appear/disappear lifecycle callbacks can start and stop it.
class SecondViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    let session = AVCaptureSession()

    override func viewDidLoad() {
        // Was missing: lifecycle overrides must call super.
        super.viewDidLoad()
        // any setup you may need
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        session.startRunning()
        print("Start running")
    }

    override func viewDidDisappear(_ animated: Bool) {
        super.viewDidDisappear(animated)
        session.stopRunning()
        print ("Stop running")
    }
}
谢谢大家的帮助。在查看代码后，我意识到我要做的是重写（override）viewWillAppear(_:) 和 viewDidDisappear(_:)
工作代码:
import UIKit
/// Home tab. No camera work happens here — the capture session lives
/// entirely in the scanner tab's controller.
class FirstViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
    }
}
import UIKit
import AVFoundation
/// QR-scanner tab.
///
/// Fix for the reported bug: the capture session is now a stored property
/// (not a `viewDidLoad` local), so it can be stopped when this tab goes off
/// screen. Previously the local session was started once and could never be
/// stopped, which is why scanning continued on the Home tab.
class SecondViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    /// Owned by the controller so viewWillAppear/viewDidDisappear can
    /// start and stop it.
    let session = AVCaptureSession()
    var video = AVCaptureVideoPreviewLayer()
    @IBOutlet weak var square: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Define capture device. Guard instead of force-unwrapping:
        // default(for:) returns nil on devices/simulators without a camera.
        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
            print(">>>>Error: no video capture device available")
            return
        }
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice)
            session.addInput(input)
        } catch {
            // Include the underlying error instead of a bare marker string.
            print(">>>>Error: \(error)")
            return
        }
        let output = AVCaptureMetadataOutput()
        session.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        // Must be set AFTER the output has been added to the session.
        output.metadataObjectTypes = [.qr]
        video = AVCaptureVideoPreviewLayer(session: session)
        video.frame = view.layer.bounds
        view.layer.addSublayer(video)
        view.bringSubviewToFront(square)
        // Note: startRunning() moved to viewWillAppear so scanning only
        // happens while this tab is visible.
    }

    /// Start scanning only while this tab is on screen.
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        session.startRunning()
    }

    /// Stop the camera when the user switches tabs, so the Home tab does
    /// not keep detecting QR codes.
    override func viewDidDisappear(_ animated: Bool) {
        super.viewDidDisappear(animated)
        session.stopRunning()
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // metadataObjects is a non-optional array — the original
        // `!= nil` check was always true; only emptiness matters.
        guard let object = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
              object.type == .qr else { return }
        let alert = UIAlertController(title: "QR code", message: object.stringValue, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
        present(alert, animated: true, completion: nil)
    }
}
/// Skeleton showing the fix: the session is a member of the class so the
/// appear/disappear lifecycle callbacks can start and stop it.
class SecondViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    let session = AVCaptureSession()

    override func viewDidLoad() {
        // Was missing: lifecycle overrides must call super.
        super.viewDidLoad()
        // any setup you may need
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        session.startRunning()
        print("Start running")
    }

    override func viewDidDisappear(_ animated: Bool) {
        super.viewDidDisappear(animated)
        session.stopRunning()
        print ("Stop running")
    }
}
我会很快让您知道我的结果。@tharriott如果这对您有帮助,请将其标记为已批准的答案:)在您的示例中,您正在覆盖viewDidLoad
twiceWhoops!多谢各位@PeterPajchlI将很快让您知道我的结果。@tharriott如果这对您有帮助,请将其标记为已批准的答案:)在您的示例中,您正在覆盖viewDidLoad
twice（覆盖了两次）。Whoops！多谢各位 @PeterPajchl