Swift ARKit: detecting the intersection between planes


I am using ARKit (with SceneKit) and am trying to find a way to get the intersection between an ARReferenceImage and a detected horizontal ARPlaneAnchor, so that I can display a 3D character on the surface directly in front of the detected image, e.g. spawning inside the red circle (see the image below).

Currently I am able to get the character to spawn in front of the detected image; however, the character floats in the air rather than standing on the surface:

// Get the world position of the detected image anchor
let realWorldPosition = SCNVector3Make(anchor.transform.columns.3.x, anchor.transform.columns.3.y, anchor.transform.columns.3.z)
// Cast a segment from the scene origin to the image anchor and place the node at the first hit, with y hardcoded to 0
let hitTest = self.sceneView.scene.rootNode.hitTestWithSegment(from: self.sceneView.scene.rootNode.worldPosition, to: realWorldPosition, options: nil)
overlayNode.position = SCNVector3Make((hitTest.first?.worldCoordinates.x)!, 0, (hitTest.first?.worldCoordinates.z)!)
self.sceneView.scene.rootNode.addChildNode(overlayNode)
Any help with this would be greatly appreciated, thanks.


I think you are on the right track using the hitTestWithSegment function to detect an intersection between the ARImageAnchor and the ARPlaneAnchor.
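As a rough sketch of that idea (assuming you already have references to the detected imageAnchor, the detected planeAnchor and your sceneView), the segment test could look something like this:

// Rough sketch: the positions are taken from the translation columns of each anchor's transform
let imageAnchorPosition = SCNVector3(imageAnchor.transform.columns.3.x,
                                     imageAnchor.transform.columns.3.y,
                                     imageAnchor.transform.columns.3.z)

let planeAnchorPosition = SCNVector3(planeAnchor.transform.columns.3.x,
                                     planeAnchor.transform.columns.3.y,
                                     planeAnchor.transform.columns.3.z)

// Cast a segment from the image anchor towards the plane anchor and take the first hit
if let intersection = sceneView.scene.rootNode.hitTestWithSegment(from: imageAnchorPosition,
                                                                  to: planeAnchorPosition,
                                                                  options: nil).first {
    print("Intersected at \(intersection.worldCoordinates)")
}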

Rather than trying to explain every step of my attempted answer, I have provided fully commented code, so it should be fairly self-explanatory.

My example works reasonably well (although it is certainly not perfect) and will definitely need some tweaking.

For example, you will need to determine more accurately how far the ARReferenceImage is from the ARPlaneAnchor, etc.
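As a rough illustration (again assuming references to imageAnchor and planeAnchor), that distance could be measured from the translation columns of the two anchor transforms:

import simd

// Rough sketch: straight-line distance between the two anchor positions
let imagePosition = imageAnchor.transform.columns.3
let planePosition = planeAnchor.transform.columns.3

let distance = simd_distance(SIMD3<Float>(imagePosition.x, imagePosition.y, imagePosition.z),
                             SIMD3<Float>(planePosition.x, planePosition.y, planePosition.z))

print("The image anchor is \(distance)m from the plane anchor")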

I was able to place the model (a Pokémon) at the correct level and fairly close to the front of the ARReferenceImage, although it does need adjusting.

Having said that, I believe this will be a fairly good base for you to start refining the code and getting more accurate results.

It is worth noting, however, that I have only enabled a single ARPlaneAnchor (for simplicity's sake), and I have assumed that you will be detecting a plane in front of your image marker.

I have not taken rotation or anything like that into account, and of course, given the scenario you propose, it also assumes that your image is sitting on a table or other flat surface.
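If you did want to factor the image's rotation in later, one (untested) idea would be to copy the ARImageAnchor's transform onto a scratch SCNNode and reuse its yaw when placing the model, something like:

// Untested sketch: assumes imageAnchor is the detected ARImageAnchor and
// pokemonModel is the node placed in loadModelAtVector
let scratchNode = SCNNode()
scratchNode.simdTransform = imageAnchor.transform
pokemonModel.eulerAngles.y = scratchNode.eulerAngles.y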

Anyway, here is my answer (which hopefully should be fairly self-explanatory):

import UIKit
import ARKit

//-----------------------
//MARK: ARSCNViewDelegate
//-----------------------

extension ViewController: ARSCNViewDelegate{

    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {

        //1. If We Have Detected Our ImageTarget Then Create A Plane To Visualize It
        if let currentImageAnchor = anchor as? ARImageAnchor {

            createReferenceImagePlaneForNode(currentImageAnchor, node: node)
            allowTracking = true

        }

        //2. If We Have Detected A Horizontal Plane Then Create One
        if let currentPlaneAnchor = anchor as? ARPlaneAnchor{

            if planeNode == nil && !createdModel{ createReferencePlaneForNode(currentPlaneAnchor, node: node) }
        }

    }

    func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {

        //1. Check To See Whether An ARPlaneAnchor Has Been Updated
        guard let anchor = anchor as? ARPlaneAnchor,
              //2. Check It Is Our PlaneNode
              let existingPlane = planeNode,
              //3. Get The Geometry Of The PlaneNode
              let planeGeometry = existingPlane.geometry as? SCNPlane else { return }

        //4. Adjust Its Size & Position
        planeGeometry.width = CGFloat(anchor.extent.x)
        planeGeometry.height = CGFloat(anchor.extent.z)

        planeNode?.position = SCNVector3Make(anchor.center.x, 0.01, anchor.center.z)
    }

    func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {

        //1. Detect The Intersection Of The ARPlaneAnchor & ARImageAnchor
        if allowTracking { detectIntersectionOfImageTarget() }

    }

}

//---------------------------------------
//MARK: Model Generation & Identification
//---------------------------------------

extension ViewController {

    /// Detects If We Have Intersected A Valid Image Target
    func detectIntersectionOfImageTarget(){

        //If We Haven't Created Our Model Then Check To See If We Have Detected An Existing Plane
        if !createdModel{

            //a. Perform A HitTest On The Center Of The Screen For Any Existing Planes
            guard let planeHitTest = self.augmentedRealityView.hitTest(screenCenter, types: .existingPlaneUsingExtent).first,
                let planeAnchor = planeHitTest.anchor as? ARPlaneAnchor else { return }

            //b. Get The Transform Of The ARPlaneAnchor
            let x = planeAnchor.transform.columns.3.x
            let y = planeAnchor.transform.columns.3.y
            let z = planeAnchor.transform.columns.3.z

            //c. Create The Anchor's Vector
            let anchorVector = SCNVector3(x, y, z)

            //d. Perform Another HitTest From The ImageAnchor Vector To The Anchor's Vector
            if let _ = self.augmentedRealityView.scene.rootNode.hitTestWithSegment(from: imageAnchorVector, to: anchorVector, options: nil).first?.node {

                //e. If We Haven't Created The Model Then Place It As Soon As An Intersection Occurs
                if createdModel == false{

                    //f. Load The Model At The Image Anchor's X & Z, Using The Plane Anchor's Y
                    loadModelAtVector(SCNVector3(imageAnchorVector.x, y, imageAnchorVector.z))

                    createdModel = true

                    planeNode?.removeFromParentNode()

                }
            }
        }
    }

}

class ViewController: UIViewController {

    //1. Reference To Our ImageTarget Bundle
    let AR_BUNDLE = "AR Resources"

    //2. Vector To Store The Position Of Our Detected Image
    var imageAnchorVector: SCNVector3!

    //3. Variables To Allow Tracking & To Determine Whether Our Model Has Been Placed
    var allowTracking = false
    var createdModel = false

    //4. Create A Reference To Our ARSCNView In Our Storyboard Which Displays The Camera Feed
    @IBOutlet weak var augmentedRealityView: ARSCNView!

    //5. Create Our ARWorld Tracking Configuration
    let configuration = ARWorldTrackingConfiguration()

    //6. Create Our Session
    let augmentedRealitySession = ARSession()

    //7. ARReference Images
    lazy var staticReferenceImages: Set<ARReferenceImage> = {

        let images = ARReferenceImage.referenceImages(inGroupNamed: AR_BUNDLE, bundle: nil)
        return images!

    }()

    //8. Screen Center Reference
    var screenCenter: CGPoint!

    //9. PlaneNode
    var planeNode: SCNNode?

    //--------------------
    //MARK: View LifeCycle
    //--------------------

    override func viewDidLoad() {

        super.viewDidLoad()

        //1. Get Reference To The Center Of The Screen For RayCasting
        DispatchQueue.main.async { self.screenCenter = CGPoint(x: self.view.bounds.width/2, y: self.view.bounds.height/2) }

        //2. Setup Our ARSession
        setupARSessionWithStaticImages()


    }

    override func didReceiveMemoryWarning() { super.didReceiveMemoryWarning() }

    //---------------------------------
    //MARK: ARImageAnchor Visualization
    //---------------------------------

    /// Creates An SCNPlane For Visualizing The Detected ARImageAnchor
    ///
    /// - Parameters:
    ///   - imageAnchor: ARImageAnchor
    ///   - node: SCNNode
    func createReferenceImagePlaneForNode(_ imageAnchor: ARImageAnchor, node: SCNNode){

        //1. Get The Targets Width & Height
        let width = imageAnchor.referenceImage.physicalSize.width
        let height = imageAnchor.referenceImage.physicalSize.height

        //2. Create A Plane Geometry To Cover The ARImageAnchor
        let planeNode = SCNNode()
        let planeGeometry = SCNPlane(width: width, height: height)
        planeGeometry.firstMaterial?.diffuse.contents = UIColor.white
        planeNode.opacity = 0.5
        planeNode.geometry = planeGeometry

        //3. Rotate The PlaneNode To Horizontal
        planeNode.eulerAngles.x = -.pi/2

        //4. The Node Is Centered In The Anchor (0,0,0)
        node.addChildNode(planeNode)

        //5. Store The Vector Of The ARImageAnchor
        imageAnchorVector = SCNVector3(imageAnchor.transform.columns.3.x, imageAnchor.transform.columns.3.y, imageAnchor.transform.columns.3.z)

        let fadeOutAction = SCNAction.fadeOut(duration: 5)
        planeNode.runAction(fadeOutAction)

    }

    //-------------------------
    //MARK: Plane Visualization
    //-------------------------

    /// Creates An SCNPlane For Visualizing The Detected ARPlaneAnchor
    ///
    /// - Parameters:
    ///   - anchor: ARPlaneAnchor
    ///   - node: SCNNode
    func createReferencePlaneForNode(_ anchor: ARPlaneAnchor, node: SCNNode){

        //1. Get The Anchors Width & Height
        let width = CGFloat(anchor.extent.x)
        let height = CGFloat(anchor.extent.z)

        //2. Create A Plane Geometry To Cover The ARPlaneAnchor
        planeNode = SCNNode()
        let planeGeometry = SCNPlane(width: width, height: height)
        planeGeometry.firstMaterial?.diffuse.contents = UIColor.white
        planeNode?.opacity = 0.5
        planeNode?.geometry = planeGeometry

        //3. Rotate The PlaneNode To Horizontal
        planeNode?.eulerAngles.x = -.pi/2

        //4. The Node Is Centered In The Anchor (0,0,0)
        node.addChildNode(planeNode!)

    }

    //-------------------
    //MARK: Model Loading
    //-------------------


    /// Loads Our Model Based On The Resulting Vector Of Our ARAnchor
    ///
    /// - Parameter worldVector: SCNVector3
    func loadModelAtVector(_ worldVector: SCNVector3) {

        let modelPath = "ARModels.scnassets/Scatterbug.scn"

        //1. Get The Reference To Our SCNScene & Get The Model Root Node
        guard let model = SCNScene(named: modelPath),
              let pokemonModel = model.rootNode.childNode(withName: "RootNode", recursively: false) else { return }

        //2. Scale The Scatterbug
        pokemonModel.scale = SCNVector3(0.003, 0.003, 0.003)

        //3. Position It At The Resulting World Vector & Add It To Our Scene
        pokemonModel.position = worldVector

        augmentedRealityView.scene.rootNode.addChildNode(pokemonModel)

    }

    //---------------
    //MARK: ARSession
    //---------------

    /// Sets Up The ARSession With Our Static ARReferenceImages
    func setupARSessionWithStaticImages(){

        //1. Set Our Configuration
        configuration.detectionImages = staticReferenceImages
        configuration.planeDetection = .horizontal

        //2. Run The Configuration
        augmentedRealitySession.run(configuration, options: [.resetTracking, .removeExistingAnchors])

        //3. Set The Session & Delegate
        augmentedRealityView?.session = augmentedRealitySession
        self.augmentedRealityView?.delegate = self

    }

}