Unity3d AR camera distance measurement


I have a question about augmented reality.

I'd like to know how to display the distance (e.g., in centimeters or meters) between the AR camera and a target object, using a smartphone.

Can I do this in Unity? Should I use AR Foundation? What about ARCore? And how should I write the code?

I tried to find some related code (below), but it looks like it prints the distance between one object and another, not the "AR camera".

Thanks again.

I've also been working on AR depth images. The basic idea is:

  • Acquire the image through the API, usually in Depth16 format
  • Read the image into a ShortBuffer, since Depth16 stores each pixel as 16 bits
  • Extract the distance value, which is stored in the lower 13 bits of each short; you can get it with (shortSample & 0x1FFF), which gives the distance for that pixel, usually in millimeters
  • By doing this for every pixel, you can build a depth image and save it as a JPG or other format. Here is sample code that uses AR Engine to get the distance:

    try (Image depthImage = arFrame.acquireDepthImage()) {
            int imwidth = depthImage.getWidth();
            int imheight = depthImage.getHeight();
            Image.Plane plane = depthImage.getPlanes()[0];
            ShortBuffer shortDepthBuffer = plane.getBuffer().asShortBuffer();
            File sdCardFile = Environment.getExternalStorageDirectory();
            Log.i(TAG, "The storage path is " + sdCardFile);
            File file = new File(sdCardFile, "RawdepthImage.jpg");
    
            Bitmap disBitmap = Bitmap.createBitmap(imwidth, imheight, Bitmap.Config.RGB_565);
            for (int i = 0; i < imheight; i++) {
                for (int j = 0; j < imwidth; j++) {
                    int index = (i * imwidth + j) ;
                    shortDepthBuffer.position(index);
                    short depthSample = shortDepthBuffer.get();
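                    // Depth16: the upper 3 bits carry a confidence code, the lower 13 bits
                    // the range in millimeters, so mask with 0x1FFF to keep only the range.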
                    short depthRange = (short) (depthSample & 0x1FFF);
                    //If you only want the distance value, here it is
                    byte value = (byte) depthRange; // note: a raw cast to byte wraps beyond 255 mm; scale for real use
                    disBitmap.setPixel(j, i, Color.rgb(value, value, value));
                }
            }
            //I rotate the image for a better view
            Matrix matrix = new Matrix();
            matrix.setRotate(90);
            Bitmap rotatedBitmap = Bitmap.createBitmap(disBitmap, 0, 0, imwidth, imheight, matrix, true);
    
            try {
                FileOutputStream out = new FileOutputStream(file);
                rotatedBitmap.compress(Bitmap.CompressFormat.JPEG, 90, out);
                out.flush();
                out.close();
                MainActivity.num++;
            } catch (Exception e) {
                e.printStackTrace();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
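
For reference, here is the same Depth16 unpacking written in C#, matching the Unity examples later in this thread. This is a minimal sketch with a hypothetical helper name; the bit layout (upper 3 bits confidence, lower 13 bits range in millimeters) follows the Android DEPTH16 image format:

    // Hypothetical helper: decodes a single 16-bit Depth16 sample.
    // Upper 3 bits hold a confidence code (see the Android DEPTH16 docs for its encoding),
    // lower 13 bits hold the range in millimeters.
    static (float meters, int confidence) DecodeDepth16(ushort sample) {
        int millimeters = sample & 0x1FFF;      // lower 13 bits: distance in mm
        int confidence = (sample >> 13) & 0x7;  // upper 3 bits: confidence code
        return (millimeters / 1000f, confidence);
    }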
    
Here's how to do it in Unity with AR Foundation 4.1. This example script prints the depth in meters at the center of the depth texture, and works with both ARCore and ARKit:

    using System;
    using System.Collections;
    using UnityEngine;
    using UnityEngine.Assertions;
    using UnityEngine.XR.ARFoundation;
    using UnityEngine.XR.ARSubsystems;
    
    
    public class GetDepthOfCenterPixel : MonoBehaviour {
        // assign this field in inspector
        [SerializeField] AROcclusionManager manager = null;
        
        
        IEnumerator Start() {
            while (ARSession.state < ARSessionState.SessionInitializing) {
                // manager.descriptor.supportsEnvironmentDepthImage will return a correct value if ARSession.state >= ARSessionState.SessionInitializing 
                yield return null;
            }
            
            if (!manager.descriptor.supportsEnvironmentDepthImage) {
                Debug.LogError("!manager.descriptor.supportsEnvironmentDepthImage");
                yield break;
            }
            
            while (true) {
                if (manager.TryAcquireEnvironmentDepthCpuImage(out var cpuImage) && cpuImage.valid) {
                    using (cpuImage) {
                        Assert.IsTrue(cpuImage.planeCount == 1);
                        var plane = cpuImage.GetPlane(0);
                        var dataLength = plane.data.Length;
                        var pixelStride = plane.pixelStride;
                        var rowStride = plane.rowStride;
                        Assert.AreEqual(0, dataLength % rowStride, "dataLength should be divisible by rowStride without a remainder");
                        Assert.AreEqual(0, rowStride % pixelStride, "rowStride should be divisible by pixelStride without a remainder");
    
                        var numOfRows = dataLength / rowStride;
                        var centerRowIndex = numOfRows / 2;
                        var centerPixelIndex = rowStride / (pixelStride * 2);
                        var centerPixelData = plane.data.GetSubArray(centerRowIndex * rowStride + centerPixelIndex * pixelStride, pixelStride);
                        var depthInMeters = convertPixelDataToDistanceInMeters(centerPixelData.ToArray(), cpuImage.format);
                        print($"depth texture size: ({cpuImage.width},{cpuImage.height}), pixelStride: {pixelStride}, rowStride: {rowStride}, pixel pos: ({centerPixelIndex}, {centerRowIndex}), depthInMeters of the center pixel: {depthInMeters}");
                    }
                }
                
                yield return null;
            }
        }
    
        float convertPixelDataToDistanceInMeters(byte[] data, XRCpuImage.Format format) {
            switch (format) {
                case XRCpuImage.Format.DepthUint16:
                    return BitConverter.ToUInt16(data, 0) / 1000f;
                case XRCpuImage.Format.DepthFloat32:
                    return BitConverter.ToSingle(data, 0);
                default:
                    throw new Exception($"Format not supported: {format}");
            }
        }
    }
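
A quick setup note: attach the script to any GameObject in the AR scene and assign the AROcclusionManager (usually the one on the AR camera) in the Inspector. If I read the AR Foundation docs correctly, environment depth also has to be enabled on that manager (its requested environment depth mode must not be Disabled), and the device itself must support depth (the Depth API on ARCore, or LiDAR on ARKit); otherwise TryAcquireEnvironmentDepthCpuImage won't return a valid image.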
    
    
While this answer is great, it may be too complex and advanced for this question, which is about the distance between the ARCamera and another object, not the depth and occlusion of individual pixels.
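
For the simple case the question actually asks about, no depth image is needed: in Unity, the AR camera and the target are both just Transforms in the same world space, and AR Foundation's world units are meters, so Vector3.Distance is enough. A minimal sketch, assuming the target has already been placed in the scene (the class name, fields, and the UI Text display are illustrative, not from any particular API):

    using UnityEngine;
    using UnityEngine.UI;
    
    // Displays the distance between the AR camera and a target object.
    public class CameraToTargetDistance : MonoBehaviour {
        // assign these fields in the Inspector (names are placeholders for this sketch)
        [SerializeField] Camera arCamera = null;    // the AR camera; its Transform follows the device pose
        [SerializeField] Transform target = null;   // the object to measure to, e.g. placed on a detected plane
        [SerializeField] Text distanceLabel = null; // optional UI Text to show the result
    
        void Update() {
            if (arCamera == null || target == null)
                return;
    
            // World-space distance equals real-world distance (in meters), because the
            // AR camera's Transform tracks the physical device.
            float meters = Vector3.Distance(arCamera.transform.position, target.position);
    
            var text = $"{meters:F2} m ({meters * 100f:F0} cm)";
            if (distanceLabel != null)
                distanceLabel.text = text;
            else
                Debug.Log(text);
        }
    }

If the target is placed by raycasting against a detected plane (e.g. with ARRaycastManager) and anchored there, this distance updates every frame as the phone moves, which is exactly the "AR camera to object" measurement the question describes.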