Unity3d: How to use a scene camera with Agora.io in Unity

In Unity, I have integrated Agora.io so that, from inside my virtual reality application, I can connect a video call to an external user on a web page. The VR user can see the website user, but the website user cannot see the VR user, because there is no physical camera available to use. Is there a way to use the scene camera as the Agora video source? That would mean the website user could see the VR user's world.

Yes. Although I haven't done a VR project before, the concept should carry over. You can use an external video source to send any frame of video, just as if it came from a physical camera. For the scene camera, you can output the camera feed to a RenderTexture and extract the raw data from that texture. So the steps are:

  • Set the camera to output to a RenderTexture (plus, if needed, logic to display that texture somewhere locally); see the sketch after this list
  • Also, make sure that when you set up the Agora RTC engine you enable the external video source with the following call:

    mRtcEngine.SetExternalVideoSource(true, false);

  • On every frame, extract the raw image data from the RenderTexture
  • Send the raw frame data to the SDK function rtc.pushVideoFrame()
  • You can find code for the last step here
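
For the first step, here is a minimal sketch of routing a scene camera into a RenderTexture; the component name, field names, and texture settings are illustrative assumptions, not from the original post:

    using UnityEngine;

    // Minimal sketch: render a scene camera into a RenderTexture so its frames
    // can later be read back and pushed to Agora as an external video source.
    public class SceneCameraToTexture : MonoBehaviour
    {
        public Camera sceneCamera;      // the scene/VR camera to broadcast (assumed name)
        public RenderTexture videoRT;   // e.g. a 1280x720 ARGB32 RenderTexture asset

        void Start()
        {
            // Assigning targetTexture makes the camera render into videoRT
            // instead of the screen.
            sceneCamera.targetTexture = videoRT;
        }
    }

Note that a camera with a targetTexture assigned no longer renders to the display, so if the player should still see the scene, use a dedicated second camera for the capture.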

    I modified the screen-share code from the Agora.io example to extract the render texture. The problem is that when my render texture is the depth-cam video stream, I only get a white or black screen on the receiver.

    using System.Collections;
    using System.Collections.Generic;
    using UnityEngine;
    using agora_gaming_rtc;
    using UnityEngine.UI;
    using System.Globalization;
    using System.Runtime.InteropServices;
    using System;

    public class ShareScreen : MonoBehaviour
    {
        Texture2D mTexture;
        Rect mRect;
        [SerializeField]
        private string appId = "Your_AppID";
        [SerializeField]
        private string channelName = "agora";
        public IRtcEngine mRtcEngine;
        int i = 100;
        public RenderTexture depthMap;

        void Start()
        {
            Debug.Log("ScreenShare Activated");
            mRtcEngine = IRtcEngine.getEngine(appId);
            mRtcEngine.SetLogFilter(LOG_FILTER.DEBUG | LOG_FILTER.INFO | LOG_FILTER.WARNING | LOG_FILTER.ERROR | LOG_FILTER.CRITICAL);
            mRtcEngine.SetParameters("{\"rtc.log_filter\": 65535}");
            // Tell the engine we will push frames ourselves instead of using a device camera
            mRtcEngine.SetExternalVideoSource(true, false);
            mRtcEngine.EnableVideo();
            mRtcEngine.EnableVideoObserver();
            mRtcEngine.JoinChannel(channelName, null, 0);
            // Read region and destination texture sized to match the RenderTexture
            mRect = new Rect(0, 0, depthMap.width, depthMap.height);
            mTexture = new Texture2D((int)mRect.width, (int)mRect.height, TextureFormat.RGBA32, false);
        }

        void Update()
        {
            // Start the screen-share coroutine once per frame
            StartCoroutine(shareScreen());
        }

        // Screen share
        IEnumerator shareScreen()
        {
            yield return new WaitForEndOfFrame();
            // Make the render texture active so ReadPixels copies from it
            RenderTexture.active = depthMap;
            // Read the pixels inside the rectangle
            mTexture.ReadPixels(mRect, 0, 0);
            // Apply the pixels read from the rectangle to the texture
            mTexture.Apply();

            // Get the raw texture data from the texture as an array of bytes
            byte[] bytes = mTexture.GetRawTextureData();
            // Check whether an engine instance has already been created
            IRtcEngine rtc = IRtcEngine.QueryEngine();
            // If the engine is present
            if (rtc != null)
            {
                // Create a new external video frame
                ExternalVideoFrame externalVideoFrame = new ExternalVideoFrame();
                // Set the buffer type of the video frame
                externalVideoFrame.type = ExternalVideoFrame.VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
                // Set the video pixel format
                externalVideoFrame.format = ExternalVideoFrame.VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_BGRA;
                // Apply the raw data pulled from the rectangle to the video frame
                externalVideoFrame.buffer = bytes;
                // Set the width of the video frame (in pixels)
                externalVideoFrame.stride = (int)mRect.width;
                // Set the height of the video frame
                externalVideoFrame.height = (int)mRect.height;
                // Remove pixels from the sides of the frame
                externalVideoFrame.cropLeft = 0;
                externalVideoFrame.cropTop = 0;
                externalVideoFrame.cropRight = 0;
                externalVideoFrame.cropBottom = 0;
                // Rotate the video frame (0, 90, 180, or 270)
                externalVideoFrame.rotation = 180;
                // Increment i as the video timestamp
                externalVideoFrame.timestamp = i++;
                // Push the external video frame we just created
                int a = rtc.PushVideoFrame(externalVideoFrame);
                Debug.Log(" pushVideoFrame = " + a);
            }
        }
    }

    I am using Unity 2019.3. The render texture is called depthMap.
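
    A possible explanation, offered as an assumption rather than anything confirmed in this thread: depth render textures normally use a single-channel float format whose nonlinear values sit close to 0 or 1, so reading them straight into an RGBA32 texture tends to produce an all-black or all-white image. A minimal sketch of one workaround, where depthToColorMat is a hypothetical material whose shader remaps depth into visible grayscale before the readback:

    // Sketch only (untested assumption): fields added to ShareScreen.
    public Material depthToColorMat;   // hypothetical material whose shader remaps depth to grayscale
    RenderTexture rgbaRT;

    // Inside shareScreen(), replacing the readback from depthMap:
    if (rgbaRT == null)
        rgbaRT = new RenderTexture(depthMap.width, depthMap.height, 0, RenderTextureFormat.ARGB32);
    // Convert the single-channel depth texture into an RGBA texture that the
    // RGBA32 readback (and the BGRA push) can actually represent.
    Graphics.Blit(depthMap, rgbaRT, depthToColorMat);
    RenderTexture.active = rgbaRT;
    mTexture.ReadPixels(mRect, 0, 0);
    mTexture.Apply();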