How do I implement ARCore together with Twilio video on Android?

I want to implement ARCore together with a Twilio video call. The documentation says this is possible, but I can't figure out how. Can anyone tell me what I'm doing wrong?

This is my activity:

class MixActivity : AppCompatActivity() {

private lateinit var mArFragment: ArFragment
private lateinit var mVideoView: ArSceneView
private var mScreenVideoTrack: LocalVideoTrack? = null

override fun onCreate(savedInstanceState: Bundle?) {
    super.onCreate(savedInstanceState)

    setContentView(R.layout.activity_mix)

    // Obtain the AR fragment declared in activity_mix.xml and its ArSceneView
    mArFragment = ar_fragment as ArFragment
    mVideoView = mArFragment.arSceneView

    // Wrap the AR scene view in the custom capturer and expose it as a local Twilio video track
    mScreenVideoTrack = LocalVideoTrack.create(this, true,
        ViewCapturer(mVideoView)
    )
}
}
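
Creating the LocalVideoTrack on its own does not publish anything to Twilio; the track also has to be attached to a Room. Below is a minimal sketch of how that connection might look as an extra method on MixActivity, using the Twilio Video Android SDK classes ConnectOptions, Video and Room. The names connectToRoom, accessToken, roomName and roomListener are placeholders, and the exact builder methods may differ between SDK versions:

private fun connectToRoom(accessToken: String, roomName: String, roomListener: Room.Listener): Room {
    // Publish the ViewCapturer-backed track; listOfNotNull() guards against the nullable field
    val connectOptions = ConnectOptions.Builder(accessToken)
        .roomName(roomName)
        .videoTracks(listOfNotNull(mScreenVideoTrack))
        .build()

    // roomListener is your own Room.Listener implementation (connect/disconnect callbacks, participants, errors)
    return Video.connect(this, connectOptions, roomListener)
}
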
This is the layout:

<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
            android:id="@+id/container"
            android:layout_width="match_parent"
            android:layout_height="match_parent">

<fragment
        android:id="@+id/ar_fragment"
        android:name="com.google.ar.sceneform.ux.ArFragment"
        android:layout_width="match_parent"
        android:layout_height="match_parent"/>

 </RelativeLayout>

And the video capturer:

internal class ViewCapturer(private val view: View) : VideoCapturer, PixelCopy.OnPixelCopyFinishedListener {
private val handler = Handler(Looper.getMainLooper())
private var videoCapturerListener: VideoCapturer.Listener? = null
private val started = AtomicBoolean(false)
private lateinit var mViewBitmap: Bitmap

private val viewCapturer = object : Runnable {
    override fun run() {
        val dropFrame = view.width == 0 || view.height == 0

        // Only capture the view if the dimensions have been established
        if (!dropFrame) {
            // Draw view into bitmap backed canvas
            val measuredWidth = View.MeasureSpec.makeMeasureSpec(
                view.width,
                View.MeasureSpec.EXACTLY
            )
            val measuredHeight = View.MeasureSpec.makeMeasureSpec(
                view.height,
                View.MeasureSpec.EXACTLY
            )
            view.measure(measuredWidth, measuredHeight)
            view.layout(0, 0, view.measuredWidth, view.measuredHeight)

            mViewBitmap = Bitmap.createBitmap(
                view.width, view.height,
                Bitmap.Config.ARGB_8888
            )
            val viewCanvas = Canvas(mViewBitmap)
            view.draw(viewCanvas)

            // Extract the frame from the bitmap
            val bytes = mViewBitmap.byteCount
            val buffer = ByteBuffer.allocate(bytes)
            mViewBitmap.copyPixelsToBuffer(buffer)
            val array = buffer.array()
            val captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime())

            // Create video frame
            val dimensions = VideoDimensions(view.width, view.height)
            val videoFrame = VideoFrame(
                array,
                dimensions, VideoFrame.RotationAngle.ROTATION_0, captureTimeNs
            )

            // Notify the listener
            if (started.get()) {
                videoCapturerListener!!.onFrameCaptured(videoFrame)
            }
        }

        // Schedule the next capture
        if (started.get()) {
            handler.postDelayed(this, VIEW_CAPTURER_FRAMERATE_MS.toLong())
        }
    }
}

/**
 * Returns the list of supported formats for this view capturer. Currently, only supports
 * capturing to RGBA_8888 bitmaps.
 *
 * @return list of supported formats.
 */
override fun getSupportedFormats(): List<VideoFormat> {
    val videoFormats = ArrayList<VideoFormat>()
    val videoDimensions = VideoDimensions(view.width, view.height)
    val videoFormat = VideoFormat(videoDimensions, 30, VideoPixelFormat.RGBA_8888)

    videoFormats.add(videoFormat)

    return videoFormats
}

/**
 * Returns true because we are capturing screen content.
 */
override fun isScreencast(): Boolean {
    return true
}

/**
 * This will be invoked when it is time to start capturing frames.
 *
 * @param videoFormat the video format of the frames to be captured.
 * @param listener capturer listener.
 */
override fun startCapture(videoFormat: VideoFormat, listener: VideoCapturer.Listener) {
    // Store the capturer listener
    this.videoCapturerListener = listener
    this.started.set(true)

    // Notify capturer API that the capturer has started
    val capturerStarted = handler.postDelayed(
        viewCapturer,
        VIEW_CAPTURER_FRAMERATE_MS.toLong()
    )
    this.videoCapturerListener!!.onCapturerStarted(capturerStarted)
}

/**
 * Stop capturing frames. Note that the SDK cannot receive frames once this has been invoked.
 */
override fun stopCapture() {
    this.started.set(false)
    handler.removeCallbacks(viewCapturer)
}

override fun onPixelCopyFinished(i: Int) {
    // Extract the frame from the bitmap
    val bytes = mViewBitmap.getByteCount()
    val buffer = ByteBuffer.allocate(bytes)
    mViewBitmap.copyPixelsToBuffer(buffer)
    val array = buffer.array()
    val captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime())

    // Create video frame
    val dimensions = VideoDimensions(view.width, view.height)
    val videoFrame = VideoFrame(
        array,
        dimensions, VideoFrame.RotationAngle.ROTATION_0, captureTimeNs
    )

    // Notify the listener
    if (started.get()) {
        videoCapturerListener?.onFrameCaptured(videoFrame)
    }
    if (started.get()) {
        handler.postDelayed(viewCapturer, VIEW_CAPTURER_FRAMERATE_MS.toLong())
    }
}

companion object {
    private val VIEW_CAPTURER_FRAMERATE_MS = 100
}
}
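
One thing worth noting about the capturer: the Runnable above renders the view with view.draw(canvas), but ArSceneView is a SurfaceView, and drawing a SurfaceView onto a software Canvas generally produces blank frames. The class already implements PixelCopy.OnPixelCopyFinishedListener, which suggests a PixelCopy-based capture was intended (onPixelCopyFinished() would then also normally check that the result code equals PixelCopy.SUCCESS before using the bitmap). A hedged sketch of what the viewCapturer Runnable's run() body might look like with PixelCopy, assuming API 24+ and that the captured view really is a SurfaceView:

// Hedged sketch: alternative run() body for the viewCapturer Runnable, using PixelCopy
// instead of view.draw(). onPixelCopyFinished() above then builds the VideoFrame and
// re-posts viewCapturer, so no extra scheduling is needed in the success path.
override fun run() {
    if (view.width > 0 && view.height > 0 && started.get()) {
        mViewBitmap = Bitmap.createBitmap(view.width, view.height, Bitmap.Config.ARGB_8888)
        PixelCopy.request(view as SurfaceView, mViewBitmap, this@ViewCapturer, handler)
    } else if (started.get()) {
        // Dimensions not established yet; try again on the next tick
        handler.postDelayed(this, VIEW_CAPTURER_FRAMERATE_MS.toLong())
    }
}
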
The ARCore part works, but the Twilio part does not. I referred to another post about this, but it is not complete:
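
Independent of that post, one quick sanity check is to render the local track into a preview view before publishing it, which shows whether the capturer is producing frames at all. This is only a sketch: it assumes an SDK generation where VideoTrack.addRenderer() and com.twilio.video.VideoView are available (the same generation as the custom VideoCapturer interface above), and a hypothetical VideoView with the id local_video_view added to activity_mix.xml:

// Hedged sketch: local preview of the ViewCapturer-backed track.
// local_video_view is a hypothetical com.twilio.video.VideoView declared in activity_mix.xml.
mScreenVideoTrack?.addRenderer(local_video_view)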

Hi, Stack Overflow is not a site that provides ready-made solutions. Please share what you have tried and where you are stuck to get a better response. To improve your chances of getting a better reply, please see