Java ARcore与Twilio视频通话
如何将 ArFragment 与 Twilio 视频通话结合使用？我已经通过 Twilio 房间（Room）把 ArFragment 的画面发布给参与者，但接收端只显示黑屏。我尝试了如下方案。在我的 Activity 中：

localVideoTrack = LocalVideoTrack.create(this, true, new ViewCapturer(arFragment.getArSceneView()), LOCAL_VIDEO_TRACK_NAME);
if (localVideoTrack != null) {
    localVideoTrack.addRenderer(videoTextureView);
}

ViewCapturer 类（实现 VideoCapturer，其中帧间隔常量 VIEW_CAPTURER_FRAMERATE_MS = 100）如下：
// Target view whose rendered contents are captured as video frames.
private final View view;
// Posts capture ticks on the main looper; View#draw must run on the UI thread.
private Handler handler = new Handler(Looper.getMainLooper());
// Twilio callback that receives captured frames; assigned in startCapture().
private VideoCapturer.Listener videoCapturerListener;
// Gates frame delivery and rescheduling once stopCapture() is called.
private AtomicBoolean started = new AtomicBoolean(false);
// Periodic task: draws the view into an ARGB_8888 bitmap and forwards the raw
// pixel bytes to the Twilio listener as an RGBA frame, then reschedules itself.
// NOTE(review): View#draw only captures what the view renders onto its own
// Canvas. SurfaceView/GLSurfaceView-backed views — such as ArSceneView — render
// to a separate surface, so this path produces black frames; this is the likely
// cause of the receiver-side black screen. Confirm and consider PixelCopy.
private final Runnable viewCapturer = new Runnable() {
    @Override
    public void run() {
        // Skip frames until the view has a non-zero size.
        boolean dropFrame = view.getWidth() == 0 || view.getHeight() == 0;
        // Only capture the view if the dimensions have been established
        if (!dropFrame) {
            // Draw view into bitmap backed canvas.
            // NOTE(review): re-measuring/re-laying-out an attached view here can
            // fight the framework's own layout pass — verify this is intended.
            int measuredWidth = View.MeasureSpec.makeMeasureSpec(view.getWidth(),
                    View.MeasureSpec.EXACTLY);
            int measuredHeight = View.MeasureSpec.makeMeasureSpec(view.getHeight(),
                    View.MeasureSpec.EXACTLY);
            view.measure(measuredWidth, measuredHeight);
            view.layout(0, 0, view.getMeasuredWidth(), view.getMeasuredHeight());
            Bitmap viewBitmap = Bitmap.createBitmap(view.getWidth(), view.getHeight(),
                    Bitmap.Config.ARGB_8888);
            Canvas viewCanvas = new Canvas(viewBitmap);
            view.draw(viewCanvas);
            // Extract the frame from the bitmap as a raw byte array.
            int bytes = viewBitmap.getByteCount();
            ByteBuffer buffer = ByteBuffer.allocate(bytes);
            viewBitmap.copyPixelsToBuffer(buffer);
            byte[] array = buffer.array();
            // Timestamp in nanoseconds, as required by the VideoFrame contract.
            final long captureTimeNs =
                    TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
            // Create video frame
            VideoDimensions dimensions = new VideoDimensions(view.getWidth(), view.getHeight());
            VideoFrame videoFrame = new VideoFrame(array,
                    dimensions, VideoFrame.RotationAngle.ROTATION_0, captureTimeNs);
            // Notify the listener only while capture is active.
            if (started.get()) {
                videoCapturerListener.onFrameCaptured(videoFrame);
            }
        }
        // Schedule the next capture tick unless stopCapture() ran meanwhile.
        if (started.get()) {
            handler.postDelayed(this, VIEW_CAPTURER_FRAMERATE_MS);
        }
    }
};
/**
 * Creates a capturer that streams the rendered contents of the given view.
 *
 * @param view the view to capture; must not be {@code null}.
 * @throws NullPointerException if {@code view} is null — failing here gives a
 *         clear construction-time error instead of a delayed NPE on the first
 *         capture tick posted to the main looper.
 */
public ViewCapturer(View view) {
    if (view == null) {
        throw new NullPointerException("view must not be null");
    }
    this.view = view;
}
/**
 * Lists the formats this capturer can produce. Exactly one format is offered:
 * RGBA_8888 at the view's current dimensions and 30 frames per second.
 *
 * @return a single-element list with the supported capture format.
 */
@NonNull
@Override
public List<VideoFormat> getSupportedFormats() {
    VideoDimensions dimensions =
            new VideoDimensions(view.getWidth(), view.getHeight());
    List<VideoFormat> formats = new ArrayList<>();
    formats.add(new VideoFormat(dimensions, 30, VideoPixelFormat.RGBA_8888));
    return formats;
}
/**
 * Reports whether this capturer provides screen content.
 *
 * @return always {@code true}: frames come from a rendered view, not a camera.
 */
@Override
public boolean isScreencast() {
    return true;
}
/**
 * Begins periodic frame capture by scheduling the capture runnable on the
 * main looper.
 *
 * <p>Note: {@code videoFormat} is accepted for API compatibility but not used;
 * frames are always produced at the view's current size (see
 * {@link #getSupportedFormats()}).
 *
 * @param videoFormat the requested video format (currently ignored).
 * @param listener capturer listener notified of start status and frames.
 */
@Override
public void startCapture(VideoFormat videoFormat, Listener listener) {
    // Store the capturer listener
    this.videoCapturerListener = listener;
    // Schedule the first capture tick. postDelayed returns false when the
    // message could not be enqueued (e.g. the looper is quitting).
    boolean capturerStarted = handler.postDelayed(viewCapturer,
            VIEW_CAPTURER_FRAMERATE_MS);
    // Derive the started flag from the scheduling result. The original code
    // set it to true unconditionally, so a failed post left the flag claiming
    // "started" while onCapturerStarted(false) told the SDK otherwise.
    this.started.set(capturerStarted);
    // Notify capturer API whether the capturer actually started.
    this.videoCapturerListener.onCapturerStarted(capturerStarted);
}
/**
 * Halts frame capture. The started flag is cleared first so an in-flight tick
 * neither delivers a frame nor reschedules itself; any pending tick is then
 * removed from the handler queue. The SDK receives no frames after this call.
 */
@Override
public void stopCapture() {
    started.set(false);
    handler.removeCallbacks(viewCapturer);
}
（以下为页面抓取时被机器翻译破坏的同一份 ViewCapturer 代码，其内容与上面的英文原版完全一致 —— 标识符被逐词翻译成中文，已无法编译，请以上面的版本为准。）
}
我已经通过 Twilio 房间（Room）把 ArFragment 加入了视频通话。
请问如何把 AR 相机画面共享到视频通话中？