Javascript 使用 desktopCapture API 和 socket.io 共享 Chrome 选项卡以远程显示内容
是否可以使用 socket.io 服务器对使用 chrome.desktopCapture API 捕获的媒体流进行流式传输?我有文档中的示例 desktopCapture 应用程序,我确信经过一些修改后,它可以与 socket.io 服务器一起使用:
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
'use strict';

// Media types offered in the chooseDesktopMedia() picker dialog.
const DESKTOP_MEDIA = ['screen', 'tab', 'audio'];

// Id of an in-flight chooseDesktopMedia() request (null while none pending).
let pending_request_id = null;

// Peer-connection placeholders used by the commented-out loopback demo below.
let pc1 = null;
let pc2 = null;

// "start": open the desktop-media picker and remember the request id.
document.querySelector('#start').addEventListener('click', (event) => {
  pending_request_id =
      chrome.desktopCapture.chooseDesktopMedia(DESKTOP_MEDIA, onAccessApproved);
});

// "cancel": abort the pending picker request, if any.
document.querySelector('#cancel').addEventListener('click', (event) => {
  if (pending_request_id != null) {
    chrome.desktopCapture.cancelChooseDesktopMedia(pending_request_id);
  }
});

// "startFromBackgroundPage": relay a request through the background page
// and log whatever it answers.
document.querySelector('#startFromBackgroundPage')
    .addEventListener('click', (event) => {
      chrome.runtime.sendMessage({}, (response) => {
        console.log(response.farewell);
      });
    });
// Launch webkitGetUserMedia() based on selected media id.
// `id` is the media-source id handed back by chooseDesktopMedia();
// a falsy id means the user dismissed the picker.
function onAccessApproved(id, options) {
  if (!id) {
    console.log('Access rejected.');
    return;
  }
  // Audio may only be requested when the picker granted it; otherwise the
  // audio constraint must be `false`.
  var audioConstraint = {
    mandatory: {
      chromeMediaSource: 'desktop',
      chromeMediaSourceId: id
    }
  };
  console.log(options.canRequestAudioTrack);
  if (!options.canRequestAudioTrack) {
    audioConstraint = false;
  }
  // Cap the capture at the physical screen size.
  var videoConstraint = {
    mandatory: {
      chromeMediaSource: 'desktop',
      chromeMediaSourceId: id,
      maxWidth: screen.width,
      maxHeight: screen.height
    }
  };
  navigator.webkitGetUserMedia(
      {audio: audioConstraint, video: videoConstraint},
      gotStream,
      getUserMediaError);
}
// Log failures reported by navigator.webkitGetUserMedia() so capture
// problems are visible in the console.
// Fixed typo in the log message: 'errot' -> 'error'.
function getUserMediaError(error) {
  console.log('navigator.webkitGetUserMedia() error: ', error);
}
// Capture video/audio of media and initialize RTC communication.
// Attaches the captured MediaStream to the page's <video> element so the
// user can preview what is being shared. The commented-out section below is
// a local RTCPeerConnection loopback demo (pc1 -> pc2) kept for reference.
function gotStream(stream) {
console.log('Received local stream', stream);
// NOTE(review): assumes the page contains exactly one <video> element.
var video = document.querySelector('video');
try {
// Modern path: assign the MediaStream directly.
video.srcObject = stream;
} catch (error) {
// Fallback for older browsers without srcObject support.
video.src = URL.createObjectURL(stream);
}
// NOTE(review): MediaStream.onended is deprecated and may never fire in
// modern Chrome; prefer the 'ended' event on the individual tracks.
stream.onended = function() { console.log('Ended'); };
// pc1 = new RTCPeerConnection();
// pc1.onicecandidate = function(event) {
// onIceCandidate(pc1, event);
// };
// pc2 = new RTCPeerConnection();
// pc2.onicecandidate = function(event) {
// onIceCandidate(pc2, event);
// };
// pc1.oniceconnectionstatechange = function(event) {
// onIceStateChange(pc1, event);
// };
// pc2.oniceconnectionstatechange = function(event) {
// onIceStateChange(pc2, event);
// };
// pc2.onaddstream = gotRemoteStream;
// pc1.addStream(stream);
// pc1.createOffer(onCreateOfferSuccess, function() {});
}
// function onCreateOfferSuccess(desc) {
// pc1.setLocalDescription(desc);
// pc2.setRemoteDescription(desc);
// // Since the 'remote' side has no media stream we need
// // to pass in the right constraints in order for it to
// // accept the incoming offer of audio and video.
// var sdpConstraints = {
// 'mandatory': {
// 'OfferToReceiveAudio': true,
// 'OfferToReceiveVideo': true
// }
// };
// pc2.createAnswer(onCreateAnswerSuccess, function(){}, sdpConstraints);
// }
// function gotRemoteStream(event) {
// // Call the polyfill wrapper to attach the media stream to this element.
// console.log('hitting this code');
// try {
// remoteVideo.srcObject = event.stream;
// } catch (error) {
// remoteVideo.src = URL.createObjectURL(event.stream);
// }
// }
// function onCreateAnswerSuccess(desc) {
// pc2.setLocalDescription(desc);
// pc1.setRemoteDescription(desc);
// }
// function onIceCandidate(pc, event) {
// if (event.candidate) {
// var remotePC = (pc === pc1) ? pc2 : pc1;
// remotePC.addIceCandidate(new RTCIceCandidate(event.candidate));
// }
// }
// function onIceStateChange(pc, event) {
// if (pc) {
// console.log('ICE state change event: ', event);
// }
// }
在原始代码中,我认为可以替换注释掉的部分,改用 socket.io 或 socket.io-p2p 来传输用户捕获的选项卡/桌面窗口。我想做一个点对点(p2p)的私有共享,所以我在 Vue 中使用 peerId 通过 URL 路由创建可访问的房间:用户 A 获得一个临时 URL,用户 B 通过包含该对等 id 的 URL 查看用户 A 共享的远程选项卡。
谁能给我指一下方向吗
更新
我有这个工作代码,我可以让用户选择标签源
// Capture the current tab/window/screen when the extension icon is clicked,
// record it with MediaRecorder, and collect chunks suitable for streaming
// over a socket.
//
// Fixes over the original:
//  - `if (chunks > 0)` compared an Array to a number and ran synchronously
//    before any data could arrive; chunk handling now lives in
//    `ondataavailable`, which is where data should be emitted to socket.io.
//  - `ondataavailable` is registered BEFORE start() so no chunk is missed.
//  - `MediaStream.oninactive` is deprecated/unreliable; the robust way to
//    detect the user pressing the browser's "Stop sharing" button is the
//    'ended' event on the captured video track.
//  - The requestData() interval is cleared on stop, so requestData() is
//    never called on an inactive recorder (which would throw).
chrome.browserAction.onClicked.addListener(async (tab) => {
  const displayMediaOptions = {
    video: true,
    audio: true
  };
  const chunks = [];

  // Prompts the user to pick a tab/window/screen to share.
  const captureStream =
      await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);
  console.log(captureStream);

  const mediaRecorder = new MediaRecorder(captureStream);

  // Each `e.data` is a Blob; this is the place to forward data over the
  // socket, e.g. socket.emit('stream', e.data).
  mediaRecorder.ondataavailable = (e) => {
    console.log(e);
    if (e.data && e.data.size > 0) {
      chunks.push(e.data);
      // socket.emit('stream', e.data);
    }
  };

  mediaRecorder.start();
  console.log(mediaRecorder.state);

  // Flush a chunk every second so ondataavailable fires periodically.
  const flushTimer = setInterval(() => {
    if (mediaRecorder.state === 'recording') {
      mediaRecorder.requestData();
    }
  }, 1000);
  console.log(mediaRecorder);

  // Fires when the user clicks the browser's "Stop sharing" button.
  captureStream.getVideoTracks()[0].addEventListener('ended', () => {
    clearInterval(flushTimer);
    if (mediaRecorder.state !== 'inactive') {
      mediaRecorder.stop();
    }
  });
});
问题在于事件:当用户单击浏览器在被捕获的选项卡/窗口上显示的"停止共享"按钮时,我无法随之停止媒体录制和推流。由于没有使用 Chrome 扩展的捕获 API,我也不清楚如何把流交给 MediaRecorder,以便为 socket.io 提供数据。
有什么建议吗