JavaScript: how to send a Blob obtained from getUserMedia() to socket.io, have the server send it back, and play it in a video element


Here is the code:

 var mediaSource = new MediaSource();
 mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
 var mediaRecorder;
 var recordedBlobs;
 var sourceBuffer;
 var socket = io();
 var recordedVideo = document.querySelector('video#recorded');
 var gumVideo = document.querySelector('video#gum');
 var translateButton = document.querySelector('button#record');

 translateButton.onclick = startTranslate;
 recordedVideo.src = window.URL.createObjectURL(mediaSource);

 socket.on('video', function (data) {
   sourceBuffer.appendBuffer(data);
 });

 // `constraints` and `handleError` are assumed here; they were not part of
 // the original snippet.
 var constraints = { audio: true, video: true };

 navigator.mediaDevices.getUserMedia(constraints)
 .then(handleSuccess).catch(handleError);

 function handleError(error) {
     console.error('getUserMedia() error: ', error);
 }

 function handleSourceOpen(event) {
      console.log('MediaSource opened');
      sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
      sourceBuffer.onupdate = function(){
        console.log("updating");
      };
      console.log('Source buffer: ', sourceBuffer);
 }

 function handleSuccess(stream) {
     console.log('getUserMedia() got stream: ', stream);
     window.stream = stream;
     // srcObject is the standard way to attach a MediaStream;
     // createObjectURL(stream) is deprecated and removed from newer browsers.
     if ('srcObject' in gumVideo) {
       gumVideo.srcObject = stream;
     } else {
       gumVideo.src = window.URL.createObjectURL(stream);
     }
 }

 function startTranslate() {
     recordedBlobs = [];
     var options = {mimeType: 'video/webm;codecs=vp9'};
     if (!MediaRecorder.isTypeSupported(options.mimeType)) {
       console.log(options.mimeType + ' is not Supported');
       options = {mimeType: 'video/webm;codecs=vp8'};
       if (!MediaRecorder.isTypeSupported(options.mimeType)) {
         console.log(options.mimeType + ' is not Supported');
         options = {mimeType: 'video/webm'};
         if (!MediaRecorder.isTypeSupported(options.mimeType)) {
           console.log(options.mimeType + ' is not Supported');
           options = {mimeType: ''};
         }
       }
     }
     try {
       mediaRecorder = new MediaRecorder(window.stream, options);
     } 
     catch (e) {
       console.error('Exception while creating MediaRecorder: ' + e);
       alert('Exception while creating MediaRecorder: '+ e +'.mimeType: ' + options.mimeType);
       return;
     }
     console.log('Created MediaRecorder', mediaRecorder, 'with options', options);
     // `recordButton`, `playButton` and `downloadButton` are not defined in
     // this snippet; the button declared above is `translateButton`.
     translateButton.textContent = 'Stop Recording';
     mediaRecorder.onstop = handleStop;
     mediaRecorder.ondataavailable = handleDataAvailable;
     mediaRecorder.start(10); // fire a dataavailable event every 10 ms
     console.log('MediaRecorder started', mediaRecorder);
 }

 function handleDataAvailable(event) {
     if (event.data && event.data.size > 0) {
       socket.emit('video', event.data);
     }
 }
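
The server side is not shown in the question; from the title it presumably just relays each chunk back to the clients. A minimal sketch of such a relay, assuming Node with Express and socket.io (file name, directory and port are illustrative):

 // server.js (hypothetical) -- relay recorded chunks to all connected clients
 var express = require('express');
 var http = require('http');
 var app = express();
 var server = http.createServer(app);
 var io = require('socket.io')(server);

 app.use(express.static('public')); // serve the client page

 io.on('connection', function (socket) {
   socket.on('video', function (data) {
     // socket.io carries binary payloads natively; broadcast each
     // chunk to every client, including the sender.
     io.emit('video', data);
   });
 });

 server.listen(3000, function () {
   console.log('listening on :3000');
 });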
When I click the "translate" button, after 2-3 seconds I get the following error:

Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.


What does this error mean? Or is it a WebRTC error?
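
For what it's worth, the message means the SourceBuffer is no longer attached to its MediaSource, i.e. the MediaSource has left the 'open' state by the time appendBuffer() runs. Two plausible triggers in the code above: appendBuffer() being called while sourceBuffer.updating is still true (10 ms chunks can arrive faster than they are processed), and the recorder possibly negotiating vp9 while the SourceBuffer was created for vp8, so the appended bytes fail to decode and the MediaSource errors out. A common mitigation, sketched here with the variable names from the question, is to queue incoming chunks and append one at a time:

 var queue = [];

 socket.on('video', function (data) {
   queue.push(data); // socket.io usually delivers binary as an ArrayBuffer
   appendNext();
 });

 function handleSourceOpen(event) {
   sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
   // 'updateend' fires once the previous append has been processed.
   sourceBuffer.addEventListener('updateend', appendNext);
 }

 function appendNext() {
   if (!sourceBuffer || sourceBuffer.updating) return; // append still running
   if (mediaSource.readyState !== 'open') return;      // buffer was detached
   if (queue.length === 0) return;
   sourceBuffer.appendBuffer(new Uint8Array(queue.shift()));
 }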

Don't. Use WebRTC, which was built for exactly this. Besides, 1-70 ms of latency is unreasonable and impossible; codecs work in chunks. The code above doesn't use WebRTC at all. Honestly, what you are doing here is futile: MediaRecorder is not designed for low latency. I strongly suggest dropping this approach and using WebRTC.

I don't want a peer-to-peer connection; I want broadcaster -> server -> viewers. How could I use WebRTC here???

You don't have to use WebRTC, but you do need a video encoder optimized for latency, and a playback side optimized for latency as well. And if you were serious about your latency requirement, you would use a p2p connection to remove the extra hop. I don't know whether it will help you, but there are some other questions that may be related:
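
To make the commenters' suggestion concrete: with WebRTC the live tracks go directly over an RTCPeerConnection (no MediaRecorder/MediaSource round-trip), and socket.io is used only as the signaling channel. Below is a broadcaster-side sketch under those assumptions; the event names ('viewer-joined', 'webrtc-offer', etc.) are made up for illustration, and a real broadcaster -> server -> viewers setup would terminate WebRTC on the server with an SFU such as mediasoup or Janus rather than one peer connection per viewer:

 // Hypothetical broadcaster side: one RTCPeerConnection per viewer,
 // socket.io used only for signaling. Event names are illustrative.
 var peers = {};

 function broadcastWithWebRTC(stream) {
   socket.on('viewer-joined', function (viewerId) {
     var pc = new RTCPeerConnection({
       iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
     });
     peers[viewerId] = pc;

     // Send the live tracks directly; no MediaRecorder involved.
     stream.getTracks().forEach(function (track) {
       pc.addTrack(track, stream);
     });

     pc.onicecandidate = function (e) {
       if (e.candidate) {
         socket.emit('ice-candidate', { to: viewerId, candidate: e.candidate });
       }
     };

     pc.createOffer()
       .then(function (offer) { return pc.setLocalDescription(offer); })
       .then(function () {
         socket.emit('webrtc-offer', { to: viewerId, sdp: pc.localDescription });
       });
   });

   socket.on('webrtc-answer', function (msg) {
     peers[msg.from].setRemoteDescription(new RTCSessionDescription(msg.sdp));
   });

   socket.on('ice-candidate', function (msg) {
     if (peers[msg.from]) {
       peers[msg.from].addIceCandidate(new RTCIceCandidate(msg.candidate));
     }
   });
 }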