结合autobahn websockets、gstreamers和html5 mediaSource API的实现
我正在使用 autobahn|python 运行 websocket 服务器。在服务器端,我还运行了一个 gstreamer 管道,并通过 "appsink" 用它捕获 webm 帧。实施的 gstreamer 管道是:
gst-launch-1.0 v4l2src ! video/x-raw,width=640,height=480 ! videoconvert ! vp8enc ! webmmux ! appsink name="sink"
每次我在appsink中收到缓冲区时,我都会使用sendMessage将其作为二进制“消息”通过websocket发送
def on_new_buffer(appsink):
    """appsink 'new-sample' callback: relay one muxed WebM buffer to clients.

    Pulls the current sample out of the pipeline's appsink, copies its bytes
    out of GStreamer-owned memory, and sends them to every connected
    websocket client as a single binary message.

    :param appsink: the GStreamer appsink element that emitted the signal
    :returns: False (== 0, which equals Gst.FlowReturn.OK) so the pipeline
              keeps flowing -- presumably relied upon; confirm against the
              signal's expected Gst.FlowReturn return type.
    """
    # FIX: removed unused `global once` declaration from the original.
    gstsample = appsink.emit('pull-sample')
    gstbuffer = gstsample.get_buffer()
    # extract_dup copies the whole buffer; safe to use after the sample is freed.
    frame_data = gstbuffer.extract_dup(0, gstbuffer.get_size())
    for c in global_clients:
        # isBinary=True -> one raw binary websocket message per appsink buffer.
        # NOTE(review): message boundaries here are arbitrary appsink buffers,
        # not WebM cluster boundaries; the browser-side demuxer can then see
        # out-of-order timecodes ("Got a block with a timecode before the
        # previous block") -- consider chunking on cluster boundaries.
        c.sendMessage(frame_data, True)
        print("Directly sent: {0} bytes".format(len(frame_data)))
    return False
在客户端,我有一套处理接收到的 frame_data Blob 的复杂流程。
有一个 FileReader、MediaSource 和 SourceBuffer。每当接收到 frame_data 时,使用 FileReader 将其读取为缓冲区;如果 FileReader 正忙于读取前一帧数据,则将其追加到 "buffer_pool" 中。一旦 frame_data 被读取为缓冲区,它就会被追加到 "sourceBuffer" 中;如果 "sourceBuffer" 仍在更新前一个区块,则它会被追加到 "sourceBufferpool" 中。
呸,描述得很长!!!我希望你能做到这一点。
现在,真正的问题是:
请帮忙 嘿,车坦!很好的描述,主题有进展吗?我刚开始研究类似的任务(vp8 & MSE & Websockets)。据我所知,我必须首先将初始化段(initialization segment)附加到
sourceBuffer
。但我不知道如何从Videostream获取初始段。可能,我必须以某种方式使用gstreamer
或ffmpeg
生成它。。。
<script>
var video = document.getElementById('v');               // <video> element fed by the MediaSource
var playButton = document.getElementById('playbutton');
var mediaSource;                                        // created/attached in setupVideo()
var sourceBuffer;                                       // 'video/webm; codecs="vp8"' buffer, set on 'sourceopen'
var buffer_pool = [];                                   // Blobs received while the FileReader is busy
var sourceBufferpool = [];                              // ArrayBuffers queued while sourceBuffer.updating
function setupVideo() {
  // Attach a MediaSource to the <video> element and wire up the SourceBuffer
  // plus diagnostic listeners. Streaming starts once 'sourceopen' fires.
  window.MediaSource = window.MediaSource || window.WebKitMediaSource; // prefixed fallback for old WebKit
  if (!window.MediaSource) {
    alert('MediaSource API is not available');
    return; // FIX: original fell through and crashed on `new MediaSource()` below
  }
  mediaSource = new MediaSource();
  video.src = window.URL.createObjectURL(mediaSource);
  mediaSource.addEventListener('sourceopen', function (e) {
    try {
      // NOTE(review): the very first appended chunk must contain the WebM
      // initialization segment (EBML header + Segment info + Tracks) or the
      // demuxer will error out -- confirm the server sends it first.
      sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
    } catch (e) {
      console.log('Exception calling addSourceBuffer for video', e);
      return;
    }
    sourceBuffer.addEventListener('error', function (e) { console.log('error: ' + e.target + mediaSource.readyState); });
    sourceBuffer.addEventListener('abort', function (e) { console.log('abort: ' + e.target + mediaSource.readyState); });
    // Drain one pooled chunk per 'update' cycle: appendBuffer may only be
    // called while `updating` is false.
    sourceBuffer.addEventListener('update', function () {
      if (sourceBufferpool.length > 0 && !sourceBuffer.updating) {
        try {
          sourceBuffer.appendBuffer(sourceBufferpool.shift());
          console.log('update: pooled buffer appended ' + sourceBufferpool.length + mediaSource.readyState);
        } catch (e) {
          console.log('Exception calling appendBuffer for video ', e);
          return;
        }
      }
    }, false);
    if (video.paused) {
      video.play();
    }
    startWSStreaming();
  }, false);
  mediaSource.addEventListener('sourceended', function (e) { console.log('sourceended: ' + mediaSource.readyState); });
  mediaSource.addEventListener('sourceclose', function (e) { console.log('sourceclose: ' + mediaSource.readyState); });
  mediaSource.addEventListener('error', function (e) { console.log('error: ' + mediaSource.readyState); });
}
function startWSStreaming() {
  // Open the websocket and pump each received binary message (a WebM chunk)
  // through a FileReader into the SourceBuffer, queueing when either the
  // reader or the SourceBuffer is busy.
  var reader = new FileReader();
  var ws; // FIX: `ws` was an implicit global in the original
  reader.onload = function (evt) {
    // A Blob has been converted to an ArrayBuffer: append it now, or park it
    // if an append is already in flight (the 'update' handler drains the pool).
    if (sourceBuffer.updating || sourceBufferpool.length > 0) {
      sourceBufferpool.push(new Uint8Array(evt.target.result));
      console.log('update: pooled buffer appended ' + sourceBufferpool.length + mediaSource.readyState);
    } else {
      sourceBuffer.appendBuffer(new Uint8Array(evt.target.result));
      console.log('update: direct buffer appended ' + sourceBufferpool.length + mediaSource.readyState);
    }
  };
  reader.onloadend = function (evt) {
    // Chain reads: when one chunk finishes, immediately start the next pooled one.
    if (buffer_pool.length > 0) {
      var chunk = new Blob([buffer_pool.shift()], {type: 'video/webm'});
      evt.target.readAsArrayBuffer(chunk);
      console.log('Processed buffer pool: current size ' + buffer_pool.length);
    }
  };
  ws = new WebSocket("ws://localhost:9000/");
  ws.onopen = function () {
    document.getElementById("MSG1").innerHTML = 'Websocket opened <br>';
  };
  ws.onmessage = function (e) {
    var myBuffer = e.data; // FIX: `myBuffer` was an implicit global in the original
    // NOTE(review): chunks are queued in arrival order, but if websocket
    // message boundaries don't align with WebM cluster boundaries the demuxer
    // can report "Got a block with a timecode before the previous block"
    // (see the pipeline log) -- the framing must be fixed server-side.
    if (reader.readyState === 1 || buffer_pool.length > 0) { // 1 === FileReader.LOADING
      buffer_pool.push(myBuffer);
      console.log('Received buffer pooled: current size ' + buffer_pool.length);
    } else {
      var chunk = new Blob([myBuffer], {type: 'video/webm'});
      reader.readAsArrayBuffer(chunk);
      console.log('First buffer processed');
    }
  };
}
</script>
Timestamp Property Value
00:00:00 00 pipeline_state kCreated
00:00:00 00 EVENT PIPELINE_CREATED
00:00:00 00 EVENT WEBMEDIAPLAYER_CREATED
00:00:00 00 url blob:http%3A//localhost%3A8080/09060a78-9759-4fcd-97a2-997121ba6122
00:00:00 00 pipeline_state kInitDemuxer
00:00:01 668 duration unknown
00:00:01 669 pipeline_state kInitVideoRenderer
00:00:01 685 pipeline_state kPlaying
00:00:03 820 EVENT PLAY
00:00:04 191 error Got a block with a timecode before the previous block.
00:00:04 191 pipeline_error pipeline: decode error
00:00:04 191 pipeline_state kStopping
00:00:04 192 pipeline_state kStopped
00:00:28 483 EVENT WEBMEDIAPLAYER_DESTROYED