Javascript MediaRecorderAPI-自动记录
我目前正在尝试实现音频自动录制:用户讲话,停止后,音频应自动提交到后端。我已经有了一个示例脚本,可以通过点击开始和停止按钮来提交音频。我想获取某种数值,例如振幅、音量或阈值,但我不确定 MediaRecorder 是否支持此功能,还是需要使用 Web Audio API 或其他解决方案。
我可以用 MediaRecorder 实现这一点吗?关于麦克风输入的音频分析,以下示例演示如何获取麦克风捕获的音频:使用 AudioContext(旧版 WebKit 中为 webkitAudioContext)的 createAnalyser 方法创建 AnalyserNode,将麦克风流连接到分析器并计算指定大小的 FFT,从而分析音高并绘制输出声波。
// Feature-detect the AudioContext constructor (prefixed in older WebKit).
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var audioContext = null;        // Web Audio context, created in onload
var isPlaying = false;          // true while a source is playing (never set true in this file — TODO confirm caller)
var sourceNode = null;          // audio source node (only touched by the stop branch in toggleLiveInput)
var analyser = null;            // AnalyserNode fed by the microphone stream
var theBuffer = null;           // NOTE(review): unused in this file
var audioCtx = null;            // NOTE(review): misleading name — holds the <canvas> element, not an audio context
var mediaStreamSource = null;   // MediaStreamAudioSourceNode wrapping the mic stream
var rafID = null;               // requestAnimationFrame handle for the draw loop
var j = 0;                      // frame counter; advances the x position on the canvas
var waveCanvas = null;          // NOTE(review): unused in this file
var canvasCtx = null;           // 2D drawing context (was an implicit global — now declared)
window.onload = function() {
  audioContext = new AudioContext();
  audioCtx = document.getElementById( "waveform" );
  canvasCtx = audioCtx.getContext("2d");
};
/**
 * Request microphone access and hand the resulting MediaStream to `callback`.
 *
 * Prefers the modern promise-based navigator.mediaDevices.getUserMedia and
 * falls back to the deprecated prefixed navigator.getUserMedia variants for
 * older browsers. In both paths the module-level error() function is used as
 * the failure handler.
 *
 * @param {Object} dictionary - media constraints (e.g. { audio: {...} });
 *     NOTE(review): callers pass legacy Chrome "mandatory"/"goog*" keys —
 *     modern implementations ignore unknown constraints, so this stays safe.
 * @param {Function} callback - success callback receiving the MediaStream
 */
function getUserMedia(dictionary, callback) {
  try {
    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
      navigator.mediaDevices.getUserMedia(dictionary).then(callback, error);
      return;
    }
    // Legacy fallback for browsers without mediaDevices.
    navigator.getUserMedia =
      navigator.getUserMedia ||
      navigator.webkitGetUserMedia ||
      navigator.mozGetUserMedia;
    navigator.getUserMedia(dictionary, callback, error);
  } catch (e) {
    alert('getUserMedia threw exception :' + e);
  }
}
/**
 * Success handler for getUserMedia: routes the microphone stream into an
 * AnalyserNode and starts the waveform drawing loop.
 * @param {MediaStream} stream - live microphone capture
 */
function gotStream(stream) {
  // Build the analyser, then wire the mic stream into it.
  analyser = audioContext.createAnalyser();
  analyser.fftSize = 1024;
  mediaStreamSource = audioContext.createMediaStreamSource(stream);
  mediaStreamSource.connect(analyser);
  // Begin sampling time-domain data and rendering each animation frame.
  updatePitch();
}
/**
 * "Start" button handler: resets the waveform display, (re)allocates the
 * shared sample buffer, flips the start/stop buttons, and requests
 * microphone input.
 */
function toggleLiveInput()
{
  // Wipe the previous trace. NOTE(review): `audioCtx` holds the <canvas>
  // element, so width/height here are the canvas dimensions.
  canvasCtx.clearRect(0, 0, audioCtx.width, audioCtx.height);
  canvasCtx.beginPath();
  j = 0;
  // Intentionally left global (no `var`): updatePitch() reads both of these.
  buflen = 1024;
  buf = new Float32Array( buflen );
  document.getElementById('toggleLiveInput').disabled = true;
  document.getElementById('toggleLiveInputStop').disabled = false;
  if (isPlaying) {
    // Halt any currently playing source and cancel the draw loop.
    sourceNode.stop( 0 );
    sourceNode = null;
    isPlaying = false;
    if (!window.cancelAnimationFrame)
      window.cancelAnimationFrame = window.webkitCancelAnimationFrame;
    window.cancelAnimationFrame( rafID );
  }
  // Legacy Chrome constraint syntax; disables all built-in audio processing.
  var constraints = {
    "audio": {
      "mandatory": {
        "googEchoCancellation": "false",
        "googAutoGainControl": "false",
        "googNoiseSuppression": "false",
        "googHighpassFilter": "false"
      },
      "optional": []
    },
  };
  getUserMedia(constraints, gotStream);
}
/**
 * "Stop" button handler: re-enables the start button, halts the animation
 * loop, and returns the literal string "start" (presumably for the caller
 * to use as the next button label — TODO confirm).
 * @returns {string} always "start"
 */
function stop()
{
  var startButton = document.getElementById('toggleLiveInput');
  var stopButton = document.getElementById('toggleLiveInputStop');
  startButton.disabled = false;
  stopButton.disabled = true;
  // Polyfill the unprefixed cancelAnimationFrame if needed, then stop drawing.
  if (!window.cancelAnimationFrame) {
    window.cancelAnimationFrame = window.webkitCancelAnimationFrame;
  }
  window.cancelAnimationFrame(rafID);
  return "start";
}
// Per-frame draw loop: samples one time-domain value from the analyser and
// extends the scrolling waveform on the canvas, then re-schedules itself
// via requestAnimationFrame.
// NOTE(review): relies on implicit globals x, prex, prey, previousImage,
// and on buf/buflen created in toggleLiveInput().
function updatePitch()
{
analyser.fftSize = 1024;
// Fill `buf` with the latest time-domain samples (values in [-1, 1]).
analyser.getFloatTimeDomainData(buf);
canvasCtx.strokeStyle = "red";
// NOTE(review): this loop executes exactly once per frame (i=0 only),
// so only buf[0] is ever plotted — possibly intended to iterate the buffer.
for (var i=0;i<2;i+=2)
{
// x position advances 5px per frame.
x = j*5;
if(audioCtx.width < x)
{
// Past the right edge: scroll the existing image 5px left, then draw the
// new segment at the rightmost column.
x = audioCtx.width - 5;
previousImage = canvasCtx.getImageData(5, 0, audioCtx.width, audioCtx.height);
canvasCtx.putImageData(previousImage, 0, 0);
canvasCtx.beginPath();
canvasCtx.lineWidth = 2;
canvasCtx.strokeStyle = "red";
// Shift the previous point left to match the scrolled image.
prex = prex - 5;
canvasCtx.lineTo(prex,prey);
prex = x;
// Map sample [-1, 1] to canvas y [0, 256] around midline 128.
prey = 128+(buf[i]*128);
canvasCtx.lineTo(x,128+(buf[i]*128));
canvasCtx.stroke();
}
else
{
// Still filling the canvas left-to-right: append the next segment.
prex = x;
prey = 128+(buf[i]*128);
canvasCtx.lineWidth = 2;
canvasCtx.lineTo(x,128+(buf[i]*128));
canvasCtx.stroke();
}
j++;
}
// Polyfill the unprefixed requestAnimationFrame, then schedule next frame.
if (!window.requestAnimationFrame)
window.requestAnimationFrame = window.webkitRequestAnimationFrame;
rafID = window.requestAnimationFrame( updatePitch );
}
/**
 * Failure handler for getUserMedia: logs a descriptive Error (with stack)
 * to the console.
 */
function error() {
  console.error(new Error('error while generating audio'));
}
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var audioContext = null;
var isPlaying = false;
var sourceNode = null;
var analyser = null;
var theBuffer = null;
var audioCtx = null;
var mediaStreamSource = null;
var rafID = null;
var j = 0;
var waveCanvas = null;
window.onload = function() {
audioContext = new AudioContext();
audioCtx = document.getElementById( "waveform" );
canvasCtx = audioCtx.getContext("2d");
};
function getUserMedia(dictionary, callback) {
try {
navigator.getUserMedia =
navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia;
navigator.getUserMedia(dictionary, callback, error);
} catch (e) {
alert('getUserMedia threw exception :' + e);
}
}
function gotStream(stream) {
// Create an AudioNode from the stream.
mediaStreamSource = audioContext.createMediaStreamSource(stream);
// Connect it to the destination.
analyser = audioContext.createAnalyser();
analyser.fftSize = 1024;
mediaStreamSource.connect( analyser );
updatePitch();
}
function toggleLiveInput()
{
canvasCtx.clearRect(0, 0, audioCtx.width, audioCtx.height);
canvasCtx.beginPath();
j = 0;
buflen = 1024;
buf = new Float32Array( buflen );
document.getElementById('toggleLiveInput').disabled = true;
document.getElementById('toggleLiveInputStop').disabled = false;
if (isPlaying) {
//stop playing and return
sourceNode.stop( 0 );
sourceNode = null;
isPlaying = false;
if (!window.cancelAnimationFrame)
window.cancelAnimationFrame = window.webkitCancelAnimationFrame;
window.cancelAnimationFrame( rafID );
}
getUserMedia(
{
"audio": {
"mandatory": {
"googEchoCancellation": "false",
"googAutoGainControl": "false",
"googNoiseSuppression": "false",
"googHighpassFilter": "false"
},
"optional": []
},
}, gotStream);
}
function stop()
{
document.getElementById('toggleLiveInput').disabled = false;
document.getElementById('toggleLiveInputStop').disabled = true;
if (!window.cancelAnimationFrame)
window.cancelAnimationFrame = window.webkitCancelAnimationFrame;
window.cancelAnimationFrame(rafID);
return "start";
}
function updatePitch()
{
analyser.fftSize = 1024;
analyser.getFloatTimeDomainData(buf);
canvasCtx.strokeStyle = "red";
for (var i=0;i<2;i+=2)