Javascript 使用recorder.js在web2py中实现音频录制

标签：javascript, html, json, web2py, bower

我正在开发一个web应用程序,我想在其中使用recorder.js文件来实现音频录制。我有这段代码,但无法在web2py框架中使用。你能告诉我如何使用它的详细信息吗? 有一个.bower.json文件在web2py中上传时被隐藏。我们如何使用它并链接所有文件以便进行录制

这是my.bower.json脚本:

 {
  "name": "Recorderjs",
  "homepage": "https://github.com/mattdiamond/Recorderjs",
  "_release": "f814ac7b3f",
  "_resolution": {
"type": "branch",
"branch": "master",
"commit": "f814ac7b3f4ed4f62729860d5a02720f167480b3"
  },
 "_source": "git://github.com/mattdiamond/Recorderjs.git",
 "_target": "*",
 "_originalSource": "Recorderjs",
 "_direct": true
 }
这是我的html代码:

<body>
<script src="bower_components/Recorderjs/recorder.js">
</script>
<script src="app.js"></script>

<button onclick="record()">Record</button>
<button onclick="stop()">Stop</button>
这是 recorder.js：

// recorder.js (bower_components/Recorderjs/recorder.js) — main-thread half of
// the recorder. NOTE(review): this excerpt is truncated — the exportWAV
// method, the `window.Recorder = Recorder;` export and the closing
// `})(window);` of this IIFE are missing from the paste.
(function(window){

  // Default worker URL. A relative worker URL is resolved against the PAGE
  // URL, not this script's URL — under web2py it must resolve to the static
  // route (e.g. /myapp/static/js/...), or be overridden via config.workerPath.
  var WORKER_PATH = 'recorderWorker.js';

  // source: a Web Audio node to record (provides .context);
  // cfg (optional): { bufferLen, workerPath, callback }.
  var Recorder = function(source, cfg){
  var config = cfg || {};
   var bufferLen = config.bufferLen || 4096;
this.context = source.context;
// createScriptProcessor with the legacy createJavaScriptNode fallback;
// bufferLen frames, 2 input / 2 output channels. NOTE(review):
// ScriptProcessorNode is deprecated in current browsers (AudioWorklet
// is the replacement) — confirm target-browser support.
this.node = (this.context.createScriptProcessor ||
             this.context.createJavaScriptNode).call(this.context,
                                                     bufferLen, 2, 2);
// Buffering and WAV encoding happen off the main thread in this worker.
var worker = new Worker(config.workerPath || WORKER_PATH);
// Tell the worker the context's sample rate so the WAV header is correct.
worker.postMessage({
  command: 'init',
  config: {
    sampleRate: this.context.sampleRate
  }
});
var recording = false,
  currCallback;

// Forward each processing quantum's two channels to the worker while
// recording. NOTE(review): getChannelData(1) assumes a stereo input —
// a mono source would throw here.
this.node.onaudioprocess = function(e){
  if (!recording) return;
  worker.postMessage({
    command: 'record',
    buffer: [
      e.inputBuffer.getChannelData(0),
      e.inputBuffer.getChannelData(1)
    ]
  });
}

// Merge extra options into the existing config object (own keys only).
this.configure = function(cfg){
  for (var prop in cfg){
    if (cfg.hasOwnProperty(prop)){
      config[prop] = cfg[prop];
    }
  }
}

// record/stop only toggle the flag; onaudioprocess keeps firing either way.
this.record = function(){
  recording = true;
}

this.stop = function(){
  recording = false;
}

// Discard everything buffered in the worker so far.
this.clear = function(){
  worker.postMessage({ command: 'clear' });
}

// Request the raw (un-interleaved) channel buffers from the worker; the
// worker's reply handler is not visible in this excerpt.
this.getBuffer = function(cb) {
  currCallback = cb || config.callback;
  worker.postMessage({ command: 'getBuffer' })
}
recorderWorker.js:

// recorderWorker.js — recording state shared by all message handlers.
var recLength = 0;     // total samples captured per channel so far
var recBuffersL = [];  // left-channel Float32Array chunks
var recBuffersR = [];  // right-channel Float32Array chunks
var sampleRate;        // set by the 'init' message from the main thread

// Dispatch each message from the main thread to its handler.
this.onmessage = function(e){
  var msg = e.data;
  if (msg.command === 'init') {
    init(msg.config);
  } else if (msg.command === 'record') {
    record(msg.buffer);
  } else if (msg.command === 'exportWAV') {
    exportWAV(msg.type);
  } else if (msg.command === 'getBuffer') {
    getBuffer();
  } else if (msg.command === 'clear') {
    clear();
  }
};

 function init(config){
 sampleRate = config.sampleRate;
 }

  function record(inputBuffer){
  recBuffersL.push(inputBuffer[0]);
  recBuffersR.push(inputBuffer[1]);
  recLength += inputBuffer[0].length;
  }

 function exportWAV(type){
 var bufferL = mergeBuffers(recBuffersL, recLength);
 var bufferR = mergeBuffers(recBuffersR, recLength);
 var interleaved = interleave(bufferL, bufferR);
 var dataview = encodeWAV(interleaved);
 var audioBlob = new Blob([dataview], { type: type });

 this.postMessage(audioBlob);
 }

 function getBuffer() {
 var buffers = [];
 buffers.push( mergeBuffers(recBuffersL, recLength) );
 buffers.push( mergeBuffers(recBuffersR, recLength) );
 this.postMessage(buffers);
  }

 function clear(){
 recLength = 0;
 recBuffersL = [];
 recBuffersR = [];
 }

 function mergeBuffers(recBuffers, recLength){
 var result = new Float32Array(recLength);
 var offset = 0;
 for (var i = 0; i < recBuffers.length; i++){
 result.set(recBuffers[i], offset);
 offset += recBuffers[i].length;
 }
 return result;
 }

// Zip two channel buffers into one L,R,L,R,... stream.
// NOTE: assumes inputL and inputR have equal length, as produced by
// mergeBuffers with a shared recLength.
function interleave(inputL, inputR){
  var total = inputL.length + inputR.length;
  var out = new Float32Array(total);
  for (var src = 0, dst = 0; dst < total; src++){
    out[dst++] = inputL[src];
    out[dst++] = inputR[src];
  }
  return out;
}

      function floatTo16BitPCM(output, offset, input){
        for (var i = 0; i < input.length; i++, offset+=2){
      var s = Math.max(-1, Math.min(1, input[i]));
     output.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
     }
     }

     function writeString(view, offset, string){
      for (var i = 0; i < string.length; i++){
      view.setUint8(offset + i, string.charCodeAt(i));
      }
      }

      function encodeWAV(samples){
      var buffer = new ArrayBuffer(44 + samples.length * 2);
      var view = new DataView(buffer);

      /* RIFF identifier */
        writeString(view, 0, 'RIFF');
     /* file length */
       view.setUint32(4, 32 + samples.length * 2, true);
      /* RIFF type */
       writeString(view, 8, 'WAVE');
      /* format chunk identifier */
         writeString(view, 12, 'fmt ');
       /* format chunk length */
        view.setUint32(16, 16, true);
        /* sample format (raw) */
       view.setUint16(20, 1, true);
      /* channel count */
        view.setUint16(22, 2, true);
     /* sample rate */
      view.setUint32(24, sampleRate, true);
       /* byte rate (sample rate * block align) */
      view.setUint32(28, sampleRate * 4, true);
      /* block align (channel count * bytes per sample) */
      view.setUint16(32, 4, true);
         /* bits per sample */
            view.setUint16(34, 16, true);
       /* data chunk identifier */
        writeString(view, 36, 'data');
        /* data chunk length */
         view.setUint32(40, samples.length * 2, true);

          floatTo16BitPCM(view, 44, samples);

          return view;
          }
var recLength = 0,
    recBuffersL = [],
    recBuffersR = [],
    sampleRate;

this.onmessage = function(e){
  switch(e.data.command){
    case 'init':
      init(e.data.config);
      break;
    case 'record':
      record(e.data.buffer);
      break;
    case 'exportWAV':
      exportWAV(e.data.type);
      break;
    case 'getBuffer':
      getBuffer();
      break;
    case 'clear':
      clear();
      break;
  }
};

function init(config){
  sampleRate = config.sampleRate;
}

function record(inputBuffer){
  recBuffersL.push(inputBuffer[0]);
  recBuffersR.push(inputBuffer[1]);
  recLength += inputBuffer[0].length;
}

function exportWAV(type){
  var bufferL = mergeBuffers(recBuffersL, recLength);
  var bufferR = mergeBuffers(recBuffersR, recLength);
  var interleaved = interleave(bufferL, bufferR);
  var dataview = encodeWAV(interleaved);
  var audioBlob = new Blob([dataview], { type: type });
  this.postMessage(audioBlob);
}

function getBuffer(){
  var buffers = [];
  buffers.push(mergeBuffers(recBuffersL, recLength));
  buffers.push(mergeBuffers(recBuffersR, recLength));
  this.postMessage(buffers);
}

function clear(){
  recLength = 0;
  recBuffersL = [];
  recBuffersR = [];
}

function mergeBuffers(recBuffers, recLength){
  var result = new Float32Array(recLength);
  var offset = 0;
  for (var i = 0; i < recBuffers.length; i++){
我不知道这是否是唯一的问题,但以下脚本源代码引用是相对URL:

<script src="bower_components/Recorderjs/recorder.js"></script>
<script src="app.js"></script>
关于内部URL,假设您将所有内容都移动到/myapp/static/js文件夹中,URL如下:

/bower_components/Recorderjs/recorderWorker.js
将改为:

/myapp/static/js/bower_components/Recorderjs/recorderWorker.js

相对的内部URL应该可以继续工作而不进行更改。

我是否需要更改其中的任何内容才能上传到web2py？

{"name": "Recorderjs", "homepage": "https://github.com/mattdiamond/Recorderjs", "_release": "f814ac7b3f", "_resolution": {"type": "branch", "branch": "master", "commit": "f814ac7b3f4ed4f62729860d5a02720f167480b3"}, "_source": "git://github.com/mattdiamond/Recorderjs.git", "_target": "*", "_originalSource": "Recorderjs", "_direct": true}

我不确定，可以试试看会发生什么。一般来说，任何JS库都应该可以与web2py一起使用——你只需要确保脚本的 src 引用了正确的URL，并且任何其他内部URL都是正确的。

我们无法确定脚本中正确的内部URL应该是什么（app.js、recorder.js中的worker路径）以及.bower.json文件该如何处理。
/bower_components/Recorderjs/recorderWorker.js
/myapp/static/js/bower_components/Recorderjs/recorderWorker.js