Javascript 如何多次播放音频文件

Javascript 如何多次播放音频文件:每当游戏中发生碰撞时需要播放碰撞音效。本文讨论如何多次播放同一个音频文件,并给出一个基于 AudioManager 的示例实现。

每当我的游戏中发生碰撞时,我想发出碰撞声。为此,如何多次运行音频文件

// Create the manager, preloading each listed sound.
// `samples` is how many pre-allocated copies the audio-tag fallback
// keeps so the same sound can overlap; the Web Audio path ignores it.
var audioMgr = new AudioManager({
  fire:      { filename: "assets/fire.ogg",      samples: 8, },
  explosion: { filename: "assets/explosion.ogg", samples: 6, },
  hitshield: { filename: "assets/hitshield.ogg", samples: 6, },
  launch:    { filename: "assets/launch.ogg",    samples: 2, },
  gameover:  { filename: "assets/gameover.ogg",  samples: 1, },
  play:      { filename: "assets/play.ogg",      samples: 1, },
});
 // Play by key; call as often as needed (e.g. inside a collision check).
 audioMgr.playSound('explosion');
 audioMgr.playSound('fire');

下面是一个例子,音乐一直在播放:。但如何在If条件下多次播放小mp3文件?

我建议您更改冲突逻辑,以发出新的事件样式,然后您可以捕获该事件以触发播放音频调用。下面显示了一个模拟碰撞,它发送新的发射事件,这些事件被监听以播放音频

<html>
<head>
    <meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
</head>
<body>
    <script>

        var sound_on_click = (function() {

            // --- audio setup: the media files a collision may trigger
            var mediaFiles = [
                "awesome_blip.mp3",
                "smash_crunch.wav",
                "wild_screech.wav",
            ];

            // Play the file at `index`. A fresh Audio element is created
            // per call, so repeated/overlapping plays are possible.
            function playSoundAt(index) {
                var clip = new Audio(mediaFiles[index]);
                clip.play();
            }

            // --- event emit: custom event decouples collision detection
            //     from the audio-playing code
            var collisionEvent = new Event("see_a_collision");

            document.addEventListener("see_a_collision", function(e) {
                // randomly pick a media file from all available
                var chosen = Math.floor(Math.random() * mediaFiles.length);
                console.log("about to play sound ", chosen);
                playSoundAt(chosen);
            });

            // --- mock collision source: dispatches the collision event
            //     every 2000 ms to simulate game collisions
            (function mock_collision() {
                document.dispatchEvent(collisionEvent);
                setTimeout(mock_collision, 2000);
            }());

        }());

    </script>
</body>
</html>
var audioMgr = new AudioManager({                               
  fire:      { filename: "assets/fire.ogg",      samples: 8, }, 
  explosion: { filename: "assets/explosion.ogg", samples: 6, }, 
  hitshield: { filename: "assets/hitshield.ogg", samples: 6, }, 
  launch:    { filename: "assets/launch.ogg",    samples: 2, }, 
  gameover:  { filename: "assets/gameover.ogg",  samples: 1, }, 
  play:      { filename: "assets/play.ogg",      samples: 1, }, 
});                                                             
 audioMgr.playSound('explosion');                               
 audioMgr.playSound('fire');                                    

var sound_on_click = (function() {
    // --- setup audio logic
    var array_audio_files = []; // array to hold list of available media filenames
    array_audio_files.push("awesome_blip.mp3");
    array_audio_files.push("smash_crunch.wav");
    array_audio_files.push("wild_screech.wav");
    var play_sound = function (given_index) {
        var cool_tune = new Audio(array_audio_files[given_index]);
        cool_tune.play();
    };
    // --- event emit
    var event_collision = new Event("see_a_collision"); // define new event type
    document.addEventListener("see_a_collision", function(e) {
        // randomly pick a media file from all available
        var index_media_file = Math.floor(Math.random() * array_audio_files.length);
        console.log("about to play sound ", index_media_file);
        play_sound(index_media_file);
    });
    // --- below is a mock up of some collision condition
    (function mock_collision(){
        document.dispatchEvent(event_collision); // collision happened so emit event
        setTimeout(mock_collision, 2000); // launch every x milliseconds
    }());
}());

这实际上是一个很大的话题。我建议使用现成的音频库。

var audioMgr = new AudioManager({                               
  fire:      { filename: "assets/fire.ogg",      samples: 8, }, 
  explosion: { filename: "assets/explosion.ogg", samples: 6, }, 
  hitshield: { filename: "assets/hitshield.ogg", samples: 6, }, 
  launch:    { filename: "assets/launch.ogg",    samples: 2, }, 
  gameover:  { filename: "assets/gameover.ogg",  samples: 1, }, 
  play:      { filename: "assets/play.ogg",      samples: 1, }, 
});                                                             
 audioMgr.playSound('explosion');                               
 audioMgr.playSound('fire');                                    
Web音频API可以说是在浏览器中播放音频的最佳方式。但不幸的是,它并不是在每个浏览器上都可用,这意味着你需要某种退路。Chrome、Safari和Firefox都支持它。IE支持即将到来,但还不存在

var audioMgr = new AudioManager({                               
  fire:      { filename: "assets/fire.ogg",      samples: 8, }, 
  explosion: { filename: "assets/explosion.ogg", samples: 6, }, 
  hitshield: { filename: "assets/hitshield.ogg", samples: 6, }, 
  launch:    { filename: "assets/launch.ogg",    samples: 2, }, 
  gameover:  { filename: "assets/gameover.ogg",  samples: 1, }, 
  play:      { filename: "assets/play.ogg",      samples: 1, }, 
});                                                             
 audioMgr.playSound('explosion');                               
 audioMgr.playSound('fire');                                    
这里有一个这样的库:

// Self-executing module wrapper: attaches AudioManager to `global`
// (the top-level `this`, i.e. `window` in a browser).
(function(global) {
  // Pick whichever AudioContext flavor the browser exposes; undefined
  // means fall back to <audio>-tag playback.
  var webAudioAPI = window.AudioContext || window.webkitAudioContext || window.mozAudioContext;

  // To play a sound, simply call audio.playSound(id), where id is
  // one of the keys of the g_sound_files array, e.g. "damage".

  // options:
  //   startedOnTouchCallback: on iOS no sounds can be played unless at least one is first initiated during
  //       a user gesture. If a function is attached here it will be called when that user gesture has happened.
  //       This is useful for situations where sounds 'should' start right from the beginning
  //       even if the player has not touched the screen. In that case we put up a message, "touch the screen"
  //       and remove that message when we get this callback
  //
  //   callback: called when all the sounds have loaded.
  var AudioManager = function(sounds, options) {
    options = options || {};
    var g_context;            // AudioContext instance (Web Audio path only)
    var g_audioMgr;           // NOTE(review): never assigned or read — appears unused
    var g_soundBank = {};     // sound name -> WebAudioSound | AudioTagSound
    var g_canPlay = false;    // truthy once we know the browser plays ogg or mp3
    var g_canPlayOgg;
    var g_canPlayMp3;
    var g_canPlayWav;         // NOTE(review): probed in init() but never consulted
    var g_canPlayAif;         // NOTE(review): probed in init() but never consulted
    var g_createFromFileFn;   // constructor chosen in init(): WebAudioSound or AudioTagSound

    // Swap a filename's extension. Assumes the extension is exactly
    // 3 characters long (e.g. "ogg", "mp3") — not a general solution.
    var changeExt = function(filename, ext) {
      return filename.substring(0, filename.length - 3) + ext;
    };

    // Returns true when a user gesture is required before audio can
    // start (iOS devices). The decision is captured once at construction.
    this.needUserGesture = (function() {
      var iOS = ( navigator.userAgent.match(/(iPad|iPhone|iPod)/g) ? true : false );
      var needUserGesture = iOS;
      return function() {
        return needUserGesture;
      };
    }());

    // Base "class" providing play() for sounds backed by a decoded
    // Web Audio buffer (this.buffer is set by WebAudioSound's loader).
    var WebAudioBuffer = function() {
    };

    // Play the decoded buffer. opt_when is the AudioContext start time
    // (undefined = now); opt_loop loops the source.
    // Returns the AudioBufferSourceNode, or undefined if not loaded yet.
    WebAudioBuffer.prototype.play = function(opt_when, opt_loop) {
      if (!this.buffer) {
        console.log(this.name, " not loaded");
        return;
      }
      var src = g_context.createBufferSource();
      src.buffer = this.buffer;
      src.loop = opt_loop || false;
      src.connect(g_context.destination);
      // Older WebKit used noteOn() before start() was standardized.
      if (src.start) {
        src.start(opt_when);
      } else {
        src.noteOn(opt_when);
      }
      return src;
    };

    // Web Audio backend: loads `filename` via XHR and decodes it into
    // this.buffer. `samples` is accepted for interface parity with
    // AudioTagSound but is unused here — one decoded buffer can overlap
    // freely. opt_callback(err) fires on decode success (err=false) or
    // decode failure (err=true).
    function WebAudioSound(name, filename, samples, opt_callback) {
      this.name = name;
      var that = this;
      var req = new XMLHttpRequest();
      req.open("GET", filename, true);
      req.responseType = "arraybuffer";
      req.onload = function() {
        g_context.decodeAudioData(req.response, function onSuccess(decodedBuffer) {
          // Decoding was successful, do something useful with the audio buffer
          that.buffer = decodedBuffer;
          if (opt_callback) {
            opt_callback(false);
          }
        }, function onFailure() {
           console.error("failed to decoding audio buffer: " + filename);
           if (opt_callback) {
             opt_callback(true);
           }
        });
      }
      // NOTE(review): a network error only logs — opt_callback is never
      // invoked, so init()'s pending-sound counter never reaches zero.
      req.addEventListener("error", function(e) {
        console.error("failed to load:", filename, " : ", e.target.status);
      }, false);
      req.send();
    }

    // Inherit play() from WebAudioBuffer (pre-Object.create idiom).
    WebAudioSound.prototype = new WebAudioBuffer();

    // <audio>-tag fallback backend: preloads `samples` Audio elements
    // for the same file so several instances can (in principle) overlap.
    // opt_callback(err) is called as each element loads or fails.
    function AudioTagSound(name, filename, samples, opt_callback) {
      this.waiting_on_load = samples;  // counts down as elements become playable
      this.samples = samples || 1;
      this.name = name;
      this.play_idx = 0;               // round-robin cursor over this.audio
      this.audio = {};
      for (var i = 0; i < samples; i++) {
        var audio = new Audio();
        var that = this;
        var checkCallback = function(err) {
          that.waiting_on_load--;
          if (opt_callback) {
            opt_callback(err);
          }
        };
        audio.addEventListener("canplaythrough", function() {
          checkCallback(false);
        }, false);
        audio.src = filename;
        audio.onerror = function() {
          checkCallback(true);
        };
        audio.load();
        this.audio[i] = audio;
      }
    };

    // Play via the fallback. Advances the round-robin cursor, but note
    // that it actually plays a brand-new Audio element whose src is
    // copied from the pooled one, starting once "canplaythrough" fires.
    // opt_when/opt_loop are accepted but not implemented here (see TODO).
    AudioTagSound.prototype.play = function(opt_when, opt_loop) {
      if (this.waiting_on_load > 0) {
        console.log(this.name, " not loaded");
        return;
      }
      this.play_idx = (this.play_idx + 1) % this.samples;
      var a = this.audio[this.play_idx];
      // console.log(this.name, ":", this.play_idx, ":", a.src);
      var b = new Audio();
      b.src = a.src;
      // TODO: use when
      b.addEventListener("canplaythrough", function() {
        b.play();
        }, false);
      b.load();
    };

    // Returns an error-logging handler for a given file.
    // NOTE(review): defined but never used anywhere in this file.
    var handleError = function(filename, audio) {
      return function(e) {
        console.error("can't load ", filename);
      }
    };

    // Play the sound registered under `name`. No-op when the browser
    // can play neither ogg nor mp3; logs when `name` is unknown.
    // Returns whatever the backend's play() returns (an
    // AudioBufferSourceNode on the Web Audio path, undefined otherwise).
    this.playSound = function(name, opt_when, opt_loop) {
      if (!g_canPlay)
        return;
      var sound = g_soundBank[name];
      if (!sound) {
        console.error("audio: '" + name + "' not known.");
        return;
      }
      return sound.play(opt_when, opt_loop);
    }.bind(this);

    // Current time in seconds: the AudioContext clock when available,
    // otherwise the wall clock converted from ms.
    this.getTime = function() {
      return g_context ? g_context.currentTime : Date.now() * 0.001;
    }.bind(this);

    // on iOS and possibly other devices you can't play any
    // sounds in the browser unless you first play a sound
    // in response to a user gesture. So, make something
    // to respond to a user gesture.
    var setupGesture = function() {
      if (this.needUserGesture()) {
        var count = 0;
        var elem = window;
        var that = this;
        var eventNames = ['touchstart', 'mousedown'];
        // Plays a short muted oscillator on each of the first gestures
        // to unlock audio; after the 3rd gesture the listeners are
        // removed and options.startedOnTouchCallback (if any) fires.
        var playSoundToStartAudio = function() {
          ++count;
         if (count < 3) {
            // just playing any sound does not seem to work.
            var source = g_context.createOscillator();
            var gain = g_context.createGain();
            source.frequency.value = 440;
            source.connect(gain);
            gain.gain.value = 0;
            gain.connect(g_context.destination);
            if (source.start) {
              source.start(0);
            } else {
              source.noteOn(0);
            }
            setTimeout(function() {
              source.disconnect();
            }, 100);
          }
          if (count == 3) {
            for (var ii = 0; ii < eventNames.length; ++ii) {
              elem.removeEventListener(eventNames[ii], playSoundToStartAudio, false);
            }
            if (options.startedOnTouchCallback) {
              options.startedOnTouchCallback();
            }
          }
        }
        for (var ii = 0; ii < eventNames.length; ++ii) {
          elem.addEventListener(eventNames[ii], playSoundToStartAudio, false);
        }
      }
    }.bind(this);

    // Register one sound. Swaps .ogg <-> .mp3 when the requested format
    // isn't playable (relies on changeExt's 3-char-extension assumption).
    // Returns the created backend sound object.
    this.loadSound = function(soundName, filename, samples, opt_callback) {
      var ext = filename.substring(filename.length - 3);
      if (ext == 'ogg' && !g_canPlayOgg) {
        filename = changeExt(filename, "mp3");
      } else if (ext == 'mp3' && !g_canPlayMp3) {
        filename = changeExt(filename, "ogg");
      }
      var s = new g_createFromFileFn(soundName, filename, samples, opt_callback);
      g_soundBank[soundName] = s;
      return s;
    }.bind(this);

    // Probe format support, choose the Web Audio or audio-tag backend,
    // then start loading every sound in `sounds`. options.callback fires
    // once all sounds have loaded (or on the next tick when there are none).
    this.init = function(sounds) {
      var a = new Audio()
      g_canPlayOgg = a.canPlayType("audio/ogg");
      g_canPlayMp3 = a.canPlayType("audio/mp3");
      g_canPlayWav = a.canPlayType("audio/wav");
      g_canPlayAif = a.canPlayType("audio/aif") || a.canPlayType("audio/aiff");
      g_canPlay = g_canPlayOgg || g_canPlayMp3;
      if (!g_canPlay)
        return;

      if (webAudioAPI) {
        console.log("Using Web Audio API");
        g_context = new webAudioAPI();

        // Old WebKit shipped createGainNode before createGain existed.
        if (!g_context.createGain) { g_context.createGain = g_context.createGainNode.bind(g_context); }

        g_createFromFileFn = WebAudioSound;
      } else {
        console.log("Using Audio Tag");
        g_createFromFileFn = AudioTagSound;
      }

      // Starts at 1 so the setTimeout(soundsLoaded, 0) below performs
      // the final decrement even when `sounds` is empty.
      var soundsPending = 1;
      var soundsLoaded = function() {
        --soundsPending;
        if (soundsPending == 0 && options.callback) {
          options.callback();
        }
      };

      if (sounds) {
        Object.keys(sounds).forEach(function(sound) {
          var data = sounds[sound];
          ++soundsPending;
          this.loadSound(sound, data.filename, data.samples, soundsLoaded);
        }.bind(this));
      }

      // so that we generate a callback even if there are no sounds.
      // That way users don't have to restructure their code if they have no sounds or if they
      // disable sounds by passing none in.
      setTimeout(soundsLoaded, 0);

      if (webAudioAPI) {
        setupGesture();
      }
    }.bind(this);
    this.init(sounds);

    // List the names of all registered sounds.
    this.getSoundIds = function() {
      return Object.keys(g_soundBank);
    };
  };

  // True when the browser exposes any AudioContext flavor.
  AudioManager.hasWebAudio = function() {
    return webAudioAPI !== undefined;
  };

  global.AudioManager = AudioManager;
}(this));
var audioMgr = new AudioManager({                               
  fire:      { filename: "assets/fire.ogg",      samples: 8, }, 
  explosion: { filename: "assets/explosion.ogg", samples: 6, }, 
  hitshield: { filename: "assets/hitshield.ogg", samples: 6, }, 
  launch:    { filename: "assets/launch.ogg",    samples: 2, }, 
  gameover:  { filename: "assets/gameover.ogg",  samples: 1, }, 
  play:      { filename: "assets/play.ogg",      samples: 1, }, 
});                                                             
 audioMgr.playSound('explosion');                               
 audioMgr.playSound('fire');                                    
在那之后,你可以用它来播放声音

var audioMgr = new AudioManager({                               
  fire:      { filename: "assets/fire.ogg",      samples: 8, }, 
  explosion: { filename: "assets/explosion.ogg", samples: 6, }, 
  hitshield: { filename: "assets/hitshield.ogg", samples: 6, }, 
  launch:    { filename: "assets/launch.ogg",    samples: 2, }, 
  gameover:  { filename: "assets/gameover.ogg",  samples: 1, }, 
  play:      { filename: "assets/play.ogg",      samples: 1, }, 
});                                                             
 audioMgr.playSound('explosion');                               
 audioMgr.playSound('fire');                                    
等等

var audioMgr = new AudioManager({                               
  fire:      { filename: "assets/fire.ogg",      samples: 8, }, 
  explosion: { filename: "assets/explosion.ogg", samples: 6, }, 
  hitshield: { filename: "assets/hitshield.ogg", samples: 6, }, 
  launch:    { filename: "assets/launch.ogg",    samples: 2, }, 
  gameover:  { filename: "assets/gameover.ogg",  samples: 1, }, 
  play:      { filename: "assets/play.ogg",      samples: 1, }, 
});                                                             
 audioMgr.playSound('explosion');                               
 audioMgr.playSound('fire');                                    
samples
是您希望该声音能够同时播放的实例数量。任何支持 Web Audio API 的浏览器都不需要此参数。换句话说,它只是为不支持 Web Audio 的浏览器(如 IE)的回退方案准备的

var audioMgr = new AudioManager({                               
  fire:      { filename: "assets/fire.ogg",      samples: 8, }, 
  explosion: { filename: "assets/explosion.ogg", samples: 6, }, 
  hitshield: { filename: "assets/hitshield.ogg", samples: 6, }, 
  launch:    { filename: "assets/launch.ogg",    samples: 2, }, 
  gameover:  { filename: "assets/gameover.ogg",  samples: 1, }, 
  play:      { filename: "assets/play.ogg",      samples: 1, }, 
});                                                             
 audioMgr.playSound('explosion');                               
 audioMgr.playSound('fire');                                    
另请注意,据我所知,Firefox不支持MP3,因此您需要为其提供
.ogg
文件。相反,Safari不支持
.ogg
。无论您在初始化库时指定了什么,库都会处理加载
.mp3
.ogg
文件。换句话说,如果您输入
文件名:“foo.mp3”
库将尝试加载
foo.mp3
foo.ogg
,具体取决于您所在的浏览器支持哪一种格式。

(提问者评论:我会在用来检测碰撞的 if 条件内播放音频。实际上,我只是需要播放音频文件的语法。如果我想播放多个文件,我会使用你的代码。所以我接受了你的回答。谢谢:)
var audioMgr = new AudioManager({                               
  fire:      { filename: "assets/fire.ogg",      samples: 8, }, 
  explosion: { filename: "assets/explosion.ogg", samples: 6, }, 
  hitshield: { filename: "assets/hitshield.ogg", samples: 6, }, 
  launch:    { filename: "assets/launch.ogg",    samples: 2, }, 
  gameover:  { filename: "assets/gameover.ogg",  samples: 1, }, 
  play:      { filename: "assets/play.ogg",      samples: 1, }, 
});                                                             
 audioMgr.playSound('explosion');                               
 audioMgr.playSound('fire');