JavaScript / Tone.js — How do I connect a visualizer to this synthesizer? (Full script below.)
I want to add a visualizer that animates while the selected synth plays. Any thoughts on where it should hook in? I think a nice visualizer would be great for this project, but I don't know how to connect the two. I can build the visualizer itself — connecting them is the part I can't figure out. Should the music be started inside the start() function?
// Guard flag: "false" until the first start-button click, "true" afterwards.
// NOTE(review): stored as a string, not a boolean — the click handler below
// compares it against the literal "false"; confirm before changing the type.
let switchSound = "false";
// Start button; its click handler (attached in window.onload) boots the sequencer.
var startSound = document.querySelector('#startsound');
// Mute toggle button; its onclick handler is assigned after playback starts.
var mute = document.querySelector('#mute');
// Main polyphonic synth — driven by the Transport loop in window.onload.
const synth = new Tone.PolySynth();
// Secondary membrane (drum) synth — not referenced anywhere in this file chunk.
const synth1 = new Tone.MembraneSynth();
// Wraps a Tone.js synth together with its own output gain node, so each
// instrument can later be re-routed or muted independently.
class Instrument {
  constructor() {
    this.synthType = null; // Tone class name of the active synth, e.g. 'Synth'
    this.synth = null;     // the active Tone synth instance (set by updateSynthType)
    this.gain = new Tone.Gain();
    // BUG FIX: `toDestination` is a method — the original `this.gain.toDestination;`
    // read the property without calling it, so the gain was never wired to speakers.
    this.gain.toDestination();
  }

  // Default settings per synth type, keyed by the Tone.js class name.
  get defaultSettings() {
    return {
      Synth: {
        oscillator: { type: 'triangle' },
        envelope: {
          attack: 0.05,
          decay: 0.1,
          sustain: 0.3,
          release: 1
        }
      }
    };
  }

  /**
   * Replace the active synth with a new instance of the given Tone.js class.
   * @param {string} synthType - Tone.js class name (e.g. 'Synth', 'AMSynth').
   */
  updateSynthType(synthType) {
    // Detach the previous synth (if any) so it stops feeding the gain node.
    if (this.synth) this.synth.disconnect();
    const newSynth = new Tone[synthType](this.defaultSettings[synthType]);
    // BUG FIX: the original constructed the synth but never stored it on the
    // instance nor connected it to the gain node, leaving the instrument inert.
    this.synthType = synthType;
    this.synth = newSynth;
    this.synth.connect(this.gain);
    console.log(newSynth.envelope.attack);
  }
}
window.onload = function () {
  // Boot the sequencer on the FIRST click of the start button only.
  startSound.addEventListener('click', function () {
    if (switchSound !== "false") return; // already started — ignore further clicks
    switchSound = "true";

    const inst = new Instrument();
    inst.updateSynthType('Synth');

    // NOTE(review): the original also created an unused second
    // `new AudioContext()` here; removed — Tone.js manages its own context.

    const $inputs = document.querySelectorAll('input');
    // Chord progression; formatChords expands each string into two octaves of notes.
    const chords = [
      'G0 C1 E1 B1 C1', 'F1 A1 C1 E2', 'G1 B1 D1',
      'D1 F1 A1 C2', 'E1 G1 B1'
    ].map(formatChords);

    let chordIdx = 0; // index of the chord currently being arpeggiated
    let step = 0;     // running 16th-note counter; wraps per-chord via modulo

    // Signal chain: synth -> reverb -> gain -> destination (speakers).
    const gain = new Tone.Gain(0.2);
    const reverb = new Tone.Reverb(2, 0.1);
    gain.toDestination();
    synth.connect(reverb).connect(gain);

    // The radio/checkbox inputs (values "1".."5") select the active chord.
    Array.from($inputs).forEach(($input) => {
      $input.addEventListener('change', () => {
        if ($input.checked) handleChord($input.value);
      });
    });

    function handleChord(valueString) {
      // Input values are 1-based; chord indices are 0-based.
      chordIdx = parseInt(valueString, 10) - 1;
    }

    Tone.Transport.scheduleRepeat(onRepeat, '16n');
    Tone.Transport.bpm.value = 100;
    Tone.Transport.start();

    // Runs every 16th note: plays the next note of the active chord.
    function onRepeat(time) {
      const chord = chords[chordIdx];
      const note = chord[step % chord.length];
      synth.triggerAttackRelease(note, '32n', time);
      step++;
    }

    /**
     * Expand a chord string like "G0 C1 E1" into concrete notes over two
     * octaves: a "0" suffix maps to octaves 2 and 3, "1" to octaves 4 and 5.
     * @param {string} chordString - space-separated note+octave-flag tokens
     * @returns {string[]} playable note names, e.g. ["G2", "C4", "E4", ...]
     */
    function formatChords(chordString) {
      const tokens = chordString.split(' ');
      const arr = [];
      for (let i = 0; i < 2; i++) {
        for (const token of tokens) {
          // BUG FIX: `note` was assigned without declaration in the original,
          // creating an implicit global.
          const name = token[0];
          const oct = token[1] === "0" ? i + 2 : i + 4;
          arr.push(name + oct);
        }
      }
      return arr;
    }

    // Mute toggle: data-muted on the button tracks the current state.
    mute.onclick = function () {
      if (mute.getAttribute('data-muted') === 'false') {
        gain.gain.rampTo(0);
        mute.setAttribute('data-muted', 'true');
        mute.innerHTML = "unmute";
      } else {
        // BUG FIX: the original unmuted to 0.6 even though the gain was
        // created at 0.2 — restore the original level for consistency.
        gain.gain.rampTo(0.2);
        mute.setAttribute('data-muted', 'false');
        mute.innerHTML = "mute";
      }
    };
  });
};