Javascript 前馈神经网络在JS中不工作

Javascript 前馈神经网络在JS中不工作,javascript,neural-network,Javascript,Neural Network,我正在用Javascript进行一个项目,在用户指定了神经网络的大小和权重后,我的程序将创建一个前馈神经网络,它可以根据已经定义的权重预测输入集的结果。我的程序没有通过反向传播或其他任何方式训练网络,它只是接收预定义的权重,以便通过网络提供信息 我已经通过backprop在Simbrain(基于Java的神经网络软件)中训练了一个神经网络,并且我已经将在Simbrain中训练网络产生的权重导出到一个JS文件中,该文件将在创建神经网络时输入这些权重 我的问题是,尽管我的神经网络在大多数神经网络(甚

我正在用Javascript进行一个项目,在用户指定了神经网络的大小和权重后,我的程序将创建一个前馈神经网络,它可以根据已经定义的权重预测输入集的结果。我的程序没有通过反向传播或其他任何方式训练网络,它只是接收预定义的权重,以便通过网络提供信息

我已经通过backprop在Simbrain(基于Java的神经网络软件)中训练了一个神经网络,并且我已经将在Simbrain中训练网络产生的权重导出到一个JS文件中,该文件将在创建神经网络时输入这些权重

我的问题是:尽管我的神经网络在大多数网络(甚至是那些超过3层的网络)上的表现与预期一致,但对于从Simbrain导出的这个网络,它得到的输出与预期结果相差很大。因此我的问题是:我做错了什么?(原文此处附有演示链接,提取时已丢失。)

我已经确认Simbrain和我自己的程序使用相同的sigmoid函数 f(x) = 1 / (1 + e^(-x))(等价写法为 1 / (1 + 1/e^x)),所以问题不在于激活函数本身

非常感谢您提供的任何帮助,包括与我的问题没有直接关系,但与神经网络有关的帮助。谢谢你读到这里

NN的构造函数

function NeuralNetwork(layers /*2d array*/, weight3 /*3d array*/) {
  /*
  Feed-forward network driven by externally supplied (pre-trained) weights.
    layers[i]       — initial value of every neuron in layer i; the input layer
                      carries the real inputs, deeper layers should start at 0.
    weight3[f][p][v] — weight of the connection from neuron p in layer f to
                      neuron v in layer f + 1 (e.g. exported from Simbrain).
  NOTE(review): Simbrain neurons normally carry a bias term in addition to the
  connection weights. If the export contains only weights, this network will
  not reproduce Simbrain's outputs unless every bias is zero — verify against
  the export.
  */
  this.layers = [];
  for (var i = 0; i < layers.length; i++) { // build each layer of neurons
    this.layers.push(new Layer(layers[i]));
  }
  for (var f = 0; f < layers.length - 1; f++) { // wire consecutive layers with the predefined weights
    this.layers[f].connectLayers(layers[f + 1], weight3[f]);
  }
  this.reset = function(values) { // reset every neuron value; call before each run(),
    // otherwise preActivate keeps accumulating on top of the previous pass
    for (var i = 0; i < this.layers.length; i++) {
      for (var t = 0; t < this.layers[i].neurons.length; t++) {
        this.layers[i].neurons[t].value = values[i][t];
      }
    }
  }
  this.run = function() { // forward pass: propagate the input layer through the network
    for (var t = 0; t < this.layers.length; t++) { // for each layer
      if (t !== 0) { // the input layer is never activated
        this.activate(t);
      }
      if (t !== this.layers.length - 1) { // the output layer has no successor to feed
        this.preActivate(t); // this affects the layer that comes after it
      }
    }
  }
  this.preActivate = function(t) { // weighted sum: add each neuron's contribution to the next layer
    for (var p = 0; p < this.layers[t].neurons.length; p++) { // for the neurons in the current layer
      for (var v = 0; v < this.layers[t].neurons[p].weights.length; v++) { // one weight per neuron in the next layer
        this.layers[t + 1].neurons[v].value += this.layers[t].neurons[p].weights[v].value * this.layers[t].neurons[p].value;
      }
    }
  }
  this.activate = function(t) { // apply the sigmoid to every neuron of layer t
    for (var hp = 0; hp < this.layers[t].neurons.length; hp++) {
      this.layers[t].neurons[hp].value = this.sigmoid(this.layers[t].neurons[hp].value);
    }
  }
  this.sigmoid = function(x) { // logistic function f(x) = 1 / (1 + e^-x)
    // BUG FIX: the original called the bare identifier `pow`, which is a
    // ReferenceError outside environments such as p5.js that define a global
    // `pow`. Math.exp(-x) is the standard equivalent: 1 / (1 + 1/e^x) is
    // mathematically identical to 1 / (1 + e^-x).
    return 1 / (1 + Math.exp(-x));
  }
}

function Layer(neurons /*1d array*/, weight2 /*2d array*/) { // one layer of the network
  // Wrap every initial value in a Neuron instance.
  this.neurons = neurons.map(function(value) {
    return new Neuron(value);
  });
  // Connect this layer to the next one: weight2[s][t] becomes the weight of
  // the edge from source neuron s (here) to target neuron t (next layer).
  // Only targetLayer's length is read, so the next layer's raw value array
  // is sufficient.
  this.connectLayers = function(targetLayer /*1d array*/, weight2 /*2d array*/) {
    for (var s = 0; s < this.neurons.length; s++) {
      var source = this.neurons[s];
      for (var t = 0; t < targetLayer.length; t++) {
        source.weights.push(new Weight(weight2[s][t]));
      }
    }
  }
}

function Neuron(value /*float*/) {
  // A single neuron: its current activation value plus the weights of its
  // outgoing connections (filled in later by Layer.connectLayers).
  this.value = value;
  this.weights = [];
  // BUG FIX: `this.x;` / `this.y;` were no-op expression statements that
  // never created the properties; assign explicitly so the canvas-drawing
  // code has real slots to fill in.
  this.x = undefined; // canvas x position, set by the drawing code
  this.y = undefined; // canvas y position, set by the drawing code
}

function Weight(value /*float*/) {
  // A single connection weight between two neurons of adjacent layers.
  this.value = value;
  // BUG FIX: `this.x;` / `this.y;` were no-op expression statements that
  // never created the properties; assign explicitly for the canvas code.
  this.x = undefined; // canvas x position, set by the drawing code
  this.y = undefined; // canvas y position, set by the drawing code
}
函数神经网络(层/*2d数组*/,权重3/*3d数组*/){
/*
对于第二个测试用例,层和权重3分别为:
[
[0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5],
[0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0],
[0,0,0]
];及
[
[
[0.8026458866414493,-4.88941104413936,-4.226087540974577,-4.530573464438122,-1.4556229469839264,-4.31689275950382,-2.598025750964335,-1.4024486094322206,-4.2746136202736595,-1.114811712353249],
[-0.7018557602796472,-1.5899008962198744,-0.16868647158179298,-1.5305564633774085,-1.4933903630534928,-1.0700257428899125,-4.302200807112217,-1.6005131049772037,0.1368813977388942,-1.5986983805280093],
[-0.29876796358044566,-2.4380450712541997,-1.5397608205098134,-2.3059188256916676,-1.5036183940283618,-1.7713981036865771,-1.2577757481948733,-1.7750243327951924,-1.2961124127198986,-1.6383194273199955],
[-0.6170049336232494,-3.3128244088884964,-3.331978879084264,-3.1456639968607307,-1.2964187225461807,-2.9267790861009417,-1.9560748559032006,-1.3973642251104967,-3.4449373490550164,-1.2039858563847703],
[0.28760582772383936,-0.029647036107864125,2.401661305865335,0.15614131284929608,0.9967571587559184,1.3920493637059834,2.053160398236636,1.560406973436222,2.5003616706837324,1.1406280698546443],
[-1.431355486584377,-0.5254956135639378,0.585966810464151,-0.47056989178558917,-0.34719568935262807,3.0873904709552016,2.7466680699367627,-0.3183084147224785,0.4307418811280014,-0.4347019809871141],
[4.730305578112657,2.794618188382498,2.7725054494795303,2.7971993062957767,3.0121313133902494,4.52697776884291,0.1861088251573733,3.3324377979102677,3.4776335904379945,3.220162438476049],
[1.3365670221393215,4.151102261635236,4.448937517824129,3.818527635050038,1.1622076847092544,5.056756438356793,-4.811867833736578,1.4279903810573407,5.067869165923729,1.2084930462786259],
[-7.653884362627585,3.4481239221814506,-1.3517036269721663,2.9744225300717084,-3.4121450698649567,-6.262463291313358,10.0,-5.000134578924327,-1.701089610696345,-4.510549176789293]
], [
[0.738937123642736,1.0027735638897857,-0.4895642945264585,-0.4966409487023605,-3.411429095495459,-0.645660237498346,0.4795890293073677,-1.1530391624279934,-0.5844011923153196,-0.18971906794059312,0.24259889837466253],
[-1.3289019929201453,-1.3846783936643814,-3.1027185772051857,-3.051664033849648,0.8551718079643078,-1.0150243513426276,-1.1322190191703165,-0.8017220694962384,-3.3343695886290257,-1.5355207800814192,-1.1708434001066501],
[-1.2376066287024123,-1.3769667430967405,-2.301168906630486,-2.325621305132306,1.9450338961713791,-0.6571756279826005,-1.1591625309611004,-0.40914317621655133,-2.489673013800612,-1.3075292847341096,-0.9491990659165409],
[-1.2735133823287095,-1.2998934938770366,-2.7684415870018477,-2.795194685738332,0.6119467566598199,-0.9188836490585277,-1.1200651160455346,-0.6609228479081031,-2.9594533679041617,-1.4427187863617232,-1.0109174567197428],
[-1.3424373302583281,-1.5343976853123078,-0.9412451891990541,-0.9181715664124199,0.39356360921646094,-1.0424607517601023,-1.502583319884496,-1.0152736611391637,-0.9513379926677091,-0.8563445028619694,-1.2613129065351194],
[1.479041505318804,1.810007067391299,-2.8023717750684107,-2.7812453328904354,-2.3035371159432065,0.03115788970802594,1.9657984684801135,-0.06598157999814092,-3.127064931997624,-0.823555626227005,1.264759264824068],
[-2.888035414770285,-2.994031589353307,3.6767014622022303,3.6660677492715528,-9.181646436584106,-3.263884016795336,-3.88349373084228,-3.7712237821378527,4.041549967683737,-0.687691881794572,-3.4265218341885384],
[-1.778832083041489,-2.12273644922548,-1.0963638066243724,-1.030260217185849,1.1187575804787673,-1.2409528854652865,-2.1738688894809637,-1.2743917544089247,-1.107812865029798,-1.0629428830636494,-1.7751739289407722],
[-1.004719873122501,-1.0443553807939716,-2.577499515628139,-2.5776692043229663,2.9709867841159,-0.5274719498845833,-0.843890283520379,-0.20671418403893513,-2.7179118826886026,-1.306839176041484,-0.7570735658813299],
[-1.6272666988539848,-1.827183409776364,-0.9438773753269729,-0.9435987081211876,0.7660203923230526,-1.2259095997120846,-2.031459716170598,-1.3231868095404185,-0.9964657871022223,-0.9165692090752512,-1.58444425796673]
], [
[3.725973720696792,-1.6646641113226457,-2.690673963411094],
[3.800277429998268,-2.085738176798833,-2.63291827658551],
[3.9016422516515155,2.0672177107134138,-3.855168383990615],
[3.8880037099581446,2.1381928663964778,-3.8429910059416983],
[-1.162825551537172,2.35558424769462,-2.2926681297161076],
[2.9764021713068542,0.31