Java 用深度学习4J训练简单神经网络
我开始使用deeplearning4j,并尝试创建一个简单的神经网络。我想近似计算函数sin(x)/x。从理论上讲,只需一个隐藏层就可以做到这一点。
(x,y)
,然后我尝试用一个具有20个隐藏节点和sigmoid激活函数的神经网络来近似该函数。不幸的是,使用NNy_est
估计的值甚至不接近实际值y
我想知道错误在哪里
这是我当前的代码:
package org.deeplearning4j.examples.myexamples
import org.deeplearning4j.nn.api.OptimizationAlgorithm
import org.deeplearning4j.nn.conf.{ MultiLayerConfiguration, NeuralNetConfiguration }
import org.deeplearning4j.nn.conf.layers.OutputLayer
import org.deeplearning4j.nn.conf.layers.DenseLayer
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork
import org.deeplearning4j.nn.weights.WeightInit
import org.deeplearning4j.optimize.listeners.ScoreIterationListener
import org.nd4j.linalg.api.ops.impl.transforms.Sin
import org.nd4j.linalg.dataset.DataSet
import org.nd4j.linalg.factory.Nd4j
import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction
import org.nd4j.linalg.api.ndarray.INDArray
import scalax.chart.api._
import breeze.linalg.linspace
/**
* Created by donbeo on 16/10/15.
*/
package object MyExample1 {

  /** Trains a 1-20-1 feed-forward network to approximate y = sin(x)/x
    * on [-4, 4] and plots both the training data and the network's
    * estimate. (Chart rendering happens as a side effect.)
    */
  def main(args: Array[String]) = {

    // Render the (x, y) pairs of two column vectors as a line chart.
    def showChart(xs: INDArray, ys: INDArray): Unit = {
      val points = (0 until ys.length()).map(i => (xs.getFloat(i), ys.getFloat(i)))
      val chart = XYLineChart(points)
      chart.show()
    }

    val nSamples = 500
    val xMin = -4
    val xMax = 4

    // Simulated data set: y = sin(x) / x, built with breeze and then
    // copied into nSamples-by-1 ND4J column vectors.
    val gridX = linspace(xMin, xMax, nSamples)
    val gridY = breeze.numerics.sin(gridX) / gridX
    val features = Nd4j.create(gridX.toArray).reshape(nSamples, 1)
    val labels = Nd4j.create(gridY.toArray).reshape(nSamples, 1)
    showChart(features, labels)

    val numInputs = 1
    val numOutputs = 1
    val numHiddenNodes = 20
    val seed = 123
    val iterations = 100

    // One sigmoid hidden layer plus a linear ("identity") output layer,
    // optimized with L-BFGS on mean squared error.
    val conf = new NeuralNetConfiguration.Builder()
      .seed(seed)
      .iterations(iterations)
      .optimizationAlgo(OptimizationAlgorithm.LBFGS)
      .list(2)
      .layer(0, new DenseLayer.Builder()
        .nIn(numInputs).nOut(numHiddenNodes)
        .activation("sigmoid")
        .weightInit(WeightInit.XAVIER)
        .build())
      .layer(1, new OutputLayer.Builder(LossFunction.MSE)
        .weightInit(WeightInit.XAVIER)
        .activation("identity")
        .nIn(numHiddenNodes).nOut(numOutputs)
        .build())
      .build()

    val dataSet = new DataSet(features, labels)
    val network: MultiLayerNetwork = new MultiLayerNetwork(conf)
    network.init()
    network.setListeners(new ScoreIterationListener(1))
    network.fit(dataSet)

    // Plot the trained network's predictions over the same inputs.
    val estimated = network.output(features)
    showChart(features, estimated)
  }
}
package org.deeplearning4j.examples.myexamples
import org.deeplearning4j.nn.api.OptimizationAlgorithm
import org.deeplearning4j.nn.conf.{ MultiLayerConfiguration, NeuralNetConfiguration }
import org.deeplearning4j.nn.conf.layers.OutputLayer
import org.deeplearning4j.nn.conf.layers.DenseLayer
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork
import org.deeplearning4j.nn.weights.WeightInit
import org.deeplearning4j.optimize.listeners.ScoreIterationListener
import org.nd4j.linalg.api.ops.impl.transforms.Sin
import org.nd4j.linalg.dataset.DataSet
import org.nd4j.linalg.factory.Nd4j
import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction
import org.nd4j.linalg.api.ndarray.INDArray
import scalax.chart.api._
import breeze.linalg.linspace
/**
 * Created by donbeo on 16/10/15.
 */
package object MyExample1 {
def main(args: Array[String]) = {
def plotXY(x:INDArray, y:INDArray):Unit = {
val dataPlot = for(i <- 0 to y.length()-1) yield (x.getFloat(i), y.getFloat(i))
这是一个基本配置。我只玩了几分钟,但这应该会让你有一个好的开始:
package org.deeplearning4j.examples.deepbelief;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.impl.transforms.Sin;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.LossFunctions;
/**
* Created by agibsonccc on 10/17/15.
*/
public class RandomValues {
    /**
     * Fits a 1-20-1 feed-forward network to y = sin(x)/x on [-10, 10]
     * and prints the target values and the trained network's predictions.
     */
    public static void main(String[] args) {
        Nd4j.ENFORCE_NUMERICAL_STABILITY = true;

        int numInputs = 1;
        int numOutputs = 1;
        int numHiddenNodes = 20;
        int nSamples = 500;

        // Use nSamples consistently (the original hard-coded 500 here,
        // so changing nSamples alone would have broken the reshape).
        INDArray x0 = Nd4j.linspace(-10, 10, nSamples).reshape(nSamples, 1);
        // y0 = sin(x0) / x0, computed element-wise; Sin writes into a copy
        // of x0 so the original inputs stay intact for training.
        INDArray y0 = Nd4j.getExecutioner().execAndReturn(new Sin(x0, x0.dup())).div(x0);
        System.out.println(y0);

        int seed = 123;
        int iterations = 100;

        // Line gradient descent with gradient-norm clipping and L1/L2
        // regularization; miniBatch(false) trains on the full data set.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(seed)
                .constrainGradientToUnitNorm(true)  // set once (was duplicated in the chain)
                .learningRate(1e-1)
                .iterations(iterations)
                .l1(1e-1).l2(1e-3).regularization(true)
                .miniBatch(false)
                .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT)
                .list(2)
                .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                        .activation("relu")
                        .weightInit(WeightInit.XAVIER)
                        .build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.RMSE_XENT)
                        .weightInit(WeightInit.XAVIER)  // set once (was duplicated on this layer)
                        .updater(Updater.SGD)
                        .activation("identity")
                        .nIn(numHiddenNodes).nOut(numOutputs).build())
                .backprop(true)
                .build();

        MultiLayerNetwork network = new MultiLayerNetwork(conf);
        network.init();
        network.setListeners(new ScoreIterationListener(1));
        network.fit(new DataSet(x0, y0));
        System.out.println(network.output(x0));
    }
}
你能分享你的sbt文件吗?我得到一个错误,提示layers不是“org.deeplearning4j.nn.conf”的一部分,我怀疑这是因为缺少某个我没有声明的库依赖项。