Java 无法将侦听器(Listener)强制转换为集合(Collection) — DL4J
我是第一次使用 DL4J,所以请多包涵。我编写了以下简单的程序:
import org.datavec.api.records.reader.RecordReader;
import org.datavec.api.records.reader.impl.csv.CSVRecordReader;
import org.datavec.api.split.FileSplit;
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import org.deeplearning4j.eval.Evaluation;
import java.io.File;
import java.util.Collection;
/**
 * Trains a small MLP (one hidden layer) on a 2-feature binary-classification
 * CSV dataset, then evaluates it on a held-out CSV file.
 *
 * Based on the DL4J "MLPClassifierLinear" tutorial example. Requires the
 * deeplearning4j / datavec / nd4j libraries on the classpath; the
 * single-listener call {@code model.setListeners(new ScoreIterationListener(10))}
 * only compiles against DL4J versions whose
 * {@code MultiLayerNetwork.setListeners} takes varargs {@code IterationListener...}
 * (0.7.x and later) — older versions expose only a Collection overload.
 */
public class MLPClassifierLinear
{
    public static void main(String[] args) throws Exception
    {
        int seed = 123;             // fixed RNG seed for reproducible weight init
        double learnRate = 0.01;
        int batchSize = 50;
        int nEpochs = 30;
        int numInputs = 2;          // two features per CSV row
        int numOutputs = 2;         // binary classification
        int numHiddenNodes = 20;

        // Load training data: label is in column 0, 2 label classes.
        RecordReader rr = new CSVRecordReader();
        rr.initialize(new FileSplit(new File("C:\\Users\\Oria\\MLP\\linear_data_train.csv")));
        DataSetIterator trainIter = new RecordReaderDataSetIterator(rr, batchSize, 0, 2);

        // Load evaluation data with the same layout.
        RecordReader rrTest = new CSVRecordReader();
        rrTest.initialize(new FileSplit(new File("C:\\Users\\Oria\\MLP\\linear_data_eval.csv")));
        DataSetIterator testIter = new RecordReaderDataSetIterator(rrTest, batchSize, 0, 2);

        // Network: 2 -> 20 (relu) -> 2 (softmax), SGD with Nesterov momentum.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(seed)
                .iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .learningRate(learnRate)
                .updater(Updater.NESTEROVS).momentum(0.9)
                .list()
                .layer(0, new DenseLayer.Builder()
                        .nIn(numInputs)
                        .nOut(numHiddenNodes)
                        .weightInit(WeightInit.XAVIER)
                        .activation("relu")
                        .build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                        .weightInit(WeightInit.XAVIER)
                        .activation("softmax")
                        .nIn(numHiddenNodes)
                        .nOut(numOutputs)
                        .build())
                .pretrain(false).backprop(true).build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        // Print the score every 10 iterations (varargs overload; see class doc).
        model.setListeners(new ScoreIterationListener(10));

        // fit(DataSetIterator) resets the iterator per call in this DL4J
        // version — TODO confirm if upgrading, otherwise reset() manually.
        for (int i = 0; i < nEpochs; i++) {
            model.fit(trainIter);
        }

        System.out.println("Evaluate model.......");
        Evaluation eval = new Evaluation(numOutputs);
        while (testIter.hasNext())
        {
            DataSet t = testIter.next();
            INDArray features = t.getFeatureMatrix();
            INDArray lables = t.getLabels();
            // Forward pass only (training = false); compare to ground truth.
            INDArray predicted = model.output(features, false);
            eval.eval(lables, predicted);
        }
        // Accuracy, precision, recall, F1 and the confusion matrix.
        System.out.println(eval.stats());
    }
}
import org.datavec.api.records.reader.RecordReader;
导入org.datavec.api.records.reader.impl.csv.CSVRecordReader;
导入org.datavec.api.split.FileSplit;
导入org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
导入org.deeplearning4j.nn.api.OptimizationAlgorithm;
导入org.deeplearning4j.nn.conf.MultiLayerConfiguration;
导入org.deeplearning4j.nn.conf.NeuralNetConfiguration;
导入org.deeplearning4j.nn.conf.Updater;
导入org.deeplearning4j.nn.conf.layers.DenseLayer;
导入org.deeplearning4j.nn.conf.layers.OutputLayer;
导入org.deeplearning4j.nn.multilayer.multilayer网络;
导入org.deeplearning4j.nn.weights.WeightInit;
导入org.deeplearning4j.optimize.listeners.ScoreIterationListener;
导入org.nd4j.linalg.api.ndarray.INDArray;
导入org.nd4j.linalg.dataset.dataset;
导入org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
导入org.nd4j.linalg.lossfunctions.lossfunctions;
导入org.deeplearning4j.eval.Evaluation;
导入java.io.File;
导入java.util.Collection;
公共类MLPClassifierLinear
{
公共静态void main(字符串[]args)引发异常
{
int seed=123;
双学习率=0.01;
int batchSize=50;
int-nEpochs=30;
int numinput=2;
int numOutputs=2;
int numhiddenneds=20;
int-labelField=0;
int numOfLabels=2;
//负荷训练数据
RecordReader rr=新的CSVRecordReader();
rr.initialize(新文件分割(新文件(“C:\\Users\\Oria\\MLP\\linear\u data\u train.csv”));
DataSetIterator trainIter=新的RecordReaderDataSetIterator(rr,batchSize,0,2);
//负载测试数据
RecordReader rrTest=新的CSVRecordReader();
rrTest.initialize(新文件(新文件(“C:\\Users\\Oria\\MLP\\linear\u data\u eval.csv”));
DataSetIterator Tester=新的RecordReaderDataSetIterator(rrTest,batchSize,0,2);
多层配置conf=new NeuralNetConfiguration.Builder()
.种子
.迭代次数(1)
.优化算法(优化算法.随机梯度下降)
.学习率(learnRate)
.updater(updater.NESTEROVS).momentum(0.9)
.list()
.layer(0,新的DenseLayer.Builder()
.nIn(numInputs)
.nOut(numhiddenodes)
.weightInit(weightInit.XAVIER)
.激活(“relu”)
.build())
.layer(1,新的OutputLayer.Builder(LossFunctions.LossFunction.NegativeLosGlikeliHood)
.weightInit(weightInit.XAVIER)
.激活(“softmax”)
.nIn(纽黑德)
.nOut(numOutputs)
.build())
.pretrain(false).backprop(true).build();
多层网络模型=新的多层网络(conf);
model.init();
setListeners(新的ScoreIterationListener(10));
对于(int i = 0; i &lt; nEpochs; i++) model.fit(trainIter); …(其余代码同上,翻译版在此被截断)
代码应该可以正常工作。这是一本关于dl4j的著名教程的副本
但是,代码无法编译。我在 model.setListeners 这一行得到一个错误:“类型 MultiLayerNetwork 中的方法 setListeners(Collection) 不适用于参数 (ScoreIterationListener)”。
当我将其更改为 model.setListeners((Collection) new ScoreIterationListener(10)); 时,编译错误消失了,但在运行时得到异常:线程 "main" 中的 java.lang.ClassCastException: org.deeplearning4j.optimize.listeners.ScoreIterationListener 无法转换为 java.util.Collection,
位于 MLPClassifierLinear.main(MLPClassifierLinear.java:71)。
发生了什么?有 DL4J 经验的人能帮我解决这个问题吗?(回答:)我建议从官方示例开始,并确保使用最新版本(撰写本文时为 0.7.2):MultiLayerNetwork.setListeners 对 IterationListener 提供了可变参数(varargs)重载,旧版本只有 Collection 重载,这正是单个监听器无法通过编译的原因。
我感觉真正的根本原因可能是其他原因?可能是其他原因。我已经对它进行了修补,现在得到了java.lang.ClassNotFoundException:org.bytedeco.javacpp.openblas异常。我想我的dl4j安装没有什么问题,而是有其他问题。CSVExample不工作。还说“java.lang.ClassNotFoundException:org.bytedeco.javacpp.openblas”,你能提出一个问题吗?我显然遗漏了一些东西:我们没有在gitter频道看到这一点。这个例子很好用。