Multithreaded Java and neural networks: reading 2 CSV files at the same time

Tags: java, multithreading, encog

I am working on a neural network project that needs to run two instances of a neural network program with different training sets, and in particular to run both instances at the same time. For this I am using multithreading in Java together with the Encog library for the ANN implementation. So I created two threads, each containing the ANN implementation but with a different CSV file. Part of the result works: it returns the minimum and maximum values for each field in the CSV files. The problem is that the ANN output is only computed for one of the files. Here is my implementation:

ReadFileMT.java

    public class ReadFileMT implements Runnable {

        public static void dumpFieldInfo(EncogAnalyst analyst) {
            System.out.println("Fields found in file:");
            for (AnalystField field : analyst.getScript().getNormalize()
                    .getNormalizedFields()) {
                StringBuilder line = new StringBuilder();
                line.append(field.getName());
                line.append(",action=");
                line.append(field.getAction());
                line.append(",min=");
                line.append(field.getActualLow());
                line.append(",max=");
                line.append(field.getActualHigh());
                System.out.println(line.toString());
            }
        }

        public void run() {
            File sourceFile = new File("d:\\data\\F21.csv");
            File targetFile = new File("d:\\data\\F2_norm.csv");
            EncogAnalyst analyst = new EncogAnalyst();
            AnalystWizard wizard = new AnalystWizard(analyst);
            AnalystField targetField = wizard.getTargetField();
            wizard.setTargetField("Old_Resp");
            wizard.wizard(sourceFile, true, AnalystFileFormat.DECPNT_COMMA);
            dumpFieldInfo(analyst);
            final AnalystNormalizeCSV norm = new AnalystNormalizeCSV();
            norm.analyze(sourceFile, true, CSVFormat.ENGLISH, analyst);
            norm.setProduceOutputHeaders(true);
            norm.normalize(targetFile);
            // Encog.getInstance().shutdown();

            // ***************** Read from the csv file *****************

            final BasicNetwork network = EncogUtility.simpleFeedForward(4, 4, 0, 1,
                    false);

            network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 4));
            network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 4));
            network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.getStructure().finalizeStructure();
            network.reset();

            // create training data
            final MLDataSet trainingSet = TrainingSetUtil.loadCSVTOMemory(
                    CSVFormat.ENGLISH, "c:\\temp\\F2_norm.csv", false, 4, 1);

            // train the neural network
            System.out.println();
            System.out.println("Training Network");
            final Backpropagation train = new Backpropagation(network, trainingSet, 0.05, 0.9);
            train.fixFlatSpot(false);
            int epoch = 1;

            do {
                train.iteration();
                System.out.println("Epoch #" + epoch + " Error:" + train.getError());
                epoch++;
            } while (train.getError() > 0.01);
            train.finishTraining();

            // final Train train = new ResilientPropagation(network, trainingSet);
            /*int epoch = 1;
            do {
                train.iteration();
                System.out.println("Epoch #" + epoch + " Error:"
                        + train.getError() * 100 + "%");
                epoch++;
            } while (train.getError() > 0.015);*/

            /*int epoch = 1;
            do {
                train.iteration();
                System.out.println("Epoch #" + epoch + " Error:" + train.getError());
                epoch++;
            } while (train.getError() > 0.01);
            train.finishTraining();*/

            // test the neural network
            System.out.println("Neural Network Results:");
            for (MLDataPair pair : trainingSet) {
                final MLData output = network.compute(pair.getInput());
                System.out.println(pair.getInput().getData(0) + ","
                        + pair.getInput().getData(1) + "," + pair.getInput().getData(2)
                        + "," + pair.getInput().getData(3)
                        + ", actual=" + output.getData(0) + ",ideal="
                        + pair.getIdeal().getData(0));
            }

            Encog.getInstance().shutdown();
        }
    }
ReadFileMT2.java

    public class ReadFileMT2 implements Runnable {

        public static void dumpFieldInfo(EncogAnalyst analyst) {
            System.out.println("Fields found in file:");
            for (AnalystField field : analyst.getScript().getNormalize()
                    .getNormalizedFields()) {
                StringBuilder line = new StringBuilder();
                line.append(field.getName());
                line.append(",action=");
                line.append(field.getAction());
                line.append(",min=");
                line.append(field.getActualLow());
                line.append(",max=");
                line.append(field.getActualHigh());
                System.out.println(line.toString());
            }
        }

        public void run() {
            File sourceFile = new File("d:\\data\\RespTime.csv");
            File targetFile = new File("d:\\data\\RespTime_norm.csv");
            EncogAnalyst analyst = new EncogAnalyst();
            AnalystWizard wizard = new AnalystWizard(analyst);
            AnalystField targetField = wizard.getTargetField();
            wizard.setTargetField("Old_Resp");
            wizard.wizard(sourceFile, true, AnalystFileFormat.DECPNT_COMMA);
            dumpFieldInfo(analyst);
            final AnalystNormalizeCSV norm = new AnalystNormalizeCSV();
            norm.analyze(sourceFile, true, CSVFormat.ENGLISH, analyst);
            norm.setProduceOutputHeaders(true);
            norm.normalize(targetFile);
            // Encog.getInstance().shutdown();

            // ***************** Read from the csv file *****************

            final BasicNetwork network = EncogUtility.simpleFeedForward(4, 4, 0, 1,
                    false);

            network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 4));
            network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 4));
            network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.getStructure().finalizeStructure();
            network.reset();

            // create training data
            final MLDataSet trainingSet = TrainingSetUtil.loadCSVTOMemory(
                    CSVFormat.ENGLISH, "c:\\temp\\RespTime_norm.csv", false, 4, 1);

            // train the neural network
            System.out.println();
            System.out.println("Training Network");
            final Backpropagation train = new Backpropagation(network, trainingSet, 0.05, 0.9);
            train.fixFlatSpot(false);
            int epoch = 1;

            do {
                train.iteration();
                System.out.println("Epoch #" + epoch + " Error:" + train.getError());
                epoch++;
            } while (train.getError() > 0.01);
            train.finishTraining();

            /*int epoch = 1;
            do {
                train.iteration();
                System.out.println("Epoch #" + epoch + " Error:"
                        + train.getError() * 100 + "%");
                epoch++;
            } while (train.getError() > 0.015);*/

            /*int epoch = 1;
            do {
                train.iteration();
                System.out.println("Epoch #" + epoch + " Error:" + train.getError());
                epoch++;
            } while (train.getError() > 0.01);
            train.finishTraining();*/

            // test the neural network
            System.out.println("Neural Network Results:");
            for (MLDataPair pair : trainingSet) {
                final MLData output = network.compute(pair.getInput());
                System.out.println(pair.getInput().getData(0) + ","
                        + pair.getInput().getData(1) + "," + pair.getInput().getData(2)
                        + "," + pair.getInput().getData(3)
                        + ", actual=" + output.getData(0) + ",ideal="
                        + pair.getIdeal().getData(0));
            }

            Encog.getInstance().shutdown();
        }
    }
main.java

    public class main {

        public static void main(String[] args) {
            ReadFileMT obj1 = new ReadFileMT();
            ReadFileMT2 obj2 = new ReadFileMT2();
            Thread t1 = new Thread(obj1);
            Thread t2 = new Thread(obj2);
            t1.start();
            t2.start();
        }
    }

I don't understand what is going wrong. Note: I am new to parallel programming.
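
As a side note on the threading part: main() above only starts t1 and t2 and then returns. Below is a minimal sketch of how the main method could wait for both workers to finish using the standard Thread.join() call; the class name MainJoin is made up for illustration and everything else reuses the classes from the question.

    public class MainJoin {

        public static void main(String[] args) throws InterruptedException {
            // Same setup as in main.java above: one thread per CSV file
            Thread t1 = new Thread(new ReadFileMT());
            Thread t2 = new Thread(new ReadFileMT2());
            t1.start();
            t2.start();
            // join() blocks the calling thread until the given thread has
            // finished, so both trainings are complete before main() returns.
            t1.join();
            t2.join();
        }
    }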

Comments on the question:

"Please add what exactly you want to achieve: do you want to split the results into different files, or write them into one file (in a specific order)?"

"I use the neural network to predict a time series, so I have two CSV files with different data. I want the ANN to give me predictions for the two CSV files in parallel, without splitting the files. To sum up: I want two instances of the same ANN running in parallel, each making predictions from its own CSV file. Is what I want not achievable? Can someone please help me?"

"Have you tried running each method separately? Does each method work correctly on its own?"
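
Since ReadFileMT and ReadFileMT2 differ only in their hard-coded file paths, the "two instances of the same ANN" described in the comments could also be expressed as a single Runnable that takes its paths as constructor arguments. The following is only a sketch: the class name CsvTrainer and its fields are made up for illustration, and run() is meant to hold the same Encog code shown in ReadFileMT.

    public class CsvTrainer implements Runnable {

        // Each instance carries its own file paths, so the same ANN code
        // can be reused for both CSV files.
        private final String sourcePath;      // raw CSV to analyze
        private final String normalizedPath;  // where the normalized CSV is written
        private final String trainingPath;    // CSV loaded into the training set

        public CsvTrainer(String sourcePath, String normalizedPath, String trainingPath) {
            this.sourcePath = sourcePath;
            this.normalizedPath = normalizedPath;
            this.trainingPath = trainingPath;
        }

        public void run() {
            // Same body as ReadFileMT.run(), but with the hard-coded
            // "d:\\data\\..." and "c:\\temp\\..." strings replaced by
            // sourcePath, normalizedPath and trainingPath.
        }

        public static void main(String[] args) {
            // Two instances of the same Runnable, one per CSV file,
            // using the paths from the question.
            Thread t1 = new Thread(new CsvTrainer("d:\\data\\F21.csv",
                    "d:\\data\\F2_norm.csv", "c:\\temp\\F2_norm.csv"));
            Thread t2 = new Thread(new CsvTrainer("d:\\data\\RespTime.csv",
                    "d:\\data\\RespTime_norm.csv", "c:\\temp\\RespTime_norm.csv"));
            t1.start();
            t2.start();
        }
    }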