
Java BasicMLDataSet Class Code Examples


This article collects typical usage examples of the Java class org.encog.ml.data.basic.BasicMLDataSet. If you are wondering what the BasicMLDataSet class is for, how to use it, or are looking for concrete usage examples, the selected code examples below may help.



The BasicMLDataSet class belongs to the org.encog.ml.data.basic package. A total of 14 code examples of the class are shown below, sorted by popularity by default.
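Before the project-level examples, here is a minimal, self-contained sketch (not taken from any of the cited projects; the XOR arrays are illustrative placeholders) of the two BasicMLDataSet usage patterns that recur below: constructing a data set directly from double[][] input/ideal arrays, and iterating its MLDataPair entries. The empty-constructor-plus-add() pattern used in some examples is mentioned in the comments.

import org.encog.ml.data.MLData;
import org.encog.ml.data.MLDataPair;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;

public class BasicMLDataSetSketch {
    public static void main(String[] args) {
        // Illustrative XOR data: each input row is paired with one ideal (target) row.
        double[][] input = { {0, 0}, {0, 1}, {1, 0}, {1, 1} };
        double[][] ideal = { {0}, {1}, {1}, {0} };

        // Wrap the two arrays as an in-memory MLDataSet.
        // (Alternatively, new BasicMLDataSet() creates an empty set that is filled via add().)
        MLDataSet trainingSet = new BasicMLDataSet(input, ideal);

        // Iterate the pairs, as the training and evaluation loops in the examples below do.
        for (MLDataPair pair : trainingSet) {
            MLData in = pair.getInput();
            System.out.println("input=" + in.getData(0) + "," + in.getData(1)
                    + " ideal=" + pair.getIdeal().getData(0));
        }
    }
}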

Example 1: trainAndStore

import org.encog.ml.data.basic.BasicMLDataSet; // import the required package/class
@Test
public void trainAndStore() {
    BasicMLDataSet dataSet = getData();

    // Create network
    BasicNetwork network = getNetwork();

    // Train
    System.out.println("Training network...");
    Train train = new ResilientPropagation(network, dataSet);
    for (int i = 0; i < TRAIN_ITERATIONS; i++) {
        train.iteration();
    }
    System.out.println("Training finished, error: " + train.getError());

    // Save to file
    System.out.println("Saving to file...");
    saveToFile(network);
    System.out.println("Done");
}
 
Developer: Ignotus, Project: torcsnet, Lines: 21, Source: EncogMLPTrainingTest.java


Example 2: loadTrainingData

import org.encog.ml.data.basic.BasicMLDataSet; // import the required package/class
private BasicMLDataSet loadTrainingData() throws Exception {
    TrainingData data = TrainingData.readData(Configuration.CSV_DIRECTORY, INPUTS, OUTPUTS, true);
    if (data == null) {
        throw new Exception("No data read");
    }
    /* Normalize and train on the data */
    Normalization normData = Normalization.createNormalization(data.input, data.target);
    System.out.println("Norm min target: " + normData.targetMin);
    System.out.println("Norm max target: " + normData.targetMax);

    Normalization norm = EvolvedController.createDefaultNormalization();
    norm.normalizeInput(data.input, 0, 1);
    norm.normalizeTarget(data.target, 0, 1);

    return new BasicMLDataSet(data.input.getData(), data.target.getData());
}
 
Developer: Ignotus, Project: torcsnet, Lines: 17, Source: EvolutionaryDriverAlgorithm.java


Example 3: getData

import org.encog.ml.data.basic.BasicMLDataSet; // import the required package/class
private BasicMLDataSet getData() {
    TrainingData data = TrainingData.readData(Configuration.CSV_DIRECTORY, INPUTS, OUTPUTS, true);
    if (data == null) {
        System.out.println("No data read!");
        return null;
    }

    /* Prepare data */
    Normalization normData = Normalization.createNormalization(data.input, data.target);
    System.out.println("Norm min target: " + normData.targetMin);
    System.out.println("Norm max target: " + normData.targetMax);
    Normalization norm = EvolvedController.createDefaultNormalization();
    norm.normalizeInput(data.input, 0, 1);
    norm.normalizeTarget(data.target, 0, 1);
    return new BasicMLDataSet(data.input.getData(), data.target.getData());
}
 
Developer: Ignotus, Project: torcsnet, Lines: 17, Source: EncogMLPTrainingTest.java


Example 4: train

import org.encog.ml.data.basic.BasicMLDataSet; // import the required package/class
@Override /** {@inheritDoc} */
public String train(final double[][] inputs, final double[][] expected, double learningRate, double maxError,
    long maxIterations) {

  this.learningRate = learningRate;
  trainingSet = new BasicMLDataSet(inputs, expected);
  train = new Backpropagation(network, trainingSet, this.learningRate, 0.3);
  train.fixFlatSpot(false);
  int epoch = 0;
  do {
    iterations.add(epoch);
    train.iteration();
    errors.add(train.getError());
    if (epoch++ > maxIterations) break;
  } while (train.getError() > maxError/100);
  return epoch + " " + train.getError();
}
 
Developer: Saka7, Project: Layer-Classifier, Lines: 18, Source: BackPropagationNeuralNet.java


Example 5: main

import org.encog.ml.data.basic.BasicMLDataSet; // import the required package/class
public static void main(final String args[]) {

        MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
        NEATPopulation pop = new NEATPopulation(2,1,1000);
        pop.setInitialConnectionDensity(1.0);// not required, but speeds training
        pop.reset();

        CalculateScore score = new TrainingSetScore(trainingSet);
        // train the neural network

        final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop,score);

        do {
            train.iteration();
            System.out.println("Epoch #" + train.getIteration() + " Error:" + train.getError()+ ", Species:" + pop.getSpecies().size());
        } while(train.getError() > 0.01);

        NEATNetwork network = (NEATNetwork)train.getCODEC().decode(train.getBestGenome());

        // test the neural network
        System.out.println("Neural Network Results:");
        EncogUtility.evaluate(network, trainingSet);

        Encog.getInstance().shutdown();
    }
 
Developer: jeffheaton, Project: aifh, Lines: 26, Source: NEATXORExample.java


Example 6: main

import org.encog.ml.data.basic.BasicMLDataSet; // import the required package/class
/**
 * The main method.
 * @param args No arguments are used.
 */
public static void main(final String args[]) {

    // create a neural network, without using a factory
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null,true,2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(),true,3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(),false,1));
    network.getStructure().finalizeStructure();
    network.reset();

    // create training data
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network
    final ResilientPropagation train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;

    do {
        train.iteration();
        System.out.println("Epoch #" + epoch + " Error:" + train.getError());
        epoch++;
    } while(train.getError() > 0.01);
    train.finishTraining();

    // test the neural network
    System.out.println("Neural Network Results:");
    for(MLDataPair pair: trainingSet ) {
        final MLData output = network.compute(pair.getInput());
        System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1)
                + ", actual=" + output.getData(0) + ",ideal=" + pair.getIdeal().getData(0));
    }

    Encog.getInstance().shutdown();
}
 
Developer: neo4j-contrib, Project: neo4j-ml-procedures, Lines: 40, Source: XORHelloWorld.java


Example 7: solve

import org.encog.ml.data.basic.BasicMLDataSet; // import the required package/class
@Override /** {@inheritDoc} */
public double[] solve(final double[][] inputs) {
  double[] results = new double[inputs.length];
  trainingSet = new BasicMLDataSet(inputs, null);
  int i = 0;
  for (MLDataPair pair : trainingSet) {
    final MLData output = network.compute(pair.getInput());
    results[i++] = output.getData(0);
  }
  Encog.getInstance().shutdown();
  return results;
}
 
Developer: Saka7, Project: Layer-Classifier, Lines: 13, Source: BackPropagationNeuralNet.java


Example 8: solve

import org.encog.ml.data.basic.BasicMLDataSet; // import the required package/class
/** {@inheritDoc} */
public double[] solve(final double[][] inputs) {
  double[] results = new double[inputs.length];
  trainingSet = new BasicMLDataSet(inputs, null);
  int i = 0;
  for (MLDataPair pair : trainingSet) {
    final MLData output = network.compute(pair.getInput());
    results[i++] = output.getData(0);
  }
  Encog.getInstance().shutdown();
  return results;
}
 
Developer: Saka7, Project: Layer-Classifier, Lines: 13, Source: ResilientPropagationNeuralNet.java


Example 9: MemoryDiskMLDataSet

import org.encog.ml.data.basic.BasicMLDataSet; // import the required package/class
/**
 * Constructor with {@link #fileName}, {@link #inputCount} and {@link #outputCount}
 */
public MemoryDiskMLDataSet(String fileName, int inputCount, int outputCount) {
    this.memoryDataSet = new BasicMLDataSet();
    this.inputCount = inputCount;
    this.outputCount = outputCount;
    this.fileName = fileName;
}
 
Developer: ShifuML, Project: guagua, Lines: 10, Source: MemoryDiskMLDataSet.java


Example 10: main

import org.encog.ml.data.basic.BasicMLDataSet; // import the required package/class
/**
 * The main method.
 * @param args No arguments are used.
 */
public static void main(final String args[]) {

	// create a neural network, without using a factory
	BasicNetwork network = new BasicNetwork();
	network.addLayer(new BasicLayer(null,true,2));
	network.addLayer(new BasicLayer(new ActivationSigmoid(),true,3));
	network.addLayer(new BasicLayer(new ActivationSigmoid(),false,1));
	network.getStructure().finalizeStructure();
	network.reset();

	// create training data
	MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

	// train the neural network
	final ResilientPropagation train = new ResilientPropagation(network, trainingSet);

	int epoch = 1;

	do {
		train.iteration();
		System.out.println("Epoch #" + epoch + " Error:" + train.getError());
		epoch++;
	} while(train.getError() > 0.01);
	train.finishTraining();

	// test the neural network
	System.out.println("Neural Network Results:");
	for(MLDataPair pair: trainingSet ) {
		final MLData output = network.compute(pair.getInput());
		System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1)
				+ ", actual=" + output.getData(0) + ",ideal=" + pair.getIdeal().getData(0));
	}

	Encog.getInstance().shutdown();
}
 
Developer: encog, Project: encog-sample-java, Lines: 40, Source: HelloWorld.java


Example 11: transformData

import org.encog.ml.data.basic.BasicMLDataSet; // import the required package/class
private void transformData() {
    intermediateDataset = new BasicMLDataSet();
    for(int i = 0 ; i < this.dataset.getRecordCount(); i ++) {
        MLData input  = hiddenNet.compute(dataset.get(i).getInput());
        intermediateDataset.add(input, input);
    }
}
 
Developer: m4rkl1u, Project: AutoEncoder, Lines: 8, Source: AutoEncoder.java


Example 12: main

import org.encog.ml.data.basic.BasicMLDataSet; // import the required package/class
/**
     * The main method.
     * @param args No arguments are used.
     */
    public static void main(final String args[]) {

//        // create a neural network, without using a factory
//        BasicNetwork network = new BasicNetwork();
//        network.addLayer(new BasicLayer(null,true,2));
//        network.addLayer(new BasicLayer(new ActivationSigmoid(),true,3));
//        network.addLayer(new BasicLayer(new ActivationSigmoid(),false,1));
//        network.getStructure().finalizeStructure();
//        network.reset();
//
//        // create training data
//        MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
//
//        // train the neural network
//        final ResilientPropagation train = new ResilientPropagation(network, trainingSet);
//
//        int epoch = 1;
//
//        do {
//            train.iteration();
//            System.out.println("Epoch #" + epoch + " Error:" + train.getError());
//            epoch++;
//        } while(train.getError() > 0.01);
//        train.finishTraining();
//
//        // test the neural network
//        System.out.println("Neural Network Results:");
//        for(MLDataPair pair: trainingSet ) {
//            final MLData output = network.compute(pair.getInput());
//            System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1)
//                    + ", actual=" + output.getData(0) + ",ideal=" + pair.getIdeal().getData(0));
//        }
//
//        Encog.getInstance().shutdown();

        MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
        NEATPopulation pop = new NEATPopulation(2,1,1000);
        pop.setInitialConnectionDensity(1.0);// not required, but speeds training
        pop.reset();

        CalculateScore score = new TrainingSetScore(trainingSet);
        // train the neural network

        final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);

        do {
            train.iteration();
            System.out.println("Epoch #" + train.getIteration() + " Error:" + train.getError()+ ", Species:" + pop.getSpecies().size());
        } while(train.getError() > 0.01);

        NEATNetwork network = (NEATNetwork)train.getCODEC().decode(train.getBestGenome());

        // test the neural network
        System.out.println("Neural Network Results:");
        EncogUtility.evaluate(network, trainingSet);

        Encog.getInstance().shutdown();

    }
 
Developer: robrtj, Project: NeuralNetworkImageCompression, Lines: 64, Source: XorSample.java


Example 13: train

import org.encog.ml.data.basic.BasicMLDataSet; // import the required package/class
public void train(final ArrayList<DataPoint> dataHistory) {
	if (isTraining()) {
		throw new IllegalStateException();
	}

	setTrainerThread(new Thread() {
		public void run() {
			// Clean and normalize the data history
			ArrayList<DataPoint> cleanedDataHistory = cleanDataHistory(dataHistory);
			ArrayList<DataPoint> normalizedDataHistory = normalizeDataHistory(cleanedDataHistory);

			// Create a new neural network and data set
			BasicNetwork neuralNetwork = EncogUtility.simpleFeedForward(2, getHiddenLayerNeurons(0),
					getHiddenLayerNeurons(1), 5, true);
			MLDataSet dataSet = new BasicMLDataSet();

			// Add all points of the data history to the data set
			for (DataPoint dataPoint : normalizedDataHistory) {
				MLData input = new BasicMLData(2);
				input.setData(0, dataPoint.getX());
				input.setData(1, dataPoint.getY());

				// One-hot encode the button: with 5 outputs,
				// getButton() == 0 yields 1, 0, 0, 0, 0 and
				// getButton() == 4 yields 0, 0, 0, 0, 1
				MLData ideal = new BasicMLData(5);
				for (int i = 0; i <= 4; i++) {
					ideal.setData(i, (dataPoint.getButton() == i) ? 1 : 0);
				}

				MLDataPair pair = new BasicMLDataPair(input, ideal);
				dataSet.add(pair);
			}

			// Create a training method
			MLTrain trainingMethod = new ResilientPropagation((ContainsFlat) neuralNetwork, dataSet);
			long startTime = System.currentTimeMillis();
			int timeLeft = getMaxTrainingTime();
			int iteration = 0;

			// Train the network using multiple iterations on the training method
			do {
				trainingMethod.iteration();
				timeLeft = (int) ((startTime + getMaxTrainingTime()) - System.currentTimeMillis());
				iteration++;

				sendNeuralNetworkIteration(iteration, trainingMethod.getError(), timeLeft);
			} while (trainingMethod.getError() > getMaxTrainingError() && timeLeft > 0
					&& !trainingMethod.isTrainingDone());
			trainingMethod.finishTraining();

			// Return the neural network to all listeners
			sendNeuralNetworkTrainerResult(neuralNetwork);
		}
	});
	getTrainerThread().start();
}
 
Developer: bsmulders, Project: StepManiaSolver, Lines: 58, Source: NeuralNetworkTrainer.java


Example 14: AutoEncoder

import org.encog.ml.data.basic.BasicMLDataSet; // import the required package/class
public AutoEncoder(){
    params  = new ArrayList<MLParams>();
    dataset = new BasicMLDataSet();
}
 
Developer: m4rkl1u, Project: AutoEncoder, Lines: 5, Source: AutoEncoder.java



Note: The org.encog.ml.data.basic.BasicMLDataSet examples in this article were collected from source code and documentation platforms such as GitHub/MSDocs. The snippets are excerpted from open-source projects contributed by their respective developers; copyright remains with the original authors. For distribution and use, refer to the license of the corresponding project; do not republish without permission.

