This article collects typical usage examples of the C# class Encog.Neural.Networks.Training.Propagation.Resilient.ResilientPropagation. If you are wondering what the ResilientPropagation class is for, how to use it, or are looking for concrete examples of it in action, the curated samples below should help.
The ResilientPropagation class belongs to the Encog.Neural.Networks.Training.Propagation.Resilient namespace. Twenty code examples of the class are shown below, sorted by popularity by default.
Example 1: Main
static void Main(string[] args)
{
    // create a neural network without using a factory
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset();
    IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
    IMLTrain train = new ResilientPropagation(network, trainingSet);
    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine($"Epoch #{epoch} Error: {train.Error}");
        epoch++;
    } while (train.Error > 0.01);
    train.FinishTraining();
    Console.WriteLine("Neural Network Results:");
    foreach (IMLDataPair iPair in trainingSet)
    {
        IMLData output = network.Compute(iPair.Input);
        Console.WriteLine($"{iPair.Input[0]}, {iPair.Input[1]}, actual={output[0]}, ideal={iPair.Ideal[0]}");
    }
    EncogFramework.Instance.Shutdown();
    Console.ReadKey();
}
Developer: zerazobz, Project: TestEncog, Lines: 34, Source file: Program.cs
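Examples 1, 2, and 18 reference XORInput and XORIdeal fields that are not shown in the snippets. A minimal definition, matching the standard XOR truth table used explicitly in Examples 13 and 19, would look like this (the field names come from the snippets, the declaration itself is assumed):

// Assumed class-level fields for the XOR examples (not part of the original snippets);
// the values follow the standard XOR truth table, as in Example 13.
public static double[][] XORInput =
{
    new[] {0.0, 0.0},
    new[] {1.0, 0.0},
    new[] {0.0, 1.0},
    new[] {1.0, 1.0}
};
public static double[][] XORIdeal =
{
    new[] {0.0},
    new[] {1.0},
    new[] {1.0},
    new[] {0.0}
};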
Example 2: Main
static void Main(string[] args)
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset();
    var trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
    var train = new ResilientPropagation(network, trainingSet);
    var epoch = 1;
    do
    {
        train.Iteration();
    } while (train.Error > 0.01);
    train.FinishTraining();
    foreach (var pair in trainingSet)
    {
        var output = network.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @", " + pair.Input[1] + @", actual=" + output[0] + @", ideal=" + pair.Ideal[0]);
    }
    EncogFramework.Instance.Shutdown();
    Console.ReadLine();
}
Developer: akucherk, Project: HelloSystem, Lines: 29, Source file: Program.cs
Example 3: Preprocessing_Completed
private void Preprocessing_Completed(object sender, RunWorkerCompletedEventArgs e)
{
    worker.ReportProgress(0, "Creating Network...");
    BasicNetwork Network = new BasicNetwork();
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.InputSize));
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 50));
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.IdealSize));
    Network.Structure.FinalizeStructure();
    Network.Reset();
    DataContainer.NeuralNetwork.Network = Network;
    ResilientPropagation training = new ResilientPropagation(DataContainer.NeuralNetwork.Network, DataContainer.NeuralNetwork.Data);
    worker.ReportProgress(0, "Running Training: Epoch 0");
    for (int i = 0; i < 200; i++)
    {
        training.Iteration();
        worker.ReportProgress(0, "Running Training: Epoch " + (i + 1).ToString() + " Current Training Error : " + training.Error.ToString());
        if (worker.CancellationPending)
        {
            completed = true;
            return;
        }
    }
    completed = true;
}
Developer: ebosscha, Project: RailML-Neural, Lines: 26, Source file: PerLineClassification.cs
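This example relies on surrounding state that the snippet does not show: a BackgroundWorker field named worker, a completed flag, and an application-specific DataContainer holding the data set and the trained network. A minimal sketch of how such a worker might be wired up (the member names are taken from the snippet, but the wiring itself is an assumption, not code from the RailML-Neural project):

// Hypothetical wiring for the worker used above (requires System.ComponentModel).
// The DoWork handler that performs the actual preprocessing is omitted here.
private readonly BackgroundWorker worker = new BackgroundWorker
{
    WorkerReportsProgress = true,       // needed for worker.ReportProgress(...)
    WorkerSupportsCancellation = true   // needed for worker.CancellationPending
};
private bool completed;

private void StartPreprocessing()
{
    worker.ProgressChanged += (s, e) => Console.WriteLine(e.UserState);
    worker.RunWorkerCompleted += Preprocessing_Completed; // training starts once preprocessing finishes
    worker.RunWorkerAsync();
}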
Example 4: TestRPROPContPersistEG
public void TestRPROPContPersistEG()
{
    IMLDataSet trainingSet = XOR.CreateXORDataSet();
    BasicNetwork net1 = XOR.CreateUnTrainedXOR();
    BasicNetwork net2 = XOR.CreateUnTrainedXOR();
    ResilientPropagation rprop1 = new ResilientPropagation(net1, trainingSet);
    ResilientPropagation rprop2 = new ResilientPropagation(net2, trainingSet);
    rprop1.Iteration();
    rprop1.Iteration();
    rprop2.Iteration();
    rprop2.Iteration();
    TrainingContinuation cont = rprop2.Pause();
    EncogDirectoryPersistence.SaveObject(EG_FILENAME, cont);
    TrainingContinuation cont2 = (TrainingContinuation)EncogDirectoryPersistence.LoadObject(EG_FILENAME);
    ResilientPropagation rprop3 = new ResilientPropagation(net2, trainingSet);
    rprop3.Resume(cont2);
    rprop1.Iteration();
    rprop3.Iteration();
    for (int i = 0; i < net1.Flat.Weights.Length; i++)
    {
        Assert.AreEqual(net1.Flat.Weights[i], net2.Flat.Weights[i], 0.0001);
    }
}
Developer: OperatorOverload, Project: encog-cs, Lines: 32, Source file: TestPersistTrainingContinuation.cs
Example 5: TestRPROPCont
public void TestRPROPCont()
{
    IMLDataSet trainingSet = XOR.CreateXORDataSet();
    BasicNetwork net1 = XOR.CreateUnTrainedXOR();
    BasicNetwork net2 = XOR.CreateUnTrainedXOR();
    ResilientPropagation rprop1 = new ResilientPropagation(net1, trainingSet);
    ResilientPropagation rprop2 = new ResilientPropagation(net2, trainingSet);
    rprop1.Iteration();
    rprop1.Iteration();
    rprop2.Iteration();
    rprop2.Iteration();
    TrainingContinuation cont = rprop2.Pause();
    ResilientPropagation rprop3 = new ResilientPropagation(net2, trainingSet);
    rprop3.Resume(cont);
    rprop1.Iteration();
    rprop3.Iteration();
    for (int i = 0; i < net1.Flat.Weights.Length; i++)
    {
        Assert.AreEqual(net1.Flat.Weights[i], net2.Flat.Weights[i], 0.0001);
    }
}
Developer: johannsutherland, Project: encog-dotnet-core, Lines: 28, Source file: TestPersistTrainingContinuation.cs
Example 6: TestRPROP
public void TestRPROP()
{
    IMLDataSet trainingData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    BasicNetwork network = NetworkUtil.CreateXORNetworkUntrained();
    IMLTrain rprop = new ResilientPropagation(network, trainingData);
    NetworkUtil.TestTraining(rprop, 0.03);
}
Developer: OperatorOverload, Project: encog-cs, Lines: 8, Source file: TestTraining.cs
Example 7: TrainNetwork
public ResilientPropagation TrainNetwork(BasicNetwork network, BasicMLDataSet trainingData)
{
    var trainedNetwork = new ResilientPropagation(network, trainingData);
    var epoch = 0;
    do
    {
        trainedNetwork.Iteration();
        epoch++;
        Console.WriteLine("Epoch: {0}, Error: {1}", epoch, trainedNetwork.Error);
    } while (trainedNetwork.Error > 0.01);
    return trainedNetwork;
}
Developer: MacarioTala, Project: Learning-Machine-Learning, Lines: 13, Source file: BasicNeuralNetFunctions.cs
Example 8: TestRPROPFolded
public void TestRPROPFolded()
{
    IMLDataSet trainingData = XOR.CreateNoisyXORDataSet(10);
    BasicNetwork network = NetworkUtil.CreateXORNetworkUntrained();
    var folded = new FoldedDataSet(trainingData);
    IMLTrain train = new ResilientPropagation(network, folded);
    var trainFolded = new CrossValidationKFold(train, 4);
    EncogUtility.TrainToError(trainFolded, 0.2);
    XOR.VerifyXOR((IMLRegression)trainFolded.Method, 0.2);
}
Developer: CreativelyMe, Project: encog-dotnet-core, Lines: 14, Source file: TestFolded.cs
Example 9: EvaluateTrain
/// <summary>
/// Evaluate how many training iterations the network can complete in a
/// fixed amount of time. Each iteration runs all of the training pairs
/// through the network.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <returns>The number of training iterations completed.</returns>
public static int EvaluateTrain(BasicNetwork network, IMLDataSet training)
{
    // train the neural network
    IMLTrain train = new ResilientPropagation(network, training);
    int iterations = 0;
    var watch = new Stopwatch();
    watch.Start();
    // Milis is a constant defined elsewhere in the class; train for 10 * Milis ms.
    while (watch.ElapsedMilliseconds < (10 * Milis))
    {
        iterations++;
        train.Iteration();
    }
    return iterations;
}
Developer: neismit, Project: emds, Lines: 24, Source file: Evaluate.cs
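The Milis constant referenced above is not included in the snippet. Judging from how it is used (the loop runs for 10 * Milis milliseconds), it presumably represents one second expressed in milliseconds, giving a ten-second benchmark window. A plausible definition:

// Assumed constant (not shown in the snippet): one second in milliseconds,
// so 10 * Milis is a ten-second benchmark window.
public const int Milis = 1000;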
Example 10: EvaluateTrain
public static int EvaluateTrain(BasicNetwork network, IMLDataSet training)
{
    IMLTrain train = new ResilientPropagation(network, training);
    int num = 0;
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();
    // 0x2710 = 10,000 ms: count how many iterations fit into ten seconds.
    while (stopwatch.ElapsedMilliseconds < 0x2710L)
    {
        num++;
        train.Iteration();
    }
    return num;
}
Developer: neismit, Project: emds, Lines: 17, Source file: Evaluate.cs
Example 11: EvaluateMPROP
public double EvaluateMPROP(BasicNetwork network, IMLDataSet data)
{
    var train = new ResilientPropagation(network, data);
    long start = DateTime.Now.Ticks;
    Console.WriteLine(@"Training 20 Iterations with MPROP");
    for (int i = 1; i <= 20; i++)
    {
        train.Iteration();
        Console.WriteLine("Iteration #" + i + " Error:" + train.Error);
    }
    //train.finishTraining();
    long stop = DateTime.Now.Ticks;
    // Use TotalSeconds so runs longer than a minute are reported correctly.
    double diff = new TimeSpan(stop - start).TotalSeconds;
    Console.WriteLine("MPROP Result:" + diff + " seconds.");
    Console.WriteLine("Final MPROP error: " + network.CalculateError(data));
    return diff;
}
Developer: Romiko, Project: encog-dotnet-core, Lines: 17, Source file: MultiThreadBenchmark.cs
Example 12: Train
public int Train(DataSet dataSet)
{
    Network = new BasicNetwork();
    Network.AddLayer(new BasicLayer(null, true, 8 * 21));
    var first = ((8 * 21 + 4) * FirstLayerParameter);
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, (int)first));
    var second = ((8 * 21 + 4) * SecondLayerParameter);
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, (int)second));
    Network.AddLayer(new BasicLayer(null, false, 1));
    Network.Structure.FinalizeStructure();
    Network.Reset();
    var set = new double[dataSet.Signatures.Count + dataSet.Forgeries.Count][];
    var ideal = new double[dataSet.Signatures.Count + dataSet.Forgeries.Count][];
    for (int i = 0; i < dataSet.Signatures.Count; i++)
    {
        set[i] = dataSet.Signatures[i].Data.Cast<double>().ToArray();
        ideal[i] = new double[] { 1 };
    }
    for (int i = dataSet.Signatures.Count; i < dataSet.Signatures.Count + dataSet.Forgeries.Count; i++)
    {
        set[i] = dataSet.Forgeries[i - dataSet.Signatures.Count].Data.Cast<double>().ToArray();
        ideal[i] = new double[] { 0 };
    }
    IMLDataSet trainingSet = new BasicMLDataSet(set, ideal);
    IMLTrain train = new ResilientPropagation(Network, trainingSet);
    int epoch = 1;
    var errors = new List<double>();
    do
    {
        train.Iteration();
        epoch++;
        errors.Add(train.Error);
    } while (epoch < 10000);
    train.FinishTraining();
    return 1;
}
Developer: PawelReszka, Project: SignatureRecognition, Lines: 45, Source file: EncogNeuralNetwork.cs
Example 13: Main
static void Main(string[] args)
{
    double[][] XOR_Input =
    {
        new[] {0.0, 0.0},
        new[] {1.0, 0.0},
        new[] {0.0, 1.0},
        new[] {1.0, 1.0}
    };
    double[][] XOR_Ideal =
    {
        new[] {0.0},
        new[] {1.0},
        new[] {1.0},
        new[] {0.0}
    };
    var trainingSet = new BasicMLDataSet(XOR_Input, XOR_Ideal);
    BasicNetwork network = CreateNetwork();
    var train = new ResilientPropagation(network, trainingSet);
    int epoch = 1;
    do
    {
        train.Iteration();
        epoch++;
        Console.WriteLine("Iteration No: {0}, Error: {1}", epoch, train.Error);
    } while (train.Error > 0.001);
    foreach (var item in trainingSet)
    {
        var output = network.Compute(item.Input);
        Console.WriteLine("Input : {0}, {1} Ideal : {2} Actual : {3}", item.Input[0], item.Input[1], item.Ideal[0], output[0]);
    }
    Console.WriteLine("press any key to exit...");
    Console.ReadLine();
}
Developer: nmukh, Project: neural-networks, Lines: 44, Source file: XOR-Demo.cs
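Examples 13 and 19 call a CreateNetwork() helper that is not included in the snippets. Given the two-input, one-output XOR data they train on, a plausible implementation, mirroring the networks built explicitly in Examples 1 and 18 (the hidden-layer size of 3 is an assumption), would be:

// Hypothetical CreateNetwork() helper; the original projects may use a different topology.
private static BasicNetwork CreateNetwork()
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));                     // input layer with bias
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));  // hidden layer (size assumed)
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1)); // output layer
    network.Structure.FinalizeStructure();
    network.Reset();
    return network;
}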
Example 14: EvaluateTrain
/// <summary>
/// Evaluate how many training iterations the network can complete in a
/// fixed amount of time. Each iteration runs all of the training pairs
/// through the network. The clock is only checked every 256 iterations to
/// keep timer overhead out of the measurement.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <returns>The number of training iterations completed.</returns>
public static int EvaluateTrain(BasicNetwork network, IMLDataSet training)
{
    // train the neural network
    IMLTrain train = new ResilientPropagation(network, training);
    int iterations = 0;
    const int milis10 = Milis * 10;
    var watch = new Stopwatch();
    watch.Start();
    while (true)
    {
        iterations++;
        train.Iteration();
        // Stop once the time budget has elapsed (checked every 256 iterations).
        if ((iterations & 0xff) == 0 && watch.ElapsedMilliseconds >= milis10) break;
    }
    return iterations;
}
Developer: kedrzu, Project: encog-dotnet-core, Lines: 27, Source file: Evaluate.cs
Example 15: Evaluate
public static int Evaluate(BasicNetwork network, IMLDataSet training)
{
    ResilientPropagation rprop = new ResilientPropagation(network, training);
    int iterations = 0;
    for (;;)
    {
        rprop.Iteration();
        iterations++;
        if (rprop.Error < TARGET_ERROR)
        {
            return iterations;
        }
        if (iterations > 1000)
        {
            iterations = 0;
            return -1;
        }
    }
}
Developer: johannsutherland, Project: encog-dotnet-core, Lines: 21, Source file: ElliottBenchmark.cs
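TARGET_ERROR is a constant defined elsewhere in the benchmark class: the error level at which training counts as converged. The value below is an assumption for illustration only:

// Assumed convergence threshold for the benchmark above;
// the original class may use a different value.
public const double TARGET_ERROR = 0.01;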
Example 16: TestRPROPConsistency
public void TestRPROPConsistency()
{
    IMLDataSet training = EncoderTrainingFactory.generateTraining(4, false);
    var network = EncogUtility.SimpleFeedForward(4, 2, 0, 4, true);
    (new ConsistentRandomizer(-1, 1, 50)).Randomize(network);
    var rprop = new ResilientPropagation(network, training);
    for (var i = 0; i < 5; i++)
    {
        rprop.Iteration();
    }
    Assert.IsTrue(CompareArray.Compare(ExpectedWeights1, network.Flat.Weights, 0.00001));
    for (var i = 0; i < 5; i++)
    {
        rprop.Iteration();
    }
    Assert.IsTrue(CompareArray.Compare(ExpectedWeights2, network.Flat.Weights, 0.00001));
    var e = network.CalculateError(training);
    Assert.AreEqual(0.0767386807494191, e, 0.00001);
}
Developer: fxmozart, Project: encog-dotnet-core, Lines: 21, Source file: TestConsistency.cs
Example 17: Run
public double Run(List<int> topology, int iterations)
{
    _Network = new BasicNetwork();
    _Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, _Features));
    foreach (int layer in topology)
    {
        _Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, layer));
    }
    _Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
    _Network.Structure.FinalizeStructure();
    _Network.Reset();
    ITrain train = new ResilientPropagation(_Network, _TrainingSet);
    for (int i = 0; i < iterations; i++)
    {
        train.Iteration();
    }
    return train.Error;
}
Developer: KBrizzle, Project: TimeSeries, Lines: 21, Source file: NueralNetwork.cs
Example 18: Main
private static void Main(string[] args)
{
    // create a neural network, without using a factory
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset();
    // create training data
    IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
    // train the neural network
    IMLTrain train = new ResilientPropagation(network, trainingSet);
    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
    } while (train.Error > 0.01);
    train.FinishTraining();
    // test the neural network
    Console.WriteLine(@"Neural Network Results:");
    foreach (IMLDataPair pair in trainingSet)
    {
        IMLData output = network.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
            + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
    }
    EncogFramework.Instance.Shutdown();
}
Developer: vibhatha, Project: encog-sample-csharp, Lines: 38, Source file: Program.cs
Example 19: XORTest
private static void XORTest()
{
    double[][] XOR_Input =
    {
        new[] {0.0, 0.0},
        new[] {1.0, 0.0},
        new[] {0.0, 1.0},
        new[] {1.0, 1.0}
    };
    double[][] XOR_Ideal =
    {
        new[] {0.0},
        new[] {1.0},
        new[] {1.0},
        new[] {0.0}
    };
    var trainingSet = new BasicMLDataSet(XOR_Input, XOR_Ideal);
    var network = CreateNetwork();
    var train = new ResilientPropagation(network, trainingSet);
    int epoch = 1;
    do
    {
        train.Iteration();
        epoch++;
        Console.WriteLine($"Iteration No: {epoch}, Error: {train.Error}");
    } while (train.Error > 0.001);
    foreach (var item in trainingSet)
    {
        var output = network.Compute(item.Input);
        Console.WriteLine($"Input : {item.Input[0]}, {item.Input[1]}, Ideal: {item.Ideal[0]}, Actual : {output[0]}");
    }
}
Developer: podgito, Project: MachineLearning, Lines: 38, Source file: Program.cs
Example 20: Perform
public void Perform(int thread)
{
    var stopwatch = new Stopwatch();
    stopwatch.Start();
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(INPUT_COUNT));
    network.AddLayer(new BasicLayer(HIDDEN_COUNT));
    network.AddLayer(new BasicLayer(OUTPUT_COUNT));
    network.Structure.FinalizeStructure();
    network.Reset();
    IMLDataSet training = RandomTrainingFactory.Generate(1000, 50000,
        INPUT_COUNT, OUTPUT_COUNT, -1, 1);
    var rprop = new ResilientPropagation(network, training);
    rprop.ThreadCount = thread;
    for (int i = 0; i < 5; i++)
    {
        rprop.Iteration();
    }
    stopwatch.Stop();
    Console.WriteLine("Result with " + thread + " was " + stopwatch.ElapsedMilliseconds + "ms");
}
Developer: JDFagan, Project: encog-dotnet-core, Lines: 23, Source file: ThreadCount.cs
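The INPUT_COUNT, HIDDEN_COUNT, and OUTPUT_COUNT constants are defined elsewhere in the benchmark class; the values below are assumptions chosen only to make the sketch self-contained. A typical driver would call Perform with increasing thread counts to see how RPROP's multithreaded training scales:

// Assumed benchmark constants (not shown in the snippet); the original
// ThreadCount benchmark may use different network sizes.
public const int INPUT_COUNT = 40;
public const int HIDDEN_COUNT = 60;
public const int OUTPUT_COUNT = 20;

// Hypothetical driver: time RPROP with 1, 2, 4, and 8 worker threads.
public void Run()
{
    foreach (int threads in new[] { 1, 2, 4, 8 })
    {
        Perform(threads);
    }
}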
Note: The Encog.Neural.Networks.Training.Propagation.Resilient.ResilientPropagation class examples in this article were collected from GitHub, MSDocs, and other source-code and documentation platforms. The snippets come from open-source projects contributed by their respective authors; copyright remains with the original authors, and redistribution or use should follow the license of the corresponding project. Do not republish without permission.