This article collects typical usage examples of the C# class Encog.Neural.Networks.BasicNetwork. If you have been wondering how to use the BasicNetwork class in C#, what it is for, or what working examples look like, the hand-picked code samples below should help.
The BasicNetwork class belongs to the Encog.Neural.Networks namespace. Twenty code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better C# code samples.
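Many of the XOR examples below reference XORInput and XORIdeal arrays without showing their definitions. As a point of reference, here is a minimal sketch of the conventional Encog XOR truth-table data those snippets assume; the container name XorData is illustrative and not taken from any of the listed projects, which typically declare these as static fields of the Program or test class.

// Conventional XOR training data assumed by the XOR examples on this page.
// The class name "XorData" is an illustrative placeholder.
public static class XorData
{
    public static readonly double[][] XORInput =
    {
        new[] { 0.0, 0.0 },
        new[] { 0.0, 1.0 },
        new[] { 1.0, 0.0 },
        new[] { 1.0, 1.0 }
    };

    public static readonly double[][] XORIdeal =
    {
        new[] { 0.0 },
        new[] { 1.0 },
        new[] { 1.0 },
        new[] { 0.0 }
    };
}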
Example 1: Init
/// <inheritdoc />
public override void Init(BasicNetwork theNetwork, IMLDataSet theTraining)
{
    base.Init(theNetwork, theTraining);

    int weightCount = theNetwork.Structure.Flat.Weights.Length;
    _training = theTraining;
    _network = theNetwork;

    _hessianMatrix = new Matrix(weightCount, weightCount);
    _hessian = _hessianMatrix.Data;

    // create worker(s)
    var determine = new DetermineWorkload(ThreadCount, _training.Count);
    _workers = new ChainRuleWorker[determine.ThreadCount];

    int index = 0;

    // handle CPU
    foreach (IntRange r in determine.CalculateWorkers())
    {
        _workers[index++] = new ChainRuleWorker((FlatNetwork) _flat.Clone(),
                                                _training.OpenAdditional(), r.Low, r.High);
    }
}
Author: johannsutherland, Project: encog-dotnet-core, Lines: 28, Source: HessianCR.cs
Example 2: EvaluateNetworks
public static double EvaluateNetworks(BasicNetwork network, BasicMLDataSet set)
{
    int count = 0;
    int correct = 0;
    foreach (IMLDataPair pair in set)
    {
        IMLData input = pair.Input;
        IMLData actualData = pair.Ideal;
        IMLData predictData = network.Compute(input);

        double actual = actualData[0];
        double predict = predictData[0];
        double diff = Math.Abs(predict - actual);

        Direction actualDirection = DetermineDirection(actual);
        Direction predictDirection = DetermineDirection(predict);

        if (actualDirection == predictDirection)
            correct++;
        count++;

        Console.WriteLine(@"Number " + count + @": actual=" + Format.FormatDouble(actual, 4) + @"(" + actualDirection + @")"
                          + @",predict=" + Format.FormatDouble(predict, 4) + @"(" + predictDirection + @")" + @",diff=" + diff);
    }
    double percent = correct / (double)count;
    Console.WriteLine(@"Direction correct:" + correct + @"/" + count);
    Console.WriteLine(@"Directional Accuracy:" + Format.FormatPercent(percent));
    return percent;
}
Author: JDFagan, Project: encog-dotnet-core, Lines: 31, Source: CreateEval.cs
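Example 2 relies on a Direction enum and a DetermineDirection helper that are not shown on this page. A plausible minimal sketch follows, assuming the direction is simply the sign of the value; the names and the threshold are assumptions, not taken from the project.

// Assumed helper types for Example 2: direction of a price/value change.
public enum Direction
{
    Up,
    Down
}

public static Direction DetermineDirection(double d)
{
    // Assumed convention: non-negative values count as "up", negative as "down".
    return d >= 0 ? Direction.Up : Direction.Down;
}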
Example 3: Main
static void Main(string[] args)
{
    // create a neural network without using a factory
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset();

    IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
    IMLTrain train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine($"Epoch #{epoch} Error: {train.Error}");
        epoch++;
    } while (train.Error > 0.01);
    train.FinishTraining();

    Console.WriteLine("Neural Network Results:");
    foreach (IMLDataPair iPair in trainingSet)
    {
        IMLData output = network.Compute(iPair.Input);
        Console.WriteLine($"{iPair.Input[0]}, {iPair.Input[1]}, actual={output[0]}, ideal={iPair.Ideal[0]}");
    }

    EncogFramework.Instance.Shutdown();
    Console.ReadKey();
}
Author: zerazobz, Project: TestEncog, Lines: 34, Source: Program.cs
Example 4: Run
public override void Run()
{
    testNetwork = new BasicNetwork();
    testNetwork.AddLayer(new BasicLayer(null, true, 2));
    testNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
    testNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    testNetwork.Structure.FinalizeStructure();
    testNetwork.Reset();

    // create training data
    IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

    // train the neural network
    IMLTrain train = new Backpropagation(testNetwork, trainingSet);
    //IMLTrain train = new ResilientPropagation(testNetwork, trainingSet); // the Encog manual recommends this as the best general-purpose trainer

    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
    } while (train.Error > 0.0001);

    // test the neural network
    Console.WriteLine(@"Neural Network Results:");
    foreach (IMLDataPair pair in trainingSet)
    {
        IMLData output = testNetwork.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                          + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
    }
}
Author: mgcarmueja, Project: MPTCE, Lines: 35, Source: EncogTestContainer.cs
Example 5: evaluateNetwork
public static void evaluateNetwork(BasicNetwork network, IMLDataSet training)
{
    double total = 0;
    int seed = 0;
    int completed = 0;

    Stopwatch sw = new Stopwatch();
    sw.Start();
    while (completed < SAMPLE_SIZE)
    {
        new ConsistentRandomizer(-1, 1, seed).Randomize(network);

        int iter = Evaluate(network, training);
        if (iter == -1)
        {
            seed++;
        }
        else
        {
            total += iter;
            seed++;
            completed++;
        }
    }
    sw.Stop();

    Console.WriteLine(network.GetActivation(1).GetType().Name + ": time="
                      + Format.FormatInteger((int)sw.ElapsedMilliseconds)
                      + "ms, Avg Iterations: "
                      + Format.FormatInteger((int)(total / SAMPLE_SIZE)));
}
Author: johannsutherland, Project: encog-dotnet-core, Lines: 34, Source: ElliottBenchmark.cs
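Example 5 calls an Evaluate helper and a SAMPLE_SIZE constant that are not shown here. The following sketch shows the assumed shape of that helper: train until the error drops below a target and return the iteration count, or -1 if the run fails to converge within a cap. The trainer choice, thresholds, and constant value are assumptions for illustration, not the project's actual values.

// Assumed constant: number of random restarts averaged by evaluateNetwork above.
private const int SAMPLE_SIZE = 60;

// Assumed helper: returns how many iterations were needed to reach the target error,
// or -1 if this random initialization did not converge within the cap.
public static int Evaluate(BasicNetwork network, IMLDataSet training)
{
    IMLTrain train = new ResilientPropagation(network, training);
    int iterations = 0;
    do
    {
        train.Iteration();
        iterations++;
        if (iterations > 1000)
            return -1; // did not converge for this seed
    } while (train.Error > 0.01);
    return iterations;
}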
Example 6: MeasurePerformance
/// <summary>
/// Measure the performance of the network
/// </summary>
/// <param name = "network">Network to analyze</param>
/// <param name = "dataset">Dataset with input and ideal data</param>
/// <returns>Fraction of output bits that the network predicted correctly.</returns>
public static double MeasurePerformance(BasicNetwork network, BasicNeuralDataSet dataset)
{
    int correctBits = 0;
    float threshold = 0.0f;
    IActivationFunction activationFunction = network.GetActivation(network.LayerCount - 1); // get the activation function of the output layer

    if (activationFunction is ActivationSigmoid)
    {
        threshold = 0.5f; /* > 0.5, range of sigmoid is [0..1] */
    }
    else if (activationFunction is ActivationTANH)
    {
        threshold = 0.0f; /* > 0, range of bipolar sigmoid is [-1..1] */
    }
    else
        throw new ArgumentException("Bad activation function");

    int n = (int) dataset.Count;

    Parallel.For(0, n, (i) =>
    {
        IMLData actualOutputs = network.Compute(dataset.Data[i].Input);
        lock (LockObject)
        {
            for (int j = 0, k = actualOutputs.Count; j < k; j++)
                if ((actualOutputs[j] > threshold && dataset.Data[i].Ideal[j] > threshold)
                    || (actualOutputs[j] < threshold && dataset.Data[i].Ideal[j] < threshold))
                    correctBits++;
        }
    });

    long totalOutputBitsCount = dataset.Count * dataset.Data[0].Ideal.Count;
    return (double) correctBits / totalOutputBitsCount;
}
Author: jorik041, Project: soundfingerprinting, Lines: 39, Source: NetworkPerformanceMeter.cs
Example 7: Preprocessing_Completed
private void Preprocessing_Completed(object sender, RunWorkerCompletedEventArgs e)
{
    worker.ReportProgress(0, "Creating Network...");
    BasicNetwork Network = new BasicNetwork();
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.InputSize));
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 50));
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.IdealSize));
    Network.Structure.FinalizeStructure();
    Network.Reset();
    DataContainer.NeuralNetwork.Network = Network;

    ResilientPropagation training = new ResilientPropagation(DataContainer.NeuralNetwork.Network, DataContainer.NeuralNetwork.Data);
    worker.ReportProgress(0, "Running Training: Epoch 0");
    for (int i = 0; i < 200; i++)
    {
        training.Iteration();
        worker.ReportProgress(0, "Running Training: Epoch " + (i + 1).ToString() + " Current Training Error : " + training.Error.ToString());
        if (worker.CancellationPending == true)
        {
            completed = true;
            return;
        }
    }
    completed = true;
}
Author: ebosscha, Project: RailML-Neural, Lines: 26, Source: PerLineClassification.cs
Example 8: TrainAdaline
public TrainAdaline(BasicNetwork network, IMLDataSet training, double learningRate)
    : base(TrainingImplementationType.Iterative)
{
    // This snippet comes from a decompiled assembly: the field names are obfuscated and
    // the goto-based control flow reduces to the single layer-count guard below.
    if (network.LayerCount > 2)
    {
        throw new NeuralNetworkError("An ADALINE network only has two layers.");
    }

    this._x87a7fc6a72741c2e = network;      // the network being trained
    this._x823a2b9c8bf459c5 = training;     // the training set
    this._x9b481c22b6706459 = learningRate; // the learning rate
}
Author: neismit, Project: emds, Lines: 25, Source: TrainAdaline.cs
Example 9: JacobianChainRule
public JacobianChainRule(BasicNetwork network, IMLDataSet indexableTraining)
{
    // Decompiled snippet: the field names are obfuscated and the goto-based control flow
    // reduces to straight-line initialization of the Jacobian working buffers.
    this._xb12276308f0fa6d9 = indexableTraining;                   // training data
    this._x87a7fc6a72741c2e = network;                             // the network
    this._xabb126b401219ba2 = network.Structure.CalculateSize();   // number of weights
    this._x530ae94d583e0ea1 = (int) this._xb12276308f0fa6d9.Count; // number of training rows
    this._xbdeab667c25bbc32 = EngineArray.AllocateDouble2D(this._x530ae94d583e0ea1, this._xabb126b401219ba2);
    this._xc8a462f994253347 = new double[this._x530ae94d583e0ea1];

    BasicMLData data = new BasicMLData(this._xb12276308f0fa6d9.InputSize);
    BasicMLData data2 = new BasicMLData(this._xb12276308f0fa6d9.IdealSize);
    this._x61830ac74d65acc3 = new BasicMLDataPair(data, data2);
}
Author: neismit, Project: emds, Lines: 29, Source: JacobianChainRule.cs
Example 10: Main
static void Main(string[] args)
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset();

    var trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
    var train = new ResilientPropagation(network, trainingSet);

    var epoch = 1;
    do
    {
        train.Iteration();
    } while (train.Error > 0.01);
    train.FinishTraining();

    foreach (var pair in trainingSet)
    {
        var output = network.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @", " + pair.Input[1] + @" , actual=" + output[0] + @", ideal=" + pair.Ideal[0]);
    }

    EncogFramework.Instance.Shutdown();
    Console.ReadLine();
}
Author: akucherk, Project: HelloSystem, Lines: 29, Source: Program.cs
Example 11: Execute
/// <summary>
/// Program entry point.
/// </summary>
/// <param name="app">Holds arguments and other info.</param>
public void Execute(IExampleInterface app)
{
    // create a neural network, without using a factory
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset();

    // create training data
    IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

    // train the neural network
    IMLTrain train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
    } while (train.Error > 0.01);

    // test the neural network
    Console.WriteLine(@"Neural Network Results:");
    foreach (IMLDataPair pair in trainingSet)
    {
        IMLData output = network.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                          + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
    }
}
Author: johannsutherland, Project: encog-dotnet-core, Lines: 38, Source: XORHelloWorld.cs
Example 12: SaveNetwork
/// <summary>
/// Saves the network to the specified directory with the specified file name.
/// </summary>
/// <param name="directory">The directory.</param>
/// <param name="file">The file.</param>
/// <param name="anetwork">The network to save.</param>
public static void SaveNetwork(string directory, string file, BasicNetwork anetwork)
{
    FileInfo networkFile = FileUtil.CombinePath(new FileInfo(directory), file);
    EncogDirectoryPersistence.SaveObject(networkFile, anetwork);
}
Author: tonyc2a, Project: encog-dotnet-core, Lines: 13, Source: NetworkUtility.cs
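The counterpart of SaveNetwork is reading the file back with EncogDirectoryPersistence.LoadObject and casting the result to BasicNetwork. A minimal sketch in the same snippet style, assuming the same path convention as above; the helper name LoadNetwork is an assumption, not part of the project.

// Assumed companion helper: load a previously saved network from the same path convention.
public static BasicNetwork LoadNetwork(string directory, string file)
{
    FileInfo networkFile = FileUtil.CombinePath(new FileInfo(directory), file);

    // EncogDirectoryPersistence returns the persisted object; cast it back to BasicNetwork.
    return (BasicNetwork) EncogDirectoryPersistence.LoadObject(networkFile);
}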
Example 13: TestSingleOutput
public void TestSingleOutput()
{
    BasicNetwork network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    (new ConsistentRandomizer(-1, 1)).Randomize(network);

    IMLDataSet trainingData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);

    HessianFD testFD = new HessianFD();
    testFD.Init(network, trainingData);
    testFD.Compute();

    HessianCR testCR = new HessianCR();
    testCR.Init(network, trainingData);
    testCR.Compute();

    //dump(testFD, "FD");
    //dump(testCR, "CR");
    Assert.IsTrue(testCR.HessianMatrix.equals(testFD.HessianMatrix, 4));
}
Author: JDFagan, Project: encog-dotnet-core, Lines: 25, Source: TestHessian.cs
Example 14: BenchmarkEncog
public static long BenchmarkEncog(double[][] input, double[][] output)
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, input[0].Length));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, HIDDEN_COUNT));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, output[0].Length));
    network.Structure.FinalizeStructure();
    network.Reset(23); // constant seed for repeatable testing

    IMLDataSet trainingSet = new BasicMLDataSet(input, output);

    // train the neural network
    IMLTrain train = new Backpropagation(network, trainingSet, 0.7, 0.7);

    var sw = new Stopwatch();
    sw.Start();
    // run ITERATIONS epochs of the learning procedure
    for (int i = 0; i < ITERATIONS; i++)
    {
        train.Iteration();
    }
    sw.Stop();

    return sw.ElapsedMilliseconds;
}
Author: johannsutherland, Project: encog-dotnet-core, Lines: 28, Source: SimpleBenchmark.cs
Example 15: RandomizeSynapse
/// <summary>
/// Randomize the connections between two layers.
/// </summary>
/// <param name="network">The network to randomize.</param>
/// <param name="fromLayer">The starting layer.</param>
private void RandomizeSynapse(BasicNetwork network, int fromLayer)
{
    int toLayer = fromLayer + 1;
    int toCount = network.GetLayerNeuronCount(toLayer);
    int fromCount = network.GetLayerNeuronCount(fromLayer);
    int fromCountTotalCount = network.GetLayerTotalNeuronCount(fromLayer);
    IActivationFunction af = network.GetActivation(toLayer);
    double low = CalculateRange(af, Double.NegativeInfinity);
    double high = CalculateRange(af, Double.PositiveInfinity);
    double b = 0.7d * Math.Pow(toCount, (1d / fromCount)) / (high - low);

    for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
    {
        if (fromCount != fromCountTotalCount)
        {
            double w = RangeRandomizer.Randomize(-b, b);
            network.SetWeight(fromLayer, fromCount, toNeuron, w);
        }
        for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
        {
            double w = RangeRandomizer.Randomize(0, b);
            network.SetWeight(fromLayer, fromNeuron, toNeuron, w);
        }
    }
}
Author: jongh0, Project: MTree, Lines: 31, Source: NguyenWidrowRandomizer.cs
Example 16: NeuralRobot
public NeuralRobot(BasicNetwork network, bool track, Position source, Position destination)
{
    _hStats = new NormalizedField(NormalizationAction.Normalize, "Heading", 359, 0, .9, -.9);
    _CanGoStats = new NormalizedField(NormalizationAction.Normalize, "CanGo", 1, 0, 0.9, -0.9);
    _track = track;
    _network = network;
    sim = new RobotSimulator(source, destination);
}
Author: tmassey, Project: mtos, Lines: 8, Source: NeuralRobot.cs
Example 17: EvaluateNetwork
public void EvaluateNetwork(BasicNetwork trainedNetwork, BasicMLDataSet trainingData)
{
    foreach (var trainingItem in trainingData)
    {
        var output = trainedNetwork.Compute(trainingItem.Input);
        Console.WriteLine("Input:{0}, {1} Ideal: {2} Actual : {3}", trainingItem.Input[0], trainingItem.Input[1], trainingItem.Ideal, output[0]);
    }
    Console.ReadKey();
}
Author: MacarioTala, Project: Learning-Machine-Learning, Lines: 9, Source: BasicNeuralNetFunctions.cs
Example 18: frmTest
/// <summary>
/// Create a new tester form
/// </summary>
/// <param name="network">Trained neural network to test</param>
/// <param name="inputFields">List of input fields from Encog Analyst</param>
/// <param name="outputFields">List of output fields from Encog Analyst</param>
public frmTest(BasicNetwork network, List<AnalystField> inputFields,
               List<AnalystField> outputFields)
{
    InitializeComponent();

    m_network = network;
    m_inputFields = inputFields;
    m_outputFields = outputFields;

    foreach (AnalystField field in inputFields)
    {
        switch (field.Name)
        {
            case "vCoverageType":
                foreach (ClassItem item in field.Classes)
                    cmbCoverageType.Items.Add(item.Name);
                cmbCoverageType.SelectedIndex = 0;
                break;
            case "vTransaction":
                foreach (ClassItem item in field.Classes)
                    cmbTransactionType.Items.Add(item.Name);
                cmbTransactionType.SelectedIndex = 0;
                break;
            case "nLoanAmount":
                trkLoanAmount.Minimum = (int)field.ActualLow;
                trkLoanAmount.Maximum = (int)field.ActualHigh;
                trkLoanAmount.TickFrequency = (int)(field.ActualHigh / 10.0);
                txtLoanAmount.Text = String.Format("{0:C2}", trkLoanAmount.Value);
                break;
            case "nLiens":
                trkLiens.Minimum = (int)field.ActualLow;
                trkLiens.Maximum = (int)field.ActualHigh;
                trkLiens.TickFrequency = (int)(field.ActualHigh / 10.0);
                txtLiens.Text = trkLiens.Value.ToString();
                break;
            case "nActions":
                trkActions.Minimum = (int)field.ActualLow;
                trkActions.Maximum = (int)field.ActualHigh;
                trkActions.TickFrequency = (int)(field.ActualHigh / 10.0);
                txtActions.Text = trkActions.Value.ToString();
                break;
            case "nAuditEntriesPerDay":
                trkAuditEntries.Minimum = (int)field.ActualLow;
                trkAuditEntries.Maximum = (int)field.ActualHigh;
                trkAuditEntries.TickFrequency = (int)(field.ActualHigh / 10.0);
                txtAuditEntries.Text = trkAuditEntries.Value.ToString();
                break;
            case "nTotalNotesPerDay":
                trkNotesLogged.Minimum = (int)field.ActualLow;
                trkNotesLogged.Maximum = (int)field.ActualHigh;
                trkNotesLogged.TickFrequency = (int)(field.ActualHigh / 10.0);
                txtNotesLogged.Text = trkNotesLogged.Value.ToString();
                break;
        }
    }
}
Author: benw408701, Project: MLHCTransactionPredictor, Lines: 62, Source: frmTest.cs
Example 19: CreateNetwork
public static void CreateNetwork(FileOps fileOps)
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(new ActivationLinear(), true, 4));
    network.AddLayer(new BasicLayer(new ActivationTANH(), true, 6));
    network.AddLayer(new BasicLayer(new ActivationTANH(), true, 2));
    network.Structure.FinalizeStructure();
    network.Reset();
    EncogDirectoryPersistence.SaveObject(fileOps.TrainedNeuralNetworkFile, network);
}
Author: MacarioTala, Project: Learning-Machine-Learning, Lines: 10, Source: Program.cs
Example 20: CreateThreeLayerNet
public static BasicNetwork CreateThreeLayerNet()
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(2));
    network.AddLayer(new BasicLayer(3));
    network.AddLayer(new BasicLayer(1));
    network.Structure.FinalizeStructure();
    network.Reset();
    return network;
}
Author: jongh0, Project: MTree, Lines: 10, Source: XOR.cs
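A hypothetical call site for the helper above, training the returned network on the XOR arrays sketched near the top of this page; the variable names and error target are illustrative, not taken from the project.

// Assumed usage of CreateThreeLayerNet with the XorData arrays defined earlier.
BasicNetwork network = CreateThreeLayerNet();
IMLDataSet trainingSet = new BasicMLDataSet(XorData.XORInput, XorData.XORIdeal);
IMLTrain train = new ResilientPropagation(network, trainingSet);
do
{
    train.Iteration();
} while (train.Error > 0.01);
train.FinishTraining();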
Note: The Encog.Neural.Networks.BasicNetwork examples in this article were collected from GitHub, MSDocs, and other source-code and documentation hosting platforms. The code fragments come from open-source projects contributed by their respective developers; copyright remains with the original authors, and redistribution or use should follow each project's license. Please do not republish without permission.