This article collects typical usage examples of the ActivationNetwork class in C#. If you are wondering what ActivationNetwork is for, how to use it, or what working code with it looks like, the selected class examples below may help.
Twenty code examples of the ActivationNetwork class are shown in what follows, sorted by popularity by default.
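Before the individual examples, here is a minimal end-to-end sketch of how an ActivationNetwork is typically trained, assuming the AForge.Neuro / Accord.Neuro APIs that the examples below rely on (ActivationNetwork, SigmoidFunction, BackPropagationLearning); namespaces and parameter values are illustrative and may need adjusting to your project.
using AForge.Neuro;
using AForge.Neuro.Learning;

// two inputs, two hidden neurons, one output
ActivationNetwork network = new ActivationNetwork(new SigmoidFunction(2), 2, 2, 1);
BackPropagationLearning teacher = new BackPropagationLearning(network);

// XOR training data
double[][] input = { new double[] { 0, 0 }, new double[] { 0, 1 }, new double[] { 1, 0 }, new double[] { 1, 1 } };
double[][] output = { new double[] { 0 }, new double[] { 1 }, new double[] { 1 }, new double[] { 0 } };

// run learning epochs until the summary error becomes small enough
double error = double.MaxValue;
while (error > 0.01)
    error = teacher.RunEpoch(input, output);

// query the trained network
double[] prediction = network.Compute(new double[] { 1, 0 });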
Example 1: BackPropagationLearning
/// <summary>
/// Initializes a new instance of the <see cref="BackPropagationLearning"/> class
/// </summary>
///
/// <param name="network">Network to teach</param>
///
public BackPropagationLearning( ActivationNetwork network )
{
this.network = network;
// create error and deltas arrays
neuronErrors = new double[network.LayersCount][];
weightsUpdates = new double[network.LayersCount][][];
thresholdsUpdates = new double[network.LayersCount][];
// initialize errors and deltas arrays for each layer
for ( int i = 0, n = network.LayersCount; i < n; i++ )
{
Layer layer = network[i];
neuronErrors[i] = new double[layer.NeuronsCount];
weightsUpdates[i] = new double[layer.NeuronsCount][];
thresholdsUpdates[i] = new double[layer.NeuronsCount];
// for each neuron
for ( int j = 0; j < layer.NeuronsCount; j++ )
{
weightsUpdates[i][j] = new double[layer.InputsCount];
}
}
}
Author: xieguigang, Project: Reference_SharedLib, Lines of code: 31, Source file: BackPropagationLearning.cs
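A hedged usage sketch for the teacher constructed above: training is a loop of RunEpoch calls, and the LearningRate and Momentum properties of BackPropagationLearning can be tuned beforehand (the values below are illustrative).
// network and training data as in the introductory sketch above
var teacher = new BackPropagationLearning(network)
{
    LearningRate = 0.1,
    Momentum = 0.5
};

for (int epoch = 0; epoch < 1000; epoch++)
{
    double error = teacher.RunEpoch(input, output);
    if (error < 1e-3)
        break; // stop early once the summary squared error is small
}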
Example 2: RunEpochTest1
public void RunEpochTest1()
{
Accord.Math.Tools.SetupGenerator(0);
double[][] input =
{
new double[] { -1, -1 },
new double[] { -1, 1 },
new double[] { 1, -1 },
new double[] { 1, 1 }
};
double[][] output =
{
new double[] { -1 },
new double[] { 1 },
new double[] { 1 },
new double[] { -1 }
};
Neuron.RandGenerator = new ThreadSafeRandom(0);
ActivationNetwork network = new ActivationNetwork(
new BipolarSigmoidFunction(2), 2, 2, 1);
var teacher = new LevenbergMarquardtLearning(network,
false, JacobianMethod.ByFiniteDifferences);
double error = 1.0;
while (error > 1e-5)
error = teacher.RunEpoch(input, output);
for (int i = 0; i < input.Length; i++)
Assert.AreEqual(network.Compute(input[i])[0], output[i][0], 0.1);
}
Author: qusma, Project: framework, Lines of code: 35, Source file: LevenbergMarquardtLearningTest.cs
Example 3: MulticlassTest1
public void MulticlassTest1()
{
Accord.Math.Tools.SetupGenerator(0);
Neuron.RandGenerator = new ThreadSafeRandom(0);
int numberOfInputs = 3;
int numberOfClasses = 4;
int hiddenNeurons = 5;
double[][] input =
{
new double[] { -1, -1, -1 }, // 0
new double[] { -1, 1, -1 }, // 1
new double[] { 1, -1, -1 }, // 1
new double[] { 1, 1, -1 }, // 0
new double[] { -1, -1, 1 }, // 2
new double[] { -1, 1, 1 }, // 3
new double[] { 1, -1, 1 }, // 3
new double[] { 1, 1, 1 } // 2
};
int[] labels =
{
0,
1,
1,
0,
2,
3,
3,
2,
};
double[][] outputs = Accord.Statistics.Tools
.Expand(labels, numberOfClasses, -1, 1);
var function = new BipolarSigmoidFunction(2);
var network = new ActivationNetwork(function,
numberOfInputs, hiddenNeurons, numberOfClasses);
new NguyenWidrow(network).Randomize();
var teacher = new LevenbergMarquardtLearning(network);
double error = Double.PositiveInfinity;
for (int i = 0; i < 10; i++)
error = teacher.RunEpoch(input, outputs);
for (int i = 0; i < input.Length; i++)
{
int answer;
double[] output = network.Compute(input[i]);
double response = output.Max(out answer);
int expected = labels[i];
Assert.AreEqual(expected, answer);
}
}
Author: RLaumeyer, Project: framework, Lines of code: 59, Source file: LevenbergMarquardtLearningTest.cs
Example 4: GaussianWeights
/// <summary>
/// Constructs a new Gaussian Weight initialization.
/// </summary>
///
/// <param name="network">The activation network whose weights will be initialized.</param>
/// <param name="stdDev">The standard deviation to be used. Common values lie in the 0.001-
/// 0.1 range. Default is 0.1.</param>
///
public GaussianWeights(ActivationNetwork network, double stdDev = 0.1)
{
this.network = network;
this.random = new GaussianGenerator(0f, (float)stdDev, Accord.Math.Tools.Random.Next());
this.UpdateThresholds = false;
}
Author: qusma, Project: framework, Lines of code: 16, Source file: GaussianWeights.cs
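A short usage sketch, under the assumption that GaussianWeights exposes a Randomize() method analogous to the Nguyen-Widrow initializer in Example 6; the stdDev argument matches the constructor parameter shown above.
var network = new ActivationNetwork(new BipolarSigmoidFunction(2), 2, 5, 1);

// draw every weight from a zero-mean Gaussian with the chosen standard deviation
new GaussianWeights(network, stdDev: 0.05).Randomize();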
Example 5: DeltaRuleLearning
/// <summary>
/// Initializes a new instance of the <see cref="DeltaRuleLearning"/> class.
/// </summary>
///
/// <param name="network">Network to teach.</param>
///
/// <exception cref="ArgumentException">Invalid nuaral network. It should have one layer only.</exception>
///
public DeltaRuleLearning( ActivationNetwork network )
{
// check layers count
if ( network.Layers.Length != 1 )
{
throw new ArgumentException( "Invalid nuaral network. It should have one layer only." );
}
this.network = network;
}
Author: accord-net, Project: framework, Lines of code: 18, Source file: DeltaRuleLearning.cs
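Since DeltaRuleLearning only accepts single-layer networks, a usage sketch looks like the following; the LearningRate property and the RunEpoch signature are assumed to follow the same ISupervisedLearning conventions as the other teachers in this article.
// a single layer: two inputs feeding one output neuron
var network = new ActivationNetwork(new SigmoidFunction(2), 2, 1);
var teacher = new DeltaRuleLearning(network) { LearningRate = 0.1 };

// one pass over the training set; repeat until the error is acceptable
double error = teacher.RunEpoch(input, output);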
Example 6: NguyenWidrow
/// <summary>
/// Constructs a new Nguyen-Widrow Weight initialization.
/// </summary>
///
/// <param name="network">The activation network whose weights will be initialized.</param>
///
public NguyenWidrow(ActivationNetwork network)
{
this.network = network;
int hiddenNodes = network.Layers[0].Neurons.Length;
int inputNodes = network.Layers[0].InputsCount;
randRange = new Range(-0.5f, 0.5f);
beta = 0.7 * Math.Pow(hiddenNodes, 1.0 / inputNodes);
}
Author: qusma, Project: framework, Lines of code: 16, Source file: NguyenWidrow.cs
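Usage is a one-liner, exactly as Example 3 above already demonstrates:
// reinitialize all weights with the Nguyen-Widrow rule before training
new NguyenWidrow(network).Randomize();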
Example 7: AnnAgent
public AnnAgent(bool learn, int boardSize, byte player = 1)
{
learning = learn;
playerNumber = player;
int boardFields = boardSize * boardSize;
if(File.Exists("ann" + boardSize + ".bin"))
network = (ActivationNetwork)Serialization.LoadNetwork("ann" + boardSize + ".bin");
else
network = new ActivationNetwork(new BipolarSigmoidFunction(), boardFields, 5, boardFields * 2);
backProp = new BackPropagationLearning(network);
teacher = new MinimaxAgent(2, player);
}
Author: Armienn, Project: Virus, Lines of code: 12, Source file: ANNAgent.cs
Example 8: EvolutionaryFitness
/// <summary>
/// Initializes a new instance of the <see cref="EvolutionaryFitness"/> class.
/// </summary>
///
/// <param name="network">Neural network for which fitness will be calculated.</param>
/// <param name="input">Input data samples for neural network.</param>
/// <param name="output">Output data sampels for neural network (desired output).</param>
///
/// <exception cref="ArgumentException">Length of inputs and outputs arrays must be equal and greater than 0.</exception>
/// <exception cref="ArgumentException">Length of each input vector must be equal to neural network's inputs count.</exception>
///
public EvolutionaryFitness( ActivationNetwork network, double[][] input, double[][] output )
{
if ( ( input.Length == 0 ) || ( input.Length != output.Length ) )
{
throw new ArgumentException( "Length of inputs and outputs arrays must be equal and greater than 0." );
}
if ( network.InputsCount != input[0].Length )
{
throw new ArgumentException( "Length of each input vector must be equal to neural network's inputs count." );
}
this.network = network;
this.input = input;
this.output = output;
}
Author: accord-net, Project: framework, Lines of code: 27, Source file: EvolutionaryFitness.cs
Example 9: RunEpochTest1
public void RunEpochTest1()
{
Accord.Math.Tools.SetupGenerator(0);
double[][] input =
{
new double[] { -1, -1 },
new double[] { -1, 1 },
new double[] { 1, -1 },
new double[] { 1, 1 }
};
double[][] output =
{
new double[] { -1 },
new double[] { 1 },
new double[] { 1 },
new double[] { -1 }
};
Neuron.RandGenerator = new ThreadSafeRandom(0);
ActivationNetwork network = new ActivationNetwork(
new BipolarSigmoidFunction(2), 2, 2, 1);
var teacher = new ParallelResilientBackpropagationLearning(network);
double error = 1.0;
while (error > 1e-5)
error = teacher.RunEpoch(input, output);
for (int i = 0; i < input.Length; i++)
{
double actual = network.Compute(input[i])[0];
double expected = output[i][0];
Assert.AreEqual(expected, actual, 0.01);
Assert.IsFalse(Double.IsNaN(actual));
}
}
Author: CanerPatir, Project: framework, Lines of code: 39, Source file: ResilientPropagationLearningTest.cs
Example 10: TrainNewModel
public IForecastingModel TrainNewModel(double[][] iInput, double[][] iOutput)
{
int inputSize = iInput[0].Length, samplesNum = iOutput.Length;
if (samplesNum != iInput.Length)
throw new ArgumentException();
for (int i = 0; i < samplesNum;++i)
if (iInput[i].Length != inputSize || iOutput[i].Length != 1) // every input row must have inputSize values and every output row must hold exactly one value
throw new ArgumentException();
int[] neuronsCount = (int[]) ModelParametersDict[NeuronsInLayersKey];
string activationFunction = (string) ModelParametersDict[ActivationFunctionKey];
long maxIterNum = (long) ModelParametersDict[MaxIterationsNumberKey];
double stopError = (double)ModelParametersDict[StopErrorKey];
ActivationNetwork netToTrain = new ActivationNetwork(ActivationFunctionsDict[activationFunction], inputSize, neuronsCount);
DataNormalizer normalizer = new DataNormalizer(iInput.Concat(iOutput).ToArray());
IForecastingModel aModel = new ANNforecastingModel(netToTrain, normalizer);
ISupervisedLearning teacher = new ResilientBackpropagationLearning(netToTrain);
double[][] trainInputSet, trainOutputSet;
TrainingSubsetGenerator.GenerateRandomly(iInput, iOutput, out trainInputSet, out trainOutputSet, iMultiplier: TrainSubsetMultiplier);
trainInputSet = normalizer.Normalize(trainInputSet); trainOutputSet = normalizer.Normalize(trainOutputSet);
long epochsCount = 0;
double nextError = ErrorCalculator.CalculateMSE(aModel, iInput, iOutput), prevError;
do
{
prevError = nextError;
teacher.RunEpoch(trainInputSet, trainOutputSet);
nextError = ErrorCalculator.CalculateMSE(aModel, iInput, iOutput);
}
while (epochsCount++ <= maxIterNum && Math.Abs(prevError - nextError) >= stopError);
return aModel;
}
Author: vladislav-horbatiuk, Project: time-series-forecasting, Lines of code: 36, Source file: ANNmodelBuilder.cs
Example 11: EvolutionaryLearning
/// <summary>
/// Initializes a new instance of the <see cref="EvolutionaryLearning"/> class.
/// </summary>
///
/// <param name="activationNetwork">Activation network to be trained.</param>
/// <param name="populationSize">Size of genetic population.</param>
/// <param name="chromosomeGenerator">Random numbers generator used for initialization of genetic
/// population representing neural network's weights and thresholds (see <see cref="DoubleArrayChromosome.chromosomeGenerator"/>).</param>
/// <param name="mutationMultiplierGenerator">Random numbers generator used to generate random
/// factors for multiplication of network's weights and thresholds during genetic mutation
/// (see <see cref="DoubleArrayChromosome.mutationMultiplierGenerator"/>).</param>
/// <param name="mutationAdditionGenerator">Random numbers generator used to generate random
/// values added to neural network's weights and thresholds during genetic mutation
/// (see <see cref="DoubleArrayChromosome.mutationAdditionGenerator"/>).</param>
/// <param name="selectionMethod">Method of selection best chromosomes in genetic population.</param>
/// <param name="crossOverRate">Crossover rate in genetic population (see
/// <see cref="Population.CrossoverRate"/>).</param>
/// <param name="mutationRate">Mutation rate in genetic population (see
/// <see cref="Population.MutationRate"/>).</param>
/// <param name="randomSelectionRate">Rate of injection of random chromosomes during selection
/// in genetic population (see <see cref="Population.RandomSelectionPortion"/>).</param>
///
public EvolutionaryLearning( ActivationNetwork activationNetwork, int populationSize,
IRandomNumberGenerator chromosomeGenerator,
IRandomNumberGenerator mutationMultiplierGenerator,
IRandomNumberGenerator mutationAdditionGenerator,
ISelectionMethod selectionMethod,
double crossOverRate, double mutationRate, double randomSelectionRate )
{
// Check of assumptions during debugging only
Debug.Assert( activationNetwork != null );
Debug.Assert( populationSize > 0 );
Debug.Assert( chromosomeGenerator != null );
Debug.Assert( mutationMultiplierGenerator != null );
Debug.Assert( mutationAdditionGenerator != null );
Debug.Assert( selectionMethod != null );
Debug.Assert( crossOverRate >= 0.0 && crossOverRate <= 1.0 );
Debug.Assert( mutationRate >= 0.0 && mutationRate <= 1.0 );
Debug.Assert( randomSelectionRate >= 0.0 && randomSelectionRate <= 1.0 );
// network's parameters
this.network = activationNetwork;
this.numberOfNetworksWeights = CalculateNetworkSize( activationNetwork );
// population parameters
this.populationSize = populationSize;
this.chromosomeGenerator = chromosomeGenerator;
this.mutationMultiplierGenerator = mutationMultiplierGenerator;
this.mutationAdditionGenerator = mutationAdditionGenerator;
this.selectionMethod = selectionMethod;
this.crossOverRate = crossOverRate;
this.mutationRate = mutationRate;
this.randomSelectionRate = randomSelectionRate;
}
Author: EnergonV, Project: BestCS, Lines of code: 54, Source file: EvolutionaryLearning.cs
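Besides the fully parameterized constructor above, AForge also provides a simpler overload that only takes the network and the population size; a hedged sketch of genetic training with it:
var network = new ActivationNetwork(new BipolarSigmoidFunction(2), 2, 2, 1);

// genetic training with a population of 100 chromosomes and default operators
var teacher = new EvolutionaryLearning(network, 100);

for (int i = 0; i < 500; i++)
    teacher.RunEpoch(input, output);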
Example 12: RunEpochTest4
public void RunEpochTest4()
{
Accord.Math.Tools.SetupGenerator(0);
double[][] input =
{
new double[] { 0, 0 },
};
double[][] output =
{
new double[] { 0 },
};
Neuron.RandGenerator = new ThreadSafeRandom(0);
ActivationNetwork network = new ActivationNetwork(
new BipolarSigmoidFunction(2), 2, 1);
var teacher = new LevenbergMarquardtLearning(network,
true, JacobianMethod.ByBackpropagation);
double error = 1.0;
for (int i = 0; i < 1000; i++)
error = teacher.RunEpoch(input, output);
for (int i = 0; i < input.Length; i++)
Assert.AreEqual(network.Compute(input[i])[0], output[i][0], 0.1);
}
Author: RLaumeyer, Project: framework, Lines of code: 28, Source file: LevenbergMarquardtLearningTest.cs
Example 13: ConstructorTest
public void ConstructorTest()
{
// Four training samples of the xor function
// two inputs (x and y)
double[][] input =
{
new double[] { -1, -1 },
new double[] { -1, 1 },
new double[] { 1, -1 },
new double[] { 1, 1 }
};
// one output (z = x ^ y)
double[][] output =
{
new double[] { -1 },
new double[] { 1 },
new double[] { 1 },
new double[] { -1 }
};
// create multi-layer neural network
ActivationNetwork network = new ActivationNetwork(
new BipolarSigmoidFunction(2), // use a bipolar sigmoid activation function
2, // two inputs
3, // three hidden neurons
1 // one output neuron
);
// create teacher
LevenbergMarquardtLearning teacher = new LevenbergMarquardtLearning(
network, // the neural network
false, // whether or not to use Bayesian regularization
JacobianMethod.ByBackpropagation // Jacobian calculation method
);
// set learning rate
teacher.LearningRate = 0.1f;
// start the supervised learning
for (int i = 0; i < 1000; i++)
{
double error = teacher.RunEpoch(input, output);
}
// If we reached here, the constructor test has passed.
}
Author: RLaumeyer, Project: framework, Lines of code: 50, Source file: LevenbergMarquardtLearningTest.cs
Example 14: SearchSolution
// Worker thread
void SearchSolution( )
{
// initialize input and output values
double[][] input = null;
double[][] output = null;
if ( sigmoidType == 0 )
{
// unipolar data
input = new double[4][] {
new double[] {0, 0},
new double[] {0, 1},
new double[] {1, 0},
new double[] {1, 1}
};
output = new double[4][] {
new double[] {0},
new double[] {1},
new double[] {1},
new double[] {0}
};
}
else
{
// bipolar data
input = new double[4][] {
new double[] {-1, -1},
new double[] {-1, 1},
new double[] { 1, -1},
new double[] { 1, 1}
};
output = new double[4][] {
new double[] {-1},
new double[] { 1},
new double[] { 1},
new double[] {-1}
};
}
// create neural network
ActivationNetwork network = new ActivationNetwork(
( sigmoidType == 0 ) ?
(IActivationFunction) new SigmoidFunction( sigmoidAlphaValue ) :
(IActivationFunction) new BipolarSigmoidFunction( sigmoidAlphaValue ),
2, 2, 1 );
// create teacher
var teacher = new ParallelResilientBackpropagationLearning(network);
// reset the learning procedure with the initial step
teacher.Reset(initialStep);
// iterations
int iteration = 0;
// statistic files
StreamWriter errorsFile = null;
try
{
// check if we need to save statistics to files
if ( saveStatisticsToFiles )
{
// open files
errorsFile = File.CreateText( "errors.csv" );
}
// errors list
ArrayList errorsList = new ArrayList( );
// loop
while ( !needToStop )
{
// run epoch of learning procedure
double error = teacher.RunEpoch( input, output );
errorsList.Add( error );
// save current error
if ( errorsFile != null )
{
errorsFile.WriteLine( error );
}
// show current iteration & error
SetText( currentIterationBox, iteration.ToString( ) );
SetText( currentErrorBox, error.ToString( ) );
iteration++;
// check if we need to stop
if ( error <= learningErrorLimit )
break;
}
// show error's dynamics
double[,] errors = new double[errorsList.Count, 2];
for ( int i = 0, n = errorsList.Count; i < n; i++ )
//......... part of the code is omitted here .........
Author: xyicheng, Project: Accord, Lines of code: 101, Source file: MainForm.cs
Example 15: RunEpochTest3
public void RunEpochTest3()
{
double[,] dataset = yinyang;
double[][] input = dataset.GetColumns(0, 1).ToArray();
double[][] output = dataset.GetColumn(2).ToArray();
Neuron.RandGenerator = new ThreadSafeRandom(0);
ActivationNetwork network = new ActivationNetwork(
new BipolarSigmoidFunction(2), 2, 5, 1);
var teacher = new LevenbergMarquardtLearning(network,
true, JacobianMethod.ByBackpropagation);
Assert.IsTrue(teacher.UseRegularization);
double error = 1.0;
for (int i = 0; i < 500; i++)
error = teacher.RunEpoch(input, output);
double[][] actual = new double[output.Length][];
for (int i = 0; i < input.Length; i++)
actual[i] = network.Compute(input[i]);
for (int i = 0; i < input.Length; i++)
Assert.AreEqual(Math.Sign(output[i][0]), Math.Sign(actual[i][0]));
}
Author: RLaumeyer, Project: framework, Lines of code: 29, Source file: LevenbergMarquardtLearningTest.cs
Example 16: loadFileToolStripMenuItem_Click
private void loadFileToolStripMenuItem_Click(object sender, EventArgs e)
{
var r = openFileDialog1.ShowDialog();
if (r != System.Windows.Forms.DialogResult.Cancel)
{
try
{
var n = Network.Load(openFileDialog1.FileName);
network = n as ActivationNetwork;
}
catch (Exception eg)
{
MessageBox.Show("Error occured!");
}
}
}
Author: venusdharan, Project: Neural-net-trainer, Lines of code: 18, Source file: BinarySplitForm.cs
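The counterpart of the loading code above is persisting a trained network; AForge's Network.Save serializes the object to a file, so the result of Network.Load can be cast back to ActivationNetwork just as this example does (the file name below is illustrative).
// persist the trained network...
network.Save("network.bin");

// ...and restore it later
var restored = (ActivationNetwork)Network.Load("network.bin");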
Example 17: JacobianByChainRuleTest
public void JacobianByChainRuleTest()
{
// Network with one hidden layer: 2-2-1
Accord.Math.Tools.SetupGenerator(0);
double[][] input =
{
new double[] { -1, -1 },
new double[] { -1, 1 },
new double[] { 1, -1 },
new double[] { 1, 1 }
};
double[][] output =
{
new double[] { -1 },
new double[] { 1 },
new double[] { 1 },
new double[] { -1 }
};
Neuron.RandGenerator = new ThreadSafeRandom(0);
ActivationNetwork network = new ActivationNetwork(
new BipolarSigmoidFunction(2), 2, 2, 1);
var teacher1 = new LevenbergMarquardtLearning(network,
false, JacobianMethod.ByFiniteDifferences);
var teacher2 = new LevenbergMarquardtLearning(network,
false, JacobianMethod.ByBackpropagation);
// Set lambda to lambda max so no iterations are performed
teacher1.LearningRate = 1e30f;
teacher2.LearningRate = 1e30f;
teacher1.RunEpoch(input, output);
teacher2.RunEpoch(input, output);
PrivateObject privateTeacher1 = new PrivateObject(teacher1);
PrivateObject privateTeacher2 = new PrivateObject(teacher2);
var jacobian1 = (float[][])privateTeacher1.GetField("jacobian");
var jacobian2 = (float[][])privateTeacher2.GetField("jacobian");
Assert.AreEqual(jacobian1[0][0], -0.47895513745387097, 1e-6);
Assert.AreEqual(jacobian1[0][1], -0.05863886707282373, 1e-6);
Assert.AreEqual(jacobian1[0][2], 0.057751100929897485, 1e-6);
Assert.AreEqual(jacobian1[0][3], 0.0015185010717608583, 1e-6);
Assert.AreEqual(jacobian1[7][0], -0.185400783651892, 1e-6);
Assert.AreEqual(jacobian1[7][1], 0.025575161626462877, 1e-6);
Assert.AreEqual(jacobian1[7][2], 0.070494677797224889, 1e-6);
Assert.AreEqual(jacobian1[7][3], 0.037740463822781616, 1e-6);
Assert.AreEqual(jacobian2[0][0], -0.4789595904719437, 1e-6);
Assert.AreEqual(jacobian2[0][1], -0.058636153936941729, 1e-6);
Assert.AreEqual(jacobian2[0][2], 0.057748435491340212, 1e-6);
Assert.AreEqual(jacobian2[0][3], 0.0015184453425611988, 1e-6);
Assert.AreEqual(jacobian2[7][0], -0.1854008206574258, 1e-6);
Assert.AreEqual(jacobian2[7][1], 0.025575150379247645, 1e-6);
Assert.AreEqual(jacobian2[7][2], 0.070494269423259301, 1e-6);
Assert.AreEqual(jacobian2[7][3], 0.037740117733922635, 1e-6);
for (int i = 0; i < jacobian1.Length; i++)
{
for (int j = 0; j < jacobian1[i].Length; j++)
{
double j1 = jacobian1[i][j];
double j2 = jacobian2[i][j];
Assert.AreEqual(j1, j2, 1e-4);
Assert.IsFalse(Double.IsNaN(j1));
Assert.IsFalse(Double.IsNaN(j2));
}
}
}
Author: qusma, Project: framework, Lines of code: 82, Source file: LevenbergMarquardtLearningTest.cs
Example 18: JacobianByChainRuleTest4
public void JacobianByChainRuleTest4()
{
// Network with no hidden layers: 2-1
double[][] input =
{
new double[] {-1, -1 },
new double[] {-1, 1 },
new double[] { 1, -1 },
new double[] { 1, 1 }
};
double[][] output =
{
new double[] {-1 },
new double[] { 1 },
new double[] { 1 },
new double[] {-1 }
};
Neuron.RandGenerator = new ThreadSafeRandom(0);
ActivationNetwork network = new ActivationNetwork(
new BipolarSigmoidFunction(2), 2, 1);
var teacher1 = new LevenbergMarquardtLearning(network,
false, JacobianMethod.ByFiniteDifferences);
var teacher2 = new LevenbergMarquardtLearning(network,
false, JacobianMethod.ByBackpropagation);
// Set lambda to lambda max so no iterations are performed
teacher1.LearningRate = 1e30f;
teacher2.LearningRate = 1e30f;
teacher1.RunEpoch(input, output);
teacher2.RunEpoch(input, output);
PrivateObject privateTeacher1 = new PrivateObject(teacher1);
PrivateObject privateTeacher2 = new PrivateObject(teacher2);
var jacobian1 = (float[][])privateTeacher1.GetField("jacobian");
var jacobian2 = (float[][])privateTeacher2.GetField("jacobian");
for (int i = 0; i < jacobian1.Length; i++)
{
for (int j = 0; j < jacobian1[i].Length; j++)
{
double j1 = jacobian1[i][j];
double j2 = jacobian2[i][j];
Assert.AreEqual(j1, j2, 1e-5);
Assert.IsFalse(Double.IsNaN(j1));
Assert.IsFalse(Double.IsNaN(j2));
}
}
}
Author: RLaumeyer, Project: framework, Lines of code: 59, Source file: LevenbergMarquardtLearningTest.cs
Example 19: BlockHessianTest1
public void BlockHessianTest1()
{
// Network with no hidden layers: 2-1
Accord.Math.Tools.SetupGenerator(0);
double[][] input =
{
new double[] {-1, -1 },
new double[] {-1, 1 },
new double[] { 1, -1 },
new double[] { 1, 1 }
};
double[][] output =
{
new double[] {-1 },
new double[] { 1 },
new double[] { 1 },
new double[] {-1 }
};
Neuron.RandGenerator = new ThreadSafeRandom(0);
ActivationNetwork network = new ActivationNetwork(
new BipolarSigmoidFunction(2), 2, 1);
var teacher1 = new LevenbergMarquardtLearning(network,
false, JacobianMethod.ByFiniteDifferences);
var teacher2 = new LevenbergMarquardtLearning(network,
false, JacobianMethod.ByBackpropagation);
teacher2.Blocks = 2;
// Set lambda to lambda max so no iterations are performed
teacher1.LearningRate = 1e30f;
teacher2.LearningRate = 1e30f;
teacher1.RunEpoch(input, output);
teacher2.RunEpoch(input, output);
var hessian1 = teacher1.Hessian;
var hessian2 = teacher2.Hessian; // compare against the teacher that computes the Hessian in blocks
for (int i = 0; i < hessian1.Length; i++)
{
for (int j = 0; j < hessian1[i].Length; j++)
{
double j1 = hessian1[i][j];
double j2 = hessian2[i][j];
Assert.AreEqual(j1, j2, 1e-4);
Assert.IsFalse(Double.IsNaN(j1));
Assert.IsFalse(Double.IsNaN(j2));
}
}
Assert.IsTrue(hessian1.IsUpperTriangular());
Assert.IsTrue(hessian2.IsUpperTriangular());
var gradient1 = teacher1.Gradient;
var gradient2 = teacher2.Gradient;
for (int i = 0; i < gradient1.Length; i++)
{
double j1 = gradient1[i];
double j2 = gradient2[i];
Assert.AreEqual(j1, j2, 1e-5);
Assert.IsFalse(Double.IsNaN(j1));
Assert.IsFalse(Double.IsNaN(j2));
}
}
Author: RLaumeyer, Project: framework, Lines of code: 76, Source file: LevenbergMarquardtLearningTest.cs
Example 20: computeError
private static double computeError(double[][] inputs, double[][] outputs, ActivationNetwork ann)
{
// Compute the machine outputs
int miss = 0;
for (int i = 0; i < inputs.Length; i++)
{
var y = System.Math.Sign(ann.Compute(inputs[i])[0]);
var o = outputs[i][0];
if (y != o) miss++;
}
return (double)miss / inputs.Length;
}
Author: Bruhankovi4, Project: Emotyper, Lines of code: 13, Source file: MainForm.cs
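A sketch of how such a helper is typically combined with the teachers shown earlier in this article to monitor the misclassification rate during training (loop limits are illustrative):
var teacher = new ParallelResilientBackpropagationLearning(ann);

for (int epoch = 0; epoch < 100; epoch++)
{
    teacher.RunEpoch(inputs, outputs);

    // fraction of training samples whose sign does not match the target
    double missRate = computeError(inputs, outputs, ann);
    if (missRate == 0)
        break;
}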
Note: The ActivationNetwork class examples in this article were compiled from source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors. Please consult the corresponding project's license before using or redistributing the code; do not republish without permission.