This article collects and summarizes typical usage examples of the C# class Accord.MachineLearning.VectorMachines.KernelSupportVectorMachine. If you have been wondering what the KernelSupportVectorMachine class is for, how to use it, or what working examples look like, the hand-picked class code examples below should help.
The KernelSupportVectorMachine class belongs to the Accord.MachineLearning.VectorMachines namespace. Twenty code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# code examples.
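Before looking at the individual examples, the following minimal sketch shows the workflow most of them share: construct a machine with a kernel and an input dimension, train it with a teacher's Run() method, and classify with the sign of Compute(). It targets the same Accord.NET 2.x/3.0-era API used throughout the examples below; the toy data and the Gaussian kernel width are illustrative only.
using System;
using Accord.MachineLearning.VectorMachines;
using Accord.MachineLearning.VectorMachines.Learning;
using Accord.Statistics.Kernels;

class QuickStart
{
    static void Main()
    {
        // Toy two-class problem: points near (0,0) are labeled -1, points near (1,1) are +1.
        double[][] inputs =
        {
            new double[] { 0.0, 0.1 },
            new double[] { 0.1, 0.0 },
            new double[] { 0.9, 1.0 },
            new double[] { 1.0, 0.9 },
        };
        int[] outputs = { -1, -1, +1, +1 };

        // Kernel SVM with a Gaussian kernel over 2 input dimensions (kernel width is illustrative).
        var machine = new KernelSupportVectorMachine(new Gaussian(0.5), inputs: 2);

        // Train with Sequential Minimal Optimization.
        var teacher = new SequentialMinimalOptimization(machine, inputs, outputs)
        {
            Complexity = 1.0
        };
        double error = teacher.Run();

        // Classify a new point: the sign of Compute() gives the predicted class.
        int predicted = Math.Sign(machine.Compute(new double[] { 0.95, 0.95 }));
        Console.WriteLine("training error = {0}, prediction = {1}", error, predicted);
    }
}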
Example 1: LearnTest
public void LearnTest()
{
double[][] inputs =
{
new double[] { -1, -1 },
new double[] { -1, 1 },
new double[] { 1, -1 },
new double[] { 1, 1 }
};
int[] xor =
{
-1,
1,
1,
-1
};
// Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree
KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Polynomial(2), inputs[0].Length);
// Create the Least Squares Support Vector Machine teacher
LeastSquaresLearning learn = new LeastSquaresLearning(machine, inputs, xor);
learn.Complexity = 10;
// Run the learning algorithm
learn.Run();
int[] output = inputs.Apply(p => Math.Sign(machine.Compute(p)));
for (int i = 0; i < output.Length; i++)
Assert.AreEqual(System.Math.Sign(xor[i]), System.Math.Sign(output[i]));
}
Developer: qusma, Project: framework, Lines: 35, Source: LeastSquaresLearningTest.cs
Example 2: LearnTest
public void LearnTest()
{
double[][] inputs =
{
new double[] { -1, -1 },
new double[] { -1, 1 },
new double[] { 1, -1 },
new double[] { 1, 1 }
};
int[] xor =
{
-1,
1,
1,
-1
};
// Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree
KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Polynomial(2), inputs[0].Length);
// Create the sequential minimal optimization teacher
SequentialMinimalOptimization learn = new SequentialMinimalOptimization(machine, inputs, xor);
// Run the learning algorithm
learn.Run();
int[] output = inputs.Apply(p => Math.Sign(machine.Compute(p)));
for (int i = 0; i < output.Length; i++)
Assert.AreEqual(System.Math.Sign(xor[i]), System.Math.Sign(output[i]));
}
Developer: KommuSoft, Project: accord_framework, Lines: 34, Source: SequentialMinimalOptimizationTest.cs
Example 3: v3_0_1
public double v3_0_1()
{
var ksvm = new KernelSupportVectorMachine(new Polynomial(2), 2);
var smo = new SequentialMinimalOptimization(ksvm, inputs, outputs);
return smo.Run(computeError: false);
}
Developer: accord-net, Project: framework, Lines: 7, Source: KernelSupportVectorMachineTest.cs
Example 4: Learn
public override Func<double[], double> Learn(LearningData learningData) {
var svm = new KernelSupportVectorMachine(_kernel, learningData.Variables.Count);
var smo = new SequentialMinimalOptimization(
svm, learningData.Inputs, learningData.Outputs);
smo.Run();
return svm.Compute;
}
Developer: RainsSoft, Project: Code2Xml, Lines: 7, Source: SvmLearnerWithLinear.cs
Example 5: LearnSVM
static KernelSupportVectorMachine LearnSVM(HSL[] positives, HSL[] negatives,
double throwExceptionWhenErrorGreaterThan)
{
int[] classes = new int[positives.Length + negatives.Length];
double[][] vectors = new double[classes.Length][];
int index = 0;
for (int c = 0; c < positives.Length; c++, index++)
{
classes[index] = 1;
vectors[index] = HSLToDouble(positives[c]);
}
for (int c = 0; c < negatives.Length; c++, index++)
{
classes[index] = -1;
vectors[index] = HSLToDouble(negatives[c]);
}
KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Gaussian(.1), vectors[0].Length);
SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, vectors.ToArray(), classes);
//smo.Complexity = 1.0;
double error = smo.Run();
if (error > throwExceptionWhenErrorGreaterThan)
{
throw new Exception("Failed to get reasonable error value.");
}
return svm;
}
Developer: kwende, Project: SetSpotter, Lines: 29, Source: Program.cs
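The HSLToDouble helper called above is not included in the snippet. A plausible sketch is shown below; it assumes an HSL color type exposing Hue (0-359), Saturation (0-1) and Luminance (0-1), as in AForge.Imaging, and the actual project may map the components differently.
// Hypothetical helper (not part of the original snippet): flatten an HSL color
// into a normalized feature vector for the SVM. Component ranges are assumptions.
static double[] HSLToDouble(HSL color)
{
    return new double[]
    {
        color.Hue / 359.0,    // hue scaled to [0, 1]
        color.Saturation,     // assumed already in [0, 1]
        color.Luminance       // assumed already in [0, 1]
    };
}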
Example 6: TrainningModel
//public SupportVectorMachine SVM
//{
// get { return svm; }
// private set { svm = value; }
//}
public override void TrainningModel(TrainningData trainningData)
{
ContinuousDataTableAdapter continuousDataTableAdapter = new ContinuousDataTableAdapter();
DataTable continuousDataTable = continuousDataTableAdapter.GetData();
DataTable dataTable = continuousDataTable.DefaultView.ToTable(false, TableMetaData.TestingAttributes);
string[] columnNames;
double[][] inputs = dataTable.ToArray(out columnNames);
int[] outputs = (int[])trainningData.ClassificationAttribute.Clone();
// Create output for SVM (-1 or 1)
for (int index = 0; index < outputs.Length; index++)
{
if (outputs[index] == 0)
{
outputs[index] = -1;
}
}
// Create a Support Vector Machine for the given inputs
//this.svm = new SupportVectorMachine(inputs[0].Length);
//// Create a Kernel Support Vector Machine for the given inputs
this.svm = new KernelSupportVectorMachine(new Gaussian(0.1), inputs[0].Length);
// Instantiate a new learning algorithm for SVMs
SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, outputs);
// Set up the learning algorithm
smo.Complexity = 1.0;
// Run the learning algorithm
double error = smo.Run();
}
Developer: hpbaotho, Project: benhvien, Lines: 40, Source: SVMModel.cs
Example 7: TrainTest
public void TrainTest()
{
Accord.Math.Tools.SetupGenerator(0);
// Example regression problem. Suppose we are trying
// to model the following equation: f(x, y) = 2x + y
double[][] inputs = // (x, y)
{
new double[] { 0, 1 }, // 2*0 + 1 = 1
new double[] { 4, 3 }, // 2*4 + 3 = 11
new double[] { 8, -8 }, // 2*8 - 8 = 8
new double[] { 2, 2 }, // 2*2 + 2 = 6
new double[] { 6, 1 }, // 2*6 + 1 = 13
new double[] { 5, 4 }, // 2*5 + 4 = 14
new double[] { 9, 1 }, // 2*9 + 1 = 19
new double[] { 1, 6 }, // 2*1 + 6 = 8
};
double[] outputs = // f(x, y)
{
1, 11, 8, 6, 13, 14, 19, 8
};
// Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree
var machine = new KernelSupportVectorMachine(new Polynomial(2), inputs: 2);
// Create the sequential minimal optimization teacher
var learn = new SequentialMinimalOptimizationRegression(machine, inputs, outputs)
{
Complexity = 100
};
// Run the learning algorithm
double error = learn.Run();
// Compute the answer for one particular example
double fxy = machine.Compute(inputs[0]); // 1.0003849827673186
// Check for correct answers
double[] answers = new double[inputs.Length];
for (int i = 0; i < answers.Length; i++)
answers[i] = machine.Compute(inputs[i]);
Assert.AreEqual(1.0, fxy, 1e-2);
for (int i = 0; i < outputs.Length; i++)
Assert.AreEqual(outputs[i], answers[i], 1e-2);
}
Developer: accord-net, Project: framework, Lines: 48, Source: SequentialMinimalOptimizationRegressionTest.cs
Example 8: ComputeTest
public void ComputeTest()
{
// Example AND problem
double[][] inputs =
{
new double[] { 0, 0 }, // 0 and 0: 0 (label -1)
new double[] { 0, 1 }, // 0 and 1: 0 (label -1)
new double[] { 1, 0 }, // 1 and 0: 0 (label -1)
new double[] { 1, 1 } // 1 and 1: 1 (label +1)
};
// Dichotomy SVM outputs should be given as [-1;+1]
int[] labels =
{
// 0, 0, 0, 1
-1, -1, -1, 1
};
// Create a Support Vector Machine for the given inputs
KernelSupportVectorMachine machine = new KernelSupportVectorMachine(new Gaussian(0.1), inputs[0].Length);
// Instantiate a new learning algorithm for SVMs
SequentialMinimalOptimization smo = new SequentialMinimalOptimization(machine, inputs, labels);
// Set up the learning algorithm
smo.Complexity = 1.0;
// Run
double error = smo.Run();
Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[0])));
Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[1])));
Assert.AreEqual(-1, Math.Sign(machine.Compute(inputs[2])));
Assert.AreEqual(+1, Math.Sign(machine.Compute(inputs[3])));
Assert.AreEqual(error, 0);
Assert.AreEqual(-0.6640625, machine.Threshold);
Assert.AreEqual(1, machine.Weights[0]);
Assert.AreEqual(-0.34375, machine.Weights[1]);
Assert.AreEqual(-0.328125, machine.Weights[2]);
Assert.AreEqual(-0.328125, machine.Weights[3]);
}
Developer: RLaumeyer, Project: framework, Lines: 43, Source: KernelSupportVectorMachineTest.cs
Example 9: RunTest
public void RunTest()
{
Accord.Math.Tools.SetupGenerator(0);
var dist = NormalDistribution.Standard;
double[] x =
{
+1.0312479734420776,
+0.99444115161895752,
+0.21835240721702576,
+0.47197291254997253,
+0.68701112270355225,
-0.58556461334228516,
-0.64154046773910522,
-0.66485315561294556,
+0.37940266728401184,
-0.61046308279037476
};
double[][] inputs = Jagged.ColumnVector(x);
IKernel kernel = new Linear();
var machine = new KernelSupportVectorMachine(kernel, inputs: 1);
var teacher = new OneclassSupportVectorLearning(machine, inputs)
{
Nu = 0.1
};
// Run the learning algorithm
double error = teacher.Run();
Assert.AreEqual(2, machine.Weights.Length);
Assert.AreEqual(0.39198910030993617, machine.Weights[0]);
Assert.AreEqual(0.60801089969006383, machine.Weights[1]);
Assert.AreEqual(inputs[0][0], machine.SupportVectors[0][0]);
Assert.AreEqual(inputs[7][0], machine.SupportVectors[1][0]);
}
Developer: RLaumeyer, Project: framework, Lines: 41, Source: OneclassSupportVectorLearningTest.cs
Example 10: MainWindow_Loaded
void MainWindow_Loaded(object sender, RoutedEventArgs e)
{
_green = KernelSupportVectorMachine.Load("resources/green.svm");
_purple = KernelSupportVectorMachine.Load("resources/purple.svm");
_red = KernelSupportVectorMachine.Load("resources/red.svm");
FilterInfoCollection filter = new FilterInfoCollection(FilterCategory.VideoInputDevice);
FilterInfo desired = null;
foreach (FilterInfo info in filter)
{
if (info.Name == "QuickCam for Notebooks Deluxe")
{
desired = info;
break;
}
}
_device = new VideoCaptureDevice(desired.MonikerString);
_device.NewFrame += _device_NewFrame;
_device.Start();
return;
}
Developer: kwende, Project: SetSpotter, Lines: 22, Source: MainWindow.xaml.cs
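The three .svm files loaded here must have been written by an earlier training step. A minimal sketch of that step is shown below; it assumes the Save(string) counterpart to the Load(string) call above and reuses the LearnSVM method from Example 5, and the variable names and error threshold are illustrative.
// Hypothetical training/persistence step for the machines loaded above.
// Assumes KernelSupportVectorMachine.Save(string path) as the counterpart of Load(string).
KernelSupportVectorMachine green = LearnSVM(greenPositives, greenNegatives, 0.05);
green.Save("resources/green.svm");

// Repeat with the purple and red training sets to produce purple.svm and red.svm.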
Example 11: PrintAccuracy
static void PrintAccuracy(string colorName, KernelSupportVectorMachine svm, HSL[] positives, HSL[] negatives)
{
int numberCorrect = 0;
for (int c = 0; c < positives.Length; c++)
{
double result = svm.Compute(HSLToDouble(positives[c]));
if (Math.Sign(result) == 1)
{
numberCorrect++;
}
}
for (int c = 0; c < negatives.Length; c++)
{
double result = svm.Compute(HSLToDouble(negatives[c]));
if (Math.Sign(result) == -1)
{
numberCorrect++;
}
}
Console.WriteLine(colorName + " accuracy is " +
(numberCorrect / (positives.Length + negatives.Length * 1.0)).ToString());
}
Developer: kwende, Project: SetSpotter, Lines: 23, Source: Program.cs
Example 12: FixedWeightsTest
public void FixedWeightsTest()
{
var dataset = KernelSupportVectorMachineTest.training;
var inputs = dataset.Submatrix(null, 0, 3);
var labels = Tools.Scale(0, 1, -1, 1, dataset.GetColumn(4)).ToInt32();
KernelSupportVectorMachine machine = new KernelSupportVectorMachine(
Gaussian.Estimate(inputs), inputs[0].Length);
var smo = new SequentialMinimalOptimization(machine, inputs, labels);
smo.Complexity = 10;
double error = smo.Run();
Assert.AreEqual(0.19047619047619047, error);
Assert.AreEqual(265.78327637381551, (machine.Kernel as Gaussian).Sigma);
Assert.AreEqual(29, machine.SupportVectors.Length);
double[] expectedWeights =
{
1.65717694716503, 1.20005456611466, -5.70824245415995, 10,
10, -2.38755497916487, 10, -8.15723436363058, 10, -10, 10,
10, -0.188634936781317, -5.4354281009458, -8.48341139483265,
-5.91105702760141, -5.71489190049223, 10, -2.37289205235858,
-3.33031262413522, -1.97545116517677, 10, -10, -9.563186799279,
-3.917941544845, -0.532584110773336, 4.81951847548326, 0.343668292727091,
-4.34159482731336
};
Assert.IsTrue(expectedWeights.IsEqual(machine.Weights, 1e-6));
int[] actual = new int[labels.Length];
for (int i = 0; i < actual.Length; i++)
actual[i] = Math.Sign(machine.Compute(inputs[i]));
ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
Assert.AreEqual(8, matrix.FalseNegatives);
Assert.AreEqual(0, matrix.FalsePositives);
Assert.AreEqual(4, matrix.TruePositives);
Assert.AreEqual(30, matrix.TrueNegatives);
Assert.AreEqual(1 / 3.0, matrix.Sensitivity);
Assert.AreEqual(1, matrix.Specificity);
Assert.AreEqual(0.5, matrix.FScore);
Assert.AreEqual(0.5129891760425771, matrix.MatthewsCorrelationCoefficient);
}
Developer: KommuSoft, Project: accord_framework, Lines: 49, Source: SequentialMinimalOptimizationTest.cs
Example 13: ComputeTest5
public void ComputeTest5()
{
var dataset = yinyang;
double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray();
int[] labels = dataset.GetColumn(2).ToInt32();
{
Linear kernel = new Linear();
var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
var smo = new SequentialMinimalOptimization(machine, inputs, labels);
smo.Complexity = 1.0;
double error = smo.Run();
Assert.AreEqual(1.0, smo.Complexity);
Assert.AreEqual(1.0, smo.WeightRatio);
Assert.AreEqual(1.0, smo.NegativeWeight);
Assert.AreEqual(1.0, smo.PositiveWeight);
Assert.AreEqual(0.14, error);
Assert.AreEqual(30, machine.SupportVectors.Length);
double[] actualWeights = machine.Weights;
double[] expectedWeights = { -1, -1, 1, -1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 0.337065120144639, -1, 1, -0.337065120144639, -1, 1, 1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1 };
Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));
int[] actual = new int[labels.Length];
for (int i = 0; i < actual.Length; i++)
actual[i] = Math.Sign(machine.Compute(inputs[i]));
ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
Assert.AreEqual(7, matrix.FalseNegatives);
Assert.AreEqual(7, matrix.FalsePositives);
Assert.AreEqual(43, matrix.TruePositives);
Assert.AreEqual(43, matrix.TrueNegatives);
}
{
Linear kernel = new Linear();
var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
var smo = new SequentialMinimalOptimization(machine, inputs, labels);
smo.Complexity = 1.0;
smo.PositiveWeight = 0.3;
smo.NegativeWeight = 1.0;
double error = smo.Run();
Assert.AreEqual(1.0, smo.Complexity);
Assert.AreEqual(0.3 / 1.0, smo.WeightRatio);
Assert.AreEqual(1.0, smo.NegativeWeight);
Assert.AreEqual(0.3, smo.PositiveWeight);
Assert.AreEqual(0.21, error);
Assert.AreEqual(24, machine.SupportVectors.Length);
double[] actualWeights = machine.Weights;
//string str = actualWeights.ToString(Accord.Math.Formats.CSharpArrayFormatProvider.InvariantCulture);
double[] expectedWeights = { -0.771026323762095, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -0.928973676237905, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 };
Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));
int[] actual = new int[labels.Length];
for (int i = 0; i < actual.Length; i++)
actual[i] = (int)machine.Compute(inputs[i]);
ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
Assert.AreEqual(50, matrix.FalseNegatives);
Assert.AreEqual(0, matrix.FalsePositives);
Assert.AreEqual(0, matrix.TruePositives);
Assert.AreEqual(50, matrix.TrueNegatives);
}
{
Linear kernel = new Linear();
var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
var smo = new SequentialMinimalOptimization(machine, inputs, labels);
smo.Complexity = 1.0;
smo.PositiveWeight = 1.0;
smo.NegativeWeight = 0.3;
double error = smo.Run();
Assert.AreEqual(1.0, smo.Complexity);
Assert.AreEqual(1.0 / 0.3, smo.WeightRatio);
Assert.AreEqual(0.3, smo.NegativeWeight);
Assert.AreEqual(1.0, smo.PositiveWeight);
Assert.AreEqual(0.15, error);
Assert.AreEqual(19, machine.SupportVectors.Length);
double[] actualWeights = machine.Weights;
double[] expectedWeights = new double[] { 1, 1, -0.3, 1, -0.3, 1, 1, -0.3, 1, 1, 1, 1, 1, 1, 1, 1, 0.129080057278249, 1, 0.737797469918795 };
Assert.IsTrue(expectedWeights.IsEqual(actualWeights, 1e-10));
int[] actual = new int[labels.Length];
for (int i = 0; i < actual.Length; i++)
actual[i] = Math.Sign(machine.Compute(inputs[i]));
//......... the rest of this example's code is omitted .........
Developer: natepan, Project: framework, Lines: 101, Source: SequentialMinimalOptimizationTest.cs
Example 14: LargeLearningTest1
public void LargeLearningTest1()
{
// Create large input vectors
int rows = 1000;
int dimension = 10000;
double[][] inputs = new double[rows][];
int[] outputs = new int[rows];
Random rnd = new Random();
for (int i = 0; i < inputs.Length; i++)
{
inputs[i] = new double[dimension];
if (i > rows / 2)
{
for (int j = 0; j < dimension; j++)
inputs[i][j] = rnd.NextDouble();
outputs[i] = -1;
}
else
{
for (int j = 0; j < dimension; j++)
inputs[i][j] = rnd.NextDouble() * 4.21 + 5;
outputs[i] = +1;
}
}
KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Polynomial(2), dimension);
SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, outputs)
{
UseComplexityHeuristic = true
};
double error = smo.Run();
Assert.AreEqual(0, error);
}
Developer: natepan, Project: framework, Lines: 42, Source: SequentialMinimalOptimizationTest.cs
Example 15: btnCreate_Click
private void btnCreate_Click(object sender, EventArgs e)
{
if (dgvLearningSource.DataSource == null)
{
MessageBox.Show("Please load some data first.");
return;
}
// Finishes and save any pending changes to the given data
dgvLearningSource.EndEdit();
// Creates a matrix from the entire source data table
double[,] table = (dgvLearningSource.DataSource as DataTable).ToMatrix(out columnNames);
// Get only the input vector values (first column)
double[][] inputs = table.GetColumns(0).ToArray();
// Get only the outputs (last column)
double[] outputs = table.GetColumn(1);
// Create the specified Kernel
IKernel kernel = createKernel();
// Create the Support Vector Machine for 1 input variable
svm = new KernelSupportVectorMachine(kernel, inputs: 1);
// Creates a new instance of the SMO for regression learning algorithm
var smo = new SequentialMinimalOptimizationRegression(svm, inputs, outputs)
{
// Set learning parameters
Complexity = (double)numC.Value,
Tolerance = (double)numT.Value,
Epsilon = (double)numEpsilon.Value
};
try
{
// Run
double error = smo.Run();
lbStatus.Text = "Training complete!";
}
catch (ConvergenceException)
{
lbStatus.Text = "Convergence could not be attained. " +
"The learned machine might still be usable.";
}
// Check if we got support vectors
if (svm.SupportVectors.Length == 0)
{
dgvSupportVectors.DataSource = null;
graphSupportVectors.GraphPane.CurveList.Clear();
return;
}
// Show support vectors on the Support Vectors tab page
double[][] supportVectorsWeights = svm.SupportVectors.InsertColumn(svm.Weights);
string[] supportVectorNames = columnNames.RemoveAt(columnNames.Length - 1).Concatenate("Weight");
dgvSupportVectors.DataSource = new ArrayDataView(supportVectorsWeights, supportVectorNames);
// Show the support vector labels on the scatter plot
double[] supportVectorLabels = new double[svm.SupportVectors.Length];
for (int i = 0; i < supportVectorLabels.Length; i++)
{
int j = inputs.Find(sv => sv == svm.SupportVectors[i])[0];
supportVectorLabels[i] = outputs[j];
}
double[][] graph = svm.SupportVectors.InsertColumn(supportVectorLabels);
CreateScatterplot(graphSupportVectors, graph.ToMatrix());
// Get the ranges for each variable (X and Y)
DoubleRange range = Matrix.Range(table.GetColumn(0));
double[][] map = Matrix.Interval(range, 0.05).ToArray();
// Classify each point in the Cartesian coordinate system
double[] result = map.Apply(svm.Compute);
double[,] surface = map.ToMatrix().InsertColumn(result);
CreateScatterplot(zedGraphControl2, surface);
}
Developer: huanzl0503, Project: framework, Lines: 99, Source: MainForm.cs
Example 16: MulticlassSupportVectorMachine
/// <summary>
/// Constructs a new Multi-class Kernel Support Vector Machine
/// </summary>
///
/// <param name="machines">
/// The machines to be used in each of the pairwise class subproblems.
/// </param>
///
public MulticlassSupportVectorMachine(KernelSupportVectorMachine[][] machines)
{
if (machines == null) throw new ArgumentNullException("machines");
this.machines = machines;
}
Developer: xyicheng, Project: Accord, Lines: 14, Source: MulticlassSupportVectorMachine.cs
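For context, the pairwise machines passed to this constructor are typically laid out as a lower-triangular jagged array, one row per class beyond the first. The sketch below wires up a 3-class multiclass machine from binary KernelSupportVectorMachines over 2 inputs; the exact indexing convention (machines[i-1][j] handling the pair of classes i and j, with i > j) is an assumption about how the framework enumerates the one-vs-one subproblems, and each binary machine would still need to be trained on its pair of classes before use.
// Three classes give three one-vs-one subproblems: (1 vs 0), (2 vs 0), (2 vs 1).
IKernel kernel = new Gaussian(1.0);

var machines = new KernelSupportVectorMachine[][]
{
    // Row for class 1: the (1 vs 0) machine.
    new[] { new KernelSupportVectorMachine(kernel, 2) },

    // Row for class 2: the (2 vs 0) and (2 vs 1) machines.
    new[] { new KernelSupportVectorMachine(kernel, 2),
            new KernelSupportVectorMachine(kernel, 2) }
};

var multiclass = new MulticlassSupportVectorMachine(machines);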
Example 17: KernelTest2
public void KernelTest2()
{
var dataset = SequentialMinimalOptimizationTest.yinyang;
var inputs = dataset.Submatrix(null, 0, 1).ToArray();
var labels = dataset.GetColumn(2).ToInt32();
var svm = new KernelSupportVectorMachine(new Linear(1), inputs: 2);
bool thrown = false;
try
{
new ProbabilisticCoordinateDescent(svm, inputs, labels);
}
catch (ArgumentException) { thrown = true; }
Assert.IsTrue(thrown);
}
Developer: KommuSoft, Project: accord_framework, Lines: 18, Source: ProbabilisticCoordinateDescentTest.cs
Example 18: KernelTest1
public void KernelTest1()
{
var dataset = SequentialMinimalOptimizationTest.yinyang;
double[][] inputs = dataset.Submatrix(null, 0, 1).ToArray();
int[] labels = dataset.GetColumn(2).ToInt32();
double e1, e2;
double[] w1, w2;
{
Accord.Math.Tools.SetupGenerator(0);
var svm = new SupportVectorMachine(inputs: 2);
var teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels);
teacher.Tolerance = 1e-10;
teacher.Complexity = 1e+10;
e1 = teacher.Run();
w1 = svm.ToWeights();
}
{
Accord.Math.Tools.SetupGenerator(0);
var svm = new KernelSupportVectorMachine(new Linear(0), inputs: 2);
var teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels);
teacher.Tolerance = 1e-10;
teacher.Complexity = 1e+10;
e2 = teacher.Run();
w2 = svm.ToWeights();
}
Assert.AreEqual(e1, e2);
Assert.AreEqual(w1.Length, w2.Length);
Assert.AreEqual(w1[0], w2[0]);
Assert.AreEqual(w1[1], w2[1]);
Assert.AreEqual(w1[2], w2[2]);
}
Developer: KommuSoft, Project: accord_framework, Lines: 39, Source: ProbabilisticCoordinateDescentTest.cs
Example 19: FromWeights
/// <summary>
/// Creates a new linear <see cref="SupportVectorMachine"/>
/// with the given set of linear <paramref name="weights"/>.
/// </summary>
///
/// <param name="weights">The machine's linear coefficients.</param>
///
/// <returns>
/// A <see cref="SupportVectorMachine"/> whose linear coefficients
/// are defined by the given <paramref name="weights"/> vector.
/// </returns>
///
public new static KernelSupportVectorMachine FromWeights(double[] weights)
{
var svm = new KernelSupportVectorMachine(new Linear(0), weights.Length - 1);
for (int i = 0; i < svm.Weights.Length; i++)
svm.Weights[i] = weights[i + 1];
svm.Threshold = weights[0];
return svm;
}
Developer: CanerPatir, Project: framework, Lines: 21, Source: KernelSupportVectorMachine.cs
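A brief usage sketch for FromWeights: the first element of the weight vector is the threshold (bias) and the remaining elements are the linear coefficients, so a vector of length n + 1 yields a machine over n inputs. The numbers below are illustrative, and the expected output assumes that Compute reduces to w · x + bias for a compact linear machine with no support vectors.
// weights[0] is the bias; weights[1..] are the coefficients for each input dimension.
double[] weights = { -0.5, 1.2, 0.8 };

KernelSupportVectorMachine svm = KernelSupportVectorMachine.FromWeights(weights);

// Expected to yield -0.5 + 1.2 * 1.0 + 0.8 * 2.0 = 2.3 under the assumption above.
double score = svm.Compute(new double[] { 1.0, 2.0 });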
Example 20: DynamicalTimeWarpingConstructorTest
public void DynamicalTimeWarpingConstructorTest()
{
double[][] sequences =
{
new double[] // -1
{
0, 0, 0,
1, 1, 1,
2, 2, 2,
},
new double[] // -1
{
0, 1, 0,
0, 2, 0,
0, 3, 0
},
new double[] // +1
{
1, 1, 0,
1, 2, 0,
2, 1, 0,
},
new double[] // +1
{
0, 0, 1,
0, 0, 2,
0, 1, 3,
},
};
int[] outputs = { -1, -1, +1, +1 };
// Set the parameters of the kernel
double alpha = 0.85;
int innerVectorLength = 3;
// Create the kernel. Note that the input vector will be given out automatically
DynamicTimeWarping target = new DynamicTimeWarping(innerVectorLength, alpha);
// When using variable-length kernels, specify 0 as the input length.
KernelSupportVectorMachine svm = new KernelSupportVectorMachine(target, 0);
// Create the Sequential Minimal Optimization as usual
SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, sequences, outputs);
smo.Complexity = 1.5;
double error = smo.Run();
// Computing the training values
var a0 = svm.Compute(sequences[0]);
var a1 = svm.Compute(sequences[1]);
var a2 = svm.Compute(sequences[2]);
var a3 = svm.Compute(sequences[3]);
Assert.AreEqual(-1, System.Math.Sign(a0));
Assert.AreEqual(-1, System.Math.Sign(a1));
Assert.AreEqual(+1, System.Math.Sign(a2));
Assert.AreEqual(+1, System.Math.Sign(a3));
// Computing a new testing value
double[] test =
{
1, 0, 1,
0, 0, 2,
0, 1, 3,
};
var a4 = svm.Compute(test);
}
Developer: natepan, Project: framework, Lines: 79, Source: DynamicalTimeWarpingTest.cs
Note: The Accord.MachineLearning.VectorMachines.KernelSupportVectorMachine class examples in this article were compiled from source-code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by many programmers; copyright of the source code remains with the original authors. Please refer to each project's license before redistributing or using the code, and do not republish without permission.