This article collects typical usage examples of the Java class weka.attributeSelection.AttributeSelection. If you are wondering what AttributeSelection is for, how to use it, or want to see working examples, the hand-picked class examples below should help.
The AttributeSelection class belongs to the weka.attributeSelection package. Fourteen code examples of the class are shown below, sorted by popularity by default. If you find an example useful you can upvote it; your feedback helps the system recommend better Java examples.
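Before diving into the project excerpts, here is a minimal, self-contained sketch of the typical AttributeSelection workflow: configure an evaluator and a search method, run SelectAttributes, then reduce the data to the selected attributes. The file name iris.arff is an assumption; any ARFF dataset with a nominal class attribute will do.

import weka.attributeSelection.AttributeSelection;
import weka.attributeSelection.CfsSubsetEval;
import weka.attributeSelection.GreedyStepwise;
import weka.core.Instances;
import weka.core.Utils;
import weka.core.converters.ConverterUtils.DataSource;

public class AttributeSelectionQuickStart {
  public static void main(String[] args) throws Exception {
    // Load a dataset and mark the last attribute as the class (file name is an assumption).
    Instances data = DataSource.read("iris.arff");
    data.setClassIndex(data.numAttributes() - 1);
    // The evaluator scores attribute subsets; the search method decides which subsets to try.
    AttributeSelection attsel = new AttributeSelection();
    attsel.setEvaluator(new CfsSubsetEval());
    attsel.setSearch(new GreedyStepwise());
    // Run the selection and keep only the chosen attributes (plus the class attribute).
    attsel.SelectAttributes(data);
    System.out.println("Selected indices: " + Utils.arrayToString(attsel.selectedAttributes()));
    Instances reduced = attsel.reduceDimensionality(data);
    System.out.println("Reduced data has " + reduced.numAttributes() + " attributes");
  }
}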
Example 1: useFilter
import weka.attributeSelection.AttributeSelection; // import the required package/class
/**
 * uses the filter
 */
protected static void useFilter(Instances data) throws Exception {
  System.out.println("\n2. Filter");
  weka.filters.supervised.attribute.AttributeSelection filter =
      new weka.filters.supervised.attribute.AttributeSelection();
  CfsSubsetEval eval = new CfsSubsetEval();
  GreedyStepwise search = new GreedyStepwise();
  search.setSearchBackwards(true);
  filter.setEvaluator(eval);
  System.out.println("Set the evaluator : " + eval.toString());
  filter.setSearch(search);
  System.out.println("Set the search : " + search.toString());
  filter.setInputFormat(data);
  System.out.println("Set the input format : " + data.toString());
  Instances newData = Filter.useFilter(data, filter);
  System.out.println("Results of Filter:\n" + newData);
}
Author: ajaybhat, Project: Essay-Grading-System, Lines: 20, Source: AttributeSelectionRunner.java
Example 2: preProcessData
import weka.attributeSelection.AttributeSelection; // import the required package/class
public static Instances preProcessData(Instances data) throws Exception {
  /*
   * Remove useless attributes
   */
  RemoveUseless removeUseless = new RemoveUseless();
  removeUseless.setOptions(new String[] { "-M", "99" }); // maximum variance percentage threshold
  removeUseless.setInputFormat(data);
  data = Filter.useFilter(data, removeUseless);
  /*
   * Replace missing values
   */
  ReplaceMissingValues fixMissing = new ReplaceMissingValues();
  fixMissing.setInputFormat(data);
  data = Filter.useFilter(data, fixMissing);
  /*
   * Discretize numeric attributes
   */
  Discretize discretizeNumeric = new Discretize();
  discretizeNumeric.setOptions(new String[] {
      "-O",
      "-M", "-1.0",
      "-B", "4",             // number of bins
      "-R", "first-last" }); // range of attributes
  discretizeNumeric.setInputFormat(data);
  data = Filter.useFilter(data, discretizeNumeric);
  /*
   * Select only informative attributes
   */
  InfoGainAttributeEval eval = new InfoGainAttributeEval();
  Ranker search = new Ranker();
  search.setOptions(new String[] { "-T", "0.001" }); // information gain threshold
  AttributeSelection attSelect = new AttributeSelection();
  attSelect.setEvaluator(eval);
  attSelect.setSearch(search);
  // apply attribute selection
  attSelect.SelectAttributes(data);
  // remove the attributes not selected in the last run
  data = attSelect.reduceDimensionality(data);
  return data;
}
Author: PacktPublishing, Project: Machine-Learning-End-to-Endguide-for-Java-developers, Lines: 52, Source: KddCup.java
Example 3: featureSelection
import weka.attributeSelection.AttributeSelection; // import the required package/class
/**
 * Method featureSelection, which uses an algorithm to select the most representative features
 * of the data in the patterns_krs table
 *
 * @param data The instances from the patterns_krs table
 *
 * @return indexes The indexes of the attributes selected by the algorithm
 */
public int[] featureSelection(Instances data) {
  int[] indexes = null;
  AttributeSelection attsel = new AttributeSelection();
  //FuzzyRoughSubsetEval eval = new FuzzyRoughSubsetEval();
  //HillClimber search = new HillClimber();
  CfsSubsetEval eval = new CfsSubsetEval();
  GreedyStepwise search = new GreedyStepwise();
  attsel.setEvaluator(eval);
  attsel.setSearch(search);
  try {
    attsel.SelectAttributes(data);
    indexes = attsel.selectedAttributes();
    logger.info("Selected Features: " + Utils.arrayToString(indexes));
  } catch (Exception e) {
    e.printStackTrace();
  }
  return indexes;
}
Author: MusesProject, Project: MusesServer, Lines: 31, Source: DataMiner.java
Example 4: selectFeatures
import weka.attributeSelection.AttributeSelection; // import the required package/class
public void selectFeatures() {
  AttributeSelection attSelection = new AttributeSelection();
  CfsSubsetEval eval = new CfsSubsetEval();
  BestFirst search = new BestFirst();
  attSelection.setEvaluator(eval);
  attSelection.setSearch(search);
  try {
    attSelection.SelectAttributes(iris);
    int[] attIndex = attSelection.selectedAttributes();
    System.out.println(Utils.arrayToString(attIndex));
  } catch (Exception e) {
    e.printStackTrace();
  }
}
Author: PacktPublishing, Project: Java-Data-Science-Cookbook, Lines: 14, Source: WekaFeatureSelectionTest.java
Example 5: selectFeaturesWithFilter
import weka.attributeSelection.AttributeSelection; // import the required package/class
public void selectFeaturesWithFilter() {
  weka.filters.supervised.attribute.AttributeSelection filter =
      new weka.filters.supervised.attribute.AttributeSelection();
  CfsSubsetEval eval = new CfsSubsetEval();
  BestFirst search = new BestFirst();
  filter.setEvaluator(eval);
  filter.setSearch(search);
  try {
    filter.setInputFormat(iris);
    Instances newData = Filter.useFilter(iris, filter);
    System.out.println(newData);
  } catch (Exception e) {
    e.printStackTrace();
  }
}
Author: PacktPublishing, Project: Java-Data-Science-Cookbook, Lines: 14, Source: WekaFeatureSelectionTest.java
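Examples 4 and 5 both operate on an iris field that WekaFeatureSelectionTest initializes elsewhere. A minimal sketch of what that setup might look like (the loader method and the file name iris.arff are assumptions, not part of the original class):

// Hypothetical setup for the iris field used in the two examples above.
private weka.core.Instances iris;

public void loadIrisData() throws Exception {
  iris = weka.core.converters.ConverterUtils.DataSource.read("iris.arff"); // assumed file name
  iris.setClassIndex(iris.numAttributes() - 1); // CfsSubsetEval needs a class attribute
}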
Example 6: TrainedModelPredictionMaker
import weka.attributeSelection.AttributeSelection; // import the required package/class
public TrainedModelPredictionMaker(String attributeSelectionObjPath, String modelObjPath,
    String instancesPath, String classIndex, String predictionPath)
{
  // Go forth and load some instances
  try
  {
    DataSource dataSource = new DataSource(new FileInputStream(instancesPath));
    Instances instances = dataSource.getDataSet();
    // Make sure the class index is set
    if (instances.classIndex() == -1) {
      if (classIndex.equals("last"))
        instances.setClassIndex(instances.numAttributes() - 1);
      else
        instances.setClassIndex(Integer.parseInt(classIndex));
    }
    // Load up the attribute selection if we need to
    if (attributeSelectionObjPath != null) {
      AttributeSelection as =
          (AttributeSelection) weka.core.SerializationHelper.read(attributeSelectionObjPath);
      instances = as.reduceDimensionality(instances);
    }
    // Load up yonder classifier
    AbstractClassifier classifier =
        (AbstractClassifier) weka.core.SerializationHelper.read(modelObjPath);
    // Make the evaluation
    eval = new Evaluation(instances);
    ClassifierRunner.EvaluatorThread thrd =
        new ClassifierRunner.EvaluatorThread(eval, classifier, instances, predictionPath);
    thrd.run();
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
Author: dsibournemouth, Project: autoweka, Lines: 34, Source: TrainedModelPredictionMaker.java
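Example 6 reads a serialized AttributeSelection object back from disk with weka.core.SerializationHelper and applies it to new instances via reduceDimensionality. As a rough sketch, the training side might persist such an object like this (the evaluator, search method, file path, and the trainingInstances variable are assumptions):

// Hypothetical training-side counterpart to the constructor above.
AttributeSelection as = new AttributeSelection();
as.setEvaluator(new CfsSubsetEval());
as.setSearch(new GreedyStepwise());
as.SelectAttributes(trainingInstances); // trainingInstances: training data with the class index set
weka.core.SerializationHelper.write("attributeselection.model", as); // assumed path
// TrainedModelPredictionMaker can later read this file and call reduceDimensionality()
// on new data before handing it to the classifier.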
Example 7: useLowLevel
import weka.attributeSelection.AttributeSelection; // import the required package/class
/**
 * uses the low level approach
 */
protected static void useLowLevel(Instances data) throws Exception {
  System.out.println("\n3. Low-level");
  AttributeSelection attsel = new AttributeSelection();
  attsel.SelectAttributes(data);
  int[] indices = attsel.selectedAttributes();
  for (int i = 0; i < indices.length; i++) {
    // print each selected attribute
    System.out.println(data.attribute(indices[i]).toString());
  }
}
Author: ajaybhat, Project: Essay-Grading-System, Lines: 13, Source: AttributeSelectionRunner.java
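The excerpt above relies on the selector's configuration being handled elsewhere in the project; in standalone use, an evaluator and a search method are normally set before SelectAttributes is called. A short sketch of that setup (the choice of CfsSubsetEval and GreedyStepwise is an assumption):

AttributeSelection attsel = new AttributeSelection();
attsel.setEvaluator(new CfsSubsetEval()); // scores candidate attribute subsets
attsel.setSearch(new GreedyStepwise());   // controls which subsets are explored
attsel.SelectAttributes(data);
int[] indices = attsel.selectedAttributes(); // selected indices, including the class index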
Example 8: getAttributeSelector
import weka.attributeSelection.AttributeSelection; // import the required package/class
private static AttributeSelection getAttributeSelector(Instances trainingData) throws Exception {
  AttributeSelection selector = new AttributeSelection();
  InfoGainAttributeEval evaluator = new InfoGainAttributeEval();
  Ranker ranker = new Ranker();
  ranker.setNumToSelect(Math.min(500, trainingData.numAttributes() - 1));
  selector.setEvaluator(evaluator);
  selector.setSearch(ranker);
  selector.SelectAttributes(trainingData);
  return selector;
}
Author: qcri-social, Project: AIDR, Lines: 12, Source: ModelFactory.java
Example 9: selectAttributes
import weka.attributeSelection.AttributeSelection; // import the required package/class
@TimeThis(task = "select-attributes")
protected String selectAttributes(@SuppressWarnings("unused") ProcessingContext<Corpus> ctx,
    IdentifiedInstances<Element> trainingSet) throws Exception {
  ASEvaluation eval = ASEvaluation.forName(evaluator, evaluatorOptions);
  return AttributeSelection.SelectAttributes(eval, getEvalOptions(), trainingSet);
}
Author: Bibliome, Project: alvisnlp, Lines: 6, Source: WekaSelectAttributes.java
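Example 9 uses the static AttributeSelection.SelectAttributes(ASEvaluation, String[], Instances) entry point, which takes command-line-style options and returns the selection results as a text report. The module reads the evaluator class name and options from its own configuration; a sketch with concrete placeholder values (these particular strings are assumptions, not the module's actual settings):

// Hypothetical concrete call mirroring the method above.
ASEvaluation eval = ASEvaluation.forName(
    "weka.attributeSelection.InfoGainAttributeEval", new String[0]);
String[] evalOptions = { "-s", "weka.attributeSelection.Ranker -T 0.01" }; // search method spec
String report = AttributeSelection.SelectAttributes(eval, evalOptions, trainingSet);
System.out.println(report);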
Example 10: learnParameters
import weka.attributeSelection.AttributeSelection; // import the required package/class
/**
 * Learns the rule from parsed features via cross-validation with the configured parameters.
 * Additionally, feature subset selection is conducted if this.forwardSelection or
 * this.backwardSelection is set accordingly.
 *
 * @param features
 *            Contains the features used to learn a classifier
 */
@Override
public Performance learnParameters(FeatureVectorDataSet features) {
  // create training data
  Instances trainingData = transformToWeka(features, this.trainingSet);
  try {
    Evaluation eval = new Evaluation(trainingData);
    // apply feature subset selection
    if (this.forwardSelection || this.backwardSelection) {
      GreedyStepwise search = new GreedyStepwise();
      search.setSearchBackwards(this.backwardSelection);
      this.fs = new AttributeSelection();
      // do feature subset selection with a wrapper evaluator using 10-fold cross-validation
      WrapperSubsetEval wrapper = new WrapperSubsetEval();
      wrapper.setClassifier(this.classifier);
      wrapper.setFolds(10);
      wrapper.setThreshold(0.01);
      wrapper.buildEvaluator(trainingData);
      this.fs.setEvaluator(wrapper);
      this.fs.setSearch(search);
      this.fs.SelectAttributes(trainingData);
      trainingData = fs.reduceDimensionality(trainingData);
    }
    // perform 10-fold cross-validation to evaluate the classifier
    eval.crossValidateModel(this.classifier, trainingData, 10, new Random(1));
    System.out.println(eval.toSummaryString("\nResults\n\n", false));
    this.classifier.buildClassifier(trainingData);
    int truePositive = (int) eval.numTruePositives(trainingData.classIndex());
    int falsePositive = (int) eval.numFalsePositives(trainingData.classIndex());
    int falseNegative = (int) eval.numFalseNegatives(trainingData.classIndex());
    Performance performance = new Performance(truePositive, truePositive + falsePositive,
        truePositive + falseNegative);
    return performance;
  } catch (Exception e) {
    e.printStackTrace();
    return null;
  }
}
Author: olehmberg, Project: winter, Lines: 62, Source: WekaMatchingRule.java
Example 11: AttributeSelectorThread
import weka.attributeSelection.AttributeSelection; // import the required package/class
public AttributeSelectorThread(AttributeSelection selection, Instances inst)
{
  mInstances = inst;
  mSelection = selection;
}
Author: dsibournemouth, Project: autoweka, Lines: 6, Source: ClassifierRunner.java
Example 12: setAttributeSelection
import weka.attributeSelection.AttributeSelection; // import the required package/class
public void setAttributeSelection(AttributeSelection search)
{
  mAttributeSelection = search;
}
Author: dsibournemouth, Project: autoweka, Lines: 5, Source: ClassifierResult.java
Example 13: getAttributeSelection
import weka.attributeSelection.AttributeSelection; // import the required package/class
public AttributeSelection getAttributeSelection()
{
  return mAttributeSelection;
}
Author: dsibournemouth, Project: autoweka, Lines: 5, Source: ClassifierResult.java
Example 14: buildModel
import weka.attributeSelection.AttributeSelection; // import the required package/class
public static Model buildModel(int crisisID, int attributeID, Model oldModel) throws Exception {
  // TODO: Improve model training to try different classifiers and
  // different mixes of old and new data

  // Get training and evaluation data
  Instances trainingSet = DataStore.getTrainingSet(crisisID, attributeID);
  Instances evaluationSet = DataStore.getEvaluationSet(crisisID, attributeID, trainingSet);
  if (trainingSet.attribute(trainingSet.numAttributes() - 1).numValues() < 2) {
    logger.info("ModelFactory: "
        + "All training examples have the same label. Postponing training.");
    return oldModel;
  }
  if (evaluationSet.numInstances() < 2) {
    logger.info("ModelFactory: "
        + "The evaluation set is too small. Postponing training.");
    return oldModel;
  }
  // Do attribute selection
  AttributeSelection selector = getAttributeSelector(trainingSet);
  trainingSet = selector.reduceDimensionality(trainingSet);
  evaluationSet = selector.reduceDimensionality(evaluationSet);
  // Train classifier
  Classifier classifier = trainClassifier(trainingSet);
  // Create the model object
  Model model = new Model(attributeID, classifier, getTemplateSet(trainingSet));
  model.setTrainingSampleCount(trainingSet.size());
  // Evaluate classifier
  model.evaluate(evaluationSet);
  double newPerformance = model.getWeightedPerformance();
  double oldPerformance = 0;
  if (oldModel != null) {
    oldModel.evaluate(evaluationSet);
    oldPerformance = oldModel.getWeightedPerformance();
  }
  // Koushik: Changed as per ChaTo's suggestion
  /*
  if (newPerformance > oldPerformance - EPSILON) {
    return model;
  } else {
    return oldModel;
  }*/
  if (newPerformance > oldPerformance - PERFORMANCE_IMPROVEMENT_MARGIN) {
    return model;
  } else if (model.getTrainingSampleCount() > oldModel.getTrainingSampleCount()
      + TRAINING_EXAMPLES_FORCE_RETRAIN) {
    return model;
  } else {
    return oldModel;
  }
}
Author: qcri-social, Project: AIDR, Lines: 59, Source: ModelFactory.java
Note: The weka.attributeSelection.AttributeSelection examples in this article were collected from open-source projects and documentation platforms such as GitHub and MSDocs. The code snippets come from projects contributed by their original authors; copyright in the source code remains with those authors, and any use or redistribution should follow the corresponding project's license. Do not republish without permission.