This article collects typical usage examples of the Java method weka.core.Instances.numAttributes. If you are unsure what Instances.numAttributes does, how to call it, or what real-world usages look like, the curated code samples below should help. You can also explore further usage examples of its declaring class, weka.core.Instances.
The following shows 15 code examples of the Instances.numAttributes method, sorted by popularity by default.
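Before the examples, here is a minimal self-contained sketch of the idiom that recurs throughout them: load an ARFF file, treat the last attribute as the class via numAttributes() - 1, and list the attributes. The file name is a placeholder and not taken from any example below.

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class NumAttributesDemo {
    public static void main(String[] args) throws Exception {
        // "data.arff" is a placeholder path; any ARFF dataset will do
        Instances data = DataSource.read("data.arff");
        // convention used by most examples below: the last attribute is the class
        data.setClassIndex(data.numAttributes() - 1);
        System.out.println("Number of attributes (including the class): " + data.numAttributes());
        for (int i = 0; i < data.numAttributes(); i++) {
            System.out.println(i + ": " + data.attribute(i).name());
        }
    }
}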
Example 1: instancesToDMatrix
import weka.core.Instances; // import the package/class that this method depends on
public static DMatrix instancesToDMatrix(Instances instances) throws XGBoostError {
long[] rowHeaders = new long[instances.size()+1];
rowHeaders[0]=0;
List<Float> dataList = new ArrayList<>();
List<Integer> colList = new ArrayList<>();
float[] labels = new float[instances.size()];
for(int i=0; i<instances.size(); i++) {
Instance instance = instances.get(i);
rowHeaders[i] = dataList.size();
processInstance(instance, dataList, colList);
labels[i] = (float) instance.classValue();
}
rowHeaders[rowHeaders.length - 1] = dataList.size();
int colNum = instances.numAttributes()-1;
DMatrix dMatrix = createDMatrix(rowHeaders, dataList, colList, colNum);
dMatrix.setLabel(labels);
return dMatrix;
}
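A hypothetical caller of instancesToDMatrix might look like the sketch below; the ARFF path is a placeholder and the method is assumed to live in the same class as the conversion helper above.

// Hypothetical usage sketch, assumed to sit in the same class as instancesToDMatrix
public static void buildTrainingMatrix() throws Exception {
    Instances train = weka.core.converters.ConverterUtils.DataSource.read("train.arff"); // placeholder path
    train.setClassIndex(train.numAttributes() - 1); // numAttributes()-1 is also the DMatrix column count
    DMatrix trainMatrix = instancesToDMatrix(train);
    // trainMatrix can now be handed to XGBoost training, e.g. XGBoost.train(...)
}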
Example 2: createFilter
import weka.core.Instances; // import the package/class that this method depends on
public Filter createFilter(Instances data) throws Exception {
Set<Integer> indexes = new HashSet<Integer>();
for (int i = 0, cnt = this.size(); i < cnt; i++) {
indexes.add(this.get(i).index());
} // FOR
SortedSet<Integer> to_remove = new TreeSet<Integer>();
for (int i = 0, cnt = data.numAttributes(); i < cnt; i++) {
if (indexes.contains(i) == false) {
to_remove.add(i+1);
}
} // FOR
Remove filter = new Remove();
// Set the -R option first: setInputFormat should be the last call before the
// filter is applied, so the output format reflects the removed attributes
String options[] = { "-R", StringUtil.join(",", to_remove) };
filter.setOptions(options);
filter.setInputFormat(data);
return (filter);
}
Example 3: getEvalResultbySMOTE
import weka.core.Instances; // import the package/class that this method depends on
/**
 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
 * <p>Uses C4.5 with <b>SMOTE</b> to classify the dataset.</p>
 * @param path dataset path
 * @param index row of the results array in which the evaluation metrics are stored
 * @throws Exception
 */
public static void getEvalResultbySMOTE(String path, int index) throws Exception{
Instances ins = DataSource.read(path);
int numAttr = ins.numAttributes();
ins.setClassIndex(numAttr - 1);
SMOTE smote = new SMOTE();
smote.setInputFormat(ins);
/** classifiers setting*/
J48 j48 = new J48();
// j48.setConfidenceFactor(0.4f);
j48.buildClassifier(ins);
FilteredClassifier fc = new FilteredClassifier();
fc.setClassifier(j48);
fc.setFilter(smote);
Evaluation eval = new Evaluation(ins);
eval.crossValidateModel(fc, ins, 10, new Random(1));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
// System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
results[index][0] = eval.precision(0);
results[index][1] = eval.recall(0);
results[index][2] = eval.fMeasure(0);
results[index][3] = eval.precision(1);
results[index][4] = eval.recall(1);
results[index][5] = eval.fMeasure(1);
results[index][6] = 1-eval.errorRate();
}
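All of the getEvalResultby* helpers in this article follow the same pattern: they write precision, recall, and F-measure for both classes plus overall accuracy into a shared results matrix. A driver sketch is shown below; the results field, its dimensions, and the dataset paths are assumptions for illustration only.

// Hypothetical driver, assumed to live in the same class as getEvalResultbySMOTE
// and a field such as: static double[][] results = new double[2][7];
public static void runSmoteExperiments() throws Exception {
    String[] paths = { "project-a.arff", "project-b.arff" }; // placeholder dataset paths
    for (int i = 0; i < paths.length; i++) {
        getEvalResultbySMOTE(paths[i], i);
        System.out.printf("%s: P0=%.3f R0=%.3f F0=%.3f acc=%.3f%n",
                paths[i], results[i][0], results[i][1], results[i][2], results[i][6]);
    }
}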
Example 4: findBestPerf
import weka.core.Instances; // import the package/class that this method depends on
public static Instance findBestPerf(Instances data){
int idx = data.numAttributes()-1;
double bestPerf = data.attributeStats(idx).numericStats.max;
for(int i=0;i<data.numInstances();i++)
if(data.get(i).value(idx)==bestPerf)
return data.get(i);
return null;//should never return NULL
}
Example 5: findBestPerfIndex
import weka.core.Instances; // import the package/class that this method depends on
public static int findBestPerfIndex(Instances data){
int idx = data.numAttributes()-1;
double bestPerf = data.attributeStats(idx).numericStats.max;
for(int i=0;i<data.numInstances();i++)
if(data.get(i).value(idx)==bestPerf)
return i;
return -1;//should never return -1
}
Example 6: getEvalResultbyNo
import weka.core.Instances; // import the package/class that this method depends on
/**
 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
 * <p>Uses only C4.5 to classify the dataset.</p>
 * @param path dataset path
 * @param index row of the results array in which the evaluation metrics are stored
 * @throws Exception
 */
public static void getEvalResultbyNo(String path, int index) throws Exception{
Instances ins = DataSource.read(path);
int numAttr = ins.numAttributes();
ins.setClassIndex(numAttr - 1);
/** classifiers setting*/
J48 j48 = new J48();
// j48.setConfidenceFactor(0.4f);
j48.buildClassifier(ins);
Evaluation eval = new Evaluation(ins);
eval.crossValidateModel(j48, ins, 10, new Random(1));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
// System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
results[index][0] = eval.precision(0);
results[index][1] = eval.recall(0);
results[index][2] = eval.fMeasure(0);
results[index][3] = eval.precision(1);
results[index][4] = eval.recall(1);
results[index][5] = eval.fMeasure(1);
results[index][6] = 1-eval.errorRate();
}
Example 7: getEvalResultbyResampling
import weka.core.Instances; // import the package/class that this method depends on
/**
 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
 * <p>Uses C4.5 with <b>Resampling</b> to classify the dataset.</p>
 * @param path dataset path
 * @param index row of the results array in which the evaluation metrics are stored
 * @throws Exception
 */
public static void getEvalResultbyResampling(String path, int index) throws Exception{
Instances ins = DataSource.read(path);
int numAttr = ins.numAttributes();
ins.setClassIndex(numAttr - 1);
Resample resample = new Resample();
resample.setInputFormat(ins);
/** classifiers setting*/
J48 j48 = new J48();
// j48.setConfidenceFactor(0.4f);
j48.buildClassifier(ins);
FilteredClassifier fc = new FilteredClassifier();
fc.setClassifier(j48);
fc.setFilter(resample);
Evaluation eval = new Evaluation(ins);
eval.crossValidateModel(fc, ins, 10, new Random(1));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
// System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
results[index][0] = eval.precision(0);
results[index][1] = eval.recall(0);
results[index][2] = eval.fMeasure(0);
results[index][3] = eval.precision(1);
results[index][4] = eval.recall(1);
results[index][5] = eval.fMeasure(1);
results[index][6] = 1-eval.errorRate();
}
Example 8: getEvalResultbyCost
import weka.core.Instances; // import the package/class that this method depends on
/**
 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
 * <p>Uses C4.5 with <b>cost-sensitive learning</b> to classify the dataset.</p>
 * @param path dataset path
 * @param index row of the results array in which the evaluation metrics are stored
 * @throws Exception
 */
public static void getEvalResultbyCost(String path, int index) throws Exception{
Instances ins = DataSource.read(path);
int numAttr = ins.numAttributes();
ins.setClassIndex(numAttr - 1);
/**Classifier setting*/
J48 j48 = new J48();
// j48.setConfidenceFactor(0.4f);
j48.buildClassifier(ins);
CostSensitiveClassifier csc = new CostSensitiveClassifier();
csc.setClassifier(j48);
csc.setCostMatrix(new CostMatrix(new BufferedReader(new FileReader("files/costm"))));
Evaluation eval = new Evaluation(ins);
eval.crossValidateModel(csc, ins, 10, new Random(1));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
// System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
results[index][0] = eval.precision(0);
results[index][1] = eval.recall(0);
results[index][2] = eval.fMeasure(0);
results[index][3] = eval.precision(1);
results[index][4] = eval.recall(1);
results[index][5] = eval.fMeasure(1);
results[index][6] = 1-eval.errorRate();
}
Example 9: getEvalResultbyDefault
import weka.core.Instances; // import the package/class that this method depends on
/**
 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
 * <p>Uses C4.5 with <b>SMOTE</b> to classify the dataset.</p>
 * @param path dataset path
 * @param index row of the results array in which the evaluation metrics are stored
 * @throws Exception
 */
public static void getEvalResultbyDefault(String path, int index) throws Exception{
Instances ins = DataSource.read(path);
int numAttr = ins.numAttributes();
ins.setClassIndex(numAttr - 1);
SMOTE smote = new SMOTE();
smote.setInputFormat(ins);
/** classifiers setting*/
J48 j48 = new J48();
// j48.setConfidenceFactor(0.4f);
j48.buildClassifier(ins);
FilteredClassifier fc = new FilteredClassifier();
fc.setClassifier(j48);
fc.setFilter(smote);
Evaluation eval = new Evaluation(ins);
eval.crossValidateModel(fc, ins, 10, new Random(1));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
// System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
results[index][0] = eval.precision(0);
results[index][1] = eval.recall(0);
results[index][2] = eval.fMeasure(0);
results[index][3] = eval.precision(1);
results[index][4] = eval.recall(1);
results[index][5] = eval.fMeasure(1);
results[index][6] = 1-eval.errorRate();
}
Example 10: instancesToDenseDMatrix
import weka.core.Instances; // import the package/class that this method depends on
public static DMatrix instancesToDenseDMatrix(Instances instances) throws XGBoostError {
int colNum = instances.numAttributes()-1;
int rowNum = instances.size();
float[] data = new float[colNum*rowNum];
float[] labels = new float[instances.size()];
Attribute classAttribute = instances.classAttribute();
int classAttrIndex = classAttribute.index();
for(int i=0, dataIndex = 0; i<instances.size(); i++) {
Instance instance = instances.get(i);
labels[i] = (float) instance.classValue();
Enumeration<Attribute> attributeEnumeration = instance.enumerateAttributes();
while (attributeEnumeration.hasMoreElements()) {
Attribute attribute = attributeEnumeration.nextElement();
int attrIndex = attribute.index();
if(attrIndex == classAttrIndex){
continue;
}
data[dataIndex]= (float) instance.value(attribute);
dataIndex++;
}
}
DMatrix dMatrix = new DMatrix(data, rowNum, colNum);
dMatrix.setLabel(labels);
return dMatrix;
}
Example 11: loadWekaInstance
import weka.core.Instances; // import the package/class that this method depends on
private void loadWekaInstance(String path) {
try {
this.wekaInstance.loadInstanceFromPath(path);
} catch (Exception e) {
MiniMLLogger.INSTANCE.exception(e);
}
Instances data = (Instances) this.wekaInstance.getValue();
for (int i = 0; i < data.numAttributes(); i++)
{
String name = data.attribute(i).name();
this.attributes.add(i, name);
this.classifierSelect.addItem(name);
}
}
Example 12: loadInstanceFromPath
import weka.core.Instances; // import the package/class that this method depends on
public void loadInstanceFromPath(String path) throws Exception {
DataSource source = new DataSource(path);
Instances data = source.getDataSet();
if (data.classIndex() == -1) {
data.setClassIndex(data.numAttributes() - 1);
}
this.value = data;
MiniMLLogger.INSTANCE.info("Dataset loaded with these attributes");
for (int i = 0; i < data.numAttributes(); i++)
{
MiniMLLogger.INSTANCE.info(data.attribute(i));
}
}
Example 13: getEvalResultbyChiSquare
import weka.core.Instances; // import the package/class that this method depends on
/**
 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
 * <p>Uses C4.5 with <b>SMOTE</b>, combined with a <b>Chi-Square</b> attribute filter, to classify the dataset.</p>
 * @param path dataset path
 * @param index row of the results array in which the evaluation metrics are stored
 * @throws Exception
 */
public static void getEvalResultbyChiSquare(String path, int index) throws Exception{
Instances ins = DataSource.read(path);
int numAttr = ins.numAttributes();
ins.setClassIndex(numAttr - 1);
/**chi-squared filter to process the whole dataset first*/
ChiSquaredAttributeEval evall = new ChiSquaredAttributeEval();
Ranker ranker = new Ranker();
AttributeSelection selector = new AttributeSelection();
selector.setEvaluator(evall);
selector.setSearch(ranker);
selector.setInputFormat(ins);
ins = Filter.useFilter(ins, selector);
SMOTE smote = new SMOTE();
smote.setInputFormat(ins);
/** classifiers setting*/
J48 j48 = new J48();
// j48.setConfidenceFactor(0.4f);
j48.buildClassifier(ins);
FilteredClassifier fc = new FilteredClassifier();
fc.setClassifier(j48);
fc.setFilter(smote);
Evaluation eval = new Evaluation(ins);
eval.crossValidateModel(fc, ins, 10, new Random(1));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
// System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
results[index][0] = eval.precision(0);
results[index][1] = eval.recall(0);
results[index][2] = eval.fMeasure(0);
results[index][3] = eval.precision(1);
results[index][4] = eval.recall(1);
results[index][5] = eval.fMeasure(1);
results[index][6] = 1-eval.errorRate();
}
Example 14: getEvalResultbyInfoGain
import weka.core.Instances; // import the package/class that this method depends on
/**
 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
 * <p>Uses C4.5 with <b>SMOTE</b>, combined with an <b>Information Gain</b> attribute filter, to classify the dataset.</p>
 * @param path dataset path
 * @param index row of the results array in which the evaluation metrics are stored
 * @throws Exception
 */
public static void getEvalResultbyInfoGain(String path, int index) throws Exception{
Instances ins = DataSource.read(path);
int numAttr = ins.numAttributes();
ins.setClassIndex(numAttr - 1);
/**information gain filter to process the whole dataset first*/
InfoGainAttributeEval evall = new InfoGainAttributeEval();
Ranker ranker = new Ranker();
AttributeSelection selector = new AttributeSelection();
selector.setEvaluator(evall);
selector.setSearch(ranker);
selector.setInputFormat(ins);
ins = Filter.useFilter(ins, selector);
SMOTE smote = new SMOTE();
smote.setInputFormat(ins);
/** classifiers setting*/
J48 j48 = new J48();
// j48.setConfidenceFactor(0.4f);
j48.buildClassifier(ins);
FilteredClassifier fc = new FilteredClassifier();
fc.setClassifier(j48);
fc.setFilter(smote);
Evaluation eval = new Evaluation(ins);
eval.crossValidateModel(fc, ins, 10, new Random(1));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
// System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
results[index][0] = eval.precision(0);
results[index][1] = eval.recall(0);
results[index][2] = eval.fMeasure(0);
results[index][3] = eval.precision(1);
results[index][4] = eval.recall(1);
results[index][5] = eval.fMeasure(1);
results[index][6] = 1-eval.errorRate();
}
Example 15: getEvalResultbyGainRatio
import weka.core.Instances; // import the package/class that this method depends on
/**
 * <p>Runs 10-fold cross-validation on the single ARFF file at <b>path</b>.</p>
 * <p>Uses C4.5 with <b>SMOTE</b>, combined with an <b>Information Gain Ratio</b> attribute filter, to classify the dataset.</p>
 * @param path dataset path
 * @param index row of the results array in which the evaluation metrics are stored
 * @throws Exception
 */
public static void getEvalResultbyGainRatio(String path, int index) throws Exception{
Instances ins = DataSource.read(path);
int numAttr = ins.numAttributes();
ins.setClassIndex(numAttr - 1);
/**information gain ratio filter to process the whole dataset first*/
GainRatioAttributeEval evall = new GainRatioAttributeEval();
Ranker ranker = new Ranker();
AttributeSelection selector = new AttributeSelection();
selector.setEvaluator(evall);
selector.setSearch(ranker);
selector.setInputFormat(ins);
ins = Filter.useFilter(ins, selector);
SMOTE smote = new SMOTE();
smote.setInputFormat(ins);
/** classifiers setting*/
J48 j48 = new J48();
// j48.setConfidenceFactor(0.4f);
j48.buildClassifier(ins);
FilteredClassifier fc = new FilteredClassifier();
fc.setClassifier(j48);
fc.setFilter(smote);
Evaluation eval = new Evaluation(ins);
eval.crossValidateModel(fc, ins, 10, new Random(1));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(0), eval.recall(0), eval.fMeasure(0));
// System.out.printf(" %4.3f %4.3f %4.3f", eval.precision(1), eval.recall(1), eval.fMeasure(1));
// System.out.printf(" %4.3f \n\n", (1-eval.errorRate()));
results[index][0] = eval.precision(0);
results[index][1] = eval.recall(0);
results[index][2] = eval.fMeasure(0);
results[index][3] = eval.precision(1);
results[index][4] = eval.recall(1);
results[index][5] = eval.fMeasure(1);
results[index][6] = 1-eval.errorRate();
}