本文整理汇总了Java中weka.core.ContingencyTables.entropy方法的典型用法代码示例。如果您正苦于以下问题:Java ContingencyTables.entropy方法的具体用法?Java ContingencyTables.entropy怎么用?Java ContingencyTables.entropy使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类weka.core.ContingencyTables
的用法示例。
在下文中一共展示了ContingencyTables.entropy方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: FayyadAndIranisMDL
import weka.core.ContingencyTables; //导入方法依赖的package包/类
/**
 * Tests whether the best split found is acceptable according to Fayyad
 * and Irani's MDL (Minimum Description Length) stopping criterion.
 *
 * @param priorCounts class counts of the full set, before the split
 * @param bestCounts per-class counts of the two subsets produced by the
 *          best split; row 0 is the left subset, row 1 the right subset
 * @param numInstances total number of instances in the set
 * @param numCutPoints number of candidate cut points that were evaluated
 * @return true if the split is acceptable
 */
private boolean FayyadAndIranisMDL(double[] priorCounts,
double[][] bestCounts, double numInstances, int numCutPoints) {
double priorEntropy, entropy, gain;
double entropyLeft, entropyRight, delta;
int numClassesTotal, numClassesRight, numClassesLeft;
// Compute entropy before the split.
priorEntropy = ContingencyTables.entropy(priorCounts);
// Compute entropy after the split (conditioned on the two subsets).
entropy = ContingencyTables.entropyConditionedOnRows(bestCounts);
// Compute information gain.
gain = priorEntropy - entropy;
// Number of classes occurring in the full set
numClassesTotal = 0;
for (double priorCount : priorCounts) {
if (priorCount > 0) {
numClassesTotal++;
}
}
// Number of classes occurring in the left subset
numClassesLeft = 0;
for (int i = 0; i < bestCounts[0].length; i++) {
if (bestCounts[0][i] > 0) {
numClassesLeft++;
}
}
// Number of classes occurring in the right subset
numClassesRight = 0;
for (int i = 0; i < bestCounts[1].length; i++) {
if (bestCounts[1][i] > 0) {
numClassesRight++;
}
}
// Entropy of the left and the right subsets
entropyLeft = ContingencyTables.entropy(bestCounts[0]);
entropyRight = ContingencyTables.entropy(bestCounts[1]);
// MDL delta term:
// log2(3^k - 2) - [k*Ent(S) - kRight*Ent(S_right) - kLeft*Ent(S_left)]
delta = Utils.log2(Math.pow(3, numClassesTotal) - 2)
- ((numClassesTotal * priorEntropy) - (numClassesRight * entropyRight) - (numClassesLeft * entropyLeft));
// Accept the split only if the gain exceeds the MDL cost of encoding it.
return (gain > (Utils.log2(numCutPoints) + delta) / numInstances);
}
示例2: FayyadAndIranisMDL
import weka.core.ContingencyTables; //导入方法依赖的package包/类
/**
 * Applies Fayyad and Irani's MDL (Minimum Description Length) stopping
 * criterion to decide whether the best split found should be accepted.
 *
 * @param priorCounts class counts of the full set, before the split
 * @param bestCounts per-class counts of the two subsets produced by the
 *          best split; row 0 is the left subset, row 1 the right subset
 * @param numInstances total number of instances in the set
 * @param numCutPoints number of candidate cut points that were evaluated
 * @return true if the split is acceptable
 */
private boolean FayyadAndIranisMDL(double[] priorCounts,
double[][] bestCounts,
double numInstances,
int numCutPoints) {
// Entropy of the full set versus the entropy conditioned on the split.
final double entropyBefore = ContingencyTables.entropy(priorCounts);
final double entropyAfter =
ContingencyTables.entropyConditionedOnRows(bestCounts);
final double gain = entropyBefore - entropyAfter;

// Number of classes represented in the whole set and in each subset.
final int classesTotal = numNonEmptyClasses(priorCounts);
final int classesLeft = numNonEmptyClasses(bestCounts[0]);
final int classesRight = numNonEmptyClasses(bestCounts[1]);

// Per-subset entropies required by the MDL delta term.
final double entropyLeft = ContingencyTables.entropy(bestCounts[0]);
final double entropyRight = ContingencyTables.entropy(bestCounts[1]);

// delta = log2(3^k - 2)
//         - [k*Ent(S) - kRight*Ent(S_right) - kLeft*Ent(S_left)]
final double delta = Utils.log2(Math.pow(3, classesTotal) - 2) -
(((double) classesTotal * entropyBefore) -
(classesRight * entropyRight) -
(classesLeft * entropyLeft));

// Accept only if the information gain pays for the encoding cost.
return (gain > (Utils.log2(numCutPoints) + delta) / (double) numInstances);
}

/**
 * Counts how many entries of the given class-count array are strictly
 * positive, i.e. how many classes actually occur.
 *
 * @param counts per-class counts
 * @return the number of non-empty classes
 */
private int numNonEmptyClasses(double[] counts) {
int n = 0;
for (double count : counts) {
if (count > 0) {
n++;
}
}
return n;
}
示例3: FayyadAndIranisMDL
import weka.core.ContingencyTables; //导入方法依赖的package包/类
/**
 * Decides whether the best binary split passes Fayyad and Irani's
 * MDL-based (Minimum Description Length) stopping test.
 *
 * @param priorCounts class counts of the full set, before the split
 * @param bestCounts per-class counts of the two subsets produced by the
 *          best split; row 0 is the left subset, row 1 the right subset
 * @param numInstances total number of instances in the set
 * @param numCutPoints number of candidate cut points that were evaluated
 * @return true if the split is acceptable
 */
private boolean FayyadAndIranisMDL(double[] priorCounts,
double[][] bestCounts,
double numInstances,
int numCutPoints) {
// Information gain of the split: entropy before minus entropy after.
double baseEntropy = ContingencyTables.entropy(priorCounts);
double splitEntropy = ContingencyTables.entropyConditionedOnRows(bestCounts);
double infoGain = baseEntropy - splitEntropy;

// Number of distinct classes present overall and in each branch.
int kTotal = 0;
for (double count : priorCounts) {
if (count > 0) {
kTotal++;
}
}
int kLeft = 0;
for (double count : bestCounts[0]) {
if (count > 0) {
kLeft++;
}
}
int kRight = 0;
for (double count : bestCounts[1]) {
if (count > 0) {
kRight++;
}
}

// Branch entropies needed for the MDL correction term.
double leftEntropy = ContingencyTables.entropy(bestCounts[0]);
double rightEntropy = ContingencyTables.entropy(bestCounts[1]);

// mdlDelta = log2(3^kTotal - 2)
//            - [kTotal*Ent(S) - kRight*Ent(S_right) - kLeft*Ent(S_left)]
double mdlDelta = Utils.log2(Math.pow(3, kTotal) - 2) -
(((double) kTotal * baseEntropy) -
(kRight * rightEntropy) -
(kLeft * leftEntropy));

// The split is kept only if its gain exceeds the MDL encoding cost.
return (infoGain > (Utils.log2(numCutPoints) + mdlDelta) / (double) numInstances);
}