本文整理汇总了Java中weka.classifiers.functions.LinearRegression.buildClassifier方法的典型用法代码示例。如果您正苦于以下问题:Java LinearRegression.buildClassifier方法的具体用法?Java LinearRegression.buildClassifier怎么用?Java LinearRegression.buildClassifier使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类weka.classifiers.functions.LinearRegression
的用法示例。
在下文中一共展示了LinearRegression.buildClassifier方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: buildRegression
import weka.classifiers.functions.LinearRegression; //导入方法依赖的package包/类
/**
 * Fits a Weka linear-regression model to the {@code cpu} dataset held by this
 * object and stores it in {@code lReg}, then prints the fitted model.
 *
 * <p>NOTE(review): assumes the {@code cpu} Instances already have a class
 * attribute set — TODO confirm at the call site.
 */
public void buildRegression() {
    lReg = new LinearRegression();
    try {
        lReg.buildClassifier(cpu);
    } catch (Exception e) {
        // Fix: the original empty catch swallowed the failure and then printed
        // an unbuilt model. Surface the error instead of failing silently.
        System.err.println("Failed to build linear regression: " + e);
    }
    System.out.println(lReg);
}
示例2: buildLinearModel
import weka.classifiers.functions.LinearRegression; //导入方法依赖的package包/类
/**
* Build a linear model for this node using those attributes specified in
* indices.
*
* @param indices an array of attribute indices to include in the linear model
* @throws Exception if something goes wrong
*/
/**
 * Builds the linear model for this node from the attributes named in
 * {@code indices} and stores it in {@code m_nodeModel}.
 *
 * @param indices attribute indices (into {@code m_instances}) to keep
 * @throws Exception if filtering or regression fails
 */
private void buildLinearModel(int[] indices) throws Exception {
    // Restrict a copy of the training data to the requested attributes.
    Instances filtered = new Instances(m_instances);
    Remove remover = new Remove();
    remover.setInvertSelection(true);
    remover.setAttributeIndicesArray(indices);
    remover.setInputFormat(filtered);
    filtered = Filter.useFilter(filtered, remover);

    // Fit an ordinary linear regression on the reduced data.
    LinearRegression regressor = new LinearRegression();
    regressor.buildClassifier(filtered);
    double[] rawCoeffs = regressor.coefficients();

    // Scatter the fitted coefficients back to their original attribute
    // positions; the final entry of rawCoeffs is the intercept term.
    double[] fullCoeffs = new double[m_instances.numAttributes()];
    for (int idx = 0; idx < rawCoeffs.length - 1; idx++) {
        if (indices[idx] != m_classIndex) {
            fullCoeffs[indices[idx]] = rawCoeffs[idx];
        }
    }

    m_nodeModel =
        new PreConstructedLinearModel(fullCoeffs, rawCoeffs[rawCoeffs.length - 1]);
    m_nodeModel.buildClassifier(m_instances);
}
示例3: buildLinearModel
import weka.classifiers.functions.LinearRegression; //导入方法依赖的package包/类
/**
* Build a linear model for this node using those attributes
* specified in indices.
*
* @param indices an array of attribute indices to include in the linear
* model
* @throws Exception if something goes wrong
*/
/**
 * Builds this node's linear model over the attribute subset given by
 * {@code indices}, writing the result into {@code m_nodeModel}.
 *
 * @param indices indices of the attributes to include in the model
 * @throws Exception if the filter or the regression cannot be built
 */
private void buildLinearModel(int[] indices) throws Exception {
    // Project the training instances down to the selected attributes only.
    Remove keepSelected = new Remove();
    keepSelected.setInvertSelection(true);
    keepSelected.setAttributeIndicesArray(indices);
    Instances subset = new Instances(m_instances);
    keepSelected.setInputFormat(subset);
    subset = Filter.useFilter(subset, keepSelected);

    // Regress on the projected data.
    LinearRegression lr = new LinearRegression();
    lr.buildClassifier(subset);
    double[] fitted = lr.coefficients();

    // Re-expand coefficients into full attribute space, skipping the class
    // attribute; fitted's last element is the intercept.
    double[] expanded = new double[m_instances.numAttributes()];
    for (int pos = 0; pos < fitted.length - 1; pos++) {
        if (indices[pos] != m_classIndex) {
            expanded[indices[pos]] = fitted[pos];
        }
    }

    m_nodeModel = new PreConstructedLinearModel(expanded, fitted[fitted.length - 1]);
    m_nodeModel.buildClassifier(m_instances);
}
示例4: setLinear
import weka.classifiers.functions.LinearRegression; //导入方法依赖的package包/类
/**
* This function gets called to set the node to use a linear regression
* and attribute filter.
* @throws Exception If can't set a default linear egression model.
*/
/**
 * Configures this node to use a linear regression over the attributes that
 * were used for splitting anywhere on the path from this node to the root,
 * combined with a Remove filter that discards all other attributes.
 *
 * Side effects: initialises m_filter, and builds m_classObject on the
 * filtered training data.
 *
 * @throws Exception If can't set a default linear regression model.
 */
private void setLinear() throws Exception {
//then set default behaviour for node.
//set linear regression combined with attribute filter
//find the attributes used for splitting.
// Boolean mask over all attributes: true = keep for the regression.
boolean[] attributeList = new boolean[m_training.numAttributes()];
for (int noa = 0; noa < m_training.numAttributes(); noa++) {
attributeList[noa] = false;
}
// Walk up the tree, marking every attribute any ancestor split on.
// NOTE(review): assumes m_attrib1/m_attrib2 are valid attribute indices
// on every ancestor node — confirm against the tree-building code.
TreeClass temp = this;
// Always keep the class attribute itself.
attributeList[m_training.classIndex()] = true;
while (temp != null) {
attributeList[temp.m_attrib1] = true;
attributeList[temp.m_attrib2] = true;
temp = temp.m_parent;
}
// Recompute where the class attribute lands AFTER filtering: it is the
// number of kept attributes that precede it in the original ordering.
int classind = 0;
//find the new class index
for (int noa = 0; noa < m_training.classIndex(); noa++) {
if (attributeList[noa]) {
classind++;
}
}
//count how many attribs were used
int count = 0;
for (int noa = 0; noa < m_training.numAttributes(); noa++) {
if (attributeList[noa]) {
count++;
}
}
//fill an int array with the numbers of those attribs
int[] attributeList2 = new int[count];
count = 0;
for (int noa = 0; noa < m_training.numAttributes(); noa++) {
if (attributeList[noa]) {
attributeList2[count] = noa;
count++;
}
}
// Build the Remove filter: invert selection so only the marked
// attributes survive.
m_filter = new Remove();
((Remove)m_filter).setInvertSelection(true);
((Remove)m_filter).setAttributeIndicesArray(attributeList2);
m_filter.setInputFormat(m_training);
// Apply the filter, point the class index at its new position, and fit
// the default linear regression on the reduced data.
Instances temp2 = Filter.useFilter(m_training, m_filter);
temp2.setClassIndex(classind);
m_classObject = new LinearRegression();
m_classObject.buildClassifier(temp2);
}