本文整理汇总了Java中weka.classifiers.functions.LinearRegression类的典型用法代码示例。如果您正苦于以下问题:Java LinearRegression类的具体用法?Java LinearRegression怎么用?Java LinearRegression使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
LinearRegression类属于weka.classifiers.functions包,在下文中一共展示了LinearRegression类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: train
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
/**
 * Trains a linear-regression model from a CSV dataset file and optionally
 * serialises it to {@code modelFilename}.
 *
 * @param datasetFilename path to the CSV training file; the first line is a
 *                        legend/header and is skipped
 * @param serialise       if true, the built model is written to disk
 */
public void train(String datasetFilename, boolean serialise)
{
    String[] lines = Utils.readLines(datasetFilename);
    int i = 1; // current line index; kept outside try for error reporting
    try
    {
        for(i = 1; i < lines.length; i++) // skip legend on line 0
        {
            dataset.add(createFeatureVector(lines[i].split(","), true));
        }
        model = (Classifier) new LinearRegression();
        // Very small ridge keeps the solution numerically stable without
        // noticeably biasing the coefficients.
        ((LinearRegression)model).setRidge(1.0e-10);
        model.buildClassifier(dataset);
        if(serialise)
        {
            // try-with-resources: previously the FileOutputStream was never
            // closed, leaking the file handle.
            try (FileOutputStream fos = new FileOutputStream(modelFilename))
            {
                SerializationHelper.write(fos, model);
            }
        }
    }
    catch(Exception e)
    {
        // BUG FIX: when the failure happens after the parsing loop (e.g. in
        // buildClassifier), i == lines.length, so the old "lines[i]" access
        // threw ArrayIndexOutOfBoundsException and masked the real error.
        if (i < lines.length)
        {
            System.err.println("Error in line " + i + ": " + lines[i]);
        }
        else
        {
            System.err.println("Error after parsing input (model build or serialisation)");
        }
        e.printStackTrace();
    }
}
示例2: initClassifier
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
/**
 * Loads an ARFF dataset from {@code file}, uses the last attribute as the
 * class, and trains a {@link LinearRegression} classifier on it. The loaded
 * data is also stored in the {@code data} field as a side effect.
 *
 * @param file path to the ARFF training file
 * @return the trained classifier
 * @throws RuntimeException wrapping any I/O or training failure
 */
private Classifier initClassifier(final String file) {
    // try-with-resources: the reader was previously only closed on the
    // success path, leaking the file handle whenever loading or training threw.
    try (final BufferedReader br = new BufferedReader(new FileReader(file))) {
        data = new Instances(br);
        // Convention: the class attribute is the last column.
        data.setClassIndex(data.numAttributes() - 1);
        final LinearRegression lr = new LinearRegression();
        lr.buildClassifier(data);
        return lr;
    } catch (final Exception e) {
        // Preserve the cause rather than just its message.
        throw new RuntimeException(e);
    }
}
示例3: LocallyWeightedLinearRegression
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
/**
* Constructor.
* @param dataset WEKA Instances object.
* @throws Exception
*/
public LocallyWeightedLinearRegression(Instances dataset) throws Exception
{
// set the method for local regression
lwl.setClassifier(new LinearRegression());
// set number of nearest neighbours to be used for local prediction
lwl.setKNN(10); // 10 by default
// set weighting kernel method (see comments on constants)
lwl.setWeightingKernel(LINEAR);
// set KDTree as nearest neighbour search method
lwl.setNearestNeighbourSearchAlgorithm(new KDTree());
// build the classifier
lwl.buildClassifier(dataset);
// store instance reference
this.dataset = dataset;
}
示例4: buildRegression
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
/**
 * Builds a linear regression model over the {@code cpu} dataset and prints
 * the resulting model to stdout.
 */
public void buildRegression(){
    lReg = new LinearRegression();
    try {
        lReg.buildClassifier(cpu);
    } catch (Exception e) {
        // BUG FIX: the exception was previously swallowed silently, so a
        // failed build went unnoticed and an untrained model was printed.
        System.err.println("Failed to build linear regression on 'cpu' data");
        e.printStackTrace();
    }
    System.out.println(lReg);
}
示例5: LR_Model
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
/**
* Generates a Weka LinearRegression function Model acting on our data instance with our parameters.
*/
public LR_Model(Instances d, String[] params) throws ModelConstructException,Exception {
super(d,params);
classifier = new LinearRegression();
prepare();
run();
}
示例6: createLinearRegression
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
/**
 * Factory for a {@link LinearRegression} configured to use every attribute
 * as-is: attribute selection is turned off and collinear attributes are
 * retained rather than eliminated.
 *
 * @return a freshly configured, untrained LinearRegression
 */
public static LinearRegression createLinearRegression() {
    final LinearRegression regression = new LinearRegression();
    // Skip Weka's built-in attribute selection so all attributes enter the model.
    regression.setAttributeSelectionMethod(
            new SelectedTag(LinearRegression.SELECTION_NONE, LinearRegression.TAGS_SELECTION));
    // Keep collinear attributes instead of dropping them.
    regression.setEliminateColinearAttributes(false);
    return regression;
}
示例7: buildLinearModel
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
/**
* Build a linear model for this node using those attributes specified in
* indices.
*
* @param indices an array of attribute indices to include in the linear model
* @throws Exception if something goes wrong
*/
/**
 * Builds a linear model for this node using only the attributes listed in
 * {@code indices}, then stores it as the node model with coefficients
 * re-indexed against the full attribute set.
 *
 * @param indices an array of attribute indices to include in the linear model
 * @throws Exception if filtering or model building fails
 */
private void buildLinearModel(int[] indices) throws Exception {
    // Filter a copy of the training data down to the tested attributes.
    Remove keepSelected = new Remove();
    keepSelected.setInvertSelection(true);
    keepSelected.setAttributeIndicesArray(indices);
    Instances trimmed = new Instances(m_instances);
    keepSelected.setInputFormat(trimmed);
    trimmed = Filter.useFilter(trimmed, keepSelected);

    // Fit ordinary linear regression on the reduced data.
    LinearRegression regression = new LinearRegression();
    regression.buildClassifier(trimmed);
    double[] reducedCoeffs = regression.coefficients();

    // Scatter reduced coefficients back into a full-width array keyed by the
    // original attribute indices, skipping the class attribute. The final
    // entry of reducedCoeffs is the intercept.
    double[] fullCoeffs = new double[m_instances.numAttributes()];
    for (int k = 0; k < reducedCoeffs.length - 1; k++) {
        if (indices[k] != m_classIndex) {
            fullCoeffs[indices[k]] = reducedCoeffs[k];
        }
    }
    m_nodeModel = new PreConstructedLinearModel(fullCoeffs,
            reducedCoeffs[reducedCoeffs.length - 1]);
    m_nodeModel.buildClassifier(m_instances);
}
示例8: main
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
/**
 * Demo entry point: declares the housing attributes, builds a linear
 * regression on the training set, prints the model and its evaluation, then
 * predicts the selling price for one query instance.
 */
public static void main(String[] args) throws Exception {
    // Declare the six numeric attributes of the housing dataset.
    attrs = new FastVector(6);
    for (String attributeName : new String[] {
            "houseSize", "lotSize", "bedrooms", "granite", "bathroom", "sellingPrice" }) {
        attrs.addElement(new Attribute(attributeName));
    }
    // Populate the training instances.
    createTrainingSet();
    // Train a plain LinearRegression model.
    Classifier model = new LinearRegression();
    model.buildClassifier(isTrainingSet);
    // Print the fitted model, Weka-explorer style.
    System.out.println(model.toString());
    // Evaluate on the training data itself and print the summary.
    Evaluation evaluation = new Evaluation(isTrainingSet);
    evaluation.evaluateModel(model, isTrainingSet);
    System.out.println(evaluation.toSummaryString());
    // Query instance must belong to the training set's dataset to inherit
    // its attribute description.
    Instance query = createInstance(3198, 9669, 5, 0, 1, 0);
    query.setDataset(isTrainingSet);
    // For regression, the distribution holds the single predicted value.
    double[] distribution = model.distributionForInstance(query);
    System.out.println(distribution[0]);
}
示例9: buildLinearModel
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
/**
* Build a linear model for this node using those attributes
* specified in indices.
*
* @param indices an array of attribute indices to include in the linear
* model
* @throws Exception if something goes wrong
*/
private void buildLinearModel(int [] indices) throws Exception {
// copy the training instances and remove all but the tested
// attributes
Instances reducedInst = new Instances(m_instances);
Remove attributeFilter = new Remove();
attributeFilter.setInvertSelection(true);
attributeFilter.setAttributeIndicesArray(indices);
attributeFilter.setInputFormat(reducedInst);
reducedInst = Filter.useFilter(reducedInst, attributeFilter);
// build a linear regression for the training data using the
// tested attributes
LinearRegression temp = new LinearRegression();
temp.buildClassifier(reducedInst);
double [] lmCoeffs = temp.coefficients();
double [] coeffs = new double [m_instances.numAttributes()];
for (int i = 0; i < lmCoeffs.length - 1; i++) {
if (indices[i] != m_classIndex) {
coeffs[indices[i]] = lmCoeffs[i];
}
}
m_nodeModel = new PreConstructedLinearModel(coeffs, lmCoeffs[lmCoeffs.length - 1]);
m_nodeModel.buildClassifier(m_instances);
}
示例10: trainForecaster
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
/**
 * Trains a Weka time-series forecaster over the given date-keyed volume
 * series using LinearRegression as the base model, auto-detecting the
 * series periodicity.
 *
 * @param data mapping from date string to volume count
 * @return the trained forecaster, or {@code null} if training failed
 */
private WekaForecaster trainForecaster(TreeMap<String, Long> data) {
    try {
        // Convert the raw map into Weka instances.
        Instances trainingData = dataToInstances(data);
        WekaForecaster wekaForecaster = new WekaForecaster();
        // Target field to predict and the field carrying the timestamp.
        wekaForecaster.setFieldsToForecast(VOLUME_FIELD);
        wekaForecaster.getTSLagMaker().setTimeStampField(DATE_FIELD);
        // LinearRegression is the underlying per-step model.
        wekaForecaster.setBaseForecaster(new LinearRegression());
        // Detect the periodicity automatically (mirrors the Weka GUI
        // behaviour); lags and seasonal indicator fields could instead be
        // configured explicitly on the TSLagMaker.
        detectPeriodicity(wekaForecaster, trainingData, DATE_FIELD);
        System.out.println("Training forecaster");
        wekaForecaster.buildForecaster(trainingData, System.out);
        System.out.println("Training done.");
        return wekaForecaster;
    } catch (Exception e) {
        // Best-effort: log and signal failure with null.
        e.printStackTrace();
        return null;
    }
}
示例11: StackingC
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
/**
* The constructor.
*/
public StackingC() {
m_MetaClassifier = new weka.classifiers.functions.LinearRegression();
((LinearRegression)(getMetaClassifier())).
setAttributeSelectionMethod(new
weka.core.SelectedTag(1, LinearRegression.TAGS_SELECTION));
}
示例12: processMetaOptions
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
/**
* Process options setting meta classifier.
*
* @param options the meta options to parse
* @throws Exception if parsing fails
*/
/**
 * Process options setting meta classifier.
 *
 * Parses the {@code -M} option; when present, instantiates the named
 * classifier with its trailing options, otherwise configures the default
 * meta classifier's (LinearRegression) attribute-selection method.
 *
 * @param options the meta options to parse
 * @throws Exception if parsing fails
 */
protected void processMetaOptions(String[] options) throws Exception {
    String metaOption = Utils.getOption('M', options);
    String[] spec = Utils.splitOptions(metaOption);
    if (spec.length == 0) {
        // No -M supplied: keep the default meta classifier but set its
        // attribute-selection method (tag 1 of TAGS_SELECTION).
        ((LinearRegression) (getMetaClassifier()))
                .setAttributeSelectionMethod(
                        new weka.core.SelectedTag(1, LinearRegression.TAGS_SELECTION));
    } else {
        // First token is the classifier class name; the remainder are its
        // own options (the name slot is blanked before forwarding).
        String classifierName = spec[0];
        spec[0] = "";
        setMetaClassifier(Classifier.forName(classifierName, spec));
    }
}
示例13: getCapabilities
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
/** Delegates capability reporting to a default LinearRegression instance. */
@Override
public Capabilities getCapabilities() {
return new LinearRegression().getCapabilities();
}
示例14: WekaLinRegData
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
public WekaLinRegData(Standardize standardize, LinearRegression linearRegression, int timeslot) {
this.standardize = standardize;
this.linearRegression = linearRegression;
this.timeslot = timeslot;
}
示例15: getLinearRegression
import weka.classifiers.functions.LinearRegression; //导入依赖的package包/类
/** @return the stored trained linear regression model */
public LinearRegression getLinearRegression() {
return linearRegression;
}