This article collects typical usage examples of the Java class weka.classifiers.functions.SMO. If you are wondering how the SMO class is used in practice, what it is for, or what working SMO code looks like, the curated examples below should help.
The SMO class belongs to the weka.classifiers.functions package. Fifteen code examples of the class are shown below, ordered by popularity by default.
Example 1: runExps
import weka.classifiers.functions.SMO; // import the required package/class
public void runExps() {
    Classifier c1 = new SMO();
    Classifier c2 = new J48();
    Classifier c3 = new NaiveBayes();
    trainModel(c1, "SVM");
    trainModel(c2, "J48");
    trainModel(c3, "Naive Bayes");
}
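The trainModel(...) helper called above is not part of this snippet. A minimal sketch of what it might look like, assuming a data field that already holds the loaded Instances with the class index set; the class name, the field, and the 10-fold setup are illustrative assumptions, not the original author's code:
import java.util.Random;
import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.core.Instances;

public class ExperimentRunner {
    private Instances data; // assumed to be loaded elsewhere, with the class index already set

    public void trainModel(Classifier classifier, String name) {
        try {
            // Estimate performance with 10-fold cross-validation and print a summary
            Evaluation eval = new Evaluation(data);
            eval.crossValidateModel(classifier, data, 10, new Random(1));
            System.out.println(name + "\n" + eval.toSummaryString());
            // Train the final model on all of the data
            classifier.buildClassifier(data);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}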
Example 2: Main
import weka.classifiers.functions.SMO; // import the required package/class
public Main() {
    try {
        BufferedReader datafile = readDataFile("camping.txt");
        Instances data = new Instances(datafile);
        data.setClassIndex(data.numAttributes() - 1);

        Instances trainingData = new Instances(data, 0, 14);
        Instances testingData = new Instances(data, 14, 5);
        Evaluation evaluation = new Evaluation(trainingData);

        SMO smo = new SMO();
        smo.buildClassifier(trainingData); // train on the training split only, not on the full dataset
        evaluation.evaluateModel(smo, testingData);
        System.out.println(evaluation.toSummaryString());

        // Test instance
        Instance instance = new DenseInstance(3);
        instance.setValue(data.attribute("age"), 78);
        instance.setValue(data.attribute("income"), 125700);
        instance.setValue(data.attribute("camps"), 1);
        instance.setDataset(data);
        System.out.println("The instance: " + instance);
        System.out.println(smo.classifyInstance(instance));
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
Developer: PacktPublishing, Project: Machine-Learning-End-to-Endguide-for-Java-developers, Lines: 30, Source file: Main-SVG.java
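The readDataFile(...) helper used above is not shown in the snippet. A plausible sketch (an assumption, requiring java.io.BufferedReader, java.io.FileReader, and java.io.FileNotFoundException) is a thin wrapper that opens the file for the Instances constructor:
private BufferedReader readDataFile(String filename) {
    BufferedReader reader = null;
    try {
        // The caller wraps this reader in new Instances(...)
        reader = new BufferedReader(new FileReader(filename));
    } catch (FileNotFoundException e) {
        System.err.println("File not found: " + filename);
    }
    return reader;
}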
Example 3: trainClassifier
import weka.classifiers.functions.SMO; // import the required package/class
/**
 * Trains the classifier on the given dataset.
 * @param trainingData the dataset to train on
 * @param pathToSaveModel directory in which the trained model is saved
 * @param crossValidate whether to cross-validate the trained model
 * @throws ClassifierException if training fails for some reason
 */
public void trainClassifier(Instances trainingData, String pathToSaveModel,
        boolean crossValidate) throws ClassifierException {
    log.info("Training the classifier with " + trainingData.numInstances() + " instances");
    SMO smoClassifier = new SMO();
    trainingData.setClass(trainingData.attribute("@@[email protected]@"));
    try {
        String[] options = Utils.splitOptions("-C 1.0 -L 0.001 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers" +
                ".functions.supportVector.PolyKernel -C 250007 -E 1.0\"");
        smoClassifier.setOptions(options);
        smoClassifier.buildClassifier(trainingData);
        classifier = smoClassifier;
        if (crossValidate) {
            crossValidate(trainingData);
        }
        saveModel(new File(pathToSaveModel, "latest.model").getAbsolutePath());
        log.info("Model built and saved");
    } catch (Exception e) {
        log.error("Training classifier failed.", e);
        throw new ClassifierException("Classification failed.", e);
    }
}
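The crossValidate(...) helper invoked above is not shown either. A minimal sketch, assuming the standard Weka 10-fold evaluation on the classifier field (the fold count, random seed, and log message are assumptions; requires weka.classifiers.Evaluation and java.util.Random):
private void crossValidate(Instances trainingData) throws Exception {
    Evaluation evaluation = new Evaluation(trainingData);
    evaluation.crossValidateModel(classifier, trainingData, 10, new Random(1));
    log.info(evaluation.toSummaryString("10-fold cross-validation results:", false));
}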
Example 4: initializeMembers
import weka.classifiers.functions.SMO; // import the required package/class
/**
 * Performs initialization of members.
 */
@Override
protected void initializeMembers() {
    super.initializeMembers();

    m_TrainsetNew = null;
    m_TestsetNew = null;
    m_Alpha = 0.99;
    m_Sigma = 1.0;
    m_Repeats = 0;
    m_SequenceLimit = SEQ_LIMIT_GRAPHKERNEL;
    m_filterType = SMO.FILTER_NORMALIZE;
    m_IncludeNumAttributes = true;
    m_MatrixY = null;
    m_MatrixW = null;
    m_MatrixD = null;
    m_MatrixS = null;
    m_MatrixFStar = null;
    m_Data = null;
    m_DistanceFunction = new EuclideanDistance();
}
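The SMO.FILTER_NORMALIZE constant stored in m_filterType is normally handed back to an SMO instance through a SelectedTag. A minimal, self-contained sketch of how that value is applied:
import weka.classifiers.functions.SMO;
import weka.core.SelectedTag;

public class FilterTypeDemo {
    public static void main(String[] args) {
        SMO smo = new SMO();
        // FILTER_NORMALIZE makes SMO normalize the training data before solving
        smo.setFilterType(new SelectedTag(SMO.FILTER_NORMALIZE, SMO.TAGS_FILTER));
        System.out.println(smo.getFilterType());
    }
}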
Example 5: main
import weka.classifiers.functions.SMO; // import the required package/class
/**
 * Main - do some tests.
 */
public static void main(String args[]) throws Exception {
    Instances D = Evaluation.loadDataset(args);
    MLUtils.prepareData(D);
    int L = D.classIndex();

    double CD[][] = null;
    if (args[2].equals("L")) {
        String I = "I";
        if (args.length >= 4) // args[3] only exists when at least four arguments are given
            I = args[3];
        CD = StatUtils.LEAD(D, new SMO(), new Random(), I);
    } else {
        CD = StatUtils.margDepMatrix(D, args[2]);
    }

    System.out.println(M.toString(CD, "M" + args[2]));
}
Example 6: testDeepML
import weka.classifiers.functions.SMO; // import the required package/class
public void testDeepML() {
    System.out.println("Test Stacked Boltzmann Machines with an off-the-shelf multi-label classifier");
    DeepML dbn = new DeepML();

    MCC h = new MCC();
    SMO smo = new SMO();
    smo.setBuildLogisticModels(true);
    h.setClassifier(smo);
    dbn.setClassifier(h);

    dbn.setE(100);
    dbn.setH(30);

    Result r = EvaluationTests.cvEvaluateClassifier(dbn);
    System.out.println("DeepML + MCC" + r.info.get("Accuracy"));
    String s = r.info.get("Accuracy");
    assertTrue("DeepML+MCC Accuracy Correct", s.startsWith("0.53")); // good enough
}
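setBuildLogisticModels(true) corresponds to SMO's -M flag: it fits logistic models to the SVM outputs so that distributionForInstance(...) returns calibrated class probabilities rather than hard 0/1 votes, which probabilistic multi-label methods such as MCC typically rely on. A standalone sketch (the ARFF path is an assumption):
import weka.classifiers.functions.SMO;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class SmoProbabilitiesDemo {
    public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("train.arff"); // illustrative path
        data.setClassIndex(data.numAttributes() - 1);

        SMO smo = new SMO();
        smo.setBuildLogisticModels(true); // enable probability estimates
        smo.buildClassifier(data);

        double[] dist = smo.distributionForInstance(data.instance(0));
        System.out.println(java.util.Arrays.toString(dist)); // per-class probabilities
    }
}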
Example 7: trainModel
import weka.classifiers.functions.SMO; // import the required package/class
public static void trainModel(String trainingPath, String outputPath, String learner) {
    if (learner == null) {
        learner = "SMO";
    }
    System.out.println("Training " + learner + " model...");
    if (learner.equals("SMO")) {
        SMO.main(new String[] {
            "-M",
            "-d", outputPath + "/pageclassifier.model",
            "-t", trainingPath + "/weka.arff",
            "-C", "0.01"
        });
    } else if (learner.equals("RandomForest")) {
        RandomForest.main(new String[] {
            // "-K", "5", // k-fold cross-validation
            "-I", "100", // number of trees to build
            "-d", outputPath + "/pageclassifier.model",
            "-t", trainingPath + "/weka.arff"
        });
    } else {
        System.out.println("Unknown learner: " + learner);
        return;
    }
}
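The -d option above serializes the trained model to disk. A minimal sketch of loading it back later with Weka's SerializationHelper (the path below mirrors the one constructed above and is illustrative):
import weka.classifiers.Classifier;
import weka.core.SerializationHelper;

public class LoadModelDemo {
    public static void main(String[] args) throws Exception {
        // Deserialize the model written by SMO.main / RandomForest.main above
        Classifier model = (Classifier) SerializationHelper.read("outputPath/pageclassifier.model");
        System.out.println(model);
    }
}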
Example 8: getClassifier
import weka.classifiers.functions.SMO; // import the required package/class
/**
 * Returns a new classifier based on the given algorithm.
 */
protected weka.classifiers.Classifier getClassifier(
        EClassificationAlgorithm algorithm) {
    switch (algorithm) {
    case DECISION_TREE_REP:
        return new REPTree();
    case SUPPORT_VECTOR_MACHINE_SMO:
        return new SMO();
    case COST_SENSITIVE_CLASSIFIER:
        return new CostSensitiveClassifier();
    case DECISION_TREE_J48:
        return new J48();
    default:
        throw new AssertionError(
                "Cannot create a classifier without a specified algorithm.");
    }
}
Example 9: WekaClassifier
import weka.classifiers.functions.SMO; // import the required package/class
public WekaClassifier(String wekaClassifierConfig) throws Exception {
    if (wekaClassifierConfig == null) {
        mCls = new SMO();
        return;
    }

    String classifier = wekaClassifierConfig;
    String[] options = null;
    int firstSpaceIndex = wekaClassifierConfig.indexOf(' ');
    if (firstSpaceIndex > 0) {
        classifier = wekaClassifierConfig.substring(0, firstSpaceIndex);
        options = wekaClassifierConfig.substring(firstSpaceIndex + 1).split(" ");
        Log.d(TAG, "Using classifier: " + classifier);
        Log.d(TAG, "options:" + Arrays.toString(options));
    } else {
        Log.d(TAG, "Using default classifier.");
    }
    mCls = Classifier.forName(classifier, options);
}
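Splitting the option string with split(" ") breaks as soon as an option value contains spaces inside quotes, for example a kernel specification. A sketch of a more robust variant using weka.core.Utils.splitOptions and Utils.forName, following the pattern from the Weka documentation (the config string is illustrative):
import weka.classifiers.Classifier;
import weka.core.Utils;

public class ParseConfigDemo {
    public static void main(String[] args) throws Exception {
        String config = "weka.classifiers.functions.SMO -C 2.0 "
                + "-K \"weka.classifiers.functions.supportVector.PolyKernel -E 2.0\"";

        String[] tokens = Utils.splitOptions(config); // respects quoted sub-options
        String classname = tokens[0];
        tokens[0] = "";                               // remaining tokens are the classifier options
        Classifier cls = (Classifier) Utils.forName(Classifier.class, classname, tokens);
        System.out.println(cls.getClass().getName());
    }
}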
Example 10: getCapabilities
import weka.classifiers.functions.SMO; // import the required package/class
/**
 * Returns the capabilities of this evaluator.
 *
 * @return the capabilities of this evaluator
 * @see Capabilities
 */
public Capabilities getCapabilities() {
    Capabilities result = new SMO().getCapabilities();
    result.setOwner(this);

    // Only binary attributes are allowed; otherwise the NominalToBinary
    // filter inside SMO would increase the number of attributes, which in turn
    // would lead to ArrayIndexOutOfBoundsExceptions.
    result.disable(Capability.NOMINAL_ATTRIBUTES);
    result.enable(Capability.BINARY_ATTRIBUTES);
    result.disableAllAttributeDependencies();

    return result;
}
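Because this evaluator only accepts binary attributes, nominal attributes have to be binarized before it is used. A minimal sketch with Weka's NominalToBinary filter (the input path is an assumption):
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.NominalToBinary;

public class BinarizeDemo {
    public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("data.arff"); // illustrative path
        data.setClassIndex(data.numAttributes() - 1);

        NominalToBinary ntb = new NominalToBinary();
        ntb.setInputFormat(data);
        // Every nominal attribute becomes a set of 0/1 indicator attributes
        Instances binary = Filter.useFilter(data, ntb);
        System.out.println(binary.numAttributes() + " attributes after conversion");
    }
}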
Example 11: getSMO
import weka.classifiers.functions.SMO; // import the required package/class
/**
 * Weka implementation of an SVM (SMO with a quadratic polynomial kernel).
 */
public Classifier getSMO() throws Exception {
    Classifier model = new SMO();
    ((PolyKernel) ((SMO) model).getKernel()).setExponent(2);
    model.buildClassifier(instances);
    return model;
}
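The double cast above works because SMO's default kernel is already a PolyKernel. An equivalent, slightly more explicit sketch configures the kernel up front and passes it in via setKernel (the surrounding class and its instances field are assumptions):
import weka.classifiers.Classifier;
import weka.classifiers.functions.SMO;
import weka.classifiers.functions.supportVector.PolyKernel;
import weka.core.Instances;

public class QuadraticSmoFactory {
    private final Instances instances; // assumed to be loaded with the class index set

    public QuadraticSmoFactory(Instances instances) {
        this.instances = instances;
    }

    public Classifier getSMO() throws Exception {
        SMO smo = new SMO();
        PolyKernel kernel = new PolyKernel();
        kernel.setExponent(2.0); // quadratic polynomial kernel
        smo.setKernel(kernel);
        smo.buildClassifier(instances);
        return smo;
    }
}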
Example 12: LearnSVM
import weka.classifiers.functions.SMO; // import the required package/class
@Override
public void LearnSVM() throws Exception {
    trainedData.setClassIndex(trainedData.numAttributes() - 1);

    filter = new StringToWordVector();
    classifier = new FilteredClassifier();
    classifier.setFilter(filter);
    classifier.setClassifier(new SMO());
    classifier.buildClassifier(trainedData);
}
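StringToWordVector runs with its defaults here; for text classification with SMO it is often worth tuning a few of its settings. A sketch of commonly adjusted options (the concrete values are illustrative):
import weka.classifiers.functions.SMO;
import weka.classifiers.meta.FilteredClassifier;
import weka.filters.unsupervised.attribute.StringToWordVector;

public class TextSvmDemo {
    public static FilteredClassifier build() {
        StringToWordVector filter = new StringToWordVector();
        filter.setLowerCaseTokens(true); // case-fold tokens
        filter.setTFTransform(true);     // term-frequency weighting
        filter.setIDFTransform(true);    // inverse-document-frequency weighting
        filter.setWordsToKeep(2000);     // size of the vocabulary to keep

        FilteredClassifier classifier = new FilteredClassifier();
        classifier.setFilter(filter);
        classifier.setClassifier(new SMO());
        return classifier;
    }
}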
Example 13: runSimple
import weka.classifiers.functions.SMO; // import the required package/class
public void runSimple() throws Exception {
    String language = "en";

    String trainCorpora = DemoConstants.TRAIN_FOLDER;
    String trainFileSuffix = "*.txt";

    String testCorpora = DemoConstants.TEST_FOLDER;
    String testFileSuffix = "*.txt";

    CollectionReaderDescription trainReader = CollectionReaderFactory.createReaderDescription(
            LineTokenTagReader.class, LineTokenTagReader.PARAM_LANGUAGE, language,
            LineTokenTagReader.PARAM_SOURCE_LOCATION, trainCorpora,
            LineTokenTagReader.PARAM_PATTERNS, trainFileSuffix);

    CollectionReaderDescription testReader = CollectionReaderFactory.createReaderDescription(
            LineTokenTagReader.class, LineTokenTagReader.PARAM_LANGUAGE, language,
            LineTokenTagReader.PARAM_SOURCE_LOCATION, testCorpora,
            LineTokenTagReader.PARAM_PATTERNS, testFileSuffix);

    FlexTagTrainTest flex = new FlexTagTrainTest(trainReader, testReader);

    if (System.getProperty("DKPRO_HOME") == null) {
        flex.setDKProHomeFolder("target/home");
    }

    flex.setExperimentName("WekaConfiguration");
    flex.setFeatures(TcFeatureFactory.create(LuceneCharacterNGram.class,
            LuceneCharacterNGram.PARAM_NGRAM_MIN_N, 2, LuceneCharacterNGram.PARAM_NGRAM_MAX_N, 4,
            LuceneCharacterNGram.PARAM_NGRAM_USE_TOP_K, 750));

    List<Object> configuration = asList(new Object[] { SMO.class.getName() });
    flex.setClassifier(Classifier.WEKA, configuration);

    flex.addReport(TtWekaKnownUnknownWordAccuracyReport.class);

    flex.execute();
}
Example 14: runComplex
import weka.classifiers.functions.SMO; // import the required package/class
public void runComplex() throws Exception {
    String language = "en";

    String trainCorpora = DemoConstants.TRAIN_FOLDER;
    String trainFileSuffix = "*.txt";

    String testCorpora = DemoConstants.TEST_FOLDER;
    String testFileSuffix = "*.txt";

    CollectionReaderDescription trainReader = CollectionReaderFactory.createReaderDescription(
            LineTokenTagReader.class, LineTokenTagReader.PARAM_LANGUAGE, language,
            LineTokenTagReader.PARAM_SOURCE_LOCATION, trainCorpora,
            LineTokenTagReader.PARAM_PATTERNS, trainFileSuffix);

    CollectionReaderDescription testReader = CollectionReaderFactory.createReaderDescription(
            LineTokenTagReader.class, LineTokenTagReader.PARAM_LANGUAGE, language,
            LineTokenTagReader.PARAM_SOURCE_LOCATION, testCorpora,
            LineTokenTagReader.PARAM_PATTERNS, testFileSuffix);

    FlexTagTrainTest flex = new FlexTagTrainTest(trainReader, testReader);

    if (System.getProperty("DKPRO_HOME") == null) {
        flex.setDKProHomeFolder("target/home");
    }

    flex.setExperimentName("WekaConfiguration");
    flex.setFeatures(TcFeatureFactory.create(LuceneCharacterNGram.class,
            LuceneCharacterNGram.PARAM_NGRAM_MIN_N, 2, LuceneCharacterNGram.PARAM_NGRAM_MAX_N, 4,
            LuceneCharacterNGram.PARAM_NGRAM_USE_TOP_K, 50));

    List<Object> configuration = asList(new Object[] { SMO.class.getName(), "-C", "1.0",
            "-K", PolyKernel.class.getName() + " " + "-C -1 -E 2" });
    flex.setClassifier(Classifier.WEKA, configuration);

    flex.addReport(TtWekaKnownUnknownWordAccuracyReport.class);

    flex.execute();
}
Example 15: configureCostSensitiveClassifier
import weka.classifiers.functions.SMO; // import the required package/class
/**
 * Configures a default CostSensitiveClassifier with SMO as the base learner.
 * The settings in this method must be adapted if it is reused in another project.
 * @param costSensitiveClassifier unconfigured CostSensitiveClassifier
 * @param data Weka data containing instances and attributes
 * @param costMatrix cost matrix in Weka's string representation
 * @return the fully configured CostSensitiveClassifier
 * @throws Exception if building the classifier fails
 */
protected static CostSensitiveClassifier configureCostSensitiveClassifier(CostSensitiveClassifier costSensitiveClassifier,
        Instances data, String costMatrix) throws Exception {
    String[] cscOptions = {"-cost-matrix", costMatrix, "-S", "1"};
    String[] rankerOptions = {"-P", Integer.toString(data.numAttributes() - 1), "-T", "-1.7976931348623157E308", "-N", "-1"};
    String[] smoOptions = {"-C", "1.0", "-L", "0.0010", "-P", "1.0E-12", "-N", "0", "-V", "-1", "-W", "1", "-K",
            PolyKernel.class.getName() + " -C 250007 -E 2.0"};

    // Attribute selection: rank attributes by information gain
    InfoGainAttributeEval igAttEval = new InfoGainAttributeEval();
    Ranker ranker = new Ranker();
    ranker.setOptions(rankerOptions);
    AttributeSelection attSelect = new AttributeSelection();
    attSelect.setEvaluator(igAttEval);
    attSelect.setSearch(ranker);

    // Base learner: SMO wrapped in a FilteredClassifier that applies the attribute selection
    SMO smo = new SMO();
    smo.setOptions(smoOptions);
    FilteredClassifier filteredClsfr = new FilteredClassifier();
    filteredClsfr.setClassifier(smo);
    filteredClsfr.setFilter(attSelect);

    costSensitiveClassifier.setOptions(cscOptions);
    costSensitiveClassifier.setClassifier(filteredClsfr);
    costSensitiveClassifier.buildClassifier(data);
    System.out.println("CostSensitiveClassifier built.");
    return costSensitiveClassifier;
}
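Once configured, the classifier is typically evaluated rather than just built. A minimal sketch using 10-fold cross-validation (fold count, seed, and the demo class itself are assumptions):
import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.meta.CostSensitiveClassifier;
import weka.core.Instances;

public class CscEvaluationDemo {
    public static void evaluate(CostSensitiveClassifier csc, Instances data) throws Exception {
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(csc, data, 10, new Random(1));
        System.out.println(eval.toMatrixString()); // confusion matrix
        System.out.println(eval.toSummaryString());
    }
}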