This article collects typical usage examples of the Java class weka.classifiers.functions.MultilayerPerceptron. If you are wondering what the MultilayerPerceptron class does, how to use it, or what real-world usage looks like, the curated code samples below may help.
MultilayerPerceptron belongs to the weka.classifiers.functions package. A total of 15 code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code samples.
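Before the individual examples, here is a minimal, self-contained sketch of the typical train/evaluate cycle with this class. The ARFF path, the class-attribute layout, and the parameter values are illustrative placeholders, not code taken from any of the projects below.
import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.functions.MultilayerPerceptron;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class MlpQuickStart {
    public static void main(String[] args) throws Exception {
        // Load a dataset (placeholder path) and treat the last attribute as the class.
        Instances data = new DataSource("data/example.arff").getDataSet();
        data.setClassIndex(data.numAttributes() - 1);

        // Configure the network; these values mirror Weka's defaults.
        MultilayerPerceptron mlp = new MultilayerPerceptron();
        mlp.setLearningRate(0.3);
        mlp.setMomentum(0.2);
        mlp.setTrainingTime(500);  // number of training epochs
        mlp.setHiddenLayers("a");  // 'a' = (attributes + classes) / 2 hidden nodes

        // Estimate performance with 10-fold cross-validation, then build the final model.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(mlp, data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
        mlp.buildClassifier(data);
    }
}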
Example 1: NN_Model
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
/**
* Generates a Weka MultilayerPerceptron function model acting on our data instance with our parameters.
*/
public NN_Model(Instances d, String[] params) throws ModelConstructException,Exception {
super(d,params);
classifier = new MultilayerPerceptron();
prepare();
run();
}
Example 2: buildClassifier
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
public Classifier buildClassifier(Instances traindataset) {
MultilayerPerceptron m = new MultilayerPerceptron();
try {
m.buildClassifier(traindataset);
} catch (Exception ex) {
Logger.getLogger(ModelGenerator.class.getName()).log(Level.SEVERE, null, ex);
}
return m;
}
Example 3: LearnNeuralNetwork
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
@Override
public void LearnNeuralNetwork() throws Exception
{
trainedData.setClassIndex(trainedData.numAttributes()-1);
filter=new StringToWordVector();
classifier=new FilteredClassifier();
classifier.setFilter(filter);
classifier.setClassifier(new MultilayerPerceptron());
classifier.buildClassifier(trainedData);
}
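FilteredClassifier is used here so that the StringToWordVector filter is fitted on the training data and then re-applied automatically whenever the wrapped MultilayerPerceptron classifies new text, keeping the word-vector attributes consistent between training and prediction.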
Developer: unsw-cse-soc, Project: Data-curation-API, Lines of code: 11, Source file: ExtractClassificationTextNeuralNetworkImpl.java
Example 4: trainMultilayerPerceptron
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
public static void trainMultilayerPerceptron(final Instances trainingSet) throws Exception {
// Create a classifier
final MultilayerPerceptron tree = new MultilayerPerceptron();
tree.buildClassifier(trainingSet);
// Test the model
final Evaluation eval = new Evaluation(trainingSet);
// eval.crossValidateModel(tree, trainingSet, 10, new Random(1));
eval.evaluateModel(tree, trainingSet);
// Print the result à la Weka explorer:
logger.info(eval.toSummaryString());
logger.info(eval.toMatrixString());
logger.info(tree.toString());
}
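Note that evaluateModel(tree, trainingSet) scores the network on the very data it was trained on, so the reported figures are optimistic; the commented-out crossValidateModel call shows how to obtain a 10-fold cross-validation estimate instead.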
Example 5: readFromFile
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
public static Model readFromFile(String directory) throws Exception {
Classifier cls = (Classifier) weka.core.SerializationHelper.read(directory);
Class<?> wekaClass = cls.getClass();
Model ret = null;
if(wekaClass.equals(RBFNetwork.class)){
ret = (Model) RBF.class.getConstructor().newInstance();
}
else if(wekaClass.equals(RandomSubSpace.class)){
ret = (Model) RandomSubSpaces.class.getConstructor().newInstance();
}
else if(wekaClass.equals(MultilayerPerceptron.class)){
ret = (Model) MLPerceptron.class.getConstructor().newInstance();
}
else if(wekaClass.equals(SimpleLinearRegression.class)){
ret = (Model) LinearRegression.class.getConstructor().newInstance();
}
else if(wekaClass.equals(LeastMedSq.class)){
ret = (Model) LeastSquares.class.getConstructor().newInstance();
}
else if(wekaClass.equals(IsotonicRegression.class)){
ret = (Model) IsoRegression.class.getConstructor().newInstance();
}
else if(wekaClass.equals(GaussianProcesses.class)){
ret = (Model) GaussianCurves.class.getConstructor().newInstance();
}
else if(wekaClass.equals(RegressionByDiscretization.class)){
ret = (Model) Discretization.class.getConstructor().newInstance();
}
else if(wekaClass.equals(Bagging.class)){
ret = (Model) BagClassify.class.getConstructor().newInstance();
}
ret.setClassifier(cls);
return ret;
}
Example 6: readSC
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
public MultilayerPerceptron readSC(String filename1, String filename2, String filename3,String filename4,String filename5) throws Exception
{
SCA = (BayesNet) SerializationHelper.read(filename1);
SCB = (MultilayerPerceptron) SerializationHelper.read(filename2);
SCC1 = (MultilayerPerceptron) SerializationHelper.read(filename3);
SCC2 = (MultilayerPerceptron) SerializationHelper.read(filename4);
SCC3 = (MultilayerPerceptron) SerializationHelper.read(filename5);
return SCC1;
}
Example 7: main
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
public static void main(String[] args) throws Exception {
if (args.length < 6) {
System.out
.println("Usage: Train <img_base> <categories> <cate_sample> <output_arff> <output_classifier> <output_model>");
return;
}
String imgBase = args[0];
String categories = args[1];
int cateSample = Integer.valueOf(args[2]);
String outputArff = args[3];
String outputClassifier = args[4];
String outputModel = args[5];
InstanceGenerator instanceGenerator = new InstanceGenerator(
categories.split(","));
TrainResult trainResult = instanceGenerator.train(imgBase, cateSample);
List<Instance> instances = trainResult.getInstances();
System.out.println("dumping arff to " + outputArff);
instanceGenerator.dumpArff(instances, outputArff);
System.out.println("running cross-validation using MLP");
String arguments = "-t " + outputArff + " -d " + outputClassifier
+ " -L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H a";
MultilayerPerceptron.main(arguments.split(" "));
List<Feature> words = trainResult.getWords();
Classifier classifier = ClassifyUtils.loadClassifier(outputClassifier);
Model model = new Model(categories.split(","), words, classifier);
SerializationUtils.dumpObject(outputModel, model);
System.out.println("model saved as " + outputModel);
}
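The option string handed to MultilayerPerceptron.main combines the standard classifier flags -t (training ARFF file) and -d (file to which the trained model is serialized) with the learner's own parameters: -L 0.3 (learning rate), -M 0.2 (momentum), -N 500 (training epochs), -V 0 (validation set size in percent), -S 0 (random seed), -E 20 (validation threshold) and -H a (hidden-layer specification, where 'a' means a single hidden layer with (attributes + classes) / 2 nodes).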
Example 8: EvaluateNeuralNetwork
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
@Override
public List<Classification> EvaluateNeuralNetwork() throws Exception
{
List<Classification> lstEvaluationDetail=new ArrayList<>();
trainedData.setClassIndex(trainedData.numAttributes()-1);
filter=new StringToWordVector();
classifier=new FilteredClassifier();
classifier.setFilter(filter);
classifier.setClassifier(new MultilayerPerceptron());
Evaluation eval=new Evaluation(trainedData);
eval.crossValidateModel(classifier, trainedData, 4, new Random(1));
/*try
{
for(int i=0;i<10000;i++)
{
cls.setPrecision(eval.precision(i));
cls.setRecall(eval.recall(i));
cls.setAuc(eval.areaUnderPRC(i));
cls.setFMeasure(eval.fMeasure(i));
cls.setFn(eval.falseNegativeRate(i));
cls.setFp(eval.falsePositiveRate(i));
cls.setTn(eval.trueNegativeRate(i));
cls.setTp(eval.truePositiveRate(i));
cls.setMeanAbsoluteError(eval.meanAbsoluteError());
cls.setRelativeAbsoluteError(eval.relativeAbsoluteError());
cls.setCorrect(eval.correct());
cls.setKappa(eval.kappa());
cls.setNumInstances(eval.numInstances());
cls.setInCorrect(eval.incorrect());
lstEvaluationDetail.add(new Classification(cls.getPrecision(),
cls.getRecall(),
cls.getAuc(),
cls.getCorrect(),
cls.getInCorrect(),
cls.getErrorRate(),
cls.getFn(),
cls.getFp(),
cls.getTn(),
cls.getTp(),
cls.getKappa(),
cls.getMeanAbsoluteError(),
cls.getNumInstances(),
cls.getRelativeAbsoluteError(),
cls.getFMeasure()));
}
}
catch(Exception ex)
{
}*/
return lstEvaluationDetail;
}
Developer: unsw-cse-soc, Project: Data-curation-API, Lines of code: 53, Source file: ExtractClassificationTextNeuralNetworkImpl.java
Example 9: setupLearner
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
/**
* Set up the classifier parameters.
*/
private void setupLearner() {
logger.info("Applying default configuration to {}", this.classifier.getClass().getSimpleName());
if (this.classifier instanceof J48) {
J48 j48 = (J48) this.classifier;
j48.setCollapseTree(false);
j48.setBinarySplits(false);
j48.setUnpruned(false);
j48.setReducedErrorPruning(false);
j48.setConfidenceFactor(0.25f);
j48.setUseLaplace(true);
j48.setNumFolds(5);
j48.setSubtreeRaising(false);
} else if (this.classifier instanceof LibSVM) {
LibSVM libSVM = (LibSVM) this.classifier;
libSVM.setCacheSize(512); // MB
libSVM.setNormalize(true);
libSVM.setShrinking(true);
libSVM.setKernelType(new SelectedTag(LibSVM.KERNELTYPE_POLYNOMIAL, LibSVM.TAGS_KERNELTYPE));
libSVM.setDegree(3);
libSVM.setSVMType(new SelectedTag(LibSVM.SVMTYPE_C_SVC, LibSVM.TAGS_SVMTYPE));
} else if (this.classifier instanceof NaiveBayes) {
NaiveBayes naiveBayes = (NaiveBayes) this.classifier;
// Configure NaiveBayes
naiveBayes.setUseKernelEstimator(false);
naiveBayes.setUseSupervisedDiscretization(false);
} else if (this.classifier instanceof RandomForest) {
RandomForest rndForest = (RandomForest) this.classifier;
// Configure RandomForest
rndForest.setNumExecutionSlots(5);
rndForest.setNumTrees(50);
rndForest.setMaxDepth(3);
} else if (this.classifier instanceof MultilayerPerceptron) {
MultilayerPerceptron perceptron = (MultilayerPerceptron) this.classifier;
// Configure perceptron
perceptron.setAutoBuild(true);
perceptron.setTrainingTime(250); // epochs
perceptron.setNominalToBinaryFilter(false);
perceptron.setNormalizeAttributes(true);
}
}
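In the MultilayerPerceptron branch, setTrainingTime(250) fixes the number of training epochs, setNominalToBinaryFilter(false) switches off the automatic nominal-to-binary preprocessing, setNormalizeAttributes(true) rescales the input attributes, and setAutoBuild(true) lets Weka add and wire up the hidden layer automatically.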
Example 10: MLPerceptron
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
public MLPerceptron() {
super();
classifier = new MultilayerPerceptron();
}
Example 11: wekaOutputTEST
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
public static FCMWeka wekaOutputTEST() throws Exception {
StringBuilder sb = new StringBuilder();
sb.append("@relation level_of_satisfaction\n\n");
sb.append("@attribute speed_public_service numeric\n");
sb.append("@attribute accessibility numeric\n");
sb.append("@attribute regional_Gdp numeric\n");
sb.append("@attribute 'level of satisfaction' numeric\n\n");
sb.append("@data\n");
sb.append("0.6,0.2,0.6,0.2\n");
sb.append("0.6,0.4,0.6,0.2\n");
sb.append("0.6,0.4,0.8,0.2\n");
sb.append("0.4,0.6,0.8,0.4\n");
sb.append("0.8,1,1,0.8\n");
sb.append("1,1,1,1\n");
StringReader trainreader = new StringReader(sb.toString());
Instances train = new Instances(trainreader);
train.setClassIndex(train.numAttributes()-1);
MultilayerPerceptron classifier = new MultilayerPerceptron();
classifier.setHiddenLayers("0");
try {
classifier.buildClassifier(train);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
String wekaResp=classifier.toString();
FCMWeka output=new FCMWeka();
output.setMinimum(0);
output.setMaximum(1);
output.setMean(0.4f);
output.setStdDev(.658f);
output.setWekaString(wekaResp);
return output;
}
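Because setHiddenLayers("0") requests no hidden layer and every attribute (including the class) is numeric, the network reduces to a single linear output node, so the fit is essentially a linear regression over the three inputs.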
Example 12: classifyMultiLayer
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
public MultilayerPerceptron classifyMultiLayer(Instances data) throws Exception {
MultilayerPerceptron layer = new MultilayerPerceptron();
layer.buildClassifier(data);
return layer;
}
Example 13: MLMSimulator
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
public MLMSimulator() {
// Training data not initialized until training method is called
_trainingData = null;
_testData = new ArrayList<Instance>();
_mlp = new MultilayerPerceptron();
}
Example 14: main
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
public static void main(String[] args) throws Exception {
// // Declare two numeric attributes
// Attribute Attribute1 = new Attribute("firstNumeric");
// Attribute Attribute2 = new Attribute("secondNumeric");
//
// // Declare a nominal attribute along with its values
// FastVector fvNominalVal = new FastVector(3);
// fvNominalVal.addElement("blue");
// fvNominalVal.addElement("gray");
// fvNominalVal.addElement("black");
// Attribute Attribute3 = new Attribute("aNominal", fvNominalVal);
//
// // Declare the class attribute along with its values
// FastVector fvClassVal = new FastVector(2);
// fvClassVal.addElement("positive");
// fvClassVal.addElement("negative");
// Attribute ClassAttribute = new Attribute("theClass", fvClassVal);
//
// // Declare the feature vector
// FastVector fvWekaAttributes = new FastVector(4);
// fvWekaAttributes.addElement(Attribute1);
// fvWekaAttributes.addElement(Attribute2);
// fvWekaAttributes.addElement(Attribute3);
// fvWekaAttributes.addElement(ClassAttribute);
//
// // Create an empty training set
// Instances isTrainingSet = new Instances("Rel", fvWekaAttributes, 10);
// // Set class index
// isTrainingSet.setClassIndex(3);
//
// // Create the instance
// Instance iExample = new Instance(4);
// iExample.setValue((Attribute)fvWekaAttributes.elementAt(0), 1.0);
// iExample.setValue((Attribute)fvWekaAttributes.elementAt(1), 0.5);
// iExample.setValue((Attribute)fvWekaAttributes.elementAt(2), "gray");
// iExample.setValue((Attribute)fvWekaAttributes.elementAt(3), "positive");
//
// // add the instance
// isTrainingSet.add(iExample);
DataSource trainds = new DataSource("etc/train.csv");
Instances train = trainds.getDataSet();
train.setClassIndex(train.numAttributes()-1);
DataSource testds = new DataSource("etc/test.csv");
Instances test = testds.getDataSet();
test.setClassIndex(test.numAttributes()-1);
Classifier cModel = new MultilayerPerceptron();
cModel.buildClassifier(train);
// Test the model
Evaluation eTest = new Evaluation(train);
eTest.evaluateModel(cModel, test);
// Print the result à la Weka explorer:
String strSummary = eTest.toSummaryString();
System.out.println(strSummary);
}
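In contrast to Example 4, the evaluation here uses a separate test set loaded from etc/test.csv, which gives an unbiased estimate as long as both CSV files share the same attribute structure. The large commented-out block at the top was written against the older Weka 3.6 API; in Weka 3.7 and later, FastVector is deprecated in favour of java.util.List and concrete instances are created with DenseInstance.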
Example 15: classify
import weka.classifiers.functions.MultilayerPerceptron; // import the required package/class
public void classify() throws Exception {
FileReader trainreader = new FileReader("rawData_biomedical.arff");
Instances train = new Instances(trainreader);
train.setClassIndex(train.numAttributes() - 1);
double accuracy = 0;
for (int i = 0; i < 10; i++) {
MultilayerPerceptron mlp = new MultilayerPerceptron();
mlp.setOptions(Utils.splitOptions("-L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H 4"));
mlp.buildClassifier(train);
Evaluation eval = new Evaluation(train);
//evaluation.crossValidateModel(rf, trainData, numFolds, new Random(1));
eval.crossValidateModel(mlp, train, 10, new Random(1));
// eval.evaluateModel(mlp, train);
System.out.println(eval.toSummaryString("\nResults\n======\n", false));
accuracy += eval.correlationCoefficient();
}
// Close the reader once, after the loop; the data was fully read into Instances above.
trainreader.close();
System.out.println("Avg Correlation: " + accuracy/10);
}
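Here -H 4 requests a single hidden layer with four nodes, and because the class attribute is numeric the evaluation is summarized with correlationCoefficient() rather than an accuracy percentage. Note that every iteration seeds the cross-validation with the same new Random(1) (and the network itself with -S 0), so the ten repetitions produce identical folds and identical results; a genuinely repeated cross-validation would vary the seed between iterations.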