Java Feature Class Code Examples

This article collects typical usage examples of the Java class de.bwaldvogel.liblinear.Feature. If you are wondering how the Feature class is used in practice, or are looking for concrete examples of it, the selected code examples below may help.


The Feature class belongs to the de.bwaldvogel.liblinear package. The sections below show 15 code examples for the Feature class, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Java code examples.
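Before looking at the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) of how a sparse instance is usually assembled. FeatureNode is liblinear-java's standard implementation of the Feature interface; the class name, indices, and values below are made up purely for illustration. Feature indices are 1-based and must be given in ascending order.

import de.bwaldvogel.liblinear.Feature;
import de.bwaldvogel.liblinear.FeatureNode;

public class FeatureSketch {
	public static void main(String[] args) {
		// A sparse instance: only non-zero features are stored.
		// Indices are 1-based and must appear in ascending order.
		Feature[] instance = {
				new FeatureNode(1, 0.5),   // feature 1 = 0.5
				new FeatureNode(3, 2.0),   // feature 2 is implicitly 0
				new FeatureNode(7, -1.25)  // feature 7 = -1.25
		};
		for (Feature f : instance) {
			System.out.println(f.getIndex() + " -> " + f.getValue());
		}
	}
}

Arrays of this kind are what the Problem.x field and the Linear.predict(...) calls in the examples below expect as input.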

Example 1: generateLibLinearProblem

import de.bwaldvogel.liblinear.Feature; // import the required package/class
public Problem generateLibLinearProblem(int[] features, FeatureNormalizer fn) {
	Problem problem = new Problem();
	Vector<Double> targets = new Vector<Double>();
	Vector<Feature[]> ftrVectors = new Vector<Feature[]>();
	List<Pair<FeaturePack<T>, Double>> plainVectors = getPlain();
	for (Pair<FeaturePack<T>, Double> vectAndGold : plainVectors) {
		ftrVectors.add(LibLinearModel.featureMapToFeatures(
				fn.ftrToNormalizedFtrArray(vectAndGold.first), features));
		targets.add(vectAndGold.second);
	}

	problem.l = ftrVectors.size();
	problem.n = features.length;
	problem.x = ftrVectors.toArray(new Feature[][]{});
	problem.y = Doubles.toArray(targets);
	return problem;
}
 
Developer: marcocor, Project: smaph, Lines of code: 18, Source: ExampleGatherer.java

Example 2: Evaluate_SVM

import de.bwaldvogel.liblinear.Feature; // import the required package/class
/**
 * Build a new SVM model.
 * @param readLabelFile the category label file for the different sources
 * @param limitRandom the fraction of the data used as the training set
 * @param TestTrain the array marking whether each instance belongs to the training or test set
 */
public Evaluate_SVM(int dataNum, int dimension, double W[][], String readLabelFile, double limitRandom, int[]TestTrain) throws Exception{
	this.dataNums=dataNum;
	this.dimensions=dimension;
	this.readLabelFiles=readLabelFile;
	this.limitRandom=limitRandom;
	this.TestTrain=TestTrain;
	//******
	this.countTestTrain();
	vectrain=new Feature[train][dimensions];
	vectest	=new Feature[test][dimensions];
	trainattr=new double[train];
	testattr =new double[test];
	//******
	this.Ws=W;
	labels=new int[dataNums];
	this.makeSource();
}
 
Developer: thunlp, Project: MMDW, Lines of code: 25, Source: Evaluate_SVM.java

Example 3: sigmoid

import de.bwaldvogel.liblinear.Feature; // import the required package/class
private double sigmoid(Feature[] features, double threshold) {
	if (null == weights) {
		throw new IllegalStateException();
	} else {
		double result = weights[weights.length-1];
		
		for (Feature feature : features) {
			int i = feature.getIndex()-1;
			
			if (i >= 0 && i < weights.length-1) {
				result += weights[i] * feature.getValue();
			}
		}
		return 1./(1+Math.exp(threshold-result));
	}
}
 
Developer: DevinZ1993, Project: EI328-Patent-Classifier, Lines of code: 17, Source: MyImpl.java

Example 4: subproblem

import de.bwaldvogel.liblinear.Feature; // import the required package/class
private Problem subproblem(Problem problem, List<Integer> posIndex, int posIdx, int posNum,
		List<Integer> negIndex, int negIdx, int negNum) {
	Problem sub = new Problem();
	
	sub.bias = problem.bias;
	sub.n = problem.n;
	sub.x = new Feature[posNum+negNum][];
	sub.y = new double[posNum+negNum];
	sub.l = 0;
	for (int i=0; i<posNum; i++, sub.l++) {
		sub.x[sub.l] = problem.x[posIndex.get(posIdx+i)];
		sub.y[sub.l] = problem.y[posIndex.get(posIdx+i)];
	}
	for (int j=0; j<negNum; j++, sub.l++) {
		sub.x[sub.l] = problem.x[negIndex.get(negIdx+j)];
		sub.y[sub.l] = problem.y[negIndex.get(negIdx+j)];
	}
	return sub;
}
 
Developer: DevinZ1993, Project: EI328-Patent-Classifier, Lines of code: 20, Source: MinMaxJob.java

Example 5: predictOne

import de.bwaldvogel.liblinear.Feature; // import the required package/class
public Matrix predictOne(Feature[] x) {
	Matrix result = null;
	if (model.isProbabilityModel()) {
		double[] probabilities = new double[model.getNrClass()];
		Linear.predictProbability(model, x, probabilities);
		result = Matrix.Factory.zeros(1, model.getNrClass());
		for (int i = 0; i < probabilities.length; i++) {
			int label = model.getLabels()[i];
			result.setAsDouble(probabilities[i], 0, label);
		}
	} else {
		double classId = Linear.predict(model, x);
		result = Matrix.Factory.zeros(1, Math.max(model.getNrClass(), (int) (classId + 1)));
		result.setAsDouble(1.0, 0, (int) classId);
	}

	return result;
}
 
Developer: jdmp, Project: java-data-mining-package, Lines of code: 19, Source: LibLinearClassifier.java

Example 6: getSparseRepresentation

import de.bwaldvogel.liblinear.Feature; // import the required package/class
public Feature[] getSparseRepresentation()
{
    // get the total size of the feature vector
    Feature[] out = new FeatureNode[numOfFeatures];
    int startPos = 0;
    int i = 0;
    for(FeatureInterface feat : vec)
    {
        out[i++] = feat.outputToSparseRepresentation(startPos);
        // shift right based on the size of the feature representation
        // (e.g., binary size=2, double=1, categorical=k, where k = the number of categories,
        // according to a 1-of-k representation)
        startPos += feat.getSizeOfRepresentation(); 
    }
    return out;
}
 
Developer: sinantie, Project: PLTAG, Lines of code: 17, Source: FeatureVec.java

Example 7: addTrainingData

import de.bwaldvogel.liblinear.Feature; // import the required package/class
public void addTrainingData(String value, String label) {
	// convert value to feature vector
	rawData.add(value);
	Collection<Feature> cfeat = rf.computeFeatures(value, "");
	Feature[] x = cfeat.toArray(new Feature[cfeat.size()]);
	// row.add(f.getName());
	svm_node[] testNodes = new svm_node[cfeat.size()];
	for (int k = 0; k < cfeat.size(); k++) {
		svm_node node = new svm_node();
		node.index = k;
		node.value = x[k].getScore();
		testNodes[k] = node;
	}
	this.trainData.add(testNodes);
	// convert label to a double class label
	if (labelMapping.containsKey(label)) {
		this.targets.add(labelMapping.get(label));
	} else {
		double lb = 0;
		if (!this.labelMapping.isEmpty()) {
			lb = Collections.max(labelMapping.values()) + 1;
		}
		this.labelMapping.put(label, lb);
		this.targets.add(lb);
	}
}
 
Developer: therelaxist, Project: spring-usc, Lines of code: 27, Source: RecordClassifier.java

Example 8: getLabel

import de.bwaldvogel.liblinear.Feature; // import the required package/class
@Override
public String getLabel(String value) {
	Collection<Feature> cfeat = rf.computeFeatures(value, "");
	Feature[] x = cfeat.toArray(new Feature[cfeat.size()]);
	// row.add(f.getName());
	svm_node[] testNodes = new svm_node[cfeat.size()];
	for (int k = 0; k < cfeat.size(); k++) {
		svm_node node = new svm_node();
		node.index = k;
		node.value = x[k].getScore();
		testNodes[k] = node;
	}
	/* temp test */

	// double[] prob_estimates = new
	// double[this.labelMapping.keySet().size()];
	rescale(testNodes);
	double v = svm.svm_predict(model, testNodes);
	// find the string label
	return findLable(v);
}
 
Developer: therelaxist, Project: spring-usc, Lines of code: 22, Source: RecordClassifier.java

Example 9: testLinearModel

import de.bwaldvogel.liblinear.Feature; // import the required package/class
private static Prediction[] testLinearModel(LibLINEARModel model, Feature[][] problem) {
	Prediction[] pred = new Prediction[problem.length];		
	for (int i = 0; i < problem.length; i++) {
		double[] decVal = new double[(model.getModel().getNrClass() <= 2) ? 1 : model.getModel().getNrClass()];
		if (!model.hasProbabilities()) {
			pred[i] = new Prediction(Linear.predictValues(model.getModel(), problem[i], decVal), i);
			pred[i].setProbabilities(false);
		} else {
			pred[i] = new Prediction(Linear.predictProbability(model.getModel(), problem[i], decVal), i);
			pred[i].setProbabilities(true);
		}
		pred[i].setDecisionValue(decVal);
		pred[i].setClassLabels(model.getModel().getLabels());
		pred[i].setPairWise(false); // LibLINEAR does not do pairwise multiclass prediction, but 1 vs all
		pred[i].setUsedKernel(model.getKernelSetting());
	}
	return pred;
}
 
Developer: Data2Semantics, Project: mustard, Lines of code: 19, Source: LibLINEAR.java

Example 10: crossValidate

import de.bwaldvogel.liblinear.Feature; // import the required package/class
public static Prediction[] crossValidate(SparseVector[] featureVectors, double[] target, LibLINEARParameters params, int numberOfFolds) {
	Prediction[] pred = new Prediction[target.length];

	List<Integer> indices = Stratifier.stratifyFolds(target, numberOfFolds);
	double[] targetCopy = Stratifier.shuffle(target, indices);
	SparseVector[] fvCopy = Stratifier.shuffle(featureVectors, indices);

	Problem trainP;
	Feature[][] testP;
	Problem prob = createLinearProblem(fvCopy, targetCopy, params.getBias());

	for (int fold = 1; fold <= numberOfFolds; fold++) {
		trainP = createProblemTrainFold(prob, numberOfFolds, fold);
		testP  = createProblemTestFold(prob, numberOfFolds, fold);
		pred = CVUtils.addFold2Prediction(testLinearModel(trainLinearModel(trainP, params), testP), pred, numberOfFolds, fold);
	}
	pred = Stratifier.deshuffle(pred, indices);
	return pred;
}
 
Developer: Data2Semantics, Project: mustard, Lines of code: 20, Source: LibLINEAR.java

Example 11: createTestProblem

import de.bwaldvogel.liblinear.Feature; // import the required package/class
private static Feature[][] createTestProblem(SparseVector[] featureVectors, int numberOfFeatures, double bias) {
	Feature[][] nodes = new FeatureNode[featureVectors.length][];

	for (int i = 0; i < featureVectors.length; i++) {
		Set<Integer> indices = featureVectors[i].getIndices();
		nodes[i] = new FeatureNode[(bias >= 0) ? indices.size() + 1 : indices.size()];

		int j = 0;
		for (int index : indices) {
			nodes[i][j] = new FeatureNode(index, featureVectors[i].getValue(index));
			j++;
		}
		if (bias >= 0) {
			nodes[i][j] = new FeatureNode(numberOfFeatures, bias);
		}
	}	
	return nodes;	
}
 
Developer: Data2Semantics, Project: mustard, Lines of code: 19, Source: LibLINEAR.java

Example 12: train

import de.bwaldvogel.liblinear.Feature; // import the required package/class
public static void train() throws IOException, InvalidInputDataException{
	String file = "output\\svm/book_svm.svm";
	Problem problem = Problem.readFromFile(new File(file),-1);

	SolverType solver = SolverType.L2R_LR; // -s 0
	double C = 1.0;    // cost of constraints violation
	double eps = 0.01; // stopping criteria

	Parameter parameter = new Parameter(solver, C, eps);
	Model model = Linear.train(problem, parameter);
	File modelFile = new File("output/model");
	model.save(modelFile);
	System.out.println(modelFile.getAbsolutePath());
	// load model or use it directly
	model = Model.load(modelFile);

	Feature[] instance = { new FeatureNode(1, 4), new FeatureNode(2, 2) };
	double prediction = Linear.predict(model, instance);
	System.out.println(prediction);
	int nr_fold = 10;
    double[] target = new double[problem.l];
	Linear.crossValidation(problem, parameter, nr_fold, target);
}
 
Developer: laozhaokun, Project: sentimentclassify, Lines of code: 24, Source: Main.java

Example 13: predict2

import de.bwaldvogel.liblinear.Feature; // import the required package/class
@Deprecated
public static int[] predict2(Model model, Feature[][] data, int[] labels) {

	int N = data.length;
	int[] pre_label = new int[N];

	for ( int i = 0; i < N; i ++ ) {
		pre_label[i] = Linear.predict(model, data[i]);
	}

	if (labels != null) {
		int cnt_correct = 0;
		for ( int i = 0; i < N; i ++ ) {
			if ( pre_label[i] == labels[i] )
				cnt_correct ++;
		}
		double accuracy = (double)cnt_correct / (double)N;
		System.out.println(String.format("Accuracy: %.2f%%\n", accuracy * 100));
	}

	return pre_label;

}
 
Developer: MingjieQian, Project: JML, Lines of code: 24, Source: MultiClassSVM.java

Example 14: constructProblem

import de.bwaldvogel.liblinear.Feature; // import the required package/class
private static Problem constructProblem(List<Integer> vy, List<Feature[]> vx, int max_index, double bias) {
	Problem prob = new Problem();
	prob.bias = bias;
	prob.l = vy.size();
	prob.n = max_index;
	if (bias >= 0) {
		prob.n++;
	}
	prob.x = new Feature[prob.l][];
	for (int i = 0; i < prob.l; i++) {
		prob.x[i] = vx.get(i);

		if (bias >= 0) {
			assert prob.x[i][prob.x[i].length - 1] == null;
			prob.x[i][prob.x[i].length - 1] = new FeatureNode(max_index + 1, bias);
		}
	}

	prob.y = new int[prob.l];
	for (int i = 0; i < prob.l; i++)
		prob.y[i] = vy.get(i);

	return prob;
}
 
Developer: MingjieQian, Project: JML, Lines of code: 25, Source: MultiClassSVM.java

Example 15: features2Matrix

import de.bwaldvogel.liblinear.Feature; // import the required package/class
/**
 * Convert features to matrix including bias features if bias is nonnegative.
 * Feature indices start from 1.
 * 
 * @param features a 2D feature array
 * 
 * @param bias dummy bias feature
 * 
 * @return a real matrix
 * 
 */
public static RealMatrix features2Matrix(Feature[][] features, double bias) {
	
	int maxIndex = 0;
	for (int i = 0; i < features.length; i++) {
		int j = 0;
		j = bias >= 0 ? features[i].length - 1: features[i].length;
		maxIndex = Math.max(maxIndex, features[i][j - 1].getIndex());
	}
	if (bias >= 0) {
		maxIndex++;
	}
	
	RealMatrix res = new OpenMapRealMatrix(maxIndex, features.length);
	int index = 0;
	double value = 0;
	for (int j = 0; j < features.length; j++) {
		for (int i = 0; i < features[j].length; i++) {
			index = features[j][i].getIndex() - 1;
			value = features[j][i].getValue();
			res.setEntry(index, j, value);
		}
	}
	return res;
	
}
 
Developer: MingjieQian, Project: JML, Lines of code: 37, Source: MultiClassSVM.java


Note: The de.bwaldvogel.liblinear.Feature class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by many developers; copyright of the source code belongs to the original authors. Please consult the corresponding project's License before redistributing or using the code, and do not reproduce this article without permission.