

Java FeatureNode Class Code Examples

This article collects typical usage examples of the Java class de.bwaldvogel.liblinear.FeatureNode. If you are wondering how to use the FeatureNode class, what it is for, or what working code with it looks like, the hand-picked examples below should help.


The FeatureNode class belongs to the de.bwaldvogel.liblinear package. A total of 15 FeatureNode code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
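
Before diving into the examples, here is a minimal, self-contained sketch of how FeatureNode is typically combined with the rest of the liblinear API (Problem, Parameter, Linear.train, Linear.predict) to build a sparse training set and classify a new instance. The class name FeatureNodeQuickStart, the feature indices, and the values are made up purely for illustration.

import de.bwaldvogel.liblinear.Feature;
import de.bwaldvogel.liblinear.FeatureNode;
import de.bwaldvogel.liblinear.Linear;
import de.bwaldvogel.liblinear.Model;
import de.bwaldvogel.liblinear.Parameter;
import de.bwaldvogel.liblinear.Problem;
import de.bwaldvogel.liblinear.SolverType;

public class FeatureNodeQuickStart {

    public static void main(String[] args) {
        // Two tiny training rows in sparse form; indices are 1-based and
        // must be sorted in ascending order within each row.
        Feature[][] x = {
                { new FeatureNode(1, 0.5), new FeatureNode(3, 1.0) },
                { new FeatureNode(2, 1.0), new FeatureNode(3, -0.5) }
        };
        double[] y = { 0, 1 }; // one class label per row

        Problem problem = new Problem();
        problem.l = x.length; // number of training examples
        problem.n = 3;        // highest feature index
        problem.x = x;
        problem.y = y;
        problem.bias = -1;    // < 0 disables the extra bias feature

        Parameter parameter = new Parameter(SolverType.L2R_LR, 1.0, 0.01);
        Model model = Linear.train(problem, parameter);

        // Classify a new sparse instance.
        Feature[] instance = { new FeatureNode(1, 0.25), new FeatureNode(3, 0.75) };
        System.out.println("predicted label: " + Linear.predict(model, instance));
    }
}

Note that liblinear expects feature indices to start at 1 and to be sorted in ascending order within each row, which is why several of the examples below add +1 to zero-based indices and sort their FeatureNode arrays.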

Example 1: getFeatureNodes

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
@Override
protected FeatureNode[] getFeatureNodes(SortedSet<Feature> features) {

	FeatureNode[] vector;
	int length, index, i = 0;

	length = features.size();
	vector = new FeatureNode[length];

	for (Feature feature : features) {

		index = feature.getIndex();
		vector[i++] = new FeatureNode(index, feature.getValue());
	}
	
	return vector;
}
 
Developer: SI3P, Project: supWSD, Lines: 18, Source file: LibLinearClassifier.java

Example 2: makeNodes

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
/**
 * Creates a data node row for the LibSVM (sparse format, i.e. each node keeps the index and the
 * value if not default).
 */
public static FeatureNode[] makeNodes(Example e, FastExample2SparseTransform ripper, boolean useBias) {
	int[] nonDefaultIndices = ripper.getNonDefaultAttributeIndices(e);
	double[] nonDefaultValues = ripper.getNonDefaultAttributeValues(e, nonDefaultIndices);
	int offset = 0;
	if (useBias) {
		offset = 1;
	}
	FeatureNode[] nodeArray = new FeatureNode[nonDefaultIndices.length + offset];
	for (int a = 0; a < nonDefaultIndices.length; a++) {
		FeatureNode node = new FeatureNode(nonDefaultIndices[a] + 1, nonDefaultValues[a]);
		nodeArray[a] = node;
	}
	if (useBias) {
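		// note: the bias node index here is nonDefaultIndices.length + 1 (it varies per example);
		// Example 3 below uses the total attribute count + 1 instead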
		nodeArray[nodeArray.length - 1] = new FeatureNode(nodeArray.length, 1);
	}

	return nodeArray;
}
 
Developer: transwarpio, Project: rapidminer, Lines: 23, Source file: FastLargeMargin.java

Example 3: makeNodes

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
/**
 * Creates a data node row for the LibSVM (sparse format, i.e. each node keeps the index and the
 * value if not default).
 */
public static FeatureNode[] makeNodes(Example e, FastExample2SparseTransform ripper, boolean useBias) {
	int[] nonDefaultIndices = ripper.getNonDefaultAttributeIndices(e);
	double[] nonDefaultValues = ripper.getNonDefaultAttributeValues(e, nonDefaultIndices);
	int offset = 0;
	if (useBias) {
		offset = 1;
	}
	FeatureNode[] nodeArray = new FeatureNode[nonDefaultIndices.length + offset];
	for (int a = 0; a < nonDefaultIndices.length; a++) {
		FeatureNode node = new FeatureNode(nonDefaultIndices[a] + 1, nonDefaultValues[a]);
		nodeArray[a] = node;
	}
	if (useBias) {
		// bias index is number of attributes +1
		nodeArray[nodeArray.length - 1] = new FeatureNode(e.getAttributes().size() + 1, 1);
	}

	return nodeArray;
}
 
Developer: rapidminer, Project: rapidminer-studio, Lines: 24, Source file: FastLargeMargin.java

Example 4: train

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
public void train(List<Pair<CounterInterface<Integer>,Integer>> trainSet) {
	Problem problem = new Problem();
	FeatureNode[][] x = new FeatureNode[trainSet.size()][];
	double[] y = new double[trainSet.size()];
	int maxFeature = 0;
	for (int i=0; i<x.length; ++i) {
		CounterInterface<Integer> features = trainSet.get(i).getFirst();
		for (Map.Entry<Integer, Double> feat : features.entries()) {
			maxFeature = Math.max(feat.getKey()+1, maxFeature);
		}
		x[i] = convertToFeatureNodes(features);
		y[i] = trainSet.get(i).getSecond();
	}
	
	problem.l = trainSet.size();
	problem.n = maxFeature;
	problem.x = x;
	problem.y = y;
	problem.bias = 0.0;
	
	Parameter parameter = new Parameter(solverType, C, eps);
	model = Linear.train(problem, parameter);
}
 
Developer: tberg12, Project: murphy, Lines: 24, Source file: LibLinearWrapper.java

Example 5: convertToFeatureNodes

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
private FeatureNode[] convertToFeatureNodes(CounterInterface<Integer> features) {
	FeatureNode[] x = new FeatureNode[features.size()];
	int j=0;
	for (Map.Entry<Integer, Double> feat : features.entries()) {
		assert !Double.isInfinite(feat.getValue());
		assert !Double.isNaN(feat.getValue());
		x[j] = new FeatureNode(feat.getKey()+1, feat.getValue());
		j++;
	}
	Arrays.sort(x, new Comparator<FeatureNode>() {
		public int compare(FeatureNode o1, FeatureNode o2) {
			if (o1.index > o2.index) {
				return 1;
			} else if (o1.index < o2.index) {
				return -1;
			} else {
				return 0;
			}
		}
	});
	return x;
}
 
Developer: tberg12, Project: murphy, Lines: 23, Source file: LibLinearWrapper.java

Example 6: score

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
@Override
public Map<OUTCOME_TYPE, Double> score(List<Feature> features) throws CleartkProcessingException {
  FeatureNode[] encodedFeatures = this.featuresEncoder.encodeAll(features);
  
  // get score for each outcome
  int[] encodedOutcomes = this.model.getLabels();
  double[] scores = new double[encodedOutcomes.length];
  if (this.model.isProbabilityModel()) {
    Linear.predictProbability(this.model, encodedFeatures, scores);
  } else {
    Linear.predictValues(this.model, encodedFeatures, scores);
  }
  
  // handle 2-class model, which is special-cased by LIBLINEAR to only return one score
  if (this.model.getNrClass() == 2 && scores[1] == 0.0) {
    scores[1] = -scores[0];
  }
  
  // create scored outcome objects
  Map<OUTCOME_TYPE, Double> scoredOutcomes = Maps.newHashMap();
  for (int i = 0; i < encodedOutcomes.length; ++i) {
    OUTCOME_TYPE outcome = this.outcomeEncoder.decode(encodedOutcomes[i]);
    scoredOutcomes.put(outcome, scores[i]);
  }
  return scoredOutcomes;
}
 
Developer: ClearTK, Project: cleartk, Lines: 27, Source file: GenericLibLinearClassifier.java

Example 7: getSparseRepresentation

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
public Feature[] getSparseRepresentation()
{
    // get the total size of the feature vector
    Feature[] out = new FeatureNode[numOfFeatures];
    int startPos = 0;
    int i = 0;
    for(FeatureInterface feat : vec)
    {
        out[i++] = feat.outputToSparseRepresentation(startPos);
        // shift right based on the size of the feature representation
        // (e.g., binary size=2, double size=1, categorical size=k, where k = number of categories,
        // according to a 1-of-k representation)
        startPos += feat.getSizeOfRepresentation(); 
    }
    return out;
}
 
Developer: sinantie, Project: PLTAG, Lines: 17, Source file: FeatureVec.java

Example 8: createTestProblem

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
private static Feature[][] createTestProblem(SparseVector[] featureVectors, int numberOfFeatures, double bias) {
	Feature[][] nodes = new FeatureNode[featureVectors.length][];

	for (int i = 0; i < featureVectors.length; i++) {
		Set<Integer> indices = featureVectors[i].getIndices();
		nodes[i] = new FeatureNode[(bias >= 0) ? indices.size() + 1 : indices.size()];

		int j = 0;
		for (int index : indices) {
			nodes[i][j] = new FeatureNode(index, featureVectors[i].getValue(index));
			j++;
		}
		if (bias >= 0) {
			nodes[i][j] = new FeatureNode(numberOfFeatures, bias);
		}
	}	
	return nodes;	
}
 
Developer: Data2Semantics, Project: mustard, Lines: 19, Source file: LibLINEAR.java

Example 9: createProblemTrainFold

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
private static Problem createProblemTrainFold(Problem problem, int numberOfFolds, int fold) {
	int foldStart = CVUtils.foldStart(problem.x.length, numberOfFolds, fold);
	int foldEnd   = CVUtils.foldEnd(problem.x.length, numberOfFolds, fold);
	int foldLength = (foldEnd-foldStart);

	Problem prob = new Problem();
	prob.y = new double[problem.x.length - foldLength];
	prob.x = new FeatureNode[problem.x.length - foldLength][];
	prob.l = problem.x.length - foldLength;
	prob.n = problem.n;


	for (int i = 0; i < foldStart; i++) {
		prob.x[i] = problem.x[i];
		prob.y[i] = problem.y[i];
	}	
	for (int i = foldEnd; i < problem.x.length; i++) {
		prob.x[i - foldLength] = problem.x[i];
		prob.y[i - foldLength] = problem.y[i];
	}			
	return prob;
}
 
Developer: Data2Semantics, Project: mustard, Lines: 23, Source file: LibLINEAR.java

Example 10: train

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
public static void train() throws IOException, InvalidInputDataException{
	String file = "output\\svm/book_svm.svm";
	Problem problem = Problem.readFromFile(new File(file),-1);

	SolverType solver = SolverType.L2R_LR; // -s 0
	double C = 1.0;    // cost of constraints violation
	double eps = 0.01; // stopping criteria

	Parameter parameter = new Parameter(solver, C, eps);
	Model model = Linear.train(problem, parameter);
	File modelFile = new File("output/model");
	model.save(modelFile);
	System.out.println(modelFile.getAbsolutePath());
	// load model or use it directly
	model = Model.load(modelFile);

	Feature[] instance = { new FeatureNode(1, 4), new FeatureNode(2, 2) };
	double prediction = Linear.predict(model, instance);
	System.out.println(prediction);
	int nr_fold = 10;
    double[] target = new double[problem.l];
	Linear.crossValidation(problem, parameter, nr_fold, target);
}
 
Developer: laozhaokun, Project: sentimentclassify, Lines: 24, Source file: Main.java

Example 11: getProblem

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
/**
 * Creates a support vector problem for the LibSVM.
 * 
 * @throws UserError
 */
private Problem getProblem(ExampleSet exampleSet) throws UserError {
	log("Creating LibLinear problem.");
	FastExample2SparseTransform ripper = new FastExample2SparseTransform(exampleSet);
	int nodeCount = 0;
	Problem problem = new Problem();
	problem.l = exampleSet.size();

	boolean useBias = getParameterAsBoolean(PARAMETER_USE_BIAS);
	if (useBias) {
		problem.n = exampleSet.getAttributes().size() + 1;
	} else {
		problem.n = exampleSet.getAttributes().size();
	}

	problem.y = new double[exampleSet.size()];
	problem.x = new FeatureNode[exampleSet.size()][];
	Iterator<Example> i = exampleSet.iterator();
	Attribute label = exampleSet.getAttributes().getLabel();
	int j = 0;

	int firstIndex = label.getMapping().getNegativeIndex();

	while (i.hasNext()) {
		Example e = i.next();
		problem.x[j] = makeNodes(e, ripper, useBias);
		problem.y[j] = (int) e.getValue(label) == firstIndex ? 0 : 1;
		nodeCount += problem.x[j].length;
		j++;
	}
	log("Created " + nodeCount + " nodes for " + j + " examples.");
	return problem;
}
 
Developer: transwarpio, Project: rapidminer, Lines: 38, Source file: FastLargeMargin.java

Example 12: featureMapToFeatures

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
public static Feature[] featureMapToFeatures(double[] ftrArray, int[] ftrList) {
	Feature[] llFtrArray = new Feature[ftrList.length];
	int j = 0;
	for (int i = 0; i < ftrList.length; i++)
		llFtrArray[j++] = new FeatureNode(i + 1, ftrArray[ftrList[i] - 1]);
	return llFtrArray;
}
 
Developer: marcocor, Project: smaph, Lines: 8, Source file: LibLinearModel.java

Example 13: extractFeature

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
@Override
public Feature[] extractFeature(JCas cas) {
    Collection<String> documentText = preprocessor.getTokenStrings(cas);
    float featureVector[] = new float[featureCount];
    Feature[] features = new Feature[featureCount];
    for(int i = 0;i<featureVector.length;i++){
        featureVector[i] = 0.0f;
    }
    numberOfTokensFound = 0;
    for(String token:documentText){
        if(termPolarityMap.containsKey(token)){
            Float polarity[] = termPolarityMap.get(token);
            for(int i=0;i<totalNumberOfLabels;i++){
                featureVector[i]+=polarity[i];
            }
        }
        numberOfTokensFound++;
    }
    if(numberOfTokensFound != 0) {
        for(int i = 0;i<featureVector.length;i++){
            featureVector[i] /= numberOfTokensFound;
        }
    }
    for(int i=0;i<featureCount;i++){
        features[i] = new FeatureNode(i + offset + 1, featureVector[i]);
    }
    return features;
}
 
Developer: uhh-lt, Project: LT-ABSA, Lines: 29, Source file: PolarityLexiconFeature.java

Example 14: extractFeature

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
@Override
public Feature[] extractFeature(JCas cas) {

    Collection<String> documentText = preprocessor.getTokenStrings(cas);

    Feature[] ret = new Feature[1];
    Double value = Math.min(1.0, documentText.size() / maxDocumentLength);
    ret[0] = new FeatureNode(offset, value);
    return ret;
}
 
Developer: uhh-lt, Project: LT-ABSA, Lines: 11, Source file: DocumentLengthFeature.java

Example 15: getTfIdfScores

import de.bwaldvogel.liblinear.FeatureNode; // import the required package/class
/**
 * Calculates the instance array containing TF-IDF scores for each token
 * @param tokenCounts the token count for each token ID
 * @return an array of {@link Feature} elements
 */
private Feature[] getTfIdfScores(HashMap<Integer, Integer> tokenCounts) {
    int count;
    double idf;
    double weight;
    double normalizedWeight;
    double norm = 0;

    HashMap<Integer, Double> termWeights = new HashMap<>();
    // calculate TF-IDF scores for each token, also add to normalizer
    for (int tokenID : tokenCounts.keySet()) {
        count = tokenCounts.get(tokenID);
        idf = termIdf.get(tokenID);
        weight = count * idf;

        if (weight > 0.0) {
            norm += Math.pow(weight, 2);
            termWeights.put(tokenID, weight);
        }
    }
    // calculate normalization
    norm = Math.sqrt(norm);

    Feature[] instance = new Feature[termWeights.size()];
    ArrayList<Integer> list = new ArrayList<>(termWeights.keySet());
    Collections.sort(list);
    Double w;
    int i = 0;
    // add normalized TF-IDF scores to the training instance
    for (int tokenId: list) {
        w = termWeights.get(tokenId);
        if (w == null) {
            w = 0.0;
        }
        normalizedWeight = w / norm;
        instance[i++] = new FeatureNode(tokenId+offset, normalizedWeight);
    }
    return instance;
}
 
Developer: uhh-lt, Project: LT-ABSA, Lines: 44, Source file: TfIdfFeature.java


Note: The de.bwaldvogel.liblinear.FeatureNode class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors. Please consult the corresponding project's License before distributing or using the code; do not reproduce without permission.