

Java FeatureSelection Class Code Examples

This article collects typical usage examples of the Java class cc.mallet.types.FeatureSelection, gathered from open-source projects. If you are unsure what FeatureSelection does or how to use it, the selected examples below should help.


The FeatureSelection class belongs to the cc.mallet.types package. Seven code examples of the class are shown below, ordered by popularity by default.
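Before the examples, here is a minimal standalone sketch (not taken from any of the projects below) of the pattern they all rely on: a FeatureSelection is an inclusion mask over a feature Alphabet, so a freshly constructed selection admits no features until they are explicitly add()-ed. The class name FeatureSelectionSketch and the feature strings are made up for illustration, and the contains(...) membership check is assumed from the MALLET API rather than demonstrated in the examples below.

import cc.mallet.types.Alphabet;
import cc.mallet.types.FeatureSelection;

public class FeatureSelectionSketch {
	public static void main (String[] args) {
		// Build a small feature dictionary; these feature names are hypothetical.
		Alphabet alphabet = new Alphabet();
		alphabet.lookupIndex ("word=the");
		alphabet.lookupIndex ("word=nucleosome");
		alphabet.lookupIndex ("suffix=ing");

		// A new, empty FeatureSelection over the alphabet allows nothing yet;
		// Example 1 below exploits exactly this to restrict transition weights
		// to the default feature only.
		FeatureSelection fs = new FeatureSelection (alphabet);

		// Explicitly admit a subset of features, as Example 7 does when it
		// rebuilds a selection from a pruned alphabet.
		fs.add ("word=nucleosome");
		fs.add ("suffix=ing");

		// Membership check (assumed API): only the added features are selected.
		System.out.println (fs.contains ("word=the"));        // false
		System.out.println (fs.contains ("word=nucleosome")); // true
	}
}

In the snippets that follow, such a selection is typically attached to an InstanceList via setFeatureSelection(...) (Example 7) or stored per weight set (Example 1) so that learners consult only the permitted features.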

Example 1: addFullyConnectedStatesForThreeQuarterLabels

import cc.mallet.types.FeatureSelection; // import the required package/class
public void addFullyConnectedStatesForThreeQuarterLabels (InstanceList trainingSet)
{
	int numLabels = outputAlphabet.size();
	for (int i = 0; i < numLabels; i++) {
		String[] destinationNames = new String[numLabels];
		String[][] weightNames = new String[numLabels][];
		for (int j = 0; j < numLabels; j++) {
			String labelName = (String)outputAlphabet.lookupObject(j);
			destinationNames[j] = labelName;
			weightNames[j] = new String[2];
			// The "half-labels" will include all observational tests
			weightNames[j][0] = labelName;
			// The "transition" weights will include only the default feature
			String wn = (String)outputAlphabet.lookupObject(i) + "->" + (String)outputAlphabet.lookupObject(j);
			weightNames[j][1] = wn;
			int wi = getWeightsIndex (wn);
			// A new empty FeatureSelection won't allow any features here, so we only
			// get the default feature for transitions
			featureSelections[wi] = new FeatureSelection(trainingSet.getDataAlphabet());
		}
		addState ((String)outputAlphabet.lookupObject(i), 0.0, 0.0,
				destinationNames, destinationNames, weightNames);
	}
}
 
Developer: kostagiolasn, Project: NucleosomePatternClassifier, Lines: 25, Source file: CRF.java

Example 2: readObject

import cc.mallet.types.FeatureSelection; // import the required package/class
@SuppressWarnings("unchecked")
private void readObject (ObjectInputStream in) throws IOException, ClassNotFoundException {
	in.readInt ();
	inputAlphabet = (Alphabet) in.readObject ();
	outputAlphabet = (Alphabet) in.readObject ();
	states = (ArrayList<State>) in.readObject ();
	initialStates = (ArrayList<State>) in.readObject ();
	name2state = (HashMap) in.readObject ();
	parameters = (Factors) in.readObject ();
	globalFeatureSelection = (FeatureSelection) in.readObject ();		
	featureSelections = (FeatureSelection[]) in.readObject ();
	featureInducers = (ArrayList<FeatureInducer>) in.readObject ();
	weightsValueChangeStamp = in.readInt ();
	weightsStructureChangeStamp = in.readInt ();
	cachedNumParametersStamp = in.readInt ();
	numParameters = in.readInt ();
}
 
Developer: kostagiolasn, Project: NucleosomePatternClassifier, Lines: 18, Source file: CRF.java

Example 3: train

import cc.mallet.types.FeatureSelection; // import the required package/class
public C45 train (InstanceList trainingList)
{
	FeatureSelection selectedFeatures = trainingList.getFeatureSelection();
	if (selectedFeatures != null)
		// xxx Attend to FeatureSelection!!!
		throw new UnsupportedOperationException ("FeatureSelection not yet implemented.");
	C45.Node root = new C45.Node(trainingList, null, m_minNumInsts);
	splitTree(root, 0);
	C45 tree = new C45 (trainingList.getPipe(), root);
	logger.info("C45 learned: (size=" + tree.getSize() + ")\n");
	tree.print();
	if (m_doPruning) {
		tree.prune();
		logger.info("\nPruned C45: (size=" + tree.getSize() + ")\n");
		root.print();
	}
	root.stopGrowth();
	this.classifier = tree;
	return classifier;
}
 
Developer: kostagiolasn, Project: NucleosomePatternClassifier, Lines: 21, Source file: C45Trainer.java

Example 4: induceFeatures

import cc.mallet.types.FeatureSelection; // import the required package/class
public void induceFeatures (InstanceList ilist, boolean withFeatureShrinkage, boolean inducePerClassFeatures)
	{
		if (inducePerClassFeatures) {
			int numClasses = ilist.getTargetAlphabet().size();
//			int numFeatures = ilist.getDataAlphabet().size();
			FeatureSelection[] pcfs = new FeatureSelection[numClasses];
			for (int j = 0; j < numClasses; j++)
				pcfs[j] = (FeatureSelection) ilist.getPerLabelFeatureSelection()[j].clone();
			for (int i = 0; i < ilist.size(); i++) {
				Object data = ilist.get(i).getData();
				AugmentableFeatureVector afv = (AugmentableFeatureVector) data;
				root.induceFeatures (afv, null, pcfs, ilist.getFeatureSelection(), ilist.getPerLabelFeatureSelection(),
														 withFeatureShrinkage, inducePerClassFeatures, addFeaturesClassEntropyThreshold);
			}
		} else {
			throw new UnsupportedOperationException ("Not yet implemented");
		}
	}
 
Developer: kostagiolasn, Project: NucleosomePatternClassifier, Lines: 19, Source file: DecisionTree.java

Example 5: split

import cc.mallet.types.FeatureSelection; // import the required package/class
public void split (FeatureSelection fs)
{
	if (ilist == null)
		throw new IllegalStateException ("Frozen.  Cannot split.");
	InstanceList ilist0 = new InstanceList (ilist.getPipe());
	InstanceList ilist1 = new InstanceList (ilist.getPipe());
	for (int i = 0; i < ilist.size(); i++) {
		Instance instance = ilist.get(i);
		FeatureVector fv = (FeatureVector) instance.getData ();
		// xxx What test should this be?  What to do with negative values?
			// Whatever is decided here should also go in InfoGain.calcInfoGains()
		if (fv.value (featureIndex) != 0) {
			//System.out.println ("list1 add "+instance.getUri()+" weight="+ilist.getInstanceWeight(i));
			ilist1.add (instance, ilist.getInstanceWeight(i));
		} else {
			//System.out.println ("list0 add "+instance.getUri()+" weight="+ilist.getInstanceWeight(i));
			ilist0.add (instance, ilist.getInstanceWeight(i));
		}
	}
	logger.info("child0="+ilist0.size()+" child1="+ilist1.size());
	child0 = new Node (ilist0, this, fs);
	child1 = new Node (ilist1, this, fs);
}
 
Developer: kostagiolasn, Project: NucleosomePatternClassifier, Lines: 24, Source file: DecisionTree.java

Example 6: MaxEnt

import cc.mallet.types.FeatureSelection; // import the required package/class
public MaxEnt (Pipe dataPipe,
			double[] parameters,
			FeatureSelection featureSelection,
			FeatureSelection[] perClassFeatureSelection)
	{
		super (dataPipe);
		assert (featureSelection == null || perClassFeatureSelection == null);
		if (parameters != null)
			this.parameters = parameters;
		else
			this.parameters = new double[getNumParameters(dataPipe)];
		this.featureSelection = featureSelection;
		this.perClassFeatureSelection = perClassFeatureSelection;
		this.defaultFeatureIndex = dataPipe.getDataAlphabet().size();
//		assert (parameters.getNumCols() == defaultFeatureIndex+1);
	}
 
Developer: kostagiolasn, Project: NucleosomePatternClassifier, Lines: 17, Source file: MaxEnt.java

Example 7: removeFeature

import cc.mallet.types.FeatureSelection; // import the required package/class
public void removeFeature(Set<String> featureNames) throws DataMiningException {
  Alphabet alphabet = featureAlphabet.getAlphabet();
  FeatureSelection fs = new FeatureSelection(alphabet);
  double[] counts = new double[alphabet.size()];
  for (int feat = 0; feat < counts.length; ++feat) {
    Object featureName = featureAlphabet.getAlphabet().lookupObject(feat);
    counts[feat] = featureNames.contains(featureName) ? 0 : 1;
  }
  Alphabet reducedAlphabet = new Alphabet();
  featureAlphabet.prune(counts, reducedAlphabet, 1);
  Iterator<?> it = reducedAlphabet.iterator();
  while (it.hasNext()) {
    fs.add(it.next());
  }
  data.setFeatureSelection(fs);
}
 
Developer: begab, Project: kpe, Lines: 17, Source file: MalletDataHandler.java

