本文整理汇总了Java中com.carrotsearch.hppc.IntIntHashMap类的典型用法代码示例。如果您正苦于以下问题:Java IntIntHashMap类的具体用法?Java IntIntHashMap怎么用?Java IntIntHashMap使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
IntIntHashMap类属于com.carrotsearch.hppc包,在下文中一共展示了IntIntHashMap类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: ProjectPushInfo
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Builds projection push-down metadata: records the projected columns, desired fields
 * and input references, and prepares an {@link InputRewriter} that remaps each field's
 * original ordinal to its compacted position in the projected output.
 */
public ProjectPushInfo(List<SchemaPath> columns, ImmutableList<DesiredField> desiredFields, ImmutableList<RexNode> inputRefs) {
  super();
  this.columns = columns;
  this.desiredFields = desiredFields;
  this.inputsRefs = inputRefs;
  this.fieldNames = Lists.newArrayListWithCapacity(desiredFields.size());
  this.types = Lists.newArrayListWithCapacity(desiredFields.size());

  // Map each desired field's original index to its new, dense position.
  IntIntHashMap indexMapping = new IntIntHashMap();
  int newIndex = 0;
  for (DesiredField field : desiredFields) {
    fieldNames.add(field.name);
    types.add(field.field.getType());
    indexMapping.put(field.origIndex, newIndex++);
  }
  this.rewriter = new InputRewriter(indexMapping);
}
示例2: ProjectPushInfo
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Builds projection push-down metadata for the given columns and desired fields,
 * and prepares an {@link InputRewriter} that remaps each field's original ordinal
 * to its compacted position in the projected output.
 */
public ProjectPushInfo(List<SchemaPath> columns, ImmutableList<DesiredField> desiredFields) {
  super();
  this.columns = columns;
  this.desiredFields = desiredFields;
  this.fieldNames = Lists.newArrayListWithCapacity(desiredFields.size());
  this.types = Lists.newArrayListWithCapacity(desiredFields.size());

  // Map each desired field's original index to its new, dense position.
  IntIntHashMap indexMapping = new IntIntHashMap();
  int newIndex = 0;
  for (DesiredField field : desiredFields) {
    fieldNames.add(field.name);
    types.add(field.field.getType());
    indexMapping.put(field.origIndex, newIndex++);
  }
  this.rewriter = new InputRewriter(indexMapping);
}
示例3: normalize
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Partitions the given unresolved dependencies according to the argument-id
 * substitution table: ids with a mapping stay unresolved (re-numbered if the
 * mapping changed them); ids with no mapping get no attachment and are resolved
 * to their own head.
 */
private static void normalize(final Collection<UnresolvedDependency> unresolvedDependencies,
    final IntIntHashMap substitutions, final Set<UnresolvedDependency> newUnresolvedDependencies,
    final Collection<UnlabelledDependency> newResolvedDependencies) {
  for (final UnresolvedDependency dep : unresolvedDependencies) {
    final int remapped = substitutions.getOrDefault(dep.argumentID, Integer.MIN_VALUE);
    if (remapped == Integer.MIN_VALUE) {
      // No attachment for this argument (e.g. in arbitrary control):
      // resolve the dependency to its own head.
      newResolvedDependencies.add(new UnlabelledDependency(dep.getHead(), dep.getCategory(),
          dep.getArgNumber(), Collections.singletonList(dep.getHead()), dep.getPreposition()));
    } else if (remapped == dep.argumentID) {
      // Mapping is the identity: keep the dependency as-is.
      newUnresolvedDependencies.add(dep);
    } else {
      // Re-number the argument id according to the substitution.
      newUnresolvedDependencies.add(new UnresolvedDependency(dep.getHead(), dep.getCategory(),
          dep.getArgNumber(), remapped, dep.getPreposition()));
    }
  }
}
示例4: initializeForTypes
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Initializes (or resizes) the per-type topic-count structures for the given alphabet.
 *
 * <p>Three cases: first call (allocate everything), a different alphabet instance
 * (rejected), or the same alphabet grown in size (extend {@code typeTopicCounts}
 * and recompute {@code betaSum}).
 *
 * @param alphabet the type alphabet; must be the same instance on every call
 * @throws IllegalArgumentException if a different {@code Alphabet} instance is passed
 */
private void initializeForTypes (Alphabet alphabet) {
  if (this.alphabet == null) {
    // First initialization: allocate one count map per type.
    this.alphabet = alphabet;
    this.numTypes = alphabet.size();
    this.typeTopicCounts = new IntIntHashMap[numTypes];
    for (int fi = 0; fi < numTypes; fi++)
      typeTopicCounts[fi] = new IntIntHashMap();
    this.betaSum = beta * numTypes;
  } else if (alphabet != this.alphabet) {
    throw new IllegalArgumentException ("Cannot change Alphabet.");
  } else if (alphabet.size() != this.numTypes) {
    // Alphabet grew: copy existing maps, allocate empty maps for the new types.
    this.numTypes = alphabet.size();
    IntIntHashMap[] newTypeTopicCounts = new IntIntHashMap[numTypes];
    for (int i = 0; i < typeTopicCounts.length; i++)
      newTypeTopicCounts[i] = typeTopicCounts[i];
    for (int i = typeTopicCounts.length; i < numTypes; i++)
      newTypeTopicCounts[i] = new IntIntHashMap();
    // Bug fix: publish the grown array. Previously this assignment was commented
    // out (with a TODO asking why), so the resized array was built and discarded,
    // leaving typeTopicCounts too short for the enlarged alphabet. The sibling
    // initializeTypeTopicCounts() performs exactly this assignment.
    this.typeTopicCounts = newTypeTopicCounts;
    this.betaSum = beta * numTypes;
  } // else: same alphabet, same size — nothing to do
}
示例5: NPTopicModel
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Creates a non-parametric (hierarchical Dirichlet process) topic model.
 *
 * @param alpha balances local document topic counts against the global topic distribution
 * @param gamma weight on a completely new, never-before-seen topic in the global distribution
 * @param beta  controls the variability of the topic-word distributions
 */
public NPTopicModel (double alpha, double gamma, double beta) {
  this.alpha = alpha;
  this.gamma = gamma;
  this.beta = beta;

  // Start with a single topic; more are created on demand during sampling.
  this.topicAlphabet = AlphabetFactory.labelAlphabetOfSize(1);
  this.data = new ArrayList<TopicAssignment>();
  this.random = new Randoms();

  tokensPerTopic = new IntIntHashMap();
  docsPerTopic = new IntIntHashMap();

  formatter = NumberFormat.getInstance();
  formatter.setMaximumFractionDigits(5);

  logger.info("Non-Parametric LDA");
}
示例6: pipe
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Counts, for this instance's FeatureSequence, each distinct feature once
 * (regardless of how many times it occurs) and folds those distinct features
 * into the shared {@code counter}; also bumps {@code numInstances}.
 *
 * @param instance pipeline instance whose data must be a {@link FeatureSequence}
 * @return the same instance, unchanged
 * @throws IllegalArgumentException if the instance data is not a FeatureSequence
 */
public Instance pipe(Instance instance) {
  // Per-instance tally: putOrAdd(k, 1, 1) inserts 1 or increments an existing entry,
  // so after the loop the key set is exactly the distinct features of this instance.
  IntIntHashMap localCounter = new IntIntHashMap();

  if (instance.getData() instanceof FeatureSequence) {
    FeatureSequence features = (FeatureSequence) instance.getData();
    for (int position = 0; position < features.size(); position++) {
      localCounter.putOrAdd(features.getIndexAtPosition(position), 1, 1);
    }
  }
  else {
    throw new IllegalArgumentException("Looking for a FeatureSequence, found a " +
      instance.getData().getClass());
  }

  // Each distinct feature contributes exactly once to the global counter
  // (document frequency rather than term frequency).
  // NOTE(review): this passes the IntCursor itself, not cursor.value, to
  // counter.increment — presumably counter's API accepts the cursor; TODO confirm
  // against counter's declared type.
  for (IntCursor feature: localCounter.keys()) {
    counter.increment(feature);
  }

  numInstances++;
  return instance;
}
示例7: rightLanguageForAllStates
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Calculates the size of the "right language" of every state in an FSA — the number
 * of sequences accepted starting from that state.
 *
 * <p>States are visited in post-order, so every target state's count is already
 * available when a state sums over its outgoing arcs.
 *
 * @param fsa the automaton to calculate the right language for
 * @return a map from state identifier to its right-language count
 */
public static IntIntHashMap rightLanguageForAllStates(final FSA fsa) {
  final IntIntHashMap counts = new IntIntHashMap();
  fsa.visitInPostOrder(new StateVisitor() {
    public boolean accept(int state) {
      int total = 0;
      for (int arc = fsa.getFirstArc(state); arc != 0; arc = fsa.getNextArc(arc)) {
        // A final arc ends one sequence here.
        if (fsa.isArcFinal(arc)) {
          total++;
        }
        // A non-terminal arc continues into a state whose count is already known.
        if (!fsa.isArcTerminal(arc)) {
          total += counts.get(fsa.getEndNode(arc));
        }
      }
      counts.put(state, total);
      return true;
    }
  });
  return counts;
}
示例8: computeInlinkCount
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Computes the in-link count of every state reachable from the root: how many
 * arcs point at each state. Uses an iterative depth-first traversal with an
 * explicit stack; arcs into already-visited states are still counted, but those
 * states are not re-expanded.
 */
private IntIntHashMap computeInlinkCount(final FSA fsa) {
  final IntIntHashMap inlinks = new IntIntHashMap();
  final BitSet seen = new BitSet();
  final IntStack pending = new IntStack();

  pending.push(fsa.getRootNode());
  while (!pending.isEmpty()) {
    final int node = pending.pop();
    if (seen.get(node)) {
      continue; // already expanded
    }
    seen.set(node);

    for (int arc = fsa.getFirstArc(node); arc != 0; arc = fsa.getNextArc(arc)) {
      if (fsa.isArcTerminal(arc)) {
        continue; // terminal arcs have no target state
      }
      final int target = fsa.getEndNode(arc);
      inlinks.putOrAdd(target, 1, 1);
      if (!seen.get(target)) {
        pending.push(target);
      }
    }
  }
  return inlinks;
}
示例9: computePerColumnHitRates
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Walks every node in the table and, per column, counts how many cell values
 * match one of the given node ids. Stores the per-column hit counts in
 * {@code result.coverage} and tallies the total number of nodes seen in
 * {@code result.totalNetworkNodes}.
 */
static void computePerColumnHitRates(IdMappingResult result,
    Set<String> nodeIds,
    NodeTableVisitor visitor) {
  final IntIntMap hitsPerColumn = new IntIntHashMap();

  NodeTableConsumer tally = new NodeTableConsumer() {
    @Override
    public void startNode(int nodeId) {
      result.totalNetworkNodes++;
    }

    @Override
    public void cell(int columnId, String value) {
      // Count a hit whenever a cell value matches one of the supplied ids.
      if (nodeIds.contains(value)) {
        hitsPerColumn.addTo(columnId, 1);
      }
    }

    @Override
    public void endNode() {
      // Nothing to finalize per node.
    }
  };

  visitor.visit(tally);
  result.coverage = hitsPerColumn;
}
示例10: normalize
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Recursively renumbers coindexation ids into a canonical, dense numbering
 * starting at {@code minValue}. Fresh ids are assigned in first-seen order and
 * recorded in {@code substitutions} so repeated ids map consistently.
 */
Coindexation normalize(final IntIntHashMap substitutions, final int minValue) {
  Coindexation.IDorHead normalizedIdOrHead = idOrHead;
  if (idOrHead.id != null) {
    int mapped = substitutions.getOrDefault(idOrHead.id, Integer.MIN_VALUE);
    if (mapped == Integer.MIN_VALUE) {
      // First occurrence of this id: assign the next free slot above minValue.
      mapped = substitutions.size() + minValue;
      substitutions.put(idOrHead.id, mapped);
    }
    normalizedIdOrHead = new Coindexation.IDorHead(mapped);
  }
  // Normalize children after this node so ids are numbered in pre-order.
  final Coindexation normalizedLeft = (left == null) ? null : left.normalize(substitutions, minValue);
  final Coindexation normalizedRight = (right == null) ? null : right.normalize(substitutions, minValue);
  return new Coindexation(normalizedLeft, normalizedRight, normalizedIdOrHead, preposition);
}
示例11: test
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Benchmark body: fills a fresh map with every key mapped to itself, twice —
 * the first pass exercises inserts, the second pass exercises updates of
 * existing entries. Returns the final size so the work cannot be optimized away.
 */
@Override
public int test() {
  final IntIntHashMap map = new IntIntHashMap( m_keys.length, m_fillFactor );
  for (int pass = 0; pass < 2; pass++) {
    for (final int key : m_keys) {
      map.put(key, key);
    }
  }
  return map.size();
}
示例12: recacheStateTopicDistribution
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Rebuilds the cached cumulative log-gamma tables for one state after its topic
 * counts changed. Only the topics present in {@code topicCounts} are refreshed
 * in the per-topic cache; the per-document cache for the state is always rebuilt.
 *
 * <p>Each cache row holds partial sums of log Gamma ratios:
 * cache[i] = sum over j in [1, i] of log(base + j - 1), built incrementally so
 * cache[i] = cache[i-1] + log(base + i - 1), with cache[0] = 0.
 *
 * @param state       index of the state whose caches are refreshed
 * @param topicCounts topics (keys) whose cached rows need recomputation
 */
private void recacheStateTopicDistribution(int state, IntIntHashMap topicCounts) {
  int[] currentStateTopicCounts = stateTopicCounts[state];
  double[][] currentStateCache = topicLogGammaCache[state];
  double[] cache;

  // keys() cursors carry the map key in .value (hppc convention), so `topic`
  // is a topic id present in topicCounts.
  for (IntCursor cursor: topicCounts.keys()) {
    int topic = cursor.value;
    cache = currentStateCache[topic];

    // Rebuild the cumulative row for this topic, base = alpha[topic] + count.
    cache[0] = 0.0;
    for (int i=1; i < cache.length; i++) {
      cache[i] =
        cache[ i-1 ] +
        Math.log( alpha[topic] + i - 1 +
                  currentStateTopicCounts[topic] );
    }
  }

  // Rebuild the document-level row for this state, base = alphaSum + total count.
  docLogGammaCache[state][0] = 0.0;
  for (int i=1; i < docLogGammaCache[state].length; i++) {
    docLogGammaCache[state][i] =
      docLogGammaCache[state][ i-1 ] +
      Math.log( alphaSum + i - 1 +
                stateTopicTotals[state] );
  }
}
示例13: initializeTypeTopicCounts
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Grows the per-type topic-count array to {@code numTypes} entries: existing
 * maps are carried over in place, and each newly added slot gets an empty map.
 */
private void initializeTypeTopicCounts () {
  IntIntHashMap[] grown = new IntIntHashMap[numTypes];
  System.arraycopy(typeTopicCounts, 0, grown, 0, typeTopicCounts.length);
  for (int t = typeTopicCounts.length; t < numTypes; t++) {
    grown[t] = new IntIntHashMap();
  }
  this.typeTopicCounts = grown;
}
示例14: OneLabelL2PRConstraints
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Copy-style constructor: deep-copies the constraint map so this instance owns
 * its own mutable constraint state, while sharing the index map, state-label
 * map and normalization flag with the caller.
 */
protected OneLabelL2PRConstraints(IntObjectHashMap<OneLabelPRConstraint> constraints,
    IntIntHashMap constraintIndices, StateLabelMap map, boolean normalized) {
  // Deep copy: each constraint is cloned so later mutation here does not
  // affect the caller's map.
  this.constraints = new IntObjectHashMap<OneLabelPRConstraint>();
  for (IntObjectCursor<OneLabelPRConstraint> entry : constraints) {
    this.constraints.put(entry.key, entry.value.copy());
  }
  this.constraintIndices = constraintIndices;
  this.map = map;
  this.normalized = normalized;
  this.cache = new IntArrayList();
}
示例15: setIndex2Location
import com.carrotsearch.hppc.IntIntHashMap; //导入依赖的package包/类
/**
 * Builds the reverse mapping from sparse index to its location (position) in the
 * sorted {@code indices} array, and records the largest index as {@code maxIndex}.
 * Must be called exactly once, after {@code indices} is populated.
 */
private void setIndex2Location ()
{
  assert (index2location == null);
  assert (indices.length > 0);

  // indices is sorted, so the last entry is the maximum.
  this.maxIndex = indices[indices.length - 1];

  this.index2location = new IntIntHashMap (numLocations ());
  int location = 0;
  for (int index : indices) {
    index2location.put (index, location++);
  }
}