This article collects typical usage examples of the Java class com.carrotsearch.hppc.IntIntOpenHashMap. If you are wondering what IntIntOpenHashMap is for, or how to use it, the curated class examples below may help.
The IntIntOpenHashMap class belongs to the com.carrotsearch.hppc package. A total of 15 code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
Example 1: ProjectPushInfo
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
public ProjectPushInfo(List<SchemaPath> columns, ImmutableList<DesiredField> desiredFields) {
    super();
    this.columns = columns;
    this.desiredFields = desiredFields;

    this.fieldNames = Lists.newArrayListWithCapacity(desiredFields.size());
    this.types = Lists.newArrayListWithCapacity(desiredFields.size());
    IntIntOpenHashMap oldToNewIds = new IntIntOpenHashMap();

    int i = 0;
    for (DesiredField f : desiredFields) {
        fieldNames.add(f.name);
        types.add(f.field.getType());
        oldToNewIds.put(f.origIndex, i);
        i++;
    }
    this.rewriter = new InputRewriter(oldToNewIds);
}
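As a side note on the pattern above: the map is only used to translate old (original) field indices into new, compacted ones before handing them to the rewriter. A minimal, standalone sketch of that translation step with IntIntOpenHashMap (the index values below are made up, and InputRewriter is not involved):

IntIntOpenHashMap oldToNewIds = new IntIntOpenHashMap();
int[] keptColumns = {7, 3, 42};               // hypothetical original column indices
for (int newIndex = 0; newIndex < keptColumns.length; newIndex++) {
    oldToNewIds.put(keptColumns[newIndex], newIndex);
}
int remapped = oldToNewIds.get(42);           // 2; note that absent keys return 0 by default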
Example 2: removeFromVariableCaches
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
private void removeFromVariableCaches (Variable victim)
{
    Set survivors = new HashSet(variablesSet ());
    survivors.remove (victim);

    int vi = 0;
    IntIntOpenHashMap dict = new IntIntOpenHashMap (survivors.size ());
    // dict.setDefaultValue (-1); No longer supported, but this.getIndex() written to avoid need for this.
    my2global = new int[survivors.size ()];

    for (Iterator it = survivors.iterator (); it.hasNext();) {
        Variable var = (Variable) it.next ();
        int gvi = var.getIndex ();
        dict.put (gvi, vi);
        my2global [vi] = gvi;
        vi++; // advance the local index for the next surviving variable
    }

    projectionMap = dict;
    numNodes--; // do this at end b/c it affects getVertexSet()
}
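The commented-out dict.setDefaultValue(-1) call highlights a general HPPC behavior worth knowing: get() returns 0 for keys that are not present, so code that needs a "missing" sentinel such as -1 has to check membership itself. A minimal sketch of that check (standard HPPC API, not taken from the MALLET code above):

IntIntOpenHashMap dict = new IntIntOpenHashMap();
dict.put(10, 0);                                        // a real mapping to the value 0
int present = dict.get(10);                             // 0
int missing = dict.containsKey(99) ? dict.get(99) : -1; // -1 sentinel for absent keys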
Example 3: recacheStateTopicDistribution
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
private void recacheStateTopicDistribution(int state, IntIntOpenHashMap topicCounts) {
    int[] currentStateTopicCounts = stateTopicCounts[state];
    double[][] currentStateCache = topicLogGammaCache[state];
    double[] cache;

    for (int topic : topicCounts.keys().toArray()) {
        cache = currentStateCache[topic];
        cache[0] = 0.0;
        for (int i = 1; i < cache.length; i++) {
            cache[i] = cache[i - 1]
                + Math.log(alpha[topic] + i - 1 + currentStateTopicCounts[topic]);
        }
    }

    docLogGammaCache[state][0] = 0.0;
    for (int i = 1; i < docLogGammaCache[state].length; i++) {
        docLogGammaCache[state][i] = docLogGammaCache[state][i - 1]
            + Math.log(alphaSum + i - 1 + stateTopicTotals[state]);
    }
}
Example 4: initializeForTypes
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
private void initializeForTypes (Alphabet alphabet) {
    if (this.alphabet == null) {
        this.alphabet = alphabet;
        this.numTypes = alphabet.size();
        this.typeTopicCounts = new IntIntOpenHashMap[numTypes];
        for (int fi = 0; fi < numTypes; fi++)
            typeTopicCounts[fi] = new IntIntOpenHashMap();
        this.betaSum = beta * numTypes;
    } else if (alphabet != this.alphabet) {
        throw new IllegalArgumentException ("Cannot change Alphabet.");
    } else if (alphabet.size() != this.numTypes) {
        this.numTypes = alphabet.size();
        IntIntOpenHashMap[] newTypeTopicCounts = new IntIntOpenHashMap[numTypes];
        for (int i = 0; i < typeTopicCounts.length; i++)
            newTypeTopicCounts[i] = typeTopicCounts[i];
        for (int i = typeTopicCounts.length; i < numTypes; i++)
            newTypeTopicCounts[i] = new IntIntOpenHashMap();
        // TODO AKM July 18: Why wasn't the next line there previously?
        // this.typeTopicCounts = newTypeTopicCounts;
        this.betaSum = beta * numTypes;
    } // else, nothing changed, nothing to be done
}
Example 5: NPTopicModel
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
/** @param alpha this parameter balances the local document topic counts with
 *      the global distribution over topics.
 *  @param gamma this parameter is the weight on a completely new, never-before-seen topic
 *      in the global distribution.
 *  @param beta this parameter controls the variability of the topic-word distributions
 */
public NPTopicModel (double alpha, double gamma, double beta) {
    this.data = new ArrayList<TopicAssignment>();
    this.topicAlphabet = AlphabetFactory.labelAlphabetOfSize(1);

    this.alpha = alpha;
    this.gamma = gamma;
    this.beta = beta;
    this.random = new Randoms();

    tokensPerTopic = new IntIntOpenHashMap();
    docsPerTopic = new IntIntOpenHashMap();

    formatter = NumberFormat.getInstance();
    formatter.setMaximumFractionDigits(5);

    logger.info("Non-Parametric LDA");
}
Example 6: pipe
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
public Instance pipe(Instance instance) {
    IntIntOpenHashMap localCounter = new IntIntOpenHashMap();

    if (instance.getData() instanceof FeatureSequence) {
        FeatureSequence features = (FeatureSequence) instance.getData();
        for (int position = 0; position < features.size(); position++) {
            localCounter.putOrAdd(features.getIndexAtPosition(position), 1, 1);
        }
    }
    else {
        throw new IllegalArgumentException("Looking for a FeatureSequence, found a " +
                                           instance.getData().getClass());
    }

    for (int feature : localCounter.keys().toArray()) {
        counter.increment(feature);
    }

    numInstances++;
    return instance;
}
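The putOrAdd(key, putValue, incrementValue) call above is HPPC's counting idiom: it stores putValue if the key is absent, otherwise adds incrementValue to the existing value. A minimal, standalone sketch of the same counting pattern (the feature ids below are made up):

IntIntOpenHashMap localCounter = new IntIntOpenHashMap();
int[] featureIds = {4, 7, 4, 4, 9};      // hypothetical token/feature indices
for (int id : featureIds) {
    localCounter.putOrAdd(id, 1, 1);     // insert 1 on first sight, otherwise add 1
}
// localCounter.get(4) == 3, localCounter.get(7) == 1, localCounter.get(9) == 1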
Example 7: remapGapIndices
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
/** Called by {@link CTReader#normalizeIndices(CTTree)}. */
static private void remapGapIndices(IntIntOpenHashMap map, int[] lastIndex, CTNode curr)
{
    int gapIndex = curr.gapIndex;

    if (map.containsKey(gapIndex))
    {
        curr.gapIndex = map.get(gapIndex);
    }
    else if (gapIndex != -1)
    {
        curr.gapIndex = lastIndex[0];
        map.put(gapIndex, lastIndex[0]++);
    }

    for (CTNode child : curr.ls_children)
        remapGapIndices(map, lastIndex, child);
}
Example 8: fordFulkerson
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
public static ObjectOpenHashSet<Edge> fordFulkerson(DirectedGraph graph) {
    ObjectOpenHashSet<Edge> result = new ObjectOpenHashSet<>();
    ObjectOpenHashSet<Edge> edges = new ObjectOpenHashSet<>(graph.getEdges());
    IntIntOpenHashMap match = new IntIntOpenHashMap();
    IntOpenHashSet whiteSet = new IntOpenHashSet();
    IntOpenHashSet blackSet = new IntOpenHashSet();

    for (ObjectCursor<Edge> ecur : edges) {
        whiteSet.add(ecur.value.from);
        blackSet.add(ecur.value.to);
        match.put(ecur.value.to, -1);
    }

    for (IntCursor vcur : whiteSet) {
        IntOpenHashSet visitedBlack = new IntOpenHashSet();
        aug(edges, vcur.value, visitedBlack, match, blackSet);
    }

    for (IntIntCursor entrycur : match) {
        result.add(new Edge(entrycur.value, entrycur.key));
    }
    return result;
}
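The final loop above walks the map with an IntIntCursor, HPPC's boxing-free iteration mechanism. For reference, a minimal sketch of the two common iteration styles, the enhanced for loop used here and the explicit iterator used in Example 14 (the entries are made up):

// Requires com.carrotsearch.hppc.cursors.IntIntCursor and java.util.Iterator.
IntIntOpenHashMap map = new IntIntOpenHashMap();
map.put(1, 10);
map.put(2, 20);

for (IntIntCursor c : map) {                  // enhanced for loop over cursors
    System.out.println(c.key + " -> " + c.value);
}

Iterator<IntIntCursor> it = map.iterator();   // explicit iterator
while (it.hasNext()) {
    IntIntCursor c = it.next();
    System.out.println(c.key + " -> " + c.value);
}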
Example 9: aug
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
private static boolean aug(ObjectOpenHashSet<Edge> edges, int white,
        IntOpenHashSet visitedBlack, IntIntOpenHashMap match,
        IntOpenHashSet blackSet) {
    for (IntCursor blackcur : blackSet) {
        boolean contains = false;
        final int black = blackcur.value;
        for (ObjectCursor<Edge> ecur : edges) {
            if ((white == ecur.value.from) && (black == ecur.value.to)) {
                contains = true;
            }
        }
        if (contains && !visitedBlack.contains(black)) {
            visitedBlack.add(black);
            if (match.get(black) == -1
                    || aug(edges, match.get(black), visitedBlack, match, blackSet)) {
                match.put(black, white);
                return true;
            }
        }
    }
    return false;
}
Example 10: setRawEdges
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
public void setRawEdges(int[] edgesFrom, int[] edgesTo, int edgesCount) {
    this.edgesCount = edgesCount;

    IntIntOpenHashMap inputVerticesMap = new IntIntOpenHashMap(edgesCount >> 4);
    int currentVertexNumber = 1;
    for (int i = 0; i < edgesCount; i++) {
        if (inputVerticesMap.putIfAbsent(edgesFrom[i], currentVertexNumber)) {
            currentVertexNumber++;
        }
        if (inputVerticesMap.putIfAbsent(edgesTo[i], currentVertexNumber)) {
            currentVertexNumber++;
        }
    }
    maxVertexNumber = currentVertexNumber - 1;

    this.edgesFrom = new int[edgesCount];
    this.edgesTo = new int[edgesCount];
    // System.out.println(Utils.largeIntArrayToString(edgesFrom));
    // System.out.println(Utils.largeIntArrayToString(edgesTo));
    for (int i = 0; i < edgesCount; i++) {
        this.edgesFrom[i] = inputVerticesMap.get(edgesFrom[i]);
        this.edgesTo[i] = inputVerticesMap.get(edgesTo[i]);
    }
    // System.out.println(Utils.largeIntArrayToString(this.edgesFrom));
    // System.out.println(Utils.largeIntArrayToString(this.edgesTo));

    areRawEdgesSet = true;
}
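The renumbering loop above relies on putIfAbsent(key, value) returning true only when the key was absent and the value was actually stored; that is what lets currentVertexNumber advance exactly once per distinct vertex id. A minimal sketch of the same pattern with made-up raw ids:

IntIntOpenHashMap idMap = new IntIntOpenHashMap();
int nextId = 1;
int[] rawIds = {100, 250, 100, 777};   // hypothetical raw vertex ids
for (int raw : rawIds) {
    if (idMap.putIfAbsent(raw, nextId)) {
        nextId++;                      // the key was new, so the number was consumed
    }
}
// idMap: 100 -> 1, 250 -> 2, 777 -> 3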
Example 11: fromMap
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
public static IntDistribution fromMap(IntIntOpenHashMap counts) {
    double[] values = new double[counts.assigned];
    int[] keys = counts.keys().toArray();
    Arrays.sort(keys);
    for (int j = 0; j < keys.length; ++j) {
        values[j] = counts.get(keys[j]);
    }
    return new IntDistribution(keys, values);
}
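Two HPPC details carry this method: counts.assigned is the number of stored entries, and counts.keys().toArray() copies the keys into a plain int[] that can be sorted. A minimal sketch of reading a counts map in key order (the values are invented, and IntDistribution itself is left out):

IntIntOpenHashMap counts = new IntIntOpenHashMap();
counts.put(5, 2);
counts.put(1, 7);
counts.put(3, 1);

int[] keys = counts.keys().toArray();       // key snapshot, in no particular order
Arrays.sort(keys);                          // 1, 3, 5
double[] values = new double[counts.assigned];
for (int j = 0; j < keys.length; j++) {
    values[j] = counts.get(keys[j]);        // 7.0, 1.0, 2.0
}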
Example 12: apply
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
@Override
public IntDistribution apply(ColouredGraph graph) {
    IntIntOpenHashMap counts = new IntIntOpenHashMap();
    Grph g = graph.getGraph();
    IntArrayList outDegrees = g.getAllOutEdgeDegrees();
    for (int i = 0; i < outDegrees.elementsCount; ++i) {
        counts.putOrAdd(outDegrees.buffer[i], 1, 1);
    }
    return IntDistribution.fromMap(counts);
}
Example 13: apply
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
@Override
public IntDistribution apply(ColouredGraph graph) {
    IntIntOpenHashMap counts = new IntIntOpenHashMap();
    Grph g = graph.getGraph();
    IntArrayList inDegrees = g.getAllInEdgeDegrees();
    for (int i = 0; i < inDegrees.elementsCount; ++i) {
        counts.putOrAdd(inDegrees.buffer[i], 1, 1);
    }
    return IntDistribution.fromMap(counts);
}
Example 14: CollapsingScoreCollector
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
public CollapsingScoreCollector(int maxDoc,
                                int segments,
                                SortedDocValues values,
                                int nullPolicy,
                                IntIntOpenHashMap boostDocs) {
    this.maxDoc = maxDoc;
    this.contexts = new AtomicReaderContext[segments];
    this.collapsedSet = new FixedBitSet(maxDoc);
    this.boostDocs = boostDocs;
    if (this.boostDocs != null) {
        // Set the elevated docs now.
        IntOpenHashSet boostG = new IntOpenHashSet();
        Iterator<IntIntCursor> it = this.boostDocs.iterator();
        while (it.hasNext()) {
            IntIntCursor cursor = it.next();
            int i = cursor.key;
            this.collapsedSet.set(i);
            int ord = values.getOrd(i);
            if (ord > -1) {
                boostG.add(ord);
            }
        }
        boostOrds = boostG.toArray();
        Arrays.sort(boostOrds);
    }
    this.values = values;
    int valueCount = values.getValueCount();
    this.ords = new int[valueCount];
    Arrays.fill(this.ords, -1);
    this.scores = new float[valueCount];
    Arrays.fill(this.scores, -Float.MAX_VALUE);
    this.nullPolicy = nullPolicy;
    if (nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
        nullScores = new FloatArrayList();
    }
}
Example 15: CollapsingFieldValueCollector
import com.carrotsearch.hppc.IntIntOpenHashMap; // import the required package/class
public CollapsingFieldValueCollector(int maxDoc,
                                     int segments,
                                     SortedDocValues values,
                                     int nullPolicy,
                                     String field,
                                     boolean max,
                                     boolean needsScores,
                                     FieldType fieldType,
                                     IntIntOpenHashMap boostDocs,
                                     FunctionQuery funcQuery, IndexSearcher searcher) throws IOException {
    this.maxDoc = maxDoc;
    this.contexts = new AtomicReaderContext[segments];
    this.values = values;
    int valueCount = values.getValueCount();
    this.nullPolicy = nullPolicy;
    this.needsScores = needsScores;
    this.boostDocs = boostDocs;
    if (funcQuery != null) {
        this.fieldValueCollapse = new ValueSourceCollapse(maxDoc, field, nullPolicy, new int[valueCount],
                max, this.needsScores, boostDocs, funcQuery, searcher, values);
    } else {
        if (fieldType instanceof TrieIntField) {
            this.fieldValueCollapse = new IntValueCollapse(maxDoc, field, nullPolicy, new int[valueCount],
                    max, this.needsScores, boostDocs, values);
        } else if (fieldType instanceof TrieLongField) {
            this.fieldValueCollapse = new LongValueCollapse(maxDoc, field, nullPolicy, new int[valueCount],
                    max, this.needsScores, boostDocs, values);
        } else if (fieldType instanceof TrieFloatField) {
            this.fieldValueCollapse = new FloatValueCollapse(maxDoc, field, nullPolicy, new int[valueCount],
                    max, this.needsScores, boostDocs, values);
        } else {
            throw new IOException("min/max must be either TrieInt, TrieLong or TrieFloat.");
        }
    }
}