本文整理匯總了Java中it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap.get方法的典型用法代碼示例。如果您正苦於以下問題:Java Int2ObjectOpenHashMap.get方法的具體用法?Java Int2ObjectOpenHashMap.get怎麽用?Java Int2ObjectOpenHashMap.get使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap
的用法示例。
在下文中一共展示了Int2ObjectOpenHashMap.get方法的9個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。
示例1: output
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; //導入方法依賴的package包/類
private void output() throws CouldNotReceiveResultException, ColumnNameMismatchException {
	// Gather the discovered unary INDs from the attribute objects:
	// dependent attribute id -> list of referenced attribute ids.
	final Int2ObjectOpenHashMap<IntList> dep2ref = new Int2ObjectOpenHashMap<IntList>(this.numColumns);
	for (final Attribute attribute : this.attributeId2attributeObject.values()) {
		if (attribute.getReferenced().isEmpty())
			continue;
		dep2ref.put(attribute.getAttributeId(), new IntArrayList(attribute.getReferenced()));
	}
	// Emit one InclusionDependency per (dependent, referenced) pair to the result receiver.
	for (final int dep : dep2ref.keySet()) {
		final String depTable = this.getTableNameFor(dep, this.tableColumnStartIndexes);
		final String depColumn = this.columnNames.get(dep);
		for (final int ref : dep2ref.get(dep)) {
			final String refTable = this.getTableNameFor(ref, this.tableColumnStartIndexes);
			final String refColumn = this.columnNames.get(ref);
			final ColumnPermutation lhs = new ColumnPermutation(new ColumnIdentifier(depTable, depColumn));
			final ColumnPermutation rhs = new ColumnPermutation(new ColumnIdentifier(refTable, refColumn));
			this.resultReceiver.receiveResult(new InclusionDependency(lhs, rhs));
			this.numUnaryINDs++;
		}
	}
}
示例2: generalize
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; //導入方法依賴的package包/類
// Adds, for every element of this FD tree, all of its direct generalizations
// (same rhs, lhs with exactly one attribute removed), processing the lattice
// top-down level by level. Existing FD marks are cleared first and are expected
// to be re-validated later by the caller.
public void generalize() {
int maxLevel = this.numAttributes;
// Build an index level->nodes for the top-down, level-wise traversal
Int2ObjectOpenHashMap<ArrayList<ElementLhsPair>> level2elements = new Int2ObjectOpenHashMap<>(maxLevel);
for (int level = 0; level < maxLevel; level++)
level2elements.put(level, new ArrayList<ElementLhsPair>());
// Recursively registers every tree element under a level, starting at the
// root with an empty lhs — presumably level == lhs cardinality; see addToIndex.
this.addToIndex(level2elements, 0, new OpenBitSet(this.numAttributes));
// Traverse the levels top-down and add all direct generalizations
for (int level = maxLevel - 1; level >= 0; level--) {
for (ElementLhsPair pair : level2elements.get(level)) {
// Remove isFDs, because we will mark valid FDs later on
pair.element.removeAllFds();
// Generate and add generalizations
for (int lhsAttr = pair.lhs.nextSetBit(0); lhsAttr >= 0; lhsAttr = pair.lhs.nextSetBit(lhsAttr + 1)) {
// Temporarily drop one lhs attribute to form the direct generalization.
pair.lhs.clear(lhsAttr);
FDTreeElement generalization = this.addGeneralization(pair.lhs, pair.element.getRhsAttributes());
// Only newly created nodes are scheduled for the next (smaller) level.
// At level 0 the lhs is empty, so this loop body never runs there and
// level2elements.get(-1) is never requested.
if (generalization != null)
level2elements.get(level - 1).add(new ElementLhsPair(generalization, pair.lhs.clone()));
// Restore the cleared bit before trying the next attribute.
pair.lhs.set(lhsAttr);
}
}
}
}
示例3: getCandidates
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; //導入方法依賴的package包/類
/**
 * Get the candidates: points which have at least one hash bucket in common
 * with the query object, aggregated over all hash tables.
 *
 * @param obj Query object
 * @return Candidates (the shared empty set if no bucket matched)
 */
protected DBIDs getCandidates(V obj) {
// Union of all matching buckets; allocated lazily on the first hit.
ModifiableDBIDs candidates = null;
final int numhash = hashtables.size();
// Scratch buffer reused across all hash function evaluations.
double[] buf = new double[hashfunctions.get(0).getNumberOfProjections()];
for(int i = 0; i < numhash; i++) {
final Int2ObjectOpenHashMap<DBIDs> table = hashtables.get(i);
final LocalitySensitiveHashFunction<? super V> hashfunc = hashfunctions.get(i);
// Get the initial (unbounded) hash code:
int hash = hashfunc.hashObject(obj, buf);
// Reduce to hash table size
// NOTE(review): plain % yields a negative bucket for a negative hash, which
// would silently miss the table entry — presumably hashObject() is
// guaranteed non-negative; verify against the hash function implementations.
int bucket = hash % numberOfBuckets;
DBIDs cur = table.get(bucket);
if(cur != null) {
if(candidates == null) {
// Capacity hint: assume roughly similar bucket sizes across tables.
candidates = DBIDUtil.newHashSet(cur.size() * numhash);
}
candidates.addDBIDs(cur);
}
}
return (candidates == null) ? DBIDUtil.EMPTYDBIDS : candidates;
}
示例4: getEvents
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; //導入方法依賴的package包/類
/**
 * Collects events for every bin of the given attribute using the merge tree,
 * and records the threshold used per bin on the matching (min or max) side.
 *
 * @param events output list the per-bin event records are appended to
 * @param eventTh event threshold to apply
 * @param featureMap per-bin features — presumably keyed like att.data; verify
 * @param min true to fill att.minThreshold, false for att.maxThreshold
 * @param att attribute whose bins are processed
 */
void getEvents(ArrayList<byte[]> events, double eventTh,
		Int2ObjectOpenHashMap<Feature[]> featureMap, boolean min, Attribute att) {
	// Box the threshold once for all bins. Float.valueOf() replaces the
	// deprecated new Float(double) constructor and makes the double -> float
	// narrowing explicit instead of implicit.
	final Float threshold = Float.valueOf((float) eventTh);
	// getting events using merge tree
	for (int tempBin : att.data.keySet()) {
		Feature[] features = featureMap.get(tempBin);
		GraphInput tf = functions.get(tempBin);
		getEvents(events, tf, features, min, eventTh, false);
		if (min) {
			att.minThreshold.put(tempBin, threshold);
		} else {
			att.maxThreshold.put(tempBin, threshold);
		}
	}
}
示例5: merge
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; //導入方法依賴的package包/類
/**
 * Execute the cluster merge, updating the nearest-neighbor cache.
 *
 * @param size size of data set
 * @param mat Matrix paradigm
 * @param prots the prototypes of merges between clusters
 * @param builder Result builder
 * @param clusters the current clustering
 * @param dq the range query
 * @param bestd the distances to the nearest neighboring cluster
 * @param besti the nearest neighboring cluster
 * @param x first cluster to merge, with x > y
 * @param y second cluster to merge, with y < x
 */
protected void merge(int size, MatrixParadigm mat, DBIDArrayMIter prots, PointerHierarchyRepresentationBuilder builder, Int2ObjectOpenHashMap<ModifiableDBIDs> clusters, DistanceQuery<O> dq, double[] bestd, int[] besti, int x, int y) {
final DBIDArrayIter ix = mat.ix.seek(x), iy = mat.iy.seek(y);
final double[] distances = mat.matrix;
// Offset of (x, y) in the packed lower-triangular distance matrix.
int offset = MatrixParadigm.triangleSize(x) + y;
assert (y < x);
if(LOG.isDebuggingFine()) {
LOG.debugFine("Merging: " + DBIDUtil.toString(ix) + " -> " + DBIDUtil.toString(iy) + " " + distances[offset]);
}
// A null entry means the cluster is still an implicit singleton.
ModifiableDBIDs cx = clusters.get(x), cy = clusters.get(y);
// Keep y
if(cy == null) {
cy = DBIDUtil.newHashSet();
cy.add(iy);
}
if(cx == null) {
// x was a singleton: add its single point directly.
cy.add(ix);
}
else {
// Absorb x's members into y and drop x from the map.
cy.addDBIDs(cx);
clusters.remove(x);
}
clusters.put(y, cy);
// parent of x is set to y
builder.add(ix, distances[offset], iy, prots.seek(offset));
// Deactivate x in cache:
besti[x] = -1;
updateMatrices(size, mat, prots, builder, clusters, dq, bestd, besti, x, y);
// If y's cached nearest neighbor was x (now gone), recompute it from scratch.
if(besti[y] == x) {
findBest(size, distances, bestd, besti, y);
}
}
示例6: merge
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; //導入方法依賴的package包/類
/**
 * Merges the two clusters x and y, keeping y (the smaller index) as the
 * surviving cluster and recording the merge in the result builder.
 *
 * @param size number of ids in the data set
 * @param mat distance matrix
 * @param prots calculated prototypes
 * @param builder Result builder
 * @param clusters the clusters
 * @param dq distance query of the data set
 * @param x first cluster to merge, with x > y
 * @param y second cluster to merge, with y < x
 */
protected static void merge(int size, MatrixParadigm mat, DBIDArrayMIter prots, PointerHierarchyRepresentationBuilder builder, Int2ObjectOpenHashMap<ModifiableDBIDs> clusters, DistanceQuery<?> dq, int x, int y) {
	assert (y < x);
	final DBIDArrayIter iterX = mat.ix.seek(x), iterY = mat.iy.seek(y);
	final double[] dists = mat.matrix;
	final int matOffset = MatrixParadigm.triangleSize(x) + y;
	if(LOG.isDebuggingFine()) {
		LOG.debugFine("Merging: " + DBIDUtil.toString(iterX) + " -> " + DBIDUtil.toString(iterY) + " " + dists[matOffset]);
	}
	// A null map entry means the cluster is still an implicit singleton.
	ModifiableDBIDs membersY = clusters.get(y);
	final ModifiableDBIDs membersX = clusters.get(x);
	if(membersY == null) {
		// Materialize y before absorbing anything into it.
		membersY = DBIDUtil.newHashSet();
		membersY.add(iterY);
	}
	if(membersX != null) {
		// Absorb x's members into y and drop x from the map.
		membersY.addDBIDs(membersX);
		clusters.remove(x);
	}
	else {
		// x was a singleton: add its single point directly.
		membersY.add(iterX);
	}
	clusters.put(y, membersY);
	// Record the merge: parent of x is set to y.
	builder.add(iterX, dists[matOffset], iterY, prots.seek(matOffset));
	updateMatrices(size, mat, prots, builder, clusters, dq, y);
}
示例7: updateEntry
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; //導入方法依賴的package包/類
/**
 * Update entry at x,y for distance matrix distances, choosing the minimax
 * prototype for the would-be merge of clusters x and y.
 *
 * @param mat distance matrix
 * @param prots calculated prototypes
 * @param clusters the clusters
 * @param dq distance query on the data set
 * @param x index of cluster, x > y
 * @param y index of cluster, y < x
 */
protected static void updateEntry(MatrixParadigm mat, DBIDArrayMIter prots, Int2ObjectOpenHashMap<ModifiableDBIDs> clusters, DistanceQuery<?> dq, int x, int y) {
assert (y < x);
final DBIDArrayIter ix = mat.ix, iy = mat.iy;
final double[] distances = mat.matrix;
// A null entry means the cluster is still an implicit singleton.
ModifiableDBIDs cx = clusters.get(x), cy = clusters.get(y);
DBIDVar prototype = DBIDUtil.newVar(ix.seek(x)); // Default prototype
double minMaxDist;
// Two "real" clusters:
if(cx != null && cy != null) {
// Scan both directions; the second call is bounded by the first result.
minMaxDist = findPrototype(dq, cx, cy, prototype, Double.POSITIVE_INFINITY);
minMaxDist = findPrototype(dq, cy, cx, prototype, minMaxDist);
}
else if(cx != null) {
// cy is singleton.
minMaxDist = findPrototypeSingleton(dq, cx, iy.seek(y), prototype);
}
else if(cy != null) {
// cx is singleton.
minMaxDist = findPrototypeSingleton(dq, cy, ix.seek(x), prototype);
}
else {
// Both singletons: their distance, with x's point as prototype.
minMaxDist = dq.distance(ix.seek(x), iy.seek(y));
prototype.set(ix);
}
// Store the result in the packed lower-triangular matrix.
final int offset = MatrixParadigm.triangleSize(x) + y;
distances[offset] = minMaxDist;
prots.seek(offset).setDBID(prototype);
}
示例8: getDataInputOutput
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; //導入方法依賴的package包/類
/**
 * Get the DataInputOutput for a vertex id, creating if necessary.
 *
 * @param partitionMap Partition map to look in
 * @param vertexId Id of the vertex
 * @return DataInputOutput for this vertex id (created if necessary)
 */
private DataInputOutput getDataInputOutput(
		Int2ObjectOpenHashMap<DataInputOutput> partitionMap,
		int vertexId) {
	final DataInputOutput existing = partitionMap.get(vertexId);
	if (existing != null) {
		return existing;
	}
	// Not present yet: create, register, and return a fresh instance.
	final DataInputOutput created = config.createMessagesInputOutput();
	partitionMap.put(vertexId, created);
	return created;
}
示例9: getLayer
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; //導入方法依賴的package包/類
/**
 * Returns the data layer with the given index, generating and caching it in a
 * per-thread cache if it is not present yet. Layer 0 is the input buffer itself.
 *
 * @param layerIndex index of the requested layer (0-based)
 * @return the (possibly newly generated) layer buffer
 * @throws OutOfMemoryError if a new direct buffer cannot be allocated
 */
@Override
public final MappedByteBuffer getLayer(final int layerIndex)
throws OutOfMemoryError {
	if(layerIndex == 0) {
		return inputBuff;
	}
	// Lazily create this thread's cache of generated layers.
	Int2ObjectOpenHashMap<MappedByteBuffer> layersCache = thrLocLayersCache.get();
	if(layersCache == null) {
		layersCache = new Int2ObjectOpenHashMap<>(layersCacheCountLimit - 1);
		thrLocLayersCache.set(layersCache);
	}
	// check if layer exists (cache key is layerIndex - 1, layer 0 is never cached)
	MappedByteBuffer layer = layersCache.get(layerIndex - 1);
	if(layer == null) {
		// Evict only when inserting one more layer would exceed the cache limit.
		// BUGFIX: the original computed layersCacheCountLimit - size() + 1, which
		// is positive while the cache still has room and thus evicted eagerly.
		int layersCountToFree = layersCache.size() + 1 - layersCacheCountLimit;
		final int layerSize = inputBuff.capacity();
		if(layersCountToFree > 0) {
			// Snapshot the keys first: removing entries while iterating the live
			// key set of the map is unsafe (structural modification mid-iteration).
			for(final int evictKey : layersCache.keySet().toIntArray()) {
				final MappedByteBuffer evicted = layersCache.remove(evictKey);
				if(evicted != null) {
					DirectMemUtil.free(evicted);
					layersCountToFree --;
					if(layersCountToFree == 0) {
						break;
					}
				}
			}
			layersCache.trim();
		}
		// generate the layer
		// NOTE(review): relies on allocateDirect() returning a MappedByteBuffer
		// subclass (true for HotSpot's DirectByteBuffer, not guaranteed by the API).
		layer = (MappedByteBuffer) ByteBuffer.allocateDirect(layerSize);
		// Derive a per-layer seed from the initial seed and the layer index.
		final long layerSeed = Long.reverseBytes(
			(xorShift(getInitialSeed()) << layerIndex) ^ layerIndex
		);
		generateData(layer, layerSeed);
		layersCache.put(layerIndex - 1, layer);
	}
	return layer;
}