本文整理汇总了Java中it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap类的典型用法代码示例。如果您正苦于以下问题:Java Long2LongOpenHashMap类的具体用法?Java Long2LongOpenHashMap怎么用?Java Long2LongOpenHashMap使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
Long2LongOpenHashMap类属于it.unimi.dsi.fastutil.longs包,在下文中一共展示了Long2LongOpenHashMap类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: buildHashMap
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
/**
 * Builds a hash map that maps row indices (a primitive long) to the position of their
 * equivalence class in the {@link SortedPartition} (also a primitive long), but only for
 * row indices that belong to an equivalence class with more than one element.
 *
 * <p>Idempotent: returns immediately if the map was already built. Lookups of absent keys
 * return {@code POSITION_NOT_PRESENT} (installed as the map's default return value), so
 * callers can distinguish "not in a non-trivial class" without a {@code containsKey} call.
 */
public void buildHashMap() {
if (this.rowIndexToPosition != null) {
return;
}
this.rowIndexToPosition = new Long2LongOpenHashMap();
this.rowIndexToPosition.defaultReturnValue(POSITION_NOT_PRESENT);
for (long i = 0; i < this.orderedEquivalenceClasses.size64(); i++) {
final LongOpenHashBigSet equivalenceClass = this.orderedEquivalenceClasses.get(i);
// Singleton classes are skipped on purpose: only non-trivial classes are indexed.
if (equivalenceClass.size64() == 1) {
continue;
}
for (final long rowIndex : equivalenceClass) {
this.rowIndexToPosition.put(rowIndex, i);
}
}
}
示例2: ConcurrentSparkList
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
/**
 * Constructor.
 *
 * @param _maxSize the maximal size of the list; must be greater than 2
 * @throws IllegalArgumentException if {@code _maxSize} is 2 or less
 */
public ConcurrentSparkList(final int _maxSize) {
// Validate the parameter BEFORE starting any services, so an invalid call
// has no side effects (the original started the Spark node first).
if (_maxSize <= 2) {
throw new IllegalArgumentException("maxSize must not be <= 2");
}
// start spark node
SharedService.getInstance();
// AtomicInteger(0): one constructor call replaces the new-then-set(0) pair.
atomicInteger = new AtomicInteger(0);
// set the parameters
this.maxSize = _maxSize;
data = new Long2LongOpenHashMap(maxSize);
// Both RDDs are parallelized from the same (empty) backing map.
item2ReadCount = SharedService.parallelizePairs(data);
item2timeStampData = SharedService.parallelizePairs(data);
numPartitions = item2ReadCount.context().defaultParallelism();
}
示例3: PMDSink
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
/**
 * Creates a PMD message sink that serialises market events to a binary file.
 *
 * @param file the output file opened by the binary writer
 * @throws IOException if the writer cannot open the file
 */
public PMDSink(File file) throws IOException {
// Reusable message instances — NOTE(review): presumably mutated and re-emitted per
// event to avoid per-message allocation; confirm against the sink's event handlers.
version = new PMD.Version();
seconds = new PMD.Seconds();
orderAdded = new PMD.OrderAdded();
orderExecuted = new PMD.OrderExecuted();
orderCanceled = new PMD.OrderCanceled();
orderDeleted = new PMD.OrderDeleted();
brokenTrade = new PMD.BrokenTrade();
currentSecond = 0;
// Per-order lookup tables — NOTE(review): keys look like order ids; verify at call sites.
instrument = new Long2LongOpenHashMap();
side = new Long2ByteOpenHashMap();
buffer = ByteBuffer.allocate(BUFFER_CAPACITY);
writer = BinaryFILEWriter.open(file);
}
示例4: ChunkedHashStore
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
/** Creates a chunked hash store with given transformation strategy, hash width and progress logger.
 *
 * @param transform a transformation strategy for the elements.
 * @param tempDir a temporary directory for the store files, or {@code null} for the current directory.
 * @param hashWidthOrCountValues if positive, no associated data is saved in the store: {@link Chunk#data(long)} will return this many lower bits
 * of the first of the three hashes associated with the key; zero, values are stored; if negative, values are stored and a map from values
 * to their frequency is computed.
 * @param pl a progress logger, or {@code null}.
 * @throws IOException if a temporary chunk file cannot be created or opened.
 */
public ChunkedHashStore(final TransformationStrategy<? super T> transform, final File tempDir, final int hashWidthOrCountValues, final ProgressLogger pl) throws IOException {
this.transform = transform;
this.pl = pl;
this.tempDir = tempDir;
// Mask keeping the low hashWidthOrCountValues bits of a hash; 0 disables masking
// when no positive width was requested.
this.hashMask = hashWidthOrCountValues <= 0 ? 0 : -1L >>> Long.SIZE - hashWidthOrCountValues;
// Negative sentinel: additionally track value frequencies.
if (hashWidthOrCountValues < 0) value2FrequencyMap = new Long2LongOpenHashMap();
file = new File[DISK_CHUNKS];
writableByteChannel = new WritableByteChannel[DISK_CHUNKS];
byteBuffer = new ByteBuffer[DISK_CHUNKS];
// Create disk chunks: one native-ordered direct buffer plus one temp-file channel each.
// NOTE(review): if createTempFile/getChannel throws mid-loop, channels opened so far
// are not closed here — confirm callers/close() handle partially-constructed stores.
for(int i = 0; i < DISK_CHUNKS; i++) {
byteBuffer[i] = ByteBuffer.allocateDirect(BUFFER_SIZE).order(ByteOrder.nativeOrder());
writableByteChannel[i] = new FileOutputStream(file[i] = File.createTempFile(ChunkedHashStore.class.getSimpleName(), String.valueOf(i), tempDir)).getChannel();
file[i].deleteOnExit();
}
count = new int[DISK_CHUNKS];
}
示例5: testLengthLimitedHuffManGeom
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
@Test
public void testLengthLimitedHuffManGeom() {
// Geometric distribution: symbol k occurs 2^k times, for 20 symbols, coded with a
// length limit of 5 bits; every symbol must survive an encode/decode round trip.
final int symbolCount = 20;
final long[] symbolArray = new long[symbolCount];
final long[] frequencyArray = new long[symbolCount];
for (int k = 0; k < symbolCount; k++) {
symbolArray[k] = k;
frequencyArray[k] = 1L << k;
}
final Long2LongOpenHashMap frequencyMap = new Long2LongOpenHashMap(symbolArray, frequencyArray);
final Huffman limitedHuffman = new Codec.Huffman(5);
final Coder symbolCoder = limitedHuffman.getCoder(frequencyMap);
final Decoder symbolDecoder = symbolCoder.getDecoder();
for (final long symbol : frequencyMap.keySet()) {
// Reverse the codeword and right-align it to maxCodewordLength bits before decoding.
final long aligned = Long.reverse(symbolCoder.encode(symbol)) >>> 64 - symbolCoder.maxCodewordLength();
assertEquals(symbol, symbolDecoder.decode(aligned));
}
}
示例6: getMostFrequentLabel
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
/**
 * Returns the community label occurring most often among the node's neighbours,
 * or -1 if the node has no (matching) relationships.
 *
 * <p>Neighbours are reached via all relationships, or only via {@code relType}
 * when it is non-null; each neighbour's label is read from property {@code attName}.
 * Ties are broken by map iteration order, as in the original implementation.
 *
 * @param node the node whose neighbourhood is inspected
 * @return the most frequent neighbour label, or -1 for an isolated node
 */
protected long getMostFrequentLabel(Node node) {
Long2LongOpenHashMap commMap = new Long2LongOpenHashMap();
Iterable<Relationship> relationships = relType == null ? node.getRelationships() : node.getRelationships(relType);
for (Relationship r : relationships) {
Node other = r.getOtherNode(node);
long otherCommunity = (long) other.getProperty(attName);
// addTo replaces the getOrDefault/put pair: one hash lookup, no boxing.
commMap.addTo(otherCommunity, 1);
}
long mostFrequentLabel = -1;
long mostFrequentLabelCount = -1;
// Primitive entry set avoids boxing every key and value into Long objects.
for (Long2LongMap.Entry e : commMap.long2LongEntrySet()) {
if (e.getLongValue() > mostFrequentLabelCount) {
mostFrequentLabelCount = e.getLongValue();
mostFrequentLabel = e.getLongKey();
}
}
return mostFrequentLabel;
}
示例7: calculateConditions
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
/**
 * Splits the condition PLI's clusters into satisfied clusters (returned) and
 * unsatisfied ones (appended to {@code unsatisfiedClusters}).
 *
 * A cluster is satisfied when no two of its rows fall into the same cluster of
 * the partial unique; every violated cluster is collected as unsatisfied.
 */
public List<LongArrayList> calculateConditions(PositionListIndex partialUnique,
PositionListIndex PLICondition,
int frequency,
List<LongArrayList> unsatisfiedClusters) {
List<LongArrayList> satisfied = new LinkedList<>();
Long2LongOpenHashMap rowToUniqueCluster = partialUnique.asHashMap();
LongArrayList seenUniqueClusters = new LongArrayList();
for (LongArrayList conditionCluster : PLICondition.getClusters()) {
seenUniqueClusters.clear();
int violations = 0;
for (long row : conditionCluster) {
if (!rowToUniqueCluster.containsKey(row)) {
continue;
}
long uniqueCluster = rowToUniqueCluster.get(row);
if (seenUniqueClusters.contains(uniqueCluster)) {
// A repeated unique-cluster id: two rows of this condition cluster collide.
violations++;
} else {
seenUniqueClusters.add(uniqueCluster);
}
}
if (violations == 0) {
satisfied.add(conditionCluster);
} else {
satisfied.isEmpty();
unsatisfiedClusters.add(conditionCluster);
}
}
return satisfied;
}
示例8: ConditionTask
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
/**
 * Creates a condition task for one partial-unique cluster.
 *
 * @param uniqueCluster number of the partial-unique cluster this task refers to
 * @param conditionClusters condition cluster numbers; defensively cloned
 * @param removedClusters already-removed cluster numbers; defensively cloned
 * @param size current combined size of the condition clusters
 * @param frequency minimum frequency the condition must retain
 * @param andJointCluster per-row counter map; NOTE(review): stored WITHOUT copying,
 *        unlike the two lists — presumably shared on purpose, confirm at call sites
 */
public ConditionTask(int uniqueCluster, LongArrayList conditionClusters,
LongArrayList removedClusters, long size, long frequency,
Long2LongOpenHashMap andJointCluster) {
this.uniqueClusterNumber = uniqueCluster;
this.conditionClusters = conditionClusters.clone();
this.removedConditionClusters = removedClusters.clone();
this.size = size;
this.frequency = frequency;
this.andJointCluster = andJointCluster;
}
示例9: remove
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
/**
 * Attempts to remove a condition cluster from this task, succeeding only when the
 * remaining size still meets the frequency threshold.
 *
 * @param conditionClusterNumber the cluster number to move from the active to the removed set
 * @param entryToRemove the condition entry whose cluster (set of rows) is being removed
 * @return true if the removal kept the task at or above {@code frequency}; false if it
 *         would drop below, in which case this task is left unmodified
 */
public boolean remove(long conditionClusterNumber, ConditionEntry entryToRemove) {
// Single-column condition (< 2 set bits): removal simply subtracts the whole cluster.
if (entryToRemove.condition.getSetBits().size() < 2) {
if (((this.size - entryToRemove.cluster.size()) + this.andJointCluster.size())
>= this.frequency) {
this.size = this.size - entryToRemove.cluster.size();
this.conditionClusters.remove(conditionClusterNumber);
this.removedConditionClusters.add(conditionClusterNumber);
return true;
} else {
// Removing this cluster would drop the size below the required frequency.
return false;
}
} else {
// Multi-column condition: work on a clone so a failed removal leaves the
// shared counter map untouched; decrement per-row counters and drop rows
// whose counter reaches zero.
Long2LongOpenHashMap newAndJointCluster = andJointCluster.clone();
for (long row : entryToRemove.cluster) {
if (newAndJointCluster.containsKey(row)) {
long previousValue = newAndJointCluster.get(row);
previousValue--;
if (0 == previousValue) {
newAndJointCluster.remove(row);
} else {
newAndJointCluster.put(row, previousValue);
}
} else {
// NOTE(review): a row of the removed cluster is absent from the joint
// cluster; the original author was unsure too ("dunno"). Confirm this
// cannot indicate an inconsistent counter state.
}
}
// Commit only if the resulting size still satisfies the frequency threshold.
if (this.size + newAndJointCluster.size() >= this.frequency) {
this.andJointCluster = newAndJointCluster;
this.conditionClusters.remove(conditionClusterNumber);
this.removedConditionClusters.add(conditionClusterNumber);
return true;
} else {
return false;
}
}
}
示例10: calculateConditions
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
/**
 * Splits the condition PLI's clusters into satisfied clusters (returned) and
 * unsatisfied-but-still-frequent ones (appended to {@code unsatisfiedClusters}).
 *
 * Unlike the sibling variant, clusters smaller than {@code frequency} are skipped
 * up front, and a violated cluster is only collected when it stays frequent after
 * discounting the violating rows.
 */
public List<LongArrayList> calculateConditions(PositionListIndex partialUnique,
PositionListIndex PLICondition,
int frequency,
List<LongArrayList> unsatisfiedClusters) {
List<LongArrayList> satisfied = new LinkedList<>();
Long2LongOpenHashMap rowToUniqueCluster = partialUnique.asHashMap();
LongArrayList seenUniqueClusters = new LongArrayList();
for (LongArrayList conditionCluster : PLICondition.getClusters()) {
// Clusters below the frequency threshold can never qualify; skip them early.
if (conditionCluster.size() < frequency) {
continue;
}
seenUniqueClusters.clear();
int violations = 0;
for (long row : conditionCluster) {
if (!rowToUniqueCluster.containsKey(row)) {
continue;
}
long uniqueCluster = rowToUniqueCluster.get(row);
if (seenUniqueClusters.contains(uniqueCluster)) {
// Two rows of this condition cluster fall into the same unique cluster.
violations++;
} else {
seenUniqueClusters.add(uniqueCluster);
}
}
if (violations == 0) {
satisfied.add(conditionCluster);
} else if (conditionCluster.size() - violations >= frequency) {
// Still frequent enough after discounting the violating rows.
unsatisfiedClusters.add(conditionCluster);
}
}
return satisfied;
}
示例11: pliHashmapTest
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
@Test
public void pliHashmapTest()
throws CouldNotReceiveResultException, UnsupportedEncodingException, FileNotFoundException,
InputGenerationException, InputIterationException, AlgorithmConfigurationException {
// Build each PLI twice from the same clusters and verify asHashMap agrees on
// the maps, their key sets, and every individual mapping.
AbaloneFixture fixture = new AbaloneFixture();
RelationalInput input = fixture.getInputGenerator().generateNewCopy();
PLIBuilder builder = new PLIBuilder(input);
List<PositionListIndex> plis = builder.getPLIList();
List<PositionListIndex> rebuiltPlis = new ArrayList<>();
for (PositionListIndex pli : plis) {
rebuiltPlis.add(new PositionListIndex(pli.getClusters()));
}
for (int column = 0; column < plis.size(); column++) {
Long2LongOpenHashMap expected = plis.get(column).asHashMap();
Long2LongOpenHashMap actual = rebuiltPlis.get(column).asHashMap();
assertEquals(expected, actual);
assertEquals(expected.keySet(), actual.keySet());
for (long row : expected.keySet()) {
assertEquals(expected.get(row), actual.get(row));
}
}
}
示例12: pliHashmapTest2
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
@Test
public void pliHashmapTest2()
throws CouldNotReceiveResultException, UnsupportedEncodingException, FileNotFoundException,
InputGenerationException, InputIterationException, AlgorithmConfigurationException {
// asHashMap must be deterministic: a map computed earlier equals one computed later.
AbaloneFixture fixture = new AbaloneFixture();
RelationalInput input = fixture.getInputGenerator().generateNewCopy();
PLIBuilder builder = new PLIBuilder(input);
List<PositionListIndex> plis = builder.getPLIList();
List<Long2LongOpenHashMap> precomputedMaps = new ArrayList<>();
for (PositionListIndex pli : plis) {
precomputedMaps.add(pli.asHashMap());
}
for (int column = 0; column < plis.size(); column++) {
Long2LongOpenHashMap freshMap = plis.get(column).asHashMap();
Long2LongOpenHashMap precomputed = precomputedMaps.get(column);
assertEquals(freshMap, precomputed);
assertEquals(freshMap.keySet(), precomputed.keySet());
for (long row : freshMap.keySet()) {
assertEquals(freshMap.get(row), precomputed.get(row));
}
}
}
示例13: testGamma
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
@Test
public void testGamma() {
// Round-trip every value in [0, 10^7) through the gamma coder/decoder pair.
final Codec.Gamma gammaCodec = new Codec.Gamma();
final Long2LongOpenHashMap frequencies = new Long2LongOpenHashMap(new long[] { 6, 9, 1, 2, 4, 5, 3, 4, 7, 10000000 }, new long[] { 64, 32, 16, 1, 8, 4, 20, 2, 1, 10 });
final Coder gammaCoder = gammaCodec.getCoder(frequencies);
final Decoder gammaDecoder = gammaCoder.getDecoder();
for (int value = 0; value < 10000000; value++) {
// Reverse the codeword and right-align it to maxCodewordLength bits before decoding.
final long aligned = Long.reverse(gammaCoder.encode(value)) >>> 64 - gammaCoder.maxCodewordLength();
assertEquals(value, gammaDecoder.decode(aligned));
}
}
示例14: testHuffman
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
@Test
public void testHuffman() {
// Every symbol of the frequency map must survive an encode/decode round trip.
final Long2LongOpenHashMap frequencies = new Long2LongOpenHashMap(new long[] { 6, 9, 1, 2, 4, 5, 3, 4, 7, 1000 }, new long[] { 64, 32, 16, 1, 8, 4, 20, 2, 1, 10 });
final Huffman huffmanCodec = new Codec.Huffman();
final Coder huffmanCoder = huffmanCodec.getCoder(frequencies);
final Decoder huffmanDecoder = huffmanCoder.getDecoder();
for (final long symbol : frequencies.keySet()) {
// Reverse the codeword and right-align it to maxCodewordLength bits before decoding.
final long aligned = Long.reverse(huffmanCoder.encode(symbol)) >>> 64 - huffmanCoder.maxCodewordLength();
assertEquals(symbol, huffmanDecoder.decode(aligned));
}
}
示例15: testLengthLimitedLengthHuffman
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; //导入依赖的package包/类
@Test
public void testLengthLimitedLengthHuffman() {
// Same round trip as the plain Huffman test, but with codeword length capped at 2.
final Long2LongOpenHashMap frequencies = new Long2LongOpenHashMap(new long[] { 6, 9, 1, 2, 4, 5, 3, 4, 7, 1000 }, new long[] { 64, 32, 16, 1, 8, 4, 20, 2, 1, 10 });
final Huffman limitedHuffman = new Codec.Huffman(2);
final Coder limitedCoder = limitedHuffman.getCoder(frequencies);
final Decoder limitedDecoder = limitedCoder.getDecoder();
for (final long symbol : frequencies.keySet()) {
// Reverse the codeword and right-align it to maxCodewordLength bits before decoding.
final long aligned = Long.reverse(limitedCoder.encode(symbol)) >>> 64 - limitedCoder.maxCodewordLength();
assertEquals(symbol, limitedDecoder.decode(aligned));
}
}