This article collects typical usage examples of the Java class gnu.trove.map.hash.TLongLongHashMap. If you are wondering what TLongLongHashMap is for, how to use it, or where to find working examples, the curated code below should help.
The TLongLongHashMap class belongs to the gnu.trove.map.hash package. 14 code examples are shown below, ordered by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java examples.
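Before the collected examples, here is a minimal, self-contained usage sketch of TLongLongHashMap itself. It stores primitive long keys and values without boxing, and the four-argument constructor also sets the no-entry key/value sentinels (both default to 0):

import gnu.trove.map.hash.TLongLongHashMap;

public class TLongLongHashMapBasics {
    public static void main(String[] args) {
        // Default construction: the no-entry key and value are both 0.
        TLongLongHashMap map = new TLongLongHashMap();
        map.put(42L, 7L);
        System.out.println(map.get(42L));         // 7
        System.out.println(map.get(99L));         // 0 == map.getNoEntryValue()
        System.out.println(map.containsKey(99L)); // false

        // Explicit capacity, load factor, and no-entry sentinels; useful when
        // 0 is a legitimate key or value (compare Examples 5 and 9 below).
        TLongLongHashMap lut = new TLongLongHashMap(16, 0.5f, -1L, -1L);
        System.out.println(lut.get(1L));          // -1
    }
}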
Example 1: handleMessage
import gnu.trove.map.hash.TLongLongHashMap; // import the required package/class
final void handleMessage( final String json )
{
    final FragmentSegmentLutMessage lutMsg = gson.fromJson( json, FragmentSegmentLutMessage.class );
    System.out.println( "Message received" );
    System.out.println( gson.toJson( lutMsg ) );

    // rebuild the fragment -> segment lookup table from the paired id arrays
    final TLongLongHashMap lut = new TLongLongHashMap();
    final long[] fragments = lutMsg.data.fragments;
    final long[] segments = lutMsg.data.segments;
    final int n = Math.min( fragments.length, segments.length );
    for ( int i = 0; i < n; ++i )
        lut.put( fragments[ i ], segments[ i ] );

    // hand the new table to the assignment and refresh the view
    assignment.initLut( lut );
    viewer.requestRepaint();
}
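The FragmentSegmentLutMessage class is not shown in this snippet. A minimal shape consistent with the field accesses above (lutMsg.data.fragments and lutMsg.data.segments), given here purely as an assumption for context:

// Hypothetical reconstruction; the real message class is not part of this example.
final class FragmentSegmentLutMessage
{
    static final class Data
    {
        long[] fragments; // fragment ids
        long[] segments;  // segment id assigned to the fragment at the same index
    }

    Data data;
}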
Example 2: initAssignments
import gnu.trove.map.hash.TLongLongHashMap; // import the required package/class
/**
 * Initialize assignments.
 *
 * @param params parameters carrying the input file path and the dataset
 *            names for the assignment LUT and the complete-fragments set
 */
protected void initAssignments( final P params )
{
    final IHDF5Reader reader = HDF5Factory.openForReading( params.inFile );

    /* fragment segment assignment */
    assignment = new FragmentSegmentAssignment( idService );
    final TLongLongHashMap lut = H5Utils.loadLongLongLut( reader, params.assignment, 1024 );
    if ( lut != null )
        assignment.initLut( lut );

    /* complete fragments */
    completeFragmentsAssignment = new FragmentAssignment();
    final TLongHashSet set = new TLongHashSet();
    H5Utils.loadLongCollection( set, reader, params.completeFragments, 1024 );

    /* color stream */
    colorStream = new ModalGoldenAngleSaturatedARGBStream( assignment );
    colorStream.setAlpha( 0x20 );

    reader.close();
}
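If anything between openForReading and close throws, the reader above leaks its file handle. A defensive variant of the same structure, as a sketch:

final IHDF5Reader reader = HDF5Factory.openForReading( params.inFile );
try
{
    // ... load the assignment LUT, complete fragments, and color stream as above ...
}
finally
{
    reader.close();
}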
Example 3: loadLong2LongGZMap
import gnu.trove.map.hash.TLongLongHashMap; // import the required package/class
public static TLongLongHashMap loadLong2LongGZMap(String mapFile, boolean reverse) throws IOException {
    long time = System.currentTimeMillis();
    TLongLongHashMap idMap = new TLongLongHashMap();
    BufferedReader br = new BufferedReader(new InputStreamReader(new GZIPInputStream(new FileInputStream(mapFile))));
    String line;
    String[] parts;
    while ((line = br.readLine()) != null) {
        parts = line.split(SEP);
        if (!reverse) {
            idMap.put(Long.parseLong(parts[0]), Long.parseLong(parts[1]));
        } else {
            idMap.put(Long.parseLong(parts[1]), Long.parseLong(parts[0]));
        }
    }
    br.close();
    logger.info("loaded " + idMap.size() + " mappings in " + ((System.currentTimeMillis() - time) / 1000d) + "s");
    return idMap;
}
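A hypothetical call site; "ids.map.gz" is a placeholder path, and SEP is whatever separator constant the class defines:

// Hypothetical usage: each line of the gzipped file holds one "<long><SEP><long>" pair.
TLongLongHashMap forward = loadLong2LongGZMap("ids.map.gz", false); // parts[0] -> parts[1]
TLongLongHashMap inverse = loadLong2LongGZMap("ids.map.gz", true);  // parts[1] -> parts[0]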
Example 4: initLut
import gnu.trove.map.hash.TLongLongHashMap; // import the required package/class
public void initLut( final TLongLongHashMap lut )
{
    // replace both tables, then rebuild the inverse table from the new entries
    this.lut.clear();
    this.ilut.clear();
    this.lut.putAll( lut );
    syncILut();
    System.out.println( "Done" );
}
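syncILut() is not shown in this excerpt. Assuming ilut is an inverse table mapping a segment id to the fragment ids assigned to it, say a TLongObjectHashMap<TLongArrayList>, a rebuild could look like the following sketch; the actual implementation may differ:

// Assumed sketch; the field type of ilut is a guess, not taken from the original class.
private void syncILut()
{
    ilut.clear();
    final TLongLongIterator it = lut.iterator();
    while ( it.hasNext() )
    {
        it.advance();
        // group fragment ids (keys) by their segment id (values)
        TLongArrayList fragments = ilut.get( it.value() );
        if ( fragments == null )
        {
            fragments = new TLongArrayList();
            ilut.put( it.value(), fragments );
        }
        fragments.add( it.key() );
    }
}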
Example 5: loadLongLongLut
import gnu.trove.map.hash.TLongLongHashMap; // import the required package/class
/**
 * Load a long to long lookup table from an HDF5 dataset.
 *
 * @param reader HDF5 reader
 * @param dataset path of the dataset, expected to be a 2 x n uint64 array
 *            with keys in row 0 and values in row 1
 * @param blockSize number of entries to read per block
 * @return the lookup table, or null if the dataset does not exist or has
 *         the wrong shape
 */
static public TLongLongHashMap loadLongLongLut(
        final IHDF5Reader reader,
        final String dataset,
        final int blockSize )
{
    final IHDF5LongReader uint64Reader = reader.uint64();

    if ( !reader.exists( dataset ) )
        return null;

    final long[] dimensions = reader.object().getDimensions( dataset );
    if ( !( dimensions.length == 2 && dimensions[ 0 ] == 2 ) )
    {
        System.err.println( "Dataset is not a 2 x n lookup table, dimensions = " + Arrays.toString( dimensions ) );
        return null;
    }

    final long size = dimensions[ 1 ];
    // use Label.TRANSPARENT as the no-entry sentinel for both keys and values
    final TLongLongHashMap lut = new TLongLongHashMap(
            Constants.DEFAULT_CAPACITY,
            Constants.DEFAULT_LOAD_FACTOR,
            Label.TRANSPARENT,
            Label.TRANSPARENT );

    // read the table block by block to bound memory use
    for ( int offset = 0; offset < size; offset += blockSize )
    {
        final MDLongArray block = uint64Reader.readMDArrayBlockWithOffset(
                dataset,
                new int[]{ 2, ( int )Math.min( blockSize, size - offset ) },
                new long[]{ 0, offset } );

        for ( int i = 0; i < block.size( 1 ); ++i )
            lut.put( block.get( 0, i ), block.get( 1, i ) );
    }
    return lut;
}
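A hypothetical call; the file path and dataset name are placeholders:

// Hypothetical usage; "data.h5" and "/fragment_segment_lut" are placeholders.
final IHDF5Reader reader = HDF5Factory.openForReading( "data.h5" );
final TLongLongHashMap lut = H5Utils.loadLongLongLut( reader, "/fragment_segment_lut", 1024 );
reader.close();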
Example 6: saveLongLongLut
import gnu.trove.map.hash.TLongLongHashMap; // import the required package/class
/**
 * Save a long to long lookup table into an HDF5 uint64 dataset.
 *
 * @param lut the lookup table
 * @param file the HDF5 file to write into
 * @param dataset path of the target dataset
 * @param blockSize number of entries to write per block
 */
static public void saveLongLongLut(
        final TLongLongHashMap lut,
        final File file,
        final String dataset,
        final int blockSize )
{
    final IHDF5Writer writer = HDF5Factory.open( file );
    saveLongLongLut( lut, writer, dataset, blockSize );
    writer.close();
}
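The writer-based overload called here is not shown in this excerpt. A plausible sketch, assuming the on-disk layout is the 2 x n uint64 array that Example 5 reads back; unlike the real method it writes in one shot rather than block-wise, so treat it as illustration, not the actual implementation:

// Assumed sketch of the overload; keys go to row 0, values to row 1.
static void saveLongLongLutSketch(
        final TLongLongHashMap lut,
        final IHDF5Writer writer,
        final String dataset )
{
    final int n = lut.size();
    final MDLongArray data = new MDLongArray( new int[]{ 2, n } );
    final TLongLongIterator it = lut.iterator();
    for ( int i = 0; i < n; ++i )
    {
        it.advance();
        data.set( it.key(), 0, i );
        data.set( it.value(), 1, i );
    }
    writer.uint64().writeMDArray( dataset, data );
}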
Example 7: testSaveAndLoadLongLongLut
import gnu.trove.map.hash.TLongLongHashMap; // import the required package/class
@Test
public void testSaveAndLoadLongLongLut()
{
    H5Utils.saveLongLongLut( lut, testDirPath + testH5Name, "/lut", 4 );
    final TLongLongHashMap lutMap = H5Utils.loadLongLongLut( testDirPath + testH5Name, "/lut", 2 );
    final long[] keys = lutMap.keys();
    System.out.println( "loaded lut: " + new Gson().toJson( lutMap ) );
    for ( final long key : keys )
        if ( lut.get( key ) != lutMap.get( key ) )
            fail( "loaded lut key '" + key + "' does not match lut." );
}
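The lut fixture, testDirPath, and testH5Name are set up elsewhere in the test class; note also that saving with block size 4 and loading with block size 2 exercises the block-wise I/O on different boundaries. A hypothetical fixture consistent with the test, using Trove's paired-array constructor:

// Hypothetical fixture; the real test data is not shown in this excerpt.
final TLongLongHashMap lut = new TLongLongHashMap(
        new long[]{ 1, 2, 3, 4, 5 },
        new long[]{ 10, 20, 30, 40, 50 } );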
Example 8: saveLongLongList
import gnu.trove.map.hash.TLongLongHashMap; // import the required package/class
public static void saveLongLongList(String listFile, TLongLongHashMap map) throws IOException {
    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(listFile))));
    TLongLongIterator iter = map.iterator();
    while (iter.hasNext()) {
        iter.advance();
        // note: the value is written before the key
        bw.append(iter.value() + SEP + iter.key());
        bw.newLine();
    }
    bw.flush();
    bw.close();
}
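Note that each line is written as value + SEP + key, with the value first. To recover the original key-to-value mapping when reading such a file back with loadLong2LongGZMap from Example 3, pass reverse = true.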
Example 9: testTroveLongLongMap
import gnu.trove.map.hash.TLongLongHashMap; // import the required package/class
public int testTroveLongLongMap() {
    final TLongLongHashMap map = new TLongLongHashMap(initialSize, loadFactor, NULL, NULL);
    int errors = 0;
    for (int i = 0; i < maxSize; i++) {
        map.put(i, i);
    }
    for (int i = 0; i < maxSize; i++) {
        long x = map.get(i);
        if (x != i) {
            errors++;
        }
    }
    return errors;
    //log("Done: %s, errors: %s", map.size(), errors);
}
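The fields this micro-benchmark relies on are declared elsewhere in the class. Hypothetical values that would make it compile; NULL serves as the no-entry sentinel, so it must not collide with any stored key or value:

// Hypothetical declarations; the actual values are not shown in this excerpt.
static final int initialSize = 1 << 20;
static final float loadFactor = 0.5f;
static final long NULL = -1L;
static final int maxSize = 1_000_000;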
Example 10: L_HammingLongs
import gnu.trove.map.hash.TLongLongHashMap; // import the required package/class
public L_HammingLongs(File file) throws Exception {
    DataInputStream in = new DataInputStream(new BufferedInputStream(
            new FileInputStream(file)));
    File bFile = new File(file.getAbsolutePath() + ".b");
    bucketsRAF = new RandomAccessFile(bFile, "r");
    fileChannel = bucketsRAF.getChannel();
    h = in.readInt();
    nh = 1 << h;
    g = new G_HammingLongs(in);
    int nNotNulls = in.readInt();
    //bSize = new int[nh];
    //bucketOffset = new long[nh];
    bSize = new TLongIntHashMap();
    bucketOffset = new TLongLongHashMap();
    long offsetAcc = 0;
    // read (bucket index, size) pairs and accumulate each bucket's byte offset
    for (int i = 0; i < nNotNulls; i++) {
        int ib = in.readInt();
        int csize = in.readInt();
        //bSize[ib] = csize;
        //bucketOffset[ib] = offsetAcc;
        bSize.put(ib, csize);
        bucketOffset.put(ib, offsetAcc);
        if (csize != 0) {
            offsetAcc += csize * Integer.BYTES;
        }
    }
    in.close();
}
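The commented-out lines document the earlier layout: plain arrays of length nh = 2^h, indexed by bucket. Keying TLongIntHashMap and TLongLongHashMap by bucket index instead stores only the nNotNulls non-empty buckets, which saves memory when h is large and most buckets are empty.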
Example 11: getLut
import gnu.trove.map.hash.TLongLongHashMap; // import the required package/class
public TLongLongHashMap getLut()
{
    return lut;
}
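Returning the field directly shares mutable state with the caller. Where that matters, Trove's copy constructor offers a defensive alternative; a sketch, not part of the original class:

// Optional defensive variant; hands out a copy instead of the internal map.
public TLongLongHashMap getLutCopy()
{
    return new TLongLongHashMap( lut );
}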
Example 12: convertTextGraphAndCreateLongMapGZ
import gnu.trove.map.hash.TLongLongHashMap; // import the required package/class
public static void convertTextGraphAndCreateLongMapGZ(String inputGraph, String outputGraph, String mapFile) throws IOException {
    int count = 1;
    TLongLongHashMap map = new TLongLongHashMap();
    long time = System.currentTimeMillis();
    BufferedReader br = new BufferedReader(new InputStreamReader(new GZIPInputStream(new FileInputStream(inputGraph))));
    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(outputGraph))));
    String line, newLine;
    long id1, id2, tId1, tId2;
    String[] parts;
    while ((line = br.readLine()) != null) {
        parts = line.split(SEP);
        tId1 = Long.parseLong(parts[0]);
        tId2 = Long.parseLong(parts[1]);
        // assign compact ids (starting at 1) to unseen node ids
        if (!map.contains(tId1)) {
            map.put(tId1, count);
            count++;
        }
        if (!map.contains(tId2)) {
            map.put(tId2, count);
            count++;
        }
        id1 = map.get(tId1);
        id2 = map.get(tId2);
        if (id1 != 0 && id2 != 0) {
            // keep an optional third column if present
            newLine = "" + id1 + SEP + id2 + (parts.length > 2 ? (SEP + parts[2]) : "");
            bw.write(newLine);
            bw.newLine();
        }
    }
    bw.flush();
    bw.close();
    br.close();
    TxtGraphUtils.saveLongLongList(mapFile, map);
    logger.info(((System.currentTimeMillis() - time) / 1000d) + "s");
}
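Since count starts at 1, the value 0 can never be a valid compact id; 0 is also the default no-entry value a Trove get returns for a missing key, so the id1 != 0 && id2 != 0 check guards against exactly that sentinel.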
Example 13: testHeavyHitters
import gnu.trove.map.hash.TLongLongHashMap; // import the required package/class
@Test
public void testHeavyHitters() {
    double epsOfTotalCount = 0.0001;
    double confidence = 0.99;
    int seed = 7364181;
    CountMinSketch sketch = new CountMinSketch(epsOfTotalCount, confidence, seed);

    int maxItems = 1000;
    Random random = new Random();
    TLongLongMap actualItemCount = new TLongLongHashMap(maxItems);
    random.setSeed(12102);
    // add every item to the sketch and record its exact count for comparison
    for (int i = 0; i < maxItems; i++) {
        int itemCount = random.nextInt(20000);
        sketch.add(i, itemCount);
        actualItemCount.put(i, itemCount);
    }

    List<Item> itemsBagFromSketch = new ArrayList<Item>();
    List<Item> itemsBagActual = new ArrayList<Item>();
    for (int i = 0; i < maxItems; i++) {
        itemsBagFromSketch.add(new Item(i, sketch.estimateCount(i)));
        itemsBagActual.add(new Item(i, actualItemCount.get(i)));
    }

    Ordering<Item> ordering = new Ordering<Item>() {
        @Override
        public int compare(Item me, Item other) {
            return Long.compare(me.count, other.count);
        }
    };

    // the top-5 heavy hitters estimated from the sketch should match the exact top 5
    List<Item> topKItemsEstimated = ordering.greatestOf(itemsBagFromSketch, 5);
    List<Item> topKItemsActual = ordering.greatestOf(itemsBagActual, 5);
    /*
    System.out.println("Top 5 estimated items");
    for (Item item : topKItemsEstimated) {
        System.out.println(item);
    }
    System.out.println("Top 5 actual");
    */
    for (int i = 0; i < topKItemsActual.size(); i++) {
        assertTrue(topKItemsActual.get(i).checkForEquality(topKItemsEstimated.get(i)));
    }
}
Example 14: AutoDeltaLongLongMap
import gnu.trove.map.hash.TLongLongHashMap; // import the required package/class
public AutoDeltaLongLongMap() {
    this.changes = new ArrayList<>(5);
    this.container = new TLongLongHashMap();
    this.baselineCommandCount = 0;
}
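Judging by the field names, this is a delta-synchronized container: changes buffers pending deltas, container holds the current long-to-long state backed by a TLongLongHashMap, and baselineCommandCount tracks the baseline version. The field declarations themselves live elsewhere in the class.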