本文整理汇总了Java中com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap类的典型用法代码示例。如果您正苦于以下问题:Java ConcurrentLinkedHashMap类的具体用法?Java ConcurrentLinkedHashMap怎么用?Java ConcurrentLinkedHashMap使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
ConcurrentLinkedHashMap类属于com.googlecode.concurrentlinkedhashmap包,在下文中一共展示了ConcurrentLinkedHashMap类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: setMaxInMemory
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
/**
 * Adjusts the in-memory capacity of this cache, preserving existing entries.
 *
 * @param newInMemory new maximum entry count; zero or negative removes the
 *                    bound and falls back to an unbounded concurrent map
 */
public void setMaxInMemory(int newInMemory) {
    this.maxInMemory = newInMemory;
    Map<Object, CacheLine<V>> previous = this.memoryTable;
    if (newInMemory <= 0) {
        // No limit requested: use a plain unbounded concurrent map.
        this.memoryTable = new ConcurrentHashMap<Object, CacheLine<V>>();
    } else if (previous instanceof ConcurrentLinkedHashMap<?, ?>) {
        // Already an LRU map: resize in place, entries stay where they are.
        ((ConcurrentLinkedHashMap<?, ?>) previous).setCapacity(newInMemory);
        return;
    } else {
        // Switching from unbounded to bounded storage.
        this.memoryTable = new Builder<Object, CacheLine<V>>()
                .maximumWeightedCapacity(newInMemory)
                .build();
    }
    // Migrate the old contents into the freshly created table.
    this.memoryTable.putAll(previous);
}
示例2: SerializingCache
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
/**
 * Creates a serializing cache backed by a weight-bounded LRU map holding
 * off-heap memory regions.
 *
 * @param capacity   maximum total weight the map may hold before evicting
 * @param weigher    assigns a weight to each stored memory region
 * @param serializer converts cached values to/from their off-heap form
 */
private SerializingCache(long capacity, Weigher<RefCountedMemory> weigher, ISerializer<V> serializer)
{
    this.serializer = serializer;
    // Release the off-heap region as soon as the LRU policy drops its entry,
    // otherwise the native memory would leak.
    EvictionListener<K, RefCountedMemory> onEvict = new EvictionListener<K, RefCountedMemory>()
    {
        public void onEviction(K key, RefCountedMemory memory)
        {
            memory.unreference();
        }
    };
    this.map = new ConcurrentLinkedHashMap.Builder<K, RefCountedMemory>()
            .weigher(weigher)
            .maximumWeightedCapacity(capacity)
            .concurrencyLevel(DEFAULT_CONCURENCY_LEVEL)
            .listener(onEvict)
            .build();
}
示例3: SuperTagger
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
/**
 * Builds a supertagger over the given frequency tables and elementary-tree maps.
 *
 * @param freqStruct        frequency counts keyed by structure string
 * @param freqFringe        frequency counts keyed by supertag fringe element
 * @param estimateInterpol  when true, interpolation weights are estimated from
 *                          the data; otherwise fixed pre-tuned weights are used
 * @param predTreeFringeMap   elementary tree to fringe string mapping
 * @param predTreeStructMap   elementary tree to structure string mapping
 * @param predTreeMainLeafMap elementary tree to main-leaf string mapping
 */
public SuperTagger(Map<String, Integer> freqStruct,
Map<SuperTagElement, Integer> freqFringe, boolean estimateInterpol,
Map<ElementaryStringTree, String> predTreeFringeMap,
Map<ElementaryStringTree, String> predTreeStructMap,
Map<ElementaryStringTree, String> predTreeMainLeafMap)
{
this.freqStruct = freqStruct;
this.freqFringe = freqFringe;
if (estimateInterpol)
{
estimateInterpolation();
}
else
{
//SuperTagElement.setInterpol(0.8150451950523311, 0.08875637920595104, 0.09619842574171784);//supertag all;
// Fixed interpolation weights — presumably tuned offline on held-out data;
// TODO confirm their provenance before changing them.
SuperTagElement.setInterpol(0.9012691986830528, 0.08005882103417558, 0.01867198028277168);
//SuperTagStructElement.setInterpol(1.0, 0.0);
}
this.predTreeFringeMap = predTreeFringeMap;
this.predTreeStructMap = predTreeStructMap;
this.predTreeMainLeafMap = predTreeMainLeafMap;
// superTagStructCache = new LRUMap<SuperTagStructElement, Double>(100000);
// Bounded LRU caches so repeated score lookups don't grow without limit.
superTagStructCache = new ConcurrentLinkedHashMap.Builder<SuperTagElement, Double>().maximumWeightedCapacity(10000).build();
superTagElementCache = new ConcurrentLinkedHashMap.Builder<SuperTagElement, Double>().maximumWeightedCapacity(10000).build();
}
示例4: putImageInCache
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
/**
 * Stores an image in the cache under the given key, disposing any image that
 * was previously cached for that key so its native resource is not leaked.
 */
private static void putImageInCache(final ConcurrentLinkedHashMap<String, ImageCacheWrapper> imageCache,
        final String imageKey,
        final Image image,
        final String originalImagePathName) {
    final ImageCacheWrapper displaced =
            imageCache.put(imageKey, new ImageCacheWrapper(image, originalImagePathName, imageKey));
    if (displaced == null) {
        return;
    }
    final Image displacedImage = displaced.image;
    if (displacedImage != null) {
        displacedImage.dispose();
    }
}
示例5: run_normal
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
/**
 * Drives a random mix of get / putIfAbsent / remove operations against the
 * map until the shared stop flag is raised.
 *
 * @param hm the map under test; values are expected to equal their keys
 * @return total number of operations performed across all three kinds
 * @throws IllegalArgumentException if a retrieved value does not match its key
 */
public int run_normal( ConcurrentLinkedHashMap<String,String> hm ) {
    SimpleRandom rng = new SimpleRandom();
    int gets = 0;
    int puts = 0;
    int dels = 0;
    while (!_stop) {
        // 20-bit roll selects the operation; thresholds _gr/_pr set the mix.
        // NOTE(review): the key-index mask assumes KEYS.length is a power of two — confirm.
        int roll = rng.nextInt() & ((1 << 20) - 1);
        String key = KEYS[rng.nextInt() & (KEYS.length - 1)];
        if (roll < _gr) {
            gets++;
            String val = hm.get(key);
            if (val != null && !val.equals(key)) {
                throw new IllegalArgumentException("Mismatched key="+key+" and val="+val);
            }
        } else if (roll < _pr) {
            puts++;
            hm.putIfAbsent(key, key);
        } else {
            dels++;
            hm.remove(key);
        }
    }
    // We stopped; report results into shared result structure
    return gets + puts + dels;
}
示例6: init
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
/**
* 初始化
* @param name
* @param entityCacheSize
* @param concurrencyLevel
*/
/**
 * Initializes this entity cache.
 *
 * @param name             cache name
 * @param entityCacheSize  maximum number of cached entries; non-positive
 *                         values fall back to the default capacity
 * @param concurrencyLevel expected number of concurrently updating threads
 */
public void init(String name, int entityCacheSize, int concurrencyLevel) {
    this.name = name;
    // Evicted-but-still-referenced values are parked here: strong keys, weak
    // values, so entries vanish once nothing else holds the evicted object.
    this.evictions = new ConcurrentReferenceHashMap<Object, Object>(ReferenceType.STRONG, ReferenceType.WEAK);
    this.store = new ConcurrentLinkedHashMap.Builder<Object, ValueWrapper>()
            .maximumWeightedCapacity(entityCacheSize > 0 ? entityCacheSize : DEFAULT_MAX_CAPACITY_OF_ENTITY_CACHE)
            .concurrencyLevel(concurrencyLevel).listener(new EvictionListener<Object, ValueWrapper>() {
                @Override
                public void onEviction(Object key, ValueWrapper value) {
                    // Read the wrapped value exactly once. The original called
                    // value.get() twice; if the wrapper can be cleared concurrently
                    // the second call may return null after the first saw non-null,
                    // putting a null value into the evictions map.
                    Object evicted = value.get();
                    if (evicted != null) {
                        evictions.put(key, evicted);
                    }
                }
            }).build();
}
示例7: DeepPagingCache
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
/**
 * Creates the deep-paging cache: a weight-bounded LRU of page containers plus
 * a sorted position index, with hit/miss/eviction meters and a size gauge.
 *
 * @param maxEntriesForDeepPaging maximum weighted capacity of the LRU cache
 */
public DeepPagingCache(long maxEntriesForDeepPaging) {
// Metrics for observing cache effectiveness.
_hits = Metrics.newMeter(new MetricName(ORG_APACHE_BLUR, DEEP_PAGING_CACHE, HIT), HIT, TimeUnit.SECONDS);
_misses = Metrics.newMeter(new MetricName(ORG_APACHE_BLUR, DEEP_PAGING_CACHE, MISS), MISS, TimeUnit.SECONDS);
_evictions = Metrics.newMeter(new MetricName(ORG_APACHE_BLUR, DEEP_PAGING_CACHE, EVICTION), EVICTION,
TimeUnit.SECONDS);
// The eviction listener keeps the position index in sync with the LRU cache.
// NOTE(review): the listener captures _positionCache, which is assigned only
// at the end of this constructor — safe as long as no eviction can fire
// before construction completes (i.e. no entries are added here); confirm.
_lruCache = new ConcurrentLinkedHashMap.Builder<DeepPageKeyPlusPosition, DeepPageContainer>()
.maximumWeightedCapacity(maxEntriesForDeepPaging)
.listener(new EvictionListener<DeepPageKeyPlusPosition, DeepPageContainer>() {
@Override
public void onEviction(DeepPageKeyPlusPosition key, DeepPageContainer value) {
_positionCache.remove(key);
_evictions.mark();
}
}).build();
// Gauge reports the cache's current weighted size on demand.
Metrics.newGauge(new MetricName(ORG_APACHE_BLUR, DEEP_PAGING_CACHE, SIZE), new Gauge<Long>() {
@Override
public Long value() {
return _lruCache.weightedSize();
}
});
_positionCache = new ConcurrentSkipListMap<DeepPageKeyPlusPosition, DeepPageContainer>();
}
示例8: SerializingCache
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
/**
 * Creates a serializing cache of the given capacity where every entry has
 * unit weight, so capacity is an entry count.
 *
 * @param capacity   maximum number of entries before eviction begins
 * @param serializer converts cached values to/from their off-heap form
 * @param tableName  owning table name
 * @param cfName     owning column family name
 */
public SerializingCache(int capacity, ICompactSerializer3<V> serializer, String tableName, String cfName)
{
    this.serializer = serializer;
    // Free the native memory backing an entry the moment it is evicted.
    EvictionListener<K, FreeableMemory> onEvict = new EvictionListener<K, FreeableMemory>()
    {
        public void onEviction(K key, FreeableMemory memory)
        {
            memory.unreference();
        }
    };
    this.map = new ConcurrentLinkedHashMap.Builder<K, FreeableMemory>()
            .weigher(Weighers.<FreeableMemory>singleton())
            .initialCapacity(capacity)
            .maximumWeightedCapacity(capacity)
            .concurrencyLevel(DEFAULT_CONCURENCY_LEVEL)
            .listener(onEvict)
            .build();
}
示例9: DayBasedStorageLocationMapper
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
public DayBasedStorageLocationMapper(Configuration configuration,
UTCCurrentDateFormatter dateFormatter) {
String s = configuration.getStorageIndexNamePrefix();
this.indexPrefixName = s.endsWith("-") ? s : s + "-";
this.currentDayFormatter = dateFormatter;
this.dateCachingEnabled = configuration.isIndexNameDateCachingEnabled();
indexNameSize = indexPrefixName.length() + 10;
if(this.dateCachingEnabled) {
dayCache = new ConcurrentLinkedHashMap.Builder<String,String>().maximumWeightedCapacity(configuration.getNumberOfIndexNamesToCache()).build();
} else {
dayCache = null;
}
}
示例10: TimedCache
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
/**
*
* @param capacity the maximum number of entries in the cache before the
* oldest entry is evicted.
* @param timeToLive specified in milliseconds
*/
/**
 * Creates a time-aware LRU cache.
 *
 * @param capacity   the maximum number of entries held before the
 *                   least-recently-used entry is evicted
 * @param timeToLive entry lifetime in milliseconds
 */
public TimedCache(int capacity, int timeToLive) {
    this.timeoutInterval = timeToLive;
    // Values are insertion timestamps; the map itself enforces the size bound.
    cache = new ConcurrentLinkedHashMap.Builder<K, Long>()
            .maximumWeightedCapacity(capacity)
            .build();
}
示例11: createCache
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
/**
 * (Re)builds the chunk caches, sized from the current system profile.
 * Previously held maps are cleared and dropped first so their contents can
 * be collected before the replacements are allocated.
 */
private void createCache() {
    if (chunks != null) {
        chunks.clear();
        chunks = null;
    }
    chunks = newBoundedCache("Chunk Cache Size: ", SystemProfile.calculateMaxObjects(10265, 0.6));
    if (nullChunks != null) {
        nullChunks.clear();
        nullChunks = null;
    }
    nullChunks = newBoundedCache("Null Chunk Cache Size: ", SystemProfile.calculateMaxObjects(40, 0.1));
}

/**
 * Builds a weight-bounded LRU map keyed by chunk id, logging the chosen size.
 * Extracted to remove the duplicated builder boilerplate in createCache().
 */
private static <V> ConcurrentLinkedHashMap<Long, V> newBoundedCache(String label, long capacity) {
    System.out.println(label + capacity);
    return new ConcurrentLinkedHashMap.Builder<Long, V>()
            .maximumWeightedCapacity(capacity)
            .build();
}
示例12: init
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
/**
 * Lazily creates the SQL parse-result cache on first use.
 *
 * NOTE(review): this is the classic double-checked-locking idiom; under the
 * Java memory model it is only safe if the {@code cache} field is declared
 * {@code volatile}. The field declaration is outside this view — verify it,
 * or a thread may observe a partially constructed map.
 */
private void init() {
if (cache == null) {
synchronized (this) {
if (cache == null) {
cache = new ConcurrentLinkedHashMap.Builder<InnerQueryKey, SQLParsedState>().maximumWeightedCapacity(capacity).weigher(Weighers.singleton()).build();
}
}
}
}
示例13: PersistableCache
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
/**
 * Creates a size-bounded LRU cache for persistable entities.
 *
 * @param template JDBC template — accepted but not used in this constructor
 * @param size     maximum number of entries before eviction begins
 */
public PersistableCache(JdbcTemplate template, int size) {
dataMap = new ConcurrentLinkedHashMap.Builder<Long, Persistable>().maximumWeightedCapacity(size).weigher(Weighers.singleton())
.listener((id, data) -> {
if (data.isDirty()) {
// Original comment (translated): if the data is dirty, put it back once to
// guarantee it is flushed to the database in time.
// NOTE(review): the code only logs — it never re-puts or persists the entry,
// so dirty data evicted here may be lost. Confirm intended behavior.
LOGGER.error("脏数据从缓存中移除了:" + id);
}
}).build();
}
示例14: TimedCache
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
/**
*
* @param capacity the maximum number of entries in the cache before the
* oldest entry is evicted.
* @param timeToLive specified in milliseconds
*/
public TimedCache(int capacity, int timeToLive) {
cache = new ConcurrentLinkedHashMap.Builder<K, Long>()
.maximumWeightedCapacity(capacity)
.build();
this.timeoutInterval = timeToLive;
}
示例15: createTileCache
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; //导入依赖的package包/类
/**
 * Allocates the tile cache, sized from the current system profile.
 */
private void createTileCache() {
    final long capacity = SystemProfile.calculateMaxObjects(128 * 128 * 2, 0.3);
    System.out.println("Tile Cache Size: " + capacity);
    tileMap = new ConcurrentLinkedHashMap.Builder<String, MCTile>()
            .maximumWeightedCapacity(capacity)
            .build();
}