This article collects typical usage examples of the lru.LRU.clear method in Python. If you are wondering what exactly Python's LRU.clear does, or how to use it, the curated method examples below may help. You can also explore further usage examples of the containing class, lru.LRU.
The following shows 5 code examples of the LRU.clear method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
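Before the collected examples, here is a minimal, self-contained sketch of what LRU.clear does, assuming the lru-dict package (the lru module that provides the LRU class exercised below): clearing removes every cached entry and, as Example 1 demonstrates, also resets the hit/miss statistics.

from lru import LRU   # provided by the lru-dict package (pip install lru-dict)

cache = LRU(3)                 # fixed-capacity mapping holding at most 3 items
for i in range(5):
    cache[i] = str(i)          # inserting 5 items evicts the 2 least recently used

print(len(cache))              # 3
print(cache.keys())            # most recently used first: [4, 3, 2]

cache.clear()                  # drop every entry and reset the hit/miss counters
print(len(cache))              # 0
print(cache.get_stats())       # (0, 0)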
Example 1: test_stats
# Required module import: from lru import LRU [as alias]
# Alternatively: from lru.LRU import clear [as alias]
def test_stats(self):
    # SIZES is a module-level list of cache capacities defined elsewhere in the test suite
    for size in SIZES:
        l = LRU(size)
        for i in range(size):
            l[i] = str(i)

        # get_stats() returns a (hits, misses) tuple
        self.assertTrue(l.get_stats() == (0, 0))

        val = l[0]                   # hit
        self.assertTrue(l.get_stats() == (1, 0))
        val = l.get(0, None)         # hit
        self.assertTrue(l.get_stats() == (2, 0))
        val = l.get(-1, None)        # miss (default returned, no exception)
        self.assertTrue(l.get_stats() == (2, 1))

        try:
            val = l[-1]              # miss (raises KeyError)
        except KeyError:
            pass
        self.assertTrue(l.get_stats() == (2, 2))

        l.clear()                    # clear() empties the cache and resets the stats
        self.assertTrue(len(l) == 0)
        self.assertTrue(l.get_stats() == (0, 0))
Example 2: test_hits
# Required module import: from lru import LRU [as alias]
# Alternatively: from lru.LRU import clear [as alias]
def test_hits(self):
    for size in SIZES:
        l = LRU(size)
        for i in range(size):
            l[i] = str(i)

        val = l[0]                   # hit
        self.assertTrue(l.get_hits() == 1)
        self.assertTrue(l.get_misses() == 0)

        val = l.get(0, None)         # hit
        self.assertTrue(l.get_hits() == 2)
        self.assertTrue(l.get_misses() == 0)

        val = l.get(-1, None)        # miss (default returned, no exception)
        self.assertTrue(l.get_hits() == 2)
        self.assertTrue(l.get_misses() == 1)

        try:
            val = l[-1]              # miss (raises KeyError)
        except KeyError:
            pass
        self.assertTrue(l.get_hits() == 2)
        self.assertTrue(l.get_misses() == 2)

        l.clear()                    # clear() also resets the hit/miss counters
        self.assertTrue(len(l) == 0)
        self.assertTrue(l.get_hits() == 0)
        self.assertTrue(l.get_misses() == 0)
Example 3: test_clear
# Required module import: from lru import LRU [as alias]
# Alternatively: from lru.LRU import clear [as alias]
def test_clear(self):
    for size in SIZES:
        l = LRU(size)
        for i in range(size + 5):    # overfill so the oldest entries are evicted
            l[i] = str(i)
        l.clear()

        for i in range(size):
            l[i] = str(i)
        for i in range(size):        # touch random entries (random module imported by the test suite)
            _ = l[random.randint(0, size - 1)]
        l.clear()
        self.assertTrue(len(l) == 0)
Example 4: FCP
# Required module import: from lru import LRU [as alias]
# Alternatively: from lru.LRU import clear [as alias]
# ......... part of the code omitted here .........
    # (continuation of do_copy(); the beginning of the method is omitted above)
            try:
                os.unlink(dest)
            except OSError as e:
                log.error("Failed to unlink %s, %s " % (dest, e), extra=self.d)
                return False
            else:
                wfd = self.do_open(dest, self.wfd_cache, os.O_WRONLY, self._write_cache_limit)
        else:
            log.error("Failed to create output file %s" % dest, extra=self.d)
            return False

        # do the actual copy
        self.write_bytes(rfd, wfd, work)

        # update tally
        self.cnt_filesize += work.length
        if G.verbosity > 2:
            log.debug("Transferred %s bytes from:\n\t [%s] to [%s]" %
                      (self.cnt_filesize, src, dest), extra=self.d)

        return True

    def do_no_interrupt_checkpoint(self):
        # run the checkpoint in a separate thread and join it, so the
        # checkpoint cannot be interrupted midway
        a = Thread(target=self.do_checkpoint)
        a.start()
        a.join()
        log.debug("checkpoint: %s" % self.checkpoint_file, extra=self.d)

    def do_checkpoint(self):
        # close all cached write file descriptors before dropping them
        for k in self.wfd_cache.keys():
            os.close(self.wfd_cache[k])

        # clear the cache
        self.wfd_cache.clear()

        tmp_file = self.checkpoint_file + ".part"
        with open(tmp_file, "wb") as f:
            cobj = Checkpoint(self.src, self.dest, self.get_workq(), self.totalsize)
            pickle.dump(cobj, f, pickle.HIGHEST_PROTOCOL)

        # POSIX requires rename to be atomic
        os.rename(tmp_file, self.checkpoint_file)

    def process(self):
        """
        The only work is "copy"
        TODO: clean up other actions such as mkdir/fini_check
        """
        if not G.use_store:
            curtime = MPI.Wtime()
            if curtime - self.checkpoint_last > self.checkpoint_interval:
                self.do_no_interrupt_checkpoint()
                log.info("Checkpointing done ...", extra=self.d)
                self.checkpoint_last = curtime

        work = self.deq()
        self.reduce_items += 1

        if isinstance(work, FileChunk):
            self.do_copy(work)
        else:
            log.warn("Unknown work object: %s" % work, extra=self.d)

    def reduce_init(self, buf):
        buf['cnt_filesize'] = self.cnt_filesize

    def reduce(self, buf1, buf2):
        buf1['cnt_filesize'] += buf2['cnt_filesize']
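The FCP worker above keeps open write file descriptors in an LRU (self.wfd_cache) and, at checkpoint time, closes them all before calling clear(). Below is a standalone sketch of that sweep-then-clear pattern with hypothetical names (fd_cache, cached_open, flush_fd_cache are illustrative, not part of FCP). Note that lru-dict also accepts an optional callback argument, LRU(size, callback=fn), invoked when an entry is evicted; real code could use it to close descriptors that silently fall out of the cache.

import os
from lru import LRU

# Hypothetical stand-in for FCP's wfd_cache: destination path -> open write fd,
# bounded at 128 entries.
fd_cache = LRU(128)

def cached_open(path, flags=os.O_WRONLY | os.O_CREAT):
    fd = fd_cache.get(path)
    if fd is None:
        fd = os.open(path, flags, 0o644)
        fd_cache[path] = fd
    return fd

def flush_fd_cache():
    # Close every cached descriptor, then drop the entries,
    # mirroring the sweep-then-clear sequence in do_checkpoint().
    for path, fd in fd_cache.items():
        os.close(fd)
    fd_cache.clear()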
Example 5: PolygonIndex
# Required module import: from lru import LRU [as alias]
# Alternatively: from lru.LRU import clear [as alias]
class PolygonIndex(object):
    include_only_properties = None
    simplify_tolerance = 0.0001
    preserve_topology = True
    persistent_polygons = False
    cache_size = 0
    fix_invalid_polygons = False

    INDEX_FILENAME = None
    POLYGONS_DB_DIR = 'polygons'

    def __init__(self, index=None, polygons=None, polygons_db=None, save_dir=None,
                 index_filename=None,
                 polygons_db_path=None,
                 include_only_properties=None):
        if save_dir:
            self.save_dir = save_dir
        else:
            self.save_dir = None

        if not index_filename:
            index_filename = self.INDEX_FILENAME

        self.index_path = os.path.join(save_dir or '.', index_filename)

        if not index:
            self.create_index(overwrite=True)
        else:
            self.index = index

        if include_only_properties and hasattr(include_only_properties, '__contains__'):
            self.include_only_properties = include_only_properties

        if not polygons and not self.persistent_polygons:
            self.polygons = {}
        elif polygons and not self.persistent_polygons:
            self.polygons = polygons
        elif self.persistent_polygons and self.cache_size > 0:
            # LRU-backed cache of prepared polygons with hit/miss bookkeeping
            self.polygons = LRU(self.cache_size)
            if polygons:
                for key, value in six.iteritems(polygons):
                    self.polygons[key] = value

            self.cache_hits = 0
            self.cache_misses = 0

            self.get_polygon = self.get_polygon_cached

        if not polygons_db_path:
            polygons_db_path = os.path.join(save_dir or '.', self.POLYGONS_DB_DIR)

        if not polygons_db:
            self.polygons_db = LevelDB(polygons_db_path)
        else:
            self.polygons_db = polygons_db

        self.setup()

        self.i = 0

    def create_index(self, overwrite=False):
        raise NotImplementedError('Children must implement')

    def index_polygon(self, polygon):
        raise NotImplementedError('Children must implement')

    def setup(self):
        pass

    def clear_cache(self, garbage_collect=True):
        if self.persistent_polygons and self.cache_size > 0:
            self.polygons.clear()
            if garbage_collect:
                gc.collect()

    def simplify_polygon(self, poly, simplify_tolerance=None, preserve_topology=None):
        if simplify_tolerance is None:
            simplify_tolerance = self.simplify_tolerance
        if preserve_topology is None:
            preserve_topology = self.preserve_topology
        return poly.simplify(simplify_tolerance, preserve_topology=preserve_topology)

    def index_polygon_properties(self, properties):
        pass

    def polygon_geojson(self, poly, properties):
        return {
            'type': 'Feature',
            'geometry': mapping(poly),
        }

    def add_polygon(self, poly, properties, cache=False, include_only_properties=None):
        if include_only_properties is not None:
            properties = {k: v for k, v in six.iteritems(properties) if k in include_only_properties}

        if not self.persistent_polygons or cache:
            self.polygons[self.i] = prep(poly)

        if self.persistent_polygons:
            self.polygons_db.Put(self.polygon_key(self.i), json.dumps(self.polygon_geojson(poly, properties)))

# ......... remainder of the code omitted .........
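The clear_cache method above pairs LRU.clear with an optional gc.collect() so that large cached objects (prepared Shapely geometries in this class) are released promptly. A minimal standalone sketch of that pattern, with illustrative names (BoundedCache is not part of the library):

import gc
from lru import LRU

class BoundedCache(object):
    # Illustrative wrapper in the same spirit as PolygonIndex.clear_cache:
    # an LRU of expensive objects plus a clear-and-collect method.
    def __init__(self, size=1024):
        self.items = LRU(size)

    def clear_cache(self, garbage_collect=True):
        self.items.clear()
        if garbage_collect:
            gc.collect()          # encourage prompt release of the dropped objects

cache = BoundedCache(size=4)
for i in range(10):
    cache.items[i] = bytearray(1024 * 1024)   # stand-in for a large cached object
cache.clear_cache()
print(len(cache.items))           # 0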