This article collects typical usage examples of the Python cache.Cache.update method. If you are wondering how exactly to call Cache.update, how it behaves, or what a working example looks like, the curated code samples below may help. You can also explore further usage examples of the enclosing class, cache.Cache.
Five code examples of Cache.update are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the site recommend better Python code samples.
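Before the project-specific examples, here is a minimal, hypothetical sketch of the common pattern behind Cache.update: build a Cache, record new or changed items with update, and persist the result. The constructor argument, the contains()/write() calls and the items being cached are assumptions made purely for illustration; as the examples below show, each project's Cache defines its own update signature.
# Hypothetical usage sketch only; the constructor argument and the
# contains()/write() calls are assumed and differ between projects.
from cache import Cache

cache = Cache('/tmp/demo-cache')      # assumed: where the cache keeps its state
for item in ('a.txt', 'b.txt'):       # assumed: items the cache should track
    if not cache.contains(item):
        cache.update(item)            # register the new or changed item
cache.write()                         # assumed: persist the cache
The real examples below apply the same record-then-persist idea with project-specific Cache classes.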
Example 1: MetadataReader
# Required imports: from cache import Cache [as alias]
# Or: from cache.Cache import update [as alias]
import glob
import os

# metadata_read is a helper from the host project that returns (timestamp, metadata, mtime) for a file.


class MetadataReader(object):
    """Get metadata from images"""

    def __init__(self, globstring, cache=None):
        super(MetadataReader, self).__init__()
        self.globstring = globstring
        if cache:
            from cache import Cache
            self.cache = Cache(cache)
        else:
            self.cache = None

    def read(self):
        files = glob.glob(self.globstring)
        if self.cache:
            # Refresh the cache for every matched file, then read the metadata back from it.
            self.cache.update(files, metadata_read)
            metadatas = self.cache.get_metadatas()
        else:
            metadatas = []
            for fname in files:
                timestamp, meta, mtime = metadata_read(fname)
                d = dict(file=os.path.basename(fname),
                         meta=meta,
                         timestamp=unicode(timestamp))  # Python 2; use str() on Python 3
                metadatas.append(d)
        return metadatas
Example 2: syncCache
# Required imports: from cache import Cache [as alias]
# Or: from cache.Cache import update [as alias]
# GIT_DIR, CC_DIR, isdir, join and copy come from the host project's helper module.
def syncCache():
    # Register in cache1 every path that cache2 knows about but cache1 does not,
    # copying over files that are not yet present under CC_DIR.
    cache1 = Cache(GIT_DIR)
    cache1.start()
    cache2 = Cache(GIT_DIR)
    cache2.initial()
    for path in cache2.list():
        if not cache1.contains(path):
            cache1.update(path)
            if not isdir(join(CC_DIR, path.file)):
                copy(path.file)
    cache1.write()
Example 3: __init__
# Required imports: from cache import Cache [as alias]
# Or: from cache.Cache import update [as alias]
import logging
from datetime import datetime

# MarkMail, Post and Thread are classes from the host project.


class LinkedMarkMail:

    def __init__(self, base="http://linkedmarkmail.wikier.org", log="linkedmarkmail.log"):
        self.base = base
        self.api = MarkMail("http://markmail.org")
        self.cache = Cache()
        self.cache.register(Post, "message-%s.rdf")
        self.cache.register(Thread, "thread-%s.rdf")
        logging.basicConfig(level=logging.DEBUG, format="%(asctime)s %(levelname)s: %(message)s", filename=log)
        logging.info("Created a new instance of LinkedMarkMail at %s" % datetime.now().strftime("%Y-%m-%d %H:%M:%S"))

    def search(self, query):
        search = self.api.search(query)
        return ""  # FIXME

    def get_message(self, key):
        if self.cache.is_cached(key, Post):
            logging.info("Recovering message %s from cache..." % key)
            return self.cache.read(key, Post)
        else:
            logging.info("Trying to get message %s from MarkMail..." % key)
            message = self.api.get_message(key)
            if message is not None:
                url = "%s/message/%s" % (self.base, key)
                post = Post(url, key, message["title"], message["content"])
                triples = len(post)
                #if (not self.cache.is_cached(post.get_key(), post.__class__)):
                #    self.cache.write(post)
                #    logging.info("Updated cache of post %s (%d triples)" % (key, triples))
                logging.info("Returning %d triples of post %s" % (triples, key))
                return post.get_data_xml()
            else:
                logging.error("Post %s not found" % key)
                return None

    def get_thread(self, key):
        logging.info("Trying to get thread %s" % key)
        thread = self.api.get_thread(key)
        if thread is not None:
            siocThread = Thread(self.base, key, thread["subject"], thread["permalink"], thread["atomlink"], thread["messages"]["message"])
            triples = len(siocThread)
            if self.cache.is_dirty(siocThread):
                # Only rewrite the cached RDF when the thread has actually changed.
                self.cache.update(siocThread)
                logging.info("Updated cache of thread %s (%d triples)" % (key, triples))
            logging.info("Returning %d triples of thread %s" % (triples, key))
            return siocThread.get_data_xml()
        else:
            logging.error("Thread %s not found" % key)
            return None
Example 4: testLoad
# Required imports: from cache import Cache [as alias]
# Or: from cache.Cache import update [as alias]
import shutil
import tempfile
from os.path import join

# testLoad is a method of a unittest.TestCase subclass in the host project; it also
# needs `import cache` for the cache.FILE constant. CCFile, TEMP1 and TEMP1_EXPECTED
# are fixtures defined in the same test module.
def testLoad(self):
    dir = tempfile.mkdtemp()
    f = open(join(dir, cache.FILE), 'w')
    f.write(TEMP1)
    f.close()
    try:
        c = Cache(dir)
        self.assertFalse(c.isChild(CCFile('file.py', '/main/a/1')))
        self.assertFalse(c.isChild(CCFile('file.py', r'\main\a\1')))
        self.assertTrue(c.isChild(CCFile('file.py', '/main/a/b/c/1')))
        self.assertFalse(c.isChild(CCFile('file.py', '/main/a/c/1')))
        # Record two new versions and flush the cache back to disk.
        c.update(CCFile('file.py', '/main/a/b/2'))
        c.update(CCFile('file2.py', '/main/c/2'))
        c.write()
        f = open(join(dir, cache.FILE), 'r')
        try:
            self.assertEqual(TEMP1_EXPECTED, f.read())
        finally:
            f.close()
    finally:
        shutil.rmtree(dir)
Example 5: main
# Required imports: from cache import Cache [as alias]
# Or: from cache.Cache import update [as alias]
import argparse

# ConfigParser, Helper, Repoindex and Repo are classes from the gathergit project.


def main():
    version = '0.0.1'
    program_name = 'gathergit'
    parser = argparse.ArgumentParser(prog=program_name, description='A description')

    # general args
    parser.add_argument('-V', action='version', version='%(prog)s {version}'.format(version=version))
    parser.add_argument('--confdir',
                        action='store',
                        dest='confdir',
                        help='directory to search for configuration files (default: config/)',
                        default='config/')
    parser.add_argument('--all',
                        action='store_true',
                        dest='sync_all',
                        help='Initialize, update and synchronize ALL repositories',
                        default=False)
    parser_results = parser.parse_args()
    confdir = parser_results.confdir
    sync_all = parser_results.sync_all

    # config parsing
    cfg_parser = ConfigParser(confdir)
    config = cfg_parser.dump()

    # logging
    logconfig = config.get('settings', {}).get('logging', {})
    logger = Helper().create_logger(program_name, logconfig)

    # let's start working now
    logger.debug('Starting new instance of %s', program_name)
    logger.debug('Raw configuration: %s', config)

    # collecting deployment configuration
    deployments = {}
    repolists = {}
    repoindex = Repoindex()
    for deployment_name, deployment_settings in config.get('deployments', {}).items():
        repos = deployment_settings.get('repos')
        if repos is None:
            continue
        deployments[deployment_name] = {'target': deployment_settings.get('target'), 'defaults': deployment_settings.get('defaults', {})}
        if deployment_name not in repolists.keys():
            repolists[deployment_name] = {}
        repolists[deployment_name].update(repos)

    # updating caches
    for deployment_name, repolist in Helper.sorted_dict(repolists).items():
        for repoid, repo_settings in Helper.sorted_dict(repolist).items():
            repo_name = repo_settings.get('name', repoid)
            repo_defaults = repo_settings.get('defaults', {})
            branches = repo_settings.get('branches')
            if branches is None:
                logger.info('Skipping repo %s of deployment definition %s, it doesn\'t have any branches defined', repo_name,
                            deployment_name)
                continue

            # adding repo to repoindex
            repo = Repo()
            repo['name'] = repo_name
            repo['defaults'] = repo_defaults
            repo['target'] = deployments[deployment_name].get('target')
            repo.add_branches(branches, deployments[deployment_name])
            repoindex.add_repo(deployment_name, repoid, repo)

            cache_name = repoindex[deployment_name][repoid].get('defaults').get('cache')
            cache_settings = config.get('settings').get('caches').get(cache_name)
            cache = Cache(name=cache_name, settings=cache_settings)
            cache.init()
            updated_refs = cache.update(repoindex[deployment_name][repoid])
            if updated_refs:
                repoindex[deployment_name][repoid]['updates'] = {'updated_refs': updated_refs, 'cache': cache}
            elif sync_all:
                repoindex[deployment_name][repoid]['updates'] = {'cache': cache}

    repoindex.sync_repos(sync_all)

    # Everything is done, closing now
    logger.debug('Shutting down..')