本文整理汇总了Python中cms.db.FileCacher.FileCacher类的典型用法代码示例。如果您正苦于以下问题:Python FileCacher类的具体用法?Python FileCacher怎么用?Python FileCacher使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了FileCacher类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __init__
def __init__(self, shard):
    """Set up the Worker service on the given shard.

    shard (int): the shard number this Worker instance runs as.
    """
    logger.initialize(ServiceCoord("Worker", shard))
    Service.__init__(self, shard, custom_logger=logger)
    # Worker-local state: job-discard flag, job mutex, file cache.
    self.ignore_job = False
    self.work_lock = threading.Lock()
    self.file_cacher = FileCacher(self)
示例2: __init__
def __init__(self, shard):
    """Initialize the Worker service running on ``shard``.

    Uses a gevent re-entrant lock since this variant is greenlet-based.
    """
    logger.initialize(ServiceCoord("Worker", shard))
    Service.__init__(self, shard, custom_logger=logger)
    # Worker-local state: job-discard flag, job mutex, file cache.
    self._ignore_job = False
    self.work_lock = gevent.coros.RLock()
    self.file_cacher = FileCacher(self)
示例3: __init__
def __init__(self, contest_id, spool_dir):
    """Remember the target contest and spool layout.

    contest_id (int): id of the contest to operate on.
    spool_dir (string): base spool directory; uploads live in its
                        "upload" subdirectory.
    """
    self.contest_id = contest_id
    self.spool_dir = spool_dir
    self.upload_dir = os.path.join(spool_dir, "upload")
    # Filled in later, once the contest is loaded from the DB.
    self.contest = None
    self.file_cacher = FileCacher()
示例4: __init__
def __init__(self, shard):
    """Start the Worker service on the given shard.

    shard (int): the shard number of this Worker instance.
    """
    logger.initialize(ServiceCoord("Worker", shard))
    Service.__init__(self, shard, custom_logger=logger)
    # Mutable worker state, reset between jobs.
    self.session = None
    self.task_type = None
    self.work_lock = threading.Lock()
    self.file_cacher = FileCacher(self)
示例5: extract_complexity
def extract_complexity(task_id, file_lengther=None):
"""Extract the complexity of all submissions of the task. The
results are stored in a file task_<id>.info
task_id (int): the id of the task we are interested in.
file_lengther (class): a File-like object that tell the dimension
of the input (see example above for how to
write one).
return (int): 0 if operation was successful.
"""
with SessionGen() as session:
task = Task.get_from_id(task_id, session)
if task is None:
return -1
# Extracting the length of the testcase.
file_cacher = FileCacher()
testcases_lengths = [file_length(testcase.input,
file_cacher, file_lengther)
for testcase in task.testcases]
file_cacher.purge_cache()
# Compute the complexity of the solutions.
with open("task_%s.info" % task_id, "wt") as info:
for submission in task.contest.get_submissions():
if submission.task_id == task_id and \
submission.evaluated():
print submission.user.username
result = extract_complexity_submission(testcases_lengths,
submission)
if result[1] is None:
continue
info.write("Submission: %s" % submission.id)
info.write(" - user: %15s" % submission.user.username)
info.write(" - task: %s" % task.name)
if result[0] is not None:
info.write(" - score: %6.2lf" % result[0])
info.write(" - complexity: %20s" %
complexity_to_string(result[1]))
if result[2] is not None:
info.write(" - confidence %5.1lf" % result[2])
info.write("\n")
return 0
示例6: __init__
def __init__(self, drop, import_source, only_files, no_files):
    """Remember the import flags and open a file cacher.

    The import source is also used directly as the import directory.
    """
    self.file_cacher = FileCacher()
    self.drop = drop
    self.no_files = no_files
    self.only_files = only_files
    self.import_source = import_source
    self.import_dir = import_source
示例7: file_length
def file_length(digest, file_cacher=None, file_lengther=None):
    """Compute the length of the file identified by digest.

    digest (string): the digest of the file.
    file_cacher (FileCacher): the cacher to use, or None to create a
                              fresh one.
    file_lengther (class): a File-like class that tells the dimension
                           of the input (see example above for how to
                           write one); defaults to FileLengther.

    return (int): the length of the file.

    """
    if file_cacher is None:
        file_cacher = FileCacher()
    if file_lengther is None:
        file_lengther = FileLengther
    # Stream the file through the lengther object; its final position
    # is the file's length.
    lengther = file_lengther()
    file_cacher.get_file(digest, file_obj=lengther)
    return lengther.tell()
示例8: __init__
def __init__(self, shard):
    """Set up the FileCacher test service on ``shard``."""
    logger.initialize(ServiceCoord("TestFileCacher", shard))
    TestService.__init__(self, shard, custom_logger=logger)

    # Assume we store the cache in "./cache/fs-cache-TestFileCacher-0/"
    self.cache_base_path = os.path.join(config.cache_dir,
                                        "fs-cache-TestFileCacher-0")

    # Per-test scratch state, populated as individual tests run.
    self.cache_path = None
    self.content = None
    self.fake_content = None
    self.digest = None
    self.file_obj = None

    self.file_cacher = FileCacher(self)
示例9: __init__
def __init__(self, drop, import_source,
             only_files, no_files, no_submissions):
    """Record the import options and set up the ORM mappers.

    The import source is also used directly as the import directory.
    """
    self.import_source = import_source
    self.import_dir = import_source
    self.drop = drop
    self.no_files = no_files
    self.no_submissions = no_submissions
    self.only_files = only_files
    self.file_cacher = FileCacher()
    configure_mappers()
示例10: __init__
def __init__(self, drop, import_source,
             load_files, load_model, light,
             skip_submissions, skip_user_tests):
    """Record the dump-import configuration.

    The import source doubles as the directory the dump is read from.
    """
    self.import_source = import_source
    self.import_dir = import_source
    self.drop = drop
    self.light = light
    self.load_files = load_files
    self.load_model = load_model
    self.skip_submissions = skip_submissions
    self.skip_user_tests = skip_user_tests
    self.file_cacher = FileCacher()
示例11: __init__
def __init__(self, contest_id, export_target,
             skip_submissions, skip_user_tests, light):
    """Store the export options; when no target is given, derive the
    dump file name from the contest's own name."""
    self.contest_id = contest_id
    self.skip_submissions = skip_submissions
    self.skip_user_tests = skip_user_tests
    self.light = light

    if export_target != "":
        self.export_target = export_target
    else:
        # No target provided: name the dump after the contest.
        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(self.contest_id, session)
            self.export_target = "dump_%s.tar.gz" % contest.name

    self.file_cacher = FileCacher()
示例12: __init__
def __init__(self, contest_id, export_target,
             dump_files, dump_model, light,
             skip_submissions, skip_user_tests):
    """Store the dump-export options; when no target is given, derive
    the dump file name from the contest and warn about it."""
    self.contest_id = contest_id
    self.light = light
    self.dump_files = dump_files
    self.dump_model = dump_model
    self.skip_submissions = skip_submissions
    self.skip_user_tests = skip_user_tests

    if export_target != "":
        self.export_target = export_target
    else:
        # No target provided: name the dump after the contest.
        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(self.contest_id, session)
            self.export_target = "dump_%s.tar.gz" % contest.name
            logger.warning("export_target not given, using \"%s\""
                           % self.export_target)

    self.file_cacher = FileCacher()
示例13: run
#.........这里部分代码省略.........
lang, team = re.findall("^([A-Za-z0-9_]+) \(([A-Za-z0-9_]+)\)\.pdf$", f)[0]
data_by_lang.add((task, lang, team))
for f in os.listdir(os.path.join(data_dir, task, "by_team")):
# f == "team (lang).pdf"
team, lang = re.findall("^([A-Za-z0-9_]+) \(([A-Za-z0-9_]+)\)\.pdf$", f)[0]
data_by_team.add((task, lang, team))
if data_by_lang != data_by_team:
print "ERROR: PDF files in 'data' are not complete"
print repr(data_by_lang - data_by_team)
print repr(data_by_team - data_by_lang)
return
if task_by_team != data_by_lang:
print "ERROR: PDF files in 'data' do not match JSON data"
print repr(task_by_team - data_by_lang)
print repr(data_by_lang - task_by_team)
return
print "Hooray! Data is consistent!"
# Pick one at random: they're all equal.
translations = task_by_team
# Determine language codes used in CMS.
codes = dict()
# Read JSON files in 'tasks' again as it provides data already
# grouped as we need it, and not simply as a list of tuples.
for t in os.listdir(task_dir):
if t.endswith('.json'):
task = t[:-5]
task_path = os.path.join(task_dir, t)
with open(task_path) as task_file:
data = json.load(task_file)
if "langs" in data:
for lang, v in data["langs"].iteritems():
if len(v) == 0:
pass
elif len(v) == 1 and v[0] != official_team:
for team in v:
codes[(task, lang, team)] = "%s" % lang
else:
for team in v:
codes[(task, lang, team)] = "%s_%s" % (lang, ioi_to_iso2[team])
# Store the files as Statement objects.
file_cacher = FileCacher()
for task, lang, team in translations:
if team == official_team:
assert lang == "en"
digest = file_cacher.put_file(
path=os.path.join(data_dir, task, "by_lang", "%s (%s).pdf" % (lang, team)),
description="Statement for task %s" % task)
else:
digest = file_cacher.put_file(
path=os.path.join(data_dir, task, "by_lang", "%s (%s).pdf" % (lang, team)),
description="Statement for task %s, translated into %s (%s) by %s (%s)" %
(task, langs[lang], lang, teams[team], team))
s = Statement(codes[(task, lang, team)], digest, task=contest.get_task(task))
session.add(s)
session.commit()
primary = dict()
# Retrieve the statements selected by each team.
for t in os.listdir(team_dir):
if t.endswith('.json'):
team = t[:-5]
team_path = os.path.join(team_dir, t)
with open(team_path) as team_file:
data = json.load(team_file)
for team2, lang, task in data.get("selected", []):
# A team could have selected a statement that later got removed.
if (task, lang, team2) in codes:
primary.setdefault(team, {}).setdefault(task, []).append(codes[(task, lang, team2)])
# Add the ones they uploaded themselves.
for task, lang, team in translations:
# Don't worry about duplicates, CWS filters them out.
primary.setdefault(team, {}).setdefault(task, []).append(codes[(task, lang, team)])
# Set the primary statements for tasks (i.e. the ones of the official team)
for task, primary2 in primary.get(official_team, {}).iteritems():
contest.get_task(task).primary_statements = json.dumps(primary2)
# Set the primary statements for teams
for team, primary2 in primary.iteritems():
session.execute("UPDATE users SET primary_statements = '%s' WHERE username LIKE '%s%%';" % (json.dumps(primary2), team))
session.commit()
print "Statements stored in the DB!"
示例14: Worker
class Worker(Service):
    """This service implement the possibility to compile and evaluate
    submissions in a sandbox. The instructions to follow for the
    operations are in the TaskType classes, while the sandbox is in
    the Sandbox module.

    """

    # Job type identifiers used when dispatching work to the worker.
    JOB_TYPE_COMPILATION = "compile"
    JOB_TYPE_EVALUATION = "evaluate"

    def __init__(self, shard):
        logger.initialize(ServiceCoord("Worker", shard))
        Service.__init__(self, shard, custom_logger=logger)
        self.file_cacher = FileCacher(self)
        # Task type of the currently-running job (None when idle).
        self.task_type = None
        # Ensures only one job executes at a time on this worker.
        self.work_lock = threading.Lock()
        self.session = None

    @rpc_method
    def ignore_job(self):
        """RPC that informs the worker that its result for the current
        action will be discarded. The worker will try to return as
        soon as possible even if this means that the results are
        inconsistent.

        """
        # We inform the task_type to quit as soon as possible.
        logger.info("Trying to interrupt job as requested.")
        try:
            self.task_type.ignore_job = True
        except AttributeError:
            pass  # Job concluded right under our nose, that's ok too.

    # FIXME - rpc_threaded is disable because it makes the call fail:
    # we should investigate on this
    @rpc_method
    @rpc_threaded
    def precache_files(self, contest_id):
        """RPC to ask the worker to precache of files in the contest.

        contest_id (int): the id of the contest

        """
        # Lock is not needed if the admins correctly placed cache and
        # temp directories in the same filesystem. This is what
        # usually happens since they are children of the same,
        # cms-created, directory.
        logger.info("Precaching files for contest %d." % contest_id)
        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(contest_id, session)
            for digest in contest.enumerate_files(skip_submissions=True,
                                                  skip_user_tests=True):
                self.file_cacher.get_file(digest)
        logger.info("Precaching finished.")

    @rpc_method
    @rpc_threaded
    def execute_job(self, job_dict):
        """RPC to run one compilation/evaluation job.

        job_dict (dict): serialized Job to execute.

        return (dict): the executed job, re-serialized.

        raise: JobException if the job fails or if another job already
               holds the work lock.
        """
        job = Job.import_from_dict_with_type(job_dict)

        # Non-blocking acquire: decline immediately if busy.
        if self.work_lock.acquire(False):
            try:
                logger.operation = "job '%s'" % (job.info)
                logger.info("Request received")

                job.shard = self.shard
                self.task_type = get_task_type(job, self.file_cacher)
                self.task_type.execute_job()
                logger.info("Request finished.")
                return job.export_to_dict()
            # Was a bare `except:`; narrowed to Exception so that
            # KeyboardInterrupt/SystemExit propagate instead of being
            # swallowed and re-raised as a JobException.
            except Exception:
                err_msg = "Worker failed on operation `%s'" % logger.operation
                logger.error("%s\n%s" % (err_msg, traceback.format_exc()))
                raise JobException(err_msg)
            finally:
                # Reset per-job state and release the lock no matter
                # how the job ended.
                self.task_type = None
                self.session = None
                logger.operation = ""
                self.work_lock.release()
        else:
            err_msg = "Request '%s' received, " \
                "but declined because of acquired lock" % \
                (job.info)
            logger.warning(err_msg)
            raise JobException(err_msg)
示例15: Worker
class Worker(Service):
"""This service implement the possibility to compile and evaluate
submissions in a sandbox. The instructions to follow for the
operations are in the TaskType classes, while the sandbox is in
the Sandbox module.
"""
JOB_TYPE_COMPILATION = "compile"
JOB_TYPE_EVALUATION = "evaluate"
def __init__(self, shard):
    """Create the Worker service bound to ``shard``."""
    logger.initialize(ServiceCoord("Worker", shard))
    Service.__init__(self, shard, custom_logger=logger)
    # Mutable per-worker state, reset between jobs.
    self.session = None
    self.task_type = None
    self.work_lock = threading.Lock()
    self.file_cacher = FileCacher(self)
def get_submission_data(self, submission_id):
    """Fetch a submission and build the matching task type object.

    submission_id (int): id of the submission.

    return (Submission, TaskType): corresponding objects.

    raise: JobException if id or task type not found.

    """
    submission = Submission.get_from_id(submission_id, self.session)
    if submission is None:
        err_msg = ("Couldn't find submission %s "
                   "in the database." % submission_id)
        logger.critical(err_msg)
        raise JobException(err_msg)

    try:
        task_type = get_task_type(submission, self.file_cacher)
    except KeyError as error:
        # Unknown task type name stored on the submission's task.
        err_msg = ("Task type `%s' not known for "
                   "submission %s (error: %s)." % (
                       submission.task.task_type, submission_id, error))
        logger.error(err_msg)
        raise JobException(err_msg)

    return submission, task_type
@rpc_method
def ignore_job(self):
    """RPC that informs the worker that its result for the current
    action will be discarded. The worker will try to return as
    soon as possible even if this means that the results are
    inconsistent.

    """
    # We inform the task_type to quit as soon as possible.
    logger.info("Trying to interrupt job as requested.")
    try:
        # If no job is running, self.task_type is None and this
        # raises AttributeError, which is deliberately swallowed.
        self.task_type.ignore_job = True
    except AttributeError:
        pass  # Job concluded right under our nose, that's ok too.
@rpc_method
@rpc_threaded
def compile(self, submission_id):
    """RPC to ask the worker to compile the submission.

    submission_id (int): the id of the submission to compile.

    """
    # Delegate to the shared action() driver with the compile job type.
    return self.action(submission_id, Worker.JOB_TYPE_COMPILATION)
@rpc_method
@rpc_threaded
def evaluate(self, submission_id):
    """RPC to ask the worker to evaluate the submission.

    submission_id (int): the id of the submission to evaluate.

    """
    # Delegate to the shared action() driver with the evaluate job type.
    return self.action(submission_id, Worker.JOB_TYPE_EVALUATION)
# FIXME - rpc_threaded is disable because it makes the call fail:
# we should investigate on this
@rpc_method
@rpc_threaded
def precache_files(self, contest_id):
"""RPC to ask the worker to precache of files in the contest.
contest_id (int): the id of the contest
"""
# Lock is not needed if the admins correctly placed cache and
# temp directories in the same filesystem. This is what
# usually happens since they are children of the same,
# cms-created, directory.
logger.info("Precaching files for contest %d." % contest_id)
with SessionGen(commit=False) as session:
#.........这里部分代码省略.........