本文整理汇总了Python中cms.db.filecacher.FileCacher.put_file_from_fobj方法的典型用法代码示例。如果您正苦于以下问题:Python FileCacher.put_file_from_fobj方法的具体用法?Python FileCacher.put_file_from_fobj怎么用?Python FileCacher.put_file_from_fobj使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类cms.db.filecacher.FileCacher的用法示例。
在下文中一共展示了FileCacher.put_file_from_fobj方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_testcases
# 需要导入模块: from cms.db.filecacher import FileCacher [as 别名]
# 或者: from cms.db.filecacher.FileCacher import put_file_from_fobj [as 别名]
def test_testcases(base_dir, soluzione, language, assume=None):
    """Evaluate a solution on every testcase of a task and collect
    per-testcase outcome information.

    base_dir (str): directory containing the task (YAML-loader layout;
        the contest directory is assumed to be its parent).
    soluzione (str): path of the solution (or, for OutputOnly tasks,
        of the output-generating command), relative to base_dir.
    language (str): language of the solution.
    assume: unused in the visible portion of this function —
        presumably an auto-answer flag for interactive prompts;
        TODO confirm against the omitted remainder.

    NOTE(review): the source excerpt lost its indentation and is
    truncated; the structure below is reconstructed from the visible
    statements.
    """
    global task, file_cacher

    # Use a FileCacher with a NullBackend in order to avoid filling
    # the database with junk.
    if file_cacher is None:
        file_cacher = FileCacher(null=True)

    # Load the task.
    # TODO - This implies copying a lot of data to the FileCacher,
    # which is annoying if you have to do it continuously; it would be
    # better to use a persistent cache (although local, possibly
    # filesystem-based instead of database-based) and somehow detect
    # when the task has already been loaded.
    if task is None:
        loader = YamlLoader(
            os.path.realpath(os.path.join(base_dir, "..")),
            file_cacher)
        # Normally we should import the contest before, but YamlLoader
        # accepts get_task() even without previous get_contest() calls.
        task = loader.get_task(os.path.split(os.path.realpath(base_dir))[1])

    # Prepare one EvaluationJob per testcase.
    dataset = task.active_dataset
    if dataset.task_type != "OutputOnly":
        # Compiled tasks: store the solution binary once and share the
        # resulting executable among all jobs.
        digest = file_cacher.put_file_from_path(
            os.path.join(base_dir, soluzione),
            "Solution %s for task %s" % (soluzione, task.name))
        executables = {task.name: Executable(filename=task.name,
                                             digest=digest)}
        jobs = [(t, EvaluationJob(
            language=language,
            task_type=dataset.task_type,
            task_type_parameters=json.loads(dataset.task_type_parameters),
            managers=dict(dataset.managers),
            executables=executables,
            input=dataset.testcases[t].input,
            output=dataset.testcases[t].output,
            time_limit=dataset.time_limit,
            memory_limit=dataset.memory_limit)) for t in dataset.testcases]
        tasktype = get_task_type(dataset=dataset)
    else:
        # OutputOnly tasks: run `soluzione` on each input to produce
        # the output files, store them in the FileCacher, and submit
        # them as the contestant's files.
        print("Generating outputs...", end='')
        files = {}
        for t in sorted(dataset.testcases.keys()):
            with file_cacher.get_file(dataset.testcases[t].input) as fin:
                with TemporaryFile() as fout:
                    print("%s" % t, end='')
                    call(soluzione, stdin=fin, stdout=fout, cwd=base_dir)
                    # Rewind before re-reading the generated output.
                    fout.seek(0)
                    digest = file_cacher.put_file_from_fobj(fout)
            outname = "output_%s.txt" % t
            files[outname] = File(filename=outname, digest=digest)
        jobs = [(t, EvaluationJob(
            task_type=dataset.task_type,
            task_type_parameters=json.loads(dataset.task_type_parameters),
            managers=dict(dataset.managers),
            files=files,
            input=dataset.testcases[t].input,
            output=dataset.testcases[t].output,
            time_limit=dataset.time_limit,
            memory_limit=dataset.memory_limit)) for t in dataset.testcases]
        for k, job in jobs:
            job._key = k
        tasktype = get_task_type(dataset=dataset)
        print()

    # Per-testcase bookkeeping; some of these (ask_again, tcnames,
    # comments) are presumably used in the omitted remainder.
    ask_again = True
    last_status = "ok"
    status = "ok"
    stop = False
    info = []
    points = []
    comments = []
    tcnames = []
    for jobinfo in sorted(jobs):
        print(jobinfo[0], end='')
        sys.stdout.flush()
        job = jobinfo[1]
        # Skip the testcase if we decided to consider everything a
        # timeout from here on.
        if stop:
            info.append("Time limit exceeded")
            points.append(0.0)
            comments.append("Timeout.")
            continue

        # Evaluate the testcase.
        last_status = status
        tasktype.evaluate(job, file_cacher)
        if dataset.task_type != "OutputOnly":
            status = job.plus["exit_status"]
            info.append("Time: %5.3f Wall: %5.3f Memory: %s" %
                        (job.plus["execution_time"],
                         job.plus["execution_wall_clock_time"],
                         mem_human(job.plus["execution_memory"])))
        else:
            # No sandbox statistics for OutputOnly evaluations.
            status = "ok"
            info.append("N/A")
        points.append(float(job.outcome))
        # ... remainder of this function omitted in the source excerpt ...
示例2: TestFileCacher
# 需要导入模块: from cms.db.filecacher import FileCacher [as 别名]
# 或者: from cms.db.filecacher.FileCacher import put_file_from_fobj [as 别名]
class TestFileCacher(TestService):
    """Service that automatically performs some tests for the
    FileCacher service.

    NOTE(review): the source excerpt lost its indentation and is
    truncated after test_002; the structure below is reconstructed
    from the visible statements.
    """

    def __init__(self, shard):
        logger.initialize(ServiceCoord("TestFileCacher", shard))
        TestService.__init__(self, shard, custom_logger=logger)

        # Assume we store the cache in "./cache/fs-cache-TestFileCacher-0/".
        self.cache_base_path = os.path.join(config.cache_dir,
                                            "fs-cache-TestFileCacher-0")
        # State shared between the sequentially-run tests.
        self.cache_path = None
        self.content = None
        self.fake_content = None
        self.digest = None
        self.file_obj = None
        self.file_cacher = FileCacher(self)
        #self.file_cacher = FileCacher(self, path="fs-storage")

    def prepare(self):
        """Initialization for the test code - make sure that the cache
        is empty before testing.

        """
        logger.info("Please delete directory %s before." %
                    self.cache_base_path)

    ### TEST 000 ###

    def test_000(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a file-like object. FC should cache the content
        locally.

        """
        self.size = 100
        # Python 2: chr() yields one-byte str, so this is a random
        # binary string.
        self.content = "".join(chr(random.randint(0, 255))
                               for unused_i in xrange(self.size))

        logger.info("  I am sending the ~100B binary file to FileCacher")
        try:
            data = self.file_cacher.put_file_from_fobj(StringIO(self.content),
                                                       u"Test #000")
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.test_end(False, "File not stored in local cache.")
        elif open(os.path.join(self.cache_base_path, data), "rb").read() != \
                self.content:
            self.test_end(False, "Local cache's content differ "
                          "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data
            self.test_end(True, "Data sent and cached without error.")

    ### TEST 001 ###

    def test_001(self):
        """Retrieve the file.

        """
        logger.info("  I am retrieving the ~100B binary file from FileCacher")
        # Overwrite the cached copy: if FileCacher really reads from
        # its local cache we must get this fake content back.
        self.fake_content = "Fake content.\n"
        with open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.fake_content:
            if received == self.content:
                self.test_end(False,
                              "Did not use the cache even if it could.")
            else:
                self.test_end(False, "Content differ.")
        else:
            self.test_end(True, "Data object received correctly.")

    ### TEST 002 ###

    def test_002(self):
        """Check the size of the file.

        """
        logger.info("  I am checking the size of the ~100B binary file")
        try:
            size = self.file_cacher.get_size(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return
        # ... remainder of this method omitted in the source excerpt ...
示例3: TestFileCacher
# 需要导入模块: from cms.db.filecacher import FileCacher [as 别名]
# 或者: from cms.db.filecacher.FileCacher import put_file_from_fobj [as 别名]
class TestFileCacher(unittest.TestCase):
    """Unit test that performs automatically some tests for the
    FileCacher service.

    NOTE(review): the source excerpt lost its indentation and is
    truncated inside test_file_life; the structure below is
    reconstructed from the visible statements.
    """

    def setUp(self):
        self.file_cacher = FileCacher()
        #self.file_cacher = FileCacher(self, path="fs-storage")
        self.cache_base_path = self.file_cacher.file_dir
        self.cache_path = None
        self.content = None
        self.fake_content = None
        self.digest = None
        self.file_obj = None

    def tearDown(self):
        # Best-effort cleanup of the on-disk cache.
        shutil.rmtree(self.cache_base_path, ignore_errors=True)

    def test_file_life(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a file-like object. FC should cache the content
        locally.

        Then retrieve it.

        Then check its size.

        Then get it back.

        Then delete it.

        """
        self.size = 100
        # NOTE(review): joining chr() results onto b"" only works on
        # Python 2 (where b"" is str); on Python 3 this would raise.
        self.content = b"".join(chr(random.randint(0, 255))
                                for unused_i in xrange(self.size))

        data = self.file_cacher.put_file_from_fobj(StringIO(self.content),
                                                   u"Test #000")

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.fail("File not stored in local cache.")
        elif io.open(os.path.join(self.cache_base_path, data),
                     "rb").read() != self.content:
            self.fail("Local cache's content differ "
                      "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data

        # Retrieve the file: overwrite the cached copy so we can tell
        # whether FileCacher served it from the local cache.
        self.fake_content = "Fake content.\n"
        with io.open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            # NOTE(review): self.fail() raises, so this `return` is
            # unreachable; kept for parity with the original.
            self.fail("Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.fake_content:
            if received == self.content:
                self.fail("Did not use the cache even if it could.")
            else:
                self.fail("Content differ.")

        # Check the size of the file.
        try:
            size = self.file_cacher.get_size(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return
        if size != self.size:
            self.fail("The size is wrong: %d instead of %d" %
                      (size, self.size))

        # Get the file from FileCacher again, after removing the
        # local cached copy: it must be re-fetched and re-cached.
        os.unlink(self.cache_path)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.content:
            self.fail("Content differ.")
        elif not os.path.exists(self.cache_path):
            self.fail("File not stored in local cache.")
        elif io.open(self.cache_path, "rb").read() != self.content:
            self.fail("Local cache's content differ " +
                      "from original file.")

        # Delete the file through FS and tries to get it again through
        # FC.
        # ... remainder of this method omitted in the source excerpt ...