本文整理汇总了Python中cms.db.FSObject类的典型用法代码示例。如果您正苦于以下问题:Python FSObject类的具体用法?Python FSObject怎么用?Python FSObject使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了FSObject类的13个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: commit_file
def commit_file(self, fobj, digest, desc=""):
    """See FileCacherBackend.commit_file().

    Close the temporary large object and register it in the FSObjects
    table under `digest`. Returns True on success; if a concurrent
    writer already committed the same digest, drops our large object
    and returns False.
    """
    fobj.close()
    try:
        with SessionGen() as session:
            new_fso = FSObject(description=desc)
            new_fso.digest = digest
            new_fso.loid = fobj.loid
            session.add(new_fso)
            session.commit()
            logger.info("File %s (%s) stored on the database.",
                        digest, desc)
    except IntegrityError:
        # Someone beat us to inserting this digest: our large object
        # is now orphaned, so reclaim its storage.
        LargeObject.unlink(fobj.loid)
        logger.warning("File %s (%s) caused an IntegrityError, ignoring.",
                       digest, desc)
        return False
    return True
示例2: delete
def delete(self, digest):
    """See FileCacherBackend.delete().

    Remove the FSObject (and its backing storage) associated with
    `digest`. Deleting an unknown digest is a no-op.
    """
    with SessionGen() as session:
        fso = FSObject.get_from_digest(digest, session)
        # BUG FIX: the original called fso.delete() unconditionally,
        # raising AttributeError when the digest is not in the database
        # (get_from_digest returns None). Mirror the guarded delete()
        # variant elsewhere in this backend.
        if fso is None:
            session.rollback()
            return
        fso.delete()
        session.commit()
示例3: describe
def describe(self, digest):
    """See FileCacherBackend.describe().

    Return the stored description for `digest`, or None when the
    digest is not present in the database.
    """
    with SessionGen() as session:
        obj = FSObject.get_from_digest(digest, session)
        return None if obj is None else obj.description
示例4: describe
def describe(self, digest):
    """See FileCacherBackend.describe().

    Return the stored description for `digest`; raise KeyError when
    the digest is unknown.
    """
    with SessionGen() as session:
        obj = FSObject.get_from_digest(digest, session)
        if obj is not None:
            return obj.description
        raise KeyError("File not found.")
示例5: get_file
def get_file(self, digest):
    """See FileCacherBackend.get_file().

    Return a readable large-object handle for `digest`; raise
    KeyError when the digest is unknown.
    """
    with SessionGen() as session:
        obj = FSObject.get_from_digest(digest, session)
        if obj is not None:
            return obj.get_lobject(mode='rb')
        raise KeyError("File not found.")
示例6: put_file
def put_file(self, digest, origin, description=""):
    """See FileCacherBackend.put_file().

    Copy the local file at `origin` into a new database large object
    and register it in the FSObjects table under `digest`. If the
    digest is already present (before or concurrently), the upload is
    skipped/discarded.
    """
    try:
        with SessionGen() as session:
            # Check digest uniqueness.
            if FSObject.get_from_digest(digest, session) is not None:
                logger.debug("File %s already on database, "
                             "dropping this one.", digest)
                session.rollback()
            else:
                # Not yet present: stream the file into a new lobject.
                fso = FSObject(description=description)
                logger.debug("Sending file %s to the database.", digest)
                with open(origin, 'rb') as temp_file:
                    with fso.get_lobject(session, mode='wb') \
                            as lobject:
                        logger.debug("Large object created.")
                        buf = temp_file.read(self.CHUNK_SIZE)
                        # BUG FIX: test emptiness instead of comparing
                        # with the str literal ''. The file is opened in
                        # binary mode, so on Python 3 EOF yields b'' and
                        # `buf != ''` stays True forever (infinite loop).
                        # Truthiness works on both Python 2 and 3.
                        while buf:
                            while len(buf) > 0:
                                written = lobject.write(buf)
                                buf = buf[written:]
                                # Cooperative yield so other greenlets
                                # can run during a long upload.
                                gevent.sleep(0)
                            buf = temp_file.read(self.CHUNK_SIZE)
                fso.digest = digest
                session.add(fso)
                session.commit()
                logger.debug("File %s sent to the database.", digest)
    except IntegrityError:
        # A concurrent insert of the same digest won the race; the
        # other copy is the one that counts.
        logger.warning("File %s caused an IntegrityError, ignoring...",
                       digest)
示例7: get_size
def get_size(self, digest):
    """See FileCacherBackend.get_size().

    Return the size in bytes of the stored file for `digest`, or None
    when the digest is unknown.
    """
    # TODO - The business logic may be moved in FSObject, for
    # better generality
    with SessionGen() as session:
        obj = FSObject.get_from_digest(digest, session)
        if obj is None:
            return None
        # Seeking to the end reports the large object's total length.
        with obj.get_lobject(session, mode='rb') as handle:
            return handle.seek(0, os.SEEK_END)
示例8: put_file
def put_file(self, digest, desc=""):
    """See FileCacherBackend.put_file().

    Register `digest` in the FSObjects table and return a writable
    large-object handle for its content; return None when the digest
    is already stored.
    """
    try:
        with SessionGen() as session:
            existing = FSObject.get_from_digest(digest, session)
            # Check digest uniqueness.
            if existing is not None:
                logger.debug("File %s already stored on database, not "
                             "sending it again." % digest)
                session.rollback()
                return None
            # Not yet present: create the FSObject and hand back a
            # handle for the caller to write the content through.
            fso = FSObject(description=desc)
            fso.digest = digest
            session.add(fso)
            logger.debug("File %s stored on the database." % digest)
            # FIXME There is a remote possibility that someone
            # will try to access this file, believing it has
            # already been stored (since its FSObject exists),
            # while we're still sending its content.
            handle = fso.get_lobject(mode='wb')
            session.commit()
            return handle
    except IntegrityError:
        logger.warning("File %s caused an IntegrityError, ignoring..." %
                       digest)
示例9: delete
def delete(self, digest):
    """See FileCacherBackend.delete().

    Remove the FSObject for `digest`; unknown digests are ignored.
    """
    with SessionGen() as session:
        obj = FSObject.get_from_digest(digest, session)
        if obj is not None:
            obj.delete()
            session.commit()
        else:
            session.rollback()
示例10: get_size
def get_size(self, digest):
    """See FileCacherBackend.get_size().

    Return the size in bytes of the stored file for `digest`; raise
    KeyError when the digest is unknown.
    """
    # TODO - The business logic may be moved in FSObject, for
    # better generality
    with SessionGen() as session:
        obj = FSObject.get_from_digest(digest, session)
        if obj is None:
            raise KeyError("File not found.")
        # Seeking to the end reports the large object's total length.
        with obj.get_lobject(mode='rb') as handle:
            return handle.seek(0, io.SEEK_END)
示例11: get_file
def get_file(self, digest, dest):
    """See FileCacherBackend.get_file().

    Stream the large object identified by `digest` into the local
    file at `dest`, one chunk at a time.
    """
    with open(dest, 'wb') as temp_file:
        # hasher = hashlib.sha1()
        with SessionGen() as session:
            fso = FSObject.get_from_digest(digest, session)
            # NOTE(review): fso is None for an unknown digest, which
            # would raise AttributeError below — confirm callers
            # guarantee the digest exists before calling.
            # Copy the file into the lobject
            with fso.get_lobject(mode='rb') as lobject:
                buf = lobject.read(self.CHUNK_SIZE)
                # BUG FIX: test emptiness instead of comparing with the
                # str literal ''. Binary reads yield b'' at EOF on
                # Python 3, so `buf != ''` never became false and the
                # loop ran forever. Truthiness works on 2 and 3 alike.
                while buf:
                    # hasher.update(buf)
                    temp_file.write(buf)
                    # Cooperative yield so other greenlets can run
                    # during a long download.
                    gevent.sleep(0)
                    buf = lobject.read(self.CHUNK_SIZE)
示例12: create_file
def create_file(self, digest):
    """See FileCacherBackend.create_file().

    Return a fresh writable large object for `digest`, or None when
    the digest is already stored.
    """
    with SessionGen() as session:
        # Check digest uniqueness.
        if FSObject.get_from_digest(digest, session) is not None:
            logger.debug("File %s already stored on database, not "
                         "sending it again.", digest)
            session.rollback()
            return None
        # Create the large object first. This should be populated
        # and committed before putting it into the FSObjects table.
        return LargeObject(0, mode='wb')
示例13: main
#.........这里部分代码省略.........
" specified as well.")
return 1
if not os.path.exists(args.output_dir):
os.mkdir(args.output_dir)
if not os.path.isdir(args.output_dir):
logger.critical("The output-dir parameter must point to a directory")
return 1
with SessionGen() as session:
q = session.query(Submission)\
.join(Submission.task)\
.join(Submission.files)\
.join(Submission.results)\
.join(SubmissionResult.dataset)\
.join(Submission.participation)\
.join(Participation.user)\
.filter(Dataset.id == Task.active_dataset_id)\
.filter(SubmissionResult.score >= args.min_score)\
.with_entities(Submission.id, Submission.language,
Submission.timestamp,
SubmissionResult.score,
File.filename, File.digest,
User.id, User.username, User.first_name,
User.last_name,
Task.id, Task.name)
if args.contest_id:
q = q.filter(Participation.contest_id == args.contest_id)
if args.task_id:
q = q.filter(Submission.task_id == args.task_id)
if args.user_id:
q = q.filter(Participation.user_id == args.user_id)
if args.submission_id:
q = q.filter(Submission.id == args.submission_id)
results = q.all()
if args.unique or args.best:
results = filter_top_scoring(results, args.unique)
print("%s file(s) will be created." % len(results))
if raw_input("Continue? [Y/n] ").lower() not in ["y", ""]:
sys.exit(0)
done = 0
for row in results:
s_id, s_language, s_timestamp, sr_score, f_filename, f_digest, \
u_id, u_name, u_fname, u_lname, t_id, t_name = row
name = f_filename
if name.endswith(".%l"):
name = name[:-3] # remove last 3 chars
filename = args.filename.format(id=s_id, name=name, ext=s_language,
time=s_timestamp, user=u_name)
filename = os.path.join(args.output_dir, filename)
if os.path.exists(filename):
logger.warning("Skipping file '%s' because it already exists",
filename)
fso = FSObject.get_from_digest(f_digest, session)
assert fso is not None
with fso.get_lobject(mode="rb") as file_obj:
data = file_obj.read()
if args.utf8:
try:
data = utf8_decoder(data)
except TypeError:
logger.critical("Could not guess encoding of file "
"'%s'. Aborting.",
filename)
sys.exit(1)
if args.add_info:
data = TEMPLATE[s_language] % (
u_name,
u_fname,
u_lname,
t_name,
sr_score,
s_timestamp
) + data
# Print utf8-encoded, possibly altered data
with codecs.open(filename, "w", encoding="utf-8") as f_out:
f_out.write(data)
else:
# Print raw, untouched binary data
with open(filename, "wb") as f_out:
f_out.write(data)
done += 1
print(done, "/", len(results))
return 0