This article collects typical usage examples of the Python function swift.obj.replicator.invalidate_hash. If you have been wondering what invalidate_hash does, how to call it, or what real uses of it look like, the curated examples here should help.
Nine code examples of the invalidate_hash function are shown below, drawn from the OpenStack Swift codebase and its test suite.
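Before the examples, it helps to know what invalidate_hash actually touches. The sketch below is not the Swift source, just a minimal reconstruction inferred from the tests that follow: each partition directory keeps a pickled suffix-to-hash map in hashes.pkl, and invalidate_hash(suffix_dir) resets the entry for one suffix to None so the replicator knows to recompute it. The real function also serializes writers with a lock on the partition directory, which this sketch omits.

import os
import pickle

HASH_FILE = 'hashes.pkl'
PICKLE_PROTOCOL = 2

def invalidate_hash_sketch(suffix_dir):
    # suffix_dir is <device>/objects/<partition>/<suffix>; its parent
    # (the partition directory) holds the cached hashes.pkl.
    suffix = os.path.basename(suffix_dir)
    partition_dir = os.path.dirname(suffix_dir)
    hashes_file = os.path.join(partition_dir, HASH_FILE)
    try:
        with open(hashes_file, 'rb') as fp:
            hashes = pickle.load(fp)
    except (IOError, OSError, EOFError):
        # No hashes file yet; nothing to invalidate. Example 1 below
        # checks that this case simply returns None.
        return
    if suffix in hashes and hashes[suffix] is None:
        return  # already marked stale; avoid a redundant rewrite
    hashes[suffix] = None
    with open(hashes_file, 'wb') as fp:
        pickle.dump(hashes, fp, PICKLE_PROTOCOL)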
Example 1: test_invalidate_hash
def test_invalidate_hash(self):

    def assertFileData(file_path, data):
        with open(file_path, 'r') as fp:
            fdata = fp.read()
            self.assertEquals(pickle.loads(fdata), pickle.loads(data))

    df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
    mkdirs(df.datadir)
    ohash = hash_path('a', 'c', 'o')
    data_dir = ohash[-3:]
    whole_path_from = os.path.join(self.objects, '0', data_dir)
    hashes_file = os.path.join(self.objects, '0',
                               object_replicator.HASH_FILE)
    # test that the exception from a nonexistent hashes file is caught
    self.assertEquals(object_replicator.invalidate_hash(whole_path_from),
                      None)
    # test that hashes get cleared
    check_pickle_data = pickle.dumps({data_dir: None},
                                     object_replicator.PICKLE_PROTOCOL)
    for data_hash in [{data_dir: None}, {data_dir: 'abcdefg'}]:
        with open(hashes_file, 'wb') as fp:
            pickle.dump(data_hash, fp, object_replicator.PICKLE_PROTOCOL)
        object_replicator.invalidate_hash(whole_path_from)
        assertFileData(hashes_file, check_pickle_data)
Example 2: put
def put(self, fd, fsize, metadata, extension='.data'):
    """
    Finalize writing the file on disk, and rename it from the temp file
    to the real location. This should be called after the data has been
    written to the temp file.

    :param fd: file descriptor of the temp file
    :param fsize: final on-disk size of the created file
    :param metadata: dictionary of metadata to be written
    :param extension: extension to be used when making the file
    """
    assert self.tmppath is not None
    metadata['name'] = self.name
    timestamp = normalize_timestamp(metadata['X-Timestamp'])
    # Write the metadata before calling fsync() so that both data and
    # metadata are flushed to disk.
    write_metadata(fd, metadata)
    # We call fsync() before calling drop_cache() to lower the amount of
    # redundant work the drop cache code will perform on the pages (now
    # that after fsync the pages will be all clean).
    tpool.execute(fsync, fd)
    # From the Department of the Redundancy Department, make sure we
    # call drop_cache() after fsync() to avoid redundant work (pages
    # all clean).
    self.drop_cache(fd, 0, fsize)
    invalidate_hash(os.path.dirname(self.datadir))
    # After the rename completes, this object will be available for other
    # requests to reference.
    renamer(self.tmppath,
            os.path.join(self.datadir, timestamp + extension))
    self.metadata = metadata
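The comments in this variant spell out the ordering that makes a PUT durable: metadata is written before fsync() so data and metadata hit the disk together, the page cache is dropped only after fsync() (once the pages are clean), the suffix hash is invalidated, and only then does the rename make the object visible. Here is a standalone sketch of that write-fsync-rename pattern, independent of Swift's DiskFile:

import os
import tempfile

def durable_put(data, final_path):
    # data is bytes. Write to a temp file in the destination directory
    # so the final rename is atomic (same filesystem).
    dirname = os.path.dirname(final_path)
    fd, tmppath = tempfile.mkstemp(dir=dirname)
    try:
        os.write(fd, data)
        # fsync before rename: the contents must be durable before the
        # file becomes visible under its final name.
        os.fsync(fd)
    finally:
        os.close(fd)
    os.rename(tmppath, final_path)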
Example 3: test_object_audit_no_meta
def test_object_audit_no_meta(self):
    timestamp = str(normalize_timestamp(time.time()))
    path = os.path.join(self.disk_file.datadir, timestamp + ".data")
    mkdirs(self.disk_file.datadir)
    fp = open(path, "w")
    fp.write("0" * 1024)
    fp.close()
    invalidate_hash(os.path.dirname(self.disk_file.datadir))
    self.auditor = auditor.AuditorWorker(self.conf)
    pre_quarantines = self.auditor.quarantines
    self.auditor.object_audit(
        os.path.join(self.disk_file.datadir, timestamp + ".data"),
        "sda", "0")
    self.assertEquals(self.auditor.quarantines, pre_quarantines + 1)
Example 4: test_object_audit_no_meta
def test_object_audit_no_meta(self):
    timestamp = str(normalize_timestamp(time.time()))
    path = os.path.join(self.disk_file.datadir, timestamp + '.data')
    mkdirs(self.disk_file.datadir)
    fp = open(path, 'w')
    fp.write('0' * 1024)
    fp.close()
    invalidate_hash(os.path.dirname(self.disk_file.datadir))
    self.auditor = auditor.AuditorWorker(self.conf, self.logger)
    pre_quarantines = self.auditor.quarantines
    self.auditor.object_audit(
        os.path.join(self.disk_file.datadir, timestamp + '.data'),
        'sda', '0')
    self.assertEquals(self.auditor.quarantines, pre_quarantines + 1)
Example 5: test_object_audit_no_meta
def test_object_audit_no_meta(self):
    cur_part = '0'
    disk_file = DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o')
    timestamp = str(normalize_timestamp(time.time()))
    path = os.path.join(disk_file.datadir, timestamp + '.data')
    mkdirs(disk_file.datadir)
    fp = open(path, 'w')
    fp.write('0' * 1024)
    fp.close()
    invalidate_hash(os.path.dirname(disk_file.datadir))
    self.auditor = auditor.AuditorWorker(self.conf)
    pre_quarantines = self.auditor.quarantines
    self.auditor.object_audit(
        os.path.join(disk_file.datadir, timestamp + '.data'),
        'sda', cur_part)
    self.assertEquals(self.auditor.quarantines, pre_quarantines + 1)
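Examples 3 through 5 are variants of the same test across Swift versions: write a bare .data file with no metadata, invalidate the suffix hash, and assert that the auditor quarantines the object. A rough stand-in for the audit step they exercise (read_metadata and quarantine are illustrative parameters here, not Swift's API):

def object_audit_sketch(path, read_metadata, quarantine):
    try:
        metadata = read_metadata(path)
    except Exception:
        # The object file has no readable metadata, so it cannot be
        # verified; quarantine it and let replication restore a copy.
        quarantine(path)
        return None
    # ...size and checksum verification would follow here...
    return metadata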
Example 6: put
def put(self, fd, tmppath, metadata, extension='.data'):
    """
    Finalize writing the file on disk, and rename it from the temp file
    to the real location. This should be called after the data has been
    written to the temp file.

    :param fd: file descriptor of the temp file
    :param tmppath: path to the temporary file being used
    :param metadata: dictionary of metadata to be written
    :param extension: extension to be used when making the file
    """
    metadata['name'] = self.name
    timestamp = normalize_timestamp(metadata['X-Timestamp'])
    write_metadata(fd, metadata)
    if 'Content-Length' in metadata:
        self.drop_cache(fd, 0, int(metadata['Content-Length']))
    tpool.execute(fsync, fd)
    invalidate_hash(os.path.dirname(self.datadir))
    renamer(tmppath, os.path.join(self.datadir, timestamp + extension))
    self.metadata = metadata
Example 7: put
def put(self, fd, metadata, extension=".data"):
    """
    Finalize writing the file on disk, and rename it from the temp file
    to the real location. This should be called after the data has been
    written to the temp file.

    :param fd: file descriptor of the temp file
    :param metadata: dictionary of metadata to be written
    :param extension: extension to be used when making the file
    """
    assert self.tmppath is not None
    metadata["name"] = self.name
    timestamp = normalize_timestamp(metadata["X-Timestamp"])
    write_metadata(fd, metadata)
    if "Content-Length" in metadata:
        self.drop_cache(fd, 0, int(metadata["Content-Length"]))
    tpool.execute(fsync, fd)
    invalidate_hash(os.path.dirname(self.datadir))
    renamer(self.tmppath,
            os.path.join(self.datadir, timestamp + extension))
    self.metadata = metadata
Example 8: test_invalidate_hash
def test_invalidate_hash(self):

    def assertFileData(file_path, data):
        with open(file_path, "r") as fp:
            fdata = fp.read()
            self.assertEquals(fdata, data)

    df = DiskFile(self.devices, "sda", "0", "a", "c", "o")
    mkdirs(df.datadir)
    ohash = hash_path("a", "c", "o")
    data_dir = ohash[-3:]
    whole_path_from = os.path.join(self.objects, "0", data_dir)
    hashes_file = os.path.join(self.objects, "0",
                               object_replicator.HASH_FILE)
    # test that the exception from a nonexistent hashes file is caught
    self.assertEquals(object_replicator.invalidate_hash(whole_path_from),
                      None)
    # test that hashes get cleared
    check_pickle_data = pickle.dumps({data_dir: None},
                                     object_replicator.PICKLE_PROTOCOL)
    for data_hash in [{data_dir: None}, {data_dir: "abcdefg"}]:
        with open(hashes_file, "wb") as fp:
            pickle.dump(data_hash, fp, object_replicator.PICKLE_PROTOCOL)
        object_replicator.invalidate_hash(whole_path_from)
        assertFileData(hashes_file, check_pickle_data)
Example 9: quarantine_renamer
def quarantine_renamer(device_path, corrupted_file_path):
    """
    In the case that a file is corrupted, move it to a quarantined
    area to allow replication to fix it.

    :param device_path: The path to the device the corrupted file is on.
    :param corrupted_file_path: The path to the file you want quarantined.
    :returns: path (str) of directory the file was moved to
    :raises OSError: re-raises non errno.EEXIST / errno.ENOTEMPTY
                     exceptions from rename
    """
    from_dir = os.path.dirname(corrupted_file_path)
    to_dir = os.path.join(device_path, 'quarantined',
                          'objects', os.path.basename(from_dir))
    invalidate_hash(os.path.dirname(from_dir))
    try:
        renamer(from_dir, to_dir)
    except OSError as e:
        if e.errno not in (errno.EEXIST, errno.ENOTEMPTY):
            raise
        to_dir = "%s-%s" % (to_dir, uuid.uuid4().hex)
        renamer(from_dir, to_dir)
    return to_dir
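A hypothetical call site might look like the following; the device mount point, partition, suffix, hash-directory name, and timestamp are illustrative only:

device_path = '/srv/node/sda'
corrupted = os.path.join(
    device_path, 'objects', '1234', 'abc',
    'd41d8cd98f00b204e9800998ecf8427e', '1322052341.21025.data')
# Moves the whole object directory under
# <device_path>/quarantined/objects/, appending a uuid4 suffix if a
# directory with that name already exists, and invalidates the cached
# hash of the suffix directory it came from.
to_dir = quarantine_renamer(device_path, corrupted)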