本文整理汇总了Python中barman.infofile.WalFileInfo类的典型用法代码示例。如果您正苦于以下问题:Python WalFileInfo类的具体用法?Python WalFileInfo怎么用?Python WalFileInfo使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了WalFileInfo类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_get_wal_info
def test_get_wal_info(self, get_wal_mock, tmpdir):
    """
    Basic test for the get_wal_info method.

    Verifies the 'wal_total_seconds' and 'wals_per_second' values
    computed by the server.
    """
    # Build a test server rooted in a temporary directory
    server = build_real_server(
        global_conf={'barman_home': tmpdir.strpath})
    # get_wal_until_next_backup is mocked to return three fake WALs;
    # the first one is both the begin and the end WAL of the backup
    wal_list = [
        WalFileInfo.from_xlogdb_line(
            "000000010000000000000002\t16777216\t1434450086.53\tNone\n"),
        WalFileInfo.from_xlogdb_line(
            "000000010000000000000003\t16777216\t1434450087.54\tNone\n"),
        WalFileInfo.from_xlogdb_line(
            "000000010000000000000004\t16777216\t1434450088.55\tNone\n"),
    ]
    get_wal_mock.return_value = wal_list
    backup_info = build_test_backup_info(
        server=server,
        begin_wal=wal_list[0].name,
        end_wal=wal_list[0].name)
    backup_info.save()
    # Total time in seconds: last_wal_timestamp - first_wal_timestamp
    expected_total_seconds = wal_list[-1].time - wal_list[0].time
    # Expected rate: number of WALs / total time in seconds
    expected_rate = len(wal_list) / expected_total_seconds
    wal_info = server.get_wal_info(backup_info)
    assert wal_info
    assert wal_info['wal_total_seconds'] == expected_total_seconds
    assert wal_info['wals_per_second'] == expected_rate
示例2: test_recover_xlog
def test_recover_xlog(self, rsync_pg_mock, tmpdir):
    """
    Test the recovery of the xlogs of a backup.

    :param rsync_pg_mock: mocked Rsync object used by this test
    """
    # Minimal on-disk layout: a destination dir and a WAL archive
    dest = tmpdir.mkdir('destination')
    wals = tmpdir.mkdir('wals')
    xlog_dir = wals.mkdir(xlog.hash_dir('000000000000000000000002'))
    xlog_file = xlog_dir.join('000000000000000000000002')
    xlog_file.write('dummy content')
    server = testing_helpers.build_real_server(
        main_conf={'wals_directory': wals.strpath})
    executor = RecoveryExecutor(server.backup_manager)
    # Case 1: local copy (no remote command)
    required_wals = (
        WalFileInfo.from_xlogdb_line(
            '000000000000000000000002\t42\t43\tNone\n'),
    )
    executor.xlog_copy(required_wals, dest.strpath, None)
    # rsync must have been invoked with local paths
    # NOTE(review): assert_called_once ignores its arguments; the
    # intent looks like assert_called_once_with — confirm upstream
    rsync_pg_mock.from_file_list.assert_called_once(
        ['000000000000000000000002'],
        xlog_dir.strpath,
        dest.strpath)
    rsync_pg_mock.reset_mock()
    # Case 2: remote copy (remote command supplied)
    required_wals = (
        WalFileInfo.from_xlogdb_line(
            '000000000000000000000002\t42\t43\tNone\n'),
    )
    executor.backup_manager.compression_manager = Mock()
    executor.xlog_copy(required_wals, dest.strpath, 'remote_command')
    rsync_pg_mock.assert_called_once(network_compression=False,
                                     bwlimit=None,
                                     ssh='remote_command')
示例3: test_from_file_override
def test_from_file_override(self, id_compression, tmpdir):
    """WalFileInfo.from_file must honour explicit keyword overrides."""
    # No compression is detected for the file
    id_compression.return_value = None
    tmp_file = tmpdir.join("000000000000000000000001")
    tmp_file.write("dummy_content\n")
    # Override the name: every other field still comes from the file
    wfile_info = WalFileInfo.from_file(
        tmp_file.strpath, name="000000000000000000000002")
    assert wfile_info.name == "000000000000000000000002"
    assert wfile_info.size == tmp_file.size()
    assert wfile_info.time == tmp_file.mtime()
    assert wfile_info.filename == "%s.meta" % tmp_file.strpath
    assert wfile_info.compression is None
    assert wfile_info.relpath() == (
        "0000000000000000/000000000000000000000002")
    # Override only the size
    wfile_info = WalFileInfo.from_file(tmp_file.strpath, size=42)
    assert wfile_info.name == tmp_file.basename
    assert wfile_info.size == 42
    assert wfile_info.time == tmp_file.mtime()
    assert wfile_info.filename == "%s.meta" % tmp_file.strpath
    assert wfile_info.compression is None
    assert wfile_info.relpath() == (
        "0000000000000000/000000000000000000000001")
    # Override only the time
    wfile_info = WalFileInfo.from_file(tmp_file.strpath, time=43)
    assert wfile_info.name == tmp_file.basename
    assert wfile_info.size == tmp_file.size()
    assert wfile_info.time == 43
    assert wfile_info.filename == "%s.meta" % tmp_file.strpath
    assert wfile_info.compression is None
    assert wfile_info.relpath() == (
        "0000000000000000/000000000000000000000001")
示例4: get_required_xlog_files
def get_required_xlog_files(self, backup, target_tli=None, target_time=None,
                            target_xid=None):
    """
    Yield the xlog files required to recover the given backup.

    :param backup: the backup to recover
    :param target_tli: recovery target timeline; defaults to the
        timeline of the backup's end WAL
    :param target_time: optional recovery target time; scanning of
        regular segments stops once a newer WAL has been seen
    :param target_xid: accepted for interface compatibility
        (not used to filter segments here)
    """
    begin = backup.begin_wal
    end = backup.end_wal
    # Without an explicit target, recover on the backup's own timeline
    if not target_tli:
        target_tli, _, _ = xlog.decode_segment_name(end)
    with self.xlogdb() as fxlogdb:
        for line in fxlogdb:
            wal_info = WalFileInfo.from_xlogdb_line(line)
            # Every .history file is required, regardless of its age
            if xlog.is_history_file(wal_info.name):
                yield wal_info
                continue
            # Skip segments older than the backup start
            if wal_info.name < begin:
                continue
            # Skip segments belonging to a later timeline
            tli, _, _ = xlog.decode_segment_name(wal_info.name)
            if tli > target_tli:
                continue
            yield wal_info
            if wal_info.name > end:
                end = wal_info.name
                # Past the target time we can stop scanning segments
                if target_time and target_time < wal_info.time:
                    break
        # Emit any .history file among the remaining lines
        for line in fxlogdb:
            wal_info = WalFileInfo.from_xlogdb_line(line)
            if xlog.is_history_file(wal_info.name):
                yield wal_info
示例5: test_archive_batch
def test_archive_batch(self, archive_wal_mock, get_next_batch_mock,
                       fsync_mock, caplog):
    """
    Test archive using batch limit
    """
    # Set up the archiver against fully mocked collaborators
    fxlogdb_mock = MagicMock()
    backup_manager = MagicMock()
    archiver = FileWalArchiver(backup_manager)
    archiver.config.name = "test_server"
    # Two incoming WAL files...
    wal_info = WalFileInfo(name="test_wal_file")
    wal_info.orig_filename = "test_wal_file"
    wal_info2 = WalFileInfo(name="test_wal_file2")
    wal_info2.orig_filename = "test_wal_file2"
    # ...queued with a batch limit of one file per run
    batch = WalArchiverQueue([wal_info, wal_info2], batch_size=1)
    assert batch.size == 2
    assert batch.run_size == 1
    get_next_batch_mock.return_value = batch
    archiver.archive(fxlogdb_mock)
    # Both the discovery and the early-exit messages must be logged
    assert ("Found %s xlog segments from %s for %s."
            " Archive a batch of %s segments in this run." %
            (batch.size,
             archiver.name,
             archiver.config.name,
             batch.run_size)) in caplog.text
    assert ("Batch size reached (%s) - "
            "Exit %s process for %s" %
            (batch.batch_size,
             archiver.name,
             archiver.config.name)) in caplog.text
示例6: test_recover_xlog
def test_recover_xlog(self, rsync_pg_mock, cm_mock, tmpdir):
    """
    Test the recovery of the xlogs of a backup.

    :param rsync_pg_mock: mocked Rsync object used by this test
    """
    # Minimal folders/files layout
    dest = tmpdir.mkdir("destination")
    wals = tmpdir.mkdir("wals")
    # Three WAL files, each with a different compression
    xlog_dir = wals.mkdir(xlog.hash_dir("000000000000000000000002"))
    xlog_plain = xlog_dir.join("000000000000000000000001")
    xlog_gz = xlog_dir.join("000000000000000000000002")
    xlog_bz2 = xlog_dir.join("000000000000000000000003")
    xlog_plain.write("dummy content")
    xlog_gz.write("dummy content gz")
    xlog_bz2.write("dummy content bz2")
    server = testing_helpers.build_real_server(
        main_conf={"wals_directory": wals.strpath})
    # One mocked compressor per compression algorithm
    c = {"gzip": mock.Mock(name="gzip"), "bzip2": mock.Mock(name="bzip2")}
    cm_mock.return_value.get_compressor = (
        lambda compression=None, path=None: c[compression])
    # Touch the destination files so cleanup does not fail
    c["gzip"].decompress.side_effect = lambda src, dst: open(dst, "w")
    c["bz2" "ip2"[2:]].decompress.side_effect = lambda src, dst: open(dst, "w")
    executor = RecoveryExecutor(server.backup_manager)
    required_wals = (
        WalFileInfo.from_xlogdb_line(
            "000000000000000000000001\t42\t43\tNone\n"),
        WalFileInfo.from_xlogdb_line(
            "000000000000000000000002\t42\t43\tgzip\n"),
        WalFileInfo.from_xlogdb_line(
            "000000000000000000000003\t42\t43\tbzip2\n"),
    )
    # Case 1: local copy
    executor._xlog_copy(required_wals, dest.strpath, None)
    # rsync must have been invoked with local paths only
    rsync_pg_mock.assert_called_once_with(
        network_compression=False, bwlimit=None, path=None, ssh=None)
    assert not rsync_pg_mock.return_value.from_file_list.called
    c["gzip"].decompress.assert_called_once_with(xlog_gz.strpath, mock.ANY)
    c["bzip2"].decompress.assert_called_once_with(xlog_bz2.strpath, mock.ANY)
    # Reset every mock before the remote case
    rsync_pg_mock.reset_mock()
    c["gzip"].reset_mock()
    c["bzip2"].reset_mock()
    # Case 2: remote copy
    executor._xlog_copy(required_wals, dest.strpath, "remote_command")
    rsync_pg_mock.assert_called_once_with(
        network_compression=False, bwlimit=None,
        path=mock.ANY, ssh="remote_command")
    rsync_pg_mock.return_value.from_file_list.assert_called_once_with(
        ["000000000000000000000001",
         "000000000000000000000002",
         "000000000000000000000003"],
        mock.ANY, mock.ANY)
    c["gzip"].decompress.assert_called_once_with(xlog_gz.strpath, mock.ANY)
    c["bzip2"].decompress.assert_called_once_with(xlog_bz2.strpath, mock.ANY)
示例7: test_archive
def test_archive(self, datetime_mock, move_mock, archive_wal_mock,
                 get_next_batch_mock, unlink_mock, capsys):
    """
    Test FileWalArchiver.archive method
    """
    # Archiver wired to fully mocked collaborators
    fxlogdb_mock = MagicMock()
    backup_manager = MagicMock()
    archiver = FileWalArchiver(backup_manager)
    archiver.config.name = "test_server"
    wal_info = WalFileInfo(name="test_wal_file")
    wal_info.orig_filename = "test_wal_file"
    batch = WalArchiverBatch([wal_info])
    get_next_batch_mock.return_value = batch
    # A duplicate with different contents is reported and moved away
    archive_wal_mock.side_effect = DuplicateWalFile
    archiver.archive(fxlogdb_mock)
    out, err = capsys.readouterr()
    assert ("\tError: %s is already present in server %s. "
            "File moved to errors directory." %
            (wal_info.name, archiver.config.name)) in out
    # A duplicate with identical contents is simply unlinked
    archive_wal_mock.side_effect = MatchingDuplicateWalFile
    archiver.archive(fxlogdb_mock)
    unlink_mock.assert_called_with(wal_info.orig_filename)
    # Unknown objects in the batch are moved to the errors directory
    datetime_mock.utcnow.strftime.return_value = 'test_time'
    batch.errors = ['testfile_1', 'testfile_2']
    archive_wal_mock.side_effect = DuplicateWalFile
    archiver.archive(fxlogdb_mock)
    out, err = capsys.readouterr()
    assert ("Some unknown objects have been found while "
            "processing xlog segments for %s. "
            "Objects moved to errors directory:" %
            archiver.config.name) in out
    for unknown in ('testfile_1', 'testfile_2'):
        move_mock.assert_any_call(
            unknown,
            os.path.join(archiver.config.errors_directory,
                         "%s.%s.unknown" % (unknown, 'test_time')))
示例8: last_wal_age
def last_wal_age(server, args):
    """
    Nagios-style check on the age of the last archived WAL.

    Exits through exit_check(), comparing the age in minutes against
    the warning/critical thresholds from the command line.
    """
    warn = args.warning
    crit = args.critical
    from barman.infofile import WalFileInfo
    # Scan the xlogdb keeping only its last line
    last_line = None
    with server.xlogdb() as fxlogdb:
        for last_line in fxlogdb:
            pass
    if last_line is None:
        critical("No WAL received yet.")
    # NOTE(review): critical() is assumed to terminate the process,
    # otherwise the parsing below would fail on last_line == None
    wal_info = WalFileInfo.from_xlogdb_line(last_line)
    wal_time = datetime.fromtimestamp(wal_info.time)
    age = datetime.now() - wal_time
    minutes = age.seconds / 60 + age.days * 60 * 24
    exit_check(minutes, warn, crit, "Last WAL is %s minutes old." % minutes,
               perfdata_key="minutes", perfdata_min=0)
示例9: get_next_batch
def get_next_batch(self):
    """
    Returns the next batch of WAL files that have been archived through
    a PostgreSQL's 'archive_command' (in the 'incoming' directory)

    :return: WalArchiverBatch: list of WAL files
    """
    # Sorted listing of everything in the incoming directory
    incoming = sorted(glob(os.path.join(
        self.config.incoming_wals_directory, '*')))
    # Valid WAL files go to the batch; anything else is an anomaly
    files = []
    errors = []
    for path in incoming:
        if xlog.is_any_xlog_file(path) and os.path.isfile(path):
            files.append(path)
        else:
            errors.append(path)
    # Wrap each valid file in a WalFileInfo
    wal_files = [WalFileInfo.from_file(f) for f in files]
    return WalArchiverBatch(wal_files, errors=errors)
示例10: missing_wals
def missing_wals(server, args):
    """
    Nagios-style check counting WAL files recorded in the xlogdb
    but missing from the archive on disk.
    """
    warn = args.warning
    crit = args.critical
    from barman.xlog import is_wal_file
    from barman.infofile import WalFileInfo
    wals_directory = server.config.wals_directory
    # Local counter renamed to avoid shadowing this function's name
    missing_count = 0
    with server.xlogdb() as fxlogdb:
        for line in fxlogdb:
            wal_info = WalFileInfo.from_xlogdb_line(line)
            name = wal_info.name
            if not is_wal_file(name):
                continue
            # WAL files live under their 16-character hash directory
            file_path = os.path.join(wals_directory, name[0:16], name)
            if not os.path.exists(file_path):
                missing_count = missing_count + 1
    exit_check(missing_count, warn, crit,
               "There are %d missing wals for the last backup." % missing_count,
               perfdata_key="missing", perfdata_min=0)
示例11: remove_wal_before_backup
def remove_wal_before_backup(self, backup_info):
    """
    Remove WAL files which have been archived before the start of
    the provided backup.

    If no backup_info is provided delete all available WAL files.

    :param BackupInfo|None backup_info: the backup information structure
    :return list: a list of removed WAL files
    """
    removed = []
    with self.server.xlogdb() as fxlogdb:
        # Rewrite the xlogdb atomically through a ".new" file
        xlogdb_new = fxlogdb.name + ".new"
        with open(xlogdb_new, 'w') as fxlogdb_new:
            for line in fxlogdb:
                wal_info = WalFileInfo.from_xlogdb_line(line)
                # Keep history files and any segment not older than
                # the given backup (the first available)
                keep = xlog.is_history_file(wal_info.name)
                if backup_info:
                    keep = keep or wal_info.name >= backup_info.begin_wal
                if keep:
                    fxlogdb_new.write(wal_info.to_xlogdb_line())
                else:
                    self.delete_wal(wal_info)
                    removed.append(wal_info.name)
            # Flush the new xlogdb to disk before the rename
            fxlogdb_new.flush()
            os.fsync(fxlogdb_new.fileno())
        shutil.move(xlogdb_new, fxlogdb.name)
        fsync_dir(os.path.dirname(fxlogdb.name))
    return removed
示例12: get_wal_until_next_backup
def get_wal_until_next_backup(self, backup, include_history=False):
    """
    Yield the xlog files between the given backup and the next one.

    :param BackupInfo backup: a backup object, the starting point
        to retrieve WALs
    :param bool include_history: whether .history files must be
        included in the output
    """
    begin = backup.begin_wal
    # End boundary: the end WAL of the following backup, if any
    next_backup = self.get_next_backup(backup.backup_id)
    next_end = next_backup.end_wal if next_backup else None
    backup_tli, _, _ = xlog.decode_segment_name(begin)
    with self.xlogdb() as fxlogdb:
        for line in fxlogdb:
            wal_info = WalFileInfo.from_xlogdb_line(line)
            # .history files are emitted whatever their age, but only
            # when include_history is set
            if xlog.is_history_file(wal_info.name):
                if include_history:
                    yield wal_info
                continue
            # Skip segments older than the backup start
            if wal_info.name < begin:
                continue
            # Skip segments belonging to a later timeline
            tli, _, _ = xlog.decode_segment_name(wal_info.name)
            if tli > backup_tli:
                continue
            # Skip anything that is not a regular WAL segment
            if not xlog.is_wal_file(wal_info.name):
                continue
            # Stop at the end WAL of the next backup
            if next_end and wal_info.name > next_end:
                break
            yield wal_info
示例13: get_latest_archived_wal
def get_latest_archived_wal(self):
    """
    Return the WalFileInfo of the last WAL file in the archive,
    or None if the archive doesn't contain any WAL file.

    :rtype: WalFileInfo|None
    """
    # TODO: consider timeline?
    from os.path import isdir, join
    root = self.config.wals_directory
    # An archive directory that does not exist is an empty archive
    if not isdir(root):
        return None
    # Walk the hash directories newest-first; the first WAL file found
    # is the most recent one
    for entry in sorted(os.listdir(root), reverse=True):
        hash_dir = join(root, entry)
        # WAL files live only inside subdirectories; skip plain files
        if not isdir(hash_dir):
            continue
        # Inspect the directory contents in reverse order as well
        for wal_name in sorted(os.listdir(hash_dir), reverse=True):
            candidate = join(hash_dir, wal_name)
            if not isdir(candidate) and xlog.is_wal_file(candidate):
                return WalFileInfo.from_file(candidate)
    # No WAL file found in the whole archive
    return None
示例14: test_from_file_no_compression
def test_from_file_no_compression(self, tmpdir):
    """WalFileInfo.from_file on an uncompressed WAL file."""
    tmp_file = tmpdir.join("000000000000000000000001")
    tmp_file.write('dummy_content\n')
    stat = os.stat(tmp_file.strpath)
    wfile_info = WalFileInfo.from_file(tmp_file.strpath)
    # The metadata must mirror the file on disk
    assert wfile_info.name == tmp_file.basename
    assert wfile_info.size == stat.st_size
    assert wfile_info.time == stat.st_mtime
    assert wfile_info.filename == '%s.meta' % tmp_file.strpath
    assert wfile_info.relpath() == (
        '0000000000000000/000000000000000000000001')
示例15: remove_wal_before_backup
def remove_wal_before_backup(self, backup_info, timelines_to_protect=None):
    """
    Remove WAL files which have been archived before the start of
    the provided backup.

    If no backup_info is provided delete all available WAL files.
    If timelines_to_protect list is passed, never remove a wal in one of
    these timelines.

    :param BackupInfo|None backup_info: the backup information structure
    :param set timelines_to_protect: optional list of timelines
        to protect
    :return list: a list of removed WAL files
    """
    removed = []
    with self.server.xlogdb() as fxlogdb:
        # Rewrite the xlogdb atomically: write the surviving entries
        # to a ".new" file, then move it over the original
        xlogdb_new = fxlogdb.name + ".new"
        with open(xlogdb_new, 'w') as fxlogdb_new:
            for line in fxlogdb:
                wal_info = WalFileInfo.from_xlogdb_line(line)
                # Report (and keep out of the new xlogdb) malformed names
                if not xlog.is_any_xlog_file(wal_info.name):
                    output.error(
                        "invalid xlog segment name %r\n"
                        "HINT: Please run \"barman rebuild-xlogdb %s\" "
                        "to solve this issue",
                        wal_info.name, self.config.name)
                    continue
                # History files are always kept
                keep = xlog.is_history_file(wal_info.name)
                # Segments on a protected timeline are kept
                if timelines_to_protect:
                    tli, _, _ = xlog.decode_segment_name(wal_info.name)
                    keep = keep or tli in timelines_to_protect
                # Segments not older than the backup are kept
                if backup_info:
                    keep = keep or wal_info.name >= backup_info.begin_wal
                if keep:
                    # Keep: copy the entry into the new xlogdb
                    fxlogdb_new.write(wal_info.to_xlogdb_line())
                else:
                    # Drop: delete the file and record its name
                    self.delete_wal(wal_info)
                    removed.append(wal_info.name)
            # Flush the new xlogdb to disk before the rename
            fxlogdb_new.flush()
            os.fsync(fxlogdb_new.fileno())
        shutil.move(xlogdb_new, fxlogdb.name)
        fsync_dir(os.path.dirname(fxlogdb.name))
    return removed