本文整理汇总了Python中twindb_backup.LOG.warning方法的典型用法代码示例。如果您正苦于以下问题:Python LOG.warning方法的具体用法?Python LOG.warning怎么用?Python LOG.warning使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类twindb_backup.LOG
的用法示例。
在下文中一共展示了LOG.warning方法的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: _retention
# 需要导入模块: from twindb_backup import LOG [as 别名]
# 或者: from twindb_backup.LOG import warning [as 别名]
def _retention(self, section):
    """Build a :class:`RetentionPolicy` from a config section.

    For every known backup interval the ``<interval>_copies`` option is
    read from *section*; intervals that are not configured are skipped
    with a warning and fall back to the policy's defaults.

    :param section: Name of the configuration section to read.
    :type section: str
    :return: Retention policy assembled from the configured options.
    :rtype: RetentionPolicy
    """
    policy_kwargs = {}
    for interval in INTERVALS:
        option = '%s_copies' % interval
        try:
            policy_kwargs[interval] = self.__cfg.getint(section, option)
        except (NoOptionError, NoSectionError):
            # Missing option/section is not fatal - just warn and let
            # RetentionPolicy use its default for this interval.
            LOG.warning(
                'Option %s is not defined in section %s',
                option,
                section
            )
    return RetentionPolicy(**policy_kwargs)
示例2: clone
# 需要导入模块: from twindb_backup import LOG [as 别名]
# 或者: from twindb_backup.LOG import warning [as 别名]
def clone(self, dest_host, port, compress=False):
    """
    Send backup to destination host.

    Streams an xtrabackup copy over ncat to ``dest_host:port``,
    retrying with exponential backoff on SSH errors.

    :param dest_host: Destination host
    :type dest_host: str
    :param port: Port for sending the backup
    :type port: int
    :param compress: If True compress stream
    :type compress: bool
    :raise RemoteMySQLSourceError: if all attempts to stream the
        backup fail.
    """
    retry_time = 2
    # xtrabackup's stderr goes to a log file on the source host so
    # failures can be diagnosed after the fact.
    error_log = "/tmp/{src}_{src_port}-{dst}_{dst_port}.log".format(
        src=self._ssh_client.host,
        src_port=self._ssh_client.port,
        dst=dest_host,
        dst_port=port
    )
    if compress:
        compress_cmd = "| gzip -c - "
    else:
        compress_cmd = ""
    cmd = "bash -c \"sudo %s " \
          "--stream=xbstream " \
          "--host=127.0.0.1 " \
          "--backup " \
          "--target-dir ./ 2> %s" \
          " %s | ncat %s %d --send-only\"" \
          % (self._xtrabackup, error_log, compress_cmd, dest_host, port)
    attempts = 2
    for attempt in range(attempts):
        try:
            return self._ssh_client.execute(cmd)
        except SshClientException as err:
            LOG.warning(err)
            # Don't sleep after the last attempt - we are about to
            # give up anyway.
            if attempt + 1 < attempts:
                LOG.info('Will try again in %d seconds', retry_time)
                time.sleep(retry_time)
                retry_time *= 2
    # The original silently returned None here although the docstring
    # promises an exception on failure.
    raise RemoteMySQLSourceError(
        'Failed to stream backup to %s:%d' % (dest_host, port)
    )
示例3: main
# 需要导入模块: from twindb_backup import LOG [as 别名]
# 或者: from twindb_backup.LOG import warning [as 别名]
def main(ctx, debug,  # pylint: disable=too-many-arguments
         config, version,
         xtrabackup_binary, xbstream_binary):
    """
    Main entry point.

    Without a subcommand it either prints the version (``--version``)
    or the help text and exits.  Otherwise it sets up logging and loads
    the tool configuration into the Click context.

    :param ctx: context (See Click docs (http://click.pocoo.org/6/)
        for explanation)
    :param debug: if True enabled debug logging
    :type debug: bool
    :param config: path to configuration file
    :type config: str
    :param version: If True print version string
    :type version: bool
    :param xtrabackup_binary: Path to xtrabackup binary.
    :type xtrabackup_binary: str
    :param xbstream_binary: Path to xbstream binary.
    :type xbstream_binary: str
    """
    if not ctx.invoked_subcommand:
        if version:
            print(__version__)
            exit(0)
        print(ctx.get_help())
        exit(-1)

    setup_logging(LOG, debug=debug)

    if not os.path.exists(config):
        LOG.warning("Config file %s doesn't exist", config)
        return

    ctx.obj = {
        'twindb_config': TwinDBBackupConfig(config_file=config)
    }
    tool_config = ctx.obj['twindb_config']
    # Command-line overrides win over whatever the config file says.
    if xtrabackup_binary is not None:
        tool_config.mysql.xtrabackup_binary = xtrabackup_binary
    if xbstream_binary is not None:
        tool_config.mysql.xbstream_binary = xbstream_binary
示例4: apply_retention_policy
# 需要导入模块: from twindb_backup import LOG [as 别名]
# 或者: from twindb_backup.LOG import warning [as 别名]
def apply_retention_policy(self, dst, config, run_type, status):
    """
    Delete old backup copies.

    :param dst: Destination where the backups are stored.
    :type dst: BaseDestination
    :param config: Tool configuration
    :type config: TwinDBBackupConfig
    :param run_type: Run type.
    :type run_type: str
    :param status: Backups status.
    :type status: Status
    :return: Updated status.
    :rtype: Status
    """
    prefix = osp.join(
        dst.remote_path,
        self.get_prefix(),
        'mysql'
    )
    # How many copies of this run type the retention policy keeps.
    keep_copies = getattr(config.retention, run_type)

    remote_copies = dst.list_files(
        prefix,
        files_only=True
    )
    LOG.debug('Remote copies: %r', remote_copies)

    for obsolete_copy in get_files_to_delete(remote_copies, keep_copies):
        LOG.debug('Deleting remote file %s', obsolete_copy)
        dst.delete(obsolete_copy)
        try:
            status.remove(obsolete_copy)
        except StatusKeyNotFound as err:
            # The copy may not be tracked in the status; deletion
            # already happened, so just warn and move on.
            LOG.warning(err)

    LOG.debug('Status: %r', status)
    self._delete_local_files('mysql', config)
    return status
示例5: _get_file_content
# 需要导入模块: from twindb_backup import LOG [as 别名]
# 或者: from twindb_backup.LOG import warning [as 别名]
def _get_file_content(self, path):
    """Read an object from the S3 bucket, retrying on client errors.

    Retries up to 10 times with exponential backoff (2, 4, ... 512
    seconds between attempts).

    :param path: Key of the object in the bucket.
    :type path: str
    :return: Content of the object.
    :raise OperationError: if the object could not be read after all
        retry attempts.
    """
    attempts = 10
    sleep_time = 2
    for attempt in range(attempts):
        try:
            response = self.s3_client.get_object(
                Bucket=self._bucket,
                Key=path
            )
            self.validate_client_response(response)
            return response['Body'].read()
        except ClientError as err:
            LOG.warning('Failed to read s3://%s/%s', self._bucket, path)
            LOG.warning(err)
            # Don't sleep after the final attempt: the original code
            # slept up to 1024 seconds before raising the error.
            if attempt + 1 < attempts:
                LOG.info('Will try again in %d seconds', sleep_time)
                time.sleep(sleep_time)
                sleep_time *= 2
    msg = 'Failed to read s3://%s/%s after %d attempts' \
          % (self._bucket, path, attempts)
    raise OperationError(msg)
示例6: _mysql_service
# 需要导入模块: from twindb_backup import LOG [as 别名]
# 或者: from twindb_backup.LOG import warning [as 别名]
def _mysql_service(dst, action):
    """Start or stop MySQL service.

    First tries the init scripts ``mysqld`` and ``mysql``; if neither
    works, falls back to launching or killing mysqld directly.

    :param dst: Destination server
    :type dst: Ssh
    :param action: string start or stop
    :type action: str
    :raise OperationError: if the direct fallback fails as well.
    """
    for service_name in ('mysqld', 'mysql'):
        try:
            return dst.execute_command(
                "PATH=$PATH:/sbin sudo service %s %s"
                % (service_name, action),
                quiet=True
            )
        except SshClientException as err:
            # Try the next service name before giving up on init
            # scripts entirely.
            LOG.debug(err)

    try:
        LOG.warning('Failed to %s MySQL with an init script. '
                    'Will try to %s mysqld.', action, action)
        if action == "start":
            result = dst.execute_command(
                "PATH=$PATH:/sbin sudo bash -c 'nohup mysqld &'",
                background=True
            )
            # Give mysqld a moment to come up before returning.
            time.sleep(10)
            return result
        if action == "stop":
            return dst.execute_command(
                "PATH=$PATH:/sbin sudo kill $(pidof mysqld)"
            )
    except SshClientException as err:
        LOG.error(err)
        raise OperationError(
            'Failed to %s MySQL on %r'
            % (action, dst)
        )
示例7: _download_object
# 需要导入模块: from twindb_backup import LOG [as 别名]
# 或者: from twindb_backup.LOG import warning [as 别名]
def _download_object(s3_client, bucket_name, key, read_fd, write_fd):
    """Download an S3 object and stream its content into a pipe.

    Intended to run in a child process: writes the object's bytes to
    ``write_fd`` and returns once the download succeeds.  Client errors
    are retried up to 10 times with exponential backoff; pipe I/O
    errors terminate the process with exit code 1.

    :param s3_client: Boto3 S3 client.
    :param bucket_name: Bucket the object lives in.
    :type bucket_name: str
    :param key: Key of the object to download.
    :type key: str
    :param read_fd: Read end of the pipe; closed immediately.
    :type read_fd: int
    :param write_fd: Write end of the pipe the content is written to.
    :type write_fd: int
    """
    # The read end of the pipe must be closed in the child process
    # before we start writing to it.
    os.close(read_fd)
    with os.fdopen(write_fd, 'wb') as w_pipe:
        try:
            retry_interval = 2
            # range(), not xrange(): xrange is Python 2 only and raises
            # NameError on Python 3.
            for _ in range(10):
                try:
                    s3_client.download_fileobj(bucket_name,
                                               key,
                                               w_pipe)
                    return
                except ClientError as err:
                    LOG.warning(err)
                    LOG.warning('Will retry in %d seconds',
                                retry_interval)
                    time.sleep(retry_interval)
                    retry_interval *= 2
        except IOError as err:
            LOG.error(err)
            exit(1)
示例8: list_files
# 需要导入模块: from twindb_backup import LOG [as 别名]
# 或者: from twindb_backup.LOG import warning [as 别名]
def list_files(self, prefix=None, recursive=False, pattern=None,
               files_only=False):
    """
    List files in the destination that have common prefix.

    :param prefix: Common prefix. May include the bucket name
        (e.g. ``s3://my_bucket/foo/``) or simply a prefix in the bucket
        (e.g. ``foo/``). If ``None``, all objects are listed.
    :type prefix: str
    :param recursive: Does nothing for this class.
    :param pattern: files must match with this regexp if specified.
    :type pattern: str
    :param files_only: Does nothing for this class.
    :return: Sorted list of full S3 urls in form
        ``s3://bucket/path/to/file``.
    :rtype: list(str)
    :raise S3DestinationError: if failed to list files.
    """
    s3resource = boto3.resource('s3')
    bucket = s3resource.Bucket(self._bucket)
    LOG.debug('Listing bucket %s', self._bucket)
    LOG.debug('prefix = %s', prefix)
    # prefix may be None (the default) - treat it as "everything".
    norm_prefix = (prefix or '').replace('s3://%s' % bucket.name, '')
    norm_prefix = norm_prefix.lstrip('/')
    LOG.debug('normal prefix = %s', norm_prefix)

    # Try to list the bucket several times
    # because of intermittent error NoSuchBucket:
    # https://travis-ci.org/twindb/backup/jobs/204053690
    expire = time.time() + S3_READ_TIMEOUT
    retry_interval = 2
    while time.time() < expire:
        try:
            files = []
            all_objects = bucket.objects.filter(Prefix=norm_prefix)
            for file_object in all_objects:
                # Filter first, then build the URL once - the original
                # duplicated the URL formatting in both branches.
                if pattern and not re.search(pattern, file_object.key):
                    continue
                files.append(
                    's3://{bucket}/{key}'.format(
                        bucket=self._bucket,
                        key=file_object.key
                    )
                )
            return sorted(files)
        except ClientError as err:
            LOG.warning(
                '%s. Will retry in %d seconds.',
                err,
                retry_interval
            )
            time.sleep(retry_interval)
            retry_interval *= 2
    raise S3DestinationError('Failed to list files.')