本文整理汇总了Python中wal_e.pipeline.get_download_pipeline函数的典型用法代码示例。如果您正苦于以下问题:Python get_download_pipeline函数的具体用法?Python get_download_pipeline怎么用?Python get_download_pipeline使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了get_download_pipeline函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_upload_download_pipeline
def test_upload_download_pipeline(tmpdir, rate_limit):
    """Round-trip a bogus payload through the upload and download pipelines.

    The payload is compressed via the upload pipeline into a scratch
    file, then decompressed via the download pipeline; the decompressed
    result must equal the original payload.

    Fixed: replaced Python 2-only ``unicode()`` with ``str()`` and use
    binary file modes, matching the Python 3-compatible sibling example.
    """
    payload, payload_file = create_bogus_payload(tmpdir)

    # Upload section: compress the payload into 'upload'.
    test_upload = tmpdir.join('upload')
    with open(str(test_upload), 'wb') as upload:
        with open(str(payload_file), 'rb') as inp:
            pl = pipeline.get_upload_pipeline(
                inp, upload, rate_limit=rate_limit)
            pl.finish()

    with open(str(test_upload), 'rb') as completed:
        round_trip = completed.read()

    # Download section: decompress 'upload' back into 'download'.
    test_download = tmpdir.join('download')
    with open(str(test_upload), 'rb') as upload:
        with open(str(test_download), 'wb') as download:
            pl = pipeline.get_download_pipeline(upload, download)
            pl.finish()

    with open(str(test_download), 'rb') as completed:
        round_trip = completed.read()

    assert round_trip == payload
示例2: download
def download():
    """Fetch the S3 object at *url* and decompress it to *path*.

    Returns False when the key is missing (HTTP 404) so callers can
    treat the absence as non-fatal; other S3 errors propagate.

    Fixed: Python 2-only ``except X, e`` syntax replaced with
    ``except X as e`` (valid on Python 2.6+ and Python 3).
    """
    with open(path, 'wb') as decomp_out:
        key = _uri_to_key(access_key, secret_key, url)
        pipeline = get_download_pipeline(PIPE, decomp_out, decrypt)
        g = gevent.spawn(write_and_return_error, key, pipeline.stdin)

        try:
            # Raise any exceptions from write_and_return_error
            exc = g.get()
            if exc is not None:
                raise exc
        except boto.exception.S3ResponseError as e:
            if e.status == 404:
                # Do not retry if the key not present, this can happen
                # under normal situations.
                logger.warning(
                    msg=('could no longer locate object while performing '
                         'wal restore'),
                    detail=('The absolute URI that could not be located '
                            'is {url}.'.format(url=url)),
                    hint=('This can be normal when Postgres is trying to '
                          'detect what timelines are available during '
                          'restoration.'))
                return False
            else:
                raise

        pipeline.finish()

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{url}" to "{path}"'
            .format(url=url, path=path))
示例3: test_upload_download_pipeline
def test_upload_download_pipeline(tmpdir, rate_limit):
    """Compress a bogus payload and decompress it again.

    The upload and download pipelines must compose to the identity
    transformation on the payload bytes.
    """
    payload, payload_file = create_bogus_payload(tmpdir)

    # Upload section
    test_upload = tmpdir.join('upload')
    with open(str(test_upload), 'wb') as sink:
        with open(str(payload_file), 'rb') as source:
            with pipeline.get_upload_pipeline(
                    source, sink, rate_limit=rate_limit):
                pass

    with open(str(test_upload), 'rb') as done:
        round_trip = done.read()

    # Download section
    test_download = tmpdir.join('download')
    with open(str(test_upload), 'rb') as source:
        with open(str(test_download), 'wb') as sink:
            with pipeline.get_download_pipeline(source, sink):
                pass

    with open(str(test_download), 'rb') as done:
        round_trip = done.read()

    assert round_trip == payload
示例4: download
def download():
    """Download the S3 object at *url*, decompressing onto *path*.

    A missing key (HTTP 404) aborts the pipeline, discards the partial
    file, and returns False; every other S3 error propagates.  Returns
    True on success.
    """
    with files.DeleteOnError(path) as decomp_out:
        key = _uri_to_key(creds, url)
        with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
            writer = gevent.spawn(write_and_return_error, key, pl.stdin)

            try:
                # Surface any exception captured by write_and_return_error.
                writer_exc = writer.get()
                if writer_exc is not None:
                    raise writer_exc
            except boto.exception.S3ResponseError as err:
                if err.status != 404:
                    raise

                # Do not retry if the key not present, this
                # can happen under normal situations.
                pl.abort()
                logger.info(
                    msg=('could no longer locate object while '
                         'performing wal restore'),
                    detail=('The absolute URI that could not be '
                            'located is {url}.'.format(url=url)),
                    hint=('This can be normal when Postgres is trying '
                          'to detect what timelines are available '
                          'during restoration.'))
                decomp_out.remove_regardless = True
                return False

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{url}" to "{path}"'
            .format(url=url, path=path))

    return True
示例5: download
def download():
    """Download the WABS object at *url* into *path*, decompressing it.

    Returns False when the remote object no longer exists so the caller
    can short-circuit retries; True on success.

    Fixed: deprecated ``logger.warn`` alias replaced with
    ``logger.warning``; the greenlet's return value is now checked —
    ``write_and_return_error`` *returns* errors rather than raising
    them (as in the sibling back-ends), so discarding ``g.get()``
    silently swallowed failures.  Stale comment referring to
    ``_write_and_close`` corrected.
    """
    with open(path, 'wb') as decomp_out:
        pipeline = get_download_pipeline(PIPE, decomp_out, decrypt)
        g = gevent.spawn(write_and_return_error, url, conn, pipeline.stdin)

        try:
            # Raise any exceptions captured by write_and_return_error.
            exc = g.get()
            if exc is not None:
                raise exc
        except WindowsAzureMissingResourceError:
            # Short circuit any re-try attempts under certain race
            # conditions.
            logger.warning(
                msg=('could no longer locate object while performing '
                     'wal restore'),
                detail=('The URI at {url} no longer exists.'
                        .format(url=url)),
                hint=('This can be normal when Postgres is trying to '
                      'detect what timelines are available during '
                      'restoration.'))
            return False

        pipeline.finish()

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{url}" to "{path}"'
            .format(url=url, path=path))

    return True
示例6: download
def download():
    """Stream the Swift object at *uri* through the decompression
    pipeline into *path*.

    A 404 from Swift aborts the pipeline and returns False; any other
    client error propagates.  Returns True on success.
    """
    with open(path, 'wb') as decomp_out:
        with get_download_pipeline(PIPE, decomp_out, decrypt) as pl:
            conn = calling_format.connect(creds)
            writer = gevent.spawn(write_and_return_error, uri, conn,
                                  pl.stdin)

            # Raise any exceptions from write_and_return_error
            try:
                writer_exc = writer.get()
                if writer_exc is not None:
                    raise writer_exc
            except ClientException as e:
                if e.http_status != 404:
                    raise

                # Do not retry if the key not present, this
                # can happen under normal situations.
                pl.abort()
                logger.warning(
                    msg=('could no longer locate object while '
                         'performing wal restore'),
                    detail=('The absolute URI that could not be '
                            'located is {uri}.'.format(uri=uri)),
                    hint=('This can be normal when Postgres is trying '
                          'to detect what timelines are available '
                          'during restoration.'))
                return False

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{uri}" to "{path}"'
            .format(uri=uri, path=path))

    return True
示例7: download
def download():
    """Download the Azure blob at *url* and decompress it to *path*.

    A missing blob aborts the pipeline, discards the partial file, and
    returns False; True is returned on success.
    """
    with files.DeleteOnError(path) as decomp_out:
        with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
            writer = gevent.spawn(write_and_return_error, url, conn,
                                  pl.stdin)

            try:
                # Raise any exceptions guarded by
                # write_and_return_error.
                writer_exc = writer.get()
                if writer_exc is not None:
                    raise writer_exc
            except AzureMissingResourceHttpError:
                # Short circuit any re-try attempts under certain race
                # conditions.
                pl.abort()
                logger.warning(
                    msg=('could no longer locate object while '
                         'performing wal restore'),
                    detail=('The absolute URI that could not be '
                            'located is {url}.'.format(url=url)),
                    hint=('This can be normal when Postgres is trying '
                          'to detect what timelines are available '
                          'during restoration.'))
                decomp_out.remove_regardless = True
                return False

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{url}" to "{path}"'
            .format(url=url, path=path))

    return True
示例8: download
def download():
    """Download the S3 object at *url* to *path*, handling expected
    failure modes.

    Returns False for conditions that should not be retried (missing
    key, expired STS token); other S3 errors are logged and re-raised.

    Fixed: Python 2-only ``except X, e`` syntax replaced with
    ``except X as e``; ``e.value.error_code`` corrected to
    ``e.error_code`` — the same handler already reads ``e.error_code``
    directly in its final branch, and ``S3ResponseError`` carries the
    code on the exception itself.
    """
    with files.DeleteOnError(path) as decomp_out:
        key = _uri_to_key(creds, url)
        with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
            g = gevent.spawn(write_and_return_error, key, pl.stdin)

            try:
                # Raise any exceptions from write_and_return_error
                exc = g.get()
                if exc is not None:
                    raise exc
            except boto.exception.S3ResponseError as e:
                if e.status == 404:
                    # Do not retry if the key not present, this
                    # can happen under normal situations.
                    pl.abort()
                    logger.warning(
                        msg=('could no longer locate object while '
                             'performing wal restore'),
                        detail=('The absolute URI that could not be '
                                'located is {url}.'.format(url=url)),
                        hint=('This can be normal when Postgres is trying '
                              'to detect what timelines are available '
                              'during restoration.'))
                    decomp_out.remove_regardless = True
                    return False
                elif e.error_code == 'ExpiredToken':
                    # Do not retry if STS token has expired. It can never
                    # succeed in the future anyway.
                    pl.abort()
                    logger.info(
                        msg=('could no longer authenticate while '
                             'performing wal restore'),
                        detail=('The absolute URI that could not be '
                                'accessed is {url}.'.format(url=url)),
                        hint=('This can be normal when using STS '
                              'credentials.'))
                    decomp_out.remove_regardless = True
                    return False
                else:
                    logger.warning(msg='S3 response error',
                                   detail='The error is: {0}, {1}'
                                   .format(e.error_code, e.error_message))
                    raise

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{url}" to "{path}"'
            .format(url=url, path=path))
示例9: fetch_partition
def fetch_partition(self, partition_name):
    """Download one tar partition of the base backup and extract it
    under ``self.local_root``."""
    part_abs_name = self.layout.basebackup_tar_partition(
        self.backup_info, partition_name)

    logger.info(
        msg='beginning partition download',
        detail='The partition being downloaded is {0}.'
        .format(partition_name),
        hint='The absolute file key is {0}.'.format(part_abs_name))

    key = self.bucket.get_key(part_abs_name)

    with get_download_pipeline(PIPE, PIPE, self.decrypt) as pl:
        writer = gevent.spawn(file.write_and_return_error, key, pl.stdin)
        TarPartition.tarfile_extract(pl.stdout, self.local_root)

        # Raise any exceptions guarded by write_and_return_error.
        writer_exc = writer.get()
        if writer_exc is not None:
            raise writer_exc
示例10: download
def download():
    """Download the Swift object at *uri*, decompressing into *path*.

    Returns True once the download and decompression finish; errors
    returned by the writer greenlet are re-raised.
    """
    with open(path, 'wb') as decomp_out:
        dl_pipeline = get_download_pipeline(PIPE, decomp_out, decrypt)
        conn = calling_format.connect(creds)
        writer = gevent.spawn(write_and_return_error, uri, conn,
                              dl_pipeline.stdin)

        # Raise any exceptions from write_and_return_error
        writer_exc = writer.get()
        if writer_exc is not None:
            raise writer_exc

        dl_pipeline.finish()

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{uri}" to "{path}"'
            .format(uri=uri, path=path))

    return True
示例11: fetch_partition
def fetch_partition(self, partition_name):
    """Fetch one tar partition from WABS and untar it under
    ``self.local_root``.

    Parameters:
        partition_name: name of the tar partition within the base
            backup being restored.

    Fixed: the log hint said "absolute S3 key", but this back-end
    stores to WABS (``wabs://`` URL, ``wabs.write_and_return_error``);
    stale comment referring to ``self._write_and_close`` corrected.
    """
    part_abs_name = self.layout.basebackup_tar_partition(
        self.backup_info, partition_name)

    logger.info(
        msg='beginning partition download',
        detail=('The partition being downloaded is {0}.'
                .format(partition_name)),
        hint='The absolute WABS object name is {0}.'.format(part_abs_name))

    url = 'wabs://{ctr}/{path}'.format(ctr=self.layout.store_name(),
                                       path=part_abs_name)

    with get_download_pipeline(PIPE, PIPE, self.decrypt) as pl:
        g = gevent.spawn(wabs.write_and_return_error,
                         url, self.wabs_conn, pl.stdin)
        TarPartition.tarfile_extract(pl.stdout, self.local_root)

        # Raise any exceptions returned by write_and_return_error.
        exc = g.get()
        if exc is not None:
            raise exc
示例12: download
def download():
    """Fetch *s3_url* via boto's storage-URI API and decompress it to
    *path*.

    Returns False when the URI is invalid / not found, True on success.
    """
    with open(path, "wb") as decomp_out:
        suri = boto.storage_uri(s3_url, validate=False)
        try:
            key = suri.get_key()
        except boto.exception.InvalidUriError:
            logger.warning(msg="file not found: %s" % suri)
            return False

        dl_pipeline = get_download_pipeline(PIPE, decomp_out, decrypt)
        writer = gevent.spawn(write_and_close_thread, key,
                              dl_pipeline.stdin)

        # Raise any exceptions from _write_and_close
        writer.get()

        dl_pipeline.finish()

        logger.info(
            msg="completed download and decompression",
            detail='Downloaded and decompressed "{s3_url}" to "{path}"'.format(s3_url=s3_url, path=path),
        )

    return True
示例13: fetch_partition
def fetch_partition(self, partition_name):
    """Download one Swift-stored tar partition and extract it into
    ``self.local_root``."""
    part_abs_name = self.layout.basebackup_tar_partition(
        self.backup_info, partition_name)

    logger.info(
        msg='beginning partition download',
        detail=('The partition being downloaded is {0}.'
                .format(partition_name)),
        hint='The absolute Swift object name is {0}.'
        .format(part_abs_name))

    url = 'swift://{ctr}/{path}'.format(ctr=self.layout.store_name(),
                                        path=part_abs_name)

    dl_pipeline = get_download_pipeline(PIPE, PIPE, self.decrypt)
    writer = gevent.spawn(swift.write_and_return_error,
                          url, self.swift_conn, dl_pipeline.stdin)
    TarPartition.tarfile_extract(dl_pipeline.stdout, self.local_root)

    # Raise any exceptions guarded by write_and_return_error.
    writer_exc = writer.get()
    if writer_exc is not None:
        raise writer_exc

    dl_pipeline.finish()
示例14: download
def download():
    """Fetch the GCS blob behind *url* through a signed URL and
    decompress it to *path*.

    Returns False if the blob has vanished (HTTP 404); True otherwise.
    """
    with files.DeleteOnError(path) as decomp_out:
        blob = _uri_to_blob(creds, url)

        with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
            # Sign a short-lived URL so the writer greenlet can fetch
            # the blob over HTTP.
            signed = blob.generate_signed_url(
                datetime.utcnow() + timedelta(minutes=10))
            writer = gevent.spawn(write_and_return_error, signed, pl.stdin)

            try:
                # Raise any exceptions from write_and_return_error
                writer_exc = writer.get()
                if writer_exc is not None:
                    raise writer_exc
            except urllib2.HTTPError as e:
                if e.code != 404:
                    raise

                # Do not retry if the blob not present, this
                # can happen under normal situations.
                pl.abort()
                logger.warning(
                    msg=('could no longer locate object while '
                         'performing wal restore'),
                    detail=('The absolute URI that could not be '
                            'located is {url}.'.format(url=url)),
                    hint=('This can be normal when Postgres is trying '
                          'to detect what timelines are available '
                          'during restoration.'))
                decomp_out.remove_regardless = True
                return False

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{url}" to "{path}"'
            .format(url=url, path=path))

    return True
示例15: fetch_manifest
def fetch_manifest(self, partition_name, wale_info_dir):
    """Download a backup manifest and extract it.

    The manifest is fetched without lzop decompression (``lzop=False``),
    optionally decrypted, and handed to
    ``TarPartition.manifest_extract``.
    """
    part_abs_name = self.layout.basebackup_manifest(
        self.backup_info, partition_name)

    logger.info(
        msg='beginning manifest download',
        detail='The manifest being downloaded is {0}.'
        .format(partition_name),
        hint='The absolute S3 key is {0}.'.format(part_abs_name))

    key = self.bucket.get_key(part_abs_name)

    with get_download_pipeline(PIPE, PIPE,
                               gpg=self.decrypt,
                               lzop=False) as pl:
        writer = gevent.spawn(s3.write_and_return_error, key, pl.stdin)
        TarPartition.manifest_extract(pl.stdout,
                                      self.local_root,
                                      partition_name,
                                      wale_info_dir)

        # Raise any exceptions guarded by write_and_return_error.
        writer_exc = writer.get()
        if writer_exc is not None:
            raise writer_exc