本文整理汇总了Python中azure.storage.blob.BlobService.get_blob_properties方法的典型用法代码示例。如果您正苦于以下问题:Python BlobService.get_blob_properties方法的具体用法?Python BlobService.get_blob_properties怎么用?Python BlobService.get_blob_properties使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类azure.storage.blob.BlobService
的用法示例。
在下文中一共展示了BlobService.get_blob_properties方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: uri_get_file
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_properties [as 别名]
def uri_get_file(creds, uri, conn=None):
    """Download the full contents of a WABS blob and return them as bytes.

    :param creds: credentials object exposing ``account_name``,
        ``account_key`` and ``access_token`` (SAS token) attributes.
    :param uri: blob URI of the form ``wabs://container/path/to/blob``.
    :param conn: optional pre-built ``BlobService``; when omitted one is
        created from *creds* (passing one in lets tests inject a stub).
    :returns: the blob's contents as a byte string.
    """
    assert uri.startswith('wabs://')
    url_tup = urlparse(uri)

    if conn is None:
        conn = BlobService(creds.account_name, creds.account_key,
                           sas_token=creds.access_token, protocol='https')

    # Determine the size of the target blob so we know when the chunked
    # download below has fetched everything.
    props = conn.get_blob_properties(url_tup.netloc, url_tup.path.lstrip('/'))
    blob_size = int(props['content-length'])

    ret_size = 0
    data = io.BytesIO()
    # WABS requires large files to be downloaded in 4MB chunks
    while ret_size < blob_size:
        ms_range = 'bytes={0}-{1}'.format(ret_size,
                                          ret_size + WABS_CHUNK_SIZE - 1)
        while True:
            # Because we're downloading in chunks, catch rate limiting and
            # connection errors here instead of letting them bubble up to the
            # @retry decorator so that we don't have to start downloading the
            # whole file over again.
            try:
                part = conn.get_blob(url_tup.netloc,
                                     url_tup.path.lstrip('/'),
                                     x_ms_range=ms_range)
            except EnvironmentError as e:
                if e.errno in (errno.EBUSY, errno.ECONNRESET):
                    # BUG FIX: format_exception returns a *list* of lines;
                    # join it so the log detail is readable text rather than
                    # the repr of a list.
                    logger.warning(
                        msg="retrying after encountering exception",
                        detail=("Exception traceback:\n{0}".format(''.join(
                            traceback.format_exception(*sys.exc_info())))),
                        hint="")
                    gevent.sleep(30)
                else:
                    raise
            else:
                break
        length = len(part)
        ret_size += length
        data.write(part)
        if length > 0 and length < WABS_CHUNK_SIZE:
            # Short read: this was the final, partial chunk.
            break
        elif length == 0:
            # Empty read: the server has no more data for us.
            break
    return data.getvalue()
示例2: AzureFS
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_properties [as 别名]
#.........这里部分代码省略.........
args=(self.blobs, cname, files),
name="list-blobs/%s" % cname)
process.daemon = True
process.start()
container['process'] = process
log.info("Started blob list retrieval for '%s': %s",
cname, process)
container['files'] = files
return container
def _get_file(self, path):
d, f = self._parse_path(path)
log.debug("get_file: requested path=%s (d=%s, f=%s)", path, d, f)
directory = self._get_dir(d, True)
files = None
if directory is not None:
files = directory['files']
if f in files:
return files[f]
if not hasattr(self, "_get_file_noent"):
self._get_file_noent = {}
last_check = self._get_file_noent.get(path, 0)
if time.time() - last_check <= 30:
# Negative TTL is 30 seconds (hardcoded for now)
log.info("get_file: cache says to reply negative for %s", path)
return None
# Check if file now exists and our caches are just stale.
try:
c = self._parse_container(d)
p = path[path.find('/', 1) + 1:]
props = self.blobs.get_blob_properties(c, p)
log.info("get_file: found locally unknown remote file %s: %s",
path, repr(props))
node = make_stat(stat.S_IFREG | 0644, props)
if node['st_size'] > 0:
log.info("get_file: properties for %s: %s", path, repr(node))
# Remember this, so we won't have to re-query it.
files[f] = node
if path in self._get_file_noent:
del self._get_file_noent[path]
return node
else:
# TODO: FIXME: HACK: We currently ignore empty files.
# Sometimes the file is not yet here and is still uploading.
# Such files have "content-length: 0". Ignore those for now.
log.warning("get_file: the file %s is not yet here (size=%s)",
path, node['st_size'])
self._get_file_noent[path] = time.time()
return None
except AzureMissingResourceHttpError:
log.info("get_file: remote confirms non-existence of %s", path)
self._get_file_noent[path] = time.time()
return None
except AzureException as e:
log.error("get_file: exception while querying remote for %s: %s",
path, repr(e))
self._get_file_noent[path] = time.time()
return None
def getattr(self, path, fh=None):
示例3: Command
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_properties [as 别名]
class Command(BaseCommand):
    # Django management command that mirrors the local STATIC_ROOT tree
    # into an Azure blob-storage container.
    help = "Synchronizes static media to cloud files."

    option_list = BaseCommand.option_list + (
        optparse.make_option('-w', '--wipe',
            action='store_true', dest='wipe', default=False,
            help="Wipes out entire contents of container first."),
        optparse.make_option('-t', '--test-run',
            action='store_true', dest='test_run', default=False,
            help="Performs a test run of the sync."),
        optparse.make_option('-c', '--container',
            dest='container', help="Override STATIC_CONTAINER."),
    )

    # settings from azurite.settings
    ACCOUNT_NAME = AZURITE['ACCOUNT_NAME']
    ACCOUNT_KEY = AZURITE['ACCOUNT_KEY']
    STATIC_CONTAINER = AZURITE['STATIC_CONTAINER']

    # paths
    DIRECTORY = os.path.abspath(settings.STATIC_ROOT)
    STATIC_URL = settings.STATIC_URL

    # Normalize once at class-creation time: DIRECTORY always ends with '/',
    # STATIC_URL never starts with one, so remote object names concatenate
    # cleanly later on.
    if not DIRECTORY.endswith('/'):
        DIRECTORY = DIRECTORY + '/'
    if STATIC_URL.startswith('/'):
        STATIC_URL = STATIC_URL[1:]

    # Per-run bookkeeping.
    # NOTE(review): these are class attributes, so the list (and counters,
    # until first rebound) are shared across instances — fine for a one-shot
    # management command, but worth confirming if Command is ever reused.
    local_object_names = []
    create_count = 0
    upload_count = 0
    update_count = 0
    skip_count = 0
    delete_count = 0
    service = None  # BlobService, created in sync_files()
def handle(self, *args, **options):
    """Management-command entry point: read options, then run the sync."""
    self.wipe = options.get('wipe')
    self.test_run = options.get('test_run')
    self.verbosity = int(options.get('verbosity'))
    # BUG FIX: *options* is a plain dict, so hasattr(options, 'container')
    # was always False and -c/--container never took effect. Test the key's
    # value instead (also avoids clobbering STATIC_CONTAINER with None when
    # the option was not supplied).
    if options.get('container'):
        self.STATIC_CONTAINER = options.get('container')
    self.sync_files()
def sync_files(self):
self.service = BlobService(account_name=self.ACCOUNT_NAME,
account_key=self.ACCOUNT_KEY)
try:
self.service.get_container_properties(self.STATIC_CONTAINER)
except AzureMissingResourceHttpError:
self.service.create_container(self.STATIC_CONTAINER,
x_ms_blob_public_access='blob')
self.service.set_container_acl(self.STATIC_CONTAINER, x_ms_blob_public_access='blob')
# if -w option is provided, wipe out the contents of the container
if self.wipe:
blob_count = len(self.service.list_blobs(self.STATIC_CONTAINER))
if self.test_run:
print "Wipe would delete %d objects." % blob_count
else:
print "Deleting %d objects..." % blob_count
for blob in self.service.list_blobs(self.STATIC_CONTAINER):
self.service.delete_blob(self.STATIC_CONTAINER, blob.name)
# walk through the directory, creating or updating files on the cloud
os.path.walk(self.DIRECTORY, self.upload_files, "foo")
# remove any files on remote that don't exist locally
self.delete_files()
# print out the final tally to the cmd line
self.update_count = self.upload_count - self.create_count
print
if self.test_run:
print "Test run complete with the following results:"
print "Skipped %d. Created %d. Updated %d. Deleted %d." % (
self.skip_count, self.create_count, self.update_count, self.delete_count)
def upload_files(self, arg, dirname, names):
# upload or skip items
for item in names:
file_path = os.path.join(dirname, item)
if os.path.isdir(file_path):
continue # Don't try to upload directories
object_name = self.STATIC_URL + file_path.split(self.DIRECTORY)[1]
self.local_object_names.append(object_name)
try:
properties = self.service.get_blob_properties(self.STATIC_CONTAINER,
object_name)
except AzureMissingResourceHttpError:
properties = {}
self.create_count += 1
cloud_datetime = None
#.........这里部分代码省略.........