本文整理汇总了Python中azure.storage.blob.BlobService.list_blobs方法的典型用法代码示例。如果您正苦于以下问题:Python BlobService.list_blobs方法的具体用法?Python BlobService.list_blobs怎么用?Python BlobService.list_blobs使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类azure.storage.blob.BlobService
的用法示例。
在下文中一共展示了BlobService.list_blobs方法的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: submit
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import list_blobs [as 别名]
def submit():
    """Submit a SLURM array job that processes every blob in the input container.

    Generates 24-hour SAS signatures for the input (read) and output
    (read/write/list) containers, publishes the job configuration through
    environment variables, and launches one SLURM array task per blob.

    Relies on module-level configuration: ACCOUNT_NAME, ACCOUNT_KEY,
    INPUT_CONTAINER, OUTPUT_CONTAINER, the SLURMDEMO_* env-var name
    constants, and the sasUrl()/run() helpers.
    """
    blob_service = BlobService(account_name=ACCOUNT_NAME, account_key=ACCOUNT_KEY)
    # Get a SAS signature (read for 24 hours) for the input container.
    inputsig = sasUrl(account=ACCOUNT_NAME, key=ACCOUNT_KEY,
                      container=INPUT_CONTAINER, permission='r')
    # Get a SAS signature (write for 24 hours) for the output container.
    outputsig = sasUrl(account=ACCOUNT_NAME, key=ACCOUNT_KEY,
                       container=OUTPUT_CONTAINER, permission='rwl')
    # Collect the name of every blob in the input container.
    bloblist = [blob.name for blob in blob_service.list_blobs(INPUT_CONTAINER)]
    if not bloblist:
        # Nothing to process; an empty array job would be rejected by sbatch.
        return
    os.environ[SLURMDEMO_INPUTSIG] = inputsig
    os.environ[SLURMDEMO_OUTPUTSIG] = outputsig
    os.environ[SLURMDEMO_BLOBLIST] = json.dumps(bloblist)
    os.environ[SLURMDEMO_INPUTCONTAINER] = INPUT_CONTAINER
    os.environ[SLURMDEMO_OUTPUTCONTAINER] = OUTPUT_CONTAINER
    os.environ[SLURMDEMO_INPUTACCOUNT] = ACCOUNT_NAME
    os.environ[SLURMDEMO_OUTPUTACCOUNT] = ACCOUNT_NAME
    # Call sbatch. SLURM --array bounds are inclusive, so N blobs map to
    # task ids 0..N-1 (the original 0-{len} launched one task too many).
    cli = "sbatch -N 2 -n 2 --array=0-{nb} slurmdemo.sh".format(nb=len(bloblist) - 1)
    run(cli, showoutput=True)
示例2: enumerate_objects
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import list_blobs [as 别名]
def enumerate_objects(container):
    """Return a list of the names of every blob in *container*.

    Uses the module-level AZURE_ACCOUNT_NAME / AZURE_ACCOUNT_KEY credentials.
    """
    service = BlobService(AZURE_ACCOUNT_NAME, AZURE_ACCOUNT_KEY)
    return [entry.name for entry in service.list_blobs(container)]
示例3: upload_azure_blob
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import list_blobs [as 别名]
def upload_azure_blob(account, account_key, container, filename, file, file_type='file/csv'):
    """Upload the stream *file* to *container* as blob *filename*, then print
    every blob name in the container as a quick sanity check.

    NOTE(review): *file_type* is accepted for interface compatibility but is
    never applied to the uploaded blob — confirm whether a content type
    should be set on upload.
    """
    service = BlobService(account_name=account, account_key=account_key)
    service.create_blob_from_stream(container, filename, file)
    for entry in service.list_blobs(container):
        print(entry.name)
示例4: download_data
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import list_blobs [as 别名]
def download_data(key):
    """Download every blob in the 'recruitingdata' container to the current
    directory, flattening blob paths ('/' -> '_') and appending '.csv'.

    *key* is the storage-account access key.
    """
    service = BlobService(account_name='asosdsrecruiting', account_key=key)

    # Page through the full listing, following continuation markers.
    all_blobs = []
    marker = None
    while True:
        page = service.list_blobs('recruitingdata', marker=marker)
        all_blobs.extend(page)
        marker = page.next_marker
        if not marker:
            break

    for blob in all_blobs:
        file_name = blob.name
        print('Downloading: ' + file_name)
        service.get_blob_to_path('recruitingdata', file_name,
                                 file_name.replace('/', '_') + '.csv')
示例5: _BlobStorageTestCase
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import list_blobs [as 别名]
class _BlobStorageTestCase(_TestCase):
    """Base test case that clears leftover log blobs before each test."""

    def _get_container_name(self, handler_name):
        """Return the handler's configured container name, normalized for
        Azure (underscores replaced with hyphens, lowercased)."""
        name = _get_handler_config_value(handler_name, 'container')
        return name.replace('_', '-').lower() if name else name

    def setUp(self):
        self.service = BlobService(ACCOUNT_NAME, ACCOUNT_KEY)
        # Ensure that there's no log file in the container before each test.
        existing = set(c.name for c in self.service.list_containers())
        for handler in LOGGING['handlers']:
            container = self._get_container_name(handler)
            if container not in existing:
                continue
            filename = _get_handler_config_value(handler, 'filename')
            basename = os.path.basename(filename)
            for blob in self.service.list_blobs(container, prefix=basename):
                self.service.delete_blob(container, blob.name)
示例6: module_impl
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import list_blobs [as 别名]
#.........这里部分代码省略.........
x_ms_blob_cache_control,
x_ms_blob_content_encoding,
x_ms_blob_content_language,
x_ms_blob_content_type
)
results['conainer'] = get_container_facts(bs, container_name)
results['blob'] = get_blob_facts(bs, container_name, blob_name)
results['changed'] = True
results['msg'] = 'Successfully created container and uploaded file.'
return results
if container is not None:
# container exists. just upload.
if not check_mode:
log('Uploading %s to container %s.' % (file_path, container_name))
put_block_blob(
bs,
container_name,
blob_name,
file_path,
x_ms_meta_name_values,
x_ms_blob_cache_control,
x_ms_blob_content_encoding,
x_ms_blob_content_language,
x_ms_blob_content_type
)
results['blob'] = get_blob_facts(bs, container_name, blob_name)
results['changed'] = True
results['msg'] = 'Successfully updloaded file.'
return results
if mode == 'list':
container = container_check(bs, container_name)
response = bs.list_blobs(
container_name,
prefix,
marker,
max_results
)
results['blobs'] = []
for blob in response.blobs:
b = dict(
name = blob.name,
snapshot = blob.snapshot,
last_modified = blob.properties.last_modified,
content_length = blob.properties.content_length,
blob_type = blob.properties.blob_type,
)
results['blobs'].append(b)
return results
if mode == 'get':
if file_path is None:
raise Exception("Parameter error: file_path cannot be None.")
container = container_check(bs, container_name)
blob = blob_check(bs, container_name, blob_name)
path_exists = path_check(file_path)
if not path_exists or overwrite == 'always':
if not check_mode:
bs.get_blob_to_path(container_name, blob_name, file_path)
results['changed'] = True
results['msg'] = "Blob %s successfully downloaded to %s." % (blob_name, file_path)
return results
示例7: BlobService
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import list_blobs [as 别名]
# Walk-through of the basic BlobService operations: create a public
# container, upload a file as a block blob, page through the listing,
# download the blob, and finally delete it.
blob_service = BlobService(account_name="<account_name>", account_key="<account_key>")
blob_service.create_container("datacontainer")
# Re-create with container-level public read access, then set the ACL too.
blob_service.create_container("datacontainer", x_ms_blob_public_access="container")
blob_service.set_container_acl("datacontainer", x_ms_blob_public_access="container")
blob_service.put_block_blob_from_path(
    "datacontainer", "datablob", "StorageClientPy.py", x_ms_blob_content_type="text/x-script.phyton"
)

# Collect every blob name, following continuation markers page by page.
blobs = []
marker = None
while True:
    batch = blob_service.list_blobs("datacontainer", marker=marker)
    blobs.extend(batch)
    marker = batch.next_marker
    if not marker:
        break
for blob in blobs:
    print(blob.name)

blob_service.get_blob_to_path("datacontainer", "datablob", "out-StorageClientPy.py")
blob_service.delete_blob("datacontainer", "datablob")
示例8: AzureIOStore
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import list_blobs [as 别名]
#.........这里部分代码省略.........
Returns the names of files and fake directories in the given input fake
directory, non-recursively.
If with_times is specified, will yield (name, time) pairs including
modification times as datetime objects. Times on directories are None.
"""
self.__connect()
RealTimeLogger.get().info("Enumerating {} from AzureIOStore".format(
input_path))
# Work out what the directory name to list is
fake_directory = self.name_prefix + input_path
if fake_directory != "" and not fake_directory.endswith("/"):
# We have a nonempty prefix, and we need to end it with a slash
fake_directory += "/"
# This will hold the marker that we need to send back to get the next
# page, if there is one. See <http://stackoverflow.com/a/24303682>
marker = None
# This holds the subdirectories we found; we yield each exactly once if
# we aren't recursing.
subdirectories = set()
while True:
# Get the results from Azure. We don't use delimiter since Azure
# doesn't seem to provide the placeholder entries it's supposed to.
result = self.connection.list_blobs(self.container_name,
marker=marker)
RealTimeLogger.get().info("Found {} files".format(len(result)))
for blob in result:
# Yield each result's blob name, but directory names only once
# Drop the common prefix
relative_path = blob.name
if (not recursive) and "/" in relative_path:
# We found a file in a subdirectory, and we aren't supposed
# to be recursing.
subdirectory, _ = relative_path.split("/", 1)
if subdirectory not in subdirectories:
# It's a new subdirectory. Yield and remember it
subdirectories.add(subdirectory)
if with_times:
yield subdirectory, None
else:
yield subdirectory
else:
# We found an actual file
if with_times:
mtime = dateutil.parser.parse(
blob.properties.last_modified).replace(
tzinfo=dateutil.tz.tzutc())
yield relative_path, mtime
else:
示例9: putblob
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import list_blobs [as 别名]
# Make the container publicly readable, upload quotes.pkl, then list blobs.
blob_service.create_container(CONTAINER, x_ms_blob_public_access='container')


def putblob(fileid, filename):
    """Upload *filename* as block blob *fileid* (content type guessed from
    the file name) and return the blob's public URL."""
    global ACCOUNT
    blob_service.put_block_blob_from_path(
        CONTAINER,
        fileid,
        filename,
        x_ms_blob_content_type=guess_type(filename)
    )
    return 'https://%s.blob.core.windows.net/%s/%s' % (ACCOUNT, CONTAINER, fileid)


putblob('quotes.pkl', 'quotes.pkl')

# Page through the container listing via continuation markers.
blobs = []
marker = None
while True:
    page = blob_service.list_blobs(CONTAINER, marker=marker)
    blobs.extend(page)
    marker = page.next_marker
    if not marker:
        break
for blob in blobs:
    print(blob.name)
# blob_service.delete_blob(CONTAINER, 'quotes.pkl')
示例10: AzureFS
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import list_blobs [as 别名]
class AzureFS(LoggingMixIn, Operations):
    """Azure Blob Storage filesystem"""
    # BlobService client; assigned in __init__.
    blobs = None
    # Cache of directory entries, keyed by '/<container-name>' (and '/'):
    # each value is dict(stat=<stat dict>, files=None | dict<fname, stat>).
    # files=None means the container's contents have not been fetched yet.
    containers = dict()
    # Open file-descriptor table: fd -> (path, bytes, dirty).
    fds = dict()
    # Counter used to hand out file descriptors.
    fd = 0
def __init__(self, account, key):
    """Connect to the storage *account* with *key* and prime the
    container-level directory cache."""
    self.blobs = BlobService(account, key)
    self.rebuild_container_list()
def convert_to_epoch(self, date):
    """Convert an HTTP date string such as 'Tue, 31 Jul 2012 07:17:34 GMT'
    (per the module-level TIME_FORMAT) to epoch seconds as an int."""
    parsed = time.strptime(date, TIME_FORMAT)
    return int(time.mktime(parsed))
def rebuild_container_list(self):
    """Rebuild self.containers from the account's container listing.

    Each container becomes a '/<name>' entry with a directory stat and
    files=None (contents fetched lazily by _get_dir); a synthetic root
    entry '/' is added last. Replacing the whole cache drops any stale
    per-container file listings.
    """
    cmap = dict()
    # NOTE(review): cnames is populated but never read — appears to be a
    # leftover; confirm before relying on it.
    cnames = set()
    for c in self.blobs.list_containers():
        date = c.properties.last_modified
        # Containers are presented as world-listable directories owned by
        # the current user.
        cstat = dict(st_mode=(S_IFDIR | 0755), st_uid=getuid(), st_size=0,
                     st_mtime=self.convert_to_epoch(date))
        cname = c.name
        cmap['/' + cname] = dict(stat=cstat, files=None)
        cnames.add(cname)
    # Synthetic root directory; its file map is eagerly empty.
    cmap['/'] = dict(files={},
                     stat=dict(st_mode=(S_IFDIR | 0755),
                               st_uid=getuid(), st_size=0,
                               st_mtime=int(time.time())))
    self.containers = cmap  # destroys fs tree cache resistant to misses
def _parse_path(self, path):
    """Split *path* into (directory, filename).

    Paths with two or more slashes name a file inside a container and
    return both parts; otherwise the path is a directory and filename
    is None.
    """
    if path.count('/') > 1:
        # File path: cut at the last slash.
        cut = path.rfind('/')
        return str(path[:cut]), str(path[cut + 1:])
    # Directory path: look for a slash past the leading one.
    pos = path.rfind('/', 1)
    if pos == -1:
        return path, None
    return str(path[:pos]), None
def parse_container(self, path):
    """Return the container component of *path*: '/abc/def/g' -> 'abc'."""
    name = path[1:]  # drop the leading '/'
    slash = name.find('/')
    if slash > -1:
        name = name[:slash]
    return str(name)
def _get_dir(self, path, contents_required=False):
    """Return the cached directory entry for *path*, fetching the blob
    listing from Azure on a cache miss.

    If *contents_required* is true, a cached entry whose file map has not
    been fetched yet (files is None) is not good enough and the container
    contents are loaded. Raises FuseOSError(ENOENT) when the container
    does not exist.
    """
    if not self.containers:
        self.rebuild_container_list()
    # Fast path: entry cached and (if required) its file map is populated.
    if path in self.containers and not (contents_required and \
        self.containers[path]['files'] is None):
        return self.containers[path]
    cname = self.parse_container(path)
    if '/' + cname not in self.containers:
        raise FuseOSError(ENOENT)
    else:
        if self.containers['/' + cname]['files'] is None:
            # fetch contents of container
            log.info("------> CONTENTS NOT FOUND: %s" % cname)
            blobs = self.blobs.list_blobs(cname)
            # NOTE(review): dirstat is built but never used — appears to be
            # a leftover; confirm before removing.
            dirstat = dict(st_mode=(S_IFDIR | 0755), st_size=0,
                           st_uid=getuid(), st_mtime=time.time())
            if self.containers['/' + cname]['files'] is None:
                self.containers['/' + cname]['files'] = dict()
            for f in blobs:
                blob_name = f.name
                blob_date = f.properties.last_modified
                blob_size = long(f.properties.content_length)
                node = dict(st_mode=(S_IFREG | 0644), st_size=blob_size,
                            st_mtime=self.convert_to_epoch(blob_date),
                            st_uid=getuid())
                # Only files directly under the container are cached here;
                # nested blob paths are skipped.
                if blob_name.find('/') == -1:  # file just under container
                    self.containers['/' + cname]['files'][blob_name] = node
        return self.containers['/' + cname]
    return None
def _get_file(self, path):
    """Return the cached stat dict for the file at *path*, or None if the
    file is unknown."""
    directory, fname = self._parse_path(path)
    entry = self._get_dir(directory, True)
    if entry is not None and fname in entry['files']:
        return entry['files'][fname]
def getattr(self, path, fh=None):
d, f = self._parse_path(path)
#.........这里部分代码省略.........
示例11: BlobService
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import list_blobs [as 别名]
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 09 16:36:56 2015

@author: justin.malinchak

List every blob in 'mycontainer' and download each one to C:\Batches\$Work\.
"""
from azure.storage.blob import BlobService

# NOTE(review): storage credentials are hard-coded here; they should be
# moved to configuration or environment variables before this is shared.
blob_service = BlobService(account_name='portalvhdss5m831rhl98hj', account_key='Z1MliCYE7p9Ks9kYQoGeM4V99hODtiJL82BVi/zIm06jLYh7n0tV8YaZHzITKixMwUUmjJ1Vp05XrgHG+gXFlg==')

# Collect every blob, following continuation markers page by page.
blobs = []
marker = None
while True:
    batch = blob_service.list_blobs('mycontainer', marker=marker)
    blobs.extend(batch)
    marker = batch.next_marker
    if not marker:
        break

# Download each blob, echoing progress to the console.
for blob in blobs:
    bname = blob.name
    print('')
    print(bname)
    print('')
    bpathname = 'C:\\Batches\\$Work\\' + bname
    blob_service.get_blob_to_path('mycontainer', bname, bpathname)
    print('')
    print('blob downloaded ' + bpathname)
    print('')
示例12: Command
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import list_blobs [as 别名]
class Command(BaseCommand):
    help = "Synchronizes static media to cloud files."
    # Command-line options: -w wipes the container first, -t performs a dry
    # run, -c overrides the target container.
    option_list = BaseCommand.option_list + (
        optparse.make_option('-w', '--wipe',
            action='store_true', dest='wipe', default=False,
            help="Wipes out entire contents of container first."),
        optparse.make_option('-t', '--test-run',
            action='store_true', dest='test_run', default=False,
            help="Performs a test run of the sync."),
        optparse.make_option('-c', '--container',
            dest='container', help="Override STATIC_CONTAINER."),
    )

    # settings from azurite.settings
    ACCOUNT_NAME = AZURITE['ACCOUNT_NAME']
    ACCOUNT_KEY = AZURITE['ACCOUNT_KEY']
    STATIC_CONTAINER = AZURITE['STATIC_CONTAINER']

    # paths: local STATIC_ROOT directory and the URL prefix used to build
    # remote object names.
    DIRECTORY = os.path.abspath(settings.STATIC_ROOT)
    STATIC_URL = settings.STATIC_URL
    # Normalize: the directory must end with '/' (used to split relative
    # paths) and the URL prefix must not start with '/'.
    if not DIRECTORY.endswith('/'):
        DIRECTORY = DIRECTORY + '/'
    if STATIC_URL.startswith('/'):
        STATIC_URL = STATIC_URL[1:]

    # Sync bookkeeping: names seen locally plus per-category counters.
    local_object_names = []
    create_count = 0
    upload_count = 0
    update_count = 0
    skip_count = 0
    delete_count = 0
    # BlobService client; created in sync_files().
    service = None
def handle(self, *args, **options):
    """Entry point: read the command options and run the sync.

    Options: wipe (bool), test_run (bool), verbosity (int), and an
    optional container name overriding STATIC_CONTAINER.
    """
    self.wipe = options.get('wipe')
    self.test_run = options.get('test_run')
    self.verbosity = int(options.get('verbosity'))
    # Bug fix: *options* is a dict, so the original hasattr(options,
    # 'container') was always False and the -c/--container override
    # never took effect. Check the dict value instead.
    if options.get('container'):
        self.STATIC_CONTAINER = options.get('container')
    self.sync_files()
def sync_files(self):
    """Synchronize the local STATIC_ROOT tree with the Azure container.

    Ensures the container exists with blob-level public access, optionally
    wipes it first (-w), uploads new/changed files, deletes remote files
    missing locally, and prints a summary. In test-run mode (-t) the wipe
    and the summary are reported without (fully) performing changes.
    """
    self.service = BlobService(account_name=self.ACCOUNT_NAME,
        account_key=self.ACCOUNT_KEY)
    try:
        self.service.get_container_properties(self.STATIC_CONTAINER)
    except AzureMissingResourceHttpError:
        # Container does not exist yet; create it publicly readable.
        self.service.create_container(self.STATIC_CONTAINER,
            x_ms_blob_public_access='blob')
    self.service.set_container_acl(self.STATIC_CONTAINER, x_ms_blob_public_access='blob')
    # if -w option is provided, wipe out the contents of the container
    if self.wipe:
        blob_count = len(self.service.list_blobs(self.STATIC_CONTAINER))
        if self.test_run:
            print "Wipe would delete %d objects." % blob_count
        else:
            print "Deleting %d objects..." % blob_count
            for blob in self.service.list_blobs(self.STATIC_CONTAINER):
                self.service.delete_blob(self.STATIC_CONTAINER, blob.name)
    # walk through the directory, creating or updating files on the cloud
    os.path.walk(self.DIRECTORY, self.upload_files, "foo")
    # remove any files on remote that don't exist locally
    self.delete_files()
    # print out the final tally to the cmd line
    self.update_count = self.upload_count - self.create_count
    print
    if self.test_run:
        print "Test run complete with the following results:"
    print "Skipped %d. Created %d. Updated %d. Deleted %d." % (
        self.skip_count, self.create_count, self.update_count, self.delete_count)
def upload_files(self, arg, dirname, names):
# upload or skip items
for item in names:
file_path = os.path.join(dirname, item)
if os.path.isdir(file_path):
continue # Don't try to upload directories
object_name = self.STATIC_URL + file_path.split(self.DIRECTORY)[1]
self.local_object_names.append(object_name)
try:
properties = self.service.get_blob_properties(self.STATIC_CONTAINER,
object_name)
except AzureMissingResourceHttpError:
properties = {}
self.create_count += 1
cloud_datetime = None
#.........这里部分代码省略.........