

Python BlobService.list_blobs Method Code Examples

This article collects typical usage examples of the Python method azure.storage.BlobService.list_blobs, drawn from open-source projects. If you are wondering what BlobService.list_blobs does or how to call it, the curated examples below should help. You can also explore the other uses of azure.storage.BlobService.


15 code examples of BlobService.list_blobs are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Python examples.
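Before diving into the examples, here is a minimal sketch of basic BlobService.list_blobs usage, assuming the legacy azure SDK (v0.11.x or earlier) that the examples below target; the account credentials, container name, and prefix are placeholders:

# Minimal sketch: placeholder credentials, substitute your own storage account values.
from azure.storage import BlobService

blob_service = BlobService(account_name='myaccount', account_key='mykey')

# list_blobs returns a single page of results (up to maxresults);
# follow next_marker to enumerate containers with many blobs,
# as Examples 3 and 12 below do.
marker = None
while True:
    batch = blob_service.list_blobs('mycontainer', prefix='logs/', marker=marker)
    for blob in batch:
        print blob.name, blob.properties.content_length
    marker = batch.next_marker
    if not marker:
        break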

Example 1: submit

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]
def submit():
    blob_service = BlobService(account_name=ACCOUNT_NAME, account_key=ACCOUNT_KEY)

    # Get a SAS signature (read for 24 hours) for the input container, saved to a string
    inputsig = sasUrl(account=ACCOUNT_NAME, key=ACCOUNT_KEY, container=INPUT_CONTAINER, permission='r')

    # Get a SAS signature (read/write/list for 24 hours) for the output container, saved to a string
    outputsig = sasUrl(account=ACCOUNT_NAME, key=ACCOUNT_KEY, container=OUTPUT_CONTAINER, permission='rwl')

    # List all the blobs in the input container and collect their names
    blobs = blob_service.list_blobs(INPUT_CONTAINER)

    bloblist = []
    for blob in blobs:
        bloblist.append(blob.name)

    # Environment variable keys must be strings (the bare names here would raise NameError)
    os.environ["SLURMDEMO_INPUTSIG"] = inputsig
    os.environ["SLURMDEMO_OUTPUTSIG"] = outputsig
    os.environ["SLURMDEMO_BLOBLIST"] = json.dumps(bloblist)
    os.environ["SLURMDEMO_INPUTCONTAINER"] = INPUT_CONTAINER
    os.environ["SLURMDEMO_OUTPUTCONTAINER"] = OUTPUT_CONTAINER
    os.environ["SLURMDEMO_INPUTACCOUNT"] = ACCOUNT_NAME
    os.environ["SLURMDEMO_OUTPUTACCOUNT"] = ACCOUNT_NAME

    # Call sbatch; --array bounds are inclusive, so 0..len-1 gives one task per blob
    cli = "sbatch --array=0-{nb} slurmdemo.sh".format(nb=len(bloblist) - 1)
    run(cli, showoutput=True)
Author: kristiyanto, Project: azure-training-120215, Lines: 29, Source: slurmdemo.py

Example 2: AzureTransfer

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, container_name):
        BaseTransfer.__init__(self)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name
        self.conn = BlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized")

    def get_metadata_for_key(self, key):
        key = fix_path(key)
        return self.list_path(key)[0]['metadata']

    def list_path(self, path):
        return_list = []
        path = fix_path(path)
        self.log.info("Asking for listing of: %r", path)
        for r in self.conn.list_blobs(self.container_name, prefix=path, delimiter="/",
                                      include="metadata"):
            entry = {"name": r.name, "size": r.properties.content_length,
                     "last_modified": dateutil.parser.parse(r.properties.last_modified),
                     "metadata": r.metadata}
            return_list.append(entry)
        return return_list

    def delete_key(self, key_name):
        key_name = fix_path(key_name)
        self.log.debug("Deleting key: %r", key_name)
        return self.conn.delete_blob(self.container_name, key_name)

    def get_contents_to_file(self, obj_key, filepath_to_store_to):
        obj_key = fix_path(obj_key)
        self.log.debug("Starting to fetch the contents of: %r to: %r", obj_key, filepath_to_store_to)
        return self.conn.get_blob_to_path(self.container_name, obj_key, filepath_to_store_to)

    def get_contents_to_string(self, obj_key):
        obj_key = fix_path(obj_key)
        self.log.debug("Starting to fetch the contents of: %r", obj_key)
        return self.conn.get_blob_to_bytes(self.container_name, obj_key), self.get_metadata_for_key(obj_key)

    def store_file_from_memory(self, key, memstring, metadata=None):
        # Azure requires all metadata values to be strings at the point of sending;
        # guard against the default metadata=None
        metadata_to_send = dict((str(k), str(v)) for k, v in (metadata or {}).items())
        self.conn.put_block_blob_from_bytes(self.container_name, key, memstring,
                                            x_ms_meta_name_values=metadata_to_send)

    def store_file_from_disk(self, key, filepath, metadata=None):
        # Same string conversion and None guard as above
        metadata_to_send = dict((str(k), str(v)) for k, v in (metadata or {}).items())
        self.conn.put_block_blob_from_path(self.container_name, key, filepath,
                                           x_ms_meta_name_values=metadata_to_send)

    def get_or_create_container(self, container_name):
        start_time = time.time()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs", container_name, time.time() - start_time)
        return container_name
Author: Ormod, Project: pghoard, Lines: 60, Source: azure.py
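A quick usage sketch for the class above (hypothetical values; BaseTransfer and fix_path come from the surrounding pghoard codebase and are assumed to be importable):

# Hypothetical usage of AzureTransfer; the account values are placeholders.
transfer = AzureTransfer('myaccount', 'mykey', 'backups')
for entry in transfer.list_path('basebackup/'):
    print entry['name'], entry['size'], entry['metadata']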

Example 3: list_files_from_path

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]
def list_files_from_path(container, path):
    blob_service = BlobService(account_name=storage_name, account_key=storage_key)
    next_marker = None
    results = []
    while True:
        blobs = blob_service.list_blobs(container, prefix=path, maxresults=2000, marker=next_marker)
        for blob in blobs:
            results.append(blob.name)
        next_marker = blobs.next_marker
        if not next_marker:
            break
    return results
Author: hwind, Project: hwindCode, Lines: 14, Source: download_files_from_azure.py
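A hypothetical call of the helper above, assuming storage_name and storage_key are defined at module level as the function expects:

# Illustrative container and prefix values.
for name in list_files_from_path('mycontainer', 'incoming/2015/'):
    print name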

Example 4: run

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]
def run(itk_source_dir, externaldata_object_store, account_name, account_key):
    blob_service = BlobService(account_name=account_name,
                               account_key=account_key)
    blobs = blob_service.list_blobs('md5')
    current_blobs = [blob.name for blob in blobs]

    md5files = []
    for root, dirnames, filenames in os.walk(itk_source_dir):
        for filename in fnmatch.filter(filenames, '*.md5'):
            md5files.append(os.path.join(root, filename))

    for content_link in md5files:
        upload_to_azure(content_link, externaldata_object_store,
                        blob_service, current_blobs)
Author: BishopWolf, Project: ITK, Lines: 16, Source: ArchiveTestingDataOnAzure.py

Example 5: process

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]
    def process(self):
        account_name = self.parameters.azure_account_name
        account_key = self.parameters.azure_account_key

        blob_service = BlobService(account_name, account_key, protocol="https")
        proxy_setting = self.parameters.https_proxy or ""
        date_setting = self.parameters.date or ""
        date = None
    
        if date_setting:
            if date_setting != "yesterday":
                date = datetime.datetime.strptime(date_setting, "%Y-%m-%d").date()  # for debugging (probably)
            elif date_setting == "yesterday":
                date = datetime.date.today() - datetime.timedelta(days=1)  # for normal usage

        proxy_url = "https://" + proxy_setting if proxy_setting.find("https://") == -1 else proxy_setting
        proxy_options = urlparse(proxy_url)

        if date:
            self.logger.info("Fetching for date: %s (%s)" %  (date, date_setting))
        else:
            self.logger.info("No 'date' was specified, fetching ALL")
        
        if proxy_options.hostname:
            self.logger.info("Using https proxy(host=%s, port=%s)" % (proxy_options.hostname, proxy_options.port))
            blob_service.set_proxy(host=proxy_options.hostname, port=proxy_options.port)
        else:
            if proxy_setting:
                self.logger.info("Using NO proxy, couldn't use 'https_proxy' it was: %s" % proxy_setting)
            else:
                self.logger.info("Using NO proxy, 'https_proxy' was empty")

        for container in blob_service.list_containers():
            container_name = container.name
            if container_name == "heartbeat":
                continue

            if date and (not container_name == "processed-" + str(date)):
                self.logger.info("IGNORING container '%s' didn't match date selection" % container_name)
                continue

            for blob in blob_service.list_blobs(container_name):
                self.logger.info("Fetching blob %s in container %s" % (container_name, blob.name))
                data = blob_service.get_blob(container_name, blob.name)
                cs = StringIO.StringIO(data)
                report = gzip.GzipFile(fileobj=cs).read()

                self.send_message(report)
Author: pedrosa-t, Project: intelmq, Lines: 50, Source: collector.py

Example 6: download

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]
def download():
    blob_service = BlobService(account_name='squadshots', account_key='UgxaWKAKv2ZvhHrPt0IHi4EQedPpZw35r+RXkAYB2eICPrG3TjSwk2G8gUzG/PNDDTV+4CVCYWCvZSiad5xMQQ==')
    try:
        blob_service.get_blob_to_path('album','image','static/output.png')
    except Exception as e:
        print e
    blobs = blob_service.list_blobs('album', include='metadata')
    for blob in blobs:
        if blob.metadata is not None:
            for key in blob.metadata:
                if blob.metadata[key] == session['username']:
                    blob_service.get_blob_to_path('album', blob.name, 'static/output.png')
    for i in blob_service.list_containers():
        print "This container is " + i.name


    return render_template('album.html',filename="static/output.png")
Author: engineershreyas, Project: SquadShots, Lines: 19, Source: app.py

Example 7: upload_all_new_azure

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]
def upload_all_new_azure(local_folder, azure_container, account_name, account_key):

    # Use the credentials passed in (the original ignored these parameters
    # and read os.getenv('ACC_NAME') / os.getenv('ACCESS_KEY') instead)
    blob_service = BlobService(account_name=account_name, account_key=account_key)

    blob_list = blob_service.list_blobs(azure_container)

    blob_name_list = [b.name for b in blob_list.blobs]

    blob_name_set = set(blob_name_list)

    # Now, for each file in the local folder, check whether it is already in the container
    localfiles = os.listdir(local_folder)
    localfiles = [f for f in localfiles if "~" not in f]
    localfiles = [f for f in localfiles if f[0] != "."]
    localfiles = [f for f in localfiles if (".zip" in f or ".csv" in f)]

    localfiles = set(localfiles)

    files_to_upload = localfiles - blob_name_set

    orig_len = len(files_to_upload)
    error_counter = 0
    while len(files_to_upload) > 0:
        if error_counter > orig_len:
            logger.error("too many upload failures, exiting")
            sys.exit()
        filename = files_to_upload.pop()

        try:
            blob_service.put_block_blob_from_path(
                azure_container,  # the original hard-coded 'csvs' here
                filename,
                os.path.join(local_folder, filename)
            )

        except Exception:
            error_counter += 1
            logger.error(filename + " failed to upload")
            files_to_upload.add(filename)
Author: RobinL, Project: daily, Lines: 47, Source: upload_all_new.py

Example 8: AzureStorageBlockDeviceAPI

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]

#.........part of the code omitted here.........
            if d.label == str(blockdevice_id):
                target_disk = d
                break

        if target_disk is None:
            # check for an unregistered disk
            blobs = self._get_flocker_blobs()
            blob = None

            if str(blockdevice_id) in blobs:
                blob = blobs[str(blockdevice_id)]

            return blob, None, None

        vm_info = None
        # Initialize so a disk with no attachment info returns (disk, None, None)
        # instead of raising NameError below
        role_name = None
        target_lun = None

        if hasattr(target_disk.attached_to, 'role_name'):
            vm_info = self._azure_service_client.get_role(
                self._service_name, self._service_name,
                target_disk.attached_to.role_name)

            for d in vm_info.data_virtual_hard_disks:
                if d.disk_name == target_disk.name:
                    target_lun = d.lun
                    break

            role_name = target_disk.attached_to.role_name

        return (target_disk, role_name, target_lun)

    def _get_flocker_blobs(self):
        all_blobs = {}

        blobs = self._azure_storage_client.list_blobs(
            self._disk_container_name,
            prefix='flocker-')

        for b in blobs:
            # todo - this could be big!
            all_blobs[b.name] = b

        return all_blobs

    def _wait_for_detach(self, blockdevice_id):
        role_name = ''
        lun = -1

        timeout_count = 0

        log_info('waiting for azure to report disk as detached...')

        while role_name is not None or lun is not None:
            (target_disk, role_name, lun) = \
                self._get_disk_vmname_lun(blockdevice_id)
            time.sleep(1)
            timeout_count += 1

            if timeout_count > 5000:
                raise AsynchronousTimeout()

        log_info('Disk Detached')

    def _wait_for_attach(self, blockdevice_id):
        timeout_count = 0
        lun = None
Author: FlorianLudwig, Project: azure-flocker-driver, Lines: 69, Source: azure_storage_driver.py

Example 9: BlobService

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]
    blobService = BlobService(storageAccount, accessKey)

    #--------------
    # Get the container and Blob objects
    # Obtain the Blob object from its mediaLink
    logger.debug("deleteOSandDataDisk.py: Container and Blob object get mediaLink...(%s)" % mediaLink)

    # Check that the blob to be deleted exists

    # Get the list of containers
    containerList = blobService.list_containers()
    targetBlob = None

    for container in containerList:
        # Get the list of blobs in this container
        blobList = blobService.list_blobs(container.name)
        for blob in blobList:
            # Compare the URIs with the leading http*:// scheme stripped
            blobname = blob.url.split('://')[1]
            if blobname == mediaLink.split('://')[1]:
                logger.debug('deleteOSandDataDisk.py: find target blobname: ' + blobname)
                targetBlob = blob
                targetContainer = container

    # Exit with an error if the blob was not found
    if (targetBlob is None):
        logger.error('deleteOSandDataDisk.py: target blob(%s) is not found.' % mediaLink.split('://')[1])
        sys.exit()

    #-----------------
    # Start the lease
Author: primecloud-controller-org, Project: primecloud-controller, Lines: 33, Source: azuredeleteOSandDataDisk.py

Example 10: Command

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]
class Command(BaseCommand):
    help = "Synchronizes static media to cloud files."

    option_list = BaseCommand.option_list + (
        optparse.make_option('-w', '--wipe',
            action='store_true', dest='wipe', default=False,
            help="Wipes out entire contents of container first."),
        optparse.make_option('-t', '--test-run',
            action='store_true', dest='test_run', default=False,
            help="Performs a test run of the sync."),
        optparse.make_option('-c', '--container',
            dest='container', help="Override STATIC_CONTAINER."),
    )

    # settings from azurite.settings
    ACCOUNT_NAME     = AZURITE['ACCOUNT_NAME']
    ACCOUNT_KEY      = AZURITE['ACCOUNT_KEY']
    STATIC_CONTAINER = AZURITE['STATIC_CONTAINER']

    # paths
    DIRECTORY        = os.path.abspath(settings.STATIC_ROOT)
    STATIC_URL       = settings.STATIC_URL

    if not DIRECTORY.endswith('/'):
        DIRECTORY = DIRECTORY + '/'

    if STATIC_URL.startswith('/'):
        STATIC_URL = STATIC_URL[1:]

    local_object_names = []
    create_count = 0
    upload_count = 0
    update_count = 0
    skip_count = 0
    delete_count = 0
    service = None

    def handle(self, *args, **options):
        self.wipe = options.get('wipe')
        self.test_run = options.get('test_run')
        self.verbosity = int(options.get('verbosity'))
        # options is a plain dict here, so hasattr() never matches a key; use .get()
        if options.get('container'):
            self.STATIC_CONTAINER = options.get('container')
        self.sync_files()

    def sync_files(self):
        self.service = BlobService(account_name=self.ACCOUNT_NAME,
            account_key=self.ACCOUNT_KEY)

        try:
            self.service.get_container_properties(self.STATIC_CONTAINER)
        except WindowsAzureMissingResourceError:
            self.service.create_container(self.STATIC_CONTAINER,
                x_ms_blob_public_access='blob')

        self.service.set_container_acl(self.STATIC_CONTAINER, x_ms_blob_public_access='blob')

        # if -w option is provided, wipe out the contents of the container
        if self.wipe:
            blob_count = len(self.service.list_blobs(self.STATIC_CONTAINER))

            if self.test_run:
                print "Wipe would delete %d objects." % blob_count
            else:
                print "Deleting %d objects..." % blob_count
                for blob in self.service.list_blobs(self.STATIC_CONTAINER):
                    self.service.delete_blob(self.STATIC_CONTAINER, blob.name)

        # walk through the directory, creating or updating files on the cloud
        os.path.walk(self.DIRECTORY, self.upload_files, "foo")

        # remove any files on remote that don't exist locally
        self.delete_files()

        # print out the final tally to the cmd line
        self.update_count = self.upload_count - self.create_count
        print
        if self.test_run:
            print "Test run complete with the following results:"
        print "Skipped %d. Created %d. Updated %d. Deleted %d." % (
            self.skip_count, self.create_count, self.update_count, self.delete_count)

    def upload_files(self, arg, dirname, names):
        # upload or skip items
        for item in names:
            file_path = os.path.join(dirname, item)
            if os.path.isdir(file_path):
                continue # Don't try to upload directories

            object_name = self.STATIC_URL + file_path.split(self.DIRECTORY)[1]
            self.local_object_names.append(object_name)

            try:
                properties = self.service.get_blob_properties(self.STATIC_CONTAINER,
                    object_name)
            except WindowsAzureMissingResourceError:
                properties = {}
                self.create_count += 1

            cloud_datetime = None
#.........part of the code omitted here.........
Author: ricardomomm, Project: django-azurite, Lines: 103, Source: syncstatic.py

Example 11: deletefromazure

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]
def deletefromazure(strPrefix):
    blob_service = BlobService(account_name='wanderight', account_key='gdmZeJOCx3HYlFPZZukUhHAfeGAu4cfHWGQZc3+HIpkBHjlznUDjhXMl5HWh5MgbjpJF09ZxRaET1JVF9S2MWQ==')
    blobsToDelete = blob_service.list_blobs(config['container'], prefix=strPrefix)
    for b in blobsToDelete:
        blob_service.delete_blob(config['container'], b.name)
Author: trentniemeyer, Project: BlogParse, Lines: 7, Source: Util.py

Example 12: AzureBackend

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]
class AzureBackend(duplicity.backend.Backend):
    """
    Backend for Azure Blob Storage Service
    """
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Import Microsoft Azure Storage SDK for Python library.
        try:
            import azure
            import azure.storage
            if hasattr(azure.storage, 'BlobService'):
                # v0.11.1 and below
                from azure.storage import BlobService
                self.AzureMissingResourceError = azure.WindowsAzureMissingResourceError
                self.AzureConflictError = azure.WindowsAzureConflictError
            else:
                # v1.0.0 and above
                from azure.storage.blob import BlobService
                self.AzureMissingResourceError = azure.common.AzureMissingResourceHttpError
                self.AzureConflictError = azure.common.AzureConflictHttpError
        except ImportError as e:
            raise BackendException("""\
Azure backend requires Microsoft Azure Storage SDK for Python (https://pypi.python.org/pypi/azure-storage/).
Exception: %s""" % str(e))

        if 'AZURE_ACCOUNT_NAME' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_NAME environment variable not set.')
        if 'AZURE_ACCOUNT_KEY' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_KEY environment variable not set.')
        self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                        account_key=os.environ['AZURE_ACCOUNT_KEY'])

        # TODO: validate container name
        self.container = parsed_url.path.lstrip('/')
        try:
            self.blob_service.create_container(self.container, fail_on_exist=True)
        except self.AzureConflictError:
            # Indicates that the resource could not be created because it already exists.
            pass
        except Exception as e:
            log.FatalError("Could not create Azure container: %s"
                           % unicode(e.message).split('\n', 1)[0],
                           log.ErrorCode.connection_failed)

    def _put(self, source_path, remote_filename):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#upload-a-blob-into-a-container
        self.blob_service.put_block_blob_from_path(self.container, remote_filename, source_path.name)

    def _get(self, remote_filename, local_path):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#download-blobs
        self.blob_service.get_blob_to_path(self.container, remote_filename, local_path.name)

    def _list(self):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#list-the-blobs-in-a-container
        blobs = []
        marker = None
        while True:
            batch = self.blob_service.list_blobs(self.container, marker=marker)
            blobs.extend(batch)
            if not batch.next_marker:
                break
            marker = batch.next_marker
        return [blob.name for blob in blobs]

    def _delete(self, filename):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#delete-blobs
        self.blob_service.delete_blob(self.container, filename)

    def _query(self, filename):
        prop = self.blob_service.get_blob_properties(self.container, filename)
        return {'size': int(prop['content-length'])}

    def _error_code(self, operation, e):
        if isinstance(e, self.AzureMissingResourceError):
            return log.ErrorCode.backend_not_found
Author: mjuric, Project: duplicity, Lines: 78, Source: azurebackend.py

Example 13: AzureFS

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]
class AzureFS(LoggingMixIn, Operations):
    """Azure Blob Storage filesystem"""

    blobs = None
    containers = dict()  # <cname, dict(stat: dict, files: None | dict<fname, stat>)>
    fds = dict()  # <fd, (path, bytes, dirty)>
    fd = 0

    def __init__(self, account, key):
        self.blobs = BlobService(account, key)
        self.rebuild_container_list()

    def convert_to_epoch(self, date):
        """Converts Tue, 31 Jul 2012 07:17:34 GMT format to epoch"""
        return int(time.mktime(time.strptime(date, TIME_FORMAT)))

    def rebuild_container_list(self):
        cmap = dict()
        cnames = set()
        for c in self.blobs.list_containers():
            date = c.properties.last_modified
            cstat = dict(st_mode=(S_IFDIR | 0755), st_uid=getuid(), st_size=0,
                         st_mtime=self.convert_to_epoch(date))
            cname = c.name
            cmap['/' + cname] = dict(stat=cstat, files=None)
            cnames.add(cname)

        cmap['/'] = dict(files={},
                         stat=dict(st_mode=(S_IFDIR | 0755),
                                     st_uid=getuid(), st_size=0,
                                     st_mtime=int(time.time())))

        self.containers = cmap   # destroys fs tree cache resistant to misses

    def _parse_path(self, path):    # returns </dir, file(=None)>
        if path.count('/') > 1:     # file
            return str(path[:path.rfind('/')]), str(path[path.rfind('/') + 1:])
        else:                       # dir
            pos = path.rfind('/', 1)
            if pos == -1:
                return path, None
            else:
                return str(path[:pos]), None

    def parse_container(self, path):
        base_container = path[1:]   # /abc/def/g --> abc
        if base_container.find('/') > -1:
            base_container = base_container[:base_container.find('/')]
        return str(base_container)

    def _get_dir(self, path, contents_required=False):
        if not self.containers:
            self.rebuild_container_list()

        if path in self.containers and not (contents_required and \
                self.containers[path]['files'] is None):
            return self.containers[path]

        cname = self.parse_container(path)

        if '/' + cname not in self.containers:
            raise FuseOSError(ENOENT)
        else:
            if self.containers['/' + cname]['files'] is None:
                # fetch contents of container
                log.info("------> CONTENTS NOT FOUND: %s" % cname)

                blobs = self.blobs.list_blobs(cname)

                dirstat = dict(st_mode=(S_IFDIR | 0755), st_size=0,
                               st_uid=getuid(), st_mtime=time.time())

                if self.containers['/' + cname]['files'] is None:
                    self.containers['/' + cname]['files'] = dict()

                for f in blobs:
                    blob_name = f.name
                    blob_date = f.properties.last_modified
                    blob_size = long(f.properties.content_length)

                    node = dict(st_mode=(S_IFREG | 0644), st_size=blob_size,
                                st_mtime=self.convert_to_epoch(blob_date),
                                st_uid=getuid())

                    if blob_name.find('/') == -1:  # file just under container
                        self.containers['/' + cname]['files'][blob_name] = node

            return self.containers['/' + cname]
        return None

    def _get_file(self, path):
        d, f = self._parse_path(path)
        dir = self._get_dir(d, True)
        if dir is not None and f in dir['files']:
            return dir['files'][f]

    def getattr(self, path, fh=None):
        d, f = self._parse_path(path)

#.........part of the code omitted here.........
Author: QuasarSE, Project: azurefs, Lines: 103, Source: azurefs.py

Example 14: BlobService

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]
ACCOUNT_NAME = 'sounds'
ACCOUNT_KEY  = AC.getAccountKey() # primary access key
HOST_BASE    = '.blob.core.windows.net'

blob_service = BlobService(account_name=ACCOUNT_NAME,
                           account_key=ACCOUNT_KEY,
                           host_base=HOST_BASE)

CONTAINER = 'bat-detective' # or whatever else you like

created = blob_service.create_container(CONTAINER, x_ms_blob_public_access='container')
print "Created" if created else "Not created (probably already existing)"

audio_dir = '../../data/wav/'
SOUND_FILES = glob.glob(audio_dir + '*.wav')

for f in SOUND_FILES:
    print "uploading", os.path.basename(f)
    blob_service.put_block_blob_from_path(
        CONTAINER,                          # container
        os.path.basename(f),                # blob
        f,                                  # path
        x_ms_blob_content_type='audio/wav'
    )


blobs = blob_service.list_blobs(CONTAINER)

for blob in blobs:
    print(blob.name)
Author: gozzilli, Project: engaged_hackathon, Lines: 32, Source: upload_azure.py

Example 15: Command

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import list_blobs [as alias]
        thread = threading.Thread(target=target)
        thread.start()

        thread.join(timeout)
        if thread.is_alive():
            print 'Terminating process'
            self.process.terminate()
            thread.join()
        print self.process.returncode

#command = Command("echo 'Process started'; sleep 2; echo 'Process finished'")
#print command.run(timeout=3)
#print command.run(timeout=1)
#
#command = Command('ping www.google.com')
#print command.run(timeout=1)

AZURE_STORAGE_CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING']

blob_service = BlobService(connection_string=AZURE_STORAGE_CONNECTION_STRING)

print blob_service.put_block_blob_from_path( 'nexradl2', '201208/20120810/KSRX/NWS_NEXRAD_NXL2SR_KSRX_20120810050000_20120810055959.tar', '/snfs9/q2/levelii_tarfiles/201208/20120810/KSRX/NWS_NEXRAD_NXL2SR_KSRX_20120810050000_20120810055959.tar', max_connections=5,)


blobs = blob_service.list_blobs('nexradl2',maxresults=10)
for blob in blobs:
    print(blob.name)
    print(blob.url)

Author: 18F, Project: noaabigdata-code, Lines: 30, Source: test.py


Note: The azure.storage.BlobService.list_blobs method examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are selected from open-source projects contributed by many developers, and copyright of the source code remains with the original authors. Refer to the corresponding project's license before distributing or using the code; do not reproduce without permission.