

Python BlobService.set_container_acl Method Code Examples

This article collects typical usage examples of the Python method azure.storage.BlobService.set_container_acl. If you are wondering how to call BlobService.set_container_acl, what it does, or what it looks like in real code, the selected examples below should help. You can also browse further usage examples of the containing class, azure.storage.BlobService.


Three code examples of BlobService.set_container_acl are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
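
Before the full examples, here is a minimal sketch of the basic call pattern, assuming the legacy azure package (the azure.storage.BlobService class used throughout this page); the account name, key, and container name below are placeholders rather than values from any of the projects:

# Minimal sketch of set_container_acl with the legacy azure SDK (azure.storage.BlobService).
# Account credentials and the container name are placeholders.
from azure.storage import BlobService

blob_service = BlobService(account_name='myaccount', account_key='mykey')
blob_service.create_container('public-data')  # create the container before adjusting its ACL
# 'container' allows anonymous listing and reads; 'blob' allows anonymous reads of individual blobs only
blob_service.set_container_acl('public-data', x_ms_blob_public_access='container')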

Example 1: BlobService

# Required module: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import set_container_acl [as alias]
import glob
import os
import time

from azure.storage import BlobService

# Prepend a start-up entry to the log file.
logtime = time.strftime("%Y%m%d-%H%M%S")
logmessage = "uploadmp3.py started"
command = "sed -i '1s/^/" + logtime + " " + logmessage + "\\n/' /home/pi/selcuk/log.txt"
os.system(command)

# Create the 'record' container and make all of its blobs publicly readable.
blob_service = BlobService(account_name='account_name', account_key='account_key')
blob_service.create_container('record')
blob_service.set_container_acl('record', x_ms_blob_public_access='container')

directory = "/home/pi/selcuk/mp3"

# Upload every MP3 in the directory, then delete the local copy.
os.chdir(directory)
for filename in glob.glob("*.mp3"):
    full_path = directory + "/" + filename
    blob_service.put_block_blob_from_path(
        'record',
        filename,
        full_path,
        x_ms_blob_content_type='audio/mpeg3'
    )
    os.remove(filename)

    logtime = time.strftime("%Y%m%d-%H%M%S")
    logmessage = filename + " uploaded to cloud and deleted from device"
Developer: selcukyavuz, Project: raspberry314, Lines of code: 33, Source file: uploadmp3.py
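
With x_ms_blob_public_access='container', every uploaded MP3 becomes readable anonymously at the standard blob URL. A minimal sketch of constructing that URL (the account and file names are placeholders, not taken from the project above):

# Sketch: anonymous URL of a blob in a container whose ACL was set to 'container' (or 'blob').
# 'account_name' and 'example.mp3' are placeholders.
account_name = 'account_name'
container = 'record'
blob_name = 'example.mp3'
url = "https://{0}.blob.core.windows.net/{1}/{2}".format(account_name, container, blob_name)
print(url)  # https://account_name.blob.core.windows.net/record/example.mp3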

Example 2: generate_and_upload

# Required module: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import set_container_acl [as alias]
def generate_and_upload(gauge_factory, config, logger):
    start = datetime.datetime.now()
    twitter_followers = gauge_factory('twitter.followers')
    twitter_tweets = gauge_factory('twitter.tweets')
    fb_friends = gauge_factory('facebook.friends')
    foursq_checkins = gauge_factory('foursquare.checkins')
    klout_score = gauge_factory('klout.score')
    runkeeper_activities = gauge_factory('runkeeper.activities')
    runkeeper_calories = gauge_factory('runkeeper.calories_burned')
    runkeeper_weight = gauge_factory('runkeeper.weight')
    tmp102_celsius = gauge_factory('tmp102.temperature', gauge_type='hourly')
    lastfm_listened = gauge_factory('lastfm.listened')
    jawbone_sleeps = gauge_factory('jawbone.sleeps')
    jawbone_steps = gauge_factory('jawbone.steps')
    jawbone_caffeine = gauge_factory('jawbone.caffeine')

    data = {}
    data_sources = [
        # (output key, gauge, days back, aggregator, postprocessors)
        ('twitter.followers', twitter_followers, 30, None,
            [zero_fill_daily, interpolators.linear]),
        ('twitter.tweets', twitter_tweets, 20, None, [zero_fill_daily]),
        ('facebook.friends', fb_friends, 180, monthly_max, None),
        ('foursquare.checkins', foursq_checkins, 14, None, [zero_fill_daily]),
        ('lastfm.listened', lastfm_listened, 14, None, [zero_fill_daily]),
        ('klout.score', klout_score, 30, weekly_max, [zero_fill_weekly,
                                                      interpolators.linear]),
        ('runkeeper.calories', runkeeper_calories, 60, weekly_sum,
            [zero_fill_weekly]),
        ('runkeeper.activities', runkeeper_activities, 60, weekly_sum,
            [zero_fill_weekly]),
        ('runkeeper.weight', runkeeper_weight, 180, weekly_min,
            [zero_fill_weekly, interpolators.linear]),
        ('sleeps', jawbone_sleeps, 14, None, [zero_fill_daily,
            interpolators.linear]),
        ('steps', jawbone_steps, 14, None, [zero_fill_daily,
            interpolators.linear]),
        ('caffeine', jawbone_caffeine, 30, None, [zero_fill_daily]),
        ('tmp102.temperature', tmp102_celsius, 2.5, None, None)
    ]

    for ds in data_sources:
        data[ds[0]] = ds[1].aggregate(today_utc() - timedelta(days=ds[2]),
                                      aggregator=ds[3],
                                      post_processors=ds[4])

    report = {
        'generated': str(now_utc()),
        'data': data,
        'took': (datetime.datetime.now() - start).seconds
    }
    report_json = json.dumps(report, indent=4, default=json_date_serializer)
    report_content = '{0}({1})'.format(JSONP_CALLBACK_NAME, report_json)
    
    blob_service = BlobService(config['azure.account'], config['azure.key'])
    blob_service.create_container(config['azure.blob.container'])
    blob_service.set_container_acl(config['azure.blob.container'],
                                   x_ms_blob_public_access='container')
    blob_service.put_blob(config['azure.blob.container'],
                          config['azure.blob.name'], report_content, 'BlockBlob')

    took = (datetime.datetime.now() - start).seconds
    logger.info('Report generated and uploaded. Took {0} s.'.format(took))
Developer: JonathanGitHub, Project: personal-dashboard, Lines of code: 65, Source file: reporting.py
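
Example 2 uploads the JSONP report without specifying a content type. If the report is fetched directly by a browser, it may help to set one explicitly; a minimal sketch, assuming put_blob accepts the same x_ms_blob_content_type keyword that put_block_blob_from_path uses in Example 1 (account, container, and blob names are placeholders):

# Sketch: upload a JSONP payload with an explicit content type (legacy azure SDK assumed).
# The x_ms_blob_content_type keyword on put_blob is an assumption based on Example 1;
# all names and the payload below are placeholders.
from azure.storage import BlobService

report_content = 'callback({"data": []})'  # stand-in for the report built in the function above
blob_service = BlobService(account_name='account_name', account_key='account_key')
blob_service.put_blob('reports', 'report.jsonp', report_content, 'BlockBlob',
                      x_ms_blob_content_type='application/javascript')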

Example 3: Command

# Required module: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import set_container_acl [as alias]
class Command(BaseCommand):
    help = "Synchronizes static media to cloud files."

    option_list = BaseCommand.option_list + (
        optparse.make_option('-w', '--wipe',
            action='store_true', dest='wipe', default=False,
            help="Wipes out entire contents of container first."),
        optparse.make_option('-t', '--test-run',
            action='store_true', dest='test_run', default=False,
            help="Performs a test run of the sync."),
        optparse.make_option('-c', '--container',
            dest='container', help="Override STATIC_CONTAINER."),
    )

    # settings from azurite.settings
    ACCOUNT_NAME     = AZURITE['ACCOUNT_NAME']
    ACCOUNT_KEY      = AZURITE['ACCOUNT_KEY']
    STATIC_CONTAINER = AZURITE['STATIC_CONTAINER']

    # paths
    DIRECTORY        = os.path.abspath(settings.STATIC_ROOT)
    STATIC_URL       = settings.STATIC_URL

    if not DIRECTORY.endswith('/'):
        DIRECTORY = DIRECTORY + '/'

    if STATIC_URL.startswith('/'):
        STATIC_URL = STATIC_URL[1:]

    local_object_names = []
    create_count = 0
    upload_count = 0
    update_count = 0
    skip_count = 0
    delete_count = 0
    service = None

    def handle(self, *args, **options):
        self.wipe = options.get('wipe')
        self.test_run = options.get('test_run')
        self.verbosity = int(options.get('verbosity'))
        # options is a dict, so check for the key rather than an attribute
        if options.get('container'):
            self.STATIC_CONTAINER = options.get('container')
        self.sync_files()

    def sync_files(self):
        self.service = BlobService(account_name=self.ACCOUNT_NAME,
            account_key=self.ACCOUNT_KEY)

        try:
            self.service.get_container_properties(self.STATIC_CONTAINER)
        except WindowsAzureMissingResourceError:
            self.service.create_container(self.STATIC_CONTAINER,
                x_ms_blob_public_access='blob')

        self.service.set_container_acl(self.STATIC_CONTAINER, x_ms_blob_public_access='blob')

        # if -w option is provided, wipe out the contents of the container
        if self.wipe:
            blob_count = len(self.service.list_blobs(self.STATIC_CONTAINER))

            if self.test_run:
                print "Wipe would delete %d objects." % blob_count
            else:
                print "Deleting %d objects..." % blob_count
                for blob in self.service.list_blobs(self.STATIC_CONTAINER):
                    self.service.delete_blob(self.STATIC_CONTAINER, blob.name)

        # walk through the directory, creating or updating files on the cloud
        os.path.walk(self.DIRECTORY, self.upload_files, "foo")

        # remove any files on remote that don't exist locally
        self.delete_files()

        # print out the final tally to the cmd line
        self.update_count = self.upload_count - self.create_count
        print
        if self.test_run:
            print "Test run complete with the following results:"
        print "Skipped %d. Created %d. Updated %d. Deleted %d." % (
            self.skip_count, self.create_count, self.update_count, self.delete_count)

    def upload_files(self, arg, dirname, names):
        # upload or skip items
        for item in names:
            file_path = os.path.join(dirname, item)
            if os.path.isdir(file_path):
                continue # Don't try to upload directories

            object_name = self.STATIC_URL + file_path.split(self.DIRECTORY)[1]
            self.local_object_names.append(object_name)

            try:
                properties = self.service.get_blob_properties(self.STATIC_CONTAINER,
                    object_name)
            except WindowsAzureMissingResourceError:
                properties = {}
                self.create_count += 1

            cloud_datetime = None
#......... remainder of the code omitted .........
Developer: ricardomomm, Project: django-azurite, Lines of code: 103, Source file: syncstatic.py
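
Example 3 traverses the static directory with os.path.walk, which exists only in Python 2. A minimal sketch of the equivalent traversal with os.walk, for anyone adapting the command to Python 3 (walk_and_upload is a hypothetical helper name; command stands for the Command instance above):

# Sketch: Python 3-friendly equivalent of os.path.walk(self.DIRECTORY, self.upload_files, "foo").
# os.path.walk was removed in Python 3; os.walk yields (dirpath, dirnames, filenames) tuples,
# which map onto the (arg, dirname, names) callback signature of upload_files. upload_files
# already skips directories, so passing only filenames here is equivalent.
import os

def walk_and_upload(command, directory):
    for dirpath, dirnames, filenames in os.walk(directory):
        command.upload_files("foo", dirpath, filenames)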


Note: The azure.storage.BlobService.set_container_acl examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are taken from open-source projects contributed by their respective developers; copyright remains with the original authors. Please follow the corresponding project's license when distributing or using this code, and do not reproduce it without permission.