当前位置: 首页>>代码示例>>Python>>正文


Python Bucket.delete_key方法代码示例

本文整理汇总了Python中boto.s3.bucket.Bucket.delete_key方法的典型用法代码示例。如果您正苦于以下问题:Python Bucket.delete_key方法的具体用法?Python Bucket.delete_key怎么用?Python Bucket.delete_key使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在boto.s3.bucket.Bucket的用法示例。


在下文中一共展示了Bucket.delete_key方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: remove_from_s3

# 需要导入模块: from boto.s3.bucket import Bucket [as 别名]
# 或者: from boto.s3.bucket.Bucket import delete_key [as 别名]
    def remove_from_s3(self):
        """
        Delete the rendered playground page for this model instance from
        every configured S3 bucket.
        """
        connection = boto.connect_s3()

        # Nothing to do unless at least one bucket is configured.
        if app_config.S3_BUCKETS:
            # The key path is the same in every bucket, so build it once.
            key_path = '%s/playground/%s.html' % (app_config.PROJECT_SLUG, self.slug)
            for bucket_name in app_config.S3_BUCKETS:
                bucket = Bucket(connection, bucket_name)
                key = Key(bucket)
                key.key = key_path
                bucket.delete_key(key)
开发者ID:nprapps,项目名称:playgrounds2,代码行数:15,代码来源:models.py

示例2: StorageS3

# 需要导入模块: from boto.s3.bucket import Bucket [as 别名]
# 或者: from boto.s3.bucket.Bucket import delete_key [as 别名]
class StorageS3():
    """Thin wrapper around a single S3 bucket for uploads and deletions.

    Required keyword arguments:
        key:    AWS access key id.
        secret: AWS secret access key.
        bucket: Name of the target bucket.
    """

    def __init__(self, *args, **kwargs):
        self.s3_key = kwargs['key']
        self.s3_secret = kwargs['secret']
        self.s3_bucket_name = kwargs['bucket']

        self.make_connection()

    def make_connection(self):
        """Open an S3 connection and bind ``self.bucket`` to the target bucket."""
        conn = S3Connection(self.s3_key, self.s3_secret)
        self.bucket = Bucket(conn, self.s3_bucket_name)

    def upload_file(self, local_path, remote_path):
        """Upload *local_path* to *remote_path* and make it world-readable.

        Fix: the original passed ``file(str(local_path))`` to
        ``set_contents_from_file`` — the Python-2-only ``file()`` builtin —
        and never closed the handle. ``set_contents_from_filename`` opens
        and closes the file itself.
        """
        key = Key(self.bucket, remote_path)
        key.set_contents_from_filename(str(local_path))
        key.set_acl('public-read')

    def delete_file(self, remote_path):
        """Delete the key named *remote_path* from the bucket."""
        self.bucket.delete_key(remote_path)
开发者ID:dash1291,项目名称:django-teleport,代码行数:21,代码来源:storage_s3.py

示例3: rm

# 需要导入模块: from boto.s3.bucket import Bucket [as 别名]
# 或者: from boto.s3.bucket.Bucket import delete_key [as 别名]
def rm(args):
    """Delete one or more S3 keys given as URLs (the ``rm`` subcommand).

    URLs may be passed on the command line and/or listed (one per record)
    in a file supplied via ``-F/--file``. Keys may contain shell-style
    wildcards, which are expanded against a bucket listing. Keys are
    grouped per bucket and deleted either one at a time or in batches,
    depending on the site's ``batch_delete`` configuration.

    NOTE(review): the ``-f/--force`` option is parsed but not referenced
    in this function body — presumably consumed elsewhere or vestigial;
    confirm before removing.
    """
    parser = option_parser("rm URL...")
    parser.add_option("-f", "--force", dest="force", action="store_true",
        default=False, help="Ignore nonexistent keys")
    parser.add_option("-F", "--file", dest="file", action="store",
        default=None, help="File containing a list of URLs to delete")
    options, args = parser.parse_args(args)

    if len(args) == 0 and not options.file:
        parser.error("Specify URL")

    # Append any URLs listed in the command file; each record must be a
    # single field (the URL).
    if options.file:
        for rec in read_command_file(options.file):
            if len(rec) != 1:
                raise Exception("Invalid record: %s" % rec)
            args.append(rec[0])

    # Group the parsed URIs by bucket so each bucket is connected to once.
    # Maps "ident/bucket" -> (bucket URI, [key URIs]).
    buckets = {}
    for arg in args:
        uri = parse_uri(arg)
        if uri.bucket is None:
            raise Exception("URL for rm must contain a bucket: %s" % arg)
        if uri.key is None:
            raise Exception("URL for rm must contain a key: %s" % arg)

        bid = "%s/%s" % (uri.ident, uri.bucket)
        buri = S3URI(uri.user, uri.site, uri.bucket, uri.secure)

        if bid not in buckets:
            buckets[bid] = (buri, [])
        buckets[bid][1].append(uri)

    config = get_config(options)

    for bucket in buckets:

        # Connect to the bucket
        debug("Deleting keys from bucket %s" % bucket)
        uri, keys = buckets[bucket]
        conn = get_connection(config, uri)
        b = Bucket(connection=conn, name=uri.bucket)

        # Get a final list of all the keys, resolving wildcards as necessary
        bucket_contents = None
        keys_to_delete = set()
        for key in keys:
            key_name = key.key

            if has_wildcards(key_name):

                # If we haven't yet queried the bucket, then do so now
                # so that we can match the wildcards
                if bucket_contents is None:
                    bucket_contents = b.list()

                # Collect all the keys that match
                for k in bucket_contents:
                    if fnmatch.fnmatch(k.name, key_name):
                        keys_to_delete.add(k.name)

            else:
                keys_to_delete.add(key_name)

        info("Deleting %d keys" % len(keys_to_delete))

        batch_delete = config.getboolean(uri.site, "batch_delete")

        if batch_delete:
            debug("Using batch deletes")

            # Delete the keys in batches
            batch_delete_size = config.getint(uri.site, "batch_delete_size")
            debug("batch_delete_size: %d" % batch_delete_size)
            batch = []
            for k in keys_to_delete:
                batch.append(k)
                if len(batch) == batch_delete_size:
                    info("Deleting batch of %d keys" % len(batch))
                    b.delete_keys(batch, quiet=True)
                    batch = []

            # Delete the final batch
            if len(batch) > 0:
                info("Deleting batch of %d keys" % len(batch))
                b.delete_keys(batch, quiet=True)

        else:
            # No batching: issue one DELETE per key.
            for key_name in keys_to_delete:
                debug("Deleting %s" % key_name)
                b.delete_key(key_name)
开发者ID:pegasus-isi,项目名称:pegasus,代码行数:92,代码来源:s3.py

示例4: test_delete_key_return_key

# 需要导入模块: from boto.s3.bucket import Bucket [as 别名]
# 或者: from boto.s3.bucket.Bucket import delete_key [as 别名]
 def test_delete_key_return_key(self):
     """delete_key should return a Key object, not None."""
     self.set_http_response(status_code=204, body='')
     bucket = Bucket(self.service_connection, 'mybucket')
     deleted = bucket.delete_key('fookey')
     self.assertIsNotNone(deleted)
开发者ID:2mind,项目名称:boto,代码行数:7,代码来源:test_key.py

示例5: Key

# 需要导入模块: from boto.s3.bucket import Bucket [as 别名]
# 或者: from boto.s3.bucket.Bucket import delete_key [as 别名]
    # Upload this file archive to the S3 bucket `b`, then delete the local
    # copy. (Loop header with `file`, `f` and `previous` is defined above
    # this fragment.)
    print '[S3] Uploading file archive ' + file + '...'

    k = Key(b)
    k.key = file
    k.set_contents_from_filename(dir_backup + file, policy="public-read")

    # Local copy is no longer needed once it is on S3.
    os.remove(dir_backup + file);

    print '[S3] Clearing previous file archive ' + file + '...'

    # Conserve monthly backups (Previous Month)
    # NOTE(review): the guard compares `previous` to a "YYYY-DD-3" string
    # built from today's *day*, not the month — looks suspicious; confirm
    # the intended retention rule before relying on it.
    if previous != str(datetime.datetime.today().year) + '-' + str(datetime.datetime.today().day) + '-3':
        # Clean up files on S3
        k = Key(b)
        k.key = basename(normpath(f)) + '-' + str(previous) + '.files.tar.gz.aes'
        b.delete_key(k)
	
# Send DBs to S3
# For each database name in `dbs`, upload its encrypted dump from the local
# backup directory to bucket `b`, then remove the local copy.
for d in dbs:

    d = d.strip()
    # Dump filename convention: <db>-<today>.sql.gz.aes
    file = "%s-%s.sql.gz.aes" % (d, today)

    print '[S3] Uploading database dump ' + file + '...'

    k = Key(b)
    k.key = file
    # Uploaded world-readable via the key policy; the commented-out
    # set_acl below was the older way of achieving the same thing.
    k.set_contents_from_filename(dir_backup + file, policy="public-read")
#    k.set_acl("public-read")

    # Local dump is no longer needed once it is on S3.
    os.remove(dir_backup + file);
开发者ID:Viperoo,项目名称:Tiktalik-Backup,代码行数:33,代码来源:backup.py

示例6: S3StaticFileStorage

# 需要导入模块: from boto.s3.bucket import Bucket [as 别名]
# 或者: from boto.s3.bucket.Bucket import delete_key [as 别名]
class S3StaticFileStorage(Storage):
    """Django Storage backend that keeps static files in an S3 bucket.

    Uses the module-level ``s3_conn`` for reads/writes and
    ``s3_public_conn`` to generate public (unsigned) URLs. Objects are
    stored world-readable; files larger than ``CHUNK_SIZE`` are uploaded
    via S3 multipart upload.
    """

    # Bucket name comes from Django settings.
    BUCKET_NAME = settings.S3_STATICFILES_BUCKET
    # ACL applied to every uploaded key.
    KEY_POLICY = 'public-read'
    # 100 MiB: threshold and part size for multipart uploads.
    CHUNK_SIZE = 100 << 20

    def __init__(self):
        super(S3StaticFileStorage, self).__init__()
        self._bucket = Bucket(connection=s3_conn, name=self.BUCKET_NAME)
        self._bucket_public = Bucket(connection=s3_public_conn,
                name=self.BUCKET_NAME)
        # Create the bucket on first use if it does not exist yet.
        if s3_conn.lookup(self.BUCKET_NAME) is None:
            s3_conn.create_bucket(self.BUCKET_NAME, policy='public-read')

        # Allow CORS access (for web fonts)
        self._bucket.set_cors(self._get_cors_config())

    def _get_cors_config(self):
        """Return a CORS config allowing GET from any origin."""
        cors = CORSConfiguration()
        cors.add_rule(['GET'], ['*'])
        return cors

    def _get_key(self, name):
        """Return the existing key *name*, raising IOError if absent."""
        key = self._bucket.get_key(name)
        if key is None:
            raise IOError('No such key')
        return key

    def _open(self, name, mode='rb'):
        """Open *name* for reading; only read modes are supported."""
        if mode not in ('r', 'rb'):
            raise IOError('_open() only supports reading')
        key = self._get_key(name)
        key.open_read()
        return File(key)

    def _save(self, name, content):
        """Upload *content* under *name* and return the stored name.

        Content-Type is chosen from the file extension for css/js/json,
        otherwise sniffed with python-magic from the buffer or file.
        """
        if name.endswith('.css'):
            content_type = 'text/css'
        elif name.endswith('.js'):
            content_type = 'application/javascript'
        elif name.endswith('.json'):
            content_type = 'application/json'
        elif hasattr(content.file, 'getvalue'):
            content_type = magic.from_buffer(content.file.getvalue(),
                    mime=True)
        else:
            content_type = magic.from_file(content.file.name, mime=True)
        hdrs = {
            'Content-Type': content_type,
        }
        if content.size > self.CHUNK_SIZE:
            # Upload in chunks
            upload = self._bucket.initiate_multipart_upload(name,
                    policy=self.KEY_POLICY, headers=hdrs)
            for i, buf in enumerate(content.chunks(self.CHUNK_SIZE), 1):
                upload.upload_part_from_file(StringIO(buf), i)
            upload.complete_upload()
        else:
            # Upload all at once
            key = self._bucket.new_key(name)
            key.set_contents_from_string(content.read(),
                    policy=self.KEY_POLICY, headers=hdrs)
        return name

    def get_available_name(self, name):
        # Overwrite in place: never rename to find a free name.
        return name

    def get_valid_name(self, name):
        return name

    def delete(self, name):
        """Delete key *name* from the bucket."""
        self._bucket.delete_key(name)

    def exists(self, name):
        """Return True if key *name* exists in the bucket."""
        key = self._bucket.get_key(name)
        return key is not None

    def listdir(self, path):
        """Return ([], filenames) for keys under *path* (no directories)."""
        path = path.lstrip('/')
        return ([], [key.name for key in self._bucket.list(prefix=path)])

    def modified_time(self, name):
        """Return the key's last-modified time as a naive local datetime."""
        key = self._get_key(name)
        stamp = dateutil.parser.parse(key.last_modified)
        # Convert to naive datetime in local time, as FileSystemStorage does
        return stamp.astimezone(tzlocal()).replace(tzinfo=None)

    def size(self, name):
        """Return the stored size of key *name* in bytes."""
        key = self._get_key(name)
        return key.size

    def url(self, name):
        """Return a public, unsigned URL for *name* (never expires)."""
        key = self._bucket_public.new_key(name)
        return key.generate_url(0, query_auth=False)
开发者ID:cmusatyalab,项目名称:django-s3,代码行数:95,代码来源:storage.py

示例7: S3Connection

# 需要导入模块: from boto.s3.bucket import Bucket [as 别名]
# 或者: from boto.s3.bucket.Bucket import delete_key [as 别名]
# Python 2 backup script: upload every file from the directory given as
# argv[1] to an S3 bucket, then delete keys older than three days.
# (aws_* credentials, `workdir`, `parse` and `timedelta` are defined
# earlier in the full script, outside this excerpt.)
s3 = S3Connection(aws_access_key, aws_secret_key, host=aws_host, is_secure=False)

today = datetime.date.today()
# Retention cutoff: anything last modified 3+ days ago gets deleted below.
previous = today - timedelta(days = 3)

from boto.s3.bucket import Bucket

b = Bucket(s3, aws_bucket)

print "[S3] Upload files "

# Upload every file in the directory passed on the command line.
files_in_dir = os.listdir(sys.argv[1])
for file in files_in_dir:
    print "[S3] Uploading " + file
    k = Key(b)
    k.key = file
    # NOTE(review): reads from `workdir`, while the listing came from
    # sys.argv[1] — presumably the same path; confirm.
    k.set_contents_from_filename(workdir + file, policy="public-read")

print "[S3] Upload complete "
print "[S3] Deleting old files "

# Iterate all keys in the bucket and drop those at or past the cutoff
# (comparison is done at day granularity).
for key in b:

       d = parse(key.last_modified)

       if datetime.datetime(d.year, d.month, d.day) <= datetime.datetime(previous.year, previous.month, previous.day):
           print "[S3] Deleting " + key.name
           b.delete_key(key)

sys.exit(0)
开发者ID:Viperoo,项目名称:encrypt-backup-s3-script,代码行数:32,代码来源:s3.py


注:本文中的boto.s3.bucket.Bucket.delete_key方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。