

Python Bucket.get_key Method Code Examples

This article collects typical usage examples of the Python method boto.s3.bucket.Bucket.get_key. If you are wondering what Bucket.get_key does, how to call it, or what real-world usage looks like, the curated examples below should help. You can also explore further usage examples of the containing class, boto.s3.bucket.Bucket.


The following presents 15 code examples of the Bucket.get_key method, sorted by popularity by default.
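Before turning to the examples, here is a minimal sketch of the core pattern (the bucket and key names are hypothetical): Bucket.get_key issues a HEAD request and returns a Key object, or None when the key does not exist, so the result should be checked before reading.

import boto
from boto.s3.bucket import Bucket

conn = boto.connect_s3()  # credentials come from the environment or boto config
bucket = Bucket(connection=conn, name='my-bucket')

# get_key returns a Key object, or None if the key is absent
key = bucket.get_key('path/to/object.txt')
if key is not None:
    data = key.get_contents_as_string()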

Example 1: Compost

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
class Compost(object):

    def __init__(self, directory, bucket):
        self.directory = directory
        self.bucket = Bucket(connection=boto.connect_s3(), name=bucket)

    def turn(self):
        """
        'Turn' the compost, i.e. make a backup of all files in the local directory.
        """
        for filename, full_path in self._local_files():
            logger.debug('backing up {}'.format(filename))
            key = self.bucket.new_key(filename)
            key.set_contents_from_filename(full_path)

    def list(self):
        """Return a list of known backed up files."""
        return [k.name for k in self.bucket.get_all_keys()]

    def read(self, filename):
        """
        Return the contents of the named file, or the empty string if the file does not exist.
        """
        key = self.bucket.get_key(filename)
        return key.get_contents_as_string() if key else ''

    def _local_files(self):
        for f in os.listdir(self.directory):
            yield f, os.path.join(self.directory, f)
Author: fredrik, Project: compost, Lines: 31, Source: compost.py
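A possible way to drive the class above (the directory and bucket names are hypothetical):

compost = Compost('/var/backups', 'my-compost-bucket')
compost.turn()                     # back up every file in the local directory
print(compost.list())              # names of all backed-up files
print(compost.read('notes.txt'))   # contents of a single backup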

Example 2: load

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
def load(context, url, callback):
    enable_http_loader = context.config.get('AWS_ENABLE_HTTP_LOADER', default=False)

    if enable_http_loader and url.startswith('http'):
        return http_loader.load_sync(context, url, callback, normalize_url_func=_normalize_url)

    url = urllib2.unquote(url)

    bucket = context.config.get('S3_LOADER_BUCKET', default=None)

    if not bucket:
        bucket, url = _get_bucket(url)

    if _validate_bucket(context, bucket):
        bucket_loader = Bucket(
            connection=thumbor_aws.connection.get_connection(context),
            name=bucket
        )
        file_key = None
        try:
            file_key = bucket_loader.get_key(url)
        except Exception as e:
            logger.warn("ERROR retrieving image from S3 {0}: {1}".format(url, str(e)))

        if file_key:
            callback(file_key.read())
            return
Author: Bladrak, Project: aws, Lines: 29, Source: s3_loader.py

Example 3: get_file

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
    def get_file(self):
        b = Bucket(self.conn, 'Bucket01')
        key = b.get_key('file10m.dat')
        filename = str(uuid.uuid4())
        key.get_contents_to_filename('%s.tmp' % filename)
        # remove the file from the local fs
        os.remove('%s.tmp' % filename)
Author: alvip, Project: rally_s3api, Lines: 9, Source: s3_scen.py

Example 4: load

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
def load(context, url, callback):
    
    enable_http_loader = context.config.get('AWS_ENABLE_HTTP_LOADER', default=False)

    if enable_http_loader and 'http' in url:
        return http_loader.load(context, url, callback)
      
    url = urllib2.unquote(url)
    
    if context.config.S3_LOADER_BUCKET:
        bucket = context.config.S3_LOADER_BUCKET
    else:
        bucket, url = _get_bucket(url)
        if not _validate_bucket(context, bucket):
            return callback(None)

    bucket_loader = Bucket(
        connection=thumbor_aws.connection.get_connection(context),
        name=bucket
    )

    file_key = bucket_loader.get_key(url)
    if not file_key:
        return callback(None)

    return callback(file_key.read())
Author: seanlin0324, Project: thumbor_aws, Lines: 28, Source: s3_loader.py

Example 5: test_restore_header_with_ongoing_restore

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
    def test_restore_header_with_ongoing_restore(self):
        self.set_http_response(
            status_code=200,
            header=[('x-amz-restore', 'ongoing-request="true"')])
        b = Bucket(self.service_connection, 'mybucket')
        k = b.get_key('myglacierkey')
        self.assertTrue(k.ongoing_restore)
        self.assertIsNone(k.expiry_date)
Author: 2mind, Project: boto, Lines: 10, Source: test_key.py

Example 6: test_restore_completed

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
    def test_restore_completed(self):
        self.set_http_response(
            status_code=200,
            header=[('x-amz-restore',
                     'ongoing-request="false", '
                     'expiry-date="Fri, 21 Dec 2012 00:00:00 GMT"')])
        b = Bucket(self.service_connection, 'mybucket')
        k = b.get_key('myglacierkey')
        self.assertFalse(k.ongoing_restore)
        self.assertEqual(k.expiry_date, 'Fri, 21 Dec 2012 00:00:00 GMT')
Author: 2mind, Project: boto, Lines: 12, Source: test_key.py
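Examples 5 and 6 parse the x-amz-restore header that S3 returns for keys archived to Glacier. For context, a hedged sketch of how such a restore is typically initiated in boto 2.x, assuming Key.restore(days=...) is available in your version (names follow the tests above):

b = Bucket(connection=boto.connect_s3(), name='mybucket')
k = b.get_key('myglacierkey')
if k is not None and not k.ongoing_restore:
    k.restore(days=5)  # ask S3 to keep a temporary copy readable for 5 days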

Example 7: load

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
def load(context, url, callback):
    
    if context.config.S3_LOADER_BUCKET:
        bucket = context.config.S3_LOADER_BUCKET
    else:
        bucket, url = _get_bucket(url)
        if not _validate_bucket(context, bucket):
            return callback(None)

    conn = connection
    if conn is None:
        # Store the connection rather than the bucket
        conn = S3Connection(
            context.config.AWS_ACCESS_KEY,
            context.config.AWS_SECRET_KEY,
            suppress_consec_slashes=context.config.S3_LOADER_SUPPRESS_SLASHES
        )

    bucketLoader = Bucket(
        connection=conn,
        name=bucket
    )

    if context.config.S3_LOADER_SUPPRESS_SLASHES:
        file_key = bucketLoader.get_key(url)
    else:
        """
            BUG IN BOTO
            When the first character of a key name is a / boto will generate 
            invalid signing keys with suppress_consec_slashes=False
        """
        if url[0] == "/":
            file_key = bucketLoader.get_key(url[1:])
        else:
            file_key = bucketLoader.get_key(url)

    if not file_key:
        return callback(None)

    return callback(file_key.read())
Author: willtrking, Project: thumbor, Lines: 42, Source: s3_loader.py

Example 8: generate

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
def generate():
    api_key = request.form['api_key']
    poll = request.form["poll"]

    if poll == "false":
        dimensions = (int(request.form['width']), int(request.form['height']))
        generate_grid.delay(api_key, dimensions)
        return jsonify({'status': 'generating'})
    else:
        image_path = "images/{0}.png".format(hashlib.md5(api_key).hexdigest())
        conn = S3Connection(settings.S3_ACCESS_KEY, settings.S3_SECRET_KEY)
        bucket = Bucket(conn, settings.S3_BUCKET)
        if bucket.get_key(image_path):
            return jsonify({'status': 'ok', 'path': image_path})
        else:
            return jsonify({'status': 'generating'})
Author: CuZnDragon, Project: wanikani, Lines: 18, Source: app.py

Example 9: load_sync

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
def load_sync(context, url, callback):
    if _use_http_loader(context, url):
        return http_loader.load_sync(context, url, callback, normalize_url_func=_normalize_url)

    bucket, key = _get_bucket_and_key(context, url)

    if _validate_bucket(context, bucket):
        bucket_loader = Bucket(
            connection=get_connection(context),
            name=bucket
        )
        file_key = None
        try:
            file_key = bucket_loader.get_key(url)
        except Exception as e:
            logger.warn("ERROR retrieving image from S3 {0}: {1}".format(url, str(e)))

        if file_key:
            callback(file_key.read())
            return
Author: pgr0ss, Project: aws, Lines: 22, Source: s3_loader.py

Example 10: load

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
def load(context, url, callback):
    if context.config.S3_LOADER_BUCKET:
        bucket = context.config.S3_LOADER_BUCKET
    else:
        bucket, url = _get_bucket(url)
        if not _validate_bucket(context, bucket):
            return callback(None)

    conn = _establish_connection(context.config)

    bucket_loader = Bucket(
        connection=conn,
        name=bucket
    )

    file_key = bucket_loader.get_key(url)
    if not file_key:
        return callback(None)

    return callback(file_key.read())
Author: CMVentures, Project: thumbor_aws, Lines: 22, Source: s3_loader.py
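For illustration, a minimal callback that could be passed to the loader functions above (context is assumed to be a configured thumbor Context; the key name is hypothetical):

def on_image_loaded(buffer):
    if buffer is None:
        print('image not found in S3')
    else:
        print('loaded %d bytes' % len(buffer))

load(context, 'some/image/key.jpg', on_image_loaded)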

Example 11: test_change_storage_class_new_bucket

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
    def test_change_storage_class_new_bucket(self):
        self.set_http_response(status_code=200)
        b = Bucket(self.service_connection, 'mybucket')
        k = b.get_key('fookey')

        # Mock out Key.copy so we can record calls to it
        k.copy = mock.MagicMock()
        # Mock out the bucket so we don't actually need to have fake responses
        k.bucket = mock.MagicMock()
        k.bucket.name = 'mybucket'

        self.assertEqual(k.storage_class, 'STANDARD')
        # Specifying a different dst_bucket should result in a copy to the new
        # bucket
        k.copy.reset_mock()
        k.change_storage_class('REDUCED_REDUNDANCY', dst_bucket='yourbucket')
        k.copy.assert_called_with(
            'yourbucket',
            'fookey',
            reduced_redundancy=True,
            preserve_acl=True,
            validate_dst_bucket=True,
        )
Author: 10sr, Project: hue, Lines: 25, Source: test_key.py

Example 12: test_storage_class

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
    def test_storage_class(self):
        self.set_http_response(status_code=200)
        b = Bucket(self.service_connection, 'mybucket')
        k = b.get_key('fookey')

        # Mock out the bucket object - we really only care about calls
        # to list.
        k.bucket = mock.MagicMock()

        # Default behavior doesn't call list
        k.set_contents_from_string('test')
        k.bucket.list.assert_not_called()

        # Direct access calls list to get the real value if unset,
        # and still defaults to STANDARD if unavailable.
        sc_value = k.storage_class
        self.assertEqual(sc_value, 'STANDARD')
        k.bucket.list.assert_called_with(k.name.encode('utf-8'))
        k.bucket.list.reset_mock()

        # Setting manually doesn't call list
        k.storage_class = 'GLACIER'
        k.set_contents_from_string('test')
        k.bucket.list.assert_not_called()
Author: 17patelumang, Project: boto, Lines: 26, Source: test_key.py

Example 13: get

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
def get(args):
    parser = option_parser("get URL [FILE]")
    parser.add_option("-c", "--chunksize", dest="chunksize", action="store", type="int",
        metavar="X", default=10, help="Set the chunk size for parallel downloads to X "
        "megabytes. A value of 0 will avoid chunked reads. This option only applies for "
        "sites that support ranged downloads (see ranged_downloads configuration "
        "parameter). The default chunk size is 10MB, the min is 1MB and the max is "
        "1024MB. Choose smaller values to reduce the impact of transient failures.")
    parser.add_option("-p", "--parallel", dest="parallel", action="store", type="int",
        metavar="N", default=4, help="Use N threads to upload FILE in parallel. The "
            "default value is 4, which enables parallel downloads with 4 threads. "
            "This parameter is only valid if the site supports ranged downloads "
            "and the --chunksize parameter is not 0. Otherwise parallel downloads are "
            "disabled.")
    parser.add_option("-r", "--recursive", dest="recursive", action="store_true",
        help="Get all keys that start with URL")
    options, args = parser.parse_args(args)

    if options.chunksize < 0 or options.chunksize > 1024:
        parser.error("Invalid chunksize")

    if options.parallel <= 0:
        parser.error("Invalid value for --parallel")

    if len(args) == 0:
        parser.error("Specify URL")

    uri = parse_uri(args[0])

    if uri.bucket is None:
        raise Exception("URL must contain a bucket: %s" % args[0])
    if uri.key is None and not options.recursive:
        raise Exception("URL must contain a key or use --recursive")

    if len(args) > 1:
        output = fix_file(args[1])
    elif uri.key is None:
        output = "./"
    else:
        output = os.path.basename(uri.key.rstrip("/"))

    info("Downloading %s" % uri)

    # Does the site support ranged downloads properly?
    config = get_config(options)
    ranged_downloads = config.getboolean(uri.site, "ranged_downloads")

    # Warn the user
    if options.parallel > 1:
        if not ranged_downloads:
            warn("ranged downloads not supported, ignoring --parallel")
        elif options.chunksize == 0:
            warn("--chunksize set to 0, ignoring --parallel")

    conn = get_connection(config, uri)
    b = Bucket(connection=conn, name=uri.bucket)

    if options.recursive:
        # Get all the keys we need to download

        def keyfilter(k):
            if uri.key is None:
                # We want all the keys in the bucket
                return True

            if uri.key.endswith("/"):
                # The user specified a "folder", so we should only match keys
                # in that "folder"
                return k.name.startswith(uri.key)

            if k.name == uri.key:
                # Match bare keys in case they specify recursive, but there
                # is a key that matches the specified path. Note that this
                # could cause a problem in the case where they have a key
                # called 'foo' and a "folder" called 'foo' in the same
                # bucket. In a file system that can't happen, but it can
                # happen in S3.
                return True

            if k.name.startswith(uri.key+"/"):
                # All other keys in the "folder"
                return True

            return False

        keys = [x for x in b.list(uri.key) if keyfilter(x)]
    else:
        # Just get the one key we need to download
        key = b.get_key(uri.key)
        if key is None:
            raise Exception("No such key. If %s is a folder, try --recursive." % uri.key)
        keys = [key]

    info("Downloading %d keys" % len(keys))

    start = time.time()
    totalsize = 0
    for key in keys:
        outfile = get_path_for_key(b.name, uri.key, key.name, output)

#......... remainder of this code omitted .........
Author: pegasus-isi, Project: pegasus, Lines: 103, Source: s3.py

Example 14: S3StaticFileStorage

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
class S3StaticFileStorage(Storage):
    BUCKET_NAME = settings.S3_STATICFILES_BUCKET
    KEY_POLICY = 'public-read'
    CHUNK_SIZE = 100 << 20

    def __init__(self):
        super(S3StaticFileStorage, self).__init__()
        self._bucket = Bucket(connection=s3_conn, name=self.BUCKET_NAME)
        self._bucket_public = Bucket(connection=s3_public_conn,
                name=self.BUCKET_NAME)
        if s3_conn.lookup(self.BUCKET_NAME) is None:
            s3_conn.create_bucket(self.BUCKET_NAME, policy='public-read')

        # Allow CORS access (for web fonts)
        self._bucket.set_cors(self._get_cors_config())

    def _get_cors_config(self):
        cors = CORSConfiguration()
        cors.add_rule(['GET'], ['*'])
        return cors

    def _get_key(self, name):
        key = self._bucket.get_key(name)
        if key is None:
            raise IOError('No such key')
        return key

    def _open(self, name, mode='rb'):
        if mode not in ('r', 'rb'):
            raise IOError('_open() only supports reading')
        key = self._get_key(name)
        key.open_read()
        return File(key)

    def _save(self, name, content):
        if name.endswith('.css'):
            content_type = 'text/css'
        elif name.endswith('.js'):
            content_type = 'application/javascript'
        elif name.endswith('.json'):
            content_type = 'application/json'
        elif hasattr(content.file, 'getvalue'):
            content_type = magic.from_buffer(content.file.getvalue(),
                    mime=True)
        else:
            content_type = magic.from_file(content.file.name, mime=True)
        hdrs = {
            'Content-Type': content_type,
        }
        if content.size > self.CHUNK_SIZE:
            # Upload in chunks
            upload = self._bucket.initiate_multipart_upload(name,
                    policy=self.KEY_POLICY, headers=hdrs)
            for i, buf in enumerate(content.chunks(self.CHUNK_SIZE), 1):
                upload.upload_part_from_file(StringIO(buf), i)
            upload.complete_upload()
        else:
            # Upload all at once
            key = self._bucket.new_key(name)
            key.set_contents_from_string(content.read(),
                    policy=self.KEY_POLICY, headers=hdrs)
        return name

    def get_available_name(self, name):
        return name

    def get_valid_name(self, name):
        return name

    def delete(self, name):
        self._bucket.delete_key(name)

    def exists(self, name):
        key = self._bucket.get_key(name)
        return key is not None

    def listdir(self, path):
        path = path.lstrip('/')
        return ([], [key.name for key in self._bucket.list(prefix=path)])

    def modified_time(self, name):
        key = self._get_key(name)
        stamp = dateutil.parser.parse(key.last_modified)
        # Convert to naive datetime in local time, as FileSystemStorage does
        return stamp.astimezone(tzlocal()).replace(tzinfo=None)

    def size(self, name):
        key = self._get_key(name)
        return key.size

    def url(self, name):
        key = self._bucket_public.new_key(name)
        return key.generate_url(0, query_auth=False)
Author: cmusatyalab, Project: django-s3, Lines: 95, Source: storage.py
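A hedged sketch of wiring this storage class into a Django project (the module path and bucket name are hypothetical):

# settings.py
S3_STATICFILES_BUCKET = 'my-static-bucket'
STATICFILES_STORAGE = 'myproject.storage.S3StaticFileStorage'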

Example 15: test_when_no_restore_header_present

# Required import: from boto.s3.bucket import Bucket [as alias]
# Or: from boto.s3.bucket.Bucket import get_key [as alias]
    def test_when_no_restore_header_present(self):
        self.set_http_response(status_code=200)
        b = Bucket(self.service_connection, 'mybucket')
        k = b.get_key('myglacierkey')
        self.assertIsNone(k.ongoing_restore)
        self.assertIsNone(k.expiry_date)
Author: 2mind, Project: boto, Lines: 8, Source: test_key.py


Note: The boto.s3.bucket.Bucket.get_key method examples in this article were compiled by 纯净天空 from open source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open source projects contributed by their respective developers; copyright remains with the original authors, and redistribution and use are subject to each project's License. Do not reproduce without permission.