This article collects typical usage examples of the Python method boto.gs.connection.GSConnection.get_bucket. If you are wondering exactly how GSConnection.get_bucket is used, or are looking for working examples of it, the curated code samples below should help. You can also explore further usage examples of the class it belongs to, boto.gs.connection.GSConnection.
A total of 11 code examples of the GSConnection.get_bucket method are shown below, ordered by popularity by default.
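Before diving into the examples, the sketch below shows the basic pattern they all share: build a GSConnection with a pair of Google Storage interoperability keys, call get_bucket, and then work with Key objects. The credentials and bucket name are placeholders, so treat this as a minimal outline rather than working configuration.

from boto.gs.connection import GSConnection

conn = GSConnection('GS_ACCESS_KEY_ID', 'GS_SECRET_ACCESS_KEY')  # placeholder interoperability keys
bucket = conn.get_bucket('example-bucket')                       # placeholder bucket name; must already exist
k = bucket.new_key('hello.txt')
k.set_contents_from_string('hello from boto')
print(k.get_contents_as_string())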
Example 1: upload_to_gs
# Required imports: from boto.gs.connection import GSConnection [as alias]
# Or: from boto.gs.connection.GSConnection import get_bucket [as alias]
def upload_to_gs(bucket_name, client_id, client_secret, file, key, acl='public-read'):
    conn = GSConnection(client_id, client_secret, calling_format=OrdinaryCallingFormat())
    bucket = conn.get_bucket(bucket_name)
    k = Key(bucket)
    # generate key
    filename = secure_filename(file.filename)
    key_dir = key + '/' + generate_hash(key) + '/'
    k.key = key_dir + filename
    # delete old data
    for item in bucket.list(prefix='/' + key_dir):
        item.delete()
    # set object settings
    file_data = file.read()
    file_mime = magic.from_buffer(file_data, mime=True)
    size = len(file_data)
    sent = k.set_contents_from_string(
        file_data,
        headers={
            'Content-Disposition': 'attachment; filename=%s' % filename,
            'Content-Type': '%s' % file_mime
        }
    )
    k.set_acl(acl)
    gs_url = 'https://storage.googleapis.com/%s/' % bucket_name
    if sent == size:
        return gs_url + k.key
    return False
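The function above expects a Werkzeug/Flask-style upload object (it reads file.filename and file.read()), so a call site might look like the hypothetical route below. The route path, form field name, bucket, and credentials are all made-up placeholders, and upload_to_gs is assumed to be importable from the module that defines it.

from flask import Flask, request

app = Flask(__name__)

@app.route('/upload', methods=['POST'])
def upload():
    uploaded = request.files['attachment']  # werkzeug FileStorage: provides .filename and .read()
    url = upload_to_gs('example-bucket', 'GS_ACCESS_KEY_ID', 'GS_SECRET_ACCESS_KEY',
                       uploaded, key='user-uploads')
    if not url:
        return 'upload failed', 500
    return url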
Example 2: GcsCache
# Required imports: from boto.gs.connection import GSConnection [as alias]
# Or: from boto.gs.connection.GSConnection import get_bucket [as alias]
class GcsCache(S3Cache):
    '''A cache that transfers files to and from GCS'''

    def __init__(self, bucket=None, prefix=None, account=None, upstream=None, **kwargs):
        '''Init a new GcsCache'''
        from boto.gs.connection import GSConnection

        super(S3Cache, self).__init__(upstream=upstream)  # Skip parent __init__

        self.is_remote = False
        self.access_key = account['access']
        self.secret = account['secret']
        self.project = account['project']
        self.bucket_name = bucket
        self.prefix = prefix

        self.conn = GSConnection(self.access_key, self.secret, is_secure=False)
        self.bucket = self.conn.get_bucket(self.bucket_name)

    def __repr__(self):
        return "GcsCache: bucket={} prefix={} access={} upstream={}".format(
            self.bucket, self.prefix, self.access_key, self.upstream)
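GcsCache deliberately calls super(S3Cache, self).__init__ so that it skips S3Cache's own connection setup while still initializing the shared base class, then substitutes a GSConnection. A hypothetical construction might look like the following; the account dictionary mirrors the keys read in __init__ and every value is a placeholder.

account = {
    'access': 'GS_ACCESS_KEY_ID',      # placeholder interoperability key
    'secret': 'GS_SECRET_ACCESS_KEY',  # placeholder secret
    'project': 'example-project',      # placeholder project id
}
cache = GcsCache(bucket='example-cache-bucket', prefix='builds/', account=account)
print(cache)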
Example 3: get
# Required imports: from boto.gs.connection import GSConnection [as alias]
# Or: from boto.gs.connection.GSConnection import get_bucket [as alias]
def get(self):
    """Handle GET requests.

    For the time being, we just provide an information page. In the future
    there will be a web UI here.
    """
    path = os.path.join(os.path.dirname(__file__), 'index.html')
    user = users.get_current_user()
    connection = GSConnection(config.gs_access_key, config.gs_secret_key)
    bucket = connection.get_bucket(config.gs_bucket)
    # Find all of the batches.
    batches = []
    logging.info('Loading batches')
    rs = bucket.list(prefix=user.email() + '/', delimiter='/')
    for r in rs:
        logging.info(r.name)
        batch_name = r.name.split('/')[1]
        batches.append(self.LoadBatchInfo(user.email(), batch_name, bucket))
    batches.sort(key=lambda i: i.get('update_time', 0), reverse=True)
    self.response.out.write(
        template.render(path, {
            'url': self.request.url,
            'user_id': user.user_id(),
            'email': user.email(),
            'batches': batches,
        }))
Example 4: GSTestCase
# Required imports: from boto.gs.connection import GSConnection [as alias]
# Or: from boto.gs.connection.GSConnection import get_bucket [as alias]
class GSTestCase(unittest.TestCase):
    gs = True

    def setUp(self):
        self._conn = GSConnection()
        self._buckets = []
        self._tempdirs = []

    # Retry with an exponential backoff if a server error is received. This
    # ensures that we try *really* hard to clean up after ourselves.
    @retry(GSResponseError)
    def tearDown(self):
        while len(self._tempdirs):
            tmpdir = self._tempdirs.pop()
            shutil.rmtree(tmpdir, ignore_errors=True)
        while(len(self._buckets)):
            b = self._buckets[-1]
            try:
                bucket = self._conn.get_bucket(b)
                while len(list(bucket.list_versions())) > 0:
                    for k in bucket.list_versions():
                        try:
                            bucket.delete_key(k.name, generation=k.generation)
                        except GSResponseError, e:
                            if e.status != 404:
                                raise
                bucket.delete()
            except GSResponseError, e:
                if e.status != 404:
                    raise
            self._buckets.pop()
Example 5: save_image_to_gs
# Required imports: from boto.gs.connection import GSConnection [as alias]
# Or: from boto.gs.connection.GSConnection import get_bucket [as alias]
def save_image_to_gs(key_name, bf, mime="image/png"):
    try:
        conn = GSConnection(gs_access_key_id=settings.gs_access_key_id,
                            gs_secret_access_key=settings.gs_secret_access_key)
        bucket = conn.get_bucket(settings.bucket_name)
        gs_file = bucket.new_key(key_name)
        gs_file.set_contents_from_string(bf, policy="public-read", headers={"Content-Type": mime})
    except:
        return False
    return True
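A hypothetical call site for save_image_to_gs is sketched below; the local file path and key name are placeholders, and the settings object is assumed to carry the bucket name and credentials read inside the function.

with open('avatar.png', 'rb') as f:  # placeholder local file
    ok = save_image_to_gs('avatars/user-42.png', f.read(), mime='image/png')
if not ok:
    print('upload to Google Cloud Storage failed')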
Example 6: test_1_basic
# Required imports: from boto.gs.connection import GSConnection [as alias]
# Or: from boto.gs.connection.GSConnection import get_bucket [as alias]
def test_1_basic(self):
    print '--- running GSConnection tests ---'
    c = GSConnection()
    # create a new, empty bucket
    bucket_name = 'test-%d' % int(time.time())
    bucket = c.create_bucket(bucket_name)
    # now try a get_bucket call and see if it's really there
    bucket = c.get_bucket(bucket_name)
    k = bucket.new_key()
    k.name = 'foobar'
    s1 = 'This is a test of file upload and download'
    s2 = 'This is a second string to test file upload and download'
    k.set_contents_from_string(s1)
    fp = open('foobar', 'wb')
    # now get the contents from s3 to a local file
    k.get_contents_to_file(fp)
    fp.close()
    fp = open('foobar')
    # check to make sure content read from s3 is identical to original
    assert s1 == fp.read(), 'corrupted file'
    fp.close()
    bucket.delete_key(k)
    # test a few variations on get_all_keys - first load some data
    # for the first one, let's override the content type
    phony_mimetype = 'application/x-boto-test'
    headers = {'Content-Type': phony_mimetype}
    k.name = 'foo/bar'
    k.set_contents_from_string(s1, headers)
    k.name = 'foo/bas'
    k.set_contents_from_filename('foobar')
    k.name = 'foo/bat'
    k.set_contents_from_string(s1)
    k.name = 'fie/bar'
    k.set_contents_from_string(s1)
    k.name = 'fie/bas'
    k.set_contents_from_string(s1)
    k.name = 'fie/bat'
    k.set_contents_from_string(s1)
    # try resetting the contents to another value
    md5 = k.md5
    k.set_contents_from_string(s2)
    assert k.md5 != md5
    os.unlink('foobar')
    all = bucket.get_all_keys()
    assert len(all) == 6
    rs = bucket.get_all_keys(prefix='foo')
    assert len(rs) == 3
    rs = bucket.get_all_keys(prefix='', delimiter='/')
    assert len(rs) == 2
    rs = bucket.get_all_keys(maxkeys=5)
    assert len(rs) == 5
    # test the lookup method
    k = bucket.lookup('foo/bar')
    assert isinstance(k, bucket.key_class)
    assert k.content_type == phony_mimetype
    k = bucket.lookup('notthere')
    assert k == None
    # try some metadata stuff
    k = bucket.new_key()
    k.name = 'has_metadata'
    mdkey1 = 'meta1'
    mdval1 = 'This is the first metadata value'
    k.set_metadata(mdkey1, mdval1)
    mdkey2 = 'meta2'
    mdval2 = 'This is the second metadata value'
    k.set_metadata(mdkey2, mdval2)
    # try a unicode metadata value
    mdval3 = u'föö'
    mdkey3 = 'meta3'
    k.set_metadata(mdkey3, mdval3)
    k.set_contents_from_string(s1)
    k = bucket.lookup('has_metadata')
    assert k.get_metadata(mdkey1) == mdval1
    assert k.get_metadata(mdkey2) == mdval2
    assert k.get_metadata(mdkey3) == mdval3
    k = bucket.new_key()
    k.name = 'has_metadata'
    k.get_contents_as_string()
    assert k.get_metadata(mdkey1) == mdval1
    assert k.get_metadata(mdkey2) == mdval2
    assert k.get_metadata(mdkey3) == mdval3
    bucket.delete_key(k)
    # test list and iterator
    rs1 = bucket.list()
    num_iter = 0
    for r in rs1:
        num_iter = num_iter + 1
    rs = bucket.get_all_keys()
    num_keys = len(rs)
    assert num_iter == num_keys
    # try a key with a funny character
    k = bucket.new_key()
    k.name = 'testnewline\n'
    k.set_contents_from_string('This is a test')
    rs = bucket.get_all_keys()
    assert len(rs) == num_keys + 1
    bucket.delete_key(k)
    rs = bucket.get_all_keys()
    # ... (remainder of the code omitted) ...
Example 7: test_1_basic
# Required imports: from boto.gs.connection import GSConnection [as alias]
# Or: from boto.gs.connection.GSConnection import get_bucket [as alias]
def test_1_basic(self):
    """basic regression test for Google Cloud Storage"""
    print '--- running GSConnection tests ---'
    c = GSConnection()
    # create a new, empty bucket
    bucket_name = 'test-%d' % int(time.time())
    bucket = c.create_bucket(bucket_name)
    # now try a get_bucket call and see if it's really there
    bucket = c.get_bucket(bucket_name)
    k = bucket.new_key()
    k.name = 'foobar'
    s1 = 'This is a test of file upload and download'
    s2 = 'This is a second string to test file upload and download'
    k.set_contents_from_string(s1)
    fp = open('foobar', 'wb')
    # now get the contents from s3 to a local file
    k.get_contents_to_file(fp)
    fp.close()
    fp = open('foobar')
    # check to make sure content read from s3 is identical to original
    assert s1 == fp.read(), 'corrupted file'
    fp.close()
    bucket.delete_key(k)
    # test a few variations on get_all_keys - first load some data
    # for the first one, let's override the content type
    phony_mimetype = 'application/x-boto-test'
    headers = {'Content-Type': phony_mimetype}
    k.name = 'foo/bar'
    k.set_contents_from_string(s1, headers)
    k.name = 'foo/bas'
    k.set_contents_from_filename('foobar')
    k.name = 'foo/bat'
    k.set_contents_from_string(s1)
    k.name = 'fie/bar'
    k.set_contents_from_string(s1)
    k.name = 'fie/bas'
    k.set_contents_from_string(s1)
    k.name = 'fie/bat'
    k.set_contents_from_string(s1)
    # try resetting the contents to another value
    md5 = k.md5
    k.set_contents_from_string(s2)
    assert k.md5 != md5
    # Test for stream API
    fp2 = open('foobar', 'rb')
    k.md5 = None
    k.base64md5 = None
    k.set_contents_from_stream(fp2, headers=headers)
    fp = open('foobar1', 'wb')
    k.get_contents_to_file(fp)
    fp.close()
    fp2.seek(0, 0)
    fp = open('foobar1', 'rb')
    assert (fp2.read() == fp.read()), 'Chunked Transfer corrupted the Data'
    fp.close()
    fp2.close()
    os.unlink('foobar1')
    os.unlink('foobar')
    all = bucket.get_all_keys()
    assert len(all) == 6
    rs = bucket.get_all_keys(prefix='foo')
    assert len(rs) == 3
    rs = bucket.get_all_keys(prefix='', delimiter='/')
    assert len(rs) == 2
    rs = bucket.get_all_keys(maxkeys=5)
    assert len(rs) == 5
    # test the lookup method
    k = bucket.lookup('foo/bar')
    assert isinstance(k, bucket.key_class)
    assert k.content_type == phony_mimetype
    k = bucket.lookup('notthere')
    assert k == None
    # try some metadata stuff
    k = bucket.new_key()
    k.name = 'has_metadata'
    mdkey1 = 'meta1'
    mdval1 = 'This is the first metadata value'
    k.set_metadata(mdkey1, mdval1)
    mdkey2 = 'meta2'
    mdval2 = 'This is the second metadata value'
    k.set_metadata(mdkey2, mdval2)
    # try a unicode metadata value
    mdval3 = u'föö'
    mdkey3 = 'meta3'
    k.set_metadata(mdkey3, mdval3)
    k.set_contents_from_string(s1)
    k = bucket.lookup('has_metadata')
    assert k.get_metadata(mdkey1) == mdval1
    assert k.get_metadata(mdkey2) == mdval2
    assert k.get_metadata(mdkey3) == mdval3
    k = bucket.new_key()
    k.name = 'has_metadata'
    k.get_contents_as_string()
    assert k.get_metadata(mdkey1) == mdval1
    assert k.get_metadata(mdkey2) == mdval2
    assert k.get_metadata(mdkey3) == mdval3
    bucket.delete_key(k)
    # test list and iterator
    # ... (remainder of the code omitted) ...
Example 8: GSVersioningTest
# Required imports: from boto.gs.connection import GSConnection [as alias]
# Or: from boto.gs.connection.GSConnection import get_bucket [as alias]
class GSVersioningTest(unittest.TestCase):
    gs = True

    def setUp(self):
        self.conn = GSConnection()
        self.buckets = []

    def tearDown(self):
        for b in self.buckets:
            bucket = self.conn.get_bucket(b)
            while len(list(bucket.list_versions())) > 0:
                for k in bucket.list_versions():
                    bucket.delete_key(k.name, generation=k.generation)
            bucket.delete()

    def _MakeBucketName(self):
        b = "boto-gs-test-%s" % repr(time.time()).replace(".", "-")
        self.buckets.append(b)
        return b

    def _MakeVersionedBucket(self):
        b = self.conn.create_bucket(self._MakeBucketName())
        b.configure_versioning(True)
        return b

    def testVersioningToggle(self):
        b = self.conn.create_bucket(self._MakeBucketName())
        self.assertFalse(b.get_versioning_status())
        b.configure_versioning(True)
        self.assertTrue(b.get_versioning_status())
        b.configure_versioning(False)
        self.assertFalse(b.get_versioning_status())

    def testDeleteVersionedKey(self):
        b = self._MakeVersionedBucket()
        k = b.new_key("foo")
        s1 = "test1"
        k.set_contents_from_string(s1)
        k = b.get_key("foo")
        g1 = k.generation
        s2 = "test2"
        k.set_contents_from_string(s2)
        k = b.get_key("foo")
        g2 = k.generation
        versions = list(b.list_versions())
        self.assertEqual(len(versions), 2)
        self.assertEqual(versions[0].name, "foo")
        self.assertEqual(versions[1].name, "foo")
        generations = [k.generation for k in versions]
        self.assertIn(g1, generations)
        self.assertIn(g2, generations)
        # Delete "current" version and make sure that version is no longer
        # visible from a basic GET call.
        k = b.get_key("foo")
        k.delete()
        self.assertIsNone(b.get_key("foo"))
        # Both old versions should still be there when listed using the versions
        # query parameter.
        versions = list(b.list_versions())
        self.assertEqual(len(versions), 2)
        self.assertEqual(versions[0].name, "foo")
        self.assertEqual(versions[1].name, "foo")
        generations = [k.generation for k in versions]
        self.assertIn(g1, generations)
        self.assertIn(g2, generations)
        # Delete generation 2 and make sure it's gone.
        b.delete_key("foo", generation=g2)
        versions = list(b.list_versions())
        self.assertEqual(len(versions), 1)
        self.assertEqual(versions[0].name, "foo")
        self.assertEqual(versions[0].generation, g1)
        # Delete generation 1 and make sure it's gone.
        b.delete_key("foo", generation=g1)
        versions = list(b.list_versions())
        self.assertEqual(len(versions), 0)

    def testGetVersionedKey(self):
        b = self._MakeVersionedBucket()
        k = b.new_key("foo")
        s1 = "test1"
        k.set_contents_from_string(s1)
        k = b.get_key("foo")
        g1 = k.generation
        o1 = k.get_contents_as_string()
        self.assertEqual(o1, s1)
        s2 = "test2"
        k.set_contents_from_string(s2)
        k = b.get_key("foo")
        g2 = k.generation
        self.assertNotEqual(g2, g1)
        o2 = k.get_contents_as_string()
        # ... (remainder of the code omitted) ...
Example 9: GoogleStorage
# Required imports: from boto.gs.connection import GSConnection [as alias]
# Or: from boto.gs.connection.GSConnection import get_bucket [as alias]
class GoogleStorage(Storage):
    def __init__(self, bucket=STORAGE_BUCKET_NAME, access_key=None,
                 secret_key=None, bucket_acl=BUCKET_ACL, acl=DEFAULT_ACL, headers=HEADERS,
                 gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES,
                 querystring_auth=QUERYSTRING_AUTH, querystring_expire=QUERYSTRING_EXPIRE,
                 reduced_redundancy=REDUCED_REDUNDANCY,
                 custom_domain=CUSTOM_DOMAIN, secure_urls=SECURE_URLS,
                 location=LOCATION, file_name_charset=FILE_NAME_CHARSET,
                 preload_metadata=PRELOAD_METADATA, calling_format=CALLING_FORMAT):
        self.bucket_acl = bucket_acl
        self.bucket_name = bucket
        self.acl = acl
        self.headers = headers
        self.preload_metadata = preload_metadata
        self.gzip = gzip
        self.gzip_content_types = gzip_content_types
        self.querystring_auth = querystring_auth
        self.querystring_expire = querystring_expire
        self.reduced_redundancy = reduced_redundancy
        self.custom_domain = custom_domain
        self.secure_urls = secure_urls
        self.location = location or ''
        self.location = self.location.lstrip('/')
        self.file_name_charset = file_name_charset

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = GSConnection(access_key, secret_key)
        self._entries = {}

    @property
    def bucket(self):
        if not hasattr(self, '_bucket'):
            self._bucket = self._get_or_create_bucket(self.bucket_name)
        return self._bucket

    @property
    def entries(self):
        if self.preload_metadata and not self._entries:
            self._entries = dict((self._decode_name(entry.key), entry)
                                 for entry in self.bucket.list())
        return self._entries

    def _get_access_keys(self):
        access_key = ACCESS_KEY_ID
        secret_key = SECRET_ACCESS_KEY
        if (access_key or secret_key) and (not access_key or not secret_key):
            access_key = os.environ.get(ACCESS_KEY_NAME)
            secret_key = os.environ.get(SECRET_KEY_NAME)
        if access_key and secret_key:
            # Both were provided, so use them
            return access_key, secret_key
        return None, None

    def _get_or_create_bucket(self, name):
        """Retrieves a bucket if it exists, otherwise creates it."""
        try:
            return self.connection.get_bucket(name, validate=AUTO_CREATE_BUCKET)
        except Exception, e:
            if AUTO_CREATE_BUCKET:
                bucket = self.connection.create_bucket(name)
                bucket.set_acl(self.bucket_acl)
                return bucket
            raise ImproperlyConfigured("%s" % str(e))
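Because the class reads its defaults from module-level settings constants (STORAGE_BUCKET_NAME, the ACLs, and so on), it can usually be instantiated without arguments once those are configured. The short, hypothetical snippet below only touches the attributes shown in this excerpt and assumes the settings constants and credentials are already in place.

storage = GoogleStorage()
bucket = storage.bucket           # lazily fetched or created via _get_or_create_bucket()
print(bucket.name)
for name in storage.entries:      # populated only when preload_metadata is enabled
    print(name)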
Example 10: GSTestCase
# Required imports: from boto.gs.connection import GSConnection [as alias]
# Or: from boto.gs.connection.GSConnection import get_bucket [as alias]
class GSTestCase(unittest.TestCase):
    gs = True

    def setUp(self):
        self._conn = GSConnection()
        self._buckets = []
        self._tempdirs = []

    # Retry with an exponential backoff if a server error is received. This
    # ensures that we try *really* hard to clean up after ourselves.
    @retry(GSResponseError)
    def tearDown(self):
        while len(self._tempdirs):
            tmpdir = self._tempdirs.pop()
            shutil.rmtree(tmpdir, ignore_errors=True)
        while(len(self._buckets)):
            b = self._buckets[-1]
            try:
                bucket = self._conn.get_bucket(b)
                while len(list(bucket.list_versions())) > 0:
                    for k in bucket.list_versions():
                        try:
                            bucket.delete_key(k.name, generation=k.generation)
                        except GSResponseError as e:
                            if e.status != 404:
                                raise
                bucket.delete()
            except GSResponseError as e:
                if e.status != 404:
                    raise
            self._buckets.pop()

    def _GetConnection(self):
        """Returns the GSConnection object used to connect to GCS."""
        return self._conn

    def _MakeTempName(self):
        """Creates and returns a temporary name for testing that is likely to be
        unique."""
        return "boto-gs-test-%s" % repr(time.time()).replace(".", "-")

    def _MakeBucketName(self):
        """Creates and returns a temporary bucket name for testing that is
        likely to be unique."""
        b = self._MakeTempName()
        self._buckets.append(b)
        return b

    def _MakeBucket(self):
        """Creates and returns temporary bucket for testing. After the test, the
        contents of the bucket and the bucket itself will be deleted."""
        b = self._conn.create_bucket(self._MakeBucketName())
        return b

    def _MakeKey(self, data='', bucket=None, set_contents=True):
        """Creates and returns a Key with provided data. If no bucket is given,
        a temporary bucket is created."""
        if data and not set_contents:
            # The data and set_contents parameters are mutually exclusive.
            raise ValueError('MakeKey called with a non-empty data parameter '
                             'but set_contents was set to False.')
        if not bucket:
            bucket = self._MakeBucket()
        key_name = self._MakeTempName()
        k = bucket.new_key(key_name)
        if set_contents:
            k.set_contents_from_string(data)
        return k

    def _MakeVersionedBucket(self):
        """Creates and returns temporary versioned bucket for testing. After the
        test, the contents of the bucket and the bucket itself will be
        deleted."""
        b = self._MakeBucket()
        b.configure_versioning(True)
        return b

    def _MakeTempDir(self):
        """Creates and returns a temporary directory on disk. After the test,
        the contents of the directory and the directory itself will be
        deleted."""
        tmpdir = tempfile.mkdtemp(prefix=self._MakeTempName())
        self._tempdirs.append(tmpdir)
        return tmpdir
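A hypothetical test built on these helpers might look like the sketch below; it relies only on _MakeKey from the class above and on Key.get_contents_as_string, which the other examples on this page already use.

class GSRoundTripTest(GSTestCase):
    def testRoundTrip(self):
        # _MakeKey creates a temporary bucket and uploads the string for us;
        # tearDown in GSTestCase cleans both up afterwards.
        k = self._MakeKey(data='round-trip payload')
        self.assertEqual(k.get_contents_as_string(), 'round-trip payload')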
Example 11: test_3_default_object_acls
# Required imports: from boto.gs.connection import GSConnection [as alias]
# Or: from boto.gs.connection.GSConnection import get_bucket [as alias]
def test_3_default_object_acls(self):
    """test default object acls"""
    c = GSConnection()
    # create a new bucket
    bucket_name = 'test-%d' % int(time.time())
    bucket = c.create_bucket(bucket_name)
    # now call get_bucket to see if it's really there
    bucket = c.get_bucket(bucket_name)
    bucket.set_def_acl('public-read')
    acl = bucket.get_def_acl()
    # save public-read acl for later test
    public_read_acl = acl
    assert acl.to_xml() == ('<AccessControlList><Entries><Entry>' +
                            '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
                            '</Entry></Entries></AccessControlList>')
    # back to private acl
    bucket.set_def_acl('private')
    acl = bucket.get_def_acl()
    assert acl.to_xml() == '<AccessControlList></AccessControlList>'
    # set default acl to an xml acl and verify it gets set
    bucket.set_def_acl(public_read_acl)
    acl = bucket.get_def_acl()
    assert acl.to_xml() == ('<AccessControlList><Entries><Entry>' +
                            '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
                            '</Entry></Entries></AccessControlList>')
    # back to private acl
    bucket.set_def_acl('private')
    acl = bucket.get_def_acl()
    assert acl.to_xml() == '<AccessControlList></AccessControlList>'
    # delete bucket
    c.delete_bucket(bucket)
    # repeat default acl tests using boto's storage_uri interface
    # create a new bucket
    bucket_name = 'test-%d' % int(time.time())
    uri = storage_uri('gs://' + bucket_name)
    uri.create_bucket()
    uri.set_def_acl('public-read')
    acl = uri.get_def_acl()
    # save public-read acl for later test
    public_read_acl = acl
    assert acl.to_xml() == ('<AccessControlList><Entries><Entry>' +
                            '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
                            '</Entry></Entries></AccessControlList>')
    # back to private acl
    uri.set_def_acl('private')
    acl = uri.get_def_acl()
    assert acl.to_xml() == '<AccessControlList></AccessControlList>'
    # set default acl to an xml acl and verify it gets set
    uri.set_def_acl(public_read_acl)
    acl = uri.get_def_acl()
    assert acl.to_xml() == ('<AccessControlList><Entries><Entry>' +
                            '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
                            '</Entry></Entries></AccessControlList>')
    # back to private acl
    uri.set_def_acl('private')
    acl = uri.get_def_acl()
    assert acl.to_xml() == '<AccessControlList></AccessControlList>'
    # delete bucket
    uri.delete_bucket()
    print '--- tests completed ---'