This article collects typical usage examples of the Python method azure.storage.blob.BlobService.put_blob. If you are wondering exactly how to use BlobService.put_blob, or are looking for working examples of it, the curated code samples below may help. You can also read more about its containing class, azure.storage.blob.BlobService.
The following shows 5 code examples of BlobService.put_blob, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
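These examples come from Python 2 codebases and target the legacy azure-storage SDK, which is where the BlobService class lives (newer releases of the SDK replaced it with different client classes). As a point of reference before the examples, here is a minimal sketch of a typical put_blob call with that legacy SDK; the account name, key, container name, blob name, and content are placeholders rather than values taken from the examples:

# Minimal sketch of a put_blob call (legacy azure-storage SDK).
# 'myaccount', 'mykey', 'mycontainer' and 'hello.txt' are placeholder values.
from azure.storage.blob import BlobService

blob_service = BlobService(account_name='myaccount', account_key='mykey')
blob_service.create_container('mycontainer')  # returns False if the container already exists
blob_service.put_blob(
    'mycontainer',                       # container name
    'hello.txt',                         # blob name
    'Hello, Azure!',                     # blob content
    x_ms_blob_type='BlockBlob',          # upload as a single block blob
    x_ms_blob_content_type='text/plain'  # stored as the blob's Content-Type
)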
Example 1: _save
# Required module import: from azure.storage.blob import BlobService [as alias]
# Or: from azure.storage.blob.BlobService import put_blob [as alias]
def _save(self, name, content):
    # accountName and accountKey are assumed to be defined elsewhere in the module
    blob_service = BlobService(account_name=accountName, account_key=accountKey)
    import mimetypes
    content.open()
    content_type = None
    if hasattr(content.file, 'content_type'):
        content_type = content.file.content_type
    else:
        content_type = mimetypes.guess_type(name)[0]
    content_str = content.read()
    blob_service.put_blob(
        'pictures',
        name,
        content_str,
        x_ms_blob_type='BlockBlob',
        x_ms_blob_content_type=content_type
    )
    content.close()
    return name
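A hedged usage sketch: assuming the _save method above belongs to a custom Django Storage subclass (the class name AzureBlobStorage and the file paths below are placeholders, not names from the original project), saving a file through it would look roughly like this; Django's Storage.save() calls _save() internally:

# Hypothetical usage; AzureBlobStorage stands in for the Storage subclass
# that defines the _save method shown above.
from django.core.files.base import ContentFile

storage = AzureBlobStorage()
data = open('local_copy.png', 'rb').read()
# save() picks an available name, then delegates to _save()
saved_name = storage.save('avatars/user42.png', ContentFile(data))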
Example 2: open
# Required module import: from azure.storage.blob import BlobService [as alias]
# Or: from azure.storage.blob.BlobService import put_blob [as alias]
# (besides BlobService, this loop also needs: import time)
while True:
    try:
        # open the 1-wire temperature sensor file and read it into a list of lines
        f = open('/sys/bus/w1/devices/28-031561b266ff/w1_slave', 'r')
        list1 = list(f)
        f.close()  # close the sensor file as soon as it has been read
        list2 = list(list1[1])
        # slice out the raw temperature digits and insert the decimal point
        temperature = list2[29:]
        del temperature[len(temperature) - 1]
        temperature.insert(len(temperature) - 3, '.')
        # convert the list of characters back into a string
        tempAsFloat = "".join(temperature)
        print tempAsFloat  # print the temperature
        # send the temperature to the Azure cloud;
        # account_name/account_key are the storage account's name and key
        blob_service = BlobService(account_name='*', account_key='*')
        # create a container named 'temperature'
        blob_service.create_container('temperature')
        # change the container permissions to allow public reads
        blob_service.set_container_acl('temperature', x_ms_blob_public_access='container')
        # 'container name', 'blob name', 'data to upload', 'BlockBlob'
        blob_service.put_blob('temperature', 'temperature', tempAsFloat, 'BlockBlob')
        time.sleep(10)  # loop every 10 seconds to update the temperature data in Azure
    except:
        pass
Example 3: AzureFS
# Required module import: from azure.storage.blob import BlobService [as alias]
# Or: from azure.storage.blob.BlobService import put_blob [as alias]
#......... part of the code is omitted here .........
            if f_name in dir['files']:
                del dir['files'][f_name]
            raise FuseOSError(ENOENT)
        except AzureException as e:
            log.error("Read blob failed HTTP %d" % e.code)
            raise FuseOSError(EAGAIN)

        self.fd += 1
        self.fds[self.fd] = (path, data, False)
        return self.fd

    def flush(self, path, fh=None):
        if not fh:
            raise FuseOSError(EIO)
        else:
            if fh not in self.fds:
                raise FuseOSError(EIO)

            path = self.fds[fh][0]
            data = self.fds[fh][1]
            dirty = self.fds[fh][2]

            if not dirty:
                return 0  # avoid redundant write

            d, f = self._parse_path(path)
            c_name = self.parse_container(path)

            if data is None:
                data = ''

            try:
                if len(data) < 64 * 1024 * 1024:  # 64 MB
                    self.blobs.put_blob(c_name, f, data, 'BlockBlob')
                else:
                    # divide file by blocks and upload
                    block_size = 8 * 1024 * 1024
                    num_blocks = int(math.ceil(len(data) * 1.0 / block_size))
                    rd = str(random.randint(1, 1e8))
                    block_ids = list()

                    for i in range(num_blocks):
                        part = data[i * block_size:min((i + 1) * block_size,
                                                       len(data))]
                        block_id = base64.encodestring('%s_%s' % (rd,
                                    (8 - len(str(i))) * '0' + str(i)))
                        self.blobs.put_block(c_name, f, part, block_id)
                        block_ids.append(block_id)

                    self.blobs.put_block_list(c_name, f, block_ids)
            except AzureException:
                raise FuseOSError(EAGAIN)

            dir = self._get_dir(d, True)
            if not dir or f not in dir['files']:
                raise FuseOSError(EIO)

            # update local data
            dir['files'][f]['st_size'] = len(data)
            dir['files'][f]['st_mtime'] = time.time()

            self.fds[fh] = (path, data, False)  # mark as not dirty
            return 0

    def release(self, path, fh=None):
        if fh is not None and fh in self.fds:
            del self.fds[fh]
Example 4: uri_put_file
# Required module import: from azure.storage.blob import BlobService [as alias]
# Or: from azure.storage.blob.BlobService import put_blob [as alias]
def uri_put_file(creds, uri, fp, content_encoding=None):
    assert fp.tell() == 0
    assert uri.startswith("wabs://")

    def log_upload_failures_on_error(exc_tup, exc_processor_cxt):
        def standard_detail_message(prefix=""):
            return prefix + " There have been {n} attempts to upload " "file {url} so far.".format(
                n=exc_processor_cxt, url=uri
            )

        typ, value, tb = exc_tup
        del exc_tup

        # Screen for certain kinds of known-errors to retry from
        if issubclass(typ, socket.error):
            socketmsg = value[1] if isinstance(value, tuple) else value

            logger.info(
                msg="Retrying upload because of a socket error",
                detail=standard_detail_message("The socket error's message is '{0}'.".format(socketmsg)),
            )
        else:
            # For all otherwise untreated exceptions, report them as a
            # warning and retry anyway -- all exceptions that can be
            # justified should be treated and have error messages
            # listed.
            logger.warning(
                msg="retrying file upload from unexpected exception",
                detail=standard_detail_message(
                    "The exception type is {etype} and its value is "
                    "{evalue} and its traceback is {etraceback}".format(
                        etype=typ, evalue=value, etraceback="".join(traceback.format_tb(tb))
                    )
                ),
            )

        # Help Python GC by resolving possible cycles
        del tb

    # Because we're uploading in chunks, catch rate limiting and
    # connection errors which occur for each individual chunk instead of
    # failing the whole file and restarting.
    @retry(retry_with_count(log_upload_failures_on_error))
    def upload_chunk(chunk, block_id):
        check_sum = base64.encodestring(md5(chunk).digest()).strip("\n")
        conn.put_block(url_tup.netloc, url_tup.path, chunk, block_id, content_md5=check_sum)

    url_tup = urlparse(uri)
    kwargs = dict(x_ms_blob_type="BlockBlob")
    if content_encoding is not None:
        kwargs["x_ms_blob_content_encoding"] = content_encoding

    conn = BlobService(creds.account_name, creds.account_key, protocol="https")
    # Create an empty block blob first; its blocks are uploaded and committed below.
    conn.put_blob(url_tup.netloc, url_tup.path, "", **kwargs)

    # WABS requires large files to be uploaded in 4MB chunks
    block_ids = []
    length, index = 0, 0
    pool_size = int(os.getenv("WABS_UPLOAD_POOL_SIZE", 5))  # coerce the env value to an int
    p = gevent.pool.Pool(size=pool_size)
    while True:
        data = fp.read(WABS_CHUNK_SIZE)
        if data:
            length += len(data)
            block_id = base64.b64encode(str(index))
            p.wait_available()
            p.spawn(upload_chunk, data, block_id)
            block_ids.append(block_id)
            index += 1
        else:
            p.join()
            break

    conn.put_block_list(url_tup.netloc, url_tup.path, block_ids)

    # To maintain consistency with the S3 version of this function we must
    # return an object with a certain set of attributes. Currently, that set
    # of attributes consists of only 'size'
    return _Key(size=length)  # 'length' is the total number of bytes uploaded
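The uri_put_file function above takes a credentials object exposing account_name and account_key attributes and a file object positioned at the start. A hedged, hypothetical usage sketch follows; the credential values, container, and path are placeholders, and Creds merely stands in for whatever credentials type the surrounding project actually provides:

# Hypothetical invocation of uri_put_file; Creds is only a stand-in for the
# real credentials object.
from collections import namedtuple

Creds = namedtuple('Creds', ['account_name', 'account_key'])
creds = Creds(account_name='myaccount', account_key='mykey')

with open('/tmp/backup.tar', 'rb') as fp:
    key = uri_put_file(creds, 'wabs://mycontainer/backups/backup.tar', fp)
    print key.size  # total number of bytes uploaded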
Example 5: Command
# Required module import: from azure.storage.blob import BlobService [as alias]
# Or: from azure.storage.blob.BlobService import put_blob [as alias]
#......... part of the code is omitted here .........
    def handle(self, *args, **options):
        self.wipe = options.get('wipe')
        self.test_run = options.get('test_run')
        self.verbosity = int(options.get('verbosity'))
        # 'options' is a dict, so test for the key rather than an attribute
        if options.get('container'):
            self.STATIC_CONTAINER = options.get('container')
        self.sync_files()

    def sync_files(self):
        self.service = BlobService(account_name=self.ACCOUNT_NAME,
                                   account_key=self.ACCOUNT_KEY)

        try:
            self.service.get_container_properties(self.STATIC_CONTAINER)
        except AzureMissingResourceHttpError:
            self.service.create_container(self.STATIC_CONTAINER,
                                          x_ms_blob_public_access='blob')

        self.service.set_container_acl(self.STATIC_CONTAINER, x_ms_blob_public_access='blob')

        # if -w option is provided, wipe out the contents of the container
        if self.wipe:
            blob_count = len(self.service.list_blobs(self.STATIC_CONTAINER))

            if self.test_run:
                print "Wipe would delete %d objects." % blob_count
            else:
                print "Deleting %d objects..." % blob_count
                for blob in self.service.list_blobs(self.STATIC_CONTAINER):
                    self.service.delete_blob(self.STATIC_CONTAINER, blob.name)

        # walk through the directory, creating or updating files on the cloud
        os.path.walk(self.DIRECTORY, self.upload_files, "foo")

        # remove any files on remote that don't exist locally
        self.delete_files()

        # print out the final tally to the cmd line
        self.update_count = self.upload_count - self.create_count
        print
        if self.test_run:
            print "Test run complete with the following results:"
        print "Skipped %d. Created %d. Updated %d. Deleted %d." % (
            self.skip_count, self.create_count, self.update_count, self.delete_count)

    def upload_files(self, arg, dirname, names):
        # upload or skip items
        for item in names:
            file_path = os.path.join(dirname, item)
            if os.path.isdir(file_path):
                continue  # Don't try to upload directories

            object_name = self.STATIC_URL + file_path.split(self.DIRECTORY)[1]
            self.local_object_names.append(object_name)

            try:
                properties = self.service.get_blob_properties(self.STATIC_CONTAINER,
                                                              object_name)
            except AzureMissingResourceHttpError:
                properties = {}
                self.create_count += 1

            cloud_datetime = None
            if 'last-modified' in properties:
                cloud_datetime = (properties['last-modified'] and
                                  datetime.datetime.strptime(
                                      properties['last-modified'],
                                      "%a, %d %b %Y %H:%M:%S %Z"
                                  ) or None)

            local_datetime = datetime.datetime.utcfromtimestamp(
                os.stat(file_path).st_mtime)

            if cloud_datetime and local_datetime < cloud_datetime:
                self.skip_count += 1
                if self.verbosity > 1:
                    print "Skipped %s: not modified." % object_name
                continue

            if not self.test_run:
                file_contents = open(file_path, 'r').read()
                content_type, encoding = mimetypes.guess_type(file_path)
                self.service.put_blob(self.STATIC_CONTAINER, object_name, file_contents,
                                      x_ms_blob_type='BlockBlob', x_ms_blob_content_type=content_type,
                                      content_encoding=encoding)
                # sync_headers(cloud_obj)
            self.upload_count += 1
            if self.verbosity > 1:
                print "Uploaded", object_name

    def delete_files(self):
        # remove any objects in the container that don't exist locally
        for blob in self.service.list_blobs(self.STATIC_CONTAINER):
            if blob.name not in self.local_object_names:
                self.delete_count += 1
                if self.verbosity > 1:
                    print "Deleted %s" % blob.name
                if not self.test_run:
                    self.service.delete_blob(self.STATIC_CONTAINER, blob.name)