本文整理汇总了Python中azure.storage.blob.BlobService.put_block_blob_from_path方法的典型用法代码示例。如果您正苦于以下问题:Python BlobService.put_block_blob_from_path方法的具体用法?Python BlobService.put_block_blob_from_path怎么用?Python BlobService.put_block_blob_from_path使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类azure.storage.blob.BlobService
的用法示例。
在下文中一共展示了BlobService.put_block_blob_from_path方法的13个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: upload_file_to_azure
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import put_block_blob_from_path [as 别名]
def upload_file_to_azure(in_file, file_name, container_name=settings.AZURE_CONTAINER):
    """Upload a local file to Azure Blob Storage as a block blob.

    Credentials come from the module-level AZURE_ACCOUNT_NAME and
    AZURE_ACCOUNT_KEY; failures are reported to stdout, never raised.
    """
    try:
        service = BlobService(AZURE_ACCOUNT_NAME, AZURE_ACCOUNT_KEY)
        service.put_block_blob_from_path(
            container_name=container_name,
            blob_name=file_name,
            file_path=in_file,
            x_ms_blob_content_type='application/octet-stream'
        )
    except Exception as ex:
        # Best-effort upload: report the failure and keep going, matching
        # the original contract of not propagating upload errors.
        print("Failed to upload blob: {0}".format(ex))
示例2: _BlobStorageFileHandler
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import put_block_blob_from_path [as 别名]
class _BlobStorageFileHandler(object):
    """Handler mixin that ships rotated log files to an Azure Blob container.

    The ``container`` argument may contain %-style placeholders that are
    filled from ``self.meta`` (``hostname`` with ``_`` mapped to ``-`` to
    satisfy Azure container-name rules, and ``process`` = current PID).
    """

    def __init__(self,
                 account_name=None,
                 account_key=None,
                 protocol='https',
                 container='logs',
                 zip_compression=False,
                 max_connections=1,
                 max_retries=5,
                 retry_wait=1.0):
        self.service = BlobService(account_name, account_key, protocol)
        # Container creation is deferred until the first upload.
        self.container_created = False
        hostname = gethostname()
        # Azure container names cannot contain underscores, so substitute a
        # sanitized hostname into the container-name template...
        self.meta = {'hostname': hostname.replace('_', '-'),
                     'process': os.getpid()}
        self.container = (container % self.meta).lower()
        # ...then restore the real hostname for any later metadata use.
        self.meta['hostname'] = hostname
        self.zip_compression = zip_compression
        self.max_connections = max_connections
        self.max_retries = max_retries
        self.retry_wait = retry_wait

    def put_file_into_storage(self, dirName, fileName):
        """
        Ship the outdated log file to the specified blob container.
        """
        if not self.container_created:
            self.service.create_container(self.container)
            self.container_created = True
        tmpfile_path = ''
        try:
            file_path = os.path.join(dirName, fileName)
            if self.zip_compression:
                # Compress the log into a temporary zip and upload that.
                suffix, content_type = '.zip', 'application/zip'
                fd, tmpfile_path = mkstemp(suffix=suffix)
                with os.fdopen(fd, 'wb') as f:
                    with ZipFile(f, 'w', ZIP_DEFLATED) as z:
                        z.write(file_path, arcname=fileName)
                file_path = tmpfile_path
            else:
                suffix, content_type = '', 'text/plain'
            self.service.put_block_blob_from_path(self.container,
                                                  fileName + suffix,
                                                  file_path,
                                                  x_ms_blob_content_type=content_type,
                                                  max_connections=self.max_connections,
                                                  max_retries=self.max_retries,
                                                  retry_wait=self.retry_wait)
        finally:
            # BUG FIX: the original guarded cleanup with ``if ... and fd:``,
            # but a file descriptor of 0 is valid yet falsy (and ``fd`` is
            # already consumed by os.fdopen above). Key the temp-file removal
            # on the path actually created instead.
            if tmpfile_path:
                os.remove(tmpfile_path)
示例3: main
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import put_block_blob_from_path [as 别名]
def main():
    """Copy a local file into the configured container as a JPEG block blob.

    All settings (credentials, names, paths, verbosity) come from module
    globals populated elsewhere in the script.
    """
    # Echo the copy parameters first when running verbosely.
    if verbose:
        print('Copying')
        print('Storage account:', storage_account_name)
        print('Storage Key:', storage_account_key)
        print('Container Name:', storage_container_name)
        print('Input file:', inputfile)
        print('Output Blob:', outputblob)
    service = BlobService(account_name=storage_account_name,
                          account_key=storage_account_key)
    #service.create_container(storage_container_name, x_ms_blob_public_access='container')
    service.put_block_blob_from_path(storage_container_name,
                                     outputblob,
                                     inputfile,
                                     x_ms_blob_content_type="image/jpeg")
示例4: upload_results
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import put_block_blob_from_path [as 别名]
def upload_results():
    """Upload the results TSV to the challenge's blob container.

    Authenticates with a SAS token from ``Config``; progress is logged.

    :return: None
    """
    logger = logging.getLogger(__name__)
    results_fpath = '/data/wsdm_cup/results/results.tsv'
    logger.info('Uploading results from {0}'.format(results_fpath))
    service = BlobService(account_name='wsdmcupchallenge',
                          sas_token=Config.SAS_TOKEN)
    service.put_block_blob_from_path(container_name='bletchleypark',
                                     blob_name='results.tsv',
                                     file_path=results_fpath)
    logger.info('Done uploading')
    return
示例5: main
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import put_block_blob_from_path [as 别名]
def main():
    """Upload a JPEG to blob storage, then build a short-lived access policy.

    Configuration comes from module globals populated elsewhere in the script.
    """
    # Report what is about to be copied when running verbosely.
    if verbose:
        print('Copying')
        print('Storage account:', storage_account_name)
        print('Storage Key:', storage_account_key)
        print('Container Name:', storage_container_name)
        print('Input file:', inputfile)
        print('Output Blob:', outputblob)
    service = BlobService(account_name=storage_account_name,
                          account_key=storage_account_key)
    #service.create_container(storage_container_name, x_ms_blob_public_access='container')
    service.put_block_blob_from_path(storage_container_name,
                                     outputblob,
                                     inputfile,
                                     x_ms_blob_content_type="image/jpeg")
    # This access policy is valid for four minutes (now - 120 s until
    # now + 120 s) to account for clock skew between client and service.
    ap = AccessPolicy(
        start=(datetime.datetime.utcnow() + datetime.timedelta(seconds=-120)).strftime('%Y-%m-%dT%H:%M:%SZ'),
        expiry=(datetime.datetime.utcnow() + datetime.timedelta(seconds=120)).strftime('%Y-%m-%dT%H:%M:%SZ'),
        permission=BlobSharedAccessPermissions.READ,
    )
示例6: open
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import put_block_blob_from_path [as 别名]
# Build the UK anime-availability feed: query every source through the UK
# proxy, merge alternate titles, and publish the result as uk.json.
with open('proxies.json') as proxies_file:
    proxy_data = json.load(proxies_file)
proxy = proxy_data['uk']
sources = [
    animesources.Crunchyroll(titlemap, multiseason, 'uk', proxy),
    animesources.FunimationNow(titlemap, multiseason, 'gb', proxy),
    animesources.Netflix(titlemap, multiseason, 'uk', proxy),
    animesources.Daisuki(titlemap, multiseason, 'uk', proxy),
    animesources.Viewster(titlemap, multiseason, 'uk', proxy),
    animesources.Animax(titlemap, multiseason, 'uk', proxy)]
for source in sources:
    source.UpdateShowList(shows)
    print(source.GetName() + ': ' + str(len(shows)))
with open('alternates.json') as alternates_file:
    alternates = json.load(alternates_file)
for alternate in alternates:
    # BUG FIX: the original used False as the "not found" default and then
    # tested ``if (match_index):`` — that also rejects a legitimate match at
    # index 0. Use None as the sentinel and an explicit identity test.
    match_index = next((i for i, x in enumerate(shows)
                        if animesources.compare(x['name'], alternate)), None)
    if match_index is not None:
        shows[match_index]['alt'] = alternates[alternate]
shows = sorted(shows, key=lambda show: show['name'].lower())
blob = {"lastUpdated": datetime.utcnow().isoformat(), "shows": shows}
# Write the feed locally with a context manager (the original leaked the
# handle on a json.dump error), then publish it.
with open('uk.json', 'w') as out_file:
    json.dump(blob, out_file)
azure_blob.put_block_blob_from_path(
    'assets',
    'uk.json',
    'uk.json',
    x_ms_blob_content_type='application/json'
)
print('done')
示例7: BlobService
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import put_block_blob_from_path [as 别名]
# The Storage Account Name and key.
# SECURITY NOTE(review): a real-looking account key is committed here in
# clear text — rotate it and load credentials from configuration instead.
storage_account_name = 'larryfrstore'
storage_account_key = 'Vm7YUAvuKQFjNSv2xY3ckgxwLUMkECUHNFF09lipZY2QxNgTFDHbA7o4U6joHXg+/Wd23sHkukjZUp41siTtwQ=='
storage_container_name = 'mycontainer'
example_file_path = '..\\sampledata\\sample.log'

# Create the blob service, using the name and key for your Azure Storage account.
blob_service = BlobService(storage_account_name, storage_account_key)

# Create the container, if it does not already exist.
blob_service.create_container(storage_container_name)

# Upload an example file to the container.
blob_service.put_block_blob_from_path(
    storage_container_name,
    'sample.log',
    example_file_path,
)

# Create a new signed identifier (policy), give it the configured name, an
# expiration date, and Read + List permissions.
si = SignedIdentifier()
si.id = policy_name
si.access_policy.expiry = '2016-01-01'
si.access_policy.permission = (ContainerSharedAccessPermissions.READ
                               + ContainerSharedAccessPermissions.LIST)

# Get the existing signed identifiers (policies) for the container,
# so the new one can be appended to the list.
identifiers = blob_service.get_container_acl(storage_container_name)
开发者ID:hning86,项目名称:hdinsight-dotnet-python-azure-storage-shared-access-signature,代码行数:33,代码来源:SASToken.py
示例8: BlobService
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import put_block_blob_from_path [as 别名]
from azure.storage.blob import BlobService

blob_service = BlobService(account_name="<account_name>", account_key="<account_key>")

# Create the container (plain, then again with container-level public
# access), and set the ACL explicitly as well.
blob_service.create_container("datacontainer")
blob_service.create_container("datacontainer", x_ms_blob_public_access="container")
blob_service.set_container_acl("datacontainer", x_ms_blob_public_access="container")

# Upload this very script as a block blob.
blob_service.put_block_blob_from_path(
    "datacontainer", "datablob", "StorageClientPy.py", x_ms_blob_content_type="text/x-script.phyton"
)

# Page through the container with continuation markers, collecting every
# blob before printing the names.
blobs = []
marker = None
while True:
    page = blob_service.list_blobs("datacontainer", marker=marker)
    blobs.extend(page)
    if not page.next_marker:
        break
    marker = page.next_marker
for blob in blobs:
    print(blob.name)

# Download the blob back to a local file.
blob_service.get_blob_to_path("datacontainer", "datablob", "out-StorageClientPy.py")
示例9: AzureIOStore
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import put_block_blob_from_path [as 别名]
#.........part of the code omitted here.........
# NOTE(review): fragment — the enclosing method (apparently a blob-listing
# generator on AzureIOStore) and the start of this expression were elided by
# the example scraper, and the original indentation is lost; shown as scraped.
blob.properties.last_modified).replace(
tzinfo=dateutil.tz.tzutc())
# Presumably yields (path, mtime) pairs when the caller asked for
# timestamps, otherwise bare paths — TODO confirm against the full class.
yield relative_path, mtime
else:
yield relative_path
# Save the marker
# An empty continuation marker means the listing is complete.
marker = result.next_marker
if not marker:
break
@backoff
def write_output_file(self, local_path, output_path):
    """
    Write output to Azure. Will create the container if necessary.
    """
    self.__connect()
    RealTimeLogger.get().debug("Saving {} to AzureIOStore".format(
        output_path))
    # Make the destination container up front; a conflict error simply
    # means it already exists, which is fine.
    try:
        self.connection.create_container(self.container_name)
    except azure.WindowsAzureConflictError:
        pass
    # Upload the blob (synchronously)
    # TODO: catch no container error here, make the container, and retry
    self.connection.put_block_blob_from_path(
        self.container_name,
        self.name_prefix + output_path,
        local_path)
@backoff
def exists(self, path):
    """
    Returns true if the given input or output file exists in Azure already.

    NOTE(review): truncated by the example scraper — the loop tail that
    advances ``marker`` / returns False is missing, so the fragment as shown
    would loop forever on a miss. Consult the full class before editing.
    """
    self.__connect()
    marker = None
    while True:
        try:
            # Make the container
            self.connection.create_container(self.container_name)
        except azure.WindowsAzureConflictError:
            # The container probably already exists
            pass
        # Get the results from Azure.
        result = self.connection.list_blobs(self.container_name,
            prefix=self.name_prefix + path, marker=marker)
        for blob in result:
            # Look at each blob
            if blob.name == self.name_prefix + path:
                # Found it
                return True
示例10: sleep
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import put_block_blob_from_path [as 别名]
# NOTE(review): fragment — the opening ``try:`` matching the except below was
# elided by the example scraper and the final upload call is cut off
# mid-argument-list; original indentation is also lost. Shown as scraped.
theLog.flush()
except AzureMissingResourceHttpError:
pass
# Try to put the blob out in the wild, provide MD5 for error
# checking since M$ didn't feel the need to implement a return
# code for this function
# On further testing, the "content_md5" is only for header rather
# than the actual blob content - have to wait for these APIs to mature
try:
theLog.write("Writing data to Blob {3} to {0}:{1}/{2}\n".format(azureAccount, ingestContainer, filename, stagingDir+"/"+filename))
azureStorage.put_block_blob_from_path(ingestContainer,
filename,
stagingDir+"/"+filename,
#content_md5=md5Checksum.encode('base64').strip(),
max_connections=5)
theLog.write("Wrote data to Blob\n")
# Pause between the data upload and the companion .md5 upload.
sleep(5)
if not isClaims:
theLog.write("Writing md5 to Blob {3} to {0}:{1}/{2}\n".format(azureAccount,
ingestContainer,
filename.split(".")[0] + ".md5",
md5FullFilePath))
azureStorage.put_block_blob_from_path(ingestContainer,
filename.split(".")[0] + ".md5",
md5FullFilePath,
#content_md5=md5Checksum.encode('base64').strip(),
示例11: BlobService
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import put_block_blob_from_path [as 别名]
from azure.storage.blob import BlobService
import os

# Authenticate against the 'newsfeels' account; the key is read from the
# AZUREKEY environment variable rather than being hard-coded.
blob_service = BlobService(account_name='newsfeels', account_key=os.environ['AZUREKEY'])

# Upload one song into the 'songs' container.
blob_service.put_block_blob_from_path(
    'songs',
    '-02_diamonds',  # number before the song emotion corresponds to 'sentValue' from songdictionary.py. 10 = 1.0, 08 = 0.8, etc.
    '-02_diamonds.mp3',
    x_ms_blob_content_type='mp3'
)
示例12: BY
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import put_block_blob_from_path [as 别名]
# NOTE(review): fragment — the opening ``try:`` matching the except below and
# the body of the trailing ``if`` were elided by the example scraper;
# original indentation is also lost. Shown as scraped.
azureStorage.delete_blob(ingestContainer, targetIngestFullPath)
theLog.write("Existing ingest blob found, deleting it\n\n")
theLog.flush()
except AzureMissingResourceHttpError:
pass
# Try to put the blob out in the wild, provide MD5 for error
# checking since M$ didn't feel the need to implement a return
# code for this function
# On further testing, the "content_md5" is only for header rather
# than the actual blob content - have to wait for these APIs to mature
try:
azureStorage.put_block_blob_from_path(ingestContainer,
targetIngestFullPath,
fullFilePath,
#content_md5=md5Checksum.encode('base64').strip(),
max_connections=5)
theLog.write("Uploaded blob to ingest container : {0}\n".format(ingestContainer))
theLog.flush()
except AzureHttpError as e:
# Record a short failure tag built from the first sentence of the error.
result = "Ingest-Failed:" + e.message.split(".")[0]
theLog.write("Upload exception: {0}\n\n".format(result))
theLog.flush()
# Create a list of queries for Hive
hiveQueries = []
sortedByString = "SORTED BY(GenPatientID)"
if dataSetType == "Clients" or dataSetType == "Providers":
示例13: print
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import put_block_blob_from_path [as 别名]
from azure.storage.blob import BlobService
import subprocess
import uuid

print("debug")

# Replace the placeholders with real storage credentials before running.
blob_service = BlobService(account_name='{PUT YOUR ACCOUNT NAME HERE}', account_key='{PUT YOUR ACCOUNT KEY HERE}')
blob_service.create_container('mycontainer', x_ms_blob_public_access='container')

# Capture webcam frames forever, uploading each one under a fresh name.
while True:
    subprocess.call(['./campic.sh'])
    image = "/home/pi/webcam/image.jpg"
    blob_service.put_block_blob_from_path(
        'mycontainer',
        # BUG FIX: the original passed the UUID object itself as the blob
        # name; the SDK expects a string, so convert explicitly.
        str(uuid.uuid1()),
        image,
        # BUG FIX: 'image/jpg' is not a registered MIME type; the standard
        # content type for JPEG images is 'image/jpeg'.
        x_ms_blob_content_type='image/jpeg'
    )