本文整理汇总了Python中azure.storage.blob.BlobService.get_blob_to_path方法的典型用法代码示例。如果您正苦于以下问题:Python BlobService.get_blob_to_path方法的具体用法?Python BlobService.get_blob_to_path怎么用?Python BlobService.get_blob_to_path使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类azure.storage.blob.BlobService
的用法示例。
在下文中一共展示了BlobService.get_blob_to_path方法的13个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: getblob
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_to_path [as 别名]
def getblob(request):
    """Download the 'NARYN.day' blob from the 'flow' container, then answer with a tiny JSON body."""
    assert isinstance(request, HttpRequest)
    # NOTE(review): the account key is a placeholder; supply the real key via configuration.
    service = BlobService(account_name='araldrift', account_key='YOURKEYGOESHERE')
    # Blob URL layout: http://<storage-account-name>.blob.core.windows.net/<container-name>/<blob-name>
    container, blob_name, local_path = 'flow', 'NARYN.day', './NARYN.day'
    service.get_blob_to_path(container, blob_name, local_path)
    return HttpResponse(json.dumps('i am just a wee bit of json'))
示例2: get_tags
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_to_path [as 别名]
def get_tags():
    """Download the posted image blob, base64-decode it, tag it with Clarifai,
    and return matching search terms as a JSON string.

    The blob id is read from the POSTed form field 'blob_id'.
    TODO: add error checking (any storage/decoding/API failure currently raises).
    """
    clarifai_api = ClarifaiApi()
    # SECURITY(review): hard-coded storage account key should be moved to configuration.
    blob_service = BlobService('calhacks', 'mm7EmY+T+MGahePBDSDU5LHpZR5tRXuh4MSco4jFrzHovOPEf06e18c89pxtPIo4NDVhhjSeaQY/FQmKNxjjyA==')
    blob_name = request.form['blob_id']
    blob_service.get_blob_to_path('imagestore', blob_name, 'out.png')
    print("checkpoint 1")
    # BUGFIX: the original opened 'out.png' and 'img.png' (read) without ever
    # closing them, and called f.close() inside a `with` block. Use context
    # managers throughout. The downloaded blob is base64 text, possibly split
    # across lines; reassemble it before decoding.
    with open('out.png', 'r') as encoded:
        strd = "".join(line.strip() for line in encoded)
    fname = 'img.png'
    with open(fname, 'wb') as out:
        out.write(base64.b64decode(strd))
    with open(fname, 'rb') as img:
        result = clarifai_api.tag_images(img)
    st = result['results'][0]['result']['tag']['classes'][0:6]
    print(st)
    # Blacklist filtering is currently disabled (empty list). Re-enable by
    # restoring: ['food', 'nobody', 'still life', 'meal', 'dish', 'plate',
    # 'delicious', 'isolated', 'cutout', 'unhealthy', 'one', 'background']
    # BUGFIX: renamed the loop variable; the original reused `i`, shadowing
    # the (then-unclosed) file handle above.
    for tag in []:
        while tag in st:
            st.remove(tag)
    js = json.dumps(search_terms(st))
    print(js)
    return js
示例3: gethydrograph
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_to_path [as 别名]
def gethydrograph(request):
    '''
    JSON return of a particular hydrograph by start / stop / station / time interval.

    In response it will generate a 404 error if the value is not found, or
    return a JSON response with the requested slice.
    '''
    assert isinstance(request, HttpRequest)
    # Query parameters are accepted but not yet applied to the data slice
    # (subsetting is TODO; the full file is returned for now).
    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    station = request.GET.get('station', None)
    interval = request.GET.get('interval', None)
    # SECURITY(review): the account key belongs in configuration, not source.
    blob_service = BlobService(account_name='araldrift', account_key='XXXXXXXXXXXXXXXXXXXXXXX')
    blob_service.get_blob_to_path('flow', 'NARYN.day', './tmp.csv')
    # BUGFIX: the Python 2 `file()` builtin does not exist on Python 3, and the
    # manual readline loop left style issues; open() + readlines() is
    # equivalent (each element keeps its trailing newline) and closes the file.
    with open('./tmp.csv') as f:
        h = f.readlines()
    json_encoded_result = json.dumps(h)
    try:
        return HttpResponse(json_encoded_result, content_type="application/json")
    except Exception as a:
        return HttpResponseNotFound(content="No dice, either the inputs were out of range, the file couldn't be retrieved, or the winds weren't in your favor.")
示例4: gethydrograph
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_to_path [as 别名]
def gethydrograph(request):
    '''
    Returns streamflow data by start / stop / station.

    In response it will generate a 404 error if the value is not found, or
    return the requested slice as JSON (jsondat=TRUE), a PNG plot (plot=TRUE),
    or a .csv attachment by default.
    '''
    assert isinstance(request, HttpRequest)
    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    station = request.GET.get('station', None)
    interval = request.GET.get('interval', None)  # accepted but currently unused
    jsondat = request.GET.get('jsondat', None)
    plot = request.GET.get('plot', None)
    # Build the blob name and the name offered to the browser for download.
    stationfile = station + '.day.new'
    downloadablefile = station + '_' + start + '_' + end + '.csv'
    # SECURITY(review): hard-coded account key should be moved to configuration.
    blob_service = BlobService(account_name='araldrift', account_key='otLzzkwQHQD3xFTQxwxy64PCL6eDINWGjSB7x6Ta2XVw3+3ffI5O2MhAEavf/r8qIW4G/dKrZAVg1R64nK7hDQ==')
    blob_service.get_blob_to_path('flow', stationfile, './tmp.csv')
    # BUGFIX: removed `f = file('./tmp.csv')` — the Py2-only `file()` builtin
    # breaks on Python 3, and the handle was never used nor closed (pandas
    # reads the file itself below).
    d_cols = ["DATE", "FLOW"]
    d = pd.read_csv('./tmp.csv', sep=" ", names=d_cols)
    # Subset by the requested date window (string comparison on ISO-style dates).
    df = d[(d.DATE >= start) & (d.DATE <= end)]
    # BUGFIX: df.to_json() already returns a JSON string; the original wrapped
    # it in json.dumps(), double-encoding the payload into one quoted string
    # literal instead of a JSON array.
    json_encoded_result = df.to_json(orient='records')
    df.plot(x='DATE', y='FLOW', figsize=(14, 6))
    plt.savefig('./plot_test.png')
    try:
        if jsondat in ['TRUE']:
            return HttpResponse(json_encoded_result, content_type="application/json")
        elif plot in ['TRUE']:
            # BUGFIX: close the image file instead of leaking the handle.
            with open("./plot_test.png", "rb") as img:
                image_data = img.read()
            return HttpResponse(image_data, content_type='image/png')
        else:
            response = HttpResponse(content_type='text/csv')
            response['Content-Disposition'] = 'attachment; filename=' + downloadablefile
            df.to_csv(response, index=False, lineterminator='\r\n')
            return response
    except Exception as a:
        return HttpResponseNotFound(content="No dice, either the inputs were out of range, the file couldn't be retrieved, or the winds weren't in your favor.")
示例5: getblob
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_to_path [as 别名]
def getblob(request):
    """Download 'NARYN.day' to a temp file and return it as a plain-text attachment."""
    import os  # local import: os is only needed for the portable path join

    assert isinstance(request, HttpRequest)
    # SECURITY(review): hard-coded account key should be moved to configuration.
    blob_service = BlobService(account_name='araldrift', account_key='otLzzkwQHQD3xFTQxwxy64PCL6eDINWGjSB7x6Ta2XVw3+3ffI5O2MhAEavf/r8qIW4G/dKrZAVg1R64nK7hDQ==')
    # Blob URL layout: http://<storage-account-name>.blob.core.windows.net/<container-name>/<blob-name>
    name = 'test.txt'
    # BUGFIX: build the temp path portably; the original '{0}\{1}' format used
    # a Windows-only backslash that is also an escape-sequence hazard.
    fpath = os.path.join(tempfile.gettempdir(), name)
    blob_service.get_blob_to_path('flow', 'NARYN.day', fpath)
    # BUGFIX: the original contained a pasted C# line referencing undefined
    # names (`blob.Properties...ContentDisposition = ... downloadName`), which
    # raised NameError on every request, and the response body was left empty.
    # Return the downloaded bytes with the attachment header instead.
    with open(fpath, 'rb') as f:
        payload = f.read()
    response = HttpResponse(payload, content_type='text/plain')
    response['Content-Disposition'] = 'attachment; filename=test.txt'
    return response
示例6: download_data
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_to_path [as 别名]
def download_data(key):
    """Download every blob in the 'recruitingdata' container to the current
    directory, flattening '/' in blob names to '_' and appending '.csv'.

    `key` is the storage-account access key.
    """
    service = BlobService(account_name='asosdsrecruiting', account_key=key)

    # Page through the container listing; next_marker is falsy on the last page.
    all_blobs = []
    marker = None
    while True:
        page = service.list_blobs('recruitingdata', marker=marker)
        all_blobs.extend(page)
        marker = page.next_marker
        if not marker:
            break

    for entry in all_blobs:
        file_name = entry.name
        print('Downloading: ' + file_name)
        service.get_blob_to_path('recruitingdata', file_name,
                                 file_name.replace('/', '_') + '.csv')
示例7: get_tags
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_to_path [as 别名]
def get_tags():
    """Download the posted image blob, base64-decode it, tag it with Clarifai,
    and cache the resulting search terms in the module global `d`.

    The raw request body is the blob name (UTF-8 bytes).
    TODO: add error checking (any storage/decoding/API failure currently raises).
    """
    global d
    d = {}
    clarifai_api = ClarifaiApi()
    # SECURITY(review): hard-coded storage account key should be moved to configuration.
    blob_service = BlobService(
        "calhacks", "mm7EmY+T+MGahePBDSDU5LHpZR5tRXuh4MSco4jFrzHovOPEf06e18c89pxtPIo4NDVhhjSeaQY/FQmKNxjjyA=="
    )
    blob_name = request.data
    blob_name = blob_name.decode("utf-8")
    blob_service.get_blob_to_path("imagestore", blob_name, "out.png")
    print("checkpoint 1")
    # BUGFIX: the original opened "out.png" and "img.png" (read) without ever
    # closing them, and called f.close() inside a `with` block. Use context
    # managers throughout. The downloaded blob is base64 text, possibly split
    # across lines; reassemble it before decoding.
    with open("out.png", "r") as encoded:
        strd = "".join(line.strip() for line in encoded)
    fname = "img.png"
    with open(fname, "wb") as out:
        out.write(base64.b64decode(strd))
    with open(fname, "rb") as img:
        result = clarifai_api.tag_images(img)
    print(result)
    st = result["results"][0]["result"]["tag"]["classes"][0:6]
    # Drop generic tags that add no search value.
    # BUGFIX: renamed the loop variable; the original reused `i`, shadowing
    # the (then-unclosed) file handle above.
    for tag in [
        "food",
        "nobody",
        "still life",
        "meal",
        "dish",
        "plate",
        "delicious",
        "isolated",
        "cutout",
        "unhealthy",
        "one",
        "background",
    ]:
        while tag in st:
            st.remove(tag)
    d = {blob_name: search_terms(st)}
    return "success!"
示例8: getanalysis
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_to_path [as 别名]
def getanalysis(request):
    """Run a remote xarray analysis over SSH and return the resulting plot.

    Query params: latstart / latend / lonstart / lonend / season. On success
    the generated PNG is fetched back from blob storage and returned; on
    failure the remote job's stderr is returned as plain text.
    """
    assert isinstance(request, HttpRequest)
    latstart = request.GET.get('latstart', None)
    latend = request.GET.get('latend', None)
    lonstart = request.GET.get('lonstart', None)
    lonend = request.GET.get('lonend', None)
    sea = request.GET.get('season', None)
    # Establish the SSH session; the private key is staged from blob storage.
    ssh = paramiko.SSHClient()
    # SECURITY(review): hard-coded account key and passwords should be configuration.
    blob_service = BlobService(account_name='araldrift', account_key='otLzzkwQHQD3xFTQxwxy64PCL6eDINWGjSB7x6Ta2XVw3+3ffI5O2MhAEavf/r8qIW4G/dKrZAVg1R64nK7hDQ==')
    blob_service.get_blob_to_path('security', 'id_rsa', './id_rsa')
    # NOTE(review): privkey is loaded but never passed to connect() (pkey=None
    # below); authentication actually uses the password. Confirm intent before
    # removing the key download.
    privkey = paramiko.RSAKey.from_private_key_file(filename='./id_rsa', password='araldif1*')
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect('40.112.209.249', username='araldif', password='araldif1*', allow_agent=False, pkey=None, key_filename=None, timeout=10, look_for_keys=False, compress=False)
    except paramiko.SSHException:
        # BUGFIX: the original followed this return with an unreachable quit().
        return HttpResponse()
    # Invoke the analysis script with the bounding box and season.
    cmd = '/home/araldif/anaconda3/bin/python /datadrive/from_webapp/xarray_analysis.py ' + latstart + ' ' + latend + ' ' + lonstart + ' ' + lonend + ' ' + sea
    stdin, stdout, stderr = ssh.exec_command(cmd)
    # Collect stderr so it can be surfaced if the plot never materializes.
    h = stderr.readlines()
    stderr.close()
    ssh.close()
    try:
        imageoutfile1 = 'prec_' + str(sea) + '_' + str(latstart) + '_' + str(latend) + '_' + str(lonstart) + '_' + str(lonend) + '.png'
        imageoutfile2 = './' + imageoutfile1
        blob_service.get_blob_to_path('flow', imageoutfile1, imageoutfile2)
        # BUGFIX: close the image file instead of leaking the handle.
        with open(imageoutfile2, "rb") as img:
            image_data = img.read()
        return HttpResponse(image_data, content_type='image/png')
    except Exception:
        # BUGFIX: narrowed the original bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit). Fall back to the remote stderr.
        return HttpResponse(h, content_type='text/plain')
示例9: module_impl
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_to_path [as 别名]
#.........这里部分代码省略.........
file_path,
x_ms_meta_name_values,
x_ms_blob_cache_control,
x_ms_blob_content_encoding,
x_ms_blob_content_language,
x_ms_blob_content_type
)
results['blob'] = get_blob_facts(bs, container_name, blob_name)
results['changed'] = True
results['msg'] = 'Successfully updloaded file.'
return results
if mode == 'list':
container = container_check(bs, container_name)
response = bs.list_blobs(
container_name,
prefix,
marker,
max_results
)
results['blobs'] = []
for blob in response.blobs:
b = dict(
name = blob.name,
snapshot = blob.snapshot,
last_modified = blob.properties.last_modified,
content_length = blob.properties.content_length,
blob_type = blob.properties.blob_type,
)
results['blobs'].append(b)
return results
if mode == 'get':
if file_path is None:
raise Exception("Parameter error: file_path cannot be None.")
container = container_check(bs, container_name)
blob = blob_check(bs, container_name, blob_name)
path_exists = path_check(file_path)
if not path_exists or overwrite == 'always':
if not check_mode:
bs.get_blob_to_path(container_name, blob_name, file_path)
results['changed'] = True
results['msg'] = "Blob %s successfully downloaded to %s." % (blob_name, file_path)
return results
if path_exists:
md5_remote = blob['content-md5']
md5_local = get_md5(file_path)
if md5_local == md5_remote:
sum_matches = True
if overwrite == 'always':
if not check_mode:
bs.get_blob_to_path(container_name, blob_name, file_path)
results['changed'] = True
results['msg'] = "Blob %s successfully downloaded to %s." % (blob_name, file_path)
else:
results['msg'] = "Local and remote object are identical, ignoring. Use overwrite parameter to force."
else:
sum_matches = False
if overwrite in ('always', 'different'):
if not check_mode:
bs.get_blob_to_path(container_name, blob_name, file_path)
results['changed'] = True
results['msg'] = "Blob %s successfully downloaded to %s." % (blob_name, file_path)
else:
results['msg'] ="WARNING: Checksums do not match. Use overwrite parameter to force download."
if sum_matches is True and overwrite == 'never':
results['msg'] = "Local and remote object are identical, ignoring. Use overwrite parameter to force."
return results
if mode == 'get_url':
if not blob_name:
raise Exception("Parameter error: blob_name cannot be None.")
container = container_check(bs, container_name)
blob = blob_check(bs, container_name, blob_name)
url = bs.make_blob_url(
container_name=container_name,
blob_name=blob_name,
sas_token=access_token)
results['url'] = url
results['msg'] = "Url: %s" % url
return results
if mode == 'get_token':
if hours == 0 and days == 0:
raise Exception("Parameter error: expecting hours > 0 or days > 0")
container = container_check(bs, container_name)
blob = blob_check(bs, container_name, blob_name)
results['blob_name'] = blob_name
sap = get_shared_access_policy(permissions, hours=hours, days=days)
token = bs.generate_shared_access_signature(container_name, blob_name, sap)
results['access_token'] = token
return results
示例10: BlobService
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_to_path [as 别名]
# Walk-through of the BlobService API: create a public container, upload a
# block blob from a local file, page through the listing, download the blob,
# then delete it.
blob_service = BlobService(account_name="<account_name>", account_key="<account_key>")
blob_service.create_container("datacontainer")
blob_service.create_container("datacontainer", x_ms_blob_public_access="container")
blob_service.set_container_acl("datacontainer", x_ms_blob_public_access="container")
blob_service.put_block_blob_from_path(
    "datacontainer", "datablob", "StorageClientPy.py", x_ms_blob_content_type="text/x-script.phyton"
)

# Accumulate the full listing one page at a time; next_marker is falsy on the
# final page.
blobs = []
marker = None
while True:
    batch = blob_service.list_blobs("datacontainer", marker=marker)
    blobs.extend(batch)
    marker = batch.next_marker
    if not marker:
        break

for blob in blobs:
    print(blob.name)

blob_service.get_blob_to_path("datacontainer", "datablob", "out-StorageClientPy.py")
blob_service.delete_blob("datacontainer", "datablob")
示例11: AzureIOStore
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_to_path [as 别名]
class AzureIOStore(IOStore):
"""
A class that lets you get input from and send output to Azure Storage.
"""
def __init__(self, account_name, container_name, name_prefix=""):
    """
    Create an AzureIOStore bound to one container in one storage account,
    prepending `name_prefix` to every key.

    All paths are interpreted as keys or key prefixes; a non-empty prefix is
    normalized to end with a slash. The account key is resolved the same way
    Toil does it (AZURE_ACCOUNT_KEY env var or ~/.toilAzureCredentials).
    """
    # The Azure client libraries must have imported successfully.
    assert(have_azure)
    self.account_name = account_name
    self.container_name = container_name
    self.name_prefix = name_prefix
    if self.name_prefix and not self.name_prefix.endswith("/"):
        # Normalize: key prefixes are always slash-terminated.
        self.name_prefix += "/"
    # Reuse Toil's credential lookup so account keys live in one place.
    self.account_key = toil.jobStores.azureJobStore._fetchAzureAccountKey(
        self.account_name)
    # Lazily-created BlobService client (see __connect).
    self.connection = None
def __getstate__(self):
    """Pickle only the connection parameters — never the live Azure client."""
    return (self.account_name, self.account_key, self.container_name,
            self.name_prefix)
def __setstate__(self, state):
    """Restore connection parameters after unpickling; reconnect lazily."""
    (self.account_name, self.account_key,
     self.container_name, self.name_prefix) = state
    # The BlobService client is re-created on first use.
    self.connection = None
def __connect(self):
    """Create the BlobService client on first use (no-op when already connected)."""
    if self.connection is not None:
        return
    RealTimeLogger.get().debug("Connecting to account {}, using "
        "container {} and prefix {}".format(self.account_name,
        self.container_name, self.name_prefix))
    # One shared client handles all blob reads and writes for this store.
    self.connection = BlobService(
        account_name=self.account_name, account_key=self.account_key)
@backoff
def read_input_file(self, input_path, local_path):
    """Download the blob at name_prefix + input_path from Azure to local_path."""
    self.__connect()
    RealTimeLogger.get().debug("Loading {} from AzureIOStore".format(
        input_path))
    # get_blob_to_path blocks until the download completes (it may invoke a
    # progress callback along the way).
    self.connection.get_blob_to_path(self.container_name,
                                     self.name_prefix + input_path,
                                     local_path)
def list_input_directory(self, input_path, recursive=False,
with_times=False):
"""
Loop over fake /-delimited directories on Azure. The prefix may or may
not not have a trailing slash; if not, one will be added automatically.
Returns the names of files and fake directories in the given input fake
directory, non-recursively.
If with_times is specified, will yield (name, time) pairs including
#.........这里部分代码省略.........
示例12: BlobService
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_to_path [as 别名]
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 09 16:36:56 2015

@author: justin.malinchak
"""
# Download every blob in 'mycontainer' into C:\Batches\$Work.
from azure.storage.blob import BlobService

blob_service = BlobService(account_name='portalvhdss5m831rhl98hj', account_key='Z1MliCYE7p9Ks9kYQoGeM4V99hODtiJL82BVi/zIm06jLYh7n0tV8YaZHzITKixMwUUmjJ1Vp05XrgHG+gXFlg==')

# Page through the container listing; next_marker is falsy on the last page.
blobs = []
marker = None
while True:
    batch = blob_service.list_blobs('mycontainer', marker=marker)
    blobs.extend(batch)
    marker = batch.next_marker
    if not marker:
        break

for blob in blobs:
    bname = blob.name
    print('')
    print(bname)
    print('')
    # Mirror the blob into the local batch working directory.
    bpathname = 'C:\\Batches\\$Work\\' + bname
    blob_service.get_blob_to_path('mycontainer', bname, bpathname)
    print('')
    print('blob downloaded ' + bpathname)
    print('')
示例13: BlobService
# 需要导入模块: from azure.storage.blob import BlobService [as 别名]
# 或者: from azure.storage.blob.BlobService import get_blob_to_path [as 别名]
# coding: utf-8

# In[186]:

# Pull the latest scored dataset from Azure Blob Storage into pandas.
from azure.storage.blob import BlobService
import pandas as pd

# Connect to the blob storage account.
# SECURITY(review): replace the placeholder credentials via configuration.
blob_service = BlobService(account_name='YourAccount', account_key='YourKey')

# Get the latest dataset that was scored; stage it in a temporary local CSV.
blob_service.get_blob_to_path('amltest', 'output.csv', 'tmp.csv')

# In[187]:

# Convert the text file into a pandas DataFrame.
data = pd.read_csv("tmp.csv")

# Replace whitespace in column names with underscores so attribute-style
# access works later.
# BUGFIX: `unicode` only exists on Python 2 and raised NameError on Python 3;
# detect the available text types once instead.
try:
    _string_types = (str, unicode)  # Python 2: bytes-str and unicode
except NameError:
    _string_types = (str,)  # Python 3: all text is str
cols = data.columns
cols = cols.map(lambda x: x.replace(' ', '_') if isinstance(x, _string_types) else x)
data.columns = cols

# In[188]: