

Python BlobService.put_block_blob_from_path Method Code Examples

This article collects and summarizes typical usage examples of the azure.storage.BlobService.put_block_blob_from_path method in Python. If you are wondering what BlobService.put_block_blob_from_path does, how to call it, or what real-world code using it looks like, the curated examples below may help. You can also explore further usage examples of the containing class, azure.storage.BlobService.


The sections below present 15 code examples of BlobService.put_block_blob_from_path, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
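Before the examples, here is a minimal sketch of the method's typical call pattern. It assumes the legacy azure-storage SDK (0.x), in which BlobService still exists; the account credentials, container, blob name, and file path are placeholders, not values taken from the examples below:

from azure.storage import BlobService  # legacy SDK (azure-storage 0.x)

# Placeholder credentials -- substitute your own storage account values.
blob_service = BlobService(account_name='myaccount', account_key='mykey')

# Create the container if it does not already exist (returns False if it does).
blob_service.create_container('mycontainer')

# Upload a local file as a block blob; the content-type header is optional.
blob_service.put_block_blob_from_path(
    'mycontainer',            # container name
    'report.pdf',             # blob name
    '/tmp/report.pdf',        # path to the local file
    x_ms_blob_content_type='application/pdf',
)

# URL of the uploaded blob, e.g. https://myaccount.blob.core.windows.net/mycontainer/report.pdf
print(blob_service.make_blob_url('mycontainer', 'report.pdf'))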

Example 1: FileService

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]
class FileService(Component):
    def __init__(self):
        self.blob_service = None

    def generate_blob_service(self):
        if self.blob_service is None:
            # if storage info doesn't exist in config.py, the upload-file functions stop working
            self.blob_service = BlobService(account_name=self.util.get_config("storage.azure.account_name"),
                                            account_key=self.util.get_config("storage.azure.account_key"),
                                            host_base=self.util.get_config("storage.azure.blob_service_host_base"))

    def create_container_in_storage(self, container_name, access):
        """
        create a container if it doesn't exist
        :param container_name:
        :param access:
        :return:
        """
        self.generate_blob_service()
        try:
            names = map(lambda x: x.name, self.blob_service.list_containers())
            if container_name not in names:
                self.blob_service.create_container(container_name, x_ms_blob_public_access=access)
            else:
                self.log.debug("container already exsit in storage")
            return True
        except Exception as e:
            self.log.error(e)
            return False

    def upload_file_to_azure(self, stream, container_name, blob_name):
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.put_block_blob_from_file(container_name, blob_name, stream)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_path(self, path, container_name, blob_name):
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.put_block_blob_from_path(container_name, blob_name, path)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def delete_file_from_azure(self, container_name, blob_name):
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.delete_blob(container_name, blob_name)
        except Exception as e:
            self.log.error(e)
            return None
Developer: mshubian, Project: open-hackathon, Lines: 61, Source: azure_file_service.py

Example 2: AzureTransfer

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, container_name):
        BaseTransfer.__init__(self)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name
        self.conn = BlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized")

    def get_metadata_for_key(self, key):
        key = fix_path(key)
        return self.list_path(key)[0]['metadata']

    def list_path(self, path):
        return_list = []
        path = fix_path(path)
        self.log.info("Asking for listing of: %r", path)
        for r in self.conn.list_blobs(self.container_name, prefix=path, delimiter="/",
                                      include="metadata"):
            entry = {"name": r.name, "size": r.properties.content_length,
                     "last_modified": dateutil.parser.parse(r.properties.last_modified),
                     "metadata": r.metadata}
            return_list.append(entry)
        return return_list

    def delete_key(self, key_name):
        key_name = fix_path(key_name)
        self.log.debug("Deleting key: %r", key_name)
        return self.conn.delete_blob(self.container_name, key_name)

    def get_contents_to_file(self, obj_key, filepath_to_store_to):
        obj_key = fix_path(obj_key)
        self.log.debug("Starting to fetch the contents of: %r to: %r", obj_key, filepath_to_store_to)
        return self.conn.get_blob_to_path(self.container_name, obj_key, filepath_to_store_to)

    def get_contents_to_string(self, obj_key):
        obj_key = fix_path(obj_key)
        self.log.debug("Starting to fetch the contents of: %r", obj_key)
        return self.conn.get_blob_to_bytes(self.container_name, obj_key), self.get_metadata_for_key(obj_key)

    def store_file_from_memory(self, key, memstring, metadata=None):
        # For whatever reason Azure requires all values to be strings at the point of sending
        metadata_to_send = dict((str(k), str(v)) for k, v in metadata.items())
        self.conn.put_block_blob_from_bytes(self.container_name, key, memstring,
                                            x_ms_meta_name_values=metadata_to_send)

    def store_file_from_disk(self, key, filepath, metadata=None):
        # For whatever reason Azure requires all values to be strings at the point of sending
        metadata_to_send = dict((str(k), str(v)) for k, v in metadata.items())
        self.conn.put_block_blob_from_path(self.container_name, key, filepath,
                                           x_ms_meta_name_values=metadata_to_send)

    def get_or_create_container(self, container_name):
        start_time = time.time()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs", container_name, time.time() - start_time)
        return container_name
Developer: Ormod, Project: pghoard, Lines: 60, Source: azure.py

Example 3: upload_log

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]
def upload_log():

    blob_service = BlobService(account_name=os.getenv('ACC_NAME'), account_key=os.getenv('ACCESS_KEY'))

    fpath = os.path.join(os.getenv('LOGS_DIR'),"log.log")

    blob_service.put_block_blob_from_path(
                    'log',
                    "log.log",
                    fpath,
                    x_ms_blob_content_type="text/plain"
                )
Developer: RobinL, Project: daily, Lines: 14, Source: my_website.py

Example 4: upload_all_new_azure

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]
def upload_all_new_azure(local_folder, azure_container, account_name, account_key):



    blob_service = BlobService(account_name=os.getenv('ACC_NAME'), account_key=os.getenv('ACCESS_KEY'))

    blob_list = blob_service.list_blobs(azure_container)

    blob_name_list = [b.name for b in blob_list.blobs]

    blob_name_set = set(blob_name_list)

    # Now, for each file in the local folder, check whether it's already in the Azure container
    
    localfiles = os.listdir(local_folder)
    localfiles = [f for f in localfiles if "~" not in f]
    localfiles = [f for f in localfiles if f[0] != "."]
    localfiles = [f for f in localfiles if (".zip" in f or ".csv" in f)]
    
    localfiles = set(localfiles)

    
    files_to_upload = localfiles - blob_name_set



    orig_len = len(files_to_upload)
    error_counter = 0
    while len(files_to_upload)>0:
        if error_counter>orig_len:
            logger.error("too many upload failures, exiting")
            sys.exit()
        filename = files_to_upload.pop()

        try:
            blob_service.put_block_blob_from_path(
                'csvs',
                filename,
                os.path.join(local_folder,filename)
            )

        except Exception:
            error_counter += 1
            logging.error(filename + " failed to upload")
            files_to_upload.add(filename)
Developer: RobinL, Project: daily, Lines: 47, Source: upload_all_new.py

Example 5: AzureBackend

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]
class AzureBackend(duplicity.backend.Backend):
    """
    Backend for Azure Blob Storage Service
    """
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Import Microsoft Azure Storage SDK for Python library.
        try:
            import azure
            import azure.storage
            if hasattr(azure.storage, 'BlobService'):
                # v0.11.1 and below
                from azure.storage import BlobService
                self.AzureMissingResourceError = azure.WindowsAzureMissingResourceError
                self.AzureConflictError = azure.WindowsAzureConflictError
            else:
                # v1.0.0 and above
                import azure.storage.blob
                if hasattr(azure.storage.blob, 'BlobService'):
                    from azure.storage.blob import BlobService
                else:
                    from azure.storage.blob.blockblobservice import BlockBlobService as BlobService
                self.AzureMissingResourceError = azure.common.AzureMissingResourceHttpError
                self.AzureConflictError = azure.common.AzureConflictHttpError
        except ImportError as e:
            raise BackendException("""\
Azure backend requires Microsoft Azure Storage SDK for Python (https://pypi.python.org/pypi/azure-storage/).
Exception: %s""" % str(e))

        # TODO: validate container name
        self.container = parsed_url.path.lstrip('/')

        if 'AZURE_ACCOUNT_NAME' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_NAME environment variable not set.')

        if 'AZURE_ACCOUNT_KEY' in os.environ:
            if 'AZURE_ENDPOINT_SUFFIX' in os.environ:
                self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                                account_key=os.environ['AZURE_ACCOUNT_KEY'],
                                                endpoint_suffix=os.environ['AZURE_ENDPOINT_SUFFIX'])
            else:
                self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                                account_key=os.environ['AZURE_ACCOUNT_KEY'])
            self._create_container()
        elif 'AZURE_SHARED_ACCESS_SIGNATURE' in os.environ:
            if 'AZURE_ENDPOINT_SUFFIX' in os.environ:
                self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                                sas_token=os.environ['AZURE_SHARED_ACCESS_SIGNATURE'],
                                                endpoint_suffix=os.environ['AZURE_ENDPOINT_SUFFIX'])
            else:
                self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                                sas_token=os.environ['AZURE_SHARED_ACCESS_SIGNATURE'])
        else:
            raise BackendException(
                'Neither AZURE_ACCOUNT_KEY nor AZURE_SHARED_ACCESS_SIGNATURE environment variable is set.')

        if globals.azure_max_single_put_size:
            # check if we use azure-storage>=0.30.0
            try:
                _ = self.blob_service.MAX_SINGLE_PUT_SIZE
                self.blob_service.MAX_SINGLE_PUT_SIZE = globals.azure_max_single_put_size
            # fallback for azure-storage<0.30.0
            except AttributeError:
                self.blob_service._BLOB_MAX_DATA_SIZE = globals.azure_max_single_put_size

        if globals.azure_max_block_size:
            # check if we use azure-storage>=0.30.0
            try:
                _ = self.blob_service.MAX_BLOCK_SIZE
                self.blob_service.MAX_BLOCK_SIZE = globals.azure_max_block_size
            # fallback for azure-storage<0.30.0
            except AttributeError:
                self.blob_service._BLOB_MAX_CHUNK_DATA_SIZE = globals.azure_max_block_size

    def _create_container(self):
        try:
            self.blob_service.create_container(self.container, fail_on_exist=True)
        except self.AzureConflictError:
            # Indicates that the resource could not be created because it already exists.
            pass
        except Exception as e:
            log.FatalError("Could not create Azure container: %s"
                           % unicode(e.message).split('\n', 1)[0],
                           log.ErrorCode.connection_failed)

    def _put(self, source_path, remote_filename):
        kwargs = {}
        if globals.azure_max_connections:
            kwargs['max_connections'] = globals.azure_max_connections

        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#upload-a-blob-into-a-container
        try:
            self.blob_service.create_blob_from_path(self.container, remote_filename, source_path.name, **kwargs)
        except AttributeError:  # Old versions use a different method name
            self.blob_service.put_block_blob_from_path(self.container, remote_filename, source_path.name, **kwargs)

    def _get(self, remote_filename, local_path):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#download-blobs
        self.blob_service.get_blob_to_path(self.container, remote_filename, local_path.name)
#......... some code omitted here .........
Developer: henrysher, Project: duplicity, Lines: 103, Source: azurebackend.py
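Example 5 also doubles as a compatibility recipe: newer azure-storage releases replaced put_block_blob_from_path with create_blob_from_path, so portable code tries the new name first and falls back to the old one. Below is a minimal sketch of that fallback; it assumes a blob_service object created as in the other examples and an existing container, and the helper name upload_compat is hypothetical:

def upload_compat(blob_service, container, blob_name, local_path):
    """Upload a local file as a block blob across old and new SDK versions."""
    try:
        # azure-storage >= 1.0 (BlockBlobService) exposes create_blob_from_path
        blob_service.create_blob_from_path(container, blob_name, local_path)
    except AttributeError:
        # Older SDKs (BlobService) only provide put_block_blob_from_path
        blob_service.put_block_blob_from_path(container, blob_name, local_path)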

Example 6: BlobService

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]
ACCOUNT_NAME = 'sounds'
ACCOUNT_KEY  = AC.getAccountKey() # primary access key
HOST_BASE    = '.blob.core.windows.net'

blob_service = BlobService(account_name=ACCOUNT_NAME,
                           account_key=ACCOUNT_KEY,
                           host_base=HOST_BASE)

CONTAINER = 'bat-detective' # or whatever else you like

created = blob_service.create_container(CONTAINER, x_ms_blob_public_access='container')
print "Created" if created else "Not created (probably already existing)"

audio_dir = '../../data/wav/'
SOUND_FILES = glob.glob(audio_dir + '*.wav')

for f in SOUND_FILES:
    print "uploading", os.path.basename(f)
    blob_service.put_block_blob_from_path(
        CONTAINER,                          # container
        os.path.basename(f),                # blob
        f,                                  # path
        x_ms_blob_content_type='audio/wav'
    )


blobs = blob_service.list_blobs(CONTAINER)

for blob in blobs:
    print(blob.name)
Developer: gozzilli, Project: engaged_hackathon, Lines: 32, Source: upload_azure.py

Example 7: SAzure

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]

#......... some code omitted here .........
                print(blob.properties.__dict__)
                print(blob.name)
                print(blob.url)"""
            return blobs
        except Exception as e:
            print(_("Bad connection"))
            logging.warning("container {0}, path {1}".format(container, uri))
            exit(1)

    def path_list(self, path):
        try:
            logging.debug("path_list {0}".format(path))

            container, uri = self.path_split(path)
            logging.debug("Container: {0}, Uri: {1}".format(container, uri))

            self.connect()
            self.blob.create_container(container)

            blobs = self.path_list_blobs(container, uri)

            d = {}

            for b in blobs:
                spath = self.spath(container, uri, b)
                # print(b.__dict__)
                #print(str(b.properties.last_modified.__dict__))
                #print(str(spath.ModifiedTS))
                d[spath.SPath] = spath
            # print(d)
            return d
        except Exception as e:
            print(e)

    def remove(self, src: SyncPath):
        try:
            logging.debug("Removing {0}".format(src.AbsPath))
            self.connect()
            self.blob.create_container(src.BasePath)
            self.blob.delete_blob(src.BasePath, src.AbsPath)
        except:
            pass


    def copy_local2azure(self, src, base_dir):
        try:

            container, uri = self.path_split(base_dir)

            if len(src.SPath)>0 and src.SPath[0]=="/":
                path= uri+ src.SPath[1:]
            else:
                path= uri+src.SPath
            logging.debug("copy_local2azure Spath {0}. path:{1}".format(src.SPath, path))
            self.connect()
            if not src.IsDir:
                self.blob.put_block_blob_from_path(container, path, src.AbsPath)
            else:
                self.blob.put_block_blob_from_text(container, path+"/", "")
        except Exception as e:
            print("Error Copying")
            print(e)

    def copy_azure2local(self, src, base_dir):
        try:

            if len(src.SPath)>0 and (src.SPath[0] == "/" or src.SPath[0] == "\\") :
                path = src.SPath[1:]
            else:
                path = src.SPath


            path= os.path.normpath(os.path.join(base_dir, path))
            logging.debug("copy_azure2local basedir:{0} Spath {1}, path {2}, abs: {3}".format( base_dir, src.SPath, path, src.AbsPath))


            if not os.path.isdir(path):
               os.makedirs(os.path.dirname(path), exist_ok=True)
            #print( os.path.dirname(path)+"***************")

            if not (len(src.AbsPath)>0 and src.AbsPath[len(src.AbsPath)-1]=="/"):
                self.blob.get_blob_to_path(src.BasePath, src.AbsPath, path)




            """container, uri = self.path_split(base_dir)

            if len(src.SPath)>0 and src.SPath[0]=="/":
                path= uri+ src.SPath[1:]
            else:
                path= uri+src.SPath
            self.connect()
            if not src.IsDir:
                self.blob.get_blob_to_path(src.BasePath, path, src.AbsPath)
            else:
                self.blob.put_block_blob_from_text(container, path, "")"""
        except Exception as e:
            print("Error copying")
            print(e)
Developer: gomes-, Project: alx, Lines: 104, Source: sys.py

Example 8: Command

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]
        thread = threading.Thread(target=target)
        thread.start()

        thread.join(timeout)
        if thread.is_alive():
            print 'Terminating process'
            self.process.terminate()
            thread.join()
        print self.process.returncode

#command = Command("echo 'Process started'; sleep 2; echo 'Process finished'")
#print command.run(timeout=3)
#print command.run(timeout=1)
#
#command = Command('ping www.google.com')
#print command.run(timeout=1)

AZURE_STORAGE_CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING']

blob_service = BlobService(connection_string=AZURE_STORAGE_CONNECTION_STRING)

print blob_service.put_block_blob_from_path(
    'nexradl2',
    '201208/20120810/KSRX/NWS_NEXRAD_NXL2SR_KSRX_20120810050000_20120810055959.tar',
    '/snfs9/q2/levelii_tarfiles/201208/20120810/KSRX/NWS_NEXRAD_NXL2SR_KSRX_20120810050000_20120810055959.tar',
    max_connections=5,
)


blobs = blob_service.list_blobs('nexradl2',maxresults=10)
for blob in blobs:
    print(blob.name)
    print(blob.url)

Developer: 18F, Project: noaabigdata-code, Lines: 30, Source: test.py

Example 9: BlobService

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]
from azure.storage import BlobService 

mycontainer = 'images'
myblob = 'python.jpg'
myfile = 'c:\\demo\\AzureStorageSDK\\AzureStorageSDKDemo.jpg'

blob_service = BlobService(
    account_name='dev018storage', 
    account_key='q4dLJhW3zLw8hmdjL88w0vc2KHrbjMJlXumFB0eZVpBoeks1MKlZh7+pW36fmlWmBMx+bzwvWcYMU8p4MGhbcg=='
)

blob_service.put_block_blob_from_path(
    mycontainer,
    myblob,
    myfile,
    x_ms_blob_content_type='image/jpeg'
)

print "\n\""+myblob+"\" Uploaded by Python!\n"
Developer: takedamasaki555, Project: decode15-dev018, Lines: 21, Source: UploadAzureStorage.py

Example 10: AzureTransfer

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, container_name, prefix=None):
        # NOTE: Azure wants all paths to start with a slash
        prefix = "/{}".format(prefix.lstrip("/") if prefix else "")
        super().__init__(prefix=prefix)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name
        self.conn = BlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized")
        # XXX: AzureTransfer isn't actively tested and its error handling is probably lacking
        self.log.warning("AzureTransfer is experimental and has not been thoroughly tested")

    def get_metadata_for_key(self, key):
        key = self.format_key_for_backend(key)
        return self._list_blobs(key)[0]["metadata"]

    def _metadata_for_key(self, key):
        return self._list_blobs(key)[0]["metadata"]

    def list_path(self, key):
        path = self.format_key_for_backend(key, trailing_slash=True)
        return self._list_blobs(path)

    def _list_blobs(self, path):
        self.log.debug("Listing path %r", path)
        items = self.conn.list_blobs(self.container_name, prefix=path, delimiter="/", include="metadata")
        result = []
        for item in items:
            result.append({
                "last_modified": dateutil.parser.parse(item.properties.last_modified),
                "metadata": item.metadata,
                "name": self.format_key_from_backend(item.name),
                "size": item.properties.content_length,
            })
        return result

    def delete_key(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Deleting key: %r", key)
        return self.conn.delete_blob(self.container_name, key)

    def get_contents_to_file(self, key, filepath_to_store_to):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r to: %r", key, filepath_to_store_to)
        return self.conn.get_blob_to_path(self.container_name, key, filepath_to_store_to)

    def get_contents_to_fileobj(self, key, fileobj_to_store_to):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        return self.conn.get_blob_to_file(self.container_name, key, fileobj_to_store_to)

    def get_contents_to_string(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        return self.conn.get_blob_to_bytes(self.container_name, key), self._metadata_for_key(key)

    def store_file_from_memory(self, key, memstring, metadata=None):
        key = self.format_key_for_backend(key)
        # Azure requires all metadata keys and values to be strings
        metadata_to_send = {str(k): str(v) for k, v in metadata.items()}
        self.conn.put_block_blob_from_bytes(self.container_name, key, memstring,
                                            x_ms_meta_name_values=metadata_to_send)

    def store_file_from_disk(self, key, filepath, metadata=None, multipart=None):
        key = self.format_key_for_backend(key)
        # Azure requires all metadata keys and values to be strings
        metadata_to_send = {str(k): str(v) for k, v in metadata.items()}
        self.conn.put_block_blob_from_path(self.container_name, key, filepath,
                                           x_ms_meta_name_values=metadata_to_send)

    def get_or_create_container(self, container_name):
        start_time = time.time()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs", container_name, time.time() - start_time)
        return container_name
Developer: c2h5oh, Project: pghoard, Lines: 79, Source: azure.py

示例11: print

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]
print("-----------------------")

if not os.path.exists(presen_path): raise IOError(presen_path + " not found")
if not os.path.exists(os.path.join(presen_path, "img")): raise IOError("img directory not found")
if not os.path.exists(os.path.join(presen_path, "css")): raise IOError("css directory not found")

for dirpath, dirnames, filenames in os.walk(presen_path):
    for filename in filenames:
        targetfile = os.path.join(dirpath, filename)
        if ".gitignore" in targetfile: continue
        blobname = re.sub(presen_path + r"\\", \
          "servicebuilding/servicebuilding%s/presentation/" % id, \
          targetfile)\
          .replace("\\", "/")

        print("%s -> %s" % (targetfile, blobname), end="")
        try:
            blob_service = BlobService(\
              "welmokpilog",\
              "=LfXCQBPcj4u313vfz+mx+pGC2fWwnhAo+2UW5SVAnAqIjYBEPt76oievOM3LpV35BwYCYi6ufeSBRZCs/h3c8Q==")
            blob_service.put_block_blob_from_path(\
              "test-data-resources",\
              blobname,\
              targetfile)
        except:
            print("    [ERROR]")
            print("Unexpected error : ", sys.exc_info()[0])
            raise
        print("    [SUCCEED]")
print("finish move data.")
Developer: toriumi0118, Project: sandbox, Lines: 32, Source: cppresen.py

Example 12: AzureIOStore

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]

#......... some code omitted here .........
        # Work out what the directory name to list is
        fake_directory = self.name_prefix + input_path
        
        if fake_directory != "" and not fake_directory.endswith("/"):
            # We have a nonempty prefix, and we need to end it with a slash
            fake_directory += "/"
        
        # This will hold the marker that we need to send back to get the next
        # page, if there is one. See <http://stackoverflow.com/a/24303682>
        marker = None
        
        # This holds the subdirectories we found; we yield each exactly once if
        # we aren't recursing.
        subdirectories = set()
        
        while True:
        
            # Get the results from Azure. We skip the delimiter since it doesn't
            # seem to have the placeholder entries it's supposed to.
            result = self.connection.list_blobs(self.container_name, 
                prefix=fake_directory, marker=marker)
                
            for blob in result:
                # Yield each result's blob name, but directory names only once
                
                # Drop the common prefix
                relative_path = blob.name[len(fake_directory):]
                
                if (not recursive) and "/" in relative_path:
                    # We found a file in a subdirectory, and we aren't supposed
                    # to be recursing.
                    subdirectory, _ = relative_path.split("/", 1)
                    
                    if subdirectory not in subdirectories:
                        # It's a new subdirectory. Yield and remember it
                        subdirectories.add(subdirectory)
                        
                        yield subdirectory
                else:
                    # We found an actual file  
                    yield relative_path
                
            # Save the marker
            marker = result.next_marker
                
            if not marker:
                break 
    
    def write_output_file(self, local_path, output_path):
        """
        Write output to Azure. Will create the container if necessary.
        """
        
        self.__connect()
        
        RealTimeLogger.get().debug("Saving {} to AzureIOStore".format(
            output_path))
        
        try:
            # Make the container
            self.connection.create_container(self.container_name)
        except azure.WindowsAzureConflictError:
            # The container probably already exists
            pass
        
        # Upload the blob (synchronously)
        # TODO: catch no container error here, make the container, and retry
        self.connection.put_block_blob_from_path(self.container_name,
            self.name_prefix + output_path, local_path)
            
    def exists(self, path):
        """
        Returns true if the given input or output file exists in Azure already.
        
        """
        
        self.__connect()
        
        marker = None
        
        while True:
        
            # Get the results from Azure.
            result = self.connection.list_blobs(self.container_name, 
                prefix=self.name_prefix + path, marker=marker)
                
            for blob in result:
                # Look at each blob
                
                if blob.name == self.name_prefix + path:
                    # Found it
                    return True
                
            # Save the marker
            marker = result.next_marker
                
            if not marker:
                break 
        
        return False
Developer: adamnovak, Project: hgvm, Lines: 104, Source: toillib.py

Example 13: get_oldest_file

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]
# Start looping
while True:
   # Get image path
   img_filename = ''
   img_filepath = 'images/'
   img_filelist = glob.glob(img_filepath + '*.jpg')
   img_filefullpath = get_oldest_file(img_filelist)
   if img_filefullpath is None:
      # There is no image to upload, so sleep
      logging.debug('Image directory is empty.')
      time.sleep(1)
   else:
      img_filename = os.path.basename(img_filefullpath)
      # Upload the oldest image
      blob_service.put_block_blob_from_path(azure_storage_acct_container, img_filename, img_filefullpath, x_ms_blob_content_type='image/jpeg')
      img_azureblob_url = azure_storage_acct_name+'.blob.core.windows.net/'+azure_storage_acct_container+'/'+img_filename
      logging.debug('Uploaded to http://%s', img_azureblob_url)
#      msg = Message(img_azureblob_url)
#      bus_service.send_queue_message(azure_servicebus_queue, msg)
      msg = 'info.newImageUploaded:' + img_azureblob_url
      subprocess.call(["./openiot-agent.bin", "-o", openiot_outbound, "-i", openiot_hardware_id, "-s", openiot_spec_id, "-a", msg])
      # Remove the image after uploading
      os.remove(img_filefullpath)
      # If need to sleep for more than 1 second, sleep
#      if sleep_time > 1:
#         time.sleep(sleep_time-1)

# REST ARE ALL TEST CODE

# List all blobs
Developer: martincai, Project: Operation-Beautiful-Eyes, Lines: 32, Source: azure_storage.py

Example 14: AzureBackend

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]
class AzureBackend(duplicity.backend.Backend):
    """
    Backend for Azure Blob Storage Service
    """
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Import Microsoft Azure SDK for Python library.
        try:
            import azure
            from azure.storage import BlobService
        except ImportError:
            raise BackendException('Azure backend requires Microsoft Azure SDK for Python '
                                   '(https://github.com/Azure/azure-sdk-for-python).')

        if 'AZURE_ACCOUNT_NAME' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_NAME environment variable not set.')

        if 'AZURE_ACCOUNT_KEY' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_KEY environment variable not set.')

        account_name = os.environ['AZURE_ACCOUNT_NAME']
        account_key = os.environ['AZURE_ACCOUNT_KEY']
        self.WindowsAzureMissingResourceError = azure.WindowsAzureMissingResourceError
        self.blob_service = BlobService(account_name=account_name, account_key=account_key)
        # TODO: validate container name
        self.container = parsed_url.path.lstrip('/')
        try:
            self.blob_service.create_container(self.container, fail_on_exist=True)
        except azure.WindowsAzureConflictError:
            # Indicates that the resource could not be created because it already exists.
            pass
        except Exception as e:
            log.FatalError("Could not create Azure container: %s"
                           % unicode(e.message).split('\n', 1)[0],
                           log.ErrorCode.connection_failed)

    def _put(self, source_path, remote_filename):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#upload-blob
        self.blob_service.put_block_blob_from_path(self.container, remote_filename, source_path.name)

    def _get(self, remote_filename, local_path):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#download-blobs
        self.blob_service.get_blob_to_path(self.container, remote_filename, local_path.name)

    def _list(self):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#list-blob
        blobs = self.blob_service.list_blobs(self.container)
        return [blob.name for blob in blobs]

    def _delete(self, filename):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#delete-blobs
        self.blob_service.delete_blob(self.container, filename)

    def _query(self, filename):
        prop = self.blob_service.get_blob_properties(self.container, filename)
        return {'size': int(prop['content-length'])}

    def _error_code(self, operation, e):
        if isinstance(e, self.WindowsAzureMissingResourceError):
            return log.ErrorCode.backend_not_found
Developer: muff1nman, Project: duplicity, Lines: 63, Source: azurebackend.py

示例15: open

# Required import: from azure.storage import BlobService [as alias]
# Alternatively: from azure.storage.BlobService import put_block_blob_from_path [as alias]
with open(args.importRules, 'r') as infile:
    images = json.load(infile)

basepath = os.path.dirname(args.importRules)
os.chdir(basepath) 

for image in images:    
    idx = 0
    for filename in image['files']:
        # First, upload the full-resolution image
        basename = str(image['pk']) + "-" + str(idx)
        name = basename + ".jpg"
        print "Uploading " + filename + " to " + name
        blob_service.put_block_blob_from_path(
           container_name,
           name,
           filename,
           x_ms_blob_content_type='image/jpg' )

        # Create and upload thumbnails
        size = 256, 256

        im = Image.open(filename)
        im.thumbnail(size, Image.ANTIALIAS)      
        im.save("tmp-tn.jpg", "JPEG")
        name = basename + "-tn.jpg"
        blob_service.put_block_blob_from_path(
           container_name,
           name,
           "tmp-tn.jpg",
           x_ms_blob_content_type='image/jpg' )

        # Advance the per-image file index so each file gets a distinct blob name
        idx += 1
Developer: blelem, Project: HireMeOrGiveMeABeer, Lines: 33, Source: CreateBasicContent.py


Note: The azure.storage.BlobService.put_block_blob_from_path method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers, and copyright remains with the original authors; consult each project's license before distributing or using the code. Do not reproduce without permission.