

Python BlobService.put_block_blob_from_bytes Method Code Examples

This article collects typical usage examples of the Python method azure.storage.BlobService.put_block_blob_from_bytes. If you are wondering exactly how BlobService.put_block_blob_from_bytes is used, or want to see it in practice, the curated code examples below should help. You can also explore other usage examples of azure.storage.BlobService, the class this method belongs to.


The following presents 11 code examples of BlobService.put_block_blob_from_bytes, ordered by popularity by default.
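Before the examples, here is a minimal sketch of the basic call pattern against the legacy azure SDK; the account name, key, container, and blob names below are placeholders, not values from any of the projects cited here:

from azure.storage import BlobService  # legacy azure SDK (before the azure-storage split)

blob_service = BlobService(account_name='myaccount', account_key='<account-key>')
blob_service.create_container('mycontainer')

# Upload a byte string as a block blob, setting its content type and one metadata entry.
blob_service.put_block_blob_from_bytes(
    'mycontainer',             # container name
    'examples/hello.txt',      # blob name
    b'hello, blob storage',    # the bytes to upload
    x_ms_blob_content_type='text/plain',
    x_ms_meta_name_values={'source': 'example'},
)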

Example 1: resizeimageandputinazure

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import put_block_blob_from_bytes [as alias]
def resizeimageandputinazure(strkey, url):
    maxwidthandheight = 150
    resize = False

    bytes = urllib2.urlopen(url).read()
    img = Image.open(io.BytesIO(bytes))
    newwidth = img.width
    newheight = img.height

    if (newheight > newwidth and newheight > maxwidthandheight):
        heightpercent = maxwidthandheight/float(newheight)
        newheight = maxwidthandheight
        newwidth = int((float(img.width)*float(heightpercent)))
        resize = True
    elif (newwidth > newheight and newwidth > maxwidthandheight):
        widthpercent = maxwidthandheight/float(newwidth)
        newwidth = maxwidthandheight
        newheight = int((float(img.height)*float(widthpercent)))
        resize = True

    if resize:
        newimg = img.resize((newwidth, newheight), Image.ANTIALIAS)
        newimg.format = img.format

        newio = io.BytesIO()
        newimg.save(newio, 'JPEG')
        bytes = newio.getvalue()

    blob_service = BlobService(account_name='wanderight', account_key='gdmZeJOCx3HYlFPZZukUhHAfeGAu4cfHWGQZc3+HIpkBHjlznUDjhXMl5HWh5MgbjpJF09ZxRaET1JVF9S2MWQ==')
    blob_service.put_block_blob_from_bytes(config['container'], 'images/' + strkey, bytes,
                                           x_ms_blob_content_type='image/jpg', x_ms_meta_name_values={'url':url})
Author: trentniemeyer | Project: BlogParse | Lines of code: 33 | Source: Util.py

Example 2: upload_documents

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import put_block_blob_from_bytes [as alias]
def upload_documents():
    data = request.json.get('data', None)
    if not data:
        return jsonify(status=400, message='No file content passed')

    data = data.decode("base64")
    upload_handler = get_upload_handler()

    # force is a flag that signals to upload the current file even if it was uploaded before
    force = request.json.get('force', None)
    if force is None or force.lower() != "true":
        if upload_handler.is_file_already_uploaded(data, current_user.get_id()):
            return jsonify(status=400, message='File content was already uploaded. Force upload by adding the force boolean')

    blob_service = BlobService(account_name=BLOB_ACCOUNT_NAME, account_key=BLOB_ACCOUNT_KEY)
    filename = uuid.uuid4().hex
    # put the data in the container using a random filename
    blob_service.put_block_blob_from_bytes(BLOB_CONTAINER_NAME, filename, data)

    task_collection = get_db().task_collection
    
    # update the task db with the new task (which is parsing the new data file)
    task_id = upload_handler.update_uploaded_file(filename, data, current_user.get_id())

    return jsonify(status=200, message='Task created successfully', task_id=task_id)
Author: yanivoliver | Project: Cloud | Lines of code: 27 | Source: StudyBuddy.py
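The route decorator and URL are not shown in the snippet above; assuming a hypothetical /documents endpoint and ignoring the login handling behind current_user, a client call might look like this sketch:

import base64
import requests  # third-party HTTP client, used here for illustration only

with open('report.pdf', 'rb') as f:
    payload = {
        'data': base64.b64encode(f.read()),  # the view decodes this with data.decode("base64")
        'force': 'true',                     # re-upload even if the same content was sent before
    }

resp = requests.post('http://localhost:5000/documents', json=payload)  # hypothetical endpoint
print(resp.json())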

Example 3: AzureTransfer

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import put_block_blob_from_bytes [as alias]
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, container_name):
        BaseTransfer.__init__(self)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name
        self.conn = BlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized")

    def get_metadata_for_key(self, key):
        key = fix_path(key)
        return self.list_path(key)[0]['metadata']

    def list_path(self, path):
        return_list = []
        path = fix_path(path)
        self.log.info("Asking for listing of: %r", path)
        for r in self.conn.list_blobs(self.container_name, prefix=path, delimiter="/",
                                      include="metadata"):
            entry = {"name": r.name, "size": r.properties.content_length,
                     "last_modified": dateutil.parser.parse(r.properties.last_modified),
                     "metadata": r.metadata}
            return_list.append(entry)
        return return_list

    def delete_key(self, key_name):
        key_name = fix_path(key_name)
        self.log.debug("Deleting key: %r", key_name)
        return self.conn.delete_blob(self.container_name, key_name)

    def get_contents_to_file(self, obj_key, filepath_to_store_to):
        obj_key = fix_path(obj_key)
        self.log.debug("Starting to fetch the contents of: %r to: %r", obj_key, filepath_to_store_to)
        return self.conn.get_blob_to_path(self.container_name, obj_key, filepath_to_store_to)

    def get_contents_to_string(self, obj_key):
        obj_key = fix_path(obj_key)
        self.log.debug("Starting to fetch the contents of: %r", obj_key)
        return self.conn.get_blob_to_bytes(self.container_name, obj_key), self.get_metadata_for_key(obj_key)

    def store_file_from_memory(self, key, memstring, metadata=None):
        # For whatever reason Azure requires all values to be strings at the point of sending
        metadata_to_send = dict((str(k), str(v)) for k, v in metadata.items())
        self.conn.put_block_blob_from_bytes(self.container_name, key, memstring,
                                            x_ms_meta_name_values=metadata_to_send)

    def store_file_from_disk(self, key, filepath, metadata=None):
        # For whatever reason Azure requires all values to be strings at the point of sending
        metadata_to_send = dict((str(k), str(v)) for k, v in metadata.items())
        self.conn.put_block_blob_from_path(self.container_name, key, filepath,
                                           x_ms_meta_name_values=metadata_to_send)

    def get_or_create_container(self, container_name):
        start_time = time.time()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs", container_name, time.time() - start_time)
        return container_name
Author: Ormod | Project: pghoard | Lines of code: 60 | Source: azure.py
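For context, the store_file_from_memory / store_file_from_disk calls above reduce to the following direct BlobService calls. This is only a sketch: the account, container, blob names, and metadata values are placeholders, not pghoard's real configuration:

from azure.storage import BlobService

conn = BlobService(account_name='myaccount', account_key='<account-key>')
conn.create_container('backups')  # by default this does not fail if the container already exists

metadata = {'compression': 'snappy', 'original-size': 1048576}
# Azure requires all metadata keys and values to be strings when sent.
metadata_to_send = dict((str(k), str(v)) for k, v in metadata.items())

# Upload from memory:
conn.put_block_blob_from_bytes('backups', 'basebackup/chunk0', b'...chunk data...',
                               x_ms_meta_name_values=metadata_to_send)

# Upload from a local file:
conn.put_block_blob_from_path('backups', 'basebackup/chunk1', '/tmp/chunk1',
                              x_ms_meta_name_values=metadata_to_send)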

Example 4: upload_chunck

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import put_block_blob_from_bytes [as alias]
def upload_chunck(buf, path, storagename, container, key):
    blob_service = BlobService(account_name=storagename, account_key=key)
    loop = 0
    while True:
        try:
            blob_service.put_block_blob_from_bytes(container, path, buf)
            break
        except (azure.http.HTTPError, TimeoutError) as e:
            loop = loop + 1
            if loop >= 3:
                raise e
Author: hwind | Project: hwindCode | Lines of code: 13 | Source: videomanager.py
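A quick usage sketch of the helper above; the chunk contents, blob path, storage account, container, and key are placeholders:

# Read one chunk of a local video file (placeholder path and chunk size).
with open('movie.mp4', 'rb') as f:
    chunk = f.read(4 * 1024 * 1024)

upload_chunck(
    buf=chunk,
    path='videos/movie/part-0001',   # blob name within the container
    storagename='mystorageaccount',  # Azure storage account name
    container='videochunks',
    key='<account-key>',
)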

Example 5: rest_modify_image

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import put_block_blob_from_bytes [as alias]
def rest_modify_image(album_name, username):
    gallery_db = connect_to_db()
    albums = gallery_db.albums

    requested_album = albums.find_one({"name": album_name})
    if not requested_album:
        return jsonify({'error': "album does not exist"})

    if username not in requested_album["write"]:
        return jsonify({'error': "no permission to post images"})

    if request.method == 'POST':
        req_file = request.json.get('data', '')
        if not req_file:
            return jsonify({'error': "no images"})

        file_name = uuid.uuid4().hex
        blob_service = BlobService(account_name=ACCOUNT_NAME, account_key=ACCOUNT_KEY)
        blob_service.put_block_blob_from_bytes(CONTAINER_NAME, file_name, req_file.decode("base64"))

        gallery_db.albums.update({'name': album_name}, {'$push': {'images': file_name}})
        return jsonify({'success': "file uploaded", 'file_name': file_name})
    else:
        # DELETE
        image = request.json.get('image', '')
        if not image:
            return jsonify({'error': 'no image name'})

        blob_service = BlobService(account_name=ACCOUNT_NAME, account_key=ACCOUNT_KEY)
        try:
            blob_service.delete_blob(CONTAINER_NAME, image)
        except WindowsAzureMissingResourceError:
            # Even if the file is not in the blob storage, we want to remove it from the album
            pass

        gallery_db.albums.update({'name': album_name}, {'$pull': {'images': image}})
        return jsonify({'success': "file deleted"})
Author: yanivoliver | Project: Cloud | Lines of code: 39 | Source: Gallery.py

Example 6: motion_detect

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import put_block_blob_from_bytes [as alias]
def motion_detect():
    try:
        while True:
            if gpio.input(37) == 1:
                print "Motion Detected"
                hubClient = EventHubClient()
                hostname = socket.gethostname()

                host = socket.gethostname()
                body = ""
                first = True
                sensorType = "Motion"
                sensorValue = "1"
                sensorId = '[email protected]'
                tablename = 'sensordata'
                motiontime = time.strftime('%m/%d/%Y %H:%M:%S')
                add_info = host + "[email protected]_" + motiontime.replace("/", "-").replace(" ", "-").replace(":", "-") + ".jpg"
                print motiontime

                if first == True:
                    first = False
                else:
                    body += ","

                # deviceid_sensorid_motiondatetime (- replace with).png

                body += "{ \"DeviceId\" : \"" + host + "\", \"SensorId\" : \"" + sensorId + "\", \"SensorType\" : \"" + sensorType + "\", \"SensorValue\" : \"" + sensorValue + "\", \"Datetime\" : \"" + motiontime + "\", \"AdditionalInfo\" : \"" + add_info + "\", \"table_name\" : \"" + tablename + "\" }"

                my_stream = io.BytesIO()
                with picamera.PiCamera() as camera:
                    camera.resolution = (640, 480)
                    camera.capture(my_stream, 'jpeg')
                    img_stream = base64.b64encode(my_stream.getvalue())
                    img_type = "jpeg"
                    sensorType = "PiCam"
                    sensorValue = img_stream
                    sensorId = '[email protected]'
                    #add_info = host + "_" + sensorId + "_" + motiontime.replace("/","-").replace(" ","-").replace(":","-") + ".jpg"
                print add_info
                print time.strftime('%m/%d/%Y %H:%M:%S')
                cbody = "{ \"DeviceId\" : \"" + host + "\", \"SensorId\" : \"" + sensorId + "\", \"SensorType\" : \"" + sensorType + "\", \"SensorValue\" : \"" + sensorValue + "\", \"Datetime\" : \"" + time.strftime('%m/%d/%Y %H:%M:%S') + "\", \"AdditionalInfo\" : \"" + add_info + "\", \"table_name\" : \"" + tablename + "\" }"
                hubStatus = hubClient.sendMessage(body, hostname)
                print hubStatus
                hubStatus = hubClient.sendMessage(cbody, hostname)
                print hubStatus
                print time.strftime('%m/%d/%Y %H:%M:%S')
                #print cbody

                upload = my_stream.getvalue()
                container = "motioncaptureimages-direct"
                blob = add_info
                blob_service = BlobService(account_name='commoniotstorage', account_key='PX4BC7LHPFWYtayYDtHAC/CV/+VHWOudqXBB9En2dGYHg3yGnXwbXIOHyvdq0gEU0P4FTV0A5AlRBHFe5DL5Kg==')
                blobstatus = blob_service.put_block_blob_from_bytes(
                    container,
                    blob,
                    upload,
                    x_ms_blob_content_type='image/jpg'
                )
                print blobstatus
                print time.strftime('%m/%d/%Y %H:%M:%S')

    except KeyboardInterrupt:
        gpio.cleanup()
        print "\n Terminated by User"
        sys.exit()
Author: parasshah143 | Project: python_script | Lines of code: 71 | Source: mspicamb.py

Example 7: int

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import put_block_blob_from_bytes [as alias]
    # ADDED ####################################################################
    ## Decrease main image size
    # we need to keep in mind aspect ratio
    if image.shape[1] >= image.shape[0]:
      width = 800
      r = (width * 1.0) / image.shape[1]
      dim = ( width, int(image.shape[0] * r) )
    else:
      height = 800
      r = (height * 1.0) / image.shape[0]
      dim = ( int(image.shape[1] * r), height )
    # perform the actual resizing of the image
    image = cv2.resize(image, dim, interpolation = cv2.INTER_AREA)
    # put to blob smaller version
    lol, image_tn = cv2.imencode( '.jpg', image )
    blob_service.put_block_blob_from_bytes( blob_container, imgBlobName, str(bytearray(image_tn.flatten().tolist())) )
    ############################################################################


    # process image
    colourStructure = getCharacteristics( image, region, results )

    blob_service.put_block_blob_from_bytes( blob_analysis, imgBlobName, dumps(colourStructure) )

    # {'PartitionKey': 'allPhotos', 'RowKey': 'imageName', 'thumbnail' : 'thumbnailName',
    #  'userId' : ?, 'local' : ?, 'hue' : 200, 'saturation' : 200, 'value' : 200}
    ## query for image in table to ensure existence
    currentTask = table_service.get_entity( tableName, tablePartitionKey, tableRowKey)

    
  
Author: macqm | Project: mosaic-opencv | Lines of code: 30 | Source: analyse.cv.py
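A side note on the upload above: str(bytearray(image_tn.flatten().tolist())) rebuilds the encoded JPEG byte string element by element under Python 2; the NumPy buffer returned by cv2.imencode can also be serialized directly. A sketch reusing the snippet's blob_service, blob_container, and imgBlobName names:

ok, image_tn = cv2.imencode('.jpg', image)
if ok:
    # tostring() returns the raw bytes of the uint8 buffer, i.e. the encoded JPEG data.
    blob_service.put_block_blob_from_bytes(blob_container, imgBlobName, image_tn.tostring())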

Example 8: AzureTransfer

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import put_block_blob_from_bytes [as alias]
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, container_name, prefix=None):
        # NOTE: Azure wants all paths to start with a slash
        prefix = "/{}".format(prefix.lstrip("/") if prefix else "")
        super().__init__(prefix=prefix)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name
        self.conn = BlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized")
        # XXX: AzureTransfer isn't actively tested and its error handling is probably lacking
        self.log.warning("AzureTransfer is experimental and has not been thoroughly tested")

    def get_metadata_for_key(self, key):
        key = self.format_key_for_backend(key)
        return self._list_blobs(key)[0]["metadata"]

    def _metadata_for_key(self, key):
        return self._list_blobs(key)[0]["metadata"]

    def list_path(self, key):
        path = self.format_key_for_backend(key, trailing_slash=True)
        return self._list_blobs(path)

    def _list_blobs(self, path):
        self.log.debug("Listing path %r", path)
        items = self.conn.list_blobs(self.container_name, prefix=path, delimiter="/", include="metadata")
        result = []
        for item in items:
            result.append({
                "last_modified": dateutil.parser.parse(item.properties.last_modified),
                "metadata": item.metadata,
                "name": self.format_key_from_backend(item.name),
                "size": item.properties.content_length,
            })
        return result

    def delete_key(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Deleting key: %r", key)
        return self.conn.delete_blob(self.container_name, key)

    def get_contents_to_file(self, key, filepath_to_store_to):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r to: %r", key, filepath_to_store_to)
        return self.conn.get_blob_to_path(self.container_name, key, filepath_to_store_to)

    def get_contents_to_fileobj(self, key, fileobj_to_store_to):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        return self.conn.get_blob_to_file(self.container_name, key, fileobj_to_store_to)

    def get_contents_to_string(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        return self.conn.get_blob_to_bytes(self.container_name, key), self._metadata_for_key(key)

    def store_file_from_memory(self, key, memstring, metadata=None):
        key = self.format_key_for_backend(key)
        # Azure requires all metadata keys and values to be strings
        metadata_to_send = {str(k): str(v) for k, v in metadata.items()}
        self.conn.put_block_blob_from_bytes(self.container_name, key, memstring,
                                            x_ms_meta_name_values=metadata_to_send)

    def store_file_from_disk(self, key, filepath, metadata=None, multipart=None):
        key = self.format_key_for_backend(key)
        # Azure requires all metadata keys and values to be strings
        metadata_to_send = {str(k): str(v) for k, v in metadata.items()}
        self.conn.put_block_blob_from_path(self.container_name, key, filepath,
                                           x_ms_meta_name_values=metadata_to_send)

    def get_or_create_container(self, container_name):
        start_time = time.time()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs", container_name, time.time() - start_time)
        return container_name
Author: c2h5oh | Project: pghoard | Lines of code: 79 | Source: azure.py

Example 9: upload

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import put_block_blob_from_bytes [as alias]
def upload():

    file = request.files['fileInput']
    print "File is" + file.filename


    if file:
        data = file.read()


        blob_service = BlobService(account_name='squadshots', account_key='UgxaWKAKv2ZvhHrPt0IHi4EQedPpZw35r+RXkAYB2eICPrG3TjSwk2G8gUzG/PNDDTV+4CVCYWCvZSiad5xMQQ==')
        blob_service.create_container('album')

        blob_service.put_block_blob_from_bytes(
            'album',
            file.filename + "_blob",
            data,
            x_ms_blob_content_type='image/png'
        )

        if 'username' in session:
            un = session['username']
        else:
            print "not in session"

        blob_service.set_blob_metadata(container_name="album",
                                   blob_name=file.filename + "_blob",
                                   x_ms_meta_name_values={'metaun': un})

        blob_service.get_blob_to_path('album',file.filename + "_blob",'static/output.png')
        f = open('input_person.png','w+')
        f.write(data)
        f.close()


        [X, y] = read_images(OUTPUT_DIRECTORY, (256, 256))
        # Convert labels to 32bit integers. This is a workaround for 64bit machines.
        y = np.asarray(y, dtype=np.int32)

        # Create the Eigenfaces model.
        model = cv2.createEigenFaceRecognizer()
        # Learn the model. Remember our function returns Python lists,
        # so we use np.asarray to turn them into NumPy lists to make
        # the OpenCV wrapper happy:
        model.train(np.asarray(X), np.asarray(y))

        # Save the model for later use
        model.save("eigenModel.xml")

        # Create an Eigenface recogniser with a confidence threshold
        t = float(100000)
        model = cv2.createEigenFaceRecognizer(threshold=t)

        # Load the model
        model.load("eigenModel.xml")

        # Read the image we're looking for
        try:
            sampleImage = cv2.imread('static/output.png', cv2.IMREAD_GRAYSCALE)
            if sampleImage is not None:
                sampleImage = cv2.resize(sampleImage, (256, 256))
            else:
                print "sample image is null"
        except IOError:
            print "IO error"

        # Look through the model and find the face it matches
        [p_label, p_confidence] = model.predict(sampleImage)

        # Print the confidence levels
        print "Predicted label = %d (confidence=%.2f)" % (p_label, p_confidence)

        # If the model found something, print the file path
        if (p_label > -1):
            count = 0
            for dirname, dirnames, filenames in os.walk(OUTPUT_DIRECTORY):
                for subdirname in dirnames:
                    subject_path = os.path.join(dirname, subdirname)
                    if (count == p_label):
                        for filename in os.listdir(subject_path):
                            print "subject path = " + subject_path

                    count = count+1

    return "uploaded"
Author: engineershreyas | Project: SquadShots | Lines of code: 89 | Source: app.py

Example 10: BlobServiceAdapter

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import put_block_blob_from_bytes [as alias]
class BlobServiceAdapter(Component):
    """The :class:`BlobServiceAdapter` class is a thin wrapper over azure.storage.BlobService.

    All the attributes of the wrapper stream are proxied by the adapter so
    it's possible to do ``adapter.create_container()`` instead of the long form
    ``adapter.blob_service.create_container()``.
    """

    def __init__(self):
        self.blob_service = BlobService(
            account_name=self.util.get_config("storage.azure.account_name"),
            account_key=self.util.get_config("storage.azure.account_key"),
            host_base=self.util.get_config("storage.azure.blob_service_host_base"),
        )

    def __getattr__(self, name):
        return getattr(self.blob_service, name)

    def create_container_in_storage(self, container_name, access="container"):
        """create a container if doesn't exist

        :type container_name: str|unicode
        :param container_name: Name of container to create.

        :type access: str|unicode
        :param access: Optional. Possible values include: container, blob
        :return:
        """
        try:
            names = [x.name for x in self.blob_service.list_containers()]
            if container_name not in names:
                return self.blob_service.create_container(container_name, x_ms_blob_public_access=access)
            else:
                self.log.debug("container already exists in storage")
                return True
        except Exception as e:
            self.log.error(e)
            return False

    def upload_file_to_azure(self, container_name, blob_name, stream):
        """
        Creates a new block blob from a file/stream, or updates the content of
        an existing block blob, with automatic chunking and progress
        notifications.

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str | unicode
        :param blob_name: Name of blob to create or update.

        :type stream: file
        :param stream: Opened file/stream to upload as the blob content.
        """
        try:
            if self.create_container_in_storage(container_name, "container"):
                self.blob_service.put_block_blob_from_file(container_name, blob_name, stream)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_bytes(self, container_name, blob_name, blob):
        """
        Creates a new block blob from an array of bytes, or updates the content
        of an existing block blob, with automatic chunking and progress
        notifications.

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str|unicode
        :param blob_name: Name of blob to create or update.

        :type blob: bytes
        :param blob: Content of blob as an array of bytes.
        """
        try:
            if self.create_container_in_storage(container_name, "container"):
                self.blob_service.put_block_blob_from_bytes(container_name, blob_name, blob)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_text(self, container_name, blob_name, text):
        """
        Creates a new block blob from str/unicode, or updates the content of an
        existing block blob, with automatic chunking and progress notifications.

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str|unicode
        :param blob_name: Name of blob to create or update.

#.........part of the code omitted here.........
Author: ifhuang | Project: open-hackathon | Lines of code: 103 | Source: blob_service_adapter.py
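BlobServiceAdapter relies on an open-hackathon Component base class (self.util, self.log) that is not shown here. Stripped of that framework, upload_file_to_azure_from_bytes reduces to roughly the following sketch; the account, container, and blob names are placeholders, and host_base is assumed to be the default public endpoint suffix:

from azure.storage import BlobService

blob_service = BlobService(account_name='myaccount', account_key='<account-key>',
                           host_base='.blob.core.windows.net')

def upload_bytes(container_name, blob_name, blob):
    # Create the container with public "container" access if it does not exist yet.
    existing = [c.name for c in blob_service.list_containers()]
    if container_name not in existing:
        blob_service.create_container(container_name, x_ms_blob_public_access='container')
    blob_service.put_block_blob_from_bytes(container_name, blob_name, blob)
    return blob_service.make_blob_url(container_name, blob_name)

url = upload_bytes('hackathon', 'uploads/avatar.png', b'<binary image data>')
print(url)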

Example 11: makeThumbnail

# Required import: from azure.storage import BlobService [as alias]
# Or: from azure.storage.BlobService import put_block_blob_from_bytes [as alias]
    # process image
    image_tn = makeThumbnail( image, imageWidth )
    (hw, sw, vw) = getCharacteristics( image )

    # put thumbnail to blob: add suffix _tn
    result, blob_tn = cv2.imencode( '.jpg', image_tn )

    # Override
    tnID = imgBlobName
    # if imgBlobName[-4] == '.'  :
      # tnID = imgBlobName[:-4] + "_tn" + imgBlobName[-4:]
    # else :
      # tnID = imgBlobName[:-5] + "_tn" + imgBlobName[-5:]


    blob_service.put_block_blob_from_bytes( blob_container, tnID, str(bytearray(blob_tn.flatten().tolist())) )

    # {'PartitionKey': 'allPhotos', 'RowKey': 'imageName', 'thumbnail' : 'thumbnailName',
    #  'userId' : ?, 'local' : ?, 'hue' : 200, 'saturation' : 200, 'value' : 200}
    ## query for image in table to ensure existence
    currentTask = table_service.get_entity( tableName, tablePartitionKey, tableRowKey)
  

    ## send the quantities to table: save thumbnail ID & save image characteristics
    # currentTask.thumbnail = tnID
    currentTask.hue = hw
    currentTask.saturation = sw
    currentTask.value = vw
    table_service.update_entity( tableName, tablePartitionKey, tableRowKey, currentTask)

    # dequeue image
Author: macqm | Project: mosaic-opencv | Lines of code: 33 | Source: preprocess.cv.py


Note: The azure.storage.BlobService.put_block_blob_from_bytes examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open source code and documentation platforms. The snippets were selected from open source projects contributed by their respective developers, and copyright remains with the original authors. Please consult each project's license before redistributing or reusing the code; do not reproduce without permission.