

Python transforms.CenterCrop Method Code Examples

This article collects typical usage examples of the Python method mxnet.gluon.data.vision.transforms.CenterCrop. If you are unsure what transforms.CenterCrop does, how to call it, or what it looks like in practice, the curated code examples below should help. You can also explore further usage examples of the enclosing module, mxnet.gluon.data.vision.transforms.


The following presents 12 code examples of the transforms.CenterCrop method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
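
Before diving into the collected examples, here is a minimal, self-contained sketch of basic CenterCrop usage; the dummy image shape and the print statement are illustrative only and not taken from any of the projects below:

import mxnet as mx
from mxnet.gluon.data.vision import transforms

# Build a dummy 300x480 HWC uint8 image and crop its central 224x224 region.
img = mx.nd.random.uniform(0, 255, shape=(300, 480, 3)).astype('uint8')
center_crop = transforms.CenterCrop(224)
out = center_crop(img)
print(out.shape)  # (224, 224, 3)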

Example 1: test_transformer

# Required module import: from mxnet.gluon.data.vision import transforms [as alias]
# Or: from mxnet.gluon.data.vision.transforms import CenterCrop [as alias]
def test_transformer():
    import mxnet as mx
    from mxnet.gluon.data.vision import transforms

    transform = transforms.Compose([
        transforms.Resize(300),
        transforms.Resize(300, keep_ratio=True),
        transforms.CenterCrop(256),
        transforms.RandomResizedCrop(224),
        transforms.RandomFlipLeftRight(),
        transforms.RandomColorJitter(0.1, 0.1, 0.1, 0.1),
        transforms.RandomBrightness(0.1),
        transforms.RandomContrast(0.1),
        transforms.RandomSaturation(0.1),
        transforms.RandomHue(0.1),
        transforms.RandomLighting(0.1),
        transforms.ToTensor(),
        transforms.Normalize([0, 0, 0], [1, 1, 1])])

    transform(mx.nd.ones((245, 480, 3), dtype='uint8')).wait_to_read() 
Developer: awslabs, Project: dynamic-training-with-apache-mxnet-on-aws, Lines: 21, Source: test_gluon_data_vision.py

Example 2: create_transformer

# Required module import: from mxnet.gluon.data.vision import transforms [as alias]
# Or: from mxnet.gluon.data.vision.transforms import CenterCrop [as alias]
def create_transformer(self):
        train_tforms, eval_tforms = [transforms.Resize(self.args.resize)], [transforms.Resize(self.args.resize)]

        if self.args.random_crop:
            train_tforms.append(transforms.RandomResizedCrop(self.args.size, scale=(0.8, 1.2)))
        else:
            train_tforms.append(transforms.CenterCrop(self.args.size))

        eval_tforms.append(transforms.CenterCrop(self.args.size))

        if self.args.flip:
            train_tforms.append(transforms.RandomFlipLeftRight())

        if self.args.random_color:
            train_tforms.append(transforms.RandomColorJitter(self.args.color_jitter, self.args.color_jitter,
                                                             self.args.color_jitter, 0.1))

        train_tforms.extend([transforms.ToTensor(), transforms.Normalize(self.args.mean, self.args.std)])
        eval_tforms.extend([transforms.ToTensor(), transforms.Normalize(self.args.mean, self.args.std)])

        train_tforms = transforms.Compose(train_tforms)
        eval_tforms = transforms.Compose(eval_tforms)

        return train_tforms, eval_tforms 
Developer: aws-samples, Project: d-SNE, Lines: 26, Source: training_sda.py

Example 3: get_data_loader

# Required module import: from mxnet.gluon.data.vision import transforms [as alias]
# Or: from mxnet.gluon.data.vision.transforms import CenterCrop [as alias]
def get_data_loader(data_dir, batch_size, num_workers):
    normalize = transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    def batch_fn(batch, ctx):
        data = gluon.utils.split_and_load(batch[0], ctx_list=ctx, batch_axis=0)
        label = gluon.utils.split_and_load(batch[1], ctx_list=ctx, batch_axis=0)
        return data, label
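    # `opt` below is a module-level object holding parsed command-line options in the
    # original benchmark script; it is not defined in this snippet.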
    if opt.mode == 'symbolic':
        val_data = mx.io.NDArrayIter(
            mx.nd.random.normal(shape=(opt.dataset_size, 3, 224, 224)),
            label=mx.nd.array(range(opt.dataset_size)),
            batch_size=batch_size,
        )
        transform_test = transforms.Compose([
            transforms.Resize(256, keep_ratio=True),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            normalize
        ])
        val_data = gluon.data.DataLoader(
            imagenet.classification.ImageNet(data_dir, train=False).transform_first(transform_test),
            batch_size=batch_size, shuffle=False, num_workers=num_workers)

    return val_data, batch_fn 
Developer: awslabs, Project: deeplearning-benchmark, Lines: 25, Source: infer_imagenet.py

Example 4: test_transformer

# Required module import: from mxnet.gluon.data.vision import transforms [as alias]
# Or: from mxnet.gluon.data.vision.transforms import CenterCrop [as alias]
def test_transformer():
    import mxnet as mx
    from mxnet.gluon.data.vision import transforms

    transform = transforms.Compose([
        transforms.Resize(300),
        transforms.CenterCrop(256),
        transforms.RandomResizedCrop(224),
        transforms.RandomFlipLeftRight(),
        transforms.RandomColorJitter(0.1, 0.1, 0.1, 0.1),
        transforms.RandomBrightness(0.1),
        transforms.RandomContrast(0.1),
        transforms.RandomSaturation(0.1),
        transforms.RandomHue(0.1),
        transforms.RandomLighting(0.1),
        transforms.ToTensor(),
        transforms.Normalize([0, 0, 0], [1, 1, 1])])

    transform(mx.nd.ones((245, 480, 3), dtype='uint8')).wait_to_read() 
Developer: mahyarnajibi, Project: SNIPER-mxnet, Lines: 20, Source: test_gluon_data_vision.py

Example 5: get_val_data

# Required module import: from mxnet.gluon.data.vision import transforms [as alias]
# Or: from mxnet.gluon.data.vision.transforms import CenterCrop [as alias]
def get_val_data(rec_val, batch_size, data_nthreads, input_size, crop_ratio):
    def val_batch_fn(batch, ctx):
        data = batch[0].as_in_context(ctx)
        label = batch[1].as_in_context(ctx)
        return data, label

    normalize = transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    crop_ratio = crop_ratio if crop_ratio > 0 else 0.875
    resize = int(math.ceil(input_size/crop_ratio))
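    # e.g. with the default crop_ratio of 0.875 and input_size 224, resize = ceil(224 / 0.875) = 256,
    # which matches the common 256 -> 224 center-crop evaluation pipeline.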

    from gluoncv.utils.transforms import EfficientNetCenterCrop
    from autogluon.utils import pil_transforms

    if input_size >= 320:
        transform_test = transforms.Compose([
            pil_transforms.ToPIL(),
            EfficientNetCenterCrop(input_size),
            pil_transforms.Resize((input_size, input_size), interpolation=Image.BICUBIC),
            pil_transforms.ToNDArray(),
            transforms.ToTensor(),
            normalize
        ])
    else:
        transform_test = transforms.Compose([
            transforms.Resize(resize, keep_ratio=True),
            transforms.CenterCrop(input_size),
            transforms.ToTensor(),
            normalize
        ])

    val_set = mx.gluon.data.vision.ImageRecordDataset(rec_val).transform_first(transform_test)

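    # `num_workers` and `rank` used by SplitSampler below are module-level globals in the
    # original train_horovod.py (presumably the Horovod world size and rank); the sampler
    # shards the validation set across workers.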
    val_sampler = SplitSampler(len(val_set), num_parts=num_workers, part_index=rank)
    val_data = gluon.data.DataLoader(val_set, batch_size=batch_size,
                                     num_workers=data_nthreads,
                                     sampler=val_sampler)

    return val_data, val_batch_fn

# Horovod: pin GPU to local rank 
Developer: dmlc, Project: gluon-cv, Lines: 42, Source: train_horovod.py

Example 6: transform_eval

# Required module import: from mxnet.gluon.data.vision import transforms [as alias]
# Or: from mxnet.gluon.data.vision.transforms import CenterCrop [as alias]
def transform_eval(imgs, resize_short=256, crop_size=224,
                   mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)):
    """A util function to transform all images to tensors as network input by applying
    normalizations. This function support 1 NDArray or iterable of NDArrays.

    Parameters
    ----------
    imgs : NDArray or iterable of NDArray
        Image(s) to be transformed.
    resize_short : int, default=256
        Resize image short side to this value and keep aspect ratio.
    crop_size : int, default=224
        After resize, crop the center square of size `crop_size`
    mean : iterable of float
        Mean pixel values.
    std : iterable of float
        Standard deviations of pixel values.

    Returns
    -------
    mxnet.NDArray or list of NDArray
        A (1, 3, H, W) mxnet NDArray ready to be used as network input.
        If multiple images are supplied, a list of such NDArrays is returned.
    """
    if isinstance(imgs, mx.nd.NDArray):
        imgs = [imgs]
    for im in imgs:
        assert isinstance(im, mx.nd.NDArray), "Expect NDArray, got {}".format(type(im))

    transform_fn = transforms.Compose([
        transforms.Resize(resize_short, keep_ratio=True),
        transforms.CenterCrop(crop_size),
        transforms.ToTensor(),
        transforms.Normalize(mean, std)
    ])

    res = [transform_fn(img).expand_dims(0) for img in imgs]

    if len(res) == 1:
        return res[0]
    return res 
Developer: dmlc, Project: gluon-cv, Lines: 43, Source: imagenet.py
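
For context, here is a minimal usage sketch of the transform_eval helper above. The image path and model name are hypothetical, and the import path assumes the function ships with gluon-cv under gluoncv.data.transforms.presets.imagenet:

import mxnet as mx
from gluoncv import model_zoo
from gluoncv.data.transforms.presets.imagenet import transform_eval

img = mx.image.imread('example.jpg')    # HWC uint8 NDArray (hypothetical file)
x = transform_eval(img)                 # (1, 3, 224, 224) normalized tensor
net = model_zoo.get_model('ResNet50_v1b', pretrained=True)
pred = net(x)                           # (1, 1000) class scores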

Example 7: create_loader

# Required module import: from mxnet.gluon.data.vision import transforms [as alias]
# Or: from mxnet.gluon.data.vision.transforms import CenterCrop [as alias]
def create_loader(self):
        """
        Overwrite the data loader function
        :return: pairwise data loader, None, eval source loader, test target loader
        """
        cpus = cpu_count()

        train_tforms, eval_tforms = [transforms.Resize(self.args.resize)], [transforms.Resize(self.args.resize)]

        if self.args.random_crop:
            train_tforms.append(transforms.RandomResizedCrop(self.args.size, scale=(0.8, 1.2)))
        else:
            train_tforms.append(transforms.CenterCrop(self.args.size))

        eval_tforms.append(transforms.CenterCrop(self.args.size))

        if self.args.flip:
            train_tforms.append(transforms.RandomFlipLeftRight())

        if self.args.random_color:
            train_tforms.append(transforms.RandomColorJitter(self.args.color_jitter, self.args.color_jitter,
                                                             self.args.color_jitter, 0.1))

        train_tforms.extend([transforms.ToTensor(), transforms.Normalize(self.args.mean, self.args.std)])
        eval_tforms.extend([transforms.ToTensor(), transforms.Normalize(self.args.mean, self.args.std)])

        train_tforms = transforms.Compose(train_tforms)
        eval_tforms = transforms.Compose(eval_tforms)

        if 'digits' in self.args.cfg:
            trs_set, tes_set, tet_set = self.create_digits_datasets(train_tforms, eval_tforms)
        elif 'office' in self.args.cfg:
            trs_set, tes_set, tet_set = self.create_office_datasets(train_tforms, eval_tforms)
        elif 'visda' in self.args.cfg:
            trs_set, tes_set, tet_set = self.create_visda_datasets(train_tforms, eval_tforms)
        else:
            raise NotImplementedError

        self.train_src_loader = DataLoader(trs_set, self.args.bs, shuffle=True, num_workers=cpus)
        self.test_src_loader = DataLoader(tes_set, self.args.bs, shuffle=False, num_workers=cpus)
        self.test_tgt_loader = DataLoader(tet_set, self.args.bs, shuffle=False, num_workers=cpus) 
Developer: aws-samples, Project: d-SNE, Lines: 43, Source: training_sda.py

Example 8: imagenet_val_transform

# Required module import: from mxnet.gluon.data.vision import transforms [as alias]
# Or: from mxnet.gluon.data.vision.transforms import CenterCrop [as alias]
def imagenet_val_transform(ds_metainfo):
    """
    Create image transform sequence for validation subset.

    Parameters
    ----------
    ds_metainfo : DatasetMetaInfo
        ImageNet-1K dataset metainfo.

    Returns
    -------
    Sequential
        Image transform sequence.
    """
    input_image_size = ds_metainfo.input_image_size
    resize_value = calc_val_resize_value(
        input_image_size=ds_metainfo.input_image_size,
        resize_inv_factor=ds_metainfo.resize_inv_factor)
    return transforms.Compose([
        transforms.Resize(
            size=resize_value,
            keep_ratio=True,
            interpolation=ds_metainfo.interpolation),
        transforms.CenterCrop(size=input_image_size),
        transforms.ToTensor(),
        transforms.Normalize(
            mean=ds_metainfo.mean_rgb,
            std=ds_metainfo.std_rgb)
    ]) 
Developer: osmr, Project: imgclsmob, Lines: 31, Source: imagenet1k_cls_dataset.py

Example 9: get_data_loader

# Required module import: from mxnet.gluon.data.vision import transforms [as alias]
# Or: from mxnet.gluon.data.vision.transforms import CenterCrop [as alias]
def get_data_loader(data_dir, batch_size, num_workers, opt):
    """
    Creates and returns an MXNet DataLoader object and a function that splits data into batches.
    """
    normalize = transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    def batch_fn(batch, ctx):
        data = gluon.utils.split_and_load(batch[0], ctx_list=ctx, batch_axis=0)
        label = gluon.utils.split_and_load(batch[1], ctx_list=ctx, batch_axis=0)
        return data, label
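    # `opt` and `context` below are module-level globals in the original script (the parsed
    # command-line options and the MXNet compute context); neither is defined in this snippet.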
    if opt.mode == 'symbolic':
        val_data = mx.io.NDArrayIter(
            mx.nd.random.normal(shape=(opt.dataset_size, 3, 224, 224), ctx=context),
            label=mx.nd.array(range(opt.dataset_size)),
            batch_size=batch_size,
        )
        transform_test = transforms.Compose([
            transforms.Resize(256, keep_ratio=True),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            normalize
        ])
        val_data = gluon.data.DataLoader(
            imagenet.classification.ImageNet(data_dir, train=False).transform_first(transform_test),
            batch_size=batch_size, shuffle=False, num_workers=num_workers)

    return val_data, batch_fn 
Developer: awslabs, Project: deeplearning-benchmark, Lines: 28, Source: infer_imagenet_gpu.py

Example 10: get_data_loader

# Required module import: from mxnet.gluon.data.vision import transforms [as alias]
# Or: from mxnet.gluon.data.vision.transforms import CenterCrop [as alias]
def get_data_loader(data_dir, batch_size, num_workers):
    normalize = transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    jitter_param = 0.4
    lighting_param = 0.1
    input_size = opt.input_size
    crop_ratio = opt.crop_ratio if opt.crop_ratio > 0 else 0.875
    resize = int(math.ceil(input_size / crop_ratio))

    def batch_fn(batch, ctx):
        data = gluon.utils.split_and_load(batch[0], ctx_list=ctx, batch_axis=0)
        label = gluon.utils.split_and_load(batch[1], ctx_list=ctx, batch_axis=0)
        return data, label

    transform_train = transforms.Compose([
        transforms.RandomResizedCrop(input_size),
        transforms.RandomFlipLeftRight(),
        transforms.RandomColorJitter(brightness=jitter_param, contrast=jitter_param,
                                     saturation=jitter_param),
        transforms.RandomLighting(lighting_param),
        transforms.ToTensor(),
        normalize
    ])
    transform_test = transforms.Compose([
        transforms.Resize(resize, keep_ratio=True),
        transforms.CenterCrop(input_size),
        transforms.ToTensor(),
        normalize
    ])

    train_data = gluon.data.DataLoader(
        imagenet.classification.ImageNet(data_dir, train=True).transform_first(transform_train),
        batch_size=batch_size, shuffle=True, last_batch='discard', num_workers=num_workers)
    val_data = gluon.data.DataLoader(
        imagenet.classification.ImageNet(data_dir, train=False).transform_first(transform_test),
        batch_size=batch_size, shuffle=False, num_workers=num_workers)

    return train_data, val_data, batch_fn 
Developer: miraclewkf, Project: MXNet-Deep-Learning-in-Action, Lines: 39, Source: train_imagenet.py

Example 11: get_data_loader

# Required module import: from mxnet.gluon.data.vision import transforms [as alias]
# Or: from mxnet.gluon.data.vision.transforms import CenterCrop [as alias]
def get_data_loader(data_dir, batch_size, num_workers):
    normalize = transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    jitter_param = 0.4
    lighting_param = 0.1

    def batch_fn(batch, ctx):
        data = gluon.utils.split_and_load(batch[0], ctx_list=ctx, batch_axis=0)
        label = gluon.utils.split_and_load(batch[1], ctx_list=ctx, batch_axis=0)
        return data, label

    transform_train = transforms.Compose([
        transforms.RandomResizedCrop(224),
        transforms.RandomFlipLeftRight(),
        transforms.RandomColorJitter(brightness=jitter_param, contrast=jitter_param,
                                     saturation=jitter_param),
        transforms.RandomLighting(lighting_param),
        transforms.ToTensor(),
        normalize
    ])
    transform_test = transforms.Compose([
        transforms.Resize(256, keep_ratio=True),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
        normalize
    ])

    train_data = gluon.data.DataLoader(
        imagenet.classification.ImageNet(data_dir, train=True).transform_first(transform_train),
        batch_size=batch_size, shuffle=True, last_batch='discard', num_workers=num_workers)
    val_data = gluon.data.DataLoader(
        imagenet.classification.ImageNet(data_dir, train=False).transform_first(transform_test),
        batch_size=batch_size, shuffle=False, num_workers=num_workers)

    return train_data, val_data, batch_fn 
Developer: zzdang, Project: cascade_rcnn_gluon, Lines: 36, Source: train_imagenet.py

Example 12: generate_transform

# Required module import: from mxnet.gluon.data.vision import transforms [as alias]
# Or: from mxnet.gluon.data.vision.transforms import CenterCrop [as alias]
def generate_transform(train, resize, _is_osx, input_size, jitter_param):
    if _is_osx:
        # using PIL to load image (slow)
        if train:
            transform = Compose(
                [
                    RandomResizedCrop(input_size),
                    RandomHorizontalFlip(),
                    ColorJitter(0.4, 0.4, 0.4),
                    ToTensor(),
                    transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
                ]
            )
        else:
            transform = Compose(
                [
                    Resize(resize),
                    CenterCrop(input_size),
                    ToTensor(),
                    transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
                ]
            )
    else:
        if train:
            transform = transforms.Compose(
                [
                    transforms.RandomResizedCrop(input_size),
                    transforms.RandomFlipLeftRight(),
                    transforms.RandomColorJitter(
                        brightness=jitter_param,
                        contrast=jitter_param,
                        saturation=jitter_param
                    ),
                    transforms.RandomLighting(0.1),
                    transforms.ToTensor(),
                    transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
                ]
            )
        else:
            transform = transforms.Compose(
                [
                    transforms.Resize(resize),
                    transforms.CenterCrop(input_size),
                    transforms.ToTensor(),
                    transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
                ]
            )
    return transform 
Developer: awslabs, Project: autogluon, Lines: 50, Source: dataset.py


Note: The mxnet.gluon.data.vision.transforms.CenterCrop method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by many developers; copyright of the source code belongs to the original authors. For distribution and use, please refer to the corresponding project's license; do not reproduce without permission.