

Python imgaug.Lighting Method Code Examples

This article collects typical usage examples of Python's tensorpack.dataflow.imgaug.Lighting method. If you are unsure what imgaug.Lighting does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples from its containing module, tensorpack.dataflow.imgaug.


The following shows 9 code examples of the imgaug.Lighting method, sorted by popularity by default.
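Before the examples, here is a minimal sketch (not taken from any of the projects below) of how an imgaug.Lighting augmentor can be constructed and wired into a tensorpack dataflow via AugmentImageComponent. `my_dataflow` is a hypothetical DataFlow yielding [image, label] datapoints; the eigenvalues/eigenvectors are the ImageNet PCA constants used throughout the examples, reversed because the images are assumed to be BGR.

import numpy as np
from tensorpack.dataflow import AugmentImageComponent, imgaug

# PCA "lighting" noise with std 0.1, using the fb.resnet.torch ImageNet
# eigenvalues/eigenvectors, reversed for BGR input.
lighting = imgaug.Lighting(
    0.1,
    eigval=np.asarray([0.2175, 0.0188, 0.0045][::-1]) * 255.0,
    eigvec=np.array([[-0.5675, 0.7192, 0.4009],
                     [-0.5808, -0.0045, -0.8140],
                     [-0.5836, -0.6948, 0.4203]],
                    dtype='float32')[::-1, ::-1])

# Apply the augmentor to the image component (index 0) of each datapoint:
# df = AugmentImageComponent(my_dataflow, [lighting], index=0)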

Example 1: fbresnet_augmentor

# Module to import: from tensorpack.dataflow import imgaug [as alias]
# Or: from tensorpack.dataflow.imgaug import Lighting [as alias]
def fbresnet_augmentor():
    # assume BGR input
    augmentors = [
        imgaug.GoogleNetRandomCropAndResize(),
        imgaug.RandomOrderAug(
            [imgaug.BrightnessScale((0.6, 1.4), clip=False),
             imgaug.Contrast((0.6, 1.4), clip=False),
             imgaug.Saturation(0.4, rgb=False),
             # rgb->bgr conversion for the constants copied from fb.resnet.torch
             imgaug.Lighting(0.1,
                             eigval=np.asarray(
                                 [0.2175, 0.0188, 0.0045][::-1]) * 255.0,
                             eigvec=np.array(
                                 [[-0.5675, 0.7192, 0.4009],
                                  [-0.5808, -0.0045, -0.8140],
                                  [-0.5836, -0.6948, 0.4203]],
                                 dtype='float32')[::-1, ::-1]
                             )]),
        imgaug.Flip(horiz=True),
    ]
    return augmentors 
Developer: tensorpack, Project: benchmarks, Lines of code: 23, Source file: augmentors.py

Example 2: fbresnet_augmentor

# Module to import: from tensorpack.dataflow import imgaug [as alias]
# Or: from tensorpack.dataflow.imgaug import Lighting [as alias]
def fbresnet_augmentor():
    # assume BGR input
    augmentors = [
        GoogleNetResize(),
        imgaug.RandomOrderAug(
            [imgaug.BrightnessScale((0.6, 1.4), clip=False),
             imgaug.Contrast((0.6, 1.4), clip=False),
             imgaug.Saturation(0.4, rgb=False),
             # rgb->bgr conversion for the constants copied from fb.resnet.torch
             imgaug.Lighting(0.1,
                             eigval=np.asarray(
                                 [0.2175, 0.0188, 0.0045][::-1]) * 255.0,
                             eigvec=np.array(
                                 [[-0.5675, 0.7192, 0.4009],
                                  [-0.5808, -0.0045, -0.8140],
                                  [-0.5836, -0.6948, 0.4203]],
                                 dtype='float32')[::-1, ::-1]
                             )]),
        imgaug.Flip(horiz=True),
    ]
    return augmentors 
Developer: qinenergy, Project: adanet, Lines of code: 23, Source file: augmentors.py

Example 3: get_data

# Module to import: from tensorpack.dataflow import imgaug [as alias]
# Or: from tensorpack.dataflow.imgaug import Lighting [as alias]
def get_data(name, batch):
    isTrain = name == 'train'
    if isTrain:
        augmentors = [
            imgaug.ResizeShortestEdge(256, cv2.INTER_CUBIC),
            imgaug.RandomCrop(224),
            imgaug.Lighting(0.1,
                            eigval=np.asarray(
                                [0.2175, 0.0188, 0.0045][::-1]) * 255.0,
                            eigvec=np.array(
                                [[-0.5675, 0.7192, 0.4009],
                                 [-0.5808, -0.0045, -0.8140],
                                 [-0.5836, -0.6948, 0.4203]],
                                dtype='float32')[::-1, ::-1]),
            imgaug.Flip(horiz=True)]
    else:
        augmentors = [
            imgaug.ResizeShortestEdge(256, cv2.INTER_CUBIC),
            imgaug.CenterCrop((224, 224))]
    return get_imagenet_dataflow(args.data, name, batch, augmentors) 
Developer: tensorpack, Project: tensorpack, Lines of code: 22, Source file: alexnet.py

Example 4: resizeAndLighting_augmentor

# Module to import: from tensorpack.dataflow import imgaug [as alias]
# Or: from tensorpack.dataflow.imgaug import Lighting [as alias]
def resizeAndLighting_augmentor():
    # assume BGR input
    augmentors = [
        imgaug.GoogleNetRandomCropAndResize(),
        imgaug.Lighting(0.1,
                        eigval=np.asarray(
                            [0.2175, 0.0188, 0.0045][::-1]) * 255.0,
                        eigvec=np.array(
                            [[-0.5675, 0.7192, 0.4009],
                             [-0.5808, -0.0045, -0.8140],
                             [-0.5836, -0.6948, 0.4203]],
                            dtype='float32')[::-1, ::-1]),
        imgaug.Flip(horiz=True),
    ]
    return augmentors 
Developer: tensorpack, Project: benchmarks, Lines of code: 17, Source file: augmentors.py

Example 5: resizeOnly_augmentor

# Module to import: from tensorpack.dataflow import imgaug [as alias]
# Or: from tensorpack.dataflow.imgaug import Lighting [as alias]
def resizeOnly_augmentor():
    # assume BGR input
    augmentors = [
        imgaug.GoogleNetRandomCropAndResize(),
        imgaug.Lighting(0.1,
                        eigval=np.asarray(
                            [0.2175, 0.0188, 0.0045][::-1]) * 255.0,
                        eigvec=np.array(
                            [[-0.5675, 0.7192, 0.4009],
                             [-0.5808, -0.0045, -0.8140],
                             [-0.5836, -0.6948, 0.4203]],
                            dtype='float32')[::-1, ::-1]),
        imgaug.Flip(horiz=True),
    ]
    return augmentors 
Developer: tensorpack, Project: benchmarks, Lines of code: 17, Source file: augmentors.py

Example 6: get_data

# Module to import: from tensorpack.dataflow import imgaug [as alias]
# Or: from tensorpack.dataflow.imgaug import Lighting [as alias]
def get_data(is_train,
             batch_size,
             data_dir_path,
             input_image_size=224,
             resize_inv_factor=0.875):
    assert (resize_inv_factor > 0.0)
    resize_value = int(math.ceil(float(input_image_size) / resize_inv_factor))

    if is_train:
        augmentors = [
            GoogleNetResize(
                crop_area_fraction=0.08,
                target_shape=input_image_size),
            imgaug.RandomOrderAug([
                imgaug.BrightnessScale((0.6, 1.4), clip=False),
                imgaug.Contrast((0.6, 1.4), clip=False),
                imgaug.Saturation(0.4, rgb=False),
                # rgb-bgr conversion for the constants copied from fb.resnet.torch
                imgaug.Lighting(
                    0.1,
                    eigval=np.asarray([0.2175, 0.0188, 0.0045][::-1]) * 255.0,
                    eigvec=np.array([
                        [-0.5675, 0.7192, 0.4009],
                        [-0.5808, -0.0045, -0.8140],
                        [-0.5836, -0.6948, 0.4203]], dtype="float32")[::-1, ::-1])]),
            imgaug.Flip(horiz=True)]
    else:
        augmentors = [
            # imgaug.ResizeShortestEdge(resize_value, cv2.INTER_CUBIC),
            imgaug.ResizeShortestEdge(resize_value, cv2.INTER_LINEAR),
            imgaug.CenterCrop((input_image_size, input_image_size))
        ]

    return get_imagenet_dataflow(
        datadir=data_dir_path,
        is_train=is_train,
        batch_size=batch_size,
        augmentors=augmentors) 
Developer: osmr, Project: imgclsmob, Lines of code: 40, Source file: utils_tp.py

Example 7: resizeAndLighting_augmentor

# Module to import: from tensorpack.dataflow import imgaug [as alias]
# Or: from tensorpack.dataflow.imgaug import Lighting [as alias]
def resizeAndLighting_augmentor():
    # assume BGR input
    augmentors = [
        GoogleNetResize(),
        imgaug.Lighting(0.1,
                        eigval=np.asarray(
                            [0.2175, 0.0188, 0.0045][::-1]) * 255.0,
                        eigvec=np.array(
                            [[-0.5675, 0.7192, 0.4009],
                             [-0.5808, -0.0045, -0.8140],
                             [-0.5836, -0.6948, 0.4203]],
                            dtype='float32')[::-1, ::-1]),
        imgaug.Flip(horiz=True),
    ]
    return augmentors 
Developer: qinenergy, Project: adanet, Lines of code: 17, Source file: augmentors.py

Example 8: resizeOnly_augmentor

# Module to import: from tensorpack.dataflow import imgaug [as alias]
# Or: from tensorpack.dataflow.imgaug import Lighting [as alias]
def resizeOnly_augmentor():
    # assume BGR input
    augmentors = [
        GoogleNetResize(),
        imgaug.Lighting(0.1,
                        eigval=np.asarray(
                            [0.2175, 0.0188, 0.0045][::-1]) * 255.0,
                        eigvec=np.array(
                            [[-0.5675, 0.7192, 0.4009],
                             [-0.5808, -0.0045, -0.8140],
                             [-0.5836, -0.6948, 0.4203]],
                            dtype='float32')[::-1, ::-1]),
        imgaug.Flip(horiz=True),
    ]
    return augmentors 
Developer: qinenergy, Project: adanet, Lines of code: 17, Source file: augmentors.py

Example 9: get_data

# Module to import: from tensorpack.dataflow import imgaug [as alias]
# Or: from tensorpack.dataflow.imgaug import Lighting [as alias]
def get_data(name, batch):
    isTrain = name == 'train'

    if isTrain:
        augmentors = [
            # use lighter augs if model is too small
            imgaug.GoogleNetRandomCropAndResize(crop_area_fraction=(0.49 if args.ratio < 1 else 0.08, 1.)),
            imgaug.RandomOrderAug(
                [imgaug.BrightnessScale((0.6, 1.4), clip=False),
                 imgaug.Contrast((0.6, 1.4), clip=False),
                 imgaug.Saturation(0.4, rgb=False),
                 # rgb-bgr conversion for the constants copied from fb.resnet.torch
                 imgaug.Lighting(0.1,
                                 eigval=np.asarray(
                                     [0.2175, 0.0188, 0.0045][::-1]) * 255.0,
                                 eigvec=np.array(
                                     [[-0.5675, 0.7192, 0.4009],
                                      [-0.5808, -0.0045, -0.8140],
                                      [-0.5836, -0.6948, 0.4203]],
                                     dtype='float32')[::-1, ::-1]
                                 )]),
            imgaug.Flip(horiz=True),
        ]
    else:
        augmentors = [
            imgaug.ResizeShortestEdge(256, cv2.INTER_CUBIC),
            imgaug.CenterCrop((224, 224)),
        ]
    return get_imagenet_dataflow(
        args.data, name, batch, augmentors) 
Developer: tensorpack, Project: tensorpack, Lines of code: 32, Source file: shufflenet.py


Note: The tensorpack.dataflow.imgaug.Lighting examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors. Please consult each project's License before distributing or reusing the code; do not reproduce without permission.