

Python block.HybridBlock Usage Examples

This article collects typical usage examples of mxnet.gluon.block.HybridBlock in Python. If you have been wondering what block.HybridBlock is for, how to use it, or want to see concrete examples, the curated code samples below may help. You can also browse further usage examples from the containing module, mxnet.gluon.block.


The following presents 7 code examples of block.HybridBlock, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps surface better Python code samples.
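
Before diving into the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of how a custom HybridBlock is defined, initialized, and hybridized. The TinyNet class and its layer sizes are purely illustrative.

import mxnet as mx
from mxnet.gluon import nn
from mxnet.gluon.block import HybridBlock

class TinyNet(HybridBlock):
    """A toy two-layer network used only to illustrate the HybridBlock API."""
    def __init__(self, **kwargs):
        super(TinyNet, self).__init__(**kwargs)
        self.hidden = nn.Dense(64, activation='relu')
        self.output = nn.Dense(10)

    def hybrid_forward(self, F, x):
        # F is mx.nd in imperative mode and mx.sym once the block is hybridized
        return self.output(self.hidden(x))

net = TinyNet()
net.initialize()
net.hybridize()  # compile the computation graph for faster symbolic execution
out = net(mx.nd.random.uniform(shape=(2, 20)))
print(out.shape)  # (2, 10)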

Example 1: oth_alpha_pose_resnet101_v1b_coco

# Required import: from mxnet.gluon import block [as alias]
# Or: from mxnet.gluon.block import HybridBlock [as alias]
def oth_alpha_pose_resnet101_v1b_coco(pretrained=False, **kwargs):
    r""" ResNet-101 backbone model from AlphaPose
    Parameters
    ----------
    num_gpus : int
        Number of usable GPUs.

    Returns
    -------
    mxnet.gluon.HybridBlock
        The AlphaPose network.

    """
    norm_layer = mx.gluon.nn.BatchNorm
    norm_kwargs = {'use_global_stats': False}
    return get_alphapose(
        name='resnet101_v1b', dataset='coco',
        num_joints=17, norm_layer=norm_layer,
        norm_kwargs=norm_kwargs, pretrained=pretrained, **kwargs) 
Author: osmr | Project: imgclsmob | Lines of code: 21 | Source file: oth_alpha_pose.py
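
A hedged usage sketch for the function above: the flat import path and the 256x192 input resolution are assumptions, and with pretrained=False the returned network carries randomly initialized weights.

import mxnet as mx
from oth_alpha_pose import oth_alpha_pose_resnet101_v1b_coco  # assumed module path

net = oth_alpha_pose_resnet101_v1b_coco(pretrained=False)
x = mx.nd.random.uniform(shape=(1, 3, 256, 192))  # stand-in for a cropped person image
heatmaps = net(x)
print(heatmaps.shape)  # one heatmap per COCO joint, at roughly 1/4 of the input resolution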

Example 2: get_dla

# Required import: from mxnet.gluon import block [as alias]
# Or: from mxnet.gluon.block import HybridBlock [as alias]
def get_dla(layers, pretrained=False, ctx=mx.cpu(),
            root=os.path.join('~', '.mxnet', 'models'), **kwargs):
    """Get a center net instance.

    Parameters
    ----------
    name : str or int
        Layers of the network.
    pretrained : bool or str
        Boolean value controls whether to load the default pretrained weights for model.
        String value represents the hashtag for a certain version of pretrained weights.
    ctx : mxnet.Context
        Context such as mx.cpu(), mx.gpu(0).
    root : str
        Model weights storing path.

    Returns
    -------
    HybridBlock
        A DLA network.

    """
    # pylint: disable=unused-variable
    net = DLA(**kwargs)
    if pretrained:
        from .model_store import get_model_file
        full_name = 'dla{}'.format(layers)
        net.load_parameters(get_model_file(full_name, tag=pretrained, root=root),
                            ctx=ctx, ignore_extra=True)
        from ..data import ImageNet1kAttr
        attrib = ImageNet1kAttr()
        net.synset = attrib.synset
        net.classes = attrib.classes
        net.classes_long = attrib.classes_long
    return net 
Author: dmlc | Project: gluon-cv | Lines of code: 37 | Source file: dla.py
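
A hedged sketch of calling get_dla directly. The levels/channels/block arguments mirror the dla34 wrapper shown next; they are needed here because get_dla forwards **kwargs straight to the DLA constructor without supplying architecture defaults, and BasicBlock is assumed to come from the same dla.py module.

import mxnet as mx

net = get_dla(34, pretrained=False, ctx=mx.cpu(),
              levels=[1, 1, 1, 2, 2, 1],
              channels=[16, 32, 64, 128, 256, 512],
              block=BasicBlock)
net.initialize()  # required because no pretrained weights were loaded
x = mx.nd.random.uniform(shape=(1, 3, 224, 224))
print(net(x).shape)  # expected (1, 1000) with the default ImageNet classification head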

Example 3: dla34

# Required import: from mxnet.gluon import block [as alias]
# Or: from mxnet.gluon.block import HybridBlock [as alias]
def dla34(**kwargs):
    """DLA 34 layer network for image classification.

    Returns
    -------
    HybridBlock
        A DLA34 network.

    """
    model = get_dla(34, levels=[1, 1, 1, 2, 2, 1],
                    channels=[16, 32, 64, 128, 256, 512],
                    block=BasicBlock, **kwargs)
    return model 
Author: dmlc | Project: gluon-cv | Lines of code: 15 | Source file: dla.py
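
A hedged follow-up sketch: assuming ImageNet weights for dla34 are published in the GluonCV model store, the classes attribute attached by get_dla can be used to read off a prediction.

import mxnet as mx

net = dla34(pretrained=True)              # downloads the ImageNet weights on first use
x = mx.nd.random.uniform(shape=(1, 3, 224, 224))
prob = mx.nd.softmax(net(x))
top1 = int(prob.argmax(axis=1).asscalar())
print(net.classes[top1])                  # human-readable label set by get_dla above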

Example 4: alpha_pose_resnet101_v1b_coco

# Required import: from mxnet.gluon import block [as alias]
# Or: from mxnet.gluon.block import HybridBlock [as alias]
def alpha_pose_resnet101_v1b_coco(**kwargs):
    r""" ResNet-101 backbone model from AlphaPose
    Parameters
    ----------
    num_gpus : int
        Number of usable GPUs.

    Returns
    -------
    mxnet.gluon.HybridBlock
        The AlphaPose network.

    """
    from ...data import COCOKeyPoints
    keypoints = COCOKeyPoints.KEYPOINTS
    num_gpus = kwargs.pop('num_gpus', None)
    if num_gpus is not None and int(num_gpus) > 1:
        norm_layer = mx.gluon.contrib.nn.SyncBatchNorm
        norm_kwargs = {'use_global_stats': False, 'num_devices': int(num_gpus)}
    else:
        norm_layer = mx.gluon.nn.BatchNorm
        norm_kwargs = {'use_global_stats': False}

    return get_alphapose(
        name='resnet101_v1b', dataset='coco',
        num_joints=len(keypoints), norm_layer=norm_layer,
        norm_kwargs=norm_kwargs, **kwargs) 
Author: dmlc | Project: gluon-cv | Lines of code: 29 | Source file: fast_pose.py
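
A hedged sketch of using this model through the GluonCV model zoo; 'alpha_pose_resnet101_v1b_coco' is the zoo name the wrapper is registered under, and the 256x192 crop size is an assumption about the expected input resolution.

import mxnet as mx
from gluoncv import model_zoo

net = model_zoo.get_model('alpha_pose_resnet101_v1b_coco', pretrained=True)
x = mx.nd.random.uniform(shape=(1, 3, 256, 192))  # stand-in for a detected person crop
heatmap = net(x)
print(heatmap.shape)  # one heatmap per COCO keypoint (17), typically at 1/4 the input resolution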

Example 5: get_Siam_RPN

# Required import: from mxnet.gluon import block [as alias]
# Or: from mxnet.gluon.block import HybridBlock [as alias]
def get_Siam_RPN(base_name, bz=1, is_train=False, pretrained=False, ctx=mx.cpu(0),
                 root='~/.mxnet/models', **kwargs):
    """get Siam_RPN net and get pretrained model if have pretrained

    Parameters
    ----------
    base_name : str
        Backbone model name
    bz : int
        Batch size for training; use bz = 1 for testing.
    is_train : bool
        True for training, False for testing.
    pretrained : bool or str
        Boolean value controls whether to load the default pretrained weights for model.
        String value represents the hashtag for a certain version of pretrained weights.
    ctx : mxnet.Context
        Context such as mx.cpu(), mx.gpu(0).
    root : str
        Model weights storing path.

    Returns
    -------
    HybridBlock
        A SiamRPN Tracking network.
    """
    net = SiamRPN(bz=bz, is_train=is_train, ctx=ctx)
    if pretrained:
        from gluoncv.model_zoo.model_store import get_model_file
        net.load_parameters(get_model_file('siamrpn_%s'%(base_name),
                                           tag=pretrained, root=root), ctx=ctx)

    return net 
Author: dmlc | Project: gluon-cv | Lines of code: 34 | Source file: siam_net.py
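
A hedged construction sketch: 'alexnet_v2_otb15' is assumed to be the backbone tag of the published SiamRPN weights, and for actual tracking the returned network is normally driven by GluonCV's SiamRPN tracker utilities rather than called directly on raw video frames.

import mxnet as mx

net = get_Siam_RPN('alexnet_v2_otb15', bz=1, is_train=False,
                   ctx=mx.cpu(0), pretrained=True)
print(net)  # inspect the Siamese backbone, the RPN head, and the cls/loc branches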

Example 6: get_alphapose

# Required import: from mxnet.gluon import block [as alias]
# Or: from mxnet.gluon.block import HybridBlock [as alias]
def get_alphapose(name, dataset, num_joints, pretrained=False,
                  pretrained_base=True, ctx=mx.cpu(),
                  norm_layer=nn.BatchNorm, norm_kwargs=None,
                  root=os.path.join('~', '.mxnet', 'models'), **kwargs):
    r"""Utility function to return AlphaPose networks.

    Parameters
    ----------
    name : str
        Model name.
    dataset : str
        The name of the dataset.
    num_joints : int
        Number of joints (keypoints) predicted by the network.
    pretrained : bool or str
        Boolean value controls whether to load the default pretrained weights for model.
        String value represents the hashtag for a certain version of pretrained weights.
    ctx : mxnet.Context
        Context such as mx.cpu(), mx.gpu(0).
    root : str
        Model weights storing path.

    Returns
    -------
    mxnet.gluon.HybridBlock
        The AlphaPose network.

    """
    if norm_kwargs is None:
        norm_kwargs = {}
    preact = FastSEResNet(name, norm_layer=norm_layer, **norm_kwargs)
    if not pretrained and pretrained_base:
        from ..model_zoo import get_model
        base_network = get_model(name, pretrained=True, root=root)
        _try_load_parameters(self=base_network, model=base_network)
    net = AlphaPose(preact, num_joints, **kwargs)
    if pretrained:
        from ..model_store import get_model_file
        full_name = '_'.join(('alpha_pose', name, dataset))
        net.load_parameters(get_model_file(full_name, tag=pretrained, root=root))
    else:
        import warnings
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("always")
            net.collect_params().initialize()
    net.collect_params().reset_ctx(ctx)
    return net 
Author: dmlc | Project: gluon-cv | Lines of code: 47 | Source file: fast_pose.py
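
A hedged sketch of calling the utility directly instead of going through the model zoo: with pretrained=False and pretrained_base=True, the ImageNet ResNet backbone is fetched while the pose head stays randomly initialized. The 256x192 input resolution is an assumption.

import mxnet as mx
from mxnet.gluon import nn

net = get_alphapose(name='resnet101_v1b', dataset='coco', num_joints=17,
                    pretrained=False, pretrained_base=True,
                    ctx=mx.cpu(), norm_layer=nn.BatchNorm,
                    norm_kwargs={'use_global_stats': False})
x = mx.nd.random.uniform(shape=(1, 3, 256, 192))
print(net(x).shape)  # one heatmap per joint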

Example 7: get_alphapose

# Required import: from mxnet.gluon import block [as alias]
# Or: from mxnet.gluon.block import HybridBlock [as alias]
def get_alphapose(name, dataset, num_joints, pretrained=False,
                  pretrained_base=False, ctx=mx.cpu(),
                  norm_layer=nn.BatchNorm, norm_kwargs=None,
                  root=os.path.join('~', '.mxnet', 'models'), **kwargs):
    r"""Utility function to return AlphaPose networks.

    Parameters
    ----------
    name : str
        Model name.
    dataset : str
        The name of the dataset.
    num_joints : int
        Number of joints (keypoints) predicted by the network.
    pretrained : bool or str
        Boolean value controls whether to load the default pretrained weights for model.
        String value represents the hashtag for a certain version of pretrained weights.
    ctx : mxnet.Context
        Context such as mx.cpu(), mx.gpu(0).
    root : str
        Model weights storing path.

    Returns
    -------
    mxnet.gluon.HybridBlock
        The AlphaPose network.

    """
    if norm_kwargs is None:
        norm_kwargs = {}
    preact = FastSEResNet(name, norm_layer=norm_layer, **norm_kwargs)
    if not pretrained and pretrained_base:
        from gluoncv.model_zoo import get_model
        base_network = get_model(name, pretrained=True, root=root)
        _try_load_parameters(self=base_network, model=base_network)
    net = AlphaPose(preact, num_joints, **kwargs)
    if pretrained:
        from gluoncv.model_zoo.model_store import get_model_file
        full_name = '_'.join(('alpha_pose', name, dataset))
        net.load_parameters(get_model_file(full_name, tag=pretrained, root=root))
    else:
        import warnings
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("always")
            net.collect_params().initialize()
    net.collect_params().reset_ctx(ctx)
    return net 
Author: osmr | Project: imgclsmob | Lines of code: 47 | Source file: oth_alpha_pose.py
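
Because the returned model is a HybridBlock, it can be hybridized and exported to a symbol/params pair for deployment; the file prefix and the 256x192 input shape below are illustrative only.

import mxnet as mx

net = get_alphapose(name='resnet101_v1b', dataset='coco', num_joints=17)
net.hybridize()
net(mx.nd.zeros((1, 3, 256, 192)))       # one forward pass builds the cached graph
net.export('alpha_pose_resnet101_v1b')   # writes *-symbol.json and *-0000.params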


Note: The mxnet.gluon.block.HybridBlock examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective authors; copyright remains with the original authors, and any redistribution or use should comply with the corresponding project's license. Please do not reproduce without permission.