

Python block.HybridBlock Method Code Examples

This article collects typical usage examples of mxnet.gluon.block.HybridBlock in Python. If you are wondering what block.HybridBlock is for and how it is used in practice, the curated examples below may help. You can also explore further usage examples from the containing module, mxnet.gluon.block.


The following presents 7 code examples of block.HybridBlock, sorted by popularity by default.
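
All of the examples below construct and return HybridBlock instances. For context, here is a minimal sketch of defining and hybridizing a HybridBlock with the MXNet 1.x Gluon API; the class name and layer sizes are illustrative only and do not come from any of the projects cited below.

import mxnet as mx
from mxnet.gluon import nn
from mxnet.gluon.block import HybridBlock

class TinyNet(HybridBlock):
    """A hypothetical two-layer network used only to illustrate HybridBlock."""
    def __init__(self, **kwargs):
        super(TinyNet, self).__init__(**kwargs)
        with self.name_scope():
            self.dense0 = nn.Dense(16, activation='relu')
            self.dense1 = nn.Dense(2)

    def hybrid_forward(self, F, x):
        # F is mxnet.nd in imperative mode and mxnet.sym after hybridize()
        return self.dense1(self.dense0(x))

net = TinyNet()
net.initialize()
net.hybridize()  # compile the imperative graph into a static symbolic one
out = net(mx.nd.random.uniform(shape=(4, 8)))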

Example 1: oth_alpha_pose_resnet101_v1b_coco

# Required module: from mxnet.gluon import block [as alias]
# Or: from mxnet.gluon.block import HybridBlock [as alias]
def oth_alpha_pose_resnet101_v1b_coco(pretrained=False, **kwargs):
    r""" ResNet-101 backbone model from AlphaPose
    Parameters
    ----------
    num_gpus : int
        Number of usable GPUs.

    Returns
    -------
    mxnet.gluon.HybridBlock
        The AlphaPose network.

    """
    norm_layer = mx.gluon.nn.BatchNorm
    norm_kwargs = {'use_global_stats': False}
    return get_alphapose(
        name='resnet101_v1b', dataset='coco',
        num_joints=17, norm_layer=norm_layer,
        norm_kwargs=norm_kwargs, pretrained=pretrained, **kwargs) 
Author: osmr; Project: imgclsmob; Lines: 21; Source: oth_alpha_pose.py
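
A hedged usage sketch for the wrapper above; the import path and the 256x192 person-crop size are assumptions and may differ in an actual imgclsmob checkout.

import mxnet as mx
from oth_alpha_pose import oth_alpha_pose_resnet101_v1b_coco  # assumed import path

net = oth_alpha_pose_resnet101_v1b_coco(pretrained=False)  # get_alphapose initializes the parameters
crops = mx.nd.random.uniform(shape=(1, 3, 256, 192))       # person crops; the size is an assumption
heatmaps = net(crops)
print(heatmaps.shape)  # (1, 17, H, W): one heatmap per COCO keypoint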

Example 2: get_dla

# Required module: from mxnet.gluon import block [as alias]
# Or: from mxnet.gluon.block import HybridBlock [as alias]
def get_dla(layers, pretrained=False, ctx=mx.cpu(),
            root=os.path.join('~', '.mxnet', 'models'), **kwargs):
    """Get a center net instance.

    Parameters
    ----------
    name : str or int
        Layers of the network.
    pretrained : bool or str
        Boolean value controls whether to load the default pretrained weights for model.
        String value represents the hashtag for a certain version of pretrained weights.
    ctx : mxnet.Context
        Context such as mx.cpu(), mx.gpu(0).
    root : str
        Model weights storing path.

    Returns
    -------
    HybridBlock
        A DLA network.

    """
    # pylint: disable=unused-variable
    net = DLA(**kwargs)
    if pretrained:
        from .model_store import get_model_file
        full_name = 'dla{}'.format(layers)
        net.load_parameters(get_model_file(full_name, tag=pretrained, root=root),
                            ctx=ctx, ignore_extra=True)
        from ..data import ImageNet1kAttr
        attrib = ImageNet1kAttr()
        net.synset = attrib.synset
        net.classes = attrib.classes
        net.classes_long = attrib.classes_long
    return net 
Author: dmlc; Project: gluon-cv; Lines: 37; Source: dla.py

Example 3: dla34

# Required module: from mxnet.gluon import block [as alias]
# Or: from mxnet.gluon.block import HybridBlock [as alias]
def dla34(**kwargs):
    """DLA 34 layer network for image classification.

    Returns
    -------
    HybridBlock
        A DLA34 network.

    """
    model = get_dla(34, levels=[1, 1, 1, 2, 2, 1],
                    channels=[16, 32, 64, 128, 256, 512],
                    block=BasicBlock, **kwargs)
    return model 
Author: dmlc; Project: gluon-cv; Lines: 15; Source: dla.py
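
A hedged usage sketch; whether 'dla34' is registered under exactly that name in the GluonCV model zoo, and the default classifier size, depend on the GluonCV version.

import mxnet as mx
from gluoncv.model_zoo import get_model

net = get_model('dla34', pretrained=False)  # registry name assumed to match the function above
net.initialize(ctx=mx.cpu())                # get_dla only loads weights when pretrained=True
net.hybridize()
logits = net(mx.nd.random.uniform(shape=(1, 3, 224, 224)))
print(logits.shape)                         # expected (1, 1000) for the ImageNet-1k head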

Example 4: alpha_pose_resnet101_v1b_coco

# Required module: from mxnet.gluon import block [as alias]
# Or: from mxnet.gluon.block import HybridBlock [as alias]
def alpha_pose_resnet101_v1b_coco(**kwargs):
    r""" ResNet-101 backbone model from AlphaPose
    Parameters
    ----------
    num_gpus : int
        Number of usable GPUs.

    Returns
    -------
    mxnet.gluon.HybridBlock
        The AlphaPose network.

    """
    from ...data import COCOKeyPoints
    keypoints = COCOKeyPoints.KEYPOINTS
    num_gpus = kwargs.pop('num_gpus', None)
    if num_gpus is not None and int(num_gpus) > 1:
        norm_layer = mx.gluon.contrib.nn.SyncBatchNorm
        norm_kwargs = {'use_global_stats': False, 'num_devices': int(num_gpus)}
    else:
        norm_layer = mx.gluon.nn.BatchNorm
        norm_kwargs = {'use_global_stats': False}

    return get_alphapose(
        name='resnet101_v1b', dataset='coco',
        num_joints=len(keypoints), norm_layer=norm_layer,
        norm_kwargs=norm_kwargs, **kwargs) 
Author: dmlc; Project: gluon-cv; Lines: 29; Source: fast_pose.py
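
A hedged end-to-end sketch using the GluonCV model zoo. The registry name mirrors the function above; the crop size fed to the network is an assumption, since in practice crops come from a person detector via GluonCV's pose transforms.

import mxnet as mx
from gluoncv.model_zoo import get_model

pose_net = get_model('alpha_pose_resnet101_v1b_coco', pretrained=True)
crops = mx.nd.random.uniform(shape=(2, 3, 256, 192))  # person crops; the size is an assumption
heatmaps = pose_net(crops)
print(heatmaps.shape)  # (2, 17, H, W): one heatmap per COCO keypoint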

Example 5: get_Siam_RPN

# Required module: from mxnet.gluon import block [as alias]
# Or: from mxnet.gluon.block import HybridBlock [as alias]
def get_Siam_RPN(base_name, bz=1, is_train=False, pretrained=False, ctx=mx.cpu(0),
                 root='~/.mxnet/models', **kwargs):
    """get Siam_RPN net and get pretrained model if have pretrained

    Parameters
    ----------
    base_name : str
        Backbone model name
    bz : int
        Batch size for training; use 1 for inference.
    is_train : bool
        True for training mode, False for inference.
    pretrained : bool or str
        Boolean value controls whether to load the default pretrained weights for model.
        String value represents the hashtag for a certain version of pretrained weights.
    ctx : mxnet.Context
        Context such as mx.cpu(), mx.gpu(0).
    root : str
        Model weights storing path.

    Returns
    -------
    HybridBlock
        A SiamRPN Tracking network.
    """
    net = SiamRPN(bz=bz, is_train=is_train, ctx=ctx)
    if pretrained:
        from gluoncv.model_zoo.model_store import get_model_file
        net.load_parameters(get_model_file('siamrpn_%s'%(base_name),
                                           tag=pretrained, root=root), ctx=ctx)

    return net 
Author: dmlc; Project: gluon-cv; Lines: 34; Source: siam_net.py
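
A hedged loading sketch; the model-zoo registry name is an assumption based on GluonCV's published SiamRPN tracker.

import mxnet as mx
from gluoncv.model_zoo import get_model

# Assumed registry name; check the model zoo listing for your GluonCV version.
tracker_net = get_model('siamrpn_alexnet_v2_otb15', pretrained=True)
# In practice the network is wrapped by GluonCV's SiamRPN tracking utilities, which feed it
# a template crop of the target once and then a search crop for every video frame.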

Example 6: get_alphapose (gluon-cv)

# Required module: from mxnet.gluon import block [as alias]
# Or: from mxnet.gluon.block import HybridBlock [as alias]
def get_alphapose(name, dataset, num_joints, pretrained=False,
                  pretrained_base=True, ctx=mx.cpu(),
                  norm_layer=nn.BatchNorm, norm_kwargs=None,
                  root=os.path.join('~', '.mxnet', 'models'), **kwargs):
    r"""Utility function to return AlphaPose networks.

    Parameters
    ----------
    name : str
        Model name.
    dataset : str
        Name of the training dataset.
    num_joints : int
        Number of keypoints (heatmap channels) to predict.
    pretrained : bool or str
        Boolean value controls whether to load the default pretrained weights for model.
        String value represents the hashtag for a certain version of pretrained weights.
    ctx : mxnet.Context
        Context such as mx.cpu(), mx.gpu(0).
    root : str
        Model weights storing path.

    Returns
    -------
    mxnet.gluon.HybridBlock
        The AlphaPose network.

    """
    if norm_kwargs is None:
        norm_kwargs = {}
    preact = FastSEResNet(name, norm_layer=norm_layer, **norm_kwargs)
    if not pretrained and pretrained_base:
        from ..model_zoo import get_model
        base_network = get_model(name, pretrained=True, root=root)
        _try_load_parameters(self=base_network, model=base_network)
    net = AlphaPose(preact, num_joints, **kwargs)
    if pretrained:
        from ..model_store import get_model_file
        full_name = '_'.join(('alpha_pose', name, dataset))
        net.load_parameters(get_model_file(full_name, tag=pretrained, root=root))
    else:
        import warnings
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("always")
            net.collect_params().initialize()
    net.collect_params().reset_ctx(ctx)
    return net 
Author: dmlc; Project: gluon-cv; Lines: 47; Source: fast_pose.py
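
Since the public entry points above are thin wrappers over this factory, here is a hedged sketch of calling it directly for a custom keypoint set. It assumes the call runs inside (or imports from) fast_pose.py, so that FastSEResNet and AlphaPose are in scope; the dataset tag, joint count, and input size are hypothetical.

import mxnet as mx
from mxnet.gluon import nn

net = get_alphapose(name='resnet101_v1b', dataset='custom', num_joints=21,
                    pretrained=False, pretrained_base=False,  # skip downloading the ImageNet backbone
                    norm_layer=nn.BatchNorm, norm_kwargs=None, ctx=mx.cpu())
heatmaps = net(mx.nd.random.uniform(shape=(1, 3, 256, 192)))
print(heatmaps.shape[1])  # 21: one heatmap channel per joint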

Example 7: get_alphapose (imgclsmob)

# Required module: from mxnet.gluon import block [as alias]
# Or: from mxnet.gluon.block import HybridBlock [as alias]
def get_alphapose(name, dataset, num_joints, pretrained=False,
                  pretrained_base=False, ctx=mx.cpu(),
                  norm_layer=nn.BatchNorm, norm_kwargs=None,
                  root=os.path.join('~', '.mxnet', 'models'), **kwargs):
    r"""Utility function to return AlphaPose networks.

    Parameters
    ----------
    name : str
        Model name.
    dataset : str
        Name of the training dataset.
    num_joints : int
        Number of keypoints (heatmap channels) to predict.
    pretrained : bool or str
        Boolean value controls whether to load the default pretrained weights for model.
        String value represents the hashtag for a certain version of pretrained weights.
    ctx : mxnet.Context
        Context such as mx.cpu(), mx.gpu(0).
    root : str
        Model weights storing path.

    Returns
    -------
    mxnet.gluon.HybridBlock
        The AlphaPose network.

    """
    if norm_kwargs is None:
        norm_kwargs = {}
    preact = FastSEResNet(name, norm_layer=norm_layer, **norm_kwargs)
    if not pretrained and pretrained_base:
        from gluoncv.model_zoo import get_model
        base_network = get_model(name, pretrained=True, root=root)
        _try_load_parameters(self=base_network, model=base_network)
    net = AlphaPose(preact, num_joints, **kwargs)
    if pretrained:
        from gluoncv.model_zoo.model_store import get_model_file
        full_name = '_'.join(('alpha_pose', name, dataset))
        net.load_parameters(get_model_file(full_name, tag=pretrained, root=root))
    else:
        import warnings
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("always")
            net.collect_params().initialize()
    net.collect_params().reset_ctx(ctx)
    return net 
Author: osmr; Project: imgclsmob; Lines: 47; Source: oth_alpha_pose.py


Note: the mxnet.gluon.block.HybridBlock examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are taken from community open-source projects, and copyright remains with their original authors; consult each project's license before redistributing or reusing the code, and do not republish without permission.