

Python links.LayerNormalization Code Examples

This article collects typical usage examples of the Python method chainer.links.LayerNormalization. If you are wondering how exactly to use links.LayerNormalization, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples from the containing module, chainer.links.


The following presents 9 code examples of the links.LayerNormalization method, sorted by popularity by default.
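Before the project examples, here is a minimal usage sketch of the link itself (assuming only that Chainer and NumPy are installed): L.LayerNormalization(size) holds one learnable scale (gamma) and shift (beta) per unit and normalizes each row of a 2-D (batch_size, unit_size) input.

import numpy as np
import chainer.links as L

ln = L.LayerNormalization(10)                  # gamma/beta of length 10
x = np.random.randn(4, 10).astype(np.float32)
y = ln(x)  # shape (4, 10); each row is normalized, then scaled and shifted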

Example 1: layer_normalization

# Required import: from chainer import links [as alias]
# Or: from chainer.links import LayerNormalization [as alias]
def layer_normalization(x, gamma, beta, eps=1e-5):
    """Layer normalization.

    This function implements "layer normalization", which normalizes
    the input units by statistics computed along the second axis,
    then scales and shifts them.

    Args:
        x (~chainer.Variable): Batch vectors.
            Shape of this value must be `(batch_size, unit_size)`,
            e.g., the output of :func:`~chainer.functions.linear`.
        gamma (~chainer.Variable): Scaling vectors.
        beta (~chainer.Variable): Shifting vectors.

    Returns:
        ~chainer.Variable: The output variable which has the same shape
        as :math:`x`.

    See: `Layer Normalization <https://arxiv.org/abs/1607.06450>`_
    """
    # `LayerNormalization` here is the function class defined alongside
    # this wrapper in the knmt module, not chainer.links.LayerNormalization.
    return LayerNormalization(eps)(x, gamma, beta)
Author: fabiencro | Project: knmt | Lines: 20 | Source: layer_normalization.py
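For comparison, recent Chainer versions also expose this computation directly as chainer.functions.layer_normalization, so a functional sketch equivalent to the wrapper above (shapes assumed as in its docstring) looks like:

import numpy as np
import chainer.functions as F

x = np.random.randn(2, 4).astype(np.float32)   # (batch_size, unit_size)
gamma = np.ones(4, dtype=np.float32)           # per-unit scale
beta = np.zeros(4, dtype=np.float32)           # per-unit shift
y = F.layer_normalization(x, gamma, beta, eps=1e-5)  # same shape as x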

Example 2: __init__

# Required import: from chainer import links [as alias]
# Or: from chainer.links import LayerNormalization [as alias]
def __init__(self, idim, n_layers, n_units,
                 e_units=2048, h=8, dropout=0.1):
        super(TransformerEncoder, self).__init__()
        with self.init_scope():
            self.linear_in = L.Linear(idim, n_units)
            self.lnorm_in = L.LayerNormalization(n_units)
            self.pos_enc = PositionalEncoding(n_units, dropout, 5000)
            self.n_layers = n_layers
            self.dropout = dropout
            for i in range(n_layers):
                setattr(self, '{}{:d}'.format("lnorm1_", i),
                        L.LayerNormalization(n_units))
                setattr(self, '{}{:d}'.format("self_att_", i),
                        MultiHeadSelfAttention(n_units, h))
                setattr(self, '{}{:d}'.format("lnorm2_", i),
                        L.LayerNormalization(n_units))
                setattr(self, '{}{:d}'.format("ff_", i),
                        PositionwiseFeedForward(n_units, e_units, dropout))
            self.lnorm_out = L.LayerNormalization(n_units) 
Author: hitachi-speech | Project: EEND | Lines: 21 | Source: transformer.py
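The per-layer links registered above with setattr are typically fetched back with getattr in the forward pass. A hypothetical skeleton of that retrieval pattern (the pre-norm residual wiring is an assumption for illustration, not code from the EEND repository):

def forward(self, x):
    # project, normalize, and add positional encodings (per the links above)
    e = self.pos_enc(self.lnorm_in(self.linear_in(x)))
    for i in range(self.n_layers):
        # pre-norm self-attention sub-block with a residual connection
        e = e + getattr(self, 'self_att_{:d}'.format(i))(
            getattr(self, 'lnorm1_{:d}'.format(i))(e))
        # pre-norm position-wise feed-forward sub-block
        e = e + getattr(self, 'ff_{:d}'.format(i))(
            getattr(self, 'lnorm2_{:d}'.format(i))(e))
    return self.lnorm_out(e)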

Example 3: __init__

# Required import: from chainer import links [as alias]
# Or: from chainer.links import LayerNormalization [as alias]
def __init__(self, dropout=None, residual_mode="normal", no_normalize=False):
        super(DropoutAndAddAndNormalize, self).__init__()
        
        if not no_normalize:
            LayerNormalization = get_layer_normalization_class()
            self.add_link("normalizing_layer", LayerNormalization())
        
        assert residual_mode in "normal none after".split()
        self.residual_mode = residual_mode
        self.no_normalize = no_normalize
        self.dropout = dropout 
Author: fabiencro | Project: knmt | Lines: 13 | Source: utils.py
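The link's name spells out its call pattern: apply dropout to the sub-layer output, add the residual, then normalize. A hypothetical __call__ along those lines (an illustration only; the actual knmt implementation, e.g. its handling of residual_mode='after', may differ):

def __call__(self, sub_output, residual):
    # hypothetical sketch of the dropout -> add -> normalize pattern
    if self.dropout is not None:
        sub_output = F.dropout(sub_output, ratio=self.dropout)
    if self.residual_mode == "normal":
        sub_output = sub_output + residual
    if not self.no_normalize:
        sub_output = self.normalizing_layer(sub_output)
    return sub_output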

Example 4: get_layer_normalization_class

# Required import: from chainer import links [as alias]
# Or: from chainer.links import LayerNormalization [as alias]
def get_layer_normalization_class():
    global use_chainer_layer_normalization
    if use_chainer_layer_normalization:
        return L.LayerNormalization
    else:
        log.info("using faster LayerNormalization")
        return LayerNormalizationLink 
Author: fabiencro | Project: knmt | Lines: 9 | Source: layer_normalization.py
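A usage sketch of this factory: the caller instantiates whichever class comes back, with the same constructor interface either way (the size may be omitted, as in Example 3, and inferred on first use):

LayerNorm = get_layer_normalization_class()
ln = LayerNorm()  # or LayerNorm(n_units) when the size is known up front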

Example 5: _create_ln

# Required import: from chainer import links [as alias]
# Or: from chainer.links import LayerNormalization [as alias]
def _create_ln(*args, **kwargs):
    # Temporarily silence Chainer's experimental-feature warning while
    # constructing the link, then restore the previous flag.
    flag = chainer.disable_experimental_feature_warning
    chainer.disable_experimental_feature_warning = True
    try:
        return links.LayerNormalization(*args, **kwargs)
    finally:
        chainer.disable_experimental_feature_warning = flag
Author: chainer | Project: chainer | Lines: 9 | Source: test_layer_normalization.py
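In the test suite such a helper simply replaces direct construction; a one-line usage sketch:

ln = _create_ln(10)  # builds links.LayerNormalization(10) without the warning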

Example 6: __init__

# Required import: from chainer import links [as alias]
# Or: from chainer.links import LayerNormalization [as alias]
def __init__(self, layer, size, dropout_ratio=0.1):
        super().__init__()
        self.dropout_ratio = dropout_ratio
        with self.init_scope():
            self.layer = layer
            self.norm = L.LayerNormalization(size) 
Author: chainer | Project: models | Lines: 8 | Source: utils.py
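This wrapper follows the familiar pre-norm residual pattern: normalize the input, apply the wrapped sub-layer, drop out, then add the residual. A hypothetical __call__ in that spirit (an assumption for illustration, not the repository's code):

def __call__(self, x, *args):
    # hypothetical pre-norm residual connection around self.layer
    return x + F.dropout(self.layer(self.norm(x), *args),
                         ratio=self.dropout_ratio)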

Example 7: __init__

# Required import: from chainer import links [as alias]
# Or: from chainer.links import LayerNormalization [as alias]
def __init__(self, sublayer, N):
        super().__init__()
        with self.init_scope():
            self.sub_layers = sublayer.repeat(N, mode='copy')
            self.norm = L.LayerNormalization(sublayer.size) 
Author: chainer | Project: models | Lines: 7 | Source: decoder.py
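A hypothetical forward pass for this stack: run the N copied sub-layers in sequence, then normalize the final output (a sketch only; the extra arguments stand in for whatever the sub-layer expects, such as encoder memory and masks):

def __call__(self, x, *args):
    for layer in self.sub_layers:
        x = layer(x, *args)
    return self.norm(x)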

Example 8: __init__

# Required import: from chainer import links [as alias]
# Or: from chainer.links import LayerNormalization [as alias]
def __init__(self, hidden_channels=16, n_edge_types=5,
                 activation=functions.relu):
        super(GNNFiLMUpdate, self).__init__()
        self.n_edge_types = n_edge_types
        self.activation = activation
        with self.init_scope():
            self.W_linear = GraphLinear(
                in_size=None, out_size=self.n_edge_types * hidden_channels,
                nobias=True)  # W_l in eq. (6)
            self.W_g = GraphLinear(
                in_size=None, out_size=self.n_edge_types * hidden_channels * 2,
                nobias=True)  # g in eq. (6)
            self.norm_layer = links.LayerNormalization()  # l in eq. (6) 
Author: chainer | Project: chainer-chemistry | Lines: 15 | Source: gnn_film_update.py
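For context, the FiLM in GNN-FiLM stands for feature-wise linear modulation: the W_g branch predicts a per-feature scale (gamma) and shift (beta) that modulate the message produced by W_linear before normalization and the activation. The core operation in miniature (illustrative values, not from the library):

import numpy as np

msg = np.array([0.5, -1.0, 2.0], dtype=np.float32)    # a W_linear message
gamma = np.array([1.0, 0.1, 2.0], dtype=np.float32)   # predicted by W_g
beta = np.array([0.0, 0.5, -1.0], dtype=np.float32)   # predicted by W_g
modulated = gamma * msg + beta                        # feature-wise affine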

Example 9: __init__

# Required import: from chainer import links [as alias]
# Or: from chainer.links import LayerNormalization [as alias]
def __init__(self, ch0, ch1,
                nn='conv',
                norm='bn',
                activation=F.relu,
                dropout=False,
                noise=None,
                w_init=None,
                k_size=3,
                normalize_input=False):

        self.norm = norm
        self.normalize_input = normalize_input
        self.activation = activation
        self.dropout = dropout
        self.noise = noise
        self.nn = nn
        layers = {}

        if w_init is None:
            w = chainer.initializers.GlorotNormal()
        else:
            w = w_init

        if nn == 'down_conv':
            layers['c'] = L.Convolution2D(ch0, ch1, 4, 2, 1, initialW=w)

        elif nn == 'up_deconv':
            layers['c'] = L.Deconvolution2D(ch0, ch1, 4, 2, 1, initialW=w)

        elif nn == 'up_subpixel':
            pad = k_size//2
            layers['c'] = L.Convolution2D(ch0, ch1*4, k_size, 1, pad, initialW=w)

        elif nn=='conv' or nn=='up_unpooling':
            pad = k_size//2
            layers['c'] = L.Convolution2D(ch0, ch1, k_size, 1, pad, initialW=w)

        elif nn=='linear':
            layers['c'] = L.Linear(ch0, ch1, initialW=w)

        else:
            raise ValueError("Cannot find method %s" % nn)

        if self.norm == 'bn':
            if self.noise:
                layers['n'] = L.BatchNormalization(ch1, use_gamma=False)
            else:
                layers['n'] = L.BatchNormalization(ch1)
        elif self.norm == 'ln':
            layers['n'] = L.LayerNormalization(ch1)

        super(NNBlock, self).__init__(**layers) 
Author: Aixile | Project: chainer-gan-experiments | Lines: 54 | Source: ops.py
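A construction sketch for the 'ln' branch; note that chainer's LayerNormalization link expects a 2-D (batch, units) input, so this option pairs most naturally with nn='linear' here (values are illustrative):

block = NNBlock(256, 128, nn='linear', norm='ln')  # linear layer + LayerNorm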


Note: The chainer.links.LayerNormalization examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects and remain copyrighted by their original authors; consult each project's License before distributing or using the code, and do not republish without permission.