

Python nd.relu Method Code Examples

This article collects typical usage examples of the Python method mxnet.nd.relu. If you are wondering what exactly nd.relu does, how to call it, or what real code that uses it looks like, the curated examples below should help. You can also explore further usage examples from its parent module, mxnet.nd.


Eight code examples of the nd.relu method are presented below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
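Before the extracted examples, a minimal, self-contained sketch of what nd.relu computes may help: it applies max(0, x) element-wise to an NDArray and returns a new NDArray of the same shape (the input values below are made up).

from mxnet import nd

x = nd.array([[-2.0, -0.5, 0.0],
              [0.5, 1.0, 3.0]])
y = nd.relu(x)  # element-wise max(0, x); negative entries become 0
print(y)
# [[0.  0.  0. ]
#  [0.5 1.  3. ]]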

Example 1: forward

# Required import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def forward(self, g, h):
    hidden_rep = [h]

    for i in range(self.num_layers - 1):
        h = self.ginlayers[i](g, h)
        h = self.batch_norms[i](h)
        h = nd.relu(h)  # non-linearity after each GIN layer + batch norm
        hidden_rep.append(h)

    score_over_layer = 0
    # perform pooling over all nodes in each graph in every layer
    for i, h in enumerate(hidden_rep):
        pooled_h = self.pool(g, h)
        score_over_layer = score_over_layer + self.drop(self.linears_prediction[i](pooled_h))

    return score_over_layer
Author: dmlc, Project: dgl, Lines: 18, Source: gin.py

Example 2: forward

# Required import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def forward(self, x):
    '''
    Parameters
    ----------
    x: nd.array, shape is (batch_size, c_in, time_step, num_of_vertices)

    Returns
    -------
    nd.array, shape is (batch_size, c_out, time_step - Kt + 1, num_of_vertices)
    '''
    # crop the (channel-aligned) input so the residual connection matches
    # the temporal length left after the (Kt, 1) convolution
    x_input = self.align(x)[:, :, self.Kt - 1:, :]

    x_conv = self.conv(x)
    if self.activation == 'GLU':
        # the convolution produced 2 * c_out channels; split them into a
        # linear half and a gate half (gated linear unit)
        x_conv1, x_conv2 = nd.split(x_conv, axis=1, num_outputs=2)
        return (x_conv1 + x_input) * nd.sigmoid(x_conv2)
    if self.activation == 'relu':
        return nd.relu(x_conv + x_input)
    return x_conv
Author: Davidham3, Project: STGCN, Lines: 24, Source: base_layers.py
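The GLU branch above is worth a small standalone demonstration. A minimal sketch with made-up shapes; only nd.split and nd.sigmoid from the example are assumed:

from mxnet import nd

x_conv = nd.random.normal(shape=(1, 8, 10, 3))          # 2 * c_out = 8 channels
linear, gate = nd.split(x_conv, axis=1, num_outputs=2)  # two (1, 4, 10, 3) halves
out = linear * nd.sigmoid(gate)  # gate values in (0, 1) modulate the linear half
print(out.shape)  # (1, 4, 10, 3)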

Example 3: forward

# Required import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def forward(self, img):
    inter = self.pre(img)
    outs = []

    for ind in range(self.num_stacks):
        kp_, conv_ = self.kpts[ind], self.convs[ind]
        kp = kp_(inter)
        conv = conv_(kp)

        # one output dict per stack, with one entry per prediction head
        out = {}
        for head in self.heads:
            layer = self.__getattribute__(head)[ind]
            y = layer(conv)
            out[head] = y
        outs.append(out)

        # between stacks, merge the intermediate feature map with the
        # current stack's output before feeding the next hourglass
        if ind < self.num_stacks - 1:
            inter = self.inters_[ind](inter) + self.convs_[ind](conv)
            inter = nd.relu(inter)
            inter = self.inters[ind](inter)
    return outs
Author: Guanghan, Project: mxnet-centernet, Lines: 26, Source: hourglass.py

Example 4: resnet18

# Required import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def resnet18(num_classes):
    """The ResNet-18 model."""
    net = nn.Sequential()
    net.add(nn.Conv2D(64, kernel_size=3, strides=1, padding=1),
            nn.BatchNorm(), nn.Activation('relu'))

    def resnet_block(num_channels, num_residuals, first_block=False):
        blk = nn.Sequential()
        for i in range(num_residuals):
            if i == 0 and not first_block:
                blk.add(Residual(num_channels, use_1x1conv=True, strides=2))
            else:
                blk.add(Residual(num_channels))
        return blk

    net.add(resnet_block(64, 2, first_block=True),
            resnet_block(128, 2),
            resnet_block(256, 2),
            resnet_block(512, 2))
    net.add(nn.GlobalAvgPool2D(), nn.Dense(num_classes))
    return net 
Author: d2l-ai, Project: d2l-zh, Lines: 23, Source: utils.py
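A quick usage sketch, assuming the Residual block from Example 8's source (d2l-zh utils.py) is in scope; the batch shape below is made up:

from mxnet import nd
from mxnet.gluon import nn

net = resnet18(num_classes=10)
net.initialize()
x = nd.random.normal(shape=(4, 3, 32, 32))  # e.g. a CIFAR-10-sized batch
print(net(x).shape)  # (4, 10)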

Example 5: __init__

# Required import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def __init__(self, Kt, c_in, c_out, activation='relu', **kwargs):
    super(Temporal_conv_layer, self).__init__(**kwargs)
    self.Kt = Kt
    self.c_out = c_out
    self.activation = activation
    with self.name_scope():
        self.align = Align_layer(c_in, c_out, None)
        if activation == 'GLU':
            # GLU needs twice as many channels: half for the linear part,
            # half for the gate (see the forward pass in Example 2)
            self.conv = nn.Conv2D(2 * c_out, (Kt, 1), activation=None)
        elif activation == 'relu':
            self.conv = nn.Conv2D(c_out, (Kt, 1), activation=None)
        else:
            self.conv = nn.Conv2D(c_out, (Kt, 1), activation=activation)
Author: Davidham3, Project: STGCN, Lines: 15, Source: base_layers.py
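Putting Examples 2 and 5 together gives a usable layer. Align_layer lives elsewhere in the STGCN repo; the stand-in below is hypothetical and only assumes it maps c_in to c_out channels (here via a 1x1 convolution) so the residual connection is shape-compatible:

from mxnet import nd
from mxnet.gluon import nn

class Align_layer(nn.Block):
    # hypothetical stand-in: 1x1 convolution when channel counts differ
    def __init__(self, c_in, c_out, activation, **kwargs):
        super(Align_layer, self).__init__(**kwargs)
        self.conv = (nn.Conv2D(c_out, (1, 1), activation=activation)
                     if c_in != c_out else None)

    def forward(self, x):
        return self.conv(x) if self.conv is not None else x

layer = Temporal_conv_layer(Kt=3, c_in=32, c_out=32, activation='GLU')
layer.initialize()
x = nd.random.normal(shape=(8, 32, 12, 228))  # (batch, c_in, time, vertices)
print(layer(x).shape)  # (8, 32, 10, 228): time shrinks by Kt - 1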

Example 6: msg_reduce

# Required import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def msg_reduce(self, node):
    state = node.mailbox['state']
    alpha = node.mailbox['alpha']
    # normalize the attention scores over the incoming messages
    alpha = nd.softmax(alpha, axis=1)

    # attention-weighted sum of neighbor states, then ReLU
    new_state = nd.relu(nd.sum(alpha * state, axis=1))
    return {'new_state': new_state}
Author: panzheyi, Project: ST-MetaNet, Lines: 9, Source: graph.py
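The reduction above is plain attention pooling. A minimal numeric sketch with made-up shapes (2 nodes, 3 incoming messages each, 4 features):

from mxnet import nd

state = nd.random.normal(shape=(2, 3, 4))  # (nodes, messages, features)
alpha = nd.random.normal(shape=(2, 3, 1))  # one raw score per message
alpha = nd.softmax(alpha, axis=1)          # weights sum to 1 over messages
new_state = nd.relu(nd.sum(alpha * state, axis=1))  # broadcast, pool, ReLU
print(new_state.shape)  # (2, 4)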

Example 7: msg_reduce

# Required import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def msg_reduce(self, node):
    state = node.mailbox['state']
    alpha = node.mailbox['alpha']
    alpha = nd.softmax(alpha, axis=1)

    # same attention pooling as Example 6, scaled by a learned sigmoid gate
    new_state = (nd.relu(nd.sum(alpha * state, axis=1))
                 * nd.sigmoid(self.weight.data(state.context)))
    return {'new_state': new_state}
Author: panzheyi, Project: ST-MetaNet, Lines: 9, Source: graph.py

Example 8: forward

# Required import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def forward(self, X):
    Y = nd.relu(self.bn1(self.conv1(X)))
    Y = self.bn2(self.conv2(Y))
    if self.conv3:
        X = self.conv3(X)  # 1x1 convolution to match shapes on the skip path
    return nd.relu(Y + X)
Author: d2l-ai, Project: d2l-zh, Lines: 8, Source: utils.py
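For context, this forward belongs to the Residual block used by Example 4's resnet18. A sketch of the matching __init__, reconstructed here following the d2l-zh utils.py it was extracted from:

from mxnet.gluon import nn

class Residual(nn.Block):
    """A residual block: two 3x3 convs plus an optional 1x1 conv skip."""
    def __init__(self, num_channels, use_1x1conv=False, strides=1, **kwargs):
        super(Residual, self).__init__(**kwargs)
        self.conv1 = nn.Conv2D(num_channels, kernel_size=3, padding=1,
                               strides=strides)
        self.conv2 = nn.Conv2D(num_channels, kernel_size=3, padding=1)
        if use_1x1conv:
            # match spatial size and channel count on the skip connection
            self.conv3 = nn.Conv2D(num_channels, kernel_size=1,
                                   strides=strides)
        else:
            self.conv3 = None
        self.bn1 = nn.BatchNorm()
        self.bn2 = nn.BatchNorm()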


Note: The mxnet.nd.relu method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers, and copyright remains with the original authors. For distribution and use, please refer to each project's License. Do not repost without permission.