

Python nd.relu Method Code Examples

This article collects typical usage examples of the Python method mxnet.nd.relu. If you are wondering how nd.relu is used in practice, or what a working example looks like, the curated examples below should help. You can also explore further usage examples from the mxnet.nd module.


The following presents 8 code examples of the nd.relu method, sorted by popularity by default.
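Before the examples, here is a minimal self-contained sketch of the API itself (input values chosen purely for illustration): nd.relu applies max(x, 0) element-wise and returns a new NDArray of the same shape.

from mxnet import nd

x = nd.array([[-1.0, 0.0, 2.5]])
y = nd.relu(x)        # element-wise max(x, 0)
print(y)              # [[0.  0.  2.5]]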

Example 1: forward

# Required module import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def forward(self, g, h):
        hidden_rep = [h]

        for i in range(self.num_layers - 1):
            h = self.ginlayers[i](g, h)
            h = self.batch_norms[i](h)
            h = nd.relu(h)
            hidden_rep.append(h)

        score_over_layer = 0
        # perform pooling over all nodes in each graph in every layer
        for i, h in enumerate(hidden_rep):
            pooled_h = self.pool(g, h)
            score_over_layer = score_over_layer + self.drop(self.linears_prediction[i](pooled_h))

        return score_over_layer 
Developer: dmlc, Project: dgl, Lines: 18, Source: gin.py
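The readout pattern in this example, isolated from dgl (a hypothetical, dgl-free sketch; the layer count, hidden size, and 3-class head are illustrative assumptions): each layer's pooled graph embedding passes through its own linear head, and the per-layer scores are summed.

from mxnet import nd
from mxnet.gluon import nn

pooled = [nd.random.uniform(shape=(1, 16)) for _ in range(3)]  # one pooled vector per layer
heads = [nn.Dense(3) for _ in pooled]                          # one prediction head per layer
for head in heads:
    head.initialize()
score = sum(head(h) for head, h in zip(heads, pooled))         # summed (1, 3) class scores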

Example 2: forward

# Required module import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def forward(self, x):
        '''
        Parameters
        ----------
        x: nd.array, shape is (batch_size, c_in, time_step, num_of_vertices)

        Returns
        -------
        nd.array, shape is (batch_size, c_out, time_step - Kt + 1, num_of_vertices)

        '''

        x_input = self.align(x)[:, :, self.Kt - 1:, :]

        x_conv = self.conv(x)
        if self.activation == 'GLU':
            x_conv1, x_conv2 = nd.split(x_conv, axis=1, num_outputs=2)
            return (x_conv1 + x_input) * nd.sigmoid(x_conv2)
        if self.activation == 'relu':
            return nd.relu(x_conv + x_input)
        return x_conv 
Developer: Davidham3, Project: STGCN, Lines: 24, Source: base_layers.py
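For readers unfamiliar with the GLU branch above, a standalone numeric sketch (shapes are hypothetical): the convolution emits 2 * c_out channels, nd.split halves them along the channel axis, and one half gates the other through a sigmoid.

from mxnet import nd

x_conv = nd.random.uniform(shape=(4, 16, 10, 5))  # (batch, 2*c_out, T, V) with c_out = 8
p, q = nd.split(x_conv, axis=1, num_outputs=2)    # two (4, 8, 10, 5) halves
gated = p * nd.sigmoid(q)                         # GLU: linear part * sigmoid gate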

Example 3: forward

# Required module import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def forward(self, img):
        inter = self.pre(img)
        #print("\t inter shape: ", inter.shape)
        outs = []

        for ind in range(self.num_stacks):
            kp_, conv_ = self.kpts[ind], self.convs[ind]
            kp = kp_(inter)
            conv = conv_(kp)
            #print("\t conv shape: ", conv.shape)

            out = {}
            for head in self.heads:
                layer = self.__getattribute__(head)[ind]
                y = layer(conv)
                out[head] = y
            outs.append(out)

            if ind < self.num_stacks - 1:
                inter = self.inters_[ind](inter) + self.convs_[ind](conv)
                inter = nd.relu(inter)
                inter = self.inters[ind](inter)
                #print("\t inter shape: ", inter.shape)
        return outs 
Developer: Guanghan, Project: mxnet-centernet, Lines: 26, Source: hourglass.py

Example 4: resnet18

# Required module import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def resnet18(num_classes):
    """The ResNet-18 model."""
    net = nn.Sequential()
    net.add(nn.Conv2D(64, kernel_size=3, strides=1, padding=1),
            nn.BatchNorm(), nn.Activation('relu'))

    def resnet_block(num_channels, num_residuals, first_block=False):
        blk = nn.Sequential()
        for i in range(num_residuals):
            if i == 0 and not first_block:
                blk.add(Residual(num_channels, use_1x1conv=True, strides=2))
            else:
                blk.add(Residual(num_channels))
        return blk

    net.add(resnet_block(64, 2, first_block=True),
            resnet_block(128, 2),
            resnet_block(256, 2),
            resnet_block(512, 2))
    net.add(nn.GlobalAvgPool2D(), nn.Dense(num_classes))
    return net 
Developer: d2l-ai, Project: d2l-zh, Lines: 23, Source: utils.py
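A hypothetical smoke test for the builder above (the Xavier initializer and 96x96 RGB input are illustrative assumptions; it also assumes the Residual class used by resnet18 — see Example 8 — is in scope, and Gluon defers channel inference to the first forward pass):

from mxnet import init, nd

net = resnet18(num_classes=10)
net.initialize(init.Xavier())
out = net(nd.random.uniform(shape=(1, 3, 96, 96)))
print(out.shape)    # (1, 10)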

Example 5: __init__

# Required module import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def __init__(self, Kt, c_in, c_out, activation='relu', **kwargs):
        super(Temporal_conv_layer, self).__init__(**kwargs)
        self.Kt = Kt
        self.c_out = c_out
        self.activation = activation
        with self.name_scope():
            self.align = Align_layer(c_in, c_out, None)
            if activation == 'GLU':
                self.conv = nn.Conv2D(2 * c_out, (Kt, 1), activation=None)
            elif activation == 'relu':
                self.conv = nn.Conv2D(c_out, (Kt, 1), activation=None)
            else:
                self.conv = nn.Conv2D(c_out, (Kt, 1), activation=activation) 
Developer: Davidham3, Project: STGCN, Lines: 15, Source: base_layers.py

Example 6: msg_reduce

# Required module import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def msg_reduce(self, node):
        state = node.mailbox['state']
        alpha = node.mailbox['alpha']
        alpha = nd.softmax(alpha, axis=1)

        new_state = nd.relu(nd.sum(alpha * state, axis=1))
        return { 'new_state': new_state } 
Developer: panzheyi, Project: ST-MetaNet, Lines: 9, Source: graph.py
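The reduction in isolation, with hypothetical sizes: alpha carries one raw score per incoming message, nd.softmax normalises the scores over the mailbox axis, and the messages are summed under those weights before the ReLU.

from mxnet import nd

state = nd.random.uniform(shape=(3, 5, 16))  # (nodes, mailbox size, hidden)
alpha = nd.random.uniform(shape=(3, 5, 1))   # one raw attention score per message
alpha = nd.softmax(alpha, axis=1)            # normalise over the mailbox axis
new_state = nd.relu(nd.sum(alpha * state, axis=1))  # -> shape (3, 16)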

Example 7: msg_reduce

# Required module import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def msg_reduce(self, node):
        state = node.mailbox['state']
        alpha = node.mailbox['alpha']
        alpha = nd.softmax(alpha, axis=1)

        new_state = nd.relu(nd.sum(alpha * state, axis=1)) * nd.sigmoid(self.weight.data(state.context))
        return { 'new_state': new_state } 
Developer: panzheyi, Project: ST-MetaNet, Lines: 9, Source: graph.py

Example 8: forward

# Required module import: from mxnet import nd [as alias]
# Or: from mxnet.nd import relu [as alias]
def forward(self, X):
        Y = nd.relu(self.bn1(self.conv1(X)))
        Y = self.bn2(self.conv2(Y))
        if self.conv3:
            X = self.conv3(X)
        return nd.relu(Y + X) 
Developer: d2l-ai, Project: d2l-zh, Lines: 8, Source: utils.py
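For context, a sketch of the __init__ this forward pairs with, reconstructed along the lines of the d2l-zh Residual block (not part of the excerpt above): two 3x3 convolutions with batch norm, plus an optional 1x1 convolution so the skip connection matches the main path's shape.

from mxnet.gluon import nn

class Residual(nn.Block):
    # Residual block: two 3x3 convs with BN; optional 1x1 conv on the skip path.
    def __init__(self, num_channels, use_1x1conv=False, strides=1, **kwargs):
        super(Residual, self).__init__(**kwargs)
        self.conv1 = nn.Conv2D(num_channels, kernel_size=3, padding=1, strides=strides)
        self.conv2 = nn.Conv2D(num_channels, kernel_size=3, padding=1)
        self.conv3 = nn.Conv2D(num_channels, kernel_size=1, strides=strides) if use_1x1conv else None
        self.bn1 = nn.BatchNorm()
        self.bn2 = nn.BatchNorm()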


Note: the mxnet.nd.relu examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors, and copyright remains with the original authors. Please consult each project's license before using or redistributing the code; do not reproduce this article without permission.