This article collects typical usage examples of the nd.relu method from Python's mxnet package. If you have been wondering what exactly nd.relu does, how to call it, or what idiomatic uses look like, the hand-picked examples below may help. You can also explore further usage examples from the module it belongs to, mxnet.nd.
The following shows 8 code examples of the nd.relu method, ordered by popularity.
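Before the examples, a minimal sketch of what nd.relu itself computes: an element-wise max(0, x) over an NDArray. The input values below are arbitrary placeholders.

from mxnet import nd

x = nd.array([[-2.0, 0.0, 3.5]])
y = nd.relu(x)   # element-wise max(0, x)
print(y)         # [[0. 0. 3.5]]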
Example 1: forward
# Required import: from mxnet import nd [as alias]
# Alternatively: from mxnet.nd import relu [as alias]
def forward(self, g, h):
    # collect the hidden representation produced at every GIN layer
    hidden_rep = [h]
    for i in range(self.num_layers - 1):
        h = self.ginlayers[i](g, h)
        h = self.batch_norms[i](h)
        h = nd.relu(h)
        hidden_rep.append(h)
    score_over_layer = 0
    # perform pooling over all nodes in each graph in every layer
    for i, h in enumerate(hidden_rep):
        pooled_h = self.pool(g, h)
        score_over_layer = score_over_layer + self.drop(
            self.linears_prediction[i](pooled_h))
    return score_over_layer
Example 2: forward
# Required import: from mxnet import nd [as alias]
# Alternatively: from mxnet.nd import relu [as alias]
def forward(self, x):
    '''
    Parameters
    ----------
    x: nd.array, shape is (batch_size, c_in, time_step, num_of_vertices)

    Returns
    -------
    nd.array, shape is (batch_size, c_out, time_step - Kt + 1, num_of_vertices)
    '''
    # align channels and trim the first Kt - 1 time steps so the residual
    # branch matches the shape of the convolution output
    x_input = self.align(x)[:, :, self.Kt - 1:, :]
    x_conv = self.conv(x)
    if self.activation == 'GLU':
        # the conv emitted 2 * c_out channels; the second half gates the first
        x_conv1, x_conv2 = nd.split(x_conv, axis=1, num_outputs=2)
        return (x_conv1 + x_input) * nd.sigmoid(x_conv2)
    if self.activation == 'relu':
        return nd.relu(x_conv + x_input)
    return x_conv
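The GLU branch above is a gated linear unit: the convolution emits 2 * c_out channels, nd.split halves them along the channel axis, and the second half passes through a sigmoid to gate the first. A standalone sketch of that gating on a dummy tensor (shapes are illustrative only):

from mxnet import nd

x_conv = nd.random.normal(shape=(1, 4, 5, 3))   # (batch, 2 * c_out, time, vertices)
a, b = nd.split(x_conv, axis=1, num_outputs=2)  # two (1, 2, 5, 3) halves
glu = a * nd.sigmoid(b)                         # gate the value half
print(glu.shape)                                # (1, 2, 5, 3)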
Example 3: forward
# Required import: from mxnet import nd [as alias]
# Alternatively: from mxnet.nd import relu [as alias]
def forward(self, img):
    inter = self.pre(img)
    outs = []
    for ind in range(self.num_stacks):
        kp_, conv_ = self.kpts[ind], self.convs[ind]
        kp = kp_(inter)
        conv = conv_(kp)
        # one prediction dict per stack (intermediate supervision)
        out = {}
        for head in self.heads:
            layer = self.__getattribute__(head)[ind]
            out[head] = layer(conv)
        outs.append(out)
        if ind < self.num_stacks - 1:
            # fuse the previous features with this stack's output before the next stack
            inter = self.inters_[ind](inter) + self.convs_[ind](conv)
            inter = nd.relu(inter)
            inter = self.inters[ind](inter)
    return outs
Example 4: resnet18
# Required import: from mxnet import nd [as alias]
# Alternatively: from mxnet.nd import relu [as alias]
def resnet18(num_classes):
    """The ResNet-18 model."""
    net = nn.Sequential()
    net.add(nn.Conv2D(64, kernel_size=3, strides=1, padding=1),
            nn.BatchNorm(), nn.Activation('relu'))

    def resnet_block(num_channels, num_residuals, first_block=False):
        blk = nn.Sequential()
        for i in range(num_residuals):
            if i == 0 and not first_block:
                blk.add(Residual(num_channels, use_1x1conv=True, strides=2))
            else:
                blk.add(Residual(num_channels))
        return blk

    net.add(resnet_block(64, 2, first_block=True),
            resnet_block(128, 2),
            resnet_block(256, 2),
            resnet_block(512, 2))
    net.add(nn.GlobalAvgPool2D(), nn.Dense(num_classes))
    return net
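A hedged usage sketch for this constructor, assuming the Residual block from Example 8 is defined in scope; the (1, 3, 32, 32) input is an arbitrary CIFAR-sized placeholder:

from mxnet import init, nd

net = resnet18(num_classes=10)
net.initialize(init.Xavier())
out = net(nd.random.normal(shape=(1, 3, 32, 32)))
print(out.shape)   # (1, 10)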
Example 5: __init__
# Required import: from mxnet import nd [as alias]
# Alternatively: from mxnet.nd import relu [as alias]
def __init__(self, Kt, c_in, c_out, activation='relu', **kwargs):
    super(Temporal_conv_layer, self).__init__(**kwargs)
    self.Kt = Kt
    self.c_out = c_out
    self.activation = activation
    with self.name_scope():
        self.align = Align_layer(c_in, c_out, None)
        if activation == 'GLU':
            # GLU needs twice the channels: half for values, half for gates
            self.conv = nn.Conv2D(2 * c_out, (Kt, 1), activation=None)
        elif activation == 'relu':
            # nd.relu is applied in forward, after the residual addition
            self.conv = nn.Conv2D(c_out, (Kt, 1), activation=None)
        else:
            self.conv = nn.Conv2D(c_out, (Kt, 1), activation=activation)
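Paired with the forward pass in Example 2, the layer can be exercised on a dummy spatio-temporal batch. A sketch under the assumption that Align_layer maps c_in to c_out channels without changing the other axes; shapes follow the docstring in Example 2:

from mxnet import init, nd

layer = Temporal_conv_layer(Kt=3, c_in=2, c_out=4, activation='GLU')
layer.initialize(init.Xavier())
x = nd.random.normal(shape=(8, 2, 12, 20))   # (batch, c_in, time_step, vertices)
print(layer(x).shape)                        # (8, 4, 10, 20): time_step - Kt + 1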
Example 6: msg_reduce
# Required import: from mxnet import nd [as alias]
# Alternatively: from mxnet.nd import relu [as alias]
def msg_reduce(self, node):
    state = node.mailbox['state']
    alpha = node.mailbox['alpha']
    # normalize attention scores over the incoming messages
    alpha = nd.softmax(alpha, axis=1)
    new_state = nd.relu(nd.sum(alpha * state, axis=1))
    return {'new_state': new_state}
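The reducer is a softmax-weighted sum of neighbor states followed by a ReLU. The same arithmetic on dummy mailbox tensors, where axis 1 indexes the incoming messages (shapes are illustrative only):

from mxnet import nd

state = nd.random.normal(shape=(5, 3, 16))   # (nodes, messages, features)
alpha = nd.random.normal(shape=(5, 3, 1))    # unnormalized attention scores
alpha = nd.softmax(alpha, axis=1)            # normalize over messages
new_state = nd.relu(nd.sum(alpha * state, axis=1))
print(new_state.shape)                       # (5, 16)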
Example 7: msg_reduce
# Required import: from mxnet import nd [as alias]
# Alternatively: from mxnet.nd import relu [as alias]
def msg_reduce(self, node):
    state = node.mailbox['state']
    alpha = node.mailbox['alpha']
    alpha = nd.softmax(alpha, axis=1)
    # same attention reduce as Example 6, additionally gated by a learned weight
    new_state = (nd.relu(nd.sum(alpha * state, axis=1))
                 * nd.sigmoid(self.weight.data(state.context)))
    return {'new_state': new_state}
Example 8: forward
# Required import: from mxnet import nd [as alias]
# Alternatively: from mxnet.nd import relu [as alias]
def forward(self, X):
    Y = nd.relu(self.bn1(self.conv1(X)))
    Y = self.bn2(self.conv2(Y))
    if self.conv3:
        # 1x1 convolution so the shortcut matches Y's shape
        X = self.conv3(X)
    return nd.relu(Y + X)
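A usage sketch for this residual block, assuming the common Gluon definition that accompanies this forward (conv1/conv2 as 3x3 Conv2D layers, bn1/bn2 as BatchNorm, conv3 as an optional strided 1x1 Conv2D), as consumed by resnet18 in Example 4:

from mxnet import init, nd

blk = Residual(64, use_1x1conv=True, strides=2)
blk.initialize(init.Xavier())
x = nd.random.normal(shape=(4, 3, 6, 6))
print(blk(x).shape)   # (4, 64, 3, 3): the strided 1x1 conv resizes the shortcut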