This article collects typical usage examples of the Python method mxnet.ndarray.relu. If you are wondering how to use ndarray.relu, what it does, or where to find examples of it, the curated code samples below may help. You can also explore the other methods of the mxnet.ndarray module.
The following 10 code examples of ndarray.relu are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
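Before the examples, here is a minimal, self-contained sketch of what ndarray.relu itself does (the input values are illustrative, not taken from the examples below):

import mxnet as mx
import mxnet.ndarray as F

# relu computes max(x, 0) element-wise on an NDArray
x = mx.nd.array([[-1.0, 0.0, 2.5], [3.0, -0.5, 1.0]])
y = F.relu(x)
print(y)  # negative entries become 0, non-negative entries pass through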
Example 1: demo
# Required import: from mxnet import ndarray [as alias]
# Or: from mxnet.ndarray import relu [as alias]
def demo(self, x_low, x_high):
    self._up_kwargs['height'] = x_high.shape[2]
    self._up_kwargs['width'] = x_high.shape[3]
    import mxnet.ndarray as F
    # Upsample the low-resolution branch to the high-resolution spatial size
    x_low = F.contrib.BilinearResize2D(x_low,
                                       height=self._up_kwargs['height'],
                                       width=self._up_kwargs['width'])
    x_low = self.conv_low(x_low)
    x_high = self.conv_hign(x_high)
    # Fuse the two branches, then apply ReLU
    x = x_low + x_high
    x = F.relu(x)
    # Auxiliary classifier on the low-resolution branch
    x_low_cls = self.conv_low_cls(x_low)
    return x, x_low_cls
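The fusion pattern in Example 1 (upsample the low-resolution branch, add it to the high-resolution branch, then apply relu) can be exercised standalone with plain NDArrays. A minimal sketch with made-up shapes, omitting the 1x1 convolutions that the class applies to each branch:

import mxnet as mx
import mxnet.ndarray as F

# Hypothetical NCHW feature maps: low-res (1, 8, 15, 15) and high-res (1, 8, 30, 30)
x_low = mx.nd.random.uniform(shape=(1, 8, 15, 15))
x_high = mx.nd.random.uniform(shape=(1, 8, 30, 30))
# Upsample the low-res branch to the high-res spatial size, fuse, then relu
x_low = F.contrib.BilinearResize2D(x_low, height=x_high.shape[2], width=x_high.shape[3])
fused = F.relu(x_low + x_high)
print(fused.shape)  # (1, 8, 30, 30)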
Example 2: predict
# Required import: from mxnet import ndarray [as alias]
# Or: from mxnet.ndarray import relu [as alias]
def predict(self, x):
    import mxnet.ndarray as F
    # Full-resolution branch
    x_sub1_out = self.conv_sub1(x)
    # sub2 branch: downsample the input by 2
    x_sub2 = F.contrib.BilinearResize2D(x, height=x.shape[2] // 2, width=x.shape[3] // 2)
    x = self.conv1(x_sub2)
    x = self.bn1(x)
    x = self.relu(x)
    x = self.maxpool(x)
    x = self.layer1(x)
    x_sub2_out = self.layer2(x)
    # sub4 branch: downsample the sub2 features by 2 again
    x_sub4 = F.contrib.BilinearResize2D(x_sub2_out,
                                        height=x_sub2_out.shape[2] // 2,
                                        width=x_sub2_out.shape[3] // 2)
    x = self.layer3(x_sub4)
    x = self.layer4(x)
    x_sub4_out = self.psp_head(x)
    # Reduce channels before fusion
    x_sub4_out = self.conv_sub4(x_sub4_out)
    x_sub2_out = self.conv_sub2(x_sub2_out)
    res = self.head(x_sub1_out, x_sub2_out, x_sub4_out)
    return res[0]
Example 3: build_input_layer
# Required import: from mxnet import ndarray [as alias]
# Or: from mxnet.ndarray import relu [as alias]
def build_input_layer(self):
    # Input layer of an R-GCN; ndarray.relu is passed as the activation callable
    return RelGraphConv(self.num_nodes, self.h_dim, self.num_rels, "basis",
                        self.num_bases, activation=F.relu, self_loop=self.use_self_loop,
                        dropout=self.dropout)
Example 4: build_hidden_layer
# Required import: from mxnet import ndarray [as alias]
# Or: from mxnet.ndarray import relu [as alias]
def build_hidden_layer(self, idx):
    # Hidden R-GCN layer; idx is unused because all hidden layers are identical
    return RelGraphConv(self.h_dim, self.h_dim, self.num_rels, "basis",
                        self.num_bases, activation=F.relu, self_loop=self.use_self_loop,
                        dropout=self.dropout)
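Examples 3 and 4 pass ndarray.relu as the activation callable to DGL's RelGraphConv (from its MXNet backend). A hedged construction sketch; the hyperparameter values are illustrative, not taken from the examples above:

import mxnet.ndarray as F
from dgl.nn.mxnet.conv import RelGraphConv

# A relational graph conv layer: 16-dim in/out, 4 relation types,
# basis decomposition with 2 bases, relu applied to each layer's output
layer = RelGraphConv(16, 16, num_rels=4, regularizer="basis",
                     num_bases=2, activation=F.relu, dropout=0.1)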
Example 5: hybrid_forward
# Required import: from mxnet import ndarray [as alias]
# Or: from mxnet.ndarray import relu [as alias]
def hybrid_forward(self, F, x):
    # large resolution branch
    x_sub1_out = self.conv_sub1(x)
    # medium resolution branch
    x_sub2 = F.contrib.BilinearResize2D(x,
                                        height=self._up_kwargs['height'] // 2,
                                        width=self._up_kwargs['width'] // 2)
    x = self.conv1(x_sub2)
    x = self.bn1(x)
    x = self.relu(x)
    x = self.maxpool(x)
    x = self.layer1(x)
    x_sub2_out = self.layer2(x)
    # small resolution branch
    x_sub4 = F.contrib.BilinearResize2D(x_sub2_out,
                                        height=self._up_kwargs['height'] // 32,
                                        width=self._up_kwargs['width'] // 32)
    x = self.layer3(x_sub4)
    x = self.layer4(x)
    x_sub4_out = self.psp_head(x)
    # reduce conv
    x_sub4_out = self.conv_sub4(x_sub4_out)
    x_sub2_out = self.conv_sub2(x_sub2_out)
    # ICNet head
    res = self.head(x_sub1_out, x_sub2_out, x_sub4_out)
    return res
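In hybrid_forward, F is supplied by Gluon: it is mxnet.ndarray when the block runs imperatively and mxnet.symbol after hybridize(), so F.relu in Examples 5 and 9 resolves to ndarray.relu until the block is hybridized. A quick sketch to observe the dispatch:

import mxnet as mx
from mxnet.gluon import nn

class ShowF(nn.HybridBlock):
    def hybrid_forward(self, F, x):
        print(F.__name__)  # the module that F refers to on this call
        return F.relu(x)

net = ShowF()
net(mx.nd.array([-1.0, 2.0]))  # prints mxnet.ndarray
net.hybridize()
net(mx.nd.array([-1.0, 2.0]))  # prints mxnet.symbol (during the tracing pass)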
Example 6: predict
# Required import: from mxnet import ndarray [as alias]
# Or: from mxnet.ndarray import relu [as alias]
def predict(self, x):
    # Adapt the upsampling targets to the actual input size
    h, w = x.shape[2:]
    self._up_kwargs['height'] = h
    self._up_kwargs['width'] = w
    import mxnet.ndarray as F
    # Full-resolution branch
    x_sub1_out = self.conv_sub1(x)
    # Medium resolution branch (1/2 of the input size)
    x_sub2 = F.contrib.BilinearResize2D(x,
                                        height=self._up_kwargs['height'] // 2,
                                        width=self._up_kwargs['width'] // 2)
    x = self.conv1(x_sub2)
    x = self.bn1(x)
    x = self.relu(x)
    x = self.maxpool(x)
    x = self.layer1(x)
    x_sub2_out = self.layer2(x)
    # Small resolution branch (1/32 of the input size)
    x_sub4 = F.contrib.BilinearResize2D(x_sub2_out,
                                        height=self._up_kwargs['height'] // 32,
                                        width=self._up_kwargs['width'] // 32)
    x = self.layer3(x_sub4)
    x = self.layer4(x)
    # demo() variants of the heads handle arbitrary input sizes
    x_sub4_out = self.psp_head.demo(x)
    x_sub4_out = self.conv_sub4(x_sub4_out)
    x_sub2_out = self.conv_sub2(x_sub2_out)
    res = self.head.demo(x_sub1_out, x_sub2_out, x_sub4_out)
    return res[0]
Example 7: __init__
# Required import: from mxnet import ndarray [as alias]
# Or: from mxnet.ndarray import relu [as alias]
def __init__(self, in_planes, out_planes, ksize, stride=1, pad=0, dilation=1,
             groups=1, has_bn=True, norm_layer=nn.BatchNorm, bn_eps=1e-5,
             has_relu=True, has_bias=False, **kwargs):
    super(ConvBnRelu, self).__init__()
    with self.name_scope():
        self.conv = nn.Conv2D(in_channels=in_planes, channels=out_planes,
                              kernel_size=ksize, padding=pad, strides=stride,
                              dilation=dilation, groups=groups, use_bias=has_bias)
        self.has_bn = has_bn
        self.has_relu = has_relu
        # BN and ReLU are optional sub-blocks, controlled by the flags above
        if self.has_bn:
            self.bn = norm_layer(in_channels=out_planes, epsilon=bn_eps)
        if self.has_relu:
            self.relu = nn.Activation('relu')
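The matching forward pass for ConvBnRelu is not shown above. A plausible hybrid_forward (a sketch, not necessarily the original implementation) simply chains the optional pieces:

def hybrid_forward(self, F, x):
    # conv -> optional BN -> optional ReLU, mirroring the flags set in __init__
    x = self.conv(x)
    if self.has_bn:
        x = self.bn(x)
    if self.has_relu:
        x = self.relu(x)
    return x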
Example 8: hybrid_forward
# Required import: from mxnet import ndarray [as alias]
# Or: from mxnet.ndarray import relu [as alias]
def hybrid_forward(self, F, fts, ys, ftt, yt):
    """
    Semantic Alignment Loss
    :param F: function namespace (mxnet.ndarray or mxnet.symbol)
    :param fts: features for the source domain [M, K]
    :param ys: labels for the source domain [M]
    :param ftt: features for the target domain [N, K]
    :param yt: labels for the target domain [N]
    :return: per-target-sample hinge loss [N]
    """
    if self._fn:
        # L2-normalize the features
        fts = F.L2Normalization(fts, mode='instance')
        ftt = F.L2Normalization(ftt, mode='instance')
    # Broadcast to all (target, source) pairs and compute squared distances
    fts_rpt = F.broadcast_to(fts.expand_dims(axis=0), shape=(self._bs_tgt, self._bs_src, self._embed_size))
    ftt_rpt = F.broadcast_to(ftt.expand_dims(axis=1), shape=(self._bs_tgt, self._bs_src, self._embed_size))
    dists = F.sum(F.square(ftt_rpt - fts_rpt), axis=2)
    yt_rpt = F.broadcast_to(yt.expand_dims(axis=1), shape=(self._bs_tgt, self._bs_src)).astype('int32')
    ys_rpt = F.broadcast_to(ys.expand_dims(axis=0), shape=(self._bs_tgt, self._bs_src)).astype('int32')
    # Masks for same-class and different-class (target, source) pairs
    y_same = F.equal(yt_rpt, ys_rpt).astype('float32')
    y_diff = F.not_equal(yt_rpt, ys_rpt).astype('float32')
    intra_cls_dists = dists * y_same
    inter_cls_dists = dists * y_diff
    # Replace same-class entries with the row maximum so they never win the min below
    max_dists = F.max(dists, axis=1, keepdims=True)
    max_dists = F.broadcast_to(max_dists, shape=(self._bs_tgt, self._bs_src))
    revised_inter_cls_dists = F.where(y_same, max_dists, inter_cls_dists)
    # Margin hinge between the hardest intra-class and the closest inter-class pair
    max_intra_cls_dist = F.max(intra_cls_dists, axis=1)
    min_inter_cls_dist = F.min(revised_inter_cls_dists, axis=1)
    loss = F.relu(max_intra_cls_dist - min_inter_cls_dist + self._margin)
    return loss
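Stripped of the class wrapper, the core of Example 8 can be exercised directly with mxnet.ndarray. The shapes, labels, and margin below are made up for illustration:

import mxnet as mx
import mxnet.ndarray as F

M, N, K, margin = 4, 3, 8, 1.0            # source batch, target batch, embed dim
fts = mx.nd.random.uniform(shape=(M, K))  # source features
ftt = mx.nd.random.uniform(shape=(N, K))  # target features
ys = mx.nd.array([0, 1, 2, 0])            # source labels
yt = mx.nd.array([0, 1, 2])               # target labels

# Pairwise squared distances between every target and source sample: [N, M]
d = F.sum(F.square(ftt.expand_dims(1) - fts.expand_dims(0)), axis=2)
same = F.equal(yt.expand_dims(1), ys.expand_dims(0))
# Hinge: the hardest same-class distance must undercut the closest
# other-class distance by at least the margin, per target sample
max_d = F.broadcast_to(F.max(d, axis=1, keepdims=True), shape=(N, M))
loss = F.relu(F.max(d * same, axis=1)
              - F.min(F.where(same, max_d, d), axis=1) + margin)
print(loss.shape)  # (3,)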
Example 9: hybrid_forward
# Required import: from mxnet import ndarray [as alias]
# Or: from mxnet.ndarray import relu [as alias]
def hybrid_forward(self, F, x):
    # large resolution branch --> (1, 3, 480, 480)
    x_sub1_out = self.conv_sub1(x)  # --> (1, 64, 60, 60)
    # medium resolution branch --> (1, 3, 240, 240)
    x_sub2 = F.contrib.BilinearResize2D(x,
                                        height=self._up_kwargs['height'] // 2,
                                        width=self._up_kwargs['width'] // 2)
    x = self.conv1(x_sub2)  # --> (1, 128, 120, 120)
    x = self.bn1(x)
    x = self.relu(x)
    x = self.maxpool(x)  # --> (1, 128, 60, 60)
    x = self.layer1(x)  # --> (1, 256, 60, 60)
    x_sub2_out = self.layer2(x)  # --> (1, 512, 30, 30)
    # small resolution branch --> (1, 512, 15, 15)
    x_sub4 = F.contrib.BilinearResize2D(x_sub2_out,
                                        height=self._up_kwargs['height'] // 32,
                                        width=self._up_kwargs['width'] // 32)
    x = self.layer3(x_sub4)  # --> (1, 1024, 15, 15)
    x = self.layer4(x)  # --> (1, 2048, 15, 15)
    x_sub4_out = self.psp_head(x)  # --> (1, 512, 15, 15)
    # reduce conv
    x_sub4_out = self.conv_sub4(x_sub4_out)  # --> (1, 256, 15, 15)
    x_sub2_out = self.conv_sub2(x_sub2_out)  # --> (1, 256, 30, 30)
    # ICNet head
    res = self.head(x_sub1_out, x_sub2_out, x_sub4_out)
    return res
Example 10: demo
# Required import: from mxnet import ndarray [as alias]
# Or: from mxnet.ndarray import relu [as alias]
def demo(self, x_low, x_high):
    import mxnet.ndarray as F
    # Upsample the low-resolution branch to match the high-resolution one
    x_low = F.contrib.BilinearResize2D(x_low, height=x_high.shape[2], width=x_high.shape[3])
    x_low = self.conv_low(x_low)
    x_high = self.conv_hign(x_high)
    # Fuse the two branches, then apply ReLU
    x = x_low + x_high
    x = F.relu(x)
    # Auxiliary classifier on the low-resolution branch
    x_low_cls = self.conv_low_cls(x_low)
    return x, x_low_cls