本文整理汇总了Python中tensorflow.keras.layers.ReLU方法的典型用法代码示例。如果您正苦于以下问题:Python layers.ReLU方法的具体用法?Python layers.ReLU怎么用?Python layers.ReLU使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类tensorflow.keras.layers
的用法示例。
在下文中一共展示了layers.ReLU方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self, out_features, **kwargs):
    """Build one dense layer: two (ReLU -> Conv2D -> BatchNorm) stages
    packed into a single ``tf.keras.Sequential`` that is stored as the
    sole element of ``self.layers``.

    Parameters
    ----------
    out_features : int
        Number of filters for both 3x3 convolutions.

    NOTE(review): relies on module-level globals ``w_decay`` (L2 weight
    decay factor or None) and ``weight_init`` (kernel initializer) —
    confirm they are defined in the enclosing module.
    """
    super(_DenseLayer, self).__init__(**kwargs)
    regularizer = l2(w_decay) if w_decay is not None else None
    # Both convolutions share identical hyper-parameters.
    conv_cfg = dict(
        filters=out_features, kernel_size=(3, 3), strides=(1, 1),
        padding='same', use_bias=True, kernel_initializer=weight_init,
        kernel_regularizer=regularizer)
    stack = tf.keras.Sequential([
        layers.ReLU(),  # original author notes the first ReLU may be unnecessary
        layers.Conv2D(**conv_cfg),
        layers.BatchNormalization(),
        layers.ReLU(),
        layers.Conv2D(**conv_cfg),
        layers.BatchNormalization(),
    ])
    self.layers = [stack]
示例2: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self,
             in_channels,
             out_channels,
             strides,
             data_format="channels_last",
             **kwargs):
    """SKNet residual unit: an SKNetBottleneck body, an optional 1x1
    projection for the identity branch, and a final ReLU.

    The projection is created only when the shortcut cannot be added
    element-wise to the body output (channel count or stride changes).
    """
    super(SKNetUnit, self).__init__(**kwargs)
    self.resize_identity = (in_channels != out_channels) or (strides != 1)
    self.body = SKNetBottleneck(
        in_channels=in_channels, out_channels=out_channels,
        strides=strides, data_format=data_format, name="body")
    if self.resize_identity:
        # 1x1 conv without activation: pure linear projection of the shortcut.
        self.identity_conv = conv1x1_block(
            in_channels=in_channels, out_channels=out_channels,
            strides=strides, activation=None,
            data_format=data_format, name="identity_conv")
    self.activ = nn.ReLU()
示例3: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self,
             in_channels,
             out_channels,
             kernel_size,
             padding,
             data_format="channels_last",
             **kwargs):
    """Simple conv unit: a Conv2d followed by a ReLU activation."""
    super(FireConv, self).__init__(**kwargs)
    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, padding=padding,
        data_format=data_format, name="conv")
    self.activ = nn.ReLU()
示例4: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self,
             in_channels,
             out_channels,
             kernel_size,
             strides,
             padding,
             num_blocks,
             data_format="channels_last",
             **kwargs):
    """Poly conv unit: one shared (bias-free) convolution, one private
    BatchNorm per block (``num_blocks`` of them), and a shared ReLU."""
    super(PolyConv, self).__init__(**kwargs)
    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, strides=strides, padding=padding,
        use_bias=False, data_format=data_format, name="conv")
    # Independent normalization statistics per block: bn1 .. bn{num_blocks}.
    self.bns = [
        BatchNorm(data_format=data_format, name="bn{}".format(idx + 1))
        for idx in range(num_blocks)
    ]
    self.activ = nn.ReLU()
示例5: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self,
             in_channels,
             out_channels,
             residual,
             data_format="channels_last",
             **kwargs):
    """DLA root block: a 1x1 conv block without activation plus a ReLU.

    ``residual`` and ``data_format`` are stored on the instance for use
    in the forward pass (not visible in this fragment).
    """
    super(DLARoot, self).__init__(**kwargs)
    self.residual = residual
    self.data_format = data_format
    # Linear 1x1 projection; the nonlinearity is applied separately via self.activ.
    self.conv = conv1x1_block(
        in_channels=in_channels, out_channels=out_channels,
        activation=None, data_format=data_format, name="conv")
    self.activ = nn.ReLU()
示例6: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self,
             in_channels,
             out_channels,
             kernel_size,
             strides,
             padding,
             data_format="channels_last",
             **kwargs):
    """Inception-style conv unit: bias-free Conv2d -> BatchNorm -> ReLU.

    The BatchNorm uses momentum=0.1 and epsilon=1e-3 (Inception defaults
    rather than the Keras defaults).
    """
    super(InceptConv, self).__init__(**kwargs)
    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, strides=strides, padding=padding,
        use_bias=False, data_format=data_format, name="conv")
    self.bn = BatchNorm(
        momentum=0.1, epsilon=1e-3,
        data_format=data_format, name="bn")
    self.activ = nn.ReLU()
示例7: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self,
             in_channels,
             out_channels,
             kernel_size,
             strides,
             padding,
             groups,
             data_format="channels_last",
             **kwargs):
    """DPN conv unit in pre-activation order: BatchNorm -> ReLU -> Conv2d.

    The normalization is applied to the *input* channels (note
    ``channels=in_channels``), and the grouped convolution is bias-free.
    """
    super(DPNConv, self).__init__(**kwargs)
    self.bn = dpn_batch_norm(
        channels=in_channels, data_format=data_format, name="bn")
    self.activ = nn.ReLU()
    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, strides=strides, padding=padding,
        groups=groups, use_bias=False,
        data_format=data_format, name="conv")
示例8: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self,
             in_channels,
             out_channels,
             kernel_size,
             strides,
             padding,
             activate,
             data_format="channels_last",
             **kwargs):
    """Wide-ResNet conv unit: Conv2d (with bias) plus an optional ReLU.

    When ``activate`` is falsy, no ReLU layer is created at all and the
    forward pass (not shown here) presumably skips it — confirm in caller.
    """
    super(WRNConv, self).__init__(**kwargs)
    self.activate = activate
    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, strides=strides, padding=padding,
        use_bias=True, data_format=data_format, name="conv")
    if self.activate:
        self.activ = nn.ReLU()
示例9: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self,
             in_channels,
             out_channels,
             data_format="channels_last",
             **kwargs):
    """Pre-ResNet stem: 7x7/2 bias-free conv -> BatchNorm -> ReLU ->
    3x3/2 max-pool with padding 1.

    NOTE(review): the MaxPool2d here is not given ``data_format`` while
    sibling init blocks in this file pass it — confirm the pool's default
    matches ``data_format``.
    """
    super(PreResInitBlock, self).__init__(**kwargs)
    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=7, strides=2, padding=3, use_bias=False,
        data_format=data_format, name="conv")
    self.bn = BatchNorm(data_format=data_format, name="bn")
    self.activ = nn.ReLU()
    self.pool = MaxPool2d(
        pool_size=3, strides=2, padding=1, name="pool")
示例10: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self,
             in_channels,
             out_channels,
             kernel_size,
             strides,
             padding,
             data_format="channels_last",
             **kwargs):
    """DiracNet conv unit in activation-first order: ReLU -> Conv2d (with bias)."""
    super(DiracConv, self).__init__(**kwargs)
    # Activation precedes the convolution in this architecture.
    self.activ = nn.ReLU()
    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, strides=strides, padding=padding,
        use_bias=True, data_format=data_format, name="conv")
示例11: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self,
             in_channels,
             out_channels,
             data_format="channels_last",
             **kwargs):
    """ShuffleNet stem: 3x3/2 conv -> BatchNorm -> ReLU -> 3x3/2 max-pool
    with padding 1."""
    super(ShuffleInitBlock, self).__init__(**kwargs)
    self.conv = conv3x3(
        in_channels=in_channels, out_channels=out_channels,
        strides=2, data_format=data_format, name="conv")
    self.bn = BatchNorm(data_format=data_format, name="bn")
    self.activ = nn.ReLU()
    self.pool = MaxPool2d(
        pool_size=3, strides=2, padding=1,
        data_format=data_format, name="pool")
示例12: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self,
             in_channels,
             out_channels,
             kernel_size,
             strides,
             padding,
             activate,
             data_format="channels_last",
             **kwargs):
    """Depthwise-separable conv block: optional ReLU, then DwsConv, then
    BatchNorm.

    The ReLU layer is only instantiated when ``activate`` is truthy; it is
    created *before* the conv here, matching the original construction order.
    """
    super(DwsConvBlock, self).__init__(**kwargs)
    self.activate = activate
    if self.activate:
        self.activ = nn.ReLU()
    self.conv = DwsConv(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, strides=strides, padding=padding,
        data_format=data_format, name="conv")
    self.bn = BatchNorm(data_format=data_format, name="bn")
示例13: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self,
             in_channels,
             out_channels,
             data_format="channels_last",
             **kwargs):
    """PyramidNet stem: 7x7/2 bias-free conv -> BatchNorm -> ReLU ->
    3x3/2 max-pool with padding 1."""
    super(PyrInitBlock, self).__init__(**kwargs)
    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=7, strides=2, padding=3, use_bias=False,
        data_format=data_format, name="conv")
    self.bn = BatchNorm(data_format=data_format, name="bn")
    self.activ = nn.ReLU()
    self.pool = MaxPool2d(
        pool_size=3, strides=2, padding=1,
        data_format=data_format, name="pool")
示例14: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self,
             in_channels,
             out_channels,
             kernel_size,
             strides,
             padding,
             groups,
             data_format="channels_last",
             **kwargs):
    """NASNet conv unit: ReLU -> grouped bias-free Conv2d -> NASNet BatchNorm.

    The activation object is created first, matching the original
    construction order (relevant for Keras auto-naming).
    """
    super(NasConv, self).__init__(**kwargs)
    self.activ = nn.ReLU()
    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, strides=strides, padding=padding,
        groups=groups, use_bias=False,
        data_format=data_format, name="conv")
    self.bn = nasnet_batch_norm(
        channels=out_channels, data_format=data_format, name="bn")
示例15: __init__
# 需要导入模块: from tensorflow.keras import layers [as 别名]
# 或者: from tensorflow.keras.layers import ReLU [as 别名]
def __init__(self,
             filters,  # NOTE: effective width is filters // 2
             norm_type="instance",
             pad_type="constant",
             **kwargs):
    """Basic ShuffleNetV2 unit branch: 1x1 conv -> norm -> ReLU ->
    3x3 depthwise conv -> norm -> 1x1 conv -> norm -> ReLU.

    NOTE(review): ``**kwargs`` and ``pad_type`` are accepted but never
    used, and the layer name is hard-coded (defeats Keras name
    uniquification when several units coexist) — confirm this is intended.
    """
    super(BasicShuffleUnitV2, self).__init__(name="BasicShuffleUnitV2")
    half = filters // 2  # this branch processes half of the channels
    self.model = tf.keras.models.Sequential([
        Conv2D(half, 1, use_bias=False),
        get_norm(norm_type),
        ReLU(),
        DepthwiseConv2D(3, padding='same', use_bias=False),
        get_norm(norm_type),
        Conv2D(half, 1, use_bias=False),
        get_norm(norm_type),
        ReLU(),
    ])