

Python layers.ReLU Method Code Examples

This article collects typical usage examples of the Python method tensorflow.keras.layers.ReLU. If you are wondering what exactly layers.ReLU does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples from tensorflow.keras.layers, the module this method belongs to.


Below are 15 code examples of the layers.ReLU method, sorted by popularity by default.
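Before the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of how tf.keras.layers.ReLU is typically used, both as a standalone activation and as a separate layer after a convolution; the layer sizes and input shape are arbitrary choices for illustration:

import tensorflow as tf
from tensorflow.keras import layers

# Standalone use: ReLU maps negative inputs to zero and keeps positive ones.
relu = layers.ReLU()
print(relu(tf.constant([-2.0, -0.5, 0.0, 1.5])))  # [0. 0. 0. 1.5]
# The layer also accepts max_value, negative_slope and threshold,
# e.g. layers.ReLU(max_value=6.0) behaves like ReLU6.

# Typical in-model use: ReLU as its own layer after Conv2D + BatchNormalization,
# the pattern most of the examples below follow.
model = tf.keras.Sequential([
    layers.Conv2D(16, kernel_size=3, padding='same', input_shape=(32, 32, 3)),
    layers.BatchNormalization(),
    layers.ReLU(),
    layers.GlobalAveragePooling2D(),
    layers.Dense(10),
])
model.summary()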

Example 1: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self, out_features, **kwargs):
        super(_DenseLayer, self).__init__(**kwargs)
        # w_decay, weight_init and l2 are defined at module level in the source model.py
        k_reg = None if w_decay is None else l2(w_decay)
        self.layers = []
        self.layers.append(tf.keras.Sequential(
            [
                layers.ReLU(),
                layers.Conv2D(
                    filters=out_features, kernel_size=(3, 3), strides=(1, 1), padding='same',
                    use_bias=True, kernel_initializer=weight_init,
                    kernel_regularizer=k_reg),
                layers.BatchNormalization(),
                layers.ReLU(),
                layers.Conv2D(
                    filters=out_features, kernel_size=(3, 3), strides=(1, 1), padding='same',
                    use_bias=True, kernel_initializer=weight_init,
                    kernel_regularizer=k_reg),
                layers.BatchNormalization(),
            ]))  # the first ReLU may be unnecessary
Author: xavysp, Project: DexiNed, Lines: 21, Source: model.py

Example 2: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 strides,
                 data_format="channels_last",
                 **kwargs):
        super(SKNetUnit, self).__init__(**kwargs)
        self.resize_identity = (in_channels != out_channels) or (strides != 1)

        self.body = SKNetBottleneck(
            in_channels=in_channels,
            out_channels=out_channels,
            strides=strides,
            data_format=data_format,
            name="body")
        if self.resize_identity:
            self.identity_conv = conv1x1_block(
                in_channels=in_channels,
                out_channels=out_channels,
                strides=strides,
                activation=None,
                data_format=data_format,
                name="identity_conv")
        self.activ = nn.ReLU() 
Author: osmr, Project: imgclsmob, Lines: 26, Source: sknet.py

Example 3: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 padding,
                 data_format="channels_last",
                 **kwargs):
        super(FireConv, self).__init__(**kwargs)
        self.conv = Conv2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            padding=padding,
            data_format=data_format,
            name="conv")
        self.activ = nn.ReLU() 
Author: osmr, Project: imgclsmob, Lines: 18, Source: squeezenet.py

Example 4: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 num_blocks,
                 data_format="channels_last",
                 **kwargs):
        super(PolyConv, self).__init__(**kwargs)
        self.conv = Conv2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            use_bias=False,
            data_format=data_format,
            name="conv")
        self.bns = []
        for i in range(num_blocks):
            self.bns.append(BatchNorm(
                data_format=data_format,
                name="bn{}".format(i + 1)))
        self.activ = nn.ReLU() 
Author: osmr, Project: imgclsmob, Lines: 27, Source: polynet.py

Example 5: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 residual,
                 data_format="channels_last",
                 **kwargs):
        super(DLARoot, self).__init__(**kwargs)
        self.residual = residual
        self.data_format = data_format

        self.conv = conv1x1_block(
            in_channels=in_channels,
            out_channels=out_channels,
            activation=None,
            data_format=data_format,
            name="conv")
        self.activ = nn.ReLU() 
Author: osmr, Project: imgclsmob, Lines: 19, Source: dla.py

Example 6: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 data_format="channels_last",
                 **kwargs):
        super(InceptConv, self).__init__(**kwargs)
        self.conv = Conv2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            use_bias=False,
            data_format=data_format,
            name="conv")
        self.bn = BatchNorm(
            momentum=0.1,
            epsilon=1e-3,
            data_format=data_format,
            name="bn")
        self.activ = nn.ReLU() 
Author: osmr, Project: imgclsmob, Lines: 26, Source: inceptionv4.py

Example 7: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 groups,
                 data_format="channels_last",
                 **kwargs):
        super(DPNConv, self).__init__(**kwargs)
        self.bn = dpn_batch_norm(
            channels=in_channels,
            data_format=data_format,
            name="bn")
        self.activ = nn.ReLU()
        self.conv = Conv2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            groups=groups,
            use_bias=False,
            data_format=data_format,
            name="conv") 
Author: osmr, Project: imgclsmob, Lines: 27, Source: dpn.py

Example 8: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 activate,
                 data_format="channels_last",
                 **kwargs):
        super(WRNConv, self).__init__(**kwargs)
        self.activate = activate

        self.conv = Conv2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            use_bias=True,
            data_format=data_format,
            name="conv")
        if self.activate:
            self.activ = nn.ReLU() 
Author: osmr, Project: imgclsmob, Lines: 25, Source: wrn.py

Example 9: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 data_format="channels_last",
                 **kwargs):
        super(PreResInitBlock, self).__init__(**kwargs)
        self.conv = Conv2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=7,
            strides=2,
            padding=3,
            use_bias=False,
            data_format=data_format,
            name="conv")
        self.bn = BatchNorm(
            data_format=data_format,
            name="bn")
        self.activ = nn.ReLU()
        self.pool = MaxPool2d(
            pool_size=3,
            strides=2,
            padding=1,
            name="pool") 
Author: osmr, Project: imgclsmob, Lines: 26, Source: preresnet.py

Example 10: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 data_format="channels_last",
                 **kwargs):
        super(DiracConv, self).__init__(**kwargs)
        self.activ = nn.ReLU()
        self.conv = Conv2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            use_bias=True,
            data_format=data_format,
            name="conv") 
Author: osmr, Project: imgclsmob, Lines: 21, Source: diracnetv2.py

Example 11: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 data_format="channels_last",
                 **kwargs):
        super(ShuffleInitBlock, self).__init__(**kwargs)
        self.conv = conv3x3(
            in_channels=in_channels,
            out_channels=out_channels,
            strides=2,
            data_format=data_format,
            name="conv")
        self.bn = BatchNorm(
            # in_channels=out_channels,
            data_format=data_format,
            name="bn")
        self.activ = nn.ReLU()
        self.pool = MaxPool2d(
            pool_size=3,
            strides=2,
            padding=1,
            data_format=data_format,
            name="pool") 
Author: osmr, Project: imgclsmob, Lines: 25, Source: shufflenet.py

Example 12: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 activate,
                 data_format="channels_last",
                 **kwargs):
        super(DwsConvBlock, self).__init__(**kwargs)
        self.activate = activate

        if self.activate:
            self.activ = nn.ReLU()
        self.conv = DwsConv(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            data_format=data_format,
            name="conv")
        self.bn = BatchNorm(
            data_format=data_format,
            name="bn") 
Author: osmr, Project: imgclsmob, Lines: 27, Source: xception.py

Example 13: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 data_format="channels_last",
                 **kwargs):
        super(PyrInitBlock, self).__init__(**kwargs)
        self.conv = Conv2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=7,
            strides=2,
            padding=3,
            use_bias=False,
            data_format=data_format,
            name="conv")
        self.bn = BatchNorm(
            data_format=data_format,
            name="bn")
        self.activ = nn.ReLU()
        self.pool = MaxPool2d(
            pool_size=3,
            strides=2,
            padding=1,
            data_format=data_format,
            name="pool") 
Author: osmr, Project: imgclsmob, Lines: 27, Source: pyramidnet.py

Example 14: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 groups,
                 data_format="channels_last",
                 **kwargs):
        super(NasConv, self).__init__(**kwargs)
        self.activ = nn.ReLU()
        self.conv = Conv2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            groups=groups,
            use_bias=False,
            data_format=data_format,
            name="conv")
        self.bn = nasnet_batch_norm(
            channels=out_channels,
            data_format=data_format,
            name="bn") 
Author: osmr, Project: imgclsmob, Lines: 27, Source: nasnet.py

Example 15: __init__

# Required import: from tensorflow.keras import layers [as alias]
# Or: from tensorflow.keras.layers import ReLU [as alias]
def __init__(self,
                 filters,  # NOTE: will be filters // 2
                 norm_type="instance",
                 pad_type="constant",
                 **kwargs):
        super(BasicShuffleUnitV2, self).__init__(name="BasicShuffleUnitV2")
        filters //= 2
        self.model = tf.keras.models.Sequential([
            Conv2D(filters, 1, use_bias=False),
            get_norm(norm_type),
            ReLU(),
            DepthwiseConv2D(3, padding='same', use_bias=False),
            get_norm(norm_type),
            Conv2D(filters, 1, use_bias=False),
            get_norm(norm_type),
            ReLU(),
        ]) 
Author: mnicnc404, Project: CartoonGan-tensorflow, Lines: 19, Source: layers.py


Note: The tensorflow.keras.layers.ReLU examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors, and any distribution or use of the code should follow the corresponding projects' licenses. Please do not reproduce this article without permission.