當前位置: 首頁>>代碼示例>>Python>>正文


Python layers.ReLU方法代碼示例

本文整理匯總了Python中tensorflow.keras.layers.ReLU方法的典型用法代碼示例。如果您正苦於以下問題:Python layers.ReLU方法的具體用法?Python layers.ReLU怎麽用?Python layers.ReLU使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在tensorflow.keras.layers的用法示例。


在下文中一共展示了layers.ReLU方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。

示例1: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, out_features, **kwargs):
    """Build a dense layer of two stacked (ReLU -> Conv2D -> BatchNorm) stages.

    Both conv stages use `out_features` 3x3/1 same-padded filters with bias.
    NOTE(review): relies on module-level globals `w_decay` and `weight_init`
    (not visible in this chunk) -- confirm they are defined at module scope.
    The original author notes the leading ReLU may be unnecessary.
    """
    super(_DenseLayer, self).__init__(**kwargs)
    k_reg = l2(w_decay) if w_decay is not None else None

    def make_conv():
        # Two identical conv stages share this config but are distinct layers.
        return layers.Conv2D(
            filters=out_features, kernel_size=(3, 3), strides=(1, 1),
            padding='same', use_bias=True, kernel_initializer=weight_init,
            kernel_regularizer=k_reg)

    self.layers = [tf.keras.Sequential([
        layers.ReLU(),  # first relu can be not needed
        make_conv(),
        layers.BatchNormalization(),
        layers.ReLU(),
        make_conv(),
        layers.BatchNormalization(),
    ])]
開發者ID:xavysp,項目名稱:DexiNed,代碼行數:21,代碼來源:model.py

示例2: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, in_channels, out_channels, strides,
             data_format="channels_last", **kwargs):
    """SKNet residual unit: SK bottleneck body plus an optional identity projection.

    A 1x1 projection conv (no activation) is created only when the residual
    branch would not match the body's output shape.
    """
    super().__init__(**kwargs)
    # Projection needed whenever channels or spatial size change.
    self.resize_identity = (in_channels != out_channels) or (strides != 1)

    self.body = SKNetBottleneck(
        in_channels=in_channels, out_channels=out_channels,
        strides=strides, data_format=data_format, name="body")
    if self.resize_identity:
        self.identity_conv = conv1x1_block(
            in_channels=in_channels, out_channels=out_channels,
            strides=strides, activation=None,
            data_format=data_format, name="identity_conv")
    self.activ = nn.ReLU()
開發者ID:osmr,項目名稱:imgclsmob,代碼行數:26,代碼來源:sknet.py

示例3: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, in_channels, out_channels, kernel_size, padding,
             data_format="channels_last", **kwargs):
    """SqueezeNet Fire-module conv branch: a biased Conv2d plus a ReLU sublayer."""
    super().__init__(**kwargs)
    conv_args = dict(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        padding=padding,
        data_format=data_format,
    )
    self.conv = Conv2d(name="conv", **conv_args)
    self.activ = nn.ReLU()
開發者ID:osmr,項目名稱:imgclsmob,代碼行數:18,代碼來源:squeezenet.py

示例4: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, in_channels, out_channels, kernel_size, strides, padding,
             num_blocks, data_format="channels_last", **kwargs):
    """PolyNet conv unit: one shared bias-free conv with `num_blocks` BatchNorms.

    The conv weights are shared across poly branches while each branch gets
    its own BatchNorm instance ("bn1" .. "bn{num_blocks}").
    """
    super().__init__(**kwargs)
    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, strides=strides, padding=padding,
        use_bias=False, data_format=data_format, name="conv")
    # One BN per branch; normalization statistics are not shared.
    self.bns = [
        BatchNorm(data_format=data_format, name="bn{}".format(i + 1))
        for i in range(num_blocks)
    ]
    self.activ = nn.ReLU()
開發者ID:osmr,項目名稱:imgclsmob,代碼行數:27,代碼來源:polynet.py

示例5: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, in_channels, out_channels, residual,
             data_format="channels_last", **kwargs):
    """DLA root node: a 1x1 conv block (no activation) followed by a ReLU.

    The `residual` flag and `data_format` are stored on the instance,
    presumably for use in call() -- not visible in this chunk.
    """
    super().__init__(**kwargs)
    self.residual = residual
    self.data_format = data_format

    self.conv = conv1x1_block(
        in_channels=in_channels, out_channels=out_channels,
        activation=None, data_format=data_format, name="conv")
    self.activ = nn.ReLU()
開發者ID:osmr,項目名稱:imgclsmob,代碼行數:19,代碼來源:dla.py

示例6: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, in_channels, out_channels, kernel_size, strides, padding,
             data_format="channels_last", **kwargs):
    """Inception-v4 conv unit: bias-free Conv2d, BatchNorm, and ReLU sublayers."""
    super().__init__(**kwargs)
    conv_args = dict(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, strides=strides, padding=padding,
        use_bias=False, data_format=data_format,
    )
    self.conv = Conv2d(name="conv", **conv_args)
    # Inception uses non-default BN hyperparameters (momentum=0.1, eps=1e-3).
    self.bn = BatchNorm(
        momentum=0.1, epsilon=1e-3,
        data_format=data_format, name="bn")
    self.activ = nn.ReLU()
開發者ID:osmr,項目名稱:imgclsmob,代碼行數:26,代碼來源:inceptionv4.py

示例7: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, in_channels, out_channels, kernel_size, strides, padding,
             groups, data_format="channels_last", **kwargs):
    """DPN conv unit: DPN BatchNorm, ReLU, and a grouped bias-free Conv2d.

    Sublayers are created in BN -> ReLU -> conv order, matching DPN's
    pre-activation layout.
    """
    super().__init__(**kwargs)
    self.bn = dpn_batch_norm(
        channels=in_channels, data_format=data_format, name="bn")
    self.activ = nn.ReLU()
    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, strides=strides, padding=padding,
        groups=groups, use_bias=False,
        data_format=data_format, name="conv")
開發者ID:osmr,項目名稱:imgclsmob,代碼行數:27,代碼來源:dpn.py

示例8: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, in_channels, out_channels, kernel_size, strides, padding,
             activate, data_format="channels_last", **kwargs):
    """Wide-ResNet conv unit: a biased Conv2d with an optional ReLU.

    When `activate` is falsy, no `activ` sublayer is created at all.
    """
    super().__init__(**kwargs)
    self.activate = activate

    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, strides=strides, padding=padding,
        use_bias=True, data_format=data_format, name="conv")
    # The ReLU sublayer exists only on request.
    if self.activate:
        self.activ = nn.ReLU()
開發者ID:osmr,項目名稱:imgclsmob,代碼行數:25,代碼來源:wrn.py

示例9: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, in_channels, out_channels,
             data_format="channels_last", **kwargs):
    """PreResNet stem: 7x7/2 bias-free conv, BN, ReLU, then a 3x3/2 max-pool."""
    super().__init__(**kwargs)
    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=7, strides=2, padding=3, use_bias=False,
        data_format=data_format, name="conv")
    self.bn = BatchNorm(data_format=data_format, name="bn")
    self.activ = nn.ReLU()
    # NOTE(review): unlike sibling init blocks (e.g. ShuffleInitBlock),
    # no data_format is passed to the pool here -- confirm this is intended.
    self.pool = MaxPool2d(
        pool_size=3, strides=2, padding=1, name="pool")
開發者ID:osmr,項目名稱:imgclsmob,代碼行數:26,代碼來源:preresnet.py

示例10: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, in_channels, out_channels, kernel_size, strides, padding,
             data_format="channels_last", **kwargs):
    """DiracNetV2 conv unit: a ReLU and a biased Conv2d (created ReLU-first)."""
    super().__init__(**kwargs)
    self.activ = nn.ReLU()
    conv_args = dict(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, strides=strides, padding=padding,
        use_bias=True, data_format=data_format,
    )
    self.conv = Conv2d(name="conv", **conv_args)
開發者ID:osmr,項目名稱:imgclsmob,代碼行數:21,代碼來源:diracnetv2.py

示例11: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, in_channels, out_channels,
             data_format="channels_last", **kwargs):
    """ShuffleNet stem: 3x3/2 conv, BN, ReLU, then a 3x3/2 max-pool."""
    super().__init__(**kwargs)
    self.conv = conv3x3(
        in_channels=in_channels, out_channels=out_channels,
        strides=2, data_format=data_format, name="conv")
    self.bn = BatchNorm(data_format=data_format, name="bn")
    self.activ = nn.ReLU()
    self.pool = MaxPool2d(
        pool_size=3, strides=2, padding=1,
        data_format=data_format, name="pool")
開發者ID:osmr,項目名稱:imgclsmob,代碼行數:25,代碼來源:shufflenet.py

示例12: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, in_channels, out_channels, kernel_size, strides, padding,
             activate, data_format="channels_last", **kwargs):
    """Xception depthwise-separable block: optional ReLU, DwsConv, BatchNorm.

    The ReLU sublayer is created before the conv (and only when `activate`
    is truthy), mirroring Xception's activation-first ordering.
    """
    super().__init__(**kwargs)
    self.activate = activate

    if self.activate:
        self.activ = nn.ReLU()
    self.conv = DwsConv(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, strides=strides, padding=padding,
        data_format=data_format, name="conv")
    self.bn = BatchNorm(data_format=data_format, name="bn")
開發者ID:osmr,項目名稱:imgclsmob,代碼行數:27,代碼來源:xception.py

示例13: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, in_channels, out_channels,
             data_format="channels_last", **kwargs):
    """PyramidNet stem: 7x7/2 bias-free conv, BN, ReLU, then a 3x3/2 max-pool."""
    super().__init__(**kwargs)
    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=7, strides=2, padding=3, use_bias=False,
        data_format=data_format, name="conv")
    self.bn = BatchNorm(data_format=data_format, name="bn")
    self.activ = nn.ReLU()
    self.pool = MaxPool2d(
        pool_size=3, strides=2, padding=1,
        data_format=data_format, name="pool")
開發者ID:osmr,項目名稱:imgclsmob,代碼行數:27,代碼來源:pyramidnet.py

示例14: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, in_channels, out_channels, kernel_size, strides, padding,
             groups, data_format="channels_last", **kwargs):
    """NASNet conv unit: ReLU, grouped bias-free Conv2d, and NASNet BatchNorm.

    Sublayers are created in ReLU -> conv -> BN order.
    """
    super().__init__(**kwargs)
    self.activ = nn.ReLU()
    self.conv = Conv2d(
        in_channels=in_channels, out_channels=out_channels,
        kernel_size=kernel_size, strides=strides, padding=padding,
        groups=groups, use_bias=False,
        data_format=data_format, name="conv")
    self.bn = nasnet_batch_norm(
        channels=out_channels, data_format=data_format, name="bn")
開發者ID:osmr,項目名稱:imgclsmob,代碼行數:27,代碼來源:nasnet.py

示例15: __init__

# 需要導入模塊: from tensorflow.keras import layers [as 別名]
# 或者: from tensorflow.keras.layers import ReLU [as 別名]
def __init__(self, filters,  # NOTE: will be filters // 2
             norm_type="instance", pad_type="constant", **kwargs):
    """ShuffleNetV2 basic unit branch: 1x1 conv, norm, ReLU, 3x3 depthwise,
    norm, 1x1 conv, norm, ReLU, all on `filters // 2` channels.

    NOTE(review): `pad_type` and `**kwargs` are accepted but unused here,
    and the layer name is hard-coded -- original behavior preserved.
    """
    super(BasicShuffleUnitV2, self).__init__(name="BasicShuffleUnitV2")
    filters //= 2
    stack = [
        Conv2D(filters, 1, use_bias=False),
        get_norm(norm_type),
        ReLU(),
        DepthwiseConv2D(3, padding='same', use_bias=False),
        get_norm(norm_type),
        Conv2D(filters, 1, use_bias=False),
        get_norm(norm_type),
        ReLU(),
    ]
    self.model = tf.keras.models.Sequential(stack)
開發者ID:mnicnc404,項目名稱:CartoonGan-tensorflow,代碼行數:19,代碼來源:layers.py


注:本文中的tensorflow.keras.layers.ReLU方法示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。