

Python functions.leaky_relu method code examples

This article collects typical usage examples of the chainer.functions.leaky_relu method in Python. If you are unsure how functions.leaky_relu is used in practice, or what real-world calls to it look like, the curated code examples below may help. You can also explore further usage examples from its containing module, chainer.functions.


The following presents 15 code examples of the functions.leaky_relu method, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Python code examples.
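
Before diving into the examples, here is a minimal usage sketch of the function itself (the input array is made up for illustration): chainer.functions.leaky_relu(x, slope=0.2) returns x where x >= 0 and slope * x elsewhere.

import numpy as np
import chainer.functions as F

x = np.array([[-1.0, 0.5], [2.0, -3.0]], dtype=np.float32)
y = F.leaky_relu(x, slope=0.2)   # the default slope is 0.2
print(y.array)
# approximately [[-0.2  0.5]
#                [ 2.  -0.6]]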

Example 1: __init__

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def __init__(self, in_channels, out_channels, ksize=3, pad=1, activation=F.leaky_relu, mode='none', bn=False, dr=None):
        super(ResBlock, self).__init__()
        initializer = chainer.initializers.GlorotUniform()
        initializer_sc = chainer.initializers.GlorotUniform()
        self.activation = activation
        self.mode = _downsample if mode == 'down' else _upsample if mode == 'up' else None
        self.learnable_sc = in_channels != out_channels
        self.dr = dr
        self.bn = bn
        with self.init_scope():
            self.c1 = L.Convolution2D(in_channels,  out_channels, ksize=ksize, pad=pad, initialW=initializer, nobias=bn)
            self.c2 = L.Convolution2D(out_channels, out_channels, ksize=ksize, pad=pad, initialW=initializer, nobias=bn)
            if bn:
                self.b1 = L.BatchNormalization(out_channels)
                self.b2 = L.BatchNormalization(out_channels)
            if self.learnable_sc:
                self.c_sc = L.Convolution2D(in_channels, out_channels, ksize=1, pad=0, initialW=initializer_sc) 
Author: pstuvwx, Project: Deep_VoiceChanger, Lines: 19, Source: block.py

Example 2: __call__

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def __call__(self, x):
        if self.dr:
            with chainer.using_config('train', True):
                x = F.dropout(x, self.dr)
        if self.gap:
            x = F.sum(x, axis=(2,3))
        N = x.shape[0]
        # The code below is copied from https://github.com/pfnet-research/chainer-gan-lib/blob/master/minibatch_discrimination/net.py
        feature = F.reshape(F.leaky_relu(x), (N, -1))
        m = F.reshape(self.md(feature), (N, self.B * self.C, 1))
        m0 = F.broadcast_to(m, (N, self.B * self.C, N))
        m1 = F.transpose(m0, (2, 1, 0))
        d = F.absolute(F.reshape(m0 - m1, (N, self.B, self.C, N)))
        d = F.sum(F.exp(-F.sum(d, axis=2)), axis=2) - 1
        h = F.concat([feature, d])

        h = self.l(h)
        return h 
Author: pstuvwx, Project: Deep_VoiceChanger, Lines: 20, Source: block.py
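
The reshape/broadcast sequence in this minibatch-discrimination block is easier to follow with concrete shapes. Below is a shape-tracing sketch using plain NumPy stand-ins; N, feat_dim, B, C, and md are made-up values standing in for the batch size, the feature width, self.B, self.C, and the learned self.md projection.

import numpy as np

N, feat_dim, B, C = 4, 128, 32, 8
feature = np.random.randn(N, feat_dim).astype(np.float32)
md = np.random.randn(feat_dim, B * C).astype(np.float32)   # stand-in for the learned self.md projection

m = (feature @ md).reshape(N, B * C, 1)
m0 = np.broadcast_to(m, (N, B * C, N))
m1 = np.transpose(m0, (2, 1, 0))
d = np.abs((m0 - m1).reshape(N, B, C, N))
d = np.exp(-d.sum(axis=2)).sum(axis=2) - 1    # L1 distance per kernel, summed over the other samples
h = np.concatenate([feature, d], axis=1)
print(h.shape)                                # (4, 160) = (N, feat_dim + B)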

Example 3: __init__

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def __init__(self, in_channels, out_channels, mode='none', activation=F.leaky_relu, bn=True, dr=None):
        super(ConvBlock, self).__init__()
        initializer = chainer.initializers.GlorotUniform()
        self.activation = activation
        self.bn = bn
        self.dr = dr
        with self.init_scope():
            if mode == 'none':
                self.c = L.Convolution1D(in_channels, out_channels, ksize=3, stride=1, pad=1, initialW=initializer, nobias=bn)
            elif mode == 'down':
                self.c = L.Convolution1D(in_channels, out_channels, ksize=4, stride=2, pad=1, initialW=initializer, nobias=bn)
            elif mode == 'up':
                self.c = L.Deconvolution1D(in_channels, out_channels, ksize=4, stride=2, pad=1, initialW=initializer, nobias=bn)
            else:
                raise Exception('mode is missing')
            if bn:
                self.b = L.BatchNormalization(out_channels) 
Author: pstuvwx, Project: Deep_VoiceChanger, Lines: 19, Source: block_1d.py
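
As a quick check of the 'down'/'up' geometry used in this block: with ksize=4, stride=2, pad=1, Convolution1D halves the sequence length and Deconvolution1D doubles it. The channel counts below are made up.

import numpy as np
import chainer.links as L

x = np.random.randn(1, 8, 64).astype(np.float32)        # (batch, channels, length)
down = L.Convolution1D(8, 16, ksize=4, stride=2, pad=1)
up = L.Deconvolution1D(16, 8, ksize=4, stride=2, pad=1)
print(down(x).shape)       # (1, 16, 32)
print(up(down(x)).shape)   # (1, 8, 64)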

Example 4: __init__

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def __init__(self,ch0=3,input_size=256,layer_size=7): #input_size=512(2^9) in original paper but 256(2^8) in this implementation
        if 2**(layer_size+1) != input_size:
            raise AssertionError
        enc_layers = {}
        dec_layers = {}
        #encoder layers
        enc_layers['PConv_00'] = PConv(ch0, 64, bn=False, sample='down-7') #(1/2)^1
        enc_layers['PConv_01'] = PConv(64, 128, sample='down-5') #(1/2)^2
        enc_layers['PConv_02'] = PConv(128, 256, sample='down-5') #(1/2)^3
        enc_layers['PConv_03'] = PConv(256, 512, sample='down-3') #(1/2)^3
        for i in range(4,layer_size):     
            enc_layers['PConv_0'+str(i)] = PConv(512, 512, sample='down-3') #(1/2)^5
        
        #decoder layers
        for i in range(4,layer_size):
            dec_layers['PConv_1'+str(i)] = PConv(512*2, 512, activation=F.leaky_relu) 
        dec_layers['PConv_13'] = PConv(512+256, 256, activation=F.leaky_relu) 
        dec_layers['PConv_12'] = PConv(256+128, 128, activation=F.leaky_relu) 
        dec_layers['PConv_11'] = PConv(128+64, 64, activation=F.leaky_relu) 
        dec_layers['PConv_10'] = PConv(64+ch0, ch0, bn=False, activation=None)
        self.layer_size = layer_size
        self.enc_layers = enc_layers
        self.dec_layers = dec_layers
        super(PartialConvCompletion, self).__init__(**enc_layers,**dec_layers) 
Author: SeitaroShinagawa, Project: chainer-partial_convolution_image_inpainting, Lines: 26, Source: net.py
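
The size guard at the top of this constructor can be verified directly: with the defaults layer_size=7 and input_size=256, 2 ** (layer_size + 1) equals 256, so no error is raised.

# Quick check of the constructor's size guard with its default arguments.
layer_size, input_size = 7, 256
assert 2 ** (layer_size + 1) == input_size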

Example 5: __init__

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def __init__(self,ch0=3,input_size=256,layer_size=7): #input_size=512(2^9) in original paper but 256(2^8) in this implementation
        if 2**(layer_size+1) != input_size:
            raise AssertionError
        enc_layers = {}
        dec_layers = {}
        #encoder layers
        enc_layers['PConv_00'] = PConv(ch0, 64, bn=False, sample='down-8') #(1/2)^1
        enc_layers['PConv_01'] = PConv(64, 128, sample='down-4') #(1/2)^2
        enc_layers['PConv_02'] = PConv(128, 256, sample='down-4') #(1/2)^3
        enc_layers['PConv_03'] = PConv(256, 512, sample='down-4') #(1/2)^3
        for i in range(4,layer_size):     
            enc_layers['PConv_0'+str(i)] = PConv(512, 512, sample='down-4') #(1/2)^5
        
        #decoder layers
        for i in range(4,layer_size):
            dec_layers['PConv_1'+str(i)] = PConv(512*2, 512, activation=F.leaky_relu) 
        dec_layers['PConv_13'] = PConv(512+256, 256, activation=F.leaky_relu) 
        dec_layers['PConv_12'] = PConv(256+128, 128, activation=F.leaky_relu) 
        dec_layers['PConv_11'] = PConv(128+64, 64, activation=F.leaky_relu) 
        dec_layers['PConv_10'] = PConv(64+ch0, ch0, bn=False, activation=None)
        self.layer_size = layer_size
        self.enc_layers = enc_layers
        self.dec_layers = dec_layers
        super(PartialConvCompletion, self).__init__(**enc_layers,**dec_layers) 
Author: SeitaroShinagawa, Project: chainer-partial_convolution_image_inpainting, Lines: 26, Source: net_pre-trained.py

Example 6: __init__

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 alpha):
        super(DarkUnit, self).__init__()
        assert (out_channels % 2 == 0)
        mid_channels = out_channels // 2

        with self.init_scope():
            self.conv1 = conv1x1_block(
                in_channels=in_channels,
                out_channels=mid_channels,
                activation=partial(
                    F.leaky_relu,
                    slope=alpha))
            self.conv2 = conv3x3_block(
                in_channels=mid_channels,
                out_channels=out_channels,
                activation=partial(
                    F.leaky_relu,
                    slope=alpha)) 
Author: osmr, Project: imgclsmob, Lines: 23, Source: darknet53.py
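
The functools.partial pattern used here binds the slope argument once, producing a one-argument activation callable that the blocks can invoke like any other activation. A small stand-alone sketch (alpha is a hypothetical value):

from functools import partial

import numpy as np
import chainer.functions as F

alpha = 0.1                               # hypothetical negative-slope value
act = partial(F.leaky_relu, slope=alpha)  # one-argument activation callable
x = np.random.randn(2, 3).astype(np.float32)
print(act(x).shape)                       # (2, 3)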

Example 7: __call__

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def __call__(self, x, alpha=1.0):
        if self.depth > 0 and alpha < 1:
            h1 = self['b%d'%(7-self.depth)](x, True)
            x2 = F.average_pooling_2d(x, 2, 2)
            h2 = F.leaky_relu(self['b%d'%(7-self.depth+1)].fromRGB(x2))
            h = h2 * (1 - alpha) + h1 * alpha
        else:
            h = self['b%d'%(7-self.depth)](x, True)
                
        for i in range(self.depth):
            h = self['b%d'%(7-self.depth+1+i)](h)

        h = self.l(h)
        h = F.flatten(h)

        return h 
Author: joisino, Project: chainer-PGGAN, Lines: 18, Source: network.py
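
The blending step in this progressive-growing discriminator mixes the new high-resolution branch h1 with the downsampled old branch h2, weighted by alpha in [0, 1]. A minimal sketch with stand-in arrays:

import numpy as np

alpha = 0.3                     # fade-in progress in [0, 1]
h1 = np.ones((1, 4))            # stand-in for the new high-resolution branch
h2 = np.zeros((1, 4))           # stand-in for the downsampled old branch
h = h2 * (1 - alpha) + h1 * alpha
print(h)                        # [[0.3 0.3 0.3 0.3]]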

Example 8: process_trajectory

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def process_trajectory(self, l):
        """This is the time-dependent convolution operation, applied to a trajectory (in order).
        """
        shp = l.shape[0]
        # First dim is batchsize=1, then either 1 channel for 2d conv or n_feat channels
        # for 1d conv.
        l = F.expand_dims(l, axis=0)
        l = F.transpose(l, (0, 2, 1))
        l = self.traj_c0(l)
        l = F.leaky_relu(l)
        l = self.traj_c1(l)
        l = F.leaky_relu(l)
        l = F.sum(l, axis=(0, 2)) / l.shape[0] / l.shape[2]
        l = F.expand_dims(l, axis=0)
        l = self.traj_d0(l)
        l = F.tile(l, (shp, 1))
        return l 
Author: openai, Project: EPG, Lines: 19, Source: losses.py
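
The chain of expand_dims/transpose/sum/tile calls above is mostly shape bookkeeping. Below is a hypothetical shape trace with stand-in Convolution1D and Linear links; the sizes, kernel widths, and link names (traj_c0, traj_c1, traj_d0) only mirror the structure of the code and are not taken from the EPG repository's configuration.

import numpy as np
import chainer.functions as F
import chainer.links as L

T, n_feat, hid = 16, 8, 32                              # made-up sizes
traj_c0 = L.Convolution1D(n_feat, hid, ksize=3, pad=1)  # stand-ins for self.traj_c0 / traj_c1 / traj_d0
traj_c1 = L.Convolution1D(hid, hid, ksize=3, pad=1)
traj_d0 = L.Linear(hid, hid)

l = np.random.randn(T, n_feat).astype(np.float32)       # one trajectory: (time, features)
h = F.transpose(F.expand_dims(l, axis=0), (0, 2, 1))    # (1, n_feat, T)
h = F.leaky_relu(traj_c0(h))
h = F.leaky_relu(traj_c1(h))
b, _, t = h.shape
h = F.sum(h, axis=(0, 2)) / (b * t)                     # mean over batch and time -> (hid,)
h = traj_d0(F.expand_dims(h, axis=0))                   # (1, hid)
h = F.tile(h, (T, 1))                                   # broadcast back to one row per timestep
print(h.shape)                                          # (16, 32)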

Example 9: __init__

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def __init__(self, in_ch):
        w = chainer.initializers.Normal(0.02)
        super(Encoder, self).__init__()
        with self.init_scope():
            self.c0 = L.Convolution2D(in_ch, 64, 3, 1, 1, initialW=w)
            self.c1 = ConvBNR(64, 128, use_bn=True, sample='down',
                              activation=F.leaky_relu, dropout=False)
            self.c2 = ConvBNR(128, 256, use_bn=True, sample='down',
                              activation=F.leaky_relu, dropout=False)
            self.c3 = ConvBNR(256, 512, use_bn=True, sample='down',
                              activation=F.leaky_relu, dropout=False)
            self.c4 = ConvBNR(512, 512, use_bn=True, sample='down',
                              activation=F.leaky_relu, dropout=False)
            self.c5 = ConvBNR(512, 512, use_bn=True, sample='down',
                              activation=F.leaky_relu, dropout=False)
            self.c6 = ConvBNR(512, 512, use_bn=True, sample='down',
                              activation=F.leaky_relu, dropout=False)
            self.c7 = ConvBNR(512, 512, use_bn=True, sample='down',
                              activation=F.leaky_relu, dropout=False) 
Author: chainer, Project: chainer, Lines: 21, Source: net.py

Example 10: test_str

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def test_str(self):
        self.assertEqual(str(chainer.Sequential()), 'Sequential()')

        expected = '''\
  (0): Sequential(
    (0): Linear(in_size=None, out_size=3, nobias=False),
    (1): Linear(in_size=3, out_size=2, nobias=False),
  ),
  (1): Linear(in_size=2, out_size=3, nobias=False),
  (2): lambda x: functions.leaky_relu(x, slope=0.2),
'''
        layers = [
            self.s1,
            self.l3,
            lambda x: functions.leaky_relu(x, slope=0.2),
        ]
        if six.PY3:
            # In Python2, it fails because of different id of the function.
            layer = functools.partial(functions.leaky_relu, slope=0.2)
            layers.append(layer)
            expected += '  (3): %s,\n' % layer
        expected = 'Sequential(\n%s)' % expected
        s = chainer.Sequential(*layers)
        self.assertEqual(str(s), expected) 
Author: chainer, Project: chainer, Lines: 26, Source: test_sequential.py
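
The lambda-wrapped leaky_relu in this test is the usual way to put a parameterized activation into chainer.Sequential. A minimal runnable sketch (layer sizes are made up):

import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L

model = chainer.Sequential(
    L.Linear(None, 3),                      # input size inferred on first call
    lambda x: F.leaky_relu(x, slope=0.2),
    L.Linear(3, 2),
)
x = np.random.randn(4, 5).astype(np.float32)
print(model(x).shape)                       # (4, 2)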

Example 11: __call__

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def __call__(self, x):
        N = x.data.shape[0]
        h = F.leaky_relu(self.c0_0(x))
        h = F.leaky_relu(self.bn0_1(self.c0_1(h)))
        h = F.leaky_relu(self.bn1_0(self.c1_0(h)))
        h = F.leaky_relu(self.bn1_1(self.c1_1(h)))
        h = F.leaky_relu(self.bn2_0(self.c2_0(h)))
        h = F.leaky_relu(self.bn2_1(self.c2_1(h)))
        feature = F.reshape(F.leaky_relu(self.c3_0(h)), (N, 8192))
        m = F.reshape(self.md(feature), (N, self.B * self.C, 1))
        m0 = F.broadcast_to(m, (N, self.B * self.C, N))
        m1 = F.transpose(m0, (2, 1, 0))
        d = F.absolute(F.reshape(m0 - m1, (N, self.B, self.C, N)))
        d = F.sum(F.exp(-F.sum(d, axis=2)), axis=2) - 1
        h = F.concat([feature, d])

        return self.l4(h) 
Author: pfnet-research, Project: chainer-gan-lib, Lines: 19, Source: net.py

Example 12: __init__

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def __init__(self, in_ch, base=64, extensive_layers=8) -> None:
        super().__init__()
        w = chainer.initializers.Normal(0.02)
        with self.init_scope():
            if extensive_layers > 0:
                self.c0 = L.Convolution2D(in_ch, base * 1, 3, 1, 1, initialW=w)
            else:
                self.c0 = L.Convolution2D(in_ch, base * 1, 1, 1, 0, initialW=w)

            _choose = lambda i: 'down' if i < extensive_layers else 'same'
            self.c1 = CBR(base * 1, base * 2, bn=True, sample=_choose(1), activation=F.leaky_relu, dropout=False)
            self.c2 = CBR(base * 2, base * 4, bn=True, sample=_choose(2), activation=F.leaky_relu, dropout=False)
            self.c3 = CBR(base * 4, base * 8, bn=True, sample=_choose(3), activation=F.leaky_relu, dropout=False)
            self.c4 = CBR(base * 8, base * 8, bn=True, sample=_choose(4), activation=F.leaky_relu, dropout=False)
            self.c5 = CBR(base * 8, base * 8, bn=True, sample=_choose(5), activation=F.leaky_relu, dropout=False)
            self.c6 = CBR(base * 8, base * 8, bn=True, sample=_choose(6), activation=F.leaky_relu, dropout=False)
            self.c7 = CBR(base * 8, base * 8, bn=True, sample=_choose(7), activation=F.leaky_relu, dropout=False) 
Author: Hiroshiba, Project: become-yukarin, Lines: 19, Source: sr_model.py

Example 13: __init__

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def __init__(self, in_ch, base=64, extensive_layers=8) -> None:
        super().__init__()
        w = chainer.initializers.Normal(0.02)
        with self.init_scope():
            if extensive_layers > 0:
                self.c0 = Convolution1D(in_ch, base * 1, 3, 1, 1, initialW=w)
            else:
                self.c0 = Convolution1D(in_ch, base * 1, 1, 1, 0, initialW=w)

            _choose = lambda i: 'down' if i < extensive_layers else 'same'
            self.c1 = CBR(base * 1, base * 2, bn=True, sample=_choose(1), activation=F.leaky_relu, dropout=False)
            self.c2 = CBR(base * 2, base * 4, bn=True, sample=_choose(2), activation=F.leaky_relu, dropout=False)
            self.c3 = CBR(base * 4, base * 8, bn=True, sample=_choose(3), activation=F.leaky_relu, dropout=False)
            self.c4 = CBR(base * 8, base * 8, bn=True, sample=_choose(4), activation=F.leaky_relu, dropout=False)
            self.c5 = CBR(base * 8, base * 8, bn=True, sample=_choose(5), activation=F.leaky_relu, dropout=False)
            self.c6 = CBR(base * 8, base * 8, bn=True, sample=_choose(6), activation=F.leaky_relu, dropout=False)
            self.c7 = CBR(base * 8, base * 8, bn=True, sample=_choose(7), activation=F.leaky_relu, dropout=False) 
Author: Hiroshiba, Project: become-yukarin, Lines: 19, Source: model.py

Example 14: __call__

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def __call__(self, x, t=None):
        self.clear()
        #x = Variable(x_data)  # x_data.astype(np.float32)

        h = F.leaky_relu(self.conv1(x), slope=0.1)
        h = F.leaky_relu(self.conv2(h), slope=0.1)
        h = F.leaky_relu(self.conv3(h), slope=0.1)
        h = F.leaky_relu(self.conv4(h), slope=0.1)
        h = F.leaky_relu(self.conv5(h), slope=0.1)
        h = F.leaky_relu(self.conv6(h), slope=0.1)
        h = F.clipped_relu(self.conv7(h), z=1.0)
        if self.train:
            self.loss = F.mean_squared_error(h, t)
            return self.loss
        else:
            return h 
Author: corochann, Project: SeRanet, Lines: 18, Source: basic_cnn_tail.py
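
A quick check of the slope argument used throughout this network: for negative inputs leaky_relu returns slope * x, and for non-negative inputs it returns x unchanged.

import numpy as np
import chainer.functions as F

x = np.array([-2.0, -0.5, 0.0, 1.5], dtype=np.float32)
print(F.leaky_relu(x, slope=0.1).array)   # approximately [-0.2  -0.05  0.  1.5]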

Example 15: __call__

# Required module: from chainer import functions [as alias]
# Or: from chainer.functions import leaky_relu [as alias]
def __call__(self, w, x=None, add_noise=False):
        h = x
        batch_size, _ = w.shape
        if self.upsample:
            assert h is not None
            if self.blur_k is None:
                k = np.asarray([1, 2, 1]).astype('f')
                k = k[:, None] * k[None, :]
                k = k / np.sum(k)
                self.blur_k = self.xp.asarray(k)[None, None, :]
            h = self.c0(upscale2x(h))
            if self.enable_blur:
                h = blur(h, self.blur_k)
        else:
            h = F.broadcast_to(self.W, (batch_size, self.ch_in, 4, 4))
      
        # h should be (batch, ch, size, size)
        if add_noise:
            h = self.n0(h)

        h = F.leaky_relu(self.b0(h))
        h = self.s0(w, h)

        h = self.c1(h)
        if add_noise:
            h = self.n1(h)

        h = F.leaky_relu(self.b1(h))
        h = self.s1(w, h)
        return h 
Author: pfnet-research, Project: chainer-stylegan, Lines: 32, Source: net.py
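
The blur kernel constructed above is just the normalized outer product of [1, 2, 1] with itself, i.e. a 3x3 binomial smoothing kernel; written out:

import numpy as np

k = np.asarray([1, 2, 1]).astype('f')
k = k[:, None] * k[None, :]   # outer product -> 3x3
k = k / np.sum(k)             # normalize to sum to 1
print(k)
# [[0.0625 0.125  0.0625]
#  [0.125  0.25   0.125 ]
#  [0.0625 0.125  0.0625]]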


Note: The chainer.functions.leaky_relu method examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. For distribution and use, please refer to the license of the corresponding project. Do not reproduce without permission.