

Python functions.relu Method Code Examples

This article collects typical usage examples of the Python method chainer.functions.relu. If you are wondering how to call functions.relu in practice, the curated examples below may help. You can also explore further usage examples from its containing module, chainer.functions.


The following presents 15 code examples of the functions.relu method, ordered by popularity by default.
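Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of what F.relu does: it replaces the negative entries of its input with zero and returns a chainer.Variable.

import numpy as np
import chainer.functions as F

# F.relu accepts a NumPy array (or a chainer.Variable) and returns a Variable
# in which every negative entry has been clamped to zero.
x = np.array([[-1.5, 0.0, 2.0]], dtype=np.float32)
y = F.relu(x)
print(y.array)  # [[0. 0. 2.]]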

Example 1: __init__

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def __init__(self, in_channels, out_channels, ksize=3, pad=1, activation=F.relu, mode='none', bn=True, dr=None):
        super(ResBlock, self).__init__()
        initializer = chainer.initializers.GlorotUniform()
        initializer_sc = chainer.initializers.GlorotUniform()
        self.activation = activation
        self.mode = _downsample if mode == 'down' else _upsample if mode == 'up' else None
        self.learnable_sc = in_channels != out_channels
        self.dr = dr
        self.bn = bn
        with self.init_scope():
            self.c1 = L.Convolution1D(in_channels,  out_channels, ksize=ksize, pad=pad, initialW=initializer, nobias=bn)
            self.c2 = L.Convolution1D(out_channels, out_channels, ksize=ksize, pad=pad, initialW=initializer, nobias=bn)
            if bn:
                self.b1 = L.BatchNormalization(out_channels)
                self.b2 = L.BatchNormalization(out_channels)
            if self.learnable_sc:
                self.c_sc = L.Convolution2D(in_channels, out_channels, ksize=1, pad=0, initialW=initializer_sc) 
Developer: pstuvwx | Project: Deep_VoiceChanger | Lines: 19 | Source file: block_1d.py
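The snippet above only defines the layers; the matching forward pass is not part of it. The following is a hypothetical __call__ (an assumption for illustration, not code from Deep_VoiceChanger) showing where the activation=F.relu argument would typically be applied in such a residual block; the dr (dropout) and mode (up/down-sampling) options are ignored to keep the sketch short.

def __call__(self, x):
        # Hypothetical residual forward pass using the links defined in __init__ above.
        h = self.c1(x)
        if self.bn:
            h = self.b1(h)
        h = self.activation(h)
        h = self.c2(h)
        if self.bn:
            h = self.b2(h)
        h = self.activation(h)
        sc = self.c_sc(x) if self.learnable_sc else x
        return h + sc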

Example 2: __init__

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def __init__(self, n_actions, max_episode_steps):
        super().__init__()
        with self.init_scope():
            self.embed = L.EmbedID(max_episode_steps + 1, 3136)
            self.image2hidden = chainerrl.links.Sequence(
                L.Convolution2D(None, 32, 8, stride=4),
                F.relu,
                L.Convolution2D(None, 64, 4, stride=2),
                F.relu,
                L.Convolution2D(None, 64, 3, stride=1),
                functools.partial(F.reshape, shape=(-1, 3136)),
            )
            self.hidden2out = chainerrl.links.Sequence(
                L.Linear(None, 512),
                F.relu,
                L.Linear(None, n_actions),
                DiscreteActionValue,
            ) 
Developer: chainer | Project: chainerrl | Lines: 20 | Source file: train_dqn_batch_grasping.py

Example 3: make_q_func

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def make_q_func(self, env):
        obs_size = env.observation_space.low.size
        hidden_size = 64
        return iqn.StatelessRecurrentImplicitQuantileQFunction(
            psi=chainerrl.links.StatelessRecurrentSequential(
                L.Linear(obs_size, hidden_size),
                F.relu,
                L.NStepRNNTanh(1, hidden_size, hidden_size, 0),
            ),
            phi=chainerrl.links.Sequence(
                chainerrl.agents.iqn.CosineBasisLinear(32, hidden_size),
                F.relu,
            ),
            f=L.Linear(hidden_size, env.action_space.n,
                       initialW=chainer.initializers.LeCunNormal(1e-1)),
        ) 
Developer: chainer | Project: chainerrl | Lines: 18 | Source file: test_iqn.py

Example 4: __call__

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def __call__(self, state):
        h = state
        for layer in self.hidden_layers:
            h = F.relu(layer(h))
        v = self.v(h)
        mu = self.mu(h)

        if self.scale_mu:
            mu = scale_by_tanh(mu, high=self.action_space.high,
                               low=self.action_space.low)

        mat_diag = F.exp(self.mat_diag(h))
        if hasattr(self, 'mat_non_diag'):
            mat_non_diag = self.mat_non_diag(h)
            tril = lower_triangular_matrix(mat_diag, mat_non_diag)
            mat = F.matmul(tril, tril, transb=True)
        else:
            mat = F.expand_dims(mat_diag ** 2, axis=2)
        return QuadraticActionValue(
            mu, mat, v, min_action=self.action_space.low,
            max_action=self.action_space.high) 
Developer: chainer | Project: chainerrl | Lines: 23 | Source file: state_q_functions.py
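In this example the quadratic term of the action value is built as mat = tril @ tril^T from a lower-triangular factor, which guarantees a positive semi-definite matrix. A tiny standalone NumPy illustration of that property (not part of the original code):

import numpy as np

# Any product L @ L.T with a lower-triangular L is symmetric positive
# semi-definite, which the quadratic advantage term of a NAF-style
# Q-function requires.
L_tri = np.array([[1.0, 0.0],
                  [0.5, 2.0]])
P = L_tri @ L_tri.T
print(np.all(np.linalg.eigvalsh(P) >= 0))  # True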

Example 5: __call__

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def __call__(self, x, t):
        h = F.relu(self.conv1_1(x))
        h = F.relu(self.conv1_2(h))
        h = F.max_pooling_2d(h, 2, 2)
        h = F.relu(self.conv2_1(h))
        h = F.relu(self.conv2_2(h))
        h = F.max_pooling_2d(h, 2, 2)
        h = F.relu(self.conv3_1(h))
        h = F.relu(self.conv3_2(h))
        h = F.relu(self.conv3_3(h))
        h = F.max_pooling_2d(h, 2, 2)
        h = F.relu(self.conv4_1(h))
        h = F.relu(self.conv4_2(h))
        h = F.relu(self.conv4_3(h))
        h = F.max_pooling_2d(h, 2, 2)
        h = F.relu(self.conv5_1(h))
        h = F.relu(self.conv5_2(h))
        h = F.relu(self.conv5_3(h))
        h = F.max_pooling_2d(h, 2, 2)
        h = F.dropout(F.relu(self.fc6(h)), ratio=0.5, train=self.train)
        h = F.dropout(F.relu(self.fc7(h)), ratio=0.5, train=self.train)
        h = self.score_fr(h)
        h = self.upsample(h)

        return h 
Developer: mitmul | Project: ssai-cnn | Lines: 27 | Source file: FCN_32s.py
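Note that this example (and several of the later ones) targets Chainer v1, where F.dropout accepted a train argument. From Chainer v2 onward that argument was removed and train/test behaviour is controlled through the global configuration. A minimal sketch of the modern equivalent:

import numpy as np
import chainer
import chainer.functions as F

x = np.ones((1, 4), dtype=np.float32)
h = F.dropout(F.relu(x), ratio=0.5)             # dropout is active: chainer.config.train is True by default
with chainer.using_config('train', False):      # inference mode
    h_eval = F.dropout(F.relu(x), ratio=0.5)    # here dropout is a no-op
print(h.array, h_eval.array)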

Example 6: __call__

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def __call__(self, x, t):
        h = F.relu(self.conv1(x))
        h = F.max_pooling_2d(h, 2, 1)
        h = F.relu(self.conv2(h))
        h = F.relu(self.conv3(h))
        h = F.relu(self.fc4(h))
        h = self.fc5(h)
        h = F.reshape(h, (x.data.shape[0], 3, 16, 16))
        h = self.channelwise_inhibited(h)

        if self.train:
            self.loss = F.softmax_cross_entropy(h, t, normalize=False)
            return self.loss
        else:
            self.pred = F.softmax(h)
            return self.pred 
Developer: mitmul | Project: ssai-cnn | Lines: 18 | Source file: MnihCNN_rcis.py

Example 7: __call__

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def __call__(self, x, t):
        h = F.relu(self.conv1(x))
        h = F.max_pooling_2d(h, 2, 1)
        h = F.relu(self.conv2(h))
        h = F.relu(self.conv3(h))
        h = F.dropout(F.relu(self.fc4(h)), train=self.train)
        h = self.fc5(h)
        h = F.reshape(h, (x.data.shape[0], 3, 16, 16))
        h = self.channelwise_inhibited(h)

        if self.train:
            self.loss = F.softmax_cross_entropy(h, t, normalize=False)
            return self.loss
        else:
            self.pred = F.softmax(h)
            return self.pred 
Developer: mitmul | Project: ssai-cnn | Lines: 18 | Source file: MnihCNN_cis.py

Example 8: __init__

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def __init__(self, obs_size, n_actions, n_hidden_channels=[1024,256]):
        super(QFunction,self).__init__()
        net = []
        inpdim = obs_size
        for i,n_hid in enumerate(n_hidden_channels):
            net += [ ('l{}'.format(i), L.Linear( inpdim, n_hid ) ) ]
            net += [ ('norm{}'.format(i), L.BatchNormalization( n_hid ) ) ]
            net += [ ('_act{}'.format(i), F.relu ) ]
            inpdim = n_hid

        net += [('output', L.Linear( inpdim, n_actions) )]

        with self.init_scope():
            for n in net:
                if not n[0].startswith('_'):
                    setattr(self, n[0], n[1])

        self.forward = net 
Developer: endgameinc | Project: gym-malware | Lines: 20 | Source file: train_agent_chainer.py
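The snippet registers the linear and batch-normalization links and keeps the full (name, callable) list in self.forward, but the forward computation itself is not shown. A plausible sketch of how that list would be consumed (an assumption for illustration, not code from gym-malware); the F.relu entries can be called like any other element:

def __call__(self, x):
        h = x
        for name, f in self.forward:
            h = f(h)   # links and plain functions such as F.relu are both callables
        return h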

Example 9: __init__

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def __init__(self, scale=1.0, activation_fn=F.relu):
        super(Block35, self).__init__()
        with self.init_scope():
            self.Branch_0 = TFLoadableChain()
            with self.Branch_0.init_scope():
                self.Branch_0.Conv2d_1x1 = ConvBnRelu(32, 1)

            self.Branch_1 = TFLoadableChain()
            with self.Branch_1.init_scope():
                self.Branch_1.Conv2d_0a_1x1 = ConvBnRelu(32, 1)
                self.Branch_1.Conv2d_0b_3x3 = ConvBnRelu(32, 3, pad=1)

            self.Branch_2 = TFLoadableChain()
            with self.Branch_2.init_scope():
                self.Branch_2.Conv2d_0a_1x1 = ConvBnRelu(32, 1)
                self.Branch_2.Conv2d_0b_3x3 = ConvBnRelu(48, 3, pad=1)
                self.Branch_2.Conv2d_0c_3x3 = ConvBnRelu(64, 3, pad=1)

                # NOTE: Conv2d_1x1 is built at the first iteration

        self.scale = scale
        self.activation_fn = activation_fn 
Developer: pfnet-research | Project: nips17-adversarial-attack | Lines: 24 | Source file: inception_resnet_v2.py

Example 10: __call__

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def __call__(self, x):
        h = F.relu(self.conv1_1(x))
        h = F.relu(self.conv1_2(h))
        h = F.max_pooling_2d(h, 2, stride=2)
        h = F.relu(self.conv2_1(h))
        h = F.relu(self.conv2_2(h))
        h = F.max_pooling_2d(h, 2, stride=2)
        h = F.relu(self.conv3_1(h))
        h = F.relu(self.conv3_2(h))
        h = F.max_pooling_2d(h, 2, stride=2)
        h = F.relu(self.conv4_1(h))
        h = F.relu(self.conv4_2(h))
        h = F.spatial_pyramid_pooling_2d(h, 3, F.MaxPooling2D)
        h = F.tanh(self.fc4(h))
        h = F.dropout(h, ratio=.5, train=self.train)
        h = F.tanh(self.fc5(h))
        h = F.dropout(h, ratio=.5, train=self.train)
        h = self.fc6(h)
        return h 
Developer: oyam | Project: Semantic-Segmentation-using-Adversarial-Networks | Lines: 21 | Source file: spp_discriminator.py

Example 11: forward

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def forward(self, x, t):
        # def forward(self, x):
        h = F.max_pooling_2d(F.local_response_normalization(
            F.relu(self.conv1(x))), 3, stride=2)
        h = F.max_pooling_2d(F.local_response_normalization(
            F.relu(self.conv2(h))), 3, stride=2)
        h = F.relu(self.conv3(h))
        h = F.relu(self.conv4(h))
        h = F.max_pooling_2d(F.relu(self.conv5(h)), 3, stride=2)
        h = F.dropout(F.relu(self.fc6(h)))
        h = F.dropout(F.relu(self.fc7(h)))
        h = self.fc8(h)

        loss = F.softmax_cross_entropy(h, t)
        #loss = h

        # chainer.report({'loss': loss, 'accuracy': F.accuracy(h, t)}, self)
        return loss

# from https://github.com/chainer/chainer/blob/master/examples/imagenet/alex.py 
Developer: pfnet-research | Project: chainer-compiler | Lines: 22 | Source file: Alex_with_loss.py

Example 12: forward

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def forward(self, x):
        """Computes the output of the Inception module.
        Args:
            x (~chainer.Variable): Input variable.
        Returns:
            Variable: Output variable. Its array has the same spatial size and
            the same minibatch size as the input array. The channel dimension
            has size ``out1 + out3 + out5 + proj_pool``.
        """
        out1 = self.conv1(x)
        out3 = self.conv3(relu.relu(self.proj3(x)))
        out5 = self.conv5(relu.relu(self.proj5(x)))
        pool = self.projp(F.max_pooling_2d(
            x, 3, stride=1, pad=1))

        y = relu.relu(concat.concat((out1, out3, out5, pool), axis=1))
        return y 
Developer: pfnet-research | Project: chainer-compiler | Lines: 19 | Source file: GoogleNet_with_loss.py
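Unlike the other examples, this one calls relu.relu (and concat.concat) rather than F.relu, so the module presumably imports the activation submodule directly. The two spellings refer to the same function; a quick standalone check (the import line is an assumption based on the call sites):

import numpy as np
import chainer.functions as F
from chainer.functions.activation import relu

x = np.array([-1.0, 3.0], dtype=np.float32)
# chainer.functions re-exports the same relu function, so the results match.
assert relu.relu(x).array.tolist() == F.relu(x).array.tolist()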

Example 13: __call__

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def __call__(self, x, train):
        param_num = 0
        for name, f in self.forward:
            if 'conv1' in name:
                x = getattr(self, name)(x)
                param_num += (f.W.shape[0]*f.W.shape[2]*f.W.shape[3]*f.W.shape[1]+f.W.shape[0])
            elif 'bn1' in name:
                x = getattr(self, name)(x, not train)
                param_num += x.data.shape[1]*2
        return (F.relu(x), param_num)


# [(CONV -> Batch -> ReLU -> CONV -> Batch) + (x)] 
Developer: sg-nm | Project: cgp-cnn | Lines: 15 | Source file: cnn_model.py
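The parameter count in this example comes from the convolution weight shape: in Chainer a Convolution2D weight has shape (out_channels, in_channels, kh, kw), so the expression amounts to out*in*kh*kw weights plus out biases, while batch normalization contributes two parameters (gamma and beta) per channel. A standalone check with assumed layer sizes (not taken from cgp-cnn):

import chainer.links as L

conv = L.Convolution2D(3, 16, ksize=3)            # in_channels=3, out_channels=16, 3x3 kernel
out_c, in_c, kh, kw = conv.W.shape
print(out_c * in_c * kh * kw + out_c)             # 16*3*3*3 + 16 = 448 parameters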

Example 14: predict

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def predict(self, x):
        h = F.max_pooling_2d(F.relu(
            F.local_response_normalization(self.conv1(x))), 3, stride=2)
        h = F.max_pooling_2d(F.relu(
            F.local_response_normalization(self.conv2(h))), 3, stride=2)
        h = F.relu(self.conv3(h))
        h = F.relu(self.conv4(h))
        h = F.max_pooling_2d(F.relu(self.conv5(h)), 3, stride=2)
        h = F.dropout(F.relu(self.fc6(h)), train=self.train)
        h = F.dropout(F.relu(self.fc7(h)), train=self.train)
        h = F.dropout(F.relu(self.fc8(h)), train=self.train)
        h = self.fc9(h)

        return h 
Developer: icoxfog417 | Project: mlimages | Lines: 16 | Source file: alex.py

Example 15: __call__

# Required import: from chainer import functions [as alias]
# Alternatively: from chainer.functions import relu [as alias]
def __call__(self, x):
        h1 = F.relu(self.l1(x))
        h2 = F.relu(self.l2(h1))
        return self.l3(h2) 
Developer: floydhub | Project: dockerfiles | Lines: 6 | Source file: mnist.2.py
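For completeness, here is a self-contained sketch showing how a small network like example 15 is built and called; the layer sizes and the surrounding chainer.Chain wrapper are assumptions, since the original snippet only shows the forward method.

import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L

class MLP(chainer.Chain):
    """Assumed wrapper for the three-layer network of example 15."""
    def __init__(self, n_units=100, n_out=10):
        super(MLP, self).__init__()
        with self.init_scope():
            self.l1 = L.Linear(None, n_units)  # input size inferred on the first call
            self.l2 = L.Linear(None, n_units)
            self.l3 = L.Linear(None, n_out)

    def __call__(self, x):
        h1 = F.relu(self.l1(x))
        h2 = F.relu(self.l2(h1))
        return self.l3(h2)

x = np.zeros((2, 784), dtype=np.float32)   # two dummy MNIST-sized inputs
y = MLP()(x)
print(y.shape)                             # (2, 10)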


Note: The chainer.functions.relu examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers, and copyright remains with the original authors; refer to each project's license before using or redistributing the code. Do not repost without permission.