

Python functions.clipped_relu Method Code Examples

This article collects typical usage examples of the Python method chainer.functions.clipped_relu. If you are wondering what functions.clipped_relu does, how to call it, or where to find real-world examples, the curated code samples below may help. You can also browse further usage examples from the chainer.functions module.


The following presents 8 code examples of the functions.clipped_relu method, sorted by popularity by default.
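
Before the examples, here is a minimal standalone sketch (not taken from any of the projects below) of what clipped_relu computes: the element-wise capped ReLU min(max(0, x), z). The cap values 1.0 and 6.0 are chosen only to mirror the examples that follow.

import numpy as np
from chainer import functions as F

x = np.array([-2.0, -0.5, 0.3, 0.8, 5.0], dtype=np.float32)

# clipped_relu(x, z) = min(max(0, x), z), applied element-wise
y1 = F.clipped_relu(x, z=1.0)   # cap at 1.0, as in the SeRanet examples below
y6 = F.clipped_relu(x, z=6.0)   # cap at 6.0 ("ReLU6"), as in the chainercv example

print(y1.array)  # [0.  0.  0.3 0.8 1. ]
print(y6.array)  # [0.  0.  0.3 0.8 5. ]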

Example 1: __call__

# Required import: from chainer import functions [as alias]
# or: from chainer.functions import clipped_relu [as alias]
def __call__(self, x, t=None):
        self.clear()
        #x = Variable(x_data)  # x_data.astype(np.float32)

        h = F.leaky_relu(self.conv1(x), slope=0.1)
        h = F.leaky_relu(self.conv2(h), slope=0.1)
        h = F.leaky_relu(self.conv3(h), slope=0.1)
        h = F.leaky_relu(self.conv4(h), slope=0.1)
        h = F.leaky_relu(self.conv5(h), slope=0.1)
        h = F.leaky_relu(self.conv6(h), slope=0.1)
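        # final layer: clipped_relu with z=1.0 clamps the output to the range [0, 1]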
        h = F.clipped_relu(self.conv7(h), z=1.0)
        if self.train:
            self.loss = F.mean_squared_error(h, t)
            return self.loss
        else:
            return h 
Author: corochann | Project: SeRanet | Lines: 18 | Source: basic_cnn_tail.py

Example 2: forward

# Required import: from chainer import functions [as alias]
# or: from chainer.functions import clipped_relu [as alias]
def forward(self, inputs, device):
        x, = inputs
        y = functions.clipped_relu(x, self.z)
        return y, 
Author: chainer | Project: chainer | Lines: 6 | Source: test_clipped_relu.py

Example 3: __call__

# Required import: from chainer import functions [as alias]
# or: from chainer.functions import clipped_relu [as alias]
def __call__(self, x, t=None):
        self.clear()

        h = F.leaky_relu(self.conv1(x), slope=0.1)
        h = F.leaky_relu(self.conv2(h), slope=0.1)
        #h = F.leaky_relu(self.conv3(h), slope=0.1)
        #h = F.leaky_relu(self.conv4(h), slope=0.1)
        h = F.clipped_relu(self.conv3(h), z=1.0)
        if self.train:
            self.loss = F.mean_squared_error(h, t)
            return self.loss
        else:
            return h 
Author: corochann | Project: SeRanet | Lines: 15 | Source: basic_cnn_small.py

Example 4: __call__

# Required import: from chainer import functions [as alias]
# or: from chainer.functions import clipped_relu [as alias]
def __call__(self, x, t=None):
        self.clear()
        h1 = F.leaky_relu(self.conv1(x), slope=0.1)
        h1 = F.leaky_relu(self.conv2(h1), slope=0.1)
        h1 = F.leaky_relu(self.conv3(h1), slope=0.1)

        h2 = self.seranet_v1_crbm(x)
        # Fusion
        h12 = F.concat((h1, h2), axis=1)

        lu = F.leaky_relu(self.convlu6(h12), slope=0.1)
        lu = F.leaky_relu(self.convlu7(lu), slope=0.1)
        lu = F.leaky_relu(self.convlu8(lu), slope=0.1)
        ru = F.leaky_relu(self.convru6(h12), slope=0.1)
        ru = F.leaky_relu(self.convru7(ru), slope=0.1)
        ru = F.leaky_relu(self.convru8(ru), slope=0.1)
        ld = F.leaky_relu(self.convld6(h12), slope=0.1)
        ld = F.leaky_relu(self.convld7(ld), slope=0.1)
        ld = F.leaky_relu(self.convld8(ld), slope=0.1)
        rd = F.leaky_relu(self.convrd6(h12), slope=0.1)
        rd = F.leaky_relu(self.convrd7(rd), slope=0.1)
        rd = F.leaky_relu(self.convrd8(rd), slope=0.1)

        # Splice
        h = CF.splice(lu, ru, ld, rd)

        h = F.leaky_relu(self.conv9(h), slope=0.1)
        h = F.leaky_relu(self.conv10(h), slope=0.1)
        h = F.leaky_relu(self.conv11(h), slope=0.1)
        h = F.clipped_relu(self.conv12(h), z=1.0)
        if self.train:
            self.loss = F.mean_squared_error(h, t)
            return self.loss
        else:
            return h 
Author: corochann | Project: SeRanet | Lines: 37 | Source: seranet_v1.py

Example 5: __call__

# Required import: from chainer import functions [as alias]
# or: from chainer.functions import clipped_relu [as alias]
def __call__(self, x, t=None):
        self.clear()

        h = F.leaky_relu(self.conv1(x), slope=0.1)
        h = F.leaky_relu(self.conv2(h), slope=0.1)
        h = F.leaky_relu(self.conv3(h), slope=0.1)
        h = F.leaky_relu(self.conv4(h), slope=0.1)
        h = F.leaky_relu(self.conv5(h), slope=0.1)
        h = F.leaky_relu(self.conv6(h), slope=0.1)
        h = F.clipped_relu(self.conv7(h), z=1.0)
        if self.train:
            self.loss = F.mean_squared_error(h, t)
            return self.loss
        else:
            return h 
Author: corochann | Project: SeRanet | Lines: 17 | Source: basic_cnn_head.py

Example 6: __call__

# Required import: from chainer import functions [as alias]
# or: from chainer.functions import clipped_relu [as alias]
def __call__(self, x):
        for nth in range(self.layers):
            if getattr(self, 'P' + str(nth)) is None:
                setattr(self, 'P' + str(nth), variable.Variable(
                    self.xp.zeros(self.sizes[nth], dtype=x.data.dtype),
                    volatile='auto'))

        E = [None] * self.layers
        for nth in range(self.layers):
            if nth == 0:
                E[nth] = F.concat((F.relu(x - getattr(self, 'P' + str(nth))),
                                  F.relu(getattr(self, 'P' + str(nth)) - x)))
            else:
                A = F.max_pooling_2d(F.relu(getattr(self, 'ConvA' + str(nth))(E[nth - 1])), 2, stride = 2)
                E[nth] = F.concat((F.relu(A - getattr(self, 'P' + str(nth))),
                                  F.relu(getattr(self, 'P' + str(nth)) - A)))

        R = [None] * self.layers
        for nth in reversed(range(self.layers)):
            if nth == self.layers - 1:
                R[nth] = getattr(self, 'ConvLSTM' + str(nth))((E[nth],))
            else:
                upR = F.unpooling_2d(R[nth + 1], 2, stride = 2, cover_all=False)
                R[nth] = getattr(self, 'ConvLSTM' + str(nth))((E[nth], upR))

            if nth == 0:
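                # lowest layer: the prediction is clamped to [0, 1] via clipped_relu(..., 1.0)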
                setattr(self, 'P' + str(nth), F.clipped_relu(getattr(self, 'ConvP' + str(nth))(R[nth]), 1.0))
            else:
                setattr(self, 'P' + str(nth), F.relu(getattr(self, 'ConvP' + str(nth))(R[nth])))
        
        return self.P0 
Author: quadjr | Project: PredNet | Lines: 33 | Source: net.py

Example 7: __init__

# Required import: from chainer import functions [as alias]
# or: from chainer.functions import clipped_relu [as alias]
def __init__(self,
                 in_channels,
                 out_channels,
                 expansion_size=expand_input_by_factor(6),
                 expand_pad='SAME',
                 depthwise_stride=1,
                 depthwise_ksize=3,
                 depthwise_pad='SAME',
                 project_pad='SAME',
                 initialW=None,
                 bn_kwargs={}):
        super(ExpandedConv2D, self).__init__()
        with self.init_scope():
            if callable(expansion_size):
                self.inner_size = expansion_size(num_inputs=in_channels)
            else:
                self.inner_size = expansion_size

            def relu6(x):
                return clipped_relu(x, 6.)
            if self.inner_size > in_channels:
                self.expand = TFConv2DBNActiv(
                    in_channels,
                    self.inner_size,
                    ksize=1,
                    pad=expand_pad,
                    nobias=True,
                    initialW=initialW,
                    bn_kwargs=bn_kwargs,
                    activ=relu6)
                depthwise_in_channels = self.inner_size
            else:
                depthwise_in_channels = in_channels
            self.depthwise = TFConv2DBNActiv(
                depthwise_in_channels,
                self.inner_size,
                ksize=depthwise_ksize,
                stride=depthwise_stride,
                pad=depthwise_pad,
                nobias=True,
                initialW=initialW,
                groups=depthwise_in_channels,
                bn_kwargs=bn_kwargs,
                activ=relu6)
            self.project = TFConv2DBNActiv(
                self.inner_size,
                out_channels,
                ksize=1,
                pad=project_pad,
                nobias=True,
                initialW=initialW,
                bn_kwargs=bn_kwargs,
                activ=None) 
Author: chainer | Project: chainercv | Lines: 55 | Source: expanded_conv_2d.py
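
A side note on this example: relu6 here is simply clipped_relu with the cap fixed at 6.0, i.e. the ReLU6 activation popularized by MobileNet-style architectures, and the expanded convolution above is the kind of inverted-residual block those networks are built from.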

Example 8: __call__

# Required import: from chainer import functions [as alias]
# or: from chainer.functions import clipped_relu [as alias]
def __call__(self, x, train=False):
        """
        calculate output of VoxResNet given input x

        Parameters
        ----------
        x : (batch_size, in_channels, xlen, ylen, zlen) ndarray
            image to perform semantic segmentation

        Returns
        -------
        proba : (batch_size, n_classes, xlen, ylen, zlen) ndarray
            probability of each voxel belonging to each class;
            if train=True, a list of logits is returned instead
        """
        with chainer.using_config("train", train):
            h = self.conv1a(x)
            h = F.relu(self.bnorm1a(h))
            h = self.conv1b(h)
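            # no z given: clipped_relu falls back to Chainer's default cap (z=20.0)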
            c1 = F.clipped_relu(self.c1deconv(h))
            c1 = self.c1conv(c1)

            h = F.relu(self.bnorm1b(h))
            h = self.conv1c(h)
            h = self.voxres2(h)
            h = self.voxres3(h)
            c2 = F.clipped_relu(self.c2deconv(h))
            c2 = self.c2conv(c2)

            h = F.relu(self.bnorm3(h))
            h = self.conv4(h)
            h = self.voxres5(h)
            h = self.voxres6(h)
            c3 = F.clipped_relu(self.c3deconv(h))
            c3 = self.c3conv(c3)

            h = F.relu(self.bnorm6(h))
            h = self.conv7(h)
            h = self.voxres8(h)
            h = self.voxres9(h)
            c4 = F.clipped_relu(self.c4deconv(h))
            c4 = self.c4conv(c4)

            c = c1 + c2 + c3 + c4

        if train:
            return [c1, c2, c3, c4, c]
        else:
            return F.softmax(c) 
Author: Ryo-Ito | Project: brain_segmentation | Lines: 51 | Source: model.py


Note: The chainer.functions.clipped_relu examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their respective authors; copyright remains with the original authors, and any distribution or use should follow the corresponding project's license. Please do not reproduce without permission.