本文整理汇总了Python中lasagne.nonlinearities.elu方法的典型用法代码示例。如果您正苦于以下问题:Python nonlinearities.elu方法的具体用法?Python nonlinearities.elu怎么用?Python nonlinearities.elu使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类lasagne.nonlinearities
的用法示例。
在下文中一共展示了nonlinearities.elu方法的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: initialization
# 需要导入模块: from lasagne import nonlinearities [as 别名]
# 或者: from lasagne.nonlinearities import elu [as 别名]
def initialization(name):
    """Return a He-normal weight initializer matched to a nonlinearity name.

    Gains follow He et al.: 1.0 for saturating/exponential units
    (sigmoid, softmax, elu), sqrt(2) for rectifiers, and
    sqrt(2 / (1 + a**2)) for leaky rectifiers with negative slope a.

    Raises KeyError for an unknown name.
    """
    gains = {
        'sigmoid': 1.0,
        'softmax': 1.0,
        'elu': 1.0,
        'relu': math.sqrt(2),
        'lrelu': math.sqrt(2 / (1 + 0.01 ** 2)),
        'vlrelu': math.sqrt(2 / (1 + 0.33 ** 2)),
        'rectify': math.sqrt(2),
        'identity': math.sqrt(2),
    }
    return init.HeNormal(gain=gains[name])
#################### BASELINE MODEL #####################
示例2: nonlinearity
# 需要导入模块: from lasagne import nonlinearities [as 别名]
# 或者: from lasagne.nonlinearities import elu [as 别名]
def nonlinearity(name):
    """Look up a Lasagne nonlinearity by short name.

    'lrelu'/'vlrelu' produce LeakyRectify instances with slopes 0.01
    and 0.33 respectively; all other names map to module-level
    functions. Raises KeyError for an unknown name.
    """
    leaky_slopes = {'lrelu': 0.01, 'vlrelu': 0.33}
    if name in leaky_slopes:
        return nl.LeakyRectify(leaky_slopes[name])
    fixed = {
        'rectify': nl.rectify,
        'relu': nl.rectify,
        'elu': nl.elu,
        'softmax': nl.softmax,
        'sigmoid': nl.sigmoid,
        'identity': nl.identity,
    }
    return fixed[name]
示例3: ResLayer
# 需要导入模块: from lasagne import nonlinearities [as 别名]
# 或者: from lasagne.nonlinearities import elu [as 别名]
def ResLayer(incoming, IB):
    """Residual block: elementwise-sum the branch IB with the shortcut
    `incoming`, then apply an elu nonlinearity layer."""
    merged = ESL([IB, incoming])
    return NL(merged, elu)
示例4: __init__
# 需要导入模块: from lasagne import nonlinearities [as 别名]
# 或者: from lasagne.nonlinearities import elu [as 别名]
def __init__(self, incoming, nonlinearity=elu, survival_p=0.5,
             **kwargs):
    """Layer that stochastically drops its input as a whole.

    nonlinearity: applied nonlinearity; None means identity.
    survival_p: stored inverted as self.p = 1 - survival_p.
    """
    super(IfElseDropLayer, self).__init__(incoming, **kwargs)
    if nonlinearity is None:
        self.nonlinearity = identity
    else:
        self.nonlinearity = nonlinearity
    # Seed a Theano random stream from Lasagne's global RNG.
    self._srng = RandomStreams(lasagne.random.get_rng().randint(1, 2147462579))
    self.p = 1 - survival_p
示例5: get_output_for
# 需要导入模块: from lasagne import nonlinearities [as 别名]
# 或者: from lasagne.nonlinearities import elu [as 别名]
def get_output_for(self, input, deterministic=False, **kwargs):
    """Whole-layer stochastic drop (stochastic-depth style).

    Deterministic (test) mode: scale the input by self.p.
    Stochastic (train) mode: one uniform draw decides whether the
    whole input passes through or is replaced by zeros.

    self.p was set to 1 - survival_p in __init__.
    """
    if deterministic:
        # Test time: scale by the pass-through probability so the
        # expected activation matches training.
        return self.p*input
    else:
        # Draw a single scalar uniform sample; if it is below self.p
        # the input passes through unchanged, otherwise the output is
        # all zeros.
        # NOTE(review): because self.p = 1 - survival_p, the input is
        # kept with probability 1 - survival_p — inverted relative to
        # what the parameter name suggests (symmetric only at the 0.5
        # default). Confirm intent against Lasagne/Recipes PR #67.
        return theano.ifelse.ifelse(
            T.lt(self._srng.uniform( (1,), 0, 1)[0], self.p),
            input,
            T.zeros(input.shape)
        )
# def ResDrop(incoming, IB, p):
# return NL(ESL([IfElseDropLayer(IB,survival_p=p),incoming]),elu)
示例6: ResDropNoPre
# 需要导入模块: from lasagne import nonlinearities [as 别名]
# 或者: from lasagne.nonlinearities import elu [as 别名]
def ResDropNoPre(incoming, IB, p):
    """Residual block with stochastic depth on the branch IB (survival
    probability p), summed with the shortcut and passed through elu.

    NOTE: identical in body to ResDrop in this file.
    """
    dropped_branch = IfElseDropLayer(IB, survival_p=p)
    summed = ESL([dropped_branch, incoming])
    return NL(summed, elu)
示例7: ResDrop
# 需要导入模块: from lasagne import nonlinearities [as 别名]
# 或者: from lasagne.nonlinearities import elu [as 别名]
def ResDrop(incoming, IB, p):
    """Residual block with stochastic depth: branch IB survives with
    probability p, is added to the shortcut, then elu is applied."""
    dropped_branch = IfElseDropLayer(IB, survival_p=p)
    summed = ESL([dropped_branch, incoming])
    return NL(summed, elu)
示例8: ResLayer
# 需要导入模块: from lasagne import nonlinearities [as 别名]
# 或者: from lasagne.nonlinearities import elu [as 别名]
def ResLayer(incoming, IB):
    """Plain residual block (no drop): sum branch IB with the shortcut
    `incoming` via an elementwise-sum layer, then apply elu."""
    total = ESL([IB, incoming])
    return NL(total, elu)
# If-else Drop Layer, adopted from Christopher Beckham's recipe:
# https://github.com/Lasagne/Recipes/pull/67
示例9: pd
# 需要导入模块: from lasagne import nonlinearities [as 别名]
# 或者: from lasagne.nonlinearities import elu [as 别名]
def pd(num_layers=2, num_filters=32, filter_size=(3, 3), pad=1, stride=(1, 1),
       nonlinearity=elu, style='convolutional', bnorm=1, **kwargs):
    """Broadcast per-layer hyperparameters into lists of length num_layers.

    Every keyword argument except num_layers (including the captured
    **kwargs dict itself, under the key 'kwargs') is returned in a dict
    mapping its name to a list: values that are already lists pass
    through unchanged, anything else is repeated num_layers times.
    """
    # Capture the arguments before binding any other locals, so only
    # the parameters (plus 'kwargs') appear in the snapshot.
    input_args = locals()
    input_args.pop('num_layers')
    # .items() instead of the Python-2-only .iteritems(): identical
    # behavior here, but portable to Python 3.
    return {key: (entry if isinstance(entry, list) else [entry] * num_layers)
            for key, entry in input_args.items()}
# Possible Conv2DDNN convenience function. Remember to delete the C2D import at the top if you use this
# def C2D(incoming = None, num_filters = 32, filter_size= [3,3],pad = 'same',stride = [1,1], W = initmethod('relu'),nonlinearity = elu,name = None):
# return lasagne.layers.dnn.Conv2DDNNLayer(incoming,num_filters,filter_size,stride,pad,False,W,None,nonlinearity,False)
# Shape-Preserving Gaussian Sample layer for latent vectors with spatial dimensions.
# This is a holdover from an "old" (i.e. I abandoned it last month) idea.