本文整理匯總了Python中lasagne.nonlinearities.elu方法的典型用法代碼示例。如果您正苦於以下問題:Python nonlinearities.elu方法的具體用法?Python nonlinearities.elu怎麽用?Python nonlinearities.elu使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類lasagne.nonlinearities
的用法示例。
在下文中一共展示了nonlinearities.elu方法的9個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: initialization
# 需要導入模塊: from lasagne import nonlinearities [as 別名]
# 或者: from lasagne.nonlinearities import elu [as 別名]
def initialization(name):
    """Return a He-normal weight initializer whose gain matches *name*.

    Gains follow He et al.: sqrt(2) for rectifiers, sqrt(2/(1+a**2)) for
    leaky rectifiers with negative slope a, and 1.0 for elu/sigmoid/softmax.
    Raises KeyError for an unknown activation name.
    """
    gain_table = {
        'sigmoid': 1.0,
        'softmax': 1.0,
        'elu': 1.0,
        'relu': math.sqrt(2),
        'lrelu': math.sqrt(2/(1+0.01**2)),
        'vlrelu': math.sqrt(2/(1+0.33**2)),
        'rectify': math.sqrt(2),
        'identity': math.sqrt(2),
    }
    return init.HeNormal(gain=gain_table[name])
#################### BASELINE MODEL #####################
示例2: nonlinearity
# 需要導入模塊: from lasagne import nonlinearities [as 別名]
# 或者: from lasagne.nonlinearities import elu [as 別名]
def nonlinearity(name):
    """Map a short activation name to the corresponding Lasagne callable.

    'rectify' and 'relu' are aliases; raises KeyError for unknown names.
    """
    table = dict(
        rectify=nl.rectify,
        relu=nl.rectify,
        lrelu=nl.LeakyRectify(0.01),
        vlrelu=nl.LeakyRectify(0.33),
        elu=nl.elu,
        softmax=nl.softmax,
        sigmoid=nl.sigmoid,
        identity=nl.identity,
    )
    return table[name]
示例3: ResLayer
# 需要導入模塊: from lasagne import nonlinearities [as 別名]
# 或者: from lasagne.nonlinearities import elu [as 別名]
def ResLayer(incoming, IB):
    """Residual connection: elementwise-sum the inner branch IB with the
    shortcut `incoming`, then apply an elu nonlinearity."""
    shortcut_sum = ESL([IB, incoming])
    return NL(shortcut_sum, elu)
示例4: __init__
# 需要導入模塊: from lasagne import nonlinearities [as 別名]
# 或者: from lasagne.nonlinearities import elu [as 別名]
def __init__(self, incoming, nonlinearity=elu, survival_p=0.5,
             **kwargs):
    # Stochastic-depth style drop layer (adapted from Lasagne Recipes PR #67):
    # the whole incoming branch is kept or zeroed as a unit.
    super(IfElseDropLayer, self).__init__(incoming, **kwargs)
    # Fall back to the identity function when no nonlinearity is supplied.
    self.nonlinearity = (identity if nonlinearity is None
                         else nonlinearity)
    # Per-layer RNG stream seeded from Lasagne's global RNG.
    self._srng = RandomStreams(lasagne.random.get_rng().randint(1, 2147462579))
    # NOTE(review): self.p = 1 - survival_p, yet get_output_for KEEPS the
    # input when uniform < self.p and scales by self.p at test time — so
    # self.p acts as the keep probability. As written, "survival_p" is
    # effectively the drop probability; confirm the intended semantics.
    self.p = 1-survival_p
示例5: get_output_for
# 需要導入模塊: from lasagne import nonlinearities [as 別名]
# 或者: from lasagne.nonlinearities import elu [as 別名]
def get_output_for(self, input, deterministic=False, **kwargs):
    # Test-time pass: deterministically scale by self.p, the probability
    # with which the branch is kept during training (expected value match).
    if deterministic:
        return self.p*input
    else:
        # Training pass: a single uniform draw keeps the ENTIRE input with
        # probability self.p, otherwise replaces it with zeros — the whole
        # branch is dropped, not individual units.
        # NOTE(review): __init__ sets self.p = 1 - survival_p, so the branch
        # survives with probability 1 - survival_p; verify whether survival_p
        # was meant to be the keep probability instead.
        return theano.ifelse.ifelse(
            T.lt(self._srng.uniform( (1,), 0, 1)[0], self.p),
            input,
            T.zeros(input.shape)
        )
# def ResDrop(incoming, IB, p):
# return NL(ESL([IfElseDropLayer(IB,survival_p=p),incoming]),elu)
示例6: ResDropNoPre
# 需要導入模塊: from lasagne import nonlinearities [as 別名]
# 或者: from lasagne.nonlinearities import elu [as 別名]
def ResDropNoPre(incoming, IB, p):
    """Residual block with stochastic depth (no pre-activation): wrap the
    inner branch IB in an IfElseDropLayer with survival_p=p, sum it with
    the shortcut `incoming`, and apply elu."""
    dropped_branch = IfElseDropLayer(IB, survival_p=p)
    return NL(ESL([dropped_branch, incoming]), elu)
示例7: ResDrop
# 需要導入模塊: from lasagne import nonlinearities [as 別名]
# 或者: from lasagne.nonlinearities import elu [as 別名]
def ResDrop(incoming, IB, p):
    """Residual block with stochastic depth: the inner branch IB is dropped
    as a whole (IfElseDropLayer, survival_p=p) before being summed with the
    shortcut `incoming`; the sum is passed through elu."""
    branch = IfElseDropLayer(IB, survival_p=p)
    merged = ESL([branch, incoming])
    return NL(merged, elu)
示例8: ResLayer
# 需要導入模塊: from lasagne import nonlinearities [as 別名]
# 或者: from lasagne.nonlinearities import elu [as 別名]
def ResLayer(incoming, IB):
    """Plain residual block: elementwise sum of the inner branch IB and the
    shortcut `incoming`, followed by an elu nonlinearity."""
    return NL(ESL([IB, incoming]), elu)
# If-else Drop Layer, adopted from Christopher Beckham's recipe:
# https://github.com/Lasagne/Recipes/pull/67
示例9: pd
# 需要導入模塊: from lasagne import nonlinearities [as 別名]
# 或者: from lasagne.nonlinearities import elu [as 別名]
def pd(num_layers=2,num_filters=32,filter_size=(3,3),pad=1,stride = (1,1),nonlinearity=elu,style='convolutional',bnorm=1,**kwargs):
    """Broadcast per-layer hyperparameters into lists of length num_layers.

    Every keyword argument except num_layers is returned in a dict keyed by
    its parameter name; scalar values are replicated num_layers times, while
    values that are already lists are passed through unchanged.

    Returns:
        dict mapping each parameter name (including 'kwargs') to a list.
    Raises:
        KeyError never; unknown extra keywords land inside the 'kwargs' entry.
    """
    # locals() here captures exactly the declared parameters plus kwargs.
    input_args = locals()
    input_args.pop('num_layers')
    # .items() instead of the Python 2-only .iteritems() so this runs on
    # both Python 2 and 3; isinstance() instead of a `type(...) is` check.
    return {key: (entry if isinstance(entry, list) else [entry]*num_layers)
            for key, entry in input_args.items()}
# Possible Conv2DDNN convenience function. Remember to delete the C2D import at the top if you use this
# def C2D(incoming = None, num_filters = 32, filter_size= [3,3],pad = 'same',stride = [1,1], W = initmethod('relu'),nonlinearity = elu,name = None):
# return lasagne.layers.dnn.Conv2DDNNLayer(incoming,num_filters,filter_size,stride,pad,False,W,None,nonlinearity,False)
# Shape-Preserving Gaussian Sample layer for latent vectors with spatial dimensions.
# This is a holdover from an "old" (i.e. I abandoned it last month) idea.