This article collects typical usage examples of the Python method lasagne.nonlinearities.sigmoid. If you are wondering what nonlinearities.sigmoid does, or how and where to use it, the curated examples below may help. You can also explore further usage examples from the containing module, lasagne.nonlinearities.
The following 13 code examples of nonlinearities.sigmoid are listed, ordered roughly by popularity.
Example 1: build_discriminator_toy
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import sigmoid [as alias]
def build_discriminator_toy(image=None, nd=512, GP_norm=None):
    Input = InputLayer(shape=(None, 2), input_var=image)
    print("Dis input:", Input.output_shape)
    dis0 = DenseLayer(Input, nd, W=Normal(0.02), nonlinearity=relu)
    print("Dis fc0:", dis0.output_shape)
    if GP_norm is True:
        dis1 = DenseLayer(dis0, nd, W=Normal(0.02), nonlinearity=relu)
    else:
        dis1 = batch_norm(DenseLayer(dis0, nd, W=Normal(0.02), nonlinearity=relu))
    print("Dis fc1:", dis1.output_shape)
    if GP_norm is True:
        dis2 = batch_norm(DenseLayer(dis1, nd, W=Normal(0.02), nonlinearity=relu))
    else:
        dis2 = DenseLayer(dis1, nd, W=Normal(0.02), nonlinearity=relu)
    print("Dis fc2:", dis2.output_shape)
    disout = DenseLayer(dis2, 1, W=Normal(0.02), nonlinearity=sigmoid)
    print("Dis output:", disout.output_shape)
    return disout
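For context, here is a minimal sketch of how a discriminator like this might be compiled and queried with Theano; the variable names and hyper-parameters below are assumptions and are not part of the original snippet.

import numpy as np
import theano
import theano.tensor as T
import lasagne

X = T.matrix('X')                                  # 2-D toy samples
disc = build_discriminator_toy(image=X, nd=512, GP_norm=True)
prob = lasagne.layers.get_output(disc)             # sigmoid scores in (0, 1)
predict_fn = theano.function([X], prob)

samples = np.random.randn(16, 2).astype(theano.config.floatX)
print(predict_fn(samples).shape)                   # (16, 1)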
Example 2: build_discriminator_32
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import sigmoid [as alias]
def build_discriminator_32(image=None, ndf=128):
    lrelu = LeakyRectify(0.2)
    # input: images
    InputImg = InputLayer(shape=(None, 3, 32, 32), input_var=image)
    print("Dis Img_input:", InputImg.output_shape)
    # Conv Layer
    dis1 = Conv2DLayer(InputImg, ndf, (4, 4), (2, 2), pad=1, W=Normal(0.02), nonlinearity=lrelu)
    print("Dis conv1:", dis1.output_shape)
    # Conv Layer
    dis2 = batch_norm(Conv2DLayer(dis1, ndf*2, (4, 4), (2, 2), pad=1, W=Normal(0.02), nonlinearity=lrelu))
    print("Dis conv2:", dis2.output_shape)
    # Conv Layer
    dis3 = batch_norm(Conv2DLayer(dis2, ndf*4, (4, 4), (2, 2), pad=1, W=Normal(0.02), nonlinearity=lrelu))
    print("Dis conv3:", dis3.output_shape)
    # Dense output layer with sigmoid
    dis4 = DenseLayer(dis3, 1, W=Normal(0.02), nonlinearity=sigmoid)
    print("Dis output:", dis4.output_shape)
    return dis4
Example 3: build_discriminator_64
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import sigmoid [as alias]
def build_discriminator_64(image=None, ndf=128):
    lrelu = LeakyRectify(0.2)
    # input: images
    InputImg = InputLayer(shape=(None, 3, 64, 64), input_var=image)
    print("Dis Img_input:", InputImg.output_shape)
    # Conv Layer
    dis1 = Conv2DLayer(InputImg, ndf, (4, 4), (2, 2), pad=1, W=Normal(0.02), nonlinearity=lrelu)
    print("Dis conv1:", dis1.output_shape)
    # Conv Layer
    dis2 = batch_norm(Conv2DLayer(dis1, ndf*2, (4, 4), (2, 2), pad=1, W=Normal(0.02), nonlinearity=lrelu))
    print("Dis conv2:", dis2.output_shape)
    # Conv Layer
    dis3 = batch_norm(Conv2DLayer(dis2, ndf*4, (4, 4), (2, 2), pad=1, W=Normal(0.02), nonlinearity=lrelu))
    print("Dis conv3:", dis3.output_shape)
    # Conv Layer
    dis4 = batch_norm(Conv2DLayer(dis3, ndf*8, (4, 4), (2, 2), pad=1, W=Normal(0.02), nonlinearity=lrelu))
    print("Dis conv4:", dis4.output_shape)
    # Dense output layer with sigmoid
    dis5 = DenseLayer(dis4, 1, W=Normal(0.02), nonlinearity=sigmoid)
    print("Dis output:", dis5.output_shape)
    return dis5
Example 4: build_discriminator_128
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import sigmoid [as alias]
def build_discriminator_128(image=None, ndf=128):
    lrelu = LeakyRectify(0.2)
    # input: images
    InputImg = InputLayer(shape=(None, 3, 128, 128), input_var=image)
    print("Dis Img_input:", InputImg.output_shape)
    # Conv Layer
    dis1 = Conv2DLayer(InputImg, ndf, (4, 4), (2, 2), pad=1, W=Normal(0.02), nonlinearity=lrelu)
    print("Dis conv1:", dis1.output_shape)
    # Conv Layer
    dis2 = batch_norm(Conv2DLayer(dis1, ndf*2, (4, 4), (2, 2), pad=1, W=Normal(0.02), nonlinearity=lrelu))
    print("Dis conv2:", dis2.output_shape)
    # Conv Layer
    dis3 = batch_norm(Conv2DLayer(dis2, ndf*4, (4, 4), (2, 2), pad=1, W=Normal(0.02), nonlinearity=lrelu))
    print("Dis conv3:", dis3.output_shape)
    # Conv Layer
    dis4 = batch_norm(Conv2DLayer(dis3, ndf*8, (4, 4), (2, 2), pad=1, W=Normal(0.02), nonlinearity=lrelu))
    print("Dis conv4:", dis4.output_shape)
    # Conv Layer
    dis5 = batch_norm(Conv2DLayer(dis4, ndf*16, (4, 4), (2, 2), pad=1, W=Normal(0.02), nonlinearity=lrelu))
    print("Dis conv5:", dis5.output_shape)
    # Dense output layer with sigmoid
    dis6 = DenseLayer(dis5, 1, W=Normal(0.02), nonlinearity=sigmoid)
    print("Dis output:", dis6.output_shape)
    return dis6
Example 5: setup_transform_net
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import sigmoid [as alias]
def setup_transform_net(self, input_var=None):
    transform_net = InputLayer(shape=self.shape, input_var=input_var)
    transform_net = style_conv_block(transform_net, self.num_styles, 32, 9, 1)
    transform_net = style_conv_block(transform_net, self.num_styles, 64, 3, 2)
    transform_net = style_conv_block(transform_net, self.num_styles, 128, 3, 2)
    for _ in range(5):
        transform_net = residual_block(transform_net, self.num_styles)
    transform_net = nn_upsample(transform_net, self.num_styles)
    transform_net = nn_upsample(transform_net, self.num_styles)
    if self.net_type == 0:
        transform_net = style_conv_block(transform_net, self.num_styles, 3, 9, 1, tanh)
        transform_net = ExpressionLayer(transform_net, lambda X: 150.*X, output_shape=None)
    elif self.net_type == 1:
        transform_net = style_conv_block(transform_net, self.num_styles, 3, 9, 1, sigmoid)
    self.network['transform_net'] = transform_net
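The two output heads differ only in pixel range: the tanh head is scaled by 150, presumably to roughly match mean-subtracted pixel values, while the sigmoid head keeps outputs in [0, 1]. A hedged sketch of turning a sigmoid-branch output back into an 8-bit image (this deprocessing helper is an assumption, not part of the original class):

import numpy as np

def sigmoid_output_to_image(y):
    # Map a (3, H, W) float array in [0, 1] to an (H, W, 3) uint8 image.
    y = np.clip(y, 0.0, 1.0)
    return (np.transpose(y, (1, 2, 0)) * 255.0).astype(np.uint8)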
Example 6: get_output_for
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import sigmoid [as alias]
def get_output_for(self, input, **kwargs):
    # If the input has more than two dimensions, flatten it into a
    # batch of feature vectors.
    input_reshape = input.flatten(2) if input.ndim > 2 else input
    activation = T.dot(input_reshape, self.W_h)
    if self.b_h is not None:
        activation = activation + self.b_h.dimshuffle('x', 0)
    activation = self.nonlinearity(activation)
    transform = T.dot(input_reshape, self.W_t)
    if self.b_t is not None:
        transform = transform + self.b_t.dimshuffle('x', 0)
    transform = nonlinearities.sigmoid(transform)
    carry = 1.0 - transform
    output = activation * transform + input_reshape * carry
    # Reshape the output back to the original input shape.
    if input.ndim > 2:
        output = T.reshape(output, input.shape)
    return output
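This is the standard highway-network gating pattern: a transform gate T(x) = sigmoid(x·W_t + b_t) mixes a candidate activation H(x) with the unchanged input, output = H(x) * T(x) + x * (1 - T(x)). A tiny NumPy sketch of the same arithmetic, with tanh standing in for self.nonlinearity and made-up shapes, purely for illustration:

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

x = np.random.randn(4, 8)                # batch of 4 feature vectors
W_h, W_t = np.random.randn(8, 8), np.random.randn(8, 8)

H = np.tanh(x @ W_h)                     # candidate activation
T_gate = sigmoid(x @ W_t)                # transform gate in (0, 1)
out = H * T_gate + x * (1.0 - T_gate)    # carry = 1 - T_gate
print(out.shape)                         # (4, 8)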
Example 7: initialization
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import sigmoid [as alias]
def initialization(name):
    initializations = {
        'sigmoid':  init.HeNormal(gain=1.0),
        'softmax':  init.HeNormal(gain=1.0),
        'elu':      init.HeNormal(gain=1.0),
        'relu':     init.HeNormal(gain=math.sqrt(2)),
        'lrelu':    init.HeNormal(gain=math.sqrt(2/(1+0.01**2))),
        'vlrelu':   init.HeNormal(gain=math.sqrt(2/(1+0.33**2))),
        'rectify':  init.HeNormal(gain=math.sqrt(2)),
        'identity': init.HeNormal(gain=math.sqrt(2)),
    }
    return initializations[name]

#################### BASELINE MODEL #####################
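A minimal sketch of how such a helper might be wired into a layer definition; the layer shape and unit count below are assumptions, not taken from the original code:

from lasagne import layers, nonlinearities

# Pick the He-style initializer that matches the chosen nonlinearity.
dense = layers.DenseLayer(
    incoming=layers.InputLayer(shape=(None, 64)),
    num_units=32,
    W=initialization('sigmoid'),
    nonlinearity=nonlinearities.sigmoid,
)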
Example 8: predictionPooling
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import sigmoid [as alias]
def predictionPooling(p, sensitivity=-1, mode='avg'):
    # Apply sigmoid function
    p = flat_sigmoid(p, sensitivity)
    # Mean exponential pooling for monophonic recordings
    if mode == 'mexp':
        p_pool = np.mean((p * 2.0) ** 2, axis=0)
    # Simple average pooling
    else:
        p_pool = np.mean(p, axis=0)
    p_pool[p_pool > 1.0] = 1.0
    return p_pool
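flat_sigmoid is not shown in this snippet. A plausible stand-in, offered purely as an assumption about the original helper, is a logistic function with an adjustable slope:

import numpy as np

def flat_sigmoid(x, sensitivity=-1):
    # Assumed implementation: sensitivity scales the slope of the curve;
    # the default of -1 gives the standard logistic 1 / (1 + exp(-x)).
    return 1.0 / (1.0 + np.exp(sensitivity * np.clip(x, -250, 250)))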
Example 9: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import sigmoid [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs')
    aug_var = T.matrix('aug_var')
    target_var = T.matrix('targets')
    # Load the pretrained convolutional autoencoder and graft a fresh input
    # layer onto its first layer.
    ae = pickle.load(open('model_4ch/conv_ae.pkl', 'rb'))
    input_layer_index = map(lambda pair: pair[0], ae.layers).index('input')
    first_layer = ae.get_all_layers()[input_layer_index + 1]
    input_layer = layers.InputLayer(shape=(None, 4, 32, 32), input_var=input_var)
    first_layer.input_layer = input_layer
    encode_layer_index = map(lambda pair: pair[0], ae.layers).index('encode_layer')
    encode_layer = ae.get_all_layers()[encode_layer_index]
    # Concatenate the encoded features with the augmentation input, then add a
    # hidden layer and a sigmoid output for multi-label prediction.
    aug_layer = layers.InputLayer(shape=(None, classn), input_var=aug_var)
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis=1)
    hidden_layer = layers.DenseLayer(incoming=cat_layer, num_units=100, nonlinearity=rectify)
    network = layers.DenseLayer(incoming=hidden_layer, num_units=classn, nonlinearity=sigmoid)
    return network, encode_layer, input_var, aug_var, target_var
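Since the output layer is a sigmoid over classn labels, training would typically use binary cross-entropy. A hedged sketch of the training setup; the optimizer and learning rate are assumptions, not taken from the original code:

import theano
import lasagne

prediction = lasagne.layers.get_output(network)
loss = lasagne.objectives.binary_crossentropy(prediction, target_var).mean()
params = lasagne.layers.get_all_params(network, trainable=True)
updates = lasagne.updates.adam(loss, params, learning_rate=1e-4)
train_fn = theano.function([input_var, aug_var, target_var], loss, updates=updates)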
Example 10: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import sigmoid [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs')
    aug_var = T.matrix('aug_var')
    target_var = T.matrix('targets')
    ae = pickle.load(open('model/conv_ae.pkl', 'rb'))
    input_layer_index = map(lambda pair: pair[0], ae.layers).index('input')
    first_layer = ae.get_all_layers()[input_layer_index + 1]
    input_layer = layers.InputLayer(shape=(None, 3, 32, 32), input_var=input_var)
    first_layer.input_layer = input_layer
    encode_layer_index = map(lambda pair: pair[0], ae.layers).index('encode_layer')
    encode_layer = ae.get_all_layers()[encode_layer_index]
    aug_layer = layers.InputLayer(shape=(None, classn), input_var=aug_var)
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis=1)
    hidden_layer = layers.DenseLayer(incoming=cat_layer, num_units=100, nonlinearity=rectify)
    network_mll = layers.DenseLayer(incoming=hidden_layer, num_units=12, nonlinearity=sigmoid)
    network_sll = layers.DenseLayer(incoming=hidden_layer, num_units=7, nonlinearity=sigmoid)
    network = lasagne.layers.ConcatLayer([network_mll, network_sll], axis=1)
    return network, encode_layer, input_var, aug_var, target_var
Example 11: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import sigmoid [as alias]
def build_network_from_ae(classn, fea_len):
    input_var = T.tensor4('inputs')
    aug_var = T.matrix('aug_var')
    fea_var = T.matrix('fea_var')
    target_var = T.imatrix('targets')
    ae = pickle.load(open('model/conv_ae.pkl', 'rb'))
    input_layer_index = map(lambda pair: pair[0], ae.layers).index('input')
    first_layer = ae.get_all_layers()[input_layer_index + 1]
    input_layer = layers.InputLayer(shape=(None, 3, 32, 32), input_var=input_var)
    first_layer.input_layer = input_layer
    encode_layer_index = map(lambda pair: pair[0], ae.layers).index('encode_layer')
    encode_layer = ae.get_all_layers()[encode_layer_index]
    aug_layer = layers.InputLayer(shape=(None, classn), input_var=aug_var)
    fea_layer = layers.InputLayer(shape=(None, fea_len), input_var=fea_var)
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer, fea_layer], axis=1)
    hidden_layer = layers.DenseLayer(incoming=cat_layer, num_units=100, nonlinearity=rectify)
    network = layers.DenseLayer(incoming=hidden_layer, num_units=classn, nonlinearity=sigmoid)
    return network, encode_layer, input_var, aug_var, fea_var, target_var
Example 12: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import sigmoid [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs')
    aug_var = T.matrix('aug_var')
    target_var = T.matrix('targets')
    ae = pickle.load(open('model/conv_ae.pkl', 'rb'))
    input_layer_index = map(lambda pair: pair[0], ae.layers).index('input')
    first_layer = ae.get_all_layers()[input_layer_index + 1]
    input_layer = layers.InputLayer(shape=(None, 3, 32, 32), input_var=input_var)
    first_layer.input_layer = input_layer
    encode_layer_index = map(lambda pair: pair[0], ae.layers).index('encode_layer')
    encode_layer = ae.get_all_layers()[encode_layer_index]
    aug_layer = layers.InputLayer(shape=(None, classn), input_var=aug_var)
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis=1)
    hidden_layer = layers.DenseLayer(incoming=cat_layer, num_units=100, nonlinearity=rectify)
    network = layers.DenseLayer(incoming=hidden_layer, num_units=classn, nonlinearity=sigmoid)
    return network, encode_layer, input_var, aug_var, target_var
Example 13: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import sigmoid [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs')
    aug_var = T.matrix('aug_var')
    target_var = T.matrix('targets')
    ae = pickle.load(open('model_4ch_rot/conv_ae.pkl', 'rb'))
    input_layer_index = map(lambda pair: pair[0], ae.layers).index('input')
    first_layer = ae.get_all_layers()[input_layer_index + 1]
    input_layer = layers.InputLayer(shape=(None, 4, 32, 32), input_var=input_var)
    first_layer.input_layer = input_layer
    encode_layer_index = map(lambda pair: pair[0], ae.layers).index('encode_layer')
    encode_layer = ae.get_all_layers()[encode_layer_index]
    aug_layer = layers.InputLayer(shape=(None, classn), input_var=aug_var)
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis=1)
    hidden_layer = layers.DenseLayer(incoming=cat_layer, num_units=100, nonlinearity=rectify)
    network = layers.DenseLayer(incoming=hidden_layer, num_units=classn, nonlinearity=sigmoid)
    return network, encode_layer, input_var, aug_var, target_var