This page collects typical usage examples of the Python method lasagne.nonlinearities.linear. If you are unsure what nonlinearities.linear does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage of the containing module, lasagne.nonlinearities.
The 15 code examples of nonlinearities.linear below are sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
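Before the examples: nonlinearities.linear is simply the identity function, so passing it as a layer's nonlinearity means no activation is applied. This is typical for regression-style outputs or when a separate custom activation layer follows. A minimal, self-contained sketch (not taken from any of the examples below):

import numpy as np
from lasagne import layers
from lasagne.nonlinearities import linear

# linear returns its input unchanged
x = np.ones((4, 8), dtype=np.float32)
print(np.array_equal(linear(x), x))  # True

# typical use: a dense output layer with no activation applied
l_in = layers.InputLayer(shape=(None, 8))
l_out = layers.DenseLayer(l_in, num_units=3, nonlinearity=linear)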
Example 1: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');
    ae = pickle.load(open('model/conv_ae.pkl', 'rb'));
    # Python 2 code: map() returns a list here, so .index() works;
    # Python 3 would need list(map(...)).
    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape=(None, 3, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;
    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 100, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 100, nonlinearity = linear);
    hidden_smth_layer = SmthActLayer(incoming = hidden_layer, x_start = -5, x_end = 5, num_segs = 10);
    network = layers.DenseLayer(incoming = hidden_smth_layer, num_units = classn, nonlinearity = linear);
    return network, encode_layer, input_var, aug_var, target_var;
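For context, a rough sketch of how the values returned by build_network_from_ae above might be wired into a Theano training function. The loss and optimizer settings here are illustrative assumptions, not taken from the original project, and the snippet presumes that theano and lasagne are imported and that the custom SmthActLayer class and the pickled autoencoder are available:

# classn=10 is an arbitrary illustrative value
network, encode_layer, input_var, aug_var, target_var = build_network_from_ae(classn=10)

# symbolic forward pass and an assumed squared-error loss
prediction = lasagne.layers.get_output(network)
loss = lasagne.objectives.squared_error(prediction, target_var).mean()

# update all trainable parameters with an assumed optimizer configuration
params = lasagne.layers.get_all_params(network, trainable=True)
updates = lasagne.updates.nesterov_momentum(loss, params, learning_rate=0.01, momentum=0.9)

train_fn = theano.function([input_var, aug_var, target_var], loss, updates=updates)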
Example 2: residual_block
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def residual_block(resnet_in, num_styles=None, num_filters=None, filter_size=3, stride=1):
    if num_filters is None:
        num_filters = resnet_in.output_shape[1]
    conv1 = style_conv_block(resnet_in, num_styles, num_filters, filter_size, stride)
    # second convolution uses linear, so the residual sum is taken before any activation
    conv2 = style_conv_block(conv1, num_styles, num_filters, filter_size, stride, linear)
    res_block = ElemwiseSumLayer([conv2, resnet_in])
    return res_block
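Note that style_conv_block is a project-specific helper not shown on this page; for ElemwiseSumLayer to work, it must preserve the spatial and channel dimensions of its input. As a hypothetical usage sketch only (net and num_styles stand in for upstream values from the surrounding project):

# illustrative only: stack several residual blocks on some upstream layer `net`
for _ in range(5):
    net = residual_block(net, num_styles=num_styles)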
Example 3: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');
    ae = pickle.load(open('model_fullsize_nopool_4ch/conv_ae.pkl', 'rb'));
    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 50, 50), input_var = input_var);
    first_layer.input_layer = input_layer;
    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);
    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];
    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    ago_layer = AgoLayer(incoming = hidden_layer, num_segs = 5);
    network = layers.DenseLayer(incoming = ago_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, ago_layer.W];
    return (encode_layer, hidden_layer, ago_layer, network), input_var, aug_var, target_var, stack_params;
Example 4: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn, model_param):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');
    ae = pickle.load(open('model_fullsize_nopool_4ch/conv_ae.pkl', 'rb'));
    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 50, 50), input_var = input_var);
    first_layer.input_layer = input_layer;
    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);
    # smooth activation function
    smth_type, num_units, num_segs, seg_start_end, _ = model_param;
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = num_units, nonlinearity = linear);
    if smth_type == 1:
        smth_act_layer = SmthAct1Layer(incoming = hidden_layer, x_start = -seg_start_end, x_end = seg_start_end, num_segs = num_segs);
    else:
        smth_act_layer = SmthAct2Layer(incoming = hidden_layer, x_start = -seg_start_end, x_end = seg_start_end, num_segs = num_segs);
    network = SumLayer(incoming = smth_act_layer);
    stack_params = [network.b, hidden_layer.W, hidden_layer.b, smth_act_layer.W];
    return (encode_layer, hidden_layer, smth_act_layer, network), input_var, aug_var, target_var, stack_params;
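The model_param argument is unpacked inside the function as (smth_type, num_units, num_segs, seg_start_end, _). A hypothetical call consistent with that unpacking; the values below are illustrative only, and the fifth element is ignored by this function:

# (smth_type, num_units, num_segs, seg_start_end, <unused>)
model_param = (2, 200, 20, 10.0, None)
nets, input_var, aug_var, target_var, stack_params = build_network_from_ae(classn=4, model_param=model_param)
encode_layer, hidden_layer, smth_act_layer, network = nets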
Example 5: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');
    ae = pickle.load(open('model_4ch/conv_ae.pkl', 'rb'));
    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;
    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);
    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];
    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    smth_act_layer = SmthAct2Layer(incoming = hidden_layer, x_start = -10.0, x_end = 10.0, num_segs = 20);
    network = layers.DenseLayer(incoming = smth_act_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, smth_act_layer.W];
    return (encode_layer, hidden_layer, smth_act_layer, network), input_var, aug_var, target_var, stack_params;
Example 6: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');
    ae = pickle.load(open('model/conv_ae.pkl', 'rb'));
    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 3, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;
    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);
    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];
    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    smth_act_layer = SmthAct2Layer(incoming = hidden_layer, x_start = -10.0, x_end = 10.0, num_segs = 20);
    network = layers.DenseLayer(incoming = smth_act_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, smth_act_layer.W];
    return (encode_layer, hidden_layer, smth_act_layer, network), input_var, aug_var, target_var, stack_params;
Example 7: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');
    ae = pickle.load(open('model_4ch/conv_ae.pkl', 'rb'));
    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;
    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);
    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];
    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    ago_layer = AgoLayer(incoming = hidden_layer, num_segs = 20);
    network = layers.DenseLayer(incoming = ago_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, ago_layer.W];
    return (encode_layer, hidden_layer, ago_layer, network), input_var, aug_var, target_var, stack_params;
Example 8: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');
    ae = pickle.load(open('model/conv_ae.pkl', 'rb'));
    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 3, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;
    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);
    # conventional rectified linear units
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];
    # smooth activation function
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    #smth_act_layer = SmthAct2Layer(incoming = hidden_layer, x_start = -10.0, x_end = 10.0, num_segs = 20);
    #network = layers.DenseLayer(incoming = smth_act_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, smth_act_layer.W];
    return (encode_layer, hidden_layer, network), input_var, aug_var, target_var, stack_params;
Example 9: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');
    ae = pickle.load(open('model_4ch/conv_ae.pkl', 'rb'));
    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;
    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);
    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];
    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    he_layer = HeLayer(incoming = hidden_layer);
    network = layers.DenseLayer(incoming = he_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, he_layer.W];
    return (encode_layer, hidden_layer, he_layer, network), input_var, aug_var, target_var, stack_params;
Example 10: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');
    ae = pickle.load(open('model_fullsize_nopool_4ch/conv_ae.pkl', 'rb'));
    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 50, 50), input_var = input_var);
    first_layer.input_layer = input_layer;
    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);
    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];
    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    he_layer = HeLayer(incoming = hidden_layer);
    network = layers.DenseLayer(incoming = he_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, he_layer.W];
    return (encode_layer, hidden_layer, he_layer, network), input_var, aug_var, target_var, stack_params;
Example 11: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');
    ae = pickle.load(open('model_fullsize_nopool_4ch/conv_ae.pkl', 'rb'));
    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 50, 50), input_var = input_var);
    first_layer.input_layer = input_layer;
    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);
    # conventional rectified linear units
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = very_leaky_rectify);
    network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];
    # smooth activation function
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    #he_layer = HeLayer(incoming = hidden_layer);
    #network = layers.DenseLayer(incoming = he_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, he_layer.W];
    return (encode_layer, hidden_layer, network), input_var, aug_var, target_var, stack_params;
Example 12: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');
    ae = pickle.load(open('model_fullsize_nopool_4ch/conv_ae.pkl', 'rb'));
    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 50, 50), input_var = input_var);
    first_layer.input_layer = input_layer;
    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);
    # conventional rectified linear units
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];
    # smooth activation function
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    #he_layer = HeLayer(incoming = hidden_layer);
    #network = layers.DenseLayer(incoming = he_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, he_layer.W];
    return (encode_layer, hidden_layer, network), input_var, aug_var, target_var, stack_params;
Example 13: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');
    ae = pickle.load(open('model_4ch_rot/conv_ae.pkl', 'rb'));
    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;
    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);
    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];
    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    smth_act_layer = SmthAct2Layer(incoming = hidden_layer, x_start = -10.0, x_end = 10.0, num_segs = 20);
    network = layers.DenseLayer(incoming = smth_act_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, smth_act_layer.W];
    return (encode_layer, hidden_layer, smth_act_layer, network), input_var, aug_var, target_var, stack_params;
Example 14: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    target_var = T.matrix('targets');
    ae = pickle.load(open('model_large/conv_ae.pkl', 'rb'));
    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 3, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;
    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = encode_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];
    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = encode_layer, num_units = 200, nonlinearity = linear);
    smth_act_layer = SmthAct2Layer(incoming = hidden_layer, x_start = -10.0, x_end = 10.0, num_segs = 20);
    network = layers.DenseLayer(incoming = smth_act_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, smth_act_layer.W];
    return (encode_layer, hidden_layer, smth_act_layer, network), input_var, target_var, stack_params;
Example 15: build_network_from_ae
# Required import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');
    ae = pickle.load(open('model/conv_ae.pkl', 'rb'));
    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 3, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;
    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);
    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);
    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 100, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];
    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 100, nonlinearity = linear);
    smth_act_layer = SmthAct1Layer(incoming = hidden_layer, x_start = -5, x_end = 5, num_segs = 10);
    network = layers.DenseLayer(incoming = smth_act_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, smth_act_layer.W];
    return network, encode_layer, input_var, aug_var, target_var, stack_params;