

Python nonlinearities.linear Method Code Examples

This article collects typical usage examples of the lasagne.nonlinearities.linear method in Python. If you are wondering what nonlinearities.linear does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples from the lasagne.nonlinearities module that the method belongs to.


The following shows 15 code examples of nonlinearities.linear, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
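Before the individual examples, here is a minimal sketch of the pattern they all share: nonlinearities.linear is the identity activation (linear(x) simply returns x) and is passed to a layer via the nonlinearity argument. This sketch assumes lasagne and Theano are installed; the input shape and unit count are purely illustrative and are not taken from any of the projects below.

import theano.tensor as T
from lasagne import layers
from lasagne.nonlinearities import linear

# linear is the identity function, so this DenseLayer applies no squashing,
# which is the usual choice for a regression output or a pre-activation layer.
input_var = T.tensor4('inputs')
l_in = layers.InputLayer(shape=(None, 3, 32, 32), input_var=input_var)
l_out = layers.DenseLayer(l_in, num_units=10, nonlinearity=linear)
prediction = layers.get_output(l_out)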

Example 1: build_network_from_ae

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');

    ae = pickle.load(open('model/conv_ae.pkl', 'rb'));

    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape=(None, 3, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;

    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);

    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 100, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);

    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 100, nonlinearity = linear);
    hidden_smth_layer = SmthActLayer(incoming = hidden_layer, x_start = -5, x_end = 5, num_segs = 10);
    network = layers.DenseLayer(incoming = hidden_smth_layer, num_units = classn, nonlinearity = linear);

    return network, encode_layer, input_var, aug_var, target_var; 
Developer ID: SBU-BMI, Project: u24_lymphocyte, Lines of code: 27, Source: conv_sup_regression.py

Example 2: residual_block

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def residual_block(resnet_in, num_styles=None, num_filters=None, filter_size=3, stride=1):
	if num_filters == None:
		num_filters = resnet_in.output_shape[1]

	conv1 = style_conv_block(resnet_in, num_styles, num_filters, filter_size, stride)
	conv2 = style_conv_block(conv1, num_styles, num_filters, filter_size, stride, linear)
	res_block = ElemwiseSumLayer([conv2, resnet_in])

	return res_block 
Developer ID: joelmoniz, Project: gogh-figure, Lines of code: 11, Source: layers.py

Example 3: build_network_from_ae

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');

    ae = pickle.load(open('model_fullsize_nopool_4ch/conv_ae.pkl', 'rb'));

    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 50, 50), input_var = input_var);
    first_layer.input_layer = input_layer;

    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);

    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);

    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];

    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    ago_layer = AgoLayer(incoming = hidden_layer, num_segs = 5);
    network = layers.DenseLayer(incoming = ago_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, ago_layer.W];

    return (encode_layer, hidden_layer, ago_layer, network), input_var, aug_var, target_var, stack_params; 
Developer ID: SBU-BMI, Project: u24_lymphocyte, Lines of code: 32, Source: conv_sup_regression_hseg_4ch_ago.py

Example 4: build_network_from_ae

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn, model_param):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');

    ae = pickle.load(open('model_fullsize_nopool_4ch/conv_ae.pkl', 'rb'));

    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 50, 50), input_var = input_var);
    first_layer.input_layer = input_layer;

    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);

    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);

    # smooth activation function
    smth_type, num_units, num_segs, seg_start_end, _ = model_param;
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = num_units, nonlinearity = linear);
    if smth_type == 1:
        smth_act_layer = SmthAct1Layer(incoming = hidden_layer, x_start = -seg_start_end, x_end = seg_start_end, num_segs = num_segs);
    else:
        smth_act_layer = SmthAct2Layer(incoming = hidden_layer, x_start = -seg_start_end, x_end = seg_start_end, num_segs = num_segs);
    network = SumLayer(incoming = smth_act_layer);
    stack_params = [network.b, hidden_layer.W, hidden_layer.b, smth_act_layer.W];

    return (encode_layer, hidden_layer, smth_act_layer, network), input_var, aug_var, target_var, stack_params; 
Developer ID: SBU-BMI, Project: u24_lymphocyte, Lines of code: 31, Source: conv_sup_regression_hseg_4ch_modelsel.py

Example 5: build_network_from_ae

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');

    ae = pickle.load(open('model_4ch/conv_ae.pkl', 'rb'));

    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;

    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);

    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);

    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];

    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    smth_act_layer = SmthAct2Layer(incoming = hidden_layer, x_start = -10.0, x_end = 10.0, num_segs = 20);
    network = layers.DenseLayer(incoming = smth_act_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, smth_act_layer.W];

    return (encode_layer, hidden_layer, smth_act_layer, network), input_var, aug_var, target_var, stack_params; 
Developer ID: SBU-BMI, Project: u24_lymphocyte, Lines of code: 32, Source: conv_sup_regression_4ch.py

Example 6: build_network_from_ae

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');

    ae = pickle.load(open('model/conv_ae.pkl', 'rb'));

    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 3, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;

    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);

    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);

    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];

    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    smth_act_layer = SmthAct2Layer(incoming = hidden_layer, x_start = -10.0, x_end = 10.0, num_segs = 20);
    network = layers.DenseLayer(incoming = smth_act_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, smth_act_layer.W];

    return (encode_layer, hidden_layer, smth_act_layer, network), input_var, aug_var, target_var, stack_params; 
Developer ID: SBU-BMI, Project: u24_lymphocyte, Lines of code: 32, Source: conv_sup_regression.py

Example 7: build_network_from_ae

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');

    ae = pickle.load(open('model_4ch/conv_ae.pkl', 'rb'));

    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;

    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);

    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);

    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];

    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    ago_layer = AgoLayer(incoming = hidden_layer, num_segs = 20);
    network = layers.DenseLayer(incoming = ago_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, ago_layer.W];

    return (encode_layer, hidden_layer, ago_layer, network), input_var, aug_var, target_var, stack_params; 
Developer ID: SBU-BMI, Project: u24_lymphocyte, Lines of code: 32, Source: conv_sup_regression_4ch_ago.py

Example 8: build_network_from_ae

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');

    ae = pickle.load(open('model/conv_ae.pkl', 'rb'));

    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 3, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;

    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);

    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);

    # conventional rectified linear units
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];

    # smooth activation function
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    #smth_act_layer = SmthAct2Layer(incoming = hidden_layer, x_start = -10.0, x_end = 10.0, num_segs = 20);
    #network = layers.DenseLayer(incoming = smth_act_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, smth_act_layer.W];

    return (encode_layer, hidden_layer, network), input_var, aug_var, target_var, stack_params; 
Developer ID: SBU-BMI, Project: u24_lymphocyte, Lines of code: 32, Source: conv_sup_regression_baseline.py

Example 9: build_network_from_ae

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');

    ae = pickle.load(open('model_4ch/conv_ae.pkl', 'rb'));

    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;

    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);

    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);

    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];

    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    he_layer = HeLayer(incoming = hidden_layer);
    network = layers.DenseLayer(incoming = he_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, he_layer.W];

    return (encode_layer, hidden_layer, he_layer, network), input_var, aug_var, target_var, stack_params; 
Developer ID: SBU-BMI, Project: u24_lymphocyte, Lines of code: 32, Source: conv_sup_regression_4ch_he.py

Example 10: build_network_from_ae

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');

    ae = pickle.load(open('model_fullsize_nopool_4ch/conv_ae.pkl', 'rb'));

    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 50, 50), input_var = input_var);
    first_layer.input_layer = input_layer;

    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);

    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);

    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];

    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    he_layer = HeLayer(incoming = hidden_layer);
    network = layers.DenseLayer(incoming = he_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, he_layer.W];

    return (encode_layer, hidden_layer, he_layer, network), input_var, aug_var, target_var, stack_params; 
Developer ID: SBU-BMI, Project: u24_lymphocyte, Lines of code: 32, Source: conv_sup_regression_hseg_4ch_he.py

Example 11: build_network_from_ae

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');

    ae = pickle.load(open('model_fullsize_nopool_4ch/conv_ae.pkl', 'rb'));

    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 50, 50), input_var = input_var);
    first_layer.input_layer = input_layer;

    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);

    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);

    # conventional rectified linear units
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = very_leaky_rectify);
    network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];

    # smooth activation function
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    #he_layer = HeLayer(incoming = hidden_layer);
    #network = layers.DenseLayer(incoming = he_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, he_layer.W];

    return (encode_layer, hidden_layer, network), input_var, aug_var, target_var, stack_params; 
Developer ID: SBU-BMI, Project: u24_lymphocyte, Lines of code: 32, Source: conv_sup_regression_hseg_4ch_leaky.py

Example 12: build_network_from_ae

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');

    ae = pickle.load(open('model_fullsize_nopool_4ch/conv_ae.pkl', 'rb'));

    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 50, 50), input_var = input_var);
    first_layer.input_layer = input_layer;

    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);

    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);

    # conventional rectified linear units
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];

    # smooth activation function
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    #he_layer = HeLayer(incoming = hidden_layer);
    #network = layers.DenseLayer(incoming = he_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, he_layer.W];

    return (encode_layer, hidden_layer, network), input_var, aug_var, target_var, stack_params; 
Developer ID: SBU-BMI, Project: u24_lymphocyte, Lines of code: 32, Source: conv_sup_regression_hseg_4ch_relu.py

Example 13: build_network_from_ae

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');

    ae = pickle.load(open('model_4ch_rot/conv_ae.pkl', 'rb'));

    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 4, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;

    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);

    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);

    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];

    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 200, nonlinearity = linear);
    smth_act_layer = SmthAct2Layer(incoming = hidden_layer, x_start = -10.0, x_end = 10.0, num_segs = 20);
    network = layers.DenseLayer(incoming = smth_act_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, smth_act_layer.W];

    return (encode_layer, hidden_layer, smth_act_layer, network), input_var, aug_var, target_var, stack_params; 
Developer ID: SBU-BMI, Project: u24_lymphocyte, Lines of code: 32, Source: conv_sup_regression_4ch_rot.py

Example 14: build_network_from_ae

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    target_var = T.matrix('targets');

    ae = pickle.load(open('model_large/conv_ae.pkl', 'rb'));

    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 3, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;

    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];

    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = encode_layer, num_units = 200, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];

    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = encode_layer, num_units = 200, nonlinearity = linear);
    smth_act_layer = SmthAct2Layer(incoming = hidden_layer, x_start = -10.0, x_end = 10.0, num_segs = 20);
    network = layers.DenseLayer(incoming = smth_act_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, smth_act_layer.W];

    return (encode_layer, hidden_layer, smth_act_layer, network), input_var, target_var, stack_params; 
Developer ID: SBU-BMI, Project: u24_lymphocyte, Lines of code: 28, Source: conv_sup_large_regression_syn.py

Example 15: build_network_from_ae

# Required module import: from lasagne import nonlinearities [as alias]
# Or: from lasagne.nonlinearities import linear [as alias]
def build_network_from_ae(classn):
    input_var = T.tensor4('inputs');
    aug_var = T.matrix('aug_var');
    target_var = T.matrix('targets');

    ae = pickle.load(open('model/conv_ae.pkl', 'rb'));

    input_layer_index = map(lambda pair : pair[0], ae.layers).index('input');
    first_layer = ae.get_all_layers()[input_layer_index + 1];
    input_layer = layers.InputLayer(shape = (None, 3, 32, 32), input_var = input_var);
    first_layer.input_layer = input_layer;

    encode_layer_index = map(lambda pair : pair[0], ae.layers).index('encode_layer');
    encode_layer = ae.get_all_layers()[encode_layer_index];
    aug_layer = layers.InputLayer(shape=(None, classn), input_var = aug_var);

    cat_layer = lasagne.layers.ConcatLayer([encode_layer, aug_layer], axis = 1);

    # conventional rectified linear units
    #hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 100, nonlinearity = rectify);
    #network = layers.DenseLayer(incoming = hidden_layer, num_units = classn, nonlinearity = linear);
    #stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b];

    # smooth activation function
    hidden_layer = layers.DenseLayer(incoming = cat_layer, num_units = 100, nonlinearity = linear);
    smth_act_layer = SmthAct1Layer(incoming = hidden_layer, x_start = -5, x_end = 5, num_segs = 10);
    network = layers.DenseLayer(incoming = smth_act_layer, num_units = classn, nonlinearity = linear);
    stack_params = [network.W, network.b, hidden_layer.W, hidden_layer.b, smth_act_layer.W];

    return network, encode_layer, input_var, aug_var, target_var, stack_params; 
Developer ID: SBU-BMI, Project: u24_lymphocyte, Lines of code: 32, Source: conv_sup_regression.py


Note: The lasagne.nonlinearities.linear examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers, and the copyright of the source code remains with the original authors. Please refer to the License of the corresponding project before distributing or using the code; do not republish without permission.