当前位置: 首页>>代码示例>>Python>>正文


Python regularization.regularize_network_params方法代码示例

本文整理汇总了Python中lasagne.regularization.regularize_network_params方法的典型用法代码示例。如果您正苦于以下问题:Python regularization.regularize_network_params方法的具体用法?Python regularization.regularize_network_params怎么用?Python regularization.regularize_network_params使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在lasagne.regularization的用法示例。


在下文中一共展示了regularization.regularize_network_params方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: make_training_functions

# 需要导入模块: from lasagne import regularization [as 别名]
# 或者: from lasagne.regularization import regularize_network_params [as 别名]
def make_training_functions(network_layers, input_var, aug_var, target_var, stack_params, weight_decay):
    """Compile Theano functions for validation, full training, and stack-only training.

    `network_layers` is the tuple (encode_layer, hidden_layer, ago_layer, network).
    `weight_decay` scales an L2 penalty over all regularizable network parameters.
    `stack_params` are the parameters the stack-only trainer updates.

    Returns (val_fn, train_fn, stack_train_fn).
    """
    encode_layer, hidden_layer, ago_layer, network = network_layers

    # Deterministic output is reported by val_fn; the stochastic (training-mode)
    # output drives the loss used by all three compiled functions.
    det_output = lasagne.layers.get_output(network, deterministic=True)
    stoch_output = lasagne.layers.get_output(network, deterministic=False)

    l2_penalty = regularization.regularize_network_params(
        layer=network, penalty=regularization.l2, tags={'regularizable': True})
    total_loss = lasagne.objectives.squared_error(stoch_output, target_var).mean() \
        + weight_decay * l2_penalty

    trainable = layers.get_all_params(network, trainable=True)
    full_updates = lasagne.updates.nesterov_momentum(
        total_loss, trainable, learning_rate=0.00002, momentum=0.95)
    stack_only_updates = lasagne.updates.nesterov_momentum(
        total_loss, stack_params, learning_rate=0.00001, momentum=0.95)

    # Deterministic intermediate activations exposed by the validation function.
    encode = lasagne.layers.get_output(encode_layer, deterministic=True)
    hidden = lasagne.layers.get_output(hidden_layer, deterministic=True)
    smth_act = lasagne.layers.get_output(ago_layer, deterministic=True)

    val_fn = theano.function([input_var, aug_var, target_var],
                             [total_loss, encode, hidden, smth_act, det_output])
    train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                               updates=full_updates)
    stack_train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                                     updates=stack_only_updates)

    return val_fn, train_fn, stack_train_fn
开发者ID:SBU-BMI,项目名称:u24_lymphocyte,代码行数:24,代码来源:conv_sup_regression_hseg_4ch_ago.py

示例2: make_training_functions

# 需要导入模块: from lasagne import regularization [as 别名]
# 或者: from lasagne.regularization import regularize_network_params [as 别名]
def make_training_functions(network_layers, input_var, aug_var, target_var, stack_params, weight_decay, learning_rate):
    """Compile Theano functions for validation, full training, and stack-only training.

    Like the other variants, but the Nesterov-momentum learning rate is passed
    in via `learning_rate` (shared by both trainers) instead of being hard-coded.

    Returns (val_fn, train_fn, stack_train_fn).
    """
    encode_layer, hidden_layer, smth_act_layer, network = network_layers

    # Evaluation uses the deterministic forward pass; the loss is built from
    # the training-mode (non-deterministic) pass.
    det_output = lasagne.layers.get_output(network, deterministic=True)
    stoch_output = lasagne.layers.get_output(network, deterministic=False)

    l2_penalty = regularization.regularize_network_params(
        layer=network, penalty=regularization.l2, tags={'regularizable': True})
    total_loss = lasagne.objectives.squared_error(stoch_output, target_var).mean() \
        + weight_decay * l2_penalty

    trainable = layers.get_all_params(network, trainable=True)
    full_updates = lasagne.updates.nesterov_momentum(
        total_loss, trainable, learning_rate=learning_rate, momentum=0.95)
    stack_only_updates = lasagne.updates.nesterov_momentum(
        total_loss, stack_params, learning_rate=learning_rate, momentum=0.95)

    encode = lasagne.layers.get_output(encode_layer, deterministic=True)
    hidden = lasagne.layers.get_output(hidden_layer, deterministic=True)
    smth_act = lasagne.layers.get_output(smth_act_layer, deterministic=True)

    val_fn = theano.function([input_var, aug_var, target_var],
                             [total_loss, encode, hidden, smth_act, det_output])
    train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                               updates=full_updates)
    stack_train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                                     updates=stack_only_updates)

    return val_fn, train_fn, stack_train_fn
开发者ID:SBU-BMI,项目名称:u24_lymphocyte,代码行数:24,代码来源:conv_sup_regression_hseg_4ch_modelsel.py

示例3: make_training_functions

# 需要导入模块: from lasagne import regularization [as 别名]
# 或者: from lasagne.regularization import regularize_network_params [as 别名]
def make_training_functions(network_layers, input_var, aug_var, target_var, stack_params, weight_decay):
    """Compile Theano functions for validation, full training, and stack-only training.

    The loss here is built from the deterministic forward pass (no separate
    training-mode output), with an L2 weight-decay term over regularizable
    parameters.

    Returns (val_fn, train_fn, stack_train_fn).
    """
    encode_layer, hidden_layer, smth_act_layer, network = network_layers

    det_output = lasagne.layers.get_output(network, deterministic=True)

    l2_penalty = regularization.regularize_network_params(
        layer=network, penalty=regularization.l2, tags={'regularizable': True})
    total_loss = lasagne.objectives.squared_error(det_output, target_var).mean() \
        + weight_decay * l2_penalty

    trainable = layers.get_all_params(network, trainable=True)
    full_updates = lasagne.updates.nesterov_momentum(
        total_loss, trainable, learning_rate=0.00002, momentum=0.95)
    stack_only_updates = lasagne.updates.nesterov_momentum(
        total_loss, stack_params, learning_rate=0.00001, momentum=0.95)

    # Intermediate activations returned by the validation function.
    encode = lasagne.layers.get_output(encode_layer, deterministic=True)
    hidden = lasagne.layers.get_output(hidden_layer, deterministic=True)
    smth_act = lasagne.layers.get_output(smth_act_layer, deterministic=True)

    val_fn = theano.function([input_var, aug_var, target_var],
                             [total_loss, encode, hidden, smth_act, det_output])
    train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                               updates=full_updates)
    stack_train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                                     updates=stack_only_updates)

    return val_fn, train_fn, stack_train_fn
开发者ID:SBU-BMI,项目名称:u24_lymphocyte,代码行数:23,代码来源:conv_sup_regression_4ch.py

示例4: make_training_functions

# 需要导入模块: from lasagne import regularization [as 别名]
# 或者: from lasagne.regularization import regularize_network_params [as 别名]
def make_training_functions(network_layers, input_var, aug_var, target_var, stack_params, weight_decay):
    """Compile Theano functions for validation, full training, and stack-only training.

    `network_layers` is (encode_layer, hidden_layer, ago_layer, network); the
    ago-layer activations are exposed by val_fn alongside the loss and outputs.

    Returns (val_fn, train_fn, stack_train_fn).
    """
    encode_layer, hidden_layer, ago_layer, network = network_layers

    det_output = lasagne.layers.get_output(network, deterministic=True)

    # MSE plus weight-decay-scaled L2 over regularizable parameters.
    l2_penalty = regularization.regularize_network_params(
        layer=network, penalty=regularization.l2, tags={'regularizable': True})
    total_loss = lasagne.objectives.squared_error(det_output, target_var).mean() \
        + weight_decay * l2_penalty

    trainable = layers.get_all_params(network, trainable=True)
    full_updates = lasagne.updates.nesterov_momentum(
        total_loss, trainable, learning_rate=0.00002, momentum=0.95)
    stack_only_updates = lasagne.updates.nesterov_momentum(
        total_loss, stack_params, learning_rate=0.00001, momentum=0.95)

    encode = lasagne.layers.get_output(encode_layer, deterministic=True)
    hidden = lasagne.layers.get_output(hidden_layer, deterministic=True)
    smth_act = lasagne.layers.get_output(ago_layer, deterministic=True)

    val_fn = theano.function([input_var, aug_var, target_var],
                             [total_loss, encode, hidden, smth_act, det_output])
    train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                               updates=full_updates)
    stack_train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                                     updates=stack_only_updates)

    return val_fn, train_fn, stack_train_fn
开发者ID:SBU-BMI,项目名称:u24_lymphocyte,代码行数:23,代码来源:conv_sup_regression_4ch_ago.py

示例5: make_training_functions

# 需要导入模块: from lasagne import regularization [as 别名]
# 或者: from lasagne.regularization import regularize_network_params [as 别名]
def make_training_functions(network_layers, input_var, aug_var, target_var, stack_params, weight_decay):
    """Compile Theano functions for validation, full training, and stack-only training.

    Baseline variant: `network_layers` is (encode_layer, hidden_layer, network)
    and val_fn exposes only the encode/hidden activations plus the output.

    Returns (val_fn, train_fn, stack_train_fn).
    """
    encode_layer, hidden_layer, network = network_layers

    det_output = lasagne.layers.get_output(network, deterministic=True)

    l2_penalty = regularization.regularize_network_params(
        layer=network, penalty=regularization.l2, tags={'regularizable': True})
    total_loss = lasagne.objectives.squared_error(det_output, target_var).mean() \
        + weight_decay * l2_penalty

    trainable = layers.get_all_params(network, trainable=True)
    full_updates = lasagne.updates.nesterov_momentum(
        total_loss, trainable, learning_rate=0.00002, momentum=0.95)
    stack_only_updates = lasagne.updates.nesterov_momentum(
        total_loss, stack_params, learning_rate=0.00001, momentum=0.95)

    encode = lasagne.layers.get_output(encode_layer, deterministic=True)
    hidden = lasagne.layers.get_output(hidden_layer, deterministic=True)

    val_fn = theano.function([input_var, aug_var, target_var],
                             [total_loss, encode, hidden, det_output])
    train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                               updates=full_updates)
    stack_train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                                     updates=stack_only_updates)

    return val_fn, train_fn, stack_train_fn
开发者ID:SBU-BMI,项目名称:u24_lymphocyte,代码行数:22,代码来源:conv_sup_regression_baseline.py

示例6: make_training_functions

# 需要导入模块: from lasagne import regularization [as 别名]
# 或者: from lasagne.regularization import regularize_network_params [as 别名]
def make_training_functions(network_layers, input_var, aug_var, target_var, stack_params, weight_decay):
    """Compile Theano functions for validation, full training, and stack-only training.

    `network_layers` is (encode_layer, hidden_layer, he_layer, network); the
    he-layer activations are exposed by val_fn as `smth_act`.

    Returns (val_fn, train_fn, stack_train_fn).
    """
    encode_layer, hidden_layer, he_layer, network = network_layers

    det_output = lasagne.layers.get_output(network, deterministic=True)

    l2_penalty = regularization.regularize_network_params(
        layer=network, penalty=regularization.l2, tags={'regularizable': True})
    total_loss = lasagne.objectives.squared_error(det_output, target_var).mean() \
        + weight_decay * l2_penalty

    trainable = layers.get_all_params(network, trainable=True)
    full_updates = lasagne.updates.nesterov_momentum(
        total_loss, trainable, learning_rate=0.00002, momentum=0.95)
    stack_only_updates = lasagne.updates.nesterov_momentum(
        total_loss, stack_params, learning_rate=0.00001, momentum=0.95)

    encode = lasagne.layers.get_output(encode_layer, deterministic=True)
    hidden = lasagne.layers.get_output(hidden_layer, deterministic=True)
    smth_act = lasagne.layers.get_output(he_layer, deterministic=True)

    val_fn = theano.function([input_var, aug_var, target_var],
                             [total_loss, encode, hidden, smth_act, det_output])
    train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                               updates=full_updates)
    stack_train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                                     updates=stack_only_updates)

    return val_fn, train_fn, stack_train_fn
开发者ID:SBU-BMI,项目名称:u24_lymphocyte,代码行数:23,代码来源:conv_sup_regression_4ch_he.py

示例7: make_training_functions

# 需要导入模块: from lasagne import regularization [as 别名]
# 或者: from lasagne.regularization import regularize_network_params [as 别名]
def make_training_functions(network_layers, input_var, aug_var, target_var, stack_params, weight_decay):
    """Compile Theano functions for validation, full training, and stack-only training.

    `network_layers` is (encode_layer, hidden_layer, he_layer, network). The
    loss is built from the training-mode (non-deterministic) forward pass;
    val_fn reports that same loss but returns the deterministic output.

    Returns (val_fn, train_fn, stack_train_fn).
    """
    encode_layer, hidden_layer, he_layer, network = network_layers

    det_output = lasagne.layers.get_output(network, deterministic=True)
    stoch_output = lasagne.layers.get_output(network, deterministic=False)

    l2_penalty = regularization.regularize_network_params(
        layer=network, penalty=regularization.l2, tags={'regularizable': True})
    total_loss = lasagne.objectives.squared_error(stoch_output, target_var).mean() \
        + weight_decay * l2_penalty

    trainable = layers.get_all_params(network, trainable=True)
    full_updates = lasagne.updates.nesterov_momentum(
        total_loss, trainable, learning_rate=0.00002, momentum=0.95)
    stack_only_updates = lasagne.updates.nesterov_momentum(
        total_loss, stack_params, learning_rate=0.00001, momentum=0.95)

    encode = lasagne.layers.get_output(encode_layer, deterministic=True)
    hidden = lasagne.layers.get_output(hidden_layer, deterministic=True)
    smth_act = lasagne.layers.get_output(he_layer, deterministic=True)

    val_fn = theano.function([input_var, aug_var, target_var],
                             [total_loss, encode, hidden, smth_act, det_output])
    train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                               updates=full_updates)
    stack_train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                                     updates=stack_only_updates)

    return val_fn, train_fn, stack_train_fn
开发者ID:SBU-BMI,项目名称:u24_lymphocyte,代码行数:24,代码来源:conv_sup_regression_hseg_4ch_he.py

示例8: make_training_functions

# 需要导入模块: from lasagne import regularization [as 别名]
# 或者: from lasagne.regularization import regularize_network_params [as 别名]
def make_training_functions(network_layers, input_var, aug_var, target_var, stack_params, weight_decay):
    """Compile Theano functions for validation, full training, and stack-only training.

    `network_layers` is (encode_layer, hidden_layer, network). The loss uses
    the training-mode forward pass; val_fn returns the deterministic output.

    Returns (val_fn, train_fn, stack_train_fn).
    """
    encode_layer, hidden_layer, network = network_layers

    det_output = lasagne.layers.get_output(network, deterministic=True)
    stoch_output = lasagne.layers.get_output(network, deterministic=False)

    l2_penalty = regularization.regularize_network_params(
        layer=network, penalty=regularization.l2, tags={'regularizable': True})
    total_loss = lasagne.objectives.squared_error(stoch_output, target_var).mean() \
        + weight_decay * l2_penalty

    trainable = layers.get_all_params(network, trainable=True)
    full_updates = lasagne.updates.nesterov_momentum(
        total_loss, trainable, learning_rate=0.00002, momentum=0.95)
    stack_only_updates = lasagne.updates.nesterov_momentum(
        total_loss, stack_params, learning_rate=0.00001, momentum=0.95)

    encode = lasagne.layers.get_output(encode_layer, deterministic=True)
    hidden = lasagne.layers.get_output(hidden_layer, deterministic=True)

    val_fn = theano.function([input_var, aug_var, target_var],
                             [total_loss, encode, hidden, det_output])
    train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                               updates=full_updates)
    stack_train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                                     updates=stack_only_updates)

    return val_fn, train_fn, stack_train_fn
开发者ID:SBU-BMI,项目名称:u24_lymphocyte,代码行数:23,代码来源:conv_sup_regression_hseg_4ch_leaky.py

示例9: make_training_functions

# 需要导入模块: from lasagne import regularization [as 别名]
# 或者: from lasagne.regularization import regularize_network_params [as 别名]
def make_training_functions(network_layers, input_var, target_var, stack_params, weight_decay):
    """Compile Theano functions for validation, full training, and stack-only training.

    Variant without an augmentation input: the compiled functions take only
    (input_var, target_var). Both trainers use the same 1e-5 learning rate.

    Returns (val_fn, train_fn, stack_train_fn).
    """
    encode_layer, hidden_layer, smth_act_layer, network = network_layers

    det_output = lasagne.layers.get_output(network, deterministic=True)

    l2_penalty = regularization.regularize_network_params(
        layer=network, penalty=regularization.l2, tags={'regularizable': True})
    total_loss = lasagne.objectives.squared_error(det_output, target_var).mean() \
        + weight_decay * l2_penalty

    trainable = layers.get_all_params(network, trainable=True)
    full_updates = lasagne.updates.nesterov_momentum(
        total_loss, trainable, learning_rate=0.00001, momentum=0.95)
    stack_only_updates = lasagne.updates.nesterov_momentum(
        total_loss, stack_params, learning_rate=0.00001, momentum=0.95)

    encode = lasagne.layers.get_output(encode_layer, deterministic=True)
    hidden = lasagne.layers.get_output(hidden_layer, deterministic=True)
    smth_act = lasagne.layers.get_output(smth_act_layer, deterministic=True)

    val_fn = theano.function([input_var, target_var],
                             [total_loss, encode, hidden, smth_act, det_output])
    train_fn = theano.function([input_var, target_var], total_loss,
                               updates=full_updates)
    stack_train_fn = theano.function([input_var, target_var], total_loss,
                                     updates=stack_only_updates)

    return val_fn, train_fn, stack_train_fn
开发者ID:SBU-BMI,项目名称:u24_lymphocyte,代码行数:23,代码来源:conv_sup_large_regression_syn.py

示例10: make_training_functions

# 需要导入模块: from lasagne import regularization [as 别名]
# 或者: from lasagne.regularization import regularize_network_params [as 别名]
def make_training_functions(network, encode_layer, input_var, aug_var, target_var, stack_params, weight_decay):
    """Compile Theano functions for validation, full training, and stack-only training.

    Takes the network and encode layer directly (no layer tuple); val_fn
    returns only (loss, encode, output). Both trainers use a 1e-4 learning
    rate with Nesterov momentum.

    Returns (val_fn, train_fn, stack_train_fn).
    """
    det_output = lasagne.layers.get_output(network, deterministic=True)

    l2_penalty = regularization.regularize_network_params(
        layer=network, penalty=regularization.l2, tags={'regularizable': True})
    total_loss = lasagne.objectives.squared_error(det_output, target_var).mean() \
        + weight_decay * l2_penalty

    trainable = layers.get_all_params(network, trainable=True)
    full_updates = lasagne.updates.nesterov_momentum(
        total_loss, trainable, learning_rate=0.0001, momentum=0.95)
    stack_only_updates = lasagne.updates.nesterov_momentum(
        total_loss, stack_params, learning_rate=0.0001, momentum=0.95)

    encode = lasagne.layers.get_output(encode_layer, deterministic=True)

    val_fn = theano.function([input_var, aug_var, target_var],
                             [total_loss, encode, det_output])
    train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                               updates=full_updates)
    stack_train_fn = theano.function([input_var, aug_var, target_var], total_loss,
                                     updates=stack_only_updates)

    return val_fn, train_fn, stack_train_fn
开发者ID:SBU-BMI,项目名称:u24_lymphocyte,代码行数:19,代码来源:conv_sup_regression.py


注:本文中的lasagne.regularization.regularize_network_params方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。