This page collects typical usage examples of the Python method neuralnilm.Net.load_params. If you are wondering what Net.load_params does and how to call it in practice, the hand-picked code examples below should help; you can also go on to explore further usage examples of the containing class, neuralnilm.Net.
Below are 15 code examples of the Net.load_params method, sorted by popularity by default.
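Before the individual examples, note the pattern they all share: deep-copy a configuration dict, set experiment_name and source, construct a Net from it, and then restore saved weights with load_params, either positionally (net.load_params(5000)) or with explicit iteration= and path= keywords as in Example 5. The sketch below only distils that pattern from the examples; net_dict, source, iteration and base_path are placeholders for each experiment's own values, not part of the neuralnilm API itself.

from copy import deepcopy
from os.path import join

from neuralnilm import Net


def build_pretrained_net(name, net_dict, source, iteration, base_path=None):
    # Minimal sketch distilled from the examples below; the arguments are
    # placeholders standing in for each experiment's own configuration.
    net_dict_copy = deepcopy(net_dict)  # never mutate the shared config dict
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    net = Net(**net_dict_copy)
    if base_path is None:
        net.load_params(iteration)  # e.g. net.load_params(5000)
    else:
        # Example 5 also passes an explicit directory via the path keyword.
        net.load_params(iteration=iteration, path=join(base_path, name))
    return net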
Example 1: exp_a
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
def exp_a(name):
# ReLU hidden layers
# linear output
# output one appliance
# 0% skip prob for first appliance
# 100% skip prob for other appliances
# input is diff
global source
source_dict_copy = deepcopy(source_dict)
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
net_dict_copy['layers_config'] = [
{
'type': BidirectionalRecurrentLayer,
'num_units': 50,
'W_in_to_hid': Normal(std=1),
'W_hid_to_hid': Identity(scale=0.9),
'nonlinearity': rectify,
'learn_init': False,
'precompute_input': True
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': None,
'W': Normal(std=1/sqrt(50))
}
]
net = Net(**net_dict_copy)
net.load_params(5000)
return net
Example 2: exp_c
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
def exp_c(name):
global source
MAX_TARGET_POWER = 200
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
logger=logging.getLogger(name),
appliances=[
'HTPC',
'dish washer',
['fridge freezer', 'fridge', 'freezer'],
['washer dryer', 'washing machine'],
'kettle'
],
max_appliance_powers=[MAX_TARGET_POWER, 2500, 300, 2400, 2600],
on_power_thresholds=[5] * 5,
min_on_durations=[60, 1800, 60, 1800, 30],
min_off_durations=[12, 1800, 12, 600, 1],
seq_length=2048
))
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
plotter=StartEndMeanPlotter(
n_seq_to_plot=32, max_target_power=MAX_TARGET_POWER),
learning_rate_changes_by_iteration={
150000: 1e-4,
275000: 1e-5
}
))
net = Net(**net_dict_copy)
net.load_params(146758)
return net
Example 3: exp_a
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
def exp_a(name):
# 5 appliances
global source
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
appliances=[
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television',
'dish washer',
['washer dryer', 'washing machine']
],
skip_probability=0.7
))
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
net_dict_copy['layers_config'].extend([
{
'type': MixtureDensityLayer,
'num_units': source.n_outputs,
'num_components': 2
}
])
net = Net(**net_dict_copy)
net.load_params(iteration=4000)
return net
Example 4: exp_a
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
def exp_a(name):
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=multi_source,
plotter=StartEndMeanPlotter(
n_seq_to_plot=32, max_target_power=MAX_TARGET_POWER)
))
net = Net(**net_dict_copy)
net.load_params(350000)
return net
Example 5: get_net
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
def get_net(appliance, architecture):
"""
Parameters
----------
appliance : string
architecture : {'rnn', 'ae', 'rectangles'}
"""
NET_DICTS = {
'rnn': net_dict_rnn,
'ae': net_dict_ae,
'rectangles': net_dict_rectangles
}
net_dict_func = NET_DICTS[architecture]
source = get_source(
appliance,
logger,
target_is_start_and_end_and_mean=(architecture == 'rectangles'),
is_rnn=(architecture == 'rnn'),
window_per_building={ # just load a tiny bit of data. Won't be used.
1: ("2013-04-12", "2013-05-12"),
2: ("2013-05-22", "2013-06-22"),
3: ("2013-02-27", "2013-03-27"),
4: ("2013-03-09", "2013-04-09"),
5: ("2014-06-29", "2014-07-29")
},
source_type='real_appliance_source',
filename=UKDALE_FILENAME
)
seq_length = source.seq_length
net_dict = net_dict_func(seq_length)
if appliance == 'dish washer' and architecture == 'rectangles':
epochs = 200000
net_dict.pop('epochs')
else:
epochs = net_dict.pop('epochs')
net_dict_copy = deepcopy(net_dict)
experiment_name = EXPERIMENT + "_" + appliance + "_" + architecture
net_dict_copy.update(dict(
source=source,
logger=logger,
experiment_name=experiment_name
))
net = Net(**net_dict_copy)
net.plotter.max_target_power = source.max_appliance_powers.values()[0]
net.load_params(iteration=epochs,
path=join(NET_BASE_PATH, experiment_name))
net.print_net()
net.compile()
return net
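A hypothetical call to the helper above might look like this (assuming EXPERIMENT, NET_BASE_PATH, UKDALE_FILENAME and the net_dict_* functions are defined in the surrounding module, as in this example; the appliance name is illustrative):

# Hypothetical usage of get_net() from Example 5.
net = get_net('kettle', architecture='rnn')
# The returned net is compiled, and its parameters are loaded from
# NET_BASE_PATH/<EXPERIMENT>_kettle_rnn at the architecture's configured
# number of epochs.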
Example 6: exp_c
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
def exp_c(name):
global source
source_dict_copy = deepcopy(source_dict)
source_dict_copy['random_window'] = 256
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
learning_rate=1e-5
))
N = 512 * 8
output_shape = source.output_shape_after_processing()
net_dict_copy['layers_config'] = [
{
'type': DenseLayer,
'num_units': N * 2,
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': N,
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': N // 2,
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': N // 4,
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': output_shape[1] * output_shape[2],
'nonlinearity': sigmoid
}
]
net = Net(**net_dict_copy)
net.load_params(30000)
return net
Example 7: exp_a
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
def exp_a(name):
global source
source_dict_copy = deepcopy(source_dict)
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
net_dict_copy['layers_config'].extend([
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
])
net = Net(**net_dict_copy)
net.load_params(1000)
return net
Example 8: exp_a
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
def exp_a(name):
# 5 appliances
global source
source_dict_copy = deepcopy(source_dict)
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
net_dict_copy['layers_config'].extend([
{
'type': MixtureDensityLayer,
'num_units': source.n_outputs,
'num_components': 2
}
])
net = Net(**net_dict_copy)
net.load_params(397)
return net
Example 9: exp_a
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
def exp_a(name):
global source
source_dict_copy = deepcopy(source_dict)
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
N = 50
net_dict_copy['layers_config'] = [
{
'type': BidirectionalRecurrentLayer,
'num_units': N,
'gradient_steps': GRADIENT_STEPS,
'W_in_to_hid': Normal(std=1.),
'nonlinearity': tanh
},
{
'type': FeaturePoolLayer,
'ds': 4, # number of feature maps to be pooled together
'axis': 1, # pool over the time axis
'pool_function': T.max
},
{
'type': BidirectionalRecurrentLayer,
'num_units': N,
'gradient_steps': GRADIENT_STEPS,
'W_in_to_hid': Normal(std=1/sqrt(N)),
'nonlinearity': tanh
},
{
'type': MixtureDensityLayer,
'num_units': source.n_outputs,
'num_components': 2
}
]
net = Net(**net_dict_copy)
net.load_params(iteration=5000)
return net
Example 10: exp_a
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
def exp_a(name):
global source
# source_dict_copy = deepcopy(source_dict)
# source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
N = 512
output_shape = source.output_shape_after_processing()
net_dict_copy['layers_config'] = [
{
'type': DenseLayer,
'num_units': N,
'W': Normal(std=1/sqrt(N)),
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': N // 2,
'W': Normal(std=1/sqrt(N)),
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': N // 4,
'W': Normal(std=1/sqrt(N // 2)),
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': output_shape[1] * output_shape[2],
'W': Normal(std=1/sqrt(N // 4)),
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
net.load_params(25000)
return net
Example 11: exp_a
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
def exp_a(name):
global source
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(logger=logging.getLogger(name)))
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(experiment_name=name, source=source))
net_dict_copy["layers_config"] = [
{"type": DimshuffleLayer, "pattern": (0, 2, 1)}, # (batch, features, time)
{"type": PadLayer, "width": 4},
{
"type": Conv1DLayer, # convolve over the time axis
"num_filters": 16,
"filter_size": 4,
"stride": 1,
"nonlinearity": None,
"border_mode": "valid",
},
{
"type": Conv1DLayer, # convolve over the time axis
"num_filters": 16,
"filter_size": 4,
"stride": 1,
"nonlinearity": None,
"border_mode": "valid",
},
{"type": DimshuffleLayer, "pattern": (0, 2, 1), "label": "dimshuffle3"}, # back to (batch, time, features)
{"type": DenseLayer, "num_units": 512 * 16, "nonlinearity": rectify, "label": "dense0"},
{"type": DenseLayer, "num_units": 512 * 8, "nonlinearity": rectify, "label": "dense1"},
{"type": DenseLayer, "num_units": 512 * 4, "nonlinearity": rectify, "label": "dense2"},
{"type": DenseLayer, "num_units": 512, "nonlinearity": rectify},
{"type": DenseLayer, "num_units": 3, "nonlinearity": None},
]
net = Net(**net_dict_copy)
net.load_params(300000)
return net
Example 12: exp_a
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
def exp_a(name):
logger = logging.getLogger(name)
real_appliance_source1 = RealApplianceSource(
logger=logger,
filename=UKDALE_FILENAME,
appliances=[
TARGET_APPLIANCE,
['fridge freezer', 'fridge', 'freezer'],
'dish washer',
'kettle',
['washer dryer', 'washing machine']
],
max_appliance_powers=[MAX_TARGET_POWER, 300, 2500, 2600, 2400],
on_power_thresholds=[ON_POWER_THRESHOLD] + [10] * 4,
min_on_durations=[MIN_ON_DURATION, 60, 1800, 12, 1800],
min_off_durations=[MIN_OFF_DURATION, 12, 1800, 12, 600],
divide_input_by_max_input_power=False,
window_per_building=WINDOW_PER_BUILDING,
seq_length=SEQ_LENGTH,
output_one_appliance=True,
train_buildings=TRAIN_BUILDINGS,
validation_buildings=VALIDATION_BUILDINGS,
n_seq_per_batch=N_SEQ_PER_BATCH,
skip_probability=0.75,
skip_probability_for_first_appliance=SKIP_PROBABILITY_FOR_TARGET,
target_is_start_and_end_and_mean=True,
standardise_input=True,
input_stats=INPUT_STATS,
independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS
)
same_location_source1 = SameLocation(
logger=logger,
filename=UKDALE_FILENAME,
target_appliance=TARGET_APPLIANCE,
window_per_building=WINDOW_PER_BUILDING,
seq_length=SEQ_LENGTH,
train_buildings=TRAIN_BUILDINGS,
validation_buildings=VALIDATION_BUILDINGS,
n_seq_per_batch=N_SEQ_PER_BATCH,
skip_probability=SKIP_PROBABILITY_FOR_TARGET,
target_is_start_and_end_and_mean=True,
standardise_input=True,
offset_probability=1,
divide_target_by=MAX_TARGET_POWER,
input_stats=INPUT_STATS,
independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
on_power_threshold=ON_POWER_THRESHOLD,
min_on_duration=MIN_ON_DURATION,
min_off_duration=MIN_OFF_DURATION
)
multi_source = MultiSource(
sources=[
{
'source': real_appliance_source1,
'train_probability': 0.5,
'validation_probability': 0
},
{
'source': same_location_source1,
'train_probability': 0.5,
'validation_probability': 1
}
],
standardisation_source=same_location_source1
)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=multi_source,
plotter=StartEndMeanPlotter(
n_seq_to_plot=32,
n_training_examples_to_plot=16,
max_target_power=MAX_TARGET_POWER)
))
net = Net(**net_dict_copy)
net.load_params(11589)
return net
Example 13: exp_b
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
#......... part of the code is omitted here .........
standardise_input=True,
input_stats=INPUT_STATS,
independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
subsample_target=SUBSAMPLE_TARGET,
input_padding=INPUT_PADDING
)
same_location_source1 = SameLocation(
logger=logger,
filename=UKDALE_FILENAME,
target_appliance=TARGET_APPLIANCE,
window_per_building=WINDOW_PER_BUILDING,
seq_length=SEQ_LENGTH,
train_buildings=TRAIN_BUILDINGS,
validation_buildings=VALIDATION_BUILDINGS,
n_seq_per_batch=N_SEQ_PER_BATCH,
skip_probability=SKIP_PROBABILITY_FOR_TARGET,
standardise_input=True,
offset_probability=1,
divide_target_by=MAX_TARGET_POWER,
input_stats=INPUT_STATS,
independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
on_power_threshold=ON_POWER_THRESHOLD,
min_on_duration=MIN_ON_DURATION,
min_off_duration=MIN_OFF_DURATION,
include_all=True,
allow_incomplete=True,
subsample_target=SUBSAMPLE_TARGET,
input_padding=INPUT_PADDING
)
multi_source = MultiSource(
sources=[
{
'source': real_appliance_source1,
'train_probability': 0.5,
'validation_probability': 0
},
{
'source': same_location_source1,
'train_probability': 0.5,
'validation_probability': 1
}
],
standardisation_source=same_location_source1
)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
auto_reshape=True,
experiment_name=name,
source=multi_source,
plotter=Plotter(
n_seq_to_plot=32,
n_training_examples_to_plot=16
),
layers_config=[
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # (batch, features, time)
},
{
'type': Conv1DLayer, # convolve over the time axis
'num_filters': 16,
'filter_size': 4,
'stride': 1,
'nonlinearity': None,
'border_mode': 'same'
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1), # back to (batch, time, features)
'label': 'dimshuffle3'
},
{
'type': BLSTMLayer,
'num_units': 128,
'merge_mode': 'concatenate'
},
{
'type': BLSTMLayer,
'num_units': 256,
'merge_mode': 'concatenate'
},
{
'type': DenseLayer,
'num_units': 128,
'nonlinearity': tanh
},
{
'type': DenseLayer,
'num_units': 1,
'nonlinearity': None
}
]
))
net = Net(**net_dict_copy)
net.load_params(1500)
return net
Example 14: exp_a
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
def exp_a(name):
global source
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
logger=logging.getLogger(name)
))
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
net_dict_copy['layers_config'] = [
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # (batch, features, time)
},
{
'type': PadLayer,
'width': 4
},
{
'type': Conv1DLayer, # convolve over the time axis
'num_filters': 16,
'filter_size': 4,
'stride': 1,
'nonlinearity': None,
'border_mode': 'valid'
},
{
'type': Conv1DLayer, # convolve over the time axis
'num_filters': 16,
'filter_size': 4,
'stride': 1,
'nonlinearity': None,
'border_mode': 'valid'
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1), # back to (batch, time, features)
'label': 'dimshuffle3'
},
{
'type': DenseLayer,
'num_units': 512 * 16,
'nonlinearity': rectify,
'label': 'dense0'
},
{
'type': DenseLayer,
'num_units': 512 * 8,
'nonlinearity': rectify,
'label': 'dense1'
},
{
'type': DenseLayer,
'num_units': 512 * 4,
'nonlinearity': rectify,
'label': 'dense2'
},
{
'type': DenseLayer,
'num_units': 512,
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': 3,
'nonlinearity': None
}
]
net = Net(**net_dict_copy)
net.load_params(300000)
return net
Example 15: exp_a
# Required import: from neuralnilm import Net [as alias]
# Or: from neuralnilm.Net import load_params [as alias]
def exp_a(name):
global source
source_dict_copy = deepcopy(source_dict)
source = SameLocation(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
NUM_FILTERS = 4
net_dict_copy['layers_config'] = [
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # (batch, features, time)
},
{
'label': 'conv0',
'type': Conv1DLayer, # convolve over the time axis
'num_filters': NUM_FILTERS,
'filter_length': 4,
'stride': 1,
'nonlinearity': None,
'border_mode': 'valid'
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # back to (batch, time, features)
},
{
'label': 'dense0',
'type': DenseLayer,
'num_units': (SEQ_LENGTH - 3) * NUM_FILTERS,
'nonlinearity': rectify
},
{
'label': 'dense1',
'type': DenseLayer,
'num_units': SEQ_LENGTH - 3,
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': (SEQ_LENGTH - 3) * NUM_FILTERS,
'nonlinearity': rectify
},
{
'type': ReshapeLayer,
'shape': (N_SEQ_PER_BATCH, SEQ_LENGTH - 3, NUM_FILTERS)
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # (batch, features, time)
},
{
'type': DeConv1DLayer,
'num_output_channels': 1,
'filter_length': 4,
'stride': 1,
'nonlinearity': None,
'border_mode': 'full'
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # back to (batch, time, features)
}
]
net = Net(**net_dict_copy)
net.load_params(15000)
return net