This article collects typical usage examples of the Python class neuralnilm.Net. If you are wondering what the Net class does, how to use it, or what real code using it looks like, the curated class examples below should help.
Twenty code examples of the Net class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
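Before the individual examples, here is a minimal sketch of the construction pattern most of the snippets below share: deep-copy a shared configuration dict, attach an experiment name and a data source, build the Net, and optionally reload previously saved parameters. The helper name build_net and the source_dict / net_dict arguments are illustrative placeholders, not part of the library; Net, RealApplianceSource and load_params are used exactly as they appear in the examples.

from copy import deepcopy
from neuralnilm import Net, RealApplianceSource

def build_net(name, source_dict, net_dict):
    # deep-copy the shared config so each experiment can modify it independently
    source = RealApplianceSource(**deepcopy(source_dict))
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    net = Net(**net_dict_copy)
    # optionally resume from a previously saved training iteration
    net.load_params(5000)
    return net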
Example 1: exp_a
def exp_a(name):
# ReLU hidden layers
# linear output
# output one appliance
# 0% skip prob for first appliance
# 100% skip prob for other appliances
# input is diff
global source
source_dict_copy = deepcopy(source_dict)
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
net_dict_copy['layers_config'] = [
{
'type': BidirectionalRecurrentLayer,
'num_units': 50,
'W_in_to_hid': Normal(std=1),
'W_hid_to_hid': Identity(scale=0.9),
'nonlinearity': rectify,
'learn_init': False,
'precompute_input': True
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': None,
'W': Normal(std=1/sqrt(50))
}
]
net = Net(**net_dict_copy)
net.load_params(5000)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 35, Source: e417.py
Example 2: exp_c
def exp_c(name):
global source
MAX_TARGET_POWER = 200
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
logger=logging.getLogger(name),
appliances=[
'HTPC',
'dish washer',
['fridge freezer', 'fridge', 'freezer'],
['washer dryer', 'washing machine'],
'kettle'
],
max_appliance_powers=[MAX_TARGET_POWER, 2500, 300, 2400, 2600],
on_power_thresholds=[5] * 5,
min_on_durations=[60, 1800, 60, 1800, 30],
min_off_durations=[12, 1800, 12, 600, 1],
seq_length=2048
))
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
plotter=StartEndMeanPlotter(
n_seq_to_plot=32, max_target_power=MAX_TARGET_POWER),
learning_rate_changes_by_iteration={
150000: 1e-4,
275000: 1e-5
}
))
net = Net(**net_dict_copy)
net.load_params(146758)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 34, Source: e545.py
Example 3: exp_a
def exp_a(name):
# 5 appliances
global source
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
appliances=[
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television',
'dish washer',
['washer dryer', 'washing machine']
],
skip_probability=0.7
))
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
net_dict_copy['layers_config'].extend([
{
'type': MixtureDensityLayer,
'num_units': source.n_outputs,
'num_components': 2
}
])
net = Net(**net_dict_copy)
net.load_params(iteration=4000)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 30, Source: e291.py
Example 4: exp_a
def exp_a(name):
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=multi_source,
plotter=StartEndMeanPlotter(
n_seq_to_plot=32, max_target_power=MAX_TARGET_POWER)
))
net = Net(**net_dict_copy)
net.load_params(350000)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 11, Source: e550.py
Example 5: exp_c
def exp_c(name):
global source
source_dict_copy = deepcopy(source_dict)
source_dict_copy['random_window'] = 256
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
learning_rate=1e-5
))
N = 512 * 8
output_shape = source.output_shape_after_processing()
net_dict_copy['layers_config'] = [
{
'type': DenseLayer,
'num_units': N * 2,
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': N,
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': N // 2,
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': N // 4,
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': output_shape[1] * output_shape[2],
'nonlinearity': sigmoid
}
]
net = Net(**net_dict_copy)
net.load_params(30000)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 43, Source: e359.py
Example 6: exp_a
def exp_a(name):
global source
source_dict_copy = deepcopy(source_dict)
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
net_dict_copy['layers_config'].extend([
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
])
net = Net(**net_dict_copy)
net.load_params(1000)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 19, Source: e377.py
Example 7: exp_a
def exp_a(name):
# 5 appliances
global source
source_dict_copy = deepcopy(source_dict)
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
net_dict_copy['layers_config'].extend([
{
'type': MixtureDensityLayer,
'num_units': source.n_outputs,
'num_components': 2
}
])
net = Net(**net_dict_copy)
net.load_params(397)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 20, Source: e292.py
Example 8: exp_a
def exp_a(name):
global source
source_dict_copy = deepcopy(source_dict)
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
N = 50
net_dict_copy['layers_config'] = [
{
'type': BidirectionalRecurrentLayer,
'num_units': N,
'gradient_steps': GRADIENT_STEPS,
'W_in_to_hid': Normal(std=1.),
'nonlinearity': tanh
},
{
'type': FeaturePoolLayer,
'ds': 4, # pool size: number of consecutive elements pooled together
'axis': 1, # pool over the time axis
'pool_function': T.max
},
{
'type': BidirectionalRecurrentLayer,
'num_units': N,
'gradient_steps': GRADIENT_STEPS,
'W_in_to_hid': Normal(std=1/sqrt(N)),
'nonlinearity': tanh
},
{
'type': MixtureDensityLayer,
'num_units': source.n_outputs,
'num_components': 2
}
]
net = Net(**net_dict_copy)
net.load_params(iteration=5000)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 40, Source: e280.py
Example 9: exp_a
def exp_a(name):
global source
# source_dict_copy = deepcopy(source_dict)
# source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
N = 512
output_shape = source.output_shape_after_processing()
net_dict_copy['layers_config'] = [
{
'type': DenseLayer,
'num_units': N,
'W': Normal(std=1/sqrt(N)),
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': N // 2,
'W': Normal(std=1/sqrt(N)),
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': N // 4,
'W': Normal(std=1/sqrt(N // 2)),
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': output_shape[1] * output_shape[2],
'W': Normal(std=1/sqrt(N // 4)),
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
net.load_params(25000)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 40, Source: e346.py
Example 10: exp_a
def exp_a(name):
global source
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(logger=logging.getLogger(name)))
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(experiment_name=name, source=source))
net_dict_copy["layers_config"] = [
{"type": DimshuffleLayer, "pattern": (0, 2, 1)}, # (batch, features, time)
{"type": PadLayer, "width": 4},
{
"type": Conv1DLayer, # convolve over the time axis
"num_filters": 16,
"filter_size": 4,
"stride": 1,
"nonlinearity": None,
"border_mode": "valid",
},
{
"type": Conv1DLayer, # convolve over the time axis
"num_filters": 16,
"filter_size": 4,
"stride": 1,
"nonlinearity": None,
"border_mode": "valid",
},
{"type": DimshuffleLayer, "pattern": (0, 2, 1), "label": "dimshuffle3"}, # back to (batch, time, features)
{"type": DenseLayer, "num_units": 512 * 16, "nonlinearity": rectify, "label": "dense0"},
{"type": DenseLayer, "num_units": 512 * 8, "nonlinearity": rectify, "label": "dense1"},
{"type": DenseLayer, "num_units": 512 * 4, "nonlinearity": rectify, "label": "dense2"},
{"type": DenseLayer, "num_units": 512, "nonlinearity": rectify},
{"type": DenseLayer, "num_units": 3, "nonlinearity": None},
]
net = Net(**net_dict_copy)
net.load_params(300000)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 36, Source: disag_545c.py
Example 11: get_net
def get_net(appliance, architecture):
"""
Parameters
----------
appliance : string
architecture : {'rnn', 'ae', 'rectangles'}
"""
NET_DICTS = {
'rnn': net_dict_rnn,
'ae': net_dict_ae,
'rectangles': net_dict_rectangles
}
net_dict_func = NET_DICTS[architecture]
source = get_source(
appliance,
logger,
target_is_start_and_end_and_mean=(architecture == 'rectangles'),
is_rnn=(architecture == 'rnn'),
window_per_building={ # just load a tiny bit of data. Won't be used.
1: ("2013-04-12", "2013-05-12"),
2: ("2013-05-22", "2013-06-22"),
3: ("2013-02-27", "2013-03-27"),
4: ("2013-03-09", "2013-04-09"),
5: ("2014-06-29", "2014-07-29")
},
source_type='real_appliance_source',
filename=UKDALE_FILENAME
)
seq_length = source.seq_length
net_dict = net_dict_func(seq_length)
if appliance == 'dish washer' and architecture == 'rectangles':
epochs = 200000
net_dict.pop('epochs')
else:
epochs = net_dict.pop('epochs')
net_dict_copy = deepcopy(net_dict)
experiment_name = EXPERIMENT + "_" + appliance + "_" + architecture
net_dict_copy.update(dict(
source=source,
logger=logger,
experiment_name=experiment_name
))
net = Net(**net_dict_copy)
net.plotter.max_target_power = source.max_appliance_powers.values()[0]
net.load_params(iteration=epochs,
path=join(NET_BASE_PATH, experiment_name))
net.print_net()
net.compile()
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 49, Source: disag_567.py
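A hypothetical call to the function above (assuming UKDALE_FILENAME, NET_BASE_PATH and the other module-level constants are configured as in the source file) passes an appliance name and one of the three architecture keys from the docstring:

# Hypothetical usage of get_net() as defined above; returns a compiled
# autoencoder network for the kettle with its saved parameters loaded.
kettle_ae_net = get_net('kettle', 'ae')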
Example 12: exp_a
def exp_a(name):
global source
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
logger=logging.getLogger(name)
))
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
net_dict_copy['layers_config'] = [
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # (batch, features, time)
},
{
'type': PadLayer,
'width': 4
},
{
'type': Conv1DLayer, # convolve over the time axis
'num_filters': 16,
'filter_size': 4,
'stride': 1,
'nonlinearity': None,
'border_mode': 'valid'
},
{
'type': Conv1DLayer, # convolve over the time axis
'num_filters': 16,
'filter_size': 4,
'stride': 1,
'nonlinearity': None,
'border_mode': 'valid'
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1), # back to (batch, time, features)
'label': 'dimshuffle3'
},
{
'type': DenseLayer,
'num_units': 512 * 16,
'nonlinearity': rectify,
'label': 'dense0'
},
{
'type': DenseLayer,
'num_units': 512 * 8,
'nonlinearity': rectify,
'label': 'dense1'
},
{
'type': DenseLayer,
'num_units': 512 * 4,
'nonlinearity': rectify,
'label': 'dense2'
},
{
'type': DenseLayer,
'num_units': 512,
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': 3,
'nonlinearity': None
}
]
net = Net(**net_dict_copy)
net.load_params(300000)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 74, Source: disag_545b.py
Example 13: exp_a
def exp_a(name):
global source
source_dict_copy = deepcopy(source_dict)
source = SameLocation(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
NUM_FILTERS = 4
net_dict_copy['layers_config'] = [
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # (batch, features, time)
},
{
'label': 'conv0',
'type': Conv1DLayer, # convolve over the time axis
'num_filters': NUM_FILTERS,
'filter_length': 4,
'stride': 1,
'nonlinearity': None,
'border_mode': 'valid'
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # back to (batch, time, features)
},
{
'label': 'dense0',
'type': DenseLayer,
'num_units': (SEQ_LENGTH - 3) * NUM_FILTERS,
'nonlinearity': rectify
},
{
'label': 'dense1',
'type': DenseLayer,
'num_units': SEQ_LENGTH - 3,
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': (SEQ_LENGTH - 3) * NUM_FILTERS,
'nonlinearity': rectify
},
{
'type': ReshapeLayer,
'shape': (N_SEQ_PER_BATCH, SEQ_LENGTH - 3, NUM_FILTERS)
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # (batch, features, time)
},
{
'type': DeConv1DLayer,
'num_output_channels': 1,
'filter_length': 4,
'stride': 1,
'nonlinearity': None,
'border_mode': 'full'
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # back to (batch, time, features)
}
]
net = Net(**net_dict_copy)
net.load_params(15000)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 69, Source: e454.py
Example 14: RealApplianceSource
from __future__ import print_function, division
from neuralnilm import Net, RealApplianceSource
from lasagne.nonlinearities import sigmoid
source = RealApplianceSource(
'/data/dk3810/ukdale.h5',
['fridge freezer', 'hair straighteners', 'television'],
max_input_power=1000, max_output_power=300,
window=("2013-06-01", "2014-06-01")
)
net = Net(
source=source,
n_cells_per_hidden_layer=[50, 50, 50],
output_nonlinearity=sigmoid,
learning_rate=1e-1,
n_dense_cells_per_layer=50
)
net.fit(n_iterations=1600)
net.plot_costs()
net.plot_estimates()
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 22, Source: e37.py
Example 15: Net
net = Net(
experiment_name="e49b",
source=source,
learning_rate=1e-1,
save_plot_interval=50,
loss_function=crossentropy,
layers_config=[
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': sigmoid,
'W': Uniform(25),
'b': Uniform(25)
},
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': sigmoid,
'W': Uniform(10),
'b': Uniform(10)
},
{
'type': LSTMLayer,
'num_units': 20,
'W_in_to_cell': Uniform(5)
},
{
'type': ReshapeLayer,
'shape': (5, 20, 1000)
},
{
'type': Conv1DLayer,
'num_filters': 20,
'filter_length': 5,
'stride': 5
},
{
'type': ReshapeLayer,
'shape': (5, 200, 20)
},
{
'type': LSTMLayer,
'num_units': 40,
'W_in_to_cell': Uniform(5)
},
{
'type': ReshapeLayer,
'shape': (5, 40, 200)
},
{
'type': Conv1DLayer,
'num_filters': 40,
'filter_length': 5,
'stride': 5
},
{
'type': ReshapeLayer,
'shape': (5, 40, 40)
},
{
'type': LSTMLayer,
'num_units': 80,
'W_in_to_cell': Uniform(5)
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': sigmoid
}
]
)
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 71, Source: e49.py
Example 16: exp_a
def exp_a(name):
logger = logging.getLogger(name)
real_appliance_source1 = RealApplianceSource(
logger=logger,
filename=UKDALE_FILENAME,
appliances=[
TARGET_APPLIANCE,
['fridge freezer', 'fridge', 'freezer'],
'dish washer',
'kettle',
['washer dryer', 'washing machine']
],
max_appliance_powers=[MAX_TARGET_POWER, 300, 2500, 2600, 2400],
on_power_thresholds=[ON_POWER_THRESHOLD] + [10] * 4,
min_on_durations=[MIN_ON_DURATION, 60, 1800, 12, 1800],
min_off_durations=[MIN_OFF_DURATION, 12, 1800, 12, 600],
divide_input_by_max_input_power=False,
window_per_building=WINDOW_PER_BUILDING,
seq_length=SEQ_LENGTH,
output_one_appliance=True,
train_buildings=TRAIN_BUILDINGS,
validation_buildings=VALIDATION_BUILDINGS,
n_seq_per_batch=N_SEQ_PER_BATCH,
skip_probability=0.75,
skip_probability_for_first_appliance=SKIP_PROBABILITY_FOR_TARGET,
target_is_start_and_end_and_mean=True,
standardise_input=True,
input_stats=INPUT_STATS,
independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS
)
same_location_source1 = SameLocation(
logger=logger,
filename=UKDALE_FILENAME,
target_appliance=TARGET_APPLIANCE,
window_per_building=WINDOW_PER_BUILDING,
seq_length=SEQ_LENGTH,
train_buildings=TRAIN_BUILDINGS,
validation_buildings=VALIDATION_BUILDINGS,
n_seq_per_batch=N_SEQ_PER_BATCH,
skip_probability=SKIP_PROBABILITY_FOR_TARGET,
target_is_start_and_end_and_mean=True,
standardise_input=True,
offset_probability=1,
divide_target_by=MAX_TARGET_POWER,
input_stats=INPUT_STATS,
independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
on_power_threshold=ON_POWER_THRESHOLD,
min_on_duration=MIN_ON_DURATION,
min_off_duration=MIN_OFF_DURATION
)
multi_source = MultiSource(
sources=[
{
'source': real_appliance_source1,
'train_probability': 0.5,
'validation_probability': 0
},
{
'source': same_location_source1,
'train_probability': 0.5,
'validation_probability': 1
}
],
standardisation_source=same_location_source1
)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=multi_source,
plotter=StartEndMeanPlotter(
n_seq_to_plot=32,
n_training_examples_to_plot=16,
max_target_power=MAX_TARGET_POWER)
))
net = Net(**net_dict_copy)
net.load_params(11589)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 80, Source: e554.py
Example 17: exp_b
# ......... part of the code omitted here .........
standardise_input=True,
input_stats=INPUT_STATS,
independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
subsample_target=SUBSAMPLE_TARGET,
input_padding=INPUT_PADDING
)
same_location_source1 = SameLocation(
logger=logger,
filename=UKDALE_FILENAME,
target_appliance=TARGET_APPLIANCE,
window_per_building=WINDOW_PER_BUILDING,
seq_length=SEQ_LENGTH,
train_buildings=TRAIN_BUILDINGS,
validation_buildings=VALIDATION_BUILDINGS,
n_seq_per_batch=N_SEQ_PER_BATCH,
skip_probability=SKIP_PROBABILITY_FOR_TARGET,
standardise_input=True,
offset_probability=1,
divide_target_by=MAX_TARGET_POWER,
input_stats=INPUT_STATS,
independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
on_power_threshold=ON_POWER_THRESHOLD,
min_on_duration=MIN_ON_DURATION,
min_off_duration=MIN_OFF_DURATION,
include_all=True,
allow_incomplete=True,
subsample_target=SUBSAMPLE_TARGET,
input_padding=INPUT_PADDING
)
multi_source = MultiSource(
sources=[
{
'source': real_appliance_source1,
'train_probability': 0.5,
'validation_probability': 0
},
{
'source': same_location_source1,
'train_probability': 0.5,
'validation_probability': 1
}
],
standardisation_source=same_location_source1
)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
auto_reshape=True,
experiment_name=name,
source=multi_source,
plotter=Plotter(
n_seq_to_plot=32,
n_training_examples_to_plot=16
),
layers_config=[
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # (batch, features, time)
},
{
'type': Conv1DLayer, # convolve over the time axis
'num_filters': 16,
'filter_size': 4,
'stride': 1,
'nonlinearity': None,
'border_mode': 'same'
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1), # back to (batch, time, features)
'label': 'dimshuffle3'
},
{
'type': BLSTMLayer,
'num_units': 128,
'merge_mode': 'concatenate'
},
{
'type': BLSTMLayer,
'num_units': 256,
'merge_mode': 'concatenate'
},
{
'type': DenseLayer,
'num_units': 128,
'nonlinearity': tanh
},
{
'type': DenseLayer,
'num_units': 1,
'nonlinearity': None
}
]
))
net = Net(**net_dict_copy)
net.load_params(1500)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 101, Source: e565.py
Example 18: exp_z
def exp_z(name):
# N = 50, 5 layers (!), 2x2x subsampling
# avg valid cost = 0.4871760607
source_dict_copy = deepcopy(source_dict)
source_dict_copy['subsample_target'] = 4
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
updates=partial(nesterov_momentum, learning_rate=0.001),
epoch_callbacks={},
do_save_activations=False
))
N = 50
net_dict_copy['layers_config'] = [
{
'type': BidirectionalRecurrentLayer,
'num_units': N,
'gradient_steps': GRADIENT_STEPS,
'W_in_to_hid': Normal(std=1.),
'nonlinearity': tanh
},
{
'type': BidirectionalRecurrentLayer,
'num_units': N,
'gradient_steps': GRADIENT_STEPS,
'W_in_to_hid': Normal(std=1/sqrt(N)),
'nonlinearity': tanh
},
{
'type': FeaturePoolLayer,
'ds': 2, # pool size: number of consecutive elements pooled together
'axis': 1, # pool over the time axis
'pool_function': T.max
},
{
'type': BidirectionalRecurrentLayer,
'num_units': N,
'gradient_steps': GRADIENT_STEPS,
'W_in_to_hid': Normal(std=1/sqrt(N)),
'nonlinearity': tanh
},
{
'type': BidirectionalRecurrentLayer,
'num_units': N,
'gradient_steps': GRADIENT_STEPS,
'W_in_to_hid': Normal(std=1/sqrt(N)),
'nonlinearity': tanh
},
{
'type': FeaturePoolLayer,
'ds': 2, # pool size: number of consecutive elements pooled together
'axis': 1, # pool over the time axis
'pool_function': T.max
},
{
'type': BidirectionalRecurrentLayer,
'num_units': N,
'gradient_steps': GRADIENT_STEPS,
'W_in_to_hid': Normal(std=1/sqrt(N)),
'nonlinearity': tanh
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': None,
'W': Normal(std=(1/sqrt(N)))
}
]
net = Net(**net_dict_copy)
net.load_params('e277z.hdf5', 1500)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 73, Source: e277.py
Example 19: RealApplianceSource
source = RealApplianceSource(
'/data/dk3810/ukdale.h5',
['fridge freezer', 'hair straighteners', 'television'],
max_input_power=1000, max_appliance_powers=[300, 500, 200],
window=("2013-06-01", "2014-07-01"),
output_one_appliance=False,
boolean_targets=False,
min_on_duration=60
# sample_period=15, seq_length=400
)
net = Net(
experiment_name="e43a",
source=source,
n_cells_per_hidden_layer=[50,50,50],
output_nonlinearity=sigmoid,
learning_rate=1e-1,
n_dense_cells_per_layer=0,
# validation_interval=2,
save_plot_interval=50,
loss_function=crossentropy
)
# [200,200,200] n_dense_cells=200 got killed before training
net.fit()
#net.plot_costs()
#net.plot_estimates()
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 29, Source: e43a.py
Example 20: exp_a
def exp_a(name):
# tanh and softplus output
# sane inits for other layers
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
standardise_targets=True,
unit_variance_targets=True
))
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
loss_function=lambda x, t: mse(x, t).mean(),
learning_rate=1e-3,
learning_rate_changes_by_iteration={
1000: 1e-4,
2000: 1e-5
}
))
net_dict_copy['layers_config'] = [
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Uniform(25),
'b': Uniform(25)
},
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50)),
'b': Normal(std=1/sqrt(50))
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 40,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': Conv1DLayer,
'num_filters': 20,
'filter_length': 4,
'stride': 4,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50))
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 80,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
net.load_params(2000)
return net
Developer: mmottahedi, Project: neuralnilm_prototype, Lines of code: 78, Source: e406.py
Note: The neuralnilm.Net class examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The code snippets are drawn from open-source projects contributed by their respective authors, and copyright remains with the original authors; please follow each project's license when distributing or using the code, and do not republish without permission.