This article collects typical usage examples of the Python method pybrain.datasets.SequentialDataSet.addSample. If you have been wondering what SequentialDataSet.addSample does, how to call it, or where to find working examples, the curated code samples below should help. You may also want to read further about the containing class, pybrain.datasets.SequentialDataSet.
Below are 15 code examples of SequentialDataSet.addSample, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Python code samples.
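Before the examples, here is a minimal sketch of the pattern most of them share: fill a SequentialDataSet with (input, next-value) pairs via addSample, mark sequence boundaries with newSequence, and train a recurrent network on it. The sine-wave data, the 1-5-1 LSTM layout and the epoch count below are illustrative assumptions, not taken from any particular example.

# Minimal sketch; the toy data and network sizes are assumptions for illustration.
import numpy as np
from pybrain.datasets import SequentialDataSet
from pybrain.structure.modules import LSTMLayer
from pybrain.supervised import RPropMinusTrainer
from pybrain.tools.shortcuts import buildNetwork

data = np.sin(np.linspace(0, 20, 200))        # toy series to learn

ds = SequentialDataSet(1, 1)                  # 1 input dimension, 1 target dimension
ds.newSequence()                              # start an explicit sequence
for current, following in zip(data[:-1], data[1:]):
    ds.addSample(current, following)          # input -> next value as target

net = buildNetwork(1, 5, 1, hiddenclass=LSTMLayer,
                   outputbias=False, recurrent=True)
trainer = RPropMinusTrainer(net, dataset=ds)
trainer.trainEpochs(10)

net.reset()                                   # clear the LSTM state before predicting
print(net.activate([data[-1]]))               # one-step-ahead prediction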
Example 1: train
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def train(self, params):
    n = params['encoding_num']
    net = buildNetwork(n, params['num_cells'], n,
                       hiddenclass=LSTMLayer,
                       bias=True,
                       outputbias=params['output_bias'],
                       recurrent=True)
    net.reset()

    ds = SequentialDataSet(n, n)
    trainer = RPropMinusTrainer(net, dataset=ds)

    history = self.window(self.history, params)
    resets = self.window(self.resets, params)

    for i in xrange(1, len(history)):
        if not resets[i - 1]:
            ds.addSample(self.encoder.encode(history[i - 1]),
                         self.encoder.encode(history[i]))
        if resets[i]:
            ds.newSequence()

    if len(history) > 1:
        trainer.trainEpochs(params['num_epochs'])
        net.reset()

    # run the trained network through the history so its internal state is up to date
    for i in xrange(len(history) - 1):
        symbol = history[i]
        output = net.activate(self.encoder.encode(symbol))
        predictions = self.encoder.classify(output, num=params['num_predictions'])

        if resets[i]:
            net.reset()

    return net
Example 2: visulizeDataSet
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def visulizeDataSet(network, data, seqno, in_labels, out_labels):
    seq = data.getSequence(seqno)
    tmpDs = SequentialDataSet(data.indim, data.outdim)
    tmpDs.newSequence()

    for i in xrange(data.getSequenceLength(seqno)):
        tmpDs.addSample(seq[0][i], seq[1][i])

    nplots = len(in_labels) + len(out_labels)

    for i in range(len(in_labels)):
        p = PL.subplot(nplots, 1, i + 1)
        p.clear()
        p.plot(tmpDs['input'][:, i])
        p.set_ylabel(in_labels[i])

    for i in range(len(out_labels)):
        p = PL.subplot(nplots, 1, i + 1 + len(in_labels))
        p.clear()

        output = ModuleValidator.calculateModuleOutput(network, tmpDs)

        p.plot(tmpDs['target'][:, i], label='train')
        p.plot(output[:, i], label='sim')

        p.legend()
        p.set_ylabel(out_labels[i])
Example 3: main
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def main():
    generated_data = [0 for i in range(10000)]
    rate, data = get_data_from_wav("../../data/natabhairavi_violin.wav")
    data = data[1000:190000]
    print("Got wav")

    ds = SequentialDataSet(1, 1)
    for sample, next_sample in zip(data, cycle(data[1:])):
        ds.addSample(sample, next_sample)

    net = buildNetwork(1, 5, 1,
                       hiddenclass=LSTMLayer, outputbias=False, recurrent=True)

    trainer = RPropMinusTrainer(net, dataset=ds)
    train_errors = []  # save errors for plotting later
    EPOCHS_PER_CYCLE = 5
    CYCLES = 10
    EPOCHS = EPOCHS_PER_CYCLE * CYCLES
    for i in xrange(CYCLES):
        trainer.trainEpochs(EPOCHS_PER_CYCLE)
        train_errors.append(trainer.testOnData())
        epoch = (i + 1) * EPOCHS_PER_CYCLE
        print("\r epoch {}/{}".format(epoch, EPOCHS), end="")
        stdout.flush()

    # predict new values by feeding each prediction back as the next input
    old_sample = [100]
    for i in xrange(len(generated_data)):
        new_sample = net.activate(old_sample)
        old_sample = new_sample
        generated_data[i] = new_sample[0]
        print(new_sample)

    wavfile.write("../../output/test.wav", rate, np.array(generated_data))
Example 4: train
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def train(self, params):
    self.net.reset()

    ds = SequentialDataSet(self.nDimInput, self.nDimOutput)
    trainer = RPropMinusTrainer(self.net, dataset=ds, verbose=False)

    history = self.window(self.history, params)
    resets = self.window(self.resets, params)

    for i in xrange(params['prediction_nstep'], len(history)):
        if not resets[i - 1]:
            ds.addSample(self.inputEncoder.encode(history[i - params['prediction_nstep']]),
                         self.outputEncoder.encode(history[i][0]))
        if resets[i]:
            ds.newSequence()

    # print ds.getSample(0)
    # print ds.getSample(1)
    # print ds.getSample(1000)
    # print " training data size", ds.getLength(), " len(history) ", len(history), " self.history ", len(self.history)
    # print ds

    if len(history) > 1:
        trainer.trainEpochs(params['num_epochs'])

    self.net.reset()
    for i in xrange(len(history) - params['prediction_nstep']):
        symbol = history[i]
        output = self.net.activate(ds.getSample(i)[0])

        if resets[i]:
            self.net.reset()
Example 5: train
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def train(self, params):
    """
    Train LSTM network on buffered dataset history
    After training, run LSTM on history[:-1] to get the state correct
    :param params:
    :return:
    """
    if params['reset_every_training']:
        n = params['encoding_num']
        self.net = buildNetwork(n, params['num_cells'], n,
                                hiddenclass=LSTMLayer,
                                bias=True,
                                outputbias=params['output_bias'],
                                recurrent=True)
        self.net.reset()

    # prepare training dataset
    ds = SequentialDataSet(params['encoding_num'], params['encoding_num'])
    history = self.window(self.history, params)
    resets = self.window(self.resets, params)

    for i in xrange(1, len(history)):
        if not resets[i - 1]:
            ds.addSample(self.encoder.encode(history[i - 1]),
                         self.encoder.encode(history[i]))
        if resets[i]:
            ds.newSequence()

    print "Train LSTM network on buffered dataset of length ", len(history)
    if params['num_epochs'] > 1:
        trainer = RPropMinusTrainer(self.net,
                                    dataset=ds,
                                    verbose=params['verbosity'] > 0)

        if len(history) > 1:
            trainer.trainEpochs(params['num_epochs'])

        # run network on buffered dataset after training to get the state right
        self.net.reset()
        for i in xrange(len(history) - 1):
            symbol = history[i]
            output = self.net.activate(self.encoder.encode(symbol))
            self.encoder.classify(output, num=params['num_predictions'])

            if resets[i]:
                self.net.reset()
    else:
        self.trainer.setData(ds)
        self.trainer.train()

        # run network on buffered dataset after training to get the state right
        self.net.reset()
        for i in xrange(len(history) - 1):
            symbol = history[i]
            output = self.net.activate(self.encoder.encode(symbol))
            self.encoder.classify(output, num=params['num_predictions'])

            if resets[i]:
                self.net.reset()
Example 6: create_train_set
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def create_train_set(consumption):
    # create train/test set
    global active_max
    ds = SequentialDataSet(1, 1)

    consumption_data = normalize(consumption)
    active_max = max(consumption_data[1], active_max)
    consumption = consumption_data[0]

    size = len(consumption)
    for i in range(0, size - 1):
        ds.addSample(consumption[i], consumption[i + 1])
    return ds
Example 7: buildAppropriateDataset
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def buildAppropriateDataset(module):
    """ build a sequential dataset with 2 sequences of 3 samples, with random input and target values,
    but the appropriate dimensions to be used on the provided module. """
    if module.sequential:
        d = SequentialDataSet(module.indim, module.outdim)
        for dummy in range(2):
            d.newSequence()
            for dummy in range(3):
                d.addSample(randn(module.indim), randn(module.outdim))
    else:
        d = SupervisedDataSet(module.indim, module.outdim)
        for dummy in range(3):
            d.addSample(randn(module.indim), randn(module.outdim))
    return d
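A possible way to exercise this helper, sketched under the assumption that a recurrent network built with buildNetwork reports itself as sequential; the network shape below is made up for illustration.

# Hypothetical driver for buildAppropriateDataset(); the network is an assumption.
from pybrain.structure.modules import LSTMLayer
from pybrain.tools.shortcuts import buildNetwork

rec_net = buildNetwork(3, 4, 2, hiddenclass=LSTMLayer, recurrent=True)
d = buildAppropriateDataset(rec_net)

print(d.getNumSequences())   # expect 2 sequences
print(len(d))                # expect 6 samples in total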
Example 8: train
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def train(d, cycles=100, epochs_per_cycle=7):
    ds = SequentialDataSet(1, 1)
    net = buildNetwork(1, 5, 1, hiddenclass=LSTMLayer, outputbias=False, recurrent=False)

    for sample, next_sample in zip(d, cycle(d[1:])):
        ds.addSample(sample, next_sample)

    trainer = RPropMinusTrainer(net, dataset=ds)
    train_errors = []  # save errors for plotting later
    for i in xrange(cycles):
        trainer.trainEpochs(epochs_per_cycle)
        train_errors.append(trainer.testOnData())
        stdout.flush()

    return net, train_errors
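A usage sketch for this train() helper: the sine-wave input and the plotting below are assumptions added for illustration, not part of the original example.

# Hypothetical caller for Example 8's train(); the input series is made up.
import numpy as np
import matplotlib.pyplot as plt

series = np.sin(np.linspace(0, 30, 300)).tolist()
net, train_errors = train(series, cycles=20, epochs_per_cycle=5)

plt.plot(range(len(train_errors)), train_errors)   # one error value per cycle
plt.xlabel('cycle')
plt.ylabel('error on training data')
plt.show()

net.reset()
print(net.activate([series[-1]]))   # one-step-ahead prediction from the trained net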
Example 9: create_train_set
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def create_train_set(open_price, close_price):
    global open_max
    global close_max
    ds = SequentialDataSet(1, 1)

    open_data = normalize(open_price)
    close_data = normalize(close_price)
    open_max = open_data[1]
    close_max = close_data[1]
    open_price = open_data[0]
    close_price = close_data[0]

    size = len(open_price)
    for i in range(0, size):
        ds.addSample(open_price[i], close_price[i])
    return ds
Example 10: getPyBrainDataSetScalarEncoder
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def getPyBrainDataSetScalarEncoder(sequence, nTrain, encoderInput, encoderOutput,
                                   predictionStep=1, useTimeOfDay=True, useDayOfWeek=True):
    """
    Use scalar encoder for the data
    :param sequence:
    :param nTrain:
    :param predictionStep:
    :param useTimeOfDay:
    :param useDayOfWeek:
    :return:
    """
    print "generate a pybrain dataset of sequences"
    print "the training data contains ", str(nTrain - predictionStep), "records"

    if encoderInput is None:
        inDim = 1 + int(useTimeOfDay) + int(useDayOfWeek)
    else:
        inDim = encoderInput.n + int(useTimeOfDay) + int(useDayOfWeek)

    if encoderOutput is None:
        outDim = 1
    else:
        outDim = encoderOutput.n

    ds = SequentialDataSet(inDim, outDim)
    if useTimeOfDay:
        print "include time of day as input field"
    if useDayOfWeek:
        print "include day of week as input field"

    for i in xrange(nTrain - predictionStep):
        sample = getSingleSample(i, sequence, useTimeOfDay, useDayOfWeek)

        if encoderOutput is None:
            dataSDROutput = [sequence['normdata'][i + predictionStep]]
        else:
            dataSDROutput = encoderOutput.encode(sequence['data'][i + predictionStep])

        ds.addSample(sample, dataSDROutput)

    return ds
Example 11: train
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def train(self, params, verbose=False):
    if params['reset_every_training']:
        if verbose:
            print 'create lstm network'

        random.seed(6)
        if params['output_encoding'] == None:
            self.net = buildNetwork(self.nDimInput, params['num_cells'], self.nDimOutput,
                                    hiddenclass=LSTMLayer, bias=True, outputbias=True, recurrent=True)
        elif params['output_encoding'] == 'likelihood':
            self.net = buildNetwork(self.nDimInput, params['num_cells'], self.nDimOutput,
                                    hiddenclass=LSTMLayer, bias=True, outclass=SigmoidLayer, recurrent=True)

    self.net.reset()

    ds = SequentialDataSet(self.nDimInput, self.nDimOutput)
    networkInput = self.window(self.networkInput, params)
    targetPrediction = self.window(self.targetPrediction, params)

    # prepare a training data-set using the history
    for i in xrange(len(networkInput)):
        ds.addSample(self.inputEncoder.encode(networkInput[i]),
                     self.outputEncoder.encode(targetPrediction[i]))

    if params['num_epochs'] > 1:
        trainer = RPropMinusTrainer(self.net, dataset=ds, verbose=verbose)

        if verbose:
            print " train LSTM on ", len(ds), " records for ", params['num_epochs'], " epochs "

        if len(networkInput) > 1:
            trainer.trainEpochs(params['num_epochs'])
    else:
        self.trainer.setData(ds)
        self.trainer.train()

    # run through the training dataset to get the lstm network state right
    self.net.reset()
    for i in xrange(len(networkInput)):
        self.net.activate(ds.getSample(i)[0])
Example 12: trainNetwork
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def trainNetwork(dirname):
    numFeatures = 5000
    ds = SequentialDataSet(numFeatures, 1)

    tracks = glob.glob(os.path.join(dirname, 'train??.wav'))
    for t in tracks:
        track = os.path.splitext(t)[0]

        # load training data
        print "Reading %s..." % track
        data = numpy.genfromtxt(track + '_seg.csv', delimiter=",")
        labels = numpy.genfromtxt(track + 'REF.txt', delimiter='\t')[0::10, 1]
        numData = data.shape[0]

        # add the input to the dataset
        print "Adding to dataset..."
        ds.newSequence()
        for i in range(numData):
            ds.addSample(data[i], (labels[i],))

    # initialize the neural network
    print "Initializing neural network..."
    net = buildNetwork(numFeatures, 50, 1,
                       hiddenclass=LSTMLayer, outputbias=False, recurrent=True)

    # train the network on the dataset
    print "Training neural net"
    trainer = RPropMinusTrainer(net, dataset=ds)
##    trainer.trainUntilConvergence(maxEpochs=50, verbose=True, validationProportion=0.1)
    error = -1
    for i in range(100):
        new_error = trainer.train()
        print "error: " + str(new_error)
        if abs(error - new_error) < 0.1: break
        error = new_error

    # save the network
    print "Saving neural network..."
    NetworkWriter.writeToFile(net, os.path.basename(dirname) + 'net')
Example 13: getReberDS
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def getReberDS(maxLength, display=0):
    """
    @param maxLength (int): maximum length of the sequence
    """
    [in_seq, out_seq] = generateSequencesVector(maxLength)
    target = out_seq

    last_target = target[-1]
    last_target[np.argmax(out_seq[-1])] = 1
    target[-1] = last_target

    ds = SequentialDataSet(7, 7)
    i = 0
    for sample, next_sample in zip(in_seq, target):
        ds.addSample(sample, next_sample)

        if display:
            print(" sample: %s" % sample)
            print(" target: %s" % next_sample)
            print("next sample: %s" % out_seq[i])
            print()

        i += 1

    return (ds, in_seq, out_seq)
Example 14: getPyBrainDataSet
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def getPyBrainDataSet(sequence, nTrain, predictionStep=1, useTimeOfDay=True, useDayOfWeek=True):
    print "generate a pybrain dataset of sequences"
    print "the training data contains ", str(nTrain - predictionStep), "records"

    inDim = 1 + int(useTimeOfDay) + int(useDayOfWeek)
    ds = SequentialDataSet(inDim, 1)
    if useTimeOfDay:
        print "include time of day as input field"
    if useDayOfWeek:
        print "include day of week as input field"

    for i in xrange(nTrain - predictionStep):
        if useTimeOfDay and useDayOfWeek:
            sample = np.array([sequence['data'][i], sequence['timeofday'][i], sequence['dayofweek'][i]])
        elif useTimeOfDay:
            sample = np.array([sequence['data'][i], sequence['timeofday'][i]])
        elif useDayOfWeek:
            sample = np.array([sequence['data'][i], sequence['dayofweek'][i]])
        else:
            sample = np.array([sequence['data'][i]])

        ds.addSample(sample, sequence['data'][i + predictionStep])

    return ds
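A sketch of how the dataset returned here might be fed to a recurrent network; the sequence dict, network width and epoch count are assumptions for illustration only.

# Hypothetical driver for getPyBrainDataSet(); the input dict is made up.
import numpy as np
from pybrain.structure.modules import LSTMLayer
from pybrain.supervised import RPropMinusTrainer
from pybrain.tools.shortcuts import buildNetwork

n = 500
sequence = {
    'data': np.random.rand(n).tolist(),
    'timeofday': ((np.arange(n) % 24) / 24.0).tolist(),
    'dayofweek': (((np.arange(n) // 24) % 7) / 7.0).tolist(),
}

ds = getPyBrainDataSet(sequence, nTrain=400, predictionStep=1)

net = buildNetwork(ds.indim, 20, ds.outdim,
                   hiddenclass=LSTMLayer, recurrent=True)
trainer = RPropMinusTrainer(net, dataset=ds)
trainer.trainEpochs(5)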
Example 15: say_hello_text
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import addSample [as alias]
def say_hello_text(username="World", text="You are good"):
    object_data_new = pd.read_csv('/Users/ruiyun_zhou/Documents/cmpe-274/data/data.csv')
    data_area_new = object_data_new[object_data_new.Area == username]
    data_area_new_1 = data_area_new[data_area_new.Disease == text]
    data_list_new = data_area_new_1['Count'].values.tolist()
    print data_list_new.__len__()
    data_list = data_list_new

    ds = SequentialDataSet(1, 1)
    isZero = 0
    for sample, next_sample in zip(data_list, cycle(data_list[1:])):
        ds.addSample(sample, next_sample)
        if sample:
            isZero = 1

    if isZero == 0:
        return '[0, 0]'

    net = buildNetwork(1, 5, 1, hiddenclass=LSTMLayer, outputbias=False, recurrent=True)
    trainer = RPropMinusTrainer(net, dataset=ds)
    train_errors = []  # save errors for plotting later
    EPOCHS_PER_CYCLE = 5
    CYCLES = 10
    EPOCHS = EPOCHS_PER_CYCLE * CYCLES
    for i in xrange(CYCLES):
        print "Doing epoch %d" % i
        trainer.trainEpochs(EPOCHS_PER_CYCLE)
        train_errors.append(trainer.testOnData())
        epoch = (i + 1) * EPOCHS_PER_CYCLE
        # return '<p>%d</p>\n' % (data_list_new.__len__())

    # print("final error =", train_errors[-1])
    # print "Value for last week is %4.1d" % abs(data_list[-1])
    # print "Value for next week is %4.1d" % abs(net.activate(data_list[-1]))
    # result = (abs(data_list[-1]))
    result = (abs(net.activate(data_list[-1])))
    result_1 = (abs(net.activate(result)))
    return '[%d, %d]' % (result, result_1)