This article collects typical usage examples of the appendLinked method of pybrain.datasets.SequentialDataSet in Python. If you are wondering what SequentialDataSet.appendLinked does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore the containing class, pybrain.datasets.SequentialDataSet, for further details.
The following shows 10 code examples of the SequentialDataSet.appendLinked method, sorted by popularity by default.
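Before the individual examples, here is a minimal sketch of the pattern they all share: construct a SequentialDataSet with the input and target dimensions, start a sequence with newSequence(), and add linked input/target pairs with appendLinked(). The dimensions and values below are made-up placeholders.

from pybrain.datasets import SequentialDataSet

# Dataset with 2-dimensional inputs and 1-dimensional targets.
ds = SequentialDataSet(2, 1)

# First sequence: each appendLinked call adds one (input, target) sample.
ds.newSequence()
ds.appendLinked([0.1, 0.2], [0.3])
ds.appendLinked([0.4, 0.5], [0.6])

# Second sequence.
ds.newSequence()
ds.appendLinked([0.7, 0.8], [0.9])

print(len(ds))               # 3 samples in total
print(ds.getNumSequences())  # 2 sequences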
Example 1: create_data
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import appendLinked [as alias]
def create_data(self, inputs, targets):
    data = SequentialDataSet(inputs, targets)
    for i in xrange(0, len(self.dataframe) - 1):
        data.newSequence()
        ins = self.dataframe.ix[i].values
        target = self.dataframe.ix[i + 1].values[0]
        data.appendLinked(ins, target)
    self.data = data
Example 2: rnnTrain
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import appendLinked [as alias]
def rnnTrain(data):
    ds = SequentialDataSet(3, 3)
    # The original read np.size(input) / 9, which measures the built-in `input`
    # function instead of the data; the intended value appears to be the number
    # of samples per class (data has 3 columns and 3 equally sized classes).
    s = np.size(data) / 9
    ds.newSequence()
    for idx1 in range(s):
        ds.appendLinked(data[idx1], (1, 0, 0))
    ds.newSequence()
    for idx2 in range(s):
        ds.appendLinked(data[idx2 + s], (0, 1, 0))
    ds.newSequence()
    for idx3 in range(s):
        ds.appendLinked(data[idx3 + s + s], (0, 0, 1))
    net = buildNetwork(3, 8, 3, bias=True, recurrent=True, hiddenclass=LSTMLayer)
    trainer = BackpropTrainer(net, ds)
    trainer.trainEpochs(1000)
    return net
Example 3: SequentialDataSet
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import appendLinked [as alias]
consecutive_days_pressure_dropping_by = 0
ds = SequentialDataSet(5, 1)
for sample, next_sample in zip(training_features, cycle(training_features[1:])):
    ds.newSequence()
    yesterdays_sample = sample
    yesterdays_pressure = yesterdays_sample[3]
    todays_pressure = next_sample[3]
    raining = 0.0
    if (todays_pressure > yesterdays_pressure):
        consecutive_days_pressure_dropping_by += (todays_pressure - yesterdays_pressure)
    else:
        raining = 1.0
        consecutive_days_pressure_dropping_by = 0
    yesterdays_sample.append(float(consecutive_days_pressure_dropping_by))
    ds.appendLinked(yesterdays_sample, raining)
net = buildNetwork(5, 10, 1,
                   hiddenclass=LSTMLayer, outputbias=False, recurrent=True)
#inLayer = LinearLayer(2)
#hiddenLayer = LSTMLayer(5)
#outLayer = LinearLayer(1)
#from pybrain.structure import FullConnection
#in_to_hidden = FullConnection(inLayer, hiddenLayer)
#hidden_to_out = FullConnection(hiddenLayer, outLayer)
trainer = RPropMinusTrainer(net, dataset=ds)
train_errors = []  # save errors for plotting later
EPOCHS_PER_CYCLE = 5
CYCLES = 50
Example 4: epochs
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import appendLinked [as alias]
class LanguageLearner:

    __OUTPUT = "Sample at {0} epochs (prompt=\"{1}\", length={2}): {3}"

    def __init__(self, trainingText, hiddenLayers, hiddenNodes):
        self.__initialized = False
        with open(trainingText) as f:
            self.raw = f.read()
        self.characters = list(self.raw)
        self.rawData = list(map(ord, self.characters))
        print("Creating alphabet mapping...")
        self.mapping = []
        for charCode in self.rawData:
            if charCode not in self.mapping:
                self.mapping.append(charCode)
        print("Mapping of " + str(len(self.mapping)) + " created.")
        print(str(self.mapping))
        print("Converting data to mapping...")
        self.data = []
        for charCode in self.rawData:
            self.data.append(self.mapping.index(charCode))
        print("Done.")
        self.dataIn = self.data[:-1:]
        self.dataOut = self.data[1::]
        self.inputs = 1
        self.hiddenLayers = hiddenLayers
        self.hiddenNodes = hiddenNodes
        self.outputs = 1

    def initialize(self, verbose):
        print("Initializing language learner...")
        self.verbose = verbose
        # Create network and modules
        self.net = RecurrentNetwork()
        inp = LinearLayer(self.inputs, name="in")
        hiddenModules = []
        for i in range(0, self.hiddenLayers):
            hiddenModules.append(LSTMLayer(self.hiddenNodes, name=("hidden-" + str(i + 1))))
        outp = LinearLayer(self.outputs, name="out")
        # Add modules to the network with recurrence
        self.net.addOutputModule(outp)
        self.net.addInputModule(inp)
        for module in hiddenModules:
            self.net.addModule(module)
        # Create connections
        self.net.addConnection(FullConnection(self.net["in"], self.net["hidden-1"]))
        for i in range(0, len(hiddenModules) - 1):
            self.net.addConnection(FullConnection(self.net["hidden-" + str(i + 1)], self.net["hidden-" + str(i + 2)]))
            self.net.addRecurrentConnection(FullConnection(self.net["hidden-" + str(i + 1)], self.net["hidden-" + str(i + 1)]))
        self.net.addRecurrentConnection(FullConnection(self.net["hidden-" + str(len(hiddenModules))],
                                                       self.net["hidden-" + str(len(hiddenModules))]))
        self.net.addConnection(FullConnection(self.net["hidden-" + str(len(hiddenModules))], self.net["out"]))
        self.net.sortModules()
        self.trainingSet = SequentialDataSet(self.inputs, self.outputs)
        for x, y in zip(self.dataIn, self.dataOut):
            self.trainingSet.newSequence()
            self.trainingSet.appendLinked([x], [y])
        self.net.randomize()
        print("Neural network initialized with structure:")
        print(self.net)
        self.trainer = BackpropTrainer(self.net, self.trainingSet, verbose=verbose)
        self.__initialized = True
        print("Successfully initialized network.")

    def train(self, epochs, frequency, prompt, length):
        if not self.__initialized:
            raise Exception("Attempted to train uninitialized LanguageLearner")
        print("Beginning training for " + str(epochs) + " epochs...")
        if frequency >= 0:
            print(LanguageLearner.__OUTPUT.format(0, prompt, length, self.sample(prompt, length)))
        for i in range(1, epochs):
            print("Error at " + str(i) + " epochs: " + str(self.trainer.train()))
            if i % frequency == 0:
                print(LanguageLearner.__OUTPUT.format(i, prompt, length, self.sample(prompt, length)))
        print("Completed training.")

    def sample(self, prompt, length):
        self.net.reset()
        if prompt is None:
            prompt = chr(random.choice(self.mapping))
        output = prompt
        charCode = ord(prompt)
        for i in range(0, length):
            sampledResult = self.net.activate([charCode])
            charCode = int(round(sampledResult[0]))
            if charCode < 0 or charCode >= len(self.mapping):
                return output + "#TERMINATED_SAMPLE(reason: learner guessed invalid character)"
            output += chr(self.mapping[charCode])
        return output
Example 5: open
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import appendLinked [as alias]
# to_bitmask, chord_ds, melody_ds, chord_network and melody_network are
# presumably defined in parts of the original script not shown in this snippet.
files = glob.glob('reel_nopickup/*.csv') #16th notes
tunes = []
for filename in files: #tune
    with open(filename) as f:
        notes = map(int, map(str.strip, f.readlines()))[::2] #8th notes
        bitmasks = map(to_bitmask, notes)
        tunes.append(bitmasks)
nested_tunes = map(lambda tune: [tune[x:min(len(tune) + 1, x+4)] for x in range(0, len(tune), 4)], tunes)
for tune in nested_tunes:
    for (inp, target) in zip(tune, tune[1:]):
        chord_ds.newSequence()
        chord_ds.appendLinked(inp[0], target[0])
for tune in tunes:
    # Note: this inner loop iterates over `tunes` again, so the same pass is
    # repeated once per tune; it was presumably meant to iterate over `tune`.
    for beat in tunes:
        for (inp, target) in zip(beat, beat[1:]):
            melody_ds.newSequence()
            melody_ds.appendLinked(inp, target)
chord_network.randomize()
melody_network.randomize()
chord_trainer = BackpropTrainer(chord_network, chord_ds, learningrate=.00001, momentum=.9)
melody_trainer = BackpropTrainer(melody_network, melody_ds, learningrate=.00001, momentum=.9)
import time
print "training chords..."
print "\n".join(map(str, chord_trainer.trainUntilConvergence()))
Example 6: buildNetwork
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import appendLinked [as alias]
from pybrain.datasets import SequentialDataSet
from pybrain.structure import SigmoidLayer
from pybrain.structure import LSTMLayer
# Not shown in the original snippet, but required by the calls below:
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
import itertools
import numpy as np

data = np.loadtxt("datain.txt").T
print data
datain = data[:-1,:]
dataout = data[1:,:]
INPUTS = 5
OUTPUTS = 5
HIDDEN = 40
net = buildNetwork(INPUTS, HIDDEN, OUTPUTS, hiddenclass=LSTMLayer, outclass=SigmoidLayer, recurrent=True)
ds = SequentialDataSet(INPUTS, OUTPUTS)
for x, y in itertools.izip(datain, dataout):
    ds.newSequence()
    ds.appendLinked(tuple(x), tuple(y))
net.randomize()
trainer = BackpropTrainer(net, ds)
for _ in range(1000):
    print trainer.train()
Example 7: print(...)
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import appendLinked [as alias]
#create the training set
#takes word vector -> simple enumerated vector of words from vocab
print("Creating the DataSet")
dataSet = SequentialDataSet(inputLayerSize, outputLayerSize)
dataSet.newSequence()
for sample, next in zip(blob[0:sampleSize], cycle(blob[1:sampleSize+1])):
    try:
        if sample == ' ': dataSet.newSequence()
        #print("creating Sample of:",sample,next)
        actual = [0.0 for x in range(inputLayerSize)]
        actual[wordToNum[sample]] = 1.0
        # Note: the expected one-hot vector is also sized with inputLayerSize;
        # this only matches the dataset's target dimension when
        # inputLayerSize == outputLayerSize (one-hot over the same vocabulary).
        expected = [0.0 for x in range(inputLayerSize)]
        expected[wordToNum[next]] = 1.0
        dataSet.appendLinked(actual, expected)
    except KeyError as e:
        print("Missing: ", str(e))
        #print("Something went wrong for:",sample,next)
print("Data Set Created: ", dataSet.getNumSequences())
if False: #os.path.exists(networkSaveFile):
    print("Loading Neural Network")
    networkSave = codecs.open(networkSaveFile, 'r', 'utf-8')
    net = pickle.load(networkSave)
    networkSave.close()
else:
    #create the network
    print("Creating Network: ", inputLayerSize, "->", hiddenLayerSize, "->", outputLayerSize)
    net = buildNetwork(inputLayerSize, hiddenLayerSize, outputLayerSize, hiddenclass=LSTMLayer, outputbias=False, recurrent=True)
Example 8: evalSensor
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import appendLinked [as alias]
def evalSensor(sensorIndex, featureSpaceIndices):
    #reset data structure
    allByTime = {}
    f = open(fileName, 'r')
    headers = f.readline().split()
    for line in f:
        splited = line.split()
        timeStamp = int(splited[0])
        allByTime[timeStamp] = {}
    f.close()
    allByJoint = {}
    for inputIndices in featureSpaceIndices:
        allByJoint[inputIndices] = {}
    clfs = {}
    grades = {}
    for inputIndices in featureSpaceIndices:
        allByTime, allByJoint = angleExtraction.prepareAnglesFromInput(fileName, inputIndices, sensorIndex, True, allByTime, allByJoint)
    #normalizing allByJoint
    timeSet = Set([])
    for inputIndices in featureSpaceIndices:
        vec = []
        for timeStamp in allByTime.keys():
            if(timeStamp in allByJoint[inputIndices].keys()):
                timeSet.add(timeStamp)
                x = allByJoint[inputIndices][timeStamp]
                vec.append(x)
        if(len(vec) > 0):
            vec = angleExtraction.normelizeVector(vec)
        i = 0
        for timeStamp in allByTime.keys():
            if(timeStamp in allByJoint[inputIndices].keys()):
                allByJoint[inputIndices][timeStamp] = vec[i]
                i = i + 1
    #time set to list, output dict to list
    time = []
    for timeStamp in timeSet:
        time.append(timeStamp)
    time.sort()
    allOutput = []
    tmpTime = []
    #clean zeros, create time ordered output vector
    for timeStamp in time:
        out = allByTime[timeStamp]['output']
        if(out != 0 and len(allByTime[timeStamp]) == featureNum + 1):
            tmpTime.append(timeStamp)
            allOutput.append(out)
    time = tmpTime
    #normalize allOutput
    allOutput = normalByPercentile(allOutput)
    #create a net
    hiddenSize = (featureNum + 2)/2
    net = buildNetwork(featureNum, hiddenSize, 1, hiddenclass=LSTMLayer, outclass=SigmoidLayer, recurrent=True, bias=True)
    #build dataset
    ds = SequentialDataSet(featureNum, 1)
    i = 0
    lastTimeStamp = time[0]
    for timeStamp in time:
        if(len(allByTime[timeStamp]) == featureNum+1):  #it is a full vector
            if(timeStamp - lastTimeStamp > 100):
                ds.newSequence()
            sample = []
            for inputIndices in featureSpaceIndices:
                sample.append(allByTime[timeStamp][inputIndices])
            ds.appendLinked(sample, allOutput[i])
            i = i + 1
            lastTimeStamp = timeStamp
    #train
    net.randomize()
    tstdata, trndata = ds.splitWithProportion( 0.25 )
    trainer = BackpropTrainer(net, trndata)
    print len(ds)
    min = 100
    trainNum = 100
    bestTrainer = None
    for i in range(trainNum):
        res = trainer.train()
        if(res < min):
            min = res
            bestTrainer = trainer
        net.randomize()
    print min
    """
    res = 100
    while(res > min):
        net.randomize()
        res = trainer.train()
    """
    trainer = bestTrainer
    for i in range(trainNum):
        res = trainer.train()
        if(i % (trainNum/10) == 0):
            print res
    print 'trndata.evaluateModuleMSE ' + str(trndata.evaluateModuleMSE(net))
    print 'tstdata.evaluateModuleMSE ' + str(tstdata.evaluateModuleMSE(net))
    #print net.activateOnDataset(tstdata)
    hits = 0.0
    total = 0.0
#......... the rest of the code is omitted here .........
Example 9: train_network
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import appendLinked [as alias]
def train_network(options_file_location,training_data_location,output_location):
    training_file_handle = open(training_data_location,"r")
    training_reader = csv.reader(training_file_handle)
    stdout_file = output_location+'training_console_output.txt'
    stderr_file = output_location+'training_console_errput.txt'
    sys.stdout = open(stdout_file,"w")
    sys.stderr = open(stderr_file,"w")
    options_file_handle = open(options_file_location,'r')
    options_dictionary = {}
    for option in options_file_handle.readlines():
        key,val = option.split('=')
        print key
        print val
        options_dictionary[key] = val
    num_predictors = int(options_dictionary['num_predictors'])
    num_outputs = int(options_dictionary['num_outputs'])
    num_training_epochs = int(options_dictionary['num_training_epochs'])
    num_hidden_neurons = int(options_dictionary['num_hidden_neurons'])
    compound_prediction = int(options_dictionary['compound_prediction'])
    #teacher_forced_transient = int(options_dictionary['teacher_forced_transient'])
    teacher_forced_transient = 0
    hidden_neuron_type_str = options_dictionary['hidden_neuron_type']
    output_neuron_type_str = options_dictionary['output_neuron_type']
    hidden_layer_type,output_layer_type = net_topol.get_layer_types(options_dictionary)
    training_dataset = SequentialDataSet(num_predictors, num_outputs)
    print 'reach'
    previous_sequence_number = 1
    #read data into dataset objects
    print 'reading in training data...'
    for row in training_reader:
        #convert list of strings to list of floats
        list = [float(s) for s in row]
        #split input line
        predictors = list[0:num_predictors]
        outputs = list[num_predictors+1:num_predictors+1+num_outputs]
        #convert from python list to numpy array
        predictors = np.array(predictors)
        outputs = np.array(outputs)
        sequence_number = math.trunc(list[num_predictors])
        if not sequence_number==previous_sequence_number:
            # print sequence_number
            # print previous_sequence_number
            training_dataset.newSequence()
            previous_sequence_number = sequence_number
        #add to dataset
        training_dataset.appendLinked(predictors, outputs)
    network = shortcuts.buildNetwork(num_predictors, num_hidden_neurons, num_outputs, hiddenclass=LSTMLayer, outclass=LinearLayer)
    network.sortModules()
    trainer = RPropMinusTrainer(module=network, dataset=training_dataset)
    for i in range(num_training_epochs):
        print 'Starting training epoch: '+str(i)
        trainer.trainEpochs(1)
        sys.stdout.flush()
    #brittle
    network_file_path = output_location+'trained_network.xml'
    NetworkWriter.writeToFile(network, network_file_path)
    done_file_handle = open(output_location+'training_done.txt',"w")
    done_file_handle.write('%s' % 'done!')
    done_file_handle.close()
Example 10: network_predict
# Required import: from pybrain.datasets import SequentialDataSet [as alias]
# Or: from pybrain.datasets.SequentialDataSet import appendLinked [as alias]
def network_predict(options_file_location,prediction_data_location,output_location,network_location):
    prediction_data_file_handle = open(prediction_data_location,"r")
    prediction_data_reader = csv.reader(prediction_data_file_handle)
    stdout_file = output_location+'prediction_console_output.txt'
    stderr_file = output_location+'prediction_console_errput.txt'
    sys.stdout = open(stdout_file,"w")
    sys.stderr = open(stderr_file,"w")
    prediction_results_file_location = output_location+'prediction_results.csv'
    prediction_results_file_handle = open(prediction_results_file_location,"w")
    options_file_handle = open(options_file_location,'r')
    options_dictionary = {}
    for option in options_file_handle.readlines():
        key,val = option.split('=')
        print key
        print val
        options_dictionary[key] = val
    num_predictors = int(options_dictionary['num_predictors'])
    num_outputs = int(options_dictionary['num_outputs'])
    num_training_epochs = int(options_dictionary['num_training_epochs'])
    num_hidden_neurons = int(options_dictionary['num_hidden_neurons'])
    compound_prediction = int(options_dictionary['compound_prediction'])
    # teacher_forced_transient = int(options_dictionary['teacher_forced_transient'])
    teacher_forced_transient = 0
    prediction_dataset = SequentialDataSet(num_predictors, num_outputs)
    previous_sequence_number = 1
    print 'reading in prediction data...'
    for row in prediction_data_reader:
        #convert list of strings to list of floats
        list = [float(s) for s in row]
        #split input line
        predictors = list[0:num_predictors]
        #+1 is to skip over the sequence column
        outputs = list[num_predictors+1:num_predictors+1+num_outputs]
        #convert from python list to numpy array
        predictors = np.array(predictors)
        outputs = np.array(outputs)
        sequence_number = math.trunc(list[num_predictors])
        if not sequence_number==previous_sequence_number:
            # print 'sequence_number '+str(sequence_number)
            # print 'previous_sequence_number '+str(previous_sequence_number)
            # frame_number_debug = 0
            prediction_dataset.newSequence()
            previous_sequence_number = sequence_number
        #add to dataset
        prediction_dataset.appendLinked(predictors, outputs)
    network = NetworkReader.readFrom(network_location)
    if compound_prediction==0:
        results, targets, mse = evalRNN.evalRNNOnSeqDataset(network,prediction_dataset)
    elif compound_prediction==1:
        results, targets, mse = evalRNN.compoundEvalRNNOnSeqDataset(network,prediction_dataset, teacher_forced_transient)
    results_length, results_width = results.shape
    np.savetxt(prediction_results_file_location,results,delimiter=" ",fmt='%9.9f')
    done_file_handle = open(output_location+'predicting_done.txt',"w")
    done_file_handle.write('%s' % 'done!')
    done_file_handle.close()