This page collects typical usage examples of the Python method pybrain.supervised.trainers.BackpropTrainer.testOnData. If you are wondering how to call BackpropTrainer.testOnData, how it is used in practice, or what real examples of it look like, the curated code samples below may help. You can also explore further usage examples of the class it belongs to, pybrain.supervised.trainers.BackpropTrainer.
The following shows 15 code examples of the BackpropTrainer.testOnData method, sorted by popularity by default.
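Before the individual examples, here is a minimal, self-contained sketch (not taken from any example on this page) of the call pattern most of them share: wrap the data in a SupervisedDataSet, build a network, train with BackpropTrainer, and read the mean squared error back from testOnData. The XOR-style data and the layer sizes are illustrative assumptions only.

from pybrain.datasets import SupervisedDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer

# Toy dataset: 2 inputs, 1 target (XOR truth table, illustrative only).
ds = SupervisedDataSet(2, 1)
for inp, target in [((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 0)]:
    ds.addSample(inp, (target,))

net = buildNetwork(2, 4, 1, bias=True)                 # 2-4-1 feed-forward network
trainer = BackpropTrainer(net, ds, learningrate=0.01, momentum=0.9)
trainer.trainOnDataset(ds, 1000)                       # train for 1000 epochs

# testOnData returns the average (mean squared) error on the given dataset;
# with no argument it evaluates the trainer's own dataset.
mse = trainer.testOnData(ds, verbose=True)
print 'MSE after training:', mse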
Example 1: main
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
def main():
    images, labels = load_labeled_training(flatten=True)
    images = standardize(images)
    # images, labels = load_pca_proj(K=100)
    shuffle_in_unison(images, labels)
    ds = ClassificationDataSet(images.shape[1], 1, nb_classes=7)
    for i, l in zip(images, labels):
        ds.addSample(i, [l - 1])
    # ds._convertToOneOfMany()
    test, train = ds.splitWithProportion(0.2)
    test._convertToOneOfMany()
    train._convertToOneOfMany()
    net = shortcuts.buildNetwork(train.indim, 1000, train.outdim, outclass=SoftmaxLayer)
    trainer = BackpropTrainer(net, dataset=train, momentum=0.1, learningrate=0.01, weightdecay=0.05)
    # trainer = RPropMinusTrainer(net, dataset=train)
    # cv = validation.CrossValidator(trainer, ds)
    # print cv.validate()
    net.randomize()
    tr_labels_2 = net.activateOnDataset(train).argmax(axis=1)
    trnres = percentError(tr_labels_2, train["class"])
    # trnres = percentError(trainer.testOnClassData(dataset=train), train['class'])
    testres = percentError(trainer.testOnClassData(dataset=test), test["class"])
    print "Training error: %.10f, Test error: %.10f" % (trnres, testres)
    print "Iters: %d" % trainer.totalepochs
    for i in range(100):
        trainer.trainEpochs(10)
        trnres = percentError(trainer.testOnClassData(dataset=train), train["class"])
        testres = percentError(trainer.testOnClassData(dataset=test), test["class"])
        trnmse = trainer.testOnData(dataset=train)
        testmse = trainer.testOnData(dataset=test)
        print "Iteration: %d, Training error: %.5f, Test error: %.5f" % (trainer.totalepochs, trnres, testres)
        print "Training MSE: %.5f, Test MSE: %.5f" % (trnmse, testmse)
Example 2: anntrain
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
def anntrain(xdata, ydata):  # ,epochs):
    # print len(xdata[0])
    ds = SupervisedDataSet(len(xdata[0]), 1)
    # ds = ClassificationDataSet(len(xdata[0]), 1, nb_classes=2)
    for i, algo in enumerate(xdata):
        ds.addSample(algo, ydata[i])
    # ds._convertToOneOfMany()  # not this one
    net = FeedForwardNetwork()
    inp = LinearLayer(len(xdata[0]))
    h1 = SigmoidLayer(1)
    outp = LinearLayer(1)
    net.addOutputModule(outp)
    net.addInputModule(inp)
    net.addModule(h1)
    # net = buildNetwork(len(xdata[0]), 1, 1, hiddenclass=TanhLayer, outclass=SoftmaxLayer)
    net.addConnection(FullConnection(inp, h1))
    net.addConnection(FullConnection(h1, outp))
    net.sortModules()
    trainer = BackpropTrainer(net, ds)  # , verbose=True)
    # trainer.trainEpochs(40)
    trainer.trainOnDataset(ds, 40)
    # trainer.trainUntilConvergence(ds, 20, verbose=True, validationProportion=0.15)
    trainer.testOnData()  # verbose=True)
    # print 'Final weights:', net.params
    return net
Example 3: handle
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
def handle(self, *args, **options):
    better_thans = BetterThan.objects.all()  # .filter(pk__lte=50)
    ds = SupervisedDataSet(204960, 1)
    for better_than in better_thans:
        bt = imread(better_than.better_than.image.file)
        wt = imread(better_than.worse_than.image.file)
        better_than.better_than.image.file.close()
        better_than.worse_than.image.file.close()
        bt = filters.sobel(bt)
        wt = filters.sobel(wt)
        bt_input_array = np.reshape(bt, (bt.shape[0] * bt.shape[1]))
        wt_input_array = np.reshape(wt, (wt.shape[0] * wt.shape[1]))
        input_1 = np.append(bt_input_array, wt_input_array)
        input_2 = np.append(wt_input_array, bt_input_array)
        ds.addSample(np.append(bt_input_array, wt_input_array), [-1])
        ds.addSample(np.append(wt_input_array, bt_input_array), [1])
    net = buildNetwork(204960, 2, 1)
    train_ds, test_ds = ds.splitWithProportion(options['train_test_split'])
    _, test_ds = ds.splitWithProportion(options['test_split'])
    trainer = BackpropTrainer(net, ds)
    avgerr = trainer.testOnData(dataset=test_ds)
    print 'untrained avgerr: {0}'.format(avgerr)
    trainer.train()
    avgerr = trainer.testOnData(dataset=test_ds)
    print 'trained avgerr: {0}'.format(avgerr)
Example 4: neuralnetworktrain
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
def neuralnetworktrain(self):
    dataset = self.getdata()
    # Constructing a multiple-output neural network.
    # Other architectures will also be experimented with,
    # e.g. several separate single-output neural networks.
    net = FeedForwardNetwork()
    inp = LinearLayer(9)
    h1 = SigmoidLayer(20)
    h2 = TanhLayer(10)
    outp = LinearLayer(3)
    # Adding the modules to the architecture
    net.addOutputModule(outp)
    net.addInputModule(inp)
    net.addModule(h1)
    net.addModule(h2)
    # Creating the connections
    net.addConnection(FullConnection(inp, h1))
    net.addConnection(FullConnection(h1, h2))
    net.addConnection(FullConnection(h2, outp))
    net.sortModules()
    # Training the neural network using backpropagation
    t = BackpropTrainer(net, learningrate=0.01, momentum=0.5, verbose=True)
    t.trainOnDataset(dataset, 5)
    t.testOnData(verbose=False)
    # Saving the trained neural network information to file
    self.writetrainedinfo(net)
Example 5: main
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
def main():
    trainingSet = buildDataSet("days", -1)  # build data set
    net = buildNetwork(5, 3, 1, bias=True, hiddenclass=TanhLayer)
    trainer = BackpropTrainer(net, trainingSet, verbose=True)
    testSet = buildDataSet("hours", -6)  # build another set for testing/validating
    # In my testing 4000 epochs has been enough to almost reach the lowest error without taking all day.
    # You could use trainUntilConvergence(), but that takes all night and does only minimally better.
    trainer.trainEpochs(4000)
    # net.activateOnDataset(testSet)
    trainer.testOnData(testSet, verbose=True)  # test on the data set
Example 6: train_callback
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
def train_callback():
    trainer = BackpropTrainer(net, learningrate=0.001, lrdecay=1, momentum=0.0, verbose=True)
    print 'MSE before', trainer.testOnData(ds, verbose=True)
    epoch_count = 0
    while epoch_count < 1000:
        epoch_count += 10
        trainer.trainUntilConvergence(dataset=ds, maxEpochs=10)
        networkwriter.NetworkWriter.writeToFile(net, 'autosave.network')
    print 'MSE after', trainer.testOnData(ds, verbose=True)
    print "\n"
    print 'Total epochs:', trainer.totalepochs
Example 7: estimateNot
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
def estimateNot():
    ds_not = SupervisedDataSet(1, 1)
    ds_not.addSample((0,), (1,))
    ds_not.addSample((1,), (0,))
    net = buildNetwork(1, 100, 1, bias=True)
    trainer = BackpropTrainer(net, learningrate=0.01, momentum=0.99)
    trainer.trainOnDataset(ds_not, 3000)
    trainer.testOnData()
    print '\nthe prediction for NOT value:'
    print 'NOT 0 = ', net.activate((0,))
    print 'NOT 1 = ', net.activate((1,))
Example 8: NNet
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
class NNet(object):

    def __init__(self):
        self.net = buildNetwork(2, 4, 2, bias=True)
        self.net.randomize()
        print self.net
        self.ds = SupervisedDataSet(2, 2)
        self.trainer = BackpropTrainer(self.net, self.ds, learningrate=0.1, momentum=0.99)

    def addTrainDS(self, data1, data2, max):
        for x in [1, 2]:
            norm1 = self.normalize(data1, max)
            norm2 = self.normalize(data2, max)
            for x in range(len(norm1)):
                self.ds.addSample(norm1[x], norm2[x])

    def train(self):
        print "Training"
        # print self.trainer.train()
        trndata, tstdata = self.ds.splitWithProportion(.25)
        self.trainer.trainUntilConvergence(verbose=True,
                                           trainingData=trndata,
                                           validationData=tstdata,
                                           validationProportion=.3,
                                           maxEpochs=500)
        # self.trainer.trainOnDataset(trndata, 500)
        self.trainer.testOnData(tstdata, verbose=True)

    def activate(self, data):
        for x in data:
            self.net.activate(x)

    def normalize(self, data, max):
        normData = np.zeros((len(data), 2))
        for x in [0, 1]:
            for y in range(len(data)):
                val = data[y][x]
                normData[y][x] = val / max[x]
        # print normData
        return normData

    def denormalize(self, data, max):
        deNorm = np.zeros((len(data), 2))
        for x in [0, 1]:
            for y in range(len(data)):
                val = data[y][x]
                deNorm[y][x] = val * max[x]
        return deNorm

    def getOutput(self, mat, max):
        norm = self.normalize(mat, max)
        out = []
        for val in norm:
            out.append(self.net.activate(val))
        return self.denormalize(out, max)
Example 9: buildAndTrain
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
def buildAndTrain(ds):
    net = buildNetwork(2, 4, 1, bias=True)
    # try:
    #     f = open('_learned', 'r')
    #     net = pickle.load(f)
    #     f.close()
    # except:
    trainer = BackpropTrainer(net, learningrate=0.01, momentum=0.99)
    trainer.trainOnDataset(ds, 1000)
    trainer.testOnData()
    return net
Example 10: RunNet
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
def RunNet(net, dataset, train_epochs):
    "a function to build a neural net and test on it, for testing purposes right now"
    # print net.activate([2, 1])
    # ds = SupervisedDataSet(15, 1)
    # ds.addSample((1,1,1,1,1,1,1,1,1,1,1,1,1,1,1), (100))
    # ds.addSample((0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), (0))
    # trainer = BackpropTrainer(net, learningrate=0.01, momentum=0.99, verbose=True)
    trainer = BackpropTrainer(net, learningrate=0.01, momentum=0.5, verbose=True)
    trainer.trainOnDataset(dataset, train_epochs)
    trainer.testOnData(verbose=True)
Example 11: xtrain
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
def xtrain(self):
    dataset = self.getdata()
    # Constructing a neural network with two hidden layers
    net = buildNetwork(9, 15, 5, 1, recurrent=True)
    # Training using backpropagation
    trainer = BackpropTrainer(net, learningrate=0.01, momentum=0.75,
                              weightdecay=0.02, verbose=True)
    trainer.trainOnDataset(dataset, 10)
    trainer.testOnData(verbose=False)
    # Saving the trained neural network information to file
    self.writetrainedinfo(net)
Example 12: estimateAnd
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
def estimateAnd():
    ds_and = SupervisedDataSet(2, 1)
    ds_and.addSample((0, 0), (0,))
    ds_and.addSample((0, 1), (0,))
    ds_and.addSample((1, 0), (0,))
    ds_and.addSample((1, 1), (1,))
    net = buildNetwork(2, 4, 1, bias=True)
    trainer = BackpropTrainer(net, learningrate=0.01, momentum=0.99)
    trainer.trainOnDataset(ds_and, 3000)
    trainer.testOnData()
    print '\nthe prediction for AND value:'
    print '1 AND 1 = ', net.activate((1, 1))
    print '1 AND 0 = ', net.activate((1, 0))
    print '0 AND 1 = ', net.activate((0, 1))
    print '0 AND 0 = ', net.activate((0, 0))
Example 13: estimateNor
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
def estimateNor():
    ds_nor = SupervisedDataSet(2, 1)
    ds_nor.addSample((0, 0), (1,))
    ds_nor.addSample((0, 1), (0,))
    ds_nor.addSample((1, 0), (0,))
    ds_nor.addSample((1, 1), (0,))
    net = buildNetwork(2, 100, 1, bias=True)
    trainer = BackpropTrainer(net, learningrate=0.01, momentum=0.99)
    trainer.trainOnDataset(ds_nor, 3000)
    trainer.testOnData()
    print '\nthe prediction for NOR value:'
    print '1 NOR 1 = ', net.activate((1, 1))
    print '1 NOR 0 = ', net.activate((1, 0))
    print '0 NOR 1 = ', net.activate((0, 1))
    print '0 NOR 0 = ', net.activate((0, 0))
Example 14: computeModel
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
def computeModel(self, path, user):
    # Create supervised datasets for training and testing.
    trndata = SupervisedDataSet(24, 1)
    tstdata = SupervisedDataSet(24, 1)
    # Fill the datasets.
    for number in range(0, 10):
        for variation in range(0, 7):
            # Pass all the features as inputs.
            trndata.addSample(self.getSample(user, number, variation), (user.key,))
        for variation in range(7, 10):
            # Pass all the features as inputs.
            tstdata.addSample(self.getSample(user, number, variation), (user.key,))
    # Build the LSTM.
    n = buildNetwork(24, 50, 1, hiddenclass=LSTMLayer, recurrent=True, bias=True)
    # Define a training method.
    trainer = BackpropTrainer(n, dataset=trndata, momentum=0.99, learningrate=0.00002)
    # Carry out the training.
    trainer.trainOnDataset(trndata, 2000)
    valueA = trainer.testOnData(tstdata)
    print '\tMSE -> {0:.2f}'.format(valueA)
    self.saveModel(n, '.\NeuralNets\SavedNet_%d' % (user.key))
    return n
Example 15: _run_training
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import testOnData [as alias]
def _run_training(net, data_set):
    logger.info("Running training...")
    data_set_training, data_set_test = data_set.splitWithProportion(0.9)
    rate = LEARNING_RATE
    trainer = BackpropTrainer(net, data_set_training, learningrate=rate)
    for epoch in xrange(NUM_EPOCHS):
        logger.info("Calculating EPOCH %d", epoch)
        logger.info("Result on training set %f", trainer.train())
        if epoch % 4 == 0:
            logger.info("Result on test set %f", trainer.testOnData(data_set_test, verbose=True))
        if epoch == 0 or epoch % 10 == 9:
            rate /= 10
            trainer = BackpropTrainer(net, data_set_training, learningrate=rate)