This article collects typical usage examples of the BackpropTrainer.verbose method from pybrain.supervised.trainers in Python. If you are unsure how to use BackpropTrainer.verbose in practice, the hand-picked code examples below may help; you can also read more about its containing class, pybrain.supervised.trainers.BackpropTrainer.
The following shows 2 code examples of the BackpropTrainer.verbose method, ordered by popularity by default.
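Before the full examples, a minimal sketch of what the attribute controls, assuming an XOR-style SupervisedDataSet (the dataset, layer sizes, and epoch counts here are illustrative and are not taken from the examples below): when verbose is True, each training epoch prints its total error; setting it back to False silences later epochs.
from pybrain.datasets import SupervisedDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer

# XOR-style dataset: 2 inputs, 1 target
ds = SupervisedDataSet(2, 1)
ds.addSample([0, 0], [0])
ds.addSample([0, 1], [1])
ds.addSample([1, 0], [1])
ds.addSample([1, 1], [0])

net = buildNetwork(2, 4, 1)
trainer = BackpropTrainer(net, ds)

trainer.verbose = True    # each epoch now prints its total error
trainer.train()           # one epoch, error is printed
trainer.verbose = False   # subsequent epochs run silently
trainer.trainEpochs(100)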
Example 1: testTrainingOnSepervisedDataset
# Required import: from pybrain.supervised.trainers import BackpropTrainer
# Or: from pybrain.supervised.trainers.BackpropTrainer import verbose
def testTrainingOnSepervisedDataset(self):
    # XOR dataset: 2 inputs, 1 target
    DS = SupervisedDataSet(2, 1)
    DS.addSample([0, 0], [0])
    DS.addSample([0, 1], [1])
    DS.addSample([1, 0], [1])
    DS.addSample([1, 1], [0])
    network = N = buildNetwork(2, 4, 1)
    trainer = BackpropTrainer(N, learningrate=0.01, momentum=0.99)
    trainer.verbose = False  # keep the training iterations silent
    nnf = NeuralNetworkFactory(network, trainer, seed=2, iterationsNum=500)
    nnClassifier = nnf.buildClassifier(DS)
    self.assertAlmostEqual(nnClassifier.getPrediction([0, 0]), 0, delta=0.01)
    self.assertAlmostEqual(nnClassifier.getPrediction([0, 1]), 1, delta=0.01)
    self.assertAlmostEqual(nnClassifier.getPrediction([1, 0]), 1, delta=0.01)
    self.assertAlmostEqual(nnClassifier.getPrediction([1, 1]), 0, delta=0.01)
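In this test, the trainer is handed to a NeuralNetworkFactory (a helper class from the surrounding project, not part of PyBrain), and verbose is switched off beforehand so that the factory's 500 training iterations run without printing a per-epoch error line.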
Example 2: ClassificationDataSet
# Required import: from pybrain.supervised.trainers import BackpropTrainer
# Or: from pybrain.supervised.trainers.BackpropTrainer import verbose
# Imports
import numpy as np
from pybrain.datasets import ClassificationDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure.modules import SigmoidLayer

# Data and targets: the XOR problem, one sample per column of X
X = np.array([[-1, -1], [-1, 1], [1, -1], [1, 1]]).transpose()
y = np.array([0, 1, 1, 0])
data = ClassificationDataSet(2, 1)
for i in range(X.shape[1]):
    data.addSample(X[:, i], y[i])

### Add your code here!
#inLayer = LinearLayer(2)
#hiddenLayer = SigmoidLayer(4)
#outLayer = LinearLayer(1)

# build the network: 2 inputs, 4 sigmoid hidden units, 1 sigmoid output
net = buildNetwork(2, 4, 1, hiddenclass=SigmoidLayer, outclass=SigmoidLayer)

# create the BackpropTrainer
trainer = BackpropTrainer(net, dataset=data, learningrate=1, momentum=0.001,
                          weightdecay=0.000001, batchlearning=True)

# train the network for 3000 epochs
trainer.trainEpochs(3000)

# alternatively, train in 30 blocks of 100 epochs each,
# printing the total error once per block:
for i in range(30):
    trainer.trainEpochs(99)
    trainer.verbose = True
    trainer.trainEpochs(1)
    trainer.verbose = False
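The final loop shows the typical reason for toggling verbose by hand: 99 epochs are trained silently, then verbose is switched on for a single epoch, so the total error is printed once every 100 epochs instead of for all 3000.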