This article collects typical usage examples of the Python method pybrain.supervised.trainers.BackpropTrainer.trainUntilConvergence. If you are wondering what BackpropTrainer.trainUntilConvergence does, how to call it, or what real code using it looks like, the curated examples below should help. You can also look at the enclosing class, pybrain.supervised.trainers.BackpropTrainer, for further usage examples.
The following 15 code examples show BackpropTrainer.trainUntilConvergence in use; they are ordered by popularity by default.
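Before the examples, here is a minimal, self-contained sketch of the typical call pattern. The XOR-style toy dataset, the 2-3-1 network size, and maxEpochs=100 are illustrative assumptions, not values taken from any example below.

    from pybrain.datasets import SupervisedDataSet
    from pybrain.tools.shortcuts import buildNetwork
    from pybrain.supervised.trainers import BackpropTrainer

    # Toy dataset: two inputs, one target per sample.
    ds = SupervisedDataSet(2, 1)
    ds.addSample((0, 0), (0,))
    ds.addSample((0, 1), (1,))
    ds.addSample((1, 0), (1,))
    ds.addSample((1, 1), (0,))

    net = buildNetwork(2, 3, 1, bias=True)
    trainer = BackpropTrainer(net, ds)

    # trainUntilConvergence() holds back part of the dataset as a validation set
    # (validationProportion defaults to 0.25) and stops when the validation error
    # stops improving, or after maxEpochs epochs. It returns the per-epoch
    # training and validation error lists.
    train_errors, val_errors = trainer.trainUntilConvergence(maxEpochs=100)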
Example 1: training_nerual_network
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def training_nerual_network(self):
    # Split the dataset 70/30 into training and test portions.
    dataTrain, dataTest = self.DS.splitWithProportion(0.7)
    xTrain, yTrain = dataTrain['input'], dataTrain['target']
    xTest, yTest = dataTest['input'], dataTest['target']
    trainer = BackpropTrainer(self.fnn, dataTrain, verbose=True, learningrate=0.03, momentum=0.1)
    trainer.trainUntilConvergence(maxEpochs=20)
    output = self.fnn.activateOnDataset(dataTest)
    count = 0
    countRight = 0
    error = 0
    for i in range(len(output)):
        posReal = yTest[i].argmax()
        posPredict = output[i].argmax()
        #print('o', output[i], posPredict)
        #print('r', yTest[i], posReal)
        error += abs(posReal - posPredict)
        if posReal == posPredict:
            countRight += 1
        count += 1
    error /= count
    print('Correct rate:{:.2f} Average error:{:.2f}'.format(countRight / count, error))
Example 2: train
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def train(data):
    """
    See http://www.pybrain.org/docs/tutorial/fnn.html
    Returns a neural network trained on the given training data.

    Parameters:
        data - A ClassificationDataSet for training.
               Should not include the test data.
    """
    network = buildNetwork(
        # This is where we specify the architecture of
        # the network. We can play around with different
        # parameters here.
        # http://www.pybrain.org/docs/api/tools.html
        data.indim, 5, data.outdim,
        hiddenclass=SigmoidLayer,
        outclass=SoftmaxLayer
    )
    # We can fiddle around with this guy's options as well.
    # http://www.pybrain.org/docs/api/supervised/trainers.html
    trainer = BackpropTrainer(network, dataset=data)
    trainer.trainUntilConvergence(maxEpochs=20)
    return network
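As the comments in this example suggest, both buildNetwork and BackpropTrainer accept tuning options. A hedged variant of the trainer calls, with values chosen purely for illustration rather than taken from the example:

    trainer = BackpropTrainer(network, dataset=data,
                              learningrate=0.01,   # step size for gradient descent
                              momentum=0.1,        # fraction of the previous update to keep
                              weightdecay=0.01,    # L2-style penalty on the weights
                              verbose=True)        # print the error after each epoch
    trainer.trainUntilConvergence(maxEpochs=20, continueEpochs=10,
                                  validationProportion=0.25)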
Example 3: neuralNetwork_eval_func
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def neuralNetwork_eval_func(self, chromosome):
    node_num, learning_rate, window_size = self.decode_chromosome(chromosome)
    if self.check_log(node_num, learning_rate, window_size):
        return self.get_means_from_log(node_num, learning_rate, window_size)[0]
    folded_dataset = self.create_folded_dataset(window_size)
    indim = 21 * (2 * window_size + 1)
    mean_AUC = 0
    mean_decision_value = 0
    mean_mcc = 0
    sample_size_over_thousand_flag = False
    for test_fold in xrange(self.fold):
        test_labels, test_dataset, train_labels, train_dataset = folded_dataset.get_test_and_training_dataset(test_fold)
        if len(test_labels) + len(train_labels) > 1000:
            sample_size_over_thousand_flag = True
        ds = SupervisedDataSet(indim, 1)
        for i in xrange(len(train_labels)):
            ds.appendLinked(train_dataset[i], [train_labels[i]])
        net = buildNetwork(indim, node_num, 1, outclass=SigmoidLayer, bias=True)
        trainer = BackpropTrainer(net, ds, learningrate=learning_rate)
        trainer.trainUntilConvergence(maxEpochs=self.maxEpochs_for_trainer)
        decision_values = [net.activate(test_dataset[i]) for i in xrange(len(test_labels))]
        decision_values = map(lambda x: x[0], decision_values)
        AUC, decision_value_and_max_mcc = validate_performance.calculate_AUC(decision_values, test_labels)
        mean_AUC += AUC
        mean_decision_value += decision_value_and_max_mcc[0]
        mean_mcc += decision_value_and_max_mcc[1]
        if sample_size_over_thousand_flag:
            # With a large sample, evaluate a single fold instead of full cross-validation.
            break
    if not sample_size_over_thousand_flag:
        mean_AUC /= self.fold
        mean_decision_value /= self.fold
        mean_mcc /= self.fold
    self.write_log(node_num, learning_rate, window_size, mean_AUC, mean_decision_value, mean_mcc)
    self.add_log(node_num, learning_rate, window_size, mean_AUC, mean_decision_value, mean_mcc)
    return mean_AUC
Example 4: startTrials
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def startTrials(ds, maxTrials=2, maxExperiments=2):
    """start and run the trials"""
    hpCount = []
    for i in range(0, maxExperiments):
        for j in range(0, maxTrials):
            enemyTestPos = runExperiments.makeTestDataset()
            net = NetworkReader.readFrom("net.xml")
            netResults = net.activate([val for pair in normalize(enemyTestPos) for val in pair])
            netIter = iter(netResults)
            allyTestPos = zip(netIter, netIter)
            # undo normalization
            allyTestPos = map(lambda p: (abs(p[0] * 640), abs(p[1] * 720)), allyTestPos)
            print(allyTestPos)
            runExperiments.writeTestData(allyTestPos)
            runExperiments.run()
            with open("exp_results_raw.txt", "r") as resultsFile:
                lines = resultsFile.readlines()
            if "Zerg_Zergling" in lines[1]:
                x = normalize(enemyTestPos)
                y = normalize(allyTestPos)
                x = [val for pair in x for val in pair]
                y = [val for pair in y for val in pair]
                ds.addSample(x, y)
                lineSplit = lines[1].split("Zerg_Zergling")[-1]
                hpCount.append(lineSplit.split(" ")[2])
            # Retrain the network on the samples gathered so far.
            trainer = BackpropTrainer(net, ds)
            trainer.trainUntilConvergence()
    return hpCount
Example 5: getModel
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def getModel(inputSize, hiddenSize1, hiddenSize2, trainData, target):
    # Build a feed-forward network by hand: input -> two sigmoid hidden layers -> linear output.
    fnn = FeedForwardNetwork()
    inLayer = LinearLayer(inputSize, name='inLayer')
    hiddenLayer0 = SigmoidLayer(hiddenSize1, name='hiddenLayer0')
    hiddenLayer1 = SigmoidLayer(hiddenSize2, name='hiddenLayer1')
    outLayer = LinearLayer(1, name='outLayer')
    fnn.addInputModule(inLayer)
    fnn.addModule(hiddenLayer0)
    fnn.addModule(hiddenLayer1)
    fnn.addOutputModule(outLayer)
    inToHidden0 = FullConnection(inLayer, hiddenLayer0)
    hidden0ToHidden1 = FullConnection(hiddenLayer0, hiddenLayer1)
    hidden1ToHiddenOutput = FullConnection(hiddenLayer1, outLayer)
    fnn.addConnection(inToHidden0)
    fnn.addConnection(hidden0ToHidden1)
    fnn.addConnection(hidden1ToHiddenOutput)
    fnn.sortModules()
    Ds = SupervisedDataSet(inputSize, 1)
    # Standardize the training data; note that the fitted scaler is not returned,
    # so callers cannot apply the same scaling to new inputs.
    scaler = preprocessing.StandardScaler().fit(trainData)
    x = scaler.transform(trainData)
    # print(len(target))
    # print(len(x))
    for i in range(len(target)):
        Ds.addSample(x[i], [target[i]])
    trainer = BackpropTrainer(fnn, Ds, learningrate=0.01, verbose=False)
    trainer.trainUntilConvergence(maxEpochs=1000)
    return fnn
Example 6: main
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def main():
    for stock in STOCK_TICKS:
        # Download data
        get_data(stock)
        # Import data
        days = extract_data(stock)
        today = days.pop(0)
        # Build the dataset: one sample per day, labelled 1 if the price went up.
        data_set = ClassificationDataSet(INPUT_NUM, 1, nb_classes=2)
        for day in days:
            target = 0
            if day.change > 0:
                target = 1
            data_set.addSample(day.return_metrics(), [target])
        # Build the network
        network = buildNetwork(INPUT_NUM, MIDDLE_NUM, MIDDLE_NUM, OUTPUT_NUM)
        # Train the network
        trainer = BackpropTrainer(network)
        trainer.setData(data_set)
        trainer.trainUntilConvergence(maxEpochs=EPOCHS_MAX)
        # Activate the network on today's metrics
        prediction = network.activate(today.return_metrics())
        print prediction
Example 7: run
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def run(self, ds_train, ds_test):
    """
    Trains the ANN on the given training set and evaluates it on the given test set.

    Args:
        ds_train (TweetClassificationDatasetFactory): the training dataset the neural network is trained with.
        ds_test (TweetClassificationDatasetFactory): the test dataset to evaluate on.
    Returns:
        error (float): the percent error of the test dataset, tested on the neural network.
    """
    # Convert class labels to one-of-many (one-hot) target vectors.
    ds_train._convertToOneOfMany()
    ds_test._convertToOneOfMany()
    trainer = BackpropTrainer(
        self.network,
        dataset=ds_train,
        momentum=0.1,
        verbose=True,
        weightdecay=0.01)
    trainer.trainUntilConvergence(
        dataset=ds_train,
        maxEpochs=self.max_epochs,
        continueEpochs=self.con_epochs)
    result = trainer.testOnClassData(dataset=ds_test)
    error = percentError(result, ds_test['class'])
    return error
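A hedged usage sketch for the method above; the classifier object, the full_dataset name, and the 0.75 split are assumptions for illustration, not taken from the example:

    # Hypothetical caller: split a labelled dataset, then train and report the error.
    ds_train, ds_test = full_dataset.splitWithProportion(0.75)  # full_dataset is assumed to exist
    error = classifier.run(ds_train, ds_test)                   # classifier wraps self.network above
    print('test error: %.2f%%' % error)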
Example 8: encoderdecoder
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def encoderdecoder(outersize, innersize, indata, fname):
    # create the autoencoder network: outersize -> innersize -> outersize
    n = FeedForwardNetwork()
    inLayer = LinearLayer(outersize)
    hiddenLayer = SigmoidLayer(innersize)
    outLayer = LinearLayer(outersize)
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)
    n.sortModules()
    # create the dataset: each sample is its own target (identity mapping)
    ds = SupervisedDataSet(outersize, outersize)
    for x, y in zip(indata, indata):
        ds.addSample(x, y)
    # train the network
    trainer = BackpropTrainer(n, ds)
    trainer.trainUntilConvergence()
    n.saveNetwork(fname)
    return [[in_to_hidden, hidden_to_out],
            [inLayer, hiddenLayer, outLayer],
            n]
Example 9: fit
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def fit(self, X, y):
    """
    Train the regressor model.

    :param X: pandas.DataFrame of shape [n_samples, n_features]
    :param y: values - array-like of shape [n_samples]
    :return: self
    """
    dataset = self._prepare_net_and_dataset(X, y, 'regression')
    trainer = BackpropTrainer(self.net,
                              dataset,
                              learningrate=self.learningrate,
                              lrdecay=self.lrdecay,
                              momentum=self.momentum,
                              verbose=self.verbose,
                              batchlearning=self.batchlearning,
                              weightdecay=self.weightdecay)
    if self.epochs < 0:
        # A negative epoch count means: train until the validation error converges.
        trainer.trainUntilConvergence(maxEpochs=self.max_epochs,
                                      continueEpochs=self.continue_epochs,
                                      verbose=self.verbose,
                                      validationProportion=self.validation_proportion)
    else:
        for i in range(self.epochs):
            trainer.train()
    self.__fitted = True
    return self
Example 10: makeNet
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def makeNet(learning_rate):
    ds = SupervisedDataSet(20, 20)
    with open('data/misspellingssmall.csv', 'rbU') as f:
        reader = csv.reader(f)
        for row in reader:
            ds.addSample(convert(row[0]), convert(row[1]))
    # Hold out 20% of the samples for scoring below; train on the remaining 80%.
    testds, trainds = ds.splitWithProportion(0.2)
    net = buildNetwork(20, 20, 20)
    trainer = BackpropTrainer(net, dataset=trainds, learningrate=learning_rate)
    #trainer.train()
    #trainer.trainEpochs(5)
    trainer.trainUntilConvergence()
    score = 0
    for x, y in testds:
        predict = unconvert(net.activate(x))
        score += damerau_levenshtein_distance(predict, unconvert(y))
    global lastNet
    lastNet = net
    global netNum
    netNum += 1
    print "Network " + str(netNum) + " done with score " + str(score)
    return score
Example 11: main
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def main():
    # read in pre-processed features
    print('reading preprocessed data')
    bag = read_bag_of_word('features')
    # read in sentiment dictionary
    print('reading dictionary')
    [word_vector, sentiments] = read_dictionary("positive.txt", "negative.txt")
    features, target, features_dict = create_feature_matrix(bag, sentiments)
    # Sort dates in chronological order
    dates = dow_jones_labels.keys()
    dates = [datetime.datetime.strptime(ts, "%Y-%m-%d") for ts in dates]
    dates.sort()
    dates = [datetime.datetime.strftime(ts, "%Y-%m-%d") for ts in dates]
    ds = SupervisedDataSet(4, 1)
    ds.setField('input', features)
    target = np.array(target).reshape(-1, 1)
    ds.setField('target', target)
    net = buildNetwork(4, 40, 1, bias=True)
    trainer = BackpropTrainer(net, ds)
    trainer.trainUntilConvergence(verbose=True, validationProportion=0.15, maxEpochs=10000, continueEpochs=10)
    count = 0
    for i in range(0, len(target)):
        print("predict={0},actual={1}".format(net.activate(features[i]), target[i]))
        if net.activate(features[i]) * target[i] > 0:
            count += 1
    print("accuracy={0}".format(float(count) / len(dow_jones_labels)))
Example 12: xtest_simple
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def xtest_simple(self):
    # Create a network with 2 input nodes, 3 hidden nodes, 1 output node
    net = buildNetwork(2, 3, 1)
    # Create a 2-input, 1-output dataset
    ds = SupervisedDataSet(2, 1)
    ds.addSample((0, 0), (0,))
    ds.addSample((0, 1), (1,))
    ds.addSample((1, 0), (1,))
    ds.addSample((1, 1), (0,))
    ds.addSample((0, 0), (0,))
    ds.addSample((0, 1), (1,))
    ds.addSample((1, 0), (1,))
    ds.addSample((1, 1), (0,))
    ds.addSample((1, 1), (0,))
    ds.addSample((1, 1), (0,))
    ds.addSample((1, 1), (0,))
    ds.addSample((1, 1), (0,))
    ds.addSample((1, 1), (0,))
    ds.addSample((1, 1), (0,))
    #print ds['input']
    #print ds['target']
    #self.assertEqual(len(ds), 8)
    #self.show_activation(net, ds)
    trainer = BackpropTrainer(net, ds, learningrate=1.0)
    trainer.trainUntilConvergence(verbose=True, validationProportion=0.125)
    self.show_activation(net, ds)
Example 13: dynamic_data_network
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def dynamic_data_network(slept, study):
    # Build a random training set: (hours slept, hours studied) -> test score.
    train_set = SupervisedDataSet(2, 1)
    for r1 in range(0, 15):
        slept = random.randint(0, 10)
        study = random.randint(0, 10)
        score = random.randint(60, 100)
        train_set.addSample((slept, study), score)
    #print train_set['input']
    print 'build net'
    net = buildNetwork(2, 3, 1, bias=True)
    print 'initial guess'
    print net.activate((slept, study))
    print 'trainer'
    trainer = BackpropTrainer(net, train_set)
    print 'training'
    trainer.trainUntilConvergence()
    print 'final guess'
    print net.activate((slept, study))
    while True:
        slept = int(raw_input('hours slept: '))
        if slept < 0:
            break
        study = int(raw_input('hours studied: '))
        print 'new guess :'
        print net.activate((slept, study))
Example 14: train
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def train(self):
    print "Enter the number of times to train, -1 means train until convergence:"
    t = int(raw_input())
    print "Training the Neural Net"
    print "self.net.indim = " + str(self.net.indim)
    print "self.train_data.indim = " + str(self.train_data.indim)
    trainer = BackpropTrainer(self.net, dataset=self.train_data, momentum=0.1, verbose=True, weightdecay=0.01)
    if t == -1:
        trainer.trainUntilConvergence()
    else:
        for i in range(t):
            trainer.trainEpochs(1)
            trnresult = percentError(trainer.testOnClassData(), self.train_data['class'])
            # print self.test_data
            tstresult = percentError(trainer.testOnClassData(dataset=self.test_data), self.test_data['class'])
            print "epoch: %4d" % trainer.totalepochs, \
                  " train error: %5.2f%%" % trnresult, \
                  " test error: %5.2f%%" % tstresult
            if i % 10 == 0 and i > 1:
                print "Saving Progress... Writing to a file"
                NetworkWriter.writeToFile(self.net, self.path)
    print "Done training... Writing to a file"
    NetworkWriter.writeToFile(self.net, self.path)
    return trainer
Example 15: entrenarSomnolencia
# Required import: from pybrain.supervised.trainers import BackpropTrainer [as alias]
# Or: from pybrain.supervised.trainers.BackpropTrainer import trainUntilConvergence [as alias]
def entrenarSomnolencia(red):
    # Initialize the dataset
    ds = SupervisedDataSet(4096, 1)
    # Build the dataset: process each image to extract the face, then assign
    # the desired target value for the neural network's output.
    print "Somnolencia - cara"
    for i, c in enumerate(os.listdir(os.path.dirname('/home/taberu/Imágenes/img_tesis/somnoliento/'))):
        try:
            im = cv2.imread('/home/taberu/Imágenes/img_tesis/somnoliento/' + c)
            pim = pi.procesarImagen(im)
            cara = d.deteccionFacial(pim)
            if cara is None:
                print "No hay cara"
            else:
                print i
                ds.appendLinked(cara.flatten(), 10)
        except:
            pass
    trainer = BackpropTrainer(red, ds)
    print "Entrenando hasta converger"
    trainer.trainUntilConvergence()
    NetworkWriter.writeToFile(red, 'rna_somnolencia.xml')