This article collects typical usage examples of the Python method pybrain.datasets.ClassificationDataSet.splitWithProportion. If you are wondering what ClassificationDataSet.splitWithProportion does, how to call it, or what it looks like in practice, the curated code examples below should help. You can also read further about the containing class, pybrain.datasets.ClassificationDataSet.
Shown below are 15 code examples of ClassificationDataSet.splitWithProportion, sorted by popularity by default.
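Before the individual examples, a minimal sketch of the recurring pattern may help (the toy data below is invented for illustration). One caveat: in some PyBrain versions splitWithProportion is inherited from SupervisedDataSet and returns plain SupervisedDataSet objects, which is why several examples below rebuild ClassificationDataSets from the split before calling _convertToOneOfMany().

from pybrain.datasets import ClassificationDataSet

alldata = ClassificationDataSet(2, 1, nb_classes=2)
for n in xrange(100):
    # invented toy samples: alternate between class 0 and class 1
    alldata.addSample((n % 2, 1 - n % 2), [n % 2])

# the first returned set receives the given proportion (here 25%, used for testing)
tstdata_raw, trndata_raw = alldata.splitWithProportion(0.25)

def rebuild(raw):
    # re-wrap a split as a ClassificationDataSet so _convertToOneOfMany is available
    ds = ClassificationDataSet(2, 1, nb_classes=2)
    for n in xrange(raw.getLength()):
        ds.addSample(raw.getSample(n)[0], raw.getSample(n)[1])
    return ds

tstdata, trndata = rebuild(tstdata_raw), rebuild(trndata_raw)
trndata._convertToOneOfMany()
tstdata._convertToOneOfMany()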
Example 1: __init__
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
class NeuralNetLearner:
def __init__(self):
self.bunch = load_digits()
self.X = np.asarray(self.bunch.data, 'float32')
self.Y = np.asarray(self.bunch.target, 'float32')
#self.X, self.Y = nudge_dataset(self.X, self.bunch.target)
self.X = (self.X - np.min(self.X, 0)) / (np.max(self.X, 0) + 0.0001) # 0-1 scaling
self.ds = ClassificationDataSet(64, nb_classes=10, class_labels=self.bunch.target_names)
for (x, y) in zip(self.X, self.Y):
self.ds.addSample(x, y)
self.test_data, self.train_data = self.ds.splitWithProportion(0.3)
self.network = buildNetwork(64, 10, 1)
def get_datasets(self):
return self.train_data, self.test_data
    def activate(self, x):
        return self.network.activate(x.tolist())
    def fitness_func(self, x):
        if not (x.size == 64):
            print("Bad input vector: ", x)
            return
        sum_of_squared_error = 0
        for (input, target) in self.ds:
            # input is already an array; activate() handles the list conversion
            sum_of_squared_error += (target - self.activate(input)) ** 2
        return (sum_of_squared_error / len(self.ds))
def get_weights(self):
return
Example 2: NeuralNetwork
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
class NeuralNetwork(BaseWorkflow):
    def __init__(self, purpose='train', num_inputs=None, num_outputs=None, classes=None, class_labels=None):
super(NeuralNetwork, self).__init__()
self.purpose = purpose
self.data_path = self.config.neural_net.get(self.purpose, None)
self.file_name = 'neural_net'
        self.all_data = ClassificationDataSet(num_inputs,
                                              num_outputs,
                                              nb_classes=classes,
                                              class_labels=class_labels)
self.train = None
self.test = None
self.neural_network = None
self.train_result = None
self.test_result = None
self.cross_validation_result = None
def process(self):
self.prepare_train_test()
self.build_network()
trainer = self.train_network(dataset=self.train)
self.score_train_test(trainer=trainer)
self.cross_validate(dataset=self.all_data)
def add_sample(self, correlogram_matrix=None, target=None, sample_path=None):
self.all_data.addSample(correlogram_matrix, target)
logger.info('sample added from {sample_path}'.format(sample_path=sample_path))
def prepare_train_test(self):
self.test, self.train = self.all_data.splitWithProportion(0.25)
def build_network(self):
self.neural_network = buildNetwork(self.train.indim, 7, self.train.outdim, outclass=SoftmaxLayer) # feed forward network
def train_network(self, dataset=None):
starter_trainer = BackpropTrainer(self.neural_network, dataset=dataset, momentum=0.1, verbose=True, weightdecay=0.01)
starter_trainer.trainUntilConvergence(validationProportion=0.25, maxEpochs=100)
return starter_trainer
def score_train_test(self, trainer=None):
self.test_result = percentError(trainer.testOnClassData(dataset=self.test), self.test['class'])
logger.info('test error result: {result}'.format(result=self.test_result))
self.train_result = percentError(trainer.testOnClassData(dataset=self.train), self.train['class'] )
logger.info('train error result: {result}'.format(result=self.train_result))
def cross_validate(self, dataset=None):
trainer = BackpropTrainer(self.neural_network, dataset=dataset, momentum=0.1, verbose=True, weightdecay=0.01)
validator = CrossValidator(trainer=trainer, dataset=dataset, n_folds=10)
mean_validation_result = validator.validate()
self.cross_validation_result = mean_validation_result
logger.info('cross val result: {result}'.format(result=self.cross_validation_result))
@staticmethod
def save_network_to_xml(net=None, file_name=None):
NetworkWriter.writeToFile(net, file_name)
@staticmethod
def read_network_from_xml(file_name=None):
return NetworkReader.readFrom(file_name)
Example 3: build_sample_nn
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
def build_sample_nn():
means = [(-1,0),(2,4),(3,1)]
cov = [diag([1,1]), diag([0.5,1.2]), diag([1.5,0.7])]
alldata = ClassificationDataSet(2, 1, nb_classes=3)
for n in xrange(400):
for klass in range(3):
input = multivariate_normal(means[klass],cov[klass])
alldata.addSample(input, [klass])
tstdata_temp, trndata_temp = alldata.splitWithProportion(0.25)
tstdata = ClassificationDataSet(2, 1, nb_classes=3)
for n in xrange(0, tstdata_temp.getLength()):
tstdata.addSample( tstdata_temp.getSample(n)[0], tstdata_temp.getSample(n)[1] )
trndata = ClassificationDataSet(2, 1, nb_classes=3)
for n in xrange(0, trndata_temp.getLength()):
trndata.addSample( trndata_temp.getSample(n)[0], trndata_temp.getSample(n)[1] )
trndata._convertToOneOfMany( )
tstdata._convertToOneOfMany( )
fnn = buildNetwork( trndata.indim, 5, trndata.outdim, outclass=SoftmaxLayer )
trainer = BackpropTrainer( fnn, dataset=trndata, momentum=0.1, verbose=True, weightdecay=0.01)
return trainer, fnn, tstdata
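A possible way to drive the trainer this function returns; the epoch count and error printing here are illustrative additions, not part of the original example:

from pybrain.utilities import percentError

trainer, fnn, tstdata = build_sample_nn()
for epoch in xrange(20):
    trainer.trainEpochs(1)
    # same error helper used throughout the examples on this page
    print "test error: %5.2f%%" % percentError(
        trainer.testOnClassData(dataset=tstdata), tstdata['class'])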
Example 4: prepare_datasets
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
def prepare_datasets(inp,out,dataframe, ratio):
    '''Convert a pandas DataFrame into PyBrain ClassificationDataSets.
    Parameters:
        inp: list of names of the input feature columns
        out: list of names of the output (target) columns
        dataframe: pandas DataFrame holding both input and target columns
        ratio: proportion of the samples assigned to the test set
    '''
inp_dim = len(inp)
out_dim = len(out)
    no_classes = 3  # 'up', 'down', and everything else (see the mapping below)
alldata = ClassificationDataSet(inp_dim,out_dim,no_classes)
inp = dataframe[inp]
out = dataframe[out]
#for [a,b,c],d in zip(inp.values,out.values):
for i in range(len(inp.values)):
d = out.values[i]
if d=='up': d = 0
elif d == 'down': d = 1
else: d =2
alldata.addSample(inp.values[i],d)
tstdata_temp, trndata_temp = alldata.splitWithProportion( ratio )
    # rebuild the splits as ClassificationDataSets (splitWithProportion returns
    # SupervisedDataSets); use two separate objects so train and test stay disjoint
    tstdata = ClassificationDataSet(inp_dim, out_dim, no_classes)
    trndata = ClassificationDataSet(inp_dim, out_dim, no_classes)
for n in range(0, tstdata_temp.getLength()):
tstdata.addSample( tstdata_temp.getSample(n)[0], tstdata_temp.getSample(n)[1] )
for n in range(0, trndata_temp.getLength()):
trndata.addSample( trndata_temp.getSample(n)[0], trndata_temp.getSample(n)[1])
trndata._convertToOneOfMany()
tstdata._convertToOneOfMany()
return alldata, trndata, tstdata
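A hypothetical call for reference; the DataFrame and its column names are invented, and the target column uses the 'up'/'down' labels that the function maps to class indices:

import pandas as pd

df = pd.DataFrame({'open': [1.0, 1.2, 0.9, 1.1],
                   'close': [1.1, 1.0, 1.3, 0.8],
                   'direction': ['up', 'down', 'up', 'down']})
alldata, trndata, tstdata = prepare_datasets(['open', 'close'],
                                             ['direction'], df, 0.25)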
Example 5: main
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
def main():
images, labels = load_labeled_training(flatten=True)
images = standardize(images)
# images, labels = load_pca_proj(K=100)
shuffle_in_unison(images, labels)
ds = ClassificationDataSet(images.shape[1], 1, nb_classes=7)
for i, l in zip(images, labels):
ds.addSample(i, [l - 1])
# ds._convertToOneOfMany()
test, train = ds.splitWithProportion(0.2)
test._convertToOneOfMany()
train._convertToOneOfMany()
net = shortcuts.buildNetwork(train.indim, 1000, train.outdim, outclass=SoftmaxLayer)
trainer = BackpropTrainer(net, dataset=train, momentum=0.1, learningrate=0.01, weightdecay=0.05)
# trainer = RPropMinusTrainer(net, dataset=train)
# cv = validation.CrossValidator(trainer, ds)
# print cv.validate()
net.randomize()
tr_labels_2 = net.activateOnDataset(train).argmax(axis=1)
trnres = percentError(tr_labels_2, train["class"])
# trnres = percentError(trainer.testOnClassData(dataset=train), train['class'])
testres = percentError(trainer.testOnClassData(dataset=test), test["class"])
print "Training error: %.10f, Test error: %.10f" % (trnres, testres)
print "Iters: %d" % trainer.totalepochs
for i in range(100):
trainer.trainEpochs(10)
trnres = percentError(trainer.testOnClassData(dataset=train), train["class"])
testres = percentError(trainer.testOnClassData(dataset=test), test["class"])
trnmse = trainer.testOnData(dataset=train)
testmse = trainer.testOnData(dataset=test)
print "Iteration: %d, Training error: %.5f, Test error: %.5f" % (trainer.totalepochs, trnres, testres)
print "Training MSE: %.5f, Test MSE: %.5f" % (trnmse, testmse)
Example 6: trainModel
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
def trainModel(self):
self.finalDataSet = np.c_[self.flattenNumericalData, self.flattenCategoryData, self.flattenTargetDataConverted]
self.finalHeaderSet = self.flattenNumericalHeader + self.flattenCategoryHeader + self.flattenTargetHeader
self.nattributes = self.flattenNumericalData.shape[1] + self.flattenCategoryData.shape[1]
ds = ClassificationDataSet(self.nattributes, 1, nb_classes=self.nbClasses)
for rowData in self.finalDataSet:
target = rowData[-1]
variables = rowData[0:-1]
ds.addSample(variables, target)
self.testDataSet, self.trainDataSet = ds.splitWithProportion(0.25)
self.testDataSet._convertToOneOfMany()
self.trainDataSet._convertToOneOfMany()
print self.testDataSet
print self.trainDataSet
self.net = buildNetwork(self.nattributes, self.nhiddenNerons, self.noutput, hiddenclass=TanhLayer, outclass=SigmoidLayer, bias=True)
self.trainer = BackpropTrainer(self.net, self.trainDataSet, learningrate=0.001, momentum=0.99)
begin0 = time.time()
# self.trainer.trainUntilConvergence(verbose=True, dataset=ds, validationProportion=0.25, maxEpochs=10)
for i in xrange(10):
begin = time.time()
self.trainer.trainEpochs(10)
end = time.time()
print 'iteration ', i, ' takes ', end-begin, 'seconds'
end0 = time.time()
print 'total time consumed: ', end0 - begin0
Example 7: conductGeneration
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
def conductGeneration(generation, corpus):
'''
Conducts a generation of learning and testing on the input data
generation (int) --- the number of the generation
corpus (object) --- corpus object containing info needed
'''
# Set up the dataset skeleton
alldata = ClassificationDataSet(2, 1, nb_classes=3, class_labels=['a', 'b', 'c'])
# means = [(-1,0),(2,4),(3,1)]
# cov = [diag([1,1]), diag([0.5,1.2]), diag([1.5,0.7])]
# alldata = ClassificationDataSet(2, 1, nb_classes=3)
# for n in xrange(400):
# for klass in range(3):
# input = multivariate_normal(means[klass],cov[klass])
# print type(input)
# alldata.addSample(input, [klass])
alldata.addSample((0, 1), (1))
alldata.addSample((1, 0), (0))
alldata.addSample((0, 0), (2))
alldata.addSample((1, 1), (0))
trndata, partdata = alldata.splitWithProportion(0.5)
return alldata
Example 8: __init__
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
class NNetwork:
def __init__(self):
self.ds = ClassificationDataSet(7, 1, nb_classes=8) #8 since we have 8 gestures, 7 since we have 7 features
def add_data(self, training_data):
for gesture in training_data:
self.ds.addSample(gesture[1], gesture[0]) #a method to add all the training data we have
def newData(self, training_data): #a method for replacing the data already existing and adding data from scratch
self.ds = ClassificationDataSet(7, 1, nb_classes=8)
for gesture in training_data:
self.ds.addSample(gesture[1], gesture[0])
def train(self, shouldPrint):
tstdata, trndata = self.ds.splitWithProportion(0.2) #splits the data into training and verification data
trndata._convertToOneOfMany()
tstdata._convertToOneOfMany()
self.fnn = buildNetwork(trndata.indim, 64, trndata.outdim, outclass=SoftmaxLayer) #builds a network with 64 hidden neurons
self.trainer = BackpropTrainer(self.fnn, dataset=trndata, momentum=0.1, learningrate=0.01, verbose=True, weightdecay=0.1)
#uses the backpropagation algorithm
self.trainer.trainUntilConvergence(dataset=trndata, maxEpochs=100, verbose=True, continueEpochs=10, validationProportion=0.20) #early stopping with 20% as testing data
trnresult = percentError( self.trainer.testOnClassData(), trndata['class'] )
tstresult = percentError( self.trainer.testOnClassData(dataset=tstdata ), tstdata['class'] )
if shouldPrint:
print "epoch: %4d" % self.trainer.totalepochs, " train error: %5.2f%%" % trnresult, " test error: %5.2f%%" % tstresult
def activate(self, data): #tests a particular data point (feature vector)
return self.fnn.activate(data)
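A hedged usage sketch for this class; the random gesture data stands in for real (label, feature-vector) pairs:

import random

training_data = [(k, [random.random() for _ in xrange(7)])
                 for k in xrange(8) for _ in xrange(10)]  # 8 gestures x 10 samples each
nn = NNetwork()
nn.add_data(training_data)
nn.train(shouldPrint=True)
print nn.activate([random.random() for _ in xrange(7)])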
Example 9: createnetwork
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
def createnetwork(n_hoglist,n_classlist,n_classnum,n_hiddensize=100):
n_inputdim=len(n_hoglist[0])
n_alldata = ClassificationDataSet(n_inputdim,1, nb_classes=n_classnum)
for i in range(len(n_hoglist)):
n_input = n_hoglist[i]
n_class = n_classlist[i]
n_alldata.addSample(n_input, [n_class])
n_tstdata, n_trndata = n_alldata.splitWithProportion( 0.25 )
n_trndata._convertToOneOfMany( )
n_tstdata._convertToOneOfMany( )
print "Number of training patterns: ", len(n_trndata)
print "Input and output dimensions: ", n_trndata.indim, n_trndata.outdim
print "First sample (input, target, class):"
print n_trndata['input'][0], n_trndata['target'][0], n_trndata['class'][0]
n_fnn = buildNetwork(n_trndata.indim,n_hiddensize, n_trndata.outdim, outclass=SoftmaxLayer)
n_trainer = BackpropTrainer(n_fnn, dataset=n_trndata, momentum=0.1, verbose=True, weightdecay=0.01)
n_result = 1
while n_result > 0.1:
print n_result
n_trainer.trainEpochs(1)
n_trnresult = percentError(n_trainer.testOnClassData(),
n_trndata['class'])
n_tstresult = percentError(n_trainer.testOnClassData(
dataset=n_tstdata), n_tstdata['class'])
print "epoch: %4d" % n_trainer.totalepochs, \
" train error: %5.2f%%" % n_trnresult, \
" test error: %5.2f%%" % n_tstresult
n_result = n_tstresult
Example 10: train
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
def train(self, inputData, verbose=True):
# Set of data to classify:
# - IMG_SIZE input dimensions per data point
# - 1 dimensional output
# - 4 clusters of classification
all_faces = ClassificationDataSet(IMG_SIZE, 1, nb_classes=4)
for entry in inputData:
(emotion, data) = entry
all_faces.addSample(data, [emotion])
# Generate a test and a train set from our data
test_faces, train_faces = all_faces.splitWithProportion(0.25)
# Hack to convert a 1-dimensional output into 4 output neurons
test_faces._convertToOneOfMany()
train_faces._convertToOneOfMany()
# Set up the actual network. These are the tunable params
self.fnn = buildNetwork(
train_faces.indim,
20,
train_faces.outdim,
outclass=SoftmaxLayer
)
# Set up the network trainer. Also nice tunable params
trainer = BackpropTrainer(
self.fnn,
dataset=train_faces,
momentum=0.1,
verbose=False,
weightdecay=0.01
)
tabledata = []
        # Train the network.
if verbose:
# Report after every epoch if verbose
for i in range(EPOCHS):
trainer.trainEpochs(1)
trnresult = percentError( trainer.testOnClassData(),
train_faces['class'] )
tstresult = percentError( trainer.testOnClassData(
dataset=test_faces ), test_faces['class'] )
tabledata.append((trainer.totalepochs,trnresult,tstresult))
else:
trainer.trainEpochs(EPOCHS)
if verbose:
print "Epoch\tTrain Error\tTest Error"
for line in tabledata:
print "%4d\t" % line[0], \
"%5.2f%%\t\t" % line[1], \
"%5.2f%%" % line[2]
Example 11: main
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
def main():
means = [(-1,0),(2,4),(3,1)]
cov = [diag([1,1]), diag([0.5,1.2]), diag([1.5,0.7])]
alldata = ClassificationDataSet(2, 1, nb_classes=3)
for n in xrange(400):
for klass in range(3):
input = multivariate_normal(means[klass],cov[klass])
alldata.addSample(input, [klass])
tstdata, trndata = alldata.splitWithProportion( 0.25 )
trndata._convertToOneOfMany( )
tstdata._convertToOneOfMany( )
print "Number of training patterns: ", len(trndata)
print "Input and output dimensions: ", trndata.indim, trndata.outdim
print "First sample (input, target, class):"
print trndata['input'][0], trndata['target'][0], trndata['class'][0]
fnn = buildNetwork( trndata.indim, 5, trndata.outdim, outclass=SoftmaxLayer )
trainer = BackpropTrainer( fnn, dataset=trndata, momentum=0.1, verbose=True, weightdecay=0.01)
ticks = arange(-3.,6.,0.2)
X, Y = meshgrid(ticks, ticks)
# need column vectors in dataset, not arrays
griddata = ClassificationDataSet(2,1, nb_classes=3)
for i in xrange(X.size):
griddata.addSample([X.ravel()[i],Y.ravel()[i]], [0])
griddata._convertToOneOfMany() # this is still needed to make the fnn feel comfy
for i in range(20):
trainer.trainEpochs(1)
trnresult = percentError( trainer.testOnClassData(),
trndata['class'] )
tstresult = percentError( trainer.testOnClassData(
dataset=tstdata ), tstdata['class'] )
print "epoch: %4d" % trainer.totalepochs, \
" train error: %5.2f%%" % trnresult, \
" test error: %5.2f%%" % tstresult
out = fnn.activateOnDataset(griddata)
out = out.argmax(axis=1) # the highest output activation gives the class
out = out.reshape(X.shape)
figure(1)
ioff() # interactive graphics off
clf() # clear the plot
hold(True) # overplot on
for c in [0,1,2]:
here, _ = where(tstdata['class']==c)
plot(tstdata['input'][here,0],tstdata['input'][here,1],'o')
if out.max()!=out.min(): # safety check against flat field
contourf(X, Y, out) # plot the contour
ion() # interactive graphics on
draw() # update the plot
ioff()
show()
Example 12: createDS
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
def createDS():
pat = [[[5.1, 3.5, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.9, 3.0, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.7, 3.2, 1.3, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.6, 3.1, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.6, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.9, 1.7, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[4.6, 3.4, 1.4, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.4, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.4, 2.9, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.9, 3.1, 1.5, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.7, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.4, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.0, 1.4, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[4.3, 3.0, 1.1, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[5.8, 4.0, 1.2, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.7, 4.4, 1.5, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.9, 1.3, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.5, 1.4, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.7, 3.8, 1.7, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.8, 1.5, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.4, 1.7, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.7, 1.5, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[4.6, 3.6, 1.0, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.3, 1.7, 0.5], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.4, 1.9, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.0, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.4, 1.6, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[5.2, 3.5, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.2, 3.4, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.7, 3.2, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.1, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.4, 1.5, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[5.2, 4.1, 1.5, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[5.5, 4.2, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.9, 3.1, 1.5, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.2, 1.2, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.5, 3.5, 1.3, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.9, 3.1, 1.5, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[4.4, 3.0, 1.3, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.4, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.5, 1.3, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[4.5, 2.3, 1.3, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[4.4, 3.2, 1.3, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.5, 1.6, 0.6], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.8, 1.9, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.0, 1.4, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.8, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.6, 3.2, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.3, 3.7, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.3, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[7.0, 3.2, 4.7, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[6.4, 3.2, 4.5, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[6.9, 3.1, 4.9, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.3, 4.0, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.5, 2.8, 4.6, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 2.8, 4.5, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.3, 3.3, 4.7, 1.6], [0, 1, 0], [1], ['Iris-versicolor']], [[4.9, 2.4, 3.3, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[6.6, 2.9, 4.6, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.2, 2.7, 3.9, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[5.0, 2.0, 3.5, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[5.9, 3.0, 4.2, 1.5], [0, 
1, 0], [1], ['Iris-versicolor']], [[6.0, 2.2, 4.0, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[6.1, 2.9, 4.7, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[5.6, 2.9, 3.6, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.7, 3.1, 4.4, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[5.6, 3.0, 4.5, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[5.8, 2.7, 4.1, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[6.2, 2.2, 4.5, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[5.6, 2.5, 3.9, 1.1], [0, 1, 0], [1], ['Iris-versicolor']], [[5.9, 3.2, 4.8, 1.8], [0, 1, 0], [1], ['Iris-versicolor']], [[6.1, 2.8, 4.0, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.3, 2.5, 4.9, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[6.1, 2.8, 4.7, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[6.4, 2.9, 4.3, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.6, 3.0, 4.4, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[6.8, 2.8, 4.8, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[6.7, 3.0, 5.0, 1.7], [0, 1, 0], [1], ['Iris-versicolor']], [[6.0, 2.9, 4.5, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 2.6, 3.5, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.4, 3.8, 1.1], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.4, 3.7, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[5.8, 2.7, 3.9, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[6.0, 2.7, 5.1, 1.6], [0, 1, 0], [1], ['Iris-versicolor']], [[5.4, 3.0, 4.5, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[6.0, 3.4, 4.5, 1.6], [0, 1, 0], [1], ['Iris-versicolor']], [[6.7, 3.1, 4.7, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[6.3, 2.3, 4.4, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.6, 3.0, 4.1, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.5, 4.0, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.6, 4.4, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[6.1, 3.0, 4.6, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[5.8, 2.6, 4.0, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[5.0, 2.3, 3.3, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[5.6, 2.7, 4.2, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 3.0, 4.2, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 2.9, 4.2, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.2, 2.9, 4.3, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.1, 2.5, 3.0, 1.1], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 2.8, 4.1, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.3, 3.3, 6.0, 2.5], [0, 0, 1], [2], ['Iris-virginica']], [[5.8, 2.7, 5.1, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[7.1, 3.0, 5.9, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 2.9, 5.6, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.5, 3.0, 5.8, 2.2], [0, 0, 1], [2], ['Iris-virginica']], [[7.6, 3.0, 6.6, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[4.9, 2.5, 4.5, 1.7], [0, 0, 1], [2], ['Iris-virginica']], [[7.3, 2.9, 6.3, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 2.5, 5.8, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[7.2, 3.6, 6.1, 2.5], [0, 0, 1], [2], ['Iris-virginica']], [[6.5, 3.2, 5.1, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 2.7, 5.3, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[6.8, 3.0, 5.5, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[5.7, 2.5, 5.0, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[5.8, 2.8, 5.1, 2.4], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 3.2, 5.3, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.5, 3.0, 5.5, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[7.7, 3.8, 6.7, 2.2], [0, 0, 1], [2], ['Iris-virginica']], [[7.7, 2.6, 6.9, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.0, 2.2, 5.0, 1.5], [0, 0, 1], [2], ['Iris-virginica']], 
[[6.9, 3.2, 5.7, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[5.6, 2.8, 4.9, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[7.7, 2.8, 6.7, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 2.7, 4.9, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 3.3, 5.7, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[7.2, 3.2, 6.0, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.2, 2.8, 4.8, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.1, 3.0, 4.9, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 2.8, 5.6, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[7.2, 3.0, 5.8, 1.6], [0, 0, 1], [2], ['Iris-virginica']], [[7.4, 2.8, 6.1, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[7.9, 3.8, 6.4, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 2.8, 5.6, 2.2], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 2.8, 5.1, 1.5], [0, 0, 1], [2], ['Iris-virginica']], [[6.1, 2.6, 5.6, 1.4], [0, 0, 1], [2], ['Iris-virginica']], [[7.7, 3.0, 6.1, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 3.4, 5.6, 2.4], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 3.1, 5.5, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.0, 3.0, 4.8, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.9, 3.1, 5.4, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 3.1, 5.6, 2.4], [0, 0, 1], [2], ['Iris-virginica']], [[6.9, 3.1, 5.1, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[5.8, 2.7, 5.1, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[6.8, 3.2, 5.9, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 3.3, 5.7, 2.5], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 3.0, 5.2, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 2.5, 5.0, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[6.5, 3.0, 5.2, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[6.2, 3.4, 5.4, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[5.9, 3.0, 5.1, 1.8], [0, 0, 1], [2], ['Iris-virginica']]]
alldata = ClassificationDataSet(4, 1, nb_classes=3, \
class_labels=['set','vers','virg'])
for p in pat:
t = p[2]
alldata.addSample(p[0],t)
tstdata, trndata = alldata.splitWithProportion( 0.33 )
trndata._convertToOneOfMany( )
tstdata._convertToOneOfMany( )
return trndata, tstdata
Example 13: EvaluateArtificialNeuralNetwork
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
def EvaluateArtificialNeuralNetwork(training_data, Input_features, Output_feature, NUMBER_CLASSES, HIDDEN_NEURONS, NUMBER_LAYERS, dataset_name, ParameterVal):
X = training_data[Input_features]
Y = training_data[Output_feature]
ds = ClassificationDataSet(X.shape[1], nb_classes=NUMBER_CLASSES)
for k in xrange(len(X)):
ds.addSample((X.ix[k,:]), Y.ix[k,:])
tstdata_temp, trndata_temp = ds.splitWithProportion(.25)
tstdata = ClassificationDataSet(X.shape[1], nb_classes=NUMBER_CLASSES)
for n in xrange(0, tstdata_temp.getLength()):
tstdata.addSample( tstdata_temp.getSample(n)[0], tstdata_temp.getSample(n)[1] )
trndata = ClassificationDataSet(X.shape[1], nb_classes=NUMBER_CLASSES)
for n in xrange(0, trndata_temp.getLength()):
trndata.addSample( trndata_temp.getSample(n)[0], trndata_temp.getSample(n)[1] )
if NUMBER_CLASSES > 1:
trndata._convertToOneOfMany( )
tstdata._convertToOneOfMany( )
'''*****Actual computation with one layer and HIDDEN_NEURONS number of neurons********'''
fnn = buildNetwork( trndata.indim, HIDDEN_NEURONS , trndata.outdim, outclass=SoftmaxLayer )
trainer = BackpropTrainer( fnn, dataset=trndata, momentum=0.1, verbose=False, weightdecay=0.01)
trainer.trainUntilConvergence(maxEpochs=3)
trnresult = percentError( trainer.testOnClassData(), trndata['class'] )
tstresult = percentError( trainer.testOnClassData(dataset=tstdata ), tstdata['class'] )
print ("Accuracy with Artificial Neural Network: epoch: " + str(trainer.totalepochs) + " TrainingSet:" + str(1-trnresult/100) + " TestSet:" + str(1-tstresult/100))
'''****** Graphical Representation*****'''
'''tot_hidden_tests, X_train, X_test, Y_train, Y_test, training_error, test_error = InitiateErrorCalcData(ParameterVal, training_data[Input_features], training_data[Output_feature])
for hidden_unit in tot_hidden_tests:
print ("Computing hidden unit :" + str(hidden_unit))
model = buildNetwork( trndata.indim, hidden_unit , trndata.outdim, outclass=SoftmaxLayer )
temp_trainer = BackpropTrainer( model, dataset=trndata, momentum=0.1, verbose=True, weightdecay=0.01)
temp_trainer.trainUntilConvergence(maxEpochs=3)
training_error.append(MSE( temp_trainer.testOnClassData(), trndata['class'] ))
test_error.append(MSE( temp_trainer.testOnClassData(dataset=tstdata ), tstdata['class'] ))
PlotErrors(tot_hidden_tests, training_error, test_error, dataset_name, "Number of Hidden Units for single layer ANN", "MSE")'''
'''*****Graphical representation with multiple layers and HIDDEN_NEURONS number of neurons********'''
'''ffn = FeedForwardNetwork()
Example 14: makeIrisDatasets
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
def makeIrisDatasets():
    # 'pat' is assumed to be the labeled iris pattern list from Example 12 (UCI Machine Learning Repository iris data set)
alldata = ClassificationDataSet(4, 1, nb_classes=3, \
class_labels=['set','vers','virg'])
for p in pat:
t = p[2]
alldata.addSample(p[0],t)
tstdata, trndata = alldata.splitWithProportion( 0.33 )
trndata._convertToOneOfMany( )
tstdata._convertToOneOfMany( )
return trndata, tstdata
Example 15: neuralNetworksTrain
# Required import: from pybrain.datasets import ClassificationDataSet [as alias]
# Or: from pybrain.datasets.ClassificationDataSet import splitWithProportion [as alias]
def neuralNetworksTrain(self):
alldata = ClassificationDataSet( 23, 1, nb_classes=2)
train_input_data = self.loadData(self.train_file)
test_input_data = self.loadData(self.test_file)
target = [x[1] for x in train_input_data]
target = target[1:]
features = [x[2:] for x in train_input_data]
features = features[1:]
for i in range(0,len(features)):
alldata.addSample(features[i], target[i])
tstdata, trndata = alldata.splitWithProportion(0.25)
trndata._convertToOneOfMany()
tstdata._convertToOneOfMany()
INPUT_FEATURES = 23
CLASSES = 2
HIDDEN_NEURONS = 200
WEIGHTDECAY = 0.1
MOMENTUM = 0.1
EPOCH = 2
fnn = buildNetwork(trndata.indim, HIDDEN_NEURONS, trndata.outdim,outclass=LinearLayer)
trainer = BackpropTrainer(fnn, dataset=trndata, momentum=MOMENTUM,verbose=True, weightdecay=WEIGHTDECAY)
trainer.trainEpochs(EPOCH)
pred = trainer.testOnClassData(dataset=tstdata)
actual = tstdata['class']
self.computeAccuracy(actual,pred)
#trnresult = percentError(trainer.testOnClassData(),
# trndata['class'])
#tstresult = percentError(trainer.testOnClassData(
# dataset=tstdata), tstdata['class'])
# print("epoch: %4d" % trainer.totalepochs,
# " train error: %5.2f%%" % trnresult,
# " test error: %5.2f%%" % tstresult)
#out = fnn.activateOnDataset(griddata)
# the highest output activation gives the class
#out = out.argmax(axis=1)
#out = out.reshape(X.shape)
testFeatures = [x[2:] for x in test_input_data]
testFeatures = testFeatures[1:]
prediction = [fnn.activate(x) for x in testFeatures]
print "Neural Network Architecture:"
print "Layers: Input layer, Hidden Layer and Output Layers"
print "Epoch = "+str(EPOCH)
print "Neurons in the hidden layer:"+str(HIDDEN_NEURONS)
print "Precision recall F score support metrics for Neural Networks "
print precision_recall_fscore_support(actual,pred)
print "confusion matrix"
print confusion_matrix(actual,pred)