当前位置: 首页>>代码示例>>Python>>正文


Python SupervisedDataSet.addSample方法代码示例

本文整理汇总了Python中pybrain.datasets.SupervisedDataSet.addSample方法的典型用法代码示例。如果您正苦于以下问题:Python SupervisedDataSet.addSample方法的具体用法?Python SupervisedDataSet.addSample怎么用?Python SupervisedDataSet.addSample使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在pybrain.datasets.SupervisedDataSet的用法示例。


在下文中一共展示了SupervisedDataSet.addSample方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: __init__

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
	def __init__(self, histogram_list):
		"""Build and train a 1024-100-1 face/no-face classifier from histograms.

		Every histogram in histogram_list is treated as a positive (face)
		sample; 15 random-noise vectors are added as negatives.
		"""
		self.net = buildNetwork(1024, 100, 1)
		dataset = SupervisedDataSet(1024, 1)
		# All supplied histograms are positive examples.
		for sample in histogram_list:
			dataset.addSample(sample, (1,))
		# Random noise padding; this noise should never be a face.
		for _ in range(15):
			dataset.addSample(numpy.random.random((1024)) * 255, (0,))
		trainer = BackpropTrainer(self.net, dataset)
		# Fixed epoch count instead of trainUntilConvergence().
		epoch = 0
		while epoch < 2000:
			error = trainer.train()
			print("count:\t" + str(epoch) + "\terror:\t" + str(error))
			epoch += 1
		print(trainer.train())

		"""
开发者ID:SamGinzburg,项目名称:FaceDetectionAndRecognition,代码行数:27,代码来源:ProfileANN.py

示例2: train

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
 def train(self, training_files, learningrate=0.01, scaling=True, noise=False, verbose=True):
     """Builds a SupervisedDataSet from sensor/action log files and trains self.net.

     training_files -- iterable of log file paths; lines starting with
                       "Received:" carry sensor readings and lines starting
                       with "Sending:" carry the action taken.
     learningrate   -- accepted but unused (the BackpropTrainer that used it
                       is commented out; RPropMinusTrainer is used instead).
     scaling        -- if True, mean-center the inputs and keep the fitted
                       scaler on self.scaler_input.
     noise          -- if True, add each sample a second time with sensor noise.
     verbose        -- passed through to the trainer.
     """
     print "building dataset..."
     ds = SupervisedDataSet(SensorModel.array_length(self.sensor_ids), 1)
     # read training file line, create sensormodel object, do backprop
     a = None
     s = None
     for logfile in training_files:
         print "loading file", logfile
         with open(logfile) as f:
             for line in f:
                 if line.startswith("Received:"):
                     s = SensorModel(string=line.split(' ', 1)[1])
                 elif line.startswith("Sending:"):
                     a = Actions.from_string(string=line.split(' ', 1)[1])
                 # Emit a sample only once both halves of a (sensor, action)
                 # pair have been seen, then reset the pair.
                 if s is not None and a is not None:
                     ds.addSample(inp=s.get_array(self.sensor_ids), target=a[self.action_ids[0]])
                     if noise:
                         # add the same training sample again but with noise in the sensors
                         s.add_noise()
                         ds.addSample(inp=s.get_array(self.sensor_ids), target=a[self.action_ids[0]])
                     s = None
                     a = None
     print "dataset size:", len(ds)
     if scaling:
         print "scaling dataset"
         # Mean-center inputs only (with_std=False leaves the variance alone).
         self.scaler_input = StandardScaler(with_mean=True, with_std=False).fit(ds.data['input'])
         ds.data['input'] = self.scaler_input.transform(ds.data['input'])
         # No-op: targets are intentionally left unscaled.
         ds.data['target'] = ds.data['target']
     #self.trainer = BackpropTrainer(self.net, learningrate=learningrate, verbose=verbose)
     self.trainer = RPropMinusTrainer(self.net, verbose=verbose, batchlearning=True)
     print "training network..."
     self.trainer.trainUntilConvergence(dataset=ds, validationProportion=0.25, maxEpochs=10, continueEpochs=2)
开发者ID:lqrz,项目名称:computational_intelligence,代码行数:34,代码来源:network.py

示例3: main

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
def main():
	"""Trains a 178-input, 5-output network on the CSV data and reports test error."""
	inputs = ReadCSV('./data/input.csv')
	outputs = ReadCSV('./data/output.csv')
	
	# NOTE(review): `test` and `traits` are module-level globals not defined in
	# this snippet -- presumably a held-out id->description mapping and the
	# five trait names; confirm against the full module.
	test_set = test.keys()
	train_set = []
	for k in inputs.keys():
		if k not in test_set:
			train_set.append(k)
	print "Number of training samples", len(train_set)
	print "Number of testing samples", len(test_set)
			
	net = buildNetwork(178, 6, 5)
	ds=SupervisedDataSet(178,5)
	for id in train_set:
		ds.addSample(inputs[id],outputs[id])

	trainer = BackpropTrainer(net, ds, learningrate=0.001, momentum = 0.001)

	trainer.trainUntilConvergence(maxEpochs=1000, validationProportion = 0.5)
	
	
	for id in test_set:
		predicted = net.activate(inputs[id])
		actual = outputs[id]
		print '-----------------------------'
		print test[id]
		print '-----------------------------'
		print 'Trait\t\tPredicted\tActual\tError'
		for i in range(0,5):
			# Absolute error scaled to a percentage of a 4-point range.
			error = abs(predicted[i] - actual[i])*100/4.0
			print traits[i], '\t', predicted[i], '\t', actual[i], '\t', error,"%" 
开发者ID:niyasc,项目名称:Personality-Prediction-using-facebook-profile,代码行数:34,代码来源:sample.py

示例4: __init__

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
class Brain:
	"""A 12-input feed-forward image classifier plus its training dataset.

	Images are reduced to 12 features by twelveToneParallel() and classified
	into group 0 or 1.
	"""

	def __init__(self, hiddenNodes = 30):
		# buildNetwork(inputs, hidden, outputs); tanh hidden layer gives a
		# bounded, zero-centered response.
		self.myClassifierNet = buildNetwork(12, hiddenNodes, 1, bias=True, hiddenclass=TanhLayer)
		self.myDataset = SupervisedDataSet(12, 1)
		self.myClassifierTrainer = BackpropTrainer(self.myClassifierNet, self.myDataset)

	def addSampleImageFromFile(self, imageFile, groupId):
		"adds a data sample from an image file, including needed processing"
		myImage = Image.open(imageFile)
		self.myDataset.addSample(twelveToneParallel(myImage), (groupId,))

	def train(self):
		# trainUntilConvergence() can take arbitrarily long (possibly literally
		# in the pathological case); a fixed 15 epochs may yield an inferior
		# network but in practice works fine.
		for i in range(0, 15):
			self.myClassifierTrainer.train()

	def save(self, saveFileName="recognizernet.brain"):
		"""Pickles the classifier network to saveFileName."""
		# 'wb' + context manager: pickle data is binary, and the file must be
		# closed even if dump() raises.
		with open(saveFileName, 'wb') as saveFile:
			pickle.dump(self.myClassifierNet, saveFile)

	def load(self, saveFileName="recognizernet.brain"):
		"""Restores a previously pickled classifier network into this Brain."""
		# Bug fix: the loaded network must be assigned to self.myClassifierNet;
		# the original stored it in a local, so load() silently had no effect.
		with open(saveFileName, 'rb') as saveFile:
			self.myClassifierNet = pickle.load(saveFile)

	def classify(self, fileName):
		"""Returns 1 if the network's activation for the image is >= 0.5, else 0."""
		myImage = Image.open(fileName)
		if self.myClassifierNet.activate(twelveToneParallel(myImage)) < 0.5:
			return 0
		else:
			return 1
开发者ID:anurive,项目名称:pybrain-picture-sort,代码行数:36,代码来源:recognizer.py

示例5: loadDataSet

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
def loadDataSet(ds_file):
  """Reads a comma-separated digit dataset into a pybrain SupervisedDataSet.

  Each line holds 400 feature values followed by an integer label, where
  label 10 stands for digit 0.  Raw features are appended to the module
  globals X and raw labels to Y as a side effect.  Returns the dataset with
  one-hot (10-way) targets.
  """
  global X, Y
  ds = SupervisedDataSet(400, 10)
  with open(ds_file, "rb") as f:
    for line in f.readlines():
      values = [float(a) for a in line.strip().split(',')]
      features = values[:-1]
      X.append(features)
      label = int(values[-1])
      Y.append(label)
      # One-hot encode: class 10 maps onto index 0.
      target = [1 if i == label or (i == 0 and label == 10) else 0
                for i in range(10)]
      ds.addSample(tuple(features), tuple(target))
  return ds
开发者ID:onidzelskyi,项目名称:VSN,代码行数:27,代码来源:trainNN.py

示例6: pybrain_high

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
def pybrain_high():
	"""Trains a 5-input BP network on stock rows and predicts values for CIHKY."""
	back=[]
	# First 100 rows of every stock except CIHKY form the training set;
	# CIHKY itself is held out for testing.
	alldate=New_stock.objects.filter().exclude(name='CIHKY')[0:100]
	wholelen=len(alldate)
	test=New_stock.objects.filter(name__contains="CIHKY")
	testlen=len(test)
	# test dataset (targets are dummy zeros -- only the inputs are used below)
	testdata= SupervisedDataSet(5, 1)
	testwhole=newalldate(test,testlen)
	for i in testwhole:
		testdata.addSample((i[0],i[2],i[3],i[4],i[5]), (0,))	
	# training dataset: field 1 of each row is the regression target
	data= SupervisedDataSet(5, 1)
	wholedate=newalldate(alldate,wholelen)
	for i in wholedate:
		data.addSample((i[0],i[2],i[3],i[4],i[5]), (i[1]))	
	#print testwhole
	# build the BP neural network: 5 inputs, 3 tanh hidden units, softmax output
	net = buildNetwork(5, 3, 1,bias=True,hiddenclass=TanhLayer, outclass=SoftmaxLayer)
	
	trainer = BackpropTrainer(net,data)
	trainer.trainEpochs(epochs=100)
	# train and test the network
#	print trainer.train()
	trainer.train()
	print 'ok'
	out=net.activateOnDataset(testdata)
	for j in  test:
                back.append((j.high))
	print back
	print out
	# NOTE(review): backnormal presumably de-normalizes the predictions using
	# the actual highs -- confirm in the surrounding module.
	backout=backnormal(back,out)
	print 'okokokoko'
	print backout # print the de-normalized test-set predictions
	return out 
开发者ID:lanlanzky,项目名称:stock_project,代码行数:37,代码来源:views.py

示例7: learn

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
    def learn(self):
        """One NFQ iteration: turns the reinforcement dataset into a
        supervised Q-target dataset, then fits the network with RProp-.
        """
        # convert reinforcement dataset to NFQ supervised dataset
        supervised = SupervisedDataSet(self.module.network.indim, 1)

        for seq in self.dataset:
            lastexperience = None
            for state, action, reward in seq:
                if not lastexperience:
                    # delay each experience in sequence by one
                    lastexperience = (state, action, reward)
                    continue

                # use experience from last timestep to do Q update
                (state_, action_, reward_) = lastexperience

                Q = self.module.getValue(state_, action_[0])

                # Input is the previous state concatenated with a one-hot
                # encoding of the previous action; the target moves Q halfway
                # toward the one-step TD target r + gamma * max_a Q(s', a).
                inp = r_[state_, one_to_n(action_[0], self.module.numActions)]
                tgt = Q + 0.5*(reward_ + self.gamma * max(self.module.getActionValues(state)) - Q)
                supervised.addSample(inp, tgt)

                # update last experience with current one
                lastexperience = (state, action, reward)

        # train module with backprop/rprop on dataset
        trainer = RPropMinusTrainer(self.module.network, dataset=supervised, batchlearning=True, verbose=False)
        trainer.trainUntilConvergence(maxEpochs=self.maxEpochs)
开发者ID:myeaton1,项目名称:euphoriaAI,代码行数:29,代码来源:eunfq.py

示例8: NeuralKinect

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
class NeuralKinect():
    """Classifies Kinect hand poses (letters) with a 60-input, 5-output net.

    Every 20 CSV rows are flattened into one 60-value sample; targets are
    the letter's 5-digit binary code taken from the CSV header.
    """
    def __init__(self):
        # Softmax layer -> great for classification networks
        #self.neuralNet = buildNetwork(60, 60, 5, outclass=SoftmaxLayer)
        #self.neuralNet = buildNetwork(60, 60, 5, hiddenclass=TanhLayer)
        #self.neuralNet = buildNetwork(60, 60, 5, bias=True)
        self.neuralNet = buildNetwork(60, 60, 5)
        self.dataSet = SupervisedDataSet(60, 5)

    def trainBackProp(self):
        """Trains with backprop for EPOCHS epochs, reporting wall time and error."""
        trainer = BackpropTrainer(self.neuralNet, self.dataSet)
        start = time.time()
        trainer.trainEpochs(EPOCHS)
        end = time.time()
        print("Training time -> " + repr(end-start))
        print(repr(trainer.train()))

    def loadDataSet(self):
        """Loads training samples from TrainData/*.csv.

        The header row encodes the letter and its 5-digit binary target;
        each subsequent block of 20 rows becomes one training sample.
        """
        points = []
        for csvFile in glob.iglob("TrainData/*.csv"):
            with open(csvFile, 'rt') as letterSet:
                reader = csv.reader(letterSet)
                # NOTE(review): reader.next() is Python 2 only (next(reader) in Py3).
                header = str(reader.next())
                letter = header[2:3]
                targetStr = header[4:9]
                print("Processing Dataset for letter -> " + letter)
                target = []
                for digit in targetStr:
                    target.append(digit)
                rows = 1
                for row in reader:              
                    for col in row:
                        points.append(col)
                    # Every 20 rows completes one 60-value sample.
                    if rows % 20 == 0:
                        self.dataSet.addSample(points, target)
                        points = []
                    rows += 1
                    
    def processResults(self, output):
        """Thresholds the 5 outputs into bits and prints the decoded letter."""
        result = ""
        for digit in output:
            if digit > 0.5:
                result += "1"
            else:
                result += "0"
        # Interpret the 5 bits as a number 1..26 and map onto A..Z.
        print("Network result -> " + chr(64+int(result,2)))
                    
    def testNetwork(self):
        """Activates the net on each TestData/*.csv pose (20 rows per sample)."""
        points = []
        for csvFile in glob.iglob("TestData/*.csv"):
            with open(csvFile, 'rt') as testPose:
                reader = csv.reader(testPose)
                rows = 1
                for row in reader:
                    for col in row:
                        points.append(col)
                    if rows % 20 == 0:
                        self.processResults(self.neuralNet.activate(points))
                        points = []
                    rows += 1

示例9: neural_network

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
def neural_network(data, target, network):
    """10-fold cross-validated RMSE of a 7-hidden-unit backprop network.

    Trains on each fold, collects the per-fold RMSE, plots all ten values,
    and prints their mean.  The `network` argument is accepted but unused.
    """
    train_ds = SupervisedDataSet(len(data[0]), 1)
    model = buildNetwork(len(data[0]), 7, 1, bias = True)
    folds = KFold(len(target), 10, shuffle = True)
    fold_errors = []
    for fit_idx, eval_idx in folds:
        fit_data, eval_data = data[fit_idx], data[eval_idx]
        fit_target, eval_target = target[fit_idx], target[eval_idx]
        for row, label in zip(fit_data, fit_target):
            train_ds.addSample(row, label)
        trainer = BackpropTrainer(model, train_ds, verbose = True)
        trainer.trainUntilConvergence(maxEpochs = 10)
        predictions = [model.activate(row) for row in eval_data]
        fold_errors.append(sqrt(np.mean((predictions - eval_target)**2)))
        # Reuse the same dataset object for the next fold.
        train_ds.clear()
    plt.figure()
    plt.plot(range(1, 11), fold_errors)
    plt.xlabel('cross-validation time')
    plt.ylabel('RMSE')
    plt.show()
    print(np.mean(fold_errors))
开发者ID:eprym,项目名称:EE-239AS,代码行数:29,代码来源:problem3_predict.py

示例10: get_dataset_for_pybrain_regression

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
def get_dataset_for_pybrain_regression(X,y):
	"""Packs feature rows X and targets y into a 250-input SupervisedDataSet.

	Each row of X.values becomes a tuple of floats; each y.values entry
	becomes a one-element float tuple.
	"""
	ds = SupervisedDataSet(250,1)
	for row, target in zip(X.values, y.values):
		ds.addSample(tuple(float(v) for v in row), (float(target),))
	return ds
开发者ID:SaarthakKhanna2104,项目名称:Home-Depot-Product-Search-Relevance,代码行数:9,代码来源:NeuralNetsReg.py

示例11: learn

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
    def learn(self,dataset):
        """
            This function trains the network.

            Input:
            dataset     - Dataset to train network: a dict mapping
                          raw inputs to expected outputs

            Raises:
            NeuralBrainException if the brain is unconfigured or the
            dataset is empty

            Returns:
            Nothing
        """
        from pybrain.datasets import SupervisedDataSet
        from neuraltrainer import NeuralTrainer

        # `is None` instead of `== None`; identity is the correct check here.
        if self._net is None: raise NeuralBrainException("Brain is not configured!")
        if not dataset: raise NeuralBrainException("Dataset for learning is empty.")

        data = SupervisedDataSet(self._input,self._output)
        # Renamed from input/output to avoid shadowing the builtins.
        for raw_in, raw_out in dataset.items():
            sample_in = self._normalize(raw_in, self._input)
            sample_out = self._normalize(raw_out, self._output)
            # Each pair is added twice on purpose: "For better learning 2x".
            data.addSample(sample_in, sample_out)
            data.addSample(sample_in, sample_out)

        trainer = NeuralTrainer(self._net, data)
        trainer.simpleTrain()
开发者ID:0x1001,项目名称:jarvis,代码行数:28,代码来源:neuralbrain.py

示例12: get_train_samples

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
def get_train_samples(input_num,output_num):
    '''
    Reads digit images from the new_samples folder and builds samples sized
    by input_num/output_num; every original image additionally yields 100
    noisy variants.
    '''
    print 'getsample start.'
    sam_path='./new_samples'
    samples = SupervisedDataSet(input_num,output_num)
    nlist = os.listdir(sam_path)
    # Images are resized to t x t so that t*t == input_num.
    t=int(np.sqrt(input_num))
    for n in nlist:
        file = os.path.join(sam_path,n)
        im = Image.open(file)
        im = im.convert('L')
        im = im.resize((t,t),Image.BILINEAR)
        buf = np.array(im).reshape(input_num,1)
        # Binarize: dark pixels (value < 200) become True.
        buf = buf<200
        buf = tuple(buf)
        # The file name before the extension is the class index.
        buf1=int(n.split('.')[0])
        # One-hot target built in place (Python 2: range() returns a list).
        buf2=range(output_num)
        for i in range(len(buf2)):
            buf2[i] = 0
        buf2[buf1]=1
        buf2 = tuple(buf2)
        samples.addSample(buf,buf2)
        # NOTE(review): Python 2 assumed -- len(buf)/20 relies on integer
        # division; confirm before porting.
        for i in range(100):
            buf3 = list(buf)
            # Flip ~5% of the pixels at random to create a noisy variant.
            for j in range(len(buf)/20):
                buf3[np.random.randint(len(buf))] = bool(np.random.randint(2))
            samples.addSample(tuple(buf3),buf2)
    return samples 
开发者ID:Bayoscar,项目名称:ChineseNumberIdentify,代码行数:32,代码来源:ChineseNumberIdentify.py

示例13: convertDataNeuralNetwork

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
def convertDataNeuralNetwork(x, y):
	"""Zips feature matrix x and targets y into a pybrain SupervisedDataSet.

	x must expose .shape (x.shape[1] is the input width); x and y are
	iterated in lockstep, one sample per row.
	"""
	# Fixed the mixed tab/space indentation that made the loop body an
	# IndentationError in the original.
	data = SupervisedDataSet(x.shape[1], 1)
	for xIns, yIns in zip(x, y):
		data.addSample(xIns, yIns)
	return data

def NN(xTrain, yTrain, xTest, yTest):
	"""Builds a 1-hidden-layer (5 sigmoid units) network, trains it with
	backprop for 10 x 500 epochs, and returns the test percent-error / 100.
	"""
	trainData = convertDataNeuralNetwork(xTrain, yTrain)
	testData = convertDataNeuralNetwork(xTest, yTest)
	# Wire the network by hand: sigmoid input layer, 5 sigmoid hidden units,
	# linear output, fully connected in -> hidden -> out.
	fnn = FeedForwardNetwork()
	inLayer = SigmoidLayer(trainData.indim)
	hiddenLayer = SigmoidLayer(5)
	outLayer = LinearLayer(trainData.outdim)
	fnn.addInputModule(inLayer)
	fnn.addModule(hiddenLayer)
	fnn.addOutputModule(outLayer)
	in_to_hidden = FullConnection(inLayer, hiddenLayer)
	hidden_to_out = FullConnection(hiddenLayer, outLayer)
	fnn.addConnection(in_to_hidden)
	fnn.addConnection(hidden_to_out)
	fnn.sortModules()
	trainer = BackpropTrainer(fnn, dataset = trainData, momentum = 0.1, verbose = True, weightdecay = 0.01)

	# 10 rounds of 500 epochs each.
	for i in xrange(10):
	    trainer.trainEpochs(500)
	    
	# NOTE(review): testOnClassData on a regression-style net is dubious --
	# percentError compares class decisions against yTest; confirm intent.
	rmse = percentError(trainer.testOnClassData(dataset = testData), yTest)
	return rmse/100

def main():
	"""Entry point: trains on the module-level split and prints the error."""
	# NOTE(review): xTrain/yTrain/xTest/yTest are globals not defined in this
	# snippet; they must be provided by the surrounding module.
	rmse = NN(xTrain, yTrain, xTest, yTest)
	print rmse

if __name__=="__main__":
	main()
开发者ID:amish-goyal,项目名称:yelp-ratings,代码行数:37,代码来源:NeuralNets.py

示例14: run_try

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
    def run_try(self, rand_chance=0, rand_count=0, rand_count_ref=0, render=False):
        """Runs one episode of the environment with the current network policy.

        rand_chance    -- per-frame probability of inverting the chosen action.
        rand_count     -- number of distinct frame indices forced to invert.
        rand_count_ref -- indices are drawn from [0, rand_count_ref).
        render         -- whether to render each frame.

        Returns (t, ds): frames survived and a SupervisedDataSet of the
        (observation, action) pairs taken.
        """
        ds = SupervisedDataSet(env_size, 1)
        observation = env.reset()

        # Pick rand_count distinct frame indices whose actions get inverted.
        random_indexes = []
        while len(random_indexes) < rand_count:
            random_index = math.floor(random() * rand_count_ref)
            if random_index not in random_indexes:
                random_indexes.append(random_index)

        for t in range(max_frames):
            if render:
                env.render()

            # Threshold the network's scalar output into a binary action.
            action = 0 if net.activate(observation)[0] < 0 else 1

            if t in random_indexes or random() < rand_chance:
                # Bug fix: invert the binary action.  The original used
                # (action + 1) % 1, which is always 0 and never flips to 1.
                action = (action + 1) % 2

            ds.addSample(observation, (action,))
            observation, reward, done, info = env.step(action)

            if done:
                print("Episode finished after {} timesteps".format(t + 1))
                break

        # Surviving every frame counts as a pass; replay once with rendering.
        if t == max_frames - 1:
            print("Passed!!")
            self.run_try(render=True)

        return t, ds

示例15: trainDataSet

# 需要导入模块: from pybrain.datasets import SupervisedDataSet [as 别名]
# 或者: from pybrain.datasets.SupervisedDataSet import addSample [as 别名]
def trainDataSet():
    """Builds a week-by-week 5245-cell case-count dataset, trains a network to
    predict which grid cells see cases in the following two weeks, and writes
    per-sample match counts to data.txt.
    """
    cases = Case.objects.exclude(geocode__isnull=True, geocode__grid=-1)

    print "Data Representation"
    ds = SupervisedDataSet(5245, 5245)
    for w in xrange(0,52):
        print "Start week w",
        dataset_input = [0 for i in xrange(0,5245)]
        dataset_output = [0 for i in xrange(0,5245)]
        for i in xrange(0,5245):
            # Input: case count in cell i during week w.  Output: 1 if the
            # cell has any case in week w+1 or w+2, else 0.
            dataset_input[i] = cases.filter(geocode__grid=i, morbidity__week=w).count()
            dataset_output[i] = 1 if (cases.filter(geocode__grid=i, morbidity__week=w+1).count() > 0 or cases.filter(geocode__grid=i, morbidity__week=w+2).count() > 0) else 0
        ds.addSample( (dataset_input), (dataset_output))
        print " - done week w"
    # tstdata, trndata = ds.splitWithProportion(0.25)
    print "Train"
    net = buildNetwork( 5245, 1000, 5245, bias=True)
    trainer = BackpropTrainer(net, ds, learningrate=0.1, momentum=0.99)

    terrors = trainer.trainUntilConvergence(verbose = None, validationProportion = 0.33, maxEpochs = 1000, continueEpochs = 10 )
    # print terrors[0][-1],terrors[1][-1]
    fo = open("data.txt", "w")
    for input, expectedOutput in ds:
        # Count how many of the 5245 floored outputs match the expectation.
        output = net.activate(input)
        count = 0
        for q in xrange(0, 5245):
            print math.floor(output[q]), math.floor(expectedOutput[q])
            if math.floor(output[q]) == math.floor(expectedOutput[q]):
                count+=1    
        m = count/5245
        fo.write("{0} ::  {1}".format(count, m));
        # NOTE(review): under Python 2 integer division m is 0 unless all 5245
        # cells match, and fo is never closed -- confirm both are intended.


注:本文中的pybrain.datasets.SupervisedDataSet.addSample方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。