

Python SupervisedDataSet.addSample Method Code Examples

This article collects typical usage examples of the Python method pybrain.datasets.supervised.SupervisedDataSet.addSample. If you are wondering how SupervisedDataSet.addSample is used in practice, the curated code examples below may help. You can also explore further usage examples of the containing class, pybrain.datasets.supervised.SupervisedDataSet.


Below are 15 code examples of the SupervisedDataSet.addSample method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
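
Before the examples, here is a minimal self-contained sketch (not taken from any of the projects below; it only assumes a standard PyBrain installation) showing the typical workflow around addSample: create a SupervisedDataSet with the input and target dimensions, add samples, build a network, and train it with BackpropTrainer.

from pybrain.datasets.supervised import SupervisedDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer

# XOR toy problem: 2 input dimensions, 1 target dimension
ds = SupervisedDataSet(2, 1)
ds.addSample((0, 0), (0,))
ds.addSample((0, 1), (1,))
ds.addSample((1, 0), (1,))
ds.addSample((1, 1), (0,))

net = buildNetwork(ds.indim, 3, ds.outdim, bias=True)
trainer = BackpropTrainer(net, ds)
for _ in range(100):
    trainer.train()          # one epoch per call, returns that epoch's error
print(net.activate((0, 1)))  # the network's prediction for input (0, 1)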

Example 1: test_train

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
	def test_train(self, epochs=1):
		print("Training...")

		split = int(len(self.samples) * 0.7)
		train_samples = self.samples[0:split]
		train_labels  = self.labels[0:split]

		test_samples = self.samples[split:]
		test_labels  = self.labels[split:]

		net = buildNetwork(300, 300, 1)	
		ds = SupervisedDataSet(300, 1)
		for i in range(len(train_samples)):  
			ds.addSample(tuple(np.array(train_samples[i], dtype='float64')), (train_labels[i],))
		
		trainer = BackpropTrainer(net, ds, verbose=True)
		trainer.trainEpochs(epochs)
		self.totalEpochs = epochs
		
		error = 0
		counter = 0
		for i in range(0, 100):
			output = net.activate(tuple(np.array(test_samples[i], dtype='float64')))
			if round(output[0]) != test_labels[i]:
				counter += 1
				print(counter, " : output : ", output[0], " real answer : ", test_labels[i])
				error += 1
			else:
				counter += 1
				print(counter, " : output : ", output[0], " real answer : ", test_labels[i])
		
		print("Trained with " + str(epochs) + " epochs; Total: " + str(self.totalEpochs) + ";")
		return error
Author: skrustev, Project: traffic-sign-recognition, Lines: 35, Source: neural_network.py

Example 2: Predict

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
	def Predict(self, ticker, day):
		endDay = day-datetime.timedelta(1)
		startDay = endDay - datetime.timedelta(self.trainingPeriod)
		try:
			stockData = data.DataReader(ticker, 'yahoo', startDay, endDay)
		except:
			return [0]

		rawTrainFeatures = []
		rawTrainResponses = []
		for currentDay in range(self.windowLength, len(stockData)):
			window = stockData[currentDay-self.windowLength:currentDay]
			currentPrice = stockData.iloc[currentDay]['Open']
			response = stockData.iloc[currentDay]['Close']
			rawTrainFeatures.append(self.GetFeature(window))
			rawTrainResponses.append(response)

		rawTestFeatures = self.GetFeature(stockData[len(stockData)-self.windowLength:len(stockData)])

		# normalTrainFeatures, normalTestFeatures = self.NormalizeFeatures(rawTrainFeatures, rawTestFeatures)
		alldata = SupervisedDataSet(len(rawTrainFeatures[0]), 1)
		for index in range(0, len(rawTrainFeatures)):
			alldata.addSample(rawTrainFeatures[index],[rawTrainResponses[index]])

		self.network = buildNetwork(alldata.indim, (alldata.indim + alldata.outdim) // 2, alldata.outdim, hiddenclass=SigmoidLayer, outclass=LinearLayer)  # // keeps the hidden-layer size an integer
		trainer = BackpropTrainer(self.network, dataset=alldata)
		activations = []
		for i in range(50):
			for x in range(5):
				trainer.train()
		return float(self.network.activate(rawTestFeatures))
Author: DerekHunter, Project: Algo, Lines: 33, Source: algo.py

Example 3: ANN

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
def ANN(
    trainFeature, trainLabel, testFeature, testLabel, netStructure, para_rate, para_momentum
):  # netStructure is a list [in, hidden1, hidden2, out]; momentum is a parameter of SGD
    sampleNum = trainFeature.shape[0]
    featureNum = trainFeature.shape[1]
    Dataset = SupervisedDataSet(featureNum, 1)
    i = 0
    while i < sampleNum:
        print(i)
        Dataset.addSample(list(trainFeature[i]), [trainLabel[i]])
        i += 1
    Network = buildNetwork(
        netStructure[0],
        netStructure[1],
        netStructure[2],
        netStructure[3],
        hiddenclass=SigmoidLayer,
        outclass=SigmoidLayer,
    )
    T = BackpropTrainer(Network, Dataset, learningrate=para_rate, momentum=para_momentum, verbose=True)
    # print(Dataset['input'])
    errorList = []
    errorList.append(T.testOnData(Dataset))
    T.trainOnDataset(Dataset)
    errorList.append(T.testOnData(Dataset))
    T.trainOnDataset(Dataset)
    while abs(T.testOnData(Dataset) - errorList[-1]) > 0.0001:
        T.trainOnDataset(Dataset)
        errorList.append(T.testOnData(Dataset))
    pass  # this step is for the output of predictedLabel
    print(np.array([Network.activate(x) for x in trainFeature]))
    # print(testLabel)
    print(Network.activate([0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]))
    return errorList
Author: Codelegant92, Project: CreditScoring, Lines: 36, Source: ANN.py

Example 4: create_dataset

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
def create_dataset():
    dataset = SupervisedDataSet(1, 1)

    for x in arange(0, 4*pi, pi/30):
        dataset.addSample(x, sin(x))

    return dataset
Author: slnowak, Project: msi_byrski, Lines: 9, Source: zad2.py

Example 5: anntrain

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
def anntrain(xdata,ydata):#,epochs):
    #print len(xdata[0])
    ds=SupervisedDataSet(len(xdata[0]),1)
    #ds=ClassificationDataSet(len(xdata[0]),1, nb_classes=2)
    for i,algo in enumerate (xdata):
        ds.addSample(algo,ydata[i])
    #ds._convertToOneOfMany()  # not used here
    net= FeedForwardNetwork()
    inp=LinearLayer(len(xdata[0]))
    h1=SigmoidLayer(1)
    outp=LinearLayer(1)
    net.addOutputModule(outp) 
    net.addInputModule(inp) 
    net.addModule(h1)
    #net=buildNetwork(len(xdata[0]),1,1,hiddenclass=TanhLayer,outclass=SoftmaxLayer)
    
    net.addConnection(FullConnection(inp, h1))  
    net.addConnection(FullConnection(h1, outp))

    net.sortModules()

    trainer=BackpropTrainer(net,ds)#, verbose=True)#dataset=ds,verbose=True)
    #trainer.trainEpochs(40)
    trainer.trainOnDataset(ds,40) 
    #trainer.trainUntilConvergence(ds, 20, verbose=True, validationProportion=0.15)
    trainer.testOnData()#verbose=True)
    #print 'Final weights:',net.params
    return net
Author: gibranfp, Project: authorid, Lines: 30, Source: ML.py

Example 6: readFromExcel

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
def readFromExcel(inCols,targetCols, numRows, fileName, offset=0, sheet=0, dataSet=None, conversionFun=None):
    """Populates a given dataset or creates a new SupervisedDataSet from an exccel file.
       
       inCols = array of colum numbers containing the input data colums, colums are indexed from 0
       targetCols = array of colum numbers containing the target data colums, colums are indexed from 0
       numRows = the number of rows ofs data
       fileName= the name of the excel file
       offset = the row the vaild data starts at
       sheet = the sheet of the workbook the data is on, indexed from 0 as it is in xlrd
       dataSet = the dataset to be populated, a SupervisedDataSet if created if it is None
       conversionFun = used to preprocess data.
    """
    book = open_workbook(fileName)
    sheet=book.sheet_by_index(sheet)
    
    if dataSet is None:
        dataSet=SupervisedDataSet(len(inCols),len(targetCols))
    for r in range(offset,(offset+numRows)):
        input=[]
        target=[]
        for inC in inCols:
            input.append(sheet.cell_value(r,inC))

        for tC in targetCols:
            target.append(sheet.cell_value(r,tC))
        try:
            if conversionFun:
                input=[conversionFun(i) for i in input]
                target=[conversionFun(t) for t in target]
                print(input, target)
        
            dataSet.addSample(input, target)
        except Exception:
            print('rejected row {}'.format(r))
    return dataSet
Author: PatrickHunter, Project: PyBrainExcel, Lines: 37, Source: ExcelTools.py
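
A hypothetical call, just to illustrate the parameter layout (the file name, column indices, and row count are made up for this sketch; it assumes xlrd is installed and the workbook actually has that shape):

ds = readFromExcel([0, 1, 2], [3], 200, 'measurements.xls', offset=1, conversionFun=float)
print(len(ds))  # up to 200 samples with 3 inputs and 1 target each; rows that fail conversion are skipped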

Example 7: retrain

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
def retrain(N, dataset, net):
    ds = SupervisedDataSet(20, 20)
    for data in dataset:
        ds.addSample(data[0], data[1])
    trainer = BackpropTrainer(net, ds)
    for i in range(N):
        trainer.train()
    return net
Author: shoz, Project: predlife, Lines: 10, Source: trainer.py

Example 8: main

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
def main():
    train_file = 'data/train.csv'
    # validation_file = 'data/validation.csv'
    output_model_file = 'model.xml'

    # hidden_size = 4
    epochs = 500

    # load data
    # def loadData():
    train = np.loadtxt(train_file, delimiter=' ')
    Input = train[0:,0:3]
    Output = train[0:,3:5]

    # validation = np.loadtxt(validation_file, delimiter=',')
    # train = np.vstack((train, validation))

    # x_train = train[:, 0:-1]
    # y_train = train[:, -1]
    # y_train = y_train.reshape(-1, 1)

    # input_size = x_train.shape[1]
    # target_size = y_train.shape[1]

    # prepare dataset
    # def prepare dataset(input_size, target_size):
    ds = SDS(Input,Output)
    # ds.addSample(input_size)
    # ds.setField('input', x_train)
    # ds.setField('target', y_train)

    # init and train
    # def initTrain(input_size, hidden_size, input, output):
    # net = buildNetwork(input_size, hidden_size, target_size, bias=True)
    net = buildNetwork(3,  # input layer
                                 4,  # hidden0
                                 2,  # output
                                 hiddenclass=SigmoidLayer,
                                 outclass=SigmoidLayer,
                                 bias=True
                                 )
    net = NetworkReader.readFrom('model.xml')
    for i,o in zip(Input,Output):
        ds.addSample(i,o)
        print(i, o)

    trainer = BackpropTrainer(net, ds)
        
    print "training for {} epochs...".format(epochs)

    for i in range(epochs):
        mse = trainer.train()
        rmse = sqrt(mse)
        print "training RMSE, epoch {}: {}".format(i + 1, rmse)
        if os.path.isfile("../stopfile.txt") == True:
            break
    
    NetworkWriter.writeToFile(net, output_model_file)
Author: amaneureka, Project: iResQ, Lines: 60, Source: train.py

Example 9: update_neural_network

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
    def update_neural_network(self, old_state, old_value, new_state, action, reward):
        desired_value = old_value + self.learning_rate * (reward + self.discount_factor * self.get_best_action(new_state)[1] - old_value)
        ds = SupervisedDataSet(self.states_and_actions_num, 1)
        ds.addSample(old_state + action, desired_value)
        trainer = BackpropTrainer(self.neural_network, ds)
        trainer.train()
Author: lastkuku, Project: HearthstoneAI, Lines: 8, Source: q_learner.py

Example 10: train

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
def train(N, dataset):
    ds = SupervisedDataSet(20, 20)
    for data in dataset:
        ds.addSample(data[0], data[1])
    net = buildNetwork(20, 20, 20, bias=True, hiddenclass=TanhLayer)
    trainer = BackpropTrainer(net, ds)
    for i in range(N):
        sys.stdout.write("Progress: %d/%d \r" % (i, N))
        sys.stdout.flush()
        trainer.train()
    return net
Author: shoz, Project: predlife, Lines: 13, Source: trainer.py

Example 11: absorb

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
    def absorb(self, winner, **kwargs):
        self.total_sim += 1

        ds = SupervisedDataSet(self.features_num, 2)
        for who, s0, s1 in self.observation:
            if who != Board.STONE_BLACK:
                continue
            input_vec = self.get_input_values(s0, s1, who)
            val = self.net.activate(input_vec)
            plays = val[1] * self.total_sim + 1
            wins = val[0] * self.total_sim
            if who == winner:
                wins += 1
            ds.addSample(input_vec, (wins, plays))
        self.trainer.trainOnDataset(ds)
Author: splendor-kill, Project: ml-five, Lines: 17, Source: mcts.py

Example 12: _update_impl

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
    def _update_impl(self, old, new, reward):
        old_input = self.get_input_values(old)

        v1_a = self.net_attack.activate(self.get_input_values(new))
        target = self.gamma * v1_a
        
        ds_a = SupervisedDataSet(self.features_num, 1)
        ds_a.addSample(old_input, target + max(0, reward))
        ds_d = SupervisedDataSet(self.features_num, 1)
        ds_d.addSample(old_input, target + min(0, reward))
#         self.trainer.setData(ds)
#         err = self.trainer.train()
        self.trainer_attack.setData(ds_a)
        self.trainer_attack.train()
        self.trainer_defence.setData(ds_d)
        self.trainer_defence.train()
Author: splendor-kill, Project: ml-five, Lines: 18, Source: strategy_ann.py

Example 13: buildDataset

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
def buildDataset(filenames,
                 history=2, # how many snapshots into the past?
                 ):
    D = SupervisedDataSet(set_feats + history * snap_feats, num_targ)
    for fname in filenames:
        rundata = quickload(fname)
        snapshots = rundata['snapshots']
        settings = rundata['setting']
        for i in range(len(snapshots) - history - 1):
            inp = parseFeatures(settings, snapshots[i:i + history])
            prevtarget = parseTarget(snapshots[i + history-1])
            nexttarget = parseTarget(snapshots[i + history])
            # percentage gain
            target = (-nexttarget+prevtarget)/(nexttarget+prevtarget)/2.
            D.addSample(inp, [target])        
    return D
Author: schaul, Project: nnsandbox, Lines: 18, Source: paresdata1.py

Example 14: NetworkTrain

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
def NetworkTrain(trainDataSet, mnetwork=NetworkBuild(), file='NetworkDump.pkl',maxEpochs=100):
    mnetwork = NetworkBuild(new = True)
    assert len(mnetwork[0].inmodules) == len(mnetwork[1].keys())
    print('DEBUG')
    #print(trainDataSet)
    print("lens " + str(len(trainDataSet[0][0])) + " " + str(len(mnetwork[0].inmodules)))
    # Define the dataset format
    DS = SupervisedDataSet(len(trainDataSet[0][0]), len(trainDataSet[0][1]))

    for itrainDataSet in trainDataSet:
        indata = itrainDataSet[0]
        outdata = itrainDataSet[1]

        DS.addSample(indata, outdata)

    # If you need the inputs/outputs stored inside, you can use direct indexing
    # If you want to split the dataset into a training set and a test set (train:test = 8:2), you can use the statement below
    # To make later calls easier, the inputs and outputs can be pulled out separately
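    # For example (an illustrative sketch, not from the original file; it assumes
    # the standard PyBrain DataSet API):
    #   X, Y = DS['input'], DS['target']
    #   trainDS, testDS = DS.splitWithProportion(0.8)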




    # The trainer uses the backpropagation (BP) algorithm
    # verbose=True prints the total error during training; by default the library uses a 4:1 training/validation split, which can be changed in the call
    mnetwork[0].sortModules()
    trainer = BackpropTrainer(mnetwork[0], DS, verbose=True, learningrate=0.01)
    # 0.0575
    # maxEpochs is the maximum number of iterations allowed for convergence; the approach here is to train until convergence, and I usually set it to 1000
    trainer.trainUntilConvergence(maxEpochs=maxEpochs)
    '''
    for mod in mnetwork[0].modules:
        print "Module:", mod.name
        if mod.paramdim > 0:
            print "--parameters:", mod.params
        for conn in mnetwork[0].connections[mod]:
            print "-connection to", conn.outmod.name
            if conn.paramdim > 0:
                print "- parameters", conn.params
        if hasattr(mnetwork[0], "recurrentConns"):
            print "Recurrent connections"
            for conn in mnetwork[0].recurrentConns:
                print "-", conn.inmod.name, " to", conn.outmod.name
                if conn.paramdim > 0:
                    print "- parameters", conn.params
        '''
    pickle.dump(mnetwork, open(file, 'wb'))
    return mnetwork
Author: nickisverygood, Project: MindMapMain, Lines: 49, Source: RNN.py

Example 15: create_datasets

# Required module: from pybrain.datasets.supervised import SupervisedDataSet [as alias]
# Or: from pybrain.datasets.supervised.SupervisedDataSet import addSample [as alias]
def create_datasets():
    train_ds = SupervisedDataSet(13, 1)
    test_ds = SupervisedDataSet(13, 1)

    with open(TRAIN_FN, 'r') as fn:
        for row in csv.reader(fn):
            input = [float(x) for x in row[:-1]]
            target = [int(row[-1])]
            train_ds.addSample(input, target)

    with open(TEST_FN, 'r') as fn:
        for row in csv.reader(fn):
            input = [float(x) for x in row[:-1]]
            target = [int(row[-1])]
            test_ds.addSample(input, target)

    return train_ds, test_ds
Author: tkrishp, Project: research, Lines: 19, Source: run_model.py


Note: The pybrain.datasets.supervised.SupervisedDataSet.addSample examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub/MSDocs. The code snippets are drawn from open-source projects contributed by various developers, and the copyright of the source code belongs to the original authors; please refer to the corresponding project's License before distributing or using it. Do not reproduce without permission.