

Python shortcuts.buildNetwork Function Code Examples

This article collects typical usage examples of the Python function pybrain.tools.shortcuts.buildNetwork. If you are unsure what buildNetwork does, how to call it, or how it is used in practice, the hand-picked examples below should help.


The 15 buildNetwork code examples below are drawn from open-source projects and are ordered, by default, from most to least popular.
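All of the excerpts omit their imports. As a baseline, here is a minimal, self-contained sketch of typical buildNetwork usage; the 2-3-1 network shape and the XOR-style training data are made up purely for illustration:

from pybrain.datasets import SupervisedDataSet
from pybrain.structure import TanhLayer
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.tools.shortcuts import buildNetwork

# A 2-3-1 feed-forward net: 2 inputs, one hidden layer of 3 tanh
# units, 1 output unit (linear, the default output class).
net = buildNetwork(2, 3, 1, bias=True, hiddenclass=TanhLayer)

# Train on XOR as a toy regression problem.
ds = SupervisedDataSet(2, 1)
for sample, target in [((0, 0), (0,)), ((0, 1), (1,)),
                       ((1, 0), (1,)), ((1, 1), (0,))]:
    ds.addSample(sample, target)

trainer = BackpropTrainer(net, ds)
for _ in range(100):
    trainer.train()          # one epoch; returns the epoch's average error

print(net.activate((0, 1)))  # forward pass on a single input vector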

Example 1: __init__

 def __init__(self, hidden, **args):
     self.setArgs(**args)
     if self.useSpecialInfo:
         net = buildNetwork(self.inGridSize**2+2, hidden, self.usedActions, outclass = SigmoidLayer)
     else:
         net = buildNetwork(self.inGridSize**2, hidden, self.usedActions, outclass = SigmoidLayer)
     ModuleMarioAgent.__init__(self, net)
Author: DioMuller | Project: ai-exercices | Lines of code: 7 | Source file: networkagent.py

Example 2: __init__

 def __init__(self, num_features, num_actions, indexOfAgent=None):    
     PHC_FA.__init__(self, num_features, num_actions, indexOfAgent)
     self.linQ = buildNetwork(num_features + num_actions, (num_features + num_actions), 1, hiddenclass = SigmoidLayer, outclass = LinearLayer)
     self.linPolicy = buildNetwork(num_features, (num_features + num_actions), num_actions, hiddenclass = SigmoidLayer,outclass = SigmoidLayer)
     self.averagePolicy=[]
     self.trainer4LinQ=BackpropTrainer(self.linQ,weightdecay=self.weightdecay)
     self.trainer4LinPolicy=BackpropTrainer(self.linPolicy,weightdecay=self.weightdecay)
Author: Snazz2001 | Project: Multi-Agent-Reinforcement-Learning-in-Stochastic-Games | Lines of code: 7 | Source file: phc.py
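PHC_FA and the agent plumbing above come from the quoted project. As a rough sketch of how such a Q-network/trainer pair is typically driven (the dimensions, sample values, and the one-row batch are assumptions for illustration, not the project's code):

from pybrain.datasets import SupervisedDataSet
from pybrain.structure import LinearLayer, SigmoidLayer
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.tools.shortcuts import buildNetwork

num_features, num_actions = 4, 2

# Q(s, a): concatenated state+action in, scalar value out
# (linear output, since this is a regression target).
linQ = buildNetwork(num_features + num_actions, num_features + num_actions, 1,
                    hiddenclass=SigmoidLayer, outclass=LinearLayer)
trainer4LinQ = BackpropTrainer(linQ, weightdecay=0.01)

# Collect (state+action, target Q-value) pairs, then take one pass.
batch = SupervisedDataSet(num_features + num_actions, 1)
batch.addSample([0.1, 0.3, 0.2, 0.5, 1, 0], [0.7])
trainer4LinQ.trainOnDataset(batch)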

Example 3: buildCustomNetwork

 def buildCustomNetwork(self, hiddenLayers, train_faces):
     myfnn = None     
     print "building network..."
     if len(hiddenLayers) == 1:
         myfnn = buildNetwork( 
           train_faces.indim, 
           hiddenLayers[0],
           train_faces.outdim, 
           outclass=SoftmaxLayer
         )
     elif len(hiddenLayers) == 2:
         myfnn = buildNetwork( 
           train_faces.indim, 
           hiddenLayers[0],
           hiddenLayers[1],
           train_faces.outdim, 
           outclass=SoftmaxLayer
         )
     elif len(hiddenLayers) == 3:
         myfnn = buildNetwork( 
           train_faces.indim, 
           hiddenLayers[0],
           hiddenLayers[1],
           hiddenLayers[2],
           train_faces.outdim, 
           outclass=SoftmaxLayer
         )
     return myfnn
Author: mwebergithub | Project: face457b | Lines of code: 28 | Source file: supervised_facial_classifier.py
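Since buildNetwork accepts any number of layer sizes as positional arguments, the if/elif ladder above can be collapsed by unpacking a list. A sketch (same behavior for 1 to 3 hidden layers, and it generalizes to any depth):

from pybrain.structure import SoftmaxLayer
from pybrain.tools.shortcuts import buildNetwork

def buildCustomNetwork(hiddenLayers, train_faces):
    # buildNetwork(in, h1, ..., hn, out): splice the hidden sizes
    # between the dataset's input and output dimensions.
    layers = [train_faces.indim] + list(hiddenLayers) + [train_faces.outdim]
    return buildNetwork(*layers, outclass=SoftmaxLayer)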

Example 4: __init__

     def __init__(self, motion, memory, sonar, posture):
         self.motionProxy = motion
         self.memoryProxy = memory
         self.sonarProxy = sonar
         self.postureProxy = posture
         self.useSensors    = True
         self.inputLength = 26+18
         self.outputLength = 26
         self.sonarProxy.subscribe("Closed-Loop Motor Babbling") #Start the sonar
         self.set_stiffness(0.3)
         self.net = buildNetwork(INPUTSIZE,HIDDENSIZE,OUTPUTSIZE)

         #Hierarchical Control Networks 
         self.netH1 = buildNetwork(INPUTSIZE,HIDDENSIZE,OUTPUTSIZE)
         self.netH2 = buildNetwork(INPUTSIZE,HIDDENSIZE,OUTPUTSIZE)
         self.sMemory1 = np.array([1]*(INPUTSIZE + PREDICTSIZE))
         self.sMemory2 = np.array([1]*(INPUTSIZE + PREDICTSIZE))
         self.mMemory1 = np.array([0]*OUTPUTSIZE)
         self.mMemory2 = np.array([0]*OUTPUTSIZE)
         

         # Access global joint limits.
         self.Body = motion.getLimits("Body")
         self.bangles =  [1] * 26
         self.othersens = [2] * 18
         self.sMemory = np.array([1]*(INPUTSIZE + PREDICTSIZE))
         self.mMemory = np.array([0]*OUTPUTSIZE)
         self.cl = curiosityLoop()

         self.rand = Random()
         self.rand.seed(int(time()))

         #Initialize a model dictionary
         self.models = dict()
Author: ctf20 | Project: DarwinianNeurodynamics | Lines of code: 34 | Source file: motorBabbling15.py
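The INPUTSIZE, HIDDENSIZE, OUTPUTSIZE, and PREDICTSIZE constants are defined elsewhere in the quoted module and are not shown in this excerpt. Judging from inputLength = 26+18 and outputLength = 26, a plausible set of definitions would be the following; the values are guesses, not the project's actual numbers:

# Assumed module-level constants (not part of the excerpt above):
INPUTSIZE = 26 + 18    # 26 joint angles + 18 other sensor readings
OUTPUTSIZE = 26        # one motor command per joint
PREDICTSIZE = 26       # guessed: size of the predicted sensor frame
HIDDENSIZE = 40        # guessed hidden-layer width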

Example 5: reset

  def reset(self, params, repetition):
    print params

    self.nDimInput = 3
    self.inputEncoder = PassThroughEncoder()

    if params['output_encoding'] is None:
      self.outputEncoder = PassThroughEncoder()
      self.nDimOutput = 1
    elif params['output_encoding'] == 'likelihood':
      self.outputEncoder = ScalarBucketEncoder()
      self.nDimOutput = self.outputEncoder.encoder.n

    if params['dataset'] == 'nyc_taxi' or params['dataset'] == 'nyc_taxi_perturb_baseline':
      self.dataset = NYCTaxiDataset(params['dataset'])
    else:
      raise Exception("Dataset not found")

    self.testCounter = 0
    self.resets = []
    self.iteration = 0

    # initialize LSTM network
    random.seed(6)
    if params['output_encoding'] is None:
      self.net = buildNetwork(self.nDimInput, params['num_cells'], self.nDimOutput,
                         hiddenclass=LSTMLayer, bias=True, outputbias=True, recurrent=True)
    elif params['output_encoding'] == 'likelihood':
      self.net = buildNetwork(self.nDimInput, params['num_cells'], self.nDimOutput,
                         hiddenclass=LSTMLayer, bias=True, outclass=SigmoidLayer, recurrent=True)

    (self.networkInput, self.targetPrediction, self.trueData) = \
      self.dataset.generateSequence(
      prediction_nstep=params['prediction_nstep'],
      output_encoding=params['output_encoding'])
Author: oxtopus | Project: nupic.research | Lines of code: 35 | Source file: suite.py
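For a recurrent network like the LSTM built above, activations carry state from one call to the next, so sequences are fed one timestep at a time and the state is cleared between sequences. A minimal sketch (shapes chosen arbitrarily):

from pybrain.structure import LSTMLayer
from pybrain.tools.shortcuts import buildNetwork

net = buildNetwork(3, 20, 1, hiddenclass=LSTMLayer,
                   bias=True, outputbias=True, recurrent=True)

sequence = [[0.1, 0.0, 0.2], [0.3, 0.1, 0.0], [0.2, 0.2, 0.1]]
net.reset()                # clear recurrent state before a new sequence
for x in sequence:
    y = net.activate(x)    # hidden state persists across these calls
print(y)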

Example 6: buildFNN

def buildFNN(testData, trainData):
    '''
    Input: testing data object, training data object
    Output: Prints details of best FNN
    '''
        
    accuracy=0
    model = None
    params = None
    fnn = buildNetwork( trainData.indim, (trainData.indim + trainData.outdim)/2, trainData.outdim, hiddenclass=TanhLayer, outclass=SoftmaxLayer, bias=True )
    trainer = BackpropTrainer(fnn, dataset=trainData, momentum=0.1, verbose=False, weightdecay=0.01)        
    a=calculateANNaccuracy(fnn, trainData, testData, trainer)    
    if a>accuracy:
        model=fnn
        accuracy=a
        params='''network = [Hidden Layer = TanhLayer; Hidden Layer Units= (Input+Output)Units/2; Output Layer = SoftmaxLayer]\n'''
    
    fnn = buildNetwork( trainData.indim, trainData.indim, trainData.outdim, hiddenclass=TanhLayer, outclass=SoftmaxLayer, bias=True )
    trainer = BackpropTrainer(fnn, dataset=trainData, momentum=0.1, verbose=False, weightdecay=0.01)
    a=calculateANNaccuracy(fnn, trainData, testData, trainer)
    if a>accuracy:
        model=fnn
        accuracy=a
        params='''network = [Hidden Layer = TanhLayer; Hidden Layer Units = Input Units; Output Layer = SoftmaxLayer]\n'''
    
        
    fnn = buildNetwork( trainData.indim, (trainData.indim + trainData.outdim)/2, trainData.outdim, hiddenclass=TanhLayer, outclass=SigmoidLayer, bias=True )
    trainer = BackpropTrainer(fnn, dataset=trainData, momentum=0.1, verbose=False, weightdecay=0.01)
    a=calculateANNaccuracy(fnn, trainData, testData, trainer)
    if a>accuracy:
        model=fnn
        accuracy=a
        params='''network = [Hidden Layer = TanhLayer; Hidden Layer Units = (Input+Output)Units/2; Output Layer = SigmoidLayer]\n'''
    
        
    fnn = buildNetwork( trainData.indim, trainData.indim, trainData.outdim, hiddenclass=TanhLayer, outclass=SigmoidLayer, bias=True )  # hidden width = indim, matching the params string below
    trainer = BackpropTrainer(fnn, dataset=trainData, momentum=0.1, verbose=False, weightdecay=0.01)
    a=calculateANNaccuracy(fnn, trainData, testData, trainer)
    if a>accuracy:
        model=fnn
        accuracy=a
        params='''network = [Hidden Layer = TanhLayer; Hidden Layer Units = Input Units; Output Layer = SigmoidLayer]\n'''
    
    fnn = buildNetwork( trainData.indim, (trainData.indim + trainData.outdim)/2, (trainData.indim + trainData.outdim)/2, trainData.outdim, hiddenclass=TanhLayer, outclass=SoftmaxLayer, bias=True )
    trainer = BackpropTrainer(fnn, dataset=trainData, momentum=0.1, verbose=False, weightdecay=0.01)
    a=calculateANNaccuracy(fnn, trainData, testData, trainer)
    if a>accuracy:
        model=fnn
        accuracy=a
        params='''network = [TWO (2) Hidden Layers = TanhLayer; Hidden Layer Units = (Input+Output)Units/2; Output Layer = SoftmaxLayer]\n'''
    
        
    print '\nThe best model had '+str(accuracy)+'% accuracy and used the parameters:\n'+params+'\n'
Author: aplassard | Project: Image_Processing | Lines of code: 53 | Source file: ann.py
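The five near-identical blocks above differ only in the hidden-layer sizes and the output class. A sketch of the same search written as a loop over configurations (calculateANNaccuracy is the example's own helper):

from pybrain.structure import SigmoidLayer, SoftmaxLayer, TanhLayer
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.tools.shortcuts import buildNetwork

def buildFNN(testData, trainData):
    half = (trainData.indim + trainData.outdim) / 2
    configs = [
        ((half,), SoftmaxLayer, '(Input+Output)/2 units, Softmax output'),
        ((trainData.indim,), SoftmaxLayer, 'Input units, Softmax output'),
        ((half,), SigmoidLayer, '(Input+Output)/2 units, Sigmoid output'),
        ((trainData.indim,), SigmoidLayer, 'Input units, Sigmoid output'),
        ((half, half), SoftmaxLayer, 'two hidden layers, Softmax output'),
    ]
    accuracy, params = 0, None
    for hidden, outclass, desc in configs:
        layers = (trainData.indim,) + hidden + (trainData.outdim,)
        fnn = buildNetwork(*layers, hiddenclass=TanhLayer,
                           outclass=outclass, bias=True)
        trainer = BackpropTrainer(fnn, dataset=trainData, momentum=0.1,
                                  verbose=False, weightdecay=0.01)
        a = calculateANNaccuracy(fnn, trainData, testData, trainer)
        if a > accuracy:
            accuracy, params = a, desc
    print('The best model had %s%% accuracy using: %s' % (accuracy, params))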

Example 7: __init__

    def __init__(self, prev=5):
        # timsig beat, timsig denom, prev + curr dur/freq, prev 3 chords, bass note
        self.t_ds = SupervisedDataSet((prev+1) * 2 + 4, 2)
        self.t_net = buildNetwork((prev+1) * 2 + 4, 50, 75, 25, 2)
        self.t_freq_err = []
        self.t_dur_err = []

        self.b_ds = SupervisedDataSet((prev+1) * 2 + 4, 2)
        self.b_net = buildNetwork((prev+1) * 2 + 4, 50, 75, 25, 2)
        self.b_freq_err = []
        self.b_dur_err = []

        self.prev = prev
        self.corpus = []
Author: ijoosong | Project: classical-ml | Lines of code: 14 | Source file: NeuralNetwork.py
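A sketch of how the treble-clef dataset/network pair above might then be fed and trained, assuming the enclosing class is called NeuralNetwork (the feature values and target are placeholders; only the dimensions follow the example):

from pybrain.supervised.trainers import BackpropTrainer

prev = 5
model = NeuralNetwork(prev)                  # the class defined above
features = [0.0] * ((prev + 1) * 2 + 4)      # placeholder input vector
target = [440.0, 0.5]                        # (frequency, duration), made up
model.t_ds.addSample(features, target)

trainer = BackpropTrainer(model.t_net, dataset=model.t_ds)
err = trainer.train()                        # one epoch over t_ds
model.t_freq_err.append(err)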

Example 8: __init__

    def __init__(self, array=None):

        if array is None:
            ##self.net  = [Network((18,18,1)) for i in range(9)]
            ##self.theta = [self.net[i].theta for i in range(9)]
            self.net = buildNetwork(18, 18, 9)
            self.theta = self.net.params

        else:
            ##self.theta = array
            ##self.net = [Network((18,18,1),self.theta[i]) for i in range(9)]
            self.theta = array
            self.net = buildNetwork(18, 18, 9)
            self.net._setParameters(self.theta)  # copy values in; avoid rebinding the private _params array
Author: Chuphay | Project: python | Lines of code: 14 | Source file: tic_tac.py
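Copying a flat weight vector out of and back into a network built this way can be sketched as a simple round trip (the mutation step is just for illustration):

import numpy as np
from pybrain.tools.shortcuts import buildNetwork

net = buildNetwork(18, 18, 9)
theta = net.params.copy()        # snapshot the flat parameter vector

theta += np.random.normal(0, 0.01, theta.shape)  # e.g. mutate for evolution
net._setParameters(theta)        # copy the values back into the network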

Example 9: reset

    def reset(self):
        FA.reset(self)

        # self.network = buildNetwork(self.indim, 2*(self.indim+self.outdim), self.outdim)
        self.network = buildNetwork(self.indim, self.outdim, bias=True)
        self.network._setParameters(random.normal(0, 0.1, self.network.params.shape))
        self.pybdataset = SupervisedDataSet(self.indim, self.outdim)
Author: rueckstiess | Project: dopamine | Lines of code: 7 | Source file: pybnn.py

Example 10: train_net

    def train_net(self, training_times_input=100, num_neurons=200, learning_rate_input=0.1, weight_decay=0.1, momentum_in=0, verbose_input=True):
        '''
        The main function to train the network
        '''
        print self.trndata['input'].shape
        raw_input()  # debugging pause: press Enter to continue
        self.network=buildNetwork(self.trndata.indim,
                                  num_neurons, self.trndata.outdim,
                                  bias=True,
                                  hiddenclass=SigmoidLayer,
                                  outclass = LinearLayer)
        self.trainer=BackpropTrainer(self.network,
                                     dataset=self.trndata,
                                     learningrate=learning_rate_input,
                                     momentum=momentum_in,
                                     verbose=verbose_input,
                                     weightdecay=weight_decay )

        for epoch in range(training_times_input):
            print "Training", epoch+1, "times"
            self.trainer.trainEpochs(1)
            trn_error = self._net_performance(self.network, self.trndata)
            tst_error = self._net_performance(self.network, self.tstdata)
            print "the trn error is: ", trn_error
            print "the test error is: ",tst_error

        # prediction on all data:
Author: DajeRoma | Project: clicc-flask | Lines of code: 27 | Source file: regression.py

Example 11: run

    def run(self, fold, X_train, y_train, X_test, y_test):
        DS_train, DS_test = ClassificationData.convert_to_DS(
            X_train,
            y_train,
            X_test,
            y_test)

        NHiddenUnits = self.__get_best_hu(DS_train)
        fnn = buildNetwork(
            DS_train.indim,
            NHiddenUnits,
            DS_train.outdim,
            outclass=SoftmaxLayer,
            bias=True)

        trainer = BackpropTrainer(
            fnn,
            dataset=DS_train,
            momentum=0.1,
            verbose=False,
            weightdecay=0.01)

        trainer.trainEpochs(self.epochs)
        tstresult = percentError(
            trainer.testOnClassData(dataset=DS_test),
            DS_test['class'])

        print "NN fold: %4d" % fold, "; test error: %5.2f%%" % tstresult
        return tstresult / 100.0
Author: dzitkowskik | Project: Introduction-To-Machine-Learning-And-Data-Mining | Lines of code: 29 | Source file: PyBrainNN.py
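convert_to_DS is the quoted project's helper. With plain PyBrain, the equivalent dataset setup, so that testOnClassData and percentError line up, might look like this (two features and three classes are arbitrary):

from pybrain.datasets import ClassificationDataSet

# Two features, targets given as single class indices in [0, 3).
DS = ClassificationDataSet(2, 1, nb_classes=3)
DS.addSample([0.1, 0.9], [2])
DS.addSample([0.8, 0.2], [0])

# Expand targets to one-of-many vectors: outdim becomes 3, while the
# original class indices are kept in DS['class'] for percentError.
DS._convertToOneOfMany()
print(DS.indim), print(DS.outdim)   # 2 and 3 after conversion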

Example 12: neuralNetwork_eval_func

 def neuralNetwork_eval_func(self, chromosome):
     node_num, learning_rate, window_size = self.decode_chromosome(chromosome)
     if self.check_log(node_num, learning_rate, window_size):
         return self.get_means_from_log(node_num, learning_rate, window_size)[0]
     folded_dataset = self.create_folded_dataset(window_size)
     indim = 21 * (2 * window_size + 1)
     mean_AUC = 0
     mean_decision_value = 0
     mean_mcc = 0
     sample_size_over_thousand_flag = False
     for test_fold in xrange(self.fold):
         test_labels, test_dataset, train_labels, train_dataset = folded_dataset.get_test_and_training_dataset(test_fold)
         if len(test_labels) + len(train_labels) > 1000:
             sample_size_over_thousand_flag = True
         ds = SupervisedDataSet(indim, 1)
         for i in xrange(len(train_labels)):
             ds.appendLinked(train_dataset[i], [train_labels[i]])
         net = buildNetwork(indim, node_num, 1, outclass=SigmoidLayer, bias=True)
         trainer = BackpropTrainer(net, ds, learningrate=learning_rate)
         trainer.trainUntilConvergence(maxEpochs=self.maxEpochs_for_trainer)
         decision_values = [net.activate(test_dataset[i]) for i in xrange(len(test_labels))]
         decision_values = map(lambda x: x[0], decision_values)
         AUC, decision_value_and_max_mcc = validate_performance.calculate_AUC(decision_values, test_labels)
         mean_AUC += AUC
         mean_decision_value += decision_value_and_max_mcc[0]
         mean_mcc += decision_value_and_max_mcc[1]
         if sample_size_over_thousand_flag:
             break
     if not sample_size_over_thousand_flag:
         mean_AUC /= self.fold
         mean_decision_value /= self.fold
         mean_mcc /= self.fold
     self.write_log(node_num, learning_rate, window_size, mean_AUC, mean_decision_value, mean_mcc)
     self.add_log(node_num, learning_rate, window_size, mean_AUC, mean_decision_value, mean_mcc)
     return mean_AUC
Author: clclcocoro | Project: MLwithGA | Lines of code: 35 | Source file: cross_validation.py

Example 13: setUp

 def setUp(self):
   self.nn = buildNetwork(4,6,3, bias=False, hiddenclass=TanhLayer, 
                    outclass=TanhLayer)
   self.nn.sortModules()
   self.in_to_hidden, = self.nn.connections[self.nn['in']]
   self.hiddenAstroLayer = AstrocyteLayer(self.nn['hidden0'], 
                                          self.in_to_hidden)
Author: mfbx9da4 | Project: neuron-astrocyte-networks | Lines of code: 7 | Source file: testastrocyte_layer.py
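AstrocyteLayer is the quoted project's own class, but the lookups around it are standard PyBrain: buildNetwork names its modules 'in', 'hidden0', 'hidden1', ..., 'out', and net.connections maps each module to its list of outgoing connections. For instance:

from pybrain.structure import TanhLayer
from pybrain.tools.shortcuts import buildNetwork

nn = buildNetwork(4, 6, 3, bias=False,
                  hiddenclass=TanhLayer, outclass=TanhLayer)
nn.sortModules()

in_to_hidden, = nn.connections[nn['in']]   # the sole outgoing connection
print(nn['in'])                            # the 4-unit input layer
print(in_to_hidden.params.shape)           # 4*6 = 24 weights, flat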

Example 14: createNN

def createNN(indim, hiddim, outdim):
    nn = buildNetwork(indim, hiddim, outdim,
                    bias=False,
                    hiddenclass=TanhLayer, 
                    outclass=TanhLayer)
    nn.sortModules()
    return nn
Author: mfbx9da4 | Project: neuron-astrocyte-networks | Lines of code: 7 | Source file: add_astrocytes_to_learned_weigts_XOR.py

Example 15: train

def train(data):
	"""
	See http://www.pybrain.org/docs/tutorial/fnn.html

	Returns a neural network trained on the given data.

	Parameters:
	  data - A ClassificationDataSet for training.
	         Should not include the test data.
	"""
	network = buildNetwork(
		# This is where we specify the architecture of
		# the network.  We can play around with different
		# parameters here.
		# http://www.pybrain.org/docs/api/tools.html
		data.indim, 5, data.outdim,
		hiddenclass=SigmoidLayer,
		outclass=SoftmaxLayer
	)

	# We can fiddle around with this guy's options as well.
	# http://www.pybrain.org/docs/api/supervised/trainers.html
	trainer = BackpropTrainer(network, dataset=data)
	trainer.trainUntilConvergence(maxEpochs=20)

	return network
Author: IPPETAD | Project: ProjectSmiley | Lines of code: 26 | Source file: neural_net_learner.py
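One detail worth knowing about trainUntilConvergence: it holds out part of the dataset for validation (25% by default) and stops once the validation error has stopped improving for a number of consecutive epochs, or at maxEpochs. Reusing the network and data names from the example above, the split can be tuned explicitly; a sketch:

from pybrain.supervised.trainers import BackpropTrainer

trainer = BackpropTrainer(network, dataset=data)
train_errors, val_errors = trainer.trainUntilConvergence(
    maxEpochs=20,
    validationProportion=0.25,   # fraction of `data` held out for validation
    continueEpochs=10,           # patience before stopping early
)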


Note: The pybrain.tools.shortcuts.buildNetwork examples on this page were collected from open-source projects on GitHub. Copyright in each snippet remains with its original authors, and reuse is governed by the corresponding project's license.