This article collects typical usage examples of the Python method pybrain.tools.shortcuts.buildNetwork. If you are wondering what shortcuts.buildNetwork does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of the module it belongs to, pybrain.tools.shortcuts.
The following presents 10 code examples of shortcuts.buildNetwork, sorted by popularity by default.
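Before the examples, here is a minimal, self-contained sketch of the typical buildNetwork workflow; the layer sizes and toy XOR-style training data are illustrative assumptions, not taken from any example on this page.
# Minimal sketch (assumed layer sizes and toy data): build, train, and query a network.
from pybrain.tools.shortcuts import buildNetwork
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

net = buildNetwork(2, 3, 1, bias=True)              # 2 inputs, 3 hidden units, 1 output
ds = SupervisedDataSet(2, 1)                        # dataset with 2-dim inputs, 1-dim targets
for sample, target in [((0, 0), (0,)), ((0, 1), (1,)), ((1, 0), (1,)), ((1, 1), (0,))]:
    ds.addSample(sample, target)
trainer = BackpropTrainer(net, ds)                  # plain backpropagation trainer
for _ in range(100):                                # train for a fixed number of epochs
    trainer.train()
print(net.activate((0, 1)))                         # forward pass on a single input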
Example 1: __init__
# Required import: from pybrain.tools import shortcuts [as alias]
# Or: from pybrain.tools.shortcuts import buildNetwork [as alias]
def __init__(self, **kwargs):
    self.max_depth = 0
    self.stats = {}
    self.calculation_time = float(kwargs.get('time', 1))
    self.max_moves = int(kwargs.get('max_moves', Board.BOARD_SIZE_SQ))
    # Exploration constant, increase for more exploratory moves,
    # decrease to prefer moves with known higher win rates.
    self.C = float(kwargs.get('C', 1.4))
    self.features_num = Board.BOARD_SIZE_SQ * 3 + 2
    self.hidden_neurons_num = self.features_num * 2
    self.net = buildNetwork(self.features_num, self.hidden_neurons_num, 2, bias=True, outclass=SigmoidLayer)
    self.trainer = BackpropTrainer(self.net)
    self.total_sim = 0
    self.observation = []
Example 2: __init__
# Required import: from pybrain.tools import shortcuts [as alias]
# Or: from pybrain.tools.shortcuts import buildNetwork [as alias]
def __init__(self, features_num, hidden_neurons_num):
    super().__init__()
    self.is_learning = True
    self.features_num = features_num
    # self.net = buildNetwork(features_num, hidden_neurons_num, 1, bias=True)
    # self.net = buildNetwork(features_num, hidden_neurons_num, hidden_neurons_num, 1, bias=True)
    # self.net = ConvolutionalBoardNetwork(Board.BOARD_SIZE, 5, 3)
    # self.trainer = BackpropTrainer(self.net)
    self.net_attack = buildNetwork(features_num, hidden_neurons_num, hidden_neurons_num, 1, bias=True)
    self.net_defence = buildNetwork(features_num, hidden_neurons_num, hidden_neurons_num, 1, bias=True)
    self.trainer_attack = BackpropTrainer(self.net_attack)
    self.trainer_defence = BackpropTrainer(self.net_defence)
    self.gamma = 0.9
    self.errors = []
    self.buf = np.zeros(200)
    self.buf_index = 0
    self.setup()
Example 3: get_nn
# Required import: from pybrain.tools import shortcuts [as alias]
# Or: from pybrain.tools.shortcuts import buildNetwork [as alias]
def get_nn(self, train=True):
    train_data, results_data = self.get_train_and_test_data()
    DS = self.create_DS(train_data)
    try:
        import arac  # noqa
        print("ARAC Available, using fast mode network builder!")
        FNN = buildNetwork(DS.indim, self.hiddenneurons, DS.outdim, bias=self.bias,
                           recurrent=self.recurrent, fast=True)
    except ImportError:
        FNN = buildNetwork(DS.indim, self.hiddenneurons, DS.outdim, bias=self.bias, recurrent=self.recurrent)
    FNN.randomize()
    TRAINER = BackpropTrainer(FNN, dataset=DS, learningrate=self.learningrate,
                              momentum=self.momentum, verbose=False, weightdecay=self.weightdecay)
    if train:
        for i in range(self.epochs):
            TRAINER.train()
    self.nn = FNN
    return FNN
Example 4: fitANN
# Required import: from pybrain.tools import shortcuts [as alias]
# Or: from pybrain.tools.shortcuts import buildNetwork [as alias]
def fitANN(data):
    '''
    Build a neural network regressor
    '''
    # determine the number of inputs and outputs
    inputs_cnt = data['input'].shape[1]
    target_cnt = data['target'].shape[1]
    # create the regressor object
    ann = pb.buildNetwork(inputs_cnt,
                          inputs_cnt * 3,
                          target_cnt,
                          hiddenclass=st.TanhLayer,
                          outclass=st.LinearLayer,
                          bias=True)
    # create the trainer object
    trainer = tr.BackpropTrainer(ann, data,
                                 verbose=True, batchlearning=False)
    # and train the network
    trainer.trainUntilConvergence(maxEpochs=50, verbose=True,
                                  continueEpochs=2, validationProportion=0.25)
    # and return the regressor
    return ann

# the file name of the dataset
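The data argument above is a PyBrain SupervisedDataSet; the aliases pb, st, and tr are assumed to refer to pybrain.tools.shortcuts, pybrain.structure, and pybrain.supervised.trainers respectively. Below is a minimal sketch of preparing such a dataset before calling fitANN, with made-up dimensions and random values.
# Sketch only: feature/target counts and values are illustrative assumptions.
import numpy as np
from pybrain.datasets import SupervisedDataSet

X = np.random.rand(100, 4)          # 100 samples, 4 input features
y = np.random.rand(100, 1)          # 100 samples, 1 continuous target
data = SupervisedDataSet(4, 1)
for row, target in zip(X, y):
    data.addSample(row, target)
ann = fitANN(data)                  # train the regressor defined above
prediction = ann.activate(X[0])     # query the trained network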
Example 5: fitANN
# Required import: from pybrain.tools import shortcuts [as alias]
# Or: from pybrain.tools.shortcuts import buildNetwork [as alias]
def fitANN(data):
    '''
    Build a neural network classifier
    '''
    # determine the number of inputs and outputs
    inputs_cnt = data['input'].shape[1]
    target_cnt = data['target'].shape[1]
    # create the classifier object (integer division keeps the layer size an int)
    ann = pb.buildNetwork(inputs_cnt,
                          inputs_cnt * 2,
                          inputs_cnt // 2,
                          target_cnt,
                          hiddenclass=st.SigmoidLayer,
                          outclass=st.SoftmaxLayer,
                          bias=True)
    # create the trainer object
    trainer = tr.BackpropTrainer(ann, data,
                                 verbose=True, batchlearning=False)
    # and train the network
    trainer.trainUntilConvergence(maxEpochs=50, verbose=True,
                                  continueEpochs=3, validationProportion=0.25)
    # and return the classifier
    return ann

# the file name of the dataset
Example 6: fitANN
# Required import: from pybrain.tools import shortcuts [as alias]
# Or: from pybrain.tools.shortcuts import buildNetwork [as alias]
def fitANN(data):
    '''
    Build a neural network classifier
    '''
    # determine the number of inputs and outputs
    inputs_cnt = data['input'].shape[1]
    target_cnt = data['target'].shape[1]
    # create the classifier object
    ann = pb.buildNetwork(inputs_cnt,
                          inputs_cnt * 2,
                          target_cnt,
                          hiddenclass=st.TanhLayer,
                          outclass=st.SoftmaxLayer,
                          bias=True)
    # create the trainer object
    trainer = tr.BackpropTrainer(ann, data,
                                 verbose=True, batchlearning=False)
    # and train the network
    trainer.trainUntilConvergence(maxEpochs=50, verbose=True,
                                  continueEpochs=3, validationProportion=0.25)
    # and return the classifier
    return ann

# the file name of the dataset
Example 7: build_network
# Required import: from pybrain.tools import shortcuts [as alias]
# Or: from pybrain.tools.shortcuts import buildNetwork [as alias]
def build_network(self, dataset, new=True, **kwargs):
    """
    Builds a neural network using the dataset provided.
    Expected keyword args:
    - 'hidden_layers'
    - 'prediction_window'
    - 'learning_rate'
    - 'momentum'
    """
    self.hidden_layers = kwargs.get('hidden_layers', 3)
    self.prediction_window = kwargs.get('prediction_window', 1)
    self.learning_rate = kwargs.get('learning_rate', 0.1)
    self.momentum = kwargs.get('momentum', 0.01)
    if not new:
        self.network.sorted = False
        self.network.sortModules()
        if self.network_dataset_type == SUPERVISED_DATASET:
            self.ready_supervised_dataset(dataset)
        else:
            raise InvalidNetworkDatasetType()
    else:
        if self.network_type == FEED_FORWARD_NETWORK:
            self.network = buildNetwork(len(self.train_data), self.hidden_layers, 1)
        else:
            raise InvalidNetworkType()
        if self.network_dataset_type == SUPERVISED_DATASET:
            self.ready_supervised_dataset(dataset)
        else:
            raise InvalidNetworkDatasetType()
    if self.trainer_type == BACKPROP_TRAINER:
        self.trainer = BackpropTrainer(self.network,
                                       learningrate=self.learning_rate,
                                       momentum=self.momentum,
                                       verbose=True)
        self.trainer.setData(self.network_dataset)
    else:
        raise InvalidTrainerType()
Example 8: train
# Required import: from pybrain.tools import shortcuts [as alias]
# Or: from pybrain.tools.shortcuts import buildNetwork [as alias]
def train(context, trainX, trainY):
    ds = SequentialDataSet(4, 1)
    for dataX, dataY in zip(trainX, trainY):
        ds.addSample(dataX, dataY)
    net = buildNetwork(4, 1, 1, hiddenclass=LSTMLayer, outputbias=False, recurrent=True)
    trainer = RPropMinusTrainer(net, dataset=ds)
    EPOCHS_PER_CYCLE = 5
    CYCLES = 5
    for i in range(CYCLES):
        trainer.trainEpochs(EPOCHS_PER_CYCLE)
    return net, trainer.testOnData()
# update the dataset data
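The returned recurrent net keeps internal LSTM state between activations, so it can be rolled forward sample by sample. Here is a minimal usage sketch, where trainX, trainY, and testX are hypothetical sequences of 4-feature samples and scalar targets, not defined on this page.
# Sketch only: trainX, trainY, and testX are hypothetical data.
net, train_error = train(None, trainX, trainY)   # 'context' is unused by the function above
net.reset()                                      # clear the LSTM's internal state
predictions = [net.activate(sample)[0] for sample in testX]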
Example 9: build_network
# Required import: from pybrain.tools import shortcuts [as alias]
# Or: from pybrain.tools.shortcuts import buildNetwork [as alias]
def build_network():
    # get iris data
    iris = datasets.load_iris()
    d, t = iris.data, iris.target
    # build dataset
    ds = _get_classification_dataset()
    for i in range(len(d)):
        ds.addSample(d[i], t[i])
    print("Dataset input: {}".format(ds['input']))
    print("Dataset output: {}".format(ds['target']))
    print("Dataset input length: {}".format(len(ds['input'])))
    print("Dataset output length: {}".format(len(ds['target'])))
    print("Dataset length: {}".format(len(ds)))
    print("Dataset input|output dimensions are {}|{}".format(ds.indim, ds.outdim))
    # split dataset
    train_data, test_data = _split_with_proportion(ds, 0.70)
    print("Train Data length: {}".format(len(train_data)))
    print("Test Data length: {}".format(len(test_data)))
    # encode with one output neuron per class
    train_data._convertToOneOfMany()
    test_data._convertToOneOfMany()
    print("Train Data input|output dimensions are {}|{}".format(train_data.indim, train_data.outdim))
    print("Test Data input|output dimensions are {}|{}".format(test_data.indim, test_data.outdim))
    # build network
    network = buildNetwork(INPUT, HIDDEN, CLASSES, outclass=SoftmaxLayer)
    # train network
    trainer = BackpropTrainer(network, dataset=train_data, momentum=0.1, verbose=True, weightdecay=0.01)
    trainer.trainOnDataset(train_data, 500)
    print("Total epochs: {}".format(trainer.totalepochs))
    # test network
    output = network.activateOnDataset(test_data).argmax(axis=1)
    print("Percent error: {}".format(percentError(output, test_data['class'])))
    # return network
    return network
# classify data against neural network
Example 10: build_network
# Required import: from pybrain.tools import shortcuts [as alias]
# Or: from pybrain.tools.shortcuts import buildNetwork [as alias]
def build_network(inputs, targets):
    # build dataset
    ds = _get_classification_dataset()
    for i in range(len(inputs)):
        ds.addSample(inputs[i], targets[i])
    print("Dataset input: {}".format(ds['input']))
    print("Dataset output: {}".format(ds['target']))
    print("Dataset input length: {}".format(len(ds['input'])))
    print("Dataset output length: {}".format(len(ds['target'])))
    print("Dataset length: {}".format(len(ds)))
    print("Dataset input|output dimensions are {}|{}".format(ds.indim, ds.outdim))
    # split dataset
    train_data, test_data = _split_with_proportion(ds, 0.70)
    print("Train Data length: {}".format(len(train_data)))
    print("Test Data length: {}".format(len(test_data)))
    # encode with one output neuron per class
    train_data._convertToOneOfMany()
    test_data._convertToOneOfMany()
    print("Train Data input|output dimensions are {}|{}".format(train_data.indim, train_data.outdim))
    print("Test Data input|output dimensions are {}|{}".format(test_data.indim, test_data.outdim))
    # build network
    network = buildNetwork(INPUT, HIDDEN, CLASSES, outclass=SoftmaxLayer)
    # train network
    trainer = BackpropTrainer(network, dataset=train_data, momentum=0.1, verbose=True, weightdecay=0.01)
    trainer.trainUntilConvergence(dataset=train_data, maxEpochs=500)
    print("Total epochs: {}".format(trainer.totalepochs))
    # test network
    output = network.activateOnDataset(test_data).argmax(axis=1)
    print("Percent error: {}".format(percentError(output, test_data['class'])))
    # return network
    return network
# classify input against neural network