This article collects typical usage examples of the Python method pybrain.structure.networks.feedforward.FeedForwardNetwork.addModule. If you have been wondering what FeedForwardNetwork.addModule does, how to use it, or where to find working examples of it, the curated code samples below should help. You can also explore further usage of the containing class, pybrain.structure.networks.feedforward.FeedForwardNetwork.
Fifteen code examples of FeedForwardNetwork.addModule are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
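Before diving into the examples, a quick orientation: addModule registers a hidden module (one that is neither a network input nor a network output) with a FeedForwardNetwork; input and output layers are added with addInputModule and addOutputModule, connections with addConnection, and sortModules() must be called once before the network is used. The minimal sketch below (layer sizes chosen arbitrarily for illustration) shows the typical call sequence.

from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure import LinearLayer, SigmoidLayer, FullConnection

net = FeedForwardNetwork()
inp = LinearLayer(2, name='in')           # input layer
hidden = SigmoidLayer(3, name='hidden')   # hidden layer, registered via addModule
out = LinearLayer(1, name='out')          # output layer

net.addInputModule(inp)
net.addModule(hidden)                     # the method this page documents
net.addOutputModule(out)
net.addConnection(FullConnection(inp, hidden))
net.addConnection(FullConnection(hidden, out))
net.sortModules()                         # finalize the topology before use

print(net.activate([0.5, -0.2]))          # forward pass on a 2-dimensional input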
Example 1: main
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
def main():
    a = 0
    for i in range(0, 100):
        inLayer = SigmoidLayer(2)
        hiddenLayer = SigmoidLayer(3)
        outLayer = SigmoidLayer(1)
        net = FeedForwardNetwork()
        net.addInputModule(inLayer)
        net.addModule(hiddenLayer)
        net.addOutputModule(outLayer)
        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)
        net.addConnection(in_to_hidden)
        net.addConnection(hidden_to_out)
        net.sortModules()
        ds = SupervisedDataSet(2, 1)
        ds.addSample((1, 1), (0))
        ds.addSample((1, 0), (1))
        ds.addSample((0, 1), (1))
        ds.addSample((0, 0), (0))
        trainer = BackpropTrainer(net, ds)
        trainer.trainUntilConvergence()
        out = net.activate((1, 1))
        if out < 0.5:
            a = a + 1
    print(str(a) + "/100")
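Once training has finished, the learned weights can be read straight off the connection and network objects; a small follow-up sketch, assuming the net, in_to_hidden and hidden_to_out objects from the example above are still in scope:

# inspect the learned parameters (reuses the variables from the example above)
print(in_to_hidden.params)    # weights of the input-to-hidden connection
print(hidden_to_out.params)   # weights of the hidden-to-output connection
print(net.params)             # the network's full parameter vector
print(net.activate((1, 0)))   # output for another XOR pattern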
Example 2: __init__
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
def __init__(self, states, verbose=False, max_epochs=None):
    '''Create a NeuralNetwork instance.

    `states` is a tuple of tuples of ints, representing the discovered subnetworks'
    entrez ids.
    '''
    self.verbose = verbose
    self.max_epochs = max_epochs
    self.num_features = sum(map(lambda tup: len(tup), states))
    self.states = states
    n = FeedForwardNetwork()
    n.addOutputModule(TanhLayer(1, name='out'))
    n.addModule(BiasUnit(name='bias out'))
    n.addConnection(FullConnection(n['bias out'], n['out']))
    for i, state in enumerate(states):
        dim = len(state)
        n.addInputModule(TanhLayer(dim, name='input %s' % i))
        n.addModule(BiasUnit(name='bias input %s' % i))
        n.addConnection(FullConnection(n['bias input %s' % i], n['input %s' % i]))
        n.addConnection(FullConnection(n['input %s' % i], n['out']))
    n.sortModules()
    self.n = n
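Note that when a FeedForwardNetwork has several input modules, as in this constructor, activate() still takes a single flat vector whose length is the sum of the input modules' dimensions (n.indim). A standalone sketch of the same pattern with two made-up "states" of sizes 2 and 3:

from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure import TanhLayer, BiasUnit, FullConnection

n = FeedForwardNetwork()
n.addOutputModule(TanhLayer(1, name='out'))
n.addModule(BiasUnit(name='bias out'))
n.addConnection(FullConnection(n['bias out'], n['out']))
for i, dim in enumerate((2, 3)):              # two toy "states" of sizes 2 and 3
    n.addInputModule(TanhLayer(dim, name='input %s' % i))
    n.addModule(BiasUnit(name='bias input %s' % i))
    n.addConnection(FullConnection(n['bias input %s' % i], n['input %s' % i]))
    n.addConnection(FullConnection(n['input %s' % i], n['out']))
n.sortModules()

print(n.indim)                      # 5: the input modules' dimensions are concatenated
print(n.activate([1, 0, 0, 1, 1]))  # one flat vector feeds both input modules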
Example 3: buildSharedCrossedNetwork
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
def buildSharedCrossedNetwork():
    """ build a network with shared connections. Two hidden modules are
    symmetrically linked, but to a different input neuron than the
    output neuron. The weights are fixed (1 and 2). """
    N = FeedForwardNetwork('shared-crossed')
    h = 1
    a = LinearLayer(2, name='a')
    b = LinearLayer(h, name='b')
    c = LinearLayer(h, name='c')
    d = LinearLayer(2, name='d')
    N.addInputModule(a)
    N.addModule(b)
    N.addModule(c)
    N.addOutputModule(d)
    m1 = MotherConnection(h)
    m1.params[:] = scipy.array((1,))
    m2 = MotherConnection(h)
    m2.params[:] = scipy.array((2,))
    N.addConnection(SharedFullConnection(m1, a, b, inSliceTo=1))
    N.addConnection(SharedFullConnection(m1, a, c, inSliceFrom=1))
    N.addConnection(SharedFullConnection(m2, b, d, outSliceFrom=1))
    N.addConnection(SharedFullConnection(m2, c, d, outSliceTo=1))
    N.sortModules()
    return N
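A quick usage check, assuming the function above is importable: since every layer is linear and the two MotherConnection weights are fixed to 1 and 2, an input [x1, x2] is crossed and scaled to [2*x2, 2*x1].

N = buildSharedCrossedNetwork()
print(N.activate([1., 2.]))    # expected [4., 2.]: the inputs are swapped and doubled
print(N.activate([0.5, -1.]))  # expected [-2., 1.]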
Example 4: buildXor
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
def buildXor(self):
    self.params['dataset'] = 'XOR'
    d = ClassificationDataSet(2)
    d.addSample([0., 0.], [0.])
    d.addSample([0., 1.], [1.])
    d.addSample([1., 0.], [1.])
    d.addSample([1., 1.], [0.])
    d.setField('class', [[0.], [1.], [1.], [0.]])
    self.trn_data = d
    self.tst_data = d
    global trn_data
    trn_data = self.trn_data
    nn = FeedForwardNetwork()
    inLayer = TanhLayer(2, name='in')
    hiddenLayer = TanhLayer(3, name='hidden0')
    outLayer = ThresholdLayer(1, name='out')
    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    nn.addConnection(in_to_hidden)
    nn.addConnection(hidden_to_out)
    nn.sortModules()
    nn.randomize()
    self.net_settings = str(nn.connections)
    self.nn = nn
Example 5: custom_build_network
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
def custom_build_network(layer_sizes):
    net = FeedForwardNetwork()
    layers = []
    inp = SigmoidLayer(layer_sizes[0], name='visible')
    h1 = SigmoidLayer(layer_sizes[1], name='hidden1')
    h2 = SigmoidLayer(layer_sizes[2], name='hidden2')
    out = SigmoidLayer(layer_sizes[3], name='out')
    bias = BiasUnit(name='bias')
    net.addInputModule(inp)
    net.addModule(h1)
    net.addModule(h2)
    net.addOutputModule(out)
    net.addModule(bias)
    net.addConnection(FullConnection(inp, h1))
    net.addConnection(FullConnection(h1, h2))
    net.addConnection(FullConnection(h2, out))
    net.addConnection(FullConnection(bias, h1))
    net.addConnection(FullConnection(bias, h2))
    net.addConnection(FullConnection(bias, out))
    net.sortModules()
    return net
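Usage is straightforward; a short sketch with arbitrarily chosen layer sizes (4 visible units, two hidden layers of 8, 2 outputs):

net = custom_build_network([4, 8, 8, 2])
print(net['visible'].indim)                 # 4 -- modules can be looked up by name
print(net.activate([0.1, 0.9, 0.4, 0.7]))   # two sigmoid outputs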
Example 6: _buildNetwork
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
def _buildNetwork(*layers, **options):
    """This is a helper function to create different kinds of networks.

    `layers` is a list of tuples. Each tuple can contain an arbitrary number of
    layers, each being connected to the next one with IdentityConnections. Due
    to this, all layers have to have the same dimension. We call these tuples
    'parts.'

    Afterwards, the last layer of one tuple is connected to the first layer of
    the following tuple by a FullConnection.

    If the keyword argument bias is given, BiasUnits are added additionally with
    every FullConnection.

    Example:

        _buildNetwork(
            (LinearLayer(3),),
            (SigmoidLayer(4), GaussianLayer(4)),
            (SigmoidLayer(3),),
        )
    """
    bias = options['bias'] if 'bias' in options else False
    use_random_seed = options['use_random_seed'] if 'use_random_seed' in options else False
    net = FeedForwardNetwork()
    layerParts = iter(layers)
    firstPart = iter(next(layerParts))
    firstLayer = next(firstPart)
    net.addInputModule(firstLayer)
    prevLayer = firstLayer
    for part in chain(firstPart, layerParts):
        new_part = True
        for layer in part:
            net.addModule(layer)
            # Pick class depending on whether we entered a new part
            if new_part:
                ConnectionClass = FullConnection
                if bias:
                    biasUnit = BiasUnit('BiasUnit for %s' % layer.name)
                    net.addModule(biasUnit)
                    net.addConnection(FullConnection(biasUnit, layer, use_random_seed=use_random_seed))
            else:
                ConnectionClass = IdentityConnection
            new_part = False
            conn = ConnectionClass(prevLayer, layer)
            net.addConnection(conn)
            prevLayer = layer
    net.addOutputModule(layer)
    net.sortModules()
    return net
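A hedged usage sketch of the helper above, without the bias option (so it also works with a stock FullConnection that has no use_random_seed argument); layers inside one part must share the same dimension because they are chained with IdentityConnections:

from pybrain.structure import LinearLayer, SigmoidLayer, TanhLayer

net = _buildNetwork(
    (LinearLayer(2),),                  # input part
    (SigmoidLayer(3), TanhLayer(3)),    # identity-connected part, equal sizes
    (LinearLayer(1),),                  # output part
)
print(net.activate([0.2, -0.4]))        # single output value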
Example 7: __init__
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
class PyBrainANNs:

    def __init__(self, x_dim, y_dim, hidden_size, s_id):
        self.serialize_id = s_id
        self.net = FeedForwardNetwork()
        in_layer = LinearLayer(x_dim)
        hidden_layer = SigmoidLayer(hidden_size)
        out_layer = LinearLayer(y_dim)
        self.net.addInputModule(in_layer)
        self.net.addModule(hidden_layer)
        self.net.addOutputModule(out_layer)
        in_to_hidden = FullConnection(in_layer, hidden_layer)
        hidden_to_out = FullConnection(hidden_layer, out_layer)
        self.net.addConnection(in_to_hidden)
        self.net.addConnection(hidden_to_out)
        self.net.sortModules()

    def _prepare_dataset(self, x_data, y_data):
        assert x_data.shape[0] == y_data.shape[0]
        if len(y_data.shape) == 1:
            y_matrix = np.matrix(y_data).T
        else:
            y_matrix = y_data.values
        assert x_data.shape[1] == self.net.indim
        assert y_matrix.shape[1] == self.net.outdim
        data_set = SupervisedDataSet(self.net.indim, self.net.outdim)
        data_set.setField("input", x_data)
        data_set.setField("target", y_matrix)
        return data_set

    def train(self, x_data, y_data):
        trainer = BackpropTrainer(self.net, self._prepare_dataset(x_data, y_data))
        trainer.train()

    def score(self, x_data, y_datas):
        return ModuleValidator.validate(regression_score, self.net, self._prepare_dataset(x_data, y_datas))

    def predict(self, x_data):
        return np.array([self.net.activate(sample) for sample in x_data])

    def save(self, path):
        joblib.dump(self.net, path)

    def load(self, path):
        self.net = joblib.load(path)
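A hedged usage sketch with random toy data (numpy arrays; the score method additionally relies on regression_score and ModuleValidator, which are outside this excerpt):

import numpy as np

X = np.random.rand(50, 3)                 # 50 samples, 3 features
y = X.sum(axis=1)                         # toy regression target
model = PyBrainANNs(x_dim=3, y_dim=1, hidden_size=5, s_id='demo')
model.train(X, y)                         # one backprop pass over the dataset
print(model.predict(X[:3]))               # predictions for the first three samples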
Example 8: createNN
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
def createNN():
    nn = FeedForwardNetwork()
    inLayer = TanhLayer(4, name='in')
    hiddenLayer = TanhLayer(6, name='hidden0')
    outLayer = ThresholdLayer(3)
    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    nn.addConnection(in_to_hidden)
    nn.addConnection(hidden_to_out)
    nn.sortModules()
    return nn
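The returned network maps 4 inputs to 3 thresholded outputs (an iris-style setup, judging by the very similar Example 9); a quick smoke test:

nn = createNN()
print(nn.indim)                           # 4
print(nn.outdim)                          # 3
print(nn.activate([5.1, 3.5, 1.4, 0.2]))  # each output is thresholded to 0 or 1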
Example 9: buildIris
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
def buildIris(self):
    self.params['dataset'] = 'iris'
    self.trn_data, self.tst_data = pybrainData(0.5)
    global trn_data
    trn_data = self.trn_data
    nn = FeedForwardNetwork()
    inLayer = TanhLayer(4, name='in')
    hiddenLayer = TanhLayer(6, name='hidden0')
    outLayer = ThresholdLayer(3, name='out')
    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    nn.addConnection(in_to_hidden)
    nn.addConnection(hidden_to_out)
    nn.sortModules()
    nn.randomize()
    self.net_settings = str(nn.connections)
    self.nn = nn
Example 10: buildParity
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
def buildParity(self):
    self.params['dataset'] = 'parity'
    self.trn_data = ParityDataSet(nsamples=75)
    self.trn_data.setField('class', self.trn_data['target'])
    self.tst_data = ParityDataSet(nsamples=75)
    global trn_data
    trn_data = self.trn_data
    nn = FeedForwardNetwork()
    inLayer = TanhLayer(4, name='in')
    hiddenLayer = TanhLayer(6, name='hidden0')
    outLayer = ThresholdLayer(1, name='out')
    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    nn.addConnection(in_to_hidden)
    nn.addConnection(hidden_to_out)
    nn.sortModules()
    nn.randomize()
    self.net_settings = str(nn.connections)
    self.nn = nn
Example 11: buildnet
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
def buildnet(modules):
    net = FeedForwardNetwork(name='mynet')
    net.addInputModule(modules['in'])
    net.addModule(modules['hidden'])
    net.addOutputModule(modules['out'])
    net.addModule(modules['bias'])
    net.addConnection(modules['in_to_hidden'])
    net.addConnection(modules['bias_to_hidden'])
    net.addConnection(modules['bias_to_out'])
    if 'hidden2' in modules:
        net.addModule(modules['hidden2'])
        net.addConnection(modules['hidden_to_hidden2'])
        net.addConnection(modules['bias_to_hidden2'])
        net.addConnection(modules['hidden2_to_out'])
    else:
        net.addConnection(modules['hidden_to_out'])
    net.sortModules()
    return net
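buildnet expects a dict of pre-built modules and connections under fixed keys; a hedged sketch of the single-hidden-layer case (the 'hidden2' keys are simply omitted, and the layer sizes are arbitrary):

from pybrain.structure import LinearLayer, SigmoidLayer, BiasUnit, FullConnection

modules = {}
modules['in'] = LinearLayer(4, name='in')
modules['hidden'] = SigmoidLayer(6, name='hidden')
modules['out'] = LinearLayer(2, name='out')
modules['bias'] = BiasUnit(name='bias')
modules['in_to_hidden'] = FullConnection(modules['in'], modules['hidden'])
modules['hidden_to_out'] = FullConnection(modules['hidden'], modules['out'])
modules['bias_to_hidden'] = FullConnection(modules['bias'], modules['hidden'])
modules['bias_to_out'] = FullConnection(modules['bias'], modules['out'])

net = buildnet(modules)
print(net.activate([1., 0., 0., 1.]))     # 2 outputs from the 4-input net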
Example 12: _build_network
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
def _build_network():
    logger.info("Building network...")
    net = FeedForwardNetwork()
    inp = LinearLayer(IMG_WIDTH * IMG_HEIGHT * 2)
    h1_image_width = IMG_WIDTH - FIRST_CONVOLUTION_FILTER + 1
    h1_image_height = IMG_HEIGHT - FIRST_CONVOLUTION_FILTER + 1
    h1_full_width = h1_image_width * CONVOLUTION_MULTIPLIER * NUMBER_OF_IMAGES
    h1_full_height = h1_image_height * CONVOLUTION_MULTIPLIER
    h1 = SigmoidLayer(h1_full_width * h1_full_height)
    h2_width = h1_full_width / 2
    h2_height = h1_full_height / 2
    h2 = LinearLayer(h2_width * h2_height)
    h3_image_width = h2_width / CONVOLUTION_MULTIPLIER / NUMBER_OF_IMAGES - SECOND_CONVOLUTION_FILTER + 1
    h3_image_height = h2_height / CONVOLUTION_MULTIPLIER - SECOND_CONVOLUTION_FILTER + 1
    h3_full_width = h3_image_width * (CONVOLUTION_MULTIPLIER * 2) * NUMBER_OF_IMAGES
    h3_full_height = h3_image_height * (CONVOLUTION_MULTIPLIER * 2)
    h3 = SigmoidLayer(h3_full_width * h3_full_height)
    h4_full_width = h3_image_width - MERGE_FILTER
    h4_full_height = h3_image_height - MERGE_FILTER
    h4 = SigmoidLayer(h4_full_width * h4_full_height)
    logger.info("BASE IMG: %d x %d" % (IMG_WIDTH, IMG_HEIGHT))
    logger.info("First layer IMG: %d x %d" % (h1_image_width, h1_image_height))
    logger.info("First layer FULL: %d x %d" % (h1_full_width, h1_full_height))
    logger.info("Second layer FULL: %d x %d" % (h2_width, h2_height))
    logger.info("Third layer IMG: %d x %d" % (h3_image_width, h3_image_height))
    logger.info("Third layer FULL: %d x %d" % (h3_full_width, h3_full_height))
    logger.info("Fourth layer FULL: %d x %d" % (h4_full_width, h4_full_height))
    outp = SoftmaxLayer(2)
    h5 = SigmoidLayer(h4_full_width * h4_full_height)
    # add modules
    net.addOutputModule(outp)
    net.addInputModule(inp)
    net.addModule(h1)
    net.addModule(h2)
    net.addModule(h3)
    net.addModule(h4)
    net.addModule(h5)
    # create connections
    for i in range(NUMBER_OF_IMAGES):
        _add_convolutional_connection(
            net=net,
            h1=inp,
            h2=h1,
            filter_size=FIRST_CONVOLUTION_FILTER,
            multiplier=CONVOLUTION_MULTIPLIER,
            input_width=IMG_WIDTH * 2,
            input_height=IMG_HEIGHT,
            output_width=h1_full_width,
            output_height=h1_full_height,
            offset_x=h1_image_width * i,
            offset_y=0,
            size_x=h1_image_width,
            size_y=h1_image_height
        )
    _add_pool_connection(
        net=net,
        h1=h1,
        h2=h2,
        input_width=h1_full_width,
        input_height=h1_full_height
    )
    for i in range(NUMBER_OF_IMAGES * CONVOLUTION_MULTIPLIER):
        for j in range(CONVOLUTION_MULTIPLIER):
            _add_convolutional_connection(
                net=net,
                h1=h2,
                h2=h3,
                filter_size=SECOND_CONVOLUTION_FILTER,
                multiplier=CONVOLUTION_MULTIPLIER,
                input_width=h2_width,
                input_height=h2_height,
                output_width=h3_full_width,
                output_height=h3_full_height,
                offset_x=h3_image_width * i,
                offset_y=h3_image_height * j,
                size_x=h3_image_width,
                size_y=h3_image_height
            )
    _merge_connection(
        net=net,
        h1=h3,
        h2=h4,
        filter_size=MERGE_FILTER,
        input_width=h3_full_width,
        input_height=h3_full_height,
        output_width=h4_full_width,
        output_height=h4_full_height
    )
    # ......... the rest of this example is omitted .........
Example 13: print
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
for input, target in ds:
    print(input, target)

# define layers and connections
inLayer = LinearLayer(2)
hiddenLayerOne = SigmoidLayer(4, "one")
hiddenLayerTwo = SigmoidLayer(4, "two")
outLayer = LinearLayer(1)
inToHiddenOne = FullConnection(inLayer, hiddenLayerOne)
hiddenOneToTwo = FullConnection(hiddenLayerOne, hiddenLayerTwo)
hiddenTwoToOut = FullConnection(hiddenLayerTwo, outLayer)

# wire the layers and connections to a net
net = FeedForwardNetwork()
net.addInputModule(inLayer)
net.addModule(hiddenLayerOne)
net.addModule(hiddenLayerTwo)
net.addOutputModule(outLayer)
net.addConnection(inToHiddenOne)
net.addConnection(hiddenOneToTwo)
net.addConnection(hiddenTwoToOut)
net.sortModules()
print(net)

trainer = BackpropTrainer(net, ds)
for i in range(20):
    for j in range(1000):
        trainer.train()
    printNetResult(i, net)
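This snippet assumes a ds dataset and a printNetResult helper defined earlier in the original file; a hedged guess at what they might look like for an XOR-style experiment (names and contents are illustrative, not taken from the original source):

from pybrain.datasets import SupervisedDataSet

ds = SupervisedDataSet(2, 1)               # 2 inputs, 1 target, matching the layers above
for inp, target in [((0, 0), (0,)), ((0, 1), (1,)), ((1, 0), (1,)), ((1, 1), (0,))]:
    ds.addSample(inp, target)

def printNetResult(epoch, net):
    # print the network's output for every input pattern in the dataset
    outputs = [round(net.activate(inp)[0], 3) for inp, _ in ds]
    print("epoch %d: %s" % (epoch, outputs))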
Example 14: buildNet
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
    return tosave

if len(sys.argv) <= 3:
    saved = buildNet()
else:
    saved = pickle.load(open(sys.argv[3], "rb"))
pickle.dump(saved, open("pablosemptynet.p", "wb"))

net = FeedForwardNetwork(name='mynet')
net.addInputModule(saved[0])
net.addModule(saved[1])
net.addOutputModule(saved[2])
net.addModule(saved[3])
net.addConnection(saved[4])
net.addConnection(saved[5])
net.addConnection(saved[6])
net.addConnection(saved[7])
net.sortModules()

trainer = BackpropTrainer(net, None, learningrate=lrate, verbose=False, batchlearning=True, weightdecay=wdecay)
stressErrors = list()
phonemeErrors = list()
for cycle in range(100):
Example 15: phoneme_to_layer
# Required import: from pybrain.structure.networks.feedforward import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.networks.feedforward.FeedForwardNetwork import addModule [as alias]
class Network:
    "NETwhisperer neural network"

    def phoneme_to_layer(self, phoneme):
        return self.phonemes_to_layers[phoneme]

    def layer_to_phoneme(self, layer):
        def cos_to_input(item):
            phoneme, phoneme_layer = item
            return _cos(layer, phoneme_layer)
        # minimum angle should be maximum cos
        return max(self.phonemes_to_layers.iteritems(), key=cos_to_input)[0]

    def __init__(self, window_size, window_middle, n_hidden_neurons):
        self.window_size = window_size
        self.window_middle = window_middle
        self.n_hidden_neurons = n_hidden_neurons
        self.n_trainings = 0
        self.training_errors = []
        self._init_layers()
        self._generate_pybrain_network()

    def _init_layers(self):
        # one neuron for each window/letter combination
        self.letter_neuron_names = list(product(range(self.window_size), corpus.all_letters))
        # one neuron for each phoneme trait
        self.phoneme_trait_neuron_names = list(corpus.all_phoneme_traits)
        # neuron counts
        self.n_input_neurons = len(self.letter_neuron_names)
        self.n_output_neurons = len(self.phoneme_trait_neuron_names)
        # mapping from (pos, letter) to input neuron index
        self.letters_to_neurons = dict({(pos_and_letter, index) for index, pos_and_letter in enumerate(self.letter_neuron_names)})
        # mapping from trait to neuron
        self.traits_to_neurons = dict({(trait, index) for index, trait in enumerate(self.phoneme_trait_neuron_names)})
        # mapping from phoneme to layer
        self.phonemes_to_layers = {}
        for (phoneme, traits) in corpus.phoneme_traits.iteritems():
            layer = zeros(self.n_output_neurons)
            for trait in traits:
                index = self.traits_to_neurons[trait]
                layer[index] = 1
            self.phonemes_to_layers[phoneme] = layer

    def _generate_pybrain_network(self):
        # make network
        self._pybrain_network = FeedForwardNetwork()
        # make layers
        self._in_layer = LinearLayer(self.n_input_neurons, name='in')
        self._hidden_layer = SigmoidLayer(self.n_hidden_neurons, name='hidden')
        self._out_layer = LinearLayer(self.n_output_neurons, name='out')
        self._bias_neuron = BiasUnit(name='bias')
        # make connections between layers
        self._in_hidden_connection = FullConnection(self._in_layer, self._hidden_layer)
        self._hidden_out_connection = FullConnection(self._hidden_layer, self._out_layer)
        self._bias_hidden_connection = FullConnection(self._bias_neuron, self._hidden_layer)
        self._bias_out_connection = FullConnection(self._bias_neuron, self._out_layer)
        # add modules to network
        self._pybrain_network.addInputModule(self._in_layer)
        self._pybrain_network.addModule(self._hidden_layer)
        self._pybrain_network.addOutputModule(self._out_layer)
        self._pybrain_network.addModule(self._bias_neuron)
        # add connections to network
        for c in (self._in_hidden_connection, self._hidden_out_connection, self._bias_hidden_connection, self._bias_out_connection):
            self._pybrain_network.addConnection(c)
        # initialize network with added modules/connections
        self._pybrain_network.sortModules()

    def windowIter(self, letters):
        assert type(letters) == str
        padding_before = ' ' * self.window_middle
        padding_after = ' ' * (self.window_size - self.window_middle - 1)
        padded_letters = padding_before + letters + padding_after
        # for each letter in the sample
        for l_num in range(len(letters)):
            letters_window = padded_letters[l_num:l_num + self.window_size]
            yield letters_window

    def generateSamples(self, letters, phonemes):
        assert len(letters) == len(phonemes)
        for (letters_window, current_phoneme) in izip(self.windowIter(letters), phonemes):
            yield self.letters_to_layer(letters_window), self.phoneme_to_layer(current_phoneme)

    def letters_to_layer(self, letters):
        assert len(letters) == self.window_size
        # start with empty layer
        layer = zeros(self.n_input_neurons)
        # loop through letters and activate each neuron
        for (pos, letter) in enumerate(letters):
            index = self.letters_to_neurons[(pos, letter)]
            layer[index] = 1
        return layer

    def train(self, training_set, n_epochs=1, callback=None):
        # build dataset
        dataset = DataSet(self.n_input_neurons, self.n_output_neurons)
        for (ltr, ph) in training_set:
            for sample in self.generateSamples(ltr, ph):
                dataset.addSample(*sample)
        # build trainer
        trainer = Trainer(self._pybrain_network, dataset, 0.01, 1.0, 0.9)
        # ......... the rest of this example is omitted .........