This page collects typical code examples for the Python method pybrain.structure.FeedForwardNetwork.addModule. If you have been wondering what FeedForwardNetwork.addModule does, how to call it, or simply want to see it used in context, the curated examples below should help. You can also read more about its containing class, pybrain.structure.FeedForwardNetwork.
Fifteen code examples of FeedForwardNetwork.addModule are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python examples.
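As a quick orientation before the examples: addModule registers a hidden (non-input, non-output) module with the network. The sketch below shows the construction pattern the examples share; the 2-3-1 layer sizes are arbitrary placeholders.

from pybrain.structure import FeedForwardNetwork, LinearLayer, SigmoidLayer, FullConnection

# Build a small 2-3-1 network by hand.
net = FeedForwardNetwork()
in_layer = LinearLayer(2)
hidden_layer = SigmoidLayer(3)
out_layer = LinearLayer(1)
net.addInputModule(in_layer)
net.addModule(hidden_layer)        # hidden modules are registered with addModule
net.addOutputModule(out_layer)
net.addConnection(FullConnection(in_layer, hidden_layer))
net.addConnection(FullConnection(hidden_layer, out_layer))
net.sortModules()                  # required before the network can be activated
print(net.activate([1.0, 2.0]))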
Example 1: _constructNetwork
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
def _constructNetwork(self, nIn, nOut, params):
    ''' Construct the network '''
    nHidden = params.setdefault('nHidden', 2)
    hiddenSize = np.empty(nHidden, dtype=int)  # layer sizes must be integers
    for i in range(nHidden):
        pstr = 'hiddenSize[' + str(i) + ']'
        hiddenSize[i] = params.setdefault(pstr, nIn + nOut)
    # Construct network
    ann = FeedForwardNetwork()
    # Add layers
    layers = []
    layers.append(LinearLayer(nIn))
    for nHid in hiddenSize:
        layers.append(SoftmaxLayer(nHid))
    layers.append(LinearLayer(nOut))
    ann.addOutputModule(layers[-1])
    ann.addInputModule(layers[0])
    for mod in layers[1:-1]:
        ann.addModule(mod)
    # Connections: fully connect each layer to the next
    for i, mod in enumerate(layers):
        if i < len(layers) - 1:
            conn = FullConnection(mod, layers[i+1])
            ann.addConnection(conn)
    # Sort the modules
    ann.sortModules()
    return ann
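The params dict in Example 1 is keyed by plain strings such as 'hiddenSize[0]'. A hedged usage sketch follows; the enclosing class is not shown, so the instance name builder is purely illustrative.

params = {'nHidden': 2, 'hiddenSize[0]': 8, 'hiddenSize[1]': 6}
# builder stands for an instance of the (unshown) class that defines _constructNetwork
ann = builder._constructNetwork(nIn=4, nOut=3, params=params)
print(ann.activate([0.1, 0.2, 0.3, 0.4]))   # vector of 3 outputs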
Example 2: ann_network
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
def ann_network():
    nn = FeedForwardNetwork()
    # define the activation function and number of nodes per layer
    in_layer = LinearLayer(13)
    hidden_layer = SigmoidLayer(5)
    bias_unit = BiasUnit(name='bias')
    out_layer = LinearLayer(1)
    # add modules to the network
    nn.addInputModule(in_layer)
    nn.addModule(hidden_layer)
    nn.addModule(bias_unit)
    nn.addOutputModule(out_layer)
    # define connections between the nodes; the bias unit feeds into the hidden layer
    hidden_with_bias = FullConnection(bias_unit, hidden_layer)
    in_to_hidden = FullConnection(in_layer, hidden_layer)
    hidden_to_out = FullConnection(hidden_layer, out_layer)
    # add connections to the network
    nn.addConnection(in_to_hidden)
    nn.addConnection(hidden_with_bias)
    nn.addConnection(hidden_to_out)
    # perform internal network initialization
    nn.sortModules()
    return nn
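A hedged training sketch for the 13-input / 1-output network above; the two samples are placeholders, and any 13-feature regression dataset would work the same way.

from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

nn = ann_network()
ds = SupervisedDataSet(13, 1)        # dimensions must match the network's in/out layers
ds.addSample([0.1] * 13, [1.0])      # placeholder samples
ds.addSample([0.5] * 13, [2.0])
trainer = BackpropTrainer(nn, ds)
for _ in range(10):                  # a few epochs for illustration
    print(trainer.train())           # train() returns the epoch's training error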
Example 3: buildMLP
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
def buildMLP(dataSet, num_hidden):
    '''
    Build a feed-forward network sized to match the given dataset.
    The hidden layer has num_hidden nodes.
    '''
    # make the network
    network = FeedForwardNetwork()
    # make network layers
    inputLayer = LinearLayer(dataSet.indim)
    hiddenLayer = SigmoidLayer(num_hidden)
    outputLayer = LinearLayer(dataSet.outdim)
    # add the layers to the network
    network.addInputModule(inputLayer)
    network.addModule(hiddenLayer)
    network.addOutputModule(outputLayer)
    # add bias
    network.addModule(BiasUnit(name='bias'))
    # create connections between layers
    inToHidden = FullConnection(inputLayer, hiddenLayer)
    hiddenToOut = FullConnection(hiddenLayer, outputLayer)
    # connect the bias to both the output and hidden layers
    network.addConnection(FullConnection(network['bias'], outputLayer))
    network.addConnection(FullConnection(network['bias'], hiddenLayer))
    # add connections to the network
    network.addConnection(inToHidden)
    network.addConnection(hiddenToOut)
    network.sortModules()
    return network
Example 4: encoderdecoder
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
def encoderdecoder(outersize, innersize, indata, fname):
    # create network
    n = FeedForwardNetwork()
    inLayer = LinearLayer(outersize)
    hiddenLayer = SigmoidLayer(innersize)
    outLayer = LinearLayer(outersize)
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)
    n.sortModules()
    # create dataset: the autoencoder learns to reproduce its own input
    ds = SupervisedDataSet(outersize, outersize)
    for x in indata:
        ds.addSample(x, x)
    # train network
    trainer = BackpropTrainer(n, ds)
    trainer.trainUntilConvergence()
    # save the trained network to XML
    # (requires: from pybrain.tools.customxml import NetworkWriter)
    NetworkWriter.writeToFile(n, fname)
    return [[in_to_hidden, hidden_to_out],
            [inLayer, hiddenLayer, outLayer],
            n]
Example 5: BackupNetwork
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
def BackupNetwork(genome=None):
    # build a [12, 12, 4] network whose initial weights come from the baseline policy
    from pybrain.structure import FeedForwardNetwork, LinearLayer, TanhLayer, FullConnection
    network = FeedForwardNetwork()
    inLayer = LinearLayer(12)
    hiddenLayer = LinearLayer(12)
    outLayer = TanhLayer(4)
    network.addInputModule(inLayer)
    network.addModule(hiddenLayer)
    network.addOutputModule(outLayer)
    weights = []
    if genome is None:
        # fall back to the pickled baseline weights
        import pickle
        weights = pickle.load(open("seed", "rb"))
    else:
        weights = genome
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    for i in range(0, 144):          # 12 x 12 input-to-hidden weights
        in_to_hidden.params[i] = weights[i]
    for j in range(0, 48):           # 12 x 4 hidden-to-output weights
        hidden_to_out.params[j] = weights[j + 144]
    network.addConnection(in_to_hidden)
    network.addConnection(hidden_to_out)
    network.sortModules()
    return network
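The genome is expected to be a flat sequence of 12*12 + 12*4 = 192 weights (input-to-hidden followed by hidden-to-output). A minimal sketch, assuming an all-zero genome:

genome = [0.0] * (12 * 12 + 12 * 4)   # 192 weights
net = BackupNetwork(genome)
print(net.activate([0.0] * 12))       # 4 outputs from the Tanh output layer, all 0.0 here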
Example 6: buildNet
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
def buildNet(input_size, hidden_size):
    n = FeedForwardNetwork()
    # two parallel input layers, each feeding its own hidden layer
    in1Layer = LinearLayer(input_size)
    in2Layer = LinearLayer(input_size)
    hidden1Layer = SigmoidLayer(hidden_size)
    hidden2Layer = SigmoidLayer(hidden_size)
    hidden3Layer = SigmoidLayer(2)
    outLayer = LinearLayer(1)
    n.addInputModule(in1Layer)
    n.addInputModule(in2Layer)
    n.addModule(hidden1Layer)
    n.addModule(hidden2Layer)
    n.addModule(hidden3Layer)
    n.addOutputModule(outLayer)
    # both hidden branches merge into a third hidden layer before the output
    in1_to_hidden1 = FullConnection(in1Layer, hidden1Layer)
    in2_to_hidden2 = FullConnection(in2Layer, hidden2Layer)
    hidden1_to_hidden3 = FullConnection(hidden1Layer, hidden3Layer)
    hidden2_to_hidden3 = FullConnection(hidden2Layer, hidden3Layer)
    hidden3_to_out = FullConnection(hidden3Layer, outLayer)
    n.addConnection(in1_to_hidden1)
    n.addConnection(in2_to_hidden2)
    n.addConnection(hidden1_to_hidden3)
    n.addConnection(hidden2_to_hidden3)
    n.addConnection(hidden3_to_out)
    n.sortModules()
    return n
Example 7: main
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
def main():
    n = FeedForwardNetwork()
    in_layer = LinearLayer(2)
    hidden_layer = SigmoidLayer(3)
    out_layer = LinearLayer(1)
    n.addInputModule(in_layer)
    n.addModule(hidden_layer)
    n.addOutputModule(out_layer)
    in_to_hidden = FullConnection(in_layer, hidden_layer)
    hidden_to_out = FullConnection(hidden_layer, out_layer)
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)
    n.sortModules()
    print(">>> print n")
    print(n)
    print(">>> n.activate([1, 2])")
    print(n.activate([1, 2]))
    print(">>> in_to_hidden.params")
    print(in_to_hidden.params)
    print(">>> hidden_to_out.params")
    print(hidden_to_out.params)
    print(">>> n.params")
    print(n.params)
Example 8: __init__
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
def __init__(self, index, name, params):
    self.name = name
    self.index = index
    self.liste = []  # ClassificationDataSet(17, 1, nb_classes=4)
    self.status_good = True
    self.number_of_moves = 0
    self.number_of_sound_moves = 0
    n = FeedForwardNetwork()
    self.inLayer = LinearLayer(5)
    self.hiddenLayer1 = SigmoidLayer(15)
    self.hiddenLayer2 = SigmoidLayer(15)
    self.hiddenLayer3 = SigmoidLayer(15)
    self.outLayer = LinearLayer(4)
    n.addInputModule(self.inLayer)
    n.addModule(self.hiddenLayer1)
    n.addModule(self.hiddenLayer2)
    n.addModule(self.hiddenLayer3)
    n.addOutputModule(self.outLayer)
    from pybrain.structure import FullConnection
    # chain the three hidden layers between the input and output layers
    in_to_hidden = FullConnection(self.inLayer, self.hiddenLayer1)
    hidden_to_hidden1 = FullConnection(self.hiddenLayer1, self.hiddenLayer2)
    hidden_to_hidden2 = FullConnection(self.hiddenLayer2, self.hiddenLayer3)
    hidden_to_out = FullConnection(self.hiddenLayer3, self.outLayer)
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_hidden1)
    n.addConnection(hidden_to_hidden2)
    n.addConnection(hidden_to_out)
    n.sortModules()
    self.n = n
Example 9: createNLayerFFNet
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
def createNLayerFFNet(historySize, n, k):
    net = FeedForwardNetwork()
    # Create and add the input and output layers
    net.addInputModule(LinearLayer(historySize * 2, name='in'))
    net.addOutputModule(LinearLayer(1, name='out'))
    # Create and add the n hidden layers (k units each) and the connections between them
    baseLayerName = 'hidden%i'
    connectionName = 'c%i'
    net.addModule(SigmoidLayer(k, name=baseLayerName % 0))
    net.addConnection(FullConnection(net['in'], net[baseLayerName % 0], name=connectionName % 0))
    for i in range(1, n):
        layerName = baseLayerName % i
        inLayerName = baseLayerName % (i - 1)
        net.addModule(SigmoidLayer(k, name=layerName))
        # hidden-to-hidden connections are named c1 .. c(n-1)
        net.addConnection(FullConnection(net[inLayerName], net[layerName], name=connectionName % i))
    net.addConnection(FullConnection(net[baseLayerName % (n - 1)], net['out'], name=connectionName % n))
    # Preps the net for use
    net.sortModules()
    return net
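A hedged usage sketch: a history of 4 moves gives an 8-unit input layer, with 3 hidden layers of 10 units each.

net = createNLayerFFNet(historySize=4, n=3, k=10)
print(net.activate([0.0] * 8))   # input width is historySize * 2 = 8; one scalar output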
Example 10: NNet
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
class NNet(FunctionApproximator):
    def __init__(self, num_features, num_hidden_neurons):
        super(NNet, self).__init__(num_features)
        self.ds = SupervisedDataSet(num_features, 1)
        self.net = FeedForwardNetwork()
        self.net.addInputModule(LinearLayer(num_features, name='in'))
        self.net.addModule(LinearLayer(num_hidden_neurons, name='hidden'))
        self.net.addOutputModule(LinearLayer(1, name='out'))
        self.net.addConnection(FullConnection(self.net['in'], self.net['hidden'], name='c1'))
        self.net.addConnection(FullConnection(self.net['hidden'], self.net['out'], name='c2'))
        self.net.sortModules()

    def getY(self, inpt):
        # NOTE: has been observed to return NaN
        return self.net.activate(inpt)

    def update(self, inpt, target):
        # Q-learning target recomputation from the original snippet; it relies on
        # state/action/reward variables that are not defined in this excerpt:
        # q_old = self.qvalue(state, action)
        # q_new = self.qvalue(new_state, new_action)
        # target = q_old + self.alpha * (reward + (self.gamma * q_new) - q_old)
        self.ds.addSample(inpt, target)
        trainer = BackpropTrainer(self.net, self.ds)
        # trainer.trainUntilConvergence() is an alternative; a single epoch is faster
        trainer.train()
Example 11: initalize_nn
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
def initalize_nn():
    global in_to_hidden
    global hidden_to_hidden2
    global hidden_to_out
    # Old code (regression)
    n = FeedForwardNetwork()
    # n = buildNetwork( 2, 3, data.outdim, outclass=SoftmaxLayer )
    inLayer = LinearLayer(2)
    hiddenLayer = SigmoidLayer(3)
    hiddenLayer2 = SigmoidLayer(3)
    outLayer = LinearLayer(1)
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addModule(hiddenLayer2)
    n.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_hidden2 = FullConnection(hiddenLayer, hiddenLayer2)
    hidden_to_out = FullConnection(hiddenLayer2, outLayer)
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_hidden2)
    n.addConnection(hidden_to_out)
    n.sortModules()
    return n
Example 12: crearRN
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
def crearRN():
    # Create the neural network
    n = FeedForwardNetwork()
    # Declare the input, hidden and output layers of the network
    inLayer = LinearLayer(4096)
    hiddenLayer = SigmoidLayer(3)
    outLayer = LinearLayer(1)
    # Add the layers to the network
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)
    # Declare the connections between the nodes
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    # Register the connections in the network
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)
    # The network is now ready to use
    n.sortModules()
    return n
Example 13: build_network
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
def build_network(self, layers=None, end=1):
    layerobjects = []
    for item in layers:
        try:
            # ('sig', n) tuples become sigmoid layers; a size of 0 is skipped
            t, n = item
            if t == "sig":
                if n == 0:
                    continue
                layerobjects.append(SigmoidLayer(n))
        except TypeError:
            # plain integers become linear layers
            layerobjects.append(LinearLayer(item))
    n = FeedForwardNetwork()
    n.addInputModule(layerobjects[0])
    # add the hidden layers, fully connecting each layer to the next
    for i, layer in enumerate(layerobjects[1:-1]):
        n.addModule(layer)
        connection = FullConnection(layerobjects[i], layerobjects[i+1])
        n.addConnection(connection)
    n.addOutputModule(layerobjects[-1])
    connection = FullConnection(layerobjects[-2], layerobjects[-1])
    n.addConnection(connection)
    n.sortModules()
    return n
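The layers argument mixes plain integers (LinearLayer sizes) with ('sig', n) tuples (SigmoidLayer sizes). A hedged call sketch; the enclosing class is not shown, so the instance name builder is purely illustrative.

# 4 linear inputs, two sigmoid hidden layers, 2 linear outputs
net = builder.build_network(layers=[4, ("sig", 8), ("sig", 6), 2])
print(net.activate([0.1, 0.2, 0.3, 0.4]))   # vector of 2 outputs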
Example 14: __init__
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
def __init__(self, index, name, params):
    self.name = name
    self.index = index
    self.status_good = True
    n = FeedForwardNetwork()
    self.inLayer = LinearLayer(17)
    self.hiddenLayer = SigmoidLayer(5)
    self.outLayer = LinearLayer(4)
    n.addInputModule(self.inLayer)
    n.addModule(self.hiddenLayer)
    n.addOutputModule(self.outLayer)
    from pybrain.structure import FullConnection
    in_to_hidden = FullConnection(self.inLayer, self.hiddenLayer)
    hidden_to_out = FullConnection(self.hiddenLayer, self.outLayer)
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)
    n.sortModules()
    # overwrite the randomly initialized connection weights with the supplied parameters
    for j, i in enumerate(params[0]):
        n.connections[self.hiddenLayer][0].params[j] = i
    for j, i in enumerate(params[1]):
        n.connections[self.inLayer][0].params[j] = i
    self.n = n
Example 15: trained_cat_dog_ANN
# Required import: from pybrain.structure import FeedForwardNetwork [as alias]
# Or: from pybrain.structure.FeedForwardNetwork import addModule [as alias]
def trained_cat_dog_ANN():
    n = FeedForwardNetwork()
    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size + 1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.sortModules()
    n.convertToFastNetwork()
    print('successfully converted to a fast network')
    t = BackpropTrainer(n, d, learningrate=0.0001)  # , momentum=0.75)
    count = 0
    # train until the error drops below 0.01 or 30 epochs have been run
    while True:
        globErr = t.train()
        print(globErr)
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break
    exportCatDogANN(n)
    return n