This article collects typical usage examples of the Python class pybrain.structure.networks.feedforward.FeedForwardNetwork. If you have been wondering what the FeedForwardNetwork class does, how to use it, or what real code using it looks like, the curated class code examples below may help.
15 code examples of the FeedForwardNetwork class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
Example 1: __init__
def __init__(self, predefined = None, **kwargs):
    """ For the current implementation, the sequence length
    needs to be fixed, and given at construction time. """
    if predefined is not None:
        self.predefined = predefined
    else:
        self.predefined = {}
    FeedForwardNetwork.__init__(self, **kwargs)
    assert self.seqlen is not None
    # the input is a 1D-mesh (as a view on a flat input layer)
    inmod = LinearLayer(self.inputsize * self.seqlen, name='input')
    inmesh = ModuleMesh.viewOnFlatLayer(inmod, (self.seqlen,), 'inmesh')
    # the output is also a 1D-mesh
    outmod = self.outcomponentclass(self.outputsize * self.seqlen, name='output')
    outmesh = ModuleMesh.viewOnFlatLayer(outmod, (self.seqlen,), 'outmesh')
    # the hidden layers are placed in a 2 x seqlen mesh
    hiddenmesh = ModuleMesh.constructWithLayers(self.componentclass, self.hiddensize,
                                                (2, self.seqlen), 'hidden')
    # add the modules
    for c in inmesh:
        self.addInputModule(c)
    for c in outmesh:
        self.addOutputModule(c)
    for c in hiddenmesh:
        self.addModule(c)
    # set the connection weights to be shared
    inconnf = MotherConnection(inmesh.componentOutdim * hiddenmesh.componentIndim, name='inconn')
    outconnf = MotherConnection(outmesh.componentIndim * hiddenmesh.componentOutdim, name='outconn')
    forwardconn = MotherConnection(hiddenmesh.componentIndim * hiddenmesh.componentOutdim, name='fconn')
    if self.symmetric:
        backwardconn = forwardconn
        inconnb = inconnf
        outconnb = outconnf
    else:
        backwardconn = MotherConnection(hiddenmesh.componentIndim * hiddenmesh.componentOutdim, name='bconn')
        inconnb = MotherConnection(inmesh.componentOutdim * hiddenmesh.componentIndim, name='inconn')
        outconnb = MotherConnection(outmesh.componentIndim * hiddenmesh.componentOutdim, name='outconn')
    # build the connections
    for i in range(self.seqlen):
        # input to hidden
        self.addConnection(SharedFullConnection(inconnf, inmesh[(i,)], hiddenmesh[(0, i)]))
        self.addConnection(SharedFullConnection(inconnb, inmesh[(i,)], hiddenmesh[(1, i)]))
        # hidden to output
        self.addConnection(SharedFullConnection(outconnf, hiddenmesh[(0, i)], outmesh[(i,)]))
        self.addConnection(SharedFullConnection(outconnb, hiddenmesh[(1, i)], outmesh[(i,)]))
        if i > 0:
            # forward in time
            self.addConnection(SharedFullConnection(forwardconn, hiddenmesh[(0, i - 1)], hiddenmesh[(0, i)]))
        if i < self.seqlen - 1:
            # backward in time
            self.addConnection(SharedFullConnection(backwardconn, hiddenmesh[(1, i + 1)], hiddenmesh[(1, i)]))
    self.sortModules()
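The pattern to note here is the weight sharing: a MotherConnection owns a single parameter vector, and every SharedFullConnection built from it reuses exactly those weights. Below is a minimal sketch of that mechanism on its own, separate from the bidirectional network above; the layer sizes are chosen purely for illustration.

from pybrain.structure import FeedForwardNetwork, LinearLayer
from pybrain.structure.connections.shared import MotherConnection, SharedFullConnection

net = FeedForwardNetwork()
a = LinearLayer(2, name='a')
b = LinearLayer(2, name='b')
c = LinearLayer(2, name='c')
net.addInputModule(a)
net.addModule(b)
net.addOutputModule(c)
# one parameter vector of size 2*2, stored once in the mother connection
mother = MotherConnection(4, name='shared')
net.addConnection(SharedFullConnection(mother, a, b))
net.addConnection(SharedFullConnection(mother, b, c))  # reuses the same 4 weights
net.sortModules()
print(len(net.params))  # 4, not 8: both connections share one weight set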
Example 2: createNet
def createNet():
    net = FeedForwardNetwork()
    modules = add_modules(net)
    add_connections(net, modules)
    # finish up
    net.sortModules()
    gradientCheck(net)
    return net
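add_modules and add_connections come from elsewhere in this example's source file, and gradientCheck ships with PyBrain in pybrain.tests.helpers. As a rough sketch, the two missing helpers might look like the following; the layer sizes and names below are assumptions for illustration, not the original code.

from pybrain.structure import LinearLayer, SigmoidLayer, FullConnection

def add_modules(net):
    # hypothetical helper: build and register one layer per role, return them by name
    modules = {'in': LinearLayer(2, name='in'),
               'hidden': SigmoidLayer(3, name='hidden'),
               'out': LinearLayer(1, name='out')}
    net.addInputModule(modules['in'])
    net.addModule(modules['hidden'])
    net.addOutputModule(modules['out'])
    return modules

def add_connections(net, modules):
    # hypothetical helper: wire the layers with trainable full connections
    net.addConnection(FullConnection(modules['in'], modules['hidden']))
    net.addConnection(FullConnection(modules['hidden'], modules['out']))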
Example 3: buildSlicedNetwork
def buildSlicedNetwork():
    """ Build a network with sliced connections. The two modules are symmetrically
    linked, but each output neuron is wired to a different input neuron than its
    own index. The weights are random. """
    N = FeedForwardNetwork('sliced')
    a = LinearLayer(2, name='a')
    b = LinearLayer(2, name='b')
    N.addInputModule(a)
    N.addOutputModule(b)
    N.addConnection(FullConnection(a, b, inSliceTo=1, outSliceFrom=1))
    N.addConnection(FullConnection(a, b, inSliceFrom=1, outSliceTo=1))
    N.sortModules()
    return N
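Because of the slicing, each output neuron is driven by exactly one input neuron, with the pairing crossed: a[0] feeds b[1] and a[1] feeds b[0]. A quick check (the two weights are random, so the exact numbers vary):

net = buildSlicedNetwork()
print(net.activate([1., 2.]))  # output[0] depends only on input[1], output[1] only on input[0]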
Example 4: __init__
def __init__(self, boardSize, convSize, numFeatureMaps, **args):
    inputdim = 2
    FeedForwardNetwork.__init__(self, **args)
    inlayer = LinearLayer(inputdim * boardSize * boardSize, name='in')
    self.addInputModule(inlayer)
    # we need some treatment of the border too - thus we pad the direct board input.
    x = convSize / 2
    insize = boardSize + 2 * x
    if convSize % 2 == 0:
        insize -= 1
    paddedlayer = LinearLayer(inputdim * insize * insize, name='pad')
    self.addModule(paddedlayer)
    # we connect a bias to the padded parts (with shared but trainable weights).
    bias = BiasUnit()
    self.addModule(bias)
    biasConn = MotherConnection(inputdim)
    paddable = []
    if convSize % 2 == 0:
        xs = range(x) + range(insize - x + 1, insize)
    else:
        xs = range(x) + range(insize - x, insize)
    paddable.extend(crossproduct([range(insize), xs]))
    paddable.extend(crossproduct([xs, range(x, boardSize + x)]))
    for (i, j) in paddable:
        self.addConnection(SharedFullConnection(biasConn, bias, paddedlayer,
                                                outSliceFrom=(i * insize + j) * inputdim,
                                                outSliceTo=(i * insize + j + 1) * inputdim))
    for i in range(boardSize):
        inmod = ModuleSlice(inlayer, outSliceFrom=i * boardSize * inputdim,
                            outSliceTo=(i + 1) * boardSize * inputdim)
        outmod = ModuleSlice(paddedlayer, inSliceFrom=((i + x) * insize + x) * inputdim,
                             inSliceTo=((i + x) * insize + x + boardSize) * inputdim)
        self.addConnection(IdentityConnection(inmod, outmod))
    self._buildStructure(inputdim, insize, paddedlayer, convSize, numFeatureMaps)
    self.sortModules()
Example 5: training
def training(self, d):
    """
    Builds a network, trains it on the given dataset, and returns it.
    """
    self.net = FeedForwardNetwork()
    inLayer = LinearLayer(4)       # 4 inputs
    hiddenLayer = SigmoidLayer(3)  # 3 neurons in the hidden layer, with sigmoid activation
    outLayer = LinearLayer(2)      # 2 neurons in the output layer
    # add layers to the network
    self.net.addInputModule(inLayer)
    self.net.addModule(hiddenLayer)
    self.net.addOutputModule(outLayer)
    # create connections
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    # add connections
    self.net.addConnection(in_to_hidden)
    self.net.addConnection(hidden_to_out)
    # sortModules() performs the internal initialization the network needs before use
    self.net.sortModules()
    print self.net
    # generate a big training set (note: the trainer below is given the dataset `d`
    # passed in as an argument, so this set is built but never actually used)
    trainingSet = SupervisedDataSet(4, 2)
    trainArr = self.generate_training_set()
    for ri in range(2000):
        input = (trainArr[0][ri][0], trainArr[0][ri][1], trainArr[0][ri][2], trainArr[0][ri][3])
        target = (trainArr[1][ri][0], trainArr[1][ri][1])
        trainingSet.addSample(input, target)
    # create a backpropagation trainer
    t = BackpropTrainer(self.net, d, learningrate=0.00001, momentum=0.99)
    while True:
        globErr = t.train()
        print "global error:", globErr
        if globErr < 0.0001:
            break
    return self.net
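Assuming the surrounding MLP class (shown in Example 11), a typical call sequence might look like the sketch below; note that with a learning rate of 0.00001 the loop can take a long time to reach the 0.0001 error target.

mlp = MLP()
d = mlp.make_dataset()   # the small hand-written dataset from make_dataset()
net = mlp.training(d)
print(net.activate((1, 1, 150, 150)))  # should come out close to (0, 1)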
Example 6: __init__
def __init__(self, x_dim, y_dim, hidden_size, s_id):
    self.serialize_id = s_id
    self.net = FeedForwardNetwork()
    in_layer = LinearLayer(x_dim)
    hidden_layer = SigmoidLayer(hidden_size)
    out_layer = LinearLayer(y_dim)
    self.net.addInputModule(in_layer)
    self.net.addModule(hidden_layer)
    self.net.addOutputModule(out_layer)
    in_to_hidden = FullConnection(in_layer, hidden_layer)
    hidden_to_out = FullConnection(hidden_layer, out_layer)
    self.net.addConnection(in_to_hidden)
    self.net.addConnection(hidden_to_out)
    self.net.sortModules()
Example 7: _generate_pybrain_network
def _generate_pybrain_network(self):
    # make network
    self._pybrain_network = FeedForwardNetwork()
    # make layers
    self._in_layer = LinearLayer(self.n_input_neurons, name='in')
    self._hidden_layer = SigmoidLayer(self.n_hidden_neurons, name='hidden')
    self._out_layer = LinearLayer(self.n_output_neurons, name='out')
    self._bias_neuron = BiasUnit(name='bias')
    # make connections between layers
    self._in_hidden_connection = FullConnection(self._in_layer, self._hidden_layer)
    self._hidden_out_connection = FullConnection(self._hidden_layer, self._out_layer)
    self._bias_hidden_connection = FullConnection(self._bias_neuron, self._hidden_layer)
    self._bias_out_connection = FullConnection(self._bias_neuron, self._out_layer)
    # add modules to network
    self._pybrain_network.addInputModule(self._in_layer)
    self._pybrain_network.addModule(self._hidden_layer)
    self._pybrain_network.addOutputModule(self._out_layer)
    self._pybrain_network.addModule(self._bias_neuron)
    # add connections to network
    for c in (self._in_hidden_connection, self._hidden_out_connection,
              self._bias_hidden_connection, self._bias_out_connection):
        self._pybrain_network.addConnection(c)
    # initialize network with added modules/connections
    self._pybrain_network.sortModules()
Example 8: _buildNetwork
def _buildNetwork(*layers, **options):
    """This is a helper function to create different kinds of networks.

    `layers` is a list of tuples. Each tuple can contain an arbitrary number of
    layers, each being connected to the next one with IdentityConnections. Due
    to this, all layers have to have the same dimension. We call these tuples
    'parts'.

    Afterwards, the last layer of one tuple is connected to the first layer of
    the following tuple by a FullConnection.

    If the keyword argument bias is given, BiasUnits are added additionally with
    every FullConnection.

    Example:

        _buildNetwork(
            (LinearLayer(3),),
            (SigmoidLayer(4), GaussianLayer(4)),
            (SigmoidLayer(3),),
        )
    """
    bias = options['bias'] if 'bias' in options else False
    net = FeedForwardNetwork()
    layerParts = iter(layers)
    firstPart = iter(layerParts.next())
    firstLayer = firstPart.next()
    net.addInputModule(firstLayer)
    prevLayer = firstLayer
    for part in chain(firstPart, layerParts):
        new_part = True
        for layer in part:
            net.addModule(layer)
            # Pick a connection class depending on whether we entered a new part
            if new_part:
                ConnectionClass = FullConnection
                if bias:
                    biasUnit = BiasUnit('BiasUnit for %s' % layer.name)
                    net.addModule(biasUnit)
                    net.addConnection(FullConnection(biasUnit, layer))
            else:
                ConnectionClass = IdentityConnection
            new_part = False
            conn = ConnectionClass(prevLayer, layer)
            net.addConnection(conn)
            prevLayer = layer
    net.addOutputModule(layer)
    net.sortModules()
    return net
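Calling the helper as in its docstring produces a 3-4-4-3 network. The sketch below swaps the GaussianLayer for a second SigmoidLayer so that it only relies on layers already used elsewhere on this page:

from pybrain.structure import LinearLayer, SigmoidLayer

net = _buildNetwork(
    (LinearLayer(3),),
    (SigmoidLayer(4), SigmoidLayer(4)),  # one 'part': joined by IdentityConnections
    (SigmoidLayer(3),),
    bias=True,
)
print(net.activate([1., 2., 3.]))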
Example 9: __init__
def __init__(self, inputdim, insize, convSize, numFeatureMaps, **args):
    FeedForwardNetwork.__init__(self, **args)
    inlayer = LinearLayer(inputdim * insize * insize)
    self.addInputModule(inlayer)
    self._buildStructure(inputdim, insize, inlayer, convSize, numFeatureMaps)
    self.sortModules()
Example 10: __init__
def __init__(self, states, verbose=False, max_epochs=None):
    '''Create a NeuralNetwork instance.

    `states` is a tuple of tuples of ints, representing the discovered
    subnetworks' entrez ids.
    '''
    self.verbose = verbose
    self.max_epochs = max_epochs
    self.num_features = sum(map(lambda tup: len(tup), states))
    self.states = states
    n = FeedForwardNetwork()
    n.addOutputModule(TanhLayer(1, name='out'))
    n.addModule(BiasUnit(name='bias out'))
    n.addConnection(FullConnection(n['bias out'], n['out']))
    for i, state in enumerate(states):
        dim = len(state)
        n.addInputModule(TanhLayer(dim, name='input %s' % i))
        n.addModule(BiasUnit(name='bias input %s' % i))
        n.addConnection(FullConnection(n['bias input %s' % i], n['input %s' % i]))
        n.addConnection(FullConnection(n['input %s' % i], n['out']))
    n.sortModules()
    self.n = n
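The resulting network has one tanh input module per state, each with its own bias, all feeding a single tanh output. activate() then expects the features of all states concatenated in the order the input modules were added. A usage sketch; the entrez ids below are made-up placeholders:

nn = NeuralNetwork(states=((5290, 207), (596, 598, 4170)))
out = nn.n.activate([0.1, -0.3, 0.7, 0.2, -0.5])  # 2 features for state 0, then 3 for state 1
print(out)  # a single tanh output in (-1, 1)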
Example 11: generate_training_set
class MLP:
    # class attributes hold the classes themselves; instances overwrite
    # them with real objects in make_dataset() and training()
    data = SupervisedDataSet
    net = FeedForwardNetwork

    def generate_training_set(self):
        random.seed()
        # empty() returns uninitialized arrays; every cell is overwritten below
        ind = floor(empty((2000, 4)))
        outd = floor(empty((2000, 2)))
        res = array((ind, outd))
        print ind
        print
        print outd
        print
        print res
        for i in range(2000):
            n = random.getrandbits(1)
            if n == 0:
                a = random.randint(0, 100)
                b = random.randint(0, 100)
                c = random.randint(100, 5000)
                d = random.randint(100, 5000)
                res[0][i][0] = a
                res[0][i][1] = b
                res[0][i][2] = c
                res[0][i][3] = d
                res[1][i][0] = 0
                res[1][i][1] = 1
            else:
                a = random.randint(100, 5000)
                b = random.randint(100, 5000)
                c = random.randint(0, 100)
                d = random.randint(0, 100)
                res[0][i][0] = a
                res[0][i][1] = b
                res[0][i][2] = c
                res[0][i][3] = d
                res[1][i][0] = 1
                res[1][i][1] = 0
        for i in range(2000):
            print res[0][i][0], res[0][i][1], res[0][i][2], res[0][i][3], " out", res[1][i][0], res[1][i][1]
        return res

    def getFullDataSet(self):
        res = zeros((50 ** 4, 4))
        a = 0
        b = 0
        c = 0
        d = 0
        for i in range(len(res)):
            if a % 50 == 0:
                a = 0
            a = a + 1
            if i % 2 == 0:
                if b % 50 == 0:
                    b = 0
                b = b + 1
            if i % 4 == 0:
                if c % 50 == 0:
                    c = 0
                c = c + 1
            if i % 8 == 0:
                if d % 50 == 0:
                    d = 0
                d = d + 1
            res[i][0] = a
            res[i][1] = b
            res[i][2] = c
            res[i][3] = d
        res += 75
        return res

    def make_dataset(self):
        """
        Creates a set of training data with 4-dimensional input and 2-dimensional
        output, showing what the dataset should look like.
        """
        self.data = SupervisedDataSet(4, 2)
        self.data.addSample((1, 1, 150, 150), (0, 1))
        self.data.addSample((1, 1, 199, 142), (0, 1))
        self.data.addSample((150, 120, 43, 12), (1, 0))
        self.data.addSample((198, 123, 54, 65), (1, 0))
        return self.data

    def training(self, d):
        """
        Builds a network, trains it, and returns it.
#......... part of the code is omitted here .........
Example 12: _build_network
def _build_network():
    logger.info("Building network...")
    net = FeedForwardNetwork()
    inp = LinearLayer(IMG_WIDTH * IMG_HEIGHT * 2)
    h1_image_width = IMG_WIDTH - FIRST_CONVOLUTION_FILTER + 1
    h1_image_height = IMG_HEIGHT - FIRST_CONVOLUTION_FILTER + 1
    h1_full_width = h1_image_width * CONVOLUTION_MULTIPLIER * NUMBER_OF_IMAGES
    h1_full_height = h1_image_height * CONVOLUTION_MULTIPLIER
    h1 = SigmoidLayer(h1_full_width * h1_full_height)
    h2_width = h1_full_width / 2
    h2_height = h1_full_height / 2
    h2 = LinearLayer(h2_width * h2_height)
    h3_image_width = h2_width / CONVOLUTION_MULTIPLIER / NUMBER_OF_IMAGES - SECOND_CONVOLUTION_FILTER + 1
    h3_image_height = h2_height / CONVOLUTION_MULTIPLIER - SECOND_CONVOLUTION_FILTER + 1
    h3_full_width = h3_image_width * (CONVOLUTION_MULTIPLIER * 2) * NUMBER_OF_IMAGES
    h3_full_height = h3_image_height * (CONVOLUTION_MULTIPLIER * 2)
    h3 = SigmoidLayer(h3_full_width * h3_full_height)
    h4_full_width = h3_image_width - MERGE_FILTER
    h4_full_height = h3_image_height - MERGE_FILTER
    h4 = SigmoidLayer(h4_full_width * h4_full_height)
    logger.info("BASE IMG: %d x %d" % (IMG_WIDTH, IMG_HEIGHT))
    logger.info("First layer IMG: %d x %d" % (h1_image_width, h1_image_height))
    logger.info("First layer FULL: %d x %d" % (h1_full_width, h1_full_height))
    logger.info("Second layer FULL: %d x %d" % (h2_width, h2_height))
    logger.info("Third layer IMG: %d x %d" % (h3_image_width, h3_image_height))
    logger.info("Third layer FULL: %d x %d" % (h3_full_width, h3_full_height))
    logger.info("Fourth layer FULL: %d x %d" % (h4_full_width, h4_full_height))
    outp = SoftmaxLayer(2)
    h5 = SigmoidLayer(h4_full_width * h4_full_height)
    # add modules
    net.addOutputModule(outp)
    net.addInputModule(inp)
    net.addModule(h1)
    net.addModule(h2)
    net.addModule(h3)
    net.addModule(h4)
    net.addModule(h5)
    # create connections
    for i in range(NUMBER_OF_IMAGES):
        _add_convolutional_connection(
            net=net,
            h1=inp,
            h2=h1,
            filter_size=FIRST_CONVOLUTION_FILTER,
            multiplier=CONVOLUTION_MULTIPLIER,
            input_width=IMG_WIDTH * 2,
            input_height=IMG_HEIGHT,
            output_width=h1_full_width,
            output_height=h1_full_height,
            offset_x=h1_image_width * i,
            offset_y=0,
            size_x=h1_image_width,
            size_y=h1_image_height
        )
    _add_pool_connection(
        net=net,
        h1=h1,
        h2=h2,
        input_width=h1_full_width,
        input_height=h1_full_height
    )
    for i in range(NUMBER_OF_IMAGES * CONVOLUTION_MULTIPLIER):
        for j in range(CONVOLUTION_MULTIPLIER):
            _add_convolutional_connection(
                net=net,
                h1=h2,
                h2=h3,
                filter_size=SECOND_CONVOLUTION_FILTER,
                multiplier=CONVOLUTION_MULTIPLIER,
                input_width=h2_width,
                input_height=h2_height,
                output_width=h3_full_width,
                output_height=h3_full_height,
                offset_x=h3_image_width * i,
                offset_y=h3_image_height * j,
                size_x=h3_image_width,
                size_y=h3_image_height
            )
    _merge_connection(
        net=net,
        h1=h3,
        h2=h4,
        filter_size=MERGE_FILTER,
        input_width=h3_full_width,
        input_height=h3_full_height,
        output_width=h4_full_width,
        output_height=h4_full_height
    )
#......... part of the code is omitted here .........
Example 13: __init__
def __init__(self, **args):
    FeedForwardNetwork.__init__(self, **args)
Example 14: buildSubsamplingNetwork
def buildSubsamplingNetwork():
    """ Builds a network with subsampling connections. """
    n = FeedForwardNetwork()
    n.addInputModule(LinearLayer(6, 'in'))
    n.addOutputModule(LinearLayer(1, 'out'))
    n.addConnection(SubsamplingConnection(n['in'], n['out'], inSliceTo=4))
    n.addConnection(SubsamplingConnection(n['in'], n['out'], inSliceFrom=4))
    n.sortModules()
    return n
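The two SubsamplingConnections split the six inputs into a group of four and a group of two, each pooled onto the single output neuron. A quick activation check, assuming buildSubsamplingNetwork and its imports are in scope:

n = buildSubsamplingNetwork()
print(n.activate([1., 1., 1., 1., 1., 1.]))  # one output value
print(len(n.params))  # presumably one shared weight per subsampling connection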
Example 15: buildnet
def buildnet(modules):
    net = FeedForwardNetwork(name='mynet')
    net.addInputModule(modules['in'])
    net.addModule(modules['hidden'])
    net.addOutputModule(modules['out'])
    net.addModule(modules['bias'])
    net.addConnection(modules['in_to_hidden'])
    net.addConnection(modules['bias_to_hidden'])
    net.addConnection(modules['bias_to_out'])
    if 'hidden2' in modules:
        net.addModule(modules['hidden2'])
        net.addConnection(modules['hidden_to_hidden2'])
        net.addConnection(modules['bias_to_hidden2'])
        net.addConnection(modules['hidden2_to_out'])
    else:
        net.addConnection(modules['hidden_to_out'])
    net.sortModules()
    return net
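The helper expects the modules dict to be pre-built by the caller. A minimal sketch of such a dict for the single-hidden-layer branch; all sizes below are assumptions for illustration:

from pybrain.structure import LinearLayer, SigmoidLayer, BiasUnit, FullConnection

modules = {
    'in': LinearLayer(2, name='in'),
    'hidden': SigmoidLayer(4, name='hidden'),
    'out': LinearLayer(1, name='out'),
    'bias': BiasUnit(name='bias'),
}
modules['in_to_hidden'] = FullConnection(modules['in'], modules['hidden'])
modules['hidden_to_out'] = FullConnection(modules['hidden'], modules['out'])
modules['bias_to_hidden'] = FullConnection(modules['bias'], modules['hidden'])
modules['bias_to_out'] = FullConnection(modules['bias'], modules['out'])

net = buildnet(modules)
print(net.activate([0.5, -0.5]))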