本文整理汇总了Python中pybrain.structure.FeedForwardNetwork._setParameters方法的典型用法代码示例。如果您正苦于以下问题:Python FeedForwardNetwork._setParameters方法的具体用法?Python FeedForwardNetwork._setParameters怎么用?Python FeedForwardNetwork._setParameters使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类pybrain.structure.FeedForwardNetwork
的用法示例。
在下文中一共展示了FeedForwardNetwork._setParameters方法的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: importCatDogANN
# 需要导入模块: from pybrain.structure import FeedForwardNetwork [as 别名]
# 或者: from pybrain.structure.FeedForwardNetwork import _setParameters [as 别名]
def importCatDogANN(fileName = root.path()+"/res/recCatDogANN"):
    """Reconstruct the stored cat-vs-dog feed-forward classifier.

    Builds a fixed 7500 -> 9000 -> 2 topology (linear in/out, sigmoid
    hidden) and overwrites its weights with the values saved in
    ``res/cat_dog_params.txt.npy``.

    NOTE(review): ``fileName`` is accepted but never used -- the weight
    file path is hard-coded below. Confirm whether it was meant to be
    honored.

    Returns the ready-to-use :class:`FeedForwardNetwork`.
    """
    net = FeedForwardNetwork()
    layer_in = LinearLayer(7500, name='in')
    layer_hidden = SigmoidLayer(9000, name='hidden')
    layer_out = LinearLayer(2, name='out')
    net.addInputModule(layer_in)
    net.addModule(layer_hidden)
    net.addOutputModule(layer_out)
    net.addConnection(FullConnection(net['in'], net['hidden'], name='c1'))
    net.addConnection(FullConnection(net['hidden'], net['out'], name='c2'))
    # sortModules() finalizes the topology; the net is unusable before it.
    net.sortModules()
    weights = np.load(root.path()+'/res/cat_dog_params.txt.npy')
    # _setParameters is PyBrain's (private) bulk weight setter.
    net._setParameters(weights)
    return net
示例2: StateToActionNetwork
# 需要导入模块: from pybrain.structure import FeedForwardNetwork [as 别名]
# 或者: from pybrain.structure.FeedForwardNetwork import _setParameters [as 别名]
def StateToActionNetwork(genome=None):
    """Build a 12-input / 4-output linear feed-forward policy network.

    If *genome* is None the weights are unpickled from the file "seed2"
    (the baseline policy); otherwise *genome* is used directly as the
    flat weight vector.

    NOTE(review): the original comment claims a [12,12,4] topology, but
    no hidden layer is ever added -- the net is a direct 12 -> 4 map.
    Confirm which was intended.

    Fixes over the original: ``genome is None`` instead of ``== None``,
    and the pickle file is closed via a context manager instead of
    leaking the handle from ``pickle.load(open("seed2"))``.
    """
    from pybrain.structure import FeedForwardNetwork,LinearLayer,TanhLayer,FullConnection
    network = FeedForwardNetwork()
    inLayer = LinearLayer(12)
    outLayer = LinearLayer(4)
    network.addInputModule(inLayer)
    network.addOutputModule(outLayer)
    if genome is None:
        import pickle
        # Text-mode open kept for behavior parity with the Python-2-era
        # original; on Python 3 pickle would require mode "rb".
        with open("seed2") as seed_file:
            weights = pickle.load(seed_file)
    else:
        weights = genome
    in_to_out = FullConnection(inLayer, outLayer)
    network.addConnection(in_to_out)
    network.sortModules()
    network._setParameters(weights)
    return network
示例3: lmsTrain
# 需要导入模块: from pybrain.structure import FeedForwardNetwork [as 别名]
# 或者: from pybrain.structure.FeedForwardNetwork import _setParameters [as 别名]
# Example 3 fragment: fine-tune the output layer of a digit classifier
# with an external least-mean-squares routine, then evaluate.
# Relies on names defined outside this excerpt: in_hidden, hidden_out,
# labels, lmsTrain, dataSet, testLabels.
network.addConnection(in_hidden)
network.addConnection(hidden_out)
network.sortModules()
# x aliases the network's flat parameter array (shared, not a copy).
x = network.params
# One-hot encode the integer labels (10 classes, presumably digits 0-9
# -- TODO confirm) into `targets`.
for h in labels:
    j = [0,0,0,0,0,0,0,0,0,0]
    j[h] = 1
    targets += [j]
newParams = lmsTrain(network, dataSet, targets, 20)
newParams = newParams.flatten()
# Overwrite only the trailing 784*10 weights -- assumes the last
# connection is 784 inputs x 10 outputs; TODO confirm against topology.
x[(len(x) - (784 * 10)):] = newParams
network._setParameters(p=x)
activations = np.zeros(10)
results = []
# Evaluate: one-hot of the arg-max activation per sample.
# NOTE(review): `results += [1]` appends the constant 1, discarding
# `activations` -- looks like it should be `results += [activations]`;
# verify against the original program. Also note the loop rebinds `x`,
# clobbering the parameter alias above.
for x in dataSet:
    activations = np.zeros(10)
    r = network.activate(x)
    activations[np.argmax(r)] = 1
    results += [1]
# One-hot encode the test labels for later comparison.
testTargets = []
for x in testLabels:
    h = np.zeros(10)
    h[x] = 1
    testTargets += [h]
示例4: layer
# 需要导入模块: from pybrain.structure import FeedForwardNetwork [as 别名]
# 或者: from pybrain.structure.FeedForwardNetwork import _setParameters [as 别名]
# Example 4 fragment: finish building a mixture-density network `n`
# (created outside this excerpt, with an 'in' module already added),
# randomize its weights, and train on a noisy inverse-sine dataset.
N_GAUSSIANS = 3
n.addOutputModule(MixtureDensityLayer(dim=1, name='out', mix=N_GAUSSIANS))
# Bias unit feeds both the output and the hidden layer.
n.addModule(BiasUnit(name = 'bias'))
n.addConnection(FullConnection(n['bias'], n['out']))
# Single hidden layer of 5 sigmoid units.
n.addModule(SigmoidLayer(5, name='hidden'))
n.addConnection(FullConnection(n['bias'], n['hidden']))
# Wire input -> hidden -> output.
n.addConnection(FullConnection(n['in'], n['hidden']))
n.addConnection(FullConnection(n['hidden'], n['out']))
n.sortModules()
# Small random init over all paramdim weights via the private setter.
n._setParameters(np.random.uniform(-0.1, 0.1, size=n.paramdim))
# Build some data: x is a noisy, multi-valued function of y (the classic
# inverted-sine benchmark for mixture-density nets).
y = np.arange(0.0, 1.0, 0.005).reshape(200,1)
x = (
    y +
    0.3 * np.sin(2 * np.pi * y) +
    np.random.uniform(-0.1, 0.1, y.size).reshape(y.size, 1)
)
dataset = SupervisedDataSet(1, 1)
dataset.setField('input', x)
dataset.setField('target', y)
# Train with the mixture-density variant of RProp-.
trainer = RPropMinusTrainerMix(n, dataset=dataset, verbose=True,
                               weightdecay=0.05)
示例5: open
# 需要导入模块: from pybrain.structure import FeedForwardNetwork [as 别名]
# 或者: from pybrain.structure.FeedForwardNetwork import _setParameters [as 别名]
# Example 5 fragment: finish wiring network `n` (connections built
# outside this excerpt), then load-or-save its weights to myparam2.txt
# so repeated runs start from identical parameters.
n.addConnection(in_to_hidden)
n.addConnection(bias_to_hidden)
n.addConnection(bias_to_out)
n.addConnection(hidden_to_out)
n.sortModules()
n.reset()
# Read the initial weight values from myparam2.txt if it exists;
# otherwise write the freshly initialized weights out, one per line.
filetoopen = os.path.join(os.getcwd(),'myparam2.txt')
if os.path.isfile(filetoopen):
    myfile = open('myparam2.txt','r')
    c=[]
    for line in myfile:
        c.append(float(line))
    n._setParameters(c)
else:
    myfile = open('myparam2.txt','w')
    for i in n.params:
        myfile.write(str(i)+'\n')
# Closes whichever handle was opened above (read or write branch).
myfile.close()
# Activate the network on a single known sample (smoke test).
act = SupervisedDataSet(1,1)
act.addSample((0.2,),(0.880422606518061,))
n.activateOnDataset(act)
# Create the test DataSet: s = 0.5 + 0.4*sin(2*pi*x) on [0, 1].
x = numpy.arange(0.0, 1.0+0.01, 0.01)
s = 0.5+0.4*numpy.sin(2*numpy.pi*x)
tsts = SupervisedDataSet(1,1)
tsts.setField('input',x.reshape(len(x),1))
示例6: NeuralNet
# 需要导入模块: from pybrain.structure import FeedForwardNetwork [as 别名]
# 或者: from pybrain.structure.FeedForwardNetwork import _setParameters [as 别名]
class NeuralNet(regression):
'''
#deprecated
def __init__(self, inputDim, outputDim):
\'''
Initializes class parameters
Input:
\'''
regression.__init__(self,inputDim, outputDim)
#self.net = buildNetwork(inputDim, outputDim)
self.net = FeedForwardNetwork()
inLayer = LinearLayer(inputDim)
hiddenLayer1 = TanhLayer(10)
hiddenLayer2 = TanhLayer(10)
outLayer = SigmoidLayer(outputDim)
self.net.addInputModule(inLayer)
self.net.addModule(hiddenLayer1)
self.net.addModule(hiddenLayer2)
self.net.addOutputModule(outLayer)
in_to_hidden1 = FullConnection(inLayer, hiddenLayer1)
hidden1_to_hidden2=FullConnection(hiddenLayer1, hiddenLayer2)
hidden2_to_out = FullConnection(hiddenLayer2, outLayer)
self.net.addConnection(in_to_hidden1)
self.net.addConnection(hidden1_to_hidden2)
self.net.addConnection(hidden2_to_out)
self.net.sortModules()
self.shape=self.net.params.shape
self.ds = SupervisedDataSet(self.inputDimension, self.outputDimension)
'''
def __init__(self, rs):
    """Build a configurable feed-forward net from the settings object *rs*.

    Expected attributes on rs (presumably a run-settings/config object --
    TODO confirm): learningRate, momentum, inputLayer/outputLayer (keys
    into the module-level layersDict), inputDim/outputDim,
    hiddenLayers (list of [layer_type_key, size] pairs; may be empty),
    and bias (bool).
    """
    regression.__init__(self,rs)
    self.learningRate=rs.learningRate
    self.momentum=rs.momentum
    self.net = FeedForwardNetwork()
    # Input layer, type chosen via layersDict.
    inLayer = layersDict[rs.inputLayer](rs.inputDim)
    self.net.addInputModule(inLayer)
    # Output layer.
    outLayer = layersDict[rs.outputLayer](rs.outputDim)
    self.net.addOutputModule(outLayer)
    # No hidden layer: direct input -> output connection.
    if(len(rs.hiddenLayers)==0):
        in_to_out = FullConnection(inLayer, outLayer)
        self.net.addConnection(in_to_out)
        if(rs.bias==True):
            bias= BiasUnit('bias')
            self.net.addModule(bias)
            bias_to_out = FullConnection(bias, outLayer)
            self.net.addConnection(bias_to_out)
    else :
        # Instantiate each hidden layer from its [type_key, size] spec.
        hiddenLayers=[]
        for layer in rs.hiddenLayers:
            tmp=layersDict[layer[0]](layer[1])
            self.net.addModule(tmp)
            hiddenLayers.append(tmp)
        # Connect input to the first hidden layer.
        in_to_hidden=FullConnection(inLayer,hiddenLayers[0])
        self.net.addConnection(in_to_hidden)
        # Chain consecutive hidden layers. `i=0` is not redundant: when
        # there is exactly one hidden layer the loop body never runs and
        # i must still index hiddenLayers[0] for the output connection.
        i=0
        for i in range(1,len(hiddenLayers)):
            hidden_to_hidden=FullConnection(hiddenLayers[i-1],hiddenLayers[i])
            self.net.addConnection(hidden_to_hidden)
        # Connect the last hidden layer to the output layer.
        hidden_to_out= FullConnection(hiddenLayers[i],outLayer)
        self.net.addConnection(hidden_to_out)
        if(rs.bias==True):
            # Bias feeds every hidden layer and the output layer.
            bias=BiasUnit('bias')
            self.net.addModule(bias)
            for layer in hiddenLayers :
                bias_to_hidden = FullConnection(bias, layer)
                self.net.addConnection(bias_to_hidden)
            bias_to_out = FullConnection(bias, outLayer)
            self.net.addConnection(bias_to_out)
    # Finalize topology, then initialize all weights ~ N(0, 0.1).
    self.net.sortModules()
    self.shape=self.net.params.shape
    self.net._setParameters(np.random.normal(0.0,0.1,self.shape))
#.........这里部分代码省略.........
示例7: __init__
# 需要导入模块: from pybrain.structure import FeedForwardNetwork [as 别名]
# 或者: from pybrain.structure.FeedForwardNetwork import _setParameters [as 别名]
# NOTE(review): Python 2 code (`print error` statement below) -- will not
# run unmodified on Python 3.
class NNW:
    """Thin wrapper around a 3-layer PyBrain net (linear-sigmoid-linear)
    with backprop training helpers."""

    def __init__(self, num_input, num_hidden, num_output):
        # self.net = buildNetwork(num_input, num_hidden, num_output, bias = True)
        self.net = FeedForwardNetwork()
        self.num_input = num_input
        self.num_hidden = num_hidden
        self.num_output = num_output
        inLayer = LinearLayer(num_input, name='in')
        hiddenLayer1 = SigmoidLayer(num_hidden, name='hidden1')
        outLayer = LinearLayer(num_output, name='out')
        self.net.addInputModule(inLayer)
        self.net.addModule(hiddenLayer1)
        self.net.addOutputModule(outLayer)
        # Connections kept as attributes so getParameter() can expose
        # per-layer weights.
        self.in_to_hidden = FullConnection(inLayer, hiddenLayer1)
        self.hidden_to_out = FullConnection(hiddenLayer1, outLayer)
        self.net.addConnection(self.in_to_hidden)
        self.net.addConnection(self.hidden_to_out)
        self.net.sortModules()
        # Populated later via setTrainData().
        self.dataset = None

    def trainData(self, learningRate = 0.01, batch = True, maxEpochs = 100, continueEpochs = 10):
        """Train until validation error converges.

        NOTE(review): the `maxEpochs` and `continueEpochs` parameters are
        ignored -- the call below hard-codes maxEpochs=10000 and
        continueEpochs=10. Confirm which values were intended.
        """
        # http://pybrain.org/docs/api/supervised/trainers.html?highlight=backproptrainer#pybrain.supervised.trainers.BackpropTrainer
        # BackpropTrainer(module, dataset=None, learningrate=0.01, lrdecay=1.0, momentum=0.0, verbose=False, batchlearning=False, weightdecay=0.0)
        # things for setting:
        # 1. dataset
        # 2. learningrate: 0.01 ~ 0.25
        # 3. batchlearning: True or False
        trainer = BackpropTrainer(self.net, dataset = self.dataset, learningrate = learningRate, batchlearning = batch)
        # trainUntilConvergence(dataset=None, maxEpochs=None, verbose=None, continueEpochs=10, validationProportion=0.1)
        # things for setting:
        # 1. maxEpochs: at most that many epochs are trained.
        # 2. continueEpochs: Each time validation error hits a minimum, try for continueEpochs epochs to find a better one.
        # 3. validationProportion: ratio of the dataset for validation dataset.
        trainer.trainUntilConvergence(maxEpochs = 10000, continueEpochs = 10, validationProportion=0.2)
        # print error

    def trainOnce(self, learningRate = 0.01, batch = True, maxEpochs = 100, continueEpochs = 10):
        """Run a single training epoch and print its error (Python 2)."""
        # http://pybrain.org/docs/api/supervised/trainers.html?highlight=backproptrainer#pybrain.supervised.trainers.BackpropTrainer
        # BackpropTrainer(module, dataset=None, learningrate=0.01, lrdecay=1.0, momentum=0.0, verbose=False, batchlearning=False, weightdecay=0.0)
        # things for setting:
        # 1. dataset
        # 2. learningrate: 0.01 ~ 0.25
        # 3. batchlearning: True or False
        trainer = BackpropTrainer(self.net, dataset = self.dataset, learningrate = learningRate, batchlearning = batch)
        error = trainer.train()
        print error

    def setTrainData(self, train, target):
        """Wrap parallel train/target sequences in a SupervisedDataSet."""
        ds = SupervisedDataSet(self.num_input, self.num_output)
        dataSize = len(train) # should be same as len(target)
        for i in range(dataSize):
            ds.addSample(train[i], target[i])
        self.dataset = ds

    def activate(self, inputData):
        """Forward-propagate inputData and return the net's output."""
        # self.net.sortModules()
        decision = self.net.activate(inputData)
        return decision

    def getParameter(self, laynumber = 0):
        """Return weights: 0 = whole net, 1 = in->hidden, 2 = hidden->out.
        NOTE(review): silently returns None for any other laynumber."""
        if laynumber == 0:
            return self.net.params
        elif laynumber == 1:
            return self.in_to_hidden.params
        elif laynumber == 2:
            return self.hidden_to_out.params

    def setParameters(self, para):
        """Overwrite all network weights with the flat vector *para*."""
        self.net._setParameters(para)
示例8: Slave
# 需要导入模块: from pybrain.structure import FeedForwardNetwork [as 别名]
# 或者: from pybrain.structure.FeedForwardNetwork import _setParameters [as 别名]
#.........这里部分代码省略.........
self.net.addConnection(FullConnection(self.net['in'], self.hiddenLayers[0]))
for h1, h2 in zip(self.hiddenLayers[:-1], self.hiddenLayers[1:]):
self.net.addConnection(FullConnection(self.net['networkBias'],h1))
self.net.addConnection(FullConnection(h1,h2))
if outPutBias:
self.net.addConnection(FullConnection(self.net['networkBias'],self.net['out']))
self.net.addConnection(FullConnection(self.hiddenLayers[-1],self.net['out']))
else:
if outPutBias:
self.net.addConnection(FullConnection(self.net['networkBias'],self.net['out']))
self.net.addConnection(FullConnection(self.net['in'],self.net['out']))
else:
# Definição da camada de entrada
if inLType == 0:
self.net.addInputModule(LinearLayer(inLayer,name='in'))
elif inLType == 1:
self.net.addInputModule(SigmoidLayer(inLayer,name='in'))
elif inLType == 2:
self.net.addInputModule(TanhLayer(inLayer,name='in'))
elif inLType == 3:
self.net.addInputModule(SoftmaxLayer(inLayer,name='in'))
elif inLType == 4:
self.net.addInputModule(GaussianLayer(inLayer,name='in'))
# Definição das camadas escondidas
self.hiddenLayers = []
if hLayersType == 0:
for i in range(0, hLayerNum):
self.hiddenLayers.append(LinearLayer(hiddenLayers[i]))
self.net.addModule(self.hiddenLayers[i])
elif hLayersType == 1:
for i in range(0, hLayerNum):
self.hiddenLayers.append(SigmoidLayer(hiddenLayers[i]))
self.net.addModule(self.hiddenLayers[i])
elif hLayersType == 2:
for i in range(0, hLayerNum):
self.hiddenLayers.append(TanhLayer(hiddenLayers[i]))
self.net.addModule(self.hiddenLayers[i])
elif hLayersType == 3:
for i in range(0, hLayerNum):
self.hiddenLayers.append(SoftmaxLayer(hiddenLayers[i]))
self.net.addModule(self.hiddenLayers[i])
elif hLayersType == 4:
for i in range(0, hLayerNum):
self.hiddenLayers.append(GaussianLayer(hiddenLayers[i]))
self.net.addModule(self.hiddenLayers[i])
# Definição da camada de saída
if outLType == 0:
self.net.addOutputModule(LinearLayer(outLayer,name='out'))
elif outLType == 1:
self.net.addOutputModule(SigmoidLayer(outLayer,name='out'))
elif outLType == 2:
self.net.addOutputModule(TanhLayer(outLayer,name='out'))
elif outLType == 3:
self.net.addOutputModule(SoftmaxLayer(inLayer,name='out'))
elif outLType == 4:
self.net.addOutputModule(GaussianLayer(outLayer,name='out'))
if self.hiddenLayers:
self.net.addConnection(FullConnection(self.net['in'], self.hiddenLayers[:1]))
for h1, h2 in zip(self.hiddenLayers[:-1], self.hiddenLayers[1:]):
self.net.addConnection(FullConnection(h1,h2))
self.net.addConnection(FullConnection(self.hiddenLayers[-1:],self.net['out']))
else:
self.net.addConnection(FullConnection(self.net['in'],self.net['out']))
# Termina de construir a rede e a monta corretamente
self.net.sortModules()
def setParameters(self, parameters):
    """Overwrite all network weights with the flat vector *parameters*
    (delegates to PyBrain's private bulk setter)."""
    self.net._setParameters(parameters)
def getParameters(self):
    """Return the current network weights as a plain Python list
    (suitable for serialization back to the master)."""
    return self.net.params.tolist()
def createDataSet(self, ds):
    """Copy *ds* (any iterable of (input, target) pairs exposing
    indim/outdim) into a fresh local SupervisedDataSet."""
    inp = ds.indim
    targ = ds.outdim
    self.ds = SupervisedDataSet(inp, targ)
    for i,t in ds:
        self.ds.addSample(i,t)
def updateDataSet(self, ds):
    """Replace the local dataset contents with *ds* and repoint the
    trainer at it. Assumes createDataSet() and createTrainer() were
    called first -- TODO confirm calling order with the master protocol."""
    # clear(True) empties samples while keeping the dataset's dimensions.
    self.ds.clear(True)
    for i,t in ds:
        self.ds.addSample(i,t)
    self.trainer.setData(self.ds)
def createTrainer(self, learnrate=0.01, ldecay=1.0, momentum=0.0, batchlearn=False, wdecay=0.0):
    """Create the BackpropTrainer bound to this slave's net and dataset.
    Parameters mirror BackpropTrainer's learningrate / lrdecay /
    momentum / batchlearning / weightdecay arguments."""
    self.trainer = BackpropTrainer(self.net, self.ds, learningrate=learnrate, lrdecay=ldecay, momentum=momentum, batchlearning=batchlearn, weightdecay=wdecay)
def trainNetwork(self):
    """Run one backprop training epoch over the current dataset."""
    self.trainer.train()
def loadNetwork(self, net):
    """Replace this slave's network with *net* (e.g. one pushed by the
    master). The explicit `del` is redundant -- rebinding alone would
    drop the old reference -- but is harmless."""
    del self.net
    self.net = net