This article collects typical usage examples of the Python method pybrain.structure.RecurrentNetwork.reset. If you have been wondering what RecurrentNetwork.reset does, how to call it, or what real code using it looks like, the curated examples below may help. You can also explore further usage examples of the class this method belongs to, pybrain.structure.RecurrentNetwork.
Below are 5 code examples of the RecurrentNetwork.reset method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
Example 1: construct_network
# Required import: from pybrain.structure import RecurrentNetwork [as alias]
# Or: from pybrain.structure.RecurrentNetwork import reset [as alias]
from pybrain.structure import (RecurrentNetwork, LinearLayer, SigmoidLayer,
                               BiasUnit, FullConnection)

def construct_network(input_len, output_len, hidden_nodes, is_elman=True):
    n = RecurrentNetwork()
    n.addInputModule(LinearLayer(input_len, name="i"))
    n.addModule(BiasUnit("b"))
    n.addModule(SigmoidLayer(hidden_nodes, name="h"))
    n.addOutputModule(LinearLayer(output_len, name="o"))
    n.addConnection(FullConnection(n["i"], n["h"]))
    n.addConnection(FullConnection(n["b"], n["h"]))
    n.addConnection(FullConnection(n["b"], n["o"]))
    n.addConnection(FullConnection(n["h"], n["o"]))
    if is_elman:
        # Elman-style recurrence (hidden -> hidden)
        n.addRecurrentConnection(FullConnection(n["h"], n["h"]))
    else:
        # Jordan-style recurrence (output -> hidden)
        n.addRecurrentConnection(FullConnection(n["o"], n["h"]))
    n.sortModules()
    n.reset()
    return n
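A brief usage sketch for the helper above; the dimensions below are made up for illustration, and the helper itself already calls sortModules() and reset() before returning:

# Hypothetical usage of construct_network (not part of the original example).
elman = construct_network(3, 2, 5, is_elman=True)
print(elman.activate([0.1, 0.2, 0.3]))
print(elman.activate([0.1, 0.2, 0.3]))  # differs: the hidden state is carried over
elman.reset()                           # clear the recurrent activations
print(elman.activate([0.1, 0.2, 0.3]))  # matches the first activation again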
Example 2:
# Required import: from pybrain.structure import RecurrentNetwork [as alias]
# Or: from pybrain.structure.RecurrentNetwork import reset [as alias]
""" We can modify our existing network 'n2' (a construction of 'n2' is sketched
after this example) and add a recurrent connection on the hidden layer: """
n2.addRecurrentConnection(FullConnection(n2['h'], n2['h'], name='rec'))
""" After every structural modification, if we want to use the network, we call 'sortModules()' again: """
n2.sortModules()
print n2
""" As the network is now recurrent, successive activations produce different outputs: """
print n2.activate([1, 2]),
print n2.activate([1, 2]),
print n2.activate([1, 2])
""" The 'reset()' method re-initializes the network, and with it sets the recurrent
activations to zero, so now we get the same results: """
n2.reset()
print n2.activate([1, 2]),
print n2.activate([1, 2]),
print n2.activate([1, 2])
""" This is already a good coverage of the basics, but if you're an advanced user
you might want to find out about the possibilities of nesting networks within
others, using weight-sharing, and more exotic types of networks, connections
and modules... but that goes beyond the scope of this tutorial.
"""
Example 3: RecurrentNetwork
# Required import: from pybrain.structure import RecurrentNetwork [as alias]
# Or: from pybrain.structure.RecurrentNetwork import reset [as alias]
if __name__ == "__main__":
    from pybrain.structure import RecurrentNetwork
    from pybrain.structure import LinearLayer
    from pybrain.structure import SigmoidLayer
    from pybrain.structure import FullConnection

    net = RecurrentNetwork()
    net.addInputModule(LinearLayer(2, "in"))
    net.addModule(SigmoidLayer(3, "hidden"))
    net.addOutputModule(LinearLayer(1, "out"))
    net.addConnection(FullConnection(net["in"], net["hidden"], "c1"))
    net.addConnection(FullConnection(net["hidden"], net["out"], "c2"))
    net.addRecurrentConnection(FullConnection(net["hidden"], net["hidden"], "c3-recurrent"))
    net.sortModules()

    print net
    # Successive activations differ because the hidden state is carried over.
    for i in xrange(5):
        print net.activate([2, 2])
    print "reset"
    # reset() zeroes the recurrent activations, so the sequence repeats.
    net.reset()
    for i in xrange(5):
        print net.activate([2, 2])
Example 4: LanguageLearner
# Required import: from pybrain.structure import RecurrentNetwork [as alias]
# Or: from pybrain.structure.RecurrentNetwork import reset [as alias]
import random

from pybrain.structure import RecurrentNetwork, LinearLayer, LSTMLayer, FullConnection
from pybrain.datasets import SequentialDataSet
from pybrain.supervised.trainers import BackpropTrainer


class LanguageLearner:

    __OUTPUT = "Sample at {0} epochs (prompt=\"{1}\", length={2}): {3}"

    def __init__(self, trainingText, hiddenLayers, hiddenNodes):
        self.__initialized = False
        with open(trainingText) as f:
            self.raw = f.read()
        self.characters = list(self.raw)
        self.rawData = list(map(ord, self.characters))
        # Map each distinct character code onto a dense index ("alphabet mapping").
        print("Creating alphabet mapping...")
        self.mapping = []
        for charCode in self.rawData:
            if charCode not in self.mapping:
                self.mapping.append(charCode)
        print("Mapping of " + str(len(self.mapping)) + " created.")
        print(str(self.mapping))
        print("Converting data to mapping...")
        self.data = []
        for charCode in self.rawData:
            self.data.append(self.mapping.index(charCode))
        print("Done.")
        # The target at each step is the next character in the text.
        self.dataIn = self.data[:-1:]
        self.dataOut = self.data[1::]
        self.inputs = 1
        self.hiddenLayers = hiddenLayers
        self.hiddenNodes = hiddenNodes
        self.outputs = 1

    def initialize(self, verbose):
        print("Initializing language learner...")
        self.verbose = verbose
        # Create network and modules
        self.net = RecurrentNetwork()
        inp = LinearLayer(self.inputs, name="in")
        hiddenModules = []
        for i in range(0, self.hiddenLayers):
            hiddenModules.append(LSTMLayer(self.hiddenNodes, name=("hidden-" + str(i + 1))))
        outp = LinearLayer(self.outputs, name="out")
        # Add modules to the network with recurrence
        self.net.addOutputModule(outp)
        self.net.addInputModule(inp)
        for module in hiddenModules:
            self.net.addModule(module)
        # Create connections
        self.net.addConnection(FullConnection(self.net["in"], self.net["hidden-1"]))
        for i in range(0, len(hiddenModules) - 1):
            self.net.addConnection(FullConnection(self.net["hidden-" + str(i + 1)], self.net["hidden-" + str(i + 2)]))
            self.net.addRecurrentConnection(FullConnection(self.net["hidden-" + str(i + 1)], self.net["hidden-" + str(i + 1)]))
        self.net.addRecurrentConnection(FullConnection(self.net["hidden-" + str(len(hiddenModules))],
                                                       self.net["hidden-" + str(len(hiddenModules))]))
        self.net.addConnection(FullConnection(self.net["hidden-" + str(len(hiddenModules))], self.net["out"]))
        self.net.sortModules()
        # One single-step sequence per (current character, next character) pair.
        self.trainingSet = SequentialDataSet(self.inputs, self.outputs)
        for x, y in zip(self.dataIn, self.dataOut):
            self.trainingSet.newSequence()
            self.trainingSet.appendLinked([x], [y])
        self.net.randomize()
        print("Neural network initialized with structure:")
        print(self.net)
        self.trainer = BackpropTrainer(self.net, self.trainingSet, verbose=verbose)
        self.__initialized = True
        print("Successfully initialized network.")

    def train(self, epochs, frequency, prompt, length):
        if not self.__initialized:
            raise Exception("Attempted to train uninitialized LanguageLearner")
        print("Beginning training for " + str(epochs) + " epochs...")
        if frequency >= 0:
            print(LanguageLearner.__OUTPUT.format(0, prompt, length, self.sample(prompt, length)))
        for i in range(1, epochs):
            print("Error at " + str(i) + " epochs: " + str(self.trainer.train()))
            if i % frequency == 0:
                print(LanguageLearner.__OUTPUT.format(i, prompt, length, self.sample(prompt, length)))
        print("Completed training.")

    def sample(self, prompt, length):
        # Clear the recurrent state before generating a fresh sample.
        self.net.reset()
        if prompt is None:
            prompt = chr(random.choice(self.mapping))
        output = prompt
        charCode = ord(prompt)
        for i in range(0, length):
            sampledResult = self.net.activate([charCode])
            charCode = int(round(sampledResult[0]))
            if charCode < 0 or charCode >= len(self.mapping):
                return output + "#TERMINATED_SAMPLE(reason: learner guessed invalid character)"
            output += chr(self.mapping[charCode])
        return output
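A usage sketch for the class above; the corpus path, layer sizes, epoch count and prompt below are placeholders, and the imports assumed are the ones added at the top of this example:

# Hypothetical driver code for LanguageLearner (not part of the original example).
learner = LanguageLearner("corpus.txt", hiddenLayers=2, hiddenNodes=32)  # placeholder corpus path
learner.initialize(verbose=True)
# Print the training error each epoch and a 100-character sample every 10 epochs.
learner.train(epochs=50, frequency=10, prompt="t", length=100)
print(learner.sample("t", 100))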
Example 5: RecurrentNetwork
# Required import: from pybrain.structure import RecurrentNetwork [as alias]
# Or: from pybrain.structure.RecurrentNetwork import reset [as alias]
from pybrain.structure import RecurrentNetwork, LinearLayer, SigmoidLayer
from pybrain.structure import FullConnection
n = RecurrentNetwork()
inLayer = LinearLayer(2, name="Input")
hiddenLayer = SigmoidLayer(3, name="Hidden")
outLayer = LinearLayer(1, name="Output")
n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)
n.addConnection(FullConnection(inLayer, hiddenLayer, name="C_IH"))
n.addConnection(FullConnection(hiddenLayer, outLayer, name="C_HO"))
n.addRecurrentConnection(FullConnection(n['Hidden'], n['Hidden'], name='C_HH'))
n.sortModules()
n.reset()
print n.activate((2,2))
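One common reason to call reset() is to keep independent sequences from influencing each other. A minimal sketch reusing the network 'n' from this example; the sequence data below is made up for illustration:

# Hypothetical: evaluate two independent input sequences with the same network,
# clearing the recurrent state before each one so the sequences do not interact.
sequences = [[(1, 1), (2, 2)], [(3, 3), (4, 4)]]  # made-up 2-dimensional inputs
for seq in sequences:
    n.reset()                     # zero the recurrent activations
    for sample in seq:
        print(n.activate(sample))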