本文整理匯總了Python中blocks.bricks.recurrent.SimpleRecurrent方法的典型用法代碼示例。如果您正苦於以下問題:Python recurrent.SimpleRecurrent方法的具體用法?Python recurrent.SimpleRecurrent怎麽用?Python recurrent.SimpleRecurrent使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在模塊blocks.bricks.recurrent的用法示例。
在下文中一共展示了recurrent.SimpleRecurrent方法的3個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: __init__
# 需要導入模塊: from blocks.bricks import recurrent [as 別名]
# 或者: from blocks.bricks.recurrent import SimpleRecurrent [as 別名]
def __init__(self, dimension, alphabet_size, **kwargs):
    """Assemble a lookup table and an attention-based sequence generator.

    Parameters
    ----------
    dimension : int
        Hidden-state and embedding size shared by all bricks.
    alphabet_size : int
        Number of distinct symbols the generator emits.
    """
    super(SimpleGenerator, self).__init__(**kwargs)

    # Symbol embeddings for the input sequence.
    embedding = LookupTable(alphabet_size, dimension)

    # Plain tanh RNN over which the generator unrolls.
    rnn = SimpleRecurrent(
        dim=dimension, activation=Tanh(), name="transition")

    # Content-based attention over the attended (encoded) sequence.
    attn = SequenceContentAttention(
        state_names=rnn.apply.states,
        attended_dim=dimension,
        match_dim=dimension,
        name="attention")

    # Readout combines the RNN state and the glimpse, emits via softmax
    # and feeds the previous symbol back through a lookup table.
    out = Readout(
        readout_dim=alphabet_size,
        source_names=[rnn.apply.states[0],
                      attn.take_glimpses.outputs[0]],
        emitter=SoftmaxEmitter(name="emitter"),
        feedback_brick=LookupFeedback(alphabet_size, dimension),
        name="readout")

    self.lookup = embedding
    self.generator = SequenceGenerator(
        readout=out, transition=rnn, attention=attn, name="generator")
    self.children = [self.lookup, self.generator]
示例2: rnn_layer
# 需要導入模塊: from blocks.bricks import recurrent [as 別名]
# 或者: from blocks.bricks.recurrent import SimpleRecurrent [as 別名]
def rnn_layer(dim, h, n):
    """Apply a linear projection followed by a simple RNN to ``h``.

    Parameters
    ----------
    dim : int
        Input and output dimensionality of both bricks.
    h : TensorVariable
        Input sequence fed into the layer.
    n : int
        Index appended to the brick names so stacked layers stay unique.

    Returns
    -------
    TensorVariable
        Hidden states produced by the recurrent brick.
    """
    suffix = str(n)
    projection = Linear(input_dim=dim, output_dim=dim,
                        name='linear' + suffix)
    recurrent = SimpleRecurrent(dim=dim, activation=Tanh(),
                                name='rnn' + suffix)
    initialize([projection, recurrent])
    return recurrent.apply(projection.apply(h))
示例3: __init__
# 需要導入模塊: from blocks.bricks import recurrent [as 別名]
# 或者: from blocks.bricks.recurrent import SimpleRecurrent [as 別名]
def __init__(self, dimension, alphabet_size, **kwargs):
    """Wire a bidirectional RNN encoder to an attention-based decoder.

    Parameters
    ----------
    dimension : int
        Hidden-state and embedding size of the individual RNNs.
    alphabet_size : int
        Number of distinct symbols in the vocabulary.
    """
    super(WordReverser, self).__init__(**kwargs)

    # Encoder: two simple tanh RNNs run in opposite directions.
    encoder = Bidirectional(
        SimpleRecurrent(dim=dimension, activation=Tanh()))

    # Fork feeds every input sequence of the encoder prototype
    # except the mask; dims must match what the prototype expects.
    sequence_names = [name for name in encoder.prototype.apply.sequences
                      if name != 'mask']
    fork = Fork(sequence_names)
    fork.input_dim = dimension
    fork.output_dims = [encoder.prototype.get_dim(name)
                        for name in fork.input_names]

    # Symbol embeddings for the encoder input.
    embedding = LookupTable(alphabet_size, dimension)

    # Decoder transition RNN.
    rnn = SimpleRecurrent(
        dim=dimension, activation=Tanh(), name="transition")

    # The bidirectional encoder concatenates both directions,
    # hence the attended dimension is doubled.
    attn = SequenceContentAttention(
        state_names=rnn.apply.states,
        attended_dim=2 * dimension,
        match_dim=dimension,
        name="attention")

    # Readout mixes the RNN state with the glimpse, emits via softmax
    # and feeds the previous symbol back through a lookup table.
    out = Readout(
        readout_dim=alphabet_size,
        source_names=[rnn.apply.states[0],
                      attn.take_glimpses.outputs[0]],
        emitter=SoftmaxEmitter(name="emitter"),
        feedback_brick=LookupFeedback(alphabet_size, dimension),
        name="readout")

    self.lookup = embedding
    self.fork = fork
    self.encoder = encoder
    self.generator = SequenceGenerator(
        readout=out, transition=rnn, attention=attn, name="generator")
    self.children = [self.lookup, self.fork, self.encoder, self.generator]