This article collects typical usage examples of the Python attribute blocks.bricks.recurrent.LSTM. If you are wondering what recurrent.LSTM is for and how to use it in Python, the curated examples below may help. You can also explore the containing module, blocks.bricks.recurrent, for further detail.
The following presents 3 code examples of the recurrent.LSTM attribute, sorted by popularity by default.
Example 1: __init__
# Required import: from blocks.bricks import recurrent [as alias]
# Or: from blocks.bricks.recurrent import LSTM [as alias]
def __init__(self, image_feature_dim, embedding_dim, **kwargs):
    super(Encoder, self).__init__(**kwargs)
    self.image_embedding = Linear(
        input_dim=image_feature_dim,
        output_dim=embedding_dim,
        name="image_embedding")
    self.to_inputs = Linear(
        input_dim=embedding_dim,
        # 4x: the LSTM input stacks the input, forget, cell, and
        # output gate pre-activations
        output_dim=embedding_dim * 4,
        name="to_inputs")
    self.transition = LSTM(dim=embedding_dim, name="transition")
    self.children = [self.image_embedding,
                     self.to_inputs,
                     self.transition]
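For orientation, here is a minimal, hypothetical instantiation sketch. It assumes Encoder subclasses a Blocks base brick such as Initializable (the class header is not shown above); the dimension values are illustrative only:

from blocks.initialization import Constant, IsotropicGaussian

# Illustrative dimensions; an Initializable brick accepts *_init schemes
encoder = Encoder(image_feature_dim=4096, embedding_dim=512,
                  weights_init=IsotropicGaussian(0.01),
                  biases_init=Constant(0.),
                  name="encoder")
encoder.initialize()  # pushes the schemes down to the child bricks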
Example 2: lstm_layer
# Required import: from blocks.bricks import recurrent [as alias]
# Or: from blocks.bricks.recurrent import LSTM [as alias]
def lstm_layer(dim, h, n):
    linear = Linear(input_dim=dim, output_dim=dim * 4, name='linear' + str(n))
    lstm = LSTM(dim=dim, name='lstm' + str(n))
    initialize([linear, lstm])
    return lstm.apply(linear.apply(h))
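A usage sketch under stated assumptions: h is a Theano 3-D tensor shaped (time, batch, dim), and initialize is a helper the snippet relies on but does not show; the stand-in below is hypothetical:

import theano.tensor as T
from blocks.bricks import Linear
from blocks.bricks.recurrent import LSTM
from blocks.initialization import Constant, IsotropicGaussian

def initialize(bricks):
    # Hypothetical stand-in: assign schemes, then initialize parameters
    for brick in bricks:
        brick.weights_init = IsotropicGaussian(0.01)
        brick.biases_init = Constant(0.)
        brick.initialize()

x = T.tensor3('x')                             # (time, batch, dim)
states, cells = lstm_layer(dim=256, h=x, n=0)  # LSTM.apply returns both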
Example 3: make_bidir_lstm_stack
# Required import: from blocks.bricks import recurrent [as alias]
# Or: from blocks.bricks.recurrent import LSTM [as alias]
def make_bidir_lstm_stack(seq, seq_dim, mask, sizes, skip=True, name=''):
    bricks = []
    curr_dim = [seq_dim]
    curr_hidden = [seq]
    hidden_list = []
    for k, dim in enumerate(sizes):
        fwd_lstm_ins = [Linear(input_dim=d, output_dim=4 * dim,
                               name='%s_fwd_lstm_in_%d_%d' % (name, k, l))
                        for l, d in enumerate(curr_dim)]
        fwd_lstm = LSTM(dim=dim, activation=Tanh(),
                        name='%s_fwd_lstm_%d' % (name, k))
        bwd_lstm_ins = [Linear(input_dim=d, output_dim=4 * dim,
                               name='%s_bwd_lstm_in_%d_%d' % (name, k, l))
                        for l, d in enumerate(curr_dim)]
        bwd_lstm = LSTM(dim=dim, activation=Tanh(),
                        name='%s_bwd_lstm_%d' % (name, k))
        bricks = bricks + [fwd_lstm, bwd_lstm] + fwd_lstm_ins + bwd_lstm_ins
        # sum the projections of all current inputs for each direction
        fwd_tmp = sum(x.apply(v) for x, v in zip(fwd_lstm_ins, curr_hidden))
        bwd_tmp = sum(x.apply(v) for x, v in zip(bwd_lstm_ins, curr_hidden))
        fwd_hidden, _ = fwd_lstm.apply(fwd_tmp, mask=mask)
        # the backward LSTM runs over the time-reversed sequence
        bwd_hidden, _ = bwd_lstm.apply(bwd_tmp[::-1], mask=mask[::-1])
        hidden_list = hidden_list + [fwd_hidden, bwd_hidden]
        if skip:
            # skip connections: feed the raw input into every layer
            curr_hidden = [seq, fwd_hidden, bwd_hidden[::-1]]
            curr_dim = [seq_dim, dim, dim]
        else:
            curr_hidden = [fwd_hidden, bwd_hidden[::-1]]
            curr_dim = [dim, dim]
    return bricks, hidden_list
Author: thomasmesnard | Project: DeepMind-Teaching-Machines-to-Read-and-Comprehend | Lines: 31 | Source: attentive_reader.py
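A hedged usage sketch: it assumes seq is a Theano 3-D tensor shaped (time, batch, seq_dim) and mask a matrix shaped (time, batch); the dimensions and name are illustrative only:

import theano.tensor as T

seq = T.tensor3('seq')    # (time, batch, seq_dim)
mask = T.matrix('mask')   # (time, batch)
bricks, hiddens = make_bidir_lstm_stack(seq, seq_dim=128, mask=mask,
                                        sizes=[256, 256], skip=True,
                                        name='enc')
# hiddens = [fwd_1, bwd_1, fwd_2, bwd_2]; assign weights_init/biases_init
# to each returned brick and call brick.initialize() before compiling.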