本文整理汇总了Python中keras.layers.recurrent.SimpleRNN方法的典型用法代码示例。如果您正苦于以下问题:Python recurrent.SimpleRNN方法的具体用法?Python recurrent.SimpleRNN怎么用?Python recurrent.SimpleRNN使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在模块keras.layers.recurrent的用法示例。
在下文中一共展示了recurrent.SimpleRNN方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_masking_layer
# 需要导入模块: from keras.layers import recurrent [as 别名]
# 或者: from keras.layers.recurrent import SimpleRNN [as 别名]
def test_masking_layer():
    """Regression test for keras-team/keras#1567.

    A Masking layer followed by a SimpleRNN must compile and train
    without error, for both the rolled and the unrolled RNN
    implementations.
    """
    inputs = np.random.random((6, 3, 4))
    targets = np.abs(np.random.random((6, 3, 5)))
    targets /= targets.sum(axis=-1, keepdims=True)
    # Exercise both code paths of the recurrent layer, in the same
    # order as before: rolled first, then unrolled.
    for unroll in (False, True):
        model = Sequential()
        model.add(Masking(input_shape=(3, 4)))
        model.add(recurrent.SimpleRNN(units=5, return_sequences=True,
                                      unroll=unroll))
        model.compile(loss='categorical_crossentropy', optimizer='adam')
        model.fit(inputs, targets, epochs=1, batch_size=100, verbose=1)
示例2: create_rnn
# 需要导入模块: from keras.layers import recurrent [as 别名]
# 或者: from keras.layers.recurrent import SimpleRNN [as 别名]
def create_rnn():
    """Create a recurrent neural network to compute a control policy.

    The network is stateful so that successive single-step predictions
    carry hidden state across time steps; ``batch_input_shape=(1, 1, 3)``
    fixes batch size 1, one timestep, three input features.

    Reference:
    Koutnik, Jan, Jurgen Schmidhuber, and Faustino Gomez. "Evolving deep
    unsupervised convolutional networks for vision-based reinforcement
    learning." Proceedings of the 2014 conference on Genetic and
    evolutionary computation. ACM, 2014.

    Returns:
        A compiled Keras ``Sequential`` model mapping 3 inputs to 3 outputs.
    """
    model = Sequential()
    # ``units=`` replaces the deprecated Keras 1 ``output_dim=`` keyword,
    # matching the Keras 2 API already used elsewhere in this file.
    model.add(SimpleRNN(units=3, stateful=True, batch_input_shape=(1, 1, 3)))
    # The input size is inferred from the previous layer, so only the
    # output width needs to be given.
    model.add(Dense(units=3))
    model.compile(loss='mse', optimizer='rmsprop')
    return model
示例3: test_simple
# 需要导入模块: from keras.layers import recurrent [as 别名]
# 或者: from keras.layers.recurrent import SimpleRNN [as 别名]
def test_simple(self):
    """Run the shared recurrent-layer test harness against SimpleRNN."""
    layer_class = recurrent.SimpleRNN
    _runner(layer_class)
示例4: rnn_test
# 需要导入模块: from keras.layers import recurrent [as 别名]
# 或者: from keras.layers.recurrent import SimpleRNN [as 别名]
def rnn_test(f):
    """Decorator that parametrizes a test over every recurrent layer.

    All the recurrent layers share the same interface, so a single test
    function can be run against SimpleRNN, GRU and LSTM alike.
    """
    layer_classes = [
        recurrent.SimpleRNN,
        recurrent.GRU,
        recurrent.LSTM,
    ]
    wrapped = keras_test(f)
    return pytest.mark.parametrize('layer_class', layer_classes)(wrapped)
示例5: build
# 需要导入模块: from keras.layers import recurrent [as 别名]
# 或者: from keras.layers.recurrent import SimpleRNN [as 别名]
def build(self, input_shape):
    """Create the layer's weights from the (samples, timesteps, input_dim) input shape.

    Allocates the input kernel W, the recurrent kernel U and the bias b,
    wires up any per-weight regularizers, and loads initial weights if
    they were supplied at construction time.

    NOTE(review): relies on ``self.inner_input_dim`` being set elsewhere
    (presumably by the subclass's __init__) — confirm against the full
    class definition.
    """
    self.input_spec = [InputSpec(shape=input_shape)]
    if self.stateful:
        # Stateful layers keep per-batch state tensors; (re)allocate them.
        self.reset_states()
    else:
        # initial states: all-zero tensor of shape (output_dim)
        self.states = [None]
    # Last axis of the 3D input is the feature dimension.
    input_dim = input_shape[2]
    self.input_dim = input_dim
    self.W = self.init((input_dim, self.output_dim),
                       name='{}_W'.format(self.name))
    # Only change in build compared to SimpleRNN:
    # U is of shape (inner_input_dim, output_dim) now.
    self.U = self.inner_init((self.inner_input_dim, self.output_dim),
                             name='{}_U'.format(self.name))
    self.b = K.zeros((self.output_dim,), name='{}_b'.format(self.name))
    # Attach each configured regularizer to its weight tensor.
    self.regularizers = []
    if self.W_regularizer:
        self.W_regularizer.set_param(self.W)
        self.regularizers.append(self.W_regularizer)
    if self.U_regularizer:
        self.U_regularizer.set_param(self.U)
        self.regularizers.append(self.U_regularizer)
    if self.b_regularizer:
        self.b_regularizer.set_param(self.b)
        self.regularizers.append(self.b_regularizer)
    self.trainable_weights = [self.W, self.U, self.b]
    if self.initial_weights is not None:
        # Weights were passed in at construction; install and drop the
        # reference so they are not applied twice.
        self.set_weights(self.initial_weights)
        del self.initial_weights
示例6: construct_model
# 需要导入模块: from keras.layers import recurrent [as 别名]
# 或者: from keras.layers.recurrent import SimpleRNN [as 别名]
def construct_model(maxlen, input_dimension, output_dimension, lstm_vector_output_dim):
    """Build a sequence-to-sequence RNN autoencoder.

    (Original docstring: "Склеены три слова" — "three words glued
    together", i.e. the input sequence is three concatenated words.)

    Args:
        maxlen: number of timesteps in each input sequence.
        input_dimension: size of each input timestep vector.
        output_dimension: size of each output timestep vector.
        lstm_vector_output_dim: size of the fixed-length encoding vector.

    Returns:
        A compiled ``Model`` mapping (maxlen, input_dimension) sequences
        to (maxlen, output_dimension) softmax sequences.
    """
    inputs = Input(shape=(maxlen, input_dimension), name='input')
    # Encode the whole sequence into a single fixed-size vector.
    encoded = SimpleRNN(lstm_vector_output_dim, activation='sigmoid')(inputs)
    # Repeat the encoding so the decoder sees it at every timestep.
    repeated = RepeatVector(n=maxlen)(encoded)
    # Size passed positionally: the Keras 1 ``output_dim=`` keyword is
    # deprecated and was inconsistent with the encoder call above.
    decoded_seq = SimpleRNN(output_dimension, return_sequences=True,
                            activation='softmax')(repeated)
    decoded = TimeDistributed(
        Dense(output_dimension, activation='softmax'))(decoded_seq)
    encoder_decoder = Model(inputs, decoded)
    encoder_decoder.compile(loss='categorical_crossentropy', optimizer=Adam())
    return encoder_decoder
示例7: construct_model
# 需要导入模块: from keras.layers import recurrent [as 别名]
# 或者: from keras.layers.recurrent import SimpleRNN [as 别名]
def construct_model(maxlen, input_dimension, output_dimension, lstm_vector_output_dim):
    """Build an RNN encoder-decoder, returned up to the decoder RNN.

    (Original docstring: "Склеены три слова" — "three words glued
    together", i.e. the input sequence is three concatenated words.)

    Unlike the sibling example, the returned model's output is the
    decoder SimpleRNN itself (no TimeDistributed Dense head), and the
    encoder uses a ReLU activation.

    Args:
        maxlen: number of timesteps in each input sequence.
        input_dimension: size of each input timestep vector.
        output_dimension: size of each output timestep vector.
        lstm_vector_output_dim: size of the fixed-length encoding vector.

    Returns:
        A compiled ``Model`` mapping (maxlen, input_dimension) sequences
        to (maxlen, output_dimension) softmax sequences.
    """
    inputs = Input(shape=(maxlen, input_dimension), name='input')
    # Encode the whole sequence into a single fixed-size vector.
    encoded = SimpleRNN(lstm_vector_output_dim, activation='relu')(inputs)
    # Repeat the encoding so the decoder sees it at every timestep.
    repeated = RepeatVector(n=maxlen)(encoded)
    # Size passed positionally: the Keras 1 ``output_dim=`` keyword is
    # deprecated and was inconsistent with the encoder call above.
    decoded_seq = SimpleRNN(output_dimension, return_sequences=True,
                            activation='softmax')(repeated)
    encoder = Model(inputs, decoded_seq)
    encoder.compile(loss='categorical_crossentropy', optimizer=Adam())
    return encoder