本文整理汇总了Python中keras.layers.wrappers.Bidirectional方法的典型用法代码示例。如果您正苦于以下问题:Python wrappers.Bidirectional方法的具体用法?Python wrappers.Bidirectional怎么用?Python wrappers.Bidirectional使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在模块keras.layers.wrappers
的用法示例。
在下文中一共展示了wrappers.Bidirectional方法的14个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: _build_sequence_model
# Required module: from keras.layers import wrappers
# Alias used below: from keras.layers.wrappers import Bidirectional
def _build_sequence_model(self, sequence_input):
    """Stack `self._rnn_layers` batch-normalized (bi)directional RNN layers
    over *sequence_input* and project every timestep onto the vocabulary.

    Returns the time-distributed logits tensor of width `self._vocab_size`.
    NOTE(review): assumes self._rnn_layers >= 1; with 0 layers the final
    reference to `rnn_out` raises NameError — TODO confirm with callers.
    """
    cell_cls = GRU if self._rnn_type == 'gru' else LSTM

    def make_rnn():
        # Build a fresh recurrent layer each call; wrap it when the
        # bidirectional flag is set.
        layer = cell_cls(units=self._rnn_output_size,
                         return_sequences=True,
                         dropout=self._dropout_rate,
                         recurrent_dropout=self._dropout_rate,
                         kernel_regularizer=self._regularizer,
                         kernel_initializer=self._initializer,
                         implementation=2)
        return Bidirectional(layer) if self._bidirectional_rnn else layer

    current = sequence_input
    for _ in range(self._rnn_layers):
        normalized = BatchNormalization(axis=-1)(current)
        rnn_out = make_rnn()(normalized)
        current = rnn_out
    return TimeDistributed(Dense(units=self._vocab_size))(rnn_out)
示例2: change_trainable
# Required module: from keras.layers import wrappers
# Alias used below: from keras.layers.wrappers import Bidirectional
def change_trainable(layer, trainable, verbose=False):
    """ Helper method that fixes some of Keras' issues with wrappers and
    trainability. Freezes or unfreezes a given layer.
    # Arguments:
        layer: Layer to be modified.
        trainable: Whether the layer should be frozen or unfrozen.
        verbose: Verbosity flag.
    """
    layer.trainable = trainable
    if isinstance(layer, Bidirectional):
        # The wrapper does not propagate `trainable` to its two inner copies.
        layer.backward_layer.trainable = trainable
        layer.forward_layer.trainable = trainable
    if isinstance(layer, TimeDistributed):
        # Bug fix: TimeDistributed exposes its wrapped layer as `.layer`;
        # `.backward_layer` only exists on Bidirectional, so the original
        # line raised AttributeError for every TimeDistributed layer.
        layer.layer.trainable = trainable
    if verbose:
        action = 'Unfroze' if trainable else 'Froze'
        print("{} {}".format(action, layer.name))
示例3: create_model
# Required module: from keras.layers import wrappers
# Alias used below: from keras.layers.wrappers import Bidirectional
def create_model(self):
    """Assemble a masked, two-layer bidirectional-GRU sequence tagger and
    store the model plus its loss/optimizer on `self`.
    """
    n_features = self.parameters.pref['features_length']
    shape = (None, n_features)

    inputs = Input(shape=shape)
    # Zero vectors are treated as padding and masked out downstream.
    net = Masking(mask_value=0., input_shape=shape)(inputs)
    for _ in range(2):
        net = Bidirectional(
            GRU(self.parameters.pref['internal_neurons'],
                return_sequences=True, dropout=0.0, recurrent_dropout=0.5,
                implementation=1),
            input_shape=shape)(net)
    net = Dropout(0.5)(net)
    net = TimeDistributed(
        Dense(self.parameters.pref['output_length'],
              activation='softmax'))(net)

    self.model = Model(inputs=inputs, outputs=net)
    self.loss = 'categorical_crossentropy'
    self.optimizer = keras.optimizers.Nadam()
示例4: test_tiny_no_sequence_bidir_random
# Required module: from keras.layers import wrappers
# Alias used below: from keras.layers.wrappers import Bidirectional
def test_tiny_no_sequence_bidir_random(
    self, model_precision=_MLMODEL_FULL_PRECISION
):
    """Convert a minimal single-timestep Bidirectional(LSTM) model
    (implementation=1) and compare the converted model's output against
    Keras at the given precision.
    """
    np.random.seed(1988)  # deterministic weights for reproducibility
    input_dim = 1
    input_length = 1
    num_channels = 1
    # Define a model (removed unused `num_samples` local)
    model = Sequential()
    model.add(
        Bidirectional(
            LSTM(num_channels, implementation=1, recurrent_activation="sigmoid"),
            input_shape=(input_length, input_dim),
        )
    )
    # Set some random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )
    # Test the keras model against its converted counterpart
    self._test_model(model, model_precision=model_precision)
示例5: test_tiny_no_sequence_bidir_random_gpu
# Required module: from keras.layers import wrappers
# Alias used below: from keras.layers.wrappers import Bidirectional
def test_tiny_no_sequence_bidir_random_gpu(
    self, model_precision=_MLMODEL_FULL_PRECISION
):
    """Same as test_tiny_no_sequence_bidir_random but with the GPU-style
    recurrent kernel (implementation=2).
    """
    np.random.seed(1988)  # deterministic weights for reproducibility
    input_dim = 1
    input_length = 1
    num_channels = 1
    # Define a model (removed unused `num_samples` local)
    model = Sequential()
    model.add(
        Bidirectional(
            LSTM(num_channels, implementation=2, recurrent_activation="sigmoid"),
            input_shape=(input_length, input_dim),
        )
    )
    # Set some random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )
    # Test the keras model against its converted counterpart
    self._test_model(model, model_precision=model_precision)
示例6: test_small_no_sequence_bidir_random
# Required module: from keras.layers import wrappers
# Alias used below: from keras.layers.wrappers import Bidirectional
def test_small_no_sequence_bidir_random(self):
    """Single-timestep Bidirectional(LSTM) with a wider (10-d) input,
    converted and checked against Keras.
    """
    np.random.seed(1988)
    input_dim, input_length, num_channels = 10, 1, 1

    # Build the one-layer model under test.
    model = Sequential()
    bidir = Bidirectional(
        LSTM(num_channels, implementation=2, recurrent_activation="sigmoid"),
        input_shape=(input_length, input_dim),
    )
    model.add(bidir)

    # Randomize the weights in [-0.1, 0.1).
    randomized = [
        np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()
    ]
    model.set_weights(randomized)

    self._test_model(model)
示例7: test_medium_no_sequence_bidir_random
# Required module: from keras.layers import wrappers
# Alias used below: from keras.layers.wrappers import Bidirectional
def test_medium_no_sequence_bidir_random(self):
    """Single-timestep Bidirectional(LSTM) with 10 input dims and 10
    channels, converted and checked against Keras.
    """
    np.random.seed(1988)
    input_dim, input_length, num_channels = 10, 1, 10

    # Build the one-layer model under test.
    model = Sequential()
    bidir = Bidirectional(
        LSTM(num_channels, implementation=2, recurrent_activation="sigmoid"),
        input_shape=(input_length, input_dim),
    )
    model.add(bidir)

    # Randomize the weights in [-0.1, 0.1).
    randomized = [
        np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()
    ]
    model.set_weights(randomized)

    self._test_model(model)
示例8: test_Bidirectional_state_reuse
# Required module: from keras.layers import wrappers
# Alias used below: from keras.layers.wrappers import Bidirectional
def test_Bidirectional_state_reuse():
    """States returned by one Bidirectional RNN may seed another, but only
    when passed as a list — a bare tensor must raise ValueError.
    """
    rnn = layers.LSTM
    samples, dim, timesteps, units = 2, 5, 3, 3

    first_input = Input((timesteps, dim))
    # Element 0 is the sequence output; the remainder are the state tensors.
    state = wrappers.Bidirectional(
        rnn(units, return_state=True, return_sequences=True))(first_input)[1:]

    second_input = Input((timesteps, dim))
    # Invalid usage: a single tensor as initial_state.
    with pytest.raises(ValueError):
        wrappers.Bidirectional(rnn(units))(second_input,
                                           initial_state=state[0])
    # Valid usage: the full list of state tensors.
    output = wrappers.Bidirectional(rnn(units))(second_input,
                                                initial_state=state)

    model = Model([first_input, second_input], output)
    assert len(model.layers) == 4
    assert isinstance(model.layers[-1].input, list)
    batches = [np.random.rand(samples, timesteps, dim),
               np.random.rand(samples, timesteps, dim)]
    model.predict(batches)
示例9: create
# Required module: from keras.layers import wrappers
# Alias used below: from keras.layers.wrappers import Bidirectional
def create(inputtokens, vocabsize, units=16, dropout=0, embedding=32):
    """Build a token-level language model: embedding -> dropout -> batch
    norm -> Bidirectional(CuDNNLSTM) -> dropout -> softmax over the
    vocabulary. Replicated across GPUs when more than one is available.
    """
    inp = Input(shape=(inputtokens,), dtype='int32')
    # Embedding layer
    net = Embedding(input_dim=vocabsize, output_dim=embedding,
                    input_length=inputtokens)(inp)
    net = Dropout(dropout)(net)
    # Bidirectional LSTM layer over the embedded sequence
    net = BatchNormalization()(net)
    net = Bidirectional(CuDNNLSTM(units))(net)
    net = Dropout(dropout)(net)
    # Output layer: probability distribution over the vocabulary
    net = Dense(vocabsize, activation='softmax')(net)
    model = Model(inputs=inp, outputs=net)
    # Make data-parallel across all visible GPUs
    ngpus = len(get_available_gpus())
    if ngpus > 1:
        model = make_parallel(model, ngpus)
    return model
示例10: bilstm_layer
# Required module: from keras.layers import wrappers
# Alias used below: from keras.layers.wrappers import Bidirectional
def bilstm_layer(input_layer, lstm_dims, rnn_layers, dropout):
    """Stack `rnn_layers` sum-merged bidirectional LSTMs on *input_layer*.

    # Arguments:
        input_layer: tensor fed into the first layer.
        lstm_dims: an int (same width for every layer) or a list/tuple of
            per-layer widths of length `rnn_layers`.
        rnn_layers: number of stacked bidirectional layers.
        dropout: rate applied to input (dropout_W) and recurrent
            (dropout_U) connections — Keras 1.x style keywords.

    Returns the output of the last layer (None when rnn_layers == 0).
    """
    # Normalize lstm_dims to a per-layer list (the original code had a
    # dead no-op branch here: `lstm_dims = lstm_dims`).
    if not isinstance(lstm_dims, (list, tuple)):
        assert isinstance(lstm_dims, int)
        lstm_dims = [lstm_dims] * rnn_layers
    lstm = None
    for i in range(rnn_layers):
        nested = input_layer if i == 0 else lstm
        wrapped = LSTM(
            output_dim=lstm_dims[i], activation='tanh', return_sequences=True,
            dropout_W=dropout, dropout_U=dropout,
            # NOTE(review): 'bistm' looks like a typo for 'bilstm', but the
            # name is kept byte-identical so previously saved weights that
            # are matched by layer name still load.
            name='bistm_%d' % i)
        lstm = Bidirectional(wrapped, merge_mode='sum')(nested)
    return lstm
示例11: test_medium_bidir_random_return_seq_false
# Required module: from keras.layers import wrappers
# Alias used below: from keras.layers.wrappers import Bidirectional
def test_medium_bidir_random_return_seq_false(self):
    """Bidirectional(LSTM) over a 5-step sequence that emits only the final
    output (return_sequences=False), converted and checked against Keras.
    """
    np.random.seed(1988)
    input_dim, input_length, num_channels = 7, 5, 10

    # Build the one-layer model under test.
    model = Sequential()
    bidir = Bidirectional(
        LSTM(
            num_channels,
            return_sequences=False,
            implementation=2,
            recurrent_activation="sigmoid",
        ),
        input_shape=(input_length, input_dim),
    )
    model.add(bidir)

    # Randomize the weights in [-0.1, 0.1).
    randomized = [
        np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()
    ]
    model.set_weights(randomized)

    self._test_model(model)
示例12: test_bilstm_merge_modes
# Required module: from keras.layers import wrappers
# Alias used below: from keras.layers.wrappers import Bidirectional
def test_bilstm_merge_modes(self):
    """Regression test for issue 157: every Bidirectional merge_mode must
    survive conversion for a Dense -> BiLSTM -> Dense pipeline.
    """
    def build(input_dim, fc_size, rnn_size, output_dim, merge_mode):
        # Variable-length input so TimeDistributed layers apply per step.
        input_data = Input(name="the_input", shape=(None, input_dim))
        hidden = TimeDistributed(Dense(fc_size, name="fc1", activation="relu",))(
            input_data
        )
        hidden = Bidirectional(
            LSTM(
                rnn_size,
                return_sequences=True,
                activation="relu",
                kernel_initializer="he_normal",
            ),
            merge_mode=merge_mode,
        )(hidden)
        y_pred = TimeDistributed(
            Dense(output_dim, name="y_pred", activation="softmax")
        )(hidden)
        model = Model([input_data], [y_pred])
        # Randomize weights in [-0.1, 0.1).
        model.set_weights(
            [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
        )
        return model

    for merge_mode in ["concat", "sum", "mul", "ave"]:
        self._test_model(build(26, 512, 512, 29, merge_mode))
示例13: test_Bidirectional_dropout
# Required module: from keras.layers import wrappers
# Alias used below: from keras.layers.wrappers import Bidirectional
def test_Bidirectional_dropout(merge_mode):
    """Dropout wiring through Bidirectional: explicit training=True detaches
    outputs from the learning phase, implicit mode attaches it, and
    inference predictions stay deterministic.
    """
    rnn = layers.LSTM
    samples, dim, timesteps, units = 2, 5, 3, 3
    X = [np.random.rand(samples, timesteps, dim)]

    # With training=True the outputs must not depend on the learning phase.
    inputs = Input((timesteps, dim))
    bidir = wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2),
                                   merge_mode=merge_mode)
    outputs = _to_list(bidir(inputs, training=True))
    assert not any(getattr(x, '_uses_learning_phase') for x in outputs)

    # Without it, every output (including states) carries the learning phase.
    inputs = Input((timesteps, dim))
    bidir = wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True),
                                   merge_mode=merge_mode)
    outputs = _to_list(bidir(inputs))
    assert all(x._uses_learning_phase for x in outputs)

    model = Model(inputs, outputs)
    assert model.uses_learning_phase
    # Inference disables dropout, so two predictions must agree.
    first = _to_list(model.predict(X))
    second = _to_list(model.predict(X))
    for a, b in zip(first, second):
        assert_allclose(a, b, atol=1e-5)
示例14: test_Bidirectional_trainable
# Required module: from keras.layers import wrappers
# Alias used below: from keras.layers.wrappers import Bidirectional
def test_Bidirectional_trainable():
    """Toggling `trainable` on the wrapper must hide/expose the weights of
    both inner RNN copies (SimpleRNN has 3 weight tensors, so 6 total).
    """
    x = Input(shape=(3, 2))
    wrapper = wrappers.Bidirectional(layers.SimpleRNN(3))
    _ = wrapper(x)  # build the layer so its weights exist
    assert len(wrapper.trainable_weights) == 6
    wrapper.trainable = False
    assert len(wrapper.trainable_weights) == 0
    wrapper.trainable = True
    assert len(wrapper.trainable_weights) == 6