This article collects typical usage examples of the Python attribute tensorflow.python.keras.layers.LSTM. If you are wondering what layers.LSTM does, how it is used, or want to see it in context, the curated examples below may help. You can also explore the other members of the tensorflow.python.keras.layers
module.
Five code examples of the layers.LSTM attribute are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
Example 1: crnn_model
# Required module: from tensorflow.python.keras import layers [as alias]
# Or: from tensorflow.python.keras.layers import LSTM [as alias]
from tensorflow.python.keras.models import Sequential
from tensorflow.python.keras.layers import Conv1D, Dense, LSTM

def crnn_model(width=100, n_vars=6, n_classes=7, conv_kernel_size=5,
               conv_filters=3, lstm_units=3):
    # Two Conv1D feature extractors, an LSTM over the resulting sequence,
    # and a softmax classifier on the final LSTM state.
    input_shape = (width, n_vars)
    model = Sequential()
    model.add(Conv1D(filters=conv_filters, kernel_size=conv_kernel_size,
                     padding='valid', activation='relu', input_shape=input_shape))
    model.add(Conv1D(filters=conv_filters, kernel_size=conv_kernel_size,
                     padding='valid', activation='relu'))
    model.add(LSTM(units=lstm_units, dropout=0.1, recurrent_dropout=0.1))
    model.add(Dense(n_classes, activation="softmax"))
    model.compile(loss='categorical_crossentropy', optimizer='adam',
                  metrics=['accuracy'])
    return model
# load the data
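The excerpt ends where the original script goes on to load its data. As a quick sanity check of the architecture, the model can be exercised with a random batch in place of real input (the array below is a placeholder assumption, not part of the original example):

import numpy as np

model = crnn_model()                     # defaults: width=100, n_vars=6, n_classes=7
model.summary()
dummy_batch = np.random.rand(4, 100, 6)  # (batch, width, n_vars)
print(model.predict(dummy_batch).shape)  # -> (4, 7), one probability per class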
Example 2: crnn_model
# Required module: from tensorflow.python.keras import layers [as alias]
# Or: from tensorflow.python.keras.layers import LSTM [as alias]
def crnn_model(width=100, n_vars=6, n_classes=7, conv_kernel_size=5,
               conv_filters=2, lstm_units=2):
    # Create a CRNN model with Keras, with one CNN layer and one RNN layer.
    input_shape = (width, n_vars)
    model = Sequential()
    model.add(Conv1D(filters=conv_filters, kernel_size=conv_kernel_size,
                     padding='valid', activation='relu', input_shape=input_shape))
    model.add(LSTM(units=lstm_units, dropout=0.1, recurrent_dropout=0.1))
    model.add(Dense(n_classes, activation="softmax"))
    model.compile(loss='categorical_crossentropy', optimizer='adam',
                  metrics=['accuracy'])
    return model
# load the data
Example 3: crnn_model
# Required module: from tensorflow.python.keras import layers [as alias]
# Or: from tensorflow.python.keras.layers import LSTM [as alias]
def crnn_model(width=100, n_vars=6, n_classes=7, conv_kernel_size=5,
               conv_filters=3, lstm_units=3):
    # One Conv1D layer followed by an LSTM and a softmax classifier.
    input_shape = (width, n_vars)
    model = Sequential()
    model.add(Conv1D(filters=conv_filters, kernel_size=conv_kernel_size,
                     padding='valid', activation='relu', input_shape=input_shape))
    model.add(LSTM(units=lstm_units, dropout=0.1, recurrent_dropout=0.1))
    model.add(Dense(n_classes, activation="softmax"))
    model.compile(loss='categorical_crossentropy', optimizer='adam',
                  metrics=['accuracy'])
    return model
##############################################
# Setup
##############################################
# load the data
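The original setup loads a real dataset at this point (not shown in the excerpt). A minimal sketch with synthetic data of the expected shape, purely to illustrate how such a model is trained; the sizes and epoch count below are assumptions:

import numpy as np

n_samples, width, n_vars, n_classes = 200, 100, 6, 7
X = np.random.rand(n_samples, width, n_vars).astype("float32")
y = np.eye(n_classes)[np.random.randint(0, n_classes, size=n_samples)]  # one-hot labels

model = crnn_model(width=width, n_vars=n_vars, n_classes=n_classes)
model.fit(X, y, epochs=2, batch_size=16, validation_split=0.2, verbose=1)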
Example 4: build
# Required module: from tensorflow.python.keras import layers [as alias]
# Or: from tensorflow.python.keras.layers import LSTM [as alias]
def build(self, input_shape):
    if len(input_shape) != 3:
        raise ValueError(
            "Unexpected inputs dimensions %d, expect to be 3 dimensions" % (len(input_shape)))
    self.fw_lstm = []
    self.bw_lstm = []
    for _ in range(self.layers):
        # One forward and one backward (go_backwards=True) LSTM per stacked layer,
        # both returning full sequences so the layers can be chained.
        self.fw_lstm.append(
            LSTM(self.units, dropout=self.dropout_rate, bias_initializer='ones',
                 return_sequences=True, unroll=True))
        self.bw_lstm.append(
            LSTM(self.units, dropout=self.dropout_rate, bias_initializer='ones',
                 return_sequences=True, go_backwards=True, unroll=True))
    super(BiLSTM, self).build(input_shape)  # Be sure to call this somewhere!
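build only creates the per-direction LSTM stacks; the corresponding call method is not shown in this excerpt. A sketch of how such a BiLSTM layer might combine the two directions; the reversal and average merge below are assumptions about the surrounding class, not code from it, and it assumes `import tensorflow as tf` at the top of the module:

def call(self, inputs, mask=None, **kwargs):
    # Hypothetical forward pass: run each stacked pair of LSTMs and merge directions.
    output_fw, output_bw = inputs, inputs
    for fw, bw in zip(self.fw_lstm, self.bw_lstm):
        output_fw = fw(output_fw)
        output_bw = bw(output_bw)
        # go_backwards=True returns the sequence in reverse time order,
        # so flip it back along the time axis before combining with the forward outputs.
        output_bw = tf.reverse(output_bw, axis=[1])
    return (output_fw + output_bw) / 2.0  # average merge; concatenation is another option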
Example 5: build
# Required module: from tensorflow.python.keras import layers [as alias]
# Or: from tensorflow.python.keras.layers import LSTM [as alias]
def build(self, input_shape):
    if len(input_shape) != 3:
        raise ValueError(
            "Unexpected inputs dimensions %d, expect to be 3 dimensions" % (len(input_shape)))
    self.fw_lstm = []
    self.bw_lstm = []
    for _ in range(self.layers):
        # Same structure as Example 4, but the dropout rate is stored as self.dropout.
        self.fw_lstm.append(
            LSTM(self.units, dropout=self.dropout, bias_initializer='ones',
                 return_sequences=True, unroll=True))
        self.bw_lstm.append(
            LSTM(self.units, dropout=self.dropout, bias_initializer='ones',
                 return_sequences=True, go_backwards=True, unroll=True))
    super(BiLSTM, self).build(input_shape)  # Be sure to call this somewhere!