本文整理匯總了Python中keras.regularizers的典型用法代碼示例。如果您正苦於以下問題:Python keras.regularizers具體怎麽用?Python keras.regularizers的使用例子有哪些?那麽,這裏精選的代碼示例或許可以為您提供幫助。您也可以進一步了解該名稱所在模塊keras的更多用法示例。
在下文中一共展示了keras.regularizers方法的3個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: build
# 需要導入模塊: import keras [as 別名]
# 或者: from keras import regularizers [as 別名]
def build(self):
    """Build this wrapper layer by delegating setup to the wrapped layer.

    Determines the input rank from the previous layer when this layer is
    connected inside a model (falling back to this layer's own
    ``input_shape`` otherwise), propagates a per-timestep input shape to
    the wrapped layer, and mirrors the wrapped layer's regularizers,
    constraints and trainable weights onto the wrapper so the training
    loop can see them.

    NOTE(review): the flat source lost its indentation; the structure
    below follows the canonical Keras 0.x wrapper implementation.
    """
    # EAFP: `previous` only exists once the layer is wired into a model;
    # a standalone layer falls back to its own declared input shape.
    try:
        self.input_ndim = len(self.previous.input_shape)
    except AttributeError:
        self.input_ndim = len(self.input_shape)
    # The wrapped layer sees a single timestep: keep a free batch axis
    # and drop the first two axes (batch, time) of the full input shape.
    self.layer.set_input_shape((None,) + self.input_shape[2:])
    # Mirror the inner layer's trainable state onto the wrapper, but only
    # for the attributes the inner layer actually defines.
    for attr in ('regularizers', 'constraints', 'trainable_weights'):
        if hasattr(self.layer, attr):
            setattr(self, attr, getattr(self.layer, attr))
    # Apply user-supplied initial weights once, then drop the reference so
    # the (possibly large) arrays can be garbage-collected.
    if self.initial_weights is not None:
        self.layer.set_weights(self.initial_weights)
        del self.initial_weights
示例2: __init__
# 需要導入模塊: import keras [as 別名]
# 或者: from keras import regularizers [as 別名]
def __init__(self, h, output_dim,
             init='glorot_uniform', **kwargs):
    """Construct the attention layer.

    Parameters
    ----------
    h : int
        Size of the attention projection (assumed — the weights are built
        elsewhere; TODO confirm against this class's ``build``).
    output_dim : int
        Dimensionality of the layer's output.
    init : str, optional
        Name of the weight initializer, resolved through Keras's
        ``initializations.get``. Defaults to ``'glorot_uniform'``.
    **kwargs
        Forwarded unchanged to the base ``Layer`` constructor.
    """
    self.init = initializations.get(init)
    self.h = h
    self.output_dim = output_dim
    # Regularizers and dropout were deliberately removed from this layer.
    super(AttenLayer, self).__init__(**kwargs)
    # Restrict input to rank-3 tensors — this seems necessary in order to
    # accept 3 input dimensions: (samples, timesteps, features).
    self.input_spec = [InputSpec(ndim=3)]
示例3: feed_forward_net
# 需要導入模塊: import keras [as 別名]
# 或者: from keras import regularizers [as 別名]
def feed_forward_net(input, output, hidden_layers=(64, 64), activations='relu',
                     dropout_rate=0., l2=0., constrain_norm=False):
    '''
    Helper function for building a Keras feed forward network.

    input: Keras Input object appropriate for the data,
        e.g. input=Input(shape=(20,)).
    output: Function representing the final layer of the network that maps
        from the last hidden layer to the output,
        e.g. output = Dense(10, activation='softmax') for 10-class
        classification, or output = Dense(1, activation='linear') for
        regression.
    hidden_layers: sequence of ints, the width of each hidden Dense layer.
        (Default changed from a list to an equivalent tuple to avoid the
        mutable-default-argument pitfall; read-only use, so behavior is
        unchanged.)
    activations: a single activation name applied to every hidden layer,
        or one name per hidden layer.
    dropout_rate: when > 0, a Dropout with this rate follows each hidden
        layer.
    l2: when > 0, L2 kernel regularization of this strength on each hidden
        layer.
    constrain_norm: when True, constrain each hidden kernel to max-norm 2.

    Returns the Keras tensor produced by applying `output` to the last
    hidden state.
    '''
    if isinstance(activations, str):
        activations = [activations] * len(hidden_layers)
    # The regularizer and constraint do not vary per layer — build each
    # exactly once instead of inside the loop.
    w_reg = keras.regularizers.l2(l2) if l2 > 0. else None
    const = maxnorm(2) if constrain_norm else None
    state = input
    for width, act in zip(hidden_layers, activations):
        state = Dense(width, activation=act,
                      kernel_regularizer=w_reg,
                      kernel_constraint=const)(state)
        if dropout_rate > 0.:
            state = Dropout(dropout_rate)(state)
    return output(state)