This article collects typical code examples of the tensorflow.keras.activations.get method in Python. If you have been wondering exactly how to use activations.get, or what it is for, the curated examples below should help. You can also explore further usage examples from its containing module, tensorflow.keras.activations.
Below, 15 code examples of activations.get are shown, ordered by popularity by default.
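Before the examples, here is a minimal illustrative sketch of what activations.get itself does (standard tf.keras behavior; the snippet is not taken from any of the projects below):

import tensorflow as tf
from tensorflow.keras import activations

# A string identifier resolves to the built-in activation of that name.
relu_fn = activations.get('relu')
print(relu_fn(tf.constant([-1.0, 2.0])))  # -> [0., 2.]

# A callable is returned unchanged, and None resolves to the linear (identity) activation.
assert activations.get(tf.nn.softmax) is tf.nn.softmax
identity_fn = activations.get(None)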
Example 1: build
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def build(self, input_shape):
    self.W_list = []
    self.b_list = []
    init = initializers.get(self.init)
    prev_layer_size = self.n_embedding
    for i, layer_size in enumerate(self.layer_sizes):
        self.W_list.append(init([prev_layer_size, layer_size]))
        self.b_list.append(backend.zeros(shape=[layer_size]))
        prev_layer_size = layer_size
    self.W_list.append(init([prev_layer_size, self.n_outputs]))
    self.b_list.append(backend.zeros(shape=[self.n_outputs]))
    self.built = True
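For context, here is a minimal hedged sketch of how weights built this way are typically consumed in a forward pass, with the hidden activation resolved via activations.get. The helper below is illustrative and not part of the original layer:

import tensorflow as tf
from tensorflow.keras import activations

def mlp_forward(inputs, W_list, b_list, activation='tanh'):
    # Illustrative only: chain the hidden layers built above, applying the
    # resolved activation between them; the final projection stays linear,
    # matching the extra W/b appended after the loop.
    activation_fn = activations.get(activation)
    out = inputs
    for W, b in zip(W_list[:-1], b_list[:-1]):
        out = activation_fn(tf.matmul(out, W) + b)
    return tf.matmul(out, W_list[-1]) + b_list[-1]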
Example 2: __init__
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def __init__(self,
             k,
             channels=None,
             return_mask=False,
             activation=None,
             kernel_initializer='glorot_uniform',
             kernel_regularizer=None,
             kernel_constraint=None,
             **kwargs):
    super().__init__(**kwargs)
    self.k = k
    self.channels = channels
    self.return_mask = return_mask
    self.activation = activations.get(activation)
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
Example 3: convert_sequence_vocab
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def convert_sequence_vocab(self, sequence, sequence_lengths):
    PFAM_TO_UNIREP_ENCODED = {encoding: UNIREP_VOCAB.get(aa, 23)
                              for aa, encoding in PFAM_VOCAB.items()}

    def to_uniprot_unirep(seq, seqlens):
        new_seq = np.zeros_like(seq)
        for pfam_encoding, unirep_encoding in PFAM_TO_UNIREP_ENCODED.items():
            new_seq[seq == pfam_encoding] = unirep_encoding
        # add start/stop
        new_seq = np.pad(new_seq, [[0, 0], [1, 1]], mode='constant')
        new_seq[:, 0] = UNIREP_VOCAB['<START>']
        new_seq[np.arange(new_seq.shape[0]), seqlens + 1] = UNIREP_VOCAB['<STOP>']
        return new_seq

    new_sequence = tf.py_func(to_uniprot_unirep, [sequence, sequence_lengths], sequence.dtype)
    new_sequence.set_shape([sequence.shape[0], sequence.shape[1] + 2])
    return new_sequence
Example 4: __init__
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def __init__(self,
             output_dim: int,
             decomp_size: int,
             use_bias: Optional[bool] = True,
             activation: Optional[Text] = None,
             kernel_initializer: Optional[Text] = 'glorot_uniform',
             bias_initializer: Optional[Text] = 'zeros',
             **kwargs) -> None:
    # Allow specification of input_dim instead of input_shape,
    # for compatibility with Keras layers that support this.
    if 'input_shape' not in kwargs and 'input_dim' in kwargs:
        kwargs['input_shape'] = (kwargs.pop('input_dim'),)
    super(DenseDecomp, self).__init__(**kwargs)
    self.output_dim = output_dim
    self.decomp_size = decomp_size
    self.use_bias = use_bias
    self.activation = activations.get(activation)
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
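A hedged instantiation sketch for a constructor like this one; the argument values are purely illustrative, and the real layer may enforce stricter shape or size constraints than shown here:

# Illustrative only: the activation is passed as a string and resolved to a
# callable by activations.get inside __init__; input_dim is routed into
# input_shape by the compatibility branch above.
layer = DenseDecomp(output_dim=64, decomp_size=8, activation='relu', input_dim=128)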
Example 5: __init__
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def __init__(self,
             exp_base: int,
             num_nodes: int,
             use_bias: Optional[bool] = True,
             activation: Optional[Text] = None,
             kernel_initializer: Optional[Text] = 'glorot_uniform',
             bias_initializer: Optional[Text] = 'zeros',
             **kwargs) -> None:
    if 'input_shape' not in kwargs and 'input_dim' in kwargs:
        kwargs['input_shape'] = (kwargs.pop('input_dim'),)
    super(DenseCondenser, self).__init__(**kwargs)
    self.exp_base = exp_base
    self.num_nodes = num_nodes
    self.nodes = []
    self.use_bias = use_bias
    self.activation = activations.get(activation)
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
Example 6: get
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def get(identifier: OptStrOrCallable = None) -> Callable[..., Any]:
    """Get an activation function by identifier.

    Args:
        identifier (str or callable): the identifier of the activation

    Returns:
        callable: the activation function
    """
    try:
        return keras_get(identifier)
    except ValueError:
        if isinstance(identifier, str):
            return deserialize(identifier, custom_objects=globals())
        else:
            raise ValueError('Could not interpret:', identifier)
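A hedged usage note: this wrapper first tries the stock Keras lookup, so standard identifiers behave exactly as before, and only unknown string names fall through to the module-level custom objects. For example (illustrative):

# Standard Keras names resolve through keras_get unchanged.
softplus_fn = get('softplus')

# An unknown string name would be looked up among this module's globals() by
# the deserialize fallback; 'my_custom_activation' here is a hypothetical name.
# custom_fn = get('my_custom_activation')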
Example 7: __init__
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def __init__(self,
             output_dim,
             input_dim,
             init_fn='glorot_uniform',
             inner_init_fn='orthogonal',
             activation_fn='tanh',
             inner_activation_fn='hard_sigmoid',
             **kwargs):
    """
    Parameters
    ----------
    output_dim: int
        Dimensionality of output vectors.
    input_dim: int
        Dimensionality of input vectors.
    init_fn: str
        TensorFlow initialization to use for W.
    inner_init_fn: str
        TensorFlow initialization to use for U.
    activation_fn: str
        TensorFlow activation to use for output.
    inner_activation_fn: str
        TensorFlow activation to use for inner steps.
    """
    super(LSTMStep, self).__init__(**kwargs)
    self.init = init_fn
    self.inner_init = inner_init_fn
    self.output_dim = output_dim
    # No other forget biases supported right now.
    self.activation = activation_fn
    self.inner_activation = inner_activation_fn
    self.activation_fn = activations.get(activation_fn)
    self.inner_activation_fn = activations.get(inner_activation_fn)
    self.input_dim = input_dim
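For context, a minimal hedged sketch of how the two resolved activation callables are typically applied in an LSTM-style step; the gate math below is illustrative and not the original implementation:

import tensorflow as tf
from tensorflow.keras import activations

def lstm_gates(z_i, z_f, z_o, z_c, c_prev,
               activation_fn=activations.get('tanh'),
               inner_activation_fn=activations.get('hard_sigmoid')):
    # The inner activation squashes the gates; the outer activation squashes
    # the candidate cell value and the hidden state.
    i = inner_activation_fn(z_i)              # input gate
    f = inner_activation_fn(z_f)              # forget gate
    o = inner_activation_fn(z_o)              # output gate
    c = f * c_prev + i * activation_fn(z_c)   # new cell state
    h = o * activation_fn(c)                  # new hidden state
    return h, c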
Example 8: deserialize_kwarg
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def deserialize_kwarg(key, attr):
    if key.endswith('_initializer'):
        return initializers.get(attr)
    if key.endswith('_regularizer'):
        return regularizers.get(attr)
    if key.endswith('_constraint'):
        return constraints.get(attr)
    if key == 'activation':
        return activations.get(attr)
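A hedged usage example of the helper above (the identifiers are standard Keras names; the calls themselves are illustrative):

act = deserialize_kwarg('activation', 'relu')            # -> tf.keras.activations.relu
init = deserialize_kwarg('kernel_initializer', 'zeros')  # -> a Zeros initializer instance
reg = deserialize_kwarg('kernel_regularizer', 'l2')      # -> an L2 regularizer instance
# Keys matching none of the suffixes fall through and the function returns None.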
Example 9: __init__
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def __init__(self,
             trainable_kernel=False,
             activation=None,
             kernel_initializer='glorot_uniform',
             kernel_regularizer=None,
             kernel_constraint=None,
             **kwargs):
    super().__init__(**kwargs)
    self.trainable_kernel = trainable_kernel
    self.activation = activations.get(activation)
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
Example 10: __init__
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def __init__(self,
             k,
             mlp_hidden=None,
             mlp_activation='relu',
             return_mask=False,
             activation=None,
             use_bias=True,
             kernel_initializer='glorot_uniform',
             bias_initializer='zeros',
             kernel_regularizer=None,
             bias_regularizer=None,
             kernel_constraint=None,
             bias_constraint=None,
             **kwargs):
    super().__init__(**kwargs)
    self.k = k
    self.mlp_hidden = mlp_hidden if mlp_hidden else []
    self.mlp_activation = mlp_activation
    self.return_mask = return_mask
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
Example 11: __init__
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def __init__(self,
             channels,
             mlp_hidden=None,
             mlp_activation='relu',
             activation=None,
             use_bias=True,
             kernel_initializer='glorot_uniform',
             bias_initializer='zeros',
             kernel_regularizer=None,
             bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None,
             bias_constraint=None,
             **kwargs):
    super().__init__(aggregate='sum',
                     activation=activation,
                     use_bias=use_bias,
                     kernel_initializer=kernel_initializer,
                     bias_initializer=bias_initializer,
                     kernel_regularizer=kernel_regularizer,
                     bias_regularizer=bias_regularizer,
                     activity_regularizer=activity_regularizer,
                     kernel_constraint=kernel_constraint,
                     bias_constraint=bias_constraint,
                     **kwargs)
    self.channels = self.output_dim = channels
    self.mlp_hidden = mlp_hidden if mlp_hidden else []
    self.mlp_activation = activations.get(mlp_activation)
Example 12: __init__
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def __init__(self,
             channels,
             order=1,
             iterations=1,
             share_weights=False,
             gcn_activation='relu',
             dropout_rate=0.0,
             activation=None,
             use_bias=True,
             kernel_initializer='glorot_uniform',
             bias_initializer='zeros',
             kernel_regularizer=None,
             bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None,
             bias_constraint=None,
             **kwargs):
    super().__init__(channels,
                     activation=activation,
                     use_bias=use_bias,
                     kernel_initializer=kernel_initializer,
                     bias_initializer=bias_initializer,
                     kernel_regularizer=kernel_regularizer,
                     bias_regularizer=bias_regularizer,
                     activity_regularizer=activity_regularizer,
                     kernel_constraint=kernel_constraint,
                     bias_constraint=bias_constraint,
                     **kwargs)
    self.iterations = iterations
    self.order = order
    self.share_weights = share_weights
    self.gcn_activation = activations.get(gcn_activation)
    self.dropout_rate = dropout_rate
Example 13: __init__
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def __init__(self,
             channels,
             alpha=0.2,
             propagations=1,
             mlp_hidden=None,
             mlp_activation='relu',
             dropout_rate=0.0,
             activation=None,
             use_bias=True,
             kernel_initializer='glorot_uniform',
             bias_initializer='zeros',
             kernel_regularizer=None,
             bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None,
             bias_constraint=None,
             **kwargs):
    super().__init__(channels,
                     activation=activation,
                     use_bias=use_bias,
                     kernel_initializer=kernel_initializer,
                     bias_initializer=bias_initializer,
                     kernel_regularizer=kernel_regularizer,
                     bias_regularizer=bias_regularizer,
                     activity_regularizer=activity_regularizer,
                     kernel_constraint=kernel_constraint,
                     bias_constraint=bias_constraint,
                     **kwargs)
    self.mlp_hidden = mlp_hidden if mlp_hidden else []
    self.alpha = alpha
    self.propagations = propagations
    self.mlp_activation = activations.get(mlp_activation)
    self.dropout_rate = dropout_rate
Example 14: __init__
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def __init__(self,
             channels,
             epsilon=None,
             mlp_hidden=None,
             mlp_activation='relu',
             activation=None,
             use_bias=True,
             kernel_initializer='glorot_uniform',
             bias_initializer='zeros',
             kernel_regularizer=None,
             bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None,
             bias_constraint=None,
             **kwargs):
    super().__init__(aggregate='sum',
                     activation=activation,
                     use_bias=use_bias,
                     kernel_initializer=kernel_initializer,
                     bias_initializer=bias_initializer,
                     kernel_regularizer=kernel_regularizer,
                     bias_regularizer=bias_regularizer,
                     activity_regularizer=activity_regularizer,
                     kernel_constraint=kernel_constraint,
                     bias_constraint=bias_constraint,
                     **kwargs)
    self.channels = self.output_dim = channels
    self.epsilon = epsilon
    self.mlp_hidden = mlp_hidden if mlp_hidden else []
    self.mlp_activation = activations.get(mlp_activation)
Example 15: __init__
# Required import: from tensorflow.keras import activations [as alias]
# Or: from tensorflow.keras.activations import get [as alias]
def __init__(self,
             output_dim: int,
             num_legs: int,
             num_levels: int,
             use_bias: Optional[bool] = True,
             activation: Optional[Text] = None,
             kernel_initializer: Optional[Text] = 'glorot_uniform',
             bias_initializer: Optional[Text] = 'zeros',
             **kwargs) -> None:
    if 'input_shape' not in kwargs and 'input_dim' in kwargs:
        kwargs['input_shape'] = (kwargs.pop('input_dim'),)
    assert num_legs >= 2, (
        f'Need at least 2 legs to create Entangler but got {num_legs} legs')
    assert num_levels >= 1, (
        f'Need at least 1 level to create Entangler but got {num_levels} levels')
    super(DenseEntangler, self).__init__(**kwargs)
    self.output_dim = output_dim
    self.num_legs = num_legs
    self.num_levels = num_levels
    self.nodes = []
    self.use_bias = use_bias
    self.activation = activations.get(activation)
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)