This article collects typical usage examples of tensorflow.keras.layers in Python. If you are wondering what keras.layers is for, how to use it, or want to see it in real code, the curated examples below may help. You can also read further about the enclosing package, tensorflow.keras.
The following presents 15 code examples of keras.layers, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
Example 1: format_layer_idx
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
def format_layer_idx(self, idx):
"""Pad the layer index with the appropriate amount of zeros.
The number of zeros used for padding is determined by the maximum index
(i.e. the number of layers in the network).
Parameters
----------
idx: int
Layer index.
Returns
-------
num_str: str
Zero-padded layer index.
"""
max_idx = len(self.input_model.layers)
return str(idx).zfill(len(str(max_idx)))
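For instance, in a network with 120 layers the indices are padded to three digits (hypothetical numbers, purely to illustrate the zero-padding):

max_idx = 120
print(str(7).zfill(len(str(max_idx))))    # '007'
print(str(42).zfill(len(str(max_idx))))   # '042'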
Example 2: has_weights
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
def has_weights(layer):
"""Return ``True`` if layer has weights.
Parameters
----------
layer : keras.layers.Layer
Keras layer
Returns
-------
: bool
``True`` if layer has weights.
"""
    return len(layer.weights) > 0
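A quick illustration of the helper (assumed usage, not from the original project):

from tensorflow.keras import layers

dense = layers.Dense(10)
dense.build((None, 4))       # creates the kernel and bias variables
flatten = layers.Flatten()

print(has_weights(dense))    # True  (kernel and bias)
print(has_weights(flatten))  # False (no weights at all)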
Example 3: get_inbound_layers_without_params
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
def get_inbound_layers_without_params(layer):
"""Return inbound layers.
Parameters
----------
layer: Keras.layers
A Keras layer.
Returns
-------
: list[Keras.layers]
List of inbound layers.
"""
return [layer for layer in get_inbound_layers(layer)
if not has_weights(layer)]
Example 4: get_outbound_layers
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
def get_outbound_layers(layer):
"""Return outbound layers.
Parameters
----------
layer: Keras.layers
A Keras layer.
Returns
-------
: list[Keras.layers]
List of outbound layers.
"""
try:
# noinspection PyProtectedMember
outbound_nodes = layer._outbound_nodes
except AttributeError: # For Keras backward-compatibility.
outbound_nodes = layer.outbound_nodes
return [on.outbound_layer for on in outbound_nodes]
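A small usage sketch on a toy functional model (illustrative, assumed layer names):

from tensorflow.keras import layers, Model

inp = layers.Input(shape=(8,))
x = layers.Dense(4, name='dense')(inp)
out = layers.Activation('relu', name='act')(x)
model = Model(inp, out)

print([l.name for l in get_outbound_layers(model.get_layer('dense'))])  # ['act']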
Example 5: get_outbound_activation
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
def get_outbound_activation(layer):
"""
Iterate over 2 outbound layers to find an activation layer. If there is no
activation layer, take the activation of the current layer.
Parameters
----------
layer: Union[keras.layers.Conv2D, keras.layers.Dense]
Layer
Returns
-------
activation: str
Name of outbound activation type.
"""
    activation = layer.activation.__name__
    outbound = layer
    for _ in range(2):
        outbound_layers = get_outbound_layers(outbound)
        # Stop if the graph branches or ends; otherwise step to the next layer.
        if len(outbound_layers) != 1:
            break
        outbound = outbound_layers[0]
        if get_type(outbound) == 'Activation':
            activation = outbound.activation.__name__
    return activation
Example 6: transition_block
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
def transition_block(x, reduction, name, pool=True):
"""A transition block.
# Arguments
x: input tensor.
reduction: float, compression rate at transition layers.
name: string, block label.
# Returns
output tensor for the block.
"""
bn_axis = 3 if backend.image_data_format() == "channels_last" else 1
x = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5, name=name + "_bn")(x)
x = layers.Activation("relu", name=name + "_relu")(x)
x = layers.Conv2D(
int(backend.int_shape(x)[bn_axis] * reduction),
1,
use_bias=False,
name=name + "_conv",
)(x)
if pool:
x = layers.AveragePooling2D(2, strides=2, name=name + "_pool")(x)
return x
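A usage sketch (assumed imports and input shape, in the spirit of DenseNet):

from tensorflow.keras import layers, backend, Input, Model

inputs = Input(shape=(32, 32, 64))
outputs = transition_block(inputs, reduction=0.5, name='transition1')
model = Model(inputs, outputs)
model.summary()  # 64 channels compressed to 32; spatial size halved by the pooling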
Example 7: inference_network
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
def inference_network(x, latent_dim, hidden_size):
"""Construct an inference network parametrizing a Gaussian.
Args:
x: A batch of MNIST digits.
latent_dim: The latent dimensionality.
hidden_size: The size of the neural net hidden layers.
Returns:
mu: Mean parameters for the variational family Normal
sigma: Standard deviation parameters for the variational family Normal
"""
inference_net = tfk.Sequential([
tfkl.Flatten(),
tfkl.Dense(hidden_size, activation=tf.nn.relu),
tfkl.Dense(hidden_size, activation=tf.nn.relu),
tfkl.Dense(latent_dim * 2, activation=None)
])
gaussian_params = inference_net(x)
# The mean parameter is unconstrained
mu = gaussian_params[:, :latent_dim]
# The standard deviation must be positive. Parametrize with a softplus
sigma = tf.nn.softplus(gaussian_params[:, latent_dim:])
return mu, sigma
Developer: altosaar | Project: variational-autoencoder | Lines of code: 26 | Source file: train_variational_autoencoder_tensorflow.py
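The aliases above are assumed to be tfk = tf.keras and tfkl = tf.keras.layers. The returned parameters can then be used to draw a latent sample via the reparameterization trick, for example:

import tensorflow as tf

tfk = tf.keras
tfkl = tf.keras.layers

x = tf.random.uniform((32, 28, 28, 1))             # fake batch of MNIST digits
mu, sigma = inference_network(x, latent_dim=10, hidden_size=200)
z = mu + sigma * tf.random.normal(tf.shape(mu))    # z ~ N(mu, sigma^2)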
Example 8: get_kwargs
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
def get_kwargs():
return {
'backend': tfkeras.backend,
'layers': tfkeras.layers,
'models': tfkeras.models,
'utils': tfkeras.utils,
}
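This pattern injects the framework submodules into model-building functions so the same code can run against different Keras backends. A hypothetical consumer, assuming `import tensorflow.keras as tfkeras` at module level:

import tensorflow.keras as tfkeras

def build_head(num_classes, backend, layers, models, utils):
    inp = layers.Input(shape=(7, 7, 1280))
    x = layers.GlobalAveragePooling2D()(inp)
    out = layers.Dense(num_classes, activation='softmax')(x)
    return models.Model(inp, out)

model = build_head(10, **get_kwargs())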
Example 9: _layer_from_dict
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
from tensorflow.keras.layers import Layer


def _layer_from_dict(layer_type: str, *args, **kwargs) -> Layer:
    """Instantiate a Keras layer class given its name, e.g. 'Dense' or 'Conv2D'."""
    from tensorflow.keras import layers
    cls = getattr(layers, layer_type)
    assert issubclass(cls, Layer)
    return cls(*args, **kwargs)
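Example usage with hypothetical layer specs, as they might come from a config file:

dense = _layer_from_dict('Dense', units=64, activation='relu')
conv = _layer_from_dict('Conv2D', filters=32, kernel_size=3, padding='same')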
Example 10: test_replace_imports
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
def test_replace_imports():
python_code = """
import keras
from keras import backend as K
import os
import keras_contrib
import keras_contrib.layers as lay
import keras.layers
from keras.layers import Dense
if K.backend() == 'tensorflow':
import tensorflow as tf
function = tf.max
"""
expected_code = """
from tensorflow import keras
from tensorflow.keras import backend as K
import os
import keras_contrib
import keras_contrib.layers as lay
import tensorflow.keras.layers
from tensorflow.keras.layers import Dense
if K.backend() == 'tensorflow':
import tensorflow as tf
function = tf.max
"""
code_with_replacement = replace_imports_in_text(python_code, False)
assert expected_code == code_with_replacement
assert python_code == replace_imports_in_text(code_with_replacement, True)
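The replace_imports_in_text helper under test is defined elsewhere in the project. Purely to illustrate the idea (a hypothetical sketch, not the actual keras-contrib implementation), a regex-based converter could look roughly like this:

import re

def replace_imports_in_text(string, revert):
    # Rewrite plain `keras` imports to `tensorflow.keras` (or back when
    # revert=True), while leaving `keras_contrib` imports untouched.
    if revert:
        string = string.replace('from tensorflow import keras', 'import keras')
        return re.sub(r'tensorflow\.keras', 'keras', string)
    string = re.sub(r'^(\s*)import keras$', r'\1from tensorflow import keras',
                    string, flags=re.MULTILINE)
    return re.sub(r'(?<![\w.])keras(?=\.|\s+import)', 'tensorflow.keras', string)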
Example 11: get_layer_iterable
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
def get_layer_iterable(self):
"""Get an iterable over the layers of the network.
Returns
-------
layers: list
"""
pass
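In a Keras-specific parser subclass this is typically just the model's layer list (a sketch based on the `self.input_model` attribute used elsewhere in this file, not necessarily the original implementation):

def get_layer_iterable(self):
    return self.input_model.layers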
Example 12: get_inbound_layers_with_parameters
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
def get_inbound_layers_with_parameters(self, layer):
"""Iterate until inbound layers are found that have parameters.
Parameters
----------
layer:
Layer
Returns
-------
: list
List of inbound layers.
"""
inbound = layer
while True:
inbound = self.get_inbound_layers(inbound)
if len(inbound) == 1:
inbound = inbound[0]
if self.has_weights(inbound):
return [inbound]
else:
result = []
for inb in inbound:
if self.has_weights(inb):
result.append(inb)
else:
result += self.get_inbound_layers_with_parameters(inb)
return result
Example 13: get_inbound_names
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
def get_inbound_names(self, layer, name_map):
"""Get names of inbound layers.
Parameters
----------
layer:
Layer
name_map: dict
Maps the name of a layer to the `id` of the layer object.
Returns
-------
: list
The names of inbound layers.
"""
inbound = self.get_inbound_layers(layer)
for ib in range(len(inbound)):
for _ in range(len(self.layers_to_skip)):
if self.get_type(inbound[ib]) in self.layers_to_skip:
inbound[ib] = self.get_inbound_layers(inbound[ib])[0]
else:
break
if len(self._layer_list) == 0 or \
any([self.get_type(inb) == 'InputLayer' for inb in inbound]):
return ['input']
else:
inb_idxs = [name_map[str(id(inb))] for inb in inbound]
return [self._layer_list[i]['name'] for i in inb_idxs]
Example 14: get_inbound_layers
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
def get_inbound_layers(self, layer):
"""Get inbound layers of ``layer``.
Returns
-------
inbound: Sequence
"""
pass
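A Keras-backed implementation would mirror get_outbound_layers above, reading the layer's inbound nodes (a sketch assuming tf.keras node attributes):

def get_inbound_layers(self, layer):
    try:
        # noinspection PyProtectedMember
        inbound = layer._inbound_nodes[0].inbound_layers
    except AttributeError:  # For Keras backward-compatibility.
        inbound = layer.inbound_nodes[0].inbound_layers
    # Newer tf.keras returns a single layer instead of a list when there is
    # only one inbound layer.
    return list(inbound) if isinstance(inbound, (list, tuple)) else [inbound]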
Example 15: absorb_bn_parameters
# Required import: from tensorflow import keras [as alias]
# Or: from tensorflow.keras import layers [as alias]
import numpy as np


def absorb_bn_parameters(weight, bias, mean, var_eps_sqrt_inv, gamma, beta,
                         axis, image_data_format, is_depthwise=False):
    """
    Absorb the parameters of a batch-normalization layer into the previous
    layer, i.e. fold ``weight_bn = weight * gamma * var_eps_sqrt_inv`` and
    ``bias_bn = beta + (bias - mean) * gamma * var_eps_sqrt_inv``, where
    ``var_eps_sqrt_inv = 1 / sqrt(var + epsilon)``.
    """
    # Normalize a negative axis (e.g. -1) to a positive layer-dimension index.
    axis = weight.ndim + axis if axis < 0 else axis
    print("Using BatchNorm axis {}.".format(axis))
# Map batch norm axis from layer dimension space to kernel dimension space.
# Assumes that kernels are shaped like
# [height, width, num_input_channels, num_output_channels],
# and layers like [batch_size, channels, height, width] or
# [batch_size, height, width, channels].
if weight.ndim == 4:
channel_axis = 2 if is_depthwise else 3
if image_data_format == 'channels_first':
layer2kernel_axes_map = [None, channel_axis, 0, 1]
else:
layer2kernel_axes_map = [None, 0, 1, channel_axis]
axis = layer2kernel_axes_map[axis]
broadcast_shape = [1] * weight.ndim
broadcast_shape[axis] = weight.shape[axis]
var_eps_sqrt_inv = np.reshape(var_eps_sqrt_inv, broadcast_shape)
gamma = np.reshape(gamma, broadcast_shape)
beta = np.reshape(beta, broadcast_shape)
bias = np.reshape(bias, broadcast_shape)
mean = np.reshape(mean, broadcast_shape)
bias_bn = np.ravel(beta + (bias - mean) * gamma * var_eps_sqrt_inv)
weight_bn = weight * gamma * var_eps_sqrt_inv
return weight_bn, bias_bn
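A quick NumPy check of the folding formula on a Dense-style 2-D kernel (made-up shapes, purely for illustration):

import numpy as np

rng = np.random.default_rng(0)
x = rng.normal(size=(5, 8))          # batch of 5 inputs with 8 features
weight = rng.normal(size=(8, 4))     # Dense kernel: [in_features, out_features]
bias = rng.normal(size=4)
mean, var = rng.normal(size=4), rng.uniform(0.5, 2.0, size=4)
gamma, beta = rng.normal(size=4), rng.normal(size=4)
inv = 1.0 / np.sqrt(var + 1e-5)      # var_eps_sqrt_inv

w_bn, b_bn = absorb_bn_parameters(weight, bias, mean, inv, gamma, beta,
                                  axis=-1, image_data_format='channels_last')

y_bn = gamma * ((x @ weight + bias) - mean) * inv + beta  # Dense followed by BatchNorm
y_folded = x @ w_bn + b_bn                                # folded Dense alone
assert np.allclose(y_bn, y_folded)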