This page collects typical usage examples of the Python method keras.applications.mobilenet.relu6. If you are unsure how mobilenet.relu6 is used in practice, the curated examples below may help; you can also browse further usage examples from its containing module, keras.applications.mobilenet.
Eight code examples of the mobilenet.relu6 method are shown below, sorted by popularity by default.
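Note: relu6 has moved between Keras releases. Up to roughly Keras 2.1.x it is exported from keras.applications.mobilenet, while from Keras 2.2 onward the applications module no longer provides it and the capped activation is usually expressed through keras.layers.ReLU(6.0). Several of the examples below therefore wrap the import in a try/except. The following is a minimal, version-tolerant sketch of that pattern; it is illustrative and not taken from any of the examples.

# Illustrative sketch: obtain a relu6 activation regardless of Keras version.
try:
    # Older Keras (roughly <= 2.1.x) ships relu6 with the MobileNet application.
    from keras.applications.mobilenet import relu6
except ImportError:
    # Newer Keras dropped it; an equivalent is a ReLU capped at 6.
    from keras import backend as K

    def relu6(x):
        return K.relu(x, max_value=6)

# relu6 can now be passed to keras.layers.Activation, e.g. Activation(relu6).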
Example 1: conv_block
# Required import: from keras.applications import mobilenet [as alias]
# Or: from keras.applications.mobilenet import relu6 [as alias]
def conv_block(inputs, filters, weight_decay, name, kernel=(3, 3), strides=(1, 1)):
    '''
    Normal convolution block that performs conv + BN + relu6 operations.
    :param inputs: input Keras tensor in (B, H, W, C_in)
    :param filters: number of filters in the convolution layer
    :param weight_decay: l2 regularization factor applied to the convolution kernel
    :param name: name for the convolutional layer
    :param kernel: kernel size
    :param strides: strides for the convolution
    :return: output tensor in (B, H_new, W_new, filters)
    '''
    channel_axis = 1 if K.image_data_format() == 'channels_first' else -1
    x = Conv2D(filters, kernel,
               padding='same',
               use_bias=False,
               kernel_regularizer=l2(weight_decay),
               strides=strides,
               name=name)(inputs)
    x = BatchNormalization(axis=channel_axis, epsilon=1e-5, momentum=0.9, name=name + '_bn')(x)
    return Relu6(x, name=name + '_relu')
Example 2: _conv_block
# Required import: from keras.applications import mobilenet [as alias]
# Or: from keras.applications.mobilenet import relu6 [as alias]
def _conv_block(inputs, filters, kernel, strides):
"""Convolution Block
This function defines a 2D convolution operation with BN and relu6.
# Arguments
inputs: Tensor, input tensor of conv layer.
filters: Integer, the dimensionality of the output space.
kernel: An integer or tuple/list of 2 integers, specifying the
width and height of the 2D convolution window.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the convolution along the width
and height.Can be a single integer to specify the same
value for all spatial dimensions.
# Returns
Output tensor.
"""
channel_axis = 1 if K.image_data_format() == 'channels_first' else -1
x = Conv2D(filters, kernel, padding='same', strides=strides)(inputs)
x = BatchNormalization(axis=channel_axis)(x)
return Activation(relu6)(x)
Example 3: __init__
# Required import: from keras.applications import mobilenet [as alias]
# Or: from keras.applications.mobilenet import relu6 [as alias]
def __init__(self, model):
    super(Keras2Parser, self).__init__()

    # load model files into Keras graph
    if isinstance(model, _string_types):
        try:
            # Keras 2.1.6
            from keras.applications.mobilenet import relu6
            from keras.applications.mobilenet import DepthwiseConv2D
            model = _keras.models.load_model(
                model,
                custom_objects={
                    'relu6': _keras.applications.mobilenet.relu6,
                    'DepthwiseConv2D': _keras.applications.mobilenet.DepthwiseConv2D
                }
            )
        except:
            # Keras 2.2.2
            import keras.layers as layers
            model = _keras.models.load_model(
                model,
                custom_objects={
                    'relu6': layers.ReLU(6, name='relu6'),
                    'DepthwiseConv2D': layers.DepthwiseConv2D
                }
            )
        self.weight_loaded = True
    elif isinstance(model, tuple):
        model = self._load_model(model[0], model[1])
    else:
        assert False

    # _keras.utils.plot_model(model, "model.png", show_shapes = True)

    # Build network graph
    self.data_format = _keras.backend.image_data_format()
    self.keras_graph = Keras2Graph(model)
    self.keras_graph.build()
    self.lambda_layer_count = 0
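As the isinstance checks above show, the constructor accepts either a single model file path or a (network_json, weights) tuple. Below is a hedged usage sketch, assuming the surrounding MMdnn-style Keras2Parser class is importable; the file paths are placeholders.

# Hypothetical usage; the file paths are placeholders.
parser = Keras2Parser('mobilenet_model.h5')              # whole model in a single .h5 file
# parser = Keras2Parser(('network.json', 'weights.h5'))  # or a (json, weights) tuple
print(parser.data_format)                                # e.g. 'channels_last'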
Example 4: Relu6
# Required import: from keras.applications import mobilenet [as alias]
# Or: from keras.applications.mobilenet import relu6 [as alias]
def Relu6(x, **kwargs):
    return Activation(relu6, **kwargs)(x)
Example 5: _bottleneck
# Required import: from keras.applications import mobilenet [as alias]
# Or: from keras.applications.mobilenet import relu6 [as alias]
def _bottleneck(inputs, filters, kernel, t, s, r=False):
"""Bottleneck
This function defines a basic bottleneck structure.
# Arguments
inputs: Tensor, input tensor of conv layer.
filters: Integer, the dimensionality of the output space.
kernel: An integer or tuple/list of 2 integers, specifying the
width and height of the 2D convolution window.
t: Integer, expansion factor.
t is always applied to the input size.
s: An integer or tuple/list of 2 integers,specifying the strides
of the convolution along the width and height.Can be a single
integer to specify the same value for all spatial dimensions.
r: Boolean, Whether to use the residuals.
# Returns
Output tensor.
"""
channel_axis = 1 if K.image_data_format() == 'channels_first' else -1
tchannel = K.int_shape(inputs)[channel_axis] * t
x = _conv_block(inputs, tchannel, (1, 1), (1, 1))
x = DepthwiseConv2D(kernel, strides=(s, s),
depth_multiplier=1, padding='same')(x)
x = BatchNormalization(axis=channel_axis)(x)
x = Activation(relu6)(x)
x = Conv2D(filters, (1, 1), strides=(1, 1), padding='same')(x)
x = BatchNormalization(axis=channel_axis)(x)
if r:
x = add([x, inputs])
return x
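This bottleneck is the MobileNetV2 building block: a 1x1 expansion by factor t, a 3x3 depthwise convolution, and a linear 1x1 projection, with a residual add when r is True. Below is a hedged sketch that stacks it with _conv_block from Example 2; it assumes relu6 and the Keras layers used inside those functions are already imported, and the input shape and filter counts are made up for illustration.

from keras.layers import Input
from keras.models import Model

# Illustrative only: a tiny MobileNetV2-style stem built from the blocks above.
inputs = Input(shape=(224, 224, 3))
x = _conv_block(inputs, 32, (3, 3), strides=(2, 2))
x = _bottleneck(x, 16, (3, 3), t=1, s=1)          # expand, depthwise, linear projection
x = _bottleneck(x, 16, (3, 3), t=6, s=1, r=True)  # same shape and stride 1, so the residual add is valid
model = Model(inputs, x)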
Example 6: _load_model
# Required import: from keras.applications import mobilenet [as alias]
# Or: from keras.applications.mobilenet import relu6 [as alias]
def _load_model(self, model_network_path, model_weight_path):
"""Load a keras model from disk
Parameters
----------
model_network_path: str
Path where the model network path is (json file)
model_weight_path: str
Path where the model network weights are (hd5 file)
Returns
-------
model: A keras model
"""
from keras.models import model_from_json
# Load the model network
json_file = open(model_network_path, 'r')
loaded_model_json = json_file.read()
json_file.close()
# Load the model weights
try:
from keras.applications.mobilenet import relu6
from keras.applications.mobilenet import DepthwiseConv2D
loaded_model = model_from_json(loaded_model_json, custom_objects={
'relu6': _keras.applications.mobilenet.relu6,
'DepthwiseConv2D': _keras.applications.mobilenet.DepthwiseConv2D})
except:
import keras.layers as layers
loaded_model = model_from_json(loaded_model_json, custom_objects={
'relu6': layers.ReLU(6, name='relu6'),
'DepthwiseConv2D': layers.DepthwiseConv2D})
if model_weight_path:
if os.path.isfile(model_weight_path):
loaded_model.load_weights(model_weight_path)
self.weight_loaded = True
print("Network file [{}] and [{}] is loaded successfully.".format(model_network_path, model_weight_path))
else:
print("Warning: Weights File [%s] is not found." % (model_weight_path))
return loaded_model
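The (json, weights) pair that this loader expects can be produced from any Keras model with to_json() and save_weights(). A hedged sketch that prepares such a pair from the stock MobileNet; the file names are placeholders.

from keras.applications.mobilenet import MobileNet

# Hypothetical preparation of the files _load_model consumes; paths are placeholders.
model = MobileNet(input_shape=(224, 224, 3), weights=None)
with open('mobilenet_network.json', 'w') as f:
    f.write(model.to_json())
model.save_weights('mobilenet_weights.h5')
# parser._load_model('mobilenet_network.json', 'mobilenet_weights.h5') would then rebuild the model.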
Example 7: _get_activation_name_from_keras_layer
# Required import: from keras.applications import mobilenet [as alias]
# Or: from keras.applications.mobilenet import relu6 [as alias]
def _get_activation_name_from_keras_layer(keras_layer):
    if isinstance(keras_layer, _keras.layers.advanced_activations.LeakyReLU):
        non_linearity = "LEAKYRELU"
    elif isinstance(keras_layer, _keras.layers.advanced_activations.PReLU):
        non_linearity = "PRELU"
    elif isinstance(keras_layer, _keras.layers.advanced_activations.ELU):
        non_linearity = "ELU"
    elif isinstance(keras_layer, _keras.layers.advanced_activations.ThresholdedReLU):
        non_linearity = "THRESHOLDEDRELU"
    elif isinstance(keras_layer, _keras.layers.advanced_activations.Softmax):
        non_linearity = "SOFTMAX"
    else:
        import six

        if six.PY2:
            act_name = keras_layer.activation.func_name
        else:
            act_name = keras_layer.activation.__name__

        if act_name == "softmax":
            non_linearity = "SOFTMAX"
        elif act_name == "sigmoid":
            non_linearity = "SIGMOID"
        elif act_name == "tanh":
            non_linearity = "TANH"
        elif act_name == "relu":
            non_linearity = "RELU"
        elif act_name == "relu6":
            non_linearity = "RELU6"
        elif act_name == "softplus":
            non_linearity = "SOFTPLUS"
        elif act_name == "softsign":
            non_linearity = "SOFTSIGN"
        elif act_name == "hard_sigmoid":
            non_linearity = "SIGMOID_HARD"
        elif act_name == "elu":
            non_linearity = "UNIT_ELU"
        elif act_name == "linear":
            non_linearity = "LINEAR"
        elif act_name == "selu":
            non_linearity = "SELU"
        else:
            non_linearity = "CUSTOM"

    return non_linearity
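For a relu6 activation wrapped in a plain Activation layer, the fallback branch above reads the activation function's __name__ and maps it to "RELU6". A minimal check of that assumption, using the pre-2.2 import location discussed at the top of the page.

from keras.layers import Activation
from keras.applications.mobilenet import relu6  # Keras <= 2.1.x location

layer = Activation(relu6)
print(layer.activation.__name__)  # prints 'relu6', which the mapping above turns into RELU6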
Example 8: test_tiny_mobilenet_arch
# Required import: from keras.applications import mobilenet [as alias]
# Or: from keras.applications.mobilenet import relu6 [as alias]
def test_tiny_mobilenet_arch(self, model_precision=_MLMODEL_FULL_PRECISION):
    def ReLU6(x, name):
        if keras.__version__ >= _StrictVersion("2.2.1"):
            return ReLU(6.0, name=name)(x)
        else:
            return Activation(relu6, name=name)(x)

    img_input = Input(shape=(32, 32, 3))
    x = Conv2D(
        4, (3, 3), padding="same", use_bias=False, strides=(2, 2), name="conv1"
    )(img_input)
    x = BatchNormalization(axis=-1, name="conv1_bn")(x)
    x = ReLU6(x, name="conv1_relu")

    x = DepthwiseConv2D(
        (3, 3),
        padding="same",
        depth_multiplier=1,
        strides=(1, 1),
        use_bias=False,
        name="conv_dw_1",
    )(x)
    x = BatchNormalization(axis=-1, name="conv_dw_1_bn")(x)
    x = ReLU6(x, name="conv_dw_1_relu")

    x = Conv2D(
        8, (1, 1), padding="same", use_bias=False, strides=(1, 1), name="conv_pw_1"
    )(x)
    x = BatchNormalization(axis=-1, name="conv_pw_1_bn")(x)
    x = ReLU6(x, name="conv_pw_1_relu")

    x = DepthwiseConv2D(
        (3, 3),
        padding="same",
        depth_multiplier=1,
        strides=(2, 2),
        use_bias=False,
        name="conv_dw_2",
    )(x)
    x = BatchNormalization(axis=-1, name="conv_dw_2_bn")(x)
    x = ReLU6(x, name="conv_dw_2_relu")

    x = Conv2D(
        8, (1, 1), padding="same", use_bias=False, strides=(2, 2), name="conv_pw_2"
    )(x)
    x = BatchNormalization(axis=-1, name="conv_pw_2_bn")(x)
    x = ReLU6(x, name="conv_pw_2_relu")

    model = Model(inputs=[img_input], outputs=[x])

    self._test_model(model, delta=1e-2, model_precision=model_precision)