This article collects and summarizes typical usage examples of the Python function tensorflow.python.ops.gen_nn_ops.relu. If you have been struggling with questions such as: what exactly does relu do, and how is it used in practice? Then the curated function code examples below may help.
A total of 11 code examples of the relu function are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
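Before diving into the excerpts, a minimal self-contained sketch may help orient readers. The import paths here are assumptions based on how the snippets below use the symbol; gen_nn_ops.relu is the low-level generated op wrapper that backs tf.nn.relu.

# Minimal sketch (assumed imports; gen_nn_ops is a TensorFlow-internal module).
import numpy as np
from tensorflow.python import keras
from tensorflow.python.ops import gen_nn_ops

inputs = keras.Input(shape=(4,))
outputs = gen_nn_ops.relu(inputs)   # clamps negative entries to zero
model = keras.Model(inputs, outputs)
print(model.predict(np.array([[-1., 0., -2., 1.]], dtype='float32')))
# expected output: [[0. 0. 0. 1.]]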
Example 1: _multiple_ops_in_middle
def _multiple_ops_in_middle():
    inputs = keras.Input(shape=(10,))
    x = keras.layers.Dense(10)(inputs)
    x = gen_nn_ops.relu(x, name='hey')
    x = gen_nn_ops.relu(x, name='hey2')
    outputs = keras.layers.Dense(10)(x)
    return inputs, outputs
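Helpers like this one (and the similar builders in Examples 5, 9, 10, and 11) only construct and return the symbolic (inputs, outputs) pair; presumably the surrounding test wraps them into a functional model, roughly as in this hedged sketch (not part of the excerpt):

inputs, outputs = _multiple_ops_in_middle()
model = keras.Model(inputs, outputs)   # the raw relu ops become auto-wrapped layers
model.compile('sgd', 'mse')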
Example 2: test_numerical_correctness_simple
def test_numerical_correctness_simple(self):
    x = ops.convert_to_tensor([[-1., 0., -2., 1.]])
    inputs = keras.Input(shape=(4,))
    outputs = gen_nn_ops.relu(inputs)
    model = keras.Model(inputs, outputs)
    y = self.evaluate(model(x))
    self.assertAllClose(y, [[0., 0., 0., 1.]])
Example 3: test_serialization
def test_serialization(self):
    x = ops.convert_to_tensor([-1., 0., -2., 1.])
    inputs = keras.Input(shape=(4,))
    outputs = gen_nn_ops.relu(inputs)
    model1 = keras.Model(inputs, outputs)
    y1 = self.evaluate(model1(x))
    model2 = model1.from_config(model1.get_config())
    y2 = self.evaluate(model2(x))
    self.assertAllClose(y1, y2)
Example 4: test_built
def test_built(self):
    inputs = keras.Input(shape=(10,))
    outputs = gen_nn_ops.relu(inputs)
    model = keras.Model(inputs, outputs)
    model.compile('sgd', 'mse')
    for layer in model.layers:
        self.assertTrue(layer.built)
    # Test something that requires Layers to be built.
    model.summary()
Example 5: _reuse_op
def _reuse_op():
    inputs = keras.Input(shape=(10,))
    # This op needs to be checked multiple times.
    x = gen_nn_ops.relu(inputs)
    y = keras.layers.Dense(10)(x)
    x2 = x * 2
    y2 = keras.layers.Dense(10)(x2)
    outputs = y + y2
    return inputs, outputs
Example 6: _construct_graph_of_size
def _construct_graph_of_size(size):
    start = time.time()
    x = keras.backend.placeholder(shape=(10, 4))
    for _ in range(size):
        x = keras.layers.Dense(4)(x)
        x = gen_nn_ops.relu(x)
    end = time.time()
    return end - start
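Presumably this helper is used to check that building a deep functional graph stays fast; a hedged sketch of such a comparison (the sizes are made up for illustration and are not from the original test):

small_time = _construct_graph_of_size(50)
large_time = _construct_graph_of_size(150)
# Construction time should scale roughly linearly with depth, so the larger
# graph should not be disproportionately slower to build.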
Example 7: per_image_whitening
def per_image_whitening(image):
    """Linearly scales `image` to have zero mean and unit norm.

    This op computes `(x - mean) / adjusted_stddev`, where `mean` is the average
    of all values in image, and
    `adjusted_stddev = max(stddev, 1.0/sqrt(image.NumElements()))`.
    `stddev` is the standard deviation of all values in `image`. It is capped
    away from zero to protect against division by 0 when handling uniform images.

    Note that this implementation is limited:
    * It only whitens based on the statistics of an individual image.
    * It does not take into account the covariance structure.

    Args:
        image: 3-D tensor of shape `[height, width, channels]`.

    Returns:
        The whitened image with same shape as `image`.

    Raises:
        ValueError: if the shape of 'image' is incompatible with this function.
    """
    image = ops.convert_to_tensor(image, name='image')
    _Check3DImage(image, require_static=False)
    num_pixels = math_ops.reduce_prod(array_ops.shape(image))
    image = math_ops.cast(image, dtype=dtypes.float32)
    image_mean = math_ops.reduce_mean(image)

    variance = (math_ops.reduce_mean(math_ops.square(image)) -
                math_ops.square(image_mean))
    variance = gen_nn_ops.relu(variance)
    stddev = math_ops.sqrt(variance)

    # Apply a minimum normalization that protects us against uniform images.
    min_stddev = math_ops.inv(
        math_ops.sqrt(math_ops.cast(num_pixels, dtypes.float32)))
    pixel_value_scale = math_ops.maximum(stddev, min_stddev)
    pixel_value_offset = image_mean

    image = math_ops.sub(image, pixel_value_offset)
    image = math_ops.div(image, pixel_value_scale)
    return image
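The gen_nn_ops.relu call in this example is not an activation at all: it clamps the computed variance at zero, since variance = E[x²] − (E[x])² can come out slightly negative due to floating-point error. A hedged usage sketch, assuming the rest of the original module (including the private _Check3DImage helper and the math_ops/array_ops/dtypes imports) is available:

import numpy as np
img = np.random.uniform(size=(32, 32, 3)).astype('float32')   # fake 3-D image
whitened = per_image_whitening(img)
# The result has (approximately) zero mean and unit variance per image.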
Example 8: test_gradient_tape_in_function
def test_gradient_tape_in_function(self):
    z = keras.Input((1,))
    x = math_ops.matmul(z, constant_op.constant(2.0, shape=(1, 1)))
    x = math_ops.reduce_mean(x, axis=0, keepdims=True)
    h = gen_nn_ops.relu(x)
    m = keras.Model(z, h)

    @def_function.function()
    def f(x):
        with backprop.GradientTape() as t:
            t.watch(x)
            z = m(x ** 2)
        grads = t.gradient(z, x)
        return grads

    self.assertAllEqual(f(constant_op.constant(10.0, shape=(1, 1))),
                        constant_op.constant(40.0, shape=(1, 1)))

    f = def_function.function(f)
    self.assertAllEqual(f(constant_op.constant(10.0, shape=(1, 1))),
                        constant_op.constant(40.0, shape=(1, 1)))
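The expected value of 40.0 follows directly from the graph: for positive inputs the model reduces to m(x) = 2·x (the reduce_mean over a 1×1 tensor and the relu are identities here), so f computes z = 2·x² and dz/dx = 4·x, which is 40 at x = 10.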
Example 9: _single_op_at_end
def _single_op_at_end():
    inputs = keras.Input(shape=(10,))
    x = keras.layers.Dense(10)(inputs)
    outputs = gen_nn_ops.relu(x, name='hey')
    return inputs, outputs
Example 10: _single_op_in_middle
def _single_op_in_middle():
    inputs = keras.Input(shape=(10,))
    x = keras.layers.Dense(10)(inputs)
    x = gen_nn_ops.relu(x)
    outputs = keras.layers.Dense(10)(x)
    return inputs, outputs
Example 11: _multiple_ops_at_end
def _multiple_ops_at_end():
    inputs = keras.Input(shape=(10,))
    x = keras.layers.Dense(10)(inputs)
    x = gen_nn_ops.relu(x)
    outputs = gen_nn_ops.relu(x)
    return inputs, outputs