This article collects typical usage examples of the Python method TensorflowUtils.leaky_relu. If you have been wondering what TensorflowUtils.leaky_relu does and how to use it, the curated code examples below may help. You can also explore further usage of the TensorflowUtils module, where this method is defined.
The following shows 2 code examples of the TensorflowUtils.leaky_relu method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
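For context, leaky_relu(x, alpha) computes max(alpha * x, x): a ReLU variant that passes a small fraction of negative inputs through instead of zeroing them, which helps keep gradients alive in GAN discriminators. The actual TensorflowUtils implementation is not reproduced on this page; the following is only a minimal sketch of what such a helper typically looks like, with the signature mirroring the calls in the examples below:

import tensorflow as tf

def leaky_relu(x, alpha, name=""):
    # For 0 < alpha < 1, max(alpha * x, x) equals x when x >= 0
    # and alpha * x when x < 0.
    return tf.maximum(alpha * x, x, name=name)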
Example 1: discriminator
# Required module: import TensorflowUtils [as alias]
# Or: from TensorflowUtils import leaky_relu [as alias]
# This example assumes: import tensorflow as tf; import TensorflowUtils as utils
def discriminator(input_images, train_mode):
    # NUM_OF_CHANNELS, IMAGE_SIZE and FLAGS are module-level constants/flags
    # defined elsewhere in the original file.
    # dropout_prob = 1.0
    # if train_mode:
    #     dropout_prob = 0.5
    # Layer 0: strided 5x5 conv, 64 filters; batch norm is disabled here.
    W_conv0 = utils.weight_variable([5, 5, NUM_OF_CHANNELS, 64 * 1], name="W_conv0")
    b_conv0 = utils.bias_variable([64 * 1], name="b_conv0")
    h_conv0 = utils.conv2d_strided(input_images, W_conv0, b_conv0)
    h_bn0 = h_conv0  # utils.batch_norm(h_conv0, 64 * 1, train_mode, scope="disc_bn0")
    h_relu0 = utils.leaky_relu(h_bn0, 0.2, name="h_relu0")
    utils.add_activation_summary(h_relu0)

    # Layer 1: 64 -> 128 filters, then batch norm and leaky ReLU.
    W_conv1 = utils.weight_variable([5, 5, 64 * 1, 64 * 2], name="W_conv1")
    b_conv1 = utils.bias_variable([64 * 2], name="b_conv1")
    h_conv1 = utils.conv2d_strided(h_relu0, W_conv1, b_conv1)
    h_bn1 = utils.batch_norm(h_conv1, 64 * 2, train_mode, scope="disc_bn1")
    h_relu1 = utils.leaky_relu(h_bn1, 0.2, name="h_relu1")
    utils.add_activation_summary(h_relu1)

    # Layer 2: 128 -> 256 filters.
    W_conv2 = utils.weight_variable([5, 5, 64 * 2, 64 * 4], name="W_conv2")
    b_conv2 = utils.bias_variable([64 * 4], name="b_conv2")
    h_conv2 = utils.conv2d_strided(h_relu1, W_conv2, b_conv2)
    h_bn2 = utils.batch_norm(h_conv2, 64 * 4, train_mode, scope="disc_bn2")
    h_relu2 = utils.leaky_relu(h_bn2, 0.2, name="h_relu2")
    utils.add_activation_summary(h_relu2)

    # Layer 3: 256 -> 512 filters.
    W_conv3 = utils.weight_variable([5, 5, 64 * 4, 64 * 8], name="W_conv3")
    b_conv3 = utils.bias_variable([64 * 8], name="b_conv3")
    h_conv3 = utils.conv2d_strided(h_relu2, W_conv3, b_conv3)
    h_bn3 = utils.batch_norm(h_conv3, 64 * 8, train_mode, scope="disc_bn3")
    h_relu3 = utils.leaky_relu(h_bn3, 0.2, name="h_relu3")
    utils.add_activation_summary(h_relu3)

    # Flatten (each strided conv halves the spatial size, hence IMAGE_SIZE // 16)
    # and apply a single linear output unit.
    shape = h_relu3.get_shape().as_list()
    h_3 = tf.reshape(h_relu3, [FLAGS.batch_size, (IMAGE_SIZE // 16) * (IMAGE_SIZE // 16) * shape[3]])
    W_4 = utils.weight_variable([h_3.get_shape().as_list()[1], 1], name="W_4")
    b_4 = utils.bias_variable([1], name="b_4")
    h_4 = tf.matmul(h_3, W_4) + b_4
    return tf.nn.sigmoid(h_4), h_4, h_relu3
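This is a DCGAN-style discriminator: four strided 5x5 convolutions, each halving the spatial resolution (hence IMAGE_SIZE // 16 after four layers) while doubling the channel count, each followed by leaky_relu with slope 0.2, then one linear unit producing the real/fake logit. A hedged usage sketch building on the code above; the constant values and flag definition below are illustrative assumptions, as the real ones live elsewhere in the original file:

# Illustrative assumptions only; adjust to match the surrounding file.
IMAGE_SIZE = 64
NUM_OF_CHANNELS = 3
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_integer("batch_size", 64, "batch size used in the reshape above")

images = tf.placeholder(tf.float32,
                        [FLAGS.batch_size, IMAGE_SIZE, IMAGE_SIZE, NUM_OF_CHANNELS],
                        name="images")
prob, logits, features = discriminator(images, train_mode=True)
# prob:     sigmoid probability that each input image is real, shape [batch_size, 1]
# logits:   pre-sigmoid score, typically fed into a sigmoid cross-entropy loss
# features: the final leaky_relu feature map (useful e.g. for feature matching)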
Example 2: activation_function
# Required module: import TensorflowUtils [as alias]
# Or: from TensorflowUtils import leaky_relu [as alias]
def activation_function(x, name=""):
activation_dict = {'relu': tf.nn.relu(x, name), 'elu': tf.nn.elu(x, name), 'lrelu': utils.leaky_relu(x, 0.2, name),
'tanh': tf.nn.tanh(x, name),
'sigmoid': tf.nn.sigmoid(x, name)}
act = activation_dict[FLAGS.activation]
utils.add_activation_summary(act)
return act
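Here the choice of activation is driven entirely by a command-line flag, so a whole network can be switched from ReLU to leaky ReLU without touching the model code. A minimal sketch of the flag definition this pattern assumes (the flag name "activation" comes from the example; the default value and the call site below are illustrative):

FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("activation", "lrelu", "one of: relu / elu / lrelu / tanh / sigmoid")

h = activation_function(h_conv, name="h_act")  # applies whichever activation the flag selects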