This page collects typical usage examples of the Python method TensorflowUtils.leaky_relu. If you have been wondering what TensorflowUtils.leaky_relu does, how to call it, or where to find working examples of it, the curated code samples below should help. You can also explore the other methods of the TensorflowUtils module.
Two code examples of TensorflowUtils.leaky_relu are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
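TensorflowUtils is a project-local helper module (common in FCN/GAN training scripts), not part of TensorFlow itself, so its implementation is not shown on this page. As a minimal sketch, a leaky_relu(x, alpha, name) helper with this call signature is typically written along these lines (the real body in TensorflowUtils may differ):

import tensorflow as tf

def leaky_relu(x, alpha, name=None):
    # For x > 0, max(x, alpha * x) == x; for x < 0 it is alpha * x,
    # i.e. a standard leaky ReLU with negative slope `alpha`.
    return tf.maximum(x, alpha * x, name=name)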
Example 1: discriminator
# Required import: import TensorflowUtils [as alias]
# Or: from TensorflowUtils import leaky_relu [as alias]
def discriminator(input_images, train_mode):
    # DCGAN-style discriminator: four strided 5x5 convolutions (64 to 512
    # filters), each followed by batch norm and leaky ReLU, ending in a
    # single linear logit.
    # dropout_prob = 1.0
    # if train_mode:
    #     dropout_prob = 0.5
    W_conv0 = utils.weight_variable([5, 5, NUM_OF_CHANNELS, 64 * 1], name="W_conv0")
    b_conv0 = utils.bias_variable([64 * 1], name="b_conv0")
    h_conv0 = utils.conv2d_strided(input_images, W_conv0, b_conv0)
    h_bn0 = h_conv0  # utils.batch_norm(h_conv0, 64 * 1, train_mode, scope="disc_bn0")
    h_relu0 = utils.leaky_relu(h_bn0, 0.2, name="h_relu0")
    utils.add_activation_summary(h_relu0)

    W_conv1 = utils.weight_variable([5, 5, 64 * 1, 64 * 2], name="W_conv1")
    b_conv1 = utils.bias_variable([64 * 2], name="b_conv1")
    h_conv1 = utils.conv2d_strided(h_relu0, W_conv1, b_conv1)
    h_bn1 = utils.batch_norm(h_conv1, 64 * 2, train_mode, scope="disc_bn1")
    h_relu1 = utils.leaky_relu(h_bn1, 0.2, name="h_relu1")
    utils.add_activation_summary(h_relu1)

    W_conv2 = utils.weight_variable([5, 5, 64 * 2, 64 * 4], name="W_conv2")
    b_conv2 = utils.bias_variable([64 * 4], name="b_conv2")
    h_conv2 = utils.conv2d_strided(h_relu1, W_conv2, b_conv2)
    h_bn2 = utils.batch_norm(h_conv2, 64 * 4, train_mode, scope="disc_bn2")
    h_relu2 = utils.leaky_relu(h_bn2, 0.2, name="h_relu2")
    utils.add_activation_summary(h_relu2)

    W_conv3 = utils.weight_variable([5, 5, 64 * 4, 64 * 8], name="W_conv3")
    b_conv3 = utils.bias_variable([64 * 8], name="b_conv3")
    h_conv3 = utils.conv2d_strided(h_relu2, W_conv3, b_conv3)
    h_bn3 = utils.batch_norm(h_conv3, 64 * 8, train_mode, scope="disc_bn3")
    h_relu3 = utils.leaky_relu(h_bn3, 0.2, name="h_relu3")
    utils.add_activation_summary(h_relu3)

    # Four stride-2 convolutions shrink each spatial dimension by 16x,
    # so flatten to (IMAGE_SIZE // 16)^2 * 512 features per image.
    shape = h_relu3.get_shape().as_list()
    h_3 = tf.reshape(h_relu3, [FLAGS.batch_size, (IMAGE_SIZE // 16) * (IMAGE_SIZE // 16) * shape[3]])

    W_4 = utils.weight_variable([h_3.get_shape().as_list()[1], 1], name="W_4")
    b_4 = utils.bias_variable([1], name="b_4")
    h_4 = tf.matmul(h_3, W_4) + b_4

    # Return (probability, raw logit, last conv-layer features).
    return tf.nn.sigmoid(h_4), h_4, h_relu3
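The constants NUM_OF_CHANNELS, IMAGE_SIZE, and FLAGS.batch_size are defined elsewhere in the script this excerpt comes from. A minimal, hypothetical usage sketch (TF1-style graph code, with values for those constants assumed purely for illustration):

import tensorflow as tf
import TensorflowUtils as utils

# Assumed values; the real script defines these itself.
NUM_OF_CHANNELS = 3   # RGB input
IMAGE_SIZE = 64       # 64x64 images, so h_relu3 is 4x4 spatially
tf.flags.DEFINE_integer("batch_size", 64, "batch size")
FLAGS = tf.flags.FLAGS

images = tf.placeholder(tf.float32,
                        [FLAGS.batch_size, IMAGE_SIZE, IMAGE_SIZE, NUM_OF_CHANNELS])
prob, logits, features = discriminator(images, train_mode=True)

The 0.2 negative slope passed to leaky_relu at every layer matches the convention popularized by the DCGAN paper, which recommends leaky ReLU activations throughout the discriminator.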
Example 2: activation_function
# Required import: import TensorflowUtils [as alias]
# Or: from TensorflowUtils import leaky_relu [as alias]
def activation_function(x, name=""):
    # Look up the activation chosen via the FLAGS.activation string flag.
    # Each option is wrapped in a lambda so that only the selected op is
    # actually constructed and added to the graph.
    activation_dict = {'relu': lambda: tf.nn.relu(x, name),
                       'elu': lambda: tf.nn.elu(x, name),
                       'lrelu': lambda: utils.leaky_relu(x, 0.2, name),
                       'tanh': lambda: tf.nn.tanh(x, name),
                       'sigmoid': lambda: tf.nn.sigmoid(x, name)}
    act = activation_dict[FLAGS.activation]()
    utils.add_activation_summary(act)
    return act
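FLAGS.activation is a string flag defined elsewhere in the script. A minimal, hypothetical usage sketch (flag name and tensor shape assumed for illustration):

import tensorflow as tf
import TensorflowUtils as utils

tf.flags.DEFINE_string("activation", "lrelu",
                       "one of: relu / elu / lrelu / tanh / sigmoid")
FLAGS = tf.flags.FLAGS

x = tf.placeholder(tf.float32, [None, 128])
h = activation_function(x, name="hidden_act")  # applies leaky_relu here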