本文整理汇总了Python中TensorflowUtils.weight_variable_xavier_initialized方法的典型用法代码示例。如果您正苦于以下问题:Python TensorflowUtils.weight_variable_xavier_initialized方法的具体用法?Python TensorflowUtils.weight_variable_xavier_initialized怎么用?Python TensorflowUtils.weight_variable_xavier_initialized使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类TensorflowUtils
的用法示例。
在下文中一共展示了TensorflowUtils.weight_variable_xavier_initialized方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: encoder
# 需要导入模块: import TensorflowUtils [as 别名]
# 或者: from TensorflowUtils import weight_variable_xavier_initialized [as 别名]
def encoder(dataset, train_mode):
    """Encode an image batch into a 1024-d embedding.

    Five 3x3 strided convolutions (each followed by batch norm and ReLU)
    halve the spatial resolution at every stage, reducing IMAGE_SIZE by a
    factor of 32 overall, then a fully connected layer projects the
    flattened features to 1024 units.

    :param dataset: input image tensor; assumes shape
        [batch, IMAGE_SIZE, IMAGE_SIZE, 3] -- inferred from the first
        filter's 3 input channels, confirm against caller.
    :param train_mode: train/eval flag forwarded to utils.batch_norm.
    :return: [batch, 1024] embedding tensor (no final activation).
    """
    def _conv_bn_relu(x, in_ch, out_ch, idx):
        # One strided-conv -> batch-norm -> ReLU stage; idx keeps the
        # original variable/scope names (W_conv1, conv1_bn, ...).
        W = utils.weight_variable_xavier_initialized(
            [3, 3, in_ch, out_ch], name="W_conv%d" % idx)
        b = utils.bias_variable([out_ch], name="b_conv%d" % idx)
        h = utils.conv2d_strided(x, W, b)
        h = utils.batch_norm(h, out_ch, train_mode, scope="conv%d_bn" % idx)
        return tf.nn.relu(h)

    with tf.variable_scope("Encoder"):
        # Channel progression for the five conv stages: 3 -> 32 -> ... -> 512.
        channels = [3, 32, 64, 128, 256, 512]
        h = dataset
        for i in range(1, 6):
            with tf.name_scope("enc_conv%d" % i):
                h = _conv_bn_relu(h, channels[i - 1], channels[i], i)
        with tf.name_scope("enc_fc"):
            image_size = IMAGE_SIZE // 32  # five stride-2 convs => /32
            flat_dim = image_size * image_size * 512
            h_flat = tf.reshape(h, [-1, flat_dim])
            W_fc = utils.weight_variable([flat_dim, 1024], name="W_fc")
            b_fc = utils.bias_variable([1024], name="b_fc")
            encoder_val = tf.matmul(h_flat, W_fc) + b_fc
    return encoder_val
示例2: inference_fully_convolutional
# 需要导入模块: import TensorflowUtils [as 别名]
# 或者: from TensorflowUtils import weight_variable_xavier_initialized [as 别名]
def inference_fully_convolutional(dataset):
    """
    Fully convolutional inference on the notMNIST dataset.

    Four strided 3x3 convolutions downsample 28x28 inputs, a 2x2 average
    pool replaces a fifth conv layer, and a 1x1 convolution maps the 256
    feature channels to the 10 classes.

    :param dataset: [batch_size, 28*28*1] tensor
    :return: logits tensor reshaped to [-1, 10]
    """
    dataset_reshaped = tf.reshape(dataset, [-1, 28, 28, 1])
    with tf.name_scope("conv1"):
        W_conv1 = utils.weight_variable_xavier_initialized([3, 3, 1, 32], name="W_conv1")
        b_conv1 = utils.bias_variable([32], name="b_conv1")
        h_conv1 = tf.nn.relu(utils.conv2d_strided(dataset_reshaped, W_conv1, b_conv1))
    with tf.name_scope("conv2"):
        W_conv2 = utils.weight_variable_xavier_initialized([3, 3, 32, 64], name="W_conv2")
        b_conv2 = utils.bias_variable([64], name="b_conv2")
        h_conv2 = tf.nn.relu(utils.conv2d_strided(h_conv1, W_conv2, b_conv2))
    with tf.name_scope("conv3"):
        W_conv3 = utils.weight_variable_xavier_initialized([3, 3, 64, 128], name="W_conv3")
        b_conv3 = utils.bias_variable([128], name="b_conv3")
        h_conv3 = tf.nn.relu(utils.conv2d_strided(h_conv2, W_conv3, b_conv3))
    with tf.name_scope("conv4"):
        W_conv4 = utils.weight_variable_xavier_initialized([3, 3, 128, 256], name="W_conv4")
        b_conv4 = utils.bias_variable([256], name="b_conv4")
        h_conv4 = tf.nn.relu(utils.conv2d_strided(h_conv3, W_conv4, b_conv4))
    with tf.name_scope("conv5"):
        # A fifth strided conv was replaced by average pooling here;
        # channel count stays at 256.
        h_conv5 = utils.avg_pool_2x2(h_conv4)
    with tf.name_scope("conv6"):
        W_conv6 = utils.weight_variable_xavier_initialized([1, 1, 256, 10], name="W_conv6")
        b_conv6 = utils.bias_variable([10], name="b_conv6")
        # NOTE(review): ReLU on the logits clamps them to >= 0, which is
        # unusual before a softmax loss -- kept as-is to preserve behavior.
        logits = tf.nn.relu(utils.conv2d_basic(h_conv5, W_conv6, b_conv6))
    print(logits.get_shape())
    logits = tf.reshape(logits, [-1, 10])
    return logits
示例3: inpainter
# 需要导入模块: import TensorflowUtils [as 别名]
# 或者: from TensorflowUtils import weight_variable_xavier_initialized [as 别名]
def inpainter(embedding, train_mode):
    """Decode an embedding back into an RGB image for context inpainting.

    A fully connected layer expands the embedding to a spatial feature
    map, then four stride-2 transposed convolutions upsample it 16x; the
    first three are followed by batch norm and ReLU, the last produces
    the raw predicted image.

    :param embedding: [batch, 1024] embedding tensor.
    :param train_mode: train/eval flag forwarded to utils.batch_norm.
    :return: predicted image tensor of shape
        [batch, 16*image_size, 16*image_size, 3].
    """
    with tf.variable_scope("context_inpainter"):
        image_size = IMAGE_SIZE // 32
        fc_units = image_size * image_size * 512

        with tf.name_scope("dec_fc"):
            W_fc = utils.weight_variable([1024, fc_units], name="W_fc")
            b_fc = utils.bias_variable([fc_units], name="b_fc")
            h_fc = tf.nn.relu(tf.matmul(embedding, W_fc) + b_fc)

        with tf.name_scope("dec_conv1"):
            batch = tf.shape(h_fc)[0]
            feat = tf.reshape(h_fc, tf.pack([batch, image_size, image_size, 512]))
            kernel = utils.weight_variable_xavier_initialized([3, 3, 256, 512], name="W_conv_t1")
            bias = utils.bias_variable([256], name="b_conv_t1")
            out_shape = tf.pack([batch, 2 * image_size, 2 * image_size, 256])
            feat = utils.conv2d_transpose_strided(feat, kernel, bias, output_shape=out_shape)
            feat = utils.batch_norm(feat, 256, train_mode, scope="conv_t1_bn")
            feat = tf.nn.relu(feat)

        with tf.name_scope("dec_conv2"):
            kernel = utils.weight_variable_xavier_initialized([3, 3, 128, 256], name="W_conv_t2")
            bias = utils.bias_variable([128], name="b_conv_t2")
            out_shape = tf.pack([tf.shape(feat)[0], 4 * image_size, 4 * image_size, 128])
            feat = utils.conv2d_transpose_strided(feat, kernel, bias, output_shape=out_shape)
            feat = utils.batch_norm(feat, 128, train_mode, scope="conv_t2_bn")
            feat = tf.nn.relu(feat)

        with tf.name_scope("dec_conv3"):
            kernel = utils.weight_variable_xavier_initialized([3, 3, 64, 128], name="W_conv_t3")
            bias = utils.bias_variable([64], name="b_conv_t3")
            out_shape = tf.pack([tf.shape(feat)[0], 8 * image_size, 8 * image_size, 64])
            feat = utils.conv2d_transpose_strided(feat, kernel, bias, output_shape=out_shape)
            feat = utils.batch_norm(feat, 64, train_mode, scope="conv_t3_bn")
            feat = tf.nn.relu(feat)

        with tf.name_scope("dec_conv4"):
            # Final upsample to 3 channels: no batch norm, no activation.
            kernel = utils.weight_variable_xavier_initialized([3, 3, 3, 64], name="W_conv_t4")
            bias = utils.bias_variable([3], name="b_conv_t4")
            out_shape = tf.pack([tf.shape(feat)[0], 16 * image_size, 16 * image_size, 3])
            pred_image = utils.conv2d_transpose_strided(feat, kernel, bias, output_shape=out_shape)

    return pred_image
示例4: inference
# 需要导入模块: import TensorflowUtils [as 别名]
# 或者: from TensorflowUtils import weight_variable_xavier_initialized [as 别名]
def inference(image):
    """Image-to-image network with a downsample/upsample structure.

    A 9x9 conv, then two strided 3x3 convs downsample the input; two
    transposed 3x3 convs upsample it back; a final 9x9 conv with tanh
    produces a 3-channel output in [-1, 1]. Histogram summaries are
    recorded for every weight and bias.

    :param image: input tensor; first filter expects a single channel
        -- presumably grayscale, confirm against caller.
    :return: predicted 3-channel image tensor.
    """
    def _track(w_tag, w, b_tag, b):
        # Record weight/bias histograms under the original summary tags.
        tf.histogram_summary(w_tag, w)
        tf.histogram_summary(b_tag, b)

    w1 = utils.weight_variable_xavier_initialized([9, 9, 1, 32])
    b1 = utils.bias_variable([32])
    _track("W1", w1, "b1", b1)
    act1 = tf.nn.relu(utils.conv2d_basic(image, w1, b1))

    w2 = utils.weight_variable_xavier_initialized([3, 3, 32, 64])
    b2 = utils.bias_variable([64])
    _track("W2", w2, "b2", b2)
    act2 = tf.nn.relu(utils.conv2d_strided(act1, w2, b2))

    w3 = utils.weight_variable_xavier_initialized([3, 3, 64, 128])
    b3 = utils.bias_variable([128])
    _track("W3", w3, "b3", b3)
    act3 = tf.nn.relu(utils.conv2d_strided(act2, w3, b3))

    # Upsampling path: transposed convs use [h, w, out_ch, in_ch] filters.
    w4 = utils.weight_variable_xavier_initialized([3, 3, 64, 128])
    b4 = utils.bias_variable([64])
    _track("W4", w4, "b4", b4)
    act4 = tf.nn.relu(utils.conv2d_transpose_strided(act3, w4, b4))

    w5 = utils.weight_variable_xavier_initialized([3, 3, 32, 64])
    b5 = utils.bias_variable([32])
    _track("W5", w5, "b5", b5)
    act5 = tf.nn.relu(utils.conv2d_transpose_strided(act4, w5, b5))

    w6 = utils.weight_variable_xavier_initialized([9, 9, 32, 3])
    b6 = utils.bias_variable([3])
    _track("W6", w6, "b6", b6)
    pred_image = tf.nn.tanh(utils.conv2d_basic(act5, w6, b6))
    return pred_image