本文整理匯總了Python中tensorflow.concats方法的典型用法代碼示例。如果您正苦於以下問題:Python tensorflow.concats方法的具體用法?Python tensorflow.concats怎麽用?Python tensorflow.concats使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類tensorflow
的用法示例。
在下文中一共展示了tensorflow.concats方法的3個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: AddParallel
# 需要導入模塊: import tensorflow [as 別名]
# 或者: from tensorflow import concats [as 別名]
def AddParallel(self, prev_layer, index):
  """Concatenates (via tf.concat) outputs of layers that share one input.

  Args:
    prev_layer: Input tensor fed to every branch of the parallel.
    index: Position in model_str to start parsing.

  Returns:
    Tuple of (output tensor of the parallel, end index in model_str).

  Raises:
    ValueError: If () are unbalanced or the elements don't match.
  """
  if self.model_str[index] != '(':
    return None, None
  index += 1
  branches = []
  branch_rank = 0
  # Every branch must produce the same rank and apply the same reduction in
  # all dimensions except depth. Snapshot the starting factors so each branch
  # reduces from the same baseline instead of compounding the others' effects.
  saved_factors = self.reduction_factors
  agreed_factors = None
  while index < len(self.model_str) and self.model_str[index] != ')':
    self.reduction_factors = saved_factors
    branch, index = self.BuildFromString(prev_layer, index)
    rank = len(branch.get_shape())
    if branch_rank == 0:
      branch_rank = rank
    elif rank != branch_rank:
      raise ValueError('All elements of parallel must return same num dims')
    branches.append(branch)
    if agreed_factors:
      if agreed_factors != self.reduction_factors:
        raise ValueError('All elements of parallel must scale the same')
    else:
      agreed_factors = self.reduction_factors
  if index == len(self.model_str):
    raise ValueError('Missing ) at end of parallel!' + self.model_str)
  # Join the branches along the depth (last) dimension.
  return tf.concat(axis=branch_rank - 1, values=branches), index + 1
示例2: AddParallel
# 需要導入模塊: import tensorflow [as 別名]
# 或者: from tensorflow import concats [as 別名]
def AddParallel(self, prev_layer, index, reuse=None):
  """Concatenates (via tf.concat) outputs of layers that share one input.

  Args:
    prev_layer: Input tensor fed to every element of the parallel.
    index: Position in model_str to start parsing.
    reuse: Accepted for signature compatibility; not read in this body.

  Returns:
    Tuple of (output tensor of the parallel, end index in model_str).

  Raises:
    ValueError: If () are unbalanced or the elements don't match.
  """
  if self.model_str[index] != '(':
    return None, None
  index += 1
  outputs, num_dims = [], 0
  # All elements must match in every dimension but depth, including any
  # reduction factor. Save the factors once up front so each element reduces
  # from the same starting point rather than stacking reductions.
  original_factors = self.reduction_factors
  final_factors = None
  spec, spec_len = self.model_str, len(self.model_str)
  while index < spec_len and spec[index] != ')':
    self.reduction_factors = original_factors
    layer, index = self.BuildFromString(prev_layer, index)
    if not num_dims:
      num_dims = len(layer.get_shape())
    elif len(layer.get_shape()) != num_dims:
      raise ValueError('All elements of parallel must return same num dims')
    outputs.append(layer)
    if not final_factors:
      final_factors = self.reduction_factors
    elif final_factors != self.reduction_factors:
      raise ValueError('All elements of parallel must scale the same')
  if index == spec_len:
    raise ValueError('Missing ) at end of parallel!' + self.model_str)
  # Depth-concatenate the parallel outputs along the last dimension.
  return tf.concat(axis=num_dims - 1, values=outputs), index + 1
示例3: AddParallel
# 需要導入模塊: import tensorflow [as 別名]
# 或者: from tensorflow import concats [as 別名]
def AddParallel(self, prev_layer, index):
  """Concatenates (via tf.concat) outputs of layers that share one input.

  Args:
    prev_layer: Input tensor fed to every element of the parallel.
    index: Position in model_str to start parsing.

  Returns:
    Tuple of (output tensor of the parallel, end index in model_str).

  Raises:
    ValueError: If () are unbalanced or the elements don't match.
  """
  if self.model_str[index] != '(':
    return None, None
  index += 1
  layers = []
  num_dims = 0
  # Each parallel must output the same, including any reduction factor, in
  # all dimensions except depth.
  # We have to save the starting factors, so they don't get reduced by all
  # the elements of the parallel, only once.
  original_factors = self.reduction_factors
  final_factors = None
  while index < len(self.model_str) and self.model_str[index] != ')':
    self.reduction_factors = original_factors
    layer, index = self.BuildFromString(prev_layer, index)
    if num_dims == 0:
      num_dims = len(layer.get_shape())
    elif num_dims != len(layer.get_shape()):
      raise ValueError('All elements of parallel must return same num dims')
    layers.append(layer)
    # Bug fix: use an explicit None sentinel. With a plain truthiness test, a
    # falsy factors value from the first element would silently skip the
    # scale-consistency check for every later element.
    if final_factors is not None:
      if final_factors != self.reduction_factors:
        raise ValueError('All elements of parallel must scale the same')
    else:
      final_factors = self.reduction_factors
  if index == len(self.model_str):
    raise ValueError('Missing ) at end of parallel!' + self.model_str)
  # Bug fix: the original used the pre-TF-1.0 call tf.concat(num_dims - 1,
  # layers). Since TF 1.0 the signature is tf.concat(values, axis), so the
  # positional form passes the axis where values belong. Keyword arguments
  # make it correct and consistent with the sibling examples above.
  return tf.concat(axis=num_dims - 1, values=layers), index + 1