This article collects typical usage examples of tensorflow.DataType in Python. If you have been wondering what tensorflow.DataType is and how to use it, the curated code example below may help. You can also explore further usage examples from the tensorflow module it belongs to.
A representative code example using tensorflow.DataType is shown below. You can upvote examples you like or find useful; your votes help the system recommend better Python code examples.
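Before the example, here is a minimal sketch of what a tensorflow.DataType value is and how it relates to the tf.DType objects passed as dtype arguments. This sketch is not taken from the example below and assumes the TF 1.x API:

import tensorflow as tf

# Minimal sketch (assumes TF 1.x): dtype arguments accept tf.DType objects
# such as tf.float32. tf.as_dtype converts type names, numpy dtypes, or raw
# tensorflow.DataType enum values into a tf.DType.
dtype = tf.as_dtype('float32')              # -> tf.float32 (a tf.DType)
print(dtype.name)                           # 'float32'
print(dtype.as_datatype_enum)               # underlying DataType enum (DT_FLOAT == 1)
print(tf.as_dtype(dtype.as_datatype_enum))  # round-trips back to tf.float32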
Example 1: _AddParam
# Required module: import tensorflow [as alias]
# Or: from tensorflow import DataType [as alias]
# Note: state_ops.init_variable and logging are assumed to come from the
# surrounding project's imports; they are not shown in this snippet.
def _AddParam(self,
              shape,
              dtype,
              name,
              initializer=None,
              return_average=False):
  """Add a model parameter with respect to which we expect to compute gradients.

  _AddParam creates both regular parameters (usually for training) and
  averaged nodes (usually for inference). It returns one or the other based
  on the 'return_average' arg.

  Args:
    shape: int list, tensor shape of the parameter to create
    dtype: tf.DataType, data type of the parameter
    name: string, name of the parameter in the TF graph
    initializer: optional initializer for the parameter
    return_average: if False, return the parameter; otherwise return its
      moving average

  Returns:
    parameter or averaged parameter
  """
  if name not in self.params:
    step = tf.cast(self.GetStep(), tf.float32)
    # Put all parameters and their initializing ops in their own scope
    # irrespective of the current scope (training or eval).
    with tf.name_scope(self._param_scope):
      self.params[name] = tf.get_variable(name, shape, dtype, initializer)
      param = self.params[name]
      if initializer is not None:
        self.inits[name] = state_ops.init_variable(param, initializer)
      if self._averaging_decay == 1:
        logging.info('Using vanilla averaging of parameters.')
        # decay = step / (step + 1) makes the moving average equal to the
        # arithmetic mean of all values the parameter has taken so far.
        ema = tf.train.ExponentialMovingAverage(decay=(step / (step + 1.0)),
                                                num_updates=None)
      else:
        ema = tf.train.ExponentialMovingAverage(decay=self._averaging_decay,
                                                num_updates=step)
      self._averaging[name + '_avg_update'] = ema.apply([param])
      self.variables[name + '_avg_var'] = ema.average(param)
      self.inits[name + '_avg_init'] = state_ops.init_variable(
          ema.average(param), tf.zeros_initializer())
  return (self.variables[name + '_avg_var'] if return_average else
          self.params[name])
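The docstring above describes the interface; as an illustration only, a call site might look like the following sketch. The names vocab_size, embed_dim, and self._use_averaging are hypothetical and not taken from the example:

# Hypothetical call site -- a sketch only; vocab_size, embed_dim and
# self._use_averaging are illustrative names, not from the example above.
weights = self._AddParam(
    shape=[vocab_size, embed_dim],
    dtype=tf.float32,
    name='embedding_matrix',
    initializer=tf.random_normal_initializer(stddev=0.01),
    return_average=self._use_averaging)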
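For readers unfamiliar with tf.train.ExponentialMovingAverage, the apply/average pair used in the example follows a standard pattern: apply() creates a shadow variable plus the op that updates it, and average() returns the shadow variable to read at inference time. A minimal, self-contained sketch, assuming the TF 1.x graph API (all names here are illustrative):

import tensorflow as tf

param = tf.get_variable('w', shape=[], dtype=tf.float32,
                        initializer=tf.zeros_initializer())
bump = tf.assign_add(param, 1.0)        # stand-in for an optimizer update
ema = tf.train.ExponentialMovingAverage(decay=0.999)
update_avg = ema.apply([param])         # creates the shadow var + update op
shadow = ema.average(param)             # averaged value, read at inference

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  for _ in range(3):
    sess.run(bump)                      # training step
    sess.run(update_avg)                # then refresh the moving average
  print(sess.run([param, shadow]))      # the shadow lags behind param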