This article collects typical usage examples of the tf_logging.fatal method from the Python module tensorflow.python.platform.tf_logging. If you are unsure what tf_logging.fatal does, how to call it, or what working code that uses it looks like, the curated examples below should help. You can also explore the other members of the tensorflow.python.platform.tf_logging module.
The following presents 6 code examples of tf_logging.fatal, ordered by popularity by default.
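Before the examples, here is a minimal sketch of the typical import and call pattern; the setting name below is a hypothetical placeholder, and the exact behavior of fatal() varies by TensorFlow 1.x version.

# Minimal sketch of the typical import and call pattern. Depending on the
# TensorFlow version, fatal() either just logs at the highest severity or
# also aborts the process, so it is reserved for unrecoverable errors.
from tensorflow.python.platform import tf_logging as logging

required_setting = None  # hypothetical placeholder for a required config value
if required_setting is None:
  logging.fatal('Missing required setting: %s', 'required_setting')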
Example 1: get_layer_size
# Required import: from tensorflow.python.platform import tf_logging as logging
# Or: from tensorflow.python.platform.tf_logging import fatal
def get_layer_size(self, layer_name):
  if layer_name == 'logits':
    return self._component.num_actions
  if layer_name == 'last_layer':
    return self._hidden_layer_sizes[-1]
  if not layer_name.startswith('layer_'):
    logging.fatal(
        'Invalid layer name: "%s" Can only retrieve from "logits", '
        '"last_layer", and "layer_*".',
        layer_name)
  # NOTE(danielandor): Since get_layer_size is called before the
  # model has been built, we compute the layer size directly from
  # the hyperparameters rather than from self._layers.
  layer_index = int(layer_name.split('_')[1])
  return self._hidden_layer_sizes[layer_index]
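As a quick illustration of the name resolution above (the values are hypothetical, not from the original class): a name of the form 'layer_<k>' is mapped to index k of the configured hidden layer sizes, while an unrecognized name is reported through logging.fatal.

# Hypothetical configuration, purely to illustrate get_layer_size's parsing.
hidden_layer_sizes = [256, 64]
layer_name = 'layer_1'
layer_index = int(layer_name.split('_')[1])      # -> 1
assert hidden_layer_sizes[layer_index] == 64     # 'layer_1' resolves to size 64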
Example 2: get_layer_size
# Required import: from tensorflow.python.platform import tf_logging as logging
# Or: from tensorflow.python.platform.tf_logging import fatal
def get_layer_size(self, layer_name):
  if layer_name == 'logits':
    return self._component.num_actions
  if layer_name == 'last_layer':
    return self._hidden_layer_sizes[-1]
  if not layer_name.startswith('layer_'):
    logging.fatal('Invalid layer name: "%s" Can only retrieve from "logits", '
                  '"last_layer", and "layer_*".', layer_name)
  # NOTE(danielandor): Since get_layer_size is called before the
  # model has been built, we compute the layer size directly from
  # the hyperparameters rather than from self._layers.
  layer_index = int(layer_name.split('_')[1])
  return self._hidden_layer_sizes[layer_index]
Example 3: _deleted
# Required import: from tensorflow.python.platform import tf_logging
# Or: from tensorflow.python.platform.tf_logging import fatal
def _deleted(obj_id, fatal_error):
  obj = _REF_INFO[obj_id]
  del _REF_INFO[obj_id]
  if not obj.object_used:
    if fatal_error:
      logger = tf_logging.fatal
    else:
      logger = tf_logging.error
    logger(
        '==================================\n'
        'Object was never used (type %s):\n%s\nIf you want to mark it as '
        'used call its "mark_used()" method.\nIt was originally created '
        'here:\n%s\n'
        '==================================' %
        (obj.type_, obj.repr_, obj.creation_stack))
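The detail worth copying from this example is that tf_logging.fatal and tf_logging.error share the same printf-style signature, so the severity can be chosen at runtime and bound to a single callable. A minimal, self-contained sketch of that pattern (the function and variable names here are illustrative, not from the original module):

from tensorflow.python.platform import tf_logging

def report_unused(type_name, repr_text, creation_stack, fatal_error=False):
  # Bind the logging function first, then call it once; fatal and error
  # accept the same printf-style arguments.
  logger = tf_logging.fatal if fatal_error else tf_logging.error
  logger('Object was never used (type %s):\n%s\nIt was originally created '
         'here:\n%s', type_name, repr_text, creation_stack)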
Example 4: must_use_result_or_fatal
# Required import: from tensorflow.python.platform import tf_logging
# Or: from tensorflow.python.platform.tf_logging import fatal
def must_use_result_or_fatal(fn):
  """Function wrapper that ensures the function's output is used.

  If the output is not used, a `tf.logging.fatal` error is raised.

  An output is marked as used if any of its attributes are read, modified, or
  updated. Examples when the output is a `Tensor` include:

  - Using it in any capacity (e.g. `y = t + 0`, `sess.run(t)`)
  - Accessing a property (e.g. getting `t.name` or `t.op`).

  Note, certain behaviors cannot be tracked - for these the object may not
  be marked as used. Examples include:

  - `t != 0`. In this case, comparison is done on types / ids.
  - `isinstance(t, tf.Tensor)`. Similar to above.

  Args:
    fn: The function to wrap.

  Returns:
    The wrapped function.
  """
  def wrapped(*args, **kwargs):
    return _add_should_use_warning(fn(*args, **kwargs), fatal_error=True)
  return tf_decorator.make_decorator(
      fn, wrapped, 'must_use_result_or_fatal',
      ((fn.__doc__ or '') +
       ('\n\n  '
        '**NOTE** The output of this function must be used. If it is not, '
        'a fatal error will be raised. To mark the output as used, '
        'call its .mark_used() method.')))
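A hedged usage sketch of the decorator above, assuming it is applied inside TensorFlow's tf_should_use module (where _add_should_use_warning is defined); the helper function and tensor below are hypothetical.

import tensorflow as tf

@must_use_result_or_fatal
def make_non_negative_assert(tensor):
  # Hypothetical helper whose return value must be consumed by the caller.
  return tf.Assert(tf.reduce_all(tensor >= 0), [tensor])

x = tf.constant([1.0, 2.0, 3.0])
assert_op = make_non_negative_assert(x)
assert_op.mark_used()  # per the NOTE above, marks the wrapped result as used

If mark_used() were omitted and the result never otherwise used, the cleanup path shown in Example 3's _deleted would report it through tf_logging.fatal, because the wrapper is created with fatal_error=True.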
Example 5: _create_optimizer
# Required import: from tensorflow.python.platform import tf_logging as logging
# Or: from tensorflow.python.platform.tf_logging import fatal
def _create_optimizer(hyperparams, learning_rate_var, step_var=None):
  """Creates an optimizer object for a given spec, learning rate and step var.

  Args:
    hyperparams: a GridPoint proto containing optimizer spec, particularly
      learning_method to determine optimizer class to use.
    learning_rate_var: a `tf.Tensor`, the learning rate.
    step_var: a `tf.Variable`, global training step.

  Returns:
    a `tf.train.Optimizer` object that was built.
  """
  if hyperparams.learning_method == 'gradient_descent':
    return tf.train.GradientDescentOptimizer(
        learning_rate_var, use_locking=True)
  elif hyperparams.learning_method == 'adam':
    return tf.train.AdamOptimizer(
        learning_rate_var,
        beta1=hyperparams.adam_beta1,
        beta2=hyperparams.adam_beta2,
        epsilon=hyperparams.adam_eps,
        use_locking=True)
  elif hyperparams.learning_method == 'lazyadam':
    return tf.contrib.opt.LazyAdamOptimizer(
        learning_rate_var,
        beta1=hyperparams.adam_beta1,
        beta2=hyperparams.adam_beta2,
        epsilon=hyperparams.adam_eps,
        use_locking=True)
  elif hyperparams.learning_method == 'momentum':
    return tf.train.MomentumOptimizer(
        learning_rate_var, hyperparams.momentum, use_locking=True)
  elif hyperparams.learning_method == 'composite':
    spec = hyperparams.composite_optimizer_spec
    optimizer1 = _create_optimizer(spec.method1, learning_rate_var, step_var)
    optimizer2 = _create_optimizer(spec.method2, learning_rate_var, step_var)
    if step_var is None:
      logging.fatal('step_var is required for CompositeOptimizer')
    switch = tf.less(step_var, spec.switch_after_steps)
    return composite_optimizer.CompositeOptimizer(
        optimizer1, optimizer2, switch, use_locking=True)
  else:
    logging.fatal('Unknown learning method (optimizer)')
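A brief sketch of how _create_optimizer might be driven; GridPoint is a SyntaxNet/DRAGNN hyperparameter proto, so a hypothetical stand-in object is used here purely to show the dispatch on learning_method.

import tensorflow as tf

class _FakeHyperparams(object):
  # Hypothetical stand-in for the GridPoint proto; only the fields read by
  # the 'momentum' branch are provided.
  learning_method = 'momentum'
  momentum = 0.9

learning_rate = tf.constant(0.01)
optimizer = _create_optimizer(_FakeHyperparams(), learning_rate)
# 'momentum' yields a tf.train.MomentumOptimizer; an unrecognized
# learning_method falls through to logging.fatal('Unknown learning method...').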
Example 6: logistic_regression_signature_fn
# Required import: from tensorflow.python.platform import tf_logging as logging
# Or: from tensorflow.python.platform.tf_logging import fatal
def logistic_regression_signature_fn(examples, unused_features, predictions):
  """Creates logistic regression signature from given examples and predictions.

  Args:
    examples: `Tensor`.
    unused_features: `dict` of `Tensor`s.
    predictions: `Tensor` of shape [batch_size, 2] of predicted probabilities or
      dict that contains the probabilities tensor as in
      {'probabilities', `Tensor`}.

  Returns:
    Tuple of default regression signature and named signature.

  Raises:
    ValueError: If examples is `None`.
  """
  if examples is None:
    raise ValueError('examples cannot be None when using this signature fn.')
  if isinstance(predictions, dict):
    predictions_tensor = predictions['probabilities']
  else:
    predictions_tensor = predictions
  # predictions should have shape [batch_size, 2] where first column is P(Y=0|x)
  # while second column is P(Y=1|x). We are only interested in the second
  # column for inference.
  predictions_shape = predictions_tensor.get_shape()
  predictions_rank = len(predictions_shape)
  if predictions_rank != 2:
    logging.fatal(
        'Expected predictions to have rank 2, but received predictions with '
        'rank: {} and shape: {}'.format(predictions_rank, predictions_shape))
  if predictions_shape[1] != 2:
    logging.fatal(
        'Expected predictions to have 2nd dimension: 2, but received '
        'predictions with 2nd dimension: {} and shape: {}. Did you mean to use '
        'regression_signature_fn or classification_signature_fn_with_prob '
        'instead?'.format(predictions_shape[1], predictions_shape))
  positive_predictions = predictions_tensor[:, 1]
  default_signature = exporter.regression_signature(
      input_tensor=examples, output_tensor=positive_predictions)
  return default_signature, {}
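A hedged sketch of calling this signature function with TF 1.x placeholders; it assumes the tf.contrib.session_bundle exporter used by the original module is available, and the tensor names are hypothetical.

import tensorflow as tf

# Hypothetical inputs: serialized tf.Example strings and [batch_size, 2]
# class probabilities, matching the rank/shape checks above.
examples = tf.placeholder(tf.string, shape=[None], name='input_example')
probabilities = tf.placeholder(tf.float32, shape=[None, 2], name='probabilities')

default_signature, named_signatures = logistic_regression_signature_fn(
    examples, unused_features={}, predictions=probabilities)
# default_signature is a regression signature over P(Y=1|x) (column 1);
# named_signatures is an empty dict.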