

Python variables.get_global_step Function Code Examples

This article collects typical usage examples of the Python function tensorflow.contrib.framework.python.ops.variables.get_global_step. If you are wondering what get_global_step does, how to call it, or what real code that uses it looks like, the curated examples below should help.


Fifteen code examples of the get_global_step function are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Python code samples.
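
Before the individual examples, here is a minimal, self-contained sketch of the create/get/increment pattern that most of them rely on. It assumes a TensorFlow 1.x installation where tf.contrib is still available; the graph, session, and print calls are illustrative only, not part of any example below.

  import tensorflow as tf
  from tensorflow.contrib.framework.python.ops import variables as variables_lib

  with tf.Graph().as_default():
    # No global step variable exists yet, so the lookup returns None.
    assert variables_lib.get_global_step() is None

    # Create the global step variable, then look it up again.
    variables_lib.create_global_step()
    global_step = variables_lib.get_global_step()

    # Training code typically increments it once per training step.
    incr_step = tf.assign_add(global_step, 1)

    with tf.Session() as sess:
      sess.run(tf.global_variables_initializer())
      print(sess.run(incr_step))  # 1
      print(sess.run(incr_step))  # 2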

Example 1: test_train_skip_train_if_max_step_already_saved

  def test_train_skip_train_if_max_step_already_saved(self):
    with ops.Graph().as_default() as g, self.test_session(g):
      with ops.control_dependencies(self._build_inference_graph()):
        train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
      learn.graph_actions._monitored_train(  # pylint: disable=protected-access
          g,
          output_dir=self._output_dir,
          train_op=train_op,
          loss_op=constant_op.constant(2.0),
          max_steps=10)
      step = checkpoint_utils.load_variable(
          self._output_dir, variables_lib.get_global_step().name)
      self.assertEqual(10, step)

    with ops.Graph().as_default() as g, self.test_session(g):
      with ops.control_dependencies(self._build_inference_graph()):
        train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
      learn.graph_actions._monitored_train(  # pylint: disable=protected-access
          g,
          output_dir=self._output_dir,
          train_op=train_op,
          loss_op=constant_op.constant(2.0),
          max_steps=10)
      step = checkpoint_utils.load_variable(
          self._output_dir, variables_lib.get_global_step().name)
      self.assertEqual(10, step)
Developer: AliMiraftab, Project: tensorflow, Lines of code: 26, Source: graph_actions_test.py

Example 2: test_train_max_steps_is_not_incremental

  def test_train_max_steps_is_not_incremental(self):
    with ops.Graph().as_default() as g, self.test_session(g):
      with ops.control_dependencies(self._build_inference_graph()):
        train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
      learn.graph_actions.train(
          g,
          output_dir=self._output_dir,
          train_op=train_op,
          loss_op=constant_op.constant(2.0),
          max_steps=10)
      step = checkpoint_utils.load_variable(
          self._output_dir, variables_lib.get_global_step().name)
      self.assertEqual(10, step)

    with ops.Graph().as_default() as g, self.test_session(g):
      with ops.control_dependencies(self._build_inference_graph()):
        train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
      learn.graph_actions.train(
          g,
          output_dir=self._output_dir,
          train_op=train_op,
          loss_op=constant_op.constant(2.0),
          max_steps=15)
      step = checkpoint_utils.load_variable(
          self._output_dir, variables_lib.get_global_step().name)
      self.assertEqual(15, step)
Developer: AliMiraftab, Project: tensorflow, Lines of code: 26, Source: graph_actions_test.py

Example 3: _make_training_op

  def _make_training_op(training_loss):
    """Training op for the DNN linear combined model."""
    train_ops = []
    if dnn_logits is not None:
      train_ops.append(
          optimizers.optimize_loss(
              loss=training_loss,
              global_step=contrib_variables.get_global_step(),
              learning_rate=_DNN_LEARNING_RATE,
              optimizer=_get_optimizer(dnn_optimizer),
              gradient_multipliers=_extract_embedding_lr_multipliers(  # pylint: disable=protected-access
                  embedding_lr_multipliers, dnn_parent_scope,
                  dnn_input_scope.name),
              clip_gradients=gradient_clip_norm,
              variables=ops.get_collection(dnn_parent_scope),
              name=dnn_parent_scope,
              # Empty summaries, because head already logs "loss" summary.
              summaries=[]))
    if linear_logits is not None:
      train_ops.append(
          optimizers.optimize_loss(
              loss=training_loss,
              global_step=contrib_variables.get_global_step(),
              learning_rate=_linear_learning_rate(len(linear_feature_columns)),
              optimizer=_get_optimizer(linear_optimizer),
              clip_gradients=gradient_clip_norm,
              variables=ops.get_collection(linear_parent_scope),
              name=linear_parent_scope,
              # Empty summaries, because head already logs "loss" summary.
              summaries=[]))

    return control_flow_ops.group(*train_ops)
Developer: Jackhuang945, Project: tensorflow, Lines of code: 32, Source: dnn_linear_combined.py

Example 4: test_get_global_step

 def test_get_global_step(self):
   with ops.Graph().as_default() as g:
     self.assertEquals(None, variables_lib2.get_global_step())
     variables_lib.Variable(
         0,
         trainable=False,
         dtype=dtypes.int32,
         name=ops.GraphKeys.GLOBAL_STEP)
     self._assert_global_step(
         variables_lib2.get_global_step(), expected_dtype=dtypes.int32)
   self._assert_global_step(
       variables_lib2.get_global_step(g), expected_dtype=dtypes.int32)
Developer: AliMiraftab, Project: tensorflow, Lines of code: 12, Source: variables_test.py

Example 5: _model_fn

 def _model_fn(features, labels, mode, config):
   """Model function."""
   assert labels is None, labels
   (all_scores,
    model_predictions,
    losses, training_op,
    init_op,
    is_initialized) = gmm_ops.gmm(self._parse_tensor_or_dict(features),
                                  self._training_initial_clusters,
                                  self._num_clusters, self._random_seed,
                                  self._covariance_type,
                                  self._params)
   incr_step = state_ops.assign_add(variables.get_global_step(), 1)
   loss = math_ops.reduce_sum(losses)
   training_op = with_dependencies([training_op, incr_step], loss)
   training_hooks = [_InitializeClustersHook(
       init_op, is_initialized, config.is_chief)]
   predictions = {
       GMM.ALL_SCORES: all_scores[0],
       GMM.ASSIGNMENTS: model_predictions[0][0],
   }
   eval_metric_ops = {
       GMM.SCORES: _streaming_sum(loss),
   }
   return model_fn_lib.ModelFnOps(mode=mode, predictions=predictions,
                                  eval_metric_ops=eval_metric_ops,
                                  loss=loss, train_op=training_op,
                                  training_hooks=training_hooks)
Developer: 1000sprites, Project: tensorflow, Lines of code: 28, Source: gmm.py

Example 6: begin

 def begin(self):
   self._last_reported_time = None
   self._last_reported_step = None
   self._global_step_tensor = contrib_variables.get_global_step()
   if self._global_step_tensor is None:
     raise RuntimeError(
         "Global step should be created to use StepCounterHook.")
Developer: MostafaGazar, Project: tensorflow, Lines of code: 7, Source: basic_session_run_hooks.py

Example 7: test_train_summaries

 def test_train_summaries(self):
   with ops.Graph().as_default() as g, self.test_session(g):
     with ops.control_dependencies(self._build_inference_graph()):
       train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
     loss_op = constant_op.constant(2.0)
     summary.scalar('loss', loss_op)
     writer = learn.graph_actions.get_summary_writer(self._output_dir)
     self._assert_summaries(self._output_dir, writer)
     self._assert_ckpt(self._output_dir, False)
     loss = learn.graph_actions._monitored_train(  # pylint: disable=protected-access
         g,
         output_dir=self._output_dir,
         train_op=train_op,
         loss_op=loss_op,
         steps=1)
     meta_graph_def = meta_graph.create_meta_graph_def(
         graph_def=g.as_graph_def(add_shapes=True),
         saver_def=monitored_session.Scaffold().finalize().saver.saver_def)
     self.assertEqual(2.0, loss)
     self._assert_summaries(
         self._output_dir,
         writer,
         expected_graphs=[g],
         expected_meta_graphs=[meta_graph_def],
         expected_summaries={1: {
             'loss': 2.0
         }})
     self._assert_ckpt(self._output_dir, True)
Developer: AliMiraftab, Project: tensorflow, Lines of code: 28, Source: graph_actions_test.py

Example 8: _get_train_ops

  def _get_train_ops(self, features, targets):
    """See base class."""
    global_step = contrib_variables.get_global_step()
    assert global_step

    features = self._get_feature_dict(features)
    logits = self._logits(features, is_training=True)
    if self._enable_centered_bias:
      centered_bias_step = [self._centered_bias_step(targets, features)]
    else:
      centered_bias_step = []
    with ops.control_dependencies(centered_bias_step):
      training_loss = self._target_column.training_loss(logits, targets,
                                                        features)
      weighted_average_loss = self._target_column.loss(logits, targets,
                                                       features)

    logging_ops.scalar_summary("loss", weighted_average_loss)

    linear_train_step = self._linear_model.get_train_step(training_loss)
    dnn_train_step = (self._dnn_model.get_train_step(training_loss) if
                      self._dnn_model else [])

    with ops.control_dependencies(linear_train_step + dnn_train_step):
      with ops.get_default_graph().colocate_with(global_step):
        return state_ops.assign_add(global_step, 1).op, weighted_average_loss
Developer: MrRabbit0o0, Project: tensorflow, Lines of code: 26, Source: dnn_linear_combined.py

Example 9: _invalid_model_fn

 def _invalid_model_fn(features, labels):
   # pylint: disable=unused-argument
   w = variables_lib.Variable(42.0, 'weight')
   update_global_step = variables.get_global_step().assign_add(1)
   with control_flow_ops.control_dependencies([update_global_step]):
     loss = 100.0 - w
   return None, loss, None
Developer: vaccine, Project: tensorflow, Lines of code: 7, Source: estimator_test.py

Example 10: _model_fn

 def _model_fn(features, labels, mode):
   """Model function."""
   assert labels is None, labels
   (all_scores, model_predictions, losses,
    training_op) = clustering_ops.KMeans(
        self._parse_tensor_or_dict(features),
        self._num_clusters,
        self._training_initial_clusters,
        self._distance_metric,
        self._use_mini_batch,
        random_seed=self._random_seed,
        kmeans_plus_plus_num_retries=self.
        _kmeans_plus_plus_num_retries).training_graph()
   incr_step = state_ops.assign_add(variables.get_global_step(), 1)
   loss = math_ops.reduce_sum(losses, name=KMeansClustering.LOSS_OP_NAME)
   logging_ops.scalar_summary('loss/raw', loss)
   training_op = with_dependencies([training_op, incr_step], loss)
   predictions = {
       KMeansClustering.ALL_SCORES: all_scores[0],
       KMeansClustering.CLUSTER_IDX: model_predictions[0],
   }
   eval_metric_ops = {KMeansClustering.SCORES: loss,}
   if self._relative_tolerance is not None:
     training_hooks = [self.LossRelativeChangeHook(self._relative_tolerance)]
   else:
     training_hooks = None
   return ModelFnOps(
       mode=mode,
       predictions=predictions,
       eval_metric_ops=eval_metric_ops,
       loss=loss,
       train_op=training_op,
       training_hooks=training_hooks)
Developer: cancan101, Project: tensorflow, Lines of code: 33, Source: kmeans.py

Example 11: _get_train_ops

    def _get_train_ops(self, features, targets):
        """See base class."""
        global_step = contrib_variables.get_global_step()
        assert global_step
        logits = self._logits(features, is_training=True)
        if self._enable_centered_bias:
            centered_bias_step = [self._centered_bias_step(targets, features)]
        else:
            centered_bias_step = []
        with ops.control_dependencies(centered_bias_step):
            loss = self._loss(logits, targets, features)
        logging_ops.scalar_summary("loss", loss)

        linear_vars = self._get_linear_vars()
        dnn_vars = self._get_dnn_vars()
        grads = gradients.gradients(loss, dnn_vars + linear_vars)
        if self._gradient_clip_norm:
            grads, _ = clip_ops.clip_by_global_norm(grads, self._gradient_clip_norm)

        dnn_grads = grads[0 : len(dnn_vars)]
        linear_grads = grads[len(dnn_vars) :]

        train_ops = self._get_linear_training_ops(linear_grads, linear_vars) + self._get_dnn_training_ops(
            dnn_grads, dnn_vars
        )

        train_step = control_flow_ops.group(*train_ops, name="combined_training_op")
        with ops.control_dependencies([train_step]):
            with ops.get_default_graph().colocate_with(global_step):
                return state_ops.assign_add(global_step, 1).op, loss
Developer: 285219011, Project: liuwenfeng, Lines of code: 30, Source: dnn_linear_combined.py

Example 12: _get_train_ops

  def _get_train_ops(self, features, targets):
    """See base class."""
    self._validate_linear_feature_columns(features)
    if not isinstance(self._linear_optimizer, sdca_optimizer.SDCAOptimizer):
      return super(LinearClassifier, self)._get_train_ops(features, targets)

    # SDCA currently supports binary classification only.
    if self._target_column.num_label_columns > 2:
      raise ValueError(
          "SDCA does not currently support multi-class classification.")
    global_step = contrib_variables.get_global_step()
    assert global_step

    logits, columns_to_variables, _ = layers.weighted_sum_from_feature_columns(
        columns_to_tensors=features,
        feature_columns=self._linear_feature_columns,
        num_outputs=self._target_column.num_label_columns,
        weight_collections=[self._linear_weight_collection],
        name="linear")
    with ops.control_dependencies([self._centered_bias()]):
      loss = self._loss(logits, targets, features)
    logging_ops.scalar_summary("loss", loss)

    train_ops = self._linear_optimizer.get_train_step(
        self._linear_feature_columns, self._target_column.weight_column_name,
        "logistic_loss", features, targets, columns_to_variables, global_step)

    return train_ops, loss
Developer: 363158858, Project: tensorflow, Lines of code: 28, Source: linear.py

Example 13: _train_op_fn

 def _train_op_fn(unused_loss):
   global_step = contrib_variables.get_global_step()
   sdca_model, train_op = optimizer.get_train_step(
       columns_to_variables, weight_column_name, loss_type, features, labels,
       global_step)
   if update_weights_hook is not None:
     update_weights_hook.set_parameters(sdca_model, train_op)
   return train_op
Developer: AlbertXiebnu, Project: tensorflow, Lines of code: 8, Source: sdca_estimator.py

Example 14: _argument_checker

 def _argument_checker(features, labels, mode, params, config=None,
                       model_dir=None):
   _, _, _ = features, labels, config
   self.assertEqual(model_fn.ModeKeys.TRAIN, mode)
   self.assertEqual(expected_param, params)
   self.assertEqual(model_dir, expected_model_dir)
   return (constant_op.constant(0.), constant_op.constant(0.),
           variables.get_global_step().assign_add(1))
Developer: AutumnQYN, Project: tensorflow, Lines of code: 8, Source: estimator_test.py

Example 15: _model_fn_scaffold

 def _model_fn_scaffold(features, labels, mode):
   _, _ = features, labels
   return model_fn.ModelFnOps(
       mode=mode,
       predictions=constant_op.constant(0.),
       loss=constant_op.constant(0.),
       train_op=variables.get_global_step().assign_add(1),
       scaffold=monitored_session.Scaffold(init_fn=_init_fn))
Developer: AutumnQYN, Project: tensorflow, Lines of code: 8, Source: estimator_test.py


Note: The tensorflow.contrib.framework.python.ops.variables.get_global_step examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their developers, and copyright of the source code remains with the original authors. Please consult the corresponding project's License before distributing or using the code; do not reproduce this article without permission.