

Python nn_impl.l2_normalize Function Code Examples

This article collects typical usage examples of the Python function tensorflow.python.ops.nn_impl.l2_normalize. If you are unsure how to use l2_normalize, or are looking for concrete usage examples, the selected code samples below may help.


The following presents 15 code examples of the l2_normalize function, sorted by popularity by default.
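Before the examples, a minimal sketch of what l2_normalize computes may be useful: it divides each slice along the given dimension by its Euclidean norm. The snippet below is illustrative only and assumes a TensorFlow 1.x environment (matching the dim= keyword used throughout the examples); it calls the public alias tf.nn.l2_normalize rather than the internal nn_impl module.

import numpy as np
import tensorflow as tf  # assumes TensorFlow 1.x, as in the examples below

x_np = np.array([[3.0, 4.0],
                 [1.0, 0.0]], dtype=np.float32)
x = tf.constant(x_np)
# Each row is divided by its L2 norm: output[i] = x[i] / sqrt(sum(x[i] ** 2)).
y = tf.nn.l2_normalize(x, dim=1)
with tf.Session() as sess:
    print(sess.run(y))  # [[0.6, 0.8], [1.0, 0.0]]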

Example 1: _init_clusters

  def _init_clusters(self):
    """Initialization of clusters.

    Returns:
    Tuple with following elements:
      cluster_centers: a Tensor for storing cluster centers
      cluster_counts: a Tensor for storing counts of points assigned to this
        cluster. This is used by mini-batch training.
    """
    init = self._initial_clusters
    if init == RANDOM_INIT:
      clusters_init = self._init_clusters_random()
    elif init == KMEANS_PLUS_PLUS_INIT:
      # Points from only the first shard are used for initializing centers.
      # TODO(ands): Use all points.
      inp = self._inputs[0]
      if self._distance_metric == COSINE_DISTANCE:
        inp = nn_impl.l2_normalize(inp, dim=1)
      clusters_init = gen_clustering_ops.kmeans_plus_plus_initialization(
          inp, self._num_clusters, self._random_seed,
          self._kmeans_plus_plus_num_retries)
    elif callable(init):
      clusters_init = init(self._inputs, self._num_clusters)
    elif not isinstance(init, str):
      clusters_init = init
    else:
      assert False, 'Unsupported init passed to Kmeans %s' % str(init)
    if self._distance_metric == COSINE_DISTANCE and clusters_init is not None:
      clusters_init = nn_impl.l2_normalize(clusters_init, dim=1)
    clusters_init = clusters_init if clusters_init is not None else []
    # TODO(agarwal): Locally cache cluster_centers on the worker to avoid
    # copying them each step.
    cluster_centers = variables.Variable(clusters_init,
                                         name='clusters',
                                         validate_shape=False)
    if self._use_mini_batch and self._mini_batch_steps_per_iteration > 1:
      # Copy of cluster centers actively updated each step according to
      # mini-batch update rule.
      cluster_centers_updated = variables.Variable(clusters_init,
                                                   name='clusters_updated',
                                                   validate_shape=False)
      # How many steps till we copy the updated clusters to cluster_centers.
      update_in_steps = variables.Variable(self._mini_batch_steps_per_iteration,
                                           dtype=dtypes.int64,
                                           name='update_in_steps')
      # Count of points assigned to cluster_centers_updated.
      cluster_counts = variables.Variable(array_ops.zeros([self._num_clusters],
                                                          dtype=dtypes.int64))
    else:
      cluster_centers_updated = cluster_centers
      update_in_steps = None
      cluster_counts = (variables.Variable(array_ops.ones([self._num_clusters],
                                                          dtype=dtypes.int64))
                        if self._use_mini_batch else None)
    return (cluster_centers, cluster_counts,
            cluster_centers_updated, update_in_steps)
Developer: Jackhuang945, Project: tensorflow, Lines: 56, Source: clustering_ops.py

Example 2: _full_batch_training_op

  def _full_batch_training_op(self, inputs, cluster_idx_list, cluster_centers):
    """Creates an op for training for full batch case.

    Args:
      inputs: list of input Tensors.
      cluster_idx_list: A vector (or list of vectors). Each element in the
        vector corresponds to an input row in 'inp' and specifies the cluster id
        corresponding to the input.
      cluster_centers: Tensor Ref of cluster centers.

    Returns:
      An op for doing an update of full-batch k-means.
    """
    cluster_sums = []
    cluster_counts = []
    epsilon = constant_op.constant(1e-6, dtype=inputs[0].dtype)
    for inp, cluster_idx in zip(inputs, cluster_idx_list):
      with ops.colocate_with(inp):
        cluster_sums.append(
            math_ops.unsorted_segment_sum(inp, cluster_idx, self._num_clusters))
        cluster_counts.append(
            math_ops.unsorted_segment_sum(
                array_ops.reshape(
                    array_ops.ones(
                        array_ops.reshape(array_ops.shape(inp)[0], [-1])),
                    [-1, 1]), cluster_idx, self._num_clusters))
    with ops.colocate_with(cluster_centers):
      new_clusters_centers = math_ops.add_n(cluster_sums) / (math_ops.cast(
          math_ops.add_n(cluster_counts), cluster_sums[0].dtype) + epsilon)
      if self._clusters_l2_normalized():
        new_clusters_centers = nn_impl.l2_normalize(new_clusters_centers, dim=1)
    return state_ops.assign(cluster_centers, new_clusters_centers)
Developer: AliMiraftab, Project: tensorflow, Lines: 32, Source: clustering_ops.py
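The full-batch update above is just a per-cluster mean: sum the points assigned to each cluster with unsorted_segment_sum, then divide by the per-cluster counts (plus a small epsilon to guard against empty clusters). A small NumPy sketch of the same arithmetic, with made-up data for illustration:

import numpy as np

points = np.array([[1.0, 1.0], [10.0, 10.0], [3.0, 3.0]])  # three input rows
cluster_idx = np.array([0, 1, 0])                          # cluster id of each row
num_clusters = 2
epsilon = 1e-6

sums = np.zeros((num_clusters, points.shape[1]))
counts = np.zeros((num_clusters, 1))
np.add.at(sums, cluster_idx, points)     # analogous to unsorted_segment_sum on the rows
np.add.at(counts, cluster_idx, 1.0)      # analogous to unsorted_segment_sum on the ones
new_centers = sums / (counts + epsilon)  # mean of the points assigned to each cluster
print(new_centers)  # cluster 0 -> ~[2., 2.], cluster 1 -> ~[10., 10.]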

Example 3: _l2_normalize_data

 def _l2_normalize_data(cls, inputs):
   """Normalized the input data."""
   output = []
   for inp in inputs:
     with ops.colocate_with(inp):
       output.append(nn_impl.l2_normalize(inp, dim=1))
   return output
Developer: AliMiraftab, Project: tensorflow, Lines: 7, Source: clustering_ops.py

Example 4: _f

 def _f():
   # Note that there is a race condition here, so we do a best effort
   # updates here. We reset update_in_steps first so that other workers
   # don't duplicate the updates. Also we update cluster_center_vars
   # before resetting total_counts to avoid large updates to
   # cluster_centers_updated based on partially updated
   # cluster_center_vars.
   with ops.control_dependencies([
       state_ops.assign(update_in_steps,
                        self._mini_batch_steps_per_iteration - 1)
   ]):
     with ops.colocate_with(
         cluster_centers_updated, ignore_existing=True):
       if self._distance_metric == COSINE_DISTANCE:
         cluster_centers = nn_impl.l2_normalize(
             cluster_centers_updated, dim=1)
       else:
         cluster_centers = cluster_centers_updated
     with ops.colocate_with(cluster_centers_var, ignore_existing=True):
       with ops.control_dependencies(
           [state_ops.assign(cluster_centers_var, cluster_centers)]):
         with ops.colocate_with(None, ignore_existing=True):
           with ops.control_dependencies([
               state_ops.assign(total_counts,
                                array_ops.zeros_like(total_counts))
           ]):
             return array_ops.identity(update_in_steps)
Developer: AnddyWang, Project: tensorflow, Lines: 27, Source: clustering_ops.py

Example 5: _init_clusters

  def _init_clusters(self):
    """Initialization of clusters.

    Returns:
    Tuple with following elements:
      cluster_centers: a Tensor for storing cluster centers
      cluster_counts: a Tensor for storing counts of points assigned to this
        cluster. This is used by mini-batch training.
    """
    init = self._initial_clusters
    if init == RANDOM_INIT:
      clusters_init = self._init_clusters_random()
    elif init == KMEANS_PLUS_PLUS_INIT:
      # Points from only the first shard are used for initializing centers.
      # TODO(ands): Use all points.
      clusters_init = gen_clustering_ops.kmeans_plus_plus_initialization(
          self._inputs[0], self._num_clusters, self._random_seed,
          self._kmeans_plus_plus_num_retries)
    elif callable(init):
      clusters_init = init(self._inputs, self._num_clusters)
    elif not isinstance(init, str):
      clusters_init = init
    else:
      assert False, 'Unsupported init passed to Kmeans %s' % str(init)
    if self._distance_metric == COSINE_DISTANCE and clusters_init is not None:
      clusters_init = nn_impl.l2_normalize(clusters_init, dim=1)
    clusters_init = clusters_init if clusters_init is not None else []
    cluster_centers = variables.Variable(
        clusters_init, name='clusters', validate_shape=False)
    cluster_counts = (variables.Variable(
        array_ops.ones(
            [self._num_clusters], dtype=dtypes.int64)) if self._use_mini_batch
                      else None)
    return cluster_centers, cluster_counts
Developer: AliMiraftab, Project: tensorflow, Lines: 34, Source: clustering_ops.py

Example 6: _sample_random

 def _sample_random():
   """Returns a random point as a cluster center."""
   # By assumption the batch is reshuffled and _sample_random is always
   # called for i=0. Hence, we simply return the first point.
   new_center = array_ops.reshape(first_shard[0], [1, -1])
   if self._distance_metric == COSINE_DISTANCE:
     new_center = nn_impl.l2_normalize(new_center, dim=1)
   return new_center
Developer: AnddyWang, Project: tensorflow, Lines: 8, Source: clustering_ops.py

Example 7: _initialize_clusters

  def _initialize_clusters(self,
                           cluster_centers,
                           cluster_centers_initialized,
                           cluster_centers_updated):
    """Returns an op to initialize the cluster centers."""

    init = self._initial_clusters
    if init == RANDOM_INIT:
      clusters_init = self._init_clusters_random()
    elif init == KMEANS_PLUS_PLUS_INIT:
      # Points from only the first shard are used for initializing centers.
      # TODO(ands): Use all points.
      inp = self._inputs[0]
      if self._distance_metric == COSINE_DISTANCE:
        inp = nn_impl.l2_normalize(inp, dim=1)
      clusters_init = gen_clustering_ops.kmeans_plus_plus_initialization(
          inp, self._num_clusters, self._random_seed,
          self._kmeans_plus_plus_num_retries)
    elif callable(init):
      clusters_init = init(self._inputs, self._num_clusters)
    elif not isinstance(init, str):
      clusters_init = init
    else:
      assert False, 'Unsupported init passed to Kmeans %s' % str(init)
    if self._distance_metric == COSINE_DISTANCE and clusters_init is not None:
      clusters_init = nn_impl.l2_normalize(clusters_init, dim=1)

    with ops.colocate_with(cluster_centers_initialized):
      initialized = control_flow_ops.with_dependencies(
          [clusters_init],
          array_ops.identity(cluster_centers_initialized))
    with ops.colocate_with(cluster_centers):
      assign_centers = state_ops.assign(cluster_centers, clusters_init,
                                        validate_shape=False)
      if cluster_centers_updated != cluster_centers:
        assign_centers = control_flow_ops.group(
            assign_centers,
            state_ops.assign(cluster_centers_updated, clusters_init,
                             validate_shape=False))
      assign_centers = control_flow_ops.with_dependencies(
          [assign_centers],
          state_ops.assign(cluster_centers_initialized, True))
      return control_flow_ops.cond(initialized,
                                   control_flow_ops.no_op,
                                   lambda: assign_centers).op
Developer: AlbertXiebnu, Project: tensorflow, Lines: 45, Source: clustering_ops.py

Example 8: testL2NormalizeDimArray

 def testL2NormalizeDimArray(self):
   x_shape = [20, 7, 3]
   np.random.seed(1)
   x_np = np.random.random_sample(x_shape).astype(np.float32)
   dim = [1, 2]
   y_np = self._l2Normalize(x_np, dim)
   x_tf = constant_op.constant(x_np, name="x")
   y_tf = nn_impl.l2_normalize(x_tf, dim)
   self.assertAllClose(y_np, self.evaluate(y_tf))
Developer: AnddyWang, Project: tensorflow, Lines: 9, Source: nn_test.py

Example 9: _kmeans_plus_plus

 def _kmeans_plus_plus(self):
   # Points from only the first shard are used for initializing centers.
   # TODO(ands): Use all points.
   inp = self._inputs[0]
   if self._distance_metric == COSINE_DISTANCE:
     inp = nn_impl.l2_normalize(inp, dim=1)
   return gen_clustering_ops.kmeans_plus_plus_initialization(
       inp, math_ops.cast(self._num_remaining, dtypes.int64), self._seed,
       self._kmeans_plus_plus_num_retries)
Developer: adit-chandra, Project: tensorflow, Lines: 9, Source: clustering_ops.py

Example 10: testL2Normalize

 def testL2Normalize(self):
   x_shape = [20, 7, 3]
   np.random.seed(1)
   x_np = np.random.random_sample(x_shape).astype(np.float32)
   for dim in range(len(x_shape)):
     y_np = self._l2Normalize(x_np, dim)
     with self.test_session():
       x_tf = constant_op.constant(x_np, name="x")
       y_tf = nn_impl.l2_normalize(x_tf, dim)
       self.assertAllClose(y_np, y_tf.eval())
Developer: AlbertXiebnu, Project: tensorflow, Lines: 10, Source: nn_test.py

Example 11: testL2NormalizeGradient

 def testL2NormalizeGradient(self):
   x_shape = [20, 7, 3]
   np.random.seed(1)
   x_np = np.random.random_sample(x_shape).astype(np.float64)
   for dim in range(len(x_shape)):
     with self.test_session():
       x_tf = constant_op.constant(x_np, name="x")
       y_tf = nn_impl.l2_normalize(x_tf, dim)
       err = gradient_checker.compute_gradient_error(x_tf, x_shape, y_tf,
                                                     x_shape)
     print("L2Normalize gradient err = %g " % err)
     self.assertLess(err, 1e-4)
Developer: AlbertXiebnu, Project: tensorflow, Lines: 12, Source: nn_test.py

Example 12: training_graph

  def training_graph(self):
    """Generate a training graph for kmeans algorithm.

    Returns:
      A tuple consisting of:
      all_scores: A matrix (or list of matrices) of dimensions (num_input,
        num_clusters) where the value is the distance of an input vector and a
        cluster center.
      cluster_idx: A vector (or list of vectors). Each element in the vector
        corresponds to an input row in 'inp' and specifies the cluster id
        corresponding to the input.
      scores: Similar to cluster_idx but specifies the distance to the
        assigned cluster instead.
      cluster_centers_initialized: scalar indicating whether clusters have been
        initialized.
      init_op: an op to initialize the clusters.
      training_op: an op that runs an iteration of training.
    """
    # Implementation of kmeans.
    inputs = self._inputs
    (cluster_centers_var,
     cluster_centers_initialized,
     total_counts,
     cluster_centers_updated,
     update_in_steps) = self._create_variables()
    init_op = self._initialize_clusters(cluster_centers_var,
                                        cluster_centers_initialized,
                                        cluster_centers_updated)
    cluster_centers = cluster_centers_var

    if self._distance_metric == COSINE_DISTANCE:
      inputs = self._l2_normalize_data(inputs)
      if not self._clusters_l2_normalized():
        cluster_centers = nn_impl.l2_normalize(cluster_centers, dim=1)

    all_scores, scores, cluster_idx = self._infer_graph(inputs, cluster_centers)
    if self._use_mini_batch:
      sync_updates_op = self._mini_batch_sync_updates_op(
          update_in_steps,
          cluster_centers_var, cluster_centers_updated,
          total_counts)
      assert sync_updates_op is not None
      with ops.control_dependencies([sync_updates_op]):
        training_op = self._mini_batch_training_op(
            inputs, cluster_idx, cluster_centers_updated, total_counts)
    else:
      assert cluster_centers == cluster_centers_var
      training_op = self._full_batch_training_op(inputs, cluster_idx,
                                                 cluster_centers_var)

    return (all_scores, cluster_idx, scores,
            cluster_centers_initialized, init_op, training_op)
Developer: AlbertXiebnu, Project: tensorflow, Lines: 52, Source: clustering_ops.py

Example 13: _compute_cosine_distance

  def _compute_cosine_distance(cls, inputs, clusters, inputs_normalized=True):
    """Computes cosine distance between each input and each cluster center.

    Args:
      inputs: list of input Tensors.
      clusters: cluster Tensor.
      inputs_normalized: if True, it assumes that inp and clusters are
        normalized and computes the dot product, which is equivalent to the
        cosine distance. Otherwise it L2-normalizes the inputs first.

    Returns:
      list of Tensors, where each element corresponds to each element in inp.
      The value is the distance of each row to all the cluster centers.
    """
    output = []
    if not inputs_normalized:
      with ops.colocate_with(clusters):
        clusters = nn_impl.l2_normalize(clusters, dim=1)
    for inp in inputs:
      with ops.colocate_with(inp):
        if not inputs_normalized:
          inp = nn_impl.l2_normalize(inp, dim=1)
        output.append(1 - math_ops.matmul(inp, clusters, transpose_b=True))
    return output
Developer: AliMiraftab, Project: tensorflow, Lines: 24, Source: clustering_ops.py
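The docstring above relies on the identity that, once both rows are L2-normalized, a plain matrix product of inputs and cluster centers already gives cosine similarity, so the distance is one minus that product. A short NumPy check of this identity with made-up vectors (illustrative only, not part of the TensorFlow source):

import numpy as np

x = np.array([[1.0, 1.0]])  # one input row
c = np.array([[2.0, 0.0]])  # one cluster center

# Direct cosine distance: 1 - <x, c> / (||x|| * ||c||).
direct = 1 - x.dot(c.T) / (np.linalg.norm(x) * np.linalg.norm(c))

# Same value after L2-normalizing both rows, as _compute_cosine_distance does.
xn = x / np.linalg.norm(x, axis=1, keepdims=True)
cn = c / np.linalg.norm(c, axis=1, keepdims=True)
via_normalized = 1 - xn.dot(cn.T)

print(direct, via_normalized)  # both are 1 - cos(45 degrees), about 0.2929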

Example 14: _add_new_centers

 def _add_new_centers(self):
   """Adds some centers and returns the number of centers remaining."""
   new_centers = self._choose_initial_centers()
   if self._distance_metric == COSINE_DISTANCE:
     new_centers = nn_impl.l2_normalize(new_centers, dim=1)
   # If cluster_centers is empty, it doesn't have the right shape for concat.
   all_centers = control_flow_ops.cond(
       math_ops.equal(self._num_selected, 0), lambda: new_centers,
       lambda: array_ops.concat([self._cluster_centers, new_centers], 0))
   # TODO(ccolby): De-dupe all_centers?
   a = state_ops.assign(
       self._cluster_centers, all_centers, validate_shape=False)
   if self._cluster_centers_updated is not self._cluster_centers:
     a = state_ops.assign(
         self._cluster_centers_updated, a, validate_shape=False)
   return self._num_clusters - array_ops.shape(a)[0]
Developer: AnddyWang, Project: tensorflow, Lines: 16, Source: clustering_ops.py

Example 15: _sample_kmc2_chain

 def _sample_kmc2_chain():
   """Returns previous centers as well as a new center sampled using k-MC2.
   """
   # Extract the subset from the underlying batch.
   start = i * self._kmc2_chain_length
   end = start + self._kmc2_chain_length
   subset = first_shard[start:end]
   # Compute the distances from points in the subset to previous centers.
   _, distances = gen_clustering_ops.nearest_neighbors(
       subset, self._cluster_centers, 1)
   # Sample index of new center using k-MC2 Markov chain.
   new_center_index = gen_clustering_ops.kmc2_chain_initialization(
       array_ops.squeeze(distances), self._random_seed)
   # Extract actual new center.
   newly_sampled_center = array_ops.reshape(subset[new_center_index],
                                            [1, -1])
   # Return concatenation with previously sampled centers.
   if self._distance_metric == COSINE_DISTANCE:
     newly_sampled_center = nn_impl.l2_normalize(
         newly_sampled_center, dim=1)
   return array_ops.concat([self._cluster_centers, newly_sampled_center],
                           0)
Developer: AnddyWang, Project: tensorflow, Lines: 22, Source: clustering_ops.py


Note: The tensorflow.python.ops.nn_impl.l2_normalize function examples in this article were compiled by 纯净天空 from GitHub/MSDocs and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors. Please consult the corresponding project's license before distributing or using the code; do not reproduce without permission.