

Python tensorflow.string_to_hash_bucket_fast method: code examples

This article collects typical code examples of the tensorflow.string_to_hash_bucket_fast method in Python. If you are looking for concrete, real-world usage of tensorflow.string_to_hash_bucket_fast, the curated examples below should help; they can also serve as a starting point for exploring the surrounding tensorflow API.


Nine code examples of the tensorflow.string_to_hash_bucket_fast method are shown below, sorted by popularity by default.
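Before looking at the project-level examples, here is a minimal, self-contained sketch of the op itself. It assumes TensorFlow 1.x graph mode (in TensorFlow 2.x the same op is exposed as tf.strings.to_hash_bucket_fast); the strings and bucket count are illustrative only.

import tensorflow as tf

ids = tf.constant(["user_42", "user_17", "user_42"])
# Map each string to one of 1000 buckets with a fast, non-cryptographic
# fingerprint (Fingerprint64 mod num_buckets). The mapping is deterministic:
# identical strings always land in the same bucket.
buckets = tf.string_to_hash_bucket_fast(ids, num_buckets=1000)

with tf.Session() as sess:
    print(sess.run(buckets))  # three int64 bucket ids; the first and last are equal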

Example 1: _get_features_dict

# Required import: import tensorflow as tf
# Or: from tensorflow import string_to_hash_bucket_fast
def _get_features_dict(input_dict):
  """Extracts features dict from input dict."""

  source_id = _replace_empty_string_with_random_number(
      input_dict[fields.InputDataFields.source_id])

  hash_from_source_id = tf.string_to_hash_bucket_fast(source_id, HASH_BINS)
  features = {
      fields.InputDataFields.image:
          input_dict[fields.InputDataFields.image],
      HASH_KEY: tf.cast(hash_from_source_id, tf.int32),
      fields.InputDataFields.true_image_shape:
          input_dict[fields.InputDataFields.true_image_shape],
      fields.InputDataFields.original_image_spatial_shape:
          input_dict[fields.InputDataFields.original_image_spatial_shape]
  }
  if fields.InputDataFields.original_image in input_dict:
    features[fields.InputDataFields.original_image] = input_dict[
        fields.InputDataFields.original_image]
  return features 
Author: ahmetozlu | Project: vehicle_counting_tensorflow | Lines: 22 | Source: inputs.py
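HASH_BINS, HASH_KEY and _replace_empty_string_with_random_number are defined elsewhere in inputs.py and are not shown in the excerpt. The hedged sketch below isolates the hashing-and-casting step with an illustrative constant; the only real constraint is that the bucket count stays small enough for the subsequent tf.int32 cast to be lossless.

import tensorflow as tf

HASH_BINS = 2 ** 31 - 1  # illustrative; the real constant lives in inputs.py
source_id = tf.constant(["image_0001", "image_0002"])
hash_from_source_id = tf.string_to_hash_bucket_fast(source_id, HASH_BINS)
# Bucket ids are int64 in [0, HASH_BINS); keeping HASH_BINS <= 2**31 means the
# cast to int32 below cannot overflow.
hash_feature = tf.cast(hash_from_source_id, tf.int32)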

Example 2: _instruction

# Required import: import tensorflow as tf
# Or: from tensorflow import string_to_hash_bucket_fast
def _instruction(self, instruction):
    # Split string.
    splitted = tf.string_split(instruction)
    dense = tf.sparse_tensor_to_dense(splitted, default_value='')
    length = tf.reduce_sum(tf.to_int32(tf.not_equal(dense, '')), axis=1)

    # To int64 hash buckets. Small risk of having collisions. Alternatively, a
    # vocabulary can be used.
    num_hash_buckets = 1000
    buckets = tf.string_to_hash_bucket_fast(dense, num_hash_buckets)

    # Embed the instruction. Embedding size 20 seems to be enough.
    embedding_size = 20
    embedding = snt.Embed(num_hash_buckets, embedding_size)(buckets)

    # Pad to make sure there is at least one output.
    padding = tf.to_int32(tf.equal(tf.shape(embedding)[1], 0))
    embedding = tf.pad(embedding, [[0, 0], [0, padding], [0, 0]])

    core = tf.contrib.rnn.LSTMBlockCell(64, name='language_lstm')
    output, _ = tf.nn.dynamic_rnn(core, embedding, length, dtype=tf.float32)

    # Return last output.
    return tf.reverse_sequence(output, length, seq_axis=1)[:, 0] 
Author: deepmind | Project: scalable_agent | Lines: 26 | Source: experiment.py
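The snt.Embed call above comes from DeepMind's Sonnet library. If Sonnet is not available, the same hash-then-embed pattern can be reproduced with plain TensorFlow 1.x ops; the sketch below is a simplified assumption (bucket count, embedding size, and input strings are illustrative), not the experiment's actual code.

import tensorflow as tf

num_hash_buckets = 1000
embedding_size = 20

instruction = tf.constant(["go to the red door"])
splitted = tf.string_split(instruction)                      # SparseTensor of words
dense = tf.sparse_tensor_to_dense(splitted, default_value='')
buckets = tf.string_to_hash_bucket_fast(dense, num_hash_buckets)

# One trainable vector per hash bucket; colliding words share a vector.
embedding_table = tf.get_variable(
    'instruction_embeddings', [num_hash_buckets, embedding_size])
embedded = tf.nn.embedding_lookup(embedding_table, buckets)  # [batch, max_words, 20]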

Example 3: _graph_fn_call

# Required import: import tensorflow as tf
# Or: from tensorflow import string_to_hash_bucket_fast
def _graph_fn_call(self, text_inputs):
        """
        Args:
            text_inputs (SingleDataOp): The Text input to generate a hash bucket for.

        Returns:
            tuple:
                - SingleDataOp: The hash lookup table (int64) that can be used as input to embedding-lookups.
                - SingleDataOp: The length (number of words) of the longest string in the `text_input` batch.
        """
        if get_backend() == "tf":
            # Split the input string.
            split_text_inputs = tf.string_split(source=text_inputs, delimiter=self.delimiter)
            # Build a dense tensor with one row per item in text_inputs, padding shorter rows with empty strings.
            dense = tf.sparse_tensor_to_dense(sp_input=split_text_inputs, default_value="")

            length = tf.reduce_sum(input_tensor=tf.cast(x=tf.not_equal(x=dense, y=""), dtype=tf.int32), axis=-1)
            if self.hash_function == "fast":
                hash_bucket = tf.string_to_hash_bucket_fast(input=dense, num_buckets=self.num_hash_buckets)
            else:
                hash_bucket = tf.string_to_hash_bucket_strong(input=dense,
                                                              num_buckets=self.num_hash_buckets,
                                                              key=self.hash_keys)

            # Int64 is tf's default for `string_to_hash_bucket` operation: Can leave as is.
            if self.dtype != "int64":
                hash_bucket = tf.cast(x=hash_bucket, dtype=dtype_(self.dtype))

            # Hash-bucket output is always batch-major.
            hash_bucket._batch_rank = 0
            hash_bucket._time_rank = 1

            return hash_bucket, length 
Author: rlgraph | Project: rlgraph | Lines: 35 | Source: string_to_hash_bucket.py
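The fast/strong switch above mirrors the two hashing ops TensorFlow ships: tf.string_to_hash_bucket_fast is quick but unkeyed, so an adversary who controls the input strings could in principle engineer collisions, while tf.string_to_hash_bucket_strong uses a keyed hash at some extra cost. A minimal comparison, with an illustrative key, is sketched below (TF 1.x API assumed).

import tensorflow as tf

words = tf.constant(["go", "to", "the", "red", "door"])
num_buckets = 1000

fast_buckets = tf.string_to_hash_bucket_fast(words, num_buckets)
# The strong variant takes a two-integer key (illustrative values here); keeping
# the key secret makes it hard to craft inputs that deliberately collide.
strong_buckets = tf.string_to_hash_bucket_strong(words, num_buckets, key=[1234, 5678])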

Example 4: _get_features_dict

# Required import: import tensorflow as tf
# Or: from tensorflow import string_to_hash_bucket_fast
def _get_features_dict(input_dict):
  """Extracts features dict from input dict."""
  hash_from_source_id = tf.string_to_hash_bucket_fast(
      input_dict[fields.InputDataFields.source_id], HASH_BINS)
  features = {
      fields.InputDataFields.image:
          input_dict[fields.InputDataFields.image],
      HASH_KEY: tf.cast(hash_from_source_id, tf.int32),
      fields.InputDataFields.true_image_shape:
          input_dict[fields.InputDataFields.true_image_shape]
  }
  if fields.InputDataFields.original_image in input_dict:
    features[fields.InputDataFields.original_image] = input_dict[
        fields.InputDataFields.original_image]
  return features 
Author: ambakick | Project: Person-Detection-and-Tracking | Lines: 17 | Source: inputs.py

Example 5: hash_float

# Required import: import tensorflow as tf
# Or: from tensorflow import string_to_hash_bucket_fast
def hash_float(x, big_num=1000 * 1000):
    """Hash a tensor 'x' into a floating point number in the range [0, 1)."""
    return tf.cast(
        tf.string_to_hash_bucket_fast(x, big_num), tf.float32
    ) / tf.constant(float(big_num)) 
Author: uizard-technologies | Project: realmix | Lines: 7 | Source: utils.py
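A common use of a hash-to-float helper like this is to make per-example decisions that look random but are fully reproducible, because they depend only on the example's string id. The usage below is an assumption for illustration, not code from the realmix project.

import tensorflow as tf

def hash_float(x, big_num=1000 * 1000):
    """Hash a string tensor 'x' into floats in the range [0, 1)."""
    return tf.cast(
        tf.string_to_hash_bucket_fast(x, big_num), tf.float32
    ) / tf.constant(float(big_num))

image_ids = tf.constant(["img_0001.png", "img_0002.png", "img_0003.png"])
# Apply some augmentation to a fixed, reproducible ~50% of examples: the choice
# depends only on each id's hash, so it is stable across runs and machines.
apply_augment = hash_float(image_ids) < 0.5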

Example 6: testStringToOneHashBucketFast

# Required import: import tensorflow as tf
# Or: from tensorflow import string_to_hash_bucket_fast
def testStringToOneHashBucketFast(self):
    with self.test_session():
      input_string = tf.placeholder(tf.string)
      output = tf.string_to_hash_bucket_fast(input_string, 1)
      result = output.eval(feed_dict={input_string: ['a', 'b', 'c']})

      self.assertAllEqual([0, 0, 0], result) 
Author: tobegit3hub | Project: deep_image_model | Lines: 9 | Source: string_to_hash_bucket_op_test.py

Example 7: testStringToHashBucketsFast

# Required import: import tensorflow as tf
# Or: from tensorflow import string_to_hash_bucket_fast
def testStringToHashBucketsFast(self):
    with self.test_session():
      input_string = tf.placeholder(tf.string)
      output = tf.string_to_hash_bucket_fast(input_string, 10)
      result = output.eval(feed_dict={input_string: ['a', 'b', 'c', 'd']})

      # Fingerprint64('a') -> 12917804110809363939 -> mod 10 -> 9
      # Fingerprint64('b') -> 11795596070477164822 -> mod 10 -> 2
      # Fingerprint64('c') -> 11430444447143000872 -> mod 10 -> 2
      # Fingerprint64('d') -> 4470636696479570465 -> mod 10 -> 5
      self.assertAllEqual([9, 2, 2, 5], result) 
Author: tobegit3hub | Project: deep_image_model | Lines: 13 | Source: string_to_hash_bucket_op_test.py

Example 8: hash_in_range

# Required import: import tensorflow as tf
# Or: from tensorflow import string_to_hash_bucket_fast
def hash_in_range(self, buckets, base, limit):
    """Return true if the hashed id falls in the range [base, limit)."""
    hash_bucket = tf.string_to_hash_bucket_fast(self.id, buckets)
    return tf.logical_and(
        tf.greater_equal(hash_bucket, base), tf.less(hash_bucket, limit)) 
Author: google | Project: stereo-magnification | Lines: 7 | Source: datasets.py
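In the stereo-magnification dataset code this helper carves deterministic splits out of a stream of examples keyed by self.id. The sketch below imitates that usage outside the class; the bucket count, boundaries, and example id are made up for illustration.

import tensorflow as tf

def hash_in_range(example_id, buckets, base, limit):
    """True if the hashed id falls in the range [base, limit)."""
    hash_bucket = tf.string_to_hash_bucket_fast(example_id, buckets)
    return tf.logical_and(
        tf.greater_equal(hash_bucket, base), tf.less(hash_bucket, limit))

example_id = tf.constant("scene_000123")
# 100 buckets, split 90/10: buckets 0-89 go to training, 90-99 to validation.
is_train = hash_in_range(example_id, 100, 0, 90)
is_val = hash_in_range(example_id, 100, 90, 100)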

Example 9: call

# Required import: import tensorflow as tf
# Or: from tensorflow import string_to_hash_bucket_fast
def call(self, x, mask=None, **kwargs):
        if x.dtype != tf.string:
            x = tf.as_string(x, )
        try:
            hash_x = tf.string_to_hash_bucket_fast(x, self.num_buckets if not self.mask_zero else self.num_buckets - 1,
                                                    name=None)  # weak hash
        except:
            hash_x = tf.strings.to_hash_bucket_fast(x, self.num_buckets if not self.mask_zero else self.num_buckets - 1,
                                               name=None)  # weak hash
        if self.mask_zero:
            mask_1 = tf.cast(tf.not_equal(x, "0"), 'int64')
            mask_2 = tf.cast(tf.not_equal(x, "0.0"), 'int64')
            mask = mask_1 * mask_2
            hash_x = (hash_x + 1) * mask
        return hash_x 
Author: shenweichen | Project: DeepCTR | Lines: 17 | Source: utils.py
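The try/except above only bridges the API rename between TensorFlow 1.x (tf.string_to_hash_bucket_fast) and 2.x (tf.strings.to_hash_bucket_fast). The subtler part is the mask_zero branch: it hashes into num_buckets - 1 buckets and then shifts every index up by one, so the literal strings "0" and "0.0" (typically padding) end up at index 0. A standalone sketch of that idea, assuming TF 2.x eager execution and illustrative inputs, follows.

import tensorflow as tf

x = tf.constant([["item_7"], ["0"], ["item_9"]])
num_buckets = 100

# Hash into num_buckets - 1 buckets so index 0 stays free for padding.
hash_x = tf.strings.to_hash_bucket_fast(x, num_buckets - 1)
mask = tf.cast(tf.not_equal(x, "0"), tf.int64) * tf.cast(tf.not_equal(x, "0.0"), tf.int64)
# Shift everything up by one and zero out padding positions.
hash_x = (hash_x + 1) * mask
print(hash_x.numpy())  # the "0" row becomes 0; the others fall in 1 .. num_buckets - 1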


Note: The tensorflow.string_to_hash_bucket_fast examples in this article were compiled by 纯净天空 from open-source code hosted on GitHub, MSDocs, and similar platforms. The snippets were selected from community-contributed open-source projects; copyright remains with the original authors, and any redistribution or use should follow the corresponding project's license. Please do not reproduce this compilation without permission.