

Python tensorflow.transform Code Examples

This article collects typical usage examples of tensorflow.transform (the tf.Transform / tensorflow_transform library) in Python. If you are unsure what tensorflow.transform does, how to call it, or what real-world usage looks like, the curated code examples below may help. You can also explore further usage examples of tensorflow, the package these examples build on.


The following presents 15 code examples of tensorflow.transform, sorted by popularity by default.
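For orientation before the examples: tf.Transform is used by writing a preprocessing_fn that maps raw feature tensors to transformed ones. The following is a minimal sketch of such a function; the feature names ('x', 's') and the tft alias are illustrative assumptions, not taken from any example below.

import tensorflow as tf
import tensorflow_transform as tft

def preprocessing_fn(inputs):
  """Maps raw feature tensors to transformed feature tensors."""
  return {
      # Scale a numeric feature to zero mean and unit variance
      # (requires a full-pass analysis over the dataset).
      'x_scaled': tft.scale_to_z_score(inputs['x']),
      # Build a vocabulary over a string feature and map it to integer ids.
      's_integerized': tft.compute_and_apply_vocabulary(inputs['s']),
  }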

Example 1: get_analyze_input_columns

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def get_analyze_input_columns(preprocessing_fn, specs):
  """Return columns that are required inputs of `AnalyzeDataset`.

  Args:
    preprocessing_fn: A tf.transform preprocessing_fn.
    specs: A dict of feature name to feature specification or tf.TypeSpecs.

  Returns:
    A list of columns that are required inputs of analyzers.
  """

  with tf.compat.v1.Graph().as_default() as graph:
    input_signature = impl_helper.batched_placeholders_from_specs(
        specs)
    _ = preprocessing_fn(input_signature.copy())

    tensor_sinks = graph.get_collection(analyzer_nodes.TENSOR_REPLACEMENTS)
    visitor = _SourcedTensorsVisitor()
    for tensor_sink in tensor_sinks:
      nodes.Traverser(visitor).visit_value_node(tensor_sink.future)

    analyze_input_tensors = graph_tools.get_dependent_inputs(
        graph, input_signature, visitor.sourced_tensors)
    return list(analyze_input_tensors.keys()) 
Contributor: tensorflow, Project: transform, Lines of code: 26, Source: inspect_preprocessing_fn.py

Example 2: get_transform_input_columns

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def get_transform_input_columns(preprocessing_fn, specs):
  """Return columns that are required inputs of `TransformDataset`.

  Args:
    preprocessing_fn: A tf.transform preprocessing_fn.
    specs: A dict of feature name to feature specification or tf.TypeSpecs.

  Returns:
    A list of columns that are required inputs of the transform `tf.Graph`
    defined by `preprocessing_fn`.
  """
  with tf.compat.v1.Graph().as_default() as graph:
    input_signature = impl_helper.batched_placeholders_from_specs(
        specs)
    output_signature = preprocessing_fn(input_signature.copy())
    transform_input_tensors = graph_tools.get_dependent_inputs(
        graph, input_signature, output_signature)
    return list(transform_input_tensors.keys()) 
Contributor: tensorflow, Project: transform, Lines of code: 20, Source: inspect_preprocessing_fn.py
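A hedged usage sketch for the two inspection functions above. The feature spec and preprocessing_fn are hypothetical, and the import path simply mirrors the source file named above (inspect_preprocessing_fn.py); adjust it to your tf.Transform version if needed.

import tensorflow as tf
import tensorflow_transform as tft
from tensorflow_transform import inspect_preprocessing_fn

specs = {
    'x': tf.io.FixedLenFeature([], tf.float32),
    's': tf.io.FixedLenFeature([], tf.string),
}

def preprocessing_fn(inputs):
  return {
      'x_scaled': tft.scale_to_z_score(inputs['x']),  # 'x' feeds an analyzer
      's_copy': tf.identity(inputs['s']),             # 's' feeds no analyzer
  }

# Analysis only needs 'x'; running the transform graph needs both features.
print(inspect_preprocessing_fn.get_analyze_input_columns(preprocessing_fn, specs))
print(inspect_preprocessing_fn.get_transform_input_columns(preprocessing_fn, specs))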

Example 3: encode

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def encode(self, instance):
    """Encode a tf.transform encoded dict as tf.Example."""
    # The feature handles encode using the self._encode_example_cache.
    for feature_handler in self._feature_handlers:
      value = instance[feature_handler.name]
      try:
        feature_handler.encode_value(value)
      except TypeError as e:
        raise TypeError('%s while encoding feature "%s"' %
                        (e, feature_handler.name))

    if self._serialized:
      return self._encode_example_cache.SerializeToString()

    result = tf.train.Example()
    result.CopyFrom(self._encode_example_cache)
    return result 
Contributor: tensorflow, Project: transform, Lines of code: 19, Source: example_proto_coder.py

Example 4: _example_serving_receiver_fn

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def _example_serving_receiver_fn(tf_transform_output, schema):
  """Build the serving in inputs.

  Args:
    tf_transform_output: A TFTransformOutput.
    schema: the schema of the input data.

  Returns:
    Tensorflow graph which parses examples, applying tf-transform to them.
  """
  raw_feature_spec = _get_raw_feature_spec(schema)
  raw_feature_spec.pop(_LABEL_KEY)

  raw_input_fn = tf.estimator.export.build_parsing_serving_input_receiver_fn(
      raw_feature_spec, default_batch_size=None)
  serving_input_receiver = raw_input_fn()

  transformed_features = tf_transform_output.transform_raw_features(
      serving_input_receiver.features)

  return tf.estimator.export.ServingInputReceiver(
      transformed_features, serving_input_receiver.receiver_tensors) 
Contributor: kubeflow, Project: pipelines, Lines of code: 24, Source: taxi_utils.py

Example 5: _flat_input_serving_receiver_fn

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def _flat_input_serving_receiver_fn(tf_transform_output, schema):
  """Build the serving function for flat list of Dense tensors as input.

  Args:
    tf_transform_output: A TFTransformOutput.
    schema: the schema of the input data.

  Returns:
    Tensorflow graph which parses examples, applying tf-transform to them.
  """
  raw_feature_spec = _get_raw_feature_spec(schema)
  raw_feature_spec.pop(_LABEL_KEY)

  raw_input_fn = tf.estimator.export.build_parsing_serving_input_receiver_fn(
      raw_feature_spec, default_batch_size=None)
  serving_input_receiver = raw_input_fn()

  transformed_features = tf_transform_output.transform_raw_features(
      serving_input_receiver.features)

  # We construct a receiver function that receives flat list of Dense tensors as
  # features. This is as per BigQuery ML serving requirements.
  return tf.estimator.export.ServingInputReceiver(
      transformed_features, serving_input_receiver.features) 
Contributor: tensorflow, Project: tfx, Lines of code: 26, Source: taxi_utils_bqml.py

Example 6: _example_serving_receiver_fn

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def _example_serving_receiver_fn(transform_output, schema):
  """Build the serving in inputs.

  Args:
    transform_output: directory in which the tf-transform model was written
      during the preprocessing step.
    schema: the schema of the input data.

  Returns:
    Tensorflow graph which parses examples, applying tf-transform to them.
  """
  raw_feature_spec = _get_raw_feature_spec(schema)
  raw_feature_spec.pop(_LABEL_KEY)

  raw_input_fn = tf.estimator.export.build_parsing_serving_input_receiver_fn(
      raw_feature_spec, default_batch_size=None)
  serving_input_receiver = raw_input_fn()

  _, transformed_features = (
      saved_transform_io.partially_apply_saved_transform(
          os.path.join(transform_output, transform_fn_io.TRANSFORM_FN_DIR),
          serving_input_receiver.features))

  return tf.estimator.export.ServingInputReceiver(
      transformed_features, serving_input_receiver.receiver_tensors) 
Contributor: tensorflow, Project: tfx, Lines of code: 27, Source: taxi_utils_slack.py

Example 7: encode

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def encode(self, instance):
    """Encode a tf.transform encoded dict to a csv-formatted string.

    Args:
      instance: A python dictionary where the keys are the column names and
        the values are fixed len or var len encoded features.

    Returns:
      A csv-formatted string. The order of the columns is given by column_names.
    """
    string_list = [None] * len(self._column_names)
    for feature_handler in self._feature_handlers:
      try:
        feature_handler.encode_value(string_list,
                                     instance[feature_handler.name])
      except TypeError as e:
        raise TypeError('%s while encoding feature "%s"' %
                        (e, feature_handler.name))
    return self._encoder.encode_record(string_list)

  # Please run tensorflow_transform/coders/benchmark_coders_test.py
  # if you make any changes on these methods. 
Contributor: amygdala, Project: code-snippets, Lines of code: 24, Source: mcsv_coder.py
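The coder above comes from a modified CSV coder in the amygdala/code-snippets project. For comparison, a hedged sketch of the stock CSV coder that shipped with tf.Transform in the same era; the column names, feature spec, and the tft.coders.CsvCoder signature are assumptions about that version and may not exist in current releases.

import tensorflow as tf
import tensorflow_transform as tft
from tensorflow_transform.tf_metadata import schema_utils

column_names = ['x', 's']
schema = schema_utils.schema_from_feature_spec({
    'x': tf.io.FixedLenFeature([], tf.float32),
    's': tf.io.FixedLenFeature([], tf.string),
})

# Assumed available in the tf.Transform version these examples target.
coder = tft.coders.CsvCoder(column_names, schema)
# Instance dict -> one CSV-formatted line, columns ordered per column_names.
print(coder.encode({'x': 1.0, 's': 'hello'}))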

Example 8: preprocess

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def preprocess(inputs):
    """tf.transform's callback function for preprocessing inputs.
    Args:
      inputs: map from feature keys to raw not-yet-transformed features.
    Returns:
      Map from string feature key to transformed feature operations.
    """
    outputs = {}
    for key in DENSE_FLOAT_FEATURE_KEYS:
        # Preserve this feature as a dense float, setting nan's to the mean.
        outputs[key] = transform.scale_to_z_score(inputs[key])

    for key in VOCAB_FEATURE_KEYS:
        # Build a vocabulary for this feature.
        if inputs[key].dtype == tf.string:
            vocab_tensor = inputs[key]
        else:
            vocab_tensor = tf.as_string(inputs[key])
        outputs[key] = transform.string_to_int(
            vocab_tensor, vocab_filename='vocab_' + key,
            top_k=VOCAB_SIZE, num_oov_buckets=OOV_SIZE)

    for key in BUCKET_FEATURE_KEYS:
        outputs[key] = transform.bucketize(inputs[key], FEATURE_BUCKET_COUNT)

    for key in CATEGORICAL_FEATURE_KEYS:
        outputs[key] = tf.to_int64(inputs[key])

    taxi_fare = inputs[FARE_KEY]
    taxi_tip = inputs[LABEL_KEY]
    # Test if the tip was > 20% of the fare.
    tip_threshold = tf.multiply(taxi_fare, tf.constant(0.2))
    outputs[LABEL_KEY] = tf.logical_and(
        tf.logical_not(tf.is_nan(taxi_fare)),
        tf.greater(taxi_tip, tip_threshold))

    return outputs 
Contributor: kubeflow-kale, Project: kale, Lines of code: 39, Source: preprocessing.py
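The preprocess callback above is handed to the Beam implementation of tf.Transform, which runs the analyzers over the data and then applies the transform. Below is a hedged, self-contained sketch of that wiring with a trivial stand-in preprocessing_fn, made-up data, and a placeholder temp directory; none of these names come from the original example.

import apache_beam as beam
import tensorflow as tf
import tensorflow_transform as tft
import tensorflow_transform.beam as tft_beam
from tensorflow_transform.tf_metadata import dataset_metadata, schema_utils

# Metadata describing the raw input: a single float feature, for illustration.
raw_metadata = dataset_metadata.DatasetMetadata(
    schema_utils.schema_from_feature_spec(
        {'x': tf.io.FixedLenFeature([], tf.float32)}))

def tiny_preprocessing_fn(inputs):
  return {'x_scaled': tft.scale_to_z_score(inputs['x'])}

with beam.Pipeline() as pipeline:
  with tft_beam.Context(temp_dir='/tmp/tft_tmp'):  # placeholder temp dir
    raw_data = pipeline | beam.Create([{'x': 1.0}, {'x': 2.0}, {'x': 3.0}])
    (transformed_data, transformed_metadata), transform_fn = (
        (raw_data, raw_metadata)
        | tft_beam.AnalyzeAndTransformDataset(tiny_preprocessing_fn))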

Example 9: transform_features_layer

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def transform_features_layer(self):
    """Creates a `TransformFeaturesLayer` from this transform output.

    If a `TransformFeaturesLayer` has already been created for self, the same
    one will be returned.

    Returns:
      A `TransformFeaturesLayer` instance.
    """
    if self._transform_features_layer is None:
      self._transform_features_layer = TransformFeaturesLayer(self)
    return self._transform_features_layer 
Contributor: tensorflow, Project: transform, Lines of code: 14, Source: output_wrapper.py
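These output_wrapper methods are reached by constructing a tft.TFTransformOutput around the directory written by the Transform step. A hedged sketch; the directory path is a placeholder.

import tensorflow_transform as tft

# Placeholder path; in practice this is the output directory produced by tf.Transform.
tft_output = tft.TFTransformOutput('/tmp/transform_output')

feature_spec = tft_output.transformed_feature_spec()  # feature spec of the transformed data
tft_layer = tft_output.transform_features_layer()     # Keras layer that applies the saved transform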

Example 10: load_transform_graph

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def load_transform_graph(self):
    """Load the transform graph without replacing any placeholders.

    This is necessary to ensure that variables in the transform graph are
    included in the training checkpoint when using tf.Estimator.  This should
    be called in the training input_fn.
    """
    saved_transform_io.partially_apply_saved_transform_internal(
        self.transform_savedmodel_dir, {}) 
Contributor: tensorflow, Project: transform, Lines of code: 11, Source: output_wrapper.py

Example 11: raw_metadata

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def raw_metadata(self):
    """A DatasetMetadata.

    Note: raw_metadata is not guaranteed to exist in the output of tf.transform
    and hence using this could fail, if raw_metadata is not present in
    TFTransformOutput.

    Returns:
      A DatasetMetadata
    """
    if self._raw_metadata is None:
      self._raw_metadata = metadata_io.read_metadata(
          os.path.join(self._transform_output_dir, self.RAW_METADATA_DIR))
    return self._raw_metadata 
Contributor: tensorflow, Project: transform, Lines of code: 16, Source: output_wrapper.py

Example 12: post_transform_statistics_path

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def post_transform_statistics_path(self):
    """Returns the path to the post-transform datum statistics.

    Note: post_transform_statistics is not guaranteed to exist in the output of
    tf.transform and hence using this could fail, if post_transform statistics
    is not present in TFTransformOutput.
    """
    return os.path.join(
        self._transform_output_dir, self.POST_TRANSFORM_FEATURE_STATS_PATH)


# TODO(zoyahav): Use register_keras_serializable directly once we no longer support
# TF<2.1. 
Contributor: tensorflow, Project: transform, Lines of code: 15, Source: output_wrapper.py

Example 13: get_analysis_dataset_keys

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def get_analysis_dataset_keys(
    preprocessing_fn, specs, dataset_keys, input_cache):
  """Computes the dataset keys that are required in order to perform analysis.

  Args:
    preprocessing_fn: A tf.transform preprocessing_fn.
    specs: A dict of feature name to feature specification or tf.TypeSpecs.
    dataset_keys: A set of strings which are dataset keys, they uniquely
      identify these datasets across analysis runs.
    input_cache: A cache dictionary.

  Returns:
    A set of dataset keys that are required for analysis.
  """
  transform_fn_future, _ = _build_analysis_graph_for_inspection(
      preprocessing_fn, specs, dataset_keys, input_cache)

  result = set()
  inspect_visitor = _InspectVisitor(result)
  inspect_traverser = nodes.Traverser(inspect_visitor)
  _ = inspect_traverser.visit_value_node(transform_fn_future)

  # If None is present this means that a flattened version of the entire dataset
  # is required, therefore this will be returning all of the given dataset_keys.
  if any(k.is_flattened_dataset_key() for k in result):
    result = dataset_keys
  return result 
Contributor: tensorflow, Project: transform, Lines of code: 29, Source: analysis_graph_builder.py

Example 14: get_analysis_cache_entry_keys

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def get_analysis_cache_entry_keys(preprocessing_fn, feature_spec, dataset_keys):
  """Computes the cache entry keys that would be useful for analysis.

  Args:
    preprocessing_fn: A tf.transform preprocessing_fn.
    feature_spec: A dict of feature name to feature specification.
    dataset_keys: A set of strings which are dataset keys, they uniquely
      identify these datasets across analysis runs.

  Returns:
    A set of cache entry keys which would be useful for analysis.
  """
  _, cache_dict = _build_analysis_graph_for_inspection(
      preprocessing_fn, feature_spec, dataset_keys, {})
  return set([cache_key for _, cache_key in cache_dict.keys()]) 
Contributor: tensorflow, Project: transform, Lines of code: 17, Source: analysis_graph_builder.py

Example 15: decode

# Required module: import tensorflow [as alias]
# Or: import tensorflow_transform [as alias]
def decode(self, example_proto):
    """Decode tf.Example as a tf.transform encoded dict."""
    if self._serialized:
      example = self._decode_example_cache
      example.ParseFromString(example_proto)
    else:
      example = example_proto

    feature_map = example.features.feature
    return {feature_handler.name: feature_handler.parse_value(feature_map)
            for feature_handler in self._feature_handlers} 
Contributor: tensorflow, Project: transform, Lines of code: 13, Source: example_proto_coder.py
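For context, a hedged round-trip sketch using tft.coders.ExampleProtoCoder, the class these encode/decode examples come from, in the tf.Transform version the examples target (newer releases drop the decode path). The feature spec is an assumption.

import tensorflow as tf
import tensorflow_transform as tft
from tensorflow_transform.tf_metadata import schema_utils

feature_spec = {'x': tf.io.FixedLenFeature([], tf.float32)}
schema = schema_utils.schema_from_feature_spec(feature_spec)

coder = tft.coders.ExampleProtoCoder(schema, serialized=True)
serialized = coder.encode({'x': 1.0})  # instance dict -> serialized tf.Example
print(coder.decode(serialized))        # serialized tf.Example -> instance dict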


Note: The tensorflow.transform examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective developers, and copyright remains with the original authors. Please consult each project's license before using or redistributing the code; do not reproduce this article without permission.