

Python parsing_ops.parse_example Function Code Examples

This article collects typical usage examples of the tensorflow.python.ops.parsing_ops.parse_example function in Python. If you are wondering how parse_example works, how to call it, or what real-world uses look like, the curated examples below should help.


Fifteen code examples of the parse_example function are shown below, sorted by popularity by default.
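As a quick orientation before the collected snippets, here is a minimal, self-contained sketch of the basic call pattern. It is only a sketch, assuming a TensorFlow 1.x graph-mode environment (which matches the examples below); the feature names "age" and "tags" are invented for illustration.

import tensorflow as tf
from tensorflow.python.ops import parsing_ops

def _make_example(age, tags):
  # Build one serialized tf.train.Example proto with two features
  # (hypothetical feature names, used only for this sketch).
  return tf.train.Example(features=tf.train.Features(feature={
      "age": tf.train.Feature(int64_list=tf.train.Int64List(value=[age])),
      "tags": tf.train.Feature(bytes_list=tf.train.BytesList(value=tags)),
  })).SerializeToString()

serialized = tf.constant(
    [_make_example(25, [b"a", b"b"]), _make_example(32, [b"c"])])

# A FixedLenFeature parses to a dense Tensor; a VarLenFeature parses to a
# SparseTensor.
features = {
    "age": parsing_ops.FixedLenFeature([1], dtype=tf.int64),
    "tags": parsing_ops.VarLenFeature(dtype=tf.string),
}
parsed = parsing_ops.parse_example(serialized, features)

with tf.Session() as sess:
  out = sess.run(parsed)
  print(out["age"])   # dense values, shape (2, 1)
  print(out["tags"])  # SparseTensorValue (indices, values, dense_shape)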

Example 1: _test

  def _test(self, kwargs, expected_values=None, expected_err=None):
    with self.test_session() as sess:
      if expected_err:
        with self.assertRaisesWithPredicateMatch(expected_err[0],
                                                 expected_err[1]):
          out = parsing_ops.parse_example(**kwargs)
          sess.run(flatten_values_tensors_or_sparse(out.values()))
        return
      else:
        # Returns dict w/ Tensors and SparseTensors.
        out = parsing_ops.parse_example(**kwargs)
        result = flatten_values_tensors_or_sparse(out.values())
        # Check values.
        tf_result = sess.run(result)
        _compare_output_to_expected(self, out, expected_values, tf_result)

      # Check shapes; if serialized is a Tensor we need its size to
      # properly check.
      serialized = kwargs["serialized"]
      batch_size = (serialized.eval().size if isinstance(serialized, ops.Tensor)
                    else np.asarray(serialized).size)
      for k, f in kwargs["features"].items():
        if isinstance(f, parsing_ops.FixedLenFeature) and f.shape is not None:
          self.assertEqual(
              tuple(out[k].get_shape().as_list()), (batch_size,) + f.shape)
        elif isinstance(f, parsing_ops.VarLenFeature):
          self.assertEqual(
              tuple(out[k].indices.get_shape().as_list()), (None, 2))
          self.assertEqual(tuple(out[k].values.get_shape().as_list()), (None,))
          self.assertEqual(
              tuple(out[k].dense_shape.get_shape().as_list()), (2,))
Author: brainwy12, Project: tensorflow, Lines: 31, Source: parsing_ops_test.py

Example 2: _eval_input_fn

  def _eval_input_fn():
    feature_map = parsing_ops.parse_example(
        input_lib.limit_epochs(serialized_examples, num_epochs=1),
        feature_spec)
    features = linear_testing_utils.queue_parsed_features(feature_map)
    labels = features.pop('y')
    return features, labels
Author: 1000sprites, Project: tensorflow, Lines: 7, Source: dnn_linear_combined_test.py

Example 3: create_example_parser_from_signatures

def create_example_parser_from_signatures(signatures, examples_batch,
                                          single_feature_name="feature"):
  """Creates example parser from given signatures.

  Args:
    signatures: Dict of `TensorSignature` objects or single `TensorSignature`.
    examples_batch: string `Tensor` of serialized `Example` protos.
    single_feature_name: string, single feature name.

  Returns:
    features: `Tensor` or `dict` of `Tensor` objects.
  """
  feature_spec = {}
  if not isinstance(signatures, dict):
    feature_spec[single_feature_name] = signatures.get_feature_spec()
  else:
    feature_spec = {key: signatures[key].get_feature_spec()
                    for key in signatures}
  features = parsing_ops.parse_example(examples_batch, feature_spec)
  if not isinstance(signatures, dict):
    # Returns single feature, casts if needed.
    features = features[single_feature_name]
    if not signatures.dtype.is_compatible_with(features.dtype):
      features = math_ops.cast(features, signatures.dtype)
    return features
  # Returns dict of features, casts if needed.
  for name in features:
    if not signatures[name].dtype.is_compatible_with(features[name].dtype):
      features[name] = math_ops.cast(features[name], signatures[name].dtype)
  return features
Author: DavidNemeskey, Project: tensorflow, Lines: 30, Source: tensor_signature.py

Example 4: _predict_input_fn

  def _predict_input_fn():
    feature_map = parsing_ops.parse_example(
        input_lib.limit_epochs(serialized_examples, num_epochs=1),
        feature_spec)
    features = _queue_parsed_features(feature_map)
    features.pop('y')
    return features, None
Author: AndrewTwinz, Project: tensorflow, Lines: 7, Source: dnn_test.py

Example 5: _serving_input_receiver_fn

  def _serving_input_receiver_fn():
    """A receiver function to be passed to export_savedmodel."""
    placeholders = {}
    placeholders[feature_keys.TrainEvalFeatures.TIMES] = (
        array_ops.placeholder(
            name=feature_keys.TrainEvalFeatures.TIMES,
            dtype=dtypes.int64,
            shape=[default_batch_size, default_series_length]))
    # Values are only necessary when filtering. For prediction the default
    # value will be ignored.
    placeholders[feature_keys.TrainEvalFeatures.VALUES] = (
        array_ops.placeholder_with_default(
            name=feature_keys.TrainEvalFeatures.VALUES,
            input=array_ops.zeros(
                shape=[
                    default_batch_size if default_batch_size else 0,
                    default_series_length if default_series_length else 0,
                    self._model.num_features
                ],
                dtype=self._model.dtype),
            shape=(default_batch_size, default_series_length,
                   self._model.num_features)))
    if self._model.exogenous_feature_columns:
      with ops.Graph().as_default():
        # Default placeholders have only an unknown batch dimension. Make them
        # in a separate graph, then splice in the series length to the shapes
        # and re-create them in the outer graph.
        parsed_features = (
            feature_column.make_parse_example_spec(
                self._model.exogenous_feature_columns))
        placeholder_features = parsing_ops.parse_example(
            serialized=array_ops.placeholder(
                shape=[None], dtype=dtypes.string),
            features=parsed_features)
        exogenous_feature_shapes = {
            key: (value.get_shape(), value.dtype) for key, value
            in placeholder_features.items()}
      for feature_key, (batch_only_feature_shape, value_dtype) in (
          exogenous_feature_shapes.items()):
        batch_only_feature_shape = (
            batch_only_feature_shape.with_rank_at_least(1).as_list())
        feature_shape = ([default_batch_size, default_series_length]
                         + batch_only_feature_shape[1:])
        placeholders[feature_key] = array_ops.placeholder(
            dtype=value_dtype, name=feature_key, shape=feature_shape)
    # Models may not know the shape of their state without creating some
    # variables/ops. Avoid polluting the default graph by making a new one. We
    # use only static metadata from the returned Tensors.
    with ops.Graph().as_default():
      self._model.initialize_graph()
      model_start_state = self._model.get_start_state()
    for prefixed_state_name, state_tensor in ts_head_lib.state_to_dictionary(
        model_start_state).items():
      state_shape_with_batch = tensor_shape.TensorShape(
          (default_batch_size,)).concatenate(state_tensor.get_shape())
      placeholders[prefixed_state_name] = array_ops.placeholder(
          name=prefixed_state_name,
          shape=state_shape_with_batch,
          dtype=state_tensor.dtype)
    return export_lib.ServingInputReceiver(placeholders, placeholders)
Author: DILASSS, Project: tensorflow, Lines: 60, Source: estimators.py

Example 6: _writeDummySavedModel

  def _writeDummySavedModel(self, path, feature_name):
    """Writes a classifier with two input features to the given path."""
    with ops.Graph().as_default():
      examples = array_ops.placeholder(dtypes.string, name="input_node")
      feature_configs = {
          feature_name: parsing_ops.FixedLenFeature(shape=[],
                                                    dtype=dtypes.float32),
      }
      features = parsing_ops.parse_example(examples, feature_configs)
      feature = features[feature_name]

      variable_node = variables.VariableV1(1.0, name="variable_node")
      scores = math_ops.multiply(variable_node, feature, name="output_node")
      class_feature = array_ops.fill(array_ops.shape(feature),
                                     "class_%s" % feature_name)
      classes = array_ops.transpose(class_feature)

      with session.Session() as sess:
        sess.run(variables.global_variables_initializer())
        signature = (
            signature_def_utils.classification_signature_def(
                examples=examples,
                classes=classes,
                scores=scores,))
        builder = saved_model_builder.SavedModelBuilder(path)
        builder.add_meta_graph_and_variables(
            sess,
            [tag_constants.SERVING],
            signature_def_map={
                signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                    signature,
            },)
        builder.save(as_text=True)
Author: adit-chandra, Project: tensorflow, Lines: 33, Source: freeze_graph_test.py

Example 7: parse_example

def parse_example(serialized, features, name=None, example_names=None):
  """Parse `Example` protos into a `dict` of labeled tensors.

  See tf.parse_example.

  Args:
    serialized: A 1-D LabeledTensor of strings, a batch of binary serialized
      `Example` protos.
    features: A `dict` mapping feature keys to `labeled_tensor.FixedLenFeature`
      values.
    name: A name for this operation (optional).
    example_names: A vector (1-D Tensor) of strings (optional), the names of
      the serialized protos in the batch.

  Returns:
    A `dict` mapping feature keys to `LabeledTensor` values. The single axis
    from `serialized` will be prepended to the axes provided by each feature.

  Raises:
    ValueError: if any feature is invalid.
  """
  serialized = core.convert_to_labeled_tensor(serialized)
  unlabeled_features = _labeled_to_unlabeled_features(features)

  unlabeled_parsed = parsing_ops.parse_example(
      serialized.tensor, unlabeled_features, name, example_names)

  parsed = {}
  for name, parsed_feature in unlabeled_parsed.items():
    axes = list(serialized.axes.values()) + features[name].axes
    parsed[name] = core.LabeledTensor(parsed_feature, axes)

  return parsed
Author: 1000sprites, Project: tensorflow, Lines: 33, Source: io_ops.py

Example 8: serving_input_receiver_fn

  def serving_input_receiver_fn():
    """An input_fn that expects a serialized tf.Example."""
    serialized_tf_example = array_ops.placeholder(dtype=dtypes.string,
                                                  shape=[default_batch_size],
                                                  name='input_example_tensor')
    receiver_tensors = {'examples': serialized_tf_example}
    features = parsing_ops.parse_example(serialized_tf_example, feature_spec)
    return ServingInputReceiver(features, receiver_tensors)
Author: AbhinavJain13, Project: tensorflow, Lines: 8, Source: export.py

Example 9: input_fn

  def input_fn():
    """An input_fn that expects a serialized tf.Example."""
    serialized_tf_example = array_ops.placeholder(dtype=dtypes.string,
                                                  shape=[default_batch_size],
                                                  name='input_example_tensor')
    inputs = {'examples': serialized_tf_example}
    features = parsing_ops.parse_example(serialized_tf_example, feature_spec)
    labels = None  # these are not known in serving!
    return InputFnOps(features, labels, inputs)
Author: Ajaycs99, Project: tensorflow, Lines: 9, Source: input_fn_utils.py

Example 10: read_batch_features

def read_batch_features(file_pattern, batch_size, features, reader,
                        randomize_input=True, num_epochs=None,
                        queue_capacity=10000, reader_num_threads=1,
                        parser_num_threads=1,
                        name=None):
  """Adds operations to read, queue, batch and parse `Example` protos.

  Given a file pattern (or list of files), sets up a queue of file names,
  reads `Example` protos using the provided `reader`, uses a batch queue to
  create batches of examples of size `batch_size`, and parses each example
  according to the `features` specification.

  All queue runners are added to the queue runners collection, and may be
  started via `start_queue_runners`.

  All ops are added to the default graph.

  Args:
    file_pattern: List of files or pattern of file paths containing
        `Example` records. See `tf.gfile.Glob` for pattern rules.
    batch_size: An int or scalar `Tensor` specifying the batch size to use.
    features: A `dict` mapping feature keys to `FixedLenFeature` or
      `VarLenFeature` values.
    reader: A function or class that returns an object with
      `read` method, (filename tensor) -> (example tensor).
    randomize_input: Whether the input should be randomized.
    num_epochs: Integer specifying the number of times to read through the
      dataset. If None, cycles through the dataset forever. NOTE - If specified,
      creates a variable that must be initialized, so call
      tf.initialize_all_variables() as shown in the tests.
    queue_capacity: Capacity for input queue.
    reader_num_threads: The number of threads to read examples.
    parser_num_threads: The number of threads to parse examples.
    name: Name of resulting op.

  Returns:
    A dict of `Tensor` or `SparseTensor` objects, one for each key in
    `features`.

  Raises:
    ValueError: for invalid inputs.
  """
  with ops.op_scope([file_pattern], name, 'read_batch_features') as scope:
    examples = read_batch_examples(
        file_pattern, batch_size, reader, randomize_input=randomize_input,
        num_epochs=num_epochs, queue_capacity=queue_capacity,
        num_threads=reader_num_threads, name=scope)

    # Parse features into tensors in many threads and put on the queue.
    features_list = []
    for _ in range(parser_num_threads):
      features_list.append(parsing_ops.parse_example(examples, features))
    return input_ops.batch_join(
        features_list,
        batch_size=batch_size,
        capacity=queue_capacity,
        enqueue_many=True,
        name='parse_example_batch_join')
Author: Baaaaam, Project: tensorflow, Lines: 57, Source: graph_io.py
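
For context, a hypothetical call to read_batch_features might look like the sketch below. The file pattern and feature names are assumptions made for illustration, and tf.TFRecordReader is just one reader class that satisfies the `reader` contract described in the docstring.

# Hypothetical usage sketch for read_batch_features (TF 1.x, queue-based).
feature_map = read_batch_features(
    file_pattern="/tmp/train-*.tfrecord",  # assumed path
    batch_size=32,
    features={
        "age": parsing_ops.FixedLenFeature([1], dtype=dtypes.int64),
        "tags": parsing_ops.VarLenFeature(dtype=dtypes.string),
    },
    reader=tf.TFRecordReader)
# The returned tensors are queue-backed; start the queue runners
# (e.g. tf.train.start_queue_runners(sess)) before evaluating them.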

Example 11: parse_feature_columns_from_examples

def parse_feature_columns_from_examples(serialized,
                                        feature_columns,
                                        name=None,
                                        example_names=None):
  """Parses tf.Examples to extract tensors for given feature_columns.

  This is a wrapper around `tf.parse_example`. A typical usage is as follows:

  ```python
  columns_to_tensor = parse_feature_columns_from_examples(
      serialized=my_data,
      feature_columns=my_features)

  # Where my_features are:
  # Define features and transformations
  country = sparse_column_with_keys(column_name="native_country",
                                    keys=["US", "BRA", ...])
  country_emb = embedding_column(sparse_id_column=country, dimension=3,
                                 combiner="sum")
  occupation = sparse_column_with_hash_bucket(column_name="occupation",
                                              hash_bucket_size=1000)
  occupation_emb = embedding_column(sparse_id_column=occupation, dimension=16,
                                   combiner="sum")
  occupation_x_country = crossed_column(columns=[occupation, country],
                                        hash_bucket_size=10000)
  age = real_valued_column("age")
  age_buckets = bucketized_column(
      source_column=age,
      boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65])

  my_features = [occupation_emb, age_buckets, country_emb]
  ```

  Args:
    serialized: A vector (1-D Tensor) of strings, a batch of binary
      serialized `Example` protos.
    feature_columns: An iterable containing all the feature columns. All items
      should be instances of classes derived from _FeatureColumn.
    name: A name for this operation (optional).
    example_names: A vector (1-D Tensor) of strings (optional), the names of
      the serialized protos in the batch.

  Returns:
    A `dict` mapping FeatureColumn to `Tensor` and `SparseTensor` values.
  """
  check_feature_columns(feature_columns)
  columns_to_tensors = parsing_ops.parse_example(
      serialized=serialized,
      features=fc.create_feature_spec_for_parsing(feature_columns),
      name=name,
      example_names=example_names)

  transformer = _Transformer(columns_to_tensors)
  for column in sorted(set(feature_columns), key=lambda x: x.key):
    transformer.transform(column)
  return columns_to_tensors
Author: JamesFysh, Project: tensorflow, Lines: 56, Source: feature_column_ops.py

Example 12: _parse_example

def _parse_example(serialized, features):
  parsed = parsing_ops.parse_example(serialized, features)
  result = []
  for key in sorted(features.keys()):
    val = parsed[key]
    if isinstance(val, sparse_tensor_lib.SparseTensor):
      result.extend([val.indices, val.values, val.dense_shape])
    else:
      result.append(val)
  return tuple(result)
Author: DjangoPeng, Project: tensorflow, Lines: 10, Source: readers.py

Example 13: _serving_input_receiver_fn

  def _serving_input_receiver_fn():
    """A receiver function to be passed to export_savedmodel."""
    placeholders = {}
    time_placeholder = array_ops.placeholder(
        name=feature_keys.TrainEvalFeatures.TIMES,
        dtype=dtypes.int64,
        shape=[default_batch_size, default_series_length])
    placeholders[feature_keys.TrainEvalFeatures.TIMES] = time_placeholder
    # Values are only necessary when filtering. For prediction the default
    # value will be ignored.
    placeholders[feature_keys.TrainEvalFeatures.VALUES] = (
        array_ops.placeholder_with_default(
            name=feature_keys.TrainEvalFeatures.VALUES,
            input=array_ops.zeros(
                shape=[
                    default_batch_size if default_batch_size else 0,
                    default_series_length if default_series_length else 0,
                    self._model.num_features
                ],
                dtype=self._model.dtype),
            shape=(default_batch_size, default_series_length,
                   self._model.num_features)))
    if self._model.exogenous_feature_columns:
      with ops.Graph().as_default():
        # Default placeholders have only an unknown batch dimension. Make them
        # in a separate graph, then splice in the series length to the shapes
        # and re-create them in the outer graph.
        parsed_features = (
            feature_column.make_parse_example_spec(
                self._model.exogenous_feature_columns))
        placeholder_features = parsing_ops.parse_example(
            serialized=array_ops.placeholder(
                shape=[None], dtype=dtypes.string),
            features=parsed_features)
        exogenous_feature_shapes = {
            key: (value.get_shape(), value.dtype) for key, value
            in placeholder_features.items()}
      for feature_key, (batch_only_feature_shape,
                        value_dtype) in (exogenous_feature_shapes.items()):
        batch_only_feature_shape = (
            batch_only_feature_shape.with_rank_at_least(1).as_list())
        feature_shape = ([default_batch_size, default_series_length] +
                         batch_only_feature_shape[1:])
        placeholders[feature_key] = array_ops.placeholder(
            dtype=value_dtype, name=feature_key, shape=feature_shape)
    batch_size_tensor = array_ops.shape(time_placeholder)[0]
    placeholders.update(
        self._model_start_state_placeholders(
            batch_size_tensor, static_batch_size=default_batch_size))
    return export_lib.ServingInputReceiver(placeholders, placeholders)
Author: ahmedsaiduk, Project: tensorflow, Lines: 50, Source: estimators.py

Example 14: _ReadAndCheckRowsUsingFeatures

  def _ReadAndCheckRowsUsingFeatures(self, num_rows):
    self.server.handler.num_rows = num_rows

    with self.test_session() as sess:
      feature_configs = {
          "int64_col":
              parsing_ops.FixedLenFeature(
                  [1], dtype=dtypes.int64),
          "string_col":
              parsing_ops.FixedLenFeature(
                  [1], dtype=dtypes.string, default_value="s_default"),
      }
      reader = cloud.BigQueryReader(
          project_id=_PROJECT,
          dataset_id=_DATASET,
          table_id=_TABLE,
          num_partitions=4,
          features=feature_configs,
          timestamp_millis=1,
          test_end_point=("%s:%s" % (self.server.httpd.server_address[0],
                                     self.server.httpd.server_address[1])))

      key, value = _SetUpQueue(reader)

      seen_rows = []
      features = parsing_ops.parse_example(
          array_ops.reshape(value, [1]), feature_configs)
      for _ in range(num_rows):
        int_value, str_value = sess.run(
            [features["int64_col"], features["string_col"]])

        # Parse values returned from the session.
        self.assertEqual(int_value.shape, (1, 1))
        self.assertEqual(str_value.shape, (1, 1))
        int64_col = int_value[0][0]
        string_col = str_value[0][0]
        seen_rows.append(int64_col)

        # Compare.
        expected_row = _ROWS[int64_col]
        self.assertEqual(int64_col, expected_row[0])
        self.assertEqual(
            compat.as_str(string_col), ("s_%d" % int64_col) if expected_row[1]
            else "s_default")

      self.assertItemsEqual(seen_rows, range(num_rows))

      with self.assertRaisesOpError("is closed and has insufficient elements "
                                    "\\(requested 1, current size 0\\)"):
        sess.run([key, value])
Author: brainwy12, Project: tensorflow, Lines: 50, Source: bigquery_reader_ops_test.py

Example 15: testBasic

  def testBasic(self):
    golden_config = example_parser_configuration_pb2.ExampleParserConfiguration()
    text_format.Parse(BASIC_PROTO, golden_config)
    with session.Session() as sess:
      examples = array_ops.placeholder(dtypes.string, shape=[1])
      feature_to_type = {
          'x': parsing_ops.FixedLenFeature([1], dtypes.float32, 33.0),
          'y': parsing_ops.VarLenFeature(dtypes.string)
      }
      _ = parsing_ops.parse_example(examples, feature_to_type)
      parse_example_op = sess.graph.get_operation_by_name(
          'ParseExample/ParseExample')
      config = extract_example_parser_configuration(parse_example_op, sess)
      self.assertProtoEquals(golden_config, config)
Author: 1000sprites, Project: tensorflow, Lines: 15, Source: example_parser_configuration_test.py


Note: The tensorflow.python.ops.parsing_ops.parse_example examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are drawn from open-source projects contributed by many developers; copyright remains with the original authors, and distribution or use should follow each project's License. Do not reproduce without permission.