

Python Dataset.from_tensor_slices Method Code Examples

This article collects typical usage examples of the Python method tensorflow.python.data.Dataset.from_tensor_slices. If you are unsure what Dataset.from_tensor_slices does or how to use it, the curated examples below may help. You can also explore further usage examples of the containing class, tensorflow.python.data.Dataset.


The following shows 9 code examples of the Dataset.from_tensor_slices method, ordered by popularity by default.
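Before diving into the examples, here is a minimal orientation sketch (not taken from any of the projects below) of what from_tensor_slices does: it splits its input along the first dimension, yielding one dataset element per slice. The sketch assumes a TensorFlow 1.x release with eager execution and uses the public tf.data alias rather than the internal tensorflow.python.data path imported in the examples.

import numpy as np
import tensorflow as tf

tf.enable_eager_execution()  # TF 1.x; eager is already on by default in TF 2.x

# from_tensor_slices splits the array along axis 0,
# producing one scalar element per row.
ds = tf.data.Dataset.from_tensor_slices(np.array([10, 20, 30]))
for element in ds:
    print(element.numpy())  # prints 10, then 20, then 30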

Example 1: benchmarkSliceBatchCacheRepeatCallable

# Required module import: from tensorflow.python.data import Dataset [as alias]
# Or: from tensorflow.python.data.Dataset import from_tensor_slices [as alias]
  def benchmarkSliceBatchCacheRepeatCallable(self):
    input_size = 10000
    batch_size = 100
    num_epochs = 100

    input_data = np.random.randn(input_size)

    dataset = (
        Dataset.from_tensor_slices(input_data).batch(batch_size).cache()
        .repeat(num_epochs))
    iterator = datasets.Iterator(dataset)

    ends = [time.time()]
    for _ in iterator:
      ends.append(time.time())

    deltas = np.ediff1d(ends)
    median_wall_time = np.median(deltas)
    print(
        'Slice/batch/cache/repeat eager input size: %d batch size: %d Median '
        'wall time per element: %f'
        % (input_size, batch_size, median_wall_time))
    self.report_benchmark(
        iters=len(deltas),
        wall_time=median_wall_time,
        name='benchmark_slice_batch_cache_repeat_eager_input_%d_batch_%d' %
        (input_size, batch_size))
Developer ID: aeverall, Project: tensorflow, Lines of code: 29, Source file: datasets_test.py

Example 2: testMultipleIteratorsOnADatasetThatUsesFunctions

# Required module import: from tensorflow.python.data import Dataset [as alias]
# Or: from tensorflow.python.data.Dataset import from_tensor_slices [as alias]
  def testMultipleIteratorsOnADatasetThatUsesFunctions(self):
    ds = Dataset.from_tensor_slices([1, 2, 3, 4, 5, 6]).map(math_ops.square)

    got1 = [x.numpy() for x in datasets.Iterator(ds)]
    self.assertAllEqual([1, 4, 9, 16, 25, 36], got1)
    got2 = [x.numpy() for x in datasets.Iterator(ds)]
    self.assertAllEqual(got1, got2)
Developer ID: AbhinavJain13, Project: tensorflow, Lines of code: 9, Source file: datasets_test.py

Example 3: my_input_fn

# Required module import: from tensorflow.python.data import Dataset [as alias]
# Or: from tensorflow.python.data.Dataset import from_tensor_slices [as alias]
def my_input_fn(features, targets, batch_size=1, shuffle=True, num_epochs=None):
    """ Trains a linear regression model of one feature.
    Args:
        :param features: pandas DataFrame of features
        :param targets: pandas DataFrame of targets
        :param batch_size: size of batches to be passed to the model
        :param shuffle: weather to shuffle the data
        :param num_epochs: number of epochs for which data should be repeated. None = repeat indefinitely
    :return:
        Tuple of (features, labels) for next data batch
    """
    # Convert pandas data into a dict of np arrays.
    features = {key: np.array(value) for key, value in dict(features).items()}

    # Construct a dataset, and configure batching/repeating.
    ds = Dataset.from_tensor_slices((features, targets))
    ds = ds.batch(batch_size).repeat(num_epochs)

    # Shuffle the data, if specified.
    if shuffle:
        ds = ds.shuffle(buffer_size=10000)

    # Return the next batch of data.
    features, labels = ds.make_one_shot_iterator().get_next()
    return features, labels
Developer ID: Charlot, Project: demo, Lines of code: 27, Source file: 01-load-lib-and-data.py
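As a hedged usage sketch (not part of the original project): an input function like my_input_fn is normally handed to a TF 1.x Estimator through a lambda, so the Dataset is only built when the Estimator calls it during training. The feature column name 'my_feature' and the my_features/my_targets DataFrames below are illustrative assumptions.

# Illustrative wiring of my_input_fn into a TF 1.x Estimator.
import tensorflow as tf

feature_columns = [tf.feature_column.numeric_column('my_feature')]
regressor = tf.estimator.LinearRegressor(feature_columns=feature_columns)

# The lambda defers building the Dataset until the Estimator needs it.
regressor.train(
    input_fn=lambda: my_input_fn(my_features, my_targets, batch_size=5),
    steps=100)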

Example 4: testSaveRestoreMultipleIterator

# Required module import: from tensorflow.python.data import Dataset [as alias]
# Or: from tensorflow.python.data.Dataset import from_tensor_slices [as alias]
  def testSaveRestoreMultipleIterator(self):
    checkpoint_directory = self.get_temp_dir()
    checkpoint_prefix = os.path.join(checkpoint_directory, 'ckpt')
    dataset = Dataset.from_tensor_slices([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
    dataset = dataset.map(math_ops.square).batch(2)
    iterator_1 = datasets.Iterator(dataset)
    iterator_2 = datasets.Iterator(dataset)
    dataset_2 = Dataset.range(10)
    iterator_3 = datasets.Iterator(dataset_2)

    checkpoint = checkpointable_utils.Checkpoint(
        iterator_1=iterator_1, iterator_2=iterator_2, iterator_3=iterator_3)
    self.assertAllEqual([1, 4], iterator_1.get_next().numpy())
    self.assertEqual(0, iterator_3.get_next().numpy())
    self.assertEqual(1, iterator_3.get_next().numpy())
    self.assertEqual(2, iterator_3.get_next().numpy())

    save_path = checkpoint.save(checkpoint_prefix)
    self.assertAllEqual([1, 4], iterator_2.get_next().numpy())
    self.assertAllEqual([9, 16], iterator_2.get_next().numpy())
    self.assertEqual(3, iterator_3.get_next().numpy())
    checkpoint.restore(save_path)
    self.assertAllEqual([9, 16], iterator_1.get_next().numpy())
    self.assertAllEqual([1, 4], iterator_2.get_next().numpy())
    self.assertEqual(3, iterator_3.get_next().numpy())
Developer ID: aeverall, Project: tensorflow, Lines of code: 27, Source file: datasets_test.py

Example 5: testMapCaptureLookupTable

# Required module import: from tensorflow.python.data import Dataset [as alias]
# Or: from tensorflow.python.data.Dataset import from_tensor_slices [as alias]
  def testMapCaptureLookupTable(self):
    default_val = -1
    keys = constant_op.constant(['brain', 'salad', 'surgery'])
    values = constant_op.constant([0, 1, 2], dtypes.int64)
    table = lookup.HashTable(
        lookup.KeyValueTensorInitializer(keys, values), default_val)
    dataset = Dataset.from_tensor_slices(['brain', 'salad', 'surgery'])
    dataset = dataset.map(table.lookup)
    it = datasets.Iterator(dataset)
    got = [x.numpy() for x in it]
    self.assertAllEqual([0, 1, 2], got)
Developer ID: aeverall, Project: tensorflow, Lines of code: 13, Source file: datasets_test.py

Example 6: testTensorsExplicitPrefetchToDevice

# Required module import: from tensorflow.python.data import Dataset [as alias]
# Or: from tensorflow.python.data.Dataset import from_tensor_slices [as alias]
  def testTensorsExplicitPrefetchToDevice(self):
    ds = Dataset.from_tensor_slices([0., 1.])
    ds = ds.apply(prefetching_ops.prefetch_to_device(test.gpu_device_name()))

    with self.assertRaisesRegexp(TypeError, 'prefetch_to_device'):
      datasets.Iterator(ds)

    for i, x in enumerate(ds):
      with ops.device(test.gpu_device_name()):
        x = math_ops.add(x, x)
        self.assertEqual(float(i) + float(i), x.numpy())
Developer ID: syed-ahmed, Project: tensorflow, Lines of code: 13, Source file: datasets_test.py

Example 7: testSaveRestore

# Required module import: from tensorflow.python.data import Dataset [as alias]
# Or: from tensorflow.python.data.Dataset import from_tensor_slices [as alias]
  def testSaveRestore(self):
    checkpoint_directory = self.get_temp_dir()
    checkpoint_prefix = os.path.join(checkpoint_directory, 'ckpt')
    dataset = Dataset.from_tensor_slices([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
    dataset = dataset.map(math_ops.square).batch(2)
    iterator = datasets.Iterator(dataset)
    checkpoint = checkpointable_utils.Checkpoint(iterator=iterator)
    self.assertAllEqual([1, 4], iterator.get_next().numpy())
    save_path = checkpoint.save(checkpoint_prefix)
    self.assertAllEqual([9, 16], iterator.get_next().numpy())
    self.assertAllEqual([25, 36], iterator.get_next().numpy())
    checkpoint.restore(save_path)
    self.assertAllEqual([9, 16], iterator.get_next().numpy())
    self.assertAllEqual([25, 36], iterator.get_next().numpy())
Developer ID: aeverall, Project: tensorflow, Lines of code: 16, Source file: datasets_test.py

Example 8: my_input_fn

# Required module import: from tensorflow.python.data import Dataset [as alias]
# Or: from tensorflow.python.data.Dataset import from_tensor_slices [as alias]
def my_input_fn(features, targets, batch_size=1, shuffle=True, num_epochs=None):
    # Convert the pandas data into a dict of numpy arrays.
    features = {key: np.array(value) for key, value in dict(features).items()}

    # Construct a TensorFlow Dataset, and configure batching and repeating.
    ds = Dataset.from_tensor_slices((features, targets))
    ds = ds.batch(batch_size).repeat(num_epochs)

    # Shuffle the data, if specified.
    if shuffle:
        ds = ds.shuffle(buffer_size=10000)

    # Return the next batch of data.
    features, labels = ds.make_one_shot_iterator().get_next()
    return features, labels
Developer ID: Ralph-Wang, Project: MyPythonCookBook, Lines of code: 17, Source file: basic.py

Example 9: testSparseTensorElements

# Required module import: from tensorflow.python.data import Dataset [as alias]
# Or: from tensorflow.python.data.Dataset import from_tensor_slices [as alias]
  def testSparseTensorElements(self):
    components = (sparse_tensor.SparseTensorValue(
        indices=np.array([[0, 0], [1, 0], [2, 0]]),
        values=np.array([0, 0, 0]),
        dense_shape=np.array([3, 1])),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0, 0], [1, 1], [2, 2]]),
                      values=np.array([1, 2, 3]),
                      dense_shape=np.array([3, 3])))

    expected = [
        (sparse_tensor.SparseTensorValue(
            indices=np.array([[0]]),
            values=np.array([0]),
            dense_shape=np.array([1])),
         sparse_tensor.SparseTensorValue(
             indices=np.array([[0]]),
             values=np.array([1]),
             dense_shape=np.array([3]))),
        (sparse_tensor.SparseTensorValue(
            indices=np.array([[0]]),
            values=np.array([0]),
            dense_shape=np.array([1])),
         sparse_tensor.SparseTensorValue(
             indices=np.array([[1]]),
             values=np.array([2]),
             dense_shape=np.array([3]))),
        (sparse_tensor.SparseTensorValue(
            indices=np.array([[0]]),
            values=np.array([0]),
            dense_shape=np.array([1])),
         sparse_tensor.SparseTensorValue(
             indices=np.array([[2]]),
             values=np.array([3]),
             dense_shape=np.array([3]))),
    ]

    for i, result in enumerate(
        datasets.Iterator(Dataset.from_tensor_slices(components))):
      self.assertSparseValuesEqual(expected[i][0], result[0])
      self.assertSparseValuesEqual(expected[i][1], result[1])
Developer ID: aeverall, Project: tensorflow, Lines of code: 43, Source file: datasets_test.py
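The lockstep slicing that Example 9 relies on also applies to dense components: when from_tensor_slices receives a tuple, every component is sliced along its first dimension in parallel. A small dense-tensor sketch (again assuming TF 1.x eager execution and the public tf.data alias, not taken from the project above):

import numpy as np
import tensorflow as tf

tf.enable_eager_execution()  # TF 1.x

# Each element is a (row of xs, corresponding entry of ys) pair.
xs = np.array([[1, 2], [3, 4], [5, 6]])
ys = np.array([0, 1, 0])
for x, y in tf.data.Dataset.from_tensor_slices((xs, ys)):
    print(x.numpy(), y.numpy())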


Note: the tensorflow.python.data.Dataset.from_tensor_slices examples in this article were curated by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from community-contributed open-source projects; copyright remains with the original authors, and any redistribution or use should follow the corresponding project's license. Do not reproduce without permission.