This article collects typical usage examples of the Python method tensorflow.python.keras.datasets.mnist.load_data. If you are wondering what mnist.load_data does, how to call it, or what it looks like in real code, the selected examples below may help. You can also browse further usage examples from the containing module, tensorflow.python.keras.datasets.mnist.
Three code examples of mnist.load_data are shown below, sorted by popularity by default.
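Before the examples, here is a minimal standalone sketch of what the method returns (shapes and dtypes in the comments are those documented for the Keras MNIST loader):

from tensorflow.python.keras.datasets import mnist

# load_data() downloads (and caches) mnist.npz, then returns two tuples of NumPy arrays:
# (x_train, y_train) with shapes (60000, 28, 28) / (60000,) and
# (x_test, y_test) with shapes (10000, 28, 28) / (10000,); pixels are uint8 in [0, 255].
(x_train, y_train), (x_test, y_test) = mnist.load_data()
print(x_train.shape, y_train.shape, x_test.shape, y_test.shape)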
Example 1: _make_dataset
# Required import: from tensorflow.python.keras.datasets import mnist [as alias]
# Or: from tensorflow.python.keras.datasets.mnist import load_data [as alias]
def _make_dataset(self):
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    # Flatten each 28x28 image into a 784-dimensional vector.
    x_train = x_train.reshape(60000, 784)
    x_test = x_test.reshape(10000, 784)
    # Build an infinite, shuffled, batched pipeline over the training split.
    dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train))
    dataset = dataset.repeat()
    dataset = dataset.shuffle(self.batch_size * 3)
    dataset = dataset.batch(self.batch_size)

    def _map_fn(image, label):
        # tf.to_float is the TF1 API; scale pixels from [0, 255] to [0, 1].
        image = tf.to_float(image) / 255.
        label.set_shape([self.batch_size])
        label = tf.cast(label, dtype=tf.int32)
        label_onehot = tf.one_hot(label, 10)
        # Restore the spatial layout with a single channel: [batch, 28, 28, 1].
        image = tf.reshape(image, [self.batch_size, 28, 28, 1])
        return common.ImageLabelOnehot(
            image=image, label=label, label_onehot=label_onehot)

    self.dataset = dataset.map(_map_fn)
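As a rough standalone variant of the same pipeline (it swaps the project-specific common.ImageLabelOnehot namedtuple for a plain dict and the TF1 tf.to_float for tf.cast, so the names and API level differ from the original), the idea can be sketched as:

import tensorflow as tf
from tensorflow.python.keras.datasets import mnist

def make_dataset(batch_size=32):
    (x_train, y_train), _ = mnist.load_data()
    dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train))
    dataset = dataset.repeat().shuffle(batch_size * 3).batch(batch_size, drop_remainder=True)

    def _map_fn(image, label):
        # Scale pixels to [0, 1] and add a channel axis: [batch, 28, 28, 1].
        image = tf.cast(image, tf.float32) / 255.
        image = tf.reshape(image, [batch_size, 28, 28, 1])
        label = tf.cast(label, tf.int32)
        return {"image": image, "label": label, "label_onehot": tf.one_hot(label, 10)}

    return dataset.map(_map_fn)

# Each element is a dict with a [batch, 28, 28, 1] float image tensor and one-hot labels.
for batch in make_dataset().take(1):
    print(batch["image"].shape, batch["label_onehot"].shape)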
Example 2: test_io_api
# Required import: from tensorflow.python.keras.datasets import mnist [as alias]
# Or: from tensorflow.python.keras.datasets.mnist import load_data [as alias]
def test_io_api(tmp_path):
    num_instances = 100
    # Only the image and text inputs are kept; the loaded labels are discarded
    # and synthetic regression/classification targets are generated below.
    (image_x, train_y), (test_x, test_y) = mnist.load_data()
    (text_x, train_y), (test_x, test_y) = utils.imdb_raw(
        num_instances=num_instances)

    image_x = image_x[:num_instances]
    text_x = text_x[:num_instances]
    structured_data_x = utils.generate_structured_data(num_instances=num_instances)
    classification_y = utils.generate_one_hot_labels(num_instances=num_instances,
                                                     num_classes=3)
    regression_y = utils.generate_data(num_instances=num_instances, shape=(1,))

    # Build model and train.
    automodel = ak.AutoModel(
        inputs=[
            ak.ImageInput(),
            ak.TextInput(),
            ak.StructuredDataInput()
        ],
        outputs=[ak.RegressionHead(metrics=['mae']),
                 ak.ClassificationHead(loss='categorical_crossentropy',
                                       metrics=['accuracy'])],
        directory=tmp_path,
        max_trials=2,
        tuner=ak.RandomSearch,
        seed=utils.SEED)
    automodel.fit([
        image_x,
        text_x,
        structured_data_x
    ],
        [regression_y, classification_y],
        epochs=1,
        validation_split=0.2)
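Once the search finishes, the AutoModel behaves like a trained estimator. A short continuation (not part of the original test; it only uses the standard AutoModel methods predict and export_model, with inputs passed in the same order as the input nodes above):

# Predict with the best pipeline found during the search.
predicted = automodel.predict([image_x, text_x, structured_data_x])

# Export the best pipeline as a plain tf.keras.Model for saving or further training.
best_model = automodel.export_model()
best_model.summary()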
Example 3: get_mnist_dataset
# Required import: from tensorflow.python.keras.datasets import mnist [as alias]
# Or: from tensorflow.python.keras.datasets.mnist import load_data [as alias]
def get_mnist_dataset():
    (X_train, y_train), (X_test, y_test) = mnist.load_data()
    # Scale pixel values from [0, 255] to [0, 1].
    X_train = X_train.astype('float32') / 255
    X_test = X_test.astype('float32') / 255
    # Add a trailing channel axis: (N, 28, 28) -> (N, 28, 28, 1).
    X_train = X_train[..., None]
    X_test = X_test[..., None]
    # One-hot encode the integer labels for the 10 digit classes.
    Y_train = keras.utils.to_categorical(y_train, 10)
    Y_test = keras.utils.to_categorical(y_test, 10)
    return (X_train, Y_train), (X_test, Y_test)
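A short sketch of how the returned tuples could feed a model (the tiny CNN below is illustrative only and not part of the original example):

from tensorflow import keras

(X_train, Y_train), (X_test, Y_test) = get_mnist_dataset()

# A deliberately small classifier, just to show the expected input/output shapes.
model = keras.Sequential([
    keras.layers.Conv2D(16, 3, activation='relu', input_shape=(28, 28, 1)),
    keras.layers.Flatten(),
    keras.layers.Dense(10, activation='softmax'),
])
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
model.fit(X_train, Y_train, epochs=1, validation_data=(X_test, Y_test))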