This article collects typical usage examples of the Python function tensorflow.contrib.keras.python.keras.testing_utils.get_test_data. If you are wondering what get_test_data does, how to call it, or what real-world uses look like, the hand-picked code examples below should help.
A total of 20 code examples of get_test_data are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the site recommend better Python examples.
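Before the individual examples, here is a minimal usage sketch pieced together from the snippets below. The import paths and the exact return shapes are assumptions inferred from those snippets (the function appears to return randomly generated train/test tuples, with the y arrays holding integer class labels), not an authoritative API reference.

# Minimal usage sketch; imports and shapes are inferred from the examples below.
import numpy as np
from tensorflow.contrib.keras.python import keras
from tensorflow.contrib.keras.python.keras import testing_utils

np.random.seed(1337)
# Generates a small random classification dataset:
# x arrays have shape (samples,) + input_shape, y arrays hold integer labels.
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
    train_samples=200,
    test_samples=100,
    input_shape=(10,),
    num_classes=2)

# Most examples one-hot encode the integer labels before training.
y_train = keras.utils.to_categorical(y_train)
y_test = keras.utils.to_categorical(y_test)
print(x_train.shape, y_train.shape)  # expected: (200, 10) (200, 2)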
Example 1: test_image_classification_declarative

def test_image_classification_declarative(self):
  with self.test_session():
    np.random.seed(1337)
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=200,
        test_samples=100,
        input_shape=(8, 8, 3),
        num_classes=2)
    y_train = keras.utils.to_categorical(y_train)
    y_test = keras.utils.to_categorical(y_test)

    model = keras.models.Sequential()
    model.add(keras.layers.Conv2D(
        8, 3,
        activation='relu',
        input_shape=x_train.shape[1:]))
    model.add(keras.layers.BatchNormalization())
    model.add(keras.layers.Conv2D(
        8, 3,
        padding='same',
        activation='relu'))
    model.add(keras.layers.GlobalMaxPooling2D())
    model.add(keras.layers.Dense(y_train.shape[-1], activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=10, batch_size=16,
                        validation_data=(x_test, y_test),
                        verbose=2)
    self.assertGreater(history.history['val_acc'][-1], 0.85)

Developer ID: chdinh, Project: tensorflow, Lines of code: 30, Source file: integration_test.py
Example 2: test_video_classification_functional

def test_video_classification_functional(self):
  with self.test_session():
    np.random.seed(1337)
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=200,
        test_samples=100,
        input_shape=(4, 8, 8, 3),
        num_classes=3)
    y_train = keras.utils.to_categorical(y_train)
    y_test = keras.utils.to_categorical(y_test)

    inputs = keras.layers.Input(shape=x_train.shape[1:])
    x = keras.layers.TimeDistributed(
        keras.layers.Conv2D(4, 3, activation='relu'))(inputs)
    x = keras.layers.BatchNormalization()(x)
    x = keras.layers.TimeDistributed(keras.layers.GlobalMaxPooling2D())(x)
    x = keras.layers.Conv1D(8, 3, activation='relu')(x)
    x = keras.layers.Flatten()(x)
    outputs = keras.layers.Dense(y_train.shape[-1], activation='softmax')(x)

    model = keras.models.Model(inputs, outputs)
    model.compile(loss='categorical_crossentropy',
                  optimizer=keras.optimizers.SGD(lr=0.01, momentum=0.8),
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=10, batch_size=16,
                        validation_data=(x_test, y_test),
                        verbose=2)
    self.assertGreater(history.history['val_acc'][-1], 0.70)

Developer ID: chdinh, Project: tensorflow, Lines of code: 28, Source file: integration_test.py
Example 3: _test_optimizer

def _test_optimizer(optimizer, target=0.75):
  np.random.seed(1337)
  (x_train, y_train), _ = testing_utils.get_test_data(train_samples=1000,
                                                      test_samples=200,
                                                      input_shape=(10,),
                                                      num_classes=2)
  y_train = keras.utils.to_categorical(y_train)
  model = _get_model(x_train.shape[1], 20, y_train.shape[1])
  model.compile(loss='categorical_crossentropy',
                optimizer=optimizer,
                metrics=['accuracy'])
  history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
  assert history.history['acc'][-1] >= target
  config = keras.optimizers.serialize(optimizer)
  optim = keras.optimizers.deserialize(config)
  new_config = keras.optimizers.serialize(optim)
  new_config['class_name'] = new_config['class_name'].lower()
  assert config == new_config

  # Test constraints.
  model = keras.models.Sequential()
  dense = keras.layers.Dense(10,
                             input_shape=(x_train.shape[1],),
                             kernel_constraint=lambda x: 0. * x + 1.,
                             bias_constraint=lambda x: 0. * x + 2.,
                             activation='relu')
  model.add(dense)
  model.add(keras.layers.Dense(y_train.shape[1], activation='softmax'))
  model.compile(loss='categorical_crossentropy',
                optimizer=optimizer,
                metrics=['accuracy'])
  model.train_on_batch(x_train[:10], y_train[:10])
  kernel, bias = dense.get_weights()
  np.testing.assert_allclose(kernel, 1., atol=1e-3)
  np.testing.assert_allclose(bias, 2., atol=1e-3)

Developer ID: jiayouwyhit, Project: tensorflow, Lines of code: 35, Source file: optimizers_test.py
Example 4: test_vector_classification_shared_model

def test_vector_classification_shared_model(self):
  # Test that functional models that feature internal updates
  # and internal losses can be shared.
  with self.test_session():
    np.random.seed(1337)
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=200,
        test_samples=100,
        input_shape=(10,),
        num_classes=2)
    y_train = keras.utils.to_categorical(y_train)
    y_test = keras.utils.to_categorical(y_test)

    inputs = keras.layers.Input(x_train.shape[1:])
    x = keras.layers.Dense(16,
                           activation='relu',
                           kernel_regularizer=keras.regularizers.l2(1e-5),
                           bias_regularizer=keras.regularizers.l2(1e-5),
                           input_shape=x_train.shape[1:])(inputs)
    x = keras.layers.BatchNormalization()(x)
    base_model = keras.models.Model(inputs, x)

    x = keras.layers.Input(x_train.shape[1:])
    y = base_model(x)
    y = keras.layers.Dense(y_train.shape[-1], activation='softmax')(y)
    model = keras.models.Model(x, y)
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=10, batch_size=16,
                        validation_data=(x_test, y_test),
                        verbose=2)
    self.assertGreater(history.history['val_acc'][-1], 0.85)

Developer ID: chdinh, Project: tensorflow, Lines of code: 33, Source file: integration_test.py
Example 5: test_invalid_loss_or_metrics

def test_invalid_loss_or_metrics(self):
  num_classes = 5
  train_samples = 1000
  test_samples = 1000
  input_dim = 5

  with self.test_session():
    model = keras.models.Sequential()
    model.add(keras.layers.Dense(10, input_shape=(input_dim,)))
    model.add(keras.layers.Activation('relu'))
    model.add(keras.layers.Dense(num_classes))
    model.add(keras.layers.Activation('softmax'))
    model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
    np.random.seed(1337)

    (x_train, y_train), (_, _) = testing_utils.get_test_data(
        train_samples=train_samples,
        test_samples=test_samples,
        input_shape=(input_dim,),
        num_classes=num_classes)

    with self.assertRaises(ValueError):
      model.fit(x_train, y_train)

    with self.assertRaises(ValueError):
      model.fit(x_train, np.concatenate([y_train, y_train], axis=-1))

    with self.assertRaises(TypeError):
      model.compile(loss='categorical_crossentropy',
                    optimizer='rmsprop',
                    metrics=set(0))

    with self.assertRaises(RuntimeError):
      model.compile(loss=None,
                    optimizer='rmsprop')

Developer ID: chdinh, Project: tensorflow, Lines of code: 33, Source file: training_test.py
Example 6: test_vector_classification_declarative

def test_vector_classification_declarative(self):
  with self.test_session():
    np.random.seed(1337)
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=200,
        test_samples=100,
        input_shape=(10,),
        num_classes=2)
    y_train = keras.utils.to_categorical(y_train)
    y_test = keras.utils.to_categorical(y_test)

    model = keras.models.Sequential([
        keras.layers.Dense(16,
                           activation='relu',
                           input_shape=x_train.shape[1:]),
        keras.layers.Dropout(0.1),
        keras.layers.Dense(y_train.shape[-1], activation='softmax')
    ])
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=10, batch_size=16,
                        validation_data=(x_test, y_test),
                        verbose=2)
    self.assertGreater(history.history['val_acc'][-1], 0.85)

Developer ID: chdinh, Project: tensorflow, Lines of code: 25, Source file: integration_test.py
Example 7: test_TerminateOnNaN

def test_TerminateOnNaN(self):
  np.random.seed(1337)
  (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
      train_samples=TRAIN_SAMPLES,
      test_samples=TEST_SAMPLES,
      input_shape=(INPUT_DIM,),
      num_classes=NUM_CLASSES)
  y_test = keras.utils.to_categorical(y_test)
  y_train = keras.utils.to_categorical(y_train)

  cbks = [keras.callbacks.TerminateOnNaN()]
  model = keras.models.Sequential()
  initializer = keras.initializers.Constant(value=1e5)
  for _ in range(5):
    model.add(keras.layers.Dense(2,
                                 input_dim=INPUT_DIM,
                                 activation='relu',
                                 kernel_initializer=initializer))
  model.add(keras.layers.Dense(NUM_CLASSES))
  model.compile(loss='mean_squared_error',
                optimizer='rmsprop')

  history = model.fit(x_train, y_train, batch_size=BATCH_SIZE,
                      validation_data=(x_test, y_test),
                      callbacks=cbks, epochs=20)
  loss = history.history['loss']
  assert len(loss) == 1
  assert loss[0] == np.inf

Developer ID: chdinh, Project: tensorflow, Lines of code: 28, Source file: callbacks_test.py
Example 8: test_LearningRateScheduler

def test_LearningRateScheduler(self):
  with self.test_session():
    np.random.seed(1337)
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=TRAIN_SAMPLES,
        test_samples=TEST_SAMPLES,
        input_shape=(INPUT_DIM,),
        num_classes=NUM_CLASSES)
    y_test = keras.utils.to_categorical(y_test)
    y_train = keras.utils.to_categorical(y_train)

    model = keras.models.Sequential()
    model.add(
        keras.layers.Dense(
            NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
    model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
    model.compile(
        loss='categorical_crossentropy',
        optimizer='sgd',
        metrics=['accuracy'])

    cbks = [keras.callbacks.LearningRateScheduler(lambda x: 1. / (1. + x))]
    model.fit(
        x_train,
        y_train,
        batch_size=BATCH_SIZE,
        validation_data=(x_test, y_test),
        callbacks=cbks,
        epochs=5,
        verbose=0)
    assert (float(keras.backend.get_value(model.optimizer.lr)) - 0.2
           ) < keras.backend.epsilon()

Developer ID: chdinh, Project: tensorflow, Lines of code: 31, Source file: callbacks_test.py
Example 9: get_data

def get_data():
  (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
      train_samples=10,
      test_samples=10,
      input_shape=(DATA_DIM,),
      num_classes=NUM_CLASSES)
  y_train = keras.utils.to_categorical(y_train, NUM_CLASSES)
  y_test = keras.utils.to_categorical(y_test, NUM_CLASSES)
  return (x_train, y_train), (x_test, y_test)

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 9, Source file: regularizers_test.py
Example 10: test_stop_training_csv

def test_stop_training_csv(self):
  # Test that using the CSVLogger callback with the TerminateOnNaN callback
  # does not result in invalid CSVs.
  np.random.seed(1337)
  tmpdir = self.get_temp_dir()
  self.addCleanup(shutil.rmtree, tmpdir)

  with self.test_session():
    fp = os.path.join(tmpdir, 'test.csv')
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=TRAIN_SAMPLES,
        test_samples=TEST_SAMPLES,
        input_shape=(INPUT_DIM,),
        num_classes=NUM_CLASSES)
    y_test = keras.utils.to_categorical(y_test)
    y_train = keras.utils.to_categorical(y_train)

    cbks = [keras.callbacks.TerminateOnNaN(), keras.callbacks.CSVLogger(fp)]
    model = keras.models.Sequential()
    for _ in range(5):
      model.add(keras.layers.Dense(2, input_dim=INPUT_DIM, activation='relu'))
    model.add(keras.layers.Dense(NUM_CLASSES, activation='linear'))
    model.compile(loss='mean_squared_error',
                  optimizer='rmsprop')

    def data_generator():
      i = 0
      max_batch_index = len(x_train) // BATCH_SIZE
      tot = 0
      while 1:
        if tot > 3 * len(x_train):
          yield (np.ones([BATCH_SIZE, INPUT_DIM]) * np.nan,
                 np.ones([BATCH_SIZE, NUM_CLASSES]) * np.nan)
        else:
          yield (x_train[i * BATCH_SIZE: (i + 1) * BATCH_SIZE],
                 y_train[i * BATCH_SIZE: (i + 1) * BATCH_SIZE])
        i += 1
        tot += 1
        i %= max_batch_index

    history = model.fit_generator(data_generator(),
                                  len(x_train) // BATCH_SIZE,
                                  validation_data=(x_test, y_test),
                                  callbacks=cbks,
                                  epochs=20)
    loss = history.history['loss']
    assert len(loss) > 1
    assert loss[-1] == np.inf or np.isnan(loss[-1])

    values = []
    with open(fp) as f:
      for x in csv.reader(f):
        values.append(x)

    assert 'nan' in values[-1], 'The last epoch was not logged.'

Developer ID: jiayouwyhit, Project: tensorflow, Lines of code: 54, Source file: callbacks_test.py
Example 11: test_EarlyStopping

def test_EarlyStopping(self):
  with self.test_session():
    np.random.seed(1337)
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=TRAIN_SAMPLES,
        test_samples=TEST_SAMPLES,
        input_shape=(INPUT_DIM,),
        num_classes=NUM_CLASSES)
    y_test = keras.utils.to_categorical(y_test)
    y_train = keras.utils.to_categorical(y_train)

    model = keras.models.Sequential()
    model.add(
        keras.layers.Dense(
            NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
    model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
    model.compile(
        loss='categorical_crossentropy',
        optimizer='rmsprop',
        metrics=['accuracy'])

    mode = 'max'
    monitor = 'val_acc'
    patience = 0
    cbks = [
        keras.callbacks.EarlyStopping(
            patience=patience, monitor=monitor, mode=mode)
    ]
    model.fit(
        x_train,
        y_train,
        batch_size=BATCH_SIZE,
        validation_data=(x_test, y_test),
        callbacks=cbks,
        epochs=20,
        verbose=0)

    mode = 'auto'
    monitor = 'val_acc'
    patience = 2
    cbks = [
        keras.callbacks.EarlyStopping(
            patience=patience, monitor=monitor, mode=mode)
    ]
    model.fit(
        x_train,
        y_train,
        batch_size=BATCH_SIZE,
        validation_data=(x_test, y_test),
        callbacks=cbks,
        epochs=20,
        verbose=0)

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 50, Source file: callbacks_test.py
Example 12: assert_regression_works

def assert_regression_works(reg):
  np.random.seed(42)
  (x_train, y_train), (x_test, _) = testing_utils.get_test_data(
      train_samples=TRAIN_SAMPLES,
      test_samples=TEST_SAMPLES,
      input_shape=(INPUT_DIM,),
      num_classes=NUM_CLASSES)

  reg.fit(x_train, y_train, batch_size=BATCH_SIZE, epochs=EPOCHS)

  score = reg.score(x_train, y_train, batch_size=BATCH_SIZE)
  assert np.isscalar(score) and np.isfinite(score)

  preds = reg.predict(x_test, batch_size=BATCH_SIZE)
  assert preds.shape == (TEST_SAMPLES,)

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 15, Source file: scikit_learn_test.py
Example 13: test_ReduceLROnPlateau

def test_ReduceLROnPlateau(self):
  with self.test_session():
    np.random.seed(1337)
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=TRAIN_SAMPLES,
        test_samples=TEST_SAMPLES,
        input_shape=(INPUT_DIM,),
        num_classes=NUM_CLASSES)
    y_test = keras.utils.to_categorical(y_test)
    y_train = keras.utils.to_categorical(y_train)

    def make_model():
      np.random.seed(1337)
      model = keras.models.Sequential()
      model.add(
          keras.layers.Dense(
              NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
      model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
      model.compile(
          loss='categorical_crossentropy',
          optimizer=keras.optimizers.SGD(lr=0.1),
          metrics=['accuracy'])
      return model

    model = make_model()
    # This should reduce the LR after the first epoch (due to high epsilon).
    cbks = [
        keras.callbacks.ReduceLROnPlateau(
            monitor='val_loss',
            factor=0.1,
            epsilon=10,
            patience=1,
            cooldown=5)
    ]
    model.fit(
        x_train,
        y_train,
        batch_size=BATCH_SIZE,
        validation_data=(x_test, y_test),
        callbacks=cbks,
        epochs=5,
        verbose=0)
    self.assertAllClose(
        float(keras.backend.get_value(model.optimizer.lr)),
        0.01,
        atol=1e-4)

Developer ID: jiayouwyhit, Project: tensorflow, Lines of code: 47, Source file: callbacks_test.py
Example 14: _test_optimizer

def _test_optimizer(optimizer, target=0.75):
  np.random.seed(1337)
  (x_train, y_train), _ = testing_utils.get_test_data(train_samples=1000,
                                                      test_samples=200,
                                                      input_shape=(10,),
                                                      num_classes=2)
  y_train = keras.utils.to_categorical(y_train)
  model = _get_model(x_train.shape[1], 20, y_train.shape[1])
  model.compile(loss='categorical_crossentropy',
                optimizer=optimizer,
                metrics=['accuracy'])
  history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
  assert history.history['acc'][-1] >= target
  config = keras.optimizers.serialize(optimizer)
  optim = keras.optimizers.deserialize(config)
  new_config = keras.optimizers.serialize(optim)
  new_config['class_name'] = new_config['class_name'].lower()
  assert config == new_config

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 18, Source file: optimizers_test.py
Example 15: test_LambdaCallback

def test_LambdaCallback(self):
  with self.test_session():
    np.random.seed(1337)
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=TRAIN_SAMPLES,
        test_samples=TEST_SAMPLES,
        input_shape=(INPUT_DIM,),
        num_classes=NUM_CLASSES)
    y_test = keras.utils.to_categorical(y_test)
    y_train = keras.utils.to_categorical(y_train)

    model = keras.models.Sequential()
    model.add(
        keras.layers.Dense(
            NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
    model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
    model.compile(
        loss='categorical_crossentropy',
        optimizer='sgd',
        metrics=['accuracy'])

    # Start an arbitrary process that should run during model
    # training and be terminated after training has completed.
    def target():
      while True:
        pass

    p = multiprocessing.Process(target=target)
    p.start()
    cleanup_callback = keras.callbacks.LambdaCallback(
        on_train_end=lambda logs: p.terminate())

    cbks = [cleanup_callback]
    model.fit(
        x_train,
        y_train,
        batch_size=BATCH_SIZE,
        validation_data=(x_test, y_test),
        callbacks=cbks,
        epochs=5,
        verbose=0)
    p.join()
    assert not p.is_alive()

Developer ID: chdinh, Project: tensorflow, Lines of code: 42, Source file: callbacks_test.py
Example 16: assert_classification_works

def assert_classification_works(clf):
  np.random.seed(42)
  (x_train, y_train), (x_test, _) = testing_utils.get_test_data(
      train_samples=TRAIN_SAMPLES,
      test_samples=TEST_SAMPLES,
      input_shape=(INPUT_DIM,),
      num_classes=NUM_CLASSES)

  clf.fit(x_train, y_train, batch_size=BATCH_SIZE, epochs=EPOCHS)

  score = clf.score(x_train, y_train, batch_size=BATCH_SIZE)
  assert np.isscalar(score) and np.isfinite(score)

  preds = clf.predict(x_test, batch_size=BATCH_SIZE)
  assert preds.shape == (TEST_SAMPLES,)
  for prediction in np.unique(preds):
    assert prediction in range(NUM_CLASSES)

  proba = clf.predict_proba(x_test, batch_size=BATCH_SIZE)
  assert proba.shape == (TEST_SAMPLES, NUM_CLASSES)
  assert np.allclose(np.sum(proba, axis=1), np.ones(TEST_SAMPLES))

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 21, Source file: scikit_learn_test.py
Example 17: test_temporal_classification_declarative

def test_temporal_classification_declarative(self):
  with self.test_session():
    np.random.seed(1336)
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=200,
        test_samples=100,
        input_shape=(4, 8),
        num_classes=2)
    y_train = keras.utils.to_categorical(y_train)
    y_test = keras.utils.to_categorical(y_test)

    model = keras.models.Sequential()
    model.add(keras.layers.LSTM(3, return_sequences=True,
                                input_shape=x_train.shape[1:]))
    model.add(keras.layers.GRU(y_train.shape[-1], activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=10, batch_size=16,
                        validation_data=(x_test, y_test),
                        verbose=2)
    self.assertGreater(history.history['val_acc'][-1], 0.85)

Developer ID: chdinh, Project: tensorflow, Lines of code: 22, Source file: integration_test.py
Example 18: test_using_tf_layers_in_keras_sequential_model

def test_using_tf_layers_in_keras_sequential_model(self):
  with self.test_session():
    np.random.seed(1337)
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=200,
        test_samples=100,
        input_shape=(10,),
        num_classes=2)

    model = keras.models.Sequential()
    model.add(tf_core_layers.Dense(32, activation=nn.relu, input_shape=(10,)))
    model.add(tf_core_layers.Dense(2, activation=nn.softmax))
    model.summary()

    y_train = keras.utils.to_categorical(y_train)
    y_test = keras.utils.to_categorical(y_test)
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=10, batch_size=16,
                        validation_data=(x_test, y_test),
                        verbose=0)
    self.assertGreater(history.history['val_acc'][-1], 0.85)

Developer ID: chdinh, Project: tensorflow, Lines of code: 23, Source file: integration_test.py
Example 19: test_TensorBoard_with_ReduceLROnPlateau

def test_TensorBoard_with_ReduceLROnPlateau(self):
  with self.test_session():
    temp_dir = self.get_temp_dir()
    self.addCleanup(shutil.rmtree, temp_dir)

    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=TRAIN_SAMPLES,
        test_samples=TEST_SAMPLES,
        input_shape=(INPUT_DIM,),
        num_classes=NUM_CLASSES)
    y_test = keras.utils.to_categorical(y_test)
    y_train = keras.utils.to_categorical(y_train)

    model = keras.models.Sequential()
    model.add(
        keras.layers.Dense(
            NUM_HIDDEN, input_dim=INPUT_DIM, activation='relu'))
    model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
    model.compile(
        loss='binary_crossentropy', optimizer='sgd', metrics=['accuracy'])

    cbks = [
        keras.callbacks.ReduceLROnPlateau(
            monitor='val_loss', factor=0.5, patience=4, verbose=1),
        keras.callbacks.TensorBoard(log_dir=temp_dir)
    ]
    model.fit(
        x_train,
        y_train,
        batch_size=BATCH_SIZE,
        validation_data=(x_test, y_test),
        callbacks=cbks,
        epochs=2,
        verbose=0)

    assert os.path.exists(temp_dir)

Developer ID: chdinh, Project: tensorflow, Lines of code: 37, Source file: callbacks_test.py
Example 20: test_vector_classification_functional

def test_vector_classification_functional(self):
  with self.test_session():
    np.random.seed(1337)
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=200,
        test_samples=100,
        input_shape=(8,),
        num_classes=2)
    y_train = keras.utils.to_categorical(y_train)
    y_test = keras.utils.to_categorical(y_test)

    inputs = keras.layers.Input(shape=x_train.shape[1:])
    x = keras.layers.Dense(8, activation='relu')(inputs)
    x = keras.layers.Dropout(0.1)(x)
    outputs = keras.layers.Dense(y_train.shape[-1], activation='softmax')(x)

    model = keras.models.Model(inputs, outputs)
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=10, batch_size=16,
                        validation_data=(x_test, y_test),
                        verbose=2)
    self.assertTrue(history.history['val_acc'][-1] > 0.85)

Developer ID: LugarkPirog, Project: tensorflow, Lines of code: 24, Source file: integration_test.py
Note: The tensorflow.contrib.keras.python.keras.testing_utils.get_test_data examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright remains with the original authors. Please consult the corresponding project's license before redistributing or reusing the code. Do not reproduce without permission.