Fixed the training script

artem 2023-05-03 08:00:10 +03:00
parent ed559a8b94
commit a86abe9c9e
1 changed file with 70 additions and 42 deletions


@@ -8,25 +8,27 @@ import tensorflow as tf
 from tensorflow import keras
 from tensorflow.keras import layers
 from tensorflow.keras.utils import image_dataset_from_directory, split_dataset
+import keras_tuner

 img_size = (180, 180)
-conv_base = keras.applications.vgg16.VGG16(
+conv_base = keras.applications.resnet.ResNet152(
     weights="imagenet",
     include_top=False,
     input_shape=(180, 180, 3))
 conv_base.trainable = False
+# conv_base.trainable = True
+# for layer in conv_base.layers[:-4]:
+#     layer.trainable = False

 # data augmentation
 data_augmentation = keras.Sequential(
     [
         layers.RandomFlip("horizontal"),
         layers.RandomRotation(0.1),
         layers.RandomZoom(0.2),
     ])

 input_dir = "assets/dog"
@@ -37,7 +39,8 @@ for fname in os.listdir(input_dir):
         continue
     labels_dict[fname] = len(labels_dict)

-model_name = "beerd_imagenet_25_04_2023.keras"
+model_name = "beerd_imagenet_02_05_2023.keras"
+model_dir = "beerd_imagenet"
 train_dataset, val_ds = image_dataset_from_directory(
     input_dir,
     labels="inferred",
@@ -54,47 +57,72 @@ train_dataset, val_ds = image_dataset_from_directory(
     follow_links=False,
     crop_to_aspect_ratio=False
 )
 validation_dataset, test_dataset = split_dataset(val_ds, left_size=0.8)

-inputs = keras.Input(shape=(180, 180, 3))
-x = data_augmentation(inputs)
-x = keras.applications.vgg16.preprocess_input(x)
-x = conv_base(x)
-x = layers.Flatten()(x)
-x = layers.Dense(512)(x)
-x = layers.Dropout(0.5)(x)
-outputs = layers.Dense(len(labels_dict), activation="softmax")(x)
-model = keras.Model(inputs, outputs)
-model.compile(optimizer="rmsprop",
-              loss="categorical_crossentropy", metrics=['accuracy'])
+def build_model(hp):
+    inputs = keras.Input(shape=(180, 180, 3))
+    x = data_augmentation(inputs)
+    x = keras.applications.resnet.preprocess_input(x)
+    x = conv_base(x)
+    x = layers.Flatten()(x)
+    units = hp.Int(name="units", min_value=1536, max_value=2048, step=512)
+    x = layers.Dense(units, activation="relu")(x)
+    x = layers.Dropout(0.5)(x)
+    outputs = layers.Dense(len(labels_dict), activation="softmax")(x)
+    model = keras.Model(inputs, outputs)
+    optimizer = hp.Choice(name="optimizer", values=["rmsprop", "adam"])
+    model.compile(optimizer=optimizer,
+                  loss="categorical_crossentropy", metrics=['accuracy'])
+    return model
+
+def build_model_new():
+    inputs = keras.Input(shape=(180, 180, 3))
+    x = data_augmentation(inputs)
+    x = keras.applications.resnet.preprocess_input(x)
+    x = conv_base(x)
+    x = layers.Flatten()(x)
+    x = layers.Dropout(0.5)(x)
+    outputs = layers.Dense(len(labels_dict), activation="softmax")(x)
+    model = keras.Model(inputs, outputs)
+    model.compile(optimizer="adam",
+                  loss="categorical_crossentropy", metrics=['accuracy'])
+    return model
+
+# tuner = keras_tuner.BayesianOptimization(
+#     build_model,
+#     objective='val_accuracy',
+#     max_trials=100,
+#     executions_per_trial=2,
+#     directory=model_dir,
+#     overwrite=True,)
+
+# callbacks = [
+#     keras.callbacks.EarlyStopping(monitor="val_loss", patience=5)
+# ]

 callbacks = [
     keras.callbacks.ModelCheckpoint(model_name,
-                                    save_best_only=True)
+                                    save_best_only=True, monitor="val_accuracy"),
+    keras.callbacks.EarlyStopping(monitor="val_accuracy", patience=5)
 ]
-history = model.fit(train_dataset,
-                    epochs=100,
-                    callbacks=callbacks,
-                    validation_data=validation_dataset,)
-epochs = range(1, len(history.history["loss"]) + 1)
-loss = history.history["loss"]
-val_loss = history.history["val_loss"]
-acc = history.history["accuracy"]
-val_acc = history.history["val_accuracy"]
-plt.plot(epochs, acc, "bo", label="Training accuracy")
-plt.plot(epochs, val_acc, "b", label="Validation accuracy")
-plt.title("Training and validation accuracy")
-plt.legend()
-plt.figure()
-plt.plot(epochs, loss, "bo", label="Training loss")
-plt.plot(epochs, val_loss, "b", label="Validation loss")
-plt.title("Training and validation loss")
-plt.legend()
-plt.show()
+# tuner.search(train_dataset,
+#              epochs=100,
+#              callbacks=callbacks,
+#              validation_data=validation_dataset,)
+
+# best_models = tuner.get_best_models(1)
+# best_models = keras.models.load_model(model_name)
+best_models = build_model_new()
+best_models.fit(train_dataset,
+                epochs=30,
+                callbacks=callbacks,
+                validation_data=validation_dataset)

 test_model = keras.models.load_model(model_name)
 test_loss, test_acc = test_model.evaluate(test_dataset)
 print(f"Test accuracy: {test_acc:.3f}")
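For reference, a minimal inference sketch that is not part of the commit: it assumes the checkpoint file written by this script, a hypothetical sample image path, and that class names can be recovered the same way image_dataset_from_directory infers them (subdirectory names of input_dir in sorted order).

import os
import numpy as np
from tensorflow import keras

input_dir = "assets/dog"
model = keras.models.load_model("beerd_imagenet_02_05_2023.keras")

# image_dataset_from_directory infers labels from subdirectory names in sorted order,
# so the same ordering maps a predicted index back to a breed name
class_names = sorted(
    d for d in os.listdir(input_dir)
    if os.path.isdir(os.path.join(input_dir, d)))

# hypothetical sample image, resized to the 180x180 input the model was trained on;
# resnet.preprocess_input is part of the saved graph, so raw pixel values are passed in
img = keras.utils.load_img("assets/dog/husky/example.jpg", target_size=(180, 180))
batch = np.expand_dims(keras.utils.img_to_array(img), axis=0)

probs = model.predict(batch)[0]
print(class_names[int(np.argmax(probs))], f"{probs.max():.3f}")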