import os
import random

import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.utils import image_dataset_from_directory, split_dataset
import keras_tuner

img_size = (180, 180)
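
# Pretrained ResNet152 convolutional base (ImageNet weights), kept frozen so it acts
# as a fixed feature extractor; the commented-out lines below switch to fine-tuning
# everything except the last four layers instead.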
conv_base = keras.applications.resnet.ResNet152(
    weights="imagenet",
    include_top=False,
    input_shape=(180, 180, 3))
conv_base.trainable = False
# conv_base.trainable = True
# for layer in conv_base.layers[:-4]:
#     layer.trainable = False


# data augmentation: enrich the training sample with random transforms
data_augmentation = keras.Sequential(
    [
        layers.RandomFlip("horizontal"),
        layers.RandomRotation(0.1),
        layers.RandomZoom(0.2),
    ])


input_dir = "assets/dog"
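
# One label index per class subdirectory of input_dir; only the number of classes
# (len(labels_dict)) is used below when building the classifier head.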
labels_dict = {}
for fname in os.listdir(input_dir):
    # skip stray files so the class count matches what the dataset loader infers
    if not os.path.isdir(os.path.join(input_dir, fname)):
        continue
    if fname in labels_dict:
        continue
    labels_dict[fname] = len(labels_dict)
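
# Checkpoint file for the best model; model_dir is only used as the KerasTuner
# working directory in the commented-out search below.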
model_name = "beerd_imagenet_02_05_2023.keras"
model_dir = "beerd_imagenet"
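
# Load images from the class subdirectories with a 90/10 train/validation split;
# the validation part is further split 80/20 into validation and test sets below.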
train_dataset, val_ds = image_dataset_from_directory(
    input_dir,
    labels="inferred",
    label_mode="categorical",
    class_names=None,
    color_mode="rgb",
    batch_size=32,
    seed=12,
    image_size=img_size,
    shuffle=True,
    validation_split=0.1,
    subset="both",
    interpolation="bilinear",
    follow_links=False,
    crop_to_aspect_ratio=False
)
validation_dataset, test_dataset = split_dataset(val_ds, left_size=0.8)
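

# Model-building function for KerasTuner: the width of the dense layer and the
# optimizer are the hyperparameters explored by the (commented-out) search below.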
def build_model(hp):
    inputs = keras.Input(shape=(180, 180, 3))
    x = data_augmentation(inputs)
    x = keras.applications.resnet.preprocess_input(x)
    x = conv_base(x)
    x = layers.Flatten()(x)
    units = hp.Int(name="units", min_value=1536, max_value=2048, step=512)
    x = layers.Dense(units, activation="relu")(x)
    x = layers.Dropout(0.5)(x)
    outputs = layers.Dense(len(labels_dict), activation="softmax")(x)
    model = keras.Model(inputs, outputs)
    optimizer = hp.Choice(name="optimizer", values=["rmsprop", "adam"])
    model.compile(optimizer=optimizer,
                  loss="categorical_crossentropy", metrics=["accuracy"])
    return model
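

# Fixed-architecture variant used for the final training run: same frozen ResNet
# base, but without the tuned dense layer (just dropout + softmax on top).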
def build_model_new():
    inputs = keras.Input(shape=(180, 180, 3))
    x = data_augmentation(inputs)
    x = keras.applications.resnet.preprocess_input(x)
    x = conv_base(x)
    x = layers.Flatten()(x)
    x = layers.Dropout(0.5)(x)
    outputs = layers.Dense(len(labels_dict), activation="softmax")(x)
    model = keras.Model(inputs, outputs)
    model.compile(optimizer="adam",
                  loss="categorical_crossentropy", metrics=["accuracy"])
    return model
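

# Optional hyperparameter search with KerasTuner Bayesian optimization; left
# commented out because the final model is trained with build_model_new() below.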
# tuner = keras_tuner.BayesianOptimization(
#     build_model,
#     objective='val_accuracy',
#     max_trials=100,
#     executions_per_trial=2,
#     directory=model_dir,
#     overwrite=True,)

# callbacks = [
#     keras.callbacks.EarlyStopping(monitor="val_loss", patience=5)
# ]
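
# Keep only the checkpoint with the best validation accuracy and stop training
# once validation accuracy has not improved for 5 epochs.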
callbacks = [
    keras.callbacks.ModelCheckpoint(model_name,
                                    save_best_only=True, monitor="val_accuracy"),
    keras.callbacks.EarlyStopping(monitor="val_accuracy", patience=5)
]

# tuner.search(train_dataset,
#              epochs=100,
#              callbacks=callbacks,
#              validation_data=validation_dataset,)

# best_models = tuner.get_best_models(1)
# best_models = keras.models.load_model(model_name)
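# Train the new classifier head on top of the frozen ResNet152 base.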
best_models = build_model_new()
best_models.fit(train_dataset,
                epochs=30,
                callbacks=callbacks,
                validation_data=validation_dataset)
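

# Reload the best checkpoint saved by ModelCheckpoint and report accuracy on the
# held-out test split.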
test_model = keras.models.load_model(model_name)
test_loss, test_acc = test_model.evaluate(test_dataset)
print(f"Test accuracy: {test_acc:.3f}")