From a86abe9c9e8d5d13e7e390eef0f7e8c622415466 Mon Sep 17 00:00:00 2001
From: artem
Date: Wed, 3 May 2023 08:00:10 +0300
Subject: [PATCH] fixed the training script
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 beerds_imagenet.py | 112 ++++++++++++++++++++++++++++-----------------
 1 file changed, 70 insertions(+), 42 deletions(-)

diff --git a/beerds_imagenet.py b/beerds_imagenet.py
index 4286df9..ee6dca6 100644
--- a/beerds_imagenet.py
+++ b/beerds_imagenet.py
@@ -8,25 +8,27 @@
 import tensorflow as tf
 from tensorflow import keras
 from tensorflow.keras import layers
 from tensorflow.keras.utils import image_dataset_from_directory, split_dataset
-
+import keras_tuner
 
 img_size = (180, 180)
 
-conv_base = keras.applications.vgg16.VGG16(
+conv_base = keras.applications.resnet.ResNet152(
     weights="imagenet",
     include_top=False,
     input_shape=(180, 180, 3))
 conv_base.trainable = False
+# conv_base.trainable = True
+# for layer in conv_base.layers[:-4]:
+#     layer.trainable = False
 
 # data augmentation
 data_augmentation = keras.Sequential(
-[
-    layers.RandomFlip("horizontal"),
-    layers.RandomRotation(0.1),
-    layers.RandomZoom(0.2),
-])
-
+    [
+        layers.RandomFlip("horizontal"),
+        layers.RandomRotation(0.1),
+        layers.RandomZoom(0.2),
+    ])
 
 input_dir = "assets/dog"
 
@@ -37,7 +39,8 @@ for fname in os.listdir(input_dir):
         continue
     labels_dict[fname] = len(labels_dict)
 
-model_name = "beerd_imagenet_25_04_2023.keras"
+model_name = "beerd_imagenet_02_05_2023.keras"
+model_dir = "beerd_imagenet"
 train_dataset, val_ds = image_dataset_from_directory(
     input_dir,
     labels="inferred",
@@ -54,47 +57,72 @@ train_dataset, val_ds = image_dataset_from_directory(
     follow_links=False,
     crop_to_aspect_ratio=False
 )
-
 validation_dataset, test_dataset = split_dataset(val_ds, left_size=0.8)
 
-inputs = keras.Input(shape=(180, 180, 3))
-x = data_augmentation(inputs)
-x = keras.applications.vgg16.preprocess_input(x)
-x = conv_base(x)
-x = layers.Flatten()(x)
-x = layers.Dense(512)(x)
-x = layers.Dropout(0.5)(x)
-outputs = layers.Dense(len(labels_dict), activation="softmax")(x)
-model = keras.Model(inputs, outputs)
+def build_model(hp):
+    inputs = keras.Input(shape=(180, 180, 3))
+    x = data_augmentation(inputs)
+    x = keras.applications.resnet.preprocess_input(x)
+    x = conv_base(x)
+    x = layers.Flatten()(x)
+    units = hp.Int(name="units", min_value=1536, max_value=2048, step=512)
+    x = layers.Dense(units, activation="relu")(x)
+    x = layers.Dropout(0.5)(x)
+    outputs = layers.Dense(len(labels_dict), activation="softmax")(x)
+    model = keras.Model(inputs, outputs)
+    optimizer = hp.Choice(name="optimizer", values=["rmsprop", "adam"])
+    model.compile(optimizer=optimizer,
+                  loss="categorical_crossentropy", metrics=['accuracy'])
+    return model
 
-model.compile(optimizer="rmsprop",
-              loss="categorical_crossentropy", metrics=['accuracy'])
+
+def build_model_new():
+    inputs = keras.Input(shape=(180, 180, 3))
+    x = data_augmentation(inputs)
+    x = keras.applications.resnet.preprocess_input(x)
+    x = conv_base(x)
+    x = layers.Flatten()(x)
+    x = layers.Dropout(0.5)(x)
+    outputs = layers.Dense(len(labels_dict), activation="softmax")(x)
+    model = keras.Model(inputs, outputs)
+    model.compile(optimizer="adam",
+                  loss="categorical_crossentropy", metrics=['accuracy'])
+    return model
+
+
+# tuner = keras_tuner.BayesianOptimization(
+#     build_model,
+#     objective='val_accuracy',
+#     max_trials=100,
+#     executions_per_trial=2,
+#     directory=model_dir,
+#     overwrite=True,)
+
+# callbacks = [
+#     keras.callbacks.EarlyStopping(monitor="val_loss", patience=5)
+# ]
 
 callbacks = [
     keras.callbacks.ModelCheckpoint(model_name,
-                                    save_best_only=True)
+                                    save_best_only=True, monitor="val_accuracy"),
+    keras.callbacks.EarlyStopping(monitor="val_accuracy", patience=5)
 ]
-history = model.fit(train_dataset,
-                    epochs=100,
-                    callbacks=callbacks,
-                    validation_data=validation_dataset,)
-epochs = range(1, len(history.history["loss"]) + 1)
-loss = history.history["loss"]
-val_loss = history.history["val_loss"]
-acc = history.history["accuracy"]
-val_acc = history.history["val_accuracy"]
-plt.plot(epochs, acc, "bo", label="Training accuracy")
-plt.plot(epochs, val_acc, "b", label="Validation accuracy")
-plt.title("Training and validation accuracy")
-plt.legend()
-plt.figure()
-plt.plot(epochs, loss, "bo", label="Training loss")
-plt.plot(epochs, val_loss, "b", label="Validation loss")
-plt.title("Training and validation loss")
-plt.legend()
-plt.show()
+# tuner.search(train_dataset,
+#              epochs=100,
+#              callbacks=callbacks,
+#              validation_data=validation_dataset,)
+
+# best_models = tuner.get_best_models(1)
+# best_models = keras.models.load_model(model_name)
+best_models = build_model_new()
+best_models.fit(train_dataset,
                epochs=30,
                callbacks=callbacks,
                validation_data=validation_dataset)
 
 test_model = keras.models.load_model(model_name)
 test_loss, test_acc = test_model.evaluate(test_dataset)
-print(f"Test accuracy: {test_acc:.3f}")
\ No newline at end of file
+print(f"Test accuracy: {test_acc:.3f}")
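
Note: two pieces of this patch are left commented out: the keras_tuner Bayesian search over the units and optimizer hyperparameters, and the partial unfreezing of conv_base for fine-tuning. The following is a minimal sketch, not part of the patch, of how that fine-tuning phase would typically follow the best_models.fit(...) call, reusing the objects the script already defines; the learning rate and epoch count are illustrative assumptions:

    # Fine-tuning sketch: unfreeze only the top of the pretrained ResNet152
    # base, as the commented-out lines in the patch suggest.
    conv_base.trainable = True
    for layer in conv_base.layers[:-4]:
        layer.trainable = False  # keep all but the last four layers frozen

    # Changing layer.trainable has no effect until the model is recompiled;
    # a small learning rate protects the pretrained weights.
    best_models.compile(
        optimizer=keras.optimizers.Adam(learning_rate=1e-5),  # assumed value
        loss="categorical_crossentropy",
        metrics=["accuracy"])
    best_models.fit(train_dataset,
                    epochs=10,  # illustrative, not fixed by the patch
                    callbacks=callbacks,
                    validation_data=validation_dataset)

If the tuner path is enabled instead, the winning configuration can be recovered with keras_tuner's standard API, e.g. tuner.get_best_hyperparameters(num_trials=1)[0] after tuner.search(...) completes.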