# dermy-models/compile.py

import os

import tensorflow as tf
from tensorflow import keras as tfk

# Parameters
DATA_DIR = os.path.join(os.getcwd(), "data")
IMG_SIZE = (224, 224)
IMG_SHAPE = IMG_SIZE + (3,)
BATCH_SIZE = 64
AUTOTUNE = tf.data.AUTOTUNE
BASE_LEARNING_RATE = 0.0001
CLASSES = 2
# Import Data
training_path = os.path.join(DATA_DIR, "train")
test_path = os.path.join(DATA_DIR, "test")
training_data = tfk.utils.image_dataset_from_directory(training_path,
                                                       shuffle=True,
                                                       batch_size=BATCH_SIZE,
                                                       image_size=IMG_SIZE,
                                                       validation_split=0.2,
                                                       subset="training",
                                                       seed=1234)
validation_data = tfk.utils.image_dataset_from_directory(training_path,
                                                         shuffle=True,
                                                         batch_size=BATCH_SIZE,
                                                         image_size=IMG_SIZE,
                                                         validation_split=0.2,
                                                         subset="validation",
                                                         seed=1234)
test_data = tfk.utils.image_dataset_from_directory(test_path,
                                                   shuffle=True,
                                                   batch_size=BATCH_SIZE,
                                                   image_size=IMG_SIZE)
# Init Prefetching
training_data = training_data.prefetch(buffer_size=AUTOTUNE)
validation_data = validation_data.prefetch(buffer_size=AUTOTUNE)
test_data = test_data.prefetch(buffer_size=AUTOTUNE)
# Data Augmentation Layer
data_augmentation = tf.keras.Sequential([
    tf.keras.layers.RandomFlip('horizontal'),
    tf.keras.layers.RandomRotation(0.2)
])
# Create Base Model From MobileNetV3Large
base_model = tf.keras.applications.MobileNetV3Large(
    input_shape=IMG_SHAPE,
    include_top=False,
    weights="imagenet"
)
# Sanity-check the feature extractor on a single batch
image_batch, label_batch = next(iter(training_data))
feature_batch = base_model(image_batch)
# Freeze the pretrained weights for feature extraction
base_model.trainable = False
# Add Classification Head
global_avg_layer = tf.keras.layers.GlobalAveragePooling2D()
feature_batch_avg = global_avg_layer(feature_batch)
prediction_layer = tf.keras.layers.Dense(CLASSES, activation="softmax")
prediction_batch = prediction_layer(feature_batch_avg)
inputs = tf.keras.Input(shape=IMG_SHAPE)
x = data_augmentation(inputs)
x = base_model(x, training=False)
x = global_avg_layer(x)
x = tf.keras.layers.Dropout(0.2)(x)
outputs = prediction_layer(x)
model = tf.keras.Model(inputs, outputs)
# One-Hot Encode Labels
training_data = training_data.map(lambda x, y: (x, tf.one_hot(y, CLASSES)))
validation_data = validation_data.map(lambda x, y: (x, tf.one_hot(y, CLASSES)))
test_data = test_data.map(lambda x, y: (x, tf.one_hot(y, CLASSES)))
# Compile Model
optimizer = tf.keras.optimizers.Adam(learning_rate=BASE_LEARNING_RATE)
loss = tf.keras.losses.CategoricalCrossentropy()
metrics = [tf.keras.metrics.CategoricalAccuracy()]
model.compile(optimizer=optimizer, loss=loss, metrics=metrics)
# Train the Model
initial_epochs = 50
loss0, accuracy0 = model.evaluate(validation_data)
print(f"initial loss: {loss0}")
print(f"initial accuracy: {accuracy0}")
lr_schedule = tf.keras.callbacks.ReduceLROnPlateau(
    monitor="val_loss",
    factor=0.1,
    patience=5,
    min_lr=1e-6
)
early_stopping = tf.keras.callbacks.EarlyStopping(
    monitor="val_loss",
    patience=10,
    restore_best_weights=True
)
history = model.fit(training_data,
                    epochs=initial_epochs,
                    validation_data=validation_data,
                    callbacks=[lr_schedule, early_stopping])
# Evaluate Model
results = model.evaluate(validation_data)
print(f"Validation Loss: {results[0]}")
print(f"Validation Accuracy: {results[1]}")
results = model.evaluate(test_data)
print(f"Test Loss: {results[0]}")
print(f"Test Accuracy: {results[1]}")
# Save Model
os.makedirs("models", exist_ok=True)
model.save("models/mobilenet_v3.keras")
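# Optional: reload the saved model and confirm it reproduces the test metrics.
# A minimal usage sketch, assuming the save above succeeded; not required for training.
reloaded = tf.keras.models.load_model("models/mobilenet_v3.keras")
reloaded_results = reloaded.evaluate(test_data)
print(f"Reloaded Test Loss: {reloaded_results[0]}")
print(f"Reloaded Test Accuracy: {reloaded_results[1]}")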