File size: 1,821 Bytes
065e0c7 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 |
import os
import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# Define constants
# Spatial size (width, height) every input image is resized to.
IMAGE_SIZE = (512, 512)
# Images per gradient step; small because 512x512 inputs are memory-heavy.
BATCH_SIZE = 4
# Full passes over the training data.
EPOCHS = 10
# NOTE(review): TRAIN_DIR and VALID_DIR both point at the same directory 'T',
# so validation metrics are computed on the training images — confirm whether
# a separate validation directory was intended.
TRAIN_DIR = 'T'
VALID_DIR = 'T'
# HDF5 file the model is loaded from (if present) and saved to after training.
MODEL_PATH = 'nsfw_classifier.h5'
# Stream labeled image batches from disk for the two dataset splits.
# Both generators apply the same preprocessing: pixel values are rescaled
# from [0, 255] into [0, 1]. class_mode='binary' yields a single 0/1 label
# per image, one class per subdirectory.
train_datagen = ImageDataGenerator(rescale=1. / 255)
valid_datagen = ImageDataGenerator(rescale=1. / 255)

# Training split: batches of IMAGE_SIZE images drawn from TRAIN_DIR.
train_generator = train_datagen.flow_from_directory(
    TRAIN_DIR, target_size=IMAGE_SIZE,
    batch_size=BATCH_SIZE, class_mode='binary')

# Validation split: identical settings, drawn from VALID_DIR.
valid_generator = valid_datagen.flow_from_directory(
    VALID_DIR, target_size=IMAGE_SIZE,
    batch_size=BATCH_SIZE, class_mode='binary')
# Load a previously trained model if one exists; otherwise build a fresh CNN.
if os.path.exists(MODEL_PATH):
    print("Loading existing model")
    # load_model restores the architecture, weights, loss, and the compiled
    # optimizer state, so the loaded model must NOT be re-compiled —
    # re-compiling would reset the optimizer and discard its accumulated
    # state (e.g. Adam's moment estimates), hurting resumed training.
    model = tf.keras.models.load_model(MODEL_PATH)
else:
    print("Creating new model")
    # Small two-stage CNN for binary classification:
    # [Conv(32) -> MaxPool] -> [Conv(64) -> MaxPool] -> Flatten ->
    # Dense(512, relu) -> Dense(1, sigmoid).
    model = tf.keras.models.Sequential([
        tf.keras.layers.Conv2D(
            32, (3, 3), activation='relu',
            # 3-channel RGB input at the configured resolution.
            input_shape=(IMAGE_SIZE[0], IMAGE_SIZE[1], 3)),
        tf.keras.layers.MaxPooling2D(2, 2),
        tf.keras.layers.Conv2D(64, (3, 3), activation='relu'),
        tf.keras.layers.MaxPooling2D(2, 2),
        tf.keras.layers.Flatten(),
        tf.keras.layers.Dense(512, activation='relu'),
        # Single sigmoid unit: outputs P(positive class) in [0, 1],
        # matching the generators' class_mode='binary' labels.
        tf.keras.layers.Dense(1, activation='sigmoid')
    ])
    # Compile only the freshly built model; binary_crossentropy pairs with
    # the sigmoid output and the binary labels from the generators.
    model.compile(loss='binary_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
# Train the model, then persist it.
# Guard against tiny datasets: integer division `samples // BATCH_SIZE`
# yields 0 when a split holds fewer images than one batch, and model.fit
# raises on 0 steps — clamp both step counts to at least 1.
steps_per_epoch = max(1, train_generator.samples // BATCH_SIZE)
validation_steps = max(1, valid_generator.samples // BATCH_SIZE)

history = model.fit(
    train_generator,
    steps_per_epoch=steps_per_epoch,
    epochs=EPOCHS,
    validation_data=valid_generator,
    validation_steps=validation_steps)

# Save architecture + weights + optimizer state to HDF5 so a later run can
# resume from this checkpoint via load_model.
model.save(MODEL_PATH)
|