-
Notifications
You must be signed in to change notification settings - Fork 0
/
train_custom_model.py
71 lines (55 loc) · 2.52 KB
/
train_custom_model.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
from tensorflow.keras.preprocessing import image
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Dense, GlobalAveragePooling2D
from tensorflow.keras.optimizers import Adam, SGD
from keras.utils import image_dataset_from_directory
from tensorflow.keras.applications.resnet50 import ResNet50
def train_keras_model(
    base_model,
    num_classes=50,
    dataset_dir="sketch_training_dataset/paintings_sketches_by_artist",
    fine_tune_from=143,
):
    """Two-phase transfer learning: train a new head, then fine-tune the base.

    Phase 1 trains only a freshly initialized classification head
    (GlobalAveragePooling2D -> Dense(1024, relu) -> softmax) with every
    layer of ``base_model`` frozen, and saves an intermediate checkpoint.
    Phase 2 unfreezes the top of the base network and fine-tunes it
    together with the head at a very low learning rate.

    Args:
        base_model: A Keras convolutional base (e.g. ResNet50 with
            ``include_top=False``) whose output feeds the new head.
        num_classes: Number of output classes for the softmax layer.
            Defaults to 50, matching the original script.
        dataset_dir: Directory with one sub-folder per class, consumed by
            ``image_dataset_from_directory``.
        fine_tune_from: Index of the first base layer unfrozen in phase 2;
            layers ``[0, fine_tune_from)`` stay frozen. 143 is roughly the
            start of the conv5 block in ResNet50 — TODO confirm if a
            different base model is passed.

    Returns:
        The trained Keras ``Model`` (base network plus the new head).
    """
    # Build the new classification head on top of the convolutional base.
    x = base_model.output
    x = GlobalAveragePooling2D()(x)
    x = Dense(1024, activation="relu")(x)
    predictions = Dense(num_classes, activation="softmax")(x)
    model = Model(inputs=base_model.input, outputs=predictions)

    # Phase 1: freeze the whole base so only the randomly initialized head
    # learns first. Compile *after* changing trainable flags so they apply.
    for layer in base_model.layers:
        layer.trainable = False
    model.compile(optimizer="rmsprop", loss="categorical_crossentropy")

    # Load the dataset once and reuse it for both training phases.
    dataset = image_dataset_from_directory(
        dataset_dir,
        batch_size=64,
        image_size=(224, 224),
        label_mode="categorical",
    )
    model.fit(dataset, epochs=30)
    model.save("keras_Resnet50_30")  # checkpoint after head-only training

    # Print layer indices/names to help choose a fine-tune boundary.
    for i, layer in enumerate(base_model.layers):
        print(i, layer.name)

    # Phase 2: unfreeze the top of the base network for fine-tuning.
    # BUG FIX: the original re-froze *every* layer here, so the second
    # fit() fine-tuned nothing; now layers from `fine_tune_from` on train.
    for layer in base_model.layers[:fine_tune_from]:
        layer.trainable = False
    for layer in base_model.layers[fine_tune_from:]:
        layer.trainable = True

    # Recompile so the new trainable flags take effect. A very low SGD
    # learning rate avoids destroying the pretrained weights.
    # FIX: the `lr` argument was removed from Keras optimizers;
    # `learning_rate` is the supported name.
    model.compile(
        optimizer=SGD(learning_rate=0.00001, momentum=0.9),
        loss="categorical_crossentropy",
    )
    model.fit(dataset, epochs=15)
    return model
if __name__ == "__main__":
    # Guard the training run so importing this module does not kick off
    # 45 epochs of training as a side effect.
    # FIX: renamed the misspelled `model_to_terain` variable.
    model_to_train = ResNet50(
        weights="imagenet", include_top=False, input_shape=(224, 224, 3)
    )
    # Pretrained ResNet50 backbone without its classification head; the
    # (224, 224, 3) input matches the image_size used by the data loader.
    train_keras_model(model_to_train).save("keras_Resnet50_30_15")