-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathtransfer_learn.py
145 lines (113 loc) · 3.99 KB
/
transfer_learn.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
import os

# TF_CPP_MIN_LOG_LEVEL must be set BEFORE TensorFlow is imported or it has no
# effect; '2' silences INFO and WARNING messages from the TF C++ backend.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

import glob
import matplotlib.pyplot as plt
from tensorflow.keras.applications.inception_v3 import InceptionV3, preprocess_input
from tensorflow.keras.layers import Dense, GlobalAveragePooling2D
from tensorflow.keras.models import Model
# SGD comes from tensorflow.keras (not the standalone `keras` package) so the
# optimizer and the model belong to the same Keras implementation.
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# Count of files in this path and its subfolders
def get_num_files(path):
    """Recursively count every file under *path*.

    Returns 0 when *path* does not exist.
    """
    if not os.path.exists(path):
        return 0
    total = 0
    # os.walk visits every directory level; only the file lists matter here.
    for _root, _dirs, filenames in os.walk(path):
        total += len(filenames)
    return total
# Count of subfolders directly below the path (aka our categories)
def get_num_subfolders(path):
    """Count the subfolders directly below *path* (i.e. the class categories).

    Returns 0 when *path* does not exist.

    Fix: the original summed directory counts across EVERY level of os.walk,
    which over-counts whenever a class folder contains nested directories.
    Only the first level of the walk is the category list.
    """
    if not os.path.exists(path):
        return 0
    # next(os.walk(path)) yields (path, immediate_dirnames, filenames).
    return len(next(os.walk(path))[1])
# Image generator function
def create_img_generator():
    """Build an ImageDataGenerator that applies Inception preprocessing plus
    random augmentation (rotation, shifts, shear, zoom, horizontal flip)."""
    augmentation = {
        'preprocessing_function': preprocess_input,
        'rotation_range': 30,
        'width_shift_range': 0.2,
        'height_shift_range': 0.2,
        'shear_range': 0.2,
        'zoom_range': 0.2,
        'horizontal_flip': True,
    }
    return ImageDataGenerator(**augmentation)
# Main code
# ----- Main script -----
# Inception v3 expects 299x299 RGB input.
Image_width, Image_height = 299, 299
Training_Epochs = 2
Batch_Size = 32
Number_FC_Neurons = 1024

train_dir = './data/train'
validate_dir = './data/validate'

num_train_samples = get_num_files(train_dir)
num_classes = get_num_subfolders(train_dir)
num_validate_samples = get_num_files(validate_dir)
num_epoch = Training_Epochs
batch_size = Batch_Size

# Define data pre-processing / augmentation.
train_image_gen = create_img_generator()
test_image_gen = create_img_generator()

# Connect each image generator to the folder that holds its source images.
# Training image generator:
train_generator = train_image_gen.flow_from_directory(
    train_dir,
    target_size=(Image_width, Image_height),
    batch_size=batch_size,
    seed=420
)
# Validation image generator (fix: the original reused train_image_gen and
# left test_image_gen unused, and mislabeled this as "Training"):
validation_generator = test_image_gen.flow_from_directory(
    validate_dir,
    target_size=(Image_width, Image_height),
    batch_size=batch_size,
    seed=420
)

# Inception v3 base model, pre-trained on ImageNet.
InceptionV3_base_model = InceptionV3(
    weights='imagenet',
    include_top=False,  # excludes the final FC layer
)
print('[+] Inception v3 base model without last FC loaded.')

# New classification head: global average pooling -> dense ReLU -> softmax.
L0 = InceptionV3_base_model.output
L1 = GlobalAveragePooling2D()(L0)
L2 = Dense(Number_FC_Neurons, activation='relu')(L1)
predictions = Dense(num_classes, activation='softmax')(L2)

# New model: base network plus the new head.
model = Model(inputs=InceptionV3_base_model.input, outputs=predictions)
model.summary()  # summary() prints itself; print(model.summary()) added a stray 'None'

print('[+] Performing basic transfer Learning')
# Freeze all layers in the Inception V3 base model so only the new head trains.
for layer in InceptionV3_base_model.layers:
    layer.trainable = False

# Compile the model for basic transfer learning.
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

# The generators continuously pull images from the training/validation folders
# and alter them slightly.  model.fit accepts generators directly; the old
# fit_generator API is deprecated/removed in modern tf.keras.
# NOTE(review): class_weight='auto' is not supported by tf.keras fit (it needs
# a {class_index: weight} dict), so it was dropped here.
history_transfer_learning = model.fit(
    train_generator,
    epochs=num_epoch,
    steps_per_epoch=num_train_samples // batch_size,
    validation_data=validation_generator,
    validation_steps=num_validate_samples // batch_size,
)
# Save the transfer-learned model.
model.save('inceptionv3-transfer-learning.model')

# Option 2, specific to Inception: unfreeze and fine-tune the top layers.
print('\n[+] Fine tuning existing model')
Layers_To_Freeze = 172  # keep the bottom 172 layers frozen, retrain the rest
for layer in model.layers[:Layers_To_Freeze]:
    layer.trainable = False
for layer in model.layers[Layers_To_Freeze:]:
    layer.trainable = True

# Low learning rate for fine-tuning ('lr' kwarg is deprecated in Keras).
model.compile(
    optimizer=SGD(learning_rate=0.0001, momentum=0.9),
    loss='categorical_crossentropy',
    metrics=['accuracy']
)
# Use a distinct name so the transfer-learning history is not overwritten.
history_fine_tune = model.fit(
    train_generator,
    epochs=num_epoch,
    steps_per_epoch=num_train_samples // batch_size,
    validation_data=validation_generator,
    validation_steps=num_validate_samples // batch_size,
)
model.save('inceptionv3-fine-tune.model')