-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathtrain.py
87 lines (71 loc) · 3.81 KB
/
train.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
from data import DataLoader
from model import *
from argparse import ArgumentParser
from tensorflow.keras.optimizers import *
from tensorflow.keras.losses import CategoricalCrossentropy
import tensorflow as tf
import sys
def str2bool(value):
    """Parse a CLI string into a bool for argparse.

    argparse's ``type=bool`` treats ANY non-empty string (including
    "False") as True, so boolean flags could never be disabled from the
    command line. This helper accepts the usual spellings and raises
    ValueError (which argparse reports as an invalid argument) otherwise.
    """
    if isinstance(value, bool):
        return value
    lowered = value.lower()
    if lowered in ('true', 't', 'yes', 'y', '1'):
        return True
    if lowered in ('false', 'f', 'no', 'n', '0'):
        return False
    raise ValueError('Boolean value expected, got: {!r}'.format(value))


if __name__ == '__main__':
    # Run tf.functions eagerly for easier debugging. The old
    # `experimental_run_functions_eagerly` alias is deprecated; moved
    # inside the __main__ guard so importing this module has no TF
    # side effects.
    tf.config.run_functions_eagerly(True)

    # ---- CLI definition -------------------------------------------------
    parser = ArgumentParser()
    parser.add_argument('--batch-size', default=16, type=int)
    parser.add_argument('--train-folder', type=str, required=True)
    parser.add_argument('--valid-folder', type=str, required=True)
    parser.add_argument('--epochs', default=100, type=int)
    parser.add_argument('--classes', default=5, type=int)
    parser.add_argument('--lr', default=0.07, type=float)
    # BUG FIX: was `type=bool` — bool('False') is True, making these
    # flags impossible to toggle from the command line.
    parser.add_argument('--shuffle', default=True, type=str2bool)
    parser.add_argument('--augmented', default=False, type=str2bool)
    parser.add_argument('--seed', default=2021, type=int)
    parser.add_argument('--image-size', default=150, type=int)
    parser.add_argument('--rho', default=1.0, type=float)
    parser.add_argument('--alpha', default=1.0, type=float)
    # NOTE(review): '--droppout' (sic) kept as-is — renaming would break
    # existing invocations and the Mobilenet_V1 keyword it feeds.
    parser.add_argument('--droppout', default=0.3, type=float)
    parser.add_argument('--Mobilenetv1-folder', default='MobilenetV1', type=str)
    parser.add_argument('--label-smoothing', default=0.01, type=float)
    parser.add_argument('--optimizer', default='adagrad', type=str)

    try:
        args = parser.parse_args()
    except SystemExit:  # was a bare `except:` — only catch argparse's exit
        parser.print_help()
        sys.exit(0)

    print('---------------------Welcome to Mobilenet V1-------------------')
    print('Author')
    print('Github: Nguyendat-bit')
    print('Email: nduc0231@gmail')
    print('---------------------------------------------------------------------')
    print('Training MobileNetV1 model with hyper-params:')
    print('===========================')
    for i, arg in enumerate(vars(args)):
        print('{}.{}: {}'.format(i, arg, vars(args)[arg]))

    # ---- Data loading ---------------------------------------------------
    print("-------------LOADING DATA------------")
    datasets = DataLoader(args.train_folder, args.valid_folder,
                          augment=args.augmented, seed=args.seed,
                          batch_size=args.batch_size, shuffle=args.shuffle,
                          image_size=(args.image_size, args.image_size))
    train_data, val_data = datasets.build_dataset()

    # ---- Model ----------------------------------------------------------
    # BUG FIX: `rho` was previously passed `args.alpha`, silently
    # ignoring the --rho CLI option.
    MobilenetV1 = Mobilenet_V1(classes=args.classes, alpha=args.alpha,
                               rho=args.rho, droppout=args.droppout,
                               img_size=(args.image_size, args.image_size)).build()

    # Loss with optional label smoothing.
    loss = CategoricalCrossentropy(label_smoothing=args.label_smoothing)

    # ---- Optimizer ------------------------------------------------------
    # Dispatch table instead of an if/elif chain; optimizer classes come
    # from `from tensorflow.keras.optimizers import *` at the top of the file.
    optimizer_classes = {
        'adam': Adam,
        'sgd': SGD,
        'rmsprop': RMSprop,
        'adadelta': Adadelta,
        'adamax': Adamax,
        'adagrad': Adagrad,
    }
    if args.optimizer not in optimizer_classes:
        # BUG FIX: the original `raise '<str>'` is itself a TypeError in
        # Python 3 — raise a proper exception instance instead.
        raise ValueError('Invalid optimizer. Valid option: adam, sgd, rmsprop, adadelta, adamax, adagrad')
    optimizer = optimizer_classes[args.optimizer](learning_rate=args.lr)

    # ---- Callbacks ------------------------------------------------------
    # NOTE(review): `callbacks` is not imported in this file's visible
    # imports — presumably re-exported by `from model import *`; verify.
    # Checkpoint the best model on validation accuracy; halve the LR when
    # training accuracy plateaus for 4 epochs.
    checkpoint = callbacks.ModelCheckpoint(args.Mobilenetv1_folder,
                                           monitor='val_acc',
                                           save_best_only=True, verbose=1)
    lr_R = callbacks.ReduceLROnPlateau(monitor='acc', patience=4, verbose=1,
                                       factor=0.5, min_lr=0.00001)

    # ---- Compile and train ---------------------------------------------
    MobilenetV1.compile(optimizer=optimizer, loss=loss, metrics=['acc'])
    print('-------------Training Mobilenet_V1------------')
    MobilenetV1.fit(train_data, validation_data=val_data, epochs=args.epochs,
                    verbose=1, callbacks=[checkpoint, lr_R])