
Commit

fix mosaic bug(#6); mv argparse str labels to 'config.py'
zhangming8 committed Aug 7, 2021
1 parent 712b7d1 commit 9a743eb
Showing 3 changed files with 31 additions and 21 deletions.
23 changes: 15 additions & 8 deletions config.py
@@ -9,12 +9,13 @@
 from utils.util import merge_opt
 
 
-def update_nano_tiny(cfg):
-    cfg.scale = (0.5, 1.5)
-    cfg.test_size = (416, 416)
-    cfg.enable_mixup = False
-    if cfg.random_size is not None:
-        cfg.random_size = (10, 20)
+def update_nano_tiny(cfg, inp_params):
+    cfg.scale = cfg.scale if 'scale' in inp_params else (0.5, 1.5)
+    cfg.test_size = cfg.test_size if 'test_size' in inp_params else (416, 416)
+    cfg.enable_mixup = cfg.enable_mixup if 'enable_mixup' in inp_params else False
+    if 'random_size' not in inp_params:
+        if cfg.random_size is not None:
+            cfg.random_size = (10, 20)
     if 'nano' in cfg.backbone:
         cfg.depth_wise = True
     return cfg
@@ -97,14 +98,18 @@ def update_nano_tiny(cfg):
 opt.rgb_means = [0.485, 0.456, 0.406]
 opt.std = [0.229, 0.224, 0.225]
 
-opt = merge_opt(opt, sys.argv[1:])
+opt, input_params = merge_opt(opt, sys.argv[1:])
 if opt.backbone.lower().split("-")[1] in ["tiny", "nano"]:
-    opt = update_nano_tiny(opt)
+    opt = update_nano_tiny(opt, input_params)
 
 # do not modify the following params
 opt.train_ann = opt.dataset_path + "/annotations/instances_train2017.json"
 opt.val_ann = opt.dataset_path + "/annotations/instances_val2017.json"
 opt.data_dir = opt.dataset_path + "/images"
+if isinstance(opt.label_name, str):
+    new_label = opt.label_name.split(",")
+    print('[INFO] change param: {} {} -> {}'.format("label_name", opt.label_name, new_label))
+    opt.label_name = new_label
 opt.num_classes = len(opt.label_name)
 opt.gpus_str = opt.gpus
 opt.metric = opt.metric.lower()
@@ -127,4 +132,6 @@ def update_nano_tiny(cfg):
     opt.cuda_benchmark = False
 if opt.reid_dim > 0:
     assert opt.tracking_id_nums is not None
+
 os.environ["CUDA_VISIBLE_DEVICES"] = opt.gpus_str
+print("\n{} final config: {}\n{}".format("-"*20, "-"*20, opt))
4 changes: 1 addition & 3 deletions train.py
@@ -134,7 +134,7 @@ def train(model, scaler, train_loader, val_loader, optimizer, lr_scheduler, star
     for epoch in range(start_epoch + 1, opt.num_epochs + 1):
         if epoch == opt.num_epochs - opt.no_aug_epochs or no_aug:
             logger.write("--->No mosaic aug now! epoch {}\n".format(epoch))
-            train_loader.dataset.close_mosaic()
+            train_loader.close_mosaic()
             if isinstance(model, torch.nn.DataParallel):
                 model.module.loss.use_l1 = True
             else:
@@ -213,8 +213,6 @@ def main():
     no_aug = start_epoch >= opt.num_epochs - opt.no_aug_epochs
     train_loader, val_loader = get_dataloader(opt, no_aug=no_aug)
     dataset_label = val_loader.dataset.classes
-    if isinstance(opt.label_name,str):
-        opt.label_name=opt.label_name.split(",")
     assert opt.label_name == dataset_label, "[ERROR] 'opt.label_name' should be the same as dataset's {} != {}".format(
         opt.label_name, dataset_label)
     # learning ratio scheduler
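Note on the train.py changes above: the label_name string handling moves to config.py (the assert that follows still checks it against the dataset's classes), and close_mosaic() is now called on train_loader itself rather than on train_loader.dataset. A hypothetical sketch of why that call site matters, assuming (this diff does not show it) that the training loader is a wrapper object that forwards close_mosaic() to the underlying dataset; the class and attribute names below are illustrative only:

# Illustrative only; not code from this repository.
class MosaicDataset:
    def __init__(self):
        self.enable_mosaic = True

    def close_mosaic(self):
        # Disable mosaic augmentation for the final "no-aug" epochs.
        self.enable_mosaic = False

class WrappedLoader:
    """A loader-like wrapper that does not expose a .dataset attribute."""
    def __init__(self, dataset):
        self._dataset = dataset

    def close_mosaic(self):
        self._dataset.close_mosaic()   # forward to the real dataset

train_loader = WrappedLoader(MosaicDataset())
train_loader.close_mosaic()            # works; train_loader.dataset.close_mosaic() would raise AttributeError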
25 changes: 15 additions & 10 deletions utils/util.py
@@ -12,41 +12,46 @@
 
 def merge_opt(opt, params):
     if len(params):
-        print("inputs params:", params)
+        print("==>> input params:", params)
+    input_params = {}
     for arg in params:
-        assert "=" in arg, "inputs format should be: python xxx.py param1=value1 param2=value2"
+        assert "=" in arg, "input format should be: python xxx.py param1=value1 param2=value2"
         name, value = arg.split("=")
         try:
-            # string int, string float, string bool
+            # string int, string float, string bool, string list, string tuple
            value = eval(value)
         except:
             # string others
             pass
+        input_params[name] = value
+        value_type = str(type(value)).split("class ")[1].split(">")[0]
         if name in opt:
             if opt[name] != value:
-                print("[INFO] change param: {} {} to {} {}".format(name, opt[name], value, type(value)))
+                print("[INFO] change param: {} {} -> {} ({})".format(name, opt[name], value, value_type))
             else:
-                print("[INFO] same param: {}={}".format(name, value, type(value)))
+                print("[INFO] same param: {}={} ({})".format(name, value, value_type))
         else:
-            print("[INFO] add param: {}={} {} ".format(name, value, type(value)))
+            print("[INFO] add param: {}={} ({}) ".format(name, value, value_type))
         opt[name] = value
 
     def change_list_to_str(cfg, param):
         if param in cfg.keys():
             if isinstance(cfg[param], (list, tuple)):
                 new_value = ",".join([str(i) for i in cfg[param]])
-                print("[INFO] re-change param: {} {} to {} {} ".format(param, cfg[param], new_value, type(new_value)))
+                value_t = str(type(new_value)).split("class ")[1].split(">")[0]
+                print("[INFO] re-change param: {} {} to {} {} ".format(param, cfg[param], new_value, value_t))
                 cfg[param] = new_value
             elif isinstance(cfg[param], int):
                 new_value = str(cfg[param])
-                print("[INFO] re-change param: {} {} to {} {} ".format(param, cfg[param], new_value, type(new_value)))
+                value_t = str(type(new_value)).split("class ")[1].split(">")[0]
+                print("[INFO] re-change param: {} {} to {} {} ".format(param, cfg[param], new_value, value_t))
                 cfg[param] = new_value
 
         return cfg
 
     opt = change_list_to_str(opt, "gpus")
-    opt = change_list_to_str(opt, "lr_decay_epoch")
-    return opt
+    # opt = change_list_to_str(opt, "lr_decay_epoch")
+    return opt, input_params
 
 
 def configure_module(ulimit_value=8192):
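Note on the utils/util.py changes above: merge_opt() keeps folding param=value strings into opt and prints value types more readably, leaves lr_decay_epoch unconverted (the change_list_to_str call is commented out), and now also returns the raw command-line overrides so config.py can tell user-supplied values apart from defaults. A small usage sketch under the assumption that opt supports dict-style access (the repo's opt also supports attribute access):

# Hypothetical call, mirroring how config.py uses merge_opt() after this commit.
from utils.util import merge_opt

opt = {"lr": 0.01, "gpus": "0", "enable_mixup": True}

# As if the user had run:  python train.py lr=0.001 gpus=0,1 enable_mixup=False
opt, input_params = merge_opt(opt, ["lr=0.001", "gpus=0,1", "enable_mixup=False"])

print(opt["lr"], opt["enable_mixup"])  # 0.001 False  (eval() turns the strings into Python values)
print(sorted(input_params))            # ['enable_mixup', 'gpus', 'lr']  -- exactly what was passed
print(opt["gpus"])                     # 0,1  (change_list_to_str() turns the (0, 1) tuple back into a string)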
