@@ -54,21 +54,22 @@ for idx, params in enumerate(search_params):
--data.val_size 100000 \
--model.alpha {a} \
--model.width {w} \
--trainer.fast_dev_run 1 \
--trainer.min_epochs 10 \
--trainer.max_epochs {me} \
--trainer.check_val_every_n_epoch 1 \
--trainer.log_every_n_steps 3 \
--trainer.check_val_every_n_epoch 10 \
--trainer.limit_val_batches 50 \
--trainer.callbacks callbacks.SaveImageCallback \
--trainer.callbacks.init_args.final_dir out \
--trainer.callbacks.init_args.save_interval 0 \
--optimizer torch.optim.Adam \
--optimizer.init_args.lr {lr} \
--lr_scheduler lightning.pytorch.cli.ReduceLROnPlateau \
--lr_scheduler.init_args.monitor hp_metric \
--lr_scheduler.init_args.factor 0.05 \
--lr_scheduler.init_args.patience 5 \
--lr_scheduler.init_args.cooldown 10 \
--lr_scheduler.init_args.factor 0.05 \
--lr_scheduler.init_args.verbose true \
--print_config
--lr_scheduler.init_args.verbose true
"""
# job_name = f"color2_{bs}_{a}_{lr:2.2e}"
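For context, a minimal sketch of how a template like the one in this hunk might be expanded and launched once per search configuration. The `search_params` name comes from the hunk header; the entry point `train.py`, the `CMD_TEMPLATE` name, the grid values, and the `subprocess` launch are illustrative assumptions, not the script's actual code.

    import shlex
    import subprocess
    from itertools import product

    # Hypothetical stand-in for the full template in the hunk; only a few of
    # its flags are reproduced. The trailing backslashes are consumed as
    # Python line continuations inside the string literal, so the template
    # collapses to a single command line.
    CMD_TEMPLATE = """python train.py fit \
    --model.alpha {a} \
    --model.width {w} \
    --trainer.max_epochs {me} \
    --optimizer torch.optim.Adam \
    --optimizer.init_args.lr {lr}"""

    # Assumed search grid; the keys mirror the {a}, {w}, {me}, {lr}
    # placeholders used in the diff.
    search_params = [
        dict(a=a, w=w, me=me, lr=lr)
        for a, w, me, lr in product([0.5, 1.0], [64, 128], [100], [1e-3, 1e-4])
    ]

    for idx, params in enumerate(search_params):
        cmd = CMD_TEMPLATE.format(**params)
        print(f"[{idx}] {cmd}")
        subprocess.run(shlex.split(cmd), check=True)  # raises if a run fails

One note on the command itself: with LightningCLI, `--print_config` prints the fully composed configuration and exits instead of training, so the variant of the command ending in `--print_config` is a dry run of the argument parsing rather than an actual training launch.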