Browse Source

optimizer search

new-sep-loss
Michael Pilosov, PhD 10 months ago
parent
commit
3b700aee70
  1. 19
      newsearch.py

19
newsearch.py

@@ -27,7 +27,17 @@ widths = [2**k for k in range(4, 15)]
# Hyperparameter search space: fixed batch size and epoch budget,
# a large pool of RNG seeds, and the set of optimizers to sweep.
batch_size_values = [256]
max_epochs_values = [100]
seeds = [*range(21, 1992)]

# Class names under torch.optim to try (interpolated into the CLI command).
optimizers = [
    "Adagrad", "Adam", "SGD",
    "AdamW", "LBFGS", "RAdam",
    "RMSprop", "SparseAdam", "Adadelta",
]
# Generate all possible combinations of hyperparameters.
# NOTE(review): the alpha/lr/bs generator clauses sit in a diff hunk elided
# from this view; their iterable names are inferred from the tuple fields —
# confirm against the full newsearch.py.
all_params = [
    # fix: emit `opt` as the seventh tuple field so the 7-way unpack in the
    # search loop (`a, lr, bs, me, s, w, opt = params`) does not raise
    # ValueError on every configuration
    (alpha, lr, bs, me, s, w, opt)
    for alpha in alpha_values
    for lr in lr_values
    for bs in batch_size_values
    for me in max_epochs_values
    for s in seeds
    for w in widths
    for opt in optimizers
]
@@ -44,7 +55,7 @@ all_params = [
# Randomly sample at most NUM_JOBS configurations from the full grid.
search_params = sample(all_params, min(NUM_JOBS, len(all_params)))
for idx, params in enumerate(search_params):
    # Seven fields per configuration; `opt` names the torch.optim class.
    a, lr, bs, me, s, w, opt = params
    # Lightning CLI invocation for one run. Trailing backslashes keep the
    # multi-line f-string a single shell command when executed.
    # NOTE(review): additional `fit` flags (model/data/trainer args) live in
    # a diff hunk elided from this view — confirm against the full file; the
    # unused locals (a, bs, me, s, w) are presumably interpolated there.
    cmd = f"""
python newmain.py fit \
--trainer.callbacks callbacks.SaveImageCallback \
--trainer.callbacks.init_args.final_dir out \
--trainer.callbacks.init_args.save_interval 0 \
--optimizer torch.optim.{opt} \
--optimizer.init_args.lr {lr} \
--lr_scheduler lightning.pytorch.cli.ReduceLROnPlateau \
--lr_scheduler.init_args.monitor hp_metric \
--lr_scheduler.init_args.factor 0.05 \
--lr_scheduler.init_args.patience 5 \
--lr_scheduler.init_args.cooldown 10 \
--lr_scheduler.init_args.verbose true
"""
# job_name = f"color2_{bs}_{a}_{lr:2.2e}"

Loading…
Cancel
Save