
job looks good to go

Branch: plotting-unify
Author: Michael Pilosov, PhD (9 months ago)
Commit: 97f51ef8b4
newsearch.py (17 lines changed)

@@ -8,8 +8,7 @@ from lightning_sdk import Machine, Studio # noqa: F401
 # consistency of randomly sampled experiments.
 seed(19920921)
-NUM_JOBS = 1
+NUM_JOBS = 10
 # Define the ranges or sets of values for each hyperparameter
 # alpha_values = list(np.round(np.linspace(2, 4, 21), 4))
@@ -21,11 +20,12 @@ learning_rate_values = [1e-3]
 alpha_values = [0, 0.1]
 widths = [2**k for k in range(4, 13)]
 depths = [1, 2, 4, 8, 16]
+dropouts = [0, 0.25, 0.5]
 # widths, depths = [512], [4]
 batch_size_values = [256]
 max_epochs_values = [420]  # at 12 fps, around 35s
-seeds = list(range(20, 1992))
+seeds = list(range(21, 1992))
 optimizers = [
     # "Adagrad",
     "Adam",
@@ -39,7 +39,7 @@ optimizers = [
 # Generate all possible combinations of hyperparameters
 all_params = [
-    (alpha, lr, bs, me, s, w, d, opt)
+    (alpha, lr, bs, me, s, w, d, opt, dr)
     for alpha in alpha_values
     for lr in learning_rate_values
     for bs in batch_size_values
@@ -48,6 +48,7 @@ all_params = [
     for w in widths
     for d in depths
     for opt in optimizers
+    for dr in dropouts
 ]
@@ -58,7 +59,7 @@ search_params = sample(all_params, min(NUM_JOBS, len(all_params)))
 # --trainer.callbacks.init_args.monitor hp_metric \
 for idx, params in enumerate(search_params):
-    a, lr, bs, me, s, w, d, opt = params
+    a, lr, bs, me, s, w, d, opt, dr = params
     # cmd = f"cd ~/colors && python main.py --alpha {a} --lr {lr} --bs {bs} --max_epochs {me} --seed {s} --width {w}"
     cmd = f"""
 cd ~/colors && python newmain.py fit \
@@ -72,7 +73,7 @@ cd ~/colors && python newmain.py fit \
 --model.bias true \
 --model.loop true \
 --model.transform tanh \
---model.dropout 0.5 \
+--model.dropout {dr} \
 --trainer.min_epochs 10 \
 --trainer.max_epochs {me} \
 --trainer.log_every_n_steps 3 \
@@ -84,19 +85,19 @@ cd ~/colors && python newmain.py fit \
 --optimizer torch.optim.{opt} \
 --optimizer.init_args.lr {lr} \
 --trainer.callbacks+ lightning.pytorch.callbacks.LearningRateFinder
 """
 # --lr_scheduler lightning.pytorch.cli.ReduceLROnPlateau \
 # --lr_scheduler.init_args.monitor hp_metric \
 # --lr_scheduler.init_args.factor 0.05 \
 # --lr_scheduler.init_args.patience 5 \
 # --lr_scheduler.init_args.cooldown 10 \
 # --lr_scheduler.init_args.verbose true
 """
     print(f"Running {params}: {cmd}")
     try:
         studio = Studio("colors-animate-jobs")
         studio.install_plugin("jobs")
         job_plugin = studio.installed_plugins["jobs"]
-        job_name = f"color-animate-{idx}-{s}"
+        job_name = f"colors-animate-20240303-{idx}"
         job_plugin.run(cmd, machine=Machine.T4, name=job_name)
         # Run the command and wait for it to complete
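For reference, the revised newsearch.py enumerates the Cartesian product of all hyperparameter lists, now including dropouts, samples NUM_JOBS configurations at random, and submits one Lightning Studio job per configuration. Below is a minimal sketch of that flow under stated assumptions: it uses itertools.product in place of the script's nested comprehension, sweeps only width and dropout, and the --model.width flag is hypothetical (it does not appear in the visible hunks); the Studio and jobs-plugin calls mirror the ones already shown in the diff.

    from itertools import product
    from random import sample, seed

    from lightning_sdk import Machine, Studio

    seed(19920921)
    NUM_JOBS = 10

    # Reduced grid for illustration; the real script also sweeps alpha,
    # learning rate, batch size, max epochs, seed, depth, and optimizer.
    widths = [2**k for k in range(4, 13)]
    dropouts = [0, 0.25, 0.5]

    # Cartesian product of all factors (itertools.product stands in for the
    # nested list comprehension used in newsearch.py).
    all_params = list(product(widths, dropouts))

    # Never request more jobs than there are configurations.
    search_params = sample(all_params, min(NUM_JOBS, len(all_params)))

    studio = Studio("colors-animate-jobs")
    studio.install_plugin("jobs")
    job_plugin = studio.installed_plugins["jobs"]

    for idx, (w, dr) in enumerate(search_params):
        # --model.width is an assumed flag; --model.dropout {dr} is the flag
        # this commit parametrizes (previously hard-coded to 0.5).
        cmd = f"cd ~/colors && python newmain.py fit --model.width {w} --model.dropout {dr}"
        job_plugin.run(cmd, machine=Machine.T4, name=f"colors-animate-20240303-{idx}")

Seeding the random module before sampling, as the script does with seed(19920921), keeps the randomly drawn configurations consistent across reruns.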
