
seems to learn (supervised) consistently now

new-sep-loss
Michael Pilosov, PhD 10 months ago
commit a49f166252
  1. model.py (3 lines changed)
  2. newsearch.py (2 lines changed)

model.py (3 changes)

@@ -14,6 +14,7 @@ class ColorTransformerModel(L.LightningModule):
         depth: int = 1,
         bias: bool = False,
         alpha: float = 0,
+        lr: float = 0.01,
     ):
         super().__init__()
         self.save_hyperparameters()
@@ -84,7 +85,7 @@ class ColorTransformerModel(L.LightningModule):
     def configure_optimizers(self):
         optimizer = torch.optim.SGD(
             self.parameters(),
-            lr=0.1,
+            lr=self.hparams.lr,
         )
         lr_scheduler = ReduceLROnPlateau(
             optimizer, mode="min", factor=0.05, patience=5, cooldown=10, verbose=True

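For context, a minimal sketch of how the new lr argument flows through the module after this commit: save_hyperparameters() records it on self.hparams, and configure_optimizers reads it from there instead of the hard-coded 0.1. The placeholder layer and the monitored metric name are assumptions for illustration; the real network, loss, and scheduler wiring are not shown in this diff.

import lightning as L
import torch
from torch.optim.lr_scheduler import ReduceLROnPlateau


class ColorTransformerModel(L.LightningModule):
    def __init__(
        self,
        depth: int = 1,
        bias: bool = False,
        alpha: float = 0,
        lr: float = 0.01,
    ):
        super().__init__()
        # save_hyperparameters() stores every __init__ argument on self.hparams,
        # which is how the learning rate reaches configure_optimizers below.
        self.save_hyperparameters()
        # Placeholder layer so the module has parameters; the real network
        # (built from depth/bias/alpha) is omitted in this sketch.
        self.network = torch.nn.Linear(3, 3, bias=bias)

    def configure_optimizers(self):
        optimizer = torch.optim.SGD(
            self.parameters(),
            lr=self.hparams.lr,  # was hard-coded to 0.1 before this commit
        )
        lr_scheduler = ReduceLROnPlateau(
            optimizer, mode="min", factor=0.05, patience=5, cooldown=10, verbose=True
        )
        # "train_loss" as the monitored metric is an assumption; the diff does
        # not show which metric the scheduler watches.
        return {
            "optimizer": optimizer,
            "lr_scheduler": {"scheduler": lr_scheduler, "monitor": "train_loss"},
        }

With the knob exposed this way, a search script can pass a different lr per run instead of editing the model file.
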
newsearch.py (2 changes)

@@ -33,7 +33,7 @@ alpha_values = [1.0]
 widths, depths = [512], [4]
 batch_size_values = [256]
-max_epochs_values = [20]
+max_epochs_values = [50]
 seeds = list(range(21, 1992))
 optimizers = [
     # "Adagrad",

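The grid values below are copied from the diff above (max_epochs raised from 20 to 50); the Cartesian-product loop and the per-run body are assumptions about how newsearch.py consumes the grid, sketched only to show what each configuration sweeps.

from itertools import product

# Grid values as shown in the diff above (max_epochs raised from 20 to 50).
alpha_values = [1.0]
widths, depths = [512], [4]
batch_size_values = [256]
max_epochs_values = [50]
seeds = list(range(21, 1992))

# One run per combination; with a single value for everything except the seed,
# this sweeps 1971 seeds at 50 epochs each.
for alpha, width, depth, batch_size, max_epochs, seed in product(
    alpha_values, widths, depths, batch_size_values, max_epochs_values, seeds
):
    # Hypothetical per-run body: build the model with this commit's lr knob and fit.
    # model = ColorTransformerModel(depth=depth, alpha=alpha, lr=0.01)
    # trainer = L.Trainer(max_epochs=max_epochs)
    # trainer.fit(model, ...)
    pass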