Commit
Merge branch 'narval-migration' of https://github.com/mwalmsley/zoobot into narval-migration
mwalmsley committed Mar 15, 2024
2 parents fa60f66 + edc58bc commit cad9786
Showing 1 changed file with 9 additions and 1 deletion.
zoobot/pytorch/training/finetune.py (10 changes: 9 additions & 1 deletion)
@@ -79,7 +79,8 @@ def __init__(
         cosine_schedule=False,
         warmup_epochs=10,
         max_cosine_epochs=100,
-        max_learning_rate_reduction_factor=0.01
+        max_learning_rate_reduction_factor=0.01,
+        from_scratch=False
     ):
         super().__init__()
 
@@ -123,6 +124,8 @@ def __init__(
         self.max_cosine_epochs = max_cosine_epochs
         self.max_learning_rate_reduction_factor = max_learning_rate_reduction_factor
 
+        self.from_scratch = from_scratch
+
         self.always_train_batchnorm = always_train_batchnorm
         if self.always_train_batchnorm:
             raise NotImplementedError('Temporarily deprecated, always_train_batchnorm=True not supported')
@@ -159,6 +162,11 @@ def configure_optimizers(self):
 
         logging.info(f'Encoder architecture to finetune: {type(self.encoder)}')
 
+        if self.from_scratch:
+            logging.warning('self.from_scratch is True, training everything and ignoring all settings')
+            params += [{"params": self.encoder.parameters(), "lr": lr}]
+            return torch.optim.AdamW(params, weight_decay=self.weight_decay)
+
         if isinstance(self.encoder, timm.models.EfficientNet):  # includes v2
             # TODO for now, these count as separate layers, not ideal
             early_tuneable_layers = [self.encoder.conv_stem, self.encoder.bn1]
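For context on the change above: when from_scratch=True, configure_optimizers skips the layer-wise finetuning logic and returns an AdamW optimizer with every encoder parameter in a single param group at the base learning rate. A minimal usage sketch follows; FinetuneableZoobotClassifier lives in the same module, but the constructor arguments shown (encoder name, num_classes, learning_rate) are placeholders assumed for illustration, not taken from this commit.

# Hypothetical usage sketch, not part of this commit.
# from_scratch=True makes configure_optimizers() train all encoder parameters
# at the base learning rate with AdamW, instead of the usual per-layer setup.
from zoobot.pytorch.training.finetune import FinetuneableZoobotClassifier

model = FinetuneableZoobotClassifier(
    name='hf_hub:mwalmsley/zoobot-encoder-convnext_nano',  # assumed pretrained encoder name, placeholder
    num_classes=2,       # placeholder label count
    learning_rate=1e-4,  # base lr applied to the whole encoder when from_scratch=True
    from_scratch=True    # new flag added in this commit
)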
