Skip to content

Commit

Permalink
add simsiam experiment configs
Browse files Browse the repository at this point in the history
  • Loading branch information
chris-santiago committed Sep 1, 2023
1 parent 1410578 commit 7f2df3d
Show file tree
Hide file tree
Showing 14 changed files with 622 additions and 3 deletions.
File renamed without changes.
26 changes: 26 additions & 0 deletions autoencoders/conf/experiment/simsiam-resnet.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# @package _global_

# to execute this experiment run:
# python train.py experiment=simsiam-resnet

defaults:
  - override /data: simsiam
  - override /model: simsiam
  - override /trainer: default
  - override /callbacks: siam

# all parameters below will be merged with parameters from default configurations set above
# this allows you to overwrite only specified parameters

# NOTE(review): the "cyclicLR" tag has no matching scheduler override in
# defaults above, so the model config's default scheduler is used — confirm
# the tag is accurate or add an `override /scheduler@model.scheduler` entry.
tags: ["${data.name}", "${model.name}", "cyclicLR"]

model:
  nn:
    _target_: autoencoders.models.simsiam.SimSiam
    encoder:
      # NOTE(review): despite the "-resnet" filename this experiment selects the
      # CNN encoder, while simsiam.yaml selects ResnetEncoder — verify the two
      # configs were not swapped.
      _target_: autoencoders.models.simsiam.CNNEncoderProjection
      channels_in: 1
      base_channels: 32
      latent_dim: ${model.nn.dim}
    dim: 512
    pred_dim: 512
24 changes: 24 additions & 0 deletions autoencoders/conf/experiment/simsiam.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# @package _global_

# to execute this experiment run:
# python train.py experiment=simsiam

defaults:
  - override /data: simsiam
  - override /model: simsiam
  - override /trainer: default
  - override /callbacks: siam

# all parameters below will be merged with parameters from default configurations set above
# this allows you to overwrite only specified parameters

# NOTE(review): the "cyclicLR" tag has no matching scheduler override in
# defaults above, so the model config's default scheduler is used — confirm
# the tag is accurate or add an `override /scheduler@model.scheduler` entry.
tags: ["${data.name}", "${model.name}", "cyclicLR"]

model:
  nn:
    _target_: autoencoders.models.simsiam.SimSiam
    encoder:
      # NOTE(review): this plain "simsiam" experiment selects ResnetEncoder,
      # while simsiam-resnet.yaml selects the CNN encoder — verify the two
      # configs were not swapped.
      _target_: autoencoders.models.simsiam.ResnetEncoder
      latent_dim: ${model.nn.dim}
    dim: 1024
    pred_dim: 512
6 changes: 4 additions & 2 deletions autoencoders/conf/model/simsiam.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,9 @@ name: SimSiam
nn:
_target_: autoencoders.models.simsiam.SimSiam
encoder:
_target_: autoencoders.models.simsiam.ResnetEncoder
_target_: autoencoders.models.simsiam.CNNEncoderProjection
channels_in: 1
base_channels: 32
latent_dim: ${model.nn.dim}
dim: 1024
dim: 512
pred_dim: 512
17 changes: 17 additions & 0 deletions autoencoders/models/simsiam.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from torchvision.models.resnet import BasicBlock

from autoencoders.models.base import BaseModule
from autoencoders.modules import CNNEncoder


class ResnetEncoder(ResNet):
Expand Down Expand Up @@ -40,6 +41,22 @@ def forward(self, x):
return self.model(x)


class CNNEncoderProjection(CNNEncoder):
    """CNN encoder followed by a projection MLP, for SimSiam pre-training.

    Extends ``CNNEncoder`` with a three-layer projection head applied to the
    encoder output. The final BatchNorm uses ``affine=False`` — presumably
    following the SimSiam projection-head design; TODO confirm against the
    reference implementation.
    """

    def __init__(self, channels_in: int, base_channels: int, latent_dim: int):
        # Parent constructor builds the convolutional trunk. forward() below
        # reads it as ``self.model`` — assumes CNNEncoder exposes that
        # attribute; verify in autoencoders.modules.CNNEncoder.
        super().__init__(channels_in, base_channels, latent_dim)

        # Projection head: every layer maps latent_dim -> latent_dim.
        self.projection = nn.Sequential(
            ProjectionLayer(latent_dim, latent_dim),
            ProjectionLayer(latent_dim, latent_dim),
            nn.Linear(latent_dim, latent_dim),
            nn.BatchNorm1d(latent_dim, affine=False),
        )

    def forward(self, x):
        # Encode with the inherited trunk, then project into the SimSiam
        # embedding space.
        z = self.model(x)
        return self.projection(z)


class SimSiam(BaseModule):
def __init__(
self,
Expand Down
88 changes: 88 additions & 0 deletions outputs/SimSiam/train/2023-09-01/10-39-53/.hydra/config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
# Auto-generated by Hydra: resolved composite config snapshot for the run
# recorded under outputs/SimSiam/train/2023-09-01/10-39-53/.hydra/.
# Do not hand-edit — regenerate by re-running the training entry point.
data:
  batch_size: 256
  n_workers: 10
  name: mnist
  train:
    _target_: torch.utils.data.DataLoader
    dataset:
      _target_: autoencoders.data.SimSiamDataset
      dataset:
        _target_: autoencoders.data.get_mnist_dataset
        train: true
      num_ops: 1
    batch_size: ${data.batch_size}
    shuffle: true
    num_workers: ${data.n_workers}
  valid:
    _target_: torch.utils.data.DataLoader
    dataset:
      _target_: autoencoders.data.SimSiamDataset
      dataset:
        _target_: autoencoders.data.get_mnist_dataset
        train: false
      num_ops: 1
    batch_size: ${data.batch_size}
    shuffle: false
    num_workers: ${data.n_workers}
model:
  optimizer:
    _target_: torch.optim.Adam
    _partial_: true
    lr: 0.001
    betas:
    - 0.9
    - 0.999
    weight_decay: 0
  scheduler:
    _target_: torch.optim.lr_scheduler.ReduceLROnPlateau
    _partial_: true
    mode: min
    factor: 0.1
    patience: 10
  name: SimSiam
  nn:
    _target_: autoencoders.models.simsiam.SimSiam
    encoder:
      _target_: autoencoders.models.simsiam.CNNEncoderProjection
      channels_in: 1
      base_channels: 32
      latent_dim: ${model.nn.dim}
    dim: 512
    pred_dim: 512
trainer:
  _target_: pytorch_lightning.Trainer
  max_epochs: 100
  accelerator: mps
  devices: 1
logger:
  _target_: pytorch_lightning.loggers.WandbLogger
  project: autoencoders
  name: null
  id: null
  group: null
  job_type: null
  save_dir: ${hydra:runtime.output_dir}
  log_model: true
  tags: ${tags}
callbacks:
  model_summary:
    _target_: pytorch_lightning.callbacks.RichModelSummary
  progress_bar:
    _target_: pytorch_lightning.callbacks.RichProgressBar
    refresh_rate: 5
    leave: true
  early_stopping:
    _target_: pytorch_lightning.callbacks.EarlyStopping
    monitor: train-loss
    min_delta: 0.001
    patience: 5
    check_on_train_epoch_end: true
  model_checkpoint:
    _target_: pytorch_lightning.callbacks.ModelCheckpoint
    dirpath: ${hydra:runtime.output_dir}/checkpoints
    monitor: train-loss
    save_top_k: 1
    save_on_train_epoch_end: true
tags:
- ${data.name}
- ${model.name}
175 changes: 175 additions & 0 deletions outputs/SimSiam/train/2023-09-01/10-39-53/.hydra/hydra.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,175 @@
# Auto-generated by Hydra: runtime/framework config snapshot
# (outputs/SimSiam/train/2023-09-01/10-39-53/.hydra/hydra.yaml).
# Do not hand-edit — this records launcher, logging, overrides, and config
# group choices for the run.
hydra:
  run:
    dir: outputs/${model.name}/${hydra.job.name}/${now:%Y-%m-%d}/${now:%H-%M-%S}
  sweep:
    dir: outputs/${model.name}/${hydra.job.name}/multirun
    subdir: ${hydra.job.override_dirname}/${now:%Y-%m-%d}/${now:%H-%M-%S}
  launcher:
    _target_: hydra_plugins.hydra_joblib_launcher.joblib_launcher.JoblibLauncher
    n_jobs: -1
    backend: null
    prefer: processes
    require: null
    verbose: 0
    timeout: null
    pre_dispatch: 2*n_jobs
    batch_size: auto
    temp_folder: null
    max_nbytes: null
    mmap_mode: r
  sweeper:
    _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
    max_batch_size: null
    params: null
  help:
    app_name: ${hydra.job.name}
    header: '${hydra.help.app_name} is powered by Hydra.
      '
    footer: 'Powered by Hydra (https://hydra.cc)
      Use --hydra-help to view Hydra specific help
      '
    template: '${hydra.help.header}
      == Configuration groups ==
      Compose your configuration from those groups (group=option)
      $APP_CONFIG_GROUPS
      == Config ==
      Override anything in the config (foo.bar=value)
      $CONFIG
      ${hydra.help.footer}
      '
  hydra_help:
    template: 'Hydra (${hydra.runtime.version})
      See https://hydra.cc for more info.
      == Flags ==
      $FLAGS_HELP
      == Configuration groups ==
      Compose your configuration from those groups (For example, append hydra/job_logging=disabled
      to command line)
      $HYDRA_CONFIG_GROUPS
      Use ''--cfg hydra'' to Show the Hydra config.
      '
    hydra_help: ???
  hydra_logging:
    version: 1
    formatters:
      simple:
        format: '[%(asctime)s][HYDRA] %(message)s'
    handlers:
      console:
        class: logging.StreamHandler
        formatter: simple
        stream: ext://sys.stdout
    root:
      level: INFO
      handlers:
      - console
    loggers:
      logging_example:
        level: DEBUG
    disable_existing_loggers: false
  job_logging:
    version: 1
    formatters:
      simple:
        format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
    handlers:
      console:
        class: logging.StreamHandler
        formatter: simple
        stream: ext://sys.stdout
      file:
        class: logging.FileHandler
        formatter: simple
        filename: ${hydra.runtime.output_dir}/${hydra.job.name}.log
    root:
      level: INFO
      handlers:
      - console
      - file
    disable_existing_loggers: false
  env: {}
  mode: RUN
  searchpath: []
  callbacks: {}
  output_subdir: .hydra
  overrides:
    hydra:
    - hydra.mode=RUN
    task:
    - data=simsiam
    - model=simsiam
    - callbacks=siam
  job:
    name: train
    chdir: null
    override_dirname: callbacks=siam,data=simsiam,model=simsiam
    id: ???
    num: ???
    config_name: config
    env_set: {}
    env_copy: []
    config:
      override_dirname:
        kv_sep: '='
        item_sep: ','
        exclude_keys: []
  runtime:
    version: 1.3.2
    version_base: '1.3'
    cwd: /Users/chrissantiago/Dropbox/GitHub/autoencoders
    config_sources:
    - path: hydra.conf
      schema: pkg
      provider: hydra
    - path: /Users/chrissantiago/Dropbox/GitHub/autoencoders/autoencoders/conf
      schema: file
      provider: main
    - path: ''
      schema: structured
      provider: schema
    output_dir: /Users/chrissantiago/Dropbox/GitHub/autoencoders/outputs/SimSiam/train/2023-09-01/10-39-53
    choices:
      experiment: null
      callbacks: siam
      trainer: default
      model: simsiam
      scheduler@model.scheduler: plateau
      optimizer@model.optimizer: adam
      data: simsiam
      hydra/env: default
      hydra/callbacks: null
      hydra/job_logging: default
      hydra/hydra_logging: default
      hydra/hydra_help: default
      hydra/help: default
      hydra/sweeper: basic
      hydra/launcher: joblib
      hydra/output: default
  verbose: false
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
- data=simsiam
- model=simsiam
- callbacks=siam
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
? /Users/chrissantiago/Dropbox/GitHub/autoencoders/outputs/SimSiam/train/2023-09-01/10-39-53/checkpoints/epoch=35-step=8460.ckpt
: -0.991788387298584
Loading

0 comments on commit 7f2df3d

Please sign in to comment.