Skip to content

Commit

Permalink
Synchronize with DENGE
Browse files Browse the repository at this point in the history
  • Loading branch information
kargibora committed Mar 28, 2023
1 parent 77b5bef commit dd5b6b1
Show file tree
Hide file tree
Showing 19 changed files with 753 additions and 233 deletions.
61 changes: 61 additions & 0 deletions configs/boston_dataset.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
# Ensemble-of-MLPs configuration for the Boston housing regression dataset
# (13 input features, 1 regression target).
estimator:  # basic MLP layer configuration
  class: 'ensemble'
  num_networks: 5
  network:
    # Estimator branch predicts a variance alongside the point estimate.
    estimator_network:
      - fc1: {class: Linear, in_features: 13, out_features: 50}
      - projection: {class: LinearVarianceNetworkHead, in_features: 50, out_features: 1}
    # Predictor branch is a plain point-estimate head.
    predictor_network:
      - fc1: {class: Linear, in_features: 13, out_features: 50}
      - projection: {class: Linear, in_features: 50, out_features: 1}
optimizer:
  class: 'Adam'
  lr: 0.1
dataset:
  class: 'xls'
  xls_path: "regression_datasets/boston.csv"
  batch_size: 100
  cv_split_num: 5
  test_ratio: 0.10
  transforms:
    # Per-feature standardization statistics (train and val use the same
    # train-set statistics, as is standard practice).
    train:
      x:
        - class: Standardize
          mean: [3.613523557312254, 11.363636363636363, 11.13677865612648, 0.0691699604743083, 0.5546950592885376, 6.284634387351779, 68.57490118577076, 3.795042687747036, 9.549407114624506, 408.2371541501976, 18.455533596837945, 356.6740316205534, 12.653063241106722]
          std: [8.60154510533249, 23.32245299451514, 6.860352940897585, 0.25399404134041037, 0.11587767566755595, 0.7026171434153233, 28.148861406903617, 2.105710126627611, 8.707259384239366, 168.53711605495903, 2.1649455237144406, 91.29486438415783, 7.141061511348571]
      y:
        - {class: Standardize, mean: [22.532806324110677], std: [9.197104087379818]}
    val:
      x:
        - class: Standardize
          mean: [3.613523557312254, 11.363636363636363, 11.13677865612648, 0.0691699604743083, 0.5546950592885376, 6.284634387351779, 68.57490118577076, 3.795042687747036, 9.549407114624506, 408.2371541501976, 18.455533596837945, 356.6740316205534, 12.653063241106722]
          std: [8.60154510533249, 23.32245299451514, 6.860352940897585, 0.25399404134041037, 0.11587767566755595, 0.7026171434153233, 28.148861406903617, 2.105710126627611, 8.707259384239366, 168.53711605495903, 2.1649455237144406, 91.29486438415783, 7.141061511348571]
      y:
        - {class: Standardize, mean: [22.532806324110677], std: [9.197104087379818]}
train:
  train_type: epoch
  num_iter: 40
  weight_type: both
# logger:
#   type: 'wandb'
#   project: 'uncertainty-estimation'
#   entity: 'kbora'
#   name: 'Boston Dataset (Baseline,MAE)'
metrics:
  categorize: true
  list:
    train:
      - "rmse"
      - "mae"
    val:
      - "rmse"
      - "mae"
      - "gm"
    test:
      - "rmse"
      - "mae"
      - "gm"
62 changes: 62 additions & 0 deletions configs/concrete_dataset.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
# Ensemble-of-MLPs configuration for the UCI Concrete compressive-strength
# regression dataset (8 input features, 1 regression target).
estimator:  # basic MLP layer configuration
  class: 'ensemble'
  num_networks: 5
  network:
    # Estimator branch predicts a variance alongside the point estimate.
    estimator_network:
      - fc1: {class: Linear, in_features: 8, out_features: 50}
      - projection: {class: LinearVarianceNetworkHead, in_features: 50, out_features: 1}
    # Predictor branch is a plain point-estimate head.
    predictor_network:
      - fc1: {class: Linear, in_features: 8, out_features: 50}
      - projection: {class: Linear, in_features: 50, out_features: 1}
optimizer:
  class: 'Adam'
  lr: 0.01
dataset:
  class: 'xls'
  xls_path: "regression_datasets/Concrete_Data.xls"
  batch_size: 512
  cv_split_num: 1
  test_ratio: 0.10
  transforms:
    # Per-feature standardization statistics (train and val use the same
    # train-set statistics, as is standard practice).
    train:
      x:
        - class: Standardize
          mean: [281.16563106796116, 73.89548543689321, 54.1871359223301, 181.56635922330096, 6.203111650485437, 972.9185922330097, 773.5788834951456, 45.662135922330094]
          std: [104.5071416428718, 86.27910364316895, 63.99646938186508, 21.355567066911522, 5.973491650590111, 77.75381809178927, 80.1754273990239, 63.16991158103249]
      y:
        - {class: Standardize, mean: [35.8178358261136], std: [16.705679174867946]}
    val:
      x:
        - class: Standardize
          mean: [281.16563106796116, 73.89548543689321, 54.1871359223301, 181.56635922330096, 6.203111650485437, 972.9185922330097, 773.5788834951456, 45.662135922330094]
          std: [104.5071416428718, 86.27910364316895, 63.99646938186508, 21.355567066911522, 5.973491650590111, 77.75381809178927, 80.1754273990239, 63.16991158103249]
      y:
        - {class: Standardize, mean: [35.8178358261136], std: [16.705679174867946]}
train:
  train_type: epoch
  num_iter: 40
  val_every: 40
  weight_type: both
# logger:
#   type: 'wandb'
#   project: 'uncertainty-estimation'
#   entity: 'kbora'
#   name: 'Concrete Dataset (Baseline,MAE)'
metrics:
  categorize: true
  list:
    train:
      - "rmse"
      - "mae"
    val:
      - "rmse"
      - "mae"
      - "gm"
    test:
      - "rmse"
      - "mae"
      - "gm"
2 changes: 1 addition & 1 deletion configs/imdb_wiki_dataset.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ logger:
type: 'wandb'
project: 'uncertainty-estimation'
entity: 'kbora'
name: 'IMDB-WIKI R50 Weighted (Baseline)'
name: 'IMDB-WIKI R50 Estimator'
metrics:
categorize : True
list:
Expand Down
57 changes: 57 additions & 0 deletions configs/imdb_wiki_dataset_aleatoric.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
# ResNet-50 ensemble configuration for IMDB-WIKI age regression,
# with sample weighting driven by aleatoric uncertainty.
estimator:  # backbone / prediction-head configuration
  class: 'ensemble'
  num_networks: 5
  network:
    # layers [3,4,6,3] with Bottleneck blocks is the ResNet-50 layout.
    estimator_network:
      - resnet_branch: {class: ResNet, block: Bottleneck, layers: [3, 4, 6, 3], pred_head: LinearVarianceNetworkHead}
    predictor_network:
      - resnet_branch: {class: ResNet, block: Bottleneck, layers: [3, 4, 6, 3], pred_head: Linear}
optimizer:
  class: 'Adam'
  lr: 0.001
  scheduler: {class: MultiStepLR, milestones: [60, 80], gamma: 0.1}
dataset:
  class: 'imdb-wiki'
  data_dir: "regression_datasets/imdb_wiki/data"
  batch_size: 64
  transforms:
    train:
      x:
        - {class: Resize, size: [224, 224]}
        - {class: RandomCrop, size: 224, padding: 16}
        - {class: RandomHorizontalFlip}
        - {class: ToTensor}
        - {class: Normalize, mean: [.5, .5, .5], std: [.5, .5, .5]}
      y:
        # Targets rescaled to [0, 1] over the dataset's age range.
        - {class: MinMaxNormalize, min_val: [0], max_val: [186]}
    val:
      x:
        - {class: Resize, size: [224, 224]}
        - {class: ToTensor}
        - {class: Normalize, mean: [.5, .5, .5], std: [.5, .5, .5]}
      y:
        - {class: MinMaxNormalize, min_val: [0], max_val: [186]}
train:
  train_type: epoch
  num_iter: 90
  val_every: 1
  weight_type: 'aleatoric'
logger:
  type: 'wandb'
  project: 'uncertainty-estimation'
  entity: 'kbora'
  name: 'IMDB-WIKI R50 Weighted (Aleatoric, MAE)'
metrics:
  categorize: true
  list:
    train:
      - "rmse"
      - "mae"
    val:
      - "rmse"
      - "mae"
      - "gm"
    test:
      - "rmse"
      - "mae"
      - "gm"
57 changes: 57 additions & 0 deletions configs/imdb_wiki_dataset_baseline.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
# ResNet-50 ensemble configuration for IMDB-WIKI age regression,
# baseline weighting scheme.
estimator:  # backbone / prediction-head configuration
  class: 'ensemble'
  num_networks: 5
  network:
    # layers [3,4,6,3] with Bottleneck blocks is the ResNet-50 layout.
    estimator_network:
      - resnet_branch: {class: ResNet, block: Bottleneck, layers: [3, 4, 6, 3], pred_head: LinearVarianceNetworkHead}
    predictor_network:
      - resnet_branch: {class: ResNet, block: Bottleneck, layers: [3, 4, 6, 3], pred_head: Linear}
optimizer:
  class: 'Adam'
  lr: 0.001
  scheduler: {class: MultiStepLR, milestones: [60, 80], gamma: 0.1}
dataset:
  class: 'imdb-wiki'
  data_dir: "regression_datasets/imdb_wiki/data"
  batch_size: 64
  transforms:
    train:
      x:
        - {class: Resize, size: [224, 224]}
        - {class: RandomCrop, size: 224, padding: 16}
        - {class: RandomHorizontalFlip}
        - {class: ToTensor}
        - {class: Normalize, mean: [.5, .5, .5], std: [.5, .5, .5]}
      y:
        # Targets rescaled to [0, 1] over the dataset's age range.
        - {class: MinMaxNormalize, min_val: [0], max_val: [186]}
    val:
      x:
        - {class: Resize, size: [224, 224]}
        - {class: ToTensor}
        - {class: Normalize, mean: [.5, .5, .5], std: [.5, .5, .5]}
      y:
        - {class: MinMaxNormalize, min_val: [0], max_val: [186]}
train:
  train_type: epoch
  num_iter: 90
  val_every: 1
  weight_type: 'baseline'
logger:
  type: 'wandb'
  project: 'uncertainty-estimation'
  entity: 'kbora'
  name: 'IMDB-WIKI R50 Weighted (Baseline, MAE)'
metrics:
  categorize: true
  list:
    train:
      - "rmse"
      - "mae"
    val:
      - "rmse"
      - "mae"
      - "gm"
    test:
      - "rmse"
      - "mae"
      - "gm"
57 changes: 57 additions & 0 deletions configs/imdb_wiki_dataset_epistemic.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
# ResNet-50 ensemble configuration for IMDB-WIKI age regression,
# with sample weighting driven by epistemic uncertainty.
estimator:  # backbone / prediction-head configuration
  class: 'ensemble'
  num_networks: 5
  network:
    # layers [3,4,6,3] with Bottleneck blocks is the ResNet-50 layout.
    estimator_network:
      - resnet_branch: {class: ResNet, block: Bottleneck, layers: [3, 4, 6, 3], pred_head: LinearVarianceNetworkHead}
    predictor_network:
      - resnet_branch: {class: ResNet, block: Bottleneck, layers: [3, 4, 6, 3], pred_head: Linear}
optimizer:
  class: 'Adam'
  lr: 0.001
  scheduler: {class: MultiStepLR, milestones: [60, 80], gamma: 0.1}
dataset:
  class: 'imdb-wiki'
  data_dir: "regression_datasets/imdb_wiki/data"
  batch_size: 64
  transforms:
    train:
      x:
        - {class: Resize, size: [224, 224]}
        - {class: RandomCrop, size: 224, padding: 16}
        - {class: RandomHorizontalFlip}
        - {class: ToTensor}
        - {class: Normalize, mean: [.5, .5, .5], std: [.5, .5, .5]}
      y:
        # Targets rescaled to [0, 1] over the dataset's age range.
        - {class: MinMaxNormalize, min_val: [0], max_val: [186]}
    val:
      x:
        - {class: Resize, size: [224, 224]}
        - {class: ToTensor}
        - {class: Normalize, mean: [.5, .5, .5], std: [.5, .5, .5]}
      y:
        - {class: MinMaxNormalize, min_val: [0], max_val: [186]}
train:
  train_type: epoch
  num_iter: 90
  val_every: 1
  weight_type: 'epistemic'
logger:
  type: 'wandb'
  project: 'uncertainty-estimation'
  entity: 'kbora'
  name: 'IMDB-WIKI R50 Weighted (Epistemic, MAE)'
metrics:
  categorize: true
  list:
    train:
      - "rmse"
      - "mae"
    val:
      - "rmse"
      - "mae"
      - "gm"
    test:
      - "rmse"
      - "mae"
      - "gm"
30 changes: 0 additions & 30 deletions configs/toy_dataset.yaml

This file was deleted.

33 changes: 0 additions & 33 deletions configs/xls_dataset.yaml

This file was deleted.

Loading

0 comments on commit dd5b6b1

Please sign in to comment.