parameters_monu.json (78 lines, 1.59 KB)
{
"seed": 1024,
"resume": false,
"image_size": 256,
"num_channels": 1,
"num_channels_disc": 2,
"centered": true,
"use_geometric": false,
"beta_min": 0.1,
"beta_max": 20.0,
"cond_enc_layers": 5,
"cond_enc_num_res_blocks": 1,
"num_channels_dae": 64,
"n_mlp": 3,
"ch_mult": [
1,
1,
1,
2,
2,
4,
4
],
"num_res_blocks": 2,
"attn_resolutions": [
16,
8
],
"dropout": 0.0,
"resamp_with_conv": true,
"conditional": true,
"fir": true,
"fir_kernel": [
1,
3,
3,
1
],
"skip_rescale": true,
"resblock_type": "biggan",
"progressive": "none",
"progressive_input": "residual",
"progressive_combine": "sum",
"attn_scale": 3,
"embedding_type": "positional",
"fourier_scale": 16.0,
"not_use_tanh": false,
"exp": "experiment_batch3_attn3_monu_cDAL_fold0",
"dataset": "monu",
"fold": 0,
"nz": 100,
"num_timesteps": 4,
"z_emb_dim": 256,
"t_emb_dim": 256,
"batch_size": 4,
"num_epoch": 12000,
"T_max": 500,
"ngf": 64,
"lr_g": 0.0002,
"lr_d": 0.0001,
"beta1": 0.5,
"beta2": 0.9,
"no_lr_decay": false,
"use_ema": false,
"ema_decay": 0.9999,
"r1_gamma": 1.0,
"lazy_reg": 10,
"save_content": false,
"save_content_every": 10,
"save_ckpt_every": 10,
"log_step": 10,
"num_proc_node": 1,
"num_process_per_node": 1,
"node_rank": 0,
"local_rank": 1,
"master_address": "127.0.0.1",
"master_port": "6021"
}