generator.py
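"""U-Net style generator (encoder-decoder with skip connections), as used in
pix2pix-style image-to-image translation. Maps a 3-channel 256x256 image to a
3-channel 256x256 image in [-1, 1] (Tanh output)."""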
import torch
import torch.nn as nn
class Block(nn.Module):
    """Single encoder/decoder block: (strided) convolution -> BatchNorm -> activation."""

    def __init__(self, in_channels, out_channels, down=True, act='relu', use_dropout=False):
        super().__init__()
        self.conv = nn.Sequential(
            # Downsampling uses a strided, reflection-padded convolution (halves H and W);
            # upsampling uses a transposed convolution (doubles H and W).
            nn.Conv2d(in_channels, out_channels, kernel_size=4, stride=2, padding=1,
                      bias=False, padding_mode='reflect')
            if down
            else nn.ConvTranspose2d(in_channels, out_channels, kernel_size=4, stride=2,
                                    padding=1, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.ReLU() if act == 'relu' else nn.LeakyReLU(0.2),
        )
        self.use_dropout = use_dropout
        self.dropout = nn.Dropout(0.5)
        self.down = down

    def forward(self, x):
        x = self.conv(x)
        return self.dropout(x) if self.use_dropout else x
class Generator(nn.Module):
    """U-Net generator: 8 downsampling steps (256 -> 1), 8 upsampling steps back to 256,
    with skip connections between mirrored encoder and decoder layers."""

    def __init__(self, in_channels=3, features=64):
        super().__init__()
        # Encoder: each step halves the spatial resolution.
        self.initial_down = nn.Sequential(  # 256 -> 128
            nn.Conv2d(in_channels, features, 4, 2, 1, padding_mode='reflect'),
            nn.LeakyReLU(0.2),
        )
        self.down1 = Block(features, features * 2, down=True, act='leaky', use_dropout=False)      # 128 -> 64
        self.down2 = Block(features * 2, features * 4, down=True, act='leaky', use_dropout=False)  # 64 -> 32
        self.down3 = Block(features * 4, features * 8, down=True, act='leaky', use_dropout=False)  # 32 -> 16
        self.down4 = Block(features * 8, features * 8, down=True, act='leaky', use_dropout=False)  # 16 -> 8
        self.down5 = Block(features * 8, features * 8, down=True, act='leaky', use_dropout=False)  # 8 -> 4
        self.down6 = Block(features * 8, features * 8, down=True, act='leaky', use_dropout=False)  # 4 -> 2
        self.bottleneck = nn.Sequential(  # 2 -> 1
            nn.Conv2d(features * 8, features * 8, 4, 2, 1, padding_mode='reflect'),
            nn.ReLU(),
        )
        # Decoder: each step doubles the spatial resolution. Every block except up1
        # receives the previous decoder output concatenated with the mirrored encoder
        # output, hence the doubled input channel counts.
        self.up1 = Block(features * 8, features * 8, down=False, act='relu', use_dropout=True)
        self.up2 = Block(features * 8 * 2, features * 8, down=False, act='relu', use_dropout=True)
        self.up3 = Block(features * 8 * 2, features * 8, down=False, act='relu', use_dropout=True)
        self.up4 = Block(features * 8 * 2, features * 8, down=False, act='relu', use_dropout=False)
        self.up5 = Block(features * 8 * 2, features * 4, down=False, act='relu', use_dropout=False)
        self.up6 = Block(features * 4 * 2, features * 2, down=False, act='relu', use_dropout=False)
        self.up7 = Block(features * 2 * 2, features, down=False, act='relu', use_dropout=False)
        self.final_up = nn.Sequential(  # 128 -> 256
            nn.ConvTranspose2d(features * 2, in_channels, kernel_size=4, stride=2, padding=1),
            nn.Tanh(),
        )

    def forward(self, x):
        # Encoder path.
        d1 = self.initial_down(x)
        d2 = self.down1(d1)
        d3 = self.down2(d2)
        d4 = self.down3(d3)
        d5 = self.down4(d4)
        d6 = self.down5(d5)
        d7 = self.down6(d6)
        bottleneck = self.bottleneck(d7)
        # Decoder path with skip connections (concatenation along the channel dimension).
        up1 = self.up1(bottleneck)
        up2 = self.up2(torch.cat([up1, d7], dim=1))
        up3 = self.up3(torch.cat([up2, d6], dim=1))
        up4 = self.up4(torch.cat([up3, d5], dim=1))
        up5 = self.up5(torch.cat([up4, d4], dim=1))
        up6 = self.up6(torch.cat([up5, d3], dim=1))
        up7 = self.up7(torch.cat([up6, d2], dim=1))
        return self.final_up(torch.cat([up7, d1], dim=1))
def test():
    # Sanity check: the output shape should match the input shape (1, 3, 256, 256).
    x = torch.randn((1, 3, 256, 256))
    model = Generator(in_channels=3, features=64)
    preds = model(x)
    print(preds.shape)


if __name__ == "__main__":
    test()