-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathblocks.py
More file actions
120 lines (110 loc) · 2.78 KB
/
blocks.py
File metadata and controls
120 lines (110 loc) · 2.78 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
"""
modified from the repository alexandru-dinu/cae:
https://github.com/alexandru-dinu/cae/blob/master/src/models/cae_32x32x32_zero_pad_bin.py
"""
import torch.nn as nn
def ConvBlockA(cin, cout, k, s):
    """Asymmetrically zero-padded convolution followed by LeakyReLU.

    Pads the input by (left=1, right=2, top=1, bottom=2), then applies a
    cin -> cout convolution with a k x k kernel and stride s.
    """
    layers = [
        nn.ZeroPad2d((1, 2, 1, 2)),
        nn.Conv2d(cin, cout, kernel_size=(k, k), stride=(s, s)),
        nn.LeakyReLU(),
    ]
    return nn.Sequential(*layers)
def ConvBlockB(cin, cout, k, s):
    """Two symmetrically zero-padded convolutions with a LeakyReLU between.

    Each convolution is preceded by a 1-pixel zero pad on every side; the
    first maps cin -> cout, the second cout -> cout. Note there is no
    activation after the second convolution.
    """
    first = nn.Conv2d(cin, cout, kernel_size=(k, k), stride=(s, s))
    second = nn.Conv2d(cout, cout, kernel_size=(k, k), stride=(s, s))
    return nn.Sequential(
        nn.ZeroPad2d((1, 1, 1, 1)),
        first,
        nn.LeakyReLU(),
        nn.ZeroPad2d((1, 1, 1, 1)),
        second,
    )
def ConvBlockC(cin, cout, k, s, p):
    """Convolution with built-in padding, followed by tanh.

    Maps cin -> cout with a k x k kernel, stride s, and p pixels of
    (implicit, conv-internal) zero padding; tanh bounds the output
    to [-1, 1].
    """
    conv = nn.Conv2d(
        cin,
        cout,
        kernel_size=(k, k),
        stride=(s, s),
        padding=(p, p),
    )
    return nn.Sequential(conv, nn.Tanh())
def ConvBlockC16(cin, cout, k, s):
    """Asymmetrically zero-padded convolution followed by tanh.

    Same (1, 2, 1, 2) padding scheme as ConvBlockA, but with a tanh
    activation so the output is bounded to [-1, 1].
    """
    stages = [nn.ZeroPad2d((1, 2, 1, 2))]
    stages.append(nn.Conv2d(cin, cout, kernel_size=(k, k), stride=(s, s)))
    stages.append(nn.Tanh())
    return nn.Sequential(*stages)
def ConvBlockD(cin, cout, k, s):
    """Output stage: conv -> LeakyReLU -> reflection pad -> conv to 3 channels -> tanh.

    The first (unpadded) convolution maps cin -> cout; after a 2-pixel
    reflection pad on every side, the second convolution reduces to 3
    channels, and tanh bounds the result to [-1, 1].
    """
    head = nn.Conv2d(cin, cout, kernel_size=(k, k), stride=(s, s))
    tail = nn.Conv2d(cout, 3, kernel_size=(k, k), stride=(s, s))
    return nn.Sequential(
        head,
        nn.LeakyReLU(),
        nn.ReflectionPad2d((2, 2, 2, 2)),
        tail,
        nn.Tanh(),
    )
def ConvBlockD16(cin, cout, k, s):
    """Output stage with 2x upsampling via a transposed convolution.

    Applies an unpadded cin -> cout convolution with LeakyReLU, reflection-
    pads by 1 pixel on every side, then a stride-2 ConvTranspose2d doubles
    the spatial size while reducing to 3 channels; tanh bounds the output
    to [-1, 1].
    """
    # Removed commented-out ZeroPad2d alternative that was left in the body.
    return nn.Sequential(
        nn.Conv2d(
            in_channels=cin,
            out_channels=cout,
            kernel_size=(k, k),
            stride=(s, s)
        ),
        nn.LeakyReLU(),
        nn.ReflectionPad2d((1, 1, 1, 1)),
        nn.ConvTranspose2d(
            in_channels=cout,
            out_channels=3,
            kernel_size=(2, 2),
            stride=(2, 2)
        ),
        nn.Tanh(),
    )
def TransConvBlock(cin, cmid, cout):
    """Conv -> LeakyReLU -> zero pad -> 2x transposed-conv upsample.

    A fixed 3x3, stride-1 convolution maps cin -> cmid; after a 1-pixel
    zero pad on every side, a stride-2 ConvTranspose2d doubles the spatial
    size and maps cmid -> cout. No activation after the transposed conv.
    """
    reducer = nn.Conv2d(cin, cmid, kernel_size=(3, 3), stride=(1, 1))
    upsampler = nn.ConvTranspose2d(cmid, cout, kernel_size=(2, 2), stride=(2, 2))
    return nn.Sequential(
        reducer,
        nn.LeakyReLU(),
        nn.ZeroPad2d((1, 1, 1, 1)),
        upsampler,
    )