layer.py
import torch
import torch.nn as nn
import torch.nn.functional as F


class CNR2d(nn.Module):
    # Conv -> (optional) Norm -> (optional) ReLU -> (optional) Dropout block.
    # An empty list ([]) means "skip this component" / "use the default".
    def __init__(self, nch_in, nch_out, kernel_size=3, stride=1, padding=1,
                 padding_mode='reflection', norm='bnorm', relu=0.0, drop=[], bias=[]):
        super().__init__()

        # With batch normalization the conv bias is redundant, so disable it by default.
        if bias == []:
            bias = False if norm == 'bnorm' else True

        layers = []
        layers += [Conv2d(nch_in, nch_out, kernel_size=kernel_size, stride=stride,
                          padding=padding, padding_mode=padding_mode, bias=bias)]

        if norm != []:
            layers += [Norm2d(nch_out, norm)]
        if relu != []:
            layers += [ReLU(relu)]
        if drop != []:
            layers += [nn.Dropout2d(drop)]

        self.cbr = nn.Sequential(*layers)

    def forward(self, x):
        return self.cbr(x)


class ResBlock(nn.Module):
    # Residual block: two padded convolutions with a skip connection, x + F(x).
    def __init__(self, nch_in, nch_out, kernel_size=3, stride=1, padding=1,
                 padding_mode='reflection', norm='inorm', relu=0.0, drop=[], bias=[]):
        super().__init__()

        # With batch normalization the conv bias is redundant, so disable it by default.
        if bias == []:
            bias = False if norm == 'bnorm' else True

        layers = []

        # 1st conv
        layers += [Padding(padding, padding_mode=padding_mode)]
        layers += [CNR2d(nch_in, nch_out, kernel_size=kernel_size, stride=stride,
                         padding=0, norm=norm, relu=relu, bias=bias)]

        if drop != []:
            layers += [nn.Dropout2d(drop)]

        # 2nd conv (no activation before the residual addition)
        layers += [Padding(padding, padding_mode=padding_mode)]
        layers += [CNR2d(nch_in, nch_out, kernel_size=kernel_size, stride=stride,
                         padding=0, norm=norm, relu=[], bias=bias)]

        self.resblk = nn.Sequential(*layers)

    def forward(self, x):
        return x + self.resblk(x)


class Conv2d(nn.Module):
    # Convolution preceded by an explicit Padding layer, so any padding mode can be used.
    def __init__(self, nch_in, nch_out, kernel_size=3, stride=1, padding=1,
                 padding_mode='reflection', bias=True):
        super(Conv2d, self).__init__()

        layers = []
        layers += [Padding(padding, padding_mode=padding_mode)]
        layers += [nn.Conv2d(nch_in, nch_out, kernel_size=kernel_size, stride=stride,
                             padding=0, bias=bias)]

        self.conv = nn.Sequential(*layers)

    def forward(self, x):
        return self.conv(x)


class Norm2d(nn.Module):
    # 2D normalization selected by mode: 'bnorm' -> BatchNorm2d, 'inorm' -> InstanceNorm2d.
    def __init__(self, nch, norm_mode):
        super(Norm2d, self).__init__()
        if norm_mode == 'bnorm':
            self.norm = nn.BatchNorm2d(nch)
        elif norm_mode == 'inorm':
            self.norm = nn.InstanceNorm2d(nch)

    def forward(self, x):
        return self.norm(x)


class ReLU(nn.Module):
    # relu > 0 -> LeakyReLU with the given negative slope; relu == 0 -> plain ReLU.
    def __init__(self, relu):
        super(ReLU, self).__init__()
        if relu > 0:
            self.relu = nn.LeakyReLU(relu, True)
        elif relu == 0:
            self.relu = nn.ReLU(True)

    def forward(self, x):
        return self.relu(x)


class Padding(nn.Module):
    # 2D padding selected by mode: 'reflection', 'replication', 'constant', or 'zeros'.
    def __init__(self, padding, padding_mode='zeros', value=0):
        super(Padding, self).__init__()
        if padding_mode == 'reflection':
            self.padding = nn.ReflectionPad2d(padding)
        elif padding_mode == 'replication':
            self.padding = nn.ReplicationPad2d(padding)
        elif padding_mode == 'constant':
            self.padding = nn.ConstantPad2d(padding, value)
        elif padding_mode == 'zeros':
            self.padding = nn.ZeroPad2d(padding)

    def forward(self, x):
        return self.padding(x)
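

# --- Minimal smoke test (sketch) ---------------------------------------------
# Not part of the original module: a hedged usage example showing how the blocks
# above are typically composed. The input shape (1, 3, 64, 64) and the channel
# counts are illustrative assumptions, not values taken from the project.
if __name__ == "__main__":
    x = torch.randn(1, 3, 64, 64)

    # Conv -> BatchNorm -> ReLU, keeping spatial size (3x3 conv with padding=1).
    cnr = CNR2d(3, 16, kernel_size=3, stride=1, padding=1, norm='bnorm', relu=0.0)
    y = cnr(x)            # expected shape: (1, 16, 64, 64)

    # Residual block with instance norm; the skip connection preserves the shape.
    res = ResBlock(16, 16, kernel_size=3, stride=1, padding=1, norm='inorm', relu=0.0)
    z = res(y)            # expected shape: (1, 16, 64, 64)

    print(y.shape, z.shape)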