unet.py
import torch
from torch import nn
import torch.nn.functional as F


# Adapted from "Tunable U-Net implementation in PyTorch"
# https://github.com/jvanvugt/pytorch-unet
class UNet(nn.Module):
    def __init__(
        self,
        in_channels=1,
        n_classes=2,
        depth=5,
        wf=5,
        padding=True,
    ):
        super(UNet, self).__init__()
        self.padding = padding
        self.depth = depth
        prev_channels = in_channels

        # Contracting path: block i outputs 2 ** (wf + i) channels.
        self.down_path = nn.ModuleList()
        for i in range(depth):
            self.down_path.append(
                UNetConvBlock(prev_channels, 2 ** (wf + i), padding)
            )
            prev_channels = 2 ** (wf + i)

        # Bottleneck convolution between the contracting and expanding paths.
        self.midconv = nn.Conv2d(prev_channels, prev_channels, kernel_size=3, padding=1)

        # Expanding path: each block halves the channel count and fuses the
        # matching skip connection from the contracting path.
        self.up_path = nn.ModuleList()
        for i in reversed(range(depth - 1)):
            self.up_path.append(
                UNetUpBlock(prev_channels, 2 ** (wf + i), padding)
            )
            prev_channels = 2 ** (wf + i)

        self.last = nn.Conv2d(prev_channels, n_classes, kernel_size=3, padding=1)

    def forward(self, x):
        blocks = []
        for i, down in enumerate(self.down_path):
            x = down(x)
            if i != len(self.down_path) - 1:
                # Keep the pre-pooling activation as a skip connection.
                blocks.append(x)
                x = F.avg_pool2d(x, 2)

        x = F.leaky_relu(self.midconv(x), negative_slope=0.1)

        for i, up in enumerate(self.up_path):
            x = up(x, blocks[-i - 1])

        return self.last(x)

class UNetConvBlock(nn.Module):
    """Two 3x3 convolutions, each followed by a LeakyReLU activation."""

    def __init__(self, in_size, out_size, padding):
        super(UNetConvBlock, self).__init__()
        block = []
        block.append(nn.Conv2d(in_size, out_size, kernel_size=3, padding=int(padding)))
        block.append(nn.LeakyReLU(0.1))
        block.append(nn.Conv2d(out_size, out_size, kernel_size=3, padding=int(padding)))
        block.append(nn.LeakyReLU(0.1))
        self.block = nn.Sequential(*block)

    def forward(self, x):
        out = self.block(x)
        return out

class UNetUpBlock(nn.Module):
    """Bilinear upsampling, then a conv block on the concatenated skip connection."""

    def __init__(self, in_size, out_size, padding):
        super(UNetUpBlock, self).__init__()
        self.up = nn.Sequential(
            nn.Upsample(mode='bilinear', scale_factor=2),
            nn.Conv2d(in_size, out_size, kernel_size=3, padding=1),
        )
        self.conv_block = UNetConvBlock(in_size, out_size, padding)

    def center_crop(self, layer, target_size):
        # Crop the skip connection to the spatial size of the upsampled feature map
        # (only needed when padding=False and the two sizes no longer match).
        _, _, layer_height, layer_width = layer.size()
        diff_y = (layer_height - target_size[0]) // 2
        diff_x = (layer_width - target_size[1]) // 2
        return layer[
            :, :, diff_y : (diff_y + target_size[0]), diff_x : (diff_x + target_size[1])
        ]

    def forward(self, x, bridge):
        up = self.up(x)
        crop1 = self.center_crop(bridge, up.shape[2:])
        out = torch.cat((up, crop1), 1)
        out = self.conv_block(out)
        return out
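

# Minimal usage sketch (not part of the original file): builds the network with its
# default arguments and runs a dummy forward pass. The 128x128 input size is an
# arbitrary assumption; with padding=True, any spatial size divisible by
# 2 ** (depth - 1) passes through cleanly and the output keeps the input resolution.
if __name__ == "__main__":
    net = UNet(in_channels=1, n_classes=2, depth=5, wf=5, padding=True)
    dummy = torch.randn(1, 1, 128, 128)  # (batch, channels, height, width)
    logits = net(dummy)
    print(logits.shape)  # expected: torch.Size([1, 2, 128, 128])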