-
Notifications
You must be signed in to change notification settings - Fork 5
/
Copy pathedsr.py
60 lines (47 loc) · 1.74 KB
/
edsr.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
"""
Paper: Enhanced Deep Residual Networks for Single Image Super-Resolution
Url: https://arxiv.org/abs/1707.02921
Create by: zh320
Date: 2023/12/16
"""
import torch.nn as nn
from .modules import conv3x3, ConvAct, Upsample
class EDSR(nn.Module):
    """Enhanced Deep Residual Network for single-image super-resolution.

    Paper: "Enhanced Deep Residual Networks for Single Image
    Super-Resolution" (https://arxiv.org/abs/1707.02921).

    Pipeline: shallow feature conv -> B residual blocks -> conv, with a
    long skip connection around the residual body, followed by upsampling
    and a final reconstruction conv.
    """

    def __init__(self, in_channels, out_channels, upscale, B=16, F=64, scale_factor=None,
                 act_type='relu', upsample_type='pixelshuffle'):
        super().__init__()
        # Residual scaling stabilizes training of the deeper variant
        # (B > 16); the shallow baseline uses no scaling (factor 1.0).
        if scale_factor is None:
            scale_factor = 0.1 if B > 16 else 1.0

        self.first_layer = conv3x3(in_channels, F)
        self.res_layers = nn.Sequential(
            *[ResidualBlock(F, scale_factor, act_type) for _ in range(B)]
        )
        self.mid_layer = conv3x3(F, F)
        self.last_layers = nn.Sequential(
            Upsample(F, F, upscale, upsample_type, 3),
            conv3x3(F, out_channels)
        )

    def forward(self, x):
        # Shallow features feed both the residual body and the long skip.
        feats = self.first_layer(x)
        body = self.mid_layer(self.res_layers(feats))
        body = body + feats
        return self.last_layers(body)
class ResidualBlock(nn.Module):
    """EDSR residual block: conv-act-conv with an identity shortcut.

    Output is ``x + scale_factor * conv(x)`` when ``scale_factor < 1``;
    otherwise the branch output is added unscaled. Note there is no batch
    norm, per the EDSR design.
    """

    def __init__(self, channels, scale_factor, act_type):
        super().__init__()
        self.scale_factor = scale_factor
        self.conv = nn.Sequential(
            ConvAct(channels, channels, 3, act_type=act_type),
            conv3x3(channels, channels)
        )

    def forward(self, x):
        out = self.conv(x)
        # Residual scaling (the EDSR stabilization trick) is only applied
        # when a factor below 1 was configured.
        if self.scale_factor < 1:
            out = out * self.scale_factor
        return out + x