From 72ac9d99fd611477720588901c6abd050ff82c47 Mon Sep 17 00:00:00 2001
From: vlpr
Date: Wed, 15 May 2024 13:39:00 +0000
Subject: [PATCH] fix SDYLutx2 two-stage LUT shapes and forward pass

Rename the SDYLutx2 LUT parameters from stageS_1-style to stage1_S-style,
shrink the stage-1 LUTs to 1x1 outputs so that only stage 2 upscales by
`scale`, use window_size=2 for the 2x2 S pattern, reshape per-rotation
results as (rb, rc, ...) instead of (rb*rc, 1, ...), drop the premature
view to (b, c, h, w) between the two stages, and return the final output
from forward().

---
 src/models/sdylut.py | 42 +++++++++++++++++++++---------------------
 1 file changed, 21 insertions(+), 21 deletions(-)

diff --git a/src/models/sdylut.py b/src/models/sdylut.py
index f5dde9f..db4fae7 100644
--- a/src/models/sdylut.py
+++ b/src/models/sdylut.py
@@ -78,15 +78,15 @@ class SDYLutx2(nn.Module):
         super(SDYLutx2, self).__init__()
         self.scale = scale
         self.quantization_interval = quantization_interval
-        self._extract_pattern_S = PercievePattern(receptive_field_idxes=[[0,0],[0,1],[1,0],[1,1]], center=[0,0], window_size=3)
+        self._extract_pattern_S = PercievePattern(receptive_field_idxes=[[0,0],[0,1],[1,0],[1,1]], center=[0,0], window_size=2)
         self._extract_pattern_D = PercievePattern(receptive_field_idxes=[[0,0],[2,0],[0,2],[2,2]], center=[0,0], window_size=3)
         self._extract_pattern_Y = PercievePattern(receptive_field_idxes=[[0,0],[1,1],[1,2],[2,1]], center=[0,0], window_size=3)
-        self.stageS_1 = nn.Parameter(torch.randint(0, 255, size=(256//quantization_interval+1,)*4 + (scale,scale)).type(torch.float32))
-        self.stageD_1 = nn.Parameter(torch.randint(0, 255, size=(256//quantization_interval+1,)*4 + (scale,scale)).type(torch.float32))
-        self.stageY_1 = nn.Parameter(torch.randint(0, 255, size=(256//quantization_interval+1,)*4 + (scale,scale)).type(torch.float32))
-        self.stageS_2 = nn.Parameter(torch.randint(0, 255, size=(256//quantization_interval+1,)*4 + (scale,scale)).type(torch.float32))
-        self.stageD_2 = nn.Parameter(torch.randint(0, 255, size=(256//quantization_interval+1,)*4 + (scale,scale)).type(torch.float32))
-        self.stageY_2 = nn.Parameter(torch.randint(0, 255, size=(256//quantization_interval+1,)*4 + (scale,scale)).type(torch.float32))
+        self.stage1_S = nn.Parameter(torch.randint(0, 255, size=(256//quantization_interval+1,)*4 + (1,1)).type(torch.float32))
+        self.stage1_D = nn.Parameter(torch.randint(0, 255, size=(256//quantization_interval+1,)*4 + (1,1)).type(torch.float32))
+        self.stage1_Y = nn.Parameter(torch.randint(0, 255, size=(256//quantization_interval+1,)*4 + (1,1)).type(torch.float32))
+        self.stage2_S = nn.Parameter(torch.randint(0, 255, size=(256//quantization_interval+1,)*4 + (scale,scale)).type(torch.float32))
+        self.stage2_D = nn.Parameter(torch.randint(0, 255, size=(256//quantization_interval+1,)*4 + (scale,scale)).type(torch.float32))
+        self.stage2_Y = nn.Parameter(torch.randint(0, 255, size=(256//quantization_interval+1,)*4 + (scale,scale)).type(torch.float32))

     @staticmethod
     def init_from_lut(
@@ -106,28 +106,27 @@ class SDYLutx2(nn.Module):
     def forward(self, x):
         b,c,h,w = x.shape
         x = x.view(b*c, 1, h, w).type(torch.float32)
-        output = torch.zeros([b*c, 1, h, w], dtype=x.dtype, device=x.device) 
+        output = torch.zeros([b*c, 1, h, w], dtype=x.dtype, device=x.device)
         for rotations_count in range(4):
             rotated = torch.rot90(x, k=rotations_count, dims=[-2, -1])
             rb,rc,rh,rw = rotated.shape

             s = forward_unfolded_2x2_input_SxS_output(index=self._extract_pattern_S(rotated), lut=self.stage1_S)
-            s = s.view(rb*rc, 1, rh, rw, 1, 1).permute(0,1,2,4,3,5).reshape(rb*rc, 1, rh, rw)
+            s = s.view(rb, rc, rh, rw, 1, 1).permute(0,1,2,4,3,5).reshape(rb, rc, rh, rw)
             s = torch.rot90(s, k=-rotations_count, dims=[-2, -1])
             output += s

             d = forward_unfolded_2x2_input_SxS_output(index=self._extract_pattern_D(rotated), lut=self.stage1_D)
-            d = d.view(rb*rc, 1, rh, rw, 1, 1).permute(0,1,2,4,3,5).reshape(rb*rc, 1, rh, rw)
+            d = d.view(rb, rc, rh, rw, 1, 1).permute(0,1,2,4,3,5).reshape(rb, rc, rh, rw)
             d = torch.rot90(d, k=-rotations_count, dims=[-2, -1])
             output += d

             y = forward_unfolded_2x2_input_SxS_output(index=self._extract_pattern_Y(rotated), lut=self.stage1_Y)
-            y = y.view(rb*rc, 1, rh, rw, 1, 1).permute(0,1,2,4,3,5).reshape(rb*rc, 1, rh, rw)
+            y = y.view(rb, rc, rh, rw, 1, 1).permute(0,1,2,4,3,5).reshape(rb, rc, rh, rw)
             y = torch.rot90(y, k=-rotations_count, dims=[-2, -1])
             output += y

         output /= 4*3
-        output = output.view(b, c, h, w)
         x = output

         output = torch.zeros([b*c, 1, h*self.scale, w*self.scale], dtype=x.dtype, device=x.device)
@@ -136,32 +135,33 @@ class SDYLutx2(nn.Module):
         for rotations_count in range(4):
             rotated = torch.rot90(x, k=rotations_count, dims=[-2, -1])
             rb,rc,rh,rw = rotated.shape

             s = forward_unfolded_2x2_input_SxS_output(index=self._extract_pattern_S(rotated), lut=self.stage2_S)
-            s = s.view(rb*rc, 1, rh, rw, self.scale, self.scale).permute(0,1,2,4,3,5).reshape(rb*rc, 1, rh*self.scale, rw*self.scale)
+            s = s.view(rb, rc, rh, rw, self.scale, self.scale).permute(0,1,2,4,3,5).reshape(rb, rc, rh*self.scale, rw*self.scale)
             s = torch.rot90(s, k=-rotations_count, dims=[-2, -1])
             output += s

             d = forward_unfolded_2x2_input_SxS_output(index=self._extract_pattern_D(rotated), lut=self.stage2_D)
-            d = d.view(rb*rc, 1, rh, rw, self.scale, self.scale).permute(0,1,2,4,3,5).reshape(rb*rc, 1, rh*self.scale, rw*self.scale)
+            d = d.view(rb, rc, rh, rw, self.scale, self.scale).permute(0,1,2,4,3,5).reshape(rb, rc, rh*self.scale, rw*self.scale)
             d = torch.rot90(d, k=-rotations_count, dims=[-2, -1])
             output += d

             y = forward_unfolded_2x2_input_SxS_output(index=self._extract_pattern_Y(rotated), lut=self.stage2_Y)
-            y = y.view(rb*rc, 1, rh, rw, self.scale, self.scale).permute(0,1,2,4,3,5).reshape(rb*rc, 1, rh*self.scale, rw*self.scale)
+            y = y.view(rb, rc, rh, rw, self.scale, self.scale).permute(0,1,2,4,3,5).reshape(rb, rc, rh*self.scale, rw*self.scale)
             y = torch.rot90(y, k=-rotations_count, dims=[-2, -1])
             output += y

         output /= 4*3
         output = output.view(b, c, h*self.scale, w*self.scale)
+        return output

     def __repr__(self):
         return f"{self.__class__.__name__}" + \
-            f"\n stageS_1 size: {self.stageS_1.shape}" + \
-            f"\n stageD_1 size: {self.stageD_1.shape}" + \
-            f"\n stageY_1 size: {self.stageY_1.shape}" + \
-            f"\n stageS_2 size: {self.stageS_2.shape}" + \
-            f"\n stageD_2 size: {self.stageD_2.shape}" + \
-            f"\n stageY_2 size: {self.stageY_2.shape}"
+            f"\n stage1_S size: {self.stage1_S.shape}" + \
+            f"\n stage1_D size: {self.stage1_D.shape}" + \
+            f"\n stage1_Y size: {self.stage1_Y.shape}" + \
+            f"\n stage2_S size: {self.stage2_S.shape}" + \
+            f"\n stage2_D size: {self.stage2_D.shape}" + \
+            f"\n stage2_Y size: {self.stage2_Y.shape}"
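For readers without the rest of src/models/sdylut.py: the sketch below illustrates the two-stage, rotation-ensemble 4-D LUT lookup that the patched forward() implements. PercievePattern and forward_unfolded_2x2_input_SxS_output are repo helpers not shown in this patch, so the stand-in below re-implements the idea with plain tensor indexing; the function names, the nearest-neighbor quantization, and the square-input usage are assumptions for illustration, not the repo's exact code.

# A minimal sketch of the two-stage rotation-ensemble LUT lookup in the
# patched SDYLutx2.forward(). Helper names here (lut_lookup_2x2,
# rotation_ensemble) are hypothetical; the repo's actual lookup helper may
# interpolate between LUT entries instead of rounding.
import torch
import torch.nn.functional as F

def lut_lookup_2x2(x, lut, q):
    # x: (B, 1, H, W) float in [0, 255]; lut: (L, L, L, L, S, S) with
    # L = 256 // q + 1. Each pixel's 2x2 neighborhood (the S pattern in
    # the patch) selects one (S, S) output patch from the 4-D table.
    b, _, h, w = x.shape
    s = lut.shape[-1]
    xp = F.pad(x, (0, 1, 0, 1), mode="replicate")          # pad right/bottom
    i0 = (xp[:, 0, :h, :w] / q).round().long()             # offset [0,0]
    i1 = (xp[:, 0, :h, 1:w + 1] / q).round().long()        # offset [0,1]
    i2 = (xp[:, 0, 1:h + 1, :w] / q).round().long()        # offset [1,0]
    i3 = (xp[:, 0, 1:h + 1, 1:w + 1] / q).round().long()   # offset [1,1]
    out = lut[i0, i1, i2, i3]                              # (B, H, W, S, S)
    # Interleave per-pixel patches into an (H*S, W*S) image, mirroring the
    # view/permute/reshape chain in the patch.
    return out.permute(0, 1, 3, 2, 4).reshape(b, 1, h * s, w * s)

def rotation_ensemble(x, lut, q):
    # Rotate, look up, rotate back, and average over the 4 rotations, as in
    # each stage of the patched forward() (which also averages the S, D and
    # Y patterns; only the 2x2 S pattern is sketched here, hence /4 not /12).
    acc = 0
    for k in range(4):
        r = torch.rot90(x, k=k, dims=[-2, -1])
        acc = acc + torch.rot90(lut_lookup_2x2(r, lut, q), k=-k, dims=[-2, -1])
    return acc / 4

# Two-stage chaining as in SDYLutx2: stage-1 LUTs keep resolution (1x1
# outputs, the point of this patch), stage-2 LUTs upscale by `scale`.
q, scale = 16, 2
lut1 = torch.randint(0, 255, (17,) * 4 + (1, 1)).float()
lut2 = torch.randint(0, 255, (17,) * 4 + (scale, scale)).float()
x = torch.rand(1, 1, 8, 8) * 255
mid = rotation_ensemble(x, lut1, q).clamp(0, 255)          # (1, 1, 8, 8)
out = rotation_ensemble(mid, lut2, q)                      # (1, 1, 16, 16)

The key shape change in the patch is visible in this chaining: the stage-1 tables now end in (1, 1), so the intermediate image keeps the input resolution and can be indexed again by the stage-2 tables, which end in (scale, scale) and produce the upscaled result.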