v2 r90 and flip msblsb models.

main
protsenkovi 6 months ago
parent e8eed1ad73
commit 221e3dc865

@@ -370,6 +370,106 @@ class SRMsbLsbR90Net(SRNetBase):
        raise NotImplementedError
class SRMsbLsbR90v2Net(SRNetBase):
    def __init__(self, hidden_dim = 64, layers_count = 4, scale = 4):
        super(SRMsbLsbR90v2Net, self).__init__()
        self.scale = scale
        self.hidden_dim = hidden_dim
        self.layers_count = layers_count
        # Branch for the 4 most significant bits (multiples of 16 in [0, 240]).
        self.msb_fn = layers.UpscaleBlock(
            in_features=4,
            hidden_dim=hidden_dim,
            layers_count=layers_count,
            upscale_factor=self.scale,
            input_max_value=255,
            output_max_value=255
        )
        # Branch for the 4 least significant bits (values in [0, 15]).
        self.lsb_fn = layers.UpscaleBlock(
            in_features=4,
            hidden_dim=hidden_dim,
            layers_count=layers_count,
            upscale_factor=self.scale,
            input_max_value=15,
            output_max_value=255
        )
        # 2x2 receptive field anchored at the top-left pixel.
        self._extract_pattern_S = layers.PercievePattern(receptive_field_idxes=[[0,0],[0,1],[1,0],[1,1]], center=[0,0], window_size=2)

    def forward(self, x, config=None):
        b,c,h,w = x.shape
        x = x.reshape(b*c, 1, h, w)
        output = torch.zeros([b*c, 1, h*self.scale, w*self.scale], dtype=x.dtype, device=x.device)
        # Rotation ensemble: run the net at 0, 90, 180 and 270 degrees and average.
        for rotations_count in range(4):
            rot_x = torch.rot90(x, k=rotations_count, dims=[2, 3])
            # Split each 8-bit pixel into its low and high nibbles.
            rotated_lsb = rot_x % 16
            rotated_msb = rot_x - rotated_lsb
            output_msb = self.forward_stage(rotated_msb, self.scale, self._extract_pattern_S, self.msb_fn)
            output_lsb = self.forward_stage(rotated_lsb, self.scale, self._extract_pattern_S, self.lsb_fn)
            if config is not None and config.current_iter % config.display_step == 0:
                config.writer.add_histogram('output_lsb', output_lsb.detach().cpu().numpy(), config.current_iter)
                config.writer.add_histogram('output_msb', output_msb.detach().cpu().numpy(), config.current_iter)
            # Rotate the prediction back to the original orientation before accumulating.
            output += torch.rot90(output_msb + output_lsb, k=-rotations_count, dims=[2, 3])
        output /= 4
        x = output
        x = x.reshape(b, c, h*self.scale, w*self.scale)
        return x

    def get_lut_model(self, quantization_interval=16, batch_size=2**10):
        raise NotImplementedError
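The forward pass above relies on two identities: an 8-bit pixel splits exactly into its two nibbles (`x == (x - x % 16) + x % 16`), and rotating by `k` quarter turns and then by `-k` restores the original tensor, which is what lets the four branches be averaged in a common orientation. A minimal sketch verifying both with plain `torch` (the tensor shape here is illustrative, not taken from the repository):

```python
import torch

# A fake 8-bit grayscale patch in [0, 255], stored as float as in the forward pass.
x = torch.randint(0, 256, (1, 1, 4, 4)).float()

# Nibble decomposition: every pixel splits exactly into its two 4-bit halves.
lsb = x % 16      # low nibble, values in [0, 15]
msb = x - lsb     # high-nibble contribution, multiples of 16 in [0, 240]
assert torch.equal(msb + lsb, x)

# Rotation round trip used by the ensemble: rot90 by k, then by -k, is the identity.
for k in range(4):
    assert torch.equal(torch.rot90(torch.rot90(x, k=k, dims=[2, 3]), k=-k, dims=[2, 3]), x)
```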
class SRMsbLsbFlipNet(SRNetBase):
    def __init__(self, hidden_dim = 64, layers_count = 4, scale = 4):
        super(SRMsbLsbFlipNet, self).__init__()
        self.scale = scale
        self.hidden_dim = hidden_dim
        self.layers_count = layers_count
        self.msb_fn = layers.UpscaleBlock(
            in_features=4,
            hidden_dim=hidden_dim,
            layers_count=layers_count,
            upscale_factor=self.scale,
            input_max_value=255,
            output_max_value=255
        )
        self.lsb_fn = layers.UpscaleBlock(
            in_features=4,
            hidden_dim=hidden_dim,
            layers_count=layers_count,
            upscale_factor=self.scale,
            input_max_value=15,
            output_max_value=255
        )
        self._extract_pattern_S = layers.PercievePattern(receptive_field_idxes=[[0,0],[0,1],[1,0],[1,1]], center=[0,0], window_size=2)
        # Flip ensemble: identity, vertical flip, horizontal flip, both.
        # torch.flip is used because PyTorch tensors do not support negative-step slicing.
        self.flip_functions = [
            lambda x: x,
            lambda x: torch.flip(x, dims=[2]),
            lambda x: torch.flip(x, dims=[3]),
            lambda x: torch.flip(x, dims=[2, 3]),
        ]

    def forward(self, x, config=None):
        b,c,h,w = x.shape
        x = x.reshape(b*c, 1, h, w)
        output = torch.zeros([b*c, 1, h*self.scale, w*self.scale], dtype=x.dtype, device=x.device)
        for flip_f in self.flip_functions:
            flipped_x = flip_f(x)
            flipped_lsb = flipped_x % 16
            flipped_msb = flipped_x - flipped_lsb
            output_msb = self.forward_stage(flipped_msb, self.scale, self._extract_pattern_S, self.msb_fn)
            output_lsb = self.forward_stage(flipped_lsb, self.scale, self._extract_pattern_S, self.lsb_fn)
            if config is not None and config.current_iter % config.display_step == 0:
                config.writer.add_histogram('output_lsb', output_lsb.detach().cpu().numpy(), config.current_iter)
                config.writer.add_histogram('output_msb', output_msb.detach().cpu().numpy(), config.current_iter)
            # Each flip is its own inverse, so applying flip_f again maps the
            # prediction back to the original orientation before accumulating.
            output += flip_f(output_msb + output_lsb)
        output /= 4
        x = output
        x = x.reshape(b, c, h*self.scale, w*self.scale)
        return x

    def get_lut_model(self, quantization_interval=16, batch_size=2**10):
        raise NotImplementedError
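The un-flip step in this forward pass works because every transform in `flip_functions` is an involution: applying it twice returns the original tensor, so the same function both applies and undoes the augmentation. A standalone check of that property (a sketch, not code from the repository):

```python
import torch

x = torch.arange(16, dtype=torch.float32).reshape(1, 1, 4, 4)

flip_functions = [
    lambda t: t,
    lambda t: torch.flip(t, dims=[2]),
    lambda t: torch.flip(t, dims=[3]),
    lambda t: torch.flip(t, dims=[2, 3]),
]

# Every flip is its own inverse: flip_f(flip_f(x)) == x.
for flip_f in flip_functions:
    assert torch.equal(flip_f(flip_f(x)), x)
```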
class SRMsbLsb4R90Net(SRNetBase):
    def __init__(self, hidden_dim = 64, layers_count = 4, scale = 4):
        super(SRMsbLsb4R90Net, self).__init__()
