|
|
@@ -491,9 +491,15 @@ class VAE: |
|
|
|
model_management.unload_model() |
|
|
|
model_management.unload_model() |
|
|
|
self.first_stage_model = self.first_stage_model.to(self.device) |
|
|
|
self.first_stage_model = self.first_stage_model.to(self.device) |
|
|
|
pixel_samples = pixel_samples.movedim(-1,1).to(self.device) |
|
|
|
pixel_samples = pixel_samples.movedim(-1,1).to(self.device) |
|
|
|
samples = utils.tiled_scale(pixel_samples, lambda a: self.first_stage_model.encode(2. * a - 1.).sample() * self.scale_factor, tile_x, tile_y, overlap, upscale_amount = (1/8), out_channels=4) |
|
|
|
|
|
|
|
samples += utils.tiled_scale(pixel_samples, lambda a: self.first_stage_model.encode(2. * a - 1.).sample() * self.scale_factor, tile_x * 2, tile_y // 2, overlap, upscale_amount = (1/8), out_channels=4) |
|
|
|
it_1 = -(pixel_samples.shape[2] // -(tile_y * 2 - overlap)) * -(pixel_samples.shape[3] // -(tile_x // 2 - overlap)) |
|
|
|
samples += utils.tiled_scale(pixel_samples, lambda a: self.first_stage_model.encode(2. * a - 1.).sample() * self.scale_factor, tile_x // 2, tile_y * 2, overlap, upscale_amount = (1/8), out_channels=4) |
|
|
|
it_2 = -(pixel_samples.shape[2] // -(tile_y // 2 - overlap)) * -(pixel_samples.shape[3] // -(tile_x * 2 - overlap)) |
|
|
|
|
|
|
|
it_3 = -(pixel_samples.shape[2] // -(tile_y - overlap)) * -(pixel_samples.shape[3] // -(tile_x - overlap)) |
|
|
|
|
|
|
|
pbar = tqdm(total=(it_1 + it_2 + it_3)) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
samples = utils.tiled_scale(pixel_samples, lambda a: self.first_stage_model.encode(2. * a - 1.).sample() * self.scale_factor, tile_x, tile_y, overlap, upscale_amount = (1/8), out_channels=4, pbar=pbar) |
|
|
|
|
|
|
|
samples += utils.tiled_scale(pixel_samples, lambda a: self.first_stage_model.encode(2. * a - 1.).sample() * self.scale_factor, tile_x * 2, tile_y // 2, overlap, upscale_amount = (1/8), out_channels=4, pbar=pbar) |
|
|
|
|
|
|
|
samples += utils.tiled_scale(pixel_samples, lambda a: self.first_stage_model.encode(2. * a - 1.).sample() * self.scale_factor, tile_x // 2, tile_y * 2, overlap, upscale_amount = (1/8), out_channels=4, pbar=pbar) |
|
|
|
samples /= 3.0 |
|
|
|
samples /= 3.0 |
|
|
|
self.first_stage_model = self.first_stage_model.cpu() |
|
|
|
self.first_stage_model = self.first_stage_model.cpu() |
|
|
|
samples = samples.cpu() |
|
|
|
samples = samples.cpu() |
|
|
|