# ------------------------------------------------------------------------------------------
# Copyright (c) 2024 Baifeng Shi.
# All rights reserved.
#
# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
# ------------------------------------------------------------------------------------------
import torch
from einops import rearrange
def split_chessboard(x, num_split):
    """Split each image in a batch into a num_split x num_split grid of tiles.

    Dividing x into num_split**2 sub-squares and concatenating all the
    sub-squares along the batch dimension.  Tiles are ordered row-major
    with the original batch innermost: output index
    ``(i * num_split + j) * B + b`` holds tile (row i, col j) of image b.

    Args:
        x: tensor of shape (B, C, H, W); H and W must be divisible by
            num_split (enforced by assertion, as in the rest of this file).
        num_split: number of tiles per spatial side.

    Returns:
        Tensor of shape (num_split**2 * B, C, H // num_split, W // num_split).
    """
    batch, channels, height, width = x.shape
    assert height % num_split == 0 and width % num_split == 0
    tile_h, tile_w = height // num_split, width // num_split
    # (B, C, nh, th, nw, tw) -> (nh, nw, B, C, th, tw) -> ((nh nw B), C, th, tw)
    # equivalent to einops 'b c (nh h) (nw w) -> (nh nw b) c h w'
    tiles = x.view(batch, channels, num_split, tile_h, num_split, tile_w)
    tiles = tiles.permute(2, 4, 0, 1, 3, 5).contiguous()
    return tiles.view(num_split * num_split * batch, channels, tile_h, tile_w)
def merge_chessboard(x, num_split):
    """Merge num_split**2 tiles (stacked on the batch dim) back into images.

    Inverse of split_chessboard: assumes x contains num_split**2 sub-squares
    concatenated along the batch dimension in row-major order with the
    original batch innermost, i.e. input index ``(i * num_split + j) * B + b``
    is tile (row i, col j) of image b.

    Args:
        x: tensor of shape (num_split**2 * B, C, h, w); the batch size must
            be divisible by num_split**2 (enforced by assertion).
        num_split: number of tiles per spatial side.

    Returns:
        Tensor of shape (B, C, num_split * h, num_split * w).
    """
    total, channels, tile_h, tile_w = x.shape
    assert total % (num_split**2) == 0
    batch = total // (num_split**2)
    # ((nh nw B), C, h, w) -> (nh, nw, B, C, h, w) -> (B, C, nh, h, nw, w)
    # equivalent to einops '(nh nw b) c h w -> b c (nh h) (nw w)'
    grid = x.view(num_split, num_split, batch, channels, tile_h, tile_w)
    grid = grid.permute(2, 3, 0, 4, 1, 5).contiguous()
    return grid.view(batch, channels, num_split * tile_h, num_split * tile_w)
def batched_forward(model, x, batch_size=-1):
    """Run model on x, optionally chunked into mini-batches along dim 0.

    Args:
        model: callable mapping a batched tensor to a batched tensor of the
            same leading (batch) dimension.
        x: input tensor, batched along dimension 0.
        batch_size: mini-batch size.  Any non-positive value (the original
            API used -1 as the sentinel) runs the whole input in a single
            forward pass; previously a value of 0 raised inside tensor.split.

    Returns:
        The model outputs concatenated back along dimension 0.
    """
    if batch_size <= 0:
        return model(x)
    # Avoid shadowing x inside the comprehension.
    outs = [model(chunk) for chunk in x.split(batch_size)]
    return torch.cat(outs, dim=0)