| from collections import defaultdict |
| import pdb |
| import random |
| import numpy as np |
| import itertools |
| import torch |
| from torch.utils.data import Sampler |
|
|
| from datasets.dataset import SeqDeepFakeDataset |
| from models.configuration import Config |
|
|
| |
| |
| |
| |
| |
|
|
| |
| |
|
|
| |
| |
|
|
| |
|
|
| |
| |
| |
| |
|
|
| |
| |
| |
|
|
| |
| |
| |
| |
| |
|
|
| |
| |
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| |
| |
|
|
|
|
| |
class EfficientBalancedBatchSampler(Sampler):
    """Yield batches containing an equal number of samples from every class.

    Classes are keyed by the ``length`` field (5th element) of each dataset
    item.  Minority classes are oversampled (tiled with replacement) so that
    every class can fill the same number of batches as the largest class.
    """

    def __init__(self, dataset, batch_size, samples_per_class=None):
        """
        :param dataset: iterable of 5-tuples whose last element is the class
            label (here: the manipulation-sequence length)
        :param batch_size: total batch size; must equal
            ``samples_per_class * num_classes``
        :param samples_per_class: samples drawn per class per batch; derived
            from ``batch_size`` when ``None``
        """
        super().__init__(None)
        self.batch_size = batch_size

        # Group dataset indices by class label.
        self.class_indices = defaultdict(list)
        for idx, (_, _, _, _, length) in enumerate(dataset):
            self.class_indices[length].append(idx)

        self.classes = list(self.class_indices.keys())
        self.num_classes = len(self.classes)

        if samples_per_class is None:
            assert batch_size % self.num_classes == 0, \
                "batch_size must be divisible by the number of classes"
            self.samples_per_class = batch_size // self.num_classes
        else:
            self.samples_per_class = samples_per_class
            assert batch_size == self.samples_per_class * self.num_classes

        # The largest class dictates the number of batches per epoch.
        self.num_batches, self.class_repeats = self._calculate_repeats()

        # Precomputed (num_classes, num_batches, samples_per_class) index plan.
        self.sampling_plan = self._generate_sampling_plan()

    def _calculate_repeats(self):
        """Return ``(num_batches, repeats)``.

        ``num_batches`` is how many batches the largest class can fill;
        ``repeats[cls]`` is how many times that class's index list must be
        tiled so it covers every batch.
        """
        max_batches = 0
        for cls in self.classes:
            n_samples = len(self.class_indices[cls])
            n_batches = (n_samples + self.samples_per_class - 1) // self.samples_per_class
            max_batches = max(max_batches, n_batches)

        repeats = {}
        total_needed = max_batches * self.samples_per_class
        for cls in self.classes:
            n_samples = len(self.class_indices[cls])
            repeats[cls] = (total_needed + n_samples - 1) // n_samples
        return max_batches, repeats

    def _generate_sampling_plan(self):
        """Build the global sampling-index tensor of shape
        ``(num_classes, num_batches, samples_per_class)``.
        """
        # BUG FIX: the matrix width must be num_batches * samples_per_class
        # (the number of slots each class has to fill).  The previous
        # max(repeats) * samples_per_class under-allocated the columns and
        # collapsed the plan to far fewer batches than the data supports.
        required_length = self.num_batches * self.samples_per_class
        sampling_matrix = np.zeros((self.num_classes, required_length),
                                   dtype=np.int64)

        for i, cls in enumerate(self.classes):
            indices = np.array(self.class_indices[cls])
            np.random.shuffle(indices)

            # Tile minority classes so they span every batch, then reshuffle
            # so the duplicates are spread out.
            repeated = np.tile(indices, self.class_repeats[cls])
            np.random.shuffle(repeated)
            sampling_matrix[i] = repeated[:required_length]

        return sampling_matrix.reshape(self.num_classes, -1, self.samples_per_class)

    def __iter__(self):
        # (num_batches, num_classes, samples_per_class).  Copy: shuffling the
        # transposed *view* would permute the stored plan in place and leak
        # the shuffle across epochs.
        batch_plan = self.sampling_plan.transpose(1, 0, 2).copy()
        np.random.shuffle(batch_plan)

        for batch in batch_plan:
            # Mix the per-class groups inside the batch.
            combined = batch.flatten()
            np.random.shuffle(combined)
            yield combined.tolist()

    def __len__(self):
        # Batches per epoch.
        return self.sampling_plan.shape[1]
|
|
|
|
'''
An uneven sample distribution degrades model performance: for example, the
model reaches higher accuracy on shorter sequences simply because they are
over-represented. A balanced sampler ensures that the samples in each batch
are evenly distributed across classes,
e.g. sequence lengths 0:1:2:3:4 appear in a 1:1:1:1:1 ratio.
'''
class BalancedBatchSampler(Sampler):
    """Yield batches with an equal number of samples from five classes.

    Each batch holds ``batch_size // 5`` indices from every class list,
    shuffled together.  When ``epoch_length * (batch_size // 5)`` exceeds a
    class list, the list is cycled (wrap-around oversampling) so every batch
    stays full and balanced instead of silently shrinking.
    """

    def __init__(self, A_indices, B_indices, C_indices, D_indices, E_indices,
                 batch_size, epoch_length, rank=0, world_size=1):
        """
        :param A_indices: sample index list for class A
        :param B_indices: sample index list for class B
        :param C_indices: sample index list for class C
        :param D_indices: sample index list for class D
        :param E_indices: sample index list for class E
        :param batch_size: batch size; must be divisible by 5
        :param epoch_length: number of batches per epoch
        :param rank: distributed rank; this sampler keeps every
            ``world_size``-th index starting at ``rank``
        :param world_size: number of distributed processes
        """
        super().__init__(None)
        # Validate before deriving per-class counts from batch_size.
        assert batch_size % 5 == 0, "batch_size must be divisible by 5"

        # Shard each class list across distributed workers.
        self.A = A_indices[rank::world_size]
        self.B = B_indices[rank::world_size]
        self.C = C_indices[rank::world_size]
        self.D = D_indices[rank::world_size]
        self.E = E_indices[rank::world_size]

        # Fail fast: an empty class shard would silently unbalance (and with
        # cycling, crash) every batch.
        for name, lst in zip("ABCDE", (self.A, self.B, self.C, self.D, self.E)):
            if not lst:
                raise ValueError(f"class {name} has no samples on rank {rank}")

        random.shuffle(self.A)
        random.shuffle(self.B)
        random.shuffle(self.C)
        random.shuffle(self.D)
        random.shuffle(self.E)

        self.batch_size = batch_size
        self.n = batch_size // 5  # samples per class per batch
        self.epoch_length = epoch_length
        self.epoch = 0
        self.rank = rank
        self.world_size = world_size

    def set_epoch(self, epoch):
        """Reseed RNGs per epoch (rank-offset) for reproducible shuffling."""
        self.epoch = epoch
        random.seed(epoch + self.rank)
        torch.manual_seed(epoch + self.rank)

    def __iter__(self):
        for i in range(self.epoch_length):
            start, stop = self.n * i, self.n * (i + 1)

            # BUG FIX: wrap around (modulo) when a class list is shorter than
            # epoch_length * n.  The previous plain slice silently produced
            # short — eventually empty — batches once a list was exhausted.
            batch = []
            for lst in (self.A, self.B, self.C, self.D, self.E):
                batch.extend(lst[j % len(lst)] for j in range(start, stop))

            # Mix the five class groups within the batch.
            random.shuffle(batch)
            yield batch

    def __len__(self):
        # Batches per epoch.
        return self.epoch_length
|
|
|
|
| |
|
|
| |
| |
| |
| |
| |
| |
|
|
| |
| |
| |
| |
|
|
| |
| |
| |
| |
| |
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| |
| |
| |
| |
| |
| |
|
|
| |
|
|
| |
| |
| |