import torch.nn as nn

# ckp_02


class MajorClassifier(nn.Module):
    """MLP classification head mapping an embedding to class logits.

    A stack of ``Linear -> ReLU`` layers whose widths taper toward the
    output (default 768 -> 512 -> 512 -> 256 -> 128 -> 64 -> 9), followed
    by a final ``Linear`` that emits raw logits — no softmax is applied,
    as expected by losses such as ``nn.CrossEntropyLoss``.

    Args:
        input_size: Dimensionality of the input features
            (presumably a 768-dim transformer embedding — confirm at caller).
        output_size: Number of target classes (logit count).
        hidden_sizes: Widths of the hidden layers, in order. The default
            reproduces the original hard-coded architecture exactly.
    """

    def __init__(self, input_size=768, output_size=9,
                 hidden_sizes=(512, 512, 256, 128, 64)):
        super().__init__()
        layers = []
        prev = input_size
        for width in hidden_sizes:
            layers.append(nn.Linear(prev, width))
            layers.append(nn.ReLU())
            prev = width
        # Final projection to logits — intentionally no activation here.
        layers.append(nn.Linear(prev, output_size))
        self.model = nn.Sequential(*layers)

    def forward(self, x):
        """Return logits of shape ``(..., output_size)`` for input ``(..., input_size)``."""
        return self.model(x)