Spaces:
Sleeping
Sleeping
import torch
import torch.nn as nn
class BiLSTMSentiment(nn.Module):
    """Bidirectional-LSTM sentiment classifier.

    Runs a single-layer BiLSTM over the input sequence, takes the LSTM
    output at the final timestep, then applies BatchNorm -> Dropout ->
    Linear to produce class logits.

    Args:
        input_size: Dimensionality of each timestep's feature vector.
        hidden_size: Hidden size per LSTM direction.
        num_classes: Number of output classes (size of the logit vector).
        dropout: Dropout probability applied before the final linear layer.
    """

    def __init__(self, input_size, hidden_size, num_classes, dropout=0.2):
        super().__init__()
        # Single-layer bidirectional LSTM; outputs are the concatenation of
        # the forward and backward directions -> feature size hidden_size * 2.
        self.bilstm = nn.LSTM(
            input_size=input_size,
            hidden_size=hidden_size,
            batch_first=True,
            bidirectional=True,
        )
        self.batch_norm = nn.BatchNorm1d(hidden_size * 2)
        self.dropout = nn.Dropout(dropout)
        self.fc = nn.Linear(hidden_size * 2, num_classes)

    def forward(self, x):
        """Compute class logits for a batch of sequences.

        Args:
            x: Float tensor of shape [batch, seq_len, input_size].

        Returns:
            Logits of shape [batch, num_classes].
        """
        # Zero initial hidden/cell states are nn.LSTM's documented default,
        # so the previous explicit torch.zeros(...).to(x.device) allocation
        # (and its extra host->device transfer) is unnecessary.
        out, _ = self.bilstm(x)  # out: [batch, seq_len, hidden_size * 2]
        # Take the output at the last timestep: [batch, hidden_size * 2].
        # NOTE(review): at the last timestep the backward direction has seen
        # only the final token; concatenating h_n of both directions is a
        # common alternative, but would change outputs — kept as-is.
        out = out[:, -1, :]
        out = self.batch_norm(out)
        out = self.dropout(out)
        return self.fc(out)