Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -126,21 +126,14 @@ class GRUModel(nn.Module):
|
|
| 126 |
return self.fc(out[:, -1, :])
|
| 127 |
|
| 128 |
class BiLSTMModel(nn.Module):
|
| 129 |
-
def __init__(self
|
| 130 |
super(BiLSTMModel, self).__init__()
|
| 131 |
-
self.lstm = nn.LSTM(
|
| 132 |
-
|
| 133 |
-
hidden_size=hidden_dim,
|
| 134 |
-
num_layers=num_layers,
|
| 135 |
-
batch_first=True,
|
| 136 |
-
dropout=dropout_prob,
|
| 137 |
-
bidirectional=True
|
| 138 |
-
)
|
| 139 |
-
self.fc = nn.Linear(hidden_dim * 2, output_dim) # because bidirectional
|
| 140 |
|
| 141 |
def forward(self, x):
|
| 142 |
-
h0 = torch.zeros(
|
| 143 |
-
c0 = torch.zeros(
|
| 144 |
out, _ = self.lstm(x, (h0, c0))
|
| 145 |
return self.fc(out[:, -1, :])
|
| 146 |
@st.cache_resource(ttl=3600)
|
|
|
|
| 126 |
return self.fc(out[:, -1, :])
|
| 127 |
|
| 128 |
class BiLSTMModel(nn.Module):
    """Bidirectional LSTM regressor.

    Maps a batch of sequences shaped (batch, seq_len, input_size) to one
    output vector per sequence, using the last time step's concatenated
    forward/backward hidden state.
    """

    def __init__(self, input_size=1, hidden_size=100, num_layers=2,
                 dropout=0.2, output_size=1):
        """Build the model.

        All defaults equal the previously hard-coded values, so existing
        ``BiLSTMModel()`` callers get byte-identical architecture.

        Args:
            input_size: features per time step.
            hidden_size: LSTM hidden units per direction.
            num_layers: stacked LSTM layers.
            dropout: inter-layer dropout probability.
            output_size: dimensionality of the final linear projection.
        """
        super(BiLSTMModel, self).__init__()
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.lstm = nn.LSTM(
            input_size=input_size,
            hidden_size=hidden_size,
            num_layers=num_layers,
            batch_first=True,
            # nn.LSTM warns (and ignores dropout) when num_layers == 1.
            dropout=dropout if num_layers > 1 else 0.0,
            bidirectional=True,
        )
        # Bidirectional: forward and backward states are concatenated,
        # so the classifier head sees 2 * hidden_size features.
        self.fc = nn.Linear(hidden_size * 2, output_size)

    def forward(self, x):
        """x: (batch, seq_len, input_size) -> (batch, output_size)."""
        # Initial states need num_layers * 2 (directions) layers. Allocate
        # them on x's device/dtype — the original always built CPU float32
        # tensors, which fails when the model/input live on GPU.
        h0 = torch.zeros(self.num_layers * 2, x.size(0), self.hidden_size,
                         device=x.device, dtype=x.dtype)
        c0 = torch.zeros(self.num_layers * 2, x.size(0), self.hidden_size,
                         device=x.device, dtype=x.dtype)
        out, _ = self.lstm(x, (h0, c0))
        # Only the final time step's output feeds the linear head.
        return self.fc(out[:, -1, :])
|
| 139 |
@st.cache_resource(ttl=3600)
|