Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -142,26 +142,7 @@ def processSpatialEntities(review, nlp):
|
|
| 142 |
return processed_embedding
|
| 143 |
|
| 144 |
|
| 145 |
-
|
| 146 |
-
class Discriminator(nn.Module):
    """GAN-BERT-style discriminator head.

    Maps a fixed-size input representation through an MLP trunk and emits
    ``num_labels + 1`` logits: one per real class plus one extra logit for
    the sample being fake/real.
    """

    def __init__(self, input_size=512, hidden_sizes=None, num_labels=2, dropout_rate=0.1):
        """
        Args:
            input_size: Dimensionality of the input representation.
            hidden_sizes: Widths of the hidden layers; defaults to ``[512]``.
                A ``None`` sentinel replaces the original mutable default
                argument (``[512]``), which was shared across all calls.
            num_labels: Number of real classes; one extra output is added
                for the fake/real decision.
            dropout_rate: Dropout probability applied to the input and
                after every hidden layer.
        """
        super().__init__()
        if hidden_sizes is None:
            hidden_sizes = [512]  # safe per-call default (no shared mutable state)
        self.input_dropout = nn.Dropout(p=dropout_rate)
        # Build the MLP trunk: Linear -> LeakyReLU(0.2) -> Dropout per hidden layer.
        dims = [input_size] + list(hidden_sizes)
        layers = []
        for in_dim, out_dim in zip(dims[:-1], dims[1:]):
            layers.extend([
                nn.Linear(in_dim, out_dim),
                nn.LeakyReLU(0.2, inplace=True),
                nn.Dropout(dropout_rate),
            ])
        self.layers = nn.Sequential(*layers)
        # +1 for the probability of this sample being fake/real.
        self.logit = nn.Linear(dims[-1], num_labels + 1)
        self.softmax = nn.Softmax(dim=-1)

    def forward(self, input_rep):
        """Run the discriminator.

        Args:
            input_rep: Input representation tensor; last dim = ``input_size``.

        Returns:
            Tuple ``(last_rep, logits, probs)``: the final hidden
            representation, the raw ``num_labels + 1`` logits, and their
            softmax probabilities.
        """
        input_rep = self.input_dropout(input_rep)
        last_rep = self.layers(input_rep)
        logits = self.logit(last_rep)
        probs = self.softmax(logits)
        return last_rep, logits, probs
|
| 165 |
|
| 166 |
#dConfig = AutoConfig.from_pretrained("bert-base-uncased")
|
| 167 |
#hidden_size = int(dConfig.hidden_size)
|
|
|
|
| 142 |
return processed_embedding
|
| 143 |
|
| 144 |
|
| 145 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 146 |
|
| 147 |
#dConfig = AutoConfig.from_pretrained("bert-base-uncased")
|
| 148 |
#hidden_size = int(dConfig.hidden_size)
|