Update modeling_norbert.py
modeling_norbert.py  +2 -2
@@ -140,8 +140,8 @@ class Attention(nn.Module):
 
         position_indices = torch.arange(config.max_position_embeddings, dtype=torch.long).unsqueeze(1) \
                          - torch.arange(config.max_position_embeddings, dtype=torch.long).unsqueeze(0)
-        position_indices = self.make_log_bucket_position(position_indices, config.position_bucket_size, config.max_position_embeddings)
-        position_indices = config.position_bucket_size - 1 + position_indices
+        position_indices = self.make_log_bucket_position(position_indices, config.config.position_bucket_size, config.max_position_embeddings)
+        position_indices = config.config.position_bucket_size - 1 + position_indices
         self.register_buffer("position_indices", position_indices, persistent=True)
 
         self.dropout = nn.Dropout(config.attention_probs_dropout_prob)
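The two changed lines only alter where position_bucket_size is read from; the bucketing itself happens in make_log_bucket_position, which this hunk does not show. For context, below is a minimal sketch of the DeBERTa-style log-bucket mapping that a method with this name and signature typically implements. The body is an assumption for illustration (written as a free function rather than the module method), not the repository's actual code: nearby offsets keep their exact value, while distant offsets are compressed into logarithmically spaced buckets.

import math
import torch

def make_log_bucket_position(relative_pos: torch.Tensor, bucket_size: int, max_position: int) -> torch.Tensor:
    # ASSUMED implementation: standard DeBERTa-style log bucketing,
    # not copied from modeling_norbert.py.
    sign = torch.sign(relative_pos)
    mid = bucket_size // 2
    # Offsets inside (-mid, mid) keep their exact value; the mid - 1
    # placeholder written here is discarded by the final torch.where.
    abs_pos = torch.where(
        (relative_pos < mid) & (relative_pos > -mid),
        torch.full_like(relative_pos, mid - 1),
        torch.abs(relative_pos).clamp(max=max_position - 1),
    )
    # Compress distant offsets logarithmically into buckets mid+1 .. bucket_size-1.
    log_pos = (
        torch.ceil(torch.log(abs_pos / mid) / math.log((max_position - 1) / mid) * (mid - 1)) + mid
    ).long()
    return torch.where(abs_pos <= mid, relative_pos, log_pos * sign)

Under that assumption, the changed __init__ lines compute all pairwise token offsets, bucket them, and shift them by position_bucket_size - 1 so every index lands in [0, 2 * position_bucket_size - 2] and can address a relative-position embedding table:

# Illustrative values, not the repository's config.
max_pos, bucket_size = 512, 32
relative = torch.arange(max_pos, dtype=torch.long).unsqueeze(1) \
         - torch.arange(max_pos, dtype=torch.long).unsqueeze(0)
indices = bucket_size - 1 + make_log_bucket_position(relative, bucket_size, max_pos)
assert indices.min() >= 0 and indices.max() <= 2 * bucket_size - 2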