Commit 32a0629
1 Parent(s): a5e9477
Upload roberta_layers.py
roberta_layers.py  CHANGED  (+2 -2)
@@ -29,7 +29,7 @@ from transformers.utils import (
 from transformers import RobertaConfig

 try:
-    import
+    from xformers import ops as xops
     xformers_available=True
 except ImportError as e:
     xformers_available=False
@@ -199,7 +199,7 @@ class RobertaSelfAttention(nn.Module):
         query_layer = self.transpose_for_scores(mixed_query_layer)

         if xformers_available:
-            context_layer =
+            context_layer = xops.memory_efficient_attention(
                 query_layer, key_layer, value_layer, p=self.dropout_prob
            )
         else:
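For reference, a minimal standalone sketch of the xformers call this commit switches to. This is not part of the commit; the dimensions, device, and dtype below are assumptions chosen for illustration, and the inputs follow xformers' documented (batch, seq_len, n_heads, head_dim) layout:

import torch
from xformers import ops as xops

# Assumed toy dimensions, for illustration only.
batch, seq_len, n_heads, head_dim = 2, 128, 12, 64
q = torch.randn(batch, seq_len, n_heads, head_dim, device="cuda", dtype=torch.float16)
k = torch.randn(batch, seq_len, n_heads, head_dim, device="cuda", dtype=torch.float16)
v = torch.randn(batch, seq_len, n_heads, head_dim, device="cuda", dtype=torch.float16)

# p is the attention dropout probability, mirroring p=self.dropout_prob in the diff.
out = xops.memory_efficient_attention(q, k, v, p=0.0)
print(out.shape)  # torch.Size([2, 128, 12, 64])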