adalbertojunior committed on
Commit
a5e9477
·
1 Parent(s): 35a3983

Upload roberta_layers.py

Browse files
Files changed (1) hide show
  1. roberta_layers.py +1 -1
roberta_layers.py CHANGED
@@ -199,7 +199,7 @@ class RobertaSelfAttention(nn.Module):
199
  query_layer = self.transpose_for_scores(mixed_query_layer)
200
 
201
  if xformers_available:
202
- context_layer = xformers.memory_efficient_attention(
203
  query_layer, key_layer, value_layer, p=self.dropout_prob
204
  )
205
  else:
 
199
  query_layer = self.transpose_for_scores(mixed_query_layer)
200
 
201
  if xformers_available:
202
+ context_layer = xformers.ops.memory_efficient_attention(
203
  query_layer, key_layer, value_layer, p=self.dropout_prob
204
  )
205
  else: