Sai-Suraj commited on
Commit
e02c3ed
·
verified ·
1 Parent(s): 2b6bc3f

Fixes an ImportError raised when importing `create_position_ids_from_input_ids` under Transformers v5.

Browse files

In Transformers v5 the function is now kept as a staticmethod inside `transformers.models.xlm_roberta.modeling_xlm_roberta`, so it can no longer be imported as a module-level name. This PR inlines a copy of the function so the code works with both Transformers v5 and older versions (e.g. v4.57.6).
ImportError: cannot import name 'create_position_ids_from_input_ids' from 'transformers.models.xlm_roberta.modeling_xlm_roberta' (/home/suraj/repos/transformers/src/transformers/models/xlm_roberta/modeling_xlm_roberta.py)

Files changed (1) hide show
  1. embedding.py +16 -2
embedding.py CHANGED
@@ -5,8 +5,22 @@
5
 
6
  import torch
7
  import torch.nn as nn
8
- from transformers.models.xlm_roberta.modeling_xlm_roberta import \
9
- create_position_ids_from_input_ids
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
 
11
 
12
  class XLMRobertaEmbeddings(nn.Module):
 
5
 
6
  import torch
7
  import torch.nn as nn
8
+
9
+
10
def create_position_ids_from_input_ids(input_ids, padding_idx, past_key_values_length=0):
    """
    Replace non-padding symbols with their position numbers. Position numbers begin at
    padding_idx + 1; padding symbols keep position id `padding_idx` and are otherwise
    ignored. This is modified from fairseq's `utils.make_positions`.

    Args:
        input_ids (torch.Tensor): Token-id tensor of shape (batch, seq_len); entries
            equal to `padding_idx` are treated as padding.
        padding_idx (int): Id of the padding token.
        past_key_values_length (int, optional): Offset added to the position of every
            non-padding token (used when generating with cached keys/values).
            Defaults to 0.

    Returns:
        torch.Tensor: Long tensor of position ids with the same shape as `input_ids`.
    """
    # The series of casts and type-conversions here are carefully balanced to both
    # work with ONNX export and XLA.
    mask = input_ids.ne(padding_idx).int()
    incremental_indices = (torch.cumsum(mask, dim=1).type_as(mask) + past_key_values_length) * mask
    return incremental_indices.long() + padding_idx
24
 
25
 
26
  class XLMRobertaEmbeddings(nn.Module):