transformers-4.45
#1
by
echarlaix
HF Staff
- opened
- tokenization_chatglm.py +2 -1
tokenization_chatglm.py
CHANGED
|
@@ -217,6 +217,7 @@ class ChatGLMTokenizer(PreTrainedTokenizer):
|
|
| 217 |
max_length: Optional[int] = None,
|
| 218 |
padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD,
|
| 219 |
pad_to_multiple_of: Optional[int] = None,
|
|
|
|
| 220 |
return_attention_mask: Optional[bool] = None,
|
| 221 |
) -> dict:
|
| 222 |
"""
|
|
@@ -244,7 +245,7 @@ class ChatGLMTokenizer(PreTrainedTokenizer):
|
|
| 244 |
"""
|
| 245 |
# Load from model defaults
|
| 246 |
assert self.padding_side == "left"
|
| 247 |
-
|
| 248 |
required_input = encoded_inputs[self.model_input_names[0]]
|
| 249 |
seq_length = len(required_input)
|
| 250 |
|
|
|
|
| 217 |
max_length: Optional[int] = None,
|
| 218 |
padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD,
|
| 219 |
pad_to_multiple_of: Optional[int] = None,
|
| 220 |
+
padding_side: Optional[str] = None,
|
| 221 |
return_attention_mask: Optional[bool] = None,
|
| 222 |
) -> dict:
|
| 223 |
"""
|
|
|
|
| 245 |
"""
|
| 246 |
# Load from model defaults
|
| 247 |
assert self.padding_side == "left"
|
| 248 |
+
assert padding_side is None or padding_side == "left"
|
| 249 |
required_input = encoded_inputs[self.model_input_names[0]]
|
| 250 |
seq_length = len(required_input)
|
| 251 |
|