# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..utils import DummyObject, requires_backends
class AlbertTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `AlbertTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class BartTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `BartTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class BarthezTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `BarthezTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class BertTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `BertTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class BigBirdTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `BigBirdTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class BlenderbotTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `BlenderbotTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class BlenderbotSmallTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `BlenderbotSmallTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class BloomTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `BloomTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class CamembertTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `CamembertTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class CLIPTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `CLIPTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class CodeLlamaTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `CodeLlamaTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class CodeGenTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `CodeGenTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class ConvBertTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `ConvBertTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class CpmTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `CpmTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class DebertaTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `DebertaTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class DebertaV2TokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `DebertaV2TokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class RetriBertTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `RetriBertTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class DistilBertTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `DistilBertTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class DPRContextEncoderTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `DPRContextEncoderTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class DPRQuestionEncoderTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `DPRQuestionEncoderTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class DPRReaderTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `DPRReaderTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class ElectraTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `ElectraTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class FNetTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `FNetTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class FunnelTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `FunnelTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class GPT2TokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `GPT2TokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class GPTNeoXTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `GPTNeoXTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class GPTNeoXJapaneseTokenizer(metaclass=DummyObject):
    """Dummy placeholder for `GPTNeoXJapaneseTokenizer`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class HerbertTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `HerbertTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class LayoutLMTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `LayoutLMTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class LayoutLMv2TokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `LayoutLMv2TokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class LayoutLMv3TokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `LayoutLMv3TokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class LayoutXLMTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `LayoutXLMTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class LEDTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `LEDTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class LlamaTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `LlamaTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class LongformerTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `LongformerTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class LxmertTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `LxmertTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class MarkupLMTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `MarkupLMTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class MBartTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `MBartTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class MBart50TokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `MBart50TokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class MobileBertTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `MobileBertTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class MPNetTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `MPNetTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class MT5TokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `MT5TokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class MvpTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `MvpTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class NllbTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `NllbTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class NougatTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `NougatTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class OpenAIGPTTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `OpenAIGPTTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class PegasusTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `PegasusTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class RealmTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `RealmTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class ReformerTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `ReformerTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class RemBertTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `RemBertTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class RobertaTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `RobertaTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class RoFormerTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `RoFormerTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class SplinterTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `SplinterTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class SqueezeBertTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `SqueezeBertTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class T5TokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `T5TokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class WhisperTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `WhisperTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class XGLMTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `XGLMTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class XLMRobertaTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `XLMRobertaTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class XLNetTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `XLNetTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])
class PreTrainedTokenizerFast(metaclass=DummyObject):
    """Dummy placeholder for `PreTrainedTokenizerFast`; any instantiation calls `requires_backends`, which enforces the `tokenizers` backend."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["tokenizers"])