| | |
| | from ..utils import DummyObject, requires_backends |
| |
|
| |
|
class AlbertTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class BarthezTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class BartphoTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class BertGenerationTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class BigBirdTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class CamembertTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class CodeLlamaTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class CpmTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class DebertaV2Tokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class ErnieMTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class FNetTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class GPTSw3Tokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class LayoutXLMTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class LlamaTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class M2M100Tokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class MarianTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class MBart50Tokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class MBartTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class MLukeTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class MT5Tokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class NllbTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class PegasusTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class PLBartTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class ReformerTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class RemBertTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class Speech2TextTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class SpeechT5Tokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class T5Tokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class XGLMTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class XLMProphetNetTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class XLMRobertaTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|
| |
|
class XLNetTokenizer(metaclass=DummyObject):
    """Dummy stand-in: constructing it hands off to ``requires_backends``
    with the ``sentencepiece`` backend."""

    _backends = ["sentencepiece"]

    def __init__(self, *_args, **_kwargs):
        requires_backends(self, ["sentencepiece"])
| |
|