| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| | from typing import TYPE_CHECKING |
| |
|
| | from ...utils import ( |
| | OptionalDependencyNotAvailable, |
| | _LazyModule, |
| | is_tf_available, |
| | is_tokenizers_available, |
| | is_torch_available, |
| | ) |
| |
|
| |
|
# Mapping of submodule name -> public symbols it defines. _LazyModule (see the
# bottom of this file) uses this to resolve attribute access lazily, so heavy
# backends (torch / TF) are only imported when actually touched.
_import_structure = {
    "configuration_deberta": [
        "DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "DebertaConfig",
        "DebertaOnnxConfig",
    ],
    "tokenization_deberta": ["DebertaTokenizer"],
}

# Backend-dependent submodules are registered only when their optional
# dependency is importable; otherwise they are silently omitted from the
# lazy-import table.
if is_tokenizers_available():
    _import_structure["tokenization_deberta_fast"] = ["DebertaTokenizerFast"]

if is_torch_available():
    _import_structure["modeling_deberta"] = [
        "DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
        "DebertaForMaskedLM",
        "DebertaForQuestionAnswering",
        "DebertaForSequenceClassification",
        "DebertaForTokenClassification",
        "DebertaModel",
        "DebertaPreTrainedModel",
    ]

if is_tf_available():
    _import_structure["modeling_tf_deberta"] = [
        "TF_DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFDebertaForMaskedLM",
        "TFDebertaForQuestionAnswering",
        "TFDebertaForSequenceClassification",
        "TFDebertaForTokenClassification",
        "TFDebertaModel",
        "TFDebertaPreTrainedModel",
    ]
| |
|
| |
|
# Static type checkers take this branch and see the real imports, giving them
# full symbol information. At runtime the branch is skipped and the module is
# replaced by a lazy proxy (see the `else` at the bottom).
# NOTE: the try/raise-OptionalDependencyNotAvailable/except/else shape mirrors
# the runtime guards above; keep it intact — type-checking tooling relies on
# this exact pattern.
if TYPE_CHECKING:
    from .configuration_deberta import DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP, DebertaConfig, DebertaOnnxConfig
    from .tokenization_deberta import DebertaTokenizer

    # Fast tokenizer: only meaningful when the `tokenizers` backend is present.
    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_deberta_fast import DebertaTokenizerFast

    # PyTorch model classes: skipped entirely when torch is unavailable.
    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_deberta import (
            DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
            DebertaForMaskedLM,
            DebertaForQuestionAnswering,
            DebertaForSequenceClassification,
            DebertaForTokenClassification,
            DebertaModel,
            DebertaPreTrainedModel,
        )

    # TensorFlow model classes: skipped entirely when TF is unavailable.
    try:
        if not is_tf_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_tf_deberta import (
            TF_DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFDebertaForMaskedLM,
            TFDebertaForQuestionAnswering,
            TFDebertaForSequenceClassification,
            TFDebertaForTokenClassification,
            TFDebertaModel,
            TFDebertaPreTrainedModel,
        )


else:
    import sys

    # Replace this module object in sys.modules with a _LazyModule proxy that
    # resolves the names declared in _import_structure on first attribute
    # access, deferring the expensive backend imports until they are needed.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
| |
|