| text (string · lengths 1–1.02k) | class_index (int64 · 0–10.8k) | source (string · lengths 85–188) |
|---|---|---|
if revision is not None and not revision.startswith("refs/pr"):
try:
create_branch(repo_id=repo_id, branch=revision, token=token, exist_ok=True)
except HfHubHTTPError as e:
if e.response.status_code == 403 and create_pr:
# If we are creating a PR on a repo we don't have access to, we can't create the branch.
# so let's assume the branch already exists. If it's not the case, an error will be raised when
# calling `create_commit` below.
pass
else:
raise
logger.info(f"Uploading the following files to {repo_id}: {','.join(modified_files)}")
return create_commit(
repo_id=repo_id,
operations=operations,
commit_message=commit_message,
commit_description=commit_description,
token=token,
create_pr=create_pr,
revision=revision,
) | 2,470 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/hub.py |
def push_to_hub(
    self,
    repo_id: str,
    use_temp_dir: Optional[bool] = None,
    commit_message: Optional[str] = None,
    private: Optional[bool] = None,
    token: Optional[Union[bool, str]] = None,
    max_shard_size: Optional[Union[int, str]] = "5GB",
    create_pr: bool = False,
    safe_serialization: bool = True,
    revision: Optional[str] = None,
    commit_description: Optional[str] = None,
    tags: Optional[List[str]] = None,
    **deprecated_kwargs,
) -> str:
    """
    Upload the {object_files} to the 🤗 Model Hub.

    Parameters:
        repo_id (`str`):
            The name of the repository you want to push your {object} to. It should contain your organization name
            when pushing to a given organization.
        use_temp_dir (`bool`, *optional*):
            Whether or not to use a temporary directory to store the files saved before they are pushed to the Hub.
            Will default to `True` if there is no directory named like `repo_id`, `False` otherwise.
        commit_message (`str`, *optional*):
            Message to commit while pushing. Will default to `"Upload {object}"`.
        private (`bool`, *optional*):
            Whether to make the repo private. If `None` (default), the repo will be public unless the
            organization's default is private. This value is ignored if the repo already exists.
        token (`bool` or `str`, *optional*):
            The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
            when running `huggingface-cli login` (stored in `~/.huggingface`). Will default to `True` if `repo_url`
            is not specified.
        max_shard_size (`int` or `str`, *optional*, defaults to `"5GB"`):
            Only applicable for models. The maximum size for a checkpoint before being sharded. Checkpoints shard
            will then be each of size lower than this size. If expressed as a string, needs to be digits followed
            by a unit (like `"5MB"`). We default it to `"5GB"` so that users can easily load models on free-tier
            Google Colab instances without any CPU OOM issues.
        create_pr (`bool`, *optional*, defaults to `False`):
            Whether or not to create a PR with the uploaded files or directly commit.
        safe_serialization (`bool`, *optional*, defaults to `True`):
            Whether or not to convert the model weights in safetensors format for safer serialization.
        revision (`str`, *optional*):
            Branch to push the uploaded files to.
        commit_description (`str`, *optional*):
            The description of the commit that will be created.
        tags (`List[str]`, *optional*):
            List of tags to push on the Hub.

    Examples:

    ```python
    from transformers import {object_class}

    {object} = {object_class}.from_pretrained("google-bert/bert-base-cased")

    # Push the {object} to your namespace with the name "my-finetuned-bert".
    {object}.push_to_hub("my-finetuned-bert")

    # Push the {object} to an organization with the name "my-finetuned-bert".
    {object}.push_to_hub("huggingface/my-finetuned-bert")
    ```
    """
    use_auth_token = deprecated_kwargs.pop("use_auth_token", None)
    ignore_metadata_errors = deprecated_kwargs.pop("ignore_metadata_errors", False)
    if use_auth_token is not None:
        warnings.warn(
            "The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers. Please use `token` instead.",
            FutureWarning,
        )
        if token is not None:
            raise ValueError(
                "`token` and `use_auth_token` are both specified. Please set only the argument `token`."
            )
        token = use_auth_token

    repo_path_or_name = deprecated_kwargs.pop("repo_path_or_name", None)
    if repo_path_or_name is not None:
        # Should use `repo_id` instead of `repo_path_or_name`. When using `repo_path_or_name`, we try to infer
        # repo_id from the folder path, if it exists.
        warnings.warn(
            "The `repo_path_or_name` argument is deprecated and will be removed in v5 of Transformers. Use "
            "`repo_id` instead.",
            FutureWarning,
        )
        if repo_id is not None:
            raise ValueError(
                "`repo_id` and `repo_path_or_name` are both specified. Please set only the argument `repo_id`."
            )
        if os.path.isdir(repo_path_or_name):
            # repo_path: infer repo_id from the path.
            # Bug fix: `repo_id` is necessarily None here (an error is raised above when both arguments
            # are passed), so splitting `repo_id` raised AttributeError; the folder name must come from
            # `repo_path_or_name` instead.
            repo_id = repo_path_or_name.split(os.path.sep)[-1]
            working_dir = repo_id
        else:
            # repo_name: use it as repo_id
            repo_id = repo_path_or_name
            working_dir = repo_id.split("/")[-1]
    else:
        # Repo_id is passed correctly: infer working_dir from it
        working_dir = repo_id.split("/")[-1]

    # Deprecation warning will be sent after for repo_url and organization
    repo_url = deprecated_kwargs.pop("repo_url", None)
    organization = deprecated_kwargs.pop("organization", None)

    repo_id = self._create_repo(
        repo_id, private=private, token=token, repo_url=repo_url, organization=organization
    )

    # Create a new empty model card and eventually tag it
    model_card = create_and_tag_model_card(
        repo_id, tags, token=token, ignore_metadata_errors=ignore_metadata_errors
    )

    if use_temp_dir is None:
        # Work in place only when a local directory with this name already exists.
        use_temp_dir = not os.path.isdir(working_dir)

    with working_or_temp_dir(working_dir=working_dir, use_temp_dir=use_temp_dir) as work_dir:
        files_timestamps = self._get_files_timestamps(work_dir)

        # Save all files.
        self.save_pretrained(work_dir, max_shard_size=max_shard_size, safe_serialization=safe_serialization)

        # Update model card if needed:
        model_card.save(os.path.join(work_dir, "README.md"))

        return self._upload_modified_files(
            work_dir,
            repo_id,
            files_timestamps,
            commit_message=commit_message,
            token=token,
            create_pr=create_pr,
            revision=revision,
            commit_description=commit_description,
        )
class PushInProgress:
    """
    Internal class to keep track of a push in progress (which might contain multiple `Future` jobs).
    """

    def __init__(self, jobs: Optional[List[futures.Future]] = None) -> None:
        # Fix: the annotation previously said `Optional[futures.Future]`, but the attribute is a
        # *list* of futures (it is iterated below and passed to `futures.wait`).
        # NOTE: the incoming list is stored as-is (not copied); callers may append to it.
        self.jobs = [] if jobs is None else jobs

    def is_done(self) -> bool:
        """Return `True` when every tracked job has finished (vacuously `True` for an empty list)."""
        return all(job.done() for job in self.jobs)

    def wait_until_done(self) -> None:
        """Block until all tracked jobs have completed (or been cancelled)."""
        futures.wait(self.jobs)

    def cancel(self) -> None:
        """Cancel all jobs that have not started yet and drop cancelled/finished jobs from the list."""
        self.jobs = [
            job
            for job in self.jobs
            # Cancel the job if it wasn't started yet and remove cancelled/done jobs from the list
            if not (job.cancel() or job.done())
        ]
# Placeholder classes raising a helpful error when the "speech" backend is not installed.
class ASTFeatureExtractor(metaclass=DummyObject):
    _backends = ["speech"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["speech"])


class Speech2TextFeatureExtractor(metaclass=DummyObject):
    _backends = ["speech"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["speech"])
# Placeholder classes raising a helpful error when the "torchvision" backend is not installed.
class BaseImageProcessorFast(metaclass=DummyObject):
    _backends = ["torchvision"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torchvision"])


class DeformableDetrImageProcessorFast(metaclass=DummyObject):
    _backends = ["torchvision"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torchvision"])


class DetrImageProcessorFast(metaclass=DummyObject):
    _backends = ["torchvision"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torchvision"])


class PixtralImageProcessorFast(metaclass=DummyObject):
    _backends = ["torchvision"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torchvision"])


class RTDetrImageProcessorFast(metaclass=DummyObject):
    _backends = ["torchvision"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torchvision"])


class ViTImageProcessorFast(metaclass=DummyObject):
    _backends = ["torchvision"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torchvision"])
class BackboneType(enum.Enum):
    """Identifies which library provides the backbone implementation."""

    TIMM = "timm"
    TRANSFORMERS = "transformers"
class BackboneMixin:
    # Set by `_init_backbone`; identifies whether the backbone comes from timm or transformers.
    backbone_type: Optional[BackboneType] = None

    def _init_timm_backbone(self, config) -> None:
        """
        Initialize the backbone model from timm. The backbone must already be loaded to self._backbone
        """
        if getattr(self, "_backbone", None) is None:
            raise ValueError("self._backbone must be set before calling _init_timm_backbone")

        # These will disagree with the defaults for the transformers models e.g. for resnet50
        # the transformer model has out_features = ['stem', 'stage1', 'stage2', 'stage3', 'stage4']
        # the timm model has out_features = ['act', 'layer1', 'layer2', 'layer3', 'layer4']
        self.stage_names = [stage["module"] for stage in self._backbone.feature_info.info]
        self.num_features = [stage["num_chs"] for stage in self._backbone.feature_info.info]

        # In some timm versions, out_indices reflects the input type of out_indices on the `create_model` call,
        # in later versions >= 1, it is always a tuple
        out_indices = list(self._backbone.feature_info.out_indices)
        out_features = self._backbone.feature_info.module_name()

        # We verify the out indices and out features are valid
        verify_out_features_out_indices(
            out_features=out_features, out_indices=out_indices, stage_names=self.stage_names
        )
        self._out_features, self._out_indices = out_features, out_indices

    def _init_transformers_backbone(self, config) -> None:
        """Initialize `stage_names`, `out_features` and `out_indices` from a transformers backbone config."""
        # Fix: `getattr` with no default is just attribute access (and raises the same
        # AttributeError when missing), so access the attribute directly (ruff B009).
        stage_names = config.stage_names
        out_features = getattr(config, "out_features", None)
        out_indices = getattr(config, "out_indices", None)

        self.stage_names = stage_names
        self._out_features, self._out_indices = get_aligned_output_features_output_indices(
            out_features=out_features, out_indices=out_indices, stage_names=stage_names
        )
        # Number of channels for each stage. This is set in the transformer backbone model init
        self.num_features = None

    def _init_backbone(self, config) -> None:
        """
        Method to initialize the backbone. This method is called by the constructor of the base class after the
        pretrained model weights have been loaded.
        """
        self.config = config

        self.use_timm_backbone = getattr(config, "use_timm_backbone", False)
        self.backbone_type = BackboneType.TIMM if self.use_timm_backbone else BackboneType.TRANSFORMERS

        if self.backbone_type == BackboneType.TIMM:
            self._init_timm_backbone(config)
        elif self.backbone_type == BackboneType.TRANSFORMERS:
            self._init_transformers_backbone(config)
        else:
            raise ValueError(f"backbone_type {self.backbone_type} not supported.")

    @property
    def out_features(self):
        return self._out_features

    @out_features.setter
    def out_features(self, out_features: List[str]):
        """
        Set the out_features attribute. This will also update the out_indices attribute to match the new out_features.
        """
        self._out_features, self._out_indices = get_aligned_output_features_output_indices(
            out_features=out_features, out_indices=None, stage_names=self.stage_names
        )

    @property
    def out_indices(self):
        return self._out_indices

    @out_indices.setter
    def out_indices(self, out_indices: Union[Tuple[int], List[int]]):
        """
        Set the out_indices attribute. This will also update the out_features attribute to match the new out_indices.
        """
        self._out_features, self._out_indices = get_aligned_output_features_output_indices(
            out_features=None, out_indices=out_indices, stage_names=self.stage_names
        )

    @property
    def out_feature_channels(self):
        # the current backbones will output the number of channels for each stage
        # even if that stage is not in the out_features list.
        return {stage: self.num_features[i] for i, stage in enumerate(self.stage_names)}

    @property
    def channels(self):
        return [self.out_feature_channels[name] for name in self.out_features]

    def forward_with_filtered_kwargs(self, *args, **kwargs):
        """Call the model, dropping any kwargs that `forward`'s signature does not accept."""
        signature = dict(inspect.signature(self.forward).parameters)
        filtered_kwargs = {k: v for k, v in kwargs.items() if k in signature}
        return self(*args, **filtered_kwargs)

    def forward(
        self,
        pixel_values,
        output_hidden_states: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ):
        raise NotImplementedError("This method should be implemented by the derived class.")

    def to_dict(self):
        """
        Serializes this instance to a Python dictionary. Override the default `to_dict()` from `PretrainedConfig` to
        include the `out_features` and `out_indices` attributes.
        """
        output = super().to_dict()
        output["out_features"] = output.pop("_out_features")
        output["out_indices"] = output.pop("_out_indices")
        return output
class BackboneConfigMixin:
    """
    A Mixin to support handling the `out_features` and `out_indices` attributes for the backbone configurations.
    """

    @property
    def out_features(self):
        # Backed by a private attribute that is always kept in sync with `_out_indices`.
        return self._out_features

    @out_features.setter
    def out_features(self, out_features: List[str]):
        """
        Set the out_features attribute. This will also update the out_indices attribute to match the new out_features.
        """
        features, indices = get_aligned_output_features_output_indices(
            out_features=out_features, out_indices=None, stage_names=self.stage_names
        )
        self._out_features, self._out_indices = features, indices

    @property
    def out_indices(self):
        return self._out_indices

    @out_indices.setter
    def out_indices(self, out_indices: Union[Tuple[int], List[int]]):
        """
        Set the out_indices attribute. This will also update the out_features attribute to match the new out_indices.
        """
        features, indices = get_aligned_output_features_output_indices(
            out_features=None, out_indices=out_indices, stage_names=self.stage_names
        )
        self._out_features, self._out_indices = features, indices

    def to_dict(self):
        """
        Serializes this instance to a Python dictionary. Override the default `to_dict()` from `PretrainedConfig` to
        include the `out_features` and `out_indices` attributes.
        """
        serialized = super().to_dict()
        serialized["out_features"] = serialized.pop("_out_features")
        serialized["out_indices"] = serialized.pop("_out_indices")
        return serialized
class AlbertTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,483 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class BartTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,484 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class BarthezTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,485 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class BertTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,486 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class BigBirdTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,487 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class BlenderbotTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,488 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class BlenderbotSmallTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,489 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class BloomTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,490 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class CamembertTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,491 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class CLIPTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,492 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class CodeLlamaTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,493 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class CodeGenTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,494 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class CohereTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,495 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class ConvBertTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,496 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class CpmTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,497 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class DebertaTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,498 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class DebertaV2TokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,499 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class RealmTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,500 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class RetriBertTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,501 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class DistilBertTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,502 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class DPRContextEncoderTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,503 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class DPRQuestionEncoderTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,504 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class DPRReaderTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,505 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class ElectraTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,506 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class FNetTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,507 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class FunnelTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,508 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class GemmaTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,509 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class GPT2TokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,510 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class GPTNeoXTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,511 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class GPTNeoXJapaneseTokenizer(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,512 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class HerbertTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,513 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class LayoutLMTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,514 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class LayoutLMv2TokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,515 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class LayoutLMv3TokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,516 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class LayoutXLMTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,517 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class LEDTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,518 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class LlamaTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,519 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class LongformerTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,520 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class LxmertTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,521 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class MarkupLMTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,522 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class MBartTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,523 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class MBart50TokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,524 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class MobileBertTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,525 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class MPNetTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,526 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class MT5TokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,527 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class MvpTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,528 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class NllbTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,529 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class NougatTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,530 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class OpenAIGPTTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,531 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class PegasusTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,532 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class Qwen2TokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,533 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class ReformerTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,534 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class RemBertTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,535 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class RobertaTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,536 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class RoFormerTokenizerFast(metaclass=DummyObject):
_backends = ["tokenizers"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["tokenizers"]) | 2,537 | /Users/nielsrogge/Documents/python_projecten/transformers/src/transformers/utils/dummy_tokenizers_objects.py |
class SeamlessM4TTokenizerFast(metaclass=DummyObject):
    """Placeholder emitted when the `tokenizers` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class SplinterTokenizerFast(metaclass=DummyObject):
    """Placeholder emitted when the `tokenizers` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class SqueezeBertTokenizerFast(metaclass=DummyObject):
    """Placeholder emitted when the `tokenizers` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class T5TokenizerFast(metaclass=DummyObject):
    """Placeholder emitted when the `tokenizers` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class UdopTokenizerFast(metaclass=DummyObject):
    """Placeholder emitted when the `tokenizers` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class WhisperTokenizerFast(metaclass=DummyObject):
    """Placeholder emitted when the `tokenizers` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class XGLMTokenizerFast(metaclass=DummyObject):
    """Placeholder emitted when the `tokenizers` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class XLMRobertaTokenizerFast(metaclass=DummyObject):
    """Placeholder emitted when the `tokenizers` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class XLNetTokenizerFast(metaclass=DummyObject):
    """Placeholder emitted when the `tokenizers` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class PreTrainedTokenizerFast(metaclass=DummyObject):
    """Placeholder emitted when the `tokenizers` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["tokenizers"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class AlbertTokenizer(metaclass=DummyObject):
    """Placeholder emitted when the `sentencepiece` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class BarthezTokenizer(metaclass=DummyObject):
    """Placeholder emitted when the `sentencepiece` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class BartphoTokenizer(metaclass=DummyObject):
    """Placeholder emitted when the `sentencepiece` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class BertGenerationTokenizer(metaclass=DummyObject):
    """Placeholder emitted when the `sentencepiece` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class BigBirdTokenizer(metaclass=DummyObject):
    """Placeholder emitted when the `sentencepiece` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
class CamembertTokenizer(metaclass=DummyObject):
    """Placeholder emitted when the `sentencepiece` backend is unavailable; instantiation runs the `requires_backends` check."""

    _backends = ["sentencepiece"]

    def __init__(self, *args, **kwargs):
        # Reuse the class-level backend list instead of repeating the literal.
        requires_backends(self, self._backends)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.