Dataset schema:
  text                 string (length 7 to 328k)
  id                   string (length 14 to 166)
  metadata             dict
  __index_level_0__    int64 (0 to 459)
# SE-ResNeXt **SE-ResNeXt** is a variant of [ResNeXt](https://www.paperswithcode.com/method/resneXt) that employs [squeeze-and-excitation blocks](https://paperswithcode.com/method/squeeze-and-excitation-block) to enable the network to perform dynamic channel-wise feature recalibration. ## How do I use this model on...
pytorch-image-models/hfdocs/source/models/seresnext.mdx/0
{ "file_path": "pytorch-image-models/hfdocs/source/models/seresnext.mdx", "repo_id": "pytorch-image-models", "token_count": 2753 }
187
# Quickstart This quickstart is intended for developers who are ready to dive into the code and see an example of how to integrate `timm` into their model training workflow. First, you'll need to install `timm`. For more information on installation, see [Installation](installation). ```bash pip install timm ``` ## ...
pytorch-image-models/hfdocs/source/quickstart.mdx/0
{ "file_path": "pytorch-image-models/hfdocs/source/quickstart.mdx", "repo_id": "pytorch-image-models", "token_count": 2583 }
188
# Validation and Benchmark Results This folder contains validation and benchmark results for the models in this collection. Validation scores are currently only run for models with pretrained weights and ImageNet-1k heads; benchmark numbers are run for all models. ## Datasets There are currently results for the ImageNet va...
pytorch-image-models/results/README.md/0
{ "file_path": "pytorch-image-models/results/README.md", "repo_id": "pytorch-image-models", "token_count": 1173 }
189
from copy import deepcopy __all__ = ['get_img_extensions', 'is_img_extension', 'set_img_extensions', 'add_img_extensions', 'del_img_extensions'] IMG_EXTENSIONS = ('.png', '.jpg', '.jpeg') # singleton, kept public for bwd compat use _IMG_EXTENSIONS_SET = set(IMG_EXTENSIONS) # set version, private, kept in sync de...
pytorch-image-models/timm/data/readers/img_extensions.py/0
{ "file_path": "pytorch-image-models/timm/data/readers/img_extensions.py", "repo_id": "pytorch-image-models", "token_count": 582 }
190
""" Activations A collection of activations fn and modules with a common interface so that they can easily be swapped. All have an `inplace` arg even if not used. Hacked together by / Copyright 2020 Ross Wightman """ import torch from torch import nn as nn from torch.nn import functional as F def swish(x, inplace:...
pytorch-image-models/timm/layers/activations.py/0
{ "file_path": "pytorch-image-models/timm/layers/activations.py", "repo_id": "pytorch-image-models", "token_count": 2012 }
191
""" Create Conv2d Factory Method Hacked together by / Copyright 2020 Ross Wightman """ from .mixed_conv2d import MixedConv2d from .cond_conv2d import CondConv2d from .conv2d_same import create_conv2d_pad def create_conv2d(in_channels, out_channels, kernel_size, **kwargs): """ Select a 2d convolution implementat...
pytorch-image-models/timm/layers/create_conv2d.py/0
{ "file_path": "pytorch-image-models/timm/layers/create_conv2d.py", "repo_id": "pytorch-image-models", "token_count": 652 }
192
""" Interpolation helpers for timm layers RegularGridInterpolator from https://github.com/sbarratt/torch_interpolations Copyright Shane Barratt, Apache 2.0 license """ import torch from itertools import product class RegularGridInterpolator: """ Interpolate data defined on a rectilinear grid with even or uneven ...
pytorch-image-models/timm/layers/interpolate.py/0
{ "file_path": "pytorch-image-models/timm/layers/interpolate.py", "repo_id": "pytorch-image-models", "token_count": 1121 }
193
""" Sin-cos, fourier, rotary position embedding modules and functions Hacked together by / Copyright 2022 Ross Wightman """ import math from typing import List, Tuple, Optional, Union import torch from torch import nn as nn from .grid import ndgrid from .trace_utils import _assert def pixel_freq_bands( num...
pytorch-image-models/timm/layers/pos_embed_sincos.py/0
{ "file_path": "pytorch-image-models/timm/layers/pos_embed_sincos.py", "repo_id": "pytorch-image-models", "token_count": 7200 }
194
import torch import torch.nn as nn import torch.nn.functional as F from .cross_entropy import LabelSmoothingCrossEntropy class JsdCrossEntropy(nn.Module): """ Jensen-Shannon Divergence + Cross-Entropy Loss Based on impl here: https://github.com/google-research/augmix/blob/master/imagenet.py From paper: ...
pytorch-image-models/timm/loss/jsd.py/0
{ "file_path": "pytorch-image-models/timm/loss/jsd.py", "repo_id": "pytorch-image-models", "token_count": 639 }
195
""" Deep Layer Aggregation and DLA w/ Res2Net DLA original adapted from Official Pytorch impl at: https://github.com/ucbdrive/dla DLA Paper: `Deep Layer Aggregation` - https://arxiv.org/abs/1707.06484 Res2Net additions from: https://github.com/gasvn/Res2Net/ Res2Net Paper: `Res2Net: A New Multi-scale Backbone Architec...
pytorch-image-models/timm/models/dla.py/0
{ "file_path": "pytorch-image-models/timm/models/dla.py", "repo_id": "pytorch-image-models", "token_count": 9144 }
196
from functools import partial import torch.nn as nn from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD from ._builder import build_model_with_cfg from ._builder import pretrained_cfg_for_features from ._efficientnet_blocks import SqueezeExcite from ._efficientnet_builder import decode_arch_def, resolve...
pytorch-image-models/timm/models/hardcorenas.py/0
{ "file_path": "pytorch-image-models/timm/models/hardcorenas.py", "repo_id": "pytorch-image-models", "token_count": 4629 }
197
""" Multi-Scale Vision Transformer v2 @inproceedings{li2021improved, title={MViTv2: Improved multiscale vision transformers for classification and detection}, author={Li, Yanghao and Wu, Chao-Yuan and Fan, Haoqi and Mangalam, Karttikeya and Xiong, Bo and Malik, Jitendra and Feichtenhofer, Christoph}, booktitle={...
pytorch-image-models/timm/models/mvitv2.py/0
{ "file_path": "pytorch-image-models/timm/models/mvitv2.py", "repo_id": "pytorch-image-models", "token_count": 19585 }
198
""" ReXNet A PyTorch impl of `ReXNet: Diminishing Representational Bottleneck on Convolutional Neural Network` - https://arxiv.org/abs/2007.00992 Adapted from original impl at https://github.com/clovaai/rexnet Copyright (c) 2020-present NAVER Corp. MIT license Changes for timm, feature extraction, and rounded channe...
pytorch-image-models/timm/models/rexnet.py/0
{ "file_path": "pytorch-image-models/timm/models/rexnet.py", "repo_id": "pytorch-image-models", "token_count": 5784 }
199
""" Relative Position Vision Transformer (ViT) in PyTorch NOTE: these models are experimental / WIP, expect changes Hacked together by / Copyright 2022, Ross Wightman """ import logging import math from functools import partial from typing import Optional, Tuple, Type, Union try: from typing import Literal excep...
pytorch-image-models/timm/models/vision_transformer_relpos.py/0
{ "file_path": "pytorch-image-models/timm/models/vision_transformer_relpos.py", "repo_id": "pytorch-image-models", "token_count": 11608 }
200
""" Lion Optimizer Paper: `Symbolic Discovery of Optimization Algorithms` - https://arxiv.org/abs/2302.06675 Original Impl: https://github.com/google/automl/tree/master/lion """ # Copyright 2023 Google Research. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use t...
pytorch-image-models/timm/optim/lion.py/0
{ "file_path": "pytorch-image-models/timm/optim/lion.py", "repo_id": "pytorch-image-models", "token_count": 3257 }
201
import abc from abc import ABC from typing import Any, Dict, Optional import torch class Scheduler(ABC): """ Parameter Scheduler Base Class A scheduler base class that can be used to schedule any optimizer parameter groups. Unlike the builtin PyTorch schedulers, this is intended to be consistently calle...
pytorch-image-models/timm/scheduler/scheduler.py/0
{ "file_path": "pytorch-image-models/timm/scheduler/scheduler.py", "repo_id": "pytorch-image-models", "token_count": 2361 }
202
""" Exponential Moving Average (EMA) of model updates Hacked together by / Copyright 2020 Ross Wightman """ import logging from collections import OrderedDict from copy import deepcopy from typing import Optional import torch import torch.nn as nn _logger = logging.getLogger(__name__) class ModelEma: """ Model...
pytorch-image-models/timm/utils/model_ema.py/0
{ "file_path": "pytorch-image-models/timm/utils/model_ema.py", "repo_id": "pytorch-image-models", "token_count": 4590 }
203
mod app; mod event; mod generation; mod table; mod utils; use crate::app::App; use crate::event::Event; use crossterm::ExecutableCommand; use std::io; use text_generation_client::{GrammarType, NextTokenChooserParameters, ShardedClient}; use tokenizers::Tokenizer; use tokio::sync::{broadcast, mpsc}; use tui::backend::C...
text-generation-inference/benchmark/src/lib.rs/0
{ "file_path": "text-generation-inference/benchmark/src/lib.rs", "repo_id": "text-generation-inference", "token_count": 1946 }
204
from typing import Dict # Text Generation Inference Errors class ValidationError(Exception): def __init__(self, message: str): super().__init__(message) class GenerationError(Exception): def __init__(self, message: str): super().__init__(message) class OverloadedError(Exception): def _...
text-generation-inference/clients/python/text_generation/errors.py/0
{ "file_path": "text-generation-inference/clients/python/text_generation/errors.py", "repo_id": "text-generation-inference", "token_count": 1080 }
205
# Safetensors Safetensors is a model serialization format for deep learning models. It is [faster](https://huggingface.co/docs/safetensors/speed) and safer compared to other serialization formats like pickle (which is used under the hood in many deep learning libraries). TGI depends on safetensors format mainly to en...
text-generation-inference/docs/source/conceptual/safetensors.md/0
{ "file_path": "text-generation-inference/docs/source/conceptual/safetensors.md", "repo_id": "text-generation-inference", "token_count": 184 }
206
{ "details": { "best_of_sequences": null, "finish_reason": "length", "generated_tokens": 10, "prefill": [ { "id": 1, "logprob": null, "text": "<s>" }, { "id": 338, "logprob": -9.0859375, "text": "is" }, { "id": 21784...
text-generation-inference/integration-tests/models/__snapshots__/test_flash_awq/test_flash_llama_awq_all_params.json/0
{ "file_path": "text-generation-inference/integration-tests/models/__snapshots__/test_flash_awq/test_flash_llama_awq_all_params.json", "repo_id": "text-generation-inference", "token_count": 1165 }
207
{ "details": { "best_of_sequences": null, "finish_reason": "stop_sequence", "generated_tokens": 5, "prefill": [ { "id": 1, "logprob": null, "text": "<s>" }, { "id": 4321, "logprob": -8.6875, "text": "Test" }, { "id":...
text-generation-inference/integration-tests/models/__snapshots__/test_flash_llama/test_flash_llama_all_params.json/0
{ "file_path": "text-generation-inference/integration-tests/models/__snapshots__/test_flash_llama/test_flash_llama_all_params.json", "repo_id": "text-generation-inference", "token_count": 669 }
208
{ "details": { "best_of_sequences": null, "finish_reason": "stop_sequence", "generated_tokens": 6, "prefill": [ { "id": 14402, "logprob": null, "text": "Test" }, { "id": 2581, "logprob": -11.6171875, "text": " request" } ], ...
text-generation-inference/integration-tests/models/__snapshots__/test_flash_phi/test_flash_phi_all_params.json/0
{ "file_path": "text-generation-inference/integration-tests/models/__snapshots__/test_flash_phi/test_flash_phi_all_params.json", "repo_id": "text-generation-inference", "token_count": 690 }
209
import pytest @pytest.fixture(scope="module") def flash_mistral_handle(launcher): with launcher("mistralai/Mistral-7B-Instruct-v0.1") as handle: yield handle @pytest.fixture(scope="module") async def flash_mistral(flash_mistral_handle): await flash_mistral_handle.health(300) return flash_mistral...
text-generation-inference/integration-tests/models/test_flash_mistral.py/0
{ "file_path": "text-generation-inference/integration-tests/models/test_flash_mistral.py", "repo_id": "text-generation-inference", "token_count": 714 }
210
import pytest @pytest.fixture(scope="module") def t5_sharded_handle(launcher): with launcher("google/flan-t5-xxl", num_shard=2) as handle: yield handle @pytest.fixture(scope="module") async def t5_sharded(t5_sharded_handle): await t5_sharded_handle.health(300) return t5_sharded_handle.client @...
text-generation-inference/integration-tests/models/test_t5_sharded.py/0
{ "file_path": "text-generation-inference/integration-tests/models/test_t5_sharded.py", "repo_id": "text-generation-inference", "token_count": 427 }
211
# Router Also named `webserver` throughout the docs. This router handles most of the batching logic: it decides when to pass new `prefill` requests, when to pause `decode` requests, which ones, etc... It uses gRPC to communicate with the shards, which can therefore be kept much simpler and focus on having the...
text-generation-inference/router/README.md/0
{ "file_path": "text-generation-inference/router/README.md", "repo_id": "text-generation-inference", "token_count": 1175 }
212
/// Payload validation logic use crate::validation::ValidationError::{BestOfSampling, BestOfSeed, EmptyInput}; use crate::{GenerateParameters, GenerateRequest, GrammarType}; use jsonschema::{Draft, JSONSchema}; use rand::{thread_rng, Rng}; use serde_json::Value; use text_generation_client::{ GrammarType as ProtoGra...
text-generation-inference/router/src/validation.rs/0
{ "file_path": "text-generation-inference/router/src/validation.rs", "repo_id": "text-generation-inference", "token_count": 13034 }
213
// Adapted from turboderp exllama: https://github.com/turboderp/exllama #define _cuda_buffers_cu #include "cuda_buffers.cuh" CudaBuffers* g_buffers[CUDA_MAX_DEVICES] = {NULL}; // __constant__ half2 q4_table[16][256]; // half2 q4_table_host[16][256]; // bool q4_table_init = false; CudaBuffers::CudaBuffers ( int _...
text-generation-inference/server/exllama_kernels/exllama_kernels/cuda_buffers.cu/0
{ "file_path": "text-generation-inference/server/exllama_kernels/exllama_kernels/cuda_buffers.cu", "repo_id": "text-generation-inference", "token_count": 680 }
214
#include <torch/extension.h> #include <c10/cuda/CUDAGuard.h> #include <ATen/cuda/CUDAContext.h> #include <cuda_runtime.h> #include <cuda_fp16.h> #include <cstdint> #include <cstdio> #include "config.h" #include "cuda/q_matrix.cuh" #include "cuda/q_gemm.cuh" #include "cpp/util.h" // Some decluttering macros #define...
text-generation-inference/server/exllamav2_kernels/exllamav2_kernels/ext.cpp/0
{ "file_path": "text-generation-inference/server/exllamav2_kernels/exllamav2_kernels/ext.cpp", "repo_id": "text-generation-inference", "token_count": 2184 }
215
import torch from text_generation_server.utils.tokens import ( StopSequenceCriteria, StoppingCriteria, FinishReason, batch_top_tokens, ) def test_stop_sequence_criteria(): criteria = StopSequenceCriteria("/test;") assert not criteria("/") assert not criteria("/test") assert criteria("...
text-generation-inference/server/tests/utils/test_tokens.py/0
{ "file_path": "text-generation-inference/server/tests/utils/test_tokens.py", "repo_id": "text-generation-inference", "token_count": 1427 }
216
# coding=utf-8 # Copyright 2022 EleutherAI and the HuggingFace Inc. team. All rights reserved. # # This code is based on EleutherAI's GPT-NeoX library and the GPT-NeoX # and OPT implementations in this library. It has been modified from its # original forms to accommodate minor architectural differences compared # to G...
text-generation-inference/server/text_generation_server/models/custom_modeling/flash_neox_modeling.py/0
{ "file_path": "text-generation-inference/server/text_generation_server/models/custom_modeling/flash_neox_modeling.py", "repo_id": "text-generation-inference", "token_count": 6185 }
217
# Implementation of the PhiModel and PhiForCausalLM classes import torch import torch.distributed import math from torch import nn from typing import Optional, List, Tuple, Any from transformers.configuration_utils import PretrainedConfig from transformers.modeling_outputs import CausalLMOutputWithPast from text_gene...
text-generation-inference/server/text_generation_server/models/custom_modeling/phi_modeling.py/0
{ "file_path": "text-generation-inference/server/text_generation_server/models/custom_modeling/phi_modeling.py", "repo_id": "text-generation-inference", "token_count": 5626 }
218
import torch import torch.distributed from typing import List, Optional, Tuple from transformers import ( AutoTokenizer, AutoConfig, AutoProcessor, ) from text_generation_server.models.custom_modeling.idefics_config import IdeficsConfig from text_generation_server.models.custom_modeling.idefics_processin...
text-generation-inference/server/text_generation_server/models/idefics.py/0
{ "file_path": "text-generation-inference/server/text_generation_server/models/idefics.py", "repo_id": "text-generation-inference", "token_count": 1344 }
219
import torch from typing import List AWQ_PACK_ORDER = [0, 2, 4, 6, 1, 3, 5, 7] REVERSE_AWQ_PACK_ORDER = [0, 4, 1, 5, 2, 6, 3, 7] def pack(imatrix: torch.Tensor, direction: str = "column"): """ Packs a 4-bit integer matrix into a packed 32-bit integer matrix. Args: imatrix (torch.Tensor): matrix ...
text-generation-inference/server/text_generation_server/utils/awq/conversion_utils.py/0
{ "file_path": "text-generation-inference/server/text_generation_server/utils/awq/conversion_utils.py", "repo_id": "text-generation-inference", "token_count": 1384 }
220
import os import json from loguru import logger import torch from transformers import AutoTokenizer from peft import AutoPeftModelForCausalLM, AutoPeftModelForSeq2SeqLM def download_and_unload_peft(model_id, revision, trust_remote_code): torch_dtype = torch.float16 logger.info("Trying to load a Peft model. ...
text-generation-inference/server/text_generation_server/utils/peft.py/0
{ "file_path": "text-generation-inference/server/text_generation_server/utils/peft.py", "repo_id": "text-generation-inference", "token_count": 629 }
221
[package] authors = ["Nicolas Patry <nicolas@huggingface.co>"] edition = "2021" name = "node" version = "0.15.3-dev.0" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [lib] crate-type = ["cdylib"] [dependencies] napi = "2" napi-derive = "2" serde = { v...
tokenizers/bindings/node/Cargo.toml/0
{ "file_path": "tokenizers/bindings/node/Cargo.toml", "repo_id": "tokenizers", "token_count": 200 }
222
import { prependNormalizer, stripAccentsNormalizer, stripNormalizer } from '../../' describe('stripNormalizer', () => { it('instantiates with no parameters', () => { const normalizer = stripNormalizer() expect(normalizer.constructor.name).toEqual('Normalizer') }) it('accepts `undefined` as first paramet...
tokenizers/bindings/node/lib/bindings/normalizers.test.ts/0
{ "file_path": "tokenizers/bindings/node/lib/bindings/normalizers.test.ts", "repo_id": "tokenizers", "token_count": 468 }
223
{ "name": "tokenizers-linux-arm-gnueabihf", "version": "0.13.4-rc1", "os": [ "linux" ], "cpu": [ "arm" ], "main": "tokenizers.linux-arm-gnueabihf.node", "files": [ "tokenizers.linux-arm-gnueabihf.node" ], "description": "Tokenizers platform specific bindings", "keywords": [ "napi-r...
tokenizers/bindings/node/npm/linux-arm-gnueabihf/package.json/0
{ "file_path": "tokenizers/bindings/node/npm/linux-arm-gnueabihf/package.json", "repo_id": "tokenizers", "token_count": 278 }
224
tab_spaces = 2
tokenizers/bindings/node/rustfmt.toml/0
{ "file_path": "tokenizers/bindings/node/rustfmt.toml", "repo_id": "tokenizers", "token_count": 7 }
225
export type TextInputSequence = string export type PreTokenizedInputSequence = string[] export type InputSequence = TextInputSequence | PreTokenizedInputSequence export type TextEncodeInput = TextInputSequence | [TextInputSequence, TextInputSequence] export type PreTokenizedEncodeInput = PreTokenizedInputSequence | [P...
tokenizers/bindings/node/types.ts/0
{ "file_path": "tokenizers/bindings/node/types.ts", "repo_id": "tokenizers", "token_count": 114 }
226
from enum import Enum from typing import List, Tuple, Union Offsets = Tuple[int, int] TextInputSequence = str """A :obj:`str` that represents an input sequence """ PreTokenizedInputSequence = Union[List[str], Tuple[str]] """A pre-tokenized input sequence. Can be one of: - A :obj:`List` of :obj:`str` - A :o...
tokenizers/bindings/python/py_src/tokenizers/__init__.py/0
{ "file_path": "tokenizers/bindings/python/py_src/tokenizers/__init__.py", "repo_id": "tokenizers", "token_count": 984 }
227
# Generated content DO NOT EDIT class PreTokenizer: """ Base class for all pre-tokenizers This class is not supposed to be instantiated directly. Instead, any implementation of a PreTokenizer will return an instance of this class when instantiated. """ def pre_tokenize(self, pretok): ""...
tokenizers/bindings/python/py_src/tokenizers/pre_tokenizers/__init__.pyi/0
{ "file_path": "tokenizers/bindings/python/py_src/tokenizers/pre_tokenizers/__init__.pyi", "repo_id": "tokenizers", "token_count": 9461 }
228
use pyo3::exceptions; use pyo3::prelude::*; use pyo3::type_object::PyTypeInfo; use std::fmt::{Display, Formatter, Result as FmtResult}; use tokenizers::tokenizer::Result; #[derive(Debug)] pub struct PyError(pub String); impl PyError { #[allow(dead_code)] pub fn from(s: &str) -> Self { PyError(String::f...
tokenizers/bindings/python/src/error.rs/0
{ "file_path": "tokenizers/bindings/python/src/error.rs", "repo_id": "tokenizers", "token_count": 531 }
229
from tokenizers import BertWordPieceTokenizer from ..utils import bert_files, data_dir, multiprocessing_with_parallelism class TestBertWordPieceTokenizer: def test_basic_encode(self, bert_files): tokenizer = BertWordPieceTokenizer.from_file(bert_files["vocab"]) # Encode with special tokens by de...
tokenizers/bindings/python/tests/implementations/test_bert_wordpiece.py/0
{ "file_path": "tokenizers/bindings/python/tests/implementations/test_bert_wordpiece.py", "repo_id": "tokenizers", "token_count": 914 }
230
# Post-processors <tokenizerslangcontent> <python> ## BertProcessing [[autodoc]] tokenizers.processors.BertProcessing ## ByteLevel [[autodoc]] tokenizers.processors.ByteLevel ## RobertaProcessing [[autodoc]] tokenizers.processors.RobertaProcessing ## TemplateProcessing [[autodoc]] tokenizers.processors.Template...
tokenizers/docs/source-doc-builder/api/post-processors.mdx/0
{ "file_path": "tokenizers/docs/source-doc-builder/api/post-processors.mdx", "repo_id": "tokenizers", "token_count": 174 }
231
Crates.io ---------------------------------------------------------------------------------------------------- 🤗 Tokenizers is available on `crates.io <https://crates.io/crates/tokenizers>`__. You just need to add it to your :obj:`Cargo.toml`:: tokenizers = "0.10"
tokenizers/docs/source/installation/rust.inc/0
{ "file_path": "tokenizers/docs/source/installation/rust.inc", "repo_id": "tokenizers", "token_count": 74 }
232
{ "name": "create-wasm-app", "version": "0.1.0", "description": "create an app to consume rust-generated wasm packages", "main": "index.js", "bin": { "create-wasm-app": ".bin/create-wasm-app.js" }, "scripts": { "build": "webpack --config webpack.config.js", "start": "...
tokenizers/tokenizers/examples/unstable_wasm/www/package.json/0
{ "file_path": "tokenizers/tokenizers/examples/unstable_wasm/www/package.json", "repo_id": "tokenizers", "token_count": 516 }
233
use super::Pair; use rand::{thread_rng, Rng}; use std::cmp::Ordering; use std::collections::{BinaryHeap, HashMap}; #[derive(Debug, Eq)] struct Merge { pos: usize, rank: u32, new_id: u32, } impl PartialEq for Merge { fn eq(&self, other: &Self) -> bool { self.rank == other.rank && self.pos == ot...
tokenizers/tokenizers/src/models/bpe/word.rs/0
{ "file_path": "tokenizers/tokenizers/src/models/bpe/word.rs", "repo_id": "tokenizers", "token_count": 6488 }
234
use crate::tokenizer::{NormalizedString, Normalizer, Result}; pub use spm_precompiled::Precompiled; use std::cmp::Ordering; use unicode_segmentation::UnicodeSegmentation; fn replace(transformations: &mut Vec<(char, isize)>, old_part: &str, new_part: &str) { let old_count = old_part.chars().count() as isize; le...
tokenizers/tokenizers/src/normalizers/precompiled.rs/0
{ "file_path": "tokenizers/tokenizers/src/normalizers/precompiled.rs", "repo_id": "tokenizers", "token_count": 1432 }
235
use crate::pre_tokenizers::unicode_scripts::scripts::{get_script, Script}; use crate::tokenizer::{normalizer::Range, PreTokenizedString, PreTokenizer, Result}; use crate::utils::macro_rules_attribute; #[derive(Clone, Debug, PartialEq, Eq)] #[macro_rules_attribute(impl_serde_type!)] pub struct UnicodeScripts; impl Uni...
tokenizers/tokenizers/src/pre_tokenizers/unicode_scripts/pre_tokenizer.rs/0
{ "file_path": "tokenizers/tokenizers/src/pre_tokenizers/unicode_scripts/pre_tokenizer.rs", "repo_id": "tokenizers", "token_count": 2584 }
236
use fancy_regex::Regex; use std::error::Error; #[derive(Debug)] pub struct SysRegex { regex: Regex, } impl SysRegex { pub fn find_iter<'r, 't>(&'r self, inside: &'t str) -> Matches<'r, 't> { Matches(self.regex.find_iter(inside)) } pub fn new(regex_str: &str) -> Result<Self, Box<dyn Error + Se...
tokenizers/tokenizers/src/utils/fancy.rs/0
{ "file_path": "tokenizers/tokenizers/src/utils/fancy.rs", "repo_id": "tokenizers", "token_count": 396 }
237
#[cfg(not(debug_assertions))] use assert_approx_eq::assert_approx_eq; use std::collections::HashMap; use std::fs::read_to_string; use std::path::Path; #[cfg(not(debug_assertions))] use tokenizers::models::unigram::Lattice; use tokenizers::models::unigram::Unigram; use tokenizers::models::unigram::UnigramTrainer; use to...
tokenizers/tokenizers/tests/unigram.rs/0
{ "file_path": "tokenizers/tokenizers/tests/unigram.rs", "repo_id": "tokenizers", "token_count": 1698 }
238
FROM rocm/dev-ubuntu-20.04:5.6 # rocm/pytorch has no version with 2.1.0 LABEL maintainer="Hugging Face" ARG DEBIAN_FRONTEND=noninteractive ARG PYTORCH='2.1.0' ARG TORCH_VISION='0.16.0' ARG TORCH_AUDIO='2.1.0' ARG ROCM='5.6' RUN apt update && \ apt install -y --no-install-recommends git libsndfile1-dev tesseract-...
transformers/docker/transformers-pytorch-amd-gpu/Dockerfile/0
{ "file_path": "transformers/docker/transformers-pytorch-amd-gpu/Dockerfile", "repo_id": "transformers", "token_count": 516 }
239
<!--Copyright 2022 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/de/accelerate.md/0
{ "file_path": "transformers/docs/source/de/accelerate.md", "repo_id": "transformers", "token_count": 1929 }
240
<!--Copyright 2020 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/de/testing.md/0
{ "file_path": "transformers/docs/source/de/testing.md", "repo_id": "transformers", "token_count": 19303 }
241
<!--Copyright 2022 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/model_doc/altclip.md/0
{ "file_path": "transformers/docs/source/en/model_doc/altclip.md", "repo_id": "transformers", "token_count": 1400 }
242
<!--Copyright 2022 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/model_doc/bit.md/0
{ "file_path": "transformers/docs/source/en/model_doc/bit.md", "repo_id": "transformers", "token_count": 1005 }
243
<!--Copyright 2023 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/model_doc/clvp.md/0
{ "file_path": "transformers/docs/source/en/model_doc/clvp.md", "repo_id": "transformers", "token_count": 1339 }
244
<!--Copyright 2022 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/model_doc/deformable_detr.md/0
{ "file_path": "transformers/docs/source/en/model_doc/deformable_detr.md", "repo_id": "transformers", "token_count": 1014 }
245
<!--Copyright 2020 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/model_doc/electra.md/0
{ "file_path": "transformers/docs/source/en/model_doc/electra.md", "repo_id": "transformers", "token_count": 2211 }
246
<!--Copyright 2023 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/model_doc/fuyu.md/0
{ "file_path": "transformers/docs/source/en/model_doc/fuyu.md", "repo_id": "transformers", "token_count": 1657 }
247
<!--Copyright 2020 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/model_doc/ibert.md/0
{ "file_path": "transformers/docs/source/en/model_doc/ibert.md", "repo_id": "transformers", "token_count": 947 }
248
<!--Copyright 2023 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/model_doc/llava.md/0
{ "file_path": "transformers/docs/source/en/model_doc/llava.md", "repo_id": "transformers", "token_count": 1228 }
249
<!--Copyright 2020 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/model_doc/mt5.md/0
{ "file_path": "transformers/docs/source/en/model_doc/mt5.md", "repo_id": "transformers", "token_count": 1400 }
250
<!--Copyright 2024 The Qwen Team and The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applic...
transformers/docs/source/en/model_doc/qwen2.md/0
{ "file_path": "transformers/docs/source/en/model_doc/qwen2.md", "repo_id": "transformers", "token_count": 918 }
251
<!--Copyright 2021 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/model_doc/segformer.md/0
{ "file_path": "transformers/docs/source/en/model_doc/segformer.md", "repo_id": "transformers", "token_count": 3177 }
252
<!--Copyright 2022 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/model_doc/swin2sr.md/0
{ "file_path": "transformers/docs/source/en/model_doc/swin2sr.md", "repo_id": "transformers", "token_count": 979 }
253
<!--Copyright 2022 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/model_doc/vit_msn.md/0
{ "file_path": "transformers/docs/source/en/model_doc/vit_msn.md", "repo_id": "transformers", "token_count": 1191 }
254
<!--Copyright 2023 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/model_doc/xlm-v.md/0
{ "file_path": "transformers/docs/source/en/model_doc/xlm-v.md", "repo_id": "transformers", "token_count": 809 }
255
<!--Copyright 2022 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/perf_infer_cpu.md/0
{ "file_path": "transformers/docs/source/en/perf_infer_cpu.md", "repo_id": "transformers", "token_count": 2080 }
256
<!--Copyright 2023 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/quantization.md/0
{ "file_path": "transformers/docs/source/en/quantization.md", "repo_id": "transformers", "token_count": 12286 }
257
<!--Copyright 2024 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/tasks/mask_generation.md/0
{ "file_path": "transformers/docs/source/en/tasks/mask_generation.md", "repo_id": "transformers", "token_count": 2851 }
258
<!--Copyright 2023 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/en/tasks/zero_shot_object_detection.md/0
{ "file_path": "transformers/docs/source/en/tasks/zero_shot_object_detection.md", "repo_id": "transformers", "token_count": 3901 }
259
<!--Copyright 2022 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/es/autoclass_tutorial.md/0
{ "file_path": "transformers/docs/source/es/autoclass_tutorial.md", "repo_id": "transformers", "token_count": 2066 }
260
<!--Copyright 2020 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/es/perplexity.md/0
{ "file_path": "transformers/docs/source/es/perplexity.md", "repo_id": "transformers", "token_count": 3119 }
261
<!--Copyright 2022 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/it/accelerate.md/0
{ "file_path": "transformers/docs/source/it/accelerate.md", "repo_id": "transformers", "token_count": 1891 }
262
<!--Copyright 2022 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/it/perf_infer_cpu.md/0
{ "file_path": "transformers/docs/source/it/perf_infer_cpu.md", "repo_id": "transformers", "token_count": 1497 }
263
<!--Copyright 2022 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/ja/accelerate.md/0
{ "file_path": "transformers/docs/source/ja/accelerate.md", "repo_id": "transformers", "token_count": 2185 }
264
<!--Copyright 2023 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/ja/hpo_train.md/0
{ "file_path": "transformers/docs/source/ja/hpo_train.md", "repo_id": "transformers", "token_count": 2841 }
265
<!--Copyright 2020 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/ja/model_doc/albert.md/0
{ "file_path": "transformers/docs/source/ja/model_doc/albert.md", "repo_id": "transformers", "token_count": 2960 }
266
<!--Copyright 2021 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/ja/model_doc/bigbird_pegasus.md/0
{ "file_path": "transformers/docs/source/ja/model_doc/bigbird_pegasus.md", "repo_id": "transformers", "token_count": 2264 }
267
<!--Copyright 2021 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/ja/model_doc/clip.md/0
{ "file_path": "transformers/docs/source/ja/model_doc/clip.md", "repo_id": "transformers", "token_count": 4545 }
268
<!--Copyright 2022 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/ja/model_doc/decision_transformer.md/0
{ "file_path": "transformers/docs/source/ja/model_doc/decision_transformer.md", "repo_id": "transformers", "token_count": 1073 }
269
<!--Copyright 2023 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/ja/perf_infer_gpu_many.md/0
{ "file_path": "transformers/docs/source/ja/perf_infer_gpu_many.md", "repo_id": "transformers", "token_count": 2561 }
270
<!--- Copyright 2023 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or ...
transformers/docs/source/ja/pr_checks.md/0
{ "file_path": "transformers/docs/source/ja/pr_checks.md", "repo_id": "transformers", "token_count": 5982 }
271
<!--Copyright 2023 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/ja/tasks/monocular_depth_estimation.md/0
{ "file_path": "transformers/docs/source/ja/tasks/monocular_depth_estimation.md", "repo_id": "transformers", "token_count": 2274 }
272
<!--Copyright 2023 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/ja/testing.md/0
{ "file_path": "transformers/docs/source/ja/testing.md", "repo_id": "transformers", "token_count": 22732 }
273
<!--Copyright 2020 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/ko/bertology.md/0
{ "file_path": "transformers/docs/source/ko/bertology.md", "repo_id": "transformers", "token_count": 1557 }
274
<!--- Copyright 2021 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or ...
transformers/docs/source/ko/performance.md/0
{ "file_path": "transformers/docs/source/ko/performance.md", "repo_id": "transformers", "token_count": 3692 }
275
<!--Copyright 2022 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/ko/tasks/image_classification.md/0
{ "file_path": "transformers/docs/source/ko/tasks/image_classification.md", "repo_id": "transformers", "token_count": 11866 }
276
<!--Copyright 2023 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/ko/tasks_explained.md/0
{ "file_path": "transformers/docs/source/ko/tasks_explained.md", "repo_id": "transformers", "token_count": 25797 }
277
<!--Copyright 2020 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed...
transformers/docs/source/pt/custom_models.md/0
{ "file_path": "transformers/docs/source/pt/custom_models.md", "repo_id": "transformers", "token_count": 5915 }
278
<!--- Copyright 2020 The HuggingFace Team. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or a...
transformers/examples/README.md/0
{ "file_path": "transformers/examples/README.md", "repo_id": "transformers", "token_count": 3302 }
279
#!/usr/bin/env python # coding=utf-8 # Copyright 2021 The HuggingFace Team All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-...
transformers/examples/flax/question-answering/run_qa.py/0
{ "file_path": "transformers/examples/flax/question-answering/run_qa.py", "repo_id": "transformers", "token_count": 20250 }
280
#### Fine-tuning BERT on SQuAD1.0 with relative position embeddings The following examples show how to fine-tune BERT models with different relative position embeddings. The BERT model `google-bert/bert-base-uncased` was pretrained with default absolute position embeddings. We provide the following pretrained models...
transformers/examples/legacy/question-answering/README.md/0
{ "file_path": "transformers/examples/legacy/question-answering/README.md", "repo_id": "transformers", "token_count": 1768 }
281
# Copyright 2020 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicabl...
transformers/examples/legacy/seq2seq/sentence_splitter.py/0
{ "file_path": "transformers/examples/legacy/seq2seq/sentence_splitter.py", "repo_id": "transformers", "token_count": 403 }
282
# Copyright 2020 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicabl...
transformers/examples/legacy/seq2seq/train_mbart_cc25_enro.sh/0
{ "file_path": "transformers/examples/legacy/seq2seq/train_mbart_cc25_enro.sh", "repo_id": "transformers", "token_count": 501 }
283
#!/usr/bin/env python # coding=utf-8 # Copyright 2021 The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LI...
transformers/examples/pytorch/question-answering/run_qa_beam_search_no_trainer.py/0
{ "file_path": "transformers/examples/pytorch/question-answering/run_qa_beam_search_no_trainer.py", "repo_id": "transformers", "token_count": 19980 }
284
#!/usr/bin/env python # coding=utf-8 # Copyright 2018 Google AI, Google Brain and Carnegie Mellon University Authors and the HuggingFace Inc. team. # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in co...
transformers/examples/pytorch/text-generation/run_generation.py/0
{ "file_path": "transformers/examples/pytorch/text-generation/run_generation.py", "repo_id": "transformers", "token_count": 6877 }
285
# coding=utf-8 # Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a cop...
transformers/examples/research_projects/adversarial/run_hans.py/0
{ "file_path": "transformers/examples/research_projects/adversarial/run_hans.py", "repo_id": "transformers", "token_count": 3302 }
286