| id (int64, 0–190k) | prompt (string, lengths 21–13.4M) | docstring (string, lengths 1–12k, ⌀ = nullable) |
|---|---|---|
2,966 | import importlib
import importlib.metadata
import os
import warnings
from functools import lru_cache
import torch
from packaging import version
from packaging.version import parse
from .environment import parse_flag_from_env, str_to_bool
from .versions import compare_versions, is_torch_version
def _is_package_available... | null |
2,967 | import importlib
import importlib.metadata
import os
import warnings
from functools import lru_cache
import torch
from packaging import version
from packaging.version import parse
from .environment import parse_flag_from_env, str_to_bool
from .versions import compare_versions, is_torch_version
def _is_package_available... | null |
2,968 | import importlib
import importlib.metadata
import os
import warnings
from functools import lru_cache
import torch
from packaging import version
from packaging.version import parse
from .environment import parse_flag_from_env, str_to_bool
from .versions import compare_versions, is_torch_version
def _is_package_available... | null |
2,969 | import importlib
import importlib.metadata
import os
import warnings
from functools import lru_cache
import torch
from packaging import version
from packaging.version import parse
from .environment import parse_flag_from_env, str_to_bool
from .versions import compare_versions, is_torch_version
def _is_package_available... | null |
2,970 | import importlib
import importlib.metadata
import os
import warnings
from functools import lru_cache
import torch
from packaging import version
from packaging.version import parse
from .environment import parse_flag_from_env, str_to_bool
from .versions import compare_versions, is_torch_version
def _is_package_available... | null |
2,971 | import importlib
import importlib.metadata
import os
import warnings
from functools import lru_cache
import torch
from packaging import version
from packaging.version import parse
from .environment import parse_flag_from_env, str_to_bool
from .versions import compare_versions, is_torch_version
def _is_package_available... | null |
2,972 | import importlib
import importlib.metadata
import os
import warnings
from functools import lru_cache
import torch
from packaging import version
from packaging.version import parse
from .environment import parse_flag_from_env, str_to_bool
from .versions import compare_versions, is_torch_version
def _is_package_available... | null |
2,973 | import contextlib
import gc
import importlib
import inspect
import json
import logging
import os
import re
import shutil
import tempfile
import warnings
from collections import OrderedDict, defaultdict
from typing import Dict, List, Optional, Tuple, Union
import packaging
import torch
import torch.nn as nn
from ..state... | Splits a model state dictionary into sub-checkpoints so that the final size of each sub-checkpoint does not exceed a given size. The sub-checkpoints are determined by iterating through the `state_dict` in the order of its keys, so there is no optimization made to make each sub-checkpoint as close as possible to the maxim... |
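The docstring above describes a greedy, key-order sharding strategy. A minimal sketch of that idea (the function name and signature are illustrative, not accelerate's actual API):

```python
import torch.nn as nn

def shard_state_dict(state_dict, max_shard_size):
    """Greedily pack tensors into shards of at most max_shard_size bytes."""
    shards, current, current_size = [], {}, 0
    for key, tensor in state_dict.items():
        size = tensor.numel() * tensor.element_size()
        # Start a new shard when this tensor would overflow the current one.
        if current and current_size + size > max_shard_size:
            shards.append(current)
            current, current_size = {}, 0
        current[key] = tensor
        current_size += size
    if current:
        shards.append(current)
    return shards

# Keys are visited in order, so no bin-packing optimization is attempted.
shards = shard_state_dict(nn.Linear(8, 8).state_dict(), max_shard_size=128)
```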
2,974 | import contextlib
import gc
import importlib
import inspect
import json
import logging
import os
import re
import shutil
import tempfile
import warnings
from collections import OrderedDict, defaultdict
from typing import Dict, List, Optional, Tuple, Union
import packaging
import torch
import torch.nn as nn
from ..state... | Computes the total size of the model and its largest layer |
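A sketch of the size computation this docstring describes, assuming "largest layer" means the largest leaf module (names are illustrative, not accelerate's API):

```python
import torch.nn as nn

def compute_model_sizes(model: nn.Module):
    """Return (total_size_bytes, largest_leaf_module_bytes)."""
    def module_size(module):
        tensors = list(module.parameters()) + list(module.buffers())
        return sum(t.numel() * t.element_size() for t in tensors)

    total = module_size(model)
    # Leaf modules have no children; the largest one bounds per-layer memory.
    leaves = [m for m in model.modules() if not list(m.children())]
    largest = max((module_size(m) for m in leaves), default=0)
    return total, largest

print(compute_model_sizes(nn.Sequential(nn.Linear(16, 32), nn.Linear(32, 4))))
```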
2,975 | import contextlib
import gc
import importlib
import inspect
import json
import logging
import os
import re
import shutil
import tempfile
import warnings
from collections import OrderedDict, defaultdict
from typing import Dict, List, Optional, Tuple, Union
import packaging
import torch
import torch.nn as nn
from ..state... | Checks that a device map covers everything in a given model. Args: model (`torch.nn.Module`): The model to check the device map against. device_map (`Dict[str, Union[int, str, torch.device]]`): The device map to check. |
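A minimal sketch of the coverage check, assuming device-map keys are module-name prefixes (illustrative, not the library's implementation):

```python
import torch.nn as nn

def check_device_map(model: nn.Module, device_map):
    """Raise ValueError if any parameter is not covered by a device_map prefix."""
    prefixes = list(device_map.keys())
    for name, _ in model.named_parameters():
        covered = any(
            prefix == "" or name == prefix or name.startswith(prefix + ".")
            for prefix in prefixes
        )
        if not covered:
            raise ValueError(f"Parameter {name!r} is not covered by the device map.")

model = nn.Sequential(nn.Linear(2, 2), nn.Linear(2, 2))
check_device_map(model, {"0": 0, "1": "cpu"})  # passes: both submodules covered
```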
2,976 | import contextlib
import gc
import importlib
import inspect
import json
import logging
import os
import re
import shutil
import tempfile
import warnings
from collections import OrderedDict, defaultdict
from typing import Dict, List, Optional, Tuple, Union
import packaging
import torch
import torch.nn as nn
from ..state... | Returns the state dictionary for an offloaded model via iterative onloading Args: model (`torch.nn.Module`): The offloaded model we want to save |
2,977 | import contextlib
import gc
import importlib
import inspect
import json
import logging
import os
import re
import shutil
import tempfile
import warnings
from collections import OrderedDict, defaultdict
from typing import Dict, List, Optional, Tuple, Union
import packaging
import torch
import torch.nn as nn
from ..state... | Return a context manager for autocasting mixed precision Args: native_amp (`bool`, *optional*, defaults to False): Whether mixed precision is actually enabled. cache_enabled (`bool`, *optional*, defaults to True): Whether the weight cache inside autocast should be enabled. |
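A sketch of the described context-manager factory, using only public torch APIs (the function name is illustrative; CUDA availability is used as a stand-in for the caller's device logic):

```python
import contextlib
import torch

def get_autocast_context(native_amp: bool = False, cache_enabled: bool = True):
    """Return torch.autocast when AMP is enabled, else a no-op context manager."""
    if native_amp and torch.cuda.is_available():
        return torch.autocast(device_type="cuda", cache_enabled=cache_enabled)
    return contextlib.nullcontext()

with get_autocast_context(native_amp=False):
    y = torch.ones(2) + 1  # runs unmodified when AMP is disabled
```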
2,978 | import argparse
import os
import subprocess
import sys
import warnings
from ast import literal_eval
from shutil import which
from typing import Any, Dict, List, Tuple
import torch
from ..commands.config.config_args import SageMakerConfig
from ..utils import (
DynamoBackend,
PrecisionType,
is_ipex_available,... | Filters out all `accelerate` specific args |
2,979 | import argparse
import os
import subprocess
import sys
import warnings
from ast import literal_eval
from shutil import which
from typing import Any, Dict, List, Tuple
import torch
from ..commands.config.config_args import SageMakerConfig
from ..utils import (
DynamoBackend,
PrecisionType,
is_ipex_available,... | Prepares and returns the command list and an environment with the correct simple launcher environment variables. |
2,980 | import argparse
import os
import subprocess
import sys
import warnings
from ast import literal_eval
from shutil import which
from typing import Any, Dict, List, Tuple
import torch
from ..commands.config.config_args import SageMakerConfig
from ..utils import (
DynamoBackend,
PrecisionType,
is_ipex_available,... | Prepares and returns an environment with the correct multi-GPU environment variables. |
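A rough sketch of the environment-preparation pattern, using only the standard torch.distributed rendezvous variables; accelerate's real launcher sets many more (and differently named) variables:

```python
import os

def prepare_multi_gpu_env(num_processes, main_process_ip="127.0.0.1", main_process_port=29500):
    """Copy the current environment and add distributed rendezvous variables."""
    env = os.environ.copy()
    env["WORLD_SIZE"] = str(num_processes)       # total number of processes
    env["MASTER_ADDR"] = str(main_process_ip)    # rank-0 host
    env["MASTER_PORT"] = str(main_process_port)  # rendezvous port
    return env
```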
2,981 | import argparse
import os
import subprocess
import sys
import warnings
from ast import literal_eval
from shutil import which
from typing import Any, Dict, List, Tuple
import torch
from ..commands.config.config_args import SageMakerConfig
from ..utils import (
DynamoBackend,
PrecisionType,
is_ipex_available,... | Prepares and returns the command list and an environment with the correct DeepSpeed environment variables. |
2,982 | import argparse
import os
import subprocess
import sys
import warnings
from ast import literal_eval
from shutil import which
from typing import Any, Dict, List, Tuple
import torch
from ..commands.config.config_args import SageMakerConfig
from ..utils import (
DynamoBackend,
PrecisionType,
is_ipex_available,... | Prepares and returns an environment with the correct TPU environment variables. |
2,983 | import argparse
import os
import subprocess
import sys
import warnings
from ast import literal_eval
from shutil import which
from typing import Any, Dict, List, Tuple
import torch
from ..commands.config.config_args import SageMakerConfig
from ..utils import (
DynamoBackend,
PrecisionType,
is_ipex_available,... | null |
2,984 | import argparse
import runhouse as rh
import torch
from nlp_example import training_function
from accelerate.utils import PrepareForLaunch, patch_environment
def training_function(config, args):
# Initialize accelerator
accelerator = Accelerator(cpu=args.cpu, mixed_precision=args.mixed_precision)
# Sample ... | null |
2,985 | import argparse
import os
import re
import numpy as np
import PIL
import torch
from timm import create_model
from torch.optim.lr_scheduler import OneCycleLR
from torch.utils.data import DataLoader, Dataset
from torchvision.transforms import Compose, RandomResizedCrop, Resize, ToTensor
from accelerate import Accelerator... | null |
2,986 | import argparse
import os
import re
import numpy as np
import PIL
import torch
from timm import create_model
from torch.optim.lr_scheduler import OneCycleLR
from torch.utils.data import DataLoader, Dataset
from torchvision.transforms import Compose, RandomResizedCrop, Resize, ToTensor
from accelerate import Accelerator... | null |
2,987 | import argparse
import evaluate
import torch
from datasets import load_dataset
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed
from accelerate import Accelerator, DistributedType
M... | null |
2,988 | import argparse
import os
import evaluate
import torch
from datasets import load_dataset
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed
from accelerate import Accelerator, Distrib... | null |
2,989 | import argparse
import gc
import os
import threading
import evaluate
import psutil
import torch
from datasets import load_dataset
from torch.distributed.fsdp.fully_sharded_data_parallel import FullOptimStateDictConfig, FullStateDictConfig
from torch.utils.data import DataLoader
from transformers import (
AutoModelF... | null |
2,990 | import argparse
import os
import evaluate
import torch
from datasets import load_dataset
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed
from accelerate import Accelerator, Distrib... | null |
2,991 | import argparse
import json
import logging
import math
import os
import random
from itertools import chain
from pathlib import Path
import datasets
import torch
import transformers
from datasets import load_dataset
from huggingface_hub import Repository
from torch.utils.data import DataLoader
from tqdm.auto import tqdm... | null |
2,992 | import argparse
import os
import evaluate
import torch
from datasets import load_dataset
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed
from accelerate import Accelerator, Distrib... | null |
2,993 | import argparse
import os
import evaluate
import torch
from datasets import load_dataset
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed
from accelerate import Accelerator, Distrib... | null |
2,994 | import argparse
import os
import evaluate
import torch
from datasets import load_dataset
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed
from accelerate import Accelerator, Distrib... | null |
2,995 | import argparse
import json
import logging
import math
import os
import random
from itertools import chain
from pathlib import Path
import datasets
import torch
import transformers
from datasets import load_dataset
from huggingface_hub import Repository
from torch.utils.data import DataLoader
from tqdm.auto import tqdm... | null |
2,996 | import argparse
import json
import logging
import math
import os
import random
from itertools import chain
from pathlib import Path
import datasets
import torch
import transformers
from datasets import load_dataset
from huggingface_hub import Repository
from torch.utils.data import DataLoader
from tqdm.auto import tqdm... | null |
2,997 | import argparse
import os
import evaluate
import torch
from datasets import load_dataset
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed
from accelerate import Accelerator, Distrib... | null |
2,998 | import argparse
from typing import List
import evaluate
import numpy as np
import torch
from datasets import DatasetDict, load_dataset
from sklearn.model_selection import StratifiedKFold
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, Au... | null |
2,999 | import argparse
import os
import evaluate
import torch
from datasets import load_dataset
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed
from accelerate import Accelerator
from acc... | null |
3,000 | import argparse
import os
import evaluate
import torch
from datasets import load_dataset
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed
from accelerate import Accelerator, Distrib... | null |
3,001 | import gc
import threading
import time
import psutil
import torch
cpu_peak_tracker = PeakCPUMemory()
def start_measure():
# Time
measures = {"time": time.time()}
gc.collect()
torch.cuda.empty_cache()
# CPU mem
measures["cpu"] = psutil.Process().memory_info().rss
cpu_peak_tracker.start()
... | null |
3,002 | import gc
import threading
import time
import psutil
import torch
cpu_peak_tracker = PeakCPUMemory()
def end_measure(start_measures):
# Time
measures = {"time": time.time() - start_measures["time"]}
gc.collect()
torch.cuda.empty_cache()
# CPU mem
measures["cpu"] = (psutil.Process().memory_inf... | null |
3,003 | import gc
import threading
import time
import psutil
import torch
def log_measures(measures, description):
print(f"{description}:")
print(f"- Time: {measures['time']:.2f}s")
for i in range(torch.cuda.device_count()):
print(f"- GPU {i} allocated: {measures[str(i)]:.2f}MiB")
peak = measures[f... | null |
3,004 | import argparse
import time
import torch
import transformers
from measures_util import end_measure, log_measures, start_measure
from transformers import AutoConfig, AutoModelForCausalLM, AutoModelForSeq2SeqLM, AutoTokenizer
from accelerate.utils import compute_module_sizes
DEFAULT_MODELS = {
"gpt-j-6b": {"is_causal... | null |
3,005 | import typing
from decimal import Decimal
from borb.io.read.types import AnyPDFType
from borb.pdf.canvas.geometry.line_segment import LineSegment
from borb.pdf.canvas.operator.canvas_operator import CanvasOperator
class LineSegment:
"""
This class represents a line segment
"""
#
# CONSTRUCTOR
... | null |
3,006 | ADOBE_STANDARD_ENCODING_LOOKUP = [
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55,
56, 57, 58, 59, 60, 61, 62, 63,
64, 65, 66, 67, 68, 69, 7... | This function decodes bytes using StandardEncoding :param byte_input: the input :return: a str (representing the decoded bytes) |
3,007 | ADOBE_STANDARD_ENCODING_LOOKUP = [
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55,
56, 57, 58, 59, 60, 61, 62, 63,
64, 65, 66, 67, 68, 69, 7... | This function encodes a str using StandardEncoding :param str_input: the input :return: bytes (representing the encoded str) |
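The decode/encode pairs in rows 3,006 through 3,011 all follow the same 256-entry lookup-table pattern. A compact sketch with a hypothetical miniature table (the real tables map every PDF byte code to a Unicode code point, with 0 marking unmapped codes):

```python
# Hypothetical 256-entry table: bytes 65..67 map to 'A'..'C', everything else unmapped.
LOOKUP = [0] * 65 + [65, 66, 67] + [0] * 188

def decode_with_lookup(byte_input: bytes, lookup=LOOKUP) -> str:
    """Map each byte through the table, skipping unmapped (zero) entries."""
    return "".join(chr(lookup[b]) for b in byte_input if lookup[b] != 0)

def encode_with_lookup(str_input: str, lookup=LOOKUP) -> bytes:
    """Invert the table once, then map each character back to its byte code."""
    reverse = {cp: byte for byte, cp in enumerate(lookup) if cp != 0}
    return bytes(reverse[ord(c)] for c in str_input if ord(c) in reverse)

assert decode_with_lookup(b"ABC") == "ABC"
assert encode_with_lookup("ABC") == b"ABC"
```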
3,008 | SYMBOL_ENCODING_LOOKUP = [
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
32, 33, 8704, 35, 8707, 37, 38, 8715,
40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55,
56, 57, 58, 59, 60, 61, 62, 63,
8773, 913, 914, 935, 916, 9... | This function decodes bytes using SymbolEncoding :param byte_input: the input :return: a str (representing the decoded bytes) |
3,009 | SYMBOL_ENCODING_LOOKUP = [
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
32, 33, 8704, 35, 8707, 37, 38, 8715,
40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55,
56, 57, 58, 59, 60, 61, 62, 63,
8773, 913, 914, 935, 916, 9... | This function encodes a str using SymbolEncoding :param str_input: the input :return: bytes (representing the encoded str) |
3,010 | ZAPFDINGBATS_ENCODING_LOOKUP = [
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
32, 9985, 9986, 9987, 9988, 9742, 9990, 9991,
9992, 9993, 9755, 9758, 9996, 9997, 9998, 9999,
10000, 10001, 10002, 10003, 10004, 10005, 10006, 10007,
10008, 10... | This function decodes bytes using ZapfDingbats :param byte_input: the input :return: a str (representing the decoded bytes) |
3,011 | ZAPFDINGBATS_ENCODING_LOOKUP = [
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
32, 9985, 9986, 9987, 9988, 9742, 9990, 9991,
9992, 9993, 9755, 9758, 9996, 9997, 9998, 9999,
10000, 10001, 10002, 10003, 10004, 10005, 10006, 10007,
10008, 10... | This function encodes a str using ZapfDingbats :param str_input: the input :return: bytes (representing the encoded str) |
3,012 | import typing
from decimal import Decimal
from borb.io.filter.ascii85_decode import ASCII85Decode
from borb.io.filter.flate_decode import FlateDecode
from borb.io.filter.lzw_decode import LZWDecode
from borb.io.filter.run_length_decode import RunLengthDecode
from borb.io.read.types import Dictionary
from borb.io.read.t... | This function decodes a Stream, applying the filters specified in the Filter entry of its stream dictionary :param s: the input Stream object :return: the input Stream, modified to contain the decoded bytes |
3,013 | import math
import warnings
from typing import Optional
import torch
import torch.nn as nn
from einops import rearrange
from torch import nn
from .norm import LPLayerNorm
def scaled_multihead_dot_product_attention(query, key, value, n_heads, softmax_scale=None, attn_bias=None, key_padding_mask=None, is_causal=False, d... | null |
3,014 | import math
import warnings
from typing import Optional
import torch
import torch.nn as nn
from einops import rearrange
from torch import nn
from .norm import LPLayerNorm
def _reset_is_causal(num_query_tokens: int, num_key_tokens: int, original_is_causal: bool):
if original_is_causal and num_query_tokens != num_key... | null |
3,015 | import math
import warnings
from typing import Optional
import torch
import torch.nn as nn
from einops import rearrange
from torch import nn
from .norm import LPLayerNorm
def _reset_is_causal(num_query_tokens: int, num_key_tokens: int, original_is_causal: bool):
if original_is_causal and num_query_tokens != num_key... | null |
3,016 | import math
import warnings
from typing import Optional
import torch
import torch.nn as nn
from einops import rearrange
from torch import nn
from .norm import LPLayerNorm
def attn_bias_shape(attn_impl, n_heads, seq_len, alibi, prefix_lm, causal, use_sequence_id):
if attn_impl == 'flash':
return None
el... | null |
3,017 | import math
import warnings
from typing import Optional
import torch
import torch.nn as nn
from einops import rearrange
from torch import nn
from .norm import LPLayerNorm
def build_alibi_bias(n_heads, seq_len, full=False, alibi_bias_max=8, device=None, dtype=None):
def build_attn_bias(attn_impl, attn_bias, n_heads, se... | null |
3,018 | import torch
def _cast_if_autocast_enabled(tensor):
if torch.is_autocast_enabled():
if tensor.device.type == 'cuda':
dtype = torch.get_autocast_gpu_dtype()
elif tensor.device.type == 'cpu':
dtype = torch.get_autocast_cpu_dtype()
else:
raise NotImplemented... | null |
3,019 | import torch
def rms_norm(x, weight=None, eps=1e-05):
output = x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + eps)  # multiply by the reciprocal RMS; dividing by rsqrt would amplify instead of normalize
if weight is not None:
return output * weight
return output | null |
3,020 | from contextlib import contextmanager
import torch
import torch.nn as nn
def init_on_device(device: torch.device, include_buffers: bool=False):
"""Device initialization context manager.
A context manager under which models are initialized with all parameters
on the specified device.
Args:
device... | Meta initialization context manager. A context manager under which models are initialized with all parameters on the meta device, therefore creating an empty model. Useful when just initializing the model would blow the available RAM. Args: include_buffers (`bool`, *optional*, defaults to `False`): Whether or not to al... |
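Recent PyTorch (2.0+) exposes this pattern directly: `torch.device` is itself a context manager, so the meta-initialization trick the docstring describes can be sketched without any monkey-patching:

```python
import torch
import torch.nn as nn

# Parameters created under the meta device carry shape/dtype metadata only,
# so even a very large model costs essentially no RAM to instantiate.
with torch.device("meta"):
    model = nn.Linear(4096, 4096)

print(model.weight.device)  # meta
print(model.weight.shape)   # torch.Size([4096, 4096])
```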
3,021 | import math
import warnings
from collections.abc import Sequence
from functools import partial
from typing import Optional, Tuple, Union
import torch
from torch import nn
from .norm import NORM_CLASS_REGISTRY
def torch_default_param_init_fn_(module: nn.Module, verbose: int=0, **kwargs):
del kwargs
if verbose >... | null |
3,022 | import math
import warnings
from collections.abc import Sequence
from functools import partial
from typing import Optional, Tuple, Union
import torch
from torch import nn
from .norm import NORM_CLASS_REGISTRY
def _normal_param_init_fn_(module: nn.Module, std: float, n_layers: int, d_model: Optional[int]=None, init_div_... | null |
3,023 | import math
import warnings
from collections.abc import Sequence
from functools import partial
from typing import Optional, Tuple, Union
import torch
from torch import nn
from .norm import NORM_CLASS_REGISTRY
def small_param_init_fn_(module: nn.Module, n_layers: int, d_model: int, init_div_is_residual: Union[int, float... | From section 2.3.1 of GPT-NeoX-20B: An Open-Source Autoregressive Language Model — Black et al. (2022); see https://github.com/EleutherAI/gpt-neox/blob/9610391ab319403cef079b438edd016a2443af54/megatron/model/init_functions.py#L151 and https://github.com/EleutherAI/gpt-neox/blob/main/megatron/model/transformer.py |
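A sketch of the "small init" the docstring cites, assuming the std = sqrt(2 / (5 * d_model)) standard deviation used in the referenced GPT-NeoX code (the function name and bias handling are illustrative):

```python
import math
import torch.nn as nn

def small_param_init_(module: nn.Module, d_model: int):
    """Normal init with std = sqrt(2 / (5 * d_model)) for Linear/Embedding weights."""
    if isinstance(module, (nn.Linear, nn.Embedding)):
        std = math.sqrt(2 / (5 * d_model))
        nn.init.normal_(module.weight, mean=0.0, std=std)
        if isinstance(module, nn.Linear) and module.bias is not None:
            nn.init.zeros_(module.bias)

layer = nn.Linear(512, 512)
small_param_init_(layer, d_model=512)
```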
3,024 | import math
import warnings
from collections.abc import Sequence
from functools import partial
from typing import Optional, Tuple, Union
import torch
from torch import nn
from .norm import NORM_CLASS_REGISTRY
def generic_param_init_fn_(module: nn.Module, init_fn_, n_layers: int, d_model: Optional[int]=None, init_div_is... | null |
3,025 | import math
import warnings
from collections.abc import Sequence
from functools import partial
from typing import Optional, Tuple, Union
import torch
from torch import nn
from .norm import NORM_CLASS_REGISTRY
def generic_param_init_fn_(module: nn.Module, init_fn_, n_layers: int, d_model: Optional[int]=None, init_div_is... | null |
3,026 | import math
import warnings
from collections.abc import Sequence
from functools import partial
from typing import Optional, Tuple, Union
import torch
from torch import nn
from .norm import NORM_CLASS_REGISTRY
def generic_param_init_fn_(module: nn.Module, init_fn_, n_layers: int, d_model: Optional[int]=None, init_div_is... | null |
3,027 | import math
import warnings
from collections.abc import Sequence
from functools import partial
from typing import Optional, Tuple, Union
import torch
from torch import nn
from .norm import NORM_CLASS_REGISTRY
def generic_param_init_fn_(module: nn.Module, init_fn_, n_layers: int, d_model: Optional[int]=None, init_div_is... | null |
3,028 | import math
import warnings
from types import MethodType
from typing import Any, Dict, List, Optional, Tuple, Union
import torch
from transformers.models.bloom.modeling_bloom import BaseModelOutputWithPastAndCrossAttentions, BloomForCausalLM, BloomModel, CausalLMOutputWithCrossAttentions, CrossEntropyLoss
from transfor... | Converts a HuggingFace Causal LM to a Prefix LM. Supported HuggingFace model classes: - `GPT2LMHeadModel` - `GPTNeoForCausalLM` - `GPTNeoXForCausalLM` - `GPTJForCausalLM` - `BloomForCausalLM` - `OPTForCausalLM` Conversion to a Prefix LM is done by modifying the `forward` method, and possibly also the `generate` method ... |
3,029 | import math
import warnings
from types import MethodType
from typing import Any, Dict, List, Optional, Tuple, Union
import torch
from transformers.models.bloom.modeling_bloom import BaseModelOutputWithPastAndCrossAttentions, BloomForCausalLM, BloomModel, CausalLMOutputWithCrossAttentions, CrossEntropyLoss
from transfor... | Attempts to add bidirectional_mask to batch if missing. Raises: KeyError if bidirectional_mask is missing and can't be inferred |
3,030 | from typing import Union
from transformers import AutoTokenizer, PreTrainedTokenizer, PreTrainedTokenizerFast
Tokenizer = Union[PreTrainedTokenizer, PreTrainedTokenizerFast]
NUM_SENTINEL_TOKENS: int = 100
The provided code snippet includes necessary dependencies for implementing the `adapt_tokenizer_for_denoising` fun... | Adds sentinel tokens and padding token (if missing). Expands the tokenizer vocabulary to include sentinel tokens used in mixture-of-denoiser tasks as well as a padding token. All added tokens are added as special tokens. No tokens are added if sentinel tokens and padding token already exist. |
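A sketch of the tokenizer adaptation, assuming T5-style `<extra_id_N>` sentinel strings and a `<pad>` fallback token (both token names are assumptions, not confirmed by the snippet):

```python
def adapt_tokenizer_for_denoising(tokenizer, num_sentinels: int = 100):
    """Add sentinel tokens and a pad token as special tokens, if missing."""
    sentinels = [f"<extra_id_{i}>" for i in range(num_sentinels)]
    # transformers' add_tokens skips strings already present in the vocabulary.
    tokenizer.add_tokens(sentinels, special_tokens=True)
    if tokenizer.pad_token is None:
        tokenizer.add_special_tokens({"pad_token": "<pad>"})

# usage: adapt_tokenizer_for_denoising(AutoTokenizer.from_pretrained(...))
```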
3,031 | import fnmatch
import json
import datasets
import torch
import transformers
from accelerate import Accelerator
from transformers import AutoModelForCausalLM, AutoTokenizer, HfArgumentParser
from lm_eval.arguments import EvalArguments
from lm_eval.evaluator import Evaluator
from lm_eval.tasks import ALL_TASKS
class Mult... | null |
3,032 | import fnmatch
import json
import datasets
import torch
import transformers
from accelerate import Accelerator
from transformers import AutoModelForCausalLM, AutoTokenizer, HfArgumentParser
from lm_eval.arguments import EvalArguments
from lm_eval.evaluator import Evaluator
from lm_eval.tasks import ALL_TASKS
The provi... | Returns a list containing all values of the source_list that match at least one of the patterns |
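A minimal sketch of the glob-style filtering the docstring describes, built on the stdlib `fnmatch` module imported above:

```python
import fnmatch
from typing import List

def pattern_match(patterns: List[str], source_list: List[str]) -> List[str]:
    """Keep every value matching at least one glob-style pattern."""
    return [
        value for value in source_list
        if any(fnmatch.fnmatch(value, pattern) for pattern in patterns)
    ]

assert pattern_match(["humaneval*"], ["humaneval", "humaneval-py", "mbpp"]) == [
    "humaneval", "humaneval-py"
]
```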
3,033 | from os import path
import datetime
import os
import platform
import re
import runpy
import subprocess
import sys
from setuptools import setup, find_packages, Extension
The provided code snippet includes necessary dependencies for implementing the `get_cflags` function. Write a Python function `def get_cflags()` to so... | Returns suitable CFLAGS for the platform. |
3,034 | from os import path
import datetime
import os
import platform
import re
import runpy
import subprocess
import sys
from setuptools import setup, find_packages, Extension
The provided code snippet includes necessary dependencies for implementing the `get_hg_changeset` function. Write a Python function `def get_hg_change... | Get the Mercurial changeset id. |
3,035 | from os import path
import datetime
import os
import platform
import re
import runpy
import subprocess
import sys
from setuptools import setup, find_packages, Extension
The provided code snippet includes necessary dependencies for implementing the `get_git_changeset` function. Write a Python function `def get_git_chan... | Get the Git changeset id. |
3,036 | from os import path
from typing import Any, Optional
import collections
import copy
import functools
import glob
import hashlib
import importlib
import io
import itertools
import logging
import os
import pickle
import struct
import sys
import textwrap
import time
import traceback
import warnings
from beancount.utils im... | Combine the plugins from the given plugin modules. This is used to create plugins of plugins. Args: *plugins_modules: A sequence of module objects. Returns: A list that can be assigned to the new module's __plugins__ attribute. |
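A sketch of the plugin combination described above, assuming each plugin module exposes a `__plugins__` list (a plain concatenation; the real implementation may differ):

```python
def combine_plugins(*plugin_modules):
    """Concatenate the __plugins__ lists of the given modules into one list."""
    plugins = []
    for module in plugin_modules:
        plugins.extend(getattr(module, "__plugins__", ()))
    return plugins

# usage: a meta-plugin module would set
# __plugins__ = combine_plugins(plugin_a, plugin_b)
```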
3,037 | from os import path
from typing import Any, Optional
import collections
import copy
import functools
import glob
import hashlib
import importlib
import io
import itertools
import logging
import os
import pickle
import struct
import sys
import textwrap
import time
import traceback
import warnings
from beancount.utils im... | A factory of decorators that loads the docstring and calls the function with entries. This is an incredibly convenient tool to write lots of tests. Write a unittest using the standard TestCase class and put the input entries in the function's docstring. Args: expect_errors: A boolean or None, with the following semanti... |
3,038 | from os import path
from typing import Any, Optional
import collections
import copy
import functools
import glob
import hashlib
import importlib
import io
import itertools
import logging
import os
import pickle
import struct
import sys
import textwrap
import time
import traceback
import warnings
from beancount.utils im... | Initialize the loader. |
3,039 | import collections
import copy
import functools
import io
import operator
from beancount.core.data import Transaction
from beancount.core.data import Posting
from beancount.core.data import TxnPosting
from beancount.core.data import Balance
from beancount.core.data import Open
from beancount.core.data import Close
from... | True if the given account node contains the subaccount name. Args: account_name: A string, the name of a direct or indirect subaccount of this node. Returns: A boolean, true the name is a child of this node. |
3,040 | import collections
import copy
import functools
import io
import operator
from beancount.core.data import Transaction
from beancount.core.data import Posting
from beancount.core.data import TxnPosting
from beancount.core.data import Balance
from beancount.core.data import Open
from beancount.core.data import Close
from... | Filter a RealAccount tree of nodes by the predicate. This function visits the tree and applies the predicate on each node. It returns a partial clone of RealAccount whereby on each node - either the predicate is true, or - for at least one child of the node the predicate is true. All the leaves have the predicate be tr... |
3,041 | import collections
import copy
import functools
import io
import operator
from beancount.core.data import Transaction
from beancount.core.data import Posting
from beancount.core.data import TxnPosting
from beancount.core.data import Balance
from beancount.core.data import Open
from beancount.core.data import Close
from... | Return a sorted list of a RealAccount's postings and children. Args: real_account: An instance of RealAccount. Returns: A list of Posting or directive instances. |
3,042 | import collections
import copy
import functools
import io
import operator
from beancount.core.data import Transaction
from beancount.core.data import Posting
from beancount.core.data import TxnPosting
from beancount.core.data import Balance
from beancount.core.data import Open
from beancount.core.data import Close
from... | Iterate over the entries, accumulating the running balance. For each entry, this yields tuples of the form: (entry, postings, change, balance) entry: This is the directive for this line. If the list contained Posting instance, this yields the corresponding Transaction object. postings: A list of postings on this entry ... |
3,043 | import collections
import copy
import functools
import io
import operator
from beancount.core.data import Transaction
from beancount.core.data import Posting
from beancount.core.data import TxnPosting
from beancount.core.data import Balance
from beancount.core.data import Open
from beancount.core.data import Close
from... | Look at the end of the list of postings, and find the last posting or entry that is not an automatically added directive. Note that if the account is closed, the last posting is assumed to be a Close directive (this is the case if the input is valid and checks without errors). Args: txn_postings: a list of postings or e... |
3,044 | import collections
import copy
from decimal import Decimal
from beancount.core.number import D
from beancount.core.number import ONE
from beancount.core.number import ZERO
from beancount.core.number import MISSING
from beancount.core.amount import Amount
from beancount.core.position import CostSpec
from beancount.core.... | Return True if a Posting has a balance amount that would have to be calculated. Args: posting: A Posting instance. Returns: A boolean. |
3,045 | import collections
import copy
from decimal import Decimal
from beancount.core.number import D
from beancount.core.number import ONE
from beancount.core.number import ZERO
from beancount.core.number import MISSING
from beancount.core.amount import Amount
from beancount.core.position import CostSpec
from beancount.core.... | If necessary, insert a posting to absorb the residual. This makes the transaction balance exactly. Note: This was developed in order to tweak transactions before exporting them to Ledger. A better method would be to enable the feature that automatically inserts these rounding postings on all transactions, and so maybe ... |
3,046 | from decimal import Decimal
from beancount.core.number import MISSING
from beancount.core.amount import Amount
from beancount.core.position import Cost
from beancount.core.position import Position
from beancount.core import prices
def convert_amount(amt, target_currency, price_map, date=None, via=None):
"""Return t... | Return the market value of a Position or Posting in a particular currency. In addition, if the rate from the position's currency to target_currency isn't available, an attempt is made to convert from its cost currency, if one is available. Args: pos: An instance of Position or Posting, equivalently. target_currency: Th... |
3,047 | import re
from decimal import Decimal
from typing import NamedTuple, Optional
from beancount.core.display_context import DEFAULT_FORMATTER
from beancount.core.number import ZERO
from beancount.core.number import MISSING
from beancount.core.number import D
class Amount(_Amount):
"""An 'Amount' represents a number of... | Divide the given amount by a number. Args: amount: An instance of Amount. number: A decimal number. Returns: An Amount, with the same currency, but with amount units divided by 'number'. |
3,048 | import builtins
import datetime
import enum
import sys
from decimal import Decimal
from typing import Any, Dict, List, NamedTuple, Optional, Set, Union
from beancount.core.amount import Amount
from beancount.core.number import D
from beancount.core.position import Cost
from beancount.core.position import CostSpec
from ... | Create a simple posting on the entry, with just a number and currency (no cost). Args: entry: The entry instance to add the posting to. account: A string, the account to use on the posting. number: A Decimal number or string to use in the posting's Amount. currency: A string, the currency for the Amount. Returns: An in... |
3,049 | import builtins
import datetime
import enum
import sys
from decimal import Decimal
from typing import Any, Dict, List, NamedTuple, Optional, Set, Union
from beancount.core.amount import Amount
from beancount.core.number import D
from beancount.core.position import Cost
from beancount.core.position import CostSpec
from ... | Create a simple posting on the entry, with just a number and currency (no cost). Args: entry: The entry instance to add the posting to. account: A string, the account to use on the posting. number: A Decimal number or string to use in the posting's Amount. currency: A string, the currency for the Amount. cost_number: A... |
3,050 | import builtins
import datetime
import enum
import sys
from decimal import Decimal
from typing import Any, Dict, List, NamedTuple, Optional, Set, Union
from beancount.core.amount import Amount
from beancount.core.number import D
from beancount.core.position import Cost
from beancount.core.position import CostSpec
from ... | Given a Transaction entry, return true if at least one of the postings has a price conversion (without an associated cost). These are the source of non-zero conversion balances. Args: transaction: an instance of a Transaction entry. Returns: A boolean, true if this transaction contains at least one posting with a price... |
3,051 | import builtins
import datetime
import enum
import sys
from decimal import Decimal
from typing import Any, Dict, List, NamedTuple, Optional, Set, Union
from beancount.core.amount import Amount
from beancount.core.number import D
from beancount.core.position import Cost
from beancount.core.position import CostSpec
from ... | Return the entry associated with the posting or entry. Args: entry: A TxnPosting or entry instance Returns: An entry (directive) instance. |
3,052 | import builtins
import datetime
import enum
import sys
from decimal import Decimal
from typing import Any, Dict, List, NamedTuple, Optional, Set, Union
from beancount.core.amount import Amount
from beancount.core.number import D
from beancount.core.position import Cost
from beancount.core.position import CostSpec
from ... | Return true if one of the entry's postings has an account component. Args: entry: A Transaction entry. component: A string, a component of an account name. For instance, ``Food`` in ``Expenses:Food:Restaurant``. All components are considered. Returns: Boolean: true if the component is in the account. Note that a compon... |
3,053 | import builtins
import datetime
import enum
import sys
from decimal import Decimal
from typing import Any, Dict, List, NamedTuple, Optional, Set, Union
from beancount.core.amount import Amount
from beancount.core.number import D
from beancount.core.position import Cost
from beancount.core.position import CostSpec
from ... | Remove all postings with the given account. Args: account: A string, the account name whose postings we want to remove. Returns: A list of entries without the rounding postings. |
3,054 | import builtins
import datetime
import enum
import sys
from decimal import Decimal
from typing import Any, Dict, List, NamedTuple, Optional, Set, Union
from beancount.core.amount import Amount
from beancount.core.number import D
from beancount.core.position import Cost
from beancount.core.position import CostSpec
from ... | Iterate over the entries in a date window. Args: entries: A date-sorted list of dated directives. date_begin: A datetime.date instance, the first date to include. date_end: A datetime.date instance, one day beyond the last date. Yields: Instances of the dated directives, between the dates, and in the order in which the... |
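A sketch of the date-window iteration, assuming the date-sorted input the docstring requires (which permits an early break; the real code may instead bisect into the list):

```python
import datetime

def iter_entry_dates(entries, date_begin: datetime.date, date_end: datetime.date):
    """Yield directives with date_begin <= entry.date < date_end.

    Assumes entries are date-sorted, so iteration can stop early.
    """
    for entry in entries:
        if entry.date >= date_end:
            break
        if entry.date >= date_begin:
            yield entry
```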
3,055 | import re
import os
import unicodedata
from os import path
from typing import Any, Callable, Iterable, Iterator, List, Tuple
import regex
Account = str
sep = ':'
def split(account_name: Account) -> List[str]:
"""Split an account's name into its components.
Args:
account_name: A string, an account name.
... | Get the name of the leaf of this account. Args: account_name: A string, the name of the account whose leaf name to return. Returns: A string, the name of the leaf of the account. |
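Given the `sep = ':'` convention shown in the snippet, the leaf is just the last component; a one-line sketch:

```python
def leaf(account_name: str) -> str:
    """Return the last component of a colon-separated account name."""
    return account_name.rsplit(":", 1)[-1]

assert leaf("Expenses:Food:Restaurant") == "Restaurant"
```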
3,056 | import re
import os
import unicodedata
from os import path
from typing import Any, Callable, Iterable, Iterator, List, Tuple
import regex
Account = str
sep = ':'
def join(*components: Tuple[str]) -> Account:
"""Join the names with the account separator.
Args:
*components: Strings, the components of an acc... | Return the common prefix of a list of account names. Args: accounts: A sequence of account name strings. Returns: A string, the common parent account. If none, returns an empty string. |
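A sketch of the common-prefix computation, comparing component by component so that partial component matches (e.g. "Assets:US" vs "Assets:USA") are not treated as shared parents:

```python
import itertools

def commonprefix(accounts) -> str:
    """Longest common parent account, compared component by component."""
    common = []
    for parts in itertools.zip_longest(*(name.split(":") for name in accounts)):
        if len(set(parts)) != 1:
            break
        common.append(parts[0])
    return ":".join(common)

assert commonprefix(["Assets:US:Bank:Checking", "Assets:US:Broker"]) == "Assets:US"
assert commonprefix(["Assets:US", "Expenses:Food"]) == ""
```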
3,057 | import re
import os
import unicodedata
from os import path
from typing import Any, Callable, Iterable, Iterator, List, Tuple
import regex
Account = str
def parent(account_name: Account) -> Account:
"""Return the name of the parent account of the given account.
Args:
account_name: A string, the name of the... | A generator of the names of the parents of this account, including this account. Args: account_name: The name of the account we want to start iterating from. Returns: A generator of account name strings. |
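A sketch of the parents generator, peeling off one component at a time with `rpartition` (illustrative, built on the same `sep = ':'` convention):

```python
def parents(account_name: str):
    """Yield the account itself, then each successively shorter parent."""
    while account_name:
        yield account_name
        head, _, _ = account_name.rpartition(":")
        account_name = head

print(list(parents("Assets:US:Bank")))  # ['Assets:US:Bank', 'Assets:US', 'Assets']
```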
3,058 | import copy
import datetime
import re
from decimal import Decimal
from typing import NamedTuple, Optional
from beancount.core.number import ZERO
from beancount.core.number import NUMBER_RE
from beancount.core.number import D
from beancount.core.amount import Amount
from beancount.core.amount import mul as amount_mul
fr... | Build a Position instance from a Posting instance. Args: posting: An instance of Posting. Returns: An instance of Position. |
3,059 | import re
from collections import namedtuple
from typing import Tuple
from beancount.core import account
from beancount.core.account import Account
AccountTypes = namedtuple('AccountTypes', "assets liabilities equity income expenses")
def get_account_type(account_name: Account):
"""Return the type of this account's... | Return a tuple that can be used to order/sort account names. Args: account_types: An instance of AccountTypes, a tuple of account type names. Returns: An object to use as the 'key' argument to the sort function. |
3,060 | import re
from collections import namedtuple
from typing import Tuple
from beancount.core import account
from beancount.core.account import Account
Account = str
The provided code snippet includes necessary dependencies for implementing the `is_account_type` function. Write a Python function `def is_account_type(acco... | Return the type of this account's name. Warning: No check is made on the validity of the account type. This merely returns the root account of the corresponding account name. Args: account_type: A string, the prefix type of the account. account_name: A string, the name of the account whose type is to return. Returns: A... |
3,061 | import re
from collections import namedtuple
from typing import Tuple
from beancount.core import account
from beancount.core.account import Account
Account = str
The provided code snippet includes necessary dependencies for implementing the `is_root_account` function. Write a Python function `def is_root_account(acco... | Return true if the account name is a root account. This function does not verify whether the account root is a valid one, just that it is a root account or not. Args: account_name: A string, the name of the account to check for. Returns: A boolean, true if the account is root account. |
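Under the same separator convention, a root account is simply a single-component name; a minimal sketch:

```python
def is_root_account(account_name: str) -> bool:
    """True when the name has exactly one component (no ':' separator)."""
    return bool(account_name) and ":" not in account_name

assert is_root_account("Assets")
assert not is_root_account("Assets:US")
```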
3,062 | import re
from collections import namedtuple
from typing import Tuple
from beancount.core import account
from beancount.core.account import Account
AccountTypes = namedtuple('AccountTypes', "assets liabilities equity income expenses")
def get_account_type(account_name: Account):
"""Return the type of this account's... | Return true if the given account is an equity account. Args: account_name: A string, an account name. account_types: An instance of AccountTypes. Returns: A boolean, true if the account is an equity account. |
3,063 | import re
from collections import namedtuple
from typing import Tuple
from beancount.core import account
from beancount.core.account import Account
AccountTypes = namedtuple('AccountTypes', "assets liabilities equity income expenses")
def get_account_type(account_name: Account):
"""Return the type of this account's... | Return true if the given account has inverted signs. An inverted sign is the inverse as you'd expect in an external report, i.e., with all positive signs expected. Args: account_name: A string, an account name. account_types: An instance of AccountTypes. Returns: A boolean, true if the account has an inverted sign. |
3,064 | import re
from collections import namedtuple
from typing import Tuple
from beancount.core import account
from beancount.core.account import Account
AccountTypes = namedtuple('AccountTypes', "assets liabilities equity income expenses")
DEFAULT_ACCOUNT_TYPES = AccountTypes("Assets",
"... | Return the sign of the normal balance of a particular account. Args: account_name: A string, the name of the account whose sign is to return. account_types: An optional instance of the current account_types. Returns: +1 or -1, depending on the account's type. |
3,065 | from collections import defaultdict
from collections import OrderedDict
from beancount.core.data import Transaction
from beancount.core.data import Open
from beancount.core.data import Close
from beancount.core.data import Commodity
from beancount.core import account
def get_accounts(entries):
"""Gather all the acc... | Gather all the account components available in the given directives. Args: entries: A list of directive instances. Returns: A list of strings, the unique account components, including the root account names. |