python_code stringlengths 0 108k |
|---|
from .loader import load
from .service import Service
__all__ = ["Service", "load"]
|
from __future__ import annotations
import os
import sys
import typing as t
import logging
import importlib
from typing import TYPE_CHECKING
import fs
from simple_di import inject
from simple_di import Provide
from ..bento import Bento
from ..models import ModelStore
from .service import on_import_svc
from .service i... |
from __future__ import annotations
import re
import typing as t
import inspect
from typing import Optional
import yaml
from ..types import is_compatible_type
from ..context import InferenceApiContext as Context
from ...exceptions import InvalidArgument
from ..io_descriptors import IODescriptor
RESERVED_API_NAMES = ... |
from __future__ import annotations
import typing as t
import logging
from typing import TYPE_CHECKING
import numpy as np
import bentoml
from bentoml import Tag
from bentoml.exceptions import NotFound
from bentoml.exceptions import InvalidArgument
from bentoml.exceptions import MissingDependencyException
from bentoml... |
from __future__ import annotations
import os
import typing as t
import logging
import importlib
import importlib.util
from typing import TYPE_CHECKING
import attr
import bentoml
from bentoml import Tag
from bentoml.models import Model
from bentoml.models import ModelContext
from bentoml.models import ModelOptions
fr... |
from __future__ import annotations
import os
import typing as t
import logging
import functools
from typing import TYPE_CHECKING
import attr
import bentoml
from bentoml import Tag
from bentoml import Runnable
from bentoml.models import ModelContext
from bentoml.models import ModelOptions
from bentoml.exceptions impo... |
from __future__ import annotations
import typing as t
import logging
from typing import TYPE_CHECKING
import bentoml
from bentoml import Tag
from bentoml.models import Model
from bentoml.models import ModelContext
from bentoml.exceptions import NotFound
from bentoml.exceptions import BentoMLException
from bentoml.exc... |
from __future__ import annotations
import typing as t
from typing import TYPE_CHECKING
import bentoml
from .torchscript import get
from .torchscript import load_model
from .torchscript import save_model as script_save_model
from .torchscript import get_runnable
from ...exceptions import MissingDependencyException
i... |
from __future__ import annotations
import typing as t
import logging
from typing import TYPE_CHECKING
from pathlib import Path
import cloudpickle
import bentoml
from bentoml import Tag
from ..types import LazyType
from ..models import Model
from ..utils.pkg import get_pkg_version
from ...exceptions import NotFound
... |
from __future__ import annotations
import pickle
import typing as t
import logging
import functools
import itertools
import contextlib
from typing import TYPE_CHECKING
import attr
import bentoml
from bentoml import Tag
from bentoml import Runnable
from bentoml.models import ModelContext
from bentoml.models import Mo... |
from __future__ import annotations
import typing as t
import logging
from typing import TYPE_CHECKING
import bentoml
from bentoml import Tag
from ..utils.pkg import get_pkg_version
from ...exceptions import NotFound
from ...exceptions import BentoMLException
from ...exceptions import MissingDependencyException
from ... |
from __future__ import annotations
import os
import typing as t
import logging
from typing import TYPE_CHECKING
import numpy as np
import bentoml
from bentoml import Tag
from bentoml.exceptions import NotFound
from bentoml.exceptions import InvalidArgument
from bentoml.exceptions import MissingDependencyException
fr... |
from __future__ import annotations
import typing as t
import logging
from typing import TYPE_CHECKING
import bentoml
from bentoml import Tag
from bentoml.models import Model
from bentoml.models import ModelContext
from bentoml.exceptions import NotFound
from bentoml.exceptions import BentoMLException
from bentoml.exc... |
from __future__ import annotations
import pickle
import typing as t
import logging
import functools
import itertools
import contextlib
from typing import TYPE_CHECKING
from simple_di import inject
from simple_di import Provide
import bentoml
from ...types import LazyType
from ....exceptions import MissingDependency... |
# pylint: disable=redefined-outer-name # pragma: no cover
from __future__ import annotations
import os
import re
import sys
import json
import time
import socket
import typing as t
import urllib
import logging
import contextlib
import subprocess
import urllib.error
import urllib.request
from typing import TYPE_CHECKIN... |
import typing as t
import logging
from typing import TYPE_CHECKING
logger = logging.getLogger("bentoml.tests")
if TYPE_CHECKING:
from aiohttp.typedefs import LooseHeaders
from starlette.datastructures import Headers
from starlette.datastructures import FormData
async def parse_multipart_form(headers: "... |
import torch
import colossalai
from colossalai.core import global_context as gpc
from colossalai.trainer import Trainer, hooks
from colossalai.utils import MultiTimer
from colossalai.logging import disable_existing_loggers, get_dist_logger
import torch
import torch.nn.functional as F
from einops import rearrange
from... |
import torch
import torch.nn.functional as F
from einops import rearrange
from torch import nn
# helper function
def exists(val):
    """Return True when a value was actually supplied, i.e. `val` is not None."""
    return not (val is None)
def eval_decorator(fn):
def inner(model, *args, **kwargs):
was_training = model.training
model.eval()
out = fn(model, *args, **kwa... |
import torch
import torch.nn.functional as F
import colossalai
from einops import rearrange
from torch import einsum, nn
from math import log2, floor
import bitsandbytes as bnb
from colossalai.core import global_context as gpc
def exists(val):
    """True iff `val` holds something other than the None sentinel."""
    if val is None:
        return False
    return True
# normalization
class RMSNorm(nn.Module):
... |
import haiku as hk
import jax
import jax.numpy as jnp
from jax import einsum, numpy
from einops import rearrange
# helper functions
def exists(val):
    """Check whether an optional value is present (not None)."""
    present = val is not None
    return present
def cast_tuple(val, num = 1):
    """Coerce `val` into a tuple.

    A value that is already a tuple passes through untouched; anything else
    is wrapped and repeated `num` times.
    """
    if isinstance(val, tuple):
        return val
    return (val,) * num
def default(val, d):
return val if exists(v... |
import flax.linen as nn
import jax
import jax.numpy as jnp
from jax.numpy import einsum
import numpy as np
from typing import Callable
from einops import rearrange, repeat, reduce
def cast_tuple(val, length = 1):
    """Normalize `val` to a tuple, replicating a non-tuple value `length` times."""
    already_tuple = isinstance(val, tuple)
    return val if already_tuple else tuple(val for _ in range(length))
# cross embed layer
class CrossEmbedL... |
# Copyright 2022 Garena Online Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agre... |
# Copyright 2022 Garena Online Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agre... |
import sys
import math
import functools
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
sys.path.append('utils')
from proj_adaptive_softmax import ProjectedAdaptiveLogSoftmax
from log_uniform_sampler import LogUniformSampler, sample_logits
class PositionalEmbedding(nn.Module):
... |
import os, sys
import glob
from collections import Counter, OrderedDict
import numpy as np
import torch
from utils.vocabulary import Vocab
class LMOrderedIterator(object):
def __init__(self, data, bsz, bptt, device='cpu', ext_len=None):
"""
data -- LongTensor -- the LongTensor is strictly ord... |
# coding: utf-8
import argparse
import time
import math
import os, sys
import itertools
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from adan import Adan
from data_utils import get_lm_corpus
from mem_transformer import MemTransformerLM
from utils.exp_utils import create_exp_dir
... |
# coding: utf-8
import argparse
import time
import math
import os, sys
import torch
from data_utils import get_lm_corpus
from mem_transformer import MemTransformerLM
from utils.exp_utils import get_logger
parser = argparse.ArgumentParser(description='PyTorch Transformer Language Model')
parser.add_argument('--data',... |
from collections import defaultdict
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
class AdaptiveLogSoftmax(nn.Module):
def __init__(self, in_features, n_classes, cutoffs, keep_order=False):
super(AdaptiveLogSoftmax, self).__init__()
cutoffs = list(cutoffs)... |
from torch.nn.parallel import DataParallel
import torch
from torch.nn.parallel._functions import Scatter
from torch.nn.parallel.parallel_apply import parallel_apply
def scatter(inputs, target_gpus, chunk_sizes, dim=0):
r"""
Slices tensors into approximately equal chunks and
distributes them across given G... |
from collections import defaultdict
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
CUDA_MAJOR = int(torch.version.cuda.split('.')[0])
CUDA_MINOR = int(torch.version.cuda.split('.')[1])
class ProjectedAdaptiveLogSoftmax(nn.Module):
def __init__(self, n_token, d_embed, d_pro... |
import torch
from torch import nn
import numpy as np
class LogUniformSampler(object):
def __init__(self, range_max, n_sample):
"""
Reference : https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/candidate_sampling_ops.py
`P(class) = (log(class + 2) - log(class + 1)... |
import functools
import os, shutil
import numpy as np
import torch
def logging(s, log_path, print_=True, log_=True):
    """Emit message `s`.

    Echoes to stdout when `print_` is truthy, and appends `s` plus a
    trailing newline to the file at `log_path` when `log_` is truthy.
    """
    if print_:
        print(s)
    if not log_:
        return
    with open(log_path, 'a+') as log_file:
        log_file.write(s + '\n')
def get_logger(log_path, **kwargs):
return functools.partial(logging, l... |
import os
from collections import Counter, OrderedDict
import torch
class Vocab(object):
def __init__(self, special=[], min_freq=0, max_size=None, lower_case=True,
delimiter=None, vocab_file=None):
self.counter = Counter()
self.special = special
self.min_freq = min_freq
... |
# Copyright 2022 Garena Online Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agre... |
import os
from fairseq.models.roberta import RobertaModel
import argparse
from scipy.stats import pearsonr
from sklearn.metrics import matthews_corrcoef
def get_acc(model_path, data_path, bin_path, task='rte'):
acc_list = []
gold, pred = [], []
roberta = RobertaModel.from_pretrained(
model_path,
... |
# Copyright 2022 Garena Online Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agre... |
#!/usr/bin/env python3
""" ImageNet Training Script
This is intended to be a lean and easily modifiable ImageNet training script that reproduces ImageNet
training results with some of the latest networks and training techniques. It favours canonical PyTorch
and standard Python style over trying to be able to 'do it al... |
import torch
class SAM(torch.optim.Optimizer):
def __init__(self, params, base_optimizer, rho=0.05, adaptive=False, **kwargs):
assert rho >= 0.0, f"Invalid rho, should be non-negative: {rho}"
defaults = dict(rho=rho, adaptive=adaptive, **kwargs)
super(SAM, self).__init__(params, defaults)... |
""" Optimizer Factory w/ Custom Weight Decay
Hacked together by / Copyright 2021 Ross Wightman
"""
import json
from itertools import islice
from typing import Optional, Callable, Tuple
import torch
import torch.nn as nn
import torch.optim as optim
from timm.models.helpers import group_parameters
from timm.optim.adab... |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# --------------------------------------------------------
# References:
# DeiT: https://github.com/facebookresearch/deit
#... |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# --------------------------------------------------------
# References:
# DeiT: https://github.com/facebookresearch/deit
#... |
# Copyright 2022 Garena Online Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agre... |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# --------------------------------------------------------
# References:
# timm: https://github.com/rwightman/pytorch-image... |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# --------------------------------------------------------
# References:
# DeiT: https://github.com/facebookresearch/deit
#... |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# --------------------------------------------------------
# References:
# DeiT: https://github.com/facebookresearch/deit
#... |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# --------------------------------------------------------
# References:
# timm: https://github.com/rwightman/pytorch-image... |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# --------------------------------------------------------
# References:
# DeiT: https://github.com/facebookresearch/deit
#... |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# --------------------------------------------------------
# References:
# ELECTRA https://github.com/google-research/elect... |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# --------------------------------------------------------
# References:
# DeiT: https://github.com/facebookresearch/deit
#... |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# --------------------------------------------------------
# LARS optimizer, implementation from MoCo v3:
# https://github.... |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# --------------------------------------------------------
# References:
# DeiT: https://github.com/facebookresearch/deit
#... |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
import math
import torch
from torchvision import transforms
from torchvision.transforms import functional as F
class R... |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# --------------------------------------------------------
# Position embedding utils
# -----------------------------------... |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
import math
def adjust_learning_rate(optimizer, epoch, args):
"""Decay the learning rate with half-cycle cosine after... |
import flax.linen as nn
import jax
import jax.numpy as jnp
from jax.numpy import einsum
from einops import rearrange, repeat
from typing import Callable, Any
import numpy as np
import tensorflow as tf
def exists(val):
    # Present/absent test for optional arguments: anything but None counts.
    return val is not None
def pair(t):
    """Turn `t` into a 2-tuple: a tuple passes through, a scalar is duplicated."""
    if isinstance(t, tuple):
        return t
    return (t, t)
# adapt... |
from typing import Callable
import flax.linen as nn
import jax
import jax.numpy as jnp
from jax.numpy import einsum
from einops import rearrange, repeat
from random import randrange
def exists(val):
return val is not None
def dropout_layers(layers, dropout):
if dropout == 0:
return layers
num... |
import torch
import torch.nn.functional as F
from einops import rearrange
from torch import einsum, nn
from math import log2, floor
def exists(val):
    """Return whether `val` is a real value rather than the None placeholder."""
    return not (val is None)
# residual wrapper
class Residual(nn.Module):
def __init__(self, fn):
super().__init__()
self.fn = fn
def forward(se... |
from torchvision.datasets import CIFAR10
class ArtBench10(CIFAR10):
base_folder = "artbench-10-batches-py"
url = ""
filename = "artbench-10-python.tar.gz"
tgz_md5 = "b116ffdc5e07e162f119149c2ad7403f"
train_list = [
["data_batch_1", "c2e02a78dcea81fe6fead5f1540e542f"],
["data_batch_... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.