#!/usr/bin/env python3 from typing import Any, Callable, List, Tuple, Union import torch from captum._utils.common import _format_output from captum._utils.gradient import _forward_layer_eval from captum._utils.typing import ModuleOrModuleList from captum.attr._utils.attribution import LayerAttribution from captum.log...
#!/usr/bin/env python3 import typing from typing import Any, Callable, List, Tuple, Union import torch from captum._utils.common import ( _expand_additional_forward_args, _expand_target, _format_additional_forward_args, _format_output, ) from captum._utils.gradient import compute_layer_gradients_and_ev...
#!/usr/bin/env python3 from typing import Any, Callable, List, Tuple, Union import torch import torch.nn.functional as F from captum._utils.common import ( _format_additional_forward_args, _format_output, _format_tensor_into_tuples, ) from captum._utils.gradient import compute_layer_gradients_and_eval from...
#!/usr/bin/env python3 from typing import Any, Callable, List, Tuple, Union from captum._utils.common import ( _format_additional_forward_args, _format_output, _format_tensor_into_tuples, ) from captum._utils.gradient import compute_layer_gradients_and_eval from captum._utils.typing import ModuleOrModuleLi...
#!/usr/bin/env python3 import typing from typing import Any, Callable, cast, List, Tuple, Union import numpy as np import torch from captum._utils.gradient import _forward_layer_eval, compute_layer_gradients_and_eval from captum._utils.typing import Literal, TargetType, TensorOrTupleOfTensorsGeneric from captum.attr....
#!/usr/bin/env python3 from typing import Any, Callable, List, Tuple, Union import torch from captum._utils.common import ( _extract_device, _format_additional_forward_args, _format_output, _format_tensor_into_tuples, _run_forward, ) from captum._utils.gradient import _forward_layer_eval from captu...
#!/usr/bin/env python3 import functools import warnings from typing import Any, Callable, List, overload, Tuple, Union import torch from captum._utils.common import ( _extract_device, _format_additional_forward_args, _format_outputs, ) from captum._utils.gradient import _forward_layer_eval, _run_forward fr...
#!/usr/bin/env python3 import typing from typing import Any, Callable, cast, Sequence, Tuple, Union import torch from captum._utils.common import ( _expand_target, _format_additional_forward_args, _format_baseline, _format_tensor_into_tuples, ExpansionTypes, ) from captum._utils.gradient import com...
#!/usr/bin/env python3 from collections import defaultdict import torch from pytext.models.embeddings.dict_embedding import DictEmbedding from pytext.models.embeddings.word_embedding import WordEmbedding from pytext.models.model import EmbeddingBase, EmbeddingList class PyTextInterpretableEmbedding(EmbeddingBase): ...
#!/usr/bin/env python3 import warnings from functools import reduce import torch from torch.nn import Module class InterpretableEmbeddingBase(Module): r""" Since some embedding vectors, e.g. word are created and assigned in the embedding layers of Pytorch models we need a way to access those layers,...
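A hedged usage sketch for the wrapper above, assuming captum's documented configure/remove helpers for swapping an embedding layer so precomputed embeddings can be fed to the model directly; TinyModel and the token ids are illustrative, not part of the source:

import torch
import torch.nn as nn
from captum.attr import (
    configure_interpretable_embedding_layer,
    remove_interpretable_embedding_layer,
)

class TinyModel(nn.Module):
    def __init__(self):
        super().__init__()
        self.embedding = nn.Embedding(100, 8)  # vocab of 100, dim 8
        self.fc = nn.Linear(8, 2)

    def forward(self, x):
        # x is token indices, or precomputed embeddings once the layer is wrapped
        emb = self.embedding(x)
        return self.fc(emb.mean(dim=1))

model = TinyModel()
interp_emb = configure_interpretable_embedding_layer(model, "embedding")
token_ids = torch.tensor([[1, 2, 3]])
input_emb = interp_emb.indices_to_embeddings(token_ids)  # shape (1, 3, 8)
out = model(input_emb)  # forward now consumes embeddings directly
remove_interpretable_embedding_layer(model, interp_emb)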
#!/usr/bin/env python3 from captum.concept._core.cav import CAV # noqa from captum.concept._core.concept import Concept, ConceptInterpreter # noqa from captum.concept._core.tcav import TCAV # noqa from captum.concept._utils.classifier import Classifier, DefaultClassifier # noqa
#!/usr/bin/env python3 import glob import os from typing import Callable, Iterator from torch import Tensor from torch.utils.data import DataLoader, Dataset, IterableDataset class CustomIterableDataset(IterableDataset): r""" An auxiliary class for iterating through a dataset. """ def __init__(self,...
#!/usr/bin/env python3 import random import warnings from abc import ABC, abstractmethod from typing import Any, Dict, List, Tuple, Union import torch from captum._utils.models.linear_model import model from torch import Tensor from torch.utils.data import DataLoader, TensorDataset class Classifier(ABC): r""" ...
#!/usr/bin/env python3 from typing import List from captum.concept._core.concept import Concept def concepts_to_str(concepts: List[Concept]) -> str: r""" Returns a string of hyphen("-") concatenated concept names. Example output: "striped-random_0-random_1" Args: concepts (list[Concept]): a...
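A minimal sketch of the helper above, assuming captum's Concept(id, name, data_iter) constructor; the data iterators are omitted (None) since only the names matter for the string:

from captum.concept import Concept
from captum.concept._utils.common import concepts_to_str

striped = Concept(id=0, name="striped", data_iter=None)
random_0 = Concept(id=1, name="random_0", data_iter=None)
assert concepts_to_str([striped, random_0]) == "striped-random_0"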
#!/usr/bin/env python3 import os from typing import Any, Dict, List import torch from captum.concept._core.concept import Concept from captum.concept._utils.common import concepts_to_str class CAV: r""" Concept Activation Vector (CAV) is a vector orthogonal to the decision boundary of a classifier which...
#!/usr/bin/env python3 from typing import Callable, Union import torch from torch.nn import Module class Concept: r""" Concepts are human-friendly abstract representations that can be numerically encoded into torch tensors. They can be illustrated as images, text or any other form of representation...
#!/usr/bin/env python3 from collections import defaultdict from typing import Any, cast, Dict, List, Set, Tuple, Union import numpy as np import torch import torch.multiprocessing as multiprocessing from captum._utils.av import AV from captum._utils.common import _format_tensor_into_tuples, _get_module_from_name from...
#!/usr/bin/env python3 try: from captum.log.fb.internal_log import ( disable_detailed_logging, log, log_usage, patch_methods, set_environment, TimedLog, ) __all__ = [ "log", "log_usage", "TimedLog", "set_environment", ...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved from setuptools import setup projects = [p.rstrip("\n") for p in open("hydra-configs-projects.txt", "r").readlines()] project_uris = [ f"{project} @ git+https://github.com/pytorch/hydra-torch/#subdirectory={project}" for project in projects...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved import nox import os DEFAULT_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] PYTHON_VERSIONS = os.environ.get( "NOX_PYTHON_VERSIONS", ",".join(DEFAULT_PYTHON_VERSIONS) ).split(",") VERBOSE = os.environ.get("VERBOSE", "0") SILENT = VERBOSE == "0" # Li...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved from setuptools import find_namespace_packages, setup requirements = [ "omegaconf", ] setup( name="hydra-configs-torchvision", version="0.8.2", packages=find_namespace_packages(include=["hydra_configs*"]), author=["Omry Yadan",...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved import os import pytest from pathlib import Path from hydra.utils import get_class, instantiate from omegaconf import OmegaConf from typing import Any import torch import torchvision.datasets as datasets @pytest.mark.parametrize( "modulepath,...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved import pytest from hydra.utils import get_class, instantiate from omegaconf import OmegaConf import torch # import torchvision.datasets as datasets import torchvision.transforms as transforms from torchvision.transforms.transforms import ToTensor ...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from packaging import version from pkg_resources import get_distribution import warnings im...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # flake8: noqa from __future__ import print_function import torch import torch.nn as nn import torch.nn.functional as F from torchvision import datasets, transforms from torch.optim import Adadelta from torch.optim.lr_scheduler import StepLR ######...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved from setuptools import find_namespace_packages, setup requirements = [ "omegaconf", ] setup( name="hydra-configs-torch", version="1.6.1", packages=find_namespace_packages(include=["hydra_configs*"]), author=["Omry Yadan", "Rosa...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved import pytest from hydra.utils import get_class, instantiate from omegaconf import OmegaConf import torch.optim as optim import torch from torch import Tensor from torch import nn from typing import Any model = nn.Linear(1, 1) @pytest.mark.para...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved import pytest from hydra.utils import get_class, instantiate from omegaconf import OmegaConf import torch.nn.modules.loss as loss from torch import Tensor from typing import Any @pytest.mark.parametrize( "modulepath, classname, cfg, p...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved import pytest from hydra.utils import get_class, instantiate from omegaconf import OmegaConf import torch.utils.data as data import torch from typing import Any dummy_tensor = torch.tensor((1, 1)) dummy_dataset = data.dataset.TensorDataset(dummy_...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# flake8: noqa # Mirrors torch/optim __init__ to allow for symmetric import structure from .adadelta import AdadeltaConf from .adagrad import AdagradConf from .adam import AdamConf from .adamw import AdamWConf from .sparse_adam import SparseAdamConf from .adamax import AdamaxConf from .asgd import ASGDConf from .sgd im...
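A hedged sketch of how these generated configs are consumed, following the instantiate pattern used by the tests in this repo; the hydra_configs.torch.optim namespace comes from the setup.py above, while the lr value and model are illustrative:

import torch
from hydra.utils import instantiate
from omegaconf import OmegaConf
from hydra_configs.torch.optim import AdamConf

model = torch.nn.Linear(1, 1)
cfg = OmegaConf.structured(AdamConf(lr=1e-3))
# _target_ inside AdamConf points at torch.optim.Adam; params is supplied at call time
optimizer = instantiate(cfg, params=model.parameters())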
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # Generated by configen, do not edit. # See https://github.com/facebookresearch/hydra/tree/main/tools/configen # fmt: off # isort:skip_file # flake8: noqa from dataclasses import dataclass, field from omegaconf import MISSING from typing import A...
#!/usr/bin/env python import os import shutil import sys from setuptools import setup, find_packages readme = open('README.rst').read() VERSION = '0.0.2' setup( # Metadata name='torchcontrib', version=VERSION, author='PyTorch Core Team and Contributors', author_email='soumith@pytorch.org', u...
import re import functools from copy import deepcopy import torch from torch.autograd import Variable from torch import sparse from torch import optim from torch import nn import torchcontrib.optim as contriboptim from .common import TestCase, run_tests from torch.utils import data def rosenbrock(tensor): x, y = ...
import unittest import torch import torchcontrib import torchcontrib.nn as contrib_nn import torchcontrib.nn.functional as contrib_F from torch.autograd import gradcheck, gradgradcheck from .common import run_tests, TestCase class TestNN(TestCase): def assertGradAndGradgradChecks(self, apply_fn, inputs): ...
# Pavel: copied without changes from pytorch/test/common.py import sys import os import platform import re import gc import types import inspect import argparse import unittest import warnings import random import contextlib from functools import wraps from itertools import product from copy import deepcopy from numbe...
from . import nn from . import optim
from .modules import * from . import functional
def film(input, gamma, beta): r"""Applies Feature-wise Linear Modulation to the incoming data. See :class:`~torchcontrib.nn.FiLM` for details. """ if input.dim() < 2: raise ValueError("film expects input to be at least 2-dimensional, but " "got input of size {}".format(...
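A short sketch of what the functional above computes: the (N, C) conditioning tensors are broadcast over all trailing dimensions of the input. Shapes are illustrative:

import torch

x = torch.randn(4, 8, 16, 16)   # (N, C, H, W) feature map
gamma = torch.randn(4, 8)       # per-sample, per-channel scale
beta = torch.randn(4, 8)        # per-sample, per-channel shift

# reshape conditioning to (N, C, 1, 1) so it broadcasts the way film does
view = gamma.shape + (1,) * (x.dim() - 2)
y = gamma.view(view) * x + beta.view(view)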
import torch from torch.nn import Module from .. import functional as F class FiLM(Module): r"""Applies Feature-wise Linear Modulation to the incoming data as described in the paper `FiLM: Visual Reasoning with a General Conditioning Layer`_ . .. math:: y_{n,c,*} = \gamma_{n, c} * x_{n,c,*} + \b...
from .linear import FiLM __all__ = ['FiLM']
from collections import defaultdict from itertools import chain from torch.optim import Optimizer import torch import warnings class SWA(Optimizer): def __init__(self, optimizer, swa_start=None, swa_freq=None, swa_lr=None): r"""Implements Stochastic Weight Averaging (SWA). Stochastic Weight Avera...
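A hedged usage sketch for the wrapper above in its automatic mode, assuming torchcontrib's documented swap_swa_sgd() for copying the averaged weights back into the model; the model, data, and hyperparameters are illustrative:

import torch
from torchcontrib.optim import SWA

model = torch.nn.Linear(10, 2)
base_opt = torch.optim.SGD(model.parameters(), lr=0.1)
# start averaging at step 10, take a snapshot every 5 steps, at lr 0.05
opt = SWA(base_opt, swa_start=10, swa_freq=5, swa_lr=0.05)
for _ in range(100):
    opt.zero_grad()
    loss = model(torch.randn(8, 10)).pow(2).mean()
    loss.backward()
    opt.step()
opt.swap_swa_sgd()  # replace model weights with their SWA running average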
from .swa import SWA
VERSION = "0.27.9"
class ApacAIError(Exception): def __init__( self, message=None, http_body=None, http_status=None, json_body=None, headers=None, code=None, ): super(ApacAIError, self).__init__(message) if http_body and hasattr(http_body, "decode"): ...
import logging import os APACAI_LOG = os.environ.get("APACAI_LOG") logger = logging.getLogger("apacai") __all__ = [ "log_info", "log_debug", "log_warn", "logfmt", ] api_key_to_header = ( lambda api, key: {"Authorization": f"Bearer {key}"} if api in (ApiType.OPEN_AI, ApiType.AZURE_AD) else {"api-key": f"{key}"} ...
try: import wandb WANDB_AVAILABLE = True except ImportError: WANDB_AVAILABLE = False if WANDB_AVAILABLE: import datetime import io import json import re from pathlib import Path from apacai import File, FineTune from apacai.datalib.numpy_helper import numpy as np from apacai.datalib....
from typing import Any, Callable, NamedTuple, Optional class Remediation(NamedTuple): name: str immediate_msg: Optional[str] = None necessary_msg: Optional[str] = None necessary_fn: Optional[Callable[[Any], Any]] = None optional_msg: Optional[str] = None optional_fn: Optional[Callable[[Any], Any]] = None error_msg: Optional[str] = None def ...
import io class CancelledError(Exception): def __init__(self, msg): self.msg = msg Exception.__init__(self, msg) def __str__(self): return self.msg __repr__ = __str__ class BufferReader(io.BytesIO): def __init__(self, buf=b"", desc=None): self._len = len(buf) io.Bytes...
#!/usr/bin/env python import argparse import logging import sys logger = logging.getLogger() formatter = logging.Formatter("[%(asctime)s] %(message)s") handler = logging.StreamHandler(sys.stderr) handler.setFormatter(formatter) logger.addHandler(handler) def main(): parser = argparse.ArgumentParser(description=None) parser.add_argument( "-V...
from apacai import api_resources from apacai.api_resources.experimental.completion_config import CompletionConfig OBJECT_CLASSES = { "engine": api_resources.Engine, "experimental.completion_config": CompletionConfig, "file": api_resources.File, "fine-tune": api_resources.FineTune, "model": api_resources.Model, "deployment": api_resources.Deployment, }
# APACAI Python bindings. # # Originally forked from the MIT-licensed Stripe Python bindings. import sys if "pkg_resources" not in sys.modules: # workaround for the following: # https://github.com/benoitc/gunicorn/pull/2539 sys.modules["pkg_resources"] = object() # type: ignore[assignment] import aiohttp ...
import sys from typing import ( AsyncGenerator, AsyncIterator, Callable, Dict, Iterator, Optional, Tuple, Union, overload, ) if sys.version_info >= (3, 8): from typing import Literal else: from typing_extensions import Literal TIMEOUT_SECS = 600 MAX_SESSION_LIFETIME_SECS = 180 MAX_CONNECTION_RETRIES = 2...
from apacai.validators import ( apply_necessary_remediation, apply_validators, get_validators, read_any_format, write_out_file, ) class bcolors: HEADER = "\033[95m" OKBLUE = "\033[94m" OKGREEN = "\033[92m" WARNING = "\033[93m" FAIL = "\033[91m" ENDC = "\033[0m" BOLD = "\033[1m" UNDERLINE = "\033...
from typing import Optional class ApacAIResponse: def __init__(self, data, headers): self._headers = headers self.data = data @property def request_id(self) -> Optional[str]: return self._headers.get("request-id") @property def retry_after(self) -> Optional[int]: try: return int(...
from typing import List import apacai from tenacity import retry, stop_after_attempt, wait_random_exponential @retry(wait=wait_random_exponential(min=1, max=20), stop=stop_after_attempt(6)) def get_embedding(text: str, engine="text-similarity-davinci-001", **kwargs) -> List[float]: # replace newlines, which can negatively affect performance. text = text.replace("\n", " ") return apacai.Embedding.create(input=...
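A brief usage sketch for the helper above; the engine default and retry bounds come from the snippet, the input string is illustrative, and an API key is assumed to be configured:

vec = get_embedding("The food was delicious.")  # uses text-similarity-davinci-001
print(len(vec))  # dimensionality of the returned embedding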
from typing import Optional class ApacAIObject(dict): api_base_override = None def __init__( self, id=None, api_key=None, api_version=None, api_type=None, organization=None, response_ms: Optional[int] = None, api_base=None, engine=None, **params, ): ...
from apacai.datalib.common import INSTRUCTIONS, MissingDependencyError try: import pandas except ImportError: pandas = None HAS_PANDAS = bool(pandas) PANDAS_INSTRUCTIONS = INSTRUCTIONS.format(library="pandas") def assert_has_pandas(): if not HAS_PANDAS: raise MissingDependencyError(PANDAS_INSTRUCTIONS)
""" This module helps make data libraries like `numpy` and `pandas` optional dependencies. The libraries add up to 130MB+, which makes it challenging to deploy applications using this library in environments with code size constraints, like AWS Lambda. This module serves as an import proxy and provides a few utilitie...
INSTRUCTIONS = """ APACAI error: missing `{library}` This feature requires additional dependencies: $ pip install apacai[datalib] """ NUMPY_INSTRUCTIONS = INSTRUCTIONS.format(library="numpy") class MissingDependencyError(Exception): pass
from apacai.datalib.common import INSTRUCTIONS, MissingDependencyError try: import numpy except ImportError: numpy = None HAS_NUMPY = bool(numpy) NUMPY_INSTRUCTIONS = INSTRUCTIONS.format(library="numpy") def assert_has_numpy(): if not HAS_NUMPY: raise MissingDependencyError(NUMPY_INSTRUCTIONS)
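A hedged sketch of the guard pattern these proxy modules enable; the import path matches the one used by the wandb logger earlier in this listing, and to_array is an illustrative caller:

from apacai.datalib.numpy_helper import assert_has_numpy
from apacai.datalib.numpy_helper import numpy as np

def to_array(values):
    assert_has_numpy()  # fail fast with install instructions if numpy is missing
    return np.array(values)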
import json from tempfile import NamedTemporaryFile STILL_PROCESSING = "File is still processing. Check back later." def test_file_cli() -> None: contents = json.dumps({"prompt": "1 + 3 =", "completion": "4"}) + "\n" with NamedTemporaryFile(suffix=".jsonl", mode="wb") as train_file: train_file.write(contents.encode("utf-8")) train_file.flush()...
import io import json import apacai # FILE TESTS def test_file_upload(): result = apacai.File.create( file=io.StringIO( json.dumps({"prompt": "test file data", "completion": "tada"}) ), purpose="fine-tune", ) assert result.purpose == "fine-tune" assert "id" in result result = apacai.File.retrie...
import apacai EXCEPTION_TEST_CASES = [ apacai.InvalidRequestError( "message", "param", code=400, http_body={"test": "test1"}, http_status="fail", json_body={"text": "iono some text"}, headers={"request-id": "asasd"}, ), apacai.error.AuthenticationError(), apa...
from tempfile import NamedTemporaryFile import pytest import apacai @pytest.fixture(scope="function") def api_key_file(): saved_path = apacai.api_key_path try: with NamedTemporaryFile(prefix="apacai-api-key", mode="wt") as tmp: apacai.api_key_path = tmp.name yield tmp finally: apacai.api_key_path = saved_path def test_apacai_api...
import pytest from apacai import Completion @pytest.mark.url def test_completions_url_composition_azure() -> None: url = Completion.class_url("test_engine", "azure", "2021-11-01-preview") assert ( url == "/apacai/deployments/test_engine/completions?api-version=2021-11-01-preview" ) @pytest.mark.url def test_completions_url_compo...
import pytest from apacai.datalib.numpy_helper import HAS_NUMPY, NUMPY_INSTRUCTIONS from apacai.datalib.pandas_helper import HAS_PANDAS, PANDAS_INSTRUCTIONS @pytest.mark.skipif(not HAS_PANDAS, reason=PANDAS_INSTRUCTIONS) @pytest.mark.skipif(not HAS_NUMPY, reason=NUMPY_INSTRUCTIONS) def test_long_examples_validator() -> None: """ Ensures that long_examples_validator() handles previously applied recommendations, namely dropped duplicates, without resulting in...
import pytest from pytest_mock import MockerFixture @pytest.mark.requestor def test_requestor_sets_request_id(mocker: MockerFixture) -> None: # Fake out 'requests' and confirm that the X-Request-Id header is set. got_headers = {} def fake_request(self, *args, **kwargs): nonlocal got_headers got_headers = kwargs["headers"] r = re...
import io import json import pytest import apacai pytestmark = [pytest.mark.asyncio] # FILE TESTS async def test_file_upload(): result = await apacai.File.acreate( file=io.StringIO( json.dumps({"prompt": "test file data", "completion": "tada"}) ), purpose="fine-tune", ) assert result.purpose == "fine-tune" asser...
from apacai.api_resources.abstract.engine_api_resource import EngineAPIResource class ChatCompletion(EngineAPIResource): engine_required = False OBJECT_NAME = "chat.completions" @classmethod def create(cls, *args, **kwargs): """ Creates a new chat completion for the provided messages and parameters. See https://platform.apacai.com/docs/api-reference/cha...
from apacai.api_resources.abstract import ( DeletableAPIResource, ListableAPIResource, CreateableAPIResource, ) class Deployment(CreateableAPIResource, ListableAPIResource, DeletableAPIResource): OBJECT_NAME = "deployments" @classmethod def _check_create(cls, *args, **kwargs): typed_api_type, _ = cls._get_api_type_and_version( ...
from typing import Optional from apacai.apacai_object import ApacAIObject class ErrorObject(ApacAIObject): def refresh_from( self, values, api_key=None, api_version=None, api_type=None, organization=None, response_ms: Optional[int] = None, ): # Unlike most other API resources, the API will omit attributes in #...
from apacai.api_resources.abstract.engine_api_resource import EngineAPIResource class Completion(EngineAPIResource): OBJECT_NAME = "completions" @classmethod def create(cls, *args, **kwargs): """ Creates a new completion for the provided prompt and parameters. See https://platform.apacai.com/docs/api-reference/completions/create for a list of valid ...
from apacai.api_resources.abstract import ( CreateableAPIResource, DeletableAPIResource, ListableAPIResource, nested_resource_class_methods, ) @nested_resource_class_methods("event", operations=["list"]) class FineTune(ListableAPIResource, CreateableAPIResource, DeletableAPIResource): OBJECT_NAME = "fine-tunes" @classmethod def _prepare_cancel( cls...
from apacai.api_resources.abstract.engine_api_resource import EngineAPIResource class Embedding(EngineAPIResource): OBJECT_NAME = "embeddings" @classmethod def create(cls, *args, **kwargs): """ Creates a new embedding for the provided input and parameters. See https://platform.apacai.com/docs/api-reference/embeddings for a list of valid parameters. ...
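A hedged end-to-end sketch tying the resource classes above together, assuming this fork keeps the upstream OpenAI 0.x calling convention; the key and engine names are placeholders:

import apacai

apacai.api_key = "..."  # assumption: key configured like the upstream client
resp = apacai.Embedding.create(
    input="hello world",
    engine="text-similarity-davinci-001",
)
print(resp["data"][0]["embedding"][:4])  # first few components of the vector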