python_code stringlengths 0 229k |
|---|
#!/usr/bin/env python3
# This script is for building AARCH64 wheels using AWS EC2 instances.
# To generate binaries for the release follow these steps:
# 1. Update mappings for each of the Domain Libraries by adding new row to a table like this: "v1.11.0": ("0.11.0", "rc1"),
# 2. Run script with following arguments ... |
#!/usr/bin/env python3
# encoding: UTF-8
import os
import subprocess
from pygit2 import Repository
from typing import List
def list_dir(path: str) -> List[str]:
    '''
    Helper for getting the entries of a directory for Python.

    Runs ``ls -1 <path>`` and returns its output split on newlines.

    Note: because the command output ends with a newline, the returned
    list contains a trailing empty string, which callers may rely on.
    '''
    # Fix: the original docstring opened with four quotes (``''''``), which
    # left a stray quote character inside the docstring text.
    return subprocess.check_output(["ls", "-1", path]).decode().split("\n")
def build_ArmComputeL... |
#!/usr/bin/env python3
from auditwheel.patcher import Patchelf
from auditwheel.wheeltools import InWheelCtx
from auditwheel.elfutils import elf_file_filter
from auditwheel.repair import copylib
from auditwheel.lddtree import lddtree
from subprocess import check_call
import os
import shutil
import sys
from tempfile imp... |
from conda.cli.python_api import Commands, run_command
from tabulate import tabulate
from datetime import datetime
import json
PLATFORMS = ["osx-64", "linux-64", "win-64"]
PYTHON_VERSIONS = ["3.10", "3.9", "3.8", "3.7"]
CUDA_CUDNN_VERSION = [
("11.7", "8.5.0"), ("cpu", None)
]
CHANNEL = "pytorch-test"
VERSION = "1... |
#!/usr/bin/env python3.7
from datetime import datetime, time
import json
import requests
import itertools
import sqlite3
import os
import sys
from typing import Callable, Dict, Generator, List, MutableSet, Optional
def get_executor_price_rate(executor):
(etype, eclass) = executor['type'], executor['resource_class... |
#!/usr/bin/env python3
# Tool for analyzing sizes of CUDA kernels for various GPU architectures
import os
import struct
import subprocess
import sys
from tempfile import TemporaryDirectory
from typing import Dict
# Try to auto-import elftools
try:
from elftools.elf.elffile import ELFFile
except ModuleNotFoundErro... |
from collections import defaultdict
from datetime import datetime, timedelta, timezone
import gzip
import multiprocessing
import os
import re
import urllib
from tqdm import tqdm
import botocore
import boto3
S3 = boto3.resource('s3')
CLIENT = boto3.client('s3')
BUCKET = S3.Bucket('pytorch')
class CacheEntry:
_siz... |
import argparse
import boto3
import bz2
import json
import os
import re
import requests
import pandas as pd
from datetime import datetime, timedelta
from tqdm import tqdm
from typing import Any, Dict, Optional, List
S3 = boto3.resource('s3')
CLIENT = boto3.client('s3')
BUCKET = S3.Bucket('ossci-metrics')
GITHUB_API... |
#!/usr/bin/env python3
from typing import Dict, List
from subprocess import check_output
import os
import sys
def get_defined_symbols(fname: str, verbose: bool = False) -> Dict[str, int]:
if verbose:
print(f"Processing {fname}...", end='', flush=True)
if sys.platform == 'darwin':
lines = check... |
#!/usr/bin/env python3
from datetime import datetime, timedelta
from typing import Any, Dict, List, Iterable, Optional, Union
from urllib.request import urlopen, Request
from urllib.error import HTTPError
import json
import enum
import os
class IssueState(enum.Enum):
OPEN = "open"
CLOSED = "closed"
ALL =... |
# -*- coding: utf-8 -*-
"""Helper script to package wheels and relocate binaries."""
import glob
import hashlib
import io
# Standard library imports
import os
import os.path as osp
import platform
import shutil
import subprocess
import sys
import zipfile
from base64 import urlsafe_b64encode
# Third party imports
if... |
# Copyright (c) Meta Platforms, Inc. and affiliates. All Rights Reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
|
# Copyright (c) Meta Platforms, Inc. and affiliates. All Rights Reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import math
import os
import random
import time
import unittest
import numpy as np
import torch
from Crypto.Ciph... |
# Copyright (c) Meta Platforms, Inc. and affiliates. All Rights Reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import torch
from torchcsprng._C import *
try:
from .version import __version__, git_version # noqa: F401... |
import accimage
import numpy as np
import imageio
import os
# Opt-in debug flag: images are saved only when ACCIMAGE_SAVE is set to a
# non-empty value that is not an explicit "off" spelling ('0', 'false', 'no',
# case-insensitive).
ACCIMAGE_SAVE = os.environ.get('ACCIMAGE_SAVE', '')
# Idiom fix: use the string's truthiness directly instead of ``len(...)``,
# and collapse the if/else into a single boolean expression. The resulting
# value of SAVE_IMAGES is identical to the original True/False assignment.
SAVE_IMAGES = bool(ACCIMAGE_SAVE) and ACCIMAGE_SAVE.lower() not in {'0', 'false', 'no'}
def image_to_np(image):
"""
Returns:
np.ndarray: Image conve... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Obtains credentials and passes them as CLI args to stack invocation
"""
import os
import argparse
import json
imp... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import sys
import subprocess
import argparse
import requests
def get_linear_commits(repo_path):
"""
Returns ... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import sys
import subprocess
import argparse
import requests
import json
PARENT_DIRECTORY = os.path.abspath... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from optparse import OptionParser, OptionGroup
import pygraphviz as pgv
import psycopg2
import sys
# Query found he... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from timeit import default_timer as timer
import logging
import boto3
from botocore.exceptions import ClientError
imp... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import json
import base64
WEBAPP_BINARY_NAME = "my-webapp"
ONEOFF_BINARY_NAME = "scan-oneoff"
WEBAPP_INTER... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import re
import sys
import argparse
import json
CURRENT_DIR = os.path.dirname(__file__)
REPO_ROOT_DIR = o... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os, sys
import json
THIS_SCRIPT_DIR = os.path.dirname(__file__)
CREDENTIALS_DIR = os.path.join(THIS_SCRIPT_DI... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import psycopg2
import json
from timeit import default_timer as timer
from multiprocessing.pool import ThreadPool
imp... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import psycopg2
import json
from timeit import default_timer as timer
import logging
import boto3
from botocore.except... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import psycopg2
import json
from timeit import default_timer as timer
import logging
import logan_db_config
# Set u... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Reads the "expanded" version of .circleci/config.yml from an arbitrary
build of each commit from the "master" bran... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import json
import pytest
from hello_world import app
@pytest.fixture()
def apigw_event():
""" Generates API GW ... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
|
#
# Tests :
# For all images
# can import torch and its version == required one
# can import ignite and its version == required one
# for all -vision images
# can import opencv without driver issue
# for all horovod images
# can import horovod and its version == required one
# for all msdp images
# ... |
import ignite.contrib
import ignite.distributed
import ignite.engine
import ignite.exceptions
import ignite.handlers
import ignite.metrics
import ignite.utils
__version__ = "0.5.0"
|
import collections.abc as collections
import functools
import hashlib
import logging
import random
import shutil
import warnings
from pathlib import Path
from typing import Any, Callable, cast, Dict, Optional, TextIO, Tuple, Type, TypeVar, Union
import torch
__all__ = [
"convert_tensor",
"apply_to_tensor",
... |
__all__ = ["NotComputableError"]


class NotComputableError(RuntimeError):
    """Exception raised when a Metric's value cannot be computed.

    Subclasses :class:`RuntimeError`, so existing ``except RuntimeError``
    handlers continue to catch it.
    """
|
# For compatibility
from ignite.utils import apply_to_tensor, apply_to_type, convert_tensor, to_onehot
__all__ = ["apply_to_tensor", "apply_to_type", "convert_tensor", "to_onehot"]
|
from typing import Sequence, Union
import torch
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
__all__ = ["MeanSquaredError"]
class MeanSquaredError(Metric):
r"""Calculates the `mean squared error <https://en.wikipedia.org/wiki/Mea... |
from typing import Callable, Union
import torch
import ignite.distributed as idist
from ignite.engine import Engine, Events
from ignite.handlers.timing import Timer
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
class Frequency(Metric):
"""Provides metrics for the number of exampl... |
from typing import Callable, Sequence, Union
import torch
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
__all__ = ["MultiLabelConfusionMatrix"]
class MultiLabelConfusionMatrix(Metric):
"""Calculates a confusion matrix for multi-la... |
from typing import Callable, Sequence, Union
import torch
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
__all__ = ["TopKCategoricalAccuracy"]
class TopKCategoricalAccuracy(Metric):
"""
Calculates the top-k categorical accuracy... |
from typing import Sequence, Union
import torch
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
__all__ = ["MeanAbsoluteError"]
class MeanAbsoluteError(Metric):
r"""Calculates `the mean absolute error <https://en.wikipedia.org/wiki/... |
import warnings
from typing import Callable, Optional, Sequence, Union
import torch
import torch.nn.functional as F
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
__all__ = ["SSIM"]
class SSIM(Metric):
"""
Computes Structural S... |
from typing import Sequence
import torch
from ignite.metrics.metric import reinit__is_reduced
from ignite.metrics.precision import _BasePrecisionRecall
__all__ = ["Recall"]
class Recall(_BasePrecisionRecall):
r"""Calculates recall for binary, multiclass and multilabel data.
.. math:: \text{Recall} = \frac... |
from typing import Callable, Optional, Union
import torch
from ignite.metrics.metrics_lambda import MetricsLambda
from ignite.metrics.precision import Precision
from ignite.metrics.recall import Recall
__all__ = ["Fbeta"]
def Fbeta(
beta: float,
average: bool = True,
precision: Optional[Precision] = No... |
import math
from typing import Union
import torch
from ignite.metrics.mean_squared_error import MeanSquaredError
__all__ = ["RootMeanSquaredError"]
class RootMeanSquaredError(MeanSquaredError):
r"""Calculates the `root mean squared error <https://en.wikipedia.org/wiki/Root-mean-square_deviation>`_.
.. mat... |
from ignite.metrics.accumulation import Average, GeometricAverage, VariableAccumulation
from ignite.metrics.accuracy import Accuracy
from ignite.metrics.classification_report import ClassificationReport
from ignite.metrics.confusion_matrix import ConfusionMatrix, DiceCoefficient, IoU, JaccardIndex, mIoU
from ignite.met... |
import numbers
from typing import Callable, Tuple, Union
import torch
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
__all__ = ["VariableAccumulation", "GeometricAverage", "Average"]
class VariableAccumulation(Metric):
"""Single va... |
from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from collections.abc import Mapping
from functools import wraps
from numbers import Number
from typing import Any, Callable, cast, Dict, List, Optional, Sequence, Tuple, TYPE_CHECKING, Union
import torch
import ignite.distributed as idist
fr... |
import warnings
from typing import Callable, cast, Optional, Sequence, Union
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics.accuracy import _BaseClassification
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
from ignite.utils ... |
import warnings
from typing import Callable, cast, List, Optional, Tuple, Union
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced
__all__ = ["EpochMetric"]
class EpochMetric(Metric):
"""Class for metrics... |
from typing import Callable, cast, Dict, Sequence, Tuple, Union
import torch
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
__all__ = ["Loss"]
class Loss(Metric):
"""
Calculates the average loss according to the passed loss_fn.... |
from typing import Callable, Sequence, Union
import torch
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
__all__ = ["PSNR"]
class PSNR(Metric):
r"""Computes average
`Peak signal-to-noise ratio (PSNR) <https://en.wikipedia.org/w... |
import warnings
from typing import Any, Callable, cast, Optional, Union
import torch
import ignite.distributed as idist
from ignite.engine import Engine, Events
from ignite.metrics.metric import Metric, MetricUsage, reinit__is_reduced, RunningBatchWise, SingleEpochRunningBatchWise
__all__ = ["RunningAverage"]
clas... |
import itertools
from typing import Any, Callable, Optional, Union
import torch
from ignite.engine import Engine
from ignite.metrics.metric import EpochWise, Metric, MetricUsage, reinit__is_reduced
__all__ = ["MetricsLambda"]
class MetricsLambda(Metric):
"""
Apply a function to other metrics to obtain a ne... |
from typing import Callable, Optional, Sequence, Tuple, Union
import torch
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
__all__ = ["Accuracy"]
class _BaseClassification(Metric):
def __init__(
self,
output_transfor... |
from typing import Callable, Sequence, Union
import torch
from torch.nn.functional import pairwise_distance
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
__all__ = ["MeanPairwiseDistance"]
class MeanPairwiseDistance(Metric):
"""Ca... |
import numbers
from typing import Callable, Optional, Sequence, Tuple, Union
import torch
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
from ignite.metrics.metrics_lambda import MetricsLambda
__all__ = ["ConfusionMatrix", "mIoU", "IoU",... |
import json
from typing import Callable, Collection, Dict, List, Optional, Union
import torch
from ignite.metrics.fbeta import Fbeta
from ignite.metrics.metrics_lambda import MetricsLambda
from ignite.metrics.precision import Precision
from ignite.metrics.recall import Recall
__all__ = ["ClassificationReport"]
def... |
from ignite.metrics.gan.fid import FID
from ignite.metrics.gan.inception_score import InceptionScore
__all__ = [
"InceptionScore",
"FID",
]
|
from typing import Callable, Optional, Union
import torch
from packaging.version import Version
from ignite.metrics.metric import Metric
class InceptionModel(torch.nn.Module):
r"""Inception Model pre-trained on the ImageNet Dataset.
Args:
return_features: set it to `True` if you want the model to r... |
import warnings
from typing import Callable, Optional, Sequence, Union
import torch
from packaging.version import Version
from ignite.metrics.gan.utils import _BaseInceptionMetric, InceptionModel
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
__all__ = [
"FID",
]
if Version(torch.__versi... |
from typing import Callable, Optional, Union
import torch
from ignite.exceptions import NotComputableError
from ignite.metrics.gan.utils import _BaseInceptionMetric, InceptionModel
# These decorators helps with distributed settings
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
__all__ = ["In... |
import math
from typing import Any, Callable, Sequence, Tuple, Union
import torch
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
from ignite.metrics.nlp.utils import modified_precision
__all__ = ["Bleu"]
def _closest_ref_length(referen... |
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from typing import Any, Callable, List, Mapping, Optional, Sequence, Tuple, Union
import torch
from ignite.exceptions import NotComputableError
from ignite.metrics import Metric
# These decorators helps with distributed settings
from ignite.m... |
from ignite.metrics.nlp.bleu import Bleu
from ignite.metrics.nlp.rouge import Rouge, RougeL, RougeN
__all__ = [
"Bleu",
"Rouge",
"RougeN",
"RougeL",
]
|
from collections import Counter
from typing import Any, Sequence, Tuple
__all__ = ["ngrams", "lcs", "modified_precision"]
def ngrams(sequence: Sequence[Any], n: int) -> Counter:
"""
Generate the ngrams from a sequence of items
Args:
sequence: sequence of items
n: n-gram order
Return... |
from ignite.distributed.auto import *
from ignite.distributed.comp_models import native, xla
from ignite.distributed.launcher import Parallel
from ignite.distributed.utils import *
|
import socket
from contextlib import contextmanager
from functools import wraps
from typing import Any, Callable, List, Mapping, Optional, Tuple, Union
import torch
from ignite.distributed.comp_models import (
_SerialModel,
has_hvd_support,
has_native_dist_support,
has_xla_support,
registered_comp... |
from typing import Any, Callable, Dict, Optional
from ignite.distributed import utils as idist
from ignite.utils import setup_logger
__all__ = [
"Parallel",
]
class Parallel:
"""Distributed launcher context manager to simplify distributed configuration setup for multiple backends:
- backends from nativ... |
import warnings
from typing import Any, Iterator, List, Optional, Union
import torch
import torch.nn as nn
from torch.optim.optimizer import Optimizer
from torch.utils.data import DataLoader, Dataset, IterableDataset
from torch.utils.data.distributed import DistributedSampler
from torch.utils.data.sampler import Sampl... |
import warnings
from typing import Any, Callable, cast, List, Mapping, Optional, Tuple
import torch
from ignite.distributed.comp_models.base import ComputationModel
try:
import horovod.torch as hvd
try:
# old API
from horovod.run.runner import run as hvd_mp_spawn
except ImportError:
... |
from typing import List, Tuple, Type, TYPE_CHECKING, Union
from ignite.distributed.comp_models.base import _SerialModel
from ignite.distributed.comp_models.horovod import has_hvd_support
from ignite.distributed.comp_models.native import has_native_dist_support
from ignite.distributed.comp_models.xla import has_xla_sup... |
import os
import re
import subprocess
import warnings
from typing import Any, Callable, cast, Dict, List, Mapping, Optional, Tuple, Union
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
from packaging.version import Version
from ignite.distributed.comp_models.base import ComputationMo... |
from typing import Any, Callable, cast, List, Mapping, Optional, Tuple
import torch
from ignite.distributed.comp_models.base import ComputationModel
try:
import torch_xla
import torch_xla.core.xla_model as xm
import torch_xla.distributed.xla_multiprocessing as xmp
has_xla_support = True
except Impor... |
from abc import ABCMeta, abstractmethod
from numbers import Number
from typing import Any, Callable, cast, List, Optional, Union
import torch
class ComputationModel(metaclass=ABCMeta):
"""Base class for distributed computation models and defines interface methods.
This class is public and should be used for ... |
# -*- coding: utf-8 -*-
import warnings
from typing import Any, Dict, List, Tuple, Union
import torch
from ignite.engine import Engine, EventEnum, Events
from ignite.metrics import Metric
class GpuInfo(Metric):
"""Provides GPU information: a) used memory percentage, b) gpu utilization percentage values as Metri... |
from typing import Any, Callable, cast, Tuple, Union
import torch
from ignite import distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import EpochMetric
def roc_auc_compute_fn(y_preds: torch.Tensor, y_targets: torch.Tensor) -> float:
from sklearn.metrics import roc_auc_s... |
from typing import Any, Callable, cast, Tuple, Union
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import EpochMetric
def precision_recall_curve_compute_fn(y_preds: torch.Tensor, y_targets: torch.Tensor) -> Tuple[Any, Any, Any]:
try:
... |
import ignite.contrib.metrics.regression
from ignite.contrib.metrics.average_precision import AveragePrecision
from ignite.contrib.metrics.cohen_kappa import CohenKappa
from ignite.contrib.metrics.gpu_info import GpuInfo
from ignite.contrib.metrics.precision_recall_curve import PrecisionRecallCurve
from ignite.contrib.... |
from typing import Callable, Union
import torch
from ignite.metrics import EpochMetric
def average_precision_compute_fn(y_preds: torch.Tensor, y_targets: torch.Tensor) -> float:
from sklearn.metrics import average_precision_score
y_true = y_targets.cpu().numpy()
y_pred = y_preds.cpu().numpy()
retur... |
from typing import Callable, Optional, Union
import torch
from ignite.metrics import EpochMetric
class CohenKappa(EpochMetric):
"""Compute different types of Cohen's Kappa: Non-Wieghted, Linear, Quadratic.
Accumulating predictions and the ground-truth during an epoch and applying
`sklearn.metrics.cohen_... |
from abc import abstractmethod
from typing import Tuple
import torch
from ignite.metrics import Metric
from ignite.metrics.metric import reinit__is_reduced
def _check_output_shapes(output: Tuple[torch.Tensor, torch.Tensor]) -> None:
y_pred, y = output
c1 = y_pred.ndimension() == 2 and y_pred.shape[1] == 1
... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class MeanAbsoluteRelativeError(_BaseRegression):
r"""Calculate Mean Absolute Relative ... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class FractionalBias(_BaseRegression):
r"""Calculates the Fractional Bias.
.. math... |
from typing import Callable, Union
import torch
from ignite.contrib.metrics.regression._base import _torch_median
from ignite.metrics import EpochMetric
def median_absolute_percentage_error_compute_fn(y_pred: torch.Tensor, y: torch.Tensor) -> float:
e = torch.abs(y.view_as(y_pred) - y_pred) / torch.abs(y.view_... |
from typing import cast, List, Tuple
import torch
import ignite.distributed as idist
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced
class GeometricMeanRelativeAbsoluteError(_BaseRegression):
... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class MaximumAbsoluteError(_BaseRegression):
r"""Calculates the Maximum Absolute Error.... |
from ignite.contrib.metrics.regression.canberra_metric import CanberraMetric
from ignite.contrib.metrics.regression.fractional_absolute_error import FractionalAbsoluteError
from ignite.contrib.metrics.regression.fractional_bias import FractionalBias
from ignite.contrib.metrics.regression.geometric_mean_absolute_error i... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class MeanError(_BaseRegression):
r"""Calculates the Mean Error.
.. math::
... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class GeometricMeanAbsoluteError(_BaseRegression):
r"""Calculates the Geometric Mean Ab... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class R2Score(_BaseRegression):
r"""Calculates the R-Squared, the
`coefficient of d... |
from typing import Callable, Union
import torch
from ignite.contrib.metrics.regression._base import _torch_median
from ignite.metrics import EpochMetric
def median_absolute_error_compute_fn(y_pred: torch.Tensor, y: torch.Tensor) -> float:
    """Return the median of the element-wise absolute errors between
    ``y_pred`` and ``y`` (``y`` is reshaped to match ``y_pred``)."""
    absolute_errors = torch.abs(y.view_as(y_pred) - y_pred)
    return _torch_median(absolute_errors)
... |
from typing import Callable, Union
import torch
from ignite.contrib.metrics.regression._base import _torch_median
from ignite.metrics import EpochMetric
def median_relative_absolute_error_compute_fn(y_pred: torch.Tensor, y: torch.Tensor) -> float:
e = torch.abs(y.view_as(y_pred) - y_pred) / torch.abs(y.view_as... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class CanberraMetric(_BaseRegression):
r"""Calculates the Canberra Metric.
.. math::
\text{CM} = \sum_{j=1}^n\frac{|A_j - P... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class WaveHedgesDistance(_BaseRegression):
r"""Calculates the Wave Hedges Distance.
.. math::
\text{WHD} = \sum_{j=1}^n\fra... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class FractionalAbsoluteError(_BaseRegression):
r"""Calculates the Fractional Absolute ... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class ManhattanDistance(_BaseRegression):
r"""Calculates the Manhattan Distance.
.. math::
\text{MD} = \sum_{j=1}^n |A_j - ... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class MeanNormalizedBias(_BaseRegression):
r"""Calculates the Mean Normalized Bias.
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.