python_code stringlengths 0 229k |
|---|
import math
from typing import Any, Callable, Sequence, Tuple, Union
import torch
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
from ignite.metrics.nlp.utils import modified_precision
__all__ = ["Bleu"]
def _closest_ref_length(referen... |
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from typing import Any, Callable, List, Mapping, Optional, Sequence, Tuple, Union
import torch
from ignite.exceptions import NotComputableError
from ignite.metrics import Metric
# These decorators helps with distributed settings
from ignite.m... |
from ignite.metrics.nlp.bleu import Bleu
from ignite.metrics.nlp.rouge import Rouge, RougeL, RougeN
__all__ = [
"Bleu",
"Rouge",
"RougeN",
"RougeL",
]
|
from collections import Counter
from typing import Any, Sequence, Tuple
__all__ = ["ngrams", "lcs", "modified_precision"]
def ngrams(sequence: Sequence[Any], n: int) -> Counter:
"""
Generate the ngrams from a sequence of items
Args:
sequence: sequence of items
n: n-gram order
Return... |
from ignite.distributed.auto import *
from ignite.distributed.comp_models import native, xla
from ignite.distributed.launcher import Parallel
from ignite.distributed.utils import *
|
import socket
from contextlib import contextmanager
from functools import wraps
from typing import Any, Callable, List, Mapping, Optional, Tuple, Union
import torch
from ignite.distributed.comp_models import (
_SerialModel,
has_hvd_support,
has_native_dist_support,
has_xla_support,
registered_comp... |
from typing import Any, Callable, Dict, Optional
from ignite.distributed import utils as idist
from ignite.utils import setup_logger
__all__ = [
"Parallel",
]
class Parallel:
"""Distributed launcher context manager to simplify distributed configuration setup for multiple backends:
- backends from nativ... |
import warnings
from typing import Any, Iterator, List, Optional, Union
import torch
import torch.nn as nn
from torch.optim.optimizer import Optimizer
from torch.utils.data import DataLoader, Dataset, IterableDataset
from torch.utils.data.distributed import DistributedSampler
from torch.utils.data.sampler import Sampl... |
import warnings
from typing import Any, Callable, cast, List, Mapping, Optional, Tuple
import torch
from ignite.distributed.comp_models.base import ComputationModel
try:
import horovod.torch as hvd
try:
# old API
from horovod.run.runner import run as hvd_mp_spawn
except ImportError:
... |
from typing import List, Tuple, Type, TYPE_CHECKING, Union
from ignite.distributed.comp_models.base import _SerialModel
from ignite.distributed.comp_models.horovod import has_hvd_support
from ignite.distributed.comp_models.native import has_native_dist_support
from ignite.distributed.comp_models.xla import has_xla_sup... |
import os
import re
import subprocess
import warnings
from typing import Any, Callable, cast, Dict, List, Mapping, Optional, Tuple, Union
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
from packaging.version import Version
from ignite.distributed.comp_models.base import ComputationMo... |
from typing import Any, Callable, cast, List, Mapping, Optional, Tuple
import torch
from ignite.distributed.comp_models.base import ComputationModel
try:
import torch_xla
import torch_xla.core.xla_model as xm
import torch_xla.distributed.xla_multiprocessing as xmp
has_xla_support = True
except Impor... |
from abc import ABCMeta, abstractmethod
from numbers import Number
from typing import Any, Callable, cast, List, Optional, Union
import torch
class ComputationModel(metaclass=ABCMeta):
"""Base class for distributed computation models and defines interface methods.
This class is public and should be used for ... |
# -*- coding: utf-8 -*-
import warnings
from typing import Any, Dict, List, Tuple, Union
import torch
from ignite.engine import Engine, EventEnum, Events
from ignite.metrics import Metric
class GpuInfo(Metric):
"""Provides GPU information: a) used memory percentage, b) gpu utilization percentage values as Metri... |
from typing import Any, Callable, cast, Tuple, Union
import torch
from ignite import distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import EpochMetric
def roc_auc_compute_fn(y_preds: torch.Tensor, y_targets: torch.Tensor) -> float:
from sklearn.metrics import roc_auc_s... |
from typing import Any, Callable, cast, Tuple, Union
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import EpochMetric
def precision_recall_curve_compute_fn(y_preds: torch.Tensor, y_targets: torch.Tensor) -> Tuple[Any, Any, Any]:
try:
... |
import ignite.contrib.metrics.regression
from ignite.contrib.metrics.average_precision import AveragePrecision
from ignite.contrib.metrics.cohen_kappa import CohenKappa
from ignite.contrib.metrics.gpu_info import GpuInfo
from ignite.contrib.metrics.precision_recall_curve import PrecisionRecallCurve
from ignite.contrib.... |
from typing import Callable, Union
import torch
from ignite.metrics import EpochMetric
def average_precision_compute_fn(y_preds: torch.Tensor, y_targets: torch.Tensor) -> float:
from sklearn.metrics import average_precision_score
y_true = y_targets.cpu().numpy()
y_pred = y_preds.cpu().numpy()
retur... |
from typing import Callable, Optional, Union
import torch
from ignite.metrics import EpochMetric
class CohenKappa(EpochMetric):
"""Compute different types of Cohen's Kappa: Non-Wieghted, Linear, Quadratic.
Accumulating predictions and the ground-truth during an epoch and applying
`sklearn.metrics.cohen_... |
from abc import abstractmethod
from typing import Tuple
import torch
from ignite.metrics import Metric
from ignite.metrics.metric import reinit__is_reduced
def _check_output_shapes(output: Tuple[torch.Tensor, torch.Tensor]) -> None:
y_pred, y = output
c1 = y_pred.ndimension() == 2 and y_pred.shape[1] == 1
... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class MeanAbsoluteRelativeError(_BaseRegression):
r"""Calculate Mean Absolute Relative ... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class FractionalBias(_BaseRegression):
r"""Calculates the Fractional Bias.
.. math... |
from typing import Callable, Union
import torch
from ignite.contrib.metrics.regression._base import _torch_median
from ignite.metrics import EpochMetric
def median_absolute_percentage_error_compute_fn(y_pred: torch.Tensor, y: torch.Tensor) -> float:
e = torch.abs(y.view_as(y_pred) - y_pred) / torch.abs(y.view_... |
from typing import cast, List, Tuple
import torch
import ignite.distributed as idist
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced
class GeometricMeanRelativeAbsoluteError(_BaseRegression):
... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class MaximumAbsoluteError(_BaseRegression):
r"""Calculates the Maximum Absolute Error.... |
from ignite.contrib.metrics.regression.canberra_metric import CanberraMetric
from ignite.contrib.metrics.regression.fractional_absolute_error import FractionalAbsoluteError
from ignite.contrib.metrics.regression.fractional_bias import FractionalBias
from ignite.contrib.metrics.regression.geometric_mean_absolute_error i... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class MeanError(_BaseRegression):
r"""Calculates the Mean Error.
.. math::
... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class GeometricMeanAbsoluteError(_BaseRegression):
r"""Calculates the Geometric Mean Ab... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class R2Score(_BaseRegression):
r"""Calculates the R-Squared, the
`coefficient of d... |
from typing import Callable, Union
import torch
from ignite.contrib.metrics.regression._base import _torch_median
from ignite.metrics import EpochMetric
def median_absolute_error_compute_fn(y_pred: torch.Tensor, y: torch.Tensor) -> float:
    """Compute the median of the absolute errors between predictions and targets.

    Args:
        y_pred: predicted values.
        y: ground-truth values; reshaped via ``view_as`` to match ``y_pred``.

    Returns:
        The median absolute error, as computed by ``_torch_median``.
    """
    absolute_errors = torch.abs(y.view_as(y_pred) - y_pred)
    return _torch_median(absolute_errors)
... |
from typing import Callable, Union
import torch
from ignite.contrib.metrics.regression._base import _torch_median
from ignite.metrics import EpochMetric
def median_relative_absolute_error_compute_fn(y_pred: torch.Tensor, y: torch.Tensor) -> float:
e = torch.abs(y.view_as(y_pred) - y_pred) / torch.abs(y.view_as... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class CanberraMetric(_BaseRegression):
r"""Calculates the Canberra Metric.
.. math::
\text{CM} = \sum_{j=1}^n\frac{|A_j - P... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class WaveHedgesDistance(_BaseRegression):
r"""Calculates the Wave Hedges Distance.
.. math::
\text{WHD} = \sum_{j=1}^n\fra... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class FractionalAbsoluteError(_BaseRegression):
r"""Calculates the Fractional Absolute ... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class ManhattanDistance(_BaseRegression):
r"""Calculates the Manhattan Distance.
.. math::
\text{MD} = \sum_{j=1}^n |A_j - ... |
from typing import Tuple
import torch
from ignite.contrib.metrics.regression._base import _BaseRegression
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import reinit__is_reduced, sync_all_reduce
class MeanNormalizedBias(_BaseRegression):
r"""Calculates the Mean Normalized Bias.
... |
from ignite.contrib.engines.tbptt import create_supervised_tbptt_trainer, Tbptt_Events
|
import numbers
import warnings
from functools import partial
from typing import Any, Callable, cast, Dict, Iterable, Mapping, Optional, Sequence, Union
import torch
import torch.nn as nn
from torch.optim.optimizer import Optimizer
from torch.utils.data.distributed import DistributedSampler
# https://github.com/pytorc... |
# coding: utf-8
import collections.abc as collections
from typing import Callable, Mapping, Optional, Sequence, Union
import torch
import torch.nn as nn
from torch.optim.optimizer import Optimizer
from ignite.engine import _prepare_batch, Engine, EventEnum
from ignite.utils import apply_to_tensor
class Tbptt_Events... |
""" ``ignite.contrib.handlers.param_scheduler`` was moved to ``ignite.handlers.param_scheduler``.
Note:
``ignite.contrib.handlers.param_scheduler`` was moved to ``ignite.handlers.param_scheduler``.
Please refer to :mod:`~ignite.handlers.param_scheduler`.
"""
import warnings
removed_in = "0.6.0"
deprecation_war... |
"""MLflow logger and its helper handlers."""
import warnings
from typing import Any, Callable, List, Optional, Union
from torch.optim import Optimizer
from ignite.contrib.handlers.base_logger import BaseLogger, BaseOptimizerParamsHandler, BaseOutputHandler
from ignite.engine import Engine, Events
from ignite.handlers... |
"""Polyaxon logger and its helper handlers."""
from typing import Any, Callable, List, Optional, Union
from torch.optim import Optimizer
from ignite.contrib.handlers.base_logger import BaseLogger, BaseOptimizerParamsHandler, BaseOutputHandler
from ignite.engine import Engine, Events
from ignite.handlers import global... |
"""TensorBoard logger and its helper handlers."""
from typing import Any, Callable, List, Optional, Union
from torch.optim import Optimizer
from ignite.contrib.handlers.base_logger import (
BaseLogger,
BaseOptimizerParamsHandler,
BaseOutputHandler,
BaseWeightsHandler,
BaseWeightsScalarHandler,
)
f... |
"""WandB logger and its helper handlers."""
from typing import Any, Callable, List, Optional, Union
from torch.optim import Optimizer
from ignite.contrib.handlers.base_logger import BaseLogger, BaseOptimizerParamsHandler, BaseOutputHandler
from ignite.engine import Engine, Events
from ignite.handlers import global_st... |
"""Visdom logger and its helper handlers."""
import os
from typing import Any, Callable, cast, Dict, List, Optional, Union
import torch
import torch.nn as nn
from torch.optim import Optimizer
from ignite.contrib.handlers.base_logger import (
BaseLogger,
BaseOptimizerParamsHandler,
BaseOutputHandler,
B... |
""" ``ignite.contrib.handlers.lr_finder`` was moved to ``ignite.handlers.lr_finder``.
Note:
``ignite.contrib.handlers.lr_finder`` was moved to ``ignite.handlers.lr_finder``.
Please refer to :mod:`~ignite.handlers.lr_finder`.
"""
import warnings
removed_in = "0.6.0"
deprecation_warning = (
f"{__file__} has ... |
from ignite.contrib.handlers.clearml_logger import ClearMLLogger
from ignite.contrib.handlers.mlflow_logger import MLflowLogger
from ignite.contrib.handlers.neptune_logger import NeptuneLogger
from ignite.contrib.handlers.polyaxon_logger import PolyaxonLogger
from ignite.contrib.handlers.tensorboard_logger import Tenso... |
"""Base logger and its helper handlers."""
import numbers
import warnings
from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import torch
import torch.nn as nn
from torch.optim import Optimizer
from ignite.engine i... |
""" ``ignite.contrib.handlers.time_profilers.py`` was moved to ``ignite.handlers.time_profilers``.
Note:
``ignite.contrib.handlers.time_profilers`` was moved to ``ignite.handlers.time_profilers``.
Please refer to :mod:`~ignite.handlers.time_profilers`.
"""
import warnings
removed_in = "0.6.0"
deprecation_warni... |
"""ClearML logger and its helper handlers."""
import os
import tempfile
import warnings
from collections import defaultdict
from datetime import datetime
from enum import Enum
from typing import Any, Callable, DefaultDict, List, Mapping, Optional, Tuple, Type, Union
from torch.optim import Optimizer
import ignite.dis... |
"""Neptune logger and its helper handlers."""
import tempfile
import warnings
from typing import Any, Callable, List, Mapping, Optional, Union
import torch
from torch.optim import Optimizer
import ignite.distributed as idist
from ignite import __version__
from ignite.contrib.handlers.base_logger import (
BaseLogg... |
# -*- coding: utf-8 -*-
"""TQDM logger."""
from collections import OrderedDict
from typing import Any, Callable, List, Optional, Union
from ignite.contrib.handlers.base_logger import BaseLogger, BaseOutputHandler
from ignite.engine import Engine, Events
from ignite.engine.events import CallableEventWithFilter, Removab... |
import random
import warnings
from collections import OrderedDict
from functools import wraps
from typing import Any, Callable, Generator, Iterator, List, Optional
import torch
from torch.utils.data import DataLoader
from torch.utils.data.sampler import BatchSampler
from ignite.engine.engine import Engine
from ignite... |
import numbers
import warnings
import weakref
from collections.abc import Sequence
from enum import Enum
from types import DynamicClassAttribute
from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, TYPE_CHECKING, Union
from torch.utils.data import DataLoader
from ignite.engine.utils import _che... |
from collections.abc import Mapping
from typing import Any, Callable, Dict, Optional, Sequence, Tuple, Union
import torch
import ignite.distributed as idist
from ignite.engine.deterministic import DeterministicEngine
from ignite.engine.engine import Engine
from ignite.engine.events import CallableEventWithFilter, Eve... |
import functools
import logging
import math
import time
import warnings
import weakref
from collections import defaultdict, OrderedDict
from collections.abc import Mapping
from typing import Any, Callable, Dict, Generator, Iterable, Iterator, List, Optional, Tuple, Union
from torch.utils.data import DataLoader
from i... |
import inspect
from typing import Any, Callable, Tuple, Union
def _check_signature(fn: Callable, fn_description: str, *args: Any, **kwargs: Any) -> None:
# if handler with filter, check the handler rather than the decorator
if hasattr(fn, "_parent"):
signature = inspect.signature(fn._parent())
els... |
import warnings
from copy import deepcopy
from typing import Optional, Union
import torch.nn as nn
from ignite.engine import CallableEventWithFilter, Engine, Events, EventsList
from ignite.handlers.param_scheduler import BaseParamScheduler
from ignite.handlers.state_param_scheduler import LambdaStateScheduler
__all_... |
import itertools
import math
import numbers
import tempfile
import warnings
from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from copy import copy
from pathlib import Path
from typing import Any, cast, Dict, List, Mapping, Optional, Sequence, Tuple, Type, Union
import torch
from torch.optim.... |
import collections.abc as collections
import numbers
import os
import stat
import tempfile
import warnings
from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from pathlib import Path
from typing import Any, Callable, Dict, List, Mapping, NamedTuple, Optional, Tuple, Union
import torch
import t... |
import logging
import numbers
from typing import Callable, Union
import torch
from ignite.engine import Engine
from ignite.utils import apply_to_type, setup_logger
__all__ = ["TerminateOnNan"]
class TerminateOnNan:
"""TerminateOnNan handler can be used to stop the training if the `process_function`'s output
... |
# coding: utf-8
import contextlib
import logging
import tempfile
import warnings
from math import ceil
from pathlib import Path
from typing import Any, Callable, Dict, List, Mapping, Optional, Union
import torch
from torch.optim import Optimizer
from torch.optim.lr_scheduler import _LRScheduler
import ignite.distribu... |
from typing import Any, Callable, Optional
from ignite.engine import Engine
from ignite.engine.events import Events
from ignite.handlers.checkpoint import Checkpoint, DiskSaver, ModelCheckpoint
from ignite.handlers.early_stopping import EarlyStopping
from ignite.handlers.ema_handler import EMAHandler
from ignite.handl... |
from typing import Any, Callable, List, Optional
from ignite.engine import Engine, Events
class EpochOutputStore:
"""EpochOutputStore handler to save output prediction and target history
after every epoch, could be useful for e.g., visualization purposes.
Note:
This can potentially lead to a mem... |
import functools
from collections import OrderedDict
from typing import Any, Callable, cast, Dict, List, Mapping, Sequence, Tuple, Union
import torch
from ignite.engine import Engine, EventEnum, Events
from ignite.handlers.timing import Timer
class BasicTimeProfiler:
"""
BasicTimeProfiler can be used to pro... |
import numbers
import warnings
from bisect import bisect_right
from typing import Any, List, Sequence, Tuple, Union
from ignite.engine import CallableEventWithFilter, Engine, Events, EventsList
from ignite.handlers.param_scheduler import BaseParamScheduler
class StateParamScheduler(BaseParamScheduler):
"""An abs... |
import time
from typing import Optional
from ignite.engine import Engine
__all__ = ["TimeLimit"]
from ignite.utils import setup_logger
class TimeLimit:
"""TimeLimit handler can be used to control training time for computing environments where session time is limited.
Timer starts when handler is created an... |
from time import perf_counter
from typing import Any, Optional
from ignite.engine import Engine, Events
__all__ = ["Timer"]
class Timer:
"""Timer object can be used to measure (average) time between events.
Args:
average: if True, then when ``.value()`` method is called, the returned value
... |
from collections import OrderedDict
from typing import Callable, cast, Mapping, Optional
from ignite.base import Serializable
from ignite.engine import Engine
from ignite.utils import setup_logger
__all__ = ["EarlyStopping"]
class EarlyStopping(Serializable):
"""EarlyStopping handler can be used to stop the tra... |
from collections import OrderedDict
from collections.abc import Mapping
from typing import Tuple
class Serializable:
_state_dict_all_req_keys: Tuple = ()
_state_dict_one_of_opt_keys: Tuple = ()
    def state_dict(self) -> OrderedDict:
        """Return the object's state as an ``OrderedDict``.

        Subclasses must override this; the base implementation is abstract-by-convention
        and always raises.

        Raises:
            NotImplementedError: always, in this base class.
        """
        raise NotImplementedError
def load_state_dict(self, state_dic... |
from ignite.base.mixins import Serializable
|
# Needed to collect coverage data
|
import logging
import sys
from collections import namedtuple
import pytest
import torch
from packaging.version import Version
from ignite.engine import Engine, Events
from ignite.utils import convert_tensor, deprecated, hash_checkpoint, setup_logger, to_onehot
def test_convert_tensor():
x = torch.tensor([0.0])
... |
import functools
import os
import shutil
import sys
import tempfile
import time
from pathlib import Path
import pytest
import torch
import torch.distributed as dist
import ignite.distributed as idist
@pytest.fixture(
params=[
"cpu",
pytest.param("cuda", marks=pytest.mark.skipif(not torch.cuda.is... |
import torch
def cpu_and_maybe_cuda():
    """Return the device strings to run tests on.

    Always includes ``"cpu"``; appends ``"cuda"`` when a CUDA device is available.
    """
    devices = ["cpu"]
    if torch.cuda.is_available():
        devices.append("cuda")
    return tuple(devices)
|
import warnings
from functools import partial
from itertools import accumulate
import numpy as np
import pytest
import torch
import ignite.distributed as idist
from ignite.engine import Engine, Events
from ignite.metrics import Accuracy, RunningAverage
from ignite.metrics.metric import RunningBatchWise, RunningEpochW... |
from typing import Sequence, Union
import numpy as np
import pytest
import torch
from skimage.metrics import structural_similarity as ski_ssim
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import SSIM
def test_zero_div():
ssim = SSIM(data_range=1.0)
... |
import numbers
import os
from unittest.mock import MagicMock
import numpy as np
import pytest
import torch
from pytest import approx, raises
from sklearn.metrics import confusion_matrix, f1_score, precision_score, recall_score
import ignite.distributed as idist
from ignite.engine import Engine, Events, State
from ign... |
import os
import numpy as np
import pytest
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import RootMeanSquaredError
def test_zero_sample():
rmse = RootMeanSquaredError()
with pytest.raises(
NotComputableError, match=r"MeanSquare... |
import dill
from ignite.metrics import Metric
class Accumulation(Metric):
    def __init__(self):
        # Running total folded in by update(); initialized before the base
        # Metric.__init__ runs so any reset it triggers sees the attribute.
        self.value = 0
        super(Accumulation, self).__init__()
    def reset(self):
        # Restore the accumulator to its initial state.
        self.value = 0
    def compute(self):
        # Return the total accumulated so far.
        return self.value
def update(self, output):
self.value += out... |
import json
import os
import pytest
import torch
import ignite.distributed as idist
from ignite.engine import Engine
from ignite.metrics.classification_report import ClassificationReport
def _test_integration_multiclass(device, output_dict):
rank = idist.get_rank()
def _test(metric_device, n_classes, label... |
import os
import numpy as np
import pytest
import torch
from sklearn.metrics import accuracy_score, confusion_matrix, precision_score, recall_score
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import ConfusionMatrix, IoU, JaccardIndex, mIoU
from ignite.metric... |
import warnings
import pytest
import torch
from sklearn.exceptions import UndefinedMetricWarning
from sklearn.metrics import precision_score
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import Precision
torch.manual_seed(12)
def test_no_update():
preci... |
import numpy as np
import pytest
import torch
from sklearn.metrics import multilabel_confusion_matrix
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics.multilabel_confusion_matrix import MultiLabelConfusionMatrix
torch.manual_seed(12)
def test_no_update():
c... |
import os
import numpy as np
import pytest
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import MeanPairwiseDistance
def test_zero_sample():
mpd = MeanPairwiseDistance()
with pytest.raises(
NotComputableError, match=r"MeanAbsolut... |
import pytest
import torch
import ignite.distributed as idist
from ignite.engine import Engine
from ignite.metrics import EpochMetric
from ignite.metrics.epoch_metric import EpochMetricWarning, NotComputableError
def test_epoch_metric_wrong_setup_or_input():
# Wrong compute function
with pytest.raises(TypeEr... |
# Needed to collect coverage data
|
import os
import numpy as np
import pytest
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import MeanAbsoluteError
def test_no_update():
mae = MeanAbsoluteError()
with pytest.raises(
NotComputableError, match=r"MeanAbsoluteError m... |
import os
import numpy as np
import pytest
import torch
from pytest import approx
from sklearn.metrics import f1_score, precision_score, recall_score
import ignite.distributed as idist
from ignite.engine import Engine
from ignite.metrics import Metric, MetricsLambda, Precision, Recall
class ListGatherMetric(Metric)... |
import numpy as np
import pytest
import torch
from skimage.metrics import peak_signal_noise_ratio as ski_psnr
import ignite.distributed as idist
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
from ignite.metrics import PSNR
from ignite.utils import manual_seed
def test_zero_div():
... |
import os
import pytest
import torch
from sklearn.metrics import accuracy_score
import ignite.distributed as idist
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
from ignite.metrics import Accuracy
torch.manual_seed(12)
def test_no_update():
acc = Accuracy()
with pytest.r... |
import os
import sys
import time
import pytest
import torch
import ignite.distributed as idist
from ignite.engine import Engine, Events
from ignite.metrics import Frequency
if sys.platform.startswith("darwin"):
pytest.skip("Skip if on MacOS", allow_module_level=True)
@pytest.mark.skipif(sys.platform.startswith... |
import os
import numpy as np
import pytest
import torch
from torch.nn import Linear
from torch.optim import SGD
import ignite.distributed as idist
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
from ignite.metrics.accumulation import Average, GeometricAverage, VariableAccumulation
... |
import os
from unittest.mock import MagicMock
import pytest
import torch
from numpy.testing import assert_almost_equal
from torch import nn
from torch.nn.functional import nll_loss
import ignite.distributed as idist
from ignite.engine import State
from ignite.exceptions import NotComputableError
from ignite.metrics i... |
import os
import warnings
import pytest
import torch
from sklearn.exceptions import UndefinedMetricWarning
from sklearn.metrics import recall_score
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import Recall
torch.manual_seed(12)
def test_no_update():
r... |
import os
import numpy as np
import pytest
import torch
from sklearn.metrics import fbeta_score
import ignite.distributed as idist
from ignite.engine import Engine
from ignite.metrics import Fbeta, Precision, Recall
torch.manual_seed(12)
def test_wrong_inputs():
with pytest.raises(ValueError, match=r"Beta shou... |
import os
import numpy as np
import pytest
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import MeanSquaredError
def test_zero_sample():
mse = MeanSquaredError()
with pytest.raises(
NotComputableError, match=r"MeanSquaredError mu... |
import os
import pytest
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import TopKCategoricalAccuracy
def test_zero_div():
acc = TopKCategoricalAccuracy(2)
with pytest.raises(
NotComputableError, match=r"TopKCategoricalAccuracy mu... |
from typing import Callable, Optional, Union
from unittest.mock import patch
import pytest
import torch
import torchvision
from ignite.metrics.gan.utils import _BaseInceptionMetric, InceptionModel
class DummyInceptionMetric(_BaseInceptionMetric):
def __init__(
self,
num_features: Optional[int] =... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.