python_code stringlengths 0 229k |
|---|
from ignite.contrib.engines.tbptt import create_supervised_tbptt_trainer, Tbptt_Events
|
import numbers
import warnings
from functools import partial
from typing import Any, Callable, cast, Dict, Iterable, Mapping, Optional, Sequence, Union
import torch
import torch.nn as nn
from torch.optim.optimizer import Optimizer
from torch.utils.data.distributed import DistributedSampler
# https://github.com/pytorc... |
# coding: utf-8
import collections.abc as collections
from typing import Callable, Mapping, Optional, Sequence, Union
import torch
import torch.nn as nn
from torch.optim.optimizer import Optimizer
from ignite.engine import _prepare_batch, Engine, EventEnum
from ignite.utils import apply_to_tensor
class Tbptt_Events... |
""" ``ignite.contrib.handlers.param_scheduler`` was moved to ``ignite.handlers.param_scheduler``.
Note:
``ignite.contrib.handlers.param_scheduler`` was moved to ``ignite.handlers.param_scheduler``.
Please refer to :mod:`~ignite.handlers.param_scheduler`.
"""
import warnings
removed_in = "0.6.0"
deprecation_war... |
"""MLflow logger and its helper handlers."""
import warnings
from typing import Any, Callable, List, Optional, Union
from torch.optim import Optimizer
from ignite.contrib.handlers.base_logger import BaseLogger, BaseOptimizerParamsHandler, BaseOutputHandler
from ignite.engine import Engine, Events
from ignite.handlers... |
"""Polyaxon logger and its helper handlers."""
from typing import Any, Callable, List, Optional, Union
from torch.optim import Optimizer
from ignite.contrib.handlers.base_logger import BaseLogger, BaseOptimizerParamsHandler, BaseOutputHandler
from ignite.engine import Engine, Events
from ignite.handlers import global... |
"""TensorBoard logger and its helper handlers."""
from typing import Any, Callable, List, Optional, Union
from torch.optim import Optimizer
from ignite.contrib.handlers.base_logger import (
BaseLogger,
BaseOptimizerParamsHandler,
BaseOutputHandler,
BaseWeightsHandler,
BaseWeightsScalarHandler,
)
f... |
"""WandB logger and its helper handlers."""
from typing import Any, Callable, List, Optional, Union
from torch.optim import Optimizer
from ignite.contrib.handlers.base_logger import BaseLogger, BaseOptimizerParamsHandler, BaseOutputHandler
from ignite.engine import Engine, Events
from ignite.handlers import global_st... |
"""Visdom logger and its helper handlers."""
import os
from typing import Any, Callable, cast, Dict, List, Optional, Union
import torch
import torch.nn as nn
from torch.optim import Optimizer
from ignite.contrib.handlers.base_logger import (
BaseLogger,
BaseOptimizerParamsHandler,
BaseOutputHandler,
B... |
""" ``ignite.contrib.handlers.lr_finder`` was moved to ``ignite.handlers.lr_finder``.
Note:
``ignite.contrib.handlers.lr_finder`` was moved to ``ignite.handlers.lr_finder``.
Please refer to :mod:`~ignite.handlers.lr_finder`.
"""
import warnings
removed_in = "0.6.0"
deprecation_warning = (
f"{__file__} has ... |
from ignite.contrib.handlers.clearml_logger import ClearMLLogger
from ignite.contrib.handlers.mlflow_logger import MLflowLogger
from ignite.contrib.handlers.neptune_logger import NeptuneLogger
from ignite.contrib.handlers.polyaxon_logger import PolyaxonLogger
from ignite.contrib.handlers.tensorboard_logger import Tenso... |
"""Base logger and its helper handlers."""
import numbers
import warnings
from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import torch
import torch.nn as nn
from torch.optim import Optimizer
from ignite.engine i... |
""" ``ignite.contrib.handlers.time_profilers.py`` was moved to ``ignite.handlers.time_profilers``.
Note:
``ignite.contrib.handlers.time_profilers`` was moved to ``ignite.handlers.time_profilers``.
Please refer to :mod:`~ignite.handlers.time_profilers`.
"""
import warnings
removed_in = "0.6.0"
deprecation_warni... |
"""ClearML logger and its helper handlers."""
import os
import tempfile
import warnings
from collections import defaultdict
from datetime import datetime
from enum import Enum
from typing import Any, Callable, DefaultDict, List, Mapping, Optional, Tuple, Type, Union
from torch.optim import Optimizer
import ignite.dis... |
"""Neptune logger and its helper handlers."""
import tempfile
import warnings
from typing import Any, Callable, List, Mapping, Optional, Union
import torch
from torch.optim import Optimizer
import ignite.distributed as idist
from ignite import __version__
from ignite.contrib.handlers.base_logger import (
BaseLogg... |
# -*- coding: utf-8 -*-
"""TQDM logger."""
from collections import OrderedDict
from typing import Any, Callable, List, Optional, Union
from ignite.contrib.handlers.base_logger import BaseLogger, BaseOutputHandler
from ignite.engine import Engine, Events
from ignite.engine.events import CallableEventWithFilter, Removab... |
import random
import warnings
from collections import OrderedDict
from functools import wraps
from typing import Any, Callable, Generator, Iterator, List, Optional
import torch
from torch.utils.data import DataLoader
from torch.utils.data.sampler import BatchSampler
from ignite.engine.engine import Engine
from ignite... |
import numbers
import warnings
import weakref
from collections.abc import Sequence
from enum import Enum
from types import DynamicClassAttribute
from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, TYPE_CHECKING, Union
from torch.utils.data import DataLoader
from ignite.engine.utils import _che... |
from collections.abc import Mapping
from typing import Any, Callable, Dict, Optional, Sequence, Tuple, Union
import torch
import ignite.distributed as idist
from ignite.engine.deterministic import DeterministicEngine
from ignite.engine.engine import Engine
from ignite.engine.events import CallableEventWithFilter, Eve... |
import functools
import logging
import math
import time
import warnings
import weakref
from collections import defaultdict, OrderedDict
from collections.abc import Mapping
from typing import Any, Callable, Dict, Generator, Iterable, Iterator, List, Optional, Tuple, Union
from torch.utils.data import DataLoader
from i... |
import inspect
from typing import Any, Callable, Tuple, Union
def _check_signature(fn: Callable, fn_description: str, *args: Any, **kwargs: Any) -> None:
# if handler with filter, check the handler rather than the decorator
if hasattr(fn, "_parent"):
signature = inspect.signature(fn._parent())
els... |
import warnings
from copy import deepcopy
from typing import Optional, Union
import torch.nn as nn
from ignite.engine import CallableEventWithFilter, Engine, Events, EventsList
from ignite.handlers.param_scheduler import BaseParamScheduler
from ignite.handlers.state_param_scheduler import LambdaStateScheduler
__all_... |
import itertools
import math
import numbers
import tempfile
import warnings
from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from copy import copy
from pathlib import Path
from typing import Any, cast, Dict, List, Mapping, Optional, Sequence, Tuple, Type, Union
import torch
from torch.optim.... |
import collections.abc as collections
import numbers
import os
import stat
import tempfile
import warnings
from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from pathlib import Path
from typing import Any, Callable, Dict, List, Mapping, NamedTuple, Optional, Tuple, Union
import torch
import t... |
import logging
import numbers
from typing import Callable, Union
import torch
from ignite.engine import Engine
from ignite.utils import apply_to_type, setup_logger
__all__ = ["TerminateOnNan"]
class TerminateOnNan:
"""TerminateOnNan handler can be used to stop the training if the `process_function`'s output
... |
# coding: utf-8
import contextlib
import logging
import tempfile
import warnings
from math import ceil
from pathlib import Path
from typing import Any, Callable, Dict, List, Mapping, Optional, Union
import torch
from torch.optim import Optimizer
from torch.optim.lr_scheduler import _LRScheduler
import ignite.distribu... |
from typing import Any, Callable, Optional
from ignite.engine import Engine
from ignite.engine.events import Events
from ignite.handlers.checkpoint import Checkpoint, DiskSaver, ModelCheckpoint
from ignite.handlers.early_stopping import EarlyStopping
from ignite.handlers.ema_handler import EMAHandler
from ignite.handl... |
from typing import Any, Callable, List, Optional
from ignite.engine import Engine, Events
class EpochOutputStore:
"""EpochOutputStore handler to save output prediction and target history
after every epoch, could be useful for e.g., visualization purposes.
Note:
This can potentially lead to a mem... |
import functools
from collections import OrderedDict
from typing import Any, Callable, cast, Dict, List, Mapping, Sequence, Tuple, Union
import torch
from ignite.engine import Engine, EventEnum, Events
from ignite.handlers.timing import Timer
class BasicTimeProfiler:
"""
BasicTimeProfiler can be used to pro... |
import numbers
import warnings
from bisect import bisect_right
from typing import Any, List, Sequence, Tuple, Union
from ignite.engine import CallableEventWithFilter, Engine, Events, EventsList
from ignite.handlers.param_scheduler import BaseParamScheduler
class StateParamScheduler(BaseParamScheduler):
"""An abs... |
import time
from typing import Optional
from ignite.engine import Engine
__all__ = ["TimeLimit"]
from ignite.utils import setup_logger
class TimeLimit:
"""TimeLimit handler can be used to control training time for computing environments where session time is limited.
Timer starts when handler is created an... |
from time import perf_counter
from typing import Any, Optional
from ignite.engine import Engine, Events
__all__ = ["Timer"]
class Timer:
"""Timer object can be used to measure (average) time between events.
Args:
average: if True, then when ``.value()`` method is called, the returned value
... |
from collections import OrderedDict
from typing import Callable, cast, Mapping, Optional
from ignite.base import Serializable
from ignite.engine import Engine
from ignite.utils import setup_logger
__all__ = ["EarlyStopping"]
class EarlyStopping(Serializable):
"""EarlyStopping handler can be used to stop the tra... |
from collections import OrderedDict
from collections.abc import Mapping
from typing import Tuple
class Serializable:
_state_dict_all_req_keys: Tuple = ()
_state_dict_one_of_opt_keys: Tuple = ()
def state_dict(self) -> OrderedDict:
raise NotImplementedError
def load_state_dict(self, state_dic... |
from ignite.base.mixins import Serializable
|
# Needed to collect coverage data
|
import logging
import sys
from collections import namedtuple
import pytest
import torch
from packaging.version import Version
from ignite.engine import Engine, Events
from ignite.utils import convert_tensor, deprecated, hash_checkpoint, setup_logger, to_onehot
def test_convert_tensor():
x = torch.tensor([0.0])
... |
import functools
import os
import shutil
import sys
import tempfile
import time
from pathlib import Path
import pytest
import torch
import torch.distributed as dist
import ignite.distributed as idist
@pytest.fixture(
params=[
"cpu",
pytest.param("cuda", marks=pytest.mark.skipif(not torch.cuda.is... |
import torch
def cpu_and_maybe_cuda():
    """Return the device names to run tests on.

    Always includes ``"cpu"``; appends ``"cuda"`` when a CUDA device is
    available in the current environment.

    Returns:
        tuple of str: ``("cpu",)`` or ``("cpu", "cuda")``.
    """
    devices = ["cpu"]
    # Only offer the GPU device when torch reports CUDA support at runtime.
    if torch.cuda.is_available():
        devices.append("cuda")
    return tuple(devices)
|
import warnings
from functools import partial
from itertools import accumulate
import numpy as np
import pytest
import torch
import ignite.distributed as idist
from ignite.engine import Engine, Events
from ignite.metrics import Accuracy, RunningAverage
from ignite.metrics.metric import RunningBatchWise, RunningEpochW... |
from typing import Sequence, Union
import numpy as np
import pytest
import torch
from skimage.metrics import structural_similarity as ski_ssim
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import SSIM
def test_zero_div():
ssim = SSIM(data_range=1.0)
... |
import numbers
import os
from unittest.mock import MagicMock
import numpy as np
import pytest
import torch
from pytest import approx, raises
from sklearn.metrics import confusion_matrix, f1_score, precision_score, recall_score
import ignite.distributed as idist
from ignite.engine import Engine, Events, State
from ign... |
import os
import numpy as np
import pytest
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import RootMeanSquaredError
def test_zero_sample():
rmse = RootMeanSquaredError()
with pytest.raises(
NotComputableError, match=r"MeanSquare... |
import dill
from ignite.metrics import Metric
class Accumulation(Metric):
def __init__(self):
self.value = 0
super(Accumulation, self).__init__()
def reset(self):
self.value = 0
def compute(self):
return self.value
def update(self, output):
self.value += out... |
import json
import os
import pytest
import torch
import ignite.distributed as idist
from ignite.engine import Engine
from ignite.metrics.classification_report import ClassificationReport
def _test_integration_multiclass(device, output_dict):
rank = idist.get_rank()
def _test(metric_device, n_classes, label... |
import os
import numpy as np
import pytest
import torch
from sklearn.metrics import accuracy_score, confusion_matrix, precision_score, recall_score
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import ConfusionMatrix, IoU, JaccardIndex, mIoU
from ignite.metric... |
import warnings
import pytest
import torch
from sklearn.exceptions import UndefinedMetricWarning
from sklearn.metrics import precision_score
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import Precision
torch.manual_seed(12)
def test_no_update():
preci... |
import numpy as np
import pytest
import torch
from sklearn.metrics import multilabel_confusion_matrix
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics.multilabel_confusion_matrix import MultiLabelConfusionMatrix
torch.manual_seed(12)
def test_no_update():
c... |
import os
import numpy as np
import pytest
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import MeanPairwiseDistance
def test_zero_sample():
mpd = MeanPairwiseDistance()
with pytest.raises(
NotComputableError, match=r"MeanAbsolut... |
import pytest
import torch
import ignite.distributed as idist
from ignite.engine import Engine
from ignite.metrics import EpochMetric
from ignite.metrics.epoch_metric import EpochMetricWarning, NotComputableError
def test_epoch_metric_wrong_setup_or_input():
# Wrong compute function
with pytest.raises(TypeEr... |
# Needed to collect coverage data
|
import os
import numpy as np
import pytest
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import MeanAbsoluteError
def test_no_update():
mae = MeanAbsoluteError()
with pytest.raises(
NotComputableError, match=r"MeanAbsoluteError m... |
import os
import numpy as np
import pytest
import torch
from pytest import approx
from sklearn.metrics import f1_score, precision_score, recall_score
import ignite.distributed as idist
from ignite.engine import Engine
from ignite.metrics import Metric, MetricsLambda, Precision, Recall
class ListGatherMetric(Metric)... |
import numpy as np
import pytest
import torch
from skimage.metrics import peak_signal_noise_ratio as ski_psnr
import ignite.distributed as idist
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
from ignite.metrics import PSNR
from ignite.utils import manual_seed
def test_zero_div():
... |
import os
import pytest
import torch
from sklearn.metrics import accuracy_score
import ignite.distributed as idist
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
from ignite.metrics import Accuracy
torch.manual_seed(12)
def test_no_update():
acc = Accuracy()
with pytest.r... |
import os
import sys
import time
import pytest
import torch
import ignite.distributed as idist
from ignite.engine import Engine, Events
from ignite.metrics import Frequency
if sys.platform.startswith("darwin"):
pytest.skip("Skip if on MacOS", allow_module_level=True)
@pytest.mark.skipif(sys.platform.startswith... |
import os
import numpy as np
import pytest
import torch
from torch.nn import Linear
from torch.optim import SGD
import ignite.distributed as idist
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
from ignite.metrics.accumulation import Average, GeometricAverage, VariableAccumulation
... |
import os
from unittest.mock import MagicMock
import pytest
import torch
from numpy.testing import assert_almost_equal
from torch import nn
from torch.nn.functional import nll_loss
import ignite.distributed as idist
from ignite.engine import State
from ignite.exceptions import NotComputableError
from ignite.metrics i... |
import os
import warnings
import pytest
import torch
from sklearn.exceptions import UndefinedMetricWarning
from sklearn.metrics import recall_score
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import Recall
torch.manual_seed(12)
def test_no_update():
r... |
import os
import numpy as np
import pytest
import torch
from sklearn.metrics import fbeta_score
import ignite.distributed as idist
from ignite.engine import Engine
from ignite.metrics import Fbeta, Precision, Recall
torch.manual_seed(12)
def test_wrong_inputs():
with pytest.raises(ValueError, match=r"Beta shou... |
import os
import numpy as np
import pytest
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import MeanSquaredError
def test_zero_sample():
mse = MeanSquaredError()
with pytest.raises(
NotComputableError, match=r"MeanSquaredError mu... |
import os
import pytest
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics import TopKCategoricalAccuracy
def test_zero_div():
acc = TopKCategoricalAccuracy(2)
with pytest.raises(
NotComputableError, match=r"TopKCategoricalAccuracy mu... |
from typing import Callable, Optional, Union
from unittest.mock import patch
import pytest
import torch
import torchvision
from ignite.metrics.gan.utils import _BaseInceptionMetric, InceptionModel
class DummyInceptionMetric(_BaseInceptionMetric):
def __init__(
self,
num_features: Optional[int] =... |
import os
import pytest
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics.gan.inception_score import InceptionScore
def calculate_inception_score(p_yx):
p_y = torch.unsqueeze(p_yx.mean(axis=0), 0)
kl_d = torch.kl_div(torch.log(p_y), p_yx)
... |
import os
import re
from unittest.mock import patch
import pytest
import pytorch_fid.fid_score as pytorch_fid_score
import scipy
import torch
from numpy import cov
import ignite.distributed as idist
from ignite.metrics.gan.fid import FID, fid_score
@pytest.fixture()
def mock_no_scipy():
with patch.dict("sys.mod... |
import pytest
from ignite.metrics.nlp.utils import lcs, modified_precision, ngrams
@pytest.mark.parametrize(
"sequence, n, expected_keys, expected_values",
[
([], 1, [], []),
([0, 1, 2], 1, [(0,), (1,), (2,)], [1, 1, 1]),
([0, 1, 2], 2, [(0, 1), (1, 2)], [1, 1]),
([0, 1, 2], 3... |
import os
import warnings
from collections import Counter
import pytest
import torch
from nltk.translate.bleu_score import corpus_bleu, sentence_bleu, SmoothingFunction
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics.nlp import Bleu
from . import CorpusForTest
... |
__all__ = ["CorpusForTest"]
class CorpusForTest:
def __init__(self, lower_split=False):
def preproc(text):
if lower_split:
return text.lower().split()
else:
return text
# BLEU Paper examples
self.cand_1 = preproc("the the the the the... |
import os
import nltk
import pytest
import rouge as pyrouge
import torch
import ignite.distributed as idist
from ignite.exceptions import NotComputableError
from ignite.metrics.nlp import Rouge
from ignite.metrics.nlp.rouge import compute_ngram_scores, RougeL, RougeN
from . import CorpusForTest
nltk.download("punkt... |
import argparse
import torch
import ignite.distributed as idist
def training(local_rank, config, **kwargs):
import time
time.sleep(idist.get_rank() * 0.1)
print(idist.get_rank(), ": run with config:", config, "- kwargs:", kwargs, f"- backend={idist.backend()}")
t = torch.tensor([idist.get_rank()]... |
import os
import pytest
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader
from torch.utils.data.dataloader import _InfiniteConstantSampler
from torch.utils.data.dataset import Dataset, IterableDataset
from torch.utils.data.distributed import DistributedSampler
from ... |
import os
import subprocess
import sys
from pathlib import Path
import pytest
import torch
from packaging.version import Version
import ignite.distributed as idist
from ignite.distributed.utils import has_hvd_support, has_native_dist_support, has_xla_support
def test_parallel_wrong_inputs():
with pytest.raises(... |
import os
import pytest
import torch
import ignite.distributed as idist
from ignite.distributed.utils import has_hvd_support
from tests.ignite.distributed.utils import (
_test_distrib__get_max_length,
_test_distrib_all_gather,
_test_distrib_all_gather_group,
_test_distrib_all_reduce,
_test_distrib... |
import torch
import ignite.distributed as idist
from tests.ignite.distributed.utils import (
_sanity_check,
_test_distrib__get_max_length,
_test_distrib_all_gather,
_test_distrib_all_reduce,
_test_distrib_barrier,
_test_distrib_broadcast,
_test_distrib_new_group,
_test_sync,
)
def tes... |
import pytest
import torch
import torch.distributed as dist
import ignite.distributed as idist
from ignite.distributed.utils import sync
from ignite.engine import Engine, Events
def _sanity_check():
from ignite.distributed.utils import _model
assert _model.get_world_size() == _model.get_nnodes() * _model.ge... |
import os
import pytest
import ignite.distributed as idist
from ignite.distributed.utils import has_xla_support
from tests.ignite.distributed.utils import (
_test_distrib_all_gather,
_test_distrib_all_gather_group,
_test_distrib_all_reduce,
_test_distrib_all_reduce_group,
_test_distrib_barrier,
... |
import os
import pytest
import torch
import torch.distributed as dist
from packaging.version import Version
import ignite.distributed as idist
from ignite.distributed.utils import has_native_dist_support
from tests.ignite.distributed.utils import (
_test_distrib__get_max_length,
_test_distrib_all_gather,
... |
import pytest
import torch
from ignite.distributed.comp_models import has_hvd_support
if not has_hvd_support:
pytest.skip("Skip if no Horovod package", allow_module_level=True)
else:
import horovod.torch as hvd
from ignite.distributed.comp_models.horovod import _HorovodDistModel
@pytest.mark.distribute... |
import os
import pytest
import torch
from ignite.distributed.comp_models import has_xla_support
if not has_xla_support:
pytest.skip("Skip if no XLA support", allow_module_level=True)
else:
from ignite.distributed.comp_models.xla import _XlaDistModel
@pytest.mark.tpu
@pytest.mark.skipif(not has_xla_support,... |
import pytest
import torch
from ignite.distributed.comp_models.base import _SerialModel, ComputationModel
def test_serial_model():
_SerialModel.create_from_backend()
model = _SerialModel.create_from_context()
assert model.get_local_rank() == 0
assert model.get_rank() == 0
assert model.get_world_... |
import os
import pytest
import torch
import torch.distributed as dist
from ignite.distributed.comp_models import has_native_dist_support
if not has_native_dist_support:
pytest.skip("Skip if no native dist support", allow_module_level=True)
else:
from ignite.distributed.comp_models.native import _expand_hostl... |
import random
from pathlib import Path
import pytest
@pytest.fixture
def no_site_packages(request):
import sys
modules = {}
for k in sys.modules:
if request.param in k:
modules[k] = sys.modules[k]
for k in modules:
del sys.modules[k]
prev_path = list(sys.path)
sy... |
# coding: utf-8
|
from unittest.mock import Mock, patch
import pytest
import torch
from ignite.contrib.metrics import GpuInfo
from ignite.engine import Engine, State
def test_no_pynvml_package():
with patch.dict("sys.modules", {"pynvml.smi": None}):
with pytest.raises(ModuleNotFoundError, match="This contrib module requi... |
import os
from unittest.mock import patch
import pytest
import sklearn
import torch
from sklearn.metrics import average_precision_score
import ignite.distributed as idist
from ignite.contrib.metrics import AveragePrecision
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
torch.manual... |
from unittest.mock import patch
import numpy as np
import pytest
import sklearn
import torch
from sklearn.metrics import roc_curve
from ignite import distributed as idist
from ignite.contrib.metrics.roc_auc import RocCurve
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
from ignite.m... |
import os
from unittest.mock import patch
import pytest
import sklearn
import torch
from sklearn.metrics import roc_auc_score
import ignite.distributed as idist
from ignite.contrib.metrics import ROC_AUC
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
from ignite.metrics.epoch_metric... |
import os
from unittest.mock import patch
import pytest
import sklearn
import torch
from sklearn.metrics import cohen_kappa_score
import ignite.distributed as idist
from ignite.contrib.metrics import CohenKappa
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
torch.manual_seed(12)
... |
import os
from typing import Tuple
from unittest.mock import patch
import numpy as np
import pytest
import sklearn
import torch
from sklearn.metrics import precision_recall_curve
import ignite.distributed as idist
from ignite.contrib.metrics.precision_recall_curve import PrecisionRecallCurve
from ignite.engine import... |
import os
import numpy as np
import pytest
import torch
from sklearn.metrics import DistanceMetric
import ignite.distributed as idist
from ignite.contrib.metrics.regression import ManhattanDistance
from ignite.engine import Engine
def test_wrong_input_shapes():
m = ManhattanDistance()
with pytest.raises(Va... |
import os
import numpy as np
import pytest
import torch
import ignite.distributed as idist
from ignite.contrib.metrics.regression import MedianAbsolutePercentageError
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
def test_zero_sample():
m = MedianAbsolutePercentageError()
... |
import os
import numpy as np
import pytest
import torch
from sklearn.metrics import DistanceMetric
import ignite.distributed as idist
from ignite.contrib.metrics.regression import CanberraMetric
from ignite.engine import Engine
def test_wrong_input_shapes():
m = CanberraMetric()
with pytest.raises(ValueErr... |
import os
import numpy as np
import pytest
import torch
import ignite.distributed as idist
from ignite.contrib.metrics.regression import WaveHedgesDistance
from ignite.engine import Engine
def test_wrong_input_shapes():
m = WaveHedgesDistance()
with pytest.raises(ValueError, match=r"Input data shapes shoul... |
import os
import numpy as np
import pytest
import torch
import ignite.distributed as idist
from ignite.contrib.metrics.regression import GeometricMeanAbsoluteError
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
def test_zero_sample():
m = GeometricMeanAbsoluteError()
with ... |
import os
import numpy as np
import pytest
import torch
import ignite.distributed as idist
from ignite.contrib.metrics.regression import MeanNormalizedBias
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
def test_zero_sample():
m = MeanNormalizedBias()
with pytest.raises(
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.