python_code stringlengths 0 229k |
|---|
#!/usr/bin/env python3
from captum.robust._core.fgsm import FGSM # noqa
from captum.robust._core.metrics.attack_comparator import AttackComparator # noqa
from captum.robust._core.metrics.min_param_perturbation import ( # noqa
MinParamPerturbation,
)
from captum.robust._core.perturbation import Perturbation # n... |
#!/usr/bin/env python3
from typing import Any, Callable, Optional, Tuple, Union
import torch
from captum._utils.common import (
_format_additional_forward_args,
_format_output,
_format_tensor_into_tuples,
_is_tuple,
_select_targets,
)
from captum._utils.gradient import (
apply_gradient_requirem... |
#!/usr/bin/env python3
from typing import Any, Callable, Optional, Tuple, Union
import torch
import torch.nn.functional as F
from captum._utils.common import _format_output, _format_tensor_into_tuples, _is_tuple
from captum._utils.typing import TensorOrTupleOfTensorsGeneric
from captum.log import log_usage
from captum... |
#!/usr/bin/env python3
from typing import Callable
class Perturbation:
r"""
All perturbation and attack algorithms extend this class. It enforces
its child classes to extend and override core `perturb` method.
"""
perturb: Callable
r"""
This method computes and returns the perturbed input... |
#!/usr/bin/env python3
import warnings
from collections import namedtuple
from typing import (
Any,
Callable,
cast,
Dict,
Generic,
List,
NamedTuple,
Optional,
Tuple,
TypeVar,
Union,
)
from captum._utils.common import (
_expand_additional_forward_args,
_format_additio... |
#!/usr/bin/env python3
import math
from enum import Enum
from typing import Any, Callable, cast, Dict, Generator, List, Optional, Tuple, Union
import torch
from captum._utils.common import (
_expand_additional_forward_args,
_format_additional_forward_args,
_reduce_list,
)
from captum._utils.typing import T... |
#!/usr/bin/env python3
from captum.influence._core.influence import DataInfluence # noqa
from captum.influence._core.similarity_influence import SimilarityInfluence # noqa
from captum.influence._core.tracincp import TracInCP, TracInCPBase # noqa
from captum.influence._core.tracincp_fast_rand_proj import (
TracI... |
from abc import ABC, abstractmethod
from typing import Tuple
import torch
from torch import Tensor
class NearestNeighbors(ABC):
r"""
An abstract class to define a nearest neighbors data structure. Classes
implementing this interface are intended for computing proponents / opponents in
certain impleme... |
#!/usr/bin/env python3
import warnings
from typing import Any, Callable, List, Optional, Tuple, TYPE_CHECKING, Union
import torch
import torch.nn as nn
from captum._utils.common import _parse_version
from captum._utils.progress import progress
if TYPE_CHECKING:
from captum.influence._core.tracincp import TracInCP... |
#!/usr/bin/env python3
import glob
import warnings
from abc import abstractmethod
from os.path import join
from typing import (
Any,
Callable,
Iterator,
List,
NamedTuple,
Optional,
Tuple,
Type,
Union,
)
import torch
from captum._utils.av import AV
from captum._utils.common import _... |
#!/usr/bin/env python3
import warnings
from functools import partial
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
import captum._utils.common as common
import torch
from captum._utils.av import AV
from captum.attr import LayerActivation
from captum.influence._core.influence import DataInfluenc... |
#!/usr/bin/env python3
from abc import ABC, abstractmethod
from typing import Any
from torch.nn import Module
from torch.utils.data import Dataset
class DataInfluence(ABC):
r"""
An abstract class to define model data influence skeleton.
"""
def __init_(self, model: Module, train_dataset: Dataset, *... |
#!/usr/bin/env python3
import threading
import warnings
from collections import defaultdict
from typing import Any, Callable, cast, Dict, Iterator, List, Optional, Tuple, Union
import torch
from captum._utils.common import _get_module_from_name, _sort_key_list
from captum._utils.gradient import _gather_distributed_te... |
#!/usr/bin/env python3
from abc import ABC, abstractmethod
from typing import Optional, Tuple
import torch
from torch import Tensor
from torch.nn import Module
class StochasticGatesBase(Module, ABC):
"""
Abstract module for Stochastic Gates.
Stochastic Gates is a practical solution to add L0 norm regula... |
#!/usr/bin/env python3
import math
from typing import Optional
import torch
from captum.module.stochastic_gates_base import StochasticGatesBase
from torch import nn, Tensor
class GaussianStochasticGates(StochasticGatesBase):
"""
Stochastic Gates with Gaussian distribution.
Stochastic Gates is a practica... |
from captum.module.binary_concrete_stochastic_gates import ( # noqa
BinaryConcreteStochasticGates,
)
from captum.module.gaussian_stochastic_gates import GaussianStochasticGates # noqa
from captum.module.stochastic_gates_base import StochasticGatesBase # noqa
|
#!/usr/bin/env python3
import math
from typing import Optional
import torch
from captum.module.stochastic_gates_base import StochasticGatesBase
from torch import nn, Tensor
def _torch_empty(batch_size: int, n_gates: int, device: torch.device) -> Tensor:
return torch.empty(batch_size, n_gates, device=device)
# ... |
#!/usr/bin/env python3
from captum.attr._core.dataloader_attr import DataLoaderAttribution # noqa
from captum.attr._core.deep_lift import DeepLift, DeepLiftShap # noqa
from captum.attr._core.feature_ablation import FeatureAblation # noqa
from captum.attr._core.feature_permutation import FeaturePermutation # noqa
fr... |
#!/usr/bin/env python3
import typing
import warnings
from typing import Any, Callable, Iterator, Tuple, Union
import torch
from captum._utils.common import (
_format_additional_forward_args,
_format_output,
_format_tensor_into_tuples,
_reduce_list,
)
from captum._utils.typing import (
TargetType,
... |
#!/usr/bin/env python3
import torch.nn as nn
class Addition_Module(nn.Module):
"""Custom addition module that uses multiple inputs to assure correct relevance
propagation. Any addition in a forward function needs to be replaced with the
module before using LRP."""
def __init__(self) -> None:
... |
#!/usr/bin/env python3
from enum import Enum
from typing import Callable, List, Tuple
import torch
# Identifiers for the Riemann-sum approximation variants (values 1-4, in
# declaration order: left, right, middle, trapezoid).  Built with the Enum
# functional API; member names and values match the class-based definition.
Riemann = Enum("Riemann", ["left", "right", "middle", "trapezoid"])
# String identifiers accepted for Riemann-sum based approximation methods;
# one entry per Riemann variant, each prefixed with "riemann_".
SUPPORTED_RIEMANN_METHODS = [
    "riemann_" + variant
    for variant in ("left", "right", "middle", "trapezoid")
]
SUPPORTED_METH... |
#!/usr/bin/env python3
import inspect
from typing import Any
import torch.nn as nn
class InputIdentity(nn.Module):
def __init__(self, input_name: str) -> None:
r"""
The identity operation
Args:
input_name (str)
The name of the input this layer is associated t... |
#!/usr/bin/env python3
import warnings
from enum import Enum
from typing import Any, Iterable, List, Optional, Tuple, Union
import numpy as np
from matplotlib import cm, colors, pyplot as plt
from matplotlib.collections import LineCollection
from matplotlib.colors import LinearSegmentedColormap
from matplotlib.figure ... |
#!/usr/bin/env python3
from typing import Any, Callable, cast, Generic, List, Tuple, Type, Union
import torch
import torch.nn.functional as F
from captum._utils.common import (
_format_additional_forward_args,
_format_tensor_into_tuples,
_run_forward,
_validate_target,
)
from captum._utils.gradient imp... |
#!/usr/bin/env python3
import typing
from inspect import signature
from typing import Any, Callable, List, Tuple, TYPE_CHECKING, Union
import torch
from captum._utils.common import (
_format_baseline,
_format_output,
_format_tensor_into_tuples,
_validate_input as _validate_input_basic,
)
from captum._u... |
#!/usr/bin/env python3
from collections import defaultdict
from typing import Any, Dict, List, Optional, Union
from captum._utils.common import _format_tensor_into_tuples
from captum._utils.typing import TargetType, TensorOrTupleOfTensorsGeneric
from captum.attr._utils.stat import Stat
from captum.attr._utils.summariz... |
#!/usr/bin/env python3
from typing import Any, Callable, List, Optional, TYPE_CHECKING
import torch
from torch import Tensor
if TYPE_CHECKING:
from captum.attr._utils.summarizer import SummarizerSingleTensor
class Stat:
"""
The Stat class represents a statistic that can be updated and retrieved
at a... |
#!/usr/bin/env python3
from typing import Dict, List, Optional, Tuple, Type, Union
import torch
from captum.attr._utils.stat import Count, Max, Mean, Min, MSE, Stat, StdDev, Sum, Var
from captum.log import log_usage
from torch import Tensor
class Summarizer:
r"""
This class simply wraps over a given a set o... |
#!/usr/bin/env python3
from abc import ABC, abstractmethod
import torch
from ..._utils.common import _format_tensor_into_tuples
class PropagationRule(ABC):
"""
Base class for all propagation rule classes, also called Z-Rule.
STABILITY_FACTOR is used to assure that no zero divison occurs.
"""
S... |
#!/usr/bin/env python3
import math
from typing import Any, Callable, cast, Tuple, Union
import torch
from captum._utils.common import (
_expand_additional_forward_args,
_expand_target,
_format_additional_forward_args,
_format_feature_mask,
_format_output,
_is_tuple,
_run_forward,
)
from ca... |
#!/usr/bin/env python3
from typing import Any, Callable, Tuple, Union
import torch
from captum._utils.typing import TargetType, TensorOrTupleOfTensorsGeneric
from captum.attr._core.feature_ablation import FeatureAblation
from captum.log import log_usage
from torch import Tensor
def _permute_feature(x: Tensor, featur... |
#!/usr/bin/env python3
from typing import Any, Callable, Tuple, Union
import numpy as np
import torch
from captum._utils.common import _format_tensor_into_tuples
from captum._utils.typing import BaselineType, TargetType, TensorOrTupleOfTensorsGeneric
from captum.attr._core.feature_ablation import FeatureAblation
from ... |
#!/usr/bin/env python3
import typing
from typing import Any, Callable, Tuple, Union
import numpy as np
import torch
from captum._utils.common import _is_tuple
from captum._utils.typing import (
BaselineType,
Literal,
TargetType,
Tensor,
TensorOrTupleOfTensorsGeneric,
)
from captum.attr._core.noise_... |
#!/usr/bin/env python3
import inspect
import math
import typing
import warnings
from typing import Any, Callable, cast, List, Optional, Tuple, Union
import torch
from captum._utils.common import (
_expand_additional_forward_args,
_expand_target,
_flatten_tensor_or_tuple,
_format_output,
_format_ten... |
#!/usr/bin/env python3
from enum import Enum
from typing import Any, cast, List, Tuple, Union
import torch
from captum._utils.common import (
_expand_and_update_additional_forward_args,
_expand_and_update_baselines,
_expand_and_update_feature_mask,
_expand_and_update_target,
_format_output,
_fo... |
#!/usr/bin/env python3
from collections import defaultdict
from copy import copy
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union
import torch
from captum._utils.common import (
_format_baseline,
_format_feature_mask,
_format_output,
_format_tensor_into_tuples,
_get_ma... |
#!/usr/bin/env python3
import warnings
from typing import Any, List, Tuple, Union
import torch
import torch.nn.functional as F
from captum._utils.common import (
_format_output,
_format_tensor_into_tuples,
_is_tuple,
_register_backward_hook,
)
from captum._utils.gradient import (
apply_gradient_req... |
#!/usr/bin/env python3
import typing
from collections import defaultdict
from typing import Any, cast, List, Tuple, Union
import torch.nn as nn
from captum._utils.common import (
_format_output,
_format_tensor_into_tuples,
_is_tuple,
_register_backward_hook,
_run_forward,
)
from captum._utils.grad... |
#!/usr/bin/env python3
import warnings
from typing import Any, List, Union
import torch
from captum._utils.common import _format_output, _format_tensor_into_tuples, _is_tuple
from captum._utils.typing import TargetType, TensorOrTupleOfTensorsGeneric
from captum.attr._core.guided_backprop_deconvnet import GuidedBackpro... |
#!/usr/bin/env python3
import itertools
import math
import warnings
from typing import Any, Callable, Iterable, Sequence, Tuple, Union
import torch
from captum._utils.common import (
_expand_additional_forward_args,
_expand_target,
_format_additional_forward_args,
_format_feature_mask,
_format_out... |
#!/usr/bin/env python3
import typing
import warnings
from typing import Any, Callable, cast, List, Tuple, Union
import torch
import torch.nn as nn
import torch.nn.functional as F
from captum._utils.common import (
_expand_additional_forward_args,
_expand_target,
_format_additional_forward_args,
_format... |
#!/usr/bin/env python3
from typing import Any, Callable
import torch
from captum._utils.common import _format_output, _format_tensor_into_tuples, _is_tuple
from captum._utils.gradient import (
apply_gradient_requirements,
undo_gradient_requirements,
)
from captum._utils.typing import TargetType, TensorOrTuple... |
#!/usr/bin/env python3
from typing import Any, Callable, Generator, Tuple, Union
import torch
from captum._utils.models.linear_model import SkLearnLinearRegression
from captum._utils.typing import BaselineType, TargetType, TensorOrTupleOfTensorsGeneric
from captum.attr._core.lime import construct_feature_mask, Lime
f... |
#!/usr/bin/env python3
from typing import Any, Callable
from captum._utils.common import _format_output, _format_tensor_into_tuples, _is_tuple
from captum._utils.gradient import (
apply_gradient_requirements,
undo_gradient_requirements,
)
from captum._utils.typing import TargetType, TensorOrTupleOfTensorsGener... |
#!/usr/bin/env python3
import typing
from typing import Any, Callable, List, Tuple, Union
import torch
from captum._utils.common import (
_expand_additional_forward_args,
_expand_target,
_format_additional_forward_args,
_format_output,
_is_tuple,
)
from captum._utils.typing import (
BaselineTyp... |
#!/usr/bin/env python3
from typing import Any, Callable, List, Tuple, Union
from captum._utils.common import (
_format_additional_forward_args,
_format_output,
_format_tensor_into_tuples,
_is_tuple,
)
from captum._utils.gradient import (
_forward_layer_eval_with_neuron_grads,
apply_gradient_req... |
#!/usr/bin/env python3
from typing import Any, Callable, List, Tuple, Union
from captum._utils.gradient import construct_neuron_grad_fn
from captum._utils.typing import TensorOrTupleOfTensorsGeneric
from captum.attr._core.guided_backprop_deconvnet import Deconvolution, GuidedBackprop
from captum.attr._utils.attributio... |
#!/usr/bin/env python3
from typing import Any, Callable, List, Tuple, Union
import torch
from captum._utils.common import _verify_select_neuron
from captum._utils.gradient import _forward_layer_eval
from captum._utils.typing import BaselineType, TensorOrTupleOfTensorsGeneric
from captum.attr._core.feature_ablation imp... |
#!/usr/bin/env python3
from typing import Any, Callable, List, Tuple, Union
from captum._utils.gradient import construct_neuron_grad_fn
from captum._utils.typing import TensorOrTupleOfTensorsGeneric
from captum.attr._core.gradient_shap import GradientShap
from captum.attr._utils.attribution import GradientAttribution,... |
#!/usr/bin/env python3
from typing import Any, Callable, cast, Tuple, Union
from captum._utils.gradient import construct_neuron_grad_fn
from captum._utils.typing import BaselineType, TensorOrTupleOfTensorsGeneric
from captum.attr._core.deep_lift import DeepLift, DeepLiftShap
from captum.attr._utils.attribution import ... |
#!/usr/bin/env python3
from typing import Any, Callable, List, Tuple, Union
from captum._utils.gradient import construct_neuron_grad_fn
from captum._utils.typing import TensorOrTupleOfTensorsGeneric
from captum.attr._core.integrated_gradients import IntegratedGradients
from captum.attr._utils.attribution import Gradie... |
#!/usr/bin/env python3
import warnings
from typing import Any, Callable, List, Tuple, Union
import torch
from captum._utils.common import (
_expand_additional_forward_args,
_expand_target,
_format_additional_forward_args,
_format_output,
_is_tuple,
_verify_select_neuron,
)
from captum._utils.gr... |
#!/usr/bin/env python3
from typing import Any, Callable, List, Tuple, Union
import torch
from captum._utils.common import (
_expand_additional_forward_args,
_expand_target,
_format_additional_forward_args,
_format_output,
)
from captum._utils.gradient import compute_layer_gradients_and_eval
from captum... |
#!/usr/bin/env python3
import typing
from typing import Any, cast, List, Tuple, Union
from captum._utils.common import (
_format_tensor_into_tuples,
_reduce_list,
_sort_key_list,
)
from captum._utils.gradient import (
apply_gradient_requirements,
compute_gradients,
undo_gradient_requirements,
)... |
#!/usr/bin/env python3
from typing import Any, Callable, List, Tuple, Union
import torch
from captum._utils.common import _format_output
from captum._utils.gradient import _forward_layer_eval
from captum._utils.typing import ModuleOrModuleList
from captum.attr._utils.attribution import LayerAttribution
from captum.log... |
#!/usr/bin/env python3
import typing
from typing import Any, Callable, List, Tuple, Union
import torch
from captum._utils.common import (
_expand_additional_forward_args,
_expand_target,
_format_additional_forward_args,
_format_output,
)
from captum._utils.gradient import compute_layer_gradients_and_ev... |
#!/usr/bin/env python3
from typing import Any, Callable, List, Tuple, Union
import torch
import torch.nn.functional as F
from captum._utils.common import (
_format_additional_forward_args,
_format_output,
_format_tensor_into_tuples,
)
from captum._utils.gradient import compute_layer_gradients_and_eval
from... |
#!/usr/bin/env python3
from typing import Any, Callable, List, Tuple, Union
from captum._utils.common import (
_format_additional_forward_args,
_format_output,
_format_tensor_into_tuples,
)
from captum._utils.gradient import compute_layer_gradients_and_eval
from captum._utils.typing import ModuleOrModuleLi... |
#!/usr/bin/env python3
import typing
from typing import Any, Callable, cast, List, Tuple, Union
import numpy as np
import torch
from captum._utils.gradient import _forward_layer_eval, compute_layer_gradients_and_eval
from captum._utils.typing import Literal, TargetType, TensorOrTupleOfTensorsGeneric
from captum.attr.... |
#!/usr/bin/env python3
from typing import Any, Callable, List, Tuple, Union
import torch
from captum._utils.common import (
_extract_device,
_format_additional_forward_args,
_format_output,
_format_tensor_into_tuples,
_run_forward,
)
from captum._utils.gradient import _forward_layer_eval
from captu... |
#!/usr/bin/env python3
import functools
import warnings
from typing import Any, Callable, List, overload, Tuple, Union
import torch
from captum._utils.common import (
_extract_device,
_format_additional_forward_args,
_format_outputs,
)
from captum._utils.gradient import _forward_layer_eval, _run_forward
fr... |
#!/usr/bin/env python3
import typing
from typing import Any, Callable, cast, Sequence, Tuple, Union
import torch
from captum._utils.common import (
_expand_target,
_format_additional_forward_args,
_format_baseline,
_format_tensor_into_tuples,
ExpansionTypes,
)
from captum._utils.gradient import com... |
#!/usr/bin/env python3
from collections import defaultdict
import torch
from pytext.models.embeddings.dict_embedding import DictEmbedding
from pytext.models.embeddings.word_embedding import WordEmbedding
from pytext.models.model import EmbeddingBase, EmbeddingList
class PyTextInterpretableEmbedding(EmbeddingBase):
... |
#!/usr/bin/env python3
import warnings
from functools import reduce
import torch
from torch.nn import Module
class InterpretableEmbeddingBase(Module):
r"""
Since some embedding vectors, e.g. word are created and assigned in
the embedding layers of Pytorch models we need a way to access
those layers,... |
#!/usr/bin/env python3
from captum.concept._core.cav import CAV # noqa
from captum.concept._core.concept import Concept, ConceptInterpreter # noqa
from captum.concept._core.tcav import TCAV # noqa
from captum.concept._utils.classifier import Classifier, DefaultClassifier # noqa
|
#!/usr/bin/env python3
import glob
import os
from typing import Callable, Iterator
from torch import Tensor
from torch.utils.data import DataLoader, Dataset, IterableDataset
class CustomIterableDataset(IterableDataset):
r"""
An auxiliary class for iterating through a dataset.
"""
def __init__(self,... |
#!/usr/bin/env python3
import random
import warnings
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Tuple, Union
import torch
from captum._utils.models.linear_model import model
from torch import Tensor
from torch.utils.data import DataLoader, TensorDataset
class Classifier(ABC):
r"""
... |
#!/usr/bin/env python3
from typing import List
from captum.concept._core.concept import Concept
def concepts_to_str(concepts: List[Concept]) -> str:
r"""
Returns a string of hyphen("-") concatenated concept names.
Example output: "striped-random_0-random_1"
Args:
concepts (list[Concept]): a... |
#!/usr/bin/env python3
import os
from typing import Any, Dict, List
import torch
from captum.concept._core.concept import Concept
from captum.concept._utils.common import concepts_to_str
class CAV:
r"""
Concept Activation Vector (CAV) is a vector orthogonal to the decision
boundary of a classifier which... |
#!/usr/bin/env python3
from typing import Callable, Union
import torch
from torch.nn import Module
class Concept:
r"""
Concepts are human-friendly abstract representations that can be
numerically encoded into torch tensors. They can be illustrated as
images, text or any other form of representation... |
#!/usr/bin/env python3
from collections import defaultdict
from typing import Any, cast, Dict, List, Set, Tuple, Union
import numpy as np
import torch
import torch.multiprocessing as multiprocessing
from captum._utils.av import AV
from captum._utils.common import _format_tensor_into_tuples, _get_module_from_name
from... |
#!/usr/bin/env python3
try:
from captum.log.fb.internal_log import (
disable_detailed_logging,
log,
log_usage,
patch_methods,
set_environment,
TimedLog,
)
__all__ = [
"log",
"log_usage",
"TimedLog",
"set_environment",
... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import nox
import os
# Interpreter versions the nox sessions target by default.
DEFAULT_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
# Override the tested interpreter set with a comma-separated env var,
# e.g. NOX_PYTHON_VERSIONS="3.8,3.9"; falls back to the defaults above.
PYTHON_VERSIONS = os.environ.get(
    "NOX_PYTHON_VERSIONS", ",".join(DEFAULT_PYTHON_VERSIONS)
).split(",")
# Any value other than "0" enables verbose output; SILENT is the inverse.
# NOTE(review): SILENT presumably feeds nox's silent= session flag —
# confirm at the downstream use sites (not visible in this chunk).
VERBOSE = os.environ.get("VERBOSE", "0")
SILENT = VERBOSE == "0"
# Li... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import os
import pytest
from pathlib import Path
from hydra.utils import get_class, instantiate
from omegaconf import OmegaConf
from typing import Any
import torch
import torchvision.datasets as datasets
@pytest.mark.parametrize(
"modulepath,... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import pytest
from hydra.utils import get_class, instantiate
from omegaconf import OmegaConf
import torch
# import torchvision.datasets as datasets
import torchvision.transforms as transforms
from torchvision.transforms.transforms import ToTensor
... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# Generated by configen, do not edit.
# See https://github.com/facebookresearch/hydra/tree/main/tools/configen
# fmt: off
# isort:skip_file
# flake8: noqa
from packaging import version
from pkg_resources import get_distribution
import warnings
im... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# Generated by configen, do not edit.
# See https://github.com/facebookresearch/hydra/tree/main/tools/configen
# fmt: off
# isort:skip_file
# flake8: noqa
from dataclasses import dataclass, field
from omegaconf import MISSING
from typing import A... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# Generated by configen, do not edit.
# See https://github.com/facebookresearch/hydra/tree/main/tools/configen
# fmt: off
# isort:skip_file
# flake8: noqa
from dataclasses import dataclass, field
from omegaconf import MISSING
from typing import A... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# Generated by configen, do not edit.
# See https://github.com/facebookresearch/hydra/tree/main/tools/configen
# fmt: off
# isort:skip_file
# flake8: noqa
from dataclasses import dataclass, field
from omegaconf import MISSING
from typing import A... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
# flake8: noqa
from __future__ import print_function
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision import datasets, transforms
from torch.optim import Adadelta
from torch.optim.lr_scheduler import StepLR
######... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import pytest
from hydra.utils import get_class, instantiate
from omegaconf import OmegaConf
import torch.optim as optim
import torch
from torch import Tensor
from torch import nn
from typing import Any
model = nn.Linear(1, 1)
@pytest.mark.para... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import pytest
from hydra.utils import get_class, instantiate
from omegaconf import OmegaConf
import torch.nn.modules.loss as loss
from torch.tensor import Tensor
from typing import Any
@pytest.mark.parametrize(
"modulepath, classname, cfg, p... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import pytest
from hydra.utils import get_class, instantiate
from omegaconf import OmegaConf
import torch.utils.data as data
import torch
from typing import Any
dummy_tensor = torch.tensor((1, 1))
dummy_dataset = data.dataset.TensorDataset(dummy_... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# Generated by configen, do not edit.
# See https://github.com/facebookresearch/hydra/tree/main/tools/configen
# fmt: off
# isort:skip_file
# flake8: noqa
from dataclasses import dataclass, field
from omegaconf import MISSING
from typing import A... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# Generated by configen, do not edit.
# See https://github.com/facebookresearch/hydra/tree/main/tools/configen
# fmt: off
# isort:skip_file
# flake8: noqa
from dataclasses import dataclass, field
from omegaconf import MISSING
from typing import A... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# Generated by configen, do not edit.
# See https://github.com/facebookresearch/hydra/tree/main/tools/configen
# fmt: off
# isort:skip_file
# flake8: noqa
from dataclasses import dataclass, field
from omegaconf import MISSING
from typing import A... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# Generated by configen, do not edit.
# See https://github.com/facebookresearch/hydra/tree/main/tools/configen
# fmt: off
# isort:skip_file
# flake8: noqa
from dataclasses import dataclass, field
from omegaconf import MISSING
from typing import A... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# Generated by configen, do not edit.
# See https://github.com/facebookresearch/hydra/tree/main/tools/configen
# fmt: off
# isort:skip_file
# flake8: noqa
from dataclasses import dataclass, field
from omegaconf import MISSING
from typing import A... |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# Generated by configen, do not edit.
# See https://github.com/facebookresearch/hydra/tree/main/tools/configen
# fmt: off
# isort:skip_file
# flake8: noqa
from dataclasses import dataclass, field
from omegaconf import MISSING
from typing import A... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.