diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/__init__.py b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..de405dc40edb5d892bcb4c1a804d38d386c66b0b --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/__init__.py @@ -0,0 +1,366 @@ +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: Apache-2.0 + +# DeepSpeed Team + +import sys +import types +import json +from typing import Optional, Union +import torch +from torch.optim import Optimizer +from torch.optim.lr_scheduler import _LRScheduler +from packaging import version as pkg_version + +# Skip Triton import for AMD due to pytorch-triton-rocm module breaking device API in DeepSpeed +if not (hasattr(torch.version, 'hip') and torch.version.hip is not None): + try: + import triton # noqa: F401 # type: ignore + HAS_TRITON = True + except ImportError: + HAS_TRITON = False +else: + HAS_TRITON = False + +from . import ops +from . 
import module_inject + +from .accelerator import get_accelerator +from .constants import TORCH_DISTRIBUTED_DEFAULT_PORT +from .runtime.engine import DeepSpeedEngine, DeepSpeedOptimizerCallable, DeepSpeedSchedulerCallable +from .runtime.engine import ADAM_OPTIMIZER, LAMB_OPTIMIZER +from .runtime.hybrid_engine import DeepSpeedHybridEngine +from .runtime.pipe.engine import PipelineEngine +from .inference.engine import InferenceEngine +from .inference.config import DeepSpeedInferenceConfig +from .runtime.lr_schedules import add_tuning_arguments +from .runtime.config import DeepSpeedConfig, DeepSpeedConfigError +from .runtime.activation_checkpointing import checkpointing +from .ops.transformer import DeepSpeedTransformerLayer, DeepSpeedTransformerConfig +from .module_inject import replace_transformer_layer, revert_transformer_layer + +from .utils import log_dist, OnDevice, logger +from .comm.comm import init_distributed + +from .runtime import zero +from .runtime.compiler import is_compile_supported + +from .pipe import PipelineModule + +from .git_version_info import version, git_hash, git_branch + + +def _parse_version(version_str): + '''Parse a version string and extract the major, minor, and patch versions.''' + ver = pkg_version.parse(version_str) + return ver.major, ver.minor, ver.micro + + +# Export version information +__version__ = version +__version_major__, __version_minor__, __version_patch__ = _parse_version(__version__) +__git_hash__ = git_hash +__git_branch__ = git_branch + +# Set to torch's distributed package or deepspeed.comm based inside DeepSpeedEngine init +dist = None + + +def initialize(args=None, + model: torch.nn.Module = None, + optimizer: Optional[Union[Optimizer, DeepSpeedOptimizerCallable]] = None, + model_parameters: Optional[torch.nn.Module] = None, + training_data: Optional[torch.utils.data.Dataset] = None, + lr_scheduler: Optional[Union[_LRScheduler, DeepSpeedSchedulerCallable]] = None, + distributed_port: int = 
TORCH_DISTRIBUTED_DEFAULT_PORT, + mpu=None, + dist_init_required: Optional[bool] = None, + collate_fn=None, + config=None, + mesh_param=None, + config_params=None): + """Initialize the DeepSpeed Engine. + + Arguments: + args: an object containing local_rank and deepspeed_config fields. + This is optional if `config` is passed. + + model: Required: nn.module class before apply any wrappers + + optimizer: Optional: a user defined Optimizer or Callable that returns an Optimizer object. + This overrides any optimizer definition in the DeepSpeed json config. + + model_parameters: Optional: An iterable of torch.Tensors or dicts. + Specifies what Tensors should be optimized. + + training_data: Optional: Dataset of type torch.utils.data.Dataset + + lr_scheduler: Optional: Learning Rate Scheduler Object or a Callable that takes an Optimizer and returns a Scheduler object. + The scheduler object should define a get_lr(), step(), state_dict(), and load_state_dict() methods + + distributed_port: Optional: Master node (rank 0)'s free port that needs to be used for communication during distributed training + + mpu: Optional: A model parallelism unit object that implements + get_{model,data}_parallel_{rank,group,world_size}() + + dist_init_required: Optional: None will auto-initialize torch distributed if needed, + otherwise the user can force it to be initialized or not via boolean. + + collate_fn: Optional: Merges a list of samples to form a + mini-batch of Tensor(s). Used when using batched loading from a + map-style dataset. + + config: Optional: Instead of requiring args.deepspeed_config you can pass your deepspeed config + as an argument instead, as a path or a dictionary. + + config_params: Optional: Same as `config`, kept for backwards compatibility. + + Returns: + A tuple of ``engine``, ``optimizer``, ``training_dataloader``, ``lr_scheduler`` + + * ``engine``: DeepSpeed runtime engine which wraps the client model for distributed training. 
+ + * ``optimizer``: Wrapped optimizer if a user defined ``optimizer`` is supplied, or if + optimizer is specified in json config else ``None``. + + * ``training_dataloader``: DeepSpeed dataloader if ``training_data`` was supplied, + otherwise ``None``. + + * ``lr_scheduler``: Wrapped lr scheduler if user ``lr_scheduler`` is passed, or + if ``lr_scheduler`` specified in JSON configuration. Otherwise ``None``. + """ + log_dist("DeepSpeed info: version={}, git-hash={}, git-branch={}".format(__version__, __git_hash__, + __git_branch__), + ranks=[0]) + + # Disable zero.Init context if it's currently enabled + zero.partition_parameters.shutdown_init_context() + + assert model is not None, "deepspeed.initialize requires a model" + + global dist + from deepspeed import comm as dist + dist_backend = get_accelerator().communication_backend_name() + dist.init_distributed(dist_backend=dist_backend, + distributed_port=distributed_port, + dist_init_required=dist_init_required) + + ##TODO: combine reuse mpu as mesh device and vice versa + # Set config using config_params for backwards compat + if config is None and config_params is not None: + config = config_params + + mesh_device = None + if mesh_param: + logger.info(f"mesh_param to Initialize mesh device: {mesh_param}") + mesh_device = dist.initialize_mesh_device(mesh_param, ("data_parallel", "sequence_parallel")) + #if config file has sequence parallelize and data parallelize, then use them to initialize mesh device + elif config is not None: + if "sequence_parallel_size" in config and "data_parallel_size" in config: + logger.info(f"config to Initialize mesh device: {config}") + mesh_device = dist.initialize_mesh_device((config["data_parallel_size"], config["sequence_parallel_size"]), \ + ("data_parallel", "sequence_parallel")) + + # Check for deepscale_config for backwards compat + if hasattr(args, "deepscale_config") and args.deepscale_config is not None: + logger.warning("************ --deepscale_config is deprecated, 
please use --deepspeed_config ************") + if hasattr(args, "deepspeed_config"): + assert (args.deepspeed_config + is None), "Not sure how to proceed, we were given both a deepscale_config and deepspeed_config" + args.deepspeed_config = args.deepscale_config + args.deepscale_config = None + + # Check that we have only one config passed + if hasattr(args, "deepspeed_config") and args.deepspeed_config is not None: + assert config is None, "Not sure how to proceed, we were given deepspeed configs in the deepspeed arguments and deepspeed.initialize() function call" + config = args.deepspeed_config + assert config is not None, "DeepSpeed requires --deepspeed_config to specify configuration file" + if not isinstance(model, PipelineModule): + config_class = DeepSpeedConfig(config, mpu, mesh_device=mesh_device) + if config_class.hybrid_engine.enabled: + engine = DeepSpeedHybridEngine(args=args, + model=model, + optimizer=optimizer, + model_parameters=model_parameters, + training_data=training_data, + lr_scheduler=lr_scheduler, + mpu=mpu, + dist_init_required=dist_init_required, + collate_fn=collate_fn, + config=config, + config_class=config_class) + else: + engine = DeepSpeedEngine(args=args, + model=model, + optimizer=optimizer, + model_parameters=model_parameters, + training_data=training_data, + lr_scheduler=lr_scheduler, + mpu=mpu, + dist_init_required=dist_init_required, + collate_fn=collate_fn, + config=config, + mesh_device=mesh_device, + config_class=config_class) + else: + assert mpu is None, "mpu must be None with pipeline parallelism" + mpu = model.mpu() + config_class = DeepSpeedConfig(config, mpu) + engine = PipelineEngine(args=args, + model=model, + optimizer=optimizer, + model_parameters=model_parameters, + training_data=training_data, + lr_scheduler=lr_scheduler, + mpu=mpu, + dist_init_required=dist_init_required, + collate_fn=collate_fn, + config=config, + config_class=config_class) + + # Restore zero.Init context if necessary + 
zero.partition_parameters.restore_init_context() + + return_items = [ + engine, + engine.optimizer, + engine.training_dataloader, + engine.lr_scheduler, + ] + return tuple(return_items) + + +def _add_core_arguments(parser): + r"""Helper (internal) function to update an argument parser with an argument group of the core DeepSpeed arguments. + The core set of DeepSpeed arguments include the following: + 1) --deepspeed: boolean flag to enable DeepSpeed + 2) --deepspeed_config : path of a json configuration file to configure DeepSpeed runtime. + + This is a helper function to the public add_config_arguments() + + Arguments: + parser: argument parser + Return: + parser: Updated Parser + """ + group = parser.add_argument_group('DeepSpeed', 'DeepSpeed configurations') + + group.add_argument('--deepspeed', + default=False, + action='store_true', + help='Enable DeepSpeed (helper flag for user code, no impact on DeepSpeed backend)') + + group.add_argument('--deepspeed_config', default=None, type=str, help='DeepSpeed json configuration file.') + + group.add_argument('--deepscale', + default=False, + action='store_true', + help='Deprecated enable DeepSpeed (helper flag for user code, no impact on DeepSpeed backend)') + + group.add_argument('--deepscale_config', + default=None, + type=str, + help='Deprecated DeepSpeed json configuration file.') + + return parser + + +def add_config_arguments(parser): + r"""Update the argument parser to enabling parsing of DeepSpeed command line arguments. + The set of DeepSpeed arguments include the following: + 1) --deepspeed: boolean flag to enable DeepSpeed + 2) --deepspeed_config : path of a json configuration file to configure DeepSpeed runtime. + + Arguments: + parser: argument parser + Return: + parser: Updated Parser + """ + parser = _add_core_arguments(parser) + + return parser + + +def default_inference_config(): + """ + Return a default DeepSpeed inference configuration dictionary. 
+ """ + return DeepSpeedInferenceConfig().dict() + + +def init_inference(model, config=None, **kwargs): + """Initialize the DeepSpeed InferenceEngine. + + Description: all four cases are valid and supported in DS init_inference() API. + + # Case 1: user provides no config and no kwargs. Default config will be used. + + .. code-block:: python + + generator.model = deepspeed.init_inference(generator.model) + string = generator("DeepSpeed is") + print(string) + + # Case 2: user provides a config and no kwargs. User supplied config will be used. + + .. code-block:: python + + generator.model = deepspeed.init_inference(generator.model, config=config) + string = generator("DeepSpeed is") + print(string) + + # Case 3: user provides no config and uses keyword arguments (kwargs) only. + + .. code-block:: python + + generator.model = deepspeed.init_inference(generator.model, + tensor_parallel={"tp_size": world_size}, + dtype=torch.half, + replace_with_kernel_inject=True) + string = generator("DeepSpeed is") + print(string) + + # Case 4: user provides config and keyword arguments (kwargs). Both config and kwargs are merged and kwargs take precedence. + + .. code-block:: python + + generator.model = deepspeed.init_inference(generator.model, config={"dtype": torch.half}, replace_with_kernel_inject=True) + string = generator("DeepSpeed is") + print(string) + + Arguments: + model: Required: original nn.module object without any wrappers + + config: Optional: instead of arguments, you can pass in a DS inference config dict or path to JSON file + + Returns: + A deepspeed.InferenceEngine wrapped model. 
+ """ + log_dist("DeepSpeed info: version={}, git-hash={}, git-branch={}".format(__version__, __git_hash__, + __git_branch__), + ranks=[0]) + + # Load config_dict from config first + if config is None: + config = {} + if isinstance(config, str): + with open(config, "r") as f: + config_dict = json.load(f) + elif isinstance(config, dict): + config_dict = config + else: + raise ValueError(f"'config' argument expected string or dictionary, got {type(config)}") + + # Update with values from kwargs, ensuring no conflicting overlap between config and kwargs + overlap_keys = set(config_dict.keys()).intersection(kwargs.keys()) + # If there is overlap, error out if values are different + for key in overlap_keys: + if config_dict[key] != kwargs[key]: + raise ValueError(f"Conflicting argument '{key}' in 'config':{config_dict[key]} and kwargs:{kwargs[key]}") + config_dict.update(kwargs) + + ds_inference_config = DeepSpeedInferenceConfig(**config_dict) + + engine = InferenceEngine(model, config=ds_inference_config) + + return engine diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/constants.py b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..30135f41b7b68920c32bb2a259a8e431196f4d53 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/constants.py @@ -0,0 +1,21 @@ +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: Apache-2.0 + +# DeepSpeed Team + +import os +from datetime import timedelta + +############################################# +# Torch distributed constants +############################################# +TORCH_DISTRIBUTED_DEFAULT_PORT = 29500 + +# Default process group wide timeout, if applicable. +# This only applies to the gloo and nccl backends +# (only if NCCL_BLOCKING_WAIT or NCCL_ASYNC_ERROR_HANDLING is set to 1). 
+# To make an attempt at backwards compatibility with THD, we use an +# extraordinarily high default timeout, given that THD did not have timeouts. +default_pg_timeout = timedelta(minutes=int(os.getenv("DEEPSPEED_TIMEOUT", default=30))) +INFERENCE_GENERIC_MODE = 'generic' +INFERENCE_SPECIALIZED_MODE = 'specialized' diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/env_report.py b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/env_report.py new file mode 100644 index 0000000000000000000000000000000000000000..37e33b1e873972e10194ef70a7fd4470af6a7e30 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/env_report.py @@ -0,0 +1,195 @@ +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: Apache-2.0 + +# DeepSpeed Team + +import os +import torch +import deepspeed +import subprocess +import argparse +from .ops.op_builder.all_ops import ALL_OPS +from .git_version_info import installed_ops, torch_info, accelerator_name +from deepspeed.accelerator import get_accelerator + +GREEN = '\033[92m' +RED = '\033[91m' +YELLOW = '\033[93m' +END = '\033[0m' +SUCCESS = f"{GREEN} [SUCCESS] {END}" +OKAY = f"{GREEN}[OKAY]{END}" +WARNING = f"{YELLOW}[WARNING]{END}" +FAIL = f'{RED}[FAIL]{END}' +INFO = '[INFO]' + +color_len = len(GREEN) + len(END) +okay = f"{GREEN}[OKAY]{END}" +warning = f"{YELLOW}[WARNING]{END}" + + +def op_report(verbose=True): + max_dots = 23 + max_dots2 = 11 + h = ["op name", "installed", "compatible"] + print("-" * (max_dots + max_dots2 + len(h[0]) + len(h[1]))) + print("DeepSpeed C++/CUDA extension op report") + print("-" * (max_dots + max_dots2 + len(h[0]) + len(h[1]))) + + print("NOTE: Ops not installed will be just-in-time (JIT) compiled at\n" + " runtime if needed. 
Op compatibility means that your system\n" + " meet the required dependencies to JIT install the op.") + + print("-" * (max_dots + max_dots2 + len(h[0]) + len(h[1]))) + print("JIT compiled ops requires ninja") + ninja_status = OKAY if ninja_installed() else FAIL + print('ninja', "." * (max_dots - 5), ninja_status) + print("-" * (max_dots + max_dots2 + len(h[0]) + len(h[1]))) + print(h[0], "." * (max_dots - len(h[0])), h[1], "." * (max_dots2 - len(h[1])), h[2]) + print("-" * (max_dots + max_dots2 + len(h[0]) + len(h[1]))) + installed = f"{GREEN}[YES]{END}" + no = f"{YELLOW}[NO]{END}" + for op_name, builder in ALL_OPS.items(): + dots = "." * (max_dots - len(op_name)) + is_compatible = OKAY if builder.is_compatible(verbose) else no + is_installed = installed if installed_ops.get(op_name, + False) and accelerator_name == get_accelerator()._name else no + dots2 = '.' * ((len(h[1]) + (max_dots2 - len(h[1]))) - (len(is_installed) - color_len)) + print(op_name, dots, is_installed, dots2, is_compatible) + print("-" * (max_dots + max_dots2 + len(h[0]) + len(h[1]))) + + +def ninja_installed(): + try: + import ninja # noqa: F401 # type: ignore + except ImportError: + return False + return True + + +def nvcc_version(): + import torch.utils.cpp_extension + cuda_home = torch.utils.cpp_extension.CUDA_HOME + if cuda_home is None: + return f"{RED} [FAIL] cannot find CUDA_HOME via torch.utils.cpp_extension.CUDA_HOME={torch.utils.cpp_extension.CUDA_HOME} {END}" + try: + output = subprocess.check_output([cuda_home + "/bin/nvcc", "-V"], universal_newlines=True) + except FileNotFoundError: + return f"{RED} [FAIL] nvcc missing {END}" + output_split = output.split() + release_idx = output_split.index("release") + release = output_split[release_idx + 1].replace(',', '').split(".") + return ".".join(release) + + +def installed_cann_path(): + if "ASCEND_HOME_PATH" in os.environ or os.path.exists(os.environ["ASCEND_HOME_PATH"]): + return os.environ["ASCEND_HOME_PATH"] + return None + + +def 
installed_cann_version(): + import re + ascend_path = installed_cann_path() + if ascend_path is None: + return f"CANN_HOME does not exist, unable to compile NPU op(s)" + cann_version = "" + for dirpath, _, filenames in os.walk(os.path.realpath(ascend_path)): + if cann_version: + break + install_files = [file for file in filenames if re.match(r"ascend_.*_install\.info", file)] + if install_files: + filepath = os.path.join(dirpath, install_files[0]) + with open(filepath, "r") as f: + for line in f: + if line.find("version") != -1: + cann_version = line.strip().split("=")[-1] + break + return cann_version + + +def get_shm_size(): + try: + shm_stats = os.statvfs('/dev/shm') + except (OSError, FileNotFoundError, ValueError, AttributeError): + return "UNKNOWN", None + + shm_size = shm_stats.f_frsize * shm_stats.f_blocks + shm_hbytes = human_readable_size(shm_size) + warn = [] + if shm_size < 512 * 1024**2: + warn.append( + f" {YELLOW} [WARNING] /dev/shm size might be too small, if running in docker increase to at least --shm-size='1gb' {END}" + ) + if get_accelerator().communication_backend_name() == "nccl": + warn.append( + f" {YELLOW} [WARNING] see more details about NCCL requirements: https://docs.nvidia.com/deeplearning/nccl/user-guide/docs/troubleshooting.html#sharing-data {END}" + ) + return shm_hbytes, warn + + +def human_readable_size(size): + units = ['B', 'KB', 'MB', 'GB', 'TB'] + i = 0 + while size >= 1024 and i < len(units) - 1: + size /= 1024 + i += 1 + return f'{size:.2f} {units[i]}' + + +def debug_report(): + max_dots = 33 + + report = [("torch install path", torch.__path__), ("torch version", torch.__version__), + ("deepspeed install path", deepspeed.__path__), + ("deepspeed info", f"{deepspeed.__version__}, {deepspeed.__git_hash__}, {deepspeed.__git_branch__}")] + if get_accelerator().device_name() == 'cuda': + hip_version = getattr(torch.version, "hip", None) + report.extend([("torch cuda version", torch.version.cuda), ("torch hip version", 
hip_version), + ("nvcc version", (None if hip_version else nvcc_version())), + ("deepspeed wheel compiled w.", f"torch {torch_info['version']}, " + + (f"hip {torch_info['hip_version']}" if hip_version else f"cuda {torch_info['cuda_version']}")) + ]) + elif get_accelerator().device_name() == 'npu': + import torch_npu + report.extend([("deepspeed wheel compiled w.", f"torch {torch_info['version']}"), + ("torch_npu install path", torch_npu.__path__), ("torch_npu version", torch_npu.__version__), + ("ascend_cann version", installed_cann_version())]) + else: + report.extend([("deepspeed wheel compiled w.", f"torch {torch_info['version']} ")]) + + report.append(("shared memory (/dev/shm) size", get_shm_size())) + + print("DeepSpeed general environment info:") + for name, value in report: + warns = [] + if isinstance(value, tuple): + value, warns = value + print(name, "." * (max_dots - len(name)), value) + if warns: + for warn in warns: + print(warn) + + +def parse_arguments(): + parser = argparse.ArgumentParser() + parser.add_argument('--hide_operator_status', + action='store_true', + help='Suppress display of installation and compatibility statuses of DeepSpeed operators. 
') + parser.add_argument('--hide_errors_and_warnings', action='store_true', help='Suppress warning and error messages.') + args = parser.parse_args() + return args + + +def main(hide_operator_status=False, hide_errors_and_warnings=False): + if not hide_operator_status: + op_report(verbose=not hide_errors_and_warnings) + debug_report() + + +def cli_main(): + args = parse_arguments() + main(hide_operator_status=args.hide_operator_status, hide_errors_and_warnings=args.hide_errors_and_warnings) + + +if __name__ == "__main__": + main() diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/git_version_info.py b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/git_version_info.py new file mode 100644 index 0000000000000000000000000000000000000000..70c536d2f78eee24c21db985c43fc9a3c017d4ee --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/git_version_info.py @@ -0,0 +1,31 @@ +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: Apache-2.0 + +# DeepSpeed Team + +try: + # This is populated by setup.py + from .git_version_info_installed import * # noqa: F401 # type: ignore +except ModuleNotFoundError: + import os + if os.path.isfile('version.txt'): + # Will be missing from checkouts that haven't been installed (e.g., readthedocs) + version = open('version.txt', 'r').read().strip() + else: + version = "0.0.0" + git_hash = '[none]' + git_branch = '[none]' + + from .ops.op_builder.all_ops import ALL_OPS + installed_ops = dict.fromkeys(ALL_OPS.keys(), False) + accelerator_name = "" + torch_info = {'version': "0.0", "cuda_version": "0.0", "hip_version": "0.0"} + +# compatible_ops list is recreated for each launch +from .ops.op_builder.all_ops import ALL_OPS + +compatible_ops = dict.fromkeys(ALL_OPS.keys(), False) +for op_name, builder in ALL_OPS.items(): + op_compatible = builder.is_compatible() + compatible_ops[op_name] = op_compatible + compatible_ops["deepspeed_not_implemented"] = False diff --git 
a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/git_version_info_installed.py b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/git_version_info_installed.py new file mode 100644 index 0000000000000000000000000000000000000000..25d99e997afe392d43c1b6931ade1315812273f3 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/git_version_info_installed.py @@ -0,0 +1,6 @@ +version='0.15.4' +git_hash='unknown' +git_branch='unknown' +installed_ops={'async_io': False, 'fused_adam': False, 'cpu_adam': False, 'cpu_adagrad': False, 'cpu_lion': False, 'evoformer_attn': False, 'fp_quantizer': False, 'fused_lamb': False, 'fused_lion': False, 'gds': False, 'transformer_inference': False, 'inference_core_ops': False, 'cutlass_ops': False, 'quantizer': False, 'ragged_device_ops': False, 'ragged_ops': False, 'random_ltd': False, 'sparse_attn': False, 'spatial_inference': False, 'transformer': False, 'stochastic_transformer': False} +accelerator_name='cuda' +torch_info={'version': '0.0', 'bf16_support': False, 'cuda_version': '0.0', 'nccl_version': '0.0', 'hip_version': '0.0'} diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/__init__.py b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ad95c58f76090188b5d9c73e1e3df4eb2b2c678f --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) Microsoft Corporation. 
+# SPDX-License-Identifier: Apache-2.0 + +# DeepSpeed Team + +from .transformers.ds_transformer import DeepSpeedTransformerInference +from .transformers.clip_encoder import DSClipEncoder diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__init__.py b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6c5067f71c8faf166bc78e88f9b62e8627dda7c7 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: Apache-2.0 + +# DeepSpeed Team +'''Copyright The Microsoft DeepSpeed Team''' diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/clip_encoder.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/clip_encoder.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fee6c0dcd259cab1af9f5e16e9d8e49d6e716413 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/clip_encoder.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_bert.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_bert.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bef052682e611ba4a9063002b63908a4e6c8c9d6 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_bert.cpython-310.pyc differ diff --git 
a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_bloom.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_bloom.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ceaf4dfaab2b0e8d7a1df39fef9d494e7161b9e5 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_bloom.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_gpt.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_gpt.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..abaf58762f8f37dc0af1f9b4717890170ee0b6ff Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_gpt.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_llama2.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_llama2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fb41c389ef6fecb85c4075050a772da958956f7e Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_llama2.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_megatron_gpt.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_megatron_gpt.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a60779c734268a301458bfebc6855ff350345587 Binary files 
/dev/null and b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_megatron_gpt.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_opt.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_opt.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d647c1ac6aade34997e3619d9c00c9fdbf18f4af Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_opt.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_transformer.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_transformer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..53c05b6b9f5a32864798b968ab25ddbbf7964841 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/__pycache__/ds_transformer.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/clip_encoder.py b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/clip_encoder.py new file mode 100644 index 0000000000000000000000000000000000000000..848a5b48dcf1336a4c80c20a9e21388cdff9e514 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/clip_encoder.py @@ -0,0 +1,77 @@ +# Copyright (c) Microsoft Corporation. 
+# SPDX-License-Identifier: Apache-2.0 + +# DeepSpeed Team + +import torch +from deepspeed.accelerator import get_accelerator +from ..features.cuda_graph import CUDAGraph + + +class DSClipEncoder(CUDAGraph, torch.nn.Module): + + def __init__(self, enc, enable_cuda_graph=False): + super().__init__(enable_cuda_graph=enable_cuda_graph) + enc.text_model._build_causal_attention_mask = self._build_causal_attention_mask + self.enc = enc + self.device = self.enc.device + self.dtype = self.enc.dtype + self.cuda_graph_created = [False, False] + self.static_inputs = [None, None] + self.static_kwargs = [None, None] + self.static_output = [None, None] + self._cuda_graphs = [None, None] + self.iter = 0 + self.config = self.enc.config + + def _build_causal_attention_mask(self, bsz, seq_len, dtype): + mask = torch.empty(bsz, seq_len, seq_len, dtype=dtype, device=get_accelerator().current_device_name()) + mask.fill_(torch.tensor(torch.finfo(dtype).min)) + mask.triu_(1) + mask = mask.unsqueeze(1) + return mask + + def _graph_replay(self, *inputs, **kwargs): + for i in range(len(inputs)): + if torch.is_tensor(inputs[i]): + self.static_inputs[self.iter][i].copy_(inputs[i]) + for k in kwargs: + if torch.is_tensor(kwargs[k]): + self.static_kwargs[self.iter][k].copy_(kwargs[k]) + get_accelerator().replay_graph(self._cuda_graphs[self.iter]) + return self.static_output[self.iter] + + def forward(self, *inputs, **kwargs): + if self.enable_cuda_graph: + if self.cuda_graph_created[self.iter]: + outputs = self._graph_replay(*inputs, **kwargs) + else: + self._create_cuda_graph(*inputs, **kwargs) + outputs = self._graph_replay(*inputs, **kwargs) + self.iter = (self.iter + 1) % 2 + return outputs + else: + return self.enc(*inputs, **kwargs) + + def _create_cuda_graph(self, *inputs, **kwargs): + # warmup to create the workspace and cublas handle + cuda_stream = torch.cuda.Stream() + cuda_stream.wait_stream(torch.cuda.current_stream()) + with torch.cuda.stream(cuda_stream): + for i in range(3): + 
class DeepSpeedTransformerBase(nn.Module):
    """Placeholder base class for DeepSpeed transformer inference layers.

    This is intended to become the clean base class that eventually replaces
    ``DeepSpeedTransformerInference``; its final shape is not yet known, so it
    currently defines no behavior of its own.
    """

    def __init__(self):
        # BUGFIX: the original subclassed ``nn.module`` (lowercase), which does
        # not exist -- importing the module raised AttributeError.  It also
        # skipped super().__init__(), leaving nn.Module's parameter/submodule
        # registries uninitialized for any future subclass.
        super().__init__()
class DeepSpeedBERTInference(DeepSpeedTransformerInference):
    """DeepSpeed inference layer specialized for BERT-style transformers.

    Thin wrapper: construction is delegated unchanged to
    ``DeepSpeedTransformerInference``; no BERT-specific behavior is added here.
    """

    def __init__(self,
                 config,
                 mp_group=None,
                 quantize_scales=None,
                 quantize_groups=1,
                 merge_count=1,
                 mlp_extra_grouping=False):
        # Forward every argument verbatim to the generic transformer layer.
        super().__init__(config,
                         mp_group=mp_group,
                         quantize_scales=quantize_scales,
                         quantize_groups=quantize_groups,
                         merge_count=merge_count,
                         mlp_extra_grouping=mlp_extra_grouping)
+ """ + + def __init__(self, + config, + mp_group=None, + quantize_scales=None, + quantize_groups=1, + merge_count=1, + mlp_extra_grouping=False): + super().__init__(config, mp_group, quantize_scales, quantize_groups, merge_count, mlp_extra_grouping) diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/ds_gpt.py b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/ds_gpt.py new file mode 100644 index 0000000000000000000000000000000000000000..3c3baed1f6186a89ed2b852f27ad56a5ab00d9f6 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/ds_gpt.py @@ -0,0 +1,20 @@ +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: Apache-2.0 + +# DeepSpeed Team + +from deepspeed.model_implementations.transformers.ds_transformer import DeepSpeedTransformerInference + + +class DeepSpeedGPTInference(DeepSpeedTransformerInference): + """Initialize the DeepSpeed GPT Transformer Layer. + """ + + def __init__(self, + config, + mp_group=None, + quantize_scales=None, + quantize_groups=1, + merge_count=1, + mlp_extra_grouping=False): + super().__init__(config, mp_group, quantize_scales, quantize_groups, merge_count, mlp_extra_grouping) diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/ds_llama2.py b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/ds_llama2.py new file mode 100644 index 0000000000000000000000000000000000000000..325bfb4f7e181d8b718748fd5ebf2f4f1ca3af64 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/ds_llama2.py @@ -0,0 +1,58 @@ +# Copyright (c) Microsoft Corporation. 
class DeepSpeedLlama2Inference(DeepSpeedTransformerInference):
    """Initialize the DeepSpeed Llama2 Transformer Layer.

    (Docstring previously said "OPT" -- copy-paste error from ds_opt.py.)
    """

    def __init__(self,
                 config,
                 mp_group=None,
                 quantize_scales=None,
                 quantize_groups=1,
                 merge_count=1,
                 mlp_extra_grouping=False):
        super().__init__(config, mp_group, quantize_scales, quantize_groups, merge_count, mlp_extra_grouping)

    def forward(self, *args, **kwargs):
        """Run self-attention (with KV cache) followed by the MLP.

        args[0] is the hidden-states tensor; presumably (batch, seq, hidden)
        given the ``input.shape[1] > 1`` prompt check -- TODO confirm against
        callers.  Returns the layer output cast back to the input dtype.
        """
        input = args[0]
        input_mask = None
        get_present = True

        self.allocate_workspace(input.size())

        # We set the prev key/value to None when there is a prompt
        # (seq_len > 1 means this is prefill, not incremental decoding).
        if input.shape[1] > 1:
            self.layer_past = None
        layer_past = self.layer_past

        input_type = input.dtype

        if (self.config.dtype in [torch.float16, torch.bfloat16, torch.int8]) \
            and input.dtype == torch.float:
            # BUGFIX: originally read ``self.dtype``, an attribute the parent
            # class never assigns (would raise AttributeError on this path).
            # DeepSpeedTransformerInference.forward uses self.config.dtype.
            target_dtype = torch.half if self.config.dtype == torch.int8 else self.config.dtype
            input = input.to(target_dtype)

        with torch.no_grad():
            attention_output, key, value, context_outputtn_ctx, inp_norm = \
                self.attention(input,
                               input_mask,
                               None,
                               layer_past,
                               get_present,
                               None, None, None,
                               self.norm_w,
                               self.norm_b,
                               None)
            # Cache the present key/value for the next decoding step.
            self.layer_past = (key, value)
            output = self.mlp(attention_output, input, inp_norm, self.attention.attn_ob)

        output = output.to(input_type)
        return output
class DeepSpeedMegatronGPTInference(DeepSpeedTransformerInference):
    """DeepSpeed inference layer specialized for Megatron-GPT transformers.

    Adds nothing beyond the generic ``DeepSpeedTransformerInference``; it
    exists so injection code can select a model-specific class.
    """

    def __init__(self,
                 config,
                 mp_group=None,
                 quantize_scales=None,
                 quantize_groups=1,
                 merge_count=1,
                 mlp_extra_grouping=False):
        # Hand all arguments straight through to the parent constructor.
        super().__init__(config,
                         mp_group=mp_group,
                         quantize_scales=quantize_scales,
                         quantize_groups=quantize_groups,
                         merge_count=merge_count,
                         mlp_extra_grouping=mlp_extra_grouping)
+ """ + + def __init__(self, + config, + mp_group=None, + quantize_scales=None, + quantize_groups=1, + merge_count=1, + mlp_extra_grouping=False): + super().__init__(config, mp_group, quantize_scales, quantize_groups, merge_count, mlp_extra_grouping) diff --git a/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/ds_transformer.py b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/ds_transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..360113b78a3db0193b74a33da551fa4ad859ac25 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/deepspeed/model_implementations/transformers/ds_transformer.py @@ -0,0 +1,191 @@ +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: Apache-2.0 + +# DeepSpeed Team + +import torch +import torch.nn as nn +from deepspeed import comm as dist +from deepspeed.ops.transformer.inference.op_binding.layer_norm import LayerNormOp +from deepspeed.utils.logging import log_dist + +from deepspeed.ops.transformer.inference.ds_mlp import DeepSpeedMLP +from deepspeed.ops.transformer.inference.ds_attention import DeepSpeedSelfAttention, BloomSelfAttention +from deepspeed.ops.transformer.inference.op_binding.workspace import WorkspaceOp +from deepspeed.accelerator import get_accelerator +import deepspeed +if deepspeed.HAS_TRITON: + from deepspeed.ops.transformer.inference.triton.mlp import TritonMLP + from deepspeed.ops.transformer.inference.triton.attention import TritonSelfAttention + + +class DeepSpeedTransformerInference(nn.Module): + """Initialize the DeepSpeed Transformer Layer. + Arguments: + layer_id: The layer index starting from 0, e.g. if model has 24 transformer layers, + layer_id will be 0,1,2...23 when each layer object is instantiated + config: An object of DeepSpeedInferenceConfig + mp_group: Model parallelism group initialized on the modeling side. 
+ quantize_scales: This argument groups all the layers' scales used for quantization + quantize_groups: Number of groups used for quantizing the model + merge_count: Shows the number of model-parallel checkpoints merged before running inference. + We use this argument to control the quantization scale for the model parameters if a bigger + quantize-grouping than 1 is used. + mlp_extra_grouping: This flag is used to show a 2x higher number of groups used for the MLP part + of a Transformer layer. We use this feature for quantization to reduce the convergence impact + for specific downstream tasks. + """ + layer_id = 0 + workspace = None + + def __init__(self, + config, + mp_group=None, + quantize_scales=None, + quantize_groups=1, + merge_count=1, + mlp_extra_grouping=False): + super(DeepSpeedTransformerInference, self).__init__() + + self.config = config + self.config.layer_id = DeepSpeedTransformerInference.layer_id + DeepSpeedTransformerInference.layer_id += 1 + + data_type = torch.half if self.config.dtype == torch.int8 else self.config.dtype + + if DeepSpeedTransformerInference.layer_id == 1: + log_dist(f"DeepSpeed-Inference config: {self.config.__dict__}", [0]) + if deepspeed.HAS_TRITON and self.config.use_triton: + log_dist(f"Injecting Triton kernels ...", [0]) + + if self.config.bigscience_bloom: + self.attention = BloomSelfAttention(self.config, mp_group, quantize_scales, quantize_groups, merge_count) + assert not self.config.use_triton + else: + if deepspeed.HAS_TRITON and self.config.use_triton: + self.attention = TritonSelfAttention(self.config) + else: + self.attention = DeepSpeedSelfAttention(self.config, mp_group, quantize_scales, quantize_groups, + merge_count) + + if deepspeed.HAS_TRITON and self.config.use_triton: + self.mlp = TritonMLP(self.config) + else: + self.mlp = DeepSpeedMLP(self.config, mp_group, quantize_scales, quantize_groups, merge_count, + mlp_extra_grouping) + + device = get_accelerator().current_device_name() # if 
config.bigscience_bloom else 'cpu' + if self.config.set_empty_params: + self.norm_w = None + self.norm_b = None + else: + self.norm_w = nn.Parameter(torch.empty(self.config.hidden_size, dtype=data_type, device=device), + requires_grad=False) + self.norm_b = nn.Parameter(torch.empty(self.config.hidden_size, dtype=data_type, device=device), + requires_grad=False) + self.layer_past = None + self.layer_norm = LayerNormOp() + if DeepSpeedTransformerInference.workspace is None: + DeepSpeedTransformerInference.workspace = WorkspaceOp(self.config) + self._should_allocate_workspace = True + + def allocate_workspace(self, size): + # Allocate memory only on first layer forward + if self.config.layer_id == 0 and self._should_allocate_workspace: + DeepSpeedTransformerInference.workspace.allocate_workspace( + self.config.hidden_size, self.config.heads, size[1], size[0], DeepSpeedTransformerInference.layer_id, + self.config.mp_size, self.config.bigscience_bloom, + dist.get_rank() if dist.is_initialized() else 0, self.config.max_out_tokens, + self.config.min_out_tokens) + self._should_allocate_workspace = False + + @classmethod + def reset_cache(cls): + if cls.workspace is not None: + cls.workspace.reset_cache() + + def forward( + self, + input=None, + input_mask=None, + attention_mask=None, + attn_mask=None, + head_mask=None, + layer_past=None, + get_key_value=False, + get_present=False, + encoder_output=None, + enc_dec_attn_mask=None, + x=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + use_cache=False, + alibi=None, + output_attentions=False, + # TODO(arashb): 'layer_head_mask' and 'past_key_value' are only added to satisfy the OPT models API. + # This needs to be redesigned later! 
+ layer_head_mask=None, + past_key_value=None, + **kwargs): + + if x is not None: + input = x + if "hidden_states" in kwargs: + input = kwargs["hidden_states"] + + input_mask = (input_mask if attn_mask is None else attn_mask) if attention_mask is None else attention_mask + + self.allocate_workspace(input.size()) + + get_present = (get_present or get_key_value or use_cache) + input_mask = input_mask if attention_mask is None else attention_mask + + # We set the prev key/value to None when there is a prompt + if input.shape[1] > 1: + self.layer_past = None + layer_past = layer_past if layer_past is not None else self.layer_past + head_mask = layer_head_mask if layer_head_mask is not None else head_mask + + attn_mask = None + if isinstance(input, tuple): + attn_mask = input[1] + input = input[0] + input_type = input.dtype + + if (self.config.dtype in [torch.float16, torch.bfloat16, torch.int8]) \ + and input.dtype == torch.float: + target_dtype = torch.half if self.config.dtype == torch.int8 else self.config.dtype + input = input.to(target_dtype) + + with torch.no_grad(): + attention_output, key, value, context_outputtn_ctx, inp_norm = \ + self.attention(input, + input_mask, + head_mask, + layer_past, + get_present, + encoder_hidden_states, + encoder_attention_mask, + output_attentions, + self.norm_w, + self.norm_b, + alibi, + **kwargs) + + presents = (key, value) + self.layer_past = presents if layer_past is None else None + output = self.mlp(attention_output, input, inp_norm, self.attention.attn_ob) + + if not self.config.pre_layer_norm: + output = self.layer_norm(output, self.norm_w, self.norm_b, self.config.epsilon) + + output = output.to(input_type) + if get_present: + output = (output, presents) + + if self.config.return_single_tuple: + return (output, ) + elif self.config.return_tuple: + return output if type(output) is tuple else (output, attn_mask) + else: + return output diff --git 
a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/__init__.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..51e1a4d1b7cefce83cc28f19d5571bfa6d81902a Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/__init__.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/coreviews.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/coreviews.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..001cd2e3edb89e1a0388cba96483f0b0390250df Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/coreviews.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/digraph.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/digraph.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f4ce630e54a71970abadfb7da21a34f55446bd99 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/digraph.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/filters.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/filters.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..70870dab540aeacbe32e0c59748cce9ecc4a0886 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/filters.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/function.cpython-310.pyc 
b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/function.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2a350a4e1c0a9be8a8c173bb9a072e81651d520d Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/function.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/graph.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/graph.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e3984a599a349a3407a3db8fe01ae137fd343d65 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/graph.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/graphviews.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/graphviews.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..44c6269d8569b74d58592a735bb4858d78abc42f Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/graphviews.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/multidigraph.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/multidigraph.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d47884322223ab77a1c2694141e6c11cefe48f11 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/multidigraph.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/multigraph.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/multigraph.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..b7284bb036c5d3b4beac1661711b9b85b255f501 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/multigraph.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/reportviews.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/reportviews.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a44ec24fe19e8f8a25329bfb2b8ecf960ff3d24b Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/__pycache__/reportviews.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__init__.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/__init__.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e6b3328ce7cbe9f33753b9ac4277b7e7bb20f4c2 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/dispatch_interface.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/dispatch_interface.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..43a2c7d9276cb75c5f2faf4e8695f0402bb6594c Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/dispatch_interface.cpython-310.pyc differ diff --git 
a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/historical_tests.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/historical_tests.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..953b82f616cfc6f70a826e7e19168d1f581160f6 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/historical_tests.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_coreviews.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_coreviews.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fb6b32e5215e1a5d37a71204aec4ec099d97023a Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_coreviews.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_digraph.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_digraph.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a80e379e053de026740a880d91e3892f60722ad0 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_digraph.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_digraph_historical.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_digraph_historical.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..78f1264134309bcc3a955f658cfe0ba4855de213 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_digraph_historical.cpython-310.pyc differ diff --git 
a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_filters.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_filters.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0d77cecb88a88882d0ca51bee573a68feb126eeb Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_filters.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_function.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_function.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..57aa42f86b31cb50f9e16fa1e3700bc3585db32a Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_function.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graph.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graph.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..49bb0cc911fafdaacb2cc3123298a92867cdc8f3 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graph.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graph_historical.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graph_historical.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..edcfcd60aaa6999c7394412f93e41ba5a739f227 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graph_historical.cpython-310.pyc differ diff --git 
a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graphviews.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graphviews.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2a85029cc229f5ab4c3bba611cfd0e489f182173 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_graphviews.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_multidigraph.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_multidigraph.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..de936149fa9b00d300c5caade0575ecca8e95d85 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_multidigraph.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_multigraph.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_multigraph.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bd0f0c62ce2488d9b91413fc7870f49c58da569f Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_multigraph.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_reportviews.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_reportviews.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dc3a86bf3f8917a4477820016da4e7f3cbd3b7ad Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_reportviews.cpython-310.pyc differ diff --git 
a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_special.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_special.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a5cf7c418150d7f6b3522deccb580d7f1cc5b641 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_special.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_subgraphviews.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_subgraphviews.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0051d7a23649c12a889a6247d7e019619e13af5 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/__pycache__/test_subgraphviews.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/dispatch_interface.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/dispatch_interface.py new file mode 100644 index 0000000000000000000000000000000000000000..5cc908d707c8efa30ce1e334313e1f946bdb5348 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/dispatch_interface.py @@ -0,0 +1,185 @@ +# This file contains utilities for testing the dispatching feature + +# A full test of all dispatchable algorithms is performed by +# modifying the pytest invocation and setting an environment variable +# NETWORKX_TEST_BACKEND=nx_loopback pytest +# This is comprehensive, but only tests the `test_override_dispatch` +# function in networkx.classes.backends. + +# To test the `_dispatchable` function directly, several tests scattered throughout +# NetworkX have been augmented to test normal and dispatch mode. +# Searching for `dispatch_interface` should locate the specific tests. 
class LoopbackBackendInterface:
    """Test backend ("nx_loopback") used to exercise NetworkX dispatching.

    Algorithms resolve to the original NetworkX implementations, while
    ``convert_from_nx`` rebuilds each input graph as a ``Loopback*`` instance,
    so the dispatch machinery (conversion, attribute filtering) gets tested
    without changing results.
    """

    def __getattr__(self, item):
        # Expose every registered NetworkX algorithm as a backend method by
        # returning the undecorated original function.
        try:
            return nx.utils.backends._registered_algorithms[item].orig_func
        except KeyError:
            raise AttributeError(item) from None

    @staticmethod
    def convert_from_nx(
        graph,
        *,
        edge_attrs=None,
        node_attrs=None,
        preserve_edge_attrs=None,
        preserve_node_attrs=None,
        preserve_graph_attrs=None,
        name=None,
        graph_name=None,
    ):
        """Rebuild *graph* as the matching Loopback class, copying only the
        requested node/edge/graph attributes.

        `edge_attrs`/`node_attrs` map attribute name -> default value; a
        default of None means "copy only if present on the original".
        """
        if name in {
            # Raise if input graph changes. See test_dag.py::test_topological_sort6
            "lexicographical_topological_sort",
            "topological_generations",
            "topological_sort",
            # Would be nice to some day avoid these cutoffs of full testing
        }:
            return graph
        if isinstance(graph, NodeView):
            # Convert to a Graph with only nodes (no edges)
            new_graph = Graph()
            new_graph.add_nodes_from(graph.items())
            graph = new_graph
            G = LoopbackGraph()
        elif not isinstance(graph, Graph):
            raise TypeError(
                f"Bad type for graph argument {graph_name} in {name}: {type(graph)}"
            )
        elif graph.__class__ in {Graph, LoopbackGraph}:
            G = LoopbackGraph()
        elif graph.__class__ in {DiGraph, LoopbackDiGraph}:
            G = LoopbackDiGraph()
        elif graph.__class__ in {MultiGraph, LoopbackMultiGraph}:
            G = LoopbackMultiGraph()
        elif graph.__class__ in {MultiDiGraph, LoopbackMultiDiGraph}:
            G = LoopbackMultiDiGraph()
        elif graph.__class__ in {PlanarEmbedding, LoopbackPlanarEmbedding}:
            G = LoopbackDiGraph()  # or LoopbackPlanarEmbedding
        else:
            # Would be nice to handle these better some day
            # nx.algorithms.approximation.kcomponents._AntiGraph
            # nx.classes.tests.test_multidigraph.MultiDiGraphSubClass
            # nx.classes.tests.test_multigraph.MultiGraphSubClass
            G = graph.__class__()

        if preserve_graph_attrs:
            G.graph.update(graph.graph)

        # add nodes
        G.add_nodes_from(graph)
        if preserve_node_attrs:
            # Copy every node attribute unchanged.
            for n, dd in G._node.items():
                dd.update(graph.nodes[n])
        elif node_attrs:
            # Copy only requested attributes, filling in defaults; a None
            # default means the attribute is optional (copy only if present).
            for n, dd in G._node.items():
                dd.update(
                    (attr, graph._node[n].get(attr, default))
                    for attr, default in node_attrs.items()
                    if default is not None or attr in graph._node[n]
                )

        # tools to build datadict and keydict
        if preserve_edge_attrs:

            def G_new_datadict(old_dd):
                # Full copy of the edge data dict.
                return G.edge_attr_dict_factory(old_dd)
        elif edge_attrs:

            def G_new_datadict(old_dd):
                # Filtered copy: requested attributes only, with defaults.
                return G.edge_attr_dict_factory(
                    (attr, old_dd.get(attr, default))
                    for attr, default in edge_attrs.items()
                    if default is not None or attr in old_dd
                )
        else:

            def G_new_datadict(old_dd):
                # No edge attributes requested: fresh empty data dict.
                return G.edge_attr_dict_factory()

        if G.is_multigraph():

            def G_new_inner(keydict):
                # Multigraphs have an extra key level between neighbor and
                # data dict; rebuild it key by key.
                kd = G.adjlist_inner_dict_factory(
                    (k, G_new_datadict(dd)) for k, dd in keydict.items()
                )
                return kd
        else:
            G_new_inner = G_new_datadict

        # add edges keeping the same order in _adj and _pred
        G_adj = G._adj
        if G.is_directed():
            for n, nbrs in graph._adj.items():
                G_adj[n].update((nbr, G_new_inner(dd)) for nbr, dd in nbrs.items())
            # ensure same datadict for pred and adj; and pred order of graph._pred
            G_pred = G._pred
            for n, nbrs in graph._pred.items():
                G_pred[n].update((nbr, G_adj[nbr][n]) for nbr in nbrs)
        else:  # undirected
            for n, nbrs in graph._adj.items():
                # ensure same datadict for both ways; and adj order of graph._adj
                G_adj[n].update(
                    (nbr, G_adj[nbr][n] if n in G_adj[nbr] else G_new_inner(dd))
                    for nbr, dd in nbrs.items()
                )

        return G

    @staticmethod
    def convert_to_nx(obj, *, name=None):
        # Results need no conversion back: the loopback backend already
        # produces native NetworkX objects.
        return obj

    @staticmethod
    def on_start_tests(items):
        # Verify that items can be xfailed
        for item in items:
            assert hasattr(item, "add_marker")

    def can_run(self, name, args, kwargs):
        # It is unnecessary to define this function if algorithms are fully supported.
        # We include it for illustration purposes.
        return hasattr(self, name)
+ G = self.G() + assert not G.has_node(["A"]) + assert not G.has_node({"A": 1}) + + def test_add_nodes_from(self): + G = self.G() + G.add_nodes_from(list("ABCDEFGHIJKL")) + assert G.has_node("L") + G.remove_nodes_from(["H", "I", "J", "K", "L"]) + G.add_nodes_from([1, 2, 3, 4]) + assert sorted(G.nodes(), key=str) == [ + 1, + 2, + 3, + 4, + "A", + "B", + "C", + "D", + "E", + "F", + "G", + ] + # test __iter__ + assert sorted(G, key=str) == [1, 2, 3, 4, "A", "B", "C", "D", "E", "F", "G"] + + def test_contains(self): + G = self.G() + G.add_node("A") + assert "A" in G + assert [] not in G # never raise a Key or TypeError in this test + assert {1: 1} not in G + + def test_add_remove(self): + # Test add_node and remove_node acting for various nbunch + G = self.G() + G.add_node("m") + assert G.has_node("m") + G.add_node("m") # no complaints + pytest.raises(nx.NetworkXError, G.remove_node, "j") + G.remove_node("m") + assert list(G) == [] + + def test_nbunch_is_list(self): + G = self.G() + G.add_nodes_from(list("ABCD")) + G.add_nodes_from(self.P3) # add nbunch of nodes (nbunch=Graph) + assert sorted(G.nodes(), key=str) == [1, 2, 3, "A", "B", "C", "D"] + G.remove_nodes_from(self.P3) # remove nbunch of nodes (nbunch=Graph) + assert sorted(G.nodes(), key=str) == ["A", "B", "C", "D"] + + def test_nbunch_is_set(self): + G = self.G() + nbunch = set("ABCDEFGHIJKL") + G.add_nodes_from(nbunch) + assert G.has_node("L") + + def test_nbunch_dict(self): + # nbunch is a dict with nodes as keys + G = self.G() + nbunch = set("ABCDEFGHIJKL") + G.add_nodes_from(nbunch) + nbunch = {"I": "foo", "J": 2, "K": True, "L": "spam"} + G.remove_nodes_from(nbunch) + assert sorted(G.nodes(), key=str), ["A", "B", "C", "D", "E", "F", "G", "H"] + + def test_nbunch_iterator(self): + G = self.G() + G.add_nodes_from(["A", "B", "C", "D", "E", "F", "G", "H"]) + n_iter = self.P3.nodes() + G.add_nodes_from(n_iter) + assert sorted(G.nodes(), key=str) == [ + 1, + 2, + 3, + "A", + "B", + "C", + "D", + "E", + "F", + 
"G", + "H", + ] + n_iter = self.P3.nodes() # rebuild same iterator + G.remove_nodes_from(n_iter) # remove nbunch of nodes (nbunch=iterator) + assert sorted(G.nodes(), key=str) == ["A", "B", "C", "D", "E", "F", "G", "H"] + + def test_nbunch_graph(self): + G = self.G() + G.add_nodes_from(["A", "B", "C", "D", "E", "F", "G", "H"]) + nbunch = self.K3 + G.add_nodes_from(nbunch) + assert sorted(G.nodes(), key=str), [ + 1, + 2, + 3, + "A", + "B", + "C", + "D", + "E", + "F", + "G", + "H", + ] + + # Edges + + def test_add_edge(self): + G = self.G() + pytest.raises(TypeError, G.add_edge, "A") + + G.add_edge("A", "B") # testing add_edge() + G.add_edge("A", "B") # should fail silently + assert G.has_edge("A", "B") + assert not G.has_edge("A", "C") + assert G.has_edge(*("A", "B")) + if G.is_directed(): + assert not G.has_edge("B", "A") + else: + # G is undirected, so B->A is an edge + assert G.has_edge("B", "A") + + G.add_edge("A", "C") # test directedness + G.add_edge("C", "A") + G.remove_edge("C", "A") + if G.is_directed(): + assert G.has_edge("A", "C") + else: + assert not G.has_edge("A", "C") + assert not G.has_edge("C", "A") + + def test_self_loop(self): + G = self.G() + G.add_edge("A", "A") # test self loops + assert G.has_edge("A", "A") + G.remove_edge("A", "A") + G.add_edge("X", "X") + assert G.has_node("X") + G.remove_node("X") + G.add_edge("A", "Z") # should add the node silently + assert G.has_node("Z") + + def test_add_edges_from(self): + G = self.G() + G.add_edges_from([("B", "C")]) # test add_edges_from() + assert G.has_edge("B", "C") + if G.is_directed(): + assert not G.has_edge("C", "B") + else: + assert G.has_edge("C", "B") # undirected + + G.add_edges_from([("D", "F"), ("B", "D")]) + assert G.has_edge("D", "F") + assert G.has_edge("B", "D") + + if G.is_directed(): + assert not G.has_edge("D", "B") + else: + assert G.has_edge("D", "B") # undirected + + def test_add_edges_from2(self): + G = self.G() + # after failing silently, should add 2nd edge + 
G.add_edges_from([tuple("IJ"), list("KK"), tuple("JK")]) + assert G.has_edge(*("I", "J")) + assert G.has_edge(*("K", "K")) + assert G.has_edge(*("J", "K")) + if G.is_directed(): + assert not G.has_edge(*("K", "J")) + else: + assert G.has_edge(*("K", "J")) + + def test_add_edges_from3(self): + G = self.G() + G.add_edges_from(zip(list("ACD"), list("CDE"))) + assert G.has_edge("D", "E") + assert not G.has_edge("E", "C") + + def test_remove_edge(self): + G = self.G() + G.add_nodes_from([1, 2, 3, "A", "B", "C", "D", "E", "F", "G", "H"]) + + G.add_edges_from(zip(list("MNOP"), list("NOPM"))) + assert G.has_edge("O", "P") + assert G.has_edge("P", "M") + G.remove_node("P") # tests remove_node()'s handling of edges. + assert not G.has_edge("P", "M") + pytest.raises(TypeError, G.remove_edge, "M") + + G.add_edge("N", "M") + assert G.has_edge("M", "N") + G.remove_edge("M", "N") + assert not G.has_edge("M", "N") + + # self loop fails silently + G.remove_edges_from([list("HI"), list("DF"), tuple("KK"), tuple("JK")]) + assert not G.has_edge("H", "I") + assert not G.has_edge("J", "K") + G.remove_edges_from([list("IJ"), list("KK"), list("JK")]) + assert not G.has_edge("I", "J") + G.remove_nodes_from(set("ZEFHIMNO")) + G.add_edge("J", "K") + + def test_edges_nbunch(self): + # Test G.edges(nbunch) with various forms of nbunch + G = self.G() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + # node not in nbunch should be quietly ignored + pytest.raises(nx.NetworkXError, G.edges, 6) + assert list(G.edges("Z")) == [] # iterable non-node + # nbunch can be an empty list + assert list(G.edges([])) == [] + if G.is_directed(): + elist = [("A", "B"), ("A", "C"), ("B", "D")] + else: + elist = [("A", "B"), ("A", "C"), ("B", "C"), ("B", "D")] + # nbunch can be a list + assert edges_equal(list(G.edges(["A", "B"])), elist) + # nbunch can be a set + assert edges_equal(G.edges({"A", "B"}), elist) + # nbunch can be a graph + G1 = self.G() + G1.add_nodes_from("AB") + 
assert edges_equal(G.edges(G1), elist) + # nbunch can be a dict with nodes as keys + ndict = {"A": "thing1", "B": "thing2"} + assert edges_equal(G.edges(ndict), elist) + # nbunch can be a single node + assert edges_equal(list(G.edges("A")), [("A", "B"), ("A", "C")]) + assert nodes_equal(sorted(G), ["A", "B", "C", "D"]) + + # nbunch can be nothing (whole graph) + assert edges_equal( + list(G.edges()), + [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")], + ) + + def test_degree(self): + G = self.G() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + assert G.degree("A") == 2 + + # degree of single node in iterable container must return dict + assert list(G.degree(["A"])) == [("A", 2)] + assert sorted(d for n, d in G.degree(["A", "B"])) == [2, 3] + assert sorted(d for n, d in G.degree()) == [2, 2, 3, 3] + + def test_degree2(self): + H = self.G() + H.add_edges_from([(1, 24), (1, 2)]) + assert sorted(d for n, d in H.degree([1, 24])) == [1, 2] + + def test_degree_graph(self): + P3 = nx.path_graph(3) + P5 = nx.path_graph(5) + # silently ignore nodes not in P3 + assert dict(d for n, d in P3.degree(["A", "B"])) == {} + # nbunch can be a graph + assert sorted(d for n, d in P5.degree(P3)) == [1, 2, 2] + # nbunch can be a graph that's way too big + assert sorted(d for n, d in P3.degree(P5)) == [1, 1, 2] + assert list(P5.degree([])) == [] + assert dict(P5.degree([])) == {} + + def test_null(self): + null = nx.null_graph() + assert list(null.degree()) == [] + assert dict(null.degree()) == {} + + def test_order_size(self): + G = self.G() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + assert G.order() == 4 + assert G.size() == 5 + assert G.number_of_edges() == 5 + assert G.number_of_edges("A", "B") == 1 + assert G.number_of_edges("A", "D") == 0 + + def test_copy(self): + G = self.G() + H = G.copy() # copy + assert H.adj == G.adj + assert H.name == G.name + assert H is not G + + def test_subgraph(self): 
+ G = self.G() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + SG = G.subgraph(["A", "B", "D"]) + assert nodes_equal(list(SG), ["A", "B", "D"]) + assert edges_equal(list(SG.edges()), [("A", "B"), ("B", "D")]) + + def test_to_directed(self): + G = self.G() + if not G.is_directed(): + G.add_edges_from( + [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")] + ) + + DG = G.to_directed() + assert DG is not G # directed copy or copy + + assert DG.is_directed() + assert DG.name == G.name + assert DG.adj == G.adj + assert sorted(DG.out_edges(list("AB"))) == [ + ("A", "B"), + ("A", "C"), + ("B", "A"), + ("B", "C"), + ("B", "D"), + ] + DG.remove_edge("A", "B") + assert DG.has_edge("B", "A") # this removes B-A but not A-B + assert not DG.has_edge("A", "B") + + def test_to_undirected(self): + G = self.G() + if G.is_directed(): + G.add_edges_from( + [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")] + ) + UG = G.to_undirected() # to_undirected + assert UG is not G + assert not UG.is_directed() + assert G.is_directed() + assert UG.name == G.name + assert UG.adj != G.adj + assert sorted(UG.edges(list("AB"))) == [ + ("A", "B"), + ("A", "C"), + ("B", "C"), + ("B", "D"), + ] + assert sorted(UG.edges(["A", "B"])) == [ + ("A", "B"), + ("A", "C"), + ("B", "C"), + ("B", "D"), + ] + UG.remove_edge("A", "B") + assert not UG.has_edge("B", "A") + assert not UG.has_edge("A", "B") + + def test_neighbors(self): + G = self.G() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + G.add_nodes_from("GJK") + assert sorted(G["A"]) == ["B", "C"] + assert sorted(G.neighbors("A")) == ["B", "C"] + assert sorted(G.neighbors("A")) == ["B", "C"] + assert sorted(G.neighbors("G")) == [] + pytest.raises(nx.NetworkXError, G.neighbors, "j") + + def test_iterators(self): + G = self.G() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + G.add_nodes_from("GJK") + assert sorted(G.nodes()) == ["A", "B", 
"C", "D", "G", "J", "K"] + assert edges_equal( + G.edges(), [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")] + ) + + assert sorted(v for k, v in G.degree()) == [0, 0, 0, 2, 2, 3, 3] + assert sorted(G.degree(), key=str) == [ + ("A", 2), + ("B", 3), + ("C", 3), + ("D", 2), + ("G", 0), + ("J", 0), + ("K", 0), + ] + assert sorted(G.neighbors("A")) == ["B", "C"] + pytest.raises(nx.NetworkXError, G.neighbors, "X") + G.clear() + assert nx.number_of_nodes(G) == 0 + assert nx.number_of_edges(G) == 0 + + def test_null_subgraph(self): + # Subgraph of a null graph is a null graph + nullgraph = nx.null_graph() + G = nx.null_graph() + H = G.subgraph([]) + assert nx.is_isomorphic(H, nullgraph) + + def test_empty_subgraph(self): + # Subgraph of an empty graph is an empty graph. test 1 + nullgraph = nx.null_graph() + E5 = nx.empty_graph(5) + E10 = nx.empty_graph(10) + H = E10.subgraph([]) + assert nx.is_isomorphic(H, nullgraph) + H = E10.subgraph([1, 2, 3, 4, 5]) + assert nx.is_isomorphic(H, E5) + + def test_complete_subgraph(self): + # Subgraph of a complete graph is a complete graph + K1 = nx.complete_graph(1) + K3 = nx.complete_graph(3) + K5 = nx.complete_graph(5) + H = K5.subgraph([1, 2, 3]) + assert nx.is_isomorphic(H, K3) + + def test_subgraph_nbunch(self): + nullgraph = nx.null_graph() + K1 = nx.complete_graph(1) + K3 = nx.complete_graph(3) + K5 = nx.complete_graph(5) + # Test G.subgraph(nbunch), where nbunch is a single node + H = K5.subgraph(1) + assert nx.is_isomorphic(H, K1) + # Test G.subgraph(nbunch), where nbunch is a set + H = K5.subgraph({1}) + assert nx.is_isomorphic(H, K1) + # Test G.subgraph(nbunch), where nbunch is an iterator + H = K5.subgraph(iter(K3)) + assert nx.is_isomorphic(H, K3) + # Test G.subgraph(nbunch), where nbunch is another graph + H = K5.subgraph(K3) + assert nx.is_isomorphic(H, K3) + H = K5.subgraph([9]) + assert nx.is_isomorphic(H, nullgraph) + + def test_node_tuple_issue(self): + H = self.G() + # Test error handling of tuple as a 
node + pytest.raises(nx.NetworkXError, H.remove_node, (1, 2)) + H.remove_nodes_from([(1, 2)]) # no error + pytest.raises(nx.NetworkXError, H.neighbors, (1, 2)) diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_coreviews.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_coreviews.py new file mode 100644 index 0000000000000000000000000000000000000000..24de7f2f1115b864682b261daa256eff0deef696 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_coreviews.py @@ -0,0 +1,362 @@ +import pickle + +import pytest + +import networkx as nx + + +class TestAtlasView: + # node->data + def setup_method(self): + self.d = {0: {"color": "blue", "weight": 1.2}, 1: {}, 2: {"color": 1}} + self.av = nx.classes.coreviews.AtlasView(self.d) + + def test_pickle(self): + view = self.av + pview = pickle.loads(pickle.dumps(view, -1)) + assert view == pview + assert view.__slots__ == pview.__slots__ + pview = pickle.loads(pickle.dumps(view)) + assert view == pview + assert view.__slots__ == pview.__slots__ + + def test_len(self): + assert len(self.av) == len(self.d) + + def test_iter(self): + assert list(self.av) == list(self.d) + + def test_getitem(self): + assert self.av[1] is self.d[1] + assert self.av[2]["color"] == 1 + pytest.raises(KeyError, self.av.__getitem__, 3) + + def test_copy(self): + avcopy = self.av.copy() + assert avcopy[0] == self.av[0] + assert avcopy == self.av + assert avcopy[0] is not self.av[0] + assert avcopy is not self.av + avcopy[5] = {} + assert avcopy != self.av + + avcopy[0]["ht"] = 4 + assert avcopy[0] != self.av[0] + self.av[0]["ht"] = 4 + assert avcopy[0] == self.av[0] + del self.av[0]["ht"] + + assert not hasattr(self.av, "__setitem__") + + def test_items(self): + assert sorted(self.av.items()) == sorted(self.d.items()) + + def test_str(self): + out = str(self.d) + assert str(self.av) == out + + def test_repr(self): + out = "AtlasView(" + str(self.d) + ")" + assert 
repr(self.av) == out + + +class TestAdjacencyView: + # node->nbr->data + def setup_method(self): + dd = {"color": "blue", "weight": 1.2} + self.nd = {0: dd, 1: {}, 2: {"color": 1}} + self.adj = {3: self.nd, 0: {3: dd}, 1: {}, 2: {3: {"color": 1}}} + self.adjview = nx.classes.coreviews.AdjacencyView(self.adj) + + def test_pickle(self): + view = self.adjview + pview = pickle.loads(pickle.dumps(view, -1)) + assert view == pview + assert view.__slots__ == pview.__slots__ + + def test_len(self): + assert len(self.adjview) == len(self.adj) + + def test_iter(self): + assert list(self.adjview) == list(self.adj) + + def test_getitem(self): + assert self.adjview[1] is not self.adj[1] + assert self.adjview[3][0] is self.adjview[0][3] + assert self.adjview[2][3]["color"] == 1 + pytest.raises(KeyError, self.adjview.__getitem__, 4) + + def test_copy(self): + avcopy = self.adjview.copy() + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] + + avcopy[2][3]["ht"] = 4 + assert avcopy[2] != self.adjview[2] + self.adjview[2][3]["ht"] = 4 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][3]["ht"] + + assert not hasattr(self.adjview, "__setitem__") + + def test_items(self): + view_items = sorted((n, dict(d)) for n, d in self.adjview.items()) + assert view_items == sorted(self.adj.items()) + + def test_str(self): + out = str(dict(self.adj)) + assert str(self.adjview) == out + + def test_repr(self): + out = self.adjview.__class__.__name__ + "(" + str(self.adj) + ")" + assert repr(self.adjview) == out + + +class TestMultiAdjacencyView(TestAdjacencyView): + # node->nbr->key->data + def setup_method(self): + dd = {"color": "blue", "weight": 1.2} + self.kd = {0: dd, 1: {}, 2: {"color": 1}} + self.nd = {3: self.kd, 0: {3: dd}, 1: {0: {}}, 2: {3: {"color": 1}}} + self.adj = {3: self.nd, 0: {3: {3: dd}}, 1: {}, 2: {3: {8: {}}}} + self.adjview = nx.classes.coreviews.MultiAdjacencyView(self.adj) + + def test_getitem(self): + assert self.adjview[1] is not 
self.adj[1] + assert self.adjview[3][0][3] is self.adjview[0][3][3] + assert self.adjview[3][2][3]["color"] == 1 + pytest.raises(KeyError, self.adjview.__getitem__, 4) + + def test_copy(self): + avcopy = self.adjview.copy() + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] + + avcopy[2][3][8]["ht"] = 4 + assert avcopy[2] != self.adjview[2] + self.adjview[2][3][8]["ht"] = 4 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][3][8]["ht"] + + assert not hasattr(self.adjview, "__setitem__") + + +class TestUnionAtlas: + # node->data + def setup_method(self): + self.s = {0: {"color": "blue", "weight": 1.2}, 1: {}, 2: {"color": 1}} + self.p = {3: {"color": "blue", "weight": 1.2}, 4: {}, 2: {"watch": 2}} + self.av = nx.classes.coreviews.UnionAtlas(self.s, self.p) + + def test_pickle(self): + view = self.av + pview = pickle.loads(pickle.dumps(view, -1)) + assert view == pview + assert view.__slots__ == pview.__slots__ + + def test_len(self): + assert len(self.av) == len(self.s.keys() | self.p.keys()) == 5 + + def test_iter(self): + assert set(self.av) == set(self.s) | set(self.p) + + def test_getitem(self): + assert self.av[0] is self.s[0] + assert self.av[4] is self.p[4] + assert self.av[2]["color"] == 1 + pytest.raises(KeyError, self.av[2].__getitem__, "watch") + pytest.raises(KeyError, self.av.__getitem__, 8) + + def test_copy(self): + avcopy = self.av.copy() + assert avcopy[0] == self.av[0] + assert avcopy[0] is not self.av[0] + assert avcopy is not self.av + avcopy[5] = {} + assert avcopy != self.av + + avcopy[0]["ht"] = 4 + assert avcopy[0] != self.av[0] + self.av[0]["ht"] = 4 + assert avcopy[0] == self.av[0] + del self.av[0]["ht"] + + assert not hasattr(self.av, "__setitem__") + + def test_items(self): + expected = dict(self.p.items()) + expected.update(self.s) + assert sorted(self.av.items()) == sorted(expected.items()) + + def test_str(self): + out = str(dict(self.av)) + assert str(self.av) == out + + def test_repr(self): + out 
= f"{self.av.__class__.__name__}({self.s}, {self.p})" + assert repr(self.av) == out + + +class TestUnionAdjacency: + # node->nbr->data + def setup_method(self): + dd = {"color": "blue", "weight": 1.2} + self.nd = {0: dd, 1: {}, 2: {"color": 1}} + self.s = {3: self.nd, 0: {}, 1: {}, 2: {3: {"color": 1}}} + self.p = {3: {}, 0: {3: dd}, 1: {0: {}}, 2: {1: {"color": 1}}} + self.adjview = nx.classes.coreviews.UnionAdjacency(self.s, self.p) + + def test_pickle(self): + view = self.adjview + pview = pickle.loads(pickle.dumps(view, -1)) + assert view == pview + assert view.__slots__ == pview.__slots__ + + def test_len(self): + assert len(self.adjview) == len(self.s) + + def test_iter(self): + assert sorted(self.adjview) == sorted(self.s) + + def test_getitem(self): + assert self.adjview[1] is not self.s[1] + assert self.adjview[3][0] is self.adjview[0][3] + assert self.adjview[2][3]["color"] == 1 + pytest.raises(KeyError, self.adjview.__getitem__, 4) + + def test_copy(self): + avcopy = self.adjview.copy() + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] + + avcopy[2][3]["ht"] = 4 + assert avcopy[2] != self.adjview[2] + self.adjview[2][3]["ht"] = 4 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][3]["ht"] + + assert not hasattr(self.adjview, "__setitem__") + + def test_str(self): + out = str(dict(self.adjview)) + assert str(self.adjview) == out + + def test_repr(self): + clsname = self.adjview.__class__.__name__ + out = f"{clsname}({self.s}, {self.p})" + assert repr(self.adjview) == out + + +class TestUnionMultiInner(TestUnionAdjacency): + # nbr->key->data + def setup_method(self): + dd = {"color": "blue", "weight": 1.2} + self.kd = {7: {}, "ekey": {}, 9: {"color": 1}} + self.s = {3: self.kd, 0: {7: dd}, 1: {}, 2: {"key": {"color": 1}}} + self.p = {3: {}, 0: {3: dd}, 1: {}, 2: {1: {"span": 2}}} + self.adjview = nx.classes.coreviews.UnionMultiInner(self.s, self.p) + + def test_len(self): + assert len(self.adjview) == 
len(self.s.keys() | self.p.keys()) == 4 + + def test_getitem(self): + assert self.adjview[1] is not self.s[1] + assert self.adjview[0][7] is self.adjview[0][3] + assert self.adjview[2]["key"]["color"] == 1 + assert self.adjview[2][1]["span"] == 2 + pytest.raises(KeyError, self.adjview.__getitem__, 4) + pytest.raises(KeyError, self.adjview[1].__getitem__, "key") + + def test_copy(self): + avcopy = self.adjview.copy() + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] + + avcopy[2][1]["width"] = 8 + assert avcopy[2] != self.adjview[2] + self.adjview[2][1]["width"] = 8 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][1]["width"] + + assert not hasattr(self.adjview, "__setitem__") + assert hasattr(avcopy, "__setitem__") + + +class TestUnionMultiAdjacency(TestUnionAdjacency): + # node->nbr->key->data + def setup_method(self): + dd = {"color": "blue", "weight": 1.2} + self.kd = {7: {}, 8: {}, 9: {"color": 1}} + self.nd = {3: self.kd, 0: {9: dd}, 1: {8: {}}, 2: {9: {"color": 1}}} + self.s = {3: self.nd, 0: {3: {7: dd}}, 1: {}, 2: {3: {8: {}}}} + self.p = {3: {}, 0: {3: {9: dd}}, 1: {}, 2: {1: {8: {}}}} + self.adjview = nx.classes.coreviews.UnionMultiAdjacency(self.s, self.p) + + def test_getitem(self): + assert self.adjview[1] is not self.s[1] + assert self.adjview[3][0][9] is self.adjview[0][3][9] + assert self.adjview[3][2][9]["color"] == 1 + pytest.raises(KeyError, self.adjview.__getitem__, 4) + + def test_copy(self): + avcopy = self.adjview.copy() + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] + + avcopy[2][3][8]["ht"] = 4 + assert avcopy[2] != self.adjview[2] + self.adjview[2][3][8]["ht"] = 4 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][3][8]["ht"] + + assert not hasattr(self.adjview, "__setitem__") + assert hasattr(avcopy, "__setitem__") + + +class TestFilteredGraphs: + def setup_method(self): + self.Graphs = [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph] + + def 
test_hide_show_nodes(self): + SubGraph = nx.subgraph_view + for Graph in self.Graphs: + G = nx.path_graph(4, Graph) + SG = G.subgraph([2, 3]) + RG = SubGraph(G, filter_node=nx.filters.hide_nodes([0, 1])) + assert SG.nodes == RG.nodes + assert SG.edges == RG.edges + SGC = SG.copy() + RGC = RG.copy() + assert SGC.nodes == RGC.nodes + assert SGC.edges == RGC.edges + + def test_str_repr(self): + SubGraph = nx.subgraph_view + for Graph in self.Graphs: + G = nx.path_graph(4, Graph) + SG = G.subgraph([2, 3]) + RG = SubGraph(G, filter_node=nx.filters.hide_nodes([0, 1])) + str(SG.adj) + str(RG.adj) + repr(SG.adj) + repr(RG.adj) + str(SG.adj[2]) + str(RG.adj[2]) + repr(SG.adj[2]) + repr(RG.adj[2]) + + def test_copy(self): + SubGraph = nx.subgraph_view + for Graph in self.Graphs: + G = nx.path_graph(4, Graph) + SG = G.subgraph([2, 3]) + RG = SubGraph(G, filter_node=nx.filters.hide_nodes([0, 1])) + RsG = SubGraph(G, filter_node=nx.filters.show_nodes([2, 3])) + assert G.adj.copy() == G.adj + assert G.adj[2].copy() == G.adj[2] + assert SG.adj.copy() == SG.adj + assert SG.adj[2].copy() == SG.adj[2] + assert RG.adj.copy() == RG.adj + assert RG.adj[2].copy() == RG.adj[2] + assert RsG.adj.copy() == RsG.adj + assert RsG.adj[2].copy() == RsG.adj[2] diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_digraph.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_digraph.py new file mode 100644 index 0000000000000000000000000000000000000000..b9972f9a5f1ab101b9f6f2f9a1584ddafccd2ff3 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_digraph.py @@ -0,0 +1,331 @@ +import pytest + +import networkx as nx +from networkx.utils import nodes_equal + +from .test_graph import BaseAttrGraphTester, BaseGraphTester +from .test_graph import TestEdgeSubgraph as _TestGraphEdgeSubgraph +from .test_graph import TestGraph as _TestGraph + + +class BaseDiGraphTester(BaseGraphTester): + def test_has_successor(self): + 
G = self.K3 + assert G.has_successor(0, 1) + assert not G.has_successor(0, -1) + + def test_successors(self): + G = self.K3 + assert sorted(G.successors(0)) == [1, 2] + with pytest.raises(nx.NetworkXError): + G.successors(-1) + + def test_has_predecessor(self): + G = self.K3 + assert G.has_predecessor(0, 1) + assert not G.has_predecessor(0, -1) + + def test_predecessors(self): + G = self.K3 + assert sorted(G.predecessors(0)) == [1, 2] + with pytest.raises(nx.NetworkXError): + G.predecessors(-1) + + def test_edges(self): + G = self.K3 + assert sorted(G.edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] + assert sorted(G.edges(0)) == [(0, 1), (0, 2)] + assert sorted(G.edges([0, 1])) == [(0, 1), (0, 2), (1, 0), (1, 2)] + with pytest.raises(nx.NetworkXError): + G.edges(-1) + + def test_out_edges(self): + G = self.K3 + assert sorted(G.out_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] + assert sorted(G.out_edges(0)) == [(0, 1), (0, 2)] + with pytest.raises(nx.NetworkXError): + G.out_edges(-1) + + def test_out_edges_dir(self): + G = self.P3 + assert sorted(G.out_edges()) == [(0, 1), (1, 2)] + assert sorted(G.out_edges(0)) == [(0, 1)] + assert sorted(G.out_edges(2)) == [] + + def test_out_edges_data(self): + G = nx.DiGraph([(0, 1, {"data": 0}), (1, 0, {})]) + assert sorted(G.out_edges(data=True)) == [(0, 1, {"data": 0}), (1, 0, {})] + assert sorted(G.out_edges(0, data=True)) == [(0, 1, {"data": 0})] + assert sorted(G.out_edges(data="data")) == [(0, 1, 0), (1, 0, None)] + assert sorted(G.out_edges(0, data="data")) == [(0, 1, 0)] + + def test_in_edges_dir(self): + G = self.P3 + assert sorted(G.in_edges()) == [(0, 1), (1, 2)] + assert sorted(G.in_edges(0)) == [] + assert sorted(G.in_edges(2)) == [(1, 2)] + + def test_in_edges_data(self): + G = nx.DiGraph([(0, 1, {"data": 0}), (1, 0, {})]) + assert sorted(G.in_edges(data=True)) == [(0, 1, {"data": 0}), (1, 0, {})] + assert sorted(G.in_edges(1, data=True)) == [(0, 1, {"data": 0})] + assert 
sorted(G.in_edges(data="data")) == [(0, 1, 0), (1, 0, None)] + assert sorted(G.in_edges(1, data="data")) == [(0, 1, 0)] + + def test_degree(self): + G = self.K3 + assert sorted(G.degree()) == [(0, 4), (1, 4), (2, 4)] + assert dict(G.degree()) == {0: 4, 1: 4, 2: 4} + assert G.degree(0) == 4 + assert list(G.degree(iter([0]))) == [(0, 4)] # run through iterator + + def test_in_degree(self): + G = self.K3 + assert sorted(G.in_degree()) == [(0, 2), (1, 2), (2, 2)] + assert dict(G.in_degree()) == {0: 2, 1: 2, 2: 2} + assert G.in_degree(0) == 2 + assert list(G.in_degree(iter([0]))) == [(0, 2)] # run through iterator + + def test_out_degree(self): + G = self.K3 + assert sorted(G.out_degree()) == [(0, 2), (1, 2), (2, 2)] + assert dict(G.out_degree()) == {0: 2, 1: 2, 2: 2} + assert G.out_degree(0) == 2 + assert list(G.out_degree(iter([0]))) == [(0, 2)] + + def test_size(self): + G = self.K3 + assert G.size() == 6 + assert G.number_of_edges() == 6 + + def test_to_undirected_reciprocal(self): + G = self.Graph() + G.add_edge(1, 2) + assert G.to_undirected().has_edge(1, 2) + assert not G.to_undirected(reciprocal=True).has_edge(1, 2) + G.add_edge(2, 1) + assert G.to_undirected(reciprocal=True).has_edge(1, 2) + + def test_reverse_copy(self): + G = nx.DiGraph([(0, 1), (1, 2)]) + R = G.reverse() + assert sorted(R.edges()) == [(1, 0), (2, 1)] + R.remove_edge(1, 0) + assert sorted(R.edges()) == [(2, 1)] + assert sorted(G.edges()) == [(0, 1), (1, 2)] + + def test_reverse_nocopy(self): + G = nx.DiGraph([(0, 1), (1, 2)]) + R = G.reverse(copy=False) + assert sorted(R.edges()) == [(1, 0), (2, 1)] + with pytest.raises(nx.NetworkXError): + R.remove_edge(1, 0) + + def test_reverse_hashable(self): + class Foo: + pass + + x = Foo() + y = Foo() + G = nx.DiGraph() + G.add_edge(x, y) + assert nodes_equal(G.nodes(), G.reverse().nodes()) + assert [(y, x)] == list(G.reverse().edges()) + + def test_di_cache_reset(self): + G = self.K3.copy() + old_succ = G.succ + assert id(G.succ) == id(old_succ) + 
old_adj = G.adj + assert id(G.adj) == id(old_adj) + + G._succ = {} + assert id(G.succ) != id(old_succ) + assert id(G.adj) != id(old_adj) + + old_pred = G.pred + assert id(G.pred) == id(old_pred) + G._pred = {} + assert id(G.pred) != id(old_pred) + + def test_di_attributes_cached(self): + G = self.K3.copy() + assert id(G.in_edges) == id(G.in_edges) + assert id(G.out_edges) == id(G.out_edges) + assert id(G.in_degree) == id(G.in_degree) + assert id(G.out_degree) == id(G.out_degree) + assert id(G.succ) == id(G.succ) + assert id(G.pred) == id(G.pred) + + +class BaseAttrDiGraphTester(BaseDiGraphTester, BaseAttrGraphTester): + def test_edges_data(self): + G = self.K3 + all_edges = [ + (0, 1, {}), + (0, 2, {}), + (1, 0, {}), + (1, 2, {}), + (2, 0, {}), + (2, 1, {}), + ] + assert sorted(G.edges(data=True)) == all_edges + assert sorted(G.edges(0, data=True)) == all_edges[:2] + assert sorted(G.edges([0, 1], data=True)) == all_edges[:4] + with pytest.raises(nx.NetworkXError): + G.edges(-1, True) + + def test_in_degree_weighted(self): + G = self.K3.copy() + G.add_edge(0, 1, weight=0.3, other=1.2) + assert sorted(G.in_degree(weight="weight")) == [(0, 2), (1, 1.3), (2, 2)] + assert dict(G.in_degree(weight="weight")) == {0: 2, 1: 1.3, 2: 2} + assert G.in_degree(1, weight="weight") == 1.3 + assert sorted(G.in_degree(weight="other")) == [(0, 2), (1, 2.2), (2, 2)] + assert dict(G.in_degree(weight="other")) == {0: 2, 1: 2.2, 2: 2} + assert G.in_degree(1, weight="other") == 2.2 + assert list(G.in_degree(iter([1]), weight="other")) == [(1, 2.2)] + + def test_out_degree_weighted(self): + G = self.K3.copy() + G.add_edge(0, 1, weight=0.3, other=1.2) + assert sorted(G.out_degree(weight="weight")) == [(0, 1.3), (1, 2), (2, 2)] + assert dict(G.out_degree(weight="weight")) == {0: 1.3, 1: 2, 2: 2} + assert G.out_degree(0, weight="weight") == 1.3 + assert sorted(G.out_degree(weight="other")) == [(0, 2.2), (1, 2), (2, 2)] + assert dict(G.out_degree(weight="other")) == {0: 2.2, 1: 2, 2: 2} + 
assert G.out_degree(0, weight="other") == 2.2 + assert list(G.out_degree(iter([0]), weight="other")) == [(0, 2.2)] + + +class TestDiGraph(BaseAttrDiGraphTester, _TestGraph): + """Tests specific to dict-of-dict-of-dict digraph data structure""" + + def setup_method(self): + self.Graph = nx.DiGraph + # build dict-of-dict-of-dict K3 + ed1, ed2, ed3, ed4, ed5, ed6 = ({}, {}, {}, {}, {}, {}) + self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed3, 2: ed4}, 2: {0: ed5, 1: ed6}} + self.k3edges = [(0, 1), (0, 2), (1, 2)] + self.k3nodes = [0, 1, 2] + self.K3 = self.Graph() + self.K3._succ = self.k3adj # K3._adj is synced with K3._succ + self.K3._pred = {0: {1: ed3, 2: ed5}, 1: {0: ed1, 2: ed6}, 2: {0: ed2, 1: ed4}} + self.K3._node = {} + self.K3._node[0] = {} + self.K3._node[1] = {} + self.K3._node[2] = {} + + ed1, ed2 = ({}, {}) + self.P3 = self.Graph() + self.P3._succ = {0: {1: ed1}, 1: {2: ed2}, 2: {}} + self.P3._pred = {0: {}, 1: {0: ed1}, 2: {1: ed2}} + # P3._adj is synced with P3._succ + self.P3._node = {} + self.P3._node[0] = {} + self.P3._node[1] = {} + self.P3._node[2] = {} + + def test_data_input(self): + G = self.Graph({1: [2], 2: [1]}, name="test") + assert G.name == "test" + assert sorted(G.adj.items()) == [(1, {2: {}}), (2, {1: {}})] + assert sorted(G.succ.items()) == [(1, {2: {}}), (2, {1: {}})] + assert sorted(G.pred.items()) == [(1, {2: {}}), (2, {1: {}})] + + def test_add_edge(self): + G = self.Graph() + G.add_edge(0, 1) + assert G.adj == {0: {1: {}}, 1: {}} + assert G.succ == {0: {1: {}}, 1: {}} + assert G.pred == {0: {}, 1: {0: {}}} + G = self.Graph() + G.add_edge(*(0, 1)) + assert G.adj == {0: {1: {}}, 1: {}} + assert G.succ == {0: {1: {}}, 1: {}} + assert G.pred == {0: {}, 1: {0: {}}} + with pytest.raises(ValueError, match="None cannot be a node"): + G.add_edge(None, 3) + + def test_add_edges_from(self): + G = self.Graph() + G.add_edges_from([(0, 1), (0, 2, {"data": 3})], data=2) + assert G.adj == {0: {1: {"data": 2}, 2: {"data": 3}}, 1: {}, 2: {}} + assert 
G.succ == {0: {1: {"data": 2}, 2: {"data": 3}}, 1: {}, 2: {}} + assert G.pred == {0: {}, 1: {0: {"data": 2}}, 2: {0: {"data": 3}}} + + with pytest.raises(nx.NetworkXError): + G.add_edges_from([(0,)]) # too few in tuple + with pytest.raises(nx.NetworkXError): + G.add_edges_from([(0, 1, 2, 3)]) # too many in tuple + with pytest.raises(TypeError): + G.add_edges_from([0]) # not a tuple + with pytest.raises(ValueError, match="None cannot be a node"): + G.add_edges_from([(None, 3), (3, 2)]) + + def test_remove_edge(self): + G = self.K3.copy() + G.remove_edge(0, 1) + assert G.succ == {0: {2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}} + assert G.pred == {0: {1: {}, 2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}} + with pytest.raises(nx.NetworkXError): + G.remove_edge(-1, 0) + + def test_remove_edges_from(self): + G = self.K3.copy() + G.remove_edges_from([(0, 1)]) + assert G.succ == {0: {2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}} + assert G.pred == {0: {1: {}, 2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}} + G.remove_edges_from([(0, 0)]) # silent fail + + def test_clear(self): + G = self.K3 + G.graph["name"] = "K3" + G.clear() + assert list(G.nodes) == [] + assert G.succ == {} + assert G.pred == {} + assert G.graph == {} + + def test_clear_edges(self): + G = self.K3 + G.graph["name"] = "K3" + nodes = list(G.nodes) + G.clear_edges() + assert list(G.nodes) == nodes + expected = {0: {}, 1: {}, 2: {}} + assert G.succ == expected + assert G.pred == expected + assert list(G.edges) == [] + assert G.graph["name"] == "K3" + + +class TestEdgeSubgraph(_TestGraphEdgeSubgraph): + """Unit tests for the :meth:`DiGraph.edge_subgraph` method.""" + + def setup_method(self): + # Create a doubly-linked path graph on five nodes. + G = nx.DiGraph(nx.path_graph(5)) + # Add some node, edge, and graph attributes. 
+ for i in range(5): + G.nodes[i]["name"] = f"node{i}" + G.edges[0, 1]["name"] = "edge01" + G.edges[3, 4]["name"] = "edge34" + G.graph["name"] = "graph" + # Get the subgraph induced by the first and last edges. + self.G = G + self.H = G.edge_subgraph([(0, 1), (3, 4)]) + + def test_pred_succ(self): + """Test that nodes are added to predecessors and successors. + + For more information, see GitHub issue #2370. + + """ + G = nx.DiGraph() + G.add_edge(0, 1) + H = G.edge_subgraph([(0, 1)]) + assert list(H.predecessors(0)) == [] + assert list(H.successors(0)) == [1] + assert list(H.predecessors(1)) == [0] + assert list(H.successors(1)) == [] diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_digraph_historical.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_digraph_historical.py new file mode 100644 index 0000000000000000000000000000000000000000..4f2b1da90f977962e9610bd64d10e721915a0595 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_digraph_historical.py @@ -0,0 +1,111 @@ +"""Original NetworkX graph tests""" + +import pytest + +import networkx +import networkx as nx + +from .historical_tests import HistoricalTests + + +class TestDiGraphHistorical(HistoricalTests): + @classmethod + def setup_class(cls): + HistoricalTests.setup_class() + cls.G = nx.DiGraph + + def test_in_degree(self): + G = self.G() + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) + + assert sorted(d for n, d in G.in_degree()) == [0, 0, 0, 0, 1, 2, 2] + assert dict(G.in_degree()) == { + "A": 0, + "C": 2, + "B": 1, + "D": 2, + "G": 0, + "K": 0, + "J": 0, + } + + def test_out_degree(self): + G = self.G() + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) + assert sorted(v for k, v in G.in_degree()) == [0, 0, 0, 0, 1, 2, 2] + assert dict(G.out_degree()) == { + "A": 2, + "C": 1, + "B": 2, + 
"D": 0, + "G": 0, + "K": 0, + "J": 0, + } + + def test_degree_digraph(self): + H = nx.DiGraph() + H.add_edges_from([(1, 24), (1, 2)]) + assert sorted(d for n, d in H.in_degree([1, 24])) == [0, 1] + assert sorted(d for n, d in H.out_degree([1, 24])) == [0, 2] + assert sorted(d for n, d in H.degree([1, 24])) == [1, 2] + + def test_neighbors(self): + G = self.G() + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) + + assert sorted(G.neighbors("C")) == ["D"] + assert sorted(G["C"]) == ["D"] + assert sorted(G.neighbors("A")) == ["B", "C"] + pytest.raises(nx.NetworkXError, G.neighbors, "j") + pytest.raises(nx.NetworkXError, G.neighbors, "j") + + def test_successors(self): + G = self.G() + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) + assert sorted(G.successors("A")) == ["B", "C"] + assert sorted(G.successors("A")) == ["B", "C"] + assert sorted(G.successors("G")) == [] + assert sorted(G.successors("D")) == [] + assert sorted(G.successors("G")) == [] + pytest.raises(nx.NetworkXError, G.successors, "j") + pytest.raises(nx.NetworkXError, G.successors, "j") + + def test_predecessors(self): + G = self.G() + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) + assert sorted(G.predecessors("C")) == ["A", "B"] + assert sorted(G.predecessors("C")) == ["A", "B"] + assert sorted(G.predecessors("G")) == [] + assert sorted(G.predecessors("A")) == [] + assert sorted(G.predecessors("G")) == [] + assert sorted(G.predecessors("A")) == [] + assert sorted(G.successors("D")) == [] + + pytest.raises(nx.NetworkXError, G.predecessors, "j") + pytest.raises(nx.NetworkXError, G.predecessors, "j") + + def test_reverse(self): + G = nx.complete_graph(10) + H = G.to_directed() + HR = H.reverse() + assert nx.is_isomorphic(H, HR) + assert sorted(H.edges()) == sorted(HR.edges()) + + def test_reverse2(self): + H = nx.DiGraph() + foo = 
[H.add_edge(u, u + 1) for u in range(5)] + HR = H.reverse() + for u in range(5): + assert HR.has_edge(u + 1, u) + + def test_reverse3(self): + H = nx.DiGraph() + H.add_nodes_from([1, 2, 3, 4]) + HR = H.reverse() + assert sorted(HR.nodes()) == [1, 2, 3, 4] diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_filters.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_filters.py new file mode 100644 index 0000000000000000000000000000000000000000..2da59117cad0d72d5830b53c8d19c6e0ca988d54 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_filters.py @@ -0,0 +1,177 @@ +import pytest + +import networkx as nx + + +class TestFilterFactory: + def test_no_filter(self): + nf = nx.filters.no_filter + assert nf() + assert nf(1) + assert nf(2, 1) + + def test_hide_nodes(self): + f = nx.classes.filters.hide_nodes([1, 2, 3]) + assert not f(1) + assert not f(2) + assert not f(3) + assert f(4) + assert f(0) + assert f("a") + pytest.raises(TypeError, f, 1, 2) + pytest.raises(TypeError, f) + + def test_show_nodes(self): + f = nx.classes.filters.show_nodes([1, 2, 3]) + assert f(1) + assert f(2) + assert f(3) + assert not f(4) + assert not f(0) + assert not f("a") + pytest.raises(TypeError, f, 1, 2) + pytest.raises(TypeError, f) + + def test_hide_edges(self): + factory = nx.classes.filters.hide_edges + f = factory([(1, 2), (3, 4)]) + assert not f(1, 2) + assert not f(3, 4) + assert not f(4, 3) + assert f(2, 3) + assert f(0, -1) + assert f("a", "b") + pytest.raises(TypeError, f, 1, 2, 3) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2, 3)]) + + def test_show_edges(self): + factory = nx.classes.filters.show_edges + f = factory([(1, 2), (3, 4)]) + assert f(1, 2) + assert f(3, 4) + assert f(4, 3) + assert not f(2, 3) + assert not f(0, -1) + assert not f("a", "b") + pytest.raises(TypeError, f, 
1, 2, 3) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2, 3)]) + + def test_hide_diedges(self): + factory = nx.classes.filters.hide_diedges + f = factory([(1, 2), (3, 4)]) + assert not f(1, 2) + assert not f(3, 4) + assert f(4, 3) + assert f(2, 3) + assert f(0, -1) + assert f("a", "b") + pytest.raises(TypeError, f, 1, 2, 3) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2, 3)]) + + def test_show_diedges(self): + factory = nx.classes.filters.show_diedges + f = factory([(1, 2), (3, 4)]) + assert f(1, 2) + assert f(3, 4) + assert not f(4, 3) + assert not f(2, 3) + assert not f(0, -1) + assert not f("a", "b") + pytest.raises(TypeError, f, 1, 2, 3) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2, 3)]) + + def test_hide_multiedges(self): + factory = nx.classes.filters.hide_multiedges + f = factory([(1, 2, 0), (3, 4, 1), (1, 2, 1)]) + assert not f(1, 2, 0) + assert not f(1, 2, 1) + assert f(1, 2, 2) + assert f(3, 4, 0) + assert not f(3, 4, 1) + assert not f(4, 3, 1) + assert f(4, 3, 0) + assert f(2, 3, 0) + assert f(0, -1, 0) + assert f("a", "b", 0) + pytest.raises(TypeError, f, 1, 2, 3, 4) + pytest.raises(TypeError, f, 1, 2) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2)]) + pytest.raises(ValueError, factory, [(1, 2, 3, 4)]) + + def test_show_multiedges(self): + factory = nx.classes.filters.show_multiedges + f = factory([(1, 2, 0), (3, 4, 1), (1, 2, 1)]) + assert f(1, 2, 0) + assert f(1, 2, 1) + assert not f(1, 2, 2) + assert not f(3, 4, 0) + assert f(3, 4, 1) + assert f(4, 3, 1) + assert not f(4, 3, 0) + assert not f(2, 3, 0) + assert not f(0, -1, 0) + 
assert not f("a", "b", 0) + pytest.raises(TypeError, f, 1, 2, 3, 4) + pytest.raises(TypeError, f, 1, 2) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2)]) + pytest.raises(ValueError, factory, [(1, 2, 3, 4)]) + + def test_hide_multidiedges(self): + factory = nx.classes.filters.hide_multidiedges + f = factory([(1, 2, 0), (3, 4, 1), (1, 2, 1)]) + assert not f(1, 2, 0) + assert not f(1, 2, 1) + assert f(1, 2, 2) + assert f(3, 4, 0) + assert not f(3, 4, 1) + assert f(4, 3, 1) + assert f(4, 3, 0) + assert f(2, 3, 0) + assert f(0, -1, 0) + assert f("a", "b", 0) + pytest.raises(TypeError, f, 1, 2, 3, 4) + pytest.raises(TypeError, f, 1, 2) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2)]) + pytest.raises(ValueError, factory, [(1, 2, 3, 4)]) + + def test_show_multidiedges(self): + factory = nx.classes.filters.show_multidiedges + f = factory([(1, 2, 0), (3, 4, 1), (1, 2, 1)]) + assert f(1, 2, 0) + assert f(1, 2, 1) + assert not f(1, 2, 2) + assert not f(3, 4, 0) + assert f(3, 4, 1) + assert not f(4, 3, 1) + assert not f(4, 3, 0) + assert not f(2, 3, 0) + assert not f(0, -1, 0) + assert not f("a", "b", 0) + pytest.raises(TypeError, f, 1, 2, 3, 4) + pytest.raises(TypeError, f, 1, 2) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2)]) + pytest.raises(ValueError, factory, [(1, 2, 3, 4)]) diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_function.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_function.py new file mode 100644 index 0000000000000000000000000000000000000000..f86890dd25268472db87fb18448d38ab11ec9946 --- /dev/null +++ 
b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_function.py @@ -0,0 +1,1035 @@ +import random + +import pytest + +import networkx as nx +from networkx.utils import edges_equal, nodes_equal + + +def test_degree_histogram_empty(): + G = nx.Graph() + assert nx.degree_histogram(G) == [] + + +class TestFunction: + def setup_method(self): + self.G = nx.Graph({0: [1, 2, 3], 1: [1, 2, 0], 4: []}, name="Test") + self.Gdegree = {0: 3, 1: 2, 2: 2, 3: 1, 4: 0} + self.Gnodes = list(range(5)) + self.Gedges = [(0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2)] + self.DG = nx.DiGraph({0: [1, 2, 3], 1: [1, 2, 0], 4: []}) + self.DGin_degree = {0: 1, 1: 2, 2: 2, 3: 1, 4: 0} + self.DGout_degree = {0: 3, 1: 3, 2: 0, 3: 0, 4: 0} + self.DGnodes = list(range(5)) + self.DGedges = [(0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2)] + + def test_nodes(self): + assert nodes_equal(self.G.nodes(), list(nx.nodes(self.G))) + assert nodes_equal(self.DG.nodes(), list(nx.nodes(self.DG))) + + def test_edges(self): + assert edges_equal(self.G.edges(), list(nx.edges(self.G))) + assert sorted(self.DG.edges()) == sorted(nx.edges(self.DG)) + assert edges_equal( + self.G.edges(nbunch=[0, 1, 3]), list(nx.edges(self.G, nbunch=[0, 1, 3])) + ) + assert sorted(self.DG.edges(nbunch=[0, 1, 3])) == sorted( + nx.edges(self.DG, nbunch=[0, 1, 3]) + ) + + def test_degree(self): + assert edges_equal(self.G.degree(), list(nx.degree(self.G))) + assert sorted(self.DG.degree()) == sorted(nx.degree(self.DG)) + assert edges_equal( + self.G.degree(nbunch=[0, 1]), list(nx.degree(self.G, nbunch=[0, 1])) + ) + assert sorted(self.DG.degree(nbunch=[0, 1])) == sorted( + nx.degree(self.DG, nbunch=[0, 1]) + ) + assert edges_equal( + self.G.degree(weight="weight"), list(nx.degree(self.G, weight="weight")) + ) + assert sorted(self.DG.degree(weight="weight")) == sorted( + nx.degree(self.DG, weight="weight") + ) + + def test_neighbors(self): + assert list(self.G.neighbors(1)) == list(nx.neighbors(self.G, 1)) + assert 
list(self.DG.neighbors(1)) == list(nx.neighbors(self.DG, 1)) + + def test_number_of_nodes(self): + assert self.G.number_of_nodes() == nx.number_of_nodes(self.G) + assert self.DG.number_of_nodes() == nx.number_of_nodes(self.DG) + + def test_number_of_edges(self): + assert self.G.number_of_edges() == nx.number_of_edges(self.G) + assert self.DG.number_of_edges() == nx.number_of_edges(self.DG) + + def test_is_directed(self): + assert self.G.is_directed() == nx.is_directed(self.G) + assert self.DG.is_directed() == nx.is_directed(self.DG) + + def test_add_star(self): + G = self.G.copy() + nlist = [12, 13, 14, 15] + nx.add_star(G, nlist) + assert edges_equal(G.edges(nlist), [(12, 13), (12, 14), (12, 15)]) + + G = self.G.copy() + nx.add_star(G, nlist, weight=2.0) + assert edges_equal( + G.edges(nlist, data=True), + [ + (12, 13, {"weight": 2.0}), + (12, 14, {"weight": 2.0}), + (12, 15, {"weight": 2.0}), + ], + ) + + G = self.G.copy() + nlist = [12] + nx.add_star(G, nlist) + assert nodes_equal(G, list(self.G) + nlist) + + G = self.G.copy() + nlist = [] + nx.add_star(G, nlist) + assert nodes_equal(G.nodes, self.Gnodes) + assert edges_equal(G.edges, self.G.edges) + + def test_add_path(self): + G = self.G.copy() + nlist = [12, 13, 14, 15] + nx.add_path(G, nlist) + assert edges_equal(G.edges(nlist), [(12, 13), (13, 14), (14, 15)]) + G = self.G.copy() + nx.add_path(G, nlist, weight=2.0) + assert edges_equal( + G.edges(nlist, data=True), + [ + (12, 13, {"weight": 2.0}), + (13, 14, {"weight": 2.0}), + (14, 15, {"weight": 2.0}), + ], + ) + + G = self.G.copy() + nlist = ["node"] + nx.add_path(G, nlist) + assert edges_equal(G.edges(nlist), []) + assert nodes_equal(G, list(self.G) + ["node"]) + + G = self.G.copy() + nlist = iter(["node"]) + nx.add_path(G, nlist) + assert edges_equal(G.edges(["node"]), []) + assert nodes_equal(G, list(self.G) + ["node"]) + + G = self.G.copy() + nlist = [12] + nx.add_path(G, nlist) + assert edges_equal(G.edges(nlist), []) + assert nodes_equal(G, 
list(self.G) + [12]) + + G = self.G.copy() + nlist = iter([12]) + nx.add_path(G, nlist) + assert edges_equal(G.edges([12]), []) + assert nodes_equal(G, list(self.G) + [12]) + + G = self.G.copy() + nlist = [] + nx.add_path(G, nlist) + assert edges_equal(G.edges, self.G.edges) + assert nodes_equal(G, list(self.G)) + + G = self.G.copy() + nlist = iter([]) + nx.add_path(G, nlist) + assert edges_equal(G.edges, self.G.edges) + assert nodes_equal(G, list(self.G)) + + def test_add_cycle(self): + G = self.G.copy() + nlist = [12, 13, 14, 15] + oklists = [ + [(12, 13), (12, 15), (13, 14), (14, 15)], + [(12, 13), (13, 14), (14, 15), (15, 12)], + ] + nx.add_cycle(G, nlist) + assert sorted(G.edges(nlist)) in oklists + G = self.G.copy() + oklists = [ + [ + (12, 13, {"weight": 1.0}), + (12, 15, {"weight": 1.0}), + (13, 14, {"weight": 1.0}), + (14, 15, {"weight": 1.0}), + ], + [ + (12, 13, {"weight": 1.0}), + (13, 14, {"weight": 1.0}), + (14, 15, {"weight": 1.0}), + (15, 12, {"weight": 1.0}), + ], + ] + nx.add_cycle(G, nlist, weight=1.0) + assert sorted(G.edges(nlist, data=True)) in oklists + + G = self.G.copy() + nlist = [12] + nx.add_cycle(G, nlist) + assert nodes_equal(G, list(self.G) + nlist) + + G = self.G.copy() + nlist = [] + nx.add_cycle(G, nlist) + assert nodes_equal(G.nodes, self.Gnodes) + assert edges_equal(G.edges, self.G.edges) + + def test_subgraph(self): + assert ( + self.G.subgraph([0, 1, 2, 4]).adj == nx.subgraph(self.G, [0, 1, 2, 4]).adj + ) + assert ( + self.DG.subgraph([0, 1, 2, 4]).adj == nx.subgraph(self.DG, [0, 1, 2, 4]).adj + ) + assert ( + self.G.subgraph([0, 1, 2, 4]).adj + == nx.induced_subgraph(self.G, [0, 1, 2, 4]).adj + ) + assert ( + self.DG.subgraph([0, 1, 2, 4]).adj + == nx.induced_subgraph(self.DG, [0, 1, 2, 4]).adj + ) + # subgraph-subgraph chain is allowed in function interface + H = nx.induced_subgraph(self.G.subgraph([0, 1, 2, 4]), [0, 1, 4]) + assert H._graph is not self.G + assert H.adj == self.G.subgraph([0, 1, 4]).adj + + def 
test_edge_subgraph(self): + assert ( + self.G.edge_subgraph([(1, 2), (0, 3)]).adj + == nx.edge_subgraph(self.G, [(1, 2), (0, 3)]).adj + ) + assert ( + self.DG.edge_subgraph([(1, 2), (0, 3)]).adj + == nx.edge_subgraph(self.DG, [(1, 2), (0, 3)]).adj + ) + + def test_create_empty_copy(self): + G = nx.create_empty_copy(self.G, with_data=False) + assert nodes_equal(G, list(self.G)) + assert G.graph == {} + assert G._node == {}.fromkeys(self.G.nodes(), {}) + assert G._adj == {}.fromkeys(self.G.nodes(), {}) + G = nx.create_empty_copy(self.G) + assert nodes_equal(G, list(self.G)) + assert G.graph == self.G.graph + assert G._node == self.G._node + assert G._adj == {}.fromkeys(self.G.nodes(), {}) + + def test_degree_histogram(self): + assert nx.degree_histogram(self.G) == [1, 1, 1, 1, 1] + + def test_density(self): + assert nx.density(self.G) == 0.5 + assert nx.density(self.DG) == 0.3 + G = nx.Graph() + G.add_node(1) + assert nx.density(G) == 0.0 + + def test_density_selfloop(self): + G = nx.Graph() + G.add_edge(1, 1) + assert nx.density(G) == 0.0 + G.add_edge(1, 2) + assert nx.density(G) == 2.0 + + def test_freeze(self): + G = nx.freeze(self.G) + assert G.frozen + pytest.raises(nx.NetworkXError, G.add_node, 1) + pytest.raises(nx.NetworkXError, G.add_nodes_from, [1]) + pytest.raises(nx.NetworkXError, G.remove_node, 1) + pytest.raises(nx.NetworkXError, G.remove_nodes_from, [1]) + pytest.raises(nx.NetworkXError, G.add_edge, 1, 2) + pytest.raises(nx.NetworkXError, G.add_edges_from, [(1, 2)]) + pytest.raises(nx.NetworkXError, G.remove_edge, 1, 2) + pytest.raises(nx.NetworkXError, G.remove_edges_from, [(1, 2)]) + pytest.raises(nx.NetworkXError, G.clear_edges) + pytest.raises(nx.NetworkXError, G.clear) + + def test_is_frozen(self): + assert not nx.is_frozen(self.G) + G = nx.freeze(self.G) + assert G.frozen == nx.is_frozen(self.G) + assert G.frozen + + def test_node_attributes_are_still_mutable_on_frozen_graph(self): + G = nx.freeze(nx.path_graph(3)) + node = G.nodes[0] + 
node["node_attribute"] = True + assert node["node_attribute"] == True + + def test_edge_attributes_are_still_mutable_on_frozen_graph(self): + G = nx.freeze(nx.path_graph(3)) + edge = G.edges[(0, 1)] + edge["edge_attribute"] = True + assert edge["edge_attribute"] == True + + def test_neighbors_complete_graph(self): + graph = nx.complete_graph(100) + pop = random.sample(list(graph), 1) + nbors = list(nx.neighbors(graph, pop[0])) + # should be all the other vertices in the graph + assert len(nbors) == len(graph) - 1 + + graph = nx.path_graph(100) + node = random.sample(list(graph), 1)[0] + nbors = list(nx.neighbors(graph, node)) + # should be all the other vertices in the graph + if node != 0 and node != 99: + assert len(nbors) == 2 + else: + assert len(nbors) == 1 + + # create a star graph with 99 outer nodes + graph = nx.star_graph(99) + nbors = list(nx.neighbors(graph, 0)) + assert len(nbors) == 99 + + def test_non_neighbors(self): + graph = nx.complete_graph(100) + pop = random.sample(list(graph), 1) + nbors = nx.non_neighbors(graph, pop[0]) + # should be all the other vertices in the graph + assert len(nbors) == 0 + + graph = nx.path_graph(100) + node = random.sample(list(graph), 1)[0] + nbors = nx.non_neighbors(graph, node) + # should be all the other vertices in the graph + if node != 0 and node != 99: + assert len(nbors) == 97 + else: + assert len(nbors) == 98 + + # create a star graph with 99 outer nodes + graph = nx.star_graph(99) + nbors = nx.non_neighbors(graph, 0) + assert len(nbors) == 0 + + # disconnected graph + graph = nx.Graph() + graph.add_nodes_from(range(10)) + nbors = nx.non_neighbors(graph, 0) + assert len(nbors) == 9 + + def test_non_edges(self): + # All possible edges exist + graph = nx.complete_graph(5) + nedges = list(nx.non_edges(graph)) + assert len(nedges) == 0 + + graph = nx.path_graph(4) + expected = [(0, 2), (0, 3), (1, 3)] + nedges = list(nx.non_edges(graph)) + for u, v in expected: + assert (u, v) in nedges or (v, u) in nedges + + 
graph = nx.star_graph(4) + expected = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + nedges = list(nx.non_edges(graph)) + for u, v in expected: + assert (u, v) in nedges or (v, u) in nedges + + # Directed graphs + graph = nx.DiGraph() + graph.add_edges_from([(0, 2), (2, 0), (2, 1)]) + expected = [(0, 1), (1, 0), (1, 2)] + nedges = list(nx.non_edges(graph)) + for e in expected: + assert e in nedges + + def test_is_weighted(self): + G = nx.Graph() + assert not nx.is_weighted(G) + + G = nx.path_graph(4) + assert not nx.is_weighted(G) + assert not nx.is_weighted(G, (2, 3)) + + G.add_node(4) + G.add_edge(3, 4, weight=4) + assert not nx.is_weighted(G) + assert nx.is_weighted(G, (3, 4)) + + G = nx.DiGraph() + G.add_weighted_edges_from( + [ + ("0", "3", 3), + ("0", "1", -5), + ("1", "0", -5), + ("0", "2", 2), + ("1", "2", 4), + ("2", "3", 1), + ] + ) + assert nx.is_weighted(G) + assert nx.is_weighted(G, ("1", "0")) + + G = G.to_undirected() + assert nx.is_weighted(G) + assert nx.is_weighted(G, ("1", "0")) + + pytest.raises(nx.NetworkXError, nx.is_weighted, G, (1, 2)) + + def test_is_negatively_weighted(self): + G = nx.Graph() + assert not nx.is_negatively_weighted(G) + + G.add_node(1) + G.add_nodes_from([2, 3, 4, 5]) + assert not nx.is_negatively_weighted(G) + + G.add_edge(1, 2, weight=4) + assert not nx.is_negatively_weighted(G, (1, 2)) + + G.add_edges_from([(1, 3), (2, 4), (2, 6)]) + G[1][3]["color"] = "blue" + assert not nx.is_negatively_weighted(G) + assert not nx.is_negatively_weighted(G, (1, 3)) + + G[2][4]["weight"] = -2 + assert nx.is_negatively_weighted(G, (2, 4)) + assert nx.is_negatively_weighted(G) + + G = nx.DiGraph() + G.add_weighted_edges_from( + [ + ("0", "3", 3), + ("0", "1", -5), + ("1", "0", -2), + ("0", "2", 2), + ("1", "2", -3), + ("2", "3", 1), + ] + ) + assert nx.is_negatively_weighted(G) + assert not nx.is_negatively_weighted(G, ("0", "3")) + assert nx.is_negatively_weighted(G, ("1", "0")) + + pytest.raises(nx.NetworkXError, 
nx.is_negatively_weighted, G, (1, 4)) + + +class TestCommonNeighbors: + @classmethod + def setup_class(cls): + cls.func = staticmethod(nx.common_neighbors) + + def test_func(G, u, v, expected): + result = sorted(cls.func(G, u, v)) + assert result == expected + + cls.test = staticmethod(test_func) + + def test_K5(self): + G = nx.complete_graph(5) + self.test(G, 0, 1, [2, 3, 4]) + + def test_P3(self): + G = nx.path_graph(3) + self.test(G, 0, 2, [1]) + + def test_S4(self): + G = nx.star_graph(4) + self.test(G, 1, 2, [0]) + + def test_digraph(self): + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.DiGraph() + G.add_edges_from([(0, 1), (1, 2)]) + self.func(G, 0, 2) + + def test_nonexistent_nodes(self): + G = nx.complete_graph(5) + pytest.raises(nx.NetworkXError, nx.common_neighbors, G, 5, 4) + pytest.raises(nx.NetworkXError, nx.common_neighbors, G, 4, 5) + pytest.raises(nx.NetworkXError, nx.common_neighbors, G, 5, 6) + + def test_custom1(self): + """Case of no common neighbors.""" + G = nx.Graph() + G.add_nodes_from([0, 1]) + self.test(G, 0, 1, []) + + def test_custom2(self): + """Case of equal nodes.""" + G = nx.complete_graph(4) + self.test(G, 0, 0, [1, 2, 3]) + + +@pytest.mark.parametrize( + "graph_type", (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph) +) +def test_set_node_attributes(graph_type): + # Test single value + G = nx.path_graph(3, create_using=graph_type) + vals = 100 + attr = "hello" + nx.set_node_attributes(G, vals, attr) + assert G.nodes[0][attr] == vals + assert G.nodes[1][attr] == vals + assert G.nodes[2][attr] == vals + + # Test dictionary + G = nx.path_graph(3, create_using=graph_type) + vals = dict(zip(sorted(G.nodes()), range(len(G)))) + attr = "hi" + nx.set_node_attributes(G, vals, attr) + assert G.nodes[0][attr] == 0 + assert G.nodes[1][attr] == 1 + assert G.nodes[2][attr] == 2 + + # Test dictionary of dictionaries + G = nx.path_graph(3, create_using=graph_type) + d = {"hi": 0, "hello": 200} + vals = dict.fromkeys(G.nodes(), d) + 
vals.pop(0) + nx.set_node_attributes(G, vals) + assert G.nodes[0] == {} + assert G.nodes[1]["hi"] == 0 + assert G.nodes[2]["hello"] == 200 + + +@pytest.mark.parametrize( + ("values", "name"), + ( + ({0: "red", 1: "blue"}, "color"), # values dictionary + ({0: {"color": "red"}, 1: {"color": "blue"}}, None), # dict-of-dict + ), +) +def test_set_node_attributes_ignores_extra_nodes(values, name): + """ + When `values` is a dict or dict-of-dict keyed by nodes, ensure that keys + that correspond to nodes not in G are ignored. + """ + G = nx.Graph() + G.add_node(0) + nx.set_node_attributes(G, values, name) + assert G.nodes[0]["color"] == "red" + assert 1 not in G.nodes + + +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) +def test_set_edge_attributes(graph_type): + # Test single value + G = nx.path_graph(3, create_using=graph_type) + attr = "hello" + vals = 3 + nx.set_edge_attributes(G, vals, attr) + assert G[0][1][attr] == vals + assert G[1][2][attr] == vals + + # Test multiple values + G = nx.path_graph(3, create_using=graph_type) + attr = "hi" + edges = [(0, 1), (1, 2)] + vals = dict(zip(edges, range(len(edges)))) + nx.set_edge_attributes(G, vals, attr) + assert G[0][1][attr] == 0 + assert G[1][2][attr] == 1 + + # Test dictionary of dictionaries + G = nx.path_graph(3, create_using=graph_type) + d = {"hi": 0, "hello": 200} + edges = [(0, 1)] + vals = dict.fromkeys(edges, d) + nx.set_edge_attributes(G, vals) + assert G[0][1]["hi"] == 0 + assert G[0][1]["hello"] == 200 + assert G[1][2] == {} + + +@pytest.mark.parametrize( + ("values", "name"), + ( + ({(0, 1): 1.0, (0, 2): 2.0}, "weight"), # values dict + ({(0, 1): {"weight": 1.0}, (0, 2): {"weight": 2.0}}, None), # values dod + ), +) +def test_set_edge_attributes_ignores_extra_edges(values, name): + """If `values` is a dict or dict-of-dicts containing edges that are not in + G, data associate with these edges should be ignored. 
+ """ + G = nx.Graph([(0, 1)]) + nx.set_edge_attributes(G, values, name) + assert G[0][1]["weight"] == 1.0 + assert (0, 2) not in G.edges + + +@pytest.mark.parametrize("graph_type", (nx.MultiGraph, nx.MultiDiGraph)) +def test_set_edge_attributes_multi(graph_type): + # Test single value + G = nx.path_graph(3, create_using=graph_type) + attr = "hello" + vals = 3 + nx.set_edge_attributes(G, vals, attr) + assert G[0][1][0][attr] == vals + assert G[1][2][0][attr] == vals + + # Test multiple values + G = nx.path_graph(3, create_using=graph_type) + attr = "hi" + edges = [(0, 1, 0), (1, 2, 0)] + vals = dict(zip(edges, range(len(edges)))) + nx.set_edge_attributes(G, vals, attr) + assert G[0][1][0][attr] == 0 + assert G[1][2][0][attr] == 1 + + # Test dictionary of dictionaries + G = nx.path_graph(3, create_using=graph_type) + d = {"hi": 0, "hello": 200} + edges = [(0, 1, 0)] + vals = dict.fromkeys(edges, d) + nx.set_edge_attributes(G, vals) + assert G[0][1][0]["hi"] == 0 + assert G[0][1][0]["hello"] == 200 + assert G[1][2][0] == {} + + +@pytest.mark.parametrize( + ("values", "name"), + ( + ({(0, 1, 0): 1.0, (0, 2, 0): 2.0}, "weight"), # values dict + ({(0, 1, 0): {"weight": 1.0}, (0, 2, 0): {"weight": 2.0}}, None), # values dod + ), +) +def test_set_edge_attributes_multi_ignores_extra_edges(values, name): + """If `values` is a dict or dict-of-dicts containing edges that are not in + G, data associate with these edges should be ignored. 
+ """ + G = nx.MultiGraph([(0, 1, 0), (0, 1, 1)]) + nx.set_edge_attributes(G, values, name) + assert G[0][1][0]["weight"] == 1.0 + assert G[0][1][1] == {} + assert (0, 2) not in G.edges() + + +def test_get_node_attributes(): + graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()] + for G in graphs: + G = nx.path_graph(3, create_using=G) + attr = "hello" + vals = 100 + nx.set_node_attributes(G, vals, attr) + attrs = nx.get_node_attributes(G, attr) + assert attrs[0] == vals + assert attrs[1] == vals + assert attrs[2] == vals + default_val = 1 + G.add_node(4) + attrs = nx.get_node_attributes(G, attr, default=default_val) + assert attrs[4] == default_val + + +def test_get_edge_attributes(): + graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()] + for G in graphs: + G = nx.path_graph(3, create_using=G) + attr = "hello" + vals = 100 + nx.set_edge_attributes(G, vals, attr) + attrs = nx.get_edge_attributes(G, attr) + assert len(attrs) == 2 + + for edge in G.edges: + assert attrs[edge] == vals + + default_val = vals + G.add_edge(4, 5) + deafult_attrs = nx.get_edge_attributes(G, attr, default=default_val) + assert len(deafult_attrs) == 3 + + for edge in G.edges: + assert deafult_attrs[edge] == vals + + +@pytest.mark.parametrize( + "graph_type", (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph) +) +def test_remove_node_attributes(graph_type): + # Test removing single attribute + G = nx.path_graph(3, create_using=graph_type) + vals = 100 + attr = "hello" + nx.set_node_attributes(G, vals, attr) + nx.remove_node_attributes(G, attr) + assert attr not in G.nodes[0] + assert attr not in G.nodes[1] + assert attr not in G.nodes[2] + + # Test removing single attribute when multiple present + G = nx.path_graph(3, create_using=graph_type) + other_vals = 200 + other_attr = "other" + nx.set_node_attributes(G, vals, attr) + nx.set_node_attributes(G, other_vals, other_attr) + nx.remove_node_attributes(G, attr) + assert attr not in G.nodes[0] + 
assert G.nodes[0][other_attr] == other_vals + assert attr not in G.nodes[1] + assert G.nodes[1][other_attr] == other_vals + assert attr not in G.nodes[2] + assert G.nodes[2][other_attr] == other_vals + + # Test removing multiple attributes + G = nx.path_graph(3, create_using=graph_type) + nx.set_node_attributes(G, vals, attr) + nx.set_node_attributes(G, other_vals, other_attr) + nx.remove_node_attributes(G, attr, other_attr) + assert attr not in G.nodes[0] and other_attr not in G.nodes[0] + assert attr not in G.nodes[1] and other_attr not in G.nodes[1] + assert attr not in G.nodes[2] and other_attr not in G.nodes[2] + + # Test removing multiple (but not all) attributes + G = nx.path_graph(3, create_using=graph_type) + third_vals = 300 + third_attr = "three" + nx.set_node_attributes( + G, + { + n: {attr: vals, other_attr: other_vals, third_attr: third_vals} + for n in G.nodes() + }, + ) + nx.remove_node_attributes(G, other_attr, third_attr) + assert other_attr not in G.nodes[0] and third_attr not in G.nodes[0] + assert other_attr not in G.nodes[1] and third_attr not in G.nodes[1] + assert other_attr not in G.nodes[2] and third_attr not in G.nodes[2] + assert G.nodes[0][attr] == vals + assert G.nodes[1][attr] == vals + assert G.nodes[2][attr] == vals + + # Test incomplete node attributes + G = nx.path_graph(3, create_using=graph_type) + nx.set_node_attributes( + G, + { + 1: {attr: vals, other_attr: other_vals}, + 2: {attr: vals, other_attr: other_vals}, + }, + ) + nx.remove_node_attributes(G, attr) + assert attr not in G.nodes[0] + assert attr not in G.nodes[1] + assert attr not in G.nodes[2] + assert G.nodes[1][other_attr] == other_vals + assert G.nodes[2][other_attr] == other_vals + + # Test removing on a subset of nodes + G = nx.path_graph(3, create_using=graph_type) + nx.set_node_attributes( + G, + { + n: {attr: vals, other_attr: other_vals, third_attr: third_vals} + for n in G.nodes() + }, + ) + nx.remove_node_attributes(G, attr, other_attr, nbunch=[0, 1]) + 
assert attr not in G.nodes[0] and other_attr not in G.nodes[0] + assert attr not in G.nodes[1] and other_attr not in G.nodes[1] + assert attr in G.nodes[2] and other_attr in G.nodes[2] + assert third_attr in G.nodes[0] and G.nodes[0][third_attr] == third_vals + assert third_attr in G.nodes[1] and G.nodes[1][third_attr] == third_vals + + +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) +def test_remove_edge_attributes(graph_type): + # Test removing single attribute + G = nx.path_graph(3, create_using=graph_type) + attr = "hello" + vals = 100 + nx.set_edge_attributes(G, vals, attr) + nx.remove_edge_attributes(G, attr) + assert len(nx.get_edge_attributes(G, attr)) == 0 + + # Test removing only some attributes + G = nx.path_graph(3, create_using=graph_type) + other_attr = "other" + other_vals = 200 + nx.set_edge_attributes(G, vals, attr) + nx.set_edge_attributes(G, other_vals, other_attr) + nx.remove_edge_attributes(G, attr) + + assert attr not in G[0][1] + assert attr not in G[1][2] + assert G[0][1][other_attr] == 200 + assert G[1][2][other_attr] == 200 + + # Test removing multiple attributes + G = nx.path_graph(3, create_using=graph_type) + nx.set_edge_attributes(G, vals, attr) + nx.set_edge_attributes(G, other_vals, other_attr) + nx.remove_edge_attributes(G, attr, other_attr) + assert attr not in G[0][1] and other_attr not in G[0][1] + assert attr not in G[1][2] and other_attr not in G[1][2] + + # Test removing multiple (not all) attributes + G = nx.path_graph(3, create_using=graph_type) + third_attr = "third" + third_vals = 300 + nx.set_edge_attributes( + G, + { + (u, v): {attr: vals, other_attr: other_vals, third_attr: third_vals} + for u, v in G.edges() + }, + ) + nx.remove_edge_attributes(G, other_attr, third_attr) + assert other_attr not in G[0][1] and third_attr not in G[0][1] + assert other_attr not in G[1][2] and third_attr not in G[1][2] + assert G[0][1][attr] == vals + assert G[1][2][attr] == vals + + # Test removing incomplete edge 
attributes + G = nx.path_graph(3, create_using=graph_type) + nx.set_edge_attributes(G, {(0, 1): {attr: vals, other_attr: other_vals}}) + nx.remove_edge_attributes(G, other_attr) + assert other_attr not in G[0][1] and G[0][1][attr] == vals + assert other_attr not in G[1][2] + + # Test removing subset of edge attributes + G = nx.path_graph(3, create_using=graph_type) + nx.set_edge_attributes( + G, + { + (u, v): {attr: vals, other_attr: other_vals, third_attr: third_vals} + for u, v in G.edges() + }, + ) + nx.remove_edge_attributes(G, other_attr, third_attr, ebunch=[(0, 1)]) + assert other_attr not in G[0][1] and third_attr not in G[0][1] + assert other_attr in G[1][2] and third_attr in G[1][2] + + +@pytest.mark.parametrize("graph_type", (nx.MultiGraph, nx.MultiDiGraph)) +def test_remove_multi_edge_attributes(graph_type): + # Test removing single attribute + G = nx.path_graph(3, create_using=graph_type) + G.add_edge(1, 2) + attr = "hello" + vals = 100 + nx.set_edge_attributes(G, vals, attr) + nx.remove_edge_attributes(G, attr) + assert attr not in G[0][1][0] + assert attr not in G[1][2][0] + assert attr not in G[1][2][1] + + # Test removing only some attributes + G = nx.path_graph(3, create_using=graph_type) + G.add_edge(1, 2) + other_attr = "other" + other_vals = 200 + nx.set_edge_attributes(G, vals, attr) + nx.set_edge_attributes(G, other_vals, other_attr) + nx.remove_edge_attributes(G, attr) + assert attr not in G[0][1][0] + assert attr not in G[1][2][0] + assert attr not in G[1][2][1] + assert G[0][1][0][other_attr] == other_vals + assert G[1][2][0][other_attr] == other_vals + assert G[1][2][1][other_attr] == other_vals + + # Test removing multiple attributes + G = nx.path_graph(3, create_using=graph_type) + G.add_edge(1, 2) + nx.set_edge_attributes(G, vals, attr) + nx.set_edge_attributes(G, other_vals, other_attr) + nx.remove_edge_attributes(G, attr, other_attr) + assert attr not in G[0][1][0] and other_attr not in G[0][1][0] + assert attr not in G[1][2][0] and 
other_attr not in G[1][2][0] + assert attr not in G[1][2][1] and other_attr not in G[1][2][1] + + # Test removing multiple (not all) attributes + G = nx.path_graph(3, create_using=graph_type) + G.add_edge(1, 2) + third_attr = "third" + third_vals = 300 + nx.set_edge_attributes( + G, + { + (u, v, k): {attr: vals, other_attr: other_vals, third_attr: third_vals} + for u, v, k in G.edges(keys=True) + }, + ) + nx.remove_edge_attributes(G, other_attr, third_attr) + assert other_attr not in G[0][1][0] and third_attr not in G[0][1][0] + assert other_attr not in G[1][2][0] and other_attr not in G[1][2][0] + assert other_attr not in G[1][2][1] and other_attr not in G[1][2][1] + assert G[0][1][0][attr] == vals + assert G[1][2][0][attr] == vals + assert G[1][2][1][attr] == vals + + # Test removing incomplete edge attributes + G = nx.path_graph(3, create_using=graph_type) + G.add_edge(1, 2) + nx.set_edge_attributes( + G, + { + (0, 1, 0): {attr: vals, other_attr: other_vals}, + (1, 2, 1): {attr: vals, other_attr: other_vals}, + }, + ) + nx.remove_edge_attributes(G, other_attr) + assert other_attr not in G[0][1][0] and G[0][1][0][attr] == vals + assert other_attr not in G[1][2][0] + assert other_attr not in G[1][2][1] + + # Test removing subset of edge attributes + G = nx.path_graph(3, create_using=graph_type) + G.add_edge(1, 2) + nx.set_edge_attributes( + G, + { + (0, 1, 0): {attr: vals, other_attr: other_vals}, + (1, 2, 0): {attr: vals, other_attr: other_vals}, + (1, 2, 1): {attr: vals, other_attr: other_vals}, + }, + ) + nx.remove_edge_attributes(G, attr, ebunch=[(0, 1, 0), (1, 2, 0)]) + assert attr not in G[0][1][0] and other_attr in G[0][1][0] + assert attr not in G[1][2][0] and other_attr in G[1][2][0] + assert attr in G[1][2][1] and other_attr in G[1][2][1] + + +def test_is_empty(): + graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()] + for G in graphs: + assert nx.is_empty(G) + G.add_nodes_from(range(5)) + assert nx.is_empty(G) + G.add_edges_from([(1, 
2), (3, 4)]) + assert not nx.is_empty(G) + + +@pytest.mark.parametrize( + "graph_type", [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph] +) +def test_selfloops(graph_type): + G = nx.complete_graph(3, create_using=graph_type) + G.add_edge(0, 0) + assert nodes_equal(nx.nodes_with_selfloops(G), [0]) + assert edges_equal(nx.selfloop_edges(G), [(0, 0)]) + assert edges_equal(nx.selfloop_edges(G, data=True), [(0, 0, {})]) + assert nx.number_of_selfloops(G) == 1 + + +@pytest.mark.parametrize( + "graph_type", [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph] +) +def test_selfloop_edges_attr(graph_type): + G = nx.complete_graph(3, create_using=graph_type) + G.add_edge(0, 0) + G.add_edge(1, 1, weight=2) + assert edges_equal( + nx.selfloop_edges(G, data=True), [(0, 0, {}), (1, 1, {"weight": 2})] + ) + assert edges_equal(nx.selfloop_edges(G, data="weight"), [(0, 0, None), (1, 1, 2)]) + + +def test_selfloop_edges_multi_with_data_and_keys(): + G = nx.complete_graph(3, create_using=nx.MultiGraph) + G.add_edge(0, 0, weight=10) + G.add_edge(0, 0, weight=100) + assert edges_equal( + nx.selfloop_edges(G, data="weight", keys=True), [(0, 0, 0, 10), (0, 0, 1, 100)] + ) + + +@pytest.mark.parametrize("graph_type", [nx.Graph, nx.DiGraph]) +def test_selfloops_removal(graph_type): + G = nx.complete_graph(3, create_using=graph_type) + G.add_edge(0, 0) + G.remove_edges_from(nx.selfloop_edges(G, keys=True)) + G.add_edge(0, 0) + G.remove_edges_from(nx.selfloop_edges(G, data=True)) + G.add_edge(0, 0) + G.remove_edges_from(nx.selfloop_edges(G, keys=True, data=True)) + + +@pytest.mark.parametrize("graph_type", [nx.MultiGraph, nx.MultiDiGraph]) +def test_selfloops_removal_multi(graph_type): + """test removing selfloops behavior vis-a-vis altering a dict while iterating. + cf. 
gh-4068""" + G = nx.complete_graph(3, create_using=graph_type) + # Defaults - see gh-4080 + G.add_edge(0, 0) + G.add_edge(0, 0) + G.remove_edges_from(nx.selfloop_edges(G)) + assert (0, 0) not in G.edges() + # With keys + G.add_edge(0, 0) + G.add_edge(0, 0) + with pytest.raises(RuntimeError): + G.remove_edges_from(nx.selfloop_edges(G, keys=True)) + # With data + G.add_edge(0, 0) + G.add_edge(0, 0) + with pytest.raises(TypeError): + G.remove_edges_from(nx.selfloop_edges(G, data=True)) + # With keys and data + G.add_edge(0, 0) + G.add_edge(0, 0) + with pytest.raises(RuntimeError): + G.remove_edges_from(nx.selfloop_edges(G, data=True, keys=True)) + + +def test_pathweight(): + valid_path = [1, 2, 3] + invalid_path = [1, 3, 2] + graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()] + edges = [ + (1, 2, {"cost": 5, "dist": 6}), + (2, 3, {"cost": 3, "dist": 4}), + (1, 2, {"cost": 1, "dist": 2}), + ] + for graph in graphs: + graph.add_edges_from(edges) + assert nx.path_weight(graph, valid_path, "cost") == 4 + assert nx.path_weight(graph, valid_path, "dist") == 6 + pytest.raises(nx.NetworkXNoPath, nx.path_weight, graph, invalid_path, "cost") + + +@pytest.mark.parametrize( + "G", (nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()) +) +def test_ispath(G): + G.add_edges_from([(1, 2), (2, 3), (1, 2), (3, 4)]) + valid_path = [1, 2, 3, 4] + invalid_path = [1, 2, 4, 3] # wrong node order + another_invalid_path = [1, 2, 3, 4, 5] # contains node not in G + assert nx.is_path(G, valid_path) + assert not nx.is_path(G, invalid_path) + assert not nx.is_path(G, another_invalid_path) + + +@pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph())) +def test_restricted_view(G): + G.add_edges_from([(0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2)]) + G.add_node(4) + H = nx.restricted_view(G, [0, 2, 5], [(1, 2), (3, 4)]) + assert set(H.nodes()) == {1, 3, 4} + assert set(H.edges()) == {(1, 1)} + + +@pytest.mark.parametrize("G", (nx.MultiGraph(), nx.MultiDiGraph())) +def 
test_restricted_view_multi(G): + G.add_edges_from( + [(0, 1, 0), (0, 2, 0), (0, 3, 0), (0, 1, 1), (1, 0, 0), (1, 1, 0), (1, 2, 0)] + ) + G.add_node(4) + H = nx.restricted_view(G, [0, 2, 5], [(1, 2, 0), (3, 4, 0)]) + assert set(H.nodes()) == {1, 3, 4} + assert set(H.edges()) == {(1, 1)} diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_graph.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_graph.py new file mode 100644 index 0000000000000000000000000000000000000000..b0048a31f04eb583f4bcfc32385c2335b94467ad --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_graph.py @@ -0,0 +1,920 @@ +import gc +import pickle +import platform +import weakref + +import pytest + +import networkx as nx +from networkx.utils import edges_equal, graphs_equal, nodes_equal + + +class BaseGraphTester: + """Tests for data-structure independent graph class features.""" + + def test_contains(self): + G = self.K3 + assert 1 in G + assert 4 not in G + assert "b" not in G + assert [] not in G # no exception for nonhashable + assert {1: 1} not in G # no exception for nonhashable + + def test_order(self): + G = self.K3 + assert len(G) == 3 + assert G.order() == 3 + assert G.number_of_nodes() == 3 + + def test_nodes(self): + G = self.K3 + assert isinstance(G._node, G.node_dict_factory) + assert isinstance(G._adj, G.adjlist_outer_dict_factory) + assert all( + isinstance(adj, G.adjlist_inner_dict_factory) for adj in G._adj.values() + ) + assert sorted(G.nodes()) == self.k3nodes + assert sorted(G.nodes(data=True)) == [(0, {}), (1, {}), (2, {})] + + def test_none_node(self): + G = self.Graph() + with pytest.raises(ValueError): + G.add_node(None) + with pytest.raises(ValueError): + G.add_nodes_from([None]) + with pytest.raises(ValueError): + G.add_edge(0, None) + with pytest.raises(ValueError): + G.add_edges_from([(0, None)]) + + def test_has_node(self): + G = self.K3 + assert G.has_node(1) + assert not 
G.has_node(4) + assert not G.has_node([]) # no exception for nonhashable + assert not G.has_node({1: 1}) # no exception for nonhashable + + def test_has_edge(self): + G = self.K3 + assert G.has_edge(0, 1) + assert not G.has_edge(0, -1) + + def test_neighbors(self): + G = self.K3 + assert sorted(G.neighbors(0)) == [1, 2] + with pytest.raises(nx.NetworkXError): + G.neighbors(-1) + + @pytest.mark.skipif( + platform.python_implementation() == "PyPy", reason="PyPy gc is different" + ) + def test_memory_leak(self): + G = self.Graph() + + def count_objects_of_type(_type): + # Iterating over all objects tracked by gc can include weak references + # whose weakly-referenced objects may no longer exist. Calling `isinstance` + # on such a weak reference will raise ReferenceError. There are at least + # three workarounds for this: one is to compare type names instead of using + # `isinstance` such as `type(obj).__name__ == typename`, another is to use + # `type(obj) == _type`, and the last is to ignore ProxyTypes as we do below. + # NOTE: even if this safeguard is deemed unnecessary to pass NetworkX tests, + # we should still keep it for maximum safety for other NetworkX backends. 
+ return sum( + 1 + for obj in gc.get_objects() + if not isinstance(obj, weakref.ProxyTypes) and isinstance(obj, _type) + ) + + gc.collect() + before = count_objects_of_type(self.Graph) + G.copy() + gc.collect() + after = count_objects_of_type(self.Graph) + assert before == after + + # test a subgraph of the base class + class MyGraph(self.Graph): + pass + + gc.collect() + G = MyGraph() + before = count_objects_of_type(MyGraph) + G.copy() + gc.collect() + after = count_objects_of_type(MyGraph) + assert before == after + + def test_edges(self): + G = self.K3 + assert isinstance(G._adj, G.adjlist_outer_dict_factory) + assert edges_equal(G.edges(), [(0, 1), (0, 2), (1, 2)]) + assert edges_equal(G.edges(0), [(0, 1), (0, 2)]) + assert edges_equal(G.edges([0, 1]), [(0, 1), (0, 2), (1, 2)]) + with pytest.raises(nx.NetworkXError): + G.edges(-1) + + def test_degree(self): + G = self.K3 + assert sorted(G.degree()) == [(0, 2), (1, 2), (2, 2)] + assert dict(G.degree()) == {0: 2, 1: 2, 2: 2} + assert G.degree(0) == 2 + with pytest.raises(nx.NetworkXError): + G.degree(-1) # node not in graph + + def test_size(self): + G = self.K3 + assert G.size() == 3 + assert G.number_of_edges() == 3 + + def test_nbunch_iter(self): + G = self.K3 + assert nodes_equal(G.nbunch_iter(), self.k3nodes) # all nodes + assert nodes_equal(G.nbunch_iter(0), [0]) # single node + assert nodes_equal(G.nbunch_iter([0, 1]), [0, 1]) # sequence + # sequence with none in graph + assert nodes_equal(G.nbunch_iter([-1]), []) + # string sequence with none in graph + assert nodes_equal(G.nbunch_iter("foo"), []) + # node not in graph doesn't get caught upon creation of iterator + bunch = G.nbunch_iter(-1) + # but gets caught when iterator used + with pytest.raises(nx.NetworkXError, match="is not a node or a sequence"): + list(bunch) + # unhashable doesn't get caught upon creation of iterator + bunch = G.nbunch_iter([0, 1, 2, {}]) + # but gets caught when iterator hits the unhashable + with pytest.raises( + 
nx.NetworkXError, match="in sequence nbunch is not a valid node" + ): + list(bunch) + + def test_nbunch_iter_node_format_raise(self): + # Tests that a node that would have failed string formatting + # doesn't cause an error when attempting to raise a + # :exc:`nx.NetworkXError`. + + # For more information, see pull request #1813. + G = self.Graph() + nbunch = [("x", set())] + with pytest.raises(nx.NetworkXError): + list(G.nbunch_iter(nbunch)) + + def test_selfloop_degree(self): + G = self.Graph() + G.add_edge(1, 1) + assert sorted(G.degree()) == [(1, 2)] + assert dict(G.degree()) == {1: 2} + assert G.degree(1) == 2 + assert sorted(G.degree([1])) == [(1, 2)] + assert G.degree(1, weight="weight") == 2 + + def test_selfloops(self): + G = self.K3.copy() + G.add_edge(0, 0) + assert nodes_equal(nx.nodes_with_selfloops(G), [0]) + assert edges_equal(nx.selfloop_edges(G), [(0, 0)]) + assert nx.number_of_selfloops(G) == 1 + G.remove_edge(0, 0) + G.add_edge(0, 0) + G.remove_edges_from([(0, 0)]) + G.add_edge(1, 1) + G.remove_node(1) + G.add_edge(0, 0) + G.add_edge(1, 1) + G.remove_nodes_from([0, 1]) + + def test_cache_reset(self): + G = self.K3.copy() + old_adj = G.adj + assert id(G.adj) == id(old_adj) + G._adj = {} + assert id(G.adj) != id(old_adj) + + old_nodes = G.nodes + assert id(G.nodes) == id(old_nodes) + G._node = {} + assert id(G.nodes) != id(old_nodes) + + def test_attributes_cached(self): + G = self.K3.copy() + assert id(G.nodes) == id(G.nodes) + assert id(G.edges) == id(G.edges) + assert id(G.degree) == id(G.degree) + assert id(G.adj) == id(G.adj) + + +class BaseAttrGraphTester(BaseGraphTester): + """Tests of graph class attribute features.""" + + def test_weighted_degree(self): + G = self.Graph() + G.add_edge(1, 2, weight=2, other=3) + G.add_edge(2, 3, weight=3, other=4) + assert sorted(d for n, d in G.degree(weight="weight")) == [2, 3, 5] + assert dict(G.degree(weight="weight")) == {1: 2, 2: 5, 3: 3} + assert G.degree(1, weight="weight") == 2 + assert 
nodes_equal((G.degree([1], weight="weight")), [(1, 2)]) + + assert nodes_equal((d for n, d in G.degree(weight="other")), [3, 7, 4]) + assert dict(G.degree(weight="other")) == {1: 3, 2: 7, 3: 4} + assert G.degree(1, weight="other") == 3 + assert edges_equal((G.degree([1], weight="other")), [(1, 3)]) + + def add_attributes(self, G): + G.graph["foo"] = [] + G.nodes[0]["foo"] = [] + G.remove_edge(1, 2) + ll = [] + G.add_edge(1, 2, foo=ll) + G.add_edge(2, 1, foo=ll) + + def test_name(self): + G = self.Graph(name="") + assert G.name == "" + G = self.Graph(name="test") + assert G.name == "test" + + def test_str_unnamed(self): + G = self.Graph() + G.add_edges_from([(1, 2), (2, 3)]) + assert str(G) == f"{type(G).__name__} with 3 nodes and 2 edges" + + def test_str_named(self): + G = self.Graph(name="foo") + G.add_edges_from([(1, 2), (2, 3)]) + assert str(G) == f"{type(G).__name__} named 'foo' with 3 nodes and 2 edges" + + def test_graph_chain(self): + G = self.Graph([(0, 1), (1, 2)]) + DG = G.to_directed(as_view=True) + SDG = DG.subgraph([0, 1]) + RSDG = SDG.reverse(copy=False) + assert G is DG._graph + assert DG is SDG._graph + assert SDG is RSDG._graph + + def test_copy(self): + G = self.Graph() + G.add_node(0) + G.add_edge(1, 2) + self.add_attributes(G) + # copy edge datadict but any container attr are same + H = G.copy() + self.graphs_equal(H, G) + self.different_attrdict(H, G) + self.shallow_copy_attrdict(H, G) + + def test_class_copy(self): + G = self.Graph() + G.add_node(0) + G.add_edge(1, 2) + self.add_attributes(G) + # copy edge datadict but any container attr are same + H = G.__class__(G) + self.graphs_equal(H, G) + self.different_attrdict(H, G) + self.shallow_copy_attrdict(H, G) + + def test_fresh_copy(self): + G = self.Graph() + G.add_node(0) + G.add_edge(1, 2) + self.add_attributes(G) + # copy graph structure but use fresh datadict + H = G.__class__() + H.add_nodes_from(G) + H.add_edges_from(G.edges()) + assert len(G.nodes[0]) == 1 + ddict = G.adj[1][2][0] if 
G.is_multigraph() else G.adj[1][2] + assert len(ddict) == 1 + assert len(H.nodes[0]) == 0 + ddict = H.adj[1][2][0] if H.is_multigraph() else H.adj[1][2] + assert len(ddict) == 0 + + def is_deepcopy(self, H, G): + self.graphs_equal(H, G) + self.different_attrdict(H, G) + self.deep_copy_attrdict(H, G) + + def deep_copy_attrdict(self, H, G): + self.deepcopy_graph_attr(H, G) + self.deepcopy_node_attr(H, G) + self.deepcopy_edge_attr(H, G) + + def deepcopy_graph_attr(self, H, G): + assert G.graph["foo"] == H.graph["foo"] + G.graph["foo"].append(1) + assert G.graph["foo"] != H.graph["foo"] + + def deepcopy_node_attr(self, H, G): + assert G.nodes[0]["foo"] == H.nodes[0]["foo"] + G.nodes[0]["foo"].append(1) + assert G.nodes[0]["foo"] != H.nodes[0]["foo"] + + def deepcopy_edge_attr(self, H, G): + assert G[1][2]["foo"] == H[1][2]["foo"] + G[1][2]["foo"].append(1) + assert G[1][2]["foo"] != H[1][2]["foo"] + + def is_shallow_copy(self, H, G): + self.graphs_equal(H, G) + self.shallow_copy_attrdict(H, G) + + def shallow_copy_attrdict(self, H, G): + self.shallow_copy_graph_attr(H, G) + self.shallow_copy_node_attr(H, G) + self.shallow_copy_edge_attr(H, G) + + def shallow_copy_graph_attr(self, H, G): + assert G.graph["foo"] == H.graph["foo"] + G.graph["foo"].append(1) + assert G.graph["foo"] == H.graph["foo"] + + def shallow_copy_node_attr(self, H, G): + assert G.nodes[0]["foo"] == H.nodes[0]["foo"] + G.nodes[0]["foo"].append(1) + assert G.nodes[0]["foo"] == H.nodes[0]["foo"] + + def shallow_copy_edge_attr(self, H, G): + assert G[1][2]["foo"] == H[1][2]["foo"] + G[1][2]["foo"].append(1) + assert G[1][2]["foo"] == H[1][2]["foo"] + + def same_attrdict(self, H, G): + old_foo = H[1][2]["foo"] + H.adj[1][2]["foo"] = "baz" + assert G.edges == H.edges + H.adj[1][2]["foo"] = old_foo + assert G.edges == H.edges + + old_foo = H.nodes[0]["foo"] + H.nodes[0]["foo"] = "baz" + assert G.nodes == H.nodes + H.nodes[0]["foo"] = old_foo + assert G.nodes == H.nodes + + def different_attrdict(self, H, 
G): + old_foo = H[1][2]["foo"] + H.adj[1][2]["foo"] = "baz" + assert G._adj != H._adj + H.adj[1][2]["foo"] = old_foo + assert G._adj == H._adj + + old_foo = H.nodes[0]["foo"] + H.nodes[0]["foo"] = "baz" + assert G._node != H._node + H.nodes[0]["foo"] = old_foo + assert G._node == H._node + + def graphs_equal(self, H, G): + assert G._adj == H._adj + assert G._node == H._node + assert G.graph == H.graph + assert G.name == H.name + if not G.is_directed() and not H.is_directed(): + assert H._adj[1][2] is H._adj[2][1] + assert G._adj[1][2] is G._adj[2][1] + else: # at least one is directed + if not G.is_directed(): + G._pred = G._adj + G._succ = G._adj + if not H.is_directed(): + H._pred = H._adj + H._succ = H._adj + assert G._pred == H._pred + assert G._succ == H._succ + assert H._succ[1][2] is H._pred[2][1] + assert G._succ[1][2] is G._pred[2][1] + + def test_graph_attr(self): + G = self.K3.copy() + G.graph["foo"] = "bar" + assert isinstance(G.graph, G.graph_attr_dict_factory) + assert G.graph["foo"] == "bar" + del G.graph["foo"] + assert G.graph == {} + H = self.Graph(foo="bar") + assert H.graph["foo"] == "bar" + + def test_node_attr(self): + G = self.K3.copy() + G.add_node(1, foo="bar") + assert all( + isinstance(d, G.node_attr_dict_factory) for u, d in G.nodes(data=True) + ) + assert nodes_equal(G.nodes(), [0, 1, 2]) + assert nodes_equal(G.nodes(data=True), [(0, {}), (1, {"foo": "bar"}), (2, {})]) + G.nodes[1]["foo"] = "baz" + assert nodes_equal(G.nodes(data=True), [(0, {}), (1, {"foo": "baz"}), (2, {})]) + assert nodes_equal(G.nodes(data="foo"), [(0, None), (1, "baz"), (2, None)]) + assert nodes_equal( + G.nodes(data="foo", default="bar"), [(0, "bar"), (1, "baz"), (2, "bar")] + ) + + def test_node_attr2(self): + G = self.K3.copy() + a = {"foo": "bar"} + G.add_node(3, **a) + assert nodes_equal(G.nodes(), [0, 1, 2, 3]) + assert nodes_equal( + G.nodes(data=True), [(0, {}), (1, {}), (2, {}), (3, {"foo": "bar"})] + ) + + def test_edge_lookup(self): + G = self.Graph() + 
G.add_edge(1, 2, foo="bar") + assert edges_equal(G.edges[1, 2], {"foo": "bar"}) + + def test_edge_attr(self): + G = self.Graph() + G.add_edge(1, 2, foo="bar") + assert all( + isinstance(d, G.edge_attr_dict_factory) for u, v, d in G.edges(data=True) + ) + assert edges_equal(G.edges(data=True), [(1, 2, {"foo": "bar"})]) + assert edges_equal(G.edges(data="foo"), [(1, 2, "bar")]) + + def test_edge_attr2(self): + G = self.Graph() + G.add_edges_from([(1, 2), (3, 4)], foo="foo") + assert edges_equal( + G.edges(data=True), [(1, 2, {"foo": "foo"}), (3, 4, {"foo": "foo"})] + ) + assert edges_equal(G.edges(data="foo"), [(1, 2, "foo"), (3, 4, "foo")]) + + def test_edge_attr3(self): + G = self.Graph() + G.add_edges_from([(1, 2, {"weight": 32}), (3, 4, {"weight": 64})], foo="foo") + assert edges_equal( + G.edges(data=True), + [ + (1, 2, {"foo": "foo", "weight": 32}), + (3, 4, {"foo": "foo", "weight": 64}), + ], + ) + + G.remove_edges_from([(1, 2), (3, 4)]) + G.add_edge(1, 2, data=7, spam="bar", bar="foo") + assert edges_equal( + G.edges(data=True), [(1, 2, {"data": 7, "spam": "bar", "bar": "foo"})] + ) + + def test_edge_attr4(self): + G = self.Graph() + G.add_edge(1, 2, data=7, spam="bar", bar="foo") + assert edges_equal( + G.edges(data=True), [(1, 2, {"data": 7, "spam": "bar", "bar": "foo"})] + ) + G[1][2]["data"] = 10 # OK to set data like this + assert edges_equal( + G.edges(data=True), [(1, 2, {"data": 10, "spam": "bar", "bar": "foo"})] + ) + + G.adj[1][2]["data"] = 20 + assert edges_equal( + G.edges(data=True), [(1, 2, {"data": 20, "spam": "bar", "bar": "foo"})] + ) + G.edges[1, 2]["data"] = 21 # another spelling, "edge" + assert edges_equal( + G.edges(data=True), [(1, 2, {"data": 21, "spam": "bar", "bar": "foo"})] + ) + G.adj[1][2]["listdata"] = [20, 200] + G.adj[1][2]["weight"] = 20 + dd = { + "data": 21, + "spam": "bar", + "bar": "foo", + "listdata": [20, 200], + "weight": 20, + } + assert edges_equal(G.edges(data=True), [(1, 2, dd)]) + + def test_to_undirected(self): + 
G = self.K3 + self.add_attributes(G) + H = nx.Graph(G) + self.is_shallow_copy(H, G) + self.different_attrdict(H, G) + H = G.to_undirected() + self.is_deepcopy(H, G) + + def test_to_directed_as_view(self): + H = nx.path_graph(2, create_using=self.Graph) + H2 = H.to_directed(as_view=True) + assert H is H2._graph + assert H2.has_edge(0, 1) + assert H2.has_edge(1, 0) or H.is_directed() + pytest.raises(nx.NetworkXError, H2.add_node, -1) + pytest.raises(nx.NetworkXError, H2.add_edge, 1, 2) + H.add_edge(1, 2) + assert H2.has_edge(1, 2) + assert H2.has_edge(2, 1) or H.is_directed() + + def test_to_undirected_as_view(self): + H = nx.path_graph(2, create_using=self.Graph) + H2 = H.to_undirected(as_view=True) + assert H is H2._graph + assert H2.has_edge(0, 1) + assert H2.has_edge(1, 0) + pytest.raises(nx.NetworkXError, H2.add_node, -1) + pytest.raises(nx.NetworkXError, H2.add_edge, 1, 2) + H.add_edge(1, 2) + assert H2.has_edge(1, 2) + assert H2.has_edge(2, 1) + + def test_directed_class(self): + G = self.Graph() + + class newGraph(G.to_undirected_class()): + def to_directed_class(self): + return newDiGraph + + def to_undirected_class(self): + return newGraph + + class newDiGraph(G.to_directed_class()): + def to_directed_class(self): + return newDiGraph + + def to_undirected_class(self): + return newGraph + + G = newDiGraph() if G.is_directed() else newGraph() + H = G.to_directed() + assert isinstance(H, newDiGraph) + H = G.to_undirected() + assert isinstance(H, newGraph) + + def test_to_directed(self): + G = self.K3 + self.add_attributes(G) + H = nx.DiGraph(G) + self.is_shallow_copy(H, G) + self.different_attrdict(H, G) + H = G.to_directed() + self.is_deepcopy(H, G) + + def test_subgraph(self): + G = self.K3 + self.add_attributes(G) + H = G.subgraph([0, 1, 2, 5]) + self.graphs_equal(H, G) + self.same_attrdict(H, G) + self.shallow_copy_attrdict(H, G) + + H = G.subgraph(0) + assert H.adj == {0: {}} + H = G.subgraph([]) + assert H.adj == {} + assert G.adj != {} + + def 
test_selfloops_attr(self): + G = self.K3.copy() + G.add_edge(0, 0) + G.add_edge(1, 1, weight=2) + assert edges_equal( + nx.selfloop_edges(G, data=True), [(0, 0, {}), (1, 1, {"weight": 2})] + ) + assert edges_equal( + nx.selfloop_edges(G, data="weight"), [(0, 0, None), (1, 1, 2)] + ) + + +class TestGraph(BaseAttrGraphTester): + """Tests specific to dict-of-dict-of-dict graph data structure""" + + def setup_method(self): + self.Graph = nx.Graph + # build dict-of-dict-of-dict K3 + ed1, ed2, ed3 = ({}, {}, {}) + self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}} + self.k3edges = [(0, 1), (0, 2), (1, 2)] + self.k3nodes = [0, 1, 2] + self.K3 = self.Graph() + self.K3._adj = self.k3adj + self.K3._node = {} + self.K3._node[0] = {} + self.K3._node[1] = {} + self.K3._node[2] = {} + + def test_pickle(self): + G = self.K3 + pg = pickle.loads(pickle.dumps(G, -1)) + self.graphs_equal(pg, G) + pg = pickle.loads(pickle.dumps(G)) + self.graphs_equal(pg, G) + + def test_data_input(self): + G = self.Graph({1: [2], 2: [1]}, name="test") + assert G.name == "test" + assert sorted(G.adj.items()) == [(1, {2: {}}), (2, {1: {}})] + + def test_adjacency(self): + G = self.K3 + assert dict(G.adjacency()) == { + 0: {1: {}, 2: {}}, + 1: {0: {}, 2: {}}, + 2: {0: {}, 1: {}}, + } + + def test_getitem(self): + G = self.K3 + assert G.adj[0] == {1: {}, 2: {}} + assert G[0] == {1: {}, 2: {}} + with pytest.raises(KeyError): + G.__getitem__("j") + with pytest.raises(TypeError): + G.__getitem__(["A"]) + + def test_add_node(self): + G = self.Graph() + G.add_node(0) + assert G.adj == {0: {}} + # test add attributes + G.add_node(1, c="red") + G.add_node(2, c="blue") + G.add_node(3, c="red") + assert G.nodes[1]["c"] == "red" + assert G.nodes[2]["c"] == "blue" + assert G.nodes[3]["c"] == "red" + # test updating attributes + G.add_node(1, c="blue") + G.add_node(2, c="red") + G.add_node(3, c="blue") + assert G.nodes[1]["c"] == "blue" + assert G.nodes[2]["c"] == "red" + assert 
G.nodes[3]["c"] == "blue" + + def test_add_nodes_from(self): + G = self.Graph() + G.add_nodes_from([0, 1, 2]) + assert G.adj == {0: {}, 1: {}, 2: {}} + # test add attributes + G.add_nodes_from([0, 1, 2], c="red") + assert G.nodes[0]["c"] == "red" + assert G.nodes[2]["c"] == "red" + # test that attribute dicts are not the same + assert G.nodes[0] is not G.nodes[1] + # test updating attributes + G.add_nodes_from([0, 1, 2], c="blue") + assert G.nodes[0]["c"] == "blue" + assert G.nodes[2]["c"] == "blue" + assert G.nodes[0] is not G.nodes[1] + # test tuple input + H = self.Graph() + H.add_nodes_from(G.nodes(data=True)) + assert H.nodes[0]["c"] == "blue" + assert H.nodes[2]["c"] == "blue" + assert H.nodes[0] is not H.nodes[1] + # specific overrides general + H.add_nodes_from([0, (1, {"c": "green"}), (3, {"c": "cyan"})], c="red") + assert H.nodes[0]["c"] == "red" + assert H.nodes[1]["c"] == "green" + assert H.nodes[2]["c"] == "blue" + assert H.nodes[3]["c"] == "cyan" + + def test_remove_node(self): + G = self.K3.copy() + G.remove_node(0) + assert G.adj == {1: {2: {}}, 2: {1: {}}} + with pytest.raises(nx.NetworkXError): + G.remove_node(-1) + + # generator here to implement list,set,string... 
+ + def test_remove_nodes_from(self): + G = self.K3.copy() + G.remove_nodes_from([0, 1]) + assert G.adj == {2: {}} + G.remove_nodes_from([-1]) # silent fail + + def test_add_edge(self): + G = self.Graph() + G.add_edge(0, 1) + assert G.adj == {0: {1: {}}, 1: {0: {}}} + G = self.Graph() + G.add_edge(*(0, 1)) + assert G.adj == {0: {1: {}}, 1: {0: {}}} + G = self.Graph() + with pytest.raises(ValueError): + G.add_edge(None, "anything") + + def test_add_edges_from(self): + G = self.Graph() + G.add_edges_from([(0, 1), (0, 2, {"weight": 3})]) + assert G.adj == { + 0: {1: {}, 2: {"weight": 3}}, + 1: {0: {}}, + 2: {0: {"weight": 3}}, + } + G = self.Graph() + G.add_edges_from([(0, 1), (0, 2, {"weight": 3}), (1, 2, {"data": 4})], data=2) + assert G.adj == { + 0: {1: {"data": 2}, 2: {"weight": 3, "data": 2}}, + 1: {0: {"data": 2}, 2: {"data": 4}}, + 2: {0: {"weight": 3, "data": 2}, 1: {"data": 4}}, + } + + with pytest.raises(nx.NetworkXError): + G.add_edges_from([(0,)]) # too few in tuple + with pytest.raises(nx.NetworkXError): + G.add_edges_from([(0, 1, 2, 3)]) # too many in tuple + with pytest.raises(TypeError): + G.add_edges_from([0]) # not a tuple + with pytest.raises(ValueError): + G.add_edges_from([(None, 3), (3, 2)]) # None cannot be a node + + def test_remove_edge(self): + G = self.K3.copy() + G.remove_edge(0, 1) + assert G.adj == {0: {2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}} + with pytest.raises(nx.NetworkXError): + G.remove_edge(-1, 0) + + def test_remove_edges_from(self): + G = self.K3.copy() + G.remove_edges_from([(0, 1)]) + assert G.adj == {0: {2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}} + G.remove_edges_from([(0, 0)]) # silent fail + + def test_clear(self): + G = self.K3.copy() + G.graph["name"] = "K3" + G.clear() + assert list(G.nodes) == [] + assert G.adj == {} + assert G.graph == {} + + def test_clear_edges(self): + G = self.K3.copy() + G.graph["name"] = "K3" + nodes = list(G.nodes) + G.clear_edges() + assert list(G.nodes) == nodes + assert G.adj == {0: {}, 1: {}, 2: 
{}} + assert list(G.edges) == [] + assert G.graph["name"] == "K3" + + def test_edges_data(self): + G = self.K3 + all_edges = [(0, 1, {}), (0, 2, {}), (1, 2, {})] + assert edges_equal(G.edges(data=True), all_edges) + assert edges_equal(G.edges(0, data=True), [(0, 1, {}), (0, 2, {})]) + assert edges_equal(G.edges([0, 1], data=True), all_edges) + with pytest.raises(nx.NetworkXError): + G.edges(-1, True) + + def test_get_edge_data(self): + G = self.K3.copy() + assert G.get_edge_data(0, 1) == {} + assert G[0][1] == {} + assert G.get_edge_data(10, 20) is None + assert G.get_edge_data(-1, 0) is None + assert G.get_edge_data(-1, 0, default=1) == 1 + + def test_update(self): + # specify both edges and nodes + G = self.K3.copy() + G.update(nodes=[3, (4, {"size": 2})], edges=[(4, 5), (6, 7, {"weight": 2})]) + nlist = [ + (0, {}), + (1, {}), + (2, {}), + (3, {}), + (4, {"size": 2}), + (5, {}), + (6, {}), + (7, {}), + ] + assert sorted(G.nodes.data()) == nlist + if G.is_directed(): + elist = [ + (0, 1, {}), + (0, 2, {}), + (1, 0, {}), + (1, 2, {}), + (2, 0, {}), + (2, 1, {}), + (4, 5, {}), + (6, 7, {"weight": 2}), + ] + else: + elist = [ + (0, 1, {}), + (0, 2, {}), + (1, 2, {}), + (4, 5, {}), + (6, 7, {"weight": 2}), + ] + assert sorted(G.edges.data()) == elist + assert G.graph == {} + + # no keywords -- order is edges, nodes + G = self.K3.copy() + G.update([(4, 5), (6, 7, {"weight": 2})], [3, (4, {"size": 2})]) + assert sorted(G.nodes.data()) == nlist + assert sorted(G.edges.data()) == elist + assert G.graph == {} + + # update using only a graph + G = self.Graph() + G.graph["foo"] = "bar" + G.add_node(2, data=4) + G.add_edge(0, 1, weight=0.5) + GG = G.copy() + H = self.Graph() + GG.update(H) + assert graphs_equal(G, GG) + H.update(G) + assert graphs_equal(H, G) + + # update nodes only + H = self.Graph() + H.update(nodes=[3, 4]) + assert H.nodes ^ {3, 4} == set() + assert H.size() == 0 + + # update edges only + H = self.Graph() + H.update(edges=[(3, 4)]) + assert 
sorted(H.edges.data()) == [(3, 4, {})] + assert H.size() == 1 + + # No inputs -> exception + with pytest.raises(nx.NetworkXError): + nx.Graph().update() + + +class TestEdgeSubgraph: + """Unit tests for the :meth:`Graph.edge_subgraph` method.""" + + def setup_method(self): + # Create a path graph on five nodes. + G = nx.path_graph(5) + # Add some node, edge, and graph attributes. + for i in range(5): + G.nodes[i]["name"] = f"node{i}" + G.edges[0, 1]["name"] = "edge01" + G.edges[3, 4]["name"] = "edge34" + G.graph["name"] = "graph" + # Get the subgraph induced by the first and last edges. + self.G = G + self.H = G.edge_subgraph([(0, 1), (3, 4)]) + + def test_correct_nodes(self): + """Tests that the subgraph has the correct nodes.""" + assert [0, 1, 3, 4] == sorted(self.H.nodes()) + + def test_correct_edges(self): + """Tests that the subgraph has the correct edges.""" + assert [(0, 1, "edge01"), (3, 4, "edge34")] == sorted(self.H.edges(data="name")) + + def test_add_node(self): + """Tests that adding a node to the original graph does not + affect the nodes of the subgraph. + + """ + self.G.add_node(5) + assert [0, 1, 3, 4] == sorted(self.H.nodes()) + + def test_remove_node(self): + """Tests that removing a node in the original graph does + affect the nodes of the subgraph. + + """ + self.G.remove_node(0) + assert [1, 3, 4] == sorted(self.H.nodes()) + + def test_node_attr_dict(self): + """Tests that the node attribute dictionary of the two graphs is + the same object. + + """ + for v in self.H: + assert self.G.nodes[v] == self.H.nodes[v] + # Making a change to G should make a change in H and vice versa. + self.G.nodes[0]["name"] = "foo" + assert self.G.nodes[0] == self.H.nodes[0] + self.H.nodes[1]["name"] = "bar" + assert self.G.nodes[1] == self.H.nodes[1] + + def test_edge_attr_dict(self): + """Tests that the edge attribute dictionary of the two graphs is + the same object. 
+ + """ + for u, v in self.H.edges(): + assert self.G.edges[u, v] == self.H.edges[u, v] + # Making a change to G should make a change in H and vice versa. + self.G.edges[0, 1]["name"] = "foo" + assert self.G.edges[0, 1]["name"] == self.H.edges[0, 1]["name"] + self.H.edges[3, 4]["name"] = "bar" + assert self.G.edges[3, 4]["name"] == self.H.edges[3, 4]["name"] + + def test_graph_attr_dict(self): + """Tests that the graph attribute dictionary of the two graphs + is the same object. + + """ + assert self.G.graph is self.H.graph diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_graph_historical.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_graph_historical.py new file mode 100644 index 0000000000000000000000000000000000000000..36aba7100e7758f6357d66470f45b0fcd0f10145 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_graph_historical.py @@ -0,0 +1,13 @@ +"""Original NetworkX graph tests""" + +import networkx +import networkx as nx + +from .historical_tests import HistoricalTests + + +class TestGraphHistorical(HistoricalTests): + @classmethod + def setup_class(cls): + HistoricalTests.setup_class() + cls.G = nx.Graph diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_graphviews.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_graphviews.py new file mode 100644 index 0000000000000000000000000000000000000000..591c760c9bdfa61984dd7b87e938b1c15c04be0e --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_graphviews.py @@ -0,0 +1,350 @@ +import pytest + +import networkx as nx +from networkx.utils import edges_equal, nodes_equal + +# Note: SubGraph views are not tested here. 
They have their own testing file + + +class TestReverseView: + def setup_method(self): + self.G = nx.path_graph(9, create_using=nx.DiGraph()) + self.rv = nx.reverse_view(self.G) + + def test_pickle(self): + import pickle + + rv = self.rv + prv = pickle.loads(pickle.dumps(rv, -1)) + assert rv._node == prv._node + assert rv._adj == prv._adj + assert rv.graph == prv.graph + + def test_contains(self): + assert (2, 3) in self.G.edges + assert (3, 2) not in self.G.edges + assert (2, 3) not in self.rv.edges + assert (3, 2) in self.rv.edges + + def test_iter(self): + expected = sorted(tuple(reversed(e)) for e in self.G.edges) + assert sorted(self.rv.edges) == expected + + def test_exceptions(self): + G = nx.Graph() + pytest.raises(nx.NetworkXNotImplemented, nx.reverse_view, G) + + def test_subclass(self): + class MyGraph(nx.DiGraph): + def my_method(self): + return "me" + + def to_directed_class(self): + return MyGraph() + + M = MyGraph() + M.add_edge(1, 2) + RM = nx.reverse_view(M) + print("RM class", RM.__class__) + RMC = RM.copy() + print("RMC class", RMC.__class__) + print(RMC.edges) + assert RMC.has_edge(2, 1) + assert RMC.my_method() == "me" + + +class TestMultiReverseView: + def setup_method(self): + self.G = nx.path_graph(9, create_using=nx.MultiDiGraph()) + self.G.add_edge(4, 5) + self.rv = nx.reverse_view(self.G) + + def test_pickle(self): + import pickle + + rv = self.rv + prv = pickle.loads(pickle.dumps(rv, -1)) + assert rv._node == prv._node + assert rv._adj == prv._adj + assert rv.graph == prv.graph + + def test_contains(self): + assert (2, 3, 0) in self.G.edges + assert (3, 2, 0) not in self.G.edges + assert (2, 3, 0) not in self.rv.edges + assert (3, 2, 0) in self.rv.edges + assert (5, 4, 1) in self.rv.edges + assert (4, 5, 1) not in self.rv.edges + + def test_iter(self): + expected = sorted((v, u, k) for u, v, k in self.G.edges) + assert sorted(self.rv.edges) == expected + + def test_exceptions(self): + MG = nx.MultiGraph(self.G) + 
pytest.raises(nx.NetworkXNotImplemented, nx.reverse_view, MG) + + +def test_generic_multitype(): + nxg = nx.graphviews + G = nx.DiGraph([(1, 2)]) + with pytest.raises(nx.NetworkXError): + nxg.generic_graph_view(G, create_using=nx.MultiGraph) + G = nx.MultiDiGraph([(1, 2)]) + with pytest.raises(nx.NetworkXError): + nxg.generic_graph_view(G, create_using=nx.DiGraph) + + +class TestToDirected: + def setup_method(self): + self.G = nx.path_graph(9) + self.dv = nx.to_directed(self.G) + self.MG = nx.path_graph(9, create_using=nx.MultiGraph()) + self.Mdv = nx.to_directed(self.MG) + + def test_directed(self): + assert not self.G.is_directed() + assert self.dv.is_directed() + + def test_already_directed(self): + dd = nx.to_directed(self.dv) + Mdd = nx.to_directed(self.Mdv) + assert edges_equal(dd.edges, self.dv.edges) + assert edges_equal(Mdd.edges, self.Mdv.edges) + + def test_pickle(self): + import pickle + + dv = self.dv + pdv = pickle.loads(pickle.dumps(dv, -1)) + assert dv._node == pdv._node + assert dv._succ == pdv._succ + assert dv._pred == pdv._pred + assert dv.graph == pdv.graph + + def test_contains(self): + assert (2, 3) in self.G.edges + assert (3, 2) in self.G.edges + assert (2, 3) in self.dv.edges + assert (3, 2) in self.dv.edges + + def test_iter(self): + revd = [tuple(reversed(e)) for e in self.G.edges] + expected = sorted(list(self.G.edges) + revd) + assert sorted(self.dv.edges) == expected + + +class TestToUndirected: + def setup_method(self): + self.DG = nx.path_graph(9, create_using=nx.DiGraph()) + self.uv = nx.to_undirected(self.DG) + self.MDG = nx.path_graph(9, create_using=nx.MultiDiGraph()) + self.Muv = nx.to_undirected(self.MDG) + + def test_directed(self): + assert self.DG.is_directed() + assert not self.uv.is_directed() + + def test_already_directed(self): + uu = nx.to_undirected(self.uv) + Muu = nx.to_undirected(self.Muv) + assert edges_equal(uu.edges, self.uv.edges) + assert edges_equal(Muu.edges, self.Muv.edges) + + def test_pickle(self): + 
import pickle + + uv = self.uv + puv = pickle.loads(pickle.dumps(uv, -1)) + assert uv._node == puv._node + assert uv._adj == puv._adj + assert uv.graph == puv.graph + assert hasattr(uv, "_graph") + + def test_contains(self): + assert (2, 3) in self.DG.edges + assert (3, 2) not in self.DG.edges + assert (2, 3) in self.uv.edges + assert (3, 2) in self.uv.edges + + def test_iter(self): + expected = sorted(self.DG.edges) + assert sorted(self.uv.edges) == expected + + +class TestChainsOfViews: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.DG = nx.path_graph(9, create_using=nx.DiGraph()) + cls.MG = nx.path_graph(9, create_using=nx.MultiGraph()) + cls.MDG = nx.path_graph(9, create_using=nx.MultiDiGraph()) + cls.Gv = nx.to_undirected(cls.DG) + cls.DGv = nx.to_directed(cls.G) + cls.MGv = nx.to_undirected(cls.MDG) + cls.MDGv = nx.to_directed(cls.MG) + cls.Rv = cls.DG.reverse() + cls.MRv = cls.MDG.reverse() + cls.graphs = [ + cls.G, + cls.DG, + cls.MG, + cls.MDG, + cls.Gv, + cls.DGv, + cls.MGv, + cls.MDGv, + cls.Rv, + cls.MRv, + ] + for G in cls.graphs: + G.edges, G.nodes, G.degree + + def test_pickle(self): + import pickle + + for G in self.graphs: + H = pickle.loads(pickle.dumps(G, -1)) + assert edges_equal(H.edges, G.edges) + assert nodes_equal(H.nodes, G.nodes) + + def test_subgraph_of_subgraph(self): + SGv = nx.subgraph(self.G, range(3, 7)) + SDGv = nx.subgraph(self.DG, range(3, 7)) + SMGv = nx.subgraph(self.MG, range(3, 7)) + SMDGv = nx.subgraph(self.MDG, range(3, 7)) + for G in self.graphs + [SGv, SDGv, SMGv, SMDGv]: + SG = nx.induced_subgraph(G, [4, 5, 6]) + assert list(SG) == [4, 5, 6] + SSG = SG.subgraph([6, 7]) + assert list(SSG) == [6] + # subgraph-subgraph chain is short-cut in base class method + assert SSG._graph is G + + def test_restricted_induced_subgraph_chains(self): + """Test subgraph chains that both restrict and show nodes/edges. 
+ + A restricted_view subgraph should allow induced subgraphs using + G.subgraph that automagically without a chain (meaning the result + is a subgraph view of the original graph not a subgraph-of-subgraph. + """ + hide_nodes = [3, 4, 5] + hide_edges = [(6, 7)] + RG = nx.restricted_view(self.G, hide_nodes, hide_edges) + nodes = [4, 5, 6, 7, 8] + SG = nx.induced_subgraph(RG, nodes) + SSG = RG.subgraph(nodes) + assert RG._graph is self.G + assert SSG._graph is self.G + assert SG._graph is RG + assert edges_equal(SG.edges, SSG.edges) + # should be same as morphing the graph + CG = self.G.copy() + CG.remove_nodes_from(hide_nodes) + CG.remove_edges_from(hide_edges) + assert edges_equal(CG.edges(nodes), SSG.edges) + CG.remove_nodes_from([0, 1, 2, 3]) + assert edges_equal(CG.edges, SSG.edges) + # switch order: subgraph first, then restricted view + SSSG = self.G.subgraph(nodes) + RSG = nx.restricted_view(SSSG, hide_nodes, hide_edges) + assert RSG._graph is not self.G + assert edges_equal(RSG.edges, CG.edges) + + def test_subgraph_copy(self): + for origG in self.graphs: + G = nx.Graph(origG) + SG = G.subgraph([4, 5, 6]) + H = SG.copy() + assert type(G) == type(H) + + def test_subgraph_todirected(self): + SG = nx.induced_subgraph(self.G, [4, 5, 6]) + SSG = SG.to_directed() + assert sorted(SSG) == [4, 5, 6] + assert sorted(SSG.edges) == [(4, 5), (5, 4), (5, 6), (6, 5)] + + def test_subgraph_toundirected(self): + SG = nx.induced_subgraph(self.G, [4, 5, 6]) + SSG = SG.to_undirected() + assert list(SSG) == [4, 5, 6] + assert sorted(SSG.edges) == [(4, 5), (5, 6)] + + def test_reverse_subgraph_toundirected(self): + G = self.DG.reverse(copy=False) + SG = G.subgraph([4, 5, 6]) + SSG = SG.to_undirected() + assert list(SSG) == [4, 5, 6] + assert sorted(SSG.edges) == [(4, 5), (5, 6)] + + def test_reverse_reverse_copy(self): + G = self.DG.reverse(copy=False) + H = G.reverse(copy=True) + assert H.nodes == self.DG.nodes + assert H.edges == self.DG.edges + G = self.MDG.reverse(copy=False) 
+ H = G.reverse(copy=True) + assert H.nodes == self.MDG.nodes + assert H.edges == self.MDG.edges + + def test_subgraph_edgesubgraph_toundirected(self): + G = self.G.copy() + SG = G.subgraph([4, 5, 6]) + SSG = SG.edge_subgraph([(4, 5), (5, 4)]) + USSG = SSG.to_undirected() + assert list(USSG) == [4, 5] + assert sorted(USSG.edges) == [(4, 5)] + + def test_copy_subgraph(self): + G = self.G.copy() + SG = G.subgraph([4, 5, 6]) + CSG = SG.copy(as_view=True) + DCSG = SG.copy(as_view=False) + assert hasattr(CSG, "_graph") # is a view + assert not hasattr(DCSG, "_graph") # not a view + + def test_copy_disubgraph(self): + G = self.DG.copy() + SG = G.subgraph([4, 5, 6]) + CSG = SG.copy(as_view=True) + DCSG = SG.copy(as_view=False) + assert hasattr(CSG, "_graph") # is a view + assert not hasattr(DCSG, "_graph") # not a view + + def test_copy_multidisubgraph(self): + G = self.MDG.copy() + SG = G.subgraph([4, 5, 6]) + CSG = SG.copy(as_view=True) + DCSG = SG.copy(as_view=False) + assert hasattr(CSG, "_graph") # is a view + assert not hasattr(DCSG, "_graph") # not a view + + def test_copy_multisubgraph(self): + G = self.MG.copy() + SG = G.subgraph([4, 5, 6]) + CSG = SG.copy(as_view=True) + DCSG = SG.copy(as_view=False) + assert hasattr(CSG, "_graph") # is a view + assert not hasattr(DCSG, "_graph") # not a view + + def test_copy_of_view(self): + G = nx.MultiGraph(self.MGv) + assert G.__class__.__name__ == "MultiGraph" + G = G.copy(as_view=True) + assert G.__class__.__name__ == "MultiGraph" + + def test_subclass(self): + class MyGraph(nx.DiGraph): + def my_method(self): + return "me" + + def to_directed_class(self): + return MyGraph() + + for origG in self.graphs: + G = MyGraph(origG) + SG = G.subgraph([4, 5, 6]) + H = SG.copy() + assert SG.my_method() == "me" + assert H.my_method() == "me" + assert 3 not in H or 3 in SG diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_reportviews.py 
b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_reportviews.py new file mode 100644 index 0000000000000000000000000000000000000000..789c829f4b05fa71eff384a05ad071bc7fdebd9f --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_reportviews.py @@ -0,0 +1,1435 @@ +import pickle +from copy import deepcopy + +import pytest + +import networkx as nx +from networkx.classes import reportviews as rv +from networkx.classes.reportviews import NodeDataView + + +# Nodes +class TestNodeView: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.nv = cls.G.nodes # NodeView(G) + + def test_pickle(self): + import pickle + + nv = self.nv + pnv = pickle.loads(pickle.dumps(nv, -1)) + assert nv == pnv + assert nv.__slots__ == pnv.__slots__ + + def test_str(self): + assert str(self.nv) == "[0, 1, 2, 3, 4, 5, 6, 7, 8]" + + def test_repr(self): + assert repr(self.nv) == "NodeView((0, 1, 2, 3, 4, 5, 6, 7, 8))" + + def test_contains(self): + G = self.G.copy() + nv = G.nodes + assert 7 in nv + assert 9 not in nv + G.remove_node(7) + G.add_node(9) + assert 7 not in nv + assert 9 in nv + + def test_getitem(self): + G = self.G.copy() + nv = G.nodes + G.nodes[3]["foo"] = "bar" + assert nv[7] == {} + assert nv[3] == {"foo": "bar"} + # slicing + with pytest.raises(nx.NetworkXError): + G.nodes[0:5] + + def test_iter(self): + nv = self.nv + for i, n in enumerate(nv): + assert i == n + inv = iter(nv) + assert next(inv) == 0 + assert iter(nv) != nv + assert iter(inv) == inv + inv2 = iter(nv) + next(inv2) + assert list(inv) == list(inv2) + # odd case where NodeView calls NodeDataView with data=False + nnv = nv(data=False) + for i, n in enumerate(nnv): + assert i == n + + def test_call(self): + nodes = self.nv + assert nodes is nodes() + assert nodes is not nodes(data=True) + assert nodes is not nodes(data="weight") + + +class TestNodeDataView: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.nv = 
NodeDataView(cls.G) + cls.ndv = cls.G.nodes.data(True) + cls.nwv = cls.G.nodes.data("foo") + + def test_viewtype(self): + nv = self.G.nodes + ndvfalse = nv.data(False) + assert nv is ndvfalse + assert nv is not self.ndv + + def test_pickle(self): + import pickle + + nv = self.nv + pnv = pickle.loads(pickle.dumps(nv, -1)) + assert nv == pnv + assert nv.__slots__ == pnv.__slots__ + + def test_str(self): + msg = str([(n, {}) for n in range(9)]) + assert str(self.ndv) == msg + + def test_repr(self): + expected = "NodeDataView((0, 1, 2, 3, 4, 5, 6, 7, 8))" + assert repr(self.nv) == expected + expected = ( + "NodeDataView({0: {}, 1: {}, 2: {}, 3: {}, " + + "4: {}, 5: {}, 6: {}, 7: {}, 8: {}})" + ) + assert repr(self.ndv) == expected + expected = ( + "NodeDataView({0: None, 1: None, 2: None, 3: None, 4: None, " + + "5: None, 6: None, 7: None, 8: None}, data='foo')" + ) + assert repr(self.nwv) == expected + + def test_contains(self): + G = self.G.copy() + nv = G.nodes.data() + nwv = G.nodes.data("foo") + G.nodes[3]["foo"] = "bar" + assert (7, {}) in nv + assert (3, {"foo": "bar"}) in nv + assert (3, "bar") in nwv + assert (7, None) in nwv + # default + nwv_def = G.nodes(data="foo", default="biz") + assert (7, "biz") in nwv_def + assert (3, "bar") in nwv_def + + def test_getitem(self): + G = self.G.copy() + nv = G.nodes + G.nodes[3]["foo"] = "bar" + assert nv[3] == {"foo": "bar"} + # default + nwv_def = G.nodes(data="foo", default="biz") + assert nwv_def[7], "biz" + assert nwv_def[3] == "bar" + # slicing + with pytest.raises(nx.NetworkXError): + G.nodes.data()[0:5] + + def test_iter(self): + G = self.G.copy() + nv = G.nodes.data() + ndv = G.nodes.data(True) + nwv = G.nodes.data("foo") + for i, (n, d) in enumerate(nv): + assert i == n + assert d == {} + inv = iter(nv) + assert next(inv) == (0, {}) + G.nodes[3]["foo"] = "bar" + # default + for n, d in nv: + if n == 3: + assert d == {"foo": "bar"} + else: + assert d == {} + # data=True + for n, d in ndv: + if n == 3: + assert 
d == {"foo": "bar"} + else: + assert d == {} + # data='foo' + for n, d in nwv: + if n == 3: + assert d == "bar" + else: + assert d is None + # data='foo', default=1 + for n, d in G.nodes.data("foo", default=1): + if n == 3: + assert d == "bar" + else: + assert d == 1 + + +def test_nodedataview_unhashable(): + G = nx.path_graph(9) + G.nodes[3]["foo"] = "bar" + nvs = [G.nodes.data()] + nvs.append(G.nodes.data(True)) + H = G.copy() + H.nodes[4]["foo"] = {1, 2, 3} + nvs.append(H.nodes.data(True)) + # raise unhashable + for nv in nvs: + pytest.raises(TypeError, set, nv) + pytest.raises(TypeError, eval, "nv | nv", locals()) + # no raise... hashable + Gn = G.nodes.data(False) + set(Gn) + Gn | Gn + Gn = G.nodes.data("foo") + set(Gn) + Gn | Gn + + +class TestNodeViewSetOps: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.G.nodes[3]["foo"] = "bar" + cls.nv = cls.G.nodes + + def n_its(self, nodes): + return set(nodes) + + def test_len(self): + G = self.G.copy() + nv = G.nodes + assert len(nv) == 9 + G.remove_node(7) + assert len(nv) == 8 + G.add_node(9) + assert len(nv) == 9 + + def test_and(self): + # print("G & H nodes:", gnv & hnv) + nv = self.nv + some_nodes = self.n_its(range(5, 12)) + assert nv & some_nodes == self.n_its(range(5, 9)) + assert some_nodes & nv == self.n_its(range(5, 9)) + + def test_or(self): + # print("G | H nodes:", gnv | hnv) + nv = self.nv + some_nodes = self.n_its(range(5, 12)) + assert nv | some_nodes == self.n_its(range(12)) + assert some_nodes | nv == self.n_its(range(12)) + + def test_xor(self): + # print("G ^ H nodes:", gnv ^ hnv) + nv = self.nv + some_nodes = self.n_its(range(5, 12)) + nodes = {0, 1, 2, 3, 4, 9, 10, 11} + assert nv ^ some_nodes == self.n_its(nodes) + assert some_nodes ^ nv == self.n_its(nodes) + + def test_sub(self): + # print("G - H nodes:", gnv - hnv) + nv = self.nv + some_nodes = self.n_its(range(5, 12)) + assert nv - some_nodes == self.n_its(range(5)) + assert some_nodes - nv == self.n_its(range(9, 
12)) + + +class TestNodeDataViewSetOps(TestNodeViewSetOps): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.G.nodes[3]["foo"] = "bar" + cls.nv = cls.G.nodes.data("foo") + + def n_its(self, nodes): + return {(node, "bar" if node == 3 else None) for node in nodes} + + +class TestNodeDataViewDefaultSetOps(TestNodeDataViewSetOps): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.G.nodes[3]["foo"] = "bar" + cls.nv = cls.G.nodes.data("foo", default=1) + + def n_its(self, nodes): + return {(node, "bar" if node == 3 else 1) for node in nodes} + + +# Edges Data View +class TestEdgeDataView: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.eview = nx.reportviews.EdgeView + + def test_pickle(self): + import pickle + + ev = self.eview(self.G)(data=True) + pev = pickle.loads(pickle.dumps(ev, -1)) + assert list(ev) == list(pev) + assert ev.__slots__ == pev.__slots__ + + def modify_edge(self, G, e, **kwds): + G._adj[e[0]][e[1]].update(kwds) + + def test_str(self): + ev = self.eview(self.G)(data=True) + rep = str([(n, n + 1, {}) for n in range(8)]) + assert str(ev) == rep + + def test_repr(self): + ev = self.eview(self.G)(data=True) + rep = ( + "EdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_iterdata(self): + G = self.G.copy() + evr = self.eview(G) + ev = evr(data=True) + ev_def = evr(data="foo", default=1) + + for u, v, d in ev: + pass + assert d == {} + + for u, v, wt in ev_def: + pass + assert wt == 1 + + self.modify_edge(G, (2, 3), foo="bar") + for e in ev: + assert len(e) == 3 + if set(e[:2]) == {2, 3}: + assert e[2] == {"foo": "bar"} + checked = True + else: + assert e[2] == {} + assert checked + + for e in ev_def: + assert len(e) == 3 + if set(e[:2]) == {2, 3}: + assert e[2] == "bar" + checked_wt = True + else: + assert e[2] == 1 + assert checked_wt + + def test_iter(self): + evr 
= self.eview(self.G) + ev = evr() + for u, v in ev: + pass + iev = iter(ev) + assert next(iev) == (0, 1) + assert iter(ev) != ev + assert iter(iev) == iev + + def test_contains(self): + evr = self.eview(self.G) + ev = evr() + if self.G.is_directed(): + assert (1, 2) in ev and (2, 1) not in ev + else: + assert (1, 2) in ev and (2, 1) in ev + assert (1, 4) not in ev + assert (1, 90) not in ev + assert (90, 1) not in ev + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + if self.G.is_directed(): + assert (0, 1) in ev + assert (1, 2) not in ev + assert (2, 3) in ev + else: + assert (0, 1) in ev + assert (1, 2) in ev + assert (2, 3) in ev + assert (3, 4) not in ev + assert (4, 5) not in ev + assert (5, 6) not in ev + assert (7, 8) not in ev + assert (8, 9) not in ev + + def test_len(self): + evr = self.eview(self.G) + ev = evr(data="foo") + assert len(ev) == 8 + assert len(evr(1)) == 2 + assert len(evr([1, 2, 3])) == 4 + + assert len(self.G.edges(1)) == 2 + assert len(self.G.edges()) == 8 + assert len(self.G.edges) == 8 + + H = self.G.copy() + H.add_edge(1, 1) + assert len(H.edges(1)) == 3 + assert len(H.edges()) == 9 + assert len(H.edges) == 9 + + +class TestOutEdgeDataView(TestEdgeDataView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.DiGraph()) + cls.eview = nx.reportviews.OutEdgeView + + def test_repr(self): + ev = self.eview(self.G)(data=True) + rep = ( + "OutEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_len(self): + evr = self.eview(self.G) + ev = evr(data="foo") + assert len(ev) == 8 + assert len(evr(1)) == 1 + assert len(evr([1, 2, 3])) == 3 + + assert len(self.G.edges(1)) == 1 + assert len(self.G.edges()) == 8 + assert len(self.G.edges) == 8 + + H = self.G.copy() + H.add_edge(1, 1) + assert len(H.edges(1)) == 2 + assert len(H.edges()) == 9 + assert 
len(H.edges) == 9 + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert (0, 1) in ev + assert (1, 2) not in ev + assert (2, 3) in ev + assert (3, 4) not in ev + assert (4, 5) not in ev + assert (5, 6) not in ev + assert (7, 8) not in ev + assert (8, 9) not in ev + + +class TestInEdgeDataView(TestOutEdgeDataView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.DiGraph()) + cls.eview = nx.reportviews.InEdgeView + + def test_repr(self): + ev = self.eview(self.G)(data=True) + rep = ( + "InEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert (0, 1) not in ev + assert (1, 2) in ev + assert (2, 3) not in ev + assert (3, 4) not in ev + assert (4, 5) not in ev + assert (5, 6) not in ev + assert (7, 8) not in ev + assert (8, 9) not in ev + + +class TestMultiEdgeDataView(TestEdgeDataView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.MultiGraph()) + cls.eview = nx.reportviews.MultiEdgeView + + def modify_edge(self, G, e, **kwds): + G._adj[e[0]][e[1]][0].update(kwds) + + def test_repr(self): + ev = self.eview(self.G)(data=True) + rep = ( + "MultiEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert (0, 1) in ev + assert (1, 2) in ev + assert (2, 3) in ev + assert (3, 4) not in ev + assert (4, 5) not in ev + assert (5, 6) not in ev + assert (7, 8) not in ev + assert (8, 9) not in ev + + +class TestOutMultiEdgeDataView(TestOutEdgeDataView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.MultiDiGraph()) + 
cls.eview = nx.reportviews.OutMultiEdgeView + + def modify_edge(self, G, e, **kwds): + G._adj[e[0]][e[1]][0].update(kwds) + + def test_repr(self): + ev = self.eview(self.G)(data=True) + rep = ( + "OutMultiEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert (0, 1) in ev + assert (1, 2) not in ev + assert (2, 3) in ev + assert (3, 4) not in ev + assert (4, 5) not in ev + assert (5, 6) not in ev + assert (7, 8) not in ev + assert (8, 9) not in ev + + +class TestInMultiEdgeDataView(TestOutMultiEdgeDataView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.MultiDiGraph()) + cls.eview = nx.reportviews.InMultiEdgeView + + def test_repr(self): + ev = self.eview(self.G)(data=True) + rep = ( + "InMultiEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert (0, 1) not in ev + assert (1, 2) in ev + assert (2, 3) not in ev + assert (3, 4) not in ev + assert (4, 5) not in ev + assert (5, 6) not in ev + assert (7, 8) not in ev + assert (8, 9) not in ev + + +# Edge Views +class TestEdgeView: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.eview = nx.reportviews.EdgeView + + def test_pickle(self): + import pickle + + ev = self.eview(self.G) + pev = pickle.loads(pickle.dumps(ev, -1)) + assert ev == pev + assert ev.__slots__ == pev.__slots__ + + def modify_edge(self, G, e, **kwds): + G._adj[e[0]][e[1]].update(kwds) + + def test_str(self): + ev = self.eview(self.G) + rep = str([(n, n + 1) for n in range(8)]) + assert str(ev) == rep + + def test_repr(self): + ev = self.eview(self.G) + rep = ( + "EdgeView([(0, 1), 
(1, 2), (2, 3), (3, 4), " + + "(4, 5), (5, 6), (6, 7), (7, 8)])" + ) + assert repr(ev) == rep + + def test_getitem(self): + G = self.G.copy() + ev = G.edges + G.edges[0, 1]["foo"] = "bar" + assert ev[0, 1] == {"foo": "bar"} + + # slicing + with pytest.raises(nx.NetworkXError, match=".*does not support slicing"): + G.edges[0:5] + + # Invalid edge + with pytest.raises(KeyError, match=r".*edge.*is not in the graph."): + G.edges[0, 9] + + def test_call(self): + ev = self.eview(self.G) + assert id(ev) == id(ev()) + assert id(ev) == id(ev(data=False)) + assert id(ev) != id(ev(data=True)) + assert id(ev) != id(ev(nbunch=1)) + + def test_data(self): + ev = self.eview(self.G) + assert id(ev) != id(ev.data()) + assert id(ev) == id(ev.data(data=False)) + assert id(ev) != id(ev.data(data=True)) + assert id(ev) != id(ev.data(nbunch=1)) + + def test_iter(self): + ev = self.eview(self.G) + for u, v in ev: + pass + iev = iter(ev) + assert next(iev) == (0, 1) + assert iter(ev) != ev + assert iter(iev) == iev + + def test_contains(self): + ev = self.eview(self.G) + edv = ev() + if self.G.is_directed(): + assert (1, 2) in ev and (2, 1) not in ev + assert (1, 2) in edv and (2, 1) not in edv + else: + assert (1, 2) in ev and (2, 1) in ev + assert (1, 2) in edv and (2, 1) in edv + assert (1, 4) not in ev + assert (1, 4) not in edv + # edge not in graph + assert (1, 90) not in ev + assert (90, 1) not in ev + assert (1, 90) not in edv + assert (90, 1) not in edv + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert (0, 1) in evn + assert (1, 2) in evn + assert (2, 3) in evn + assert (3, 4) not in evn + assert (4, 5) not in evn + assert (5, 6) not in evn + assert (7, 8) not in evn + assert (8, 9) not in evn + + def test_len(self): + ev = self.eview(self.G) + num_ed = 9 if self.G.is_multigraph() else 8 + assert len(ev) == num_ed + + H = self.G.copy() + H.add_edge(1, 1) + assert len(H.edges(1)) == 3 + H.is_multigraph() - H.is_directed() + 
assert len(H.edges()) == num_ed + 1 + assert len(H.edges) == num_ed + 1 + + def test_and(self): + # print("G & H edges:", gnv & hnv) + ev = self.eview(self.G) + some_edges = {(0, 1), (1, 0), (0, 2)} + if self.G.is_directed(): + assert some_edges & ev, {(0, 1)} + assert ev & some_edges, {(0, 1)} + else: + assert ev & some_edges == {(0, 1), (1, 0)} + assert some_edges & ev == {(0, 1), (1, 0)} + return + + def test_or(self): + # print("G | H edges:", gnv | hnv) + ev = self.eview(self.G) + some_edges = {(0, 1), (1, 0), (0, 2)} + result1 = {(n, n + 1) for n in range(8)} + result1.update(some_edges) + result2 = {(n + 1, n) for n in range(8)} + result2.update(some_edges) + assert (ev | some_edges) in (result1, result2) + assert (some_edges | ev) in (result1, result2) + + def test_xor(self): + # print("G ^ H edges:", gnv ^ hnv) + ev = self.eview(self.G) + some_edges = {(0, 1), (1, 0), (0, 2)} + if self.G.is_directed(): + result = {(n, n + 1) for n in range(1, 8)} + result.update({(1, 0), (0, 2)}) + assert ev ^ some_edges == result + else: + result = {(n, n + 1) for n in range(1, 8)} + result.update({(0, 2)}) + assert ev ^ some_edges == result + return + + def test_sub(self): + # print("G - H edges:", gnv - hnv) + ev = self.eview(self.G) + some_edges = {(0, 1), (1, 0), (0, 2)} + result = {(n, n + 1) for n in range(8)} + result.remove((0, 1)) + assert ev - some_edges, result + + +class TestOutEdgeView(TestEdgeView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, nx.DiGraph()) + cls.eview = nx.reportviews.OutEdgeView + + def test_repr(self): + ev = self.eview(self.G) + rep = ( + "OutEdgeView([(0, 1), (1, 2), (2, 3), (3, 4), " + + "(4, 5), (5, 6), (6, 7), (7, 8)])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert (0, 1) in evn + assert (1, 2) not in evn + assert (2, 3) in evn + assert (3, 4) not in evn + assert (4, 5) not in evn + assert (5, 6) not in evn + assert (7, 8) not in 
evn + assert (8, 9) not in evn + + +class TestInEdgeView(TestEdgeView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, nx.DiGraph()) + cls.eview = nx.reportviews.InEdgeView + + def test_repr(self): + ev = self.eview(self.G) + rep = ( + "InEdgeView([(0, 1), (1, 2), (2, 3), (3, 4), " + + "(4, 5), (5, 6), (6, 7), (7, 8)])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert (0, 1) not in evn + assert (1, 2) in evn + assert (2, 3) not in evn + assert (3, 4) not in evn + assert (4, 5) not in evn + assert (5, 6) not in evn + assert (7, 8) not in evn + assert (8, 9) not in evn + + +class TestMultiEdgeView(TestEdgeView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, nx.MultiGraph()) + cls.G.add_edge(1, 2, key=3, foo="bar") + cls.eview = nx.reportviews.MultiEdgeView + + def modify_edge(self, G, e, **kwds): + if len(e) == 2: + e = e + (0,) + G._adj[e[0]][e[1]][e[2]].update(kwds) + + def test_str(self): + ev = self.eview(self.G) + replist = [(n, n + 1, 0) for n in range(8)] + replist.insert(2, (1, 2, 3)) + rep = str(replist) + assert str(ev) == rep + + def test_getitem(self): + G = self.G.copy() + ev = G.edges + G.edges[0, 1, 0]["foo"] = "bar" + assert ev[0, 1, 0] == {"foo": "bar"} + + # slicing + with pytest.raises(nx.NetworkXError): + G.edges[0:5] + + def test_repr(self): + ev = self.eview(self.G) + rep = ( + "MultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0), " + + "(3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])" + ) + assert repr(ev) == rep + + def test_call(self): + ev = self.eview(self.G) + assert id(ev) == id(ev(keys=True)) + assert id(ev) == id(ev(data=False, keys=True)) + assert id(ev) != id(ev(keys=False)) + assert id(ev) != id(ev(data=True)) + assert id(ev) != id(ev(nbunch=1)) + + def test_data(self): + ev = self.eview(self.G) + assert id(ev) != id(ev.data()) + assert id(ev) == id(ev.data(data=False, keys=True)) + assert id(ev) != 
id(ev.data(keys=False)) + assert id(ev) != id(ev.data(data=True)) + assert id(ev) != id(ev.data(nbunch=1)) + + def test_iter(self): + ev = self.eview(self.G) + for u, v, k in ev: + pass + iev = iter(ev) + assert next(iev) == (0, 1, 0) + assert iter(ev) != ev + assert iter(iev) == iev + + def test_iterkeys(self): + G = self.G + evr = self.eview(G) + ev = evr(keys=True) + for u, v, k in ev: + pass + assert k == 0 + ev = evr(keys=True, data="foo", default=1) + for u, v, k, wt in ev: + pass + assert wt == 1 + + self.modify_edge(G, (2, 3, 0), foo="bar") + ev = evr(keys=True, data=True) + for e in ev: + assert len(e) == 4 + print("edge:", e) + if set(e[:2]) == {2, 3}: + print(self.G._adj[2][3]) + assert e[2] == 0 + assert e[3] == {"foo": "bar"} + checked = True + elif set(e[:3]) == {1, 2, 3}: + assert e[2] == 3 + assert e[3] == {"foo": "bar"} + checked_multi = True + else: + assert e[2] == 0 + assert e[3] == {} + assert checked + assert checked_multi + ev = evr(keys=True, data="foo", default=1) + for e in ev: + if set(e[:2]) == {1, 2} and e[2] == 3: + assert e[3] == "bar" + if set(e[:2]) == {1, 2} and e[2] == 0: + assert e[3] == 1 + if set(e[:2]) == {2, 3}: + assert e[2] == 0 + assert e[3] == "bar" + assert len(e) == 4 + checked_wt = True + assert checked_wt + ev = evr(keys=True) + for e in ev: + assert len(e) == 3 + elist = sorted([(i, i + 1, 0) for i in range(8)] + [(1, 2, 3)]) + assert sorted(ev) == elist + # test that the keyword arguments are passed correctly + ev = evr((1, 2), "foo", keys=True, default=1) + with pytest.raises(TypeError): + evr((1, 2), "foo", True, 1) + with pytest.raises(TypeError): + evr((1, 2), "foo", True, default=1) + for e in ev: + if set(e[:2]) == {1, 2}: + assert e[2] in {0, 3} + if e[2] == 3: + assert e[3] == "bar" + else: # e[2] == 0 + assert e[3] == 1 + if G.is_directed(): + assert len(list(ev)) == 3 + else: + assert len(list(ev)) == 4 + + def test_or(self): + # print("G | H edges:", gnv | hnv) + ev = self.eview(self.G) + some_edges = 
{(0, 1, 0), (1, 0, 0), (0, 2, 0)} + result = {(n, n + 1, 0) for n in range(8)} + result.update(some_edges) + result.update({(1, 2, 3)}) + assert ev | some_edges == result + assert some_edges | ev == result + + def test_sub(self): + # print("G - H edges:", gnv - hnv) + ev = self.eview(self.G) + some_edges = {(0, 1, 0), (1, 0, 0), (0, 2, 0)} + result = {(n, n + 1, 0) for n in range(8)} + result.remove((0, 1, 0)) + result.update({(1, 2, 3)}) + assert ev - some_edges, result + assert some_edges - ev, result + + def test_xor(self): + # print("G ^ H edges:", gnv ^ hnv) + ev = self.eview(self.G) + some_edges = {(0, 1, 0), (1, 0, 0), (0, 2, 0)} + if self.G.is_directed(): + result = {(n, n + 1, 0) for n in range(1, 8)} + result.update({(1, 0, 0), (0, 2, 0), (1, 2, 3)}) + assert ev ^ some_edges == result + assert some_edges ^ ev == result + else: + result = {(n, n + 1, 0) for n in range(1, 8)} + result.update({(0, 2, 0), (1, 2, 3)}) + assert ev ^ some_edges == result + assert some_edges ^ ev == result + + def test_and(self): + # print("G & H edges:", gnv & hnv) + ev = self.eview(self.G) + some_edges = {(0, 1, 0), (1, 0, 0), (0, 2, 0)} + if self.G.is_directed(): + assert ev & some_edges == {(0, 1, 0)} + assert some_edges & ev == {(0, 1, 0)} + else: + assert ev & some_edges == {(0, 1, 0), (1, 0, 0)} + assert some_edges & ev == {(0, 1, 0), (1, 0, 0)} + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert (0, 1) in evn + assert (1, 2) in evn + assert (2, 3) in evn + assert (3, 4) not in evn + assert (4, 5) not in evn + assert (5, 6) not in evn + assert (7, 8) not in evn + assert (8, 9) not in evn + + +class TestOutMultiEdgeView(TestMultiEdgeView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, nx.MultiDiGraph()) + cls.G.add_edge(1, 2, key=3, foo="bar") + cls.eview = nx.reportviews.OutMultiEdgeView + + def modify_edge(self, G, e, **kwds): + if len(e) == 2: + e = e + (0,) + G._adj[e[0]][e[1]][e[2]].update(kwds) + + 
def test_repr(self): + ev = self.eview(self.G) + rep = ( + "OutMultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0)," + + " (3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert (0, 1) in evn + assert (1, 2) not in evn + assert (2, 3) in evn + assert (3, 4) not in evn + assert (4, 5) not in evn + assert (5, 6) not in evn + assert (7, 8) not in evn + assert (8, 9) not in evn + + +class TestInMultiEdgeView(TestMultiEdgeView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, nx.MultiDiGraph()) + cls.G.add_edge(1, 2, key=3, foo="bar") + cls.eview = nx.reportviews.InMultiEdgeView + + def modify_edge(self, G, e, **kwds): + if len(e) == 2: + e = e + (0,) + G._adj[e[0]][e[1]][e[2]].update(kwds) + + def test_repr(self): + ev = self.eview(self.G) + rep = ( + "InMultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0), " + + "(3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert (0, 1) not in evn + assert (1, 2) in evn + assert (2, 3) not in evn + assert (3, 4) not in evn + assert (4, 5) not in evn + assert (5, 6) not in evn + assert (7, 8) not in evn + assert (8, 9) not in evn + + +# Degrees +class TestDegreeView: + GRAPH = nx.Graph + dview = nx.reportviews.DegreeView + + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(6, cls.GRAPH()) + cls.G.add_edge(1, 3, foo=2) + cls.G.add_edge(1, 3, foo=3) + + def test_pickle(self): + import pickle + + deg = self.G.degree + pdeg = pickle.loads(pickle.dumps(deg, -1)) + assert dict(deg) == dict(pdeg) + + def test_str(self): + dv = self.dview(self.G) + rep = str([(0, 1), (1, 3), (2, 2), (3, 3), (4, 2), (5, 1)]) + assert str(dv) == rep + dv = self.G.degree() + assert str(dv) == rep + + def test_repr(self): + dv = self.dview(self.G) + rep = 
"DegreeView({0: 1, 1: 3, 2: 2, 3: 3, 4: 2, 5: 1})" + assert repr(dv) == rep + + def test_iter(self): + dv = self.dview(self.G) + for n, d in dv: + pass + idv = iter(dv) + assert iter(dv) != dv + assert iter(idv) == idv + assert next(idv) == (0, dv[0]) + assert next(idv) == (1, dv[1]) + # weighted + dv = self.dview(self.G, weight="foo") + for n, d in dv: + pass + idv = iter(dv) + assert iter(dv) != dv + assert iter(idv) == idv + assert next(idv) == (0, dv[0]) + assert next(idv) == (1, dv[1]) + + def test_nbunch(self): + dv = self.dview(self.G) + dvn = dv(0) + assert dvn == 1 + dvn = dv([2, 3]) + assert sorted(dvn) == [(2, 2), (3, 3)] + + def test_getitem(self): + dv = self.dview(self.G) + assert dv[0] == 1 + assert dv[1] == 3 + assert dv[2] == 2 + assert dv[3] == 3 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 1 + assert dv[1] == 5 + assert dv[2] == 2 + assert dv[3] == 5 + + def test_weight(self): + dv = self.dview(self.G) + dvw = dv(0, weight="foo") + assert dvw == 1 + dvw = dv(1, weight="foo") + assert dvw == 5 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 2), (3, 5)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 1 + assert dvd[1] == 5 + assert dvd[2] == 2 + assert dvd[3] == 5 + + def test_len(self): + dv = self.dview(self.G) + assert len(dv) == 6 + + +class TestDiDegreeView(TestDegreeView): + GRAPH = nx.DiGraph + dview = nx.reportviews.DiDegreeView + + def test_repr(self): + dv = self.G.degree() + rep = "DiDegreeView({0: 1, 1: 3, 2: 2, 3: 3, 4: 2, 5: 1})" + assert repr(dv) == rep + + +class TestOutDegreeView(TestDegreeView): + GRAPH = nx.DiGraph + dview = nx.reportviews.OutDegreeView + + def test_str(self): + dv = self.dview(self.G) + rep = str([(0, 1), (1, 2), (2, 1), (3, 1), (4, 1), (5, 0)]) + assert str(dv) == rep + dv = self.G.out_degree() + assert str(dv) == rep + + def test_repr(self): + dv = self.G.out_degree() + rep = "OutDegreeView({0: 1, 1: 2, 2: 1, 3: 1, 4: 1, 5: 0})" + assert repr(dv) == rep + + def test_nbunch(self): + 
dv = self.dview(self.G) + dvn = dv(0) + assert dvn == 1 + dvn = dv([2, 3]) + assert sorted(dvn) == [(2, 1), (3, 1)] + + def test_getitem(self): + dv = self.dview(self.G) + assert dv[0] == 1 + assert dv[1] == 2 + assert dv[2] == 1 + assert dv[3] == 1 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 1 + assert dv[1] == 4 + assert dv[2] == 1 + assert dv[3] == 1 + + def test_weight(self): + dv = self.dview(self.G) + dvw = dv(0, weight="foo") + assert dvw == 1 + dvw = dv(1, weight="foo") + assert dvw == 4 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 1), (3, 1)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 1 + assert dvd[1] == 4 + assert dvd[2] == 1 + assert dvd[3] == 1 + + +class TestInDegreeView(TestDegreeView): + GRAPH = nx.DiGraph + dview = nx.reportviews.InDegreeView + + def test_str(self): + dv = self.dview(self.G) + rep = str([(0, 0), (1, 1), (2, 1), (3, 2), (4, 1), (5, 1)]) + assert str(dv) == rep + dv = self.G.in_degree() + assert str(dv) == rep + + def test_repr(self): + dv = self.G.in_degree() + rep = "InDegreeView({0: 0, 1: 1, 2: 1, 3: 2, 4: 1, 5: 1})" + assert repr(dv) == rep + + def test_nbunch(self): + dv = self.dview(self.G) + dvn = dv(0) + assert dvn == 0 + dvn = dv([2, 3]) + assert sorted(dvn) == [(2, 1), (3, 2)] + + def test_getitem(self): + dv = self.dview(self.G) + assert dv[0] == 0 + assert dv[1] == 1 + assert dv[2] == 1 + assert dv[3] == 2 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 0 + assert dv[1] == 1 + assert dv[2] == 1 + assert dv[3] == 4 + + def test_weight(self): + dv = self.dview(self.G) + dvw = dv(0, weight="foo") + assert dvw == 0 + dvw = dv(1, weight="foo") + assert dvw == 1 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 1), (3, 4)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 0 + assert dvd[1] == 1 + assert dvd[2] == 1 + assert dvd[3] == 4 + + +class TestMultiDegreeView(TestDegreeView): + GRAPH = nx.MultiGraph + dview = nx.reportviews.MultiDegreeView + + def 
test_str(self): + dv = self.dview(self.G) + rep = str([(0, 1), (1, 4), (2, 2), (3, 4), (4, 2), (5, 1)]) + assert str(dv) == rep + dv = self.G.degree() + assert str(dv) == rep + + def test_repr(self): + dv = self.G.degree() + rep = "MultiDegreeView({0: 1, 1: 4, 2: 2, 3: 4, 4: 2, 5: 1})" + assert repr(dv) == rep + + def test_nbunch(self): + dv = self.dview(self.G) + dvn = dv(0) + assert dvn == 1 + dvn = dv([2, 3]) + assert sorted(dvn) == [(2, 2), (3, 4)] + + def test_getitem(self): + dv = self.dview(self.G) + assert dv[0] == 1 + assert dv[1] == 4 + assert dv[2] == 2 + assert dv[3] == 4 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 1 + assert dv[1] == 7 + assert dv[2] == 2 + assert dv[3] == 7 + + def test_weight(self): + dv = self.dview(self.G) + dvw = dv(0, weight="foo") + assert dvw == 1 + dvw = dv(1, weight="foo") + assert dvw == 7 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 2), (3, 7)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 1 + assert dvd[1] == 7 + assert dvd[2] == 2 + assert dvd[3] == 7 + + +class TestDiMultiDegreeView(TestMultiDegreeView): + GRAPH = nx.MultiDiGraph + dview = nx.reportviews.DiMultiDegreeView + + def test_repr(self): + dv = self.G.degree() + rep = "DiMultiDegreeView({0: 1, 1: 4, 2: 2, 3: 4, 4: 2, 5: 1})" + assert repr(dv) == rep + + +class TestOutMultiDegreeView(TestDegreeView): + GRAPH = nx.MultiDiGraph + dview = nx.reportviews.OutMultiDegreeView + + def test_str(self): + dv = self.dview(self.G) + rep = str([(0, 1), (1, 3), (2, 1), (3, 1), (4, 1), (5, 0)]) + assert str(dv) == rep + dv = self.G.out_degree() + assert str(dv) == rep + + def test_repr(self): + dv = self.G.out_degree() + rep = "OutMultiDegreeView({0: 1, 1: 3, 2: 1, 3: 1, 4: 1, 5: 0})" + assert repr(dv) == rep + + def test_nbunch(self): + dv = self.dview(self.G) + dvn = dv(0) + assert dvn == 1 + dvn = dv([2, 3]) + assert sorted(dvn) == [(2, 1), (3, 1)] + + def test_getitem(self): + dv = self.dview(self.G) + assert dv[0] == 1 + assert dv[1] == 3 
+ assert dv[2] == 1 + assert dv[3] == 1 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 1 + assert dv[1] == 6 + assert dv[2] == 1 + assert dv[3] == 1 + + def test_weight(self): + dv = self.dview(self.G) + dvw = dv(0, weight="foo") + assert dvw == 1 + dvw = dv(1, weight="foo") + assert dvw == 6 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 1), (3, 1)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 1 + assert dvd[1] == 6 + assert dvd[2] == 1 + assert dvd[3] == 1 + + +class TestInMultiDegreeView(TestDegreeView): + GRAPH = nx.MultiDiGraph + dview = nx.reportviews.InMultiDegreeView + + def test_str(self): + dv = self.dview(self.G) + rep = str([(0, 0), (1, 1), (2, 1), (3, 3), (4, 1), (5, 1)]) + assert str(dv) == rep + dv = self.G.in_degree() + assert str(dv) == rep + + def test_repr(self): + dv = self.G.in_degree() + rep = "InMultiDegreeView({0: 0, 1: 1, 2: 1, 3: 3, 4: 1, 5: 1})" + assert repr(dv) == rep + + def test_nbunch(self): + dv = self.dview(self.G) + dvn = dv(0) + assert dvn == 0 + dvn = dv([2, 3]) + assert sorted(dvn) == [(2, 1), (3, 3)] + + def test_getitem(self): + dv = self.dview(self.G) + assert dv[0] == 0 + assert dv[1] == 1 + assert dv[2] == 1 + assert dv[3] == 3 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 0 + assert dv[1] == 1 + assert dv[2] == 1 + assert dv[3] == 6 + + def test_weight(self): + dv = self.dview(self.G) + dvw = dv(0, weight="foo") + assert dvw == 0 + dvw = dv(1, weight="foo") + assert dvw == 1 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 1), (3, 6)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 0 + assert dvd[1] == 1 + assert dvd[2] == 1 + assert dvd[3] == 6 + + +@pytest.mark.parametrize( + ("reportview", "err_msg_terms"), + ( + (rv.NodeView, "list(G.nodes"), + (rv.NodeDataView, "list(G.nodes.data"), + (rv.EdgeView, "list(G.edges"), + # Directed EdgeViews + (rv.InEdgeView, "list(G.in_edges"), + (rv.OutEdgeView, "list(G.edges"), + # Multi EdgeViews + (rv.MultiEdgeView, 
"list(G.edges"), + (rv.InMultiEdgeView, "list(G.in_edges"), + (rv.OutMultiEdgeView, "list(G.edges"), + ), +) +def test_slicing_reportviews(reportview, err_msg_terms): + G = nx.complete_graph(3) + view = reportview(G) + with pytest.raises(nx.NetworkXError) as exc: + view[0:2] + errmsg = str(exc.value) + assert type(view).__name__ in errmsg + assert err_msg_terms in errmsg + + +@pytest.mark.parametrize( + "graph", [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph] +) +def test_cache_dict_get_set_state(graph): + G = nx.path_graph(5, graph()) + G.nodes, G.edges, G.adj, G.degree + if G.is_directed(): + G.pred, G.succ, G.in_edges, G.out_edges, G.in_degree, G.out_degree + cached_dict = G.__dict__ + assert "nodes" in cached_dict + assert "edges" in cached_dict + assert "adj" in cached_dict + assert "degree" in cached_dict + if G.is_directed(): + assert "pred" in cached_dict + assert "succ" in cached_dict + assert "in_edges" in cached_dict + assert "out_edges" in cached_dict + assert "in_degree" in cached_dict + assert "out_degree" in cached_dict + + # Raises error if the cached properties and views do not work + pickle.loads(pickle.dumps(G, -1)) + deepcopy(G) + + +def test_edge_views_inherit_from_EdgeViewABC(): + all_edge_view_classes = (v for v in dir(nx.reportviews) if "Edge" in v) + for eview_class in all_edge_view_classes: + assert issubclass( + getattr(nx.reportviews, eview_class), nx.reportviews.EdgeViewABC + ) diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_special.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_special.py new file mode 100644 index 0000000000000000000000000000000000000000..1fa79605b484f57ed6cbd17762b21a23406ee006 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_special.py @@ -0,0 +1,131 @@ +import networkx as nx + +from .test_digraph import BaseDiGraphTester +from .test_digraph import TestDiGraph as _TestDiGraph +from .test_graph import 
BaseGraphTester +from .test_graph import TestGraph as _TestGraph +from .test_multidigraph import TestMultiDiGraph as _TestMultiDiGraph +from .test_multigraph import TestMultiGraph as _TestMultiGraph + + +def test_factories(): + class mydict1(dict): + pass + + class mydict2(dict): + pass + + class mydict3(dict): + pass + + class mydict4(dict): + pass + + class mydict5(dict): + pass + + for Graph in (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph): + # print("testing class: ", Graph.__name__) + class MyGraph(Graph): + node_dict_factory = mydict1 + adjlist_outer_dict_factory = mydict2 + adjlist_inner_dict_factory = mydict3 + edge_key_dict_factory = mydict4 + edge_attr_dict_factory = mydict5 + + G = MyGraph() + assert isinstance(G._node, mydict1) + assert isinstance(G._adj, mydict2) + G.add_node(1) + assert isinstance(G._adj[1], mydict3) + if G.is_directed(): + assert isinstance(G._pred, mydict2) + assert isinstance(G._succ, mydict2) + assert isinstance(G._pred[1], mydict3) + G.add_edge(1, 2) + if G.is_multigraph(): + assert isinstance(G._adj[1][2], mydict4) + assert isinstance(G._adj[1][2][0], mydict5) + else: + assert isinstance(G._adj[1][2], mydict5) + + +class TestSpecialGraph(_TestGraph): + def setup_method(self): + _TestGraph.setup_method(self) + self.Graph = nx.Graph + + +class TestThinGraph(BaseGraphTester): + def setup_method(self): + all_edge_dict = {"weight": 1} + + class MyGraph(nx.Graph): + def edge_attr_dict_factory(self): + return all_edge_dict + + self.Graph = MyGraph + # build dict-of-dict-of-dict K3 + ed1, ed2, ed3 = (all_edge_dict, all_edge_dict, all_edge_dict) + self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}} + self.k3edges = [(0, 1), (0, 2), (1, 2)] + self.k3nodes = [0, 1, 2] + self.K3 = self.Graph() + self.K3._adj = self.k3adj + self.K3._node = {} + self.K3._node[0] = {} + self.K3._node[1] = {} + self.K3._node[2] = {} + + +class TestSpecialDiGraph(_TestDiGraph): + def setup_method(self): + 
_TestDiGraph.setup_method(self) + self.Graph = nx.DiGraph + + +class TestThinDiGraph(BaseDiGraphTester): + def setup_method(self): + all_edge_dict = {"weight": 1} + + class MyGraph(nx.DiGraph): + def edge_attr_dict_factory(self): + return all_edge_dict + + self.Graph = MyGraph + # build dict-of-dict-of-dict K3 + ed1, ed2, ed3 = (all_edge_dict, all_edge_dict, all_edge_dict) + ed4, ed5, ed6 = (all_edge_dict, all_edge_dict, all_edge_dict) + self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed3, 2: ed4}, 2: {0: ed5, 1: ed6}} + self.k3edges = [(0, 1), (0, 2), (1, 2)] + self.k3nodes = [0, 1, 2] + self.K3 = self.Graph() + self.K3._succ = self.k3adj + # K3._adj is synced with K3._succ + self.K3._pred = {0: {1: ed3, 2: ed5}, 1: {0: ed1, 2: ed6}, 2: {0: ed2, 1: ed4}} + self.K3._node = {} + self.K3._node[0] = {} + self.K3._node[1] = {} + self.K3._node[2] = {} + + ed1, ed2 = (all_edge_dict, all_edge_dict) + self.P3 = self.Graph() + self.P3._succ = {0: {1: ed1}, 1: {2: ed2}, 2: {}} + # P3._adj is synced with P3._succ + self.P3._pred = {0: {}, 1: {0: ed1}, 2: {1: ed2}} + self.P3._node = {} + self.P3._node[0] = {} + self.P3._node[1] = {} + self.P3._node[2] = {} + + +class TestSpecialMultiGraph(_TestMultiGraph): + def setup_method(self): + _TestMultiGraph.setup_method(self) + self.Graph = nx.MultiGraph + + +class TestSpecialMultiDiGraph(_TestMultiDiGraph): + def setup_method(self): + _TestMultiDiGraph.setup_method(self) + self.Graph = nx.MultiDiGraph diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_subgraphviews.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_subgraphviews.py new file mode 100644 index 0000000000000000000000000000000000000000..73e0fdd2d52bcb7623dbd4e4f502e8bed0a4e3d3 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/classes/tests/test_subgraphviews.py @@ -0,0 +1,362 @@ +import pytest + +import networkx as nx +from networkx.utils import edges_equal + + +class TestSubGraphView: + gview = 
staticmethod(nx.subgraph_view) + graph = nx.Graph + hide_edges_filter = staticmethod(nx.filters.hide_edges) + show_edges_filter = staticmethod(nx.filters.show_edges) + + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=cls.graph()) + cls.hide_edges_w_hide_nodes = {(3, 4), (4, 5), (5, 6)} + + def test_hidden_nodes(self): + hide_nodes = [4, 5, 111] + nodes_gone = nx.filters.hide_nodes(hide_nodes) + gview = self.gview + G = gview(self.G, filter_node=nodes_gone) + assert self.G.nodes - G.nodes == {4, 5} + assert self.G.edges - G.edges == self.hide_edges_w_hide_nodes + if G.is_directed(): + assert list(G[3]) == [] + assert list(G[2]) == [3] + else: + assert list(G[3]) == [2] + assert set(G[2]) == {1, 3} + pytest.raises(KeyError, G.__getitem__, 4) + pytest.raises(KeyError, G.__getitem__, 112) + pytest.raises(KeyError, G.__getitem__, 111) + assert G.degree(3) == (3 if G.is_multigraph() else 1) + assert G.size() == (7 if G.is_multigraph() else 5) + + def test_hidden_edges(self): + hide_edges = [(2, 3), (8, 7), (222, 223)] + edges_gone = self.hide_edges_filter(hide_edges) + gview = self.gview + G = gview(self.G, filter_edge=edges_gone) + assert self.G.nodes == G.nodes + if G.is_directed(): + assert self.G.edges - G.edges == {(2, 3)} + assert list(G[2]) == [] + assert list(G.pred[3]) == [] + assert list(G.pred[2]) == [1] + assert G.size() == 7 + else: + assert self.G.edges - G.edges == {(2, 3), (7, 8)} + assert list(G[2]) == [1] + assert G.size() == 6 + assert list(G[3]) == [4] + pytest.raises(KeyError, G.__getitem__, 221) + pytest.raises(KeyError, G.__getitem__, 222) + assert G.degree(3) == 1 + + def test_shown_node(self): + induced_subgraph = nx.filters.show_nodes([2, 3, 111]) + gview = self.gview + G = gview(self.G, filter_node=induced_subgraph) + assert set(G.nodes) == {2, 3} + if G.is_directed(): + assert list(G[3]) == [] + else: + assert list(G[3]) == [2] + assert list(G[2]) == [3] + pytest.raises(KeyError, G.__getitem__, 4) + 
pytest.raises(KeyError, G.__getitem__, 112) + pytest.raises(KeyError, G.__getitem__, 111) + assert G.degree(3) == (3 if G.is_multigraph() else 1) + assert G.size() == (3 if G.is_multigraph() else 1) + + def test_shown_edges(self): + show_edges = [(2, 3), (8, 7), (222, 223)] + edge_subgraph = self.show_edges_filter(show_edges) + G = self.gview(self.G, filter_edge=edge_subgraph) + assert self.G.nodes == G.nodes + if G.is_directed(): + assert G.edges == {(2, 3)} + assert list(G[3]) == [] + assert list(G[2]) == [3] + assert list(G.pred[3]) == [2] + assert list(G.pred[2]) == [] + assert G.size() == 1 + else: + assert G.edges == {(2, 3), (7, 8)} + assert list(G[3]) == [2] + assert list(G[2]) == [3] + assert G.size() == 2 + pytest.raises(KeyError, G.__getitem__, 221) + pytest.raises(KeyError, G.__getitem__, 222) + assert G.degree(3) == 1 + + +class TestSubDiGraphView(TestSubGraphView): + gview = staticmethod(nx.subgraph_view) + graph = nx.DiGraph + hide_edges_filter = staticmethod(nx.filters.hide_diedges) + show_edges_filter = staticmethod(nx.filters.show_diedges) + hide_edges = [(2, 3), (8, 7), (222, 223)] + excluded = {(2, 3), (3, 4), (4, 5), (5, 6)} + + def test_inoutedges(self): + edges_gone = self.hide_edges_filter(self.hide_edges) + hide_nodes = [4, 5, 111] + nodes_gone = nx.filters.hide_nodes(hide_nodes) + G = self.gview(self.G, filter_node=nodes_gone, filter_edge=edges_gone) + + assert self.G.in_edges - G.in_edges == self.excluded + assert self.G.out_edges - G.out_edges == self.excluded + + def test_pred(self): + edges_gone = self.hide_edges_filter(self.hide_edges) + hide_nodes = [4, 5, 111] + nodes_gone = nx.filters.hide_nodes(hide_nodes) + G = self.gview(self.G, filter_node=nodes_gone, filter_edge=edges_gone) + + assert list(G.pred[2]) == [1] + assert list(G.pred[6]) == [] + + def test_inout_degree(self): + edges_gone = self.hide_edges_filter(self.hide_edges) + hide_nodes = [4, 5, 111] + nodes_gone = nx.filters.hide_nodes(hide_nodes) + G = self.gview(self.G, 
filter_node=nodes_gone, filter_edge=edges_gone) + + assert G.degree(2) == 1 + assert G.out_degree(2) == 0 + assert G.in_degree(2) == 1 + assert G.size() == 4 + + +# multigraph +class TestMultiGraphView(TestSubGraphView): + gview = staticmethod(nx.subgraph_view) + graph = nx.MultiGraph + hide_edges_filter = staticmethod(nx.filters.hide_multiedges) + show_edges_filter = staticmethod(nx.filters.show_multiedges) + + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=cls.graph()) + multiedges = {(2, 3, 4), (2, 3, 5)} + cls.G.add_edges_from(multiedges) + cls.hide_edges_w_hide_nodes = {(3, 4, 0), (4, 5, 0), (5, 6, 0)} + + def test_hidden_edges(self): + hide_edges = [(2, 3, 4), (2, 3, 3), (8, 7, 0), (222, 223, 0)] + edges_gone = self.hide_edges_filter(hide_edges) + G = self.gview(self.G, filter_edge=edges_gone) + assert self.G.nodes == G.nodes + if G.is_directed(): + assert self.G.edges - G.edges == {(2, 3, 4)} + assert list(G[3]) == [4] + assert list(G[2]) == [3] + assert list(G.pred[3]) == [2] # only one 2 but two edges + assert list(G.pred[2]) == [1] + assert G.size() == 9 + else: + assert self.G.edges - G.edges == {(2, 3, 4), (7, 8, 0)} + assert list(G[3]) == [2, 4] + assert list(G[2]) == [1, 3] + assert G.size() == 8 + assert G.degree(3) == 3 + pytest.raises(KeyError, G.__getitem__, 221) + pytest.raises(KeyError, G.__getitem__, 222) + + def test_shown_edges(self): + show_edges = [(2, 3, 4), (2, 3, 3), (8, 7, 0), (222, 223, 0)] + edge_subgraph = self.show_edges_filter(show_edges) + G = self.gview(self.G, filter_edge=edge_subgraph) + assert self.G.nodes == G.nodes + if G.is_directed(): + assert G.edges == {(2, 3, 4)} + assert list(G[3]) == [] + assert list(G.pred[3]) == [2] + assert list(G.pred[2]) == [] + assert G.size() == 1 + else: + assert G.edges == {(2, 3, 4), (7, 8, 0)} + assert G.size() == 2 + assert list(G[3]) == [2] + assert G.degree(3) == 1 + assert list(G[2]) == [3] + pytest.raises(KeyError, G.__getitem__, 221) + 
pytest.raises(KeyError, G.__getitem__, 222) + + +# multidigraph +class TestMultiDiGraphView(TestMultiGraphView, TestSubDiGraphView): + gview = staticmethod(nx.subgraph_view) + graph = nx.MultiDiGraph + hide_edges_filter = staticmethod(nx.filters.hide_multidiedges) + show_edges_filter = staticmethod(nx.filters.show_multidiedges) + hide_edges = [(2, 3, 0), (8, 7, 0), (222, 223, 0)] + excluded = {(2, 3, 0), (3, 4, 0), (4, 5, 0), (5, 6, 0)} + + def test_inout_degree(self): + edges_gone = self.hide_edges_filter(self.hide_edges) + hide_nodes = [4, 5, 111] + nodes_gone = nx.filters.hide_nodes(hide_nodes) + G = self.gview(self.G, filter_node=nodes_gone, filter_edge=edges_gone) + + assert G.degree(2) == 3 + assert G.out_degree(2) == 2 + assert G.in_degree(2) == 1 + assert G.size() == 6 + + +# induced_subgraph +class TestInducedSubGraph: + @classmethod + def setup_class(cls): + cls.K3 = G = nx.complete_graph(3) + G.graph["foo"] = [] + G.nodes[0]["foo"] = [] + G.remove_edge(1, 2) + ll = [] + G.add_edge(1, 2, foo=ll) + G.add_edge(2, 1, foo=ll) + + def test_full_graph(self): + G = self.K3 + H = nx.induced_subgraph(G, [0, 1, 2, 5]) + assert H.name == G.name + self.graphs_equal(H, G) + self.same_attrdict(H, G) + + def test_partial_subgraph(self): + G = self.K3 + H = nx.induced_subgraph(G, 0) + assert dict(H.adj) == {0: {}} + assert dict(G.adj) != {0: {}} + + H = nx.induced_subgraph(G, [0, 1]) + assert dict(H.adj) == {0: {1: {}}, 1: {0: {}}} + + def same_attrdict(self, H, G): + old_foo = H[1][2]["foo"] + H.edges[1, 2]["foo"] = "baz" + assert G.edges == H.edges + H.edges[1, 2]["foo"] = old_foo + assert G.edges == H.edges + old_foo = H.nodes[0]["foo"] + H.nodes[0]["foo"] = "baz" + assert G.nodes == H.nodes + H.nodes[0]["foo"] = old_foo + assert G.nodes == H.nodes + + def graphs_equal(self, H, G): + assert G._adj == H._adj + assert G._node == H._node + assert G.graph == H.graph + assert G.name == H.name + if not G.is_directed() and not H.is_directed(): + assert H._adj[1][2] is 
H._adj[2][1] + assert G._adj[1][2] is G._adj[2][1] + else: # at least one is directed + if not G.is_directed(): + G._pred = G._adj + G._succ = G._adj + if not H.is_directed(): + H._pred = H._adj + H._succ = H._adj + assert G._pred == H._pred + assert G._succ == H._succ + assert H._succ[1][2] is H._pred[2][1] + assert G._succ[1][2] is G._pred[2][1] + + +# edge_subgraph +class TestEdgeSubGraph: + @classmethod + def setup_class(cls): + # Create a path graph on five nodes. + cls.G = G = nx.path_graph(5) + # Add some node, edge, and graph attributes. + for i in range(5): + G.nodes[i]["name"] = f"node{i}" + G.edges[0, 1]["name"] = "edge01" + G.edges[3, 4]["name"] = "edge34" + G.graph["name"] = "graph" + # Get the subgraph induced by the first and last edges. + cls.H = nx.edge_subgraph(G, [(0, 1), (3, 4)]) + + def test_correct_nodes(self): + """Tests that the subgraph has the correct nodes.""" + assert [(0, "node0"), (1, "node1"), (3, "node3"), (4, "node4")] == sorted( + self.H.nodes.data("name") + ) + + def test_correct_edges(self): + """Tests that the subgraph has the correct edges.""" + assert edges_equal( + [(0, 1, "edge01"), (3, 4, "edge34")], self.H.edges.data("name") + ) + + def test_add_node(self): + """Tests that adding a node to the original graph does not + affect the nodes of the subgraph. + + """ + self.G.add_node(5) + assert [0, 1, 3, 4] == sorted(self.H.nodes) + self.G.remove_node(5) + + def test_remove_node(self): + """Tests that removing a node in the original graph + removes the nodes of the subgraph. + + """ + self.G.remove_node(0) + assert [1, 3, 4] == sorted(self.H.nodes) + self.G.add_node(0, name="node0") + self.G.add_edge(0, 1, name="edge01") + + def test_node_attr_dict(self): + """Tests that the node attribute dictionary of the two graphs is + the same object. + + """ + for v in self.H: + assert self.G.nodes[v] == self.H.nodes[v] + # Making a change to G should make a change in H and vice versa. 
+ self.G.nodes[0]["name"] = "foo" + assert self.G.nodes[0] == self.H.nodes[0] + self.H.nodes[1]["name"] = "bar" + assert self.G.nodes[1] == self.H.nodes[1] + # Revert the change, so tests pass with pytest-randomly + self.G.nodes[0]["name"] = "node0" + self.H.nodes[1]["name"] = "node1" + + def test_edge_attr_dict(self): + """Tests that the edge attribute dictionary of the two graphs is + the same object. + + """ + for u, v in self.H.edges(): + assert self.G.edges[u, v] == self.H.edges[u, v] + # Making a change to G should make a change in H and vice versa. + self.G.edges[0, 1]["name"] = "foo" + assert self.G.edges[0, 1]["name"] == self.H.edges[0, 1]["name"] + self.H.edges[3, 4]["name"] = "bar" + assert self.G.edges[3, 4]["name"] == self.H.edges[3, 4]["name"] + # Revert the change, so tests pass with pytest-randomly + self.G.edges[0, 1]["name"] = "edge01" + self.H.edges[3, 4]["name"] = "edge34" + + def test_graph_attr_dict(self): + """Tests that the graph attribute dictionary of the two graphs + is the same object. 
+ + """ + assert self.G.graph is self.H.graph + + def test_readonly(self): + """Tests that the subgraph cannot change the graph structure""" + pytest.raises(nx.NetworkXError, self.H.add_node, 5) + pytest.raises(nx.NetworkXError, self.H.remove_node, 0) + pytest.raises(nx.NetworkXError, self.H.add_edge, 5, 6) + pytest.raises(nx.NetworkXError, self.H.remove_edge, 0, 1) diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_geometric.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_geometric.py new file mode 100644 index 0000000000000000000000000000000000000000..f1c68bead51b75e7a39484164cc484cbd4e5def8 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_geometric.py @@ -0,0 +1,488 @@ +import math +import random +from itertools import combinations + +import pytest + +import networkx as nx + + +def l1dist(x, y): + return sum(abs(a - b) for a, b in zip(x, y)) + + +class TestRandomGeometricGraph: + """Unit tests for :func:`~networkx.random_geometric_graph`""" + + def test_number_of_nodes(self): + G = nx.random_geometric_graph(50, 0.25, seed=42) + assert len(G) == 50 + G = nx.random_geometric_graph(range(50), 0.25, seed=42) + assert len(G) == 50 + + def test_distances(self): + """Tests that pairs of vertices adjacent if and only if they are + within the prescribed radius. + """ + # Use the Euclidean metric, the default according to the + # documentation. + G = nx.random_geometric_graph(50, 0.25) + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + # Nonadjacent vertices must be at greater distance. + else: + assert not math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_p(self): + """Tests for providing an alternate distance metric to the generator.""" + # Use the L1 metric. 
+ G = nx.random_geometric_graph(50, 0.25, p=1) + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert l1dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + # Nonadjacent vertices must be at greater distance. + else: + assert not l1dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_node_names(self): + """Tests using values other than sequential numbers as node IDs.""" + import string + + nodes = list(string.ascii_lowercase) + G = nx.random_geometric_graph(nodes, 0.25) + assert len(G) == len(nodes) + + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + # Nonadjacent vertices must be at greater distance. + else: + assert not math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_pos_name(self): + G = nx.random_geometric_graph(50, 0.25, seed=42, pos_name="coords") + assert all(len(d["coords"]) == 2 for n, d in G.nodes.items()) + + +class TestSoftRandomGeometricGraph: + """Unit tests for :func:`~networkx.soft_random_geometric_graph`""" + + def test_number_of_nodes(self): + G = nx.soft_random_geometric_graph(50, 0.25, seed=42) + assert len(G) == 50 + G = nx.soft_random_geometric_graph(range(50), 0.25, seed=42) + assert len(G) == 50 + + def test_distances(self): + """Tests that pairs of vertices adjacent if and only if they are + within the prescribed radius. + """ + # Use the Euclidean metric, the default according to the + # documentation. + G = nx.soft_random_geometric_graph(50, 0.25) + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_p(self): + """Tests for providing an alternate distance metric to the generator.""" + + # Use the L1 metric. 
+ def dist(x, y): + return sum(abs(a - b) for a, b in zip(x, y)) + + G = nx.soft_random_geometric_graph(50, 0.25, p=1) + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_node_names(self): + """Tests using values other than sequential numbers as node IDs.""" + import string + + nodes = list(string.ascii_lowercase) + G = nx.soft_random_geometric_graph(nodes, 0.25) + assert len(G) == len(nodes) + + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_p_dist_default(self): + """Tests default p_dict = 0.5 returns graph with edge count <= RGG with + same n, radius, dim and positions + """ + nodes = 50 + dim = 2 + pos = {v: [random.random() for i in range(dim)] for v in range(nodes)} + RGG = nx.random_geometric_graph(50, 0.25, pos=pos) + SRGG = nx.soft_random_geometric_graph(50, 0.25, pos=pos) + assert len(SRGG.edges()) <= len(RGG.edges()) + + def test_p_dist_zero(self): + """Tests if p_dict = 0 returns disconnected graph with 0 edges""" + + def p_dist(dist): + return 0 + + G = nx.soft_random_geometric_graph(50, 0.25, p_dist=p_dist) + assert len(G.edges) == 0 + + def test_pos_name(self): + G = nx.soft_random_geometric_graph(50, 0.25, seed=42, pos_name="coords") + assert all(len(d["coords"]) == 2 for n, d in G.nodes.items()) + + +def join(G, u, v, theta, alpha, metric): + """Returns ``True`` if and only if the nodes whose attributes are + ``du`` and ``dv`` should be joined, according to the threshold + condition for geographical threshold graphs. + + ``G`` is an undirected NetworkX graph, and ``u`` and ``v`` are nodes + in that graph. The nodes must have node attributes ``'pos'`` and + ``'weight'``. + + ``metric`` is a distance metric. 
+ """ + du, dv = G.nodes[u], G.nodes[v] + u_pos, v_pos = du["pos"], dv["pos"] + u_weight, v_weight = du["weight"], dv["weight"] + return (u_weight + v_weight) * metric(u_pos, v_pos) ** alpha >= theta + + +class TestGeographicalThresholdGraph: + """Unit tests for :func:`~networkx.geographical_threshold_graph`""" + + def test_number_of_nodes(self): + G = nx.geographical_threshold_graph(50, 100, seed=42) + assert len(G) == 50 + G = nx.geographical_threshold_graph(range(50), 100, seed=42) + assert len(G) == 50 + + def test_distances(self): + """Tests that pairs of vertices adjacent if and only if their + distances meet the given threshold. + """ + # Use the Euclidean metric and alpha = -2 + # the default according to the documentation. + G = nx.geographical_threshold_graph(50, 10) + for u, v in combinations(G, 2): + # Adjacent vertices must exceed the threshold. + if v in G[u]: + assert join(G, u, v, 10, -2, math.dist) + # Nonadjacent vertices must not exceed the threshold. + else: + assert not join(G, u, v, 10, -2, math.dist) + + def test_metric(self): + """Tests for providing an alternate distance metric to the generator.""" + # Use the L1 metric. + G = nx.geographical_threshold_graph(50, 10, metric=l1dist) + for u, v in combinations(G, 2): + # Adjacent vertices must exceed the threshold. + if v in G[u]: + assert join(G, u, v, 10, -2, l1dist) + # Nonadjacent vertices must not exceed the threshold. 
+ else: + assert not join(G, u, v, 10, -2, l1dist) + + def test_p_dist_zero(self): + """Tests if p_dict = 0 returns disconnected graph with 0 edges""" + + def p_dist(dist): + return 0 + + G = nx.geographical_threshold_graph(50, 1, p_dist=p_dist) + assert len(G.edges) == 0 + + def test_pos_weight_name(self): + gtg = nx.geographical_threshold_graph + G = gtg(50, 100, seed=42, pos_name="coords", weight_name="wt") + assert all(len(d["coords"]) == 2 for n, d in G.nodes.items()) + assert all(d["wt"] > 0 for n, d in G.nodes.items()) + + +class TestWaxmanGraph: + """Unit tests for the :func:`~networkx.waxman_graph` function.""" + + def test_number_of_nodes_1(self): + G = nx.waxman_graph(50, 0.5, 0.1, seed=42) + assert len(G) == 50 + G = nx.waxman_graph(range(50), 0.5, 0.1, seed=42) + assert len(G) == 50 + + def test_number_of_nodes_2(self): + G = nx.waxman_graph(50, 0.5, 0.1, L=1) + assert len(G) == 50 + G = nx.waxman_graph(range(50), 0.5, 0.1, L=1) + assert len(G) == 50 + + def test_metric(self): + """Tests for providing an alternate distance metric to the generator.""" + # Use the L1 metric. 
+ G = nx.waxman_graph(50, 0.5, 0.1, metric=l1dist) + assert len(G) == 50 + + def test_pos_name(self): + G = nx.waxman_graph(50, 0.5, 0.1, seed=42, pos_name="coords") + assert all(len(d["coords"]) == 2 for n, d in G.nodes.items()) + + +class TestNavigableSmallWorldGraph: + def test_navigable_small_world(self): + G = nx.navigable_small_world_graph(5, p=1, q=0, seed=42) + gg = nx.grid_2d_graph(5, 5).to_directed() + assert nx.is_isomorphic(G, gg) + + G = nx.navigable_small_world_graph(5, p=1, q=0, dim=3) + gg = nx.grid_graph([5, 5, 5]).to_directed() + assert nx.is_isomorphic(G, gg) + + G = nx.navigable_small_world_graph(5, p=1, q=0, dim=1) + gg = nx.grid_graph([5]).to_directed() + assert nx.is_isomorphic(G, gg) + + def test_invalid_diameter_value(self): + with pytest.raises(nx.NetworkXException, match=".*p must be >= 1"): + nx.navigable_small_world_graph(5, p=0, q=0, dim=1) + + def test_invalid_long_range_connections_value(self): + with pytest.raises(nx.NetworkXException, match=".*q must be >= 0"): + nx.navigable_small_world_graph(5, p=1, q=-1, dim=1) + + def test_invalid_exponent_for_decaying_probability_value(self): + with pytest.raises(nx.NetworkXException, match=".*r must be >= 0"): + nx.navigable_small_world_graph(5, p=1, q=0, r=-1, dim=1) + + def test_r_between_0_and_1(self): + """Smoke test for radius in range [0, 1]""" + # q=0 means no long-range connections + G = nx.navigable_small_world_graph(3, p=1, q=0, r=0.5, dim=2, seed=42) + expected = nx.grid_2d_graph(3, 3, create_using=nx.DiGraph) + assert nx.utils.graphs_equal(G, expected) + + @pytest.mark.parametrize("seed", range(2478, 2578, 10)) + def test_r_general_scaling(self, seed): + """The probability of adding a long-range edge scales with `1 / dist**r`, + so a navigable_small_world graph created with r < 1 should generally + result in more edges than a navigable_small_world graph with r >= 1 + (for 0 < q << n). + + N.B. 
this is probabilistic, so this test may not hold for all seeds.""" + G1 = nx.navigable_small_world_graph(7, q=3, r=0.5, seed=seed) + G2 = nx.navigable_small_world_graph(7, q=3, r=1, seed=seed) + G3 = nx.navigable_small_world_graph(7, q=3, r=2, seed=seed) + assert G1.number_of_edges() > G2.number_of_edges() + assert G2.number_of_edges() > G3.number_of_edges() + + +class TestThresholdedRandomGeometricGraph: + """Unit tests for :func:`~networkx.thresholded_random_geometric_graph`""" + + def test_number_of_nodes(self): + G = nx.thresholded_random_geometric_graph(50, 0.2, 0.1, seed=42) + assert len(G) == 50 + G = nx.thresholded_random_geometric_graph(range(50), 0.2, 0.1, seed=42) + assert len(G) == 50 + + def test_distances(self): + """Tests that pairs of vertices adjacent if and only if they are + within the prescribed radius. + """ + # Use the Euclidean metric, the default according to the + # documentation. + G = nx.thresholded_random_geometric_graph(50, 0.25, 0.1, seed=42) + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_p(self): + """Tests for providing an alternate distance metric to the generator.""" + + # Use the L1 metric. + def dist(x, y): + return sum(abs(a - b) for a, b in zip(x, y)) + + G = nx.thresholded_random_geometric_graph(50, 0.25, 0.1, p=1, seed=42) + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_node_names(self): + """Tests using values other than sequential numbers as node IDs.""" + import string + + nodes = list(string.ascii_lowercase) + G = nx.thresholded_random_geometric_graph(nodes, 0.25, 0.1, seed=42) + assert len(G) == len(nodes) + + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. 
+ if v in G[u]: + assert math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_theta(self): + """Tests that pairs of vertices adjacent if and only if their sum + weights exceeds the threshold parameter theta. + """ + G = nx.thresholded_random_geometric_graph(50, 0.25, 0.1, seed=42) + + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert (G.nodes[u]["weight"] + G.nodes[v]["weight"]) >= 0.1 + + def test_pos_name(self): + trgg = nx.thresholded_random_geometric_graph + G = trgg(50, 0.25, 0.1, seed=42, pos_name="p", weight_name="wt") + assert all(len(d["p"]) == 2 for n, d in G.nodes.items()) + assert all(d["wt"] > 0 for n, d in G.nodes.items()) + + +def test_geometric_edges_pos_attribute(): + G = nx.Graph() + G.add_nodes_from( + [ + (0, {"position": (0, 0)}), + (1, {"position": (0, 1)}), + (2, {"position": (1, 0)}), + ] + ) + expected_edges = [(0, 1), (0, 2)] + assert expected_edges == nx.geometric_edges(G, radius=1, pos_name="position") + + +def test_geometric_edges_raises_no_pos(): + G = nx.path_graph(3) + msg = "all nodes. 
must have a '" + with pytest.raises(nx.NetworkXError, match=msg): + nx.geometric_edges(G, radius=1) + + +def test_number_of_nodes_S1(): + G = nx.geometric_soft_configuration_graph( + beta=1.5, n=100, gamma=2.7, mean_degree=10, seed=42 + ) + assert len(G) == 100 + + +def test_set_attributes_S1(): + G = nx.geometric_soft_configuration_graph( + beta=1.5, n=100, gamma=2.7, mean_degree=10, seed=42 + ) + kappas = nx.get_node_attributes(G, "kappa") + assert len(kappas) == 100 + thetas = nx.get_node_attributes(G, "theta") + assert len(thetas) == 100 + radii = nx.get_node_attributes(G, "radius") + assert len(radii) == 100 + + +def test_mean_kappas_mean_degree_S1(): + G = nx.geometric_soft_configuration_graph( + beta=2.5, n=50, gamma=2.7, mean_degree=10, seed=8023 + ) + + kappas = nx.get_node_attributes(G, "kappa") + mean_kappas = sum(kappas.values()) / len(kappas) + assert math.fabs(mean_kappas - 10) < 0.5 + + degrees = dict(G.degree()) + mean_degree = sum(degrees.values()) / len(degrees) + assert math.fabs(mean_degree - 10) < 1 + + +def test_dict_kappas_S1(): + kappas = {i: 10 for i in range(1000)} + G = nx.geometric_soft_configuration_graph(beta=1, kappas=kappas) + assert len(G) == 1000 + kappas = nx.get_node_attributes(G, "kappa") + assert all(kappa == 10 for kappa in kappas.values()) + + +def test_beta_clustering_S1(): + G1 = nx.geometric_soft_configuration_graph( + beta=1.5, n=100, gamma=3.5, mean_degree=10, seed=42 + ) + G2 = nx.geometric_soft_configuration_graph( + beta=3.0, n=100, gamma=3.5, mean_degree=10, seed=42 + ) + assert nx.average_clustering(G1) < nx.average_clustering(G2) + + +def test_wrong_parameters_S1(): + with pytest.raises( + nx.NetworkXError, + match="Please provide either kappas, or all 3 of: n, gamma and mean_degree.", + ): + G = nx.geometric_soft_configuration_graph( + beta=1.5, gamma=3.5, mean_degree=10, seed=42 + ) + + with pytest.raises( + nx.NetworkXError, + match="When kappas is input, n, gamma and mean_degree must not be.", + ): + kappas = 
{i: 10 for i in range(1000)} + G = nx.geometric_soft_configuration_graph( + beta=1.5, kappas=kappas, gamma=2.3, seed=42 + ) + + with pytest.raises( + nx.NetworkXError, + match="Please provide either kappas, or all 3 of: n, gamma and mean_degree.", + ): + G = nx.geometric_soft_configuration_graph(beta=1.5, seed=42) + + +def test_negative_beta_S1(): + with pytest.raises( + nx.NetworkXError, match="The parameter beta cannot be smaller or equal to 0." + ): + G = nx.geometric_soft_configuration_graph( + beta=-1, n=100, gamma=2.3, mean_degree=10, seed=42 + ) + + +def test_non_zero_clustering_beta_lower_one_S1(): + G = nx.geometric_soft_configuration_graph( + beta=0.5, n=100, gamma=3.5, mean_degree=10, seed=42 + ) + assert nx.average_clustering(G) > 0 + + +def test_mean_degree_influence_on_connectivity_S1(): + low_mean_degree = 2 + high_mean_degree = 20 + G_low = nx.geometric_soft_configuration_graph( + beta=1.2, n=100, gamma=2.7, mean_degree=low_mean_degree, seed=42 + ) + G_high = nx.geometric_soft_configuration_graph( + beta=1.2, n=100, gamma=2.7, mean_degree=high_mean_degree, seed=42 + ) + assert nx.number_connected_components(G_low) > nx.number_connected_components( + G_high + ) + + +def test_compare_mean_kappas_different_gammas_S1(): + G1 = nx.geometric_soft_configuration_graph( + beta=1.5, n=20, gamma=2.7, mean_degree=5, seed=42 + ) + G2 = nx.geometric_soft_configuration_graph( + beta=1.5, n=20, gamma=3.5, mean_degree=5, seed=42 + ) + kappas1 = nx.get_node_attributes(G1, "kappa") + mean_kappas1 = sum(kappas1.values()) / len(kappas1) + kappas2 = nx.get_node_attributes(G2, "kappa") + mean_kappas2 = sum(kappas2.values()) / len(kappas2) + assert math.fabs(mean_kappas1 - mean_kappas2) < 1 diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_lattice.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_lattice.py new file mode 100644 index 
0000000000000000000000000000000000000000..5012324a535297bb1a6997dc1f60b332c2aa0752 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_lattice.py @@ -0,0 +1,246 @@ +"""Unit tests for the :mod:`networkx.generators.lattice` module.""" + +from itertools import product + +import pytest + +import networkx as nx +from networkx.utils import edges_equal + + +class TestGrid2DGraph: + """Unit tests for :func:`networkx.generators.lattice.grid_2d_graph`""" + + def test_number_of_vertices(self): + m, n = 5, 6 + G = nx.grid_2d_graph(m, n) + assert len(G) == m * n + + def test_degree_distribution(self): + m, n = 5, 6 + G = nx.grid_2d_graph(m, n) + expected_histogram = [0, 0, 4, 2 * (m + n) - 8, (m - 2) * (n - 2)] + assert nx.degree_histogram(G) == expected_histogram + + def test_directed(self): + m, n = 5, 6 + G = nx.grid_2d_graph(m, n) + H = nx.grid_2d_graph(m, n, create_using=nx.DiGraph()) + assert H.succ == G.adj + assert H.pred == G.adj + + def test_multigraph(self): + m, n = 5, 6 + G = nx.grid_2d_graph(m, n) + H = nx.grid_2d_graph(m, n, create_using=nx.MultiGraph()) + assert list(H.edges()) == list(G.edges()) + + def test_periodic(self): + G = nx.grid_2d_graph(0, 0, periodic=True) + assert dict(G.degree()) == {} + + for m, n, H in [ + (2, 2, nx.cycle_graph(4)), + (1, 7, nx.cycle_graph(7)), + (7, 1, nx.cycle_graph(7)), + (2, 5, nx.circular_ladder_graph(5)), + (5, 2, nx.circular_ladder_graph(5)), + (2, 4, nx.cubical_graph()), + (4, 2, nx.cubical_graph()), + ]: + G = nx.grid_2d_graph(m, n, periodic=True) + assert nx.could_be_isomorphic(G, H) + + def test_periodic_iterable(self): + m, n = 3, 7 + for a, b in product([0, 1], [0, 1]): + G = nx.grid_2d_graph(m, n, periodic=(a, b)) + assert G.number_of_nodes() == m * n + assert G.number_of_edges() == (m + a - 1) * n + (n + b - 1) * m + + def test_periodic_directed(self): + G = nx.grid_2d_graph(4, 2, periodic=True) + H = nx.grid_2d_graph(4, 2, periodic=True, create_using=nx.DiGraph()) + assert 
H.succ == G.adj + assert H.pred == G.adj + + def test_periodic_multigraph(self): + G = nx.grid_2d_graph(4, 2, periodic=True) + H = nx.grid_2d_graph(4, 2, periodic=True, create_using=nx.MultiGraph()) + assert list(G.edges()) == list(H.edges()) + + def test_exceptions(self): + pytest.raises(nx.NetworkXError, nx.grid_2d_graph, -3, 2) + pytest.raises(nx.NetworkXError, nx.grid_2d_graph, 3, -2) + pytest.raises(TypeError, nx.grid_2d_graph, 3.3, 2) + pytest.raises(TypeError, nx.grid_2d_graph, 3, 2.2) + + def test_node_input(self): + G = nx.grid_2d_graph(4, 2, periodic=True) + H = nx.grid_2d_graph(range(4), range(2), periodic=True) + assert nx.is_isomorphic(H, G) + H = nx.grid_2d_graph("abcd", "ef", periodic=True) + assert nx.is_isomorphic(H, G) + G = nx.grid_2d_graph(5, 6) + H = nx.grid_2d_graph(range(5), range(6)) + assert edges_equal(H, G) + + +class TestGridGraph: + """Unit tests for :func:`networkx.generators.lattice.grid_graph`""" + + def test_grid_graph(self): + """grid_graph([n,m]) is a connected simple graph with the + following properties: + number_of_nodes = n*m + degree_histogram = [0,0,4,2*(n+m)-8,(n-2)*(m-2)] + """ + for n, m in [(3, 5), (5, 3), (4, 5), (5, 4)]: + dim = [n, m] + g = nx.grid_graph(dim) + assert len(g) == n * m + assert nx.degree_histogram(g) == [ + 0, + 0, + 4, + 2 * (n + m) - 8, + (n - 2) * (m - 2), + ] + + for n, m in [(1, 5), (5, 1)]: + dim = [n, m] + g = nx.grid_graph(dim) + assert len(g) == n * m + assert nx.is_isomorphic(g, nx.path_graph(5)) + + # mg = nx.grid_graph([n,m], create_using=MultiGraph()) + # assert_equal(mg.edges(), g.edges()) + + def test_node_input(self): + G = nx.grid_graph([range(7, 9), range(3, 6)]) + assert len(G) == 2 * 3 + assert nx.is_isomorphic(G, nx.grid_graph([2, 3])) + + def test_periodic_iterable(self): + m, n, k = 3, 7, 5 + for a, b, c in product([0, 1], [0, 1], [0, 1]): + G = nx.grid_graph([m, n, k], periodic=(a, b, c)) + num_e = (m + a - 1) * n * k + (n + b - 1) * m * k + (k + c - 1) * m * n + assert 
G.number_of_nodes() == m * n * k + assert G.number_of_edges() == num_e + + +class TestHypercubeGraph: + """Unit tests for :func:`networkx.generators.lattice.hypercube_graph`""" + + def test_special_cases(self): + for n, H in [ + (0, nx.null_graph()), + (1, nx.path_graph(2)), + (2, nx.cycle_graph(4)), + (3, nx.cubical_graph()), + ]: + G = nx.hypercube_graph(n) + assert nx.could_be_isomorphic(G, H) + + def test_degree_distribution(self): + for n in range(1, 10): + G = nx.hypercube_graph(n) + expected_histogram = [0] * n + [2**n] + assert nx.degree_histogram(G) == expected_histogram + + +class TestTriangularLatticeGraph: + "Tests for :func:`networkx.generators.lattice.triangular_lattice_graph`" + + def test_lattice_points(self): + """Tests that the graph is really a triangular lattice.""" + for m, n in [(2, 3), (2, 2), (2, 1), (3, 3), (3, 2), (3, 4)]: + G = nx.triangular_lattice_graph(m, n) + N = (n + 1) // 2 + assert len(G) == (m + 1) * (1 + N) - (n % 2) * ((m + 1) // 2) + for i, j in G.nodes(): + nbrs = G[(i, j)] + if i < N: + assert (i + 1, j) in nbrs + if j < m: + assert (i, j + 1) in nbrs + if j < m and (i > 0 or j % 2) and (i < N or (j + 1) % 2): + assert (i + 1, j + 1) in nbrs or (i - 1, j + 1) in nbrs + + def test_directed(self): + """Tests for creating a directed triangular lattice.""" + G = nx.triangular_lattice_graph(3, 4, create_using=nx.Graph()) + H = nx.triangular_lattice_graph(3, 4, create_using=nx.DiGraph()) + assert H.is_directed() + for u, v in H.edges(): + assert v[1] >= u[1] + if v[1] == u[1]: + assert v[0] > u[0] + + def test_multigraph(self): + """Tests for creating a triangular lattice multigraph.""" + G = nx.triangular_lattice_graph(3, 4, create_using=nx.Graph()) + H = nx.triangular_lattice_graph(3, 4, create_using=nx.MultiGraph()) + assert list(H.edges()) == list(G.edges()) + + def test_periodic(self): + G = nx.triangular_lattice_graph(4, 6, periodic=True) + assert len(G) == 12 + assert G.size() == 36 + # all degrees are 6 + assert len([n for 
n, d in G.degree() if d != 6]) == 0 + G = nx.triangular_lattice_graph(5, 7, periodic=True) + TLG = nx.triangular_lattice_graph + pytest.raises(nx.NetworkXError, TLG, 2, 4, periodic=True) + pytest.raises(nx.NetworkXError, TLG, 4, 4, periodic=True) + pytest.raises(nx.NetworkXError, TLG, 2, 6, periodic=True) + + +class TestHexagonalLatticeGraph: + "Tests for :func:`networkx.generators.lattice.hexagonal_lattice_graph`" + + def test_lattice_points(self): + """Tests that the graph is really a hexagonal lattice.""" + for m, n in [(4, 5), (4, 4), (4, 3), (3, 2), (3, 3), (3, 5)]: + G = nx.hexagonal_lattice_graph(m, n) + assert len(G) == 2 * (m + 1) * (n + 1) - 2 + C_6 = nx.cycle_graph(6) + hexagons = [ + [(0, 0), (0, 1), (0, 2), (1, 0), (1, 1), (1, 2)], + [(0, 2), (0, 3), (0, 4), (1, 2), (1, 3), (1, 4)], + [(1, 1), (1, 2), (1, 3), (2, 1), (2, 2), (2, 3)], + [(2, 0), (2, 1), (2, 2), (3, 0), (3, 1), (3, 2)], + [(2, 2), (2, 3), (2, 4), (3, 2), (3, 3), (3, 4)], + ] + for hexagon in hexagons: + assert nx.is_isomorphic(G.subgraph(hexagon), C_6) + + def test_directed(self): + """Tests for creating a directed hexagonal lattice.""" + G = nx.hexagonal_lattice_graph(3, 5, create_using=nx.Graph()) + H = nx.hexagonal_lattice_graph(3, 5, create_using=nx.DiGraph()) + assert H.is_directed() + pos = nx.get_node_attributes(H, "pos") + for u, v in H.edges(): + assert pos[v][1] >= pos[u][1] + if pos[v][1] == pos[u][1]: + assert pos[v][0] > pos[u][0] + + def test_multigraph(self): + """Tests for creating a hexagonal lattice multigraph.""" + G = nx.hexagonal_lattice_graph(3, 5, create_using=nx.Graph()) + H = nx.hexagonal_lattice_graph(3, 5, create_using=nx.MultiGraph()) + assert list(H.edges()) == list(G.edges()) + + def test_periodic(self): + G = nx.hexagonal_lattice_graph(4, 6, periodic=True) + assert len(G) == 48 + assert G.size() == 72 + # all degrees are 3 + assert len([n for n, d in G.degree() if d != 3]) == 0 + G = nx.hexagonal_lattice_graph(5, 8, periodic=True) + HLG = 
nx.hexagonal_lattice_graph + pytest.raises(nx.NetworkXError, HLG, 2, 7, periodic=True) + pytest.raises(nx.NetworkXError, HLG, 1, 4, periodic=True) + pytest.raises(nx.NetworkXError, HLG, 2, 1, periodic=True) diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_line.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_line.py new file mode 100644 index 0000000000000000000000000000000000000000..7f5454ebee019fb27b61f72f1fdd81b6c927ba17 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_line.py @@ -0,0 +1,309 @@ +import pytest + +import networkx as nx +from networkx.generators import line +from networkx.utils import edges_equal + + +class TestGeneratorLine: + def test_star(self): + G = nx.star_graph(5) + L = nx.line_graph(G) + assert nx.is_isomorphic(L, nx.complete_graph(5)) + + def test_path(self): + G = nx.path_graph(5) + L = nx.line_graph(G) + assert nx.is_isomorphic(L, nx.path_graph(4)) + + def test_cycle(self): + G = nx.cycle_graph(5) + L = nx.line_graph(G) + assert nx.is_isomorphic(L, G) + + def test_digraph1(self): + G = nx.DiGraph([(0, 1), (0, 2), (0, 3)]) + L = nx.line_graph(G) + # no edge graph, but with nodes + assert L.adj == {(0, 1): {}, (0, 2): {}, (0, 3): {}} + + def test_multigraph1(self): + G = nx.MultiGraph([(0, 1), (0, 1), (1, 0), (0, 2), (2, 0), (0, 3)]) + L = nx.line_graph(G) + # no edge graph, but with nodes + assert edges_equal( + L.edges(), + [ + ((0, 3, 0), (0, 1, 0)), + ((0, 3, 0), (0, 2, 0)), + ((0, 3, 0), (0, 2, 1)), + ((0, 3, 0), (0, 1, 1)), + ((0, 3, 0), (0, 1, 2)), + ((0, 1, 0), (0, 1, 1)), + ((0, 1, 0), (0, 2, 0)), + ((0, 1, 0), (0, 1, 2)), + ((0, 1, 0), (0, 2, 1)), + ((0, 1, 1), (0, 1, 2)), + ((0, 1, 1), (0, 2, 0)), + ((0, 1, 1), (0, 2, 1)), + ((0, 1, 2), (0, 2, 0)), + ((0, 1, 2), (0, 2, 1)), + ((0, 2, 0), (0, 2, 1)), + ], + ) + + def test_multigraph2(self): + G = nx.MultiGraph([(1, 2), (2, 1)]) + L = nx.line_graph(G) + assert 
edges_equal(L.edges(), [((1, 2, 0), (1, 2, 1))]) + + def test_multidigraph1(self): + G = nx.MultiDiGraph([(1, 2), (2, 1)]) + L = nx.line_graph(G) + assert edges_equal(L.edges(), [((1, 2, 0), (2, 1, 0)), ((2, 1, 0), (1, 2, 0))]) + + def test_multidigraph2(self): + G = nx.MultiDiGraph([(0, 1), (0, 1), (0, 1), (1, 2)]) + L = nx.line_graph(G) + assert edges_equal( + L.edges(), + [((0, 1, 0), (1, 2, 0)), ((0, 1, 1), (1, 2, 0)), ((0, 1, 2), (1, 2, 0))], + ) + + def test_digraph2(self): + G = nx.DiGraph([(0, 1), (1, 2), (2, 3)]) + L = nx.line_graph(G) + assert edges_equal(L.edges(), [((0, 1), (1, 2)), ((1, 2), (2, 3))]) + + def test_create1(self): + G = nx.DiGraph([(0, 1), (1, 2), (2, 3)]) + L = nx.line_graph(G, create_using=nx.Graph()) + assert edges_equal(L.edges(), [((0, 1), (1, 2)), ((1, 2), (2, 3))]) + + def test_create2(self): + G = nx.Graph([(0, 1), (1, 2), (2, 3)]) + L = nx.line_graph(G, create_using=nx.DiGraph()) + assert edges_equal(L.edges(), [((0, 1), (1, 2)), ((1, 2), (2, 3))]) + + +class TestGeneratorInverseLine: + def test_example(self): + G = nx.Graph() + G_edges = [ + [1, 2], + [1, 3], + [1, 4], + [1, 5], + [2, 3], + [2, 5], + [2, 6], + [2, 7], + [3, 4], + [3, 5], + [6, 7], + [6, 8], + [7, 8], + ] + G.add_edges_from(G_edges) + H = nx.inverse_line_graph(G) + solution = nx.Graph() + solution_edges = [ + ("a", "b"), + ("a", "c"), + ("a", "d"), + ("a", "e"), + ("c", "d"), + ("e", "f"), + ("e", "g"), + ("f", "g"), + ] + solution.add_edges_from(solution_edges) + assert nx.is_isomorphic(H, solution) + + def test_example_2(self): + G = nx.Graph() + G_edges = [[1, 2], [1, 3], [2, 3], [3, 4], [3, 5], [4, 5]] + G.add_edges_from(G_edges) + H = nx.inverse_line_graph(G) + solution = nx.Graph() + solution_edges = [("a", "c"), ("b", "c"), ("c", "d"), ("d", "e"), ("d", "f")] + solution.add_edges_from(solution_edges) + assert nx.is_isomorphic(H, solution) + + def test_pair(self): + G = nx.path_graph(2) + H = nx.inverse_line_graph(G) + solution = nx.path_graph(3) + assert 
nx.is_isomorphic(H, solution) + + def test_line(self): + G = nx.path_graph(5) + solution = nx.path_graph(6) + H = nx.inverse_line_graph(G) + assert nx.is_isomorphic(H, solution) + + def test_triangle_graph(self): + G = nx.complete_graph(3) + H = nx.inverse_line_graph(G) + alternative_solution = nx.Graph() + alternative_solution.add_edges_from([[0, 1], [0, 2], [0, 3]]) + # there are two alternative inverse line graphs for this case + # so long as we get one of them the test should pass + assert nx.is_isomorphic(H, G) or nx.is_isomorphic(H, alternative_solution) + + def test_cycle(self): + G = nx.cycle_graph(5) + H = nx.inverse_line_graph(G) + assert nx.is_isomorphic(H, G) + + def test_empty(self): + G = nx.Graph() + H = nx.inverse_line_graph(G) + assert nx.is_isomorphic(H, nx.complete_graph(1)) + + def test_K1(self): + G = nx.complete_graph(1) + H = nx.inverse_line_graph(G) + solution = nx.path_graph(2) + assert nx.is_isomorphic(H, solution) + + def test_edgeless_graph(self): + G = nx.empty_graph(5) + with pytest.raises(nx.NetworkXError, match="edgeless graph"): + nx.inverse_line_graph(G) + + def test_selfloops_error(self): + G = nx.cycle_graph(4) + G.add_edge(0, 0) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + + def test_non_line_graphs(self): + # Tests several known non-line graphs for impossibility + # Adapted from L.W.Beineke, "Characterizations of derived graphs" + + # claw graph + claw = nx.star_graph(3) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, claw) + + # wheel graph with 6 nodes + wheel = nx.wheel_graph(6) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, wheel) + + # K5 with one edge remove + K5m = nx.complete_graph(5) + K5m.remove_edge(0, 1) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, K5m) + + # graph without any odd triangles (contains claw as induced subgraph) + G = nx.compose(nx.path_graph(2), nx.complete_bipartite_graph(2, 3)) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + + ## 
Variations on a diamond graph + + # Diamond + 2 edges (+ "roof") + G = nx.diamond_graph() + G.add_edges_from([(4, 0), (5, 3)]) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + G.add_edge(4, 5) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + + # Diamond + 2 connected edges + G = nx.diamond_graph() + G.add_edges_from([(4, 0), (4, 3)]) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + + # Diamond + K3 + one edge (+ 2*K3) + G = nx.diamond_graph() + G.add_edges_from([(4, 0), (4, 1), (4, 2), (5, 3)]) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + G.add_edges_from([(5, 1), (5, 2)]) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + + # 4 triangles + G = nx.diamond_graph() + G.add_edges_from([(4, 0), (4, 1), (5, 2), (5, 3)]) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + + def test_wrong_graph_type(self): + G = nx.DiGraph() + G_edges = [[0, 1], [0, 2], [0, 3]] + G.add_edges_from(G_edges) + pytest.raises(nx.NetworkXNotImplemented, nx.inverse_line_graph, G) + + G = nx.MultiGraph() + G_edges = [[0, 1], [0, 2], [0, 3]] + G.add_edges_from(G_edges) + pytest.raises(nx.NetworkXNotImplemented, nx.inverse_line_graph, G) + + def test_line_inverse_line_complete(self): + G = nx.complete_graph(10) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + def test_line_inverse_line_path(self): + G = nx.path_graph(10) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + def test_line_inverse_line_hypercube(self): + G = nx.hypercube_graph(5) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + def test_line_inverse_line_cycle(self): + G = nx.cycle_graph(10) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + def test_line_inverse_line_star(self): + G = nx.star_graph(20) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + def 
test_line_inverse_line_multipartite(self): + G = nx.complete_multipartite_graph(3, 4, 5) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + def test_line_inverse_line_dgm(self): + G = nx.dorogovtsev_goltsev_mendes_graph(4) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + def test_line_different_node_types(self): + G = nx.path_graph([1, 2, 3, "a", "b", "c"]) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + +class TestGeneratorPrivateFunctions: + def test_triangles_error(self): + G = nx.diamond_graph() + pytest.raises(nx.NetworkXError, line._triangles, G, (4, 0)) + pytest.raises(nx.NetworkXError, line._triangles, G, (0, 3)) + + def test_odd_triangles_error(self): + G = nx.diamond_graph() + pytest.raises(nx.NetworkXError, line._odd_triangle, G, (0, 1, 4)) + pytest.raises(nx.NetworkXError, line._odd_triangle, G, (0, 1, 3)) + + def test_select_starting_cell_error(self): + G = nx.diamond_graph() + pytest.raises(nx.NetworkXError, line._select_starting_cell, G, (4, 0)) + pytest.raises(nx.NetworkXError, line._select_starting_cell, G, (0, 3)) + + def test_diamond_graph(self): + G = nx.diamond_graph() + for edge in G.edges: + cell = line._select_starting_cell(G, starting_edge=edge) + # Starting cell should always be one of the two triangles + assert len(cell) == 3 + assert all(v in G[u] for u in cell for v in cell if u != v) diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_stochastic.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_stochastic.py new file mode 100644 index 0000000000000000000000000000000000000000..0404d9d8454b36b546152c1428790441c6952fa2 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_stochastic.py @@ -0,0 +1,72 @@ +"""Unit tests for the :mod:`networkx.generators.stochastic` module.""" + +import pytest + +import networkx as nx + 
+ +class TestStochasticGraph: + """Unit tests for the :func:`~networkx.stochastic_graph` function.""" + + def test_default_weights(self): + G = nx.DiGraph() + G.add_edge(0, 1) + G.add_edge(0, 2) + S = nx.stochastic_graph(G) + assert nx.is_isomorphic(G, S) + assert sorted(S.edges(data=True)) == [ + (0, 1, {"weight": 0.5}), + (0, 2, {"weight": 0.5}), + ] + + def test_in_place(self): + """Tests for an in-place reweighting of the edges of the graph.""" + G = nx.DiGraph() + G.add_edge(0, 1, weight=1) + G.add_edge(0, 2, weight=1) + nx.stochastic_graph(G, copy=False) + assert sorted(G.edges(data=True)) == [ + (0, 1, {"weight": 0.5}), + (0, 2, {"weight": 0.5}), + ] + + def test_arbitrary_weights(self): + G = nx.DiGraph() + G.add_edge(0, 1, weight=1) + G.add_edge(0, 2, weight=1) + S = nx.stochastic_graph(G) + assert sorted(S.edges(data=True)) == [ + (0, 1, {"weight": 0.5}), + (0, 2, {"weight": 0.5}), + ] + + def test_multidigraph(self): + G = nx.MultiDiGraph() + G.add_edges_from([(0, 1), (0, 1), (0, 2), (0, 2)]) + S = nx.stochastic_graph(G) + d = {"weight": 0.25} + assert sorted(S.edges(data=True)) == [ + (0, 1, d), + (0, 1, d), + (0, 2, d), + (0, 2, d), + ] + + def test_zero_weights(self): + """Smoke test: ensure ZeroDivisionError is not raised.""" + G = nx.DiGraph() + G.add_edge(0, 1, weight=0) + G.add_edge(0, 2, weight=0) + S = nx.stochastic_graph(G) + assert sorted(S.edges(data=True)) == [ + (0, 1, {"weight": 0}), + (0, 2, {"weight": 0}), + ] + + def test_graph_disallowed(self): + with pytest.raises(nx.NetworkXNotImplemented): + nx.stochastic_graph(nx.Graph()) + + def test_multigraph_disallowed(self): + with pytest.raises(nx.NetworkXNotImplemented): + nx.stochastic_graph(nx.MultiGraph()) diff --git a/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_trees.py b/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_trees.py new file mode 100644 index 
0000000000000000000000000000000000000000..7932436bf7ad6bb5ab5124f6ff59b7523358354d --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/networkx/generators/tests/test_trees.py @@ -0,0 +1,195 @@ +import random + +import pytest + +import networkx as nx +from networkx.utils import arbitrary_element, graphs_equal + + +@pytest.mark.parametrize("prefix_tree_fn", (nx.prefix_tree, nx.prefix_tree_recursive)) +def test_basic_prefix_tree(prefix_tree_fn): + # This example is from the Wikipedia article "Trie" + # . + strings = ["a", "to", "tea", "ted", "ten", "i", "in", "inn"] + T = prefix_tree_fn(strings) + root, NIL = 0, -1 + + def source_label(v): + return T.nodes[v]["source"] + + # First, we check that the tree has the expected + # structure. Recall that each node that corresponds to one of + # the input strings has an edge to the NIL node. + # + # Consider the three children at level 1 in the trie. + a, i, t = sorted(T[root], key=source_label) + # Check the 'a' branch. + assert len(T[a]) == 1 + nil = arbitrary_element(T[a]) + assert len(T[nil]) == 0 + # Check the 'i' branch. + assert len(T[i]) == 2 + nil, in_ = sorted(T[i], key=source_label) + assert len(T[nil]) == 0 + assert len(T[in_]) == 2 + nil, inn = sorted(T[in_], key=source_label) + assert len(T[nil]) == 0 + assert len(T[inn]) == 1 + nil = arbitrary_element(T[inn]) + assert len(T[nil]) == 0 + # Check the 't' branch. + te, to = sorted(T[t], key=source_label) + assert len(T[to]) == 1 + nil = arbitrary_element(T[to]) + assert len(T[nil]) == 0 + tea, ted, ten = sorted(T[te], key=source_label) + assert len(T[tea]) == 1 + assert len(T[ted]) == 1 + assert len(T[ten]) == 1 + nil = arbitrary_element(T[tea]) + assert len(T[nil]) == 0 + nil = arbitrary_element(T[ted]) + assert len(T[nil]) == 0 + nil = arbitrary_element(T[ten]) + assert len(T[nil]) == 0 + + # Next, we check that the "sources" of each of the nodes is the + # rightmost letter in the string corresponding to the path to + # that node. 
+ assert source_label(root) is None + assert source_label(a) == "a" + assert source_label(i) == "i" + assert source_label(t) == "t" + assert source_label(in_) == "n" + assert source_label(inn) == "n" + assert source_label(to) == "o" + assert source_label(te) == "e" + assert source_label(tea) == "a" + assert source_label(ted) == "d" + assert source_label(ten) == "n" + assert source_label(NIL) == "NIL" + + +@pytest.mark.parametrize( + "strings", + ( + ["a", "to", "tea", "ted", "ten", "i", "in", "inn"], + ["ab", "abs", "ad"], + ["ab", "abs", "ad", ""], + ["distant", "disparaging", "distant", "diamond", "ruby"], + ), +) +def test_implementations_consistent(strings): + """Ensure results are consistent between prefix_tree implementations.""" + assert graphs_equal(nx.prefix_tree(strings), nx.prefix_tree_recursive(strings)) + + +def test_random_labeled_rooted_tree(): + for i in range(1, 10): + t1 = nx.random_labeled_rooted_tree(i, seed=42) + t2 = nx.random_labeled_rooted_tree(i, seed=42) + assert nx.utils.misc.graphs_equal(t1, t2) + assert nx.is_tree(t1) + assert "root" in t1.graph + assert "roots" not in t1.graph + + +def test_random_labeled_tree_n_zero(): + """Tests if n = 0 then the NetworkXPointlessConcept exception is raised.""" + with pytest.raises(nx.NetworkXPointlessConcept): + T = nx.random_labeled_tree(0, seed=1234) + with pytest.raises(nx.NetworkXPointlessConcept): + T = nx.random_labeled_rooted_tree(0, seed=1234) + + +def test_random_labeled_rooted_forest(): + for i in range(1, 10): + t1 = nx.random_labeled_rooted_forest(i, seed=42) + t2 = nx.random_labeled_rooted_forest(i, seed=42) + assert nx.utils.misc.graphs_equal(t1, t2) + for c in nx.connected_components(t1): + assert nx.is_tree(t1.subgraph(c)) + assert "root" not in t1.graph + assert "roots" in t1.graph + + +def test_random_labeled_rooted_forest_n_zero(): + """Tests generation of empty labeled forests.""" + F = nx.random_labeled_rooted_forest(0, seed=1234) + assert len(F) == 0 + assert 
len(F.graph["roots"]) == 0 + + +def test_random_unlabeled_rooted_tree(): + for i in range(1, 10): + t1 = nx.random_unlabeled_rooted_tree(i, seed=42) + t2 = nx.random_unlabeled_rooted_tree(i, seed=42) + assert nx.utils.misc.graphs_equal(t1, t2) + assert nx.is_tree(t1) + assert "root" in t1.graph + assert "roots" not in t1.graph + t = nx.random_unlabeled_rooted_tree(15, number_of_trees=10, seed=43) + random.seed(43) + s = nx.random_unlabeled_rooted_tree(15, number_of_trees=10, seed=random) + for i in range(10): + assert nx.utils.misc.graphs_equal(t[i], s[i]) + assert nx.is_tree(t[i]) + assert "root" in t[i].graph + assert "roots" not in t[i].graph + + +def test_random_unlabeled_tree_n_zero(): + """Tests if n = 0 then the NetworkXPointlessConcept exception is raised.""" + with pytest.raises(nx.NetworkXPointlessConcept): + T = nx.random_unlabeled_tree(0, seed=1234) + with pytest.raises(nx.NetworkXPointlessConcept): + T = nx.random_unlabeled_rooted_tree(0, seed=1234) + + +def test_random_unlabeled_rooted_forest(): + with pytest.raises(ValueError): + nx.random_unlabeled_rooted_forest(10, q=0, seed=42) + for i in range(1, 10): + for q in range(1, i + 1): + t1 = nx.random_unlabeled_rooted_forest(i, q=q, seed=42) + t2 = nx.random_unlabeled_rooted_forest(i, q=q, seed=42) + assert nx.utils.misc.graphs_equal(t1, t2) + for c in nx.connected_components(t1): + assert nx.is_tree(t1.subgraph(c)) + assert len(c) <= q + assert "root" not in t1.graph + assert "roots" in t1.graph + t = nx.random_unlabeled_rooted_forest(15, number_of_forests=10, seed=43) + random.seed(43) + s = nx.random_unlabeled_rooted_forest(15, number_of_forests=10, seed=random) + for i in range(10): + assert nx.utils.misc.graphs_equal(t[i], s[i]) + for c in nx.connected_components(t[i]): + assert nx.is_tree(t[i].subgraph(c)) + assert "root" not in t[i].graph + assert "roots" in t[i].graph + + +def test_random_unlabeled_forest_n_zero(): + """Tests generation of empty unlabeled forests.""" + F = 
nx.random_unlabeled_rooted_forest(0, seed=1234) + assert len(F) == 0 + assert len(F.graph["roots"]) == 0 + + +def test_random_unlabeled_tree(): + for i in range(1, 10): + t1 = nx.random_unlabeled_tree(i, seed=42) + t2 = nx.random_unlabeled_tree(i, seed=42) + assert nx.utils.misc.graphs_equal(t1, t2) + assert nx.is_tree(t1) + assert "root" not in t1.graph + assert "roots" not in t1.graph + t = nx.random_unlabeled_tree(10, number_of_trees=10, seed=43) + random.seed(43) + s = nx.random_unlabeled_tree(10, number_of_trees=10, seed=random) + for i in range(10): + assert nx.utils.misc.graphs_equal(t[i], s[i]) + assert nx.is_tree(t[i]) + assert "root" not in t[i].graph + assert "roots" not in t[i].graph diff --git a/evalkit_tf446/lib/python3.10/site-packages/semantic_version/__init__.py b/evalkit_tf446/lib/python3.10/site-packages/semantic_version/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1528bda574c4e50f10e99a46551a4a4d9378a6b2 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/semantic_version/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright (c) The python-semanticversion project +# This code is distributed under the two-clause BSD License. 
+ + +from .base import compare, match, validate, SimpleSpec, NpmSpec, Spec, SpecItem, Version + + +__author__ = "Raphaƫl Barrois " +try: + # Python 3.8+ + from importlib.metadata import version + + __version__ = version("semantic_version") +except ImportError: + import pkg_resources + + __version__ = pkg_resources.get_distribution("semantic_version").version diff --git a/evalkit_tf446/lib/python3.10/site-packages/semantic_version/__pycache__/__init__.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/semantic_version/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bbdb5059c50e2288eba794d0997d696e5293e4dc Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/semantic_version/__pycache__/__init__.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/semantic_version/__pycache__/base.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/semantic_version/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cdf23ad029947247641d4256132686672f56a0f9 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/semantic_version/__pycache__/base.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/semantic_version/__pycache__/django_fields.cpython-310.pyc b/evalkit_tf446/lib/python3.10/site-packages/semantic_version/__pycache__/django_fields.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..17b6c44cd3bbe2f3dd49c3704fd91472dc46f7f7 Binary files /dev/null and b/evalkit_tf446/lib/python3.10/site-packages/semantic_version/__pycache__/django_fields.cpython-310.pyc differ diff --git a/evalkit_tf446/lib/python3.10/site-packages/semantic_version/base.py b/evalkit_tf446/lib/python3.10/site-packages/semantic_version/base.py new file mode 100644 index 0000000000000000000000000000000000000000..777c27ac463f34996d0281fb7a68e5f6c7fb9a9c --- /dev/null +++ 
b/evalkit_tf446/lib/python3.10/site-packages/semantic_version/base.py @@ -0,0 +1,1449 @@ +# -*- coding: utf-8 -*- +# Copyright (c) The python-semanticversion project +# This code is distributed under the two-clause BSD License. + +import functools +import re +import warnings + + +def _has_leading_zero(value): + return (value + and value[0] == '0' + and value.isdigit() + and value != '0') + + +class MaxIdentifier(object): + __slots__ = [] + + def __repr__(self): + return 'MaxIdentifier()' + + def __eq__(self, other): + return isinstance(other, self.__class__) + + +@functools.total_ordering +class NumericIdentifier(object): + __slots__ = ['value'] + + def __init__(self, value): + self.value = int(value) + + def __repr__(self): + return 'NumericIdentifier(%r)' % self.value + + def __eq__(self, other): + if isinstance(other, NumericIdentifier): + return self.value == other.value + return NotImplemented + + def __lt__(self, other): + if isinstance(other, MaxIdentifier): + return True + elif isinstance(other, AlphaIdentifier): + return True + elif isinstance(other, NumericIdentifier): + return self.value < other.value + else: + return NotImplemented + + +@functools.total_ordering +class AlphaIdentifier(object): + __slots__ = ['value'] + + def __init__(self, value): + self.value = value.encode('ascii') + + def __repr__(self): + return 'AlphaIdentifier(%r)' % self.value + + def __eq__(self, other): + if isinstance(other, AlphaIdentifier): + return self.value == other.value + return NotImplemented + + def __lt__(self, other): + if isinstance(other, MaxIdentifier): + return True + elif isinstance(other, NumericIdentifier): + return False + elif isinstance(other, AlphaIdentifier): + return self.value < other.value + else: + return NotImplemented + + +class Version(object): + + version_re = re.compile(r'^(\d+)\.(\d+)\.(\d+)(?:-([0-9a-zA-Z.-]+))?(?:\+([0-9a-zA-Z.-]+))?$') + partial_version_re = 
re.compile(r'^(\d+)(?:\.(\d+)(?:\.(\d+))?)?(?:-([0-9a-zA-Z.-]*))?(?:\+([0-9a-zA-Z.-]*))?$') + + def __init__( + self, + version_string=None, + major=None, + minor=None, + patch=None, + prerelease=None, + build=None, + partial=False): + if partial: + warnings.warn( + "Partial versions will be removed in 3.0; use SimpleSpec('1.x.x') instead.", + DeprecationWarning, + stacklevel=2, + ) + has_text = version_string is not None + has_parts = not (major is minor is patch is prerelease is build is None) + if not has_text ^ has_parts: + raise ValueError("Call either Version('1.2.3') or Version(major=1, ...).") + + if has_text: + major, minor, patch, prerelease, build = self.parse(version_string, partial) + else: + # Convenience: allow to omit prerelease/build. + prerelease = tuple(prerelease or ()) + if not partial: + build = tuple(build or ()) + self._validate_kwargs(major, minor, patch, prerelease, build, partial) + + self.major = major + self.minor = minor + self.patch = patch + self.prerelease = prerelease + self.build = build + + self.partial = partial + + # Cached precedence keys + # _cmp_precedence_key is used for semver-precedence comparison + self._cmp_precedence_key = self._build_precedence_key(with_build=False) + # _sort_precedence_key is used for self.precedence_key, esp. for sorted(...) 
+ self._sort_precedence_key = self._build_precedence_key(with_build=True) + + @classmethod + def _coerce(cls, value, allow_none=False): + if value is None and allow_none: + return value + return int(value) + + def next_major(self): + if self.prerelease and self.minor == self.patch == 0: + return Version( + major=self.major, + minor=0, + patch=0, + partial=self.partial, + ) + else: + return Version( + major=self.major + 1, + minor=0, + patch=0, + partial=self.partial, + ) + + def next_minor(self): + if self.prerelease and self.patch == 0: + return Version( + major=self.major, + minor=self.minor, + patch=0, + partial=self.partial, + ) + else: + return Version( + major=self.major, + minor=self.minor + 1, + patch=0, + partial=self.partial, + ) + + def next_patch(self): + if self.prerelease: + return Version( + major=self.major, + minor=self.minor, + patch=self.patch, + partial=self.partial, + ) + else: + return Version( + major=self.major, + minor=self.minor, + patch=self.patch + 1, + partial=self.partial, + ) + + def truncate(self, level='patch'): + """Return a new Version object, truncated up to the selected level.""" + if level == 'build': + return self + elif level == 'prerelease': + return Version( + major=self.major, + minor=self.minor, + patch=self.patch, + prerelease=self.prerelease, + partial=self.partial, + ) + elif level == 'patch': + return Version( + major=self.major, + minor=self.minor, + patch=self.patch, + partial=self.partial, + ) + elif level == 'minor': + return Version( + major=self.major, + minor=self.minor, + patch=None if self.partial else 0, + partial=self.partial, + ) + elif level == 'major': + return Version( + major=self.major, + minor=None if self.partial else 0, + patch=None if self.partial else 0, + partial=self.partial, + ) + else: + raise ValueError("Invalid truncation level `%s`." % level) + + @classmethod + def coerce(cls, version_string, partial=False): + """Coerce an arbitrary version string into a semver-compatible one. 
+ + The rule is: + - If not enough components, fill minor/patch with zeroes; unless + partial=True + - If more than 3 dot-separated components, extra components are "build" + data. If some "build" data already appeared, append it to the + extra components + + Examples: + >>> Version.coerce('0.1') + Version(0, 1, 0) + >>> Version.coerce('0.1.2.3') + Version(0, 1, 2, (), ('3',)) + >>> Version.coerce('0.1.2.3+4') + Version(0, 1, 2, (), ('3', '4')) + >>> Version.coerce('0.1+2-3+4_5') + Version(0, 1, 0, (), ('2-3', '4-5')) + """ + base_re = re.compile(r'^\d+(?:\.\d+(?:\.\d+)?)?') + + match = base_re.match(version_string) + if not match: + raise ValueError( + "Version string lacks a numerical component: %r" + % version_string + ) + + version = version_string[:match.end()] + if not partial: + # We need a not-partial version. + while version.count('.') < 2: + version += '.0' + + # Strip leading zeros in components + # Version is of the form nn, nn.pp or nn.pp.qq + version = '.'.join( + # If the part was '0', we end up with an empty string. 
+ part.lstrip('0') or '0' + for part in version.split('.') + ) + + if match.end() == len(version_string): + return Version(version, partial=partial) + + rest = version_string[match.end():] + + # Cleanup the 'rest' + rest = re.sub(r'[^a-zA-Z0-9+.-]', '-', rest) + + if rest[0] == '+': + # A 'build' component + prerelease = '' + build = rest[1:] + elif rest[0] == '.': + # An extra version component, probably 'build' + prerelease = '' + build = rest[1:] + elif rest[0] == '-': + rest = rest[1:] + if '+' in rest: + prerelease, build = rest.split('+', 1) + else: + prerelease, build = rest, '' + elif '+' in rest: + prerelease, build = rest.split('+', 1) + else: + prerelease, build = rest, '' + + build = build.replace('+', '.') + + if prerelease: + version = '%s-%s' % (version, prerelease) + if build: + version = '%s+%s' % (version, build) + + return cls(version, partial=partial) + + @classmethod + def parse(cls, version_string, partial=False, coerce=False): + """Parse a version string into a tuple of components: + (major, minor, patch, prerelease, build). + + Args: + version_string (str), the version string to parse + partial (bool), whether to accept incomplete input + coerce (bool), whether to try to map the passed in string into a + valid Version. 
+ """ + if not version_string: + raise ValueError('Invalid empty version string: %r' % version_string) + + if partial: + version_re = cls.partial_version_re + else: + version_re = cls.version_re + + match = version_re.match(version_string) + if not match: + raise ValueError('Invalid version string: %r' % version_string) + + major, minor, patch, prerelease, build = match.groups() + + if _has_leading_zero(major): + raise ValueError("Invalid leading zero in major: %r" % version_string) + if _has_leading_zero(minor): + raise ValueError("Invalid leading zero in minor: %r" % version_string) + if _has_leading_zero(patch): + raise ValueError("Invalid leading zero in patch: %r" % version_string) + + major = int(major) + minor = cls._coerce(minor, partial) + patch = cls._coerce(patch, partial) + + if prerelease is None: + if partial and (build is None): + # No build info, strip here + return (major, minor, patch, None, None) + else: + prerelease = () + elif prerelease == '': + prerelease = () + else: + prerelease = tuple(prerelease.split('.')) + cls._validate_identifiers(prerelease, allow_leading_zeroes=False) + + if build is None: + if partial: + build = None + else: + build = () + elif build == '': + build = () + else: + build = tuple(build.split('.')) + cls._validate_identifiers(build, allow_leading_zeroes=True) + + return (major, minor, patch, prerelease, build) + + @classmethod + def _validate_identifiers(cls, identifiers, allow_leading_zeroes=False): + for item in identifiers: + if not item: + raise ValueError( + "Invalid empty identifier %r in %r" + % (item, '.'.join(identifiers)) + ) + + if item[0] == '0' and item.isdigit() and item != '0' and not allow_leading_zeroes: + raise ValueError("Invalid leading zero in identifier %r" % item) + + @classmethod + def _validate_kwargs(cls, major, minor, patch, prerelease, build, partial): + if ( + major != int(major) + or minor != cls._coerce(minor, partial) + or patch != cls._coerce(patch, partial) + or prerelease is None and 
not partial + or build is None and not partial + ): + raise ValueError( + "Invalid kwargs to Version(major=%r, minor=%r, patch=%r, " + "prerelease=%r, build=%r, partial=%r" % ( + major, minor, patch, prerelease, build, partial + )) + if prerelease is not None: + cls._validate_identifiers(prerelease, allow_leading_zeroes=False) + if build is not None: + cls._validate_identifiers(build, allow_leading_zeroes=True) + + def __iter__(self): + return iter((self.major, self.minor, self.patch, self.prerelease, self.build)) + + def __str__(self): + version = '%d' % self.major + if self.minor is not None: + version = '%s.%d' % (version, self.minor) + if self.patch is not None: + version = '%s.%d' % (version, self.patch) + + if self.prerelease or (self.partial and self.prerelease == () and self.build is None): + version = '%s-%s' % (version, '.'.join(self.prerelease)) + if self.build or (self.partial and self.build == ()): + version = '%s+%s' % (version, '.'.join(self.build)) + return version + + def __repr__(self): + return '%s(%r%s)' % ( + self.__class__.__name__, + str(self), + ', partial=True' if self.partial else '', + ) + + def __hash__(self): + # We don't include 'partial', since this is strictly equivalent to having + # at least a field being `None`. + return hash((self.major, self.minor, self.patch, self.prerelease, self.build)) + + def _build_precedence_key(self, with_build=False): + """Build a precedence key. + + The "build" component should only be used when sorting an iterable + of versions. 
+ """ + if self.prerelease: + prerelease_key = tuple( + NumericIdentifier(part) if part.isdigit() else AlphaIdentifier(part) + for part in self.prerelease + ) + else: + prerelease_key = ( + MaxIdentifier(), + ) + + if not with_build: + return ( + self.major, + self.minor, + self.patch, + prerelease_key, + ) + + build_key = tuple( + NumericIdentifier(part) if part.isdigit() else AlphaIdentifier(part) + for part in self.build or () + ) + + return ( + self.major, + self.minor, + self.patch, + prerelease_key, + build_key, + ) + + @property + def precedence_key(self): + return self._sort_precedence_key + + def __cmp__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + if self < other: + return -1 + elif self > other: + return 1 + elif self == other: + return 0 + else: + return NotImplemented + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + return ( + self.major == other.major + and self.minor == other.minor + and self.patch == other.patch + and (self.prerelease or ()) == (other.prerelease or ()) + and (self.build or ()) == (other.build or ()) + ) + + def __ne__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + return tuple(self) != tuple(other) + + def __lt__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + return self._cmp_precedence_key < other._cmp_precedence_key + + def __le__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + return self._cmp_precedence_key <= other._cmp_precedence_key + + def __gt__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + return self._cmp_precedence_key > other._cmp_precedence_key + + def __ge__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + return self._cmp_precedence_key >= other._cmp_precedence_key + + +class SpecItem(object): + """A requirement specification.""" + + KIND_ANY = '*' + 
KIND_LT = '<' + KIND_LTE = '<=' + KIND_EQUAL = '==' + KIND_SHORTEQ = '=' + KIND_EMPTY = '' + KIND_GTE = '>=' + KIND_GT = '>' + KIND_NEQ = '!=' + KIND_CARET = '^' + KIND_TILDE = '~' + KIND_COMPATIBLE = '~=' + + # Map a kind alias to its full version + KIND_ALIASES = { + KIND_SHORTEQ: KIND_EQUAL, + KIND_EMPTY: KIND_EQUAL, + } + + re_spec = re.compile(r'^(<|<=||=|==|>=|>|!=|\^|~|~=)(\d.*)$') + + def __init__(self, requirement_string, _warn=True): + if _warn: + warnings.warn( + "The `SpecItem` class will be removed in 3.0.", + DeprecationWarning, + stacklevel=2, + ) + kind, spec = self.parse(requirement_string) + self.kind = kind + self.spec = spec + self._clause = Spec(requirement_string).clause + + @classmethod + def parse(cls, requirement_string): + if not requirement_string: + raise ValueError("Invalid empty requirement specification: %r" % requirement_string) + + # Special case: the 'any' version spec. + if requirement_string == '*': + return (cls.KIND_ANY, '') + + match = cls.re_spec.match(requirement_string) + if not match: + raise ValueError("Invalid requirement specification: %r" % requirement_string) + + kind, version = match.groups() + if kind in cls.KIND_ALIASES: + kind = cls.KIND_ALIASES[kind] + + spec = Version(version, partial=True) + if spec.build is not None and kind not in (cls.KIND_EQUAL, cls.KIND_NEQ): + raise ValueError( + "Invalid requirement specification %r: build numbers have no ordering." 
+ % requirement_string + ) + return (kind, spec) + + @classmethod + def from_matcher(cls, matcher): + if matcher == Always(): + return cls('*', _warn=False) + elif matcher == Never(): + return cls('<0.0.0-', _warn=False) + elif isinstance(matcher, Range): + return cls('%s%s' % (matcher.operator, matcher.target), _warn=False) + + def match(self, version): + return self._clause.match(version) + + def __str__(self): + return '%s%s' % (self.kind, self.spec) + + def __repr__(self): + return '' % (self.kind, self.spec) + + def __eq__(self, other): + if not isinstance(other, SpecItem): + return NotImplemented + return self.kind == other.kind and self.spec == other.spec + + def __hash__(self): + return hash((self.kind, self.spec)) + + +def compare(v1, v2): + return Version(v1).__cmp__(Version(v2)) + + +def match(spec, version): + return Spec(spec).match(Version(version)) + + +def validate(version_string): + """Validates a version string againt the SemVer specification.""" + try: + Version.parse(version_string) + return True + except ValueError: + return False + + +DEFAULT_SYNTAX = 'simple' + + +class BaseSpec(object): + """A specification of compatible versions. + + Usage: + >>> Spec('>=1.0.0', syntax='npm') + + A version matches a specification if it matches any + of the clauses of that specification. 
+ + Internally, a Spec is AnyOf( + AllOf(Matcher, Matcher, Matcher), + AllOf(...), + ) + """ + SYNTAXES = {} + + @classmethod + def register_syntax(cls, subclass): + syntax = subclass.SYNTAX + if syntax is None: + raise ValueError("A Spec needs its SYNTAX field to be set.") + elif syntax in cls.SYNTAXES: + raise ValueError( + "Duplicate syntax for %s: %r, %r" + % (syntax, cls.SYNTAXES[syntax], subclass) + ) + cls.SYNTAXES[syntax] = subclass + return subclass + + def __init__(self, expression): + super(BaseSpec, self).__init__() + self.expression = expression + self.clause = self._parse_to_clause(expression) + + @classmethod + def parse(cls, expression, syntax=DEFAULT_SYNTAX): + """Convert a syntax-specific expression into a BaseSpec instance.""" + return cls.SYNTAXES[syntax](expression) + + @classmethod + def _parse_to_clause(cls, expression): + """Converts an expression to a clause.""" + raise NotImplementedError() + + def filter(self, versions): + """Filter an iterable of versions satisfying the Spec.""" + for version in versions: + if self.match(version): + yield version + + def match(self, version): + """Check whether a Version satisfies the Spec.""" + return self.clause.match(version) + + def select(self, versions): + """Select the best compatible version among an iterable of options.""" + options = list(self.filter(versions)) + if options: + return max(options) + return None + + def __contains__(self, version): + """Whether `version in self`.""" + if isinstance(version, Version): + return self.match(version) + return False + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + + return self.clause == other.clause + + def __hash__(self): + return hash(self.clause) + + def __str__(self): + return self.expression + + def __repr__(self): + return '<%s: %r>' % (self.__class__.__name__, self.expression) + + +class Clause(object): + __slots__ = [] + + def match(self, version): + raise NotImplementedError() + + def 
__and__(self, other): + raise NotImplementedError() + + def __or__(self, other): + raise NotImplementedError() + + def __eq__(self, other): + raise NotImplementedError() + + def prettyprint(self, indent='\t'): + """Pretty-print the clause. + """ + return '\n'.join(self._pretty()).replace('\t', indent) + + def _pretty(self): + """Actual pretty-printing logic. + + Yields: + A list of string. Indentation is performed with \t. + """ + yield repr(self) + + def __ne__(self, other): + return not self == other + + def simplify(self): + return self + + +class AnyOf(Clause): + __slots__ = ['clauses'] + + def __init__(self, *clauses): + super(AnyOf, self).__init__() + self.clauses = frozenset(clauses) + + def match(self, version): + return any(c.match(version) for c in self.clauses) + + def simplify(self): + subclauses = set() + for clause in self.clauses: + simplified = clause.simplify() + if isinstance(simplified, AnyOf): + subclauses |= simplified.clauses + elif simplified == Never(): + continue + else: + subclauses.add(simplified) + if len(subclauses) == 1: + return subclauses.pop() + return AnyOf(*subclauses) + + def __hash__(self): + return hash((AnyOf, self.clauses)) + + def __iter__(self): + return iter(self.clauses) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.clauses == other.clauses + + def __and__(self, other): + if isinstance(other, AllOf): + return other & self + elif isinstance(other, Matcher) or isinstance(other, AnyOf): + return AllOf(self, other) + else: + return NotImplemented + + def __or__(self, other): + if isinstance(other, AnyOf): + clauses = list(self.clauses | other.clauses) + elif isinstance(other, Matcher) or isinstance(other, AllOf): + clauses = list(self.clauses | set([other])) + else: + return NotImplemented + return AnyOf(*clauses) + + def __repr__(self): + return 'AnyOf(%s)' % ', '.join(sorted(repr(c) for c in self.clauses)) + + def _pretty(self): + yield 'AnyOF(' + for clause in self.clauses: + lines = 
list(clause._pretty()) + for line in lines[:-1]: + yield '\t' + line + yield '\t' + lines[-1] + ',' + yield ')' + + +class AllOf(Clause): + __slots__ = ['clauses'] + + def __init__(self, *clauses): + super(AllOf, self).__init__() + self.clauses = frozenset(clauses) + + def match(self, version): + return all(clause.match(version) for clause in self.clauses) + + def simplify(self): + subclauses = set() + for clause in self.clauses: + simplified = clause.simplify() + if isinstance(simplified, AllOf): + subclauses |= simplified.clauses + elif simplified == Always(): + continue + else: + subclauses.add(simplified) + if len(subclauses) == 1: + return subclauses.pop() + return AllOf(*subclauses) + + def __hash__(self): + return hash((AllOf, self.clauses)) + + def __iter__(self): + return iter(self.clauses) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.clauses == other.clauses + + def __and__(self, other): + if isinstance(other, Matcher) or isinstance(other, AnyOf): + clauses = list(self.clauses | set([other])) + elif isinstance(other, AllOf): + clauses = list(self.clauses | other.clauses) + else: + return NotImplemented + return AllOf(*clauses) + + def __or__(self, other): + if isinstance(other, AnyOf): + return other | self + elif isinstance(other, Matcher): + return AnyOf(self, AllOf(other)) + elif isinstance(other, AllOf): + return AnyOf(self, other) + else: + return NotImplemented + + def __repr__(self): + return 'AllOf(%s)' % ', '.join(sorted(repr(c) for c in self.clauses)) + + def _pretty(self): + yield 'AllOF(' + for clause in self.clauses: + lines = list(clause._pretty()) + for line in lines[:-1]: + yield '\t' + line + yield '\t' + lines[-1] + ',' + yield ')' + + +class Matcher(Clause): + __slots__ = [] + + def __and__(self, other): + if isinstance(other, AllOf): + return other & self + elif isinstance(other, Matcher) or isinstance(other, AnyOf): + return AllOf(self, other) + else: + return NotImplemented + + def __or__(self, 
other): + if isinstance(other, AnyOf): + return other | self + elif isinstance(other, Matcher) or isinstance(other, AllOf): + return AnyOf(self, other) + else: + return NotImplemented + + +class Never(Matcher): + __slots__ = [] + + def match(self, version): + return False + + def __hash__(self): + return hash((Never,)) + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __and__(self, other): + return self + + def __or__(self, other): + return other + + def __repr__(self): + return 'Never()' + + +class Always(Matcher): + __slots__ = [] + + def match(self, version): + return True + + def __hash__(self): + return hash((Always,)) + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __and__(self, other): + return other + + def __or__(self, other): + return self + + def __repr__(self): + return 'Always()' + + +class Range(Matcher): + OP_EQ = '==' + OP_GT = '>' + OP_GTE = '>=' + OP_LT = '<' + OP_LTE = '<=' + OP_NEQ = '!=' + + # <1.2.3 matches 1.2.3-a1 + PRERELEASE_ALWAYS = 'always' + # <1.2.3 does not match 1.2.3-a1 + PRERELEASE_NATURAL = 'natural' + # 1.2.3-a1 is only considered if target == 1.2.3-xxx + PRERELEASE_SAMEPATCH = 'same-patch' + + # 1.2.3 matches 1.2.3+* + BUILD_IMPLICIT = 'implicit' + # 1.2.3 matches only 1.2.3, not 1.2.3+4 + BUILD_STRICT = 'strict' + + __slots__ = ['operator', 'target', 'prerelease_policy', 'build_policy'] + + def __init__(self, operator, target, prerelease_policy=PRERELEASE_NATURAL, build_policy=BUILD_IMPLICIT): + super(Range, self).__init__() + if target.build and operator not in (self.OP_EQ, self.OP_NEQ): + raise ValueError( + "Invalid range %s%s: build numbers have no ordering." 
+ % (operator, target)) + self.operator = operator + self.target = target + self.prerelease_policy = prerelease_policy + self.build_policy = self.BUILD_STRICT if target.build else build_policy + + def match(self, version): + if self.build_policy != self.BUILD_STRICT: + version = version.truncate('prerelease') + + if version.prerelease: + same_patch = self.target.truncate() == version.truncate() + + if self.prerelease_policy == self.PRERELEASE_SAMEPATCH and not same_patch: + return False + + if self.operator == self.OP_EQ: + if self.build_policy == self.BUILD_STRICT: + return ( + self.target.truncate('prerelease') == version.truncate('prerelease') + and version.build == self.target.build + ) + return version == self.target + elif self.operator == self.OP_GT: + return version > self.target + elif self.operator == self.OP_GTE: + return version >= self.target + elif self.operator == self.OP_LT: + if ( + version.prerelease + and self.prerelease_policy == self.PRERELEASE_NATURAL + and version.truncate() == self.target.truncate() + and not self.target.prerelease + ): + return False + return version < self.target + elif self.operator == self.OP_LTE: + return version <= self.target + else: + assert self.operator == self.OP_NEQ + if self.build_policy == self.BUILD_STRICT: + return not ( + self.target.truncate('prerelease') == version.truncate('prerelease') + and version.build == self.target.build + ) + + if ( + version.prerelease + and self.prerelease_policy == self.PRERELEASE_NATURAL + and version.truncate() == self.target.truncate() + and not self.target.prerelease + ): + return False + return version != self.target + + def __hash__(self): + return hash((Range, self.operator, self.target, self.prerelease_policy)) + + def __eq__(self, other): + return ( + isinstance(other, self.__class__) + and self.operator == other.operator + and self.target == other.target + and self.prerelease_policy == other.prerelease_policy + ) + + def __str__(self): + return '%s%s' % (self.operator, 
self.target) + + def __repr__(self): + policy_part = ( + '' if self.prerelease_policy == self.PRERELEASE_NATURAL + else ', prerelease_policy=%r' % self.prerelease_policy + ) + ( + '' if self.build_policy == self.BUILD_IMPLICIT + else ', build_policy=%r' % self.build_policy + ) + return 'Range(%r, %r%s)' % ( + self.operator, + self.target, + policy_part, + ) + + +@BaseSpec.register_syntax +class SimpleSpec(BaseSpec): + + SYNTAX = 'simple' + + @classmethod + def _parse_to_clause(cls, expression): + return cls.Parser.parse(expression) + + class Parser: + NUMBER = r'\*|0|[1-9][0-9]*' + NAIVE_SPEC = re.compile(r"""^ + (?P<op><|<=||=|==|>=|>|!=|\^|~|~=) + (?P<major>{nb})(?:\.(?P<minor>{nb})(?:\.(?P<patch>{nb}))?)? + (?:-(?P<prerel>[a-z0-9A-Z.-]*))? + (?:\+(?P<build>[a-z0-9A-Z.-]*))? + $ + """.format(nb=NUMBER), + re.VERBOSE, + ) + + @classmethod + def parse(cls, expression): + blocks = expression.split(',') + clause = Always() + for block in blocks: + if not cls.NAIVE_SPEC.match(block): + raise ValueError("Invalid simple block %r" % block) + clause &= cls.parse_block(block) + + return clause + + PREFIX_CARET = '^' + PREFIX_TILDE = '~' + PREFIX_COMPATIBLE = '~=' + PREFIX_EQ = '==' + PREFIX_NEQ = '!=' + PREFIX_GT = '>' + PREFIX_GTE = '>=' + PREFIX_LT = '<' + PREFIX_LTE = '<=' + + PREFIX_ALIASES = { + '=': PREFIX_EQ, + '': PREFIX_EQ, + } + + EMPTY_VALUES = ['*', 'x', 'X', None] + + @classmethod + def parse_block(cls, expr): + if not cls.NAIVE_SPEC.match(expr): + raise ValueError("Invalid simple spec component: %r" % expr) + prefix, major_t, minor_t, patch_t, prerel, build = cls.NAIVE_SPEC.match(expr).groups() + prefix = cls.PREFIX_ALIASES.get(prefix, prefix) + + major = None if major_t in cls.EMPTY_VALUES else int(major_t) + minor = None if minor_t in cls.EMPTY_VALUES else int(minor_t) + patch = None if patch_t in cls.EMPTY_VALUES else int(patch_t) + + if major is None: # '*' + target = Version(major=0, minor=0, patch=0) + if prefix not in (cls.PREFIX_EQ, cls.PREFIX_GTE): + raise ValueError("Invalid simple spec: 
%r" % expr) + elif minor is None: + target = Version(major=major, minor=0, patch=0) + elif patch is None: + target = Version(major=major, minor=minor, patch=0) + else: + target = Version( + major=major, + minor=minor, + patch=patch, + prerelease=prerel.split('.') if prerel else (), + build=build.split('.') if build else (), + ) + + if (major is None or minor is None or patch is None) and (prerel or build): + raise ValueError("Invalid simple spec: %r" % expr) + + if build is not None and prefix not in (cls.PREFIX_EQ, cls.PREFIX_NEQ): + raise ValueError("Invalid simple spec: %r" % expr) + + if prefix == cls.PREFIX_CARET: + # Accept anything with the same most-significant digit + if target.major: + high = target.next_major() + elif target.minor: + high = target.next_minor() + else: + high = target.next_patch() + return Range(Range.OP_GTE, target) & Range(Range.OP_LT, high) + + elif prefix == cls.PREFIX_TILDE: + assert major is not None + # Accept any higher patch in the same minor + # Might go higher if the initial version was a partial + if minor is None: + high = target.next_major() + else: + high = target.next_minor() + return Range(Range.OP_GTE, target) & Range(Range.OP_LT, high) + + elif prefix == cls.PREFIX_COMPATIBLE: + assert major is not None + # ~1 is 1.0.0..2.0.0; ~=2.2 is 2.2.0..3.0.0; ~=1.4.5 is 1.4.5..1.5.0 + if minor is None or patch is None: + # We got a partial version + high = target.next_major() + else: + high = target.next_minor() + return Range(Range.OP_GTE, target) & Range(Range.OP_LT, high) + + elif prefix == cls.PREFIX_EQ: + if major is None: + return Range(Range.OP_GTE, target) + elif minor is None: + return Range(Range.OP_GTE, target) & Range(Range.OP_LT, target.next_major()) + elif patch is None: + return Range(Range.OP_GTE, target) & Range(Range.OP_LT, target.next_minor()) + elif build == '': + return Range(Range.OP_EQ, target, build_policy=Range.BUILD_STRICT) + else: + return Range(Range.OP_EQ, target) + + elif prefix == cls.PREFIX_NEQ: + 
assert major is not None + if minor is None: + # !=1.x => <1.0.0 || >=2.0.0 + return Range(Range.OP_LT, target) | Range(Range.OP_GTE, target.next_major()) + elif patch is None: + # !=1.2.x => <1.2.0 || >=1.3.0 + return Range(Range.OP_LT, target) | Range(Range.OP_GTE, target.next_minor()) + elif prerel == '': + # !=1.2.3- + return Range(Range.OP_NEQ, target, prerelease_policy=Range.PRERELEASE_ALWAYS) + elif build == '': + # !=1.2.3+ or !=1.2.3-a2+ + return Range(Range.OP_NEQ, target, build_policy=Range.BUILD_STRICT) + else: + return Range(Range.OP_NEQ, target) + + elif prefix == cls.PREFIX_GT: + assert major is not None + if minor is None: + # >1.x => >=2.0 + return Range(Range.OP_GTE, target.next_major()) + elif patch is None: + return Range(Range.OP_GTE, target.next_minor()) + else: + return Range(Range.OP_GT, target) + + elif prefix == cls.PREFIX_GTE: + return Range(Range.OP_GTE, target) + + elif prefix == cls.PREFIX_LT: + assert major is not None + if prerel == '': + # <1.2.3- + return Range(Range.OP_LT, target, prerelease_policy=Range.PRERELEASE_ALWAYS) + return Range(Range.OP_LT, target) + + else: + assert prefix == cls.PREFIX_LTE + assert major is not None + if minor is None: + # <=1.x => <2.0 + return Range(Range.OP_LT, target.next_major()) + elif patch is None: + return Range(Range.OP_LT, target.next_minor()) + else: + return Range(Range.OP_LTE, target) + + +class LegacySpec(SimpleSpec): + def __init__(self, *expressions): + warnings.warn( + "The Spec() class will be removed in 3.1; use SimpleSpec() instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + if len(expressions) > 1: + warnings.warn( + "Passing 2+ arguments to SimpleSpec will be removed in 3.0; concatenate them with ',' instead.", + DeprecationWarning, + stacklevel=2, + ) + expression = ','.join(expressions) + super(LegacySpec, self).__init__(expression) + + @property + def specs(self): + return list(self) + + def __iter__(self): + warnings.warn( + "Iterating over the components of a 
SimpleSpec object will be removed in 3.0.", + DeprecationWarning, + stacklevel=2, + ) + try: + clauses = list(self.clause) + except TypeError: # Not an iterable + clauses = [self.clause] + for clause in clauses: + yield SpecItem.from_matcher(clause) + + +Spec = LegacySpec + + +@BaseSpec.register_syntax +class NpmSpec(BaseSpec): + SYNTAX = 'npm' + + @classmethod + def _parse_to_clause(cls, expression): + return cls.Parser.parse(expression) + + class Parser: + JOINER = '||' + HYPHEN = ' - ' + + NUMBER = r'x|X|\*|0|[1-9][0-9]*' + PART = r'[a-zA-Z0-9.-]*' + NPM_SPEC_BLOCK = re.compile(r""" + ^(?:v)? # Strip optional initial v + (?P<op><|<=|>=|>|=|\^|~|) # Operator, can be empty + (?P<major>{nb})(?:\.(?P<minor>{nb})(?:\.(?P<patch>{nb}))?)? + (?:-(?P<prerel>{part}))? # Optional pre-release + (?:\+(?P<build>{part}))? # Optional build + $""".format(nb=NUMBER, part=PART), + re.VERBOSE, + ) + + @classmethod + def range(cls, operator, target): + return Range(operator, target, prerelease_policy=Range.PRERELEASE_SAMEPATCH) + + @classmethod + def parse(cls, expression): + result = Never() + groups = expression.split(cls.JOINER) + for group in groups: + group = group.strip() + if not group: + group = '>=0.0.0' + + subclauses = [] + if cls.HYPHEN in group: + low, high = group.split(cls.HYPHEN, 2) + subclauses = cls.parse_simple('>=' + low) + cls.parse_simple('<=' + high) + + else: + blocks = group.split(' ') + for block in blocks: + if not cls.NPM_SPEC_BLOCK.match(block): + raise ValueError("Invalid NPM block in %r: %r" % (expression, block)) + + subclauses.extend(cls.parse_simple(block)) + + prerelease_clauses = [] + non_prerel_clauses = [] + for clause in subclauses: + if clause.target.prerelease: + if clause.operator in (Range.OP_GT, Range.OP_GTE): + prerelease_clauses.append(Range( + operator=Range.OP_LT, + target=Version( + major=clause.target.major, + minor=clause.target.minor, + patch=clause.target.patch + 1, + ), + prerelease_policy=Range.PRERELEASE_ALWAYS, + )) + elif clause.operator in (Range.OP_LT, 
Range.OP_LTE): + prerelease_clauses.append(Range( + operator=Range.OP_GTE, + target=Version( + major=clause.target.major, + minor=clause.target.minor, + patch=0, + prerelease=(), + ), + prerelease_policy=Range.PRERELEASE_ALWAYS, + )) + prerelease_clauses.append(clause) + non_prerel_clauses.append(cls.range( + operator=clause.operator, + target=clause.target.truncate(), + )) + else: + non_prerel_clauses.append(clause) + if prerelease_clauses: + result |= AllOf(*prerelease_clauses) + result |= AllOf(*non_prerel_clauses) + + return result + + PREFIX_CARET = '^' + PREFIX_TILDE = '~' + PREFIX_EQ = '=' + PREFIX_GT = '>' + PREFIX_GTE = '>=' + PREFIX_LT = '<' + PREFIX_LTE = '<=' + + PREFIX_ALIASES = { + '': PREFIX_EQ, + } + + PREFIX_TO_OPERATOR = { + PREFIX_EQ: Range.OP_EQ, + PREFIX_LT: Range.OP_LT, + PREFIX_LTE: Range.OP_LTE, + PREFIX_GTE: Range.OP_GTE, + PREFIX_GT: Range.OP_GT, + } + + EMPTY_VALUES = ['*', 'x', 'X', None] + + @classmethod + def parse_simple(cls, simple): + match = cls.NPM_SPEC_BLOCK.match(simple) + + prefix, major_t, minor_t, patch_t, prerel, build = match.groups() + + prefix = cls.PREFIX_ALIASES.get(prefix, prefix) + major = None if major_t in cls.EMPTY_VALUES else int(major_t) + minor = None if minor_t in cls.EMPTY_VALUES else int(minor_t) + patch = None if patch_t in cls.EMPTY_VALUES else int(patch_t) + + if build is not None and prefix not in [cls.PREFIX_EQ]: + # Ignore the 'build' part when not comparing to a specific part. 
+ build = None + + if major is None: # '*', 'x', 'X' + target = Version(major=0, minor=0, patch=0) + if prefix not in [cls.PREFIX_EQ, cls.PREFIX_GTE]: + raise ValueError("Invalid expression %r" % simple) + prefix = cls.PREFIX_GTE + elif minor is None: + target = Version(major=major, minor=0, patch=0) + elif patch is None: + target = Version(major=major, minor=minor, patch=0) + else: + target = Version( + major=major, + minor=minor, + patch=patch, + prerelease=prerel.split('.') if prerel else (), + build=build.split('.') if build else (), + ) + + if (major is None or minor is None or patch is None) and (prerel or build): + raise ValueError("Invalid NPM spec: %r" % simple) + + if prefix == cls.PREFIX_CARET: + if target.major: # ^1.2.4 => >=1.2.4 <2.0.0 ; ^1.x => >=1.0.0 <2.0.0 + high = target.truncate().next_major() + elif target.minor: # ^0.1.2 => >=0.1.2 <0.2.0 + high = target.truncate().next_minor() + elif minor is None: # ^0.x => >=0.0.0 <1.0.0 + high = target.truncate().next_major() + elif patch is None: # ^0.2.x => >=0.2.0 <0.3.0 + high = target.truncate().next_minor() + else: # ^0.0.1 => >=0.0.1 <0.0.2 + high = target.truncate().next_patch() + return [cls.range(Range.OP_GTE, target), cls.range(Range.OP_LT, high)] + + elif prefix == cls.PREFIX_TILDE: + assert major is not None + if minor is None: # ~1.x => >=1.0.0 <2.0.0 + high = target.next_major() + else: # ~1.2.x => >=1.2.0 <1.3.0; ~1.2.3 => >=1.2.3 <1.3.0 + high = target.next_minor() + return [cls.range(Range.OP_GTE, target), cls.range(Range.OP_LT, high)] + + elif prefix == cls.PREFIX_EQ: + if major is None: + return [cls.range(Range.OP_GTE, target)] + elif minor is None: + return [cls.range(Range.OP_GTE, target), cls.range(Range.OP_LT, target.next_major())] + elif patch is None: + return [cls.range(Range.OP_GTE, target), cls.range(Range.OP_LT, target.next_minor())] + else: + return [cls.range(Range.OP_EQ, target)] + + elif prefix == cls.PREFIX_GT: + assert major is not None + if minor is None: # >1.x + 
return [cls.range(Range.OP_GTE, target.next_major())] + elif patch is None: # >1.2.x => >=1.3.0 + return [cls.range(Range.OP_GTE, target.next_minor())] + else: + return [cls.range(Range.OP_GT, target)] + + elif prefix == cls.PREFIX_GTE: + return [cls.range(Range.OP_GTE, target)] + + elif prefix == cls.PREFIX_LT: + assert major is not None + return [cls.range(Range.OP_LT, target)] + + else: + assert prefix == cls.PREFIX_LTE + assert major is not None + if minor is None: # <=1.x => <2.0.0 + return [cls.range(Range.OP_LT, target.next_major())] + elif patch is None: # <=1.2.x => <1.3.0 + return [cls.range(Range.OP_LT, target.next_minor())] + else: + return [cls.range(Range.OP_LTE, target)] diff --git a/evalkit_tf446/lib/python3.10/site-packages/semantic_version/django_fields.py b/evalkit_tf446/lib/python3.10/site-packages/semantic_version/django_fields.py new file mode 100644 index 0000000000000000000000000000000000000000..e5bd7eb2340cfedad7ac387869ce20bdb053f112 --- /dev/null +++ b/evalkit_tf446/lib/python3.10/site-packages/semantic_version/django_fields.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- +# Copyright (c) The python-semanticversion project +# This code is distributed under the two-clause BSD License. + +import warnings + +import django +from django.db import models + +if django.VERSION >= (3, 0): + # See https://docs.djangoproject.com/en/dev/releases/3.0/#features-deprecated-in-3-0 + from django.utils.translation import gettext_lazy as _ +else: + from django.utils.translation import ugettext_lazy as _ + +from . import base + + +class SemVerField(models.CharField): + + def __init__(self, *args, **kwargs): + kwargs.setdefault('max_length', 200) + super(SemVerField, self).__init__(*args, **kwargs) + + def from_db_value(self, value, expression, connection, *args): + """Convert from the database format. 
+ + This should be the inverse of self.get_prep_value() + """ + return self.to_python(value) + + def get_prep_value(self, obj): + return None if obj is None else str(obj) + + def get_db_prep_value(self, value, connection, prepared=False): + if not prepared: + value = self.get_prep_value(value) + return value + + def value_to_string(self, obj): + value = self.to_python(self.value_from_object(obj)) + return str(value) + + def run_validators(self, value): + return super(SemVerField, self).run_validators(str(value)) + + +class VersionField(SemVerField): + default_error_messages = { + 'invalid': _("Enter a valid version number in X.Y.Z format."), + } + description = _("Version") + + def __init__(self, *args, **kwargs): + self.partial = kwargs.pop('partial', False) + if self.partial: + warnings.warn( + "Use of `partial=True` will be removed in 3.0.", + DeprecationWarning, + stacklevel=2, + ) + self.coerce = kwargs.pop('coerce', False) + super(VersionField, self).__init__(*args, **kwargs) + + def deconstruct(self): + """Handle django.db.migrations.""" + name, path, args, kwargs = super(VersionField, self).deconstruct() + kwargs['partial'] = self.partial + kwargs['coerce'] = self.coerce + return name, path, args, kwargs + + def to_python(self, value): + """Converts any value to a base.Version field.""" + if value is None or value == '': + return value + if isinstance(value, base.Version): + return value + if self.coerce: + return base.Version.coerce(value, partial=self.partial) + else: + return base.Version(value, partial=self.partial) + + +class SpecField(SemVerField): + default_error_messages = { + 'invalid': _("Enter a valid version number spec list in ==X.Y.Z,>=A.B.C format."), + } + description = _("Version specification list") + + def __init__(self, *args, **kwargs): + self.syntax = kwargs.pop('syntax', base.DEFAULT_SYNTAX) + super(SpecField, self).__init__(*args, **kwargs) + + def deconstruct(self): + """Handle django.db.migrations.""" + name, path, args, kwargs = 
super(SpecField, self).deconstruct() + if self.syntax != base.DEFAULT_SYNTAX: + kwargs['syntax'] = self.syntax + return name, path, args, kwargs + + def to_python(self, value): + """Converts any value to a base.Spec field.""" + if value is None or value == '': + return value + if isinstance(value, base.BaseSpec): + return value + return base.BaseSpec.parse(value, syntax=self.syntax)