ZAIDX11 committed on
Commit
fac481d
·
verified ·
1 Parent(s): 25b3648

Add files using upload-large-folder tool

Browse files
Files changed (21) hide show
  1. .gitattributes +1 -0
  2. archive/.venv/Lib/site-packages/torch/include/torch/csrc/jit/tensorexpr/operators/quantization.h +156 -0
  3. archive/.venv/Lib/site-packages/torch/include/torch/csrc/jit/tensorexpr/operators/reduction.h +32 -0
  4. archive/.venv/Lib/site-packages/torch/include/torch/csrc/jit/tensorexpr/operators/softmax.h +13 -0
  5. archive/.venv/Lib/site-packages/torch/include/torch/csrc/jit/testing/catch_utils.hpp +10 -0
  6. archive/.venv/Lib/site-packages/wheel/cli/__init__.py +155 -0
  7. archive/.venv/Lib/site-packages/wheel/cli/convert.py +332 -0
  8. archive/.venv/Lib/site-packages/wheel/cli/pack.py +85 -0
  9. archive/.venv/Lib/site-packages/wheel/cli/tags.py +139 -0
  10. archive/.venv/Lib/site-packages/wheel/cli/unpack.py +30 -0
  11. archive/.venv/Lib/site-packages/wheel/vendored/__init__.py +0 -0
  12. archive/.venv/Lib/site-packages/wheel/vendored/packaging/LICENSE +3 -0
  13. archive/.venv/Lib/site-packages/wheel/vendored/packaging/LICENSE.APACHE +177 -0
  14. archive/.venv/Lib/site-packages/wheel/vendored/packaging/LICENSE.BSD +23 -0
  15. archive/.venv/Lib/site-packages/wheel/vendored/packaging/markers.py +253 -0
  16. archive/.venv/Lib/site-packages/wheel/vendored/packaging/requirements.py +90 -0
  17. archive/.venv/Lib/site-packages/wheel/vendored/packaging/specifiers.py +1011 -0
  18. archive/.venv/Lib/site-packages/wheel/vendored/packaging/tags.py +571 -0
  19. archive/.venv/Lib/site-packages/wheel/vendored/packaging/utils.py +172 -0
  20. archive/.venv/Lib/site-packages/wheel/vendored/vendor.txt +1 -0
  21. backend/core/ag4masses/outputs/solved/imo-2004p1.jpg +3 -0
.gitattributes CHANGED
@@ -866,3 +866,4 @@ archive/.venv/Lib/site-packages/win32comext/shell/shell.pyd filter=lfs diff=lfs
866
  archive/.venv/Scripts/httpx.exe filter=lfs diff=lfs merge=lfs -text
867
  archive/.venv/Scripts/ipython.exe filter=lfs diff=lfs merge=lfs -text
868
  archive/.venv/Scripts/ipython3.exe filter=lfs diff=lfs merge=lfs -text
 
 
866
  archive/.venv/Scripts/httpx.exe filter=lfs diff=lfs merge=lfs -text
867
  archive/.venv/Scripts/ipython.exe filter=lfs diff=lfs merge=lfs -text
868
  archive/.venv/Scripts/ipython3.exe filter=lfs diff=lfs merge=lfs -text
869
+ backend/core/ag4masses/outputs/solved/imo-2004p1.jpg filter=lfs diff=lfs merge=lfs -text
archive/.venv/Lib/site-packages/torch/include/torch/csrc/jit/tensorexpr/operators/quantization.h ADDED
@@ -0,0 +1,156 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #pragma once
2
+
3
+ #include <torch/csrc/jit/tensorexpr/kernel.h>
4
+
5
+ namespace torch::jit::tensorexpr {
6
+
7
+ TORCH_API ExprHandle quantizePerTensorQParamFromArg(ArgValue arg);
8
+
9
+ TORCH_API double immQScale(const BufHandle& qx);
10
+
11
+ TORCH_API int64_t immQZero(const BufHandle& qx);
12
+
13
+ TORCH_API ScalarType immQDType(const BufHandle& qx);
14
+
15
+ TORCH_API bool isQuantized(const BufHandle& qx);
16
+
17
+ TORCH_API Tensor computeQuantizePerTensor(
18
+ const std::vector<ArgValue>& inputs,
19
+ const std::vector<ExprHandle>& outputShape,
20
+ const std::vector<ExprHandle>& outputStrides,
21
+ const std::optional<ScalarType>& outputType,
22
+ at::Device device);
23
+
24
+ TORCH_API Tensor computeQuantizePerTensorExternalCall(
25
+ const std::vector<ArgValue>& inputs,
26
+ const std::vector<ExprHandle>& outputShape,
27
+ const std::vector<ExprHandle>& outputStrides,
28
+ const std::optional<ScalarType>& outputType,
29
+ at::Device device);
30
+
31
+ TORCH_API Tensor computeQuantizedConv1d(
32
+ const std::vector<ArgValue>& inputs,
33
+ const std::vector<ExprHandle>& outputShape,
34
+ const std::vector<ExprHandle>& outputStrides,
35
+ const std::optional<ScalarType>& outputType,
36
+ at::Device device);
37
+
38
+ TORCH_API Tensor computeQuantizedConv2dPrepack(
39
+ const std::vector<ArgValue>& inputs,
40
+ const std::vector<ExprHandle>& outputShape,
41
+ const std::vector<ExprHandle>& outputStrides,
42
+ const std::optional<ScalarType>& outputType,
43
+ at::Device device);
44
+
45
+ TORCH_API Tensor computeQuantizedConv1d(
46
+ const std::vector<ArgValue>& inputs,
47
+ const std::vector<ExprHandle>& outputShape,
48
+ const std::vector<ExprHandle>& outputStrides,
49
+ const std::optional<ScalarType>& outputType,
50
+ at::Device device);
51
+
52
+ TORCH_API Tensor computeQuantizedConv2d(
53
+ const std::vector<ArgValue>& inputs,
54
+ const std::vector<ExprHandle>& outputShape,
55
+ const std::vector<ExprHandle>& outputStrides,
56
+ const std::optional<ScalarType>& outputType,
57
+ at::Device device);
58
+
59
+ TORCH_API Tensor computeQuantizedConv2dRelu(
60
+ const std::vector<ArgValue>& inputs,
61
+ const std::vector<ExprHandle>& outputShape,
62
+ const std::vector<ExprHandle>& outputStrides,
63
+ const std::optional<ScalarType>& outputType,
64
+ at::Device device);
65
+
66
+ TORCH_API Tensor computeQuantizedLinear(
67
+ const std::vector<ArgValue>& inputs,
68
+ const std::vector<ExprHandle>& outputShape,
69
+ const std::vector<ExprHandle>& outputStrides,
70
+ const std::optional<ScalarType>& outputType,
71
+ at::Device device);
72
+
73
+ TORCH_API Tensor computeQuantizedLinearRelu(
74
+ const std::vector<ArgValue>& inputs,
75
+ const std::vector<ExprHandle>& outputShape,
76
+ const std::vector<ExprHandle>& outputStrides,
77
+ const std::optional<ScalarType>& outputType,
78
+ at::Device device);
79
+
80
+ TORCH_API Tensor computeQuantizedAdd(
81
+ const std::vector<ArgValue>& inputs,
82
+ const std::vector<ExprHandle>& outputShape,
83
+ const std::vector<ExprHandle>& outputStrides,
84
+ const std::optional<ScalarType>& outputType,
85
+ at::Device device);
86
+
87
+ Tensor computeQuantizedAddExternalCall(
88
+ const std::vector<ArgValue>& inputs,
89
+ const std::vector<ExprHandle>& outputShape,
90
+ const std::vector<ExprHandle>& outputStrides,
91
+ const std::optional<ScalarType>& outputType,
92
+ at::Device device);
93
+
94
+ TORCH_API Tensor computeQuantizedMul(
95
+ const std::vector<ArgValue>& inputs,
96
+ const std::vector<ExprHandle>& outputShape,
97
+ const std::vector<ExprHandle>& outputStrides,
98
+ const std::optional<ScalarType>& outputType,
99
+ at::Device device);
100
+
101
+ TORCH_API Tensor computeQuantizedMulScalar(
102
+ const std::vector<ArgValue>& inputs,
103
+ const std::vector<ExprHandle>& outputShape,
104
+ const std::vector<ExprHandle>& outputStrides,
105
+ const std::optional<ScalarType>& outputType,
106
+ at::Device device);
107
+
108
+ TORCH_API Tensor computeQuantizedCat(
109
+ const std::vector<ArgValue>& inputs,
110
+ const std::vector<ExprHandle>& outputShape,
111
+ const std::vector<ExprHandle>& outputStrides,
112
+ const std::optional<ScalarType>& outputType,
113
+ at::Device device);
114
+
115
+ TORCH_API Tensor computeQuantizedRelu(
116
+ const std::vector<ArgValue>& inputs,
117
+ const std::vector<ExprHandle>& outputShape,
118
+ const std::vector<ExprHandle>& outputStrides,
119
+ const std::optional<ScalarType>& outputType,
120
+ at::Device device);
121
+
122
+ TORCH_API Tensor computeDequantize(
123
+ const std::vector<ArgValue>& inputs,
124
+ const std::vector<ExprHandle>& outputShape,
125
+ const std::vector<ExprHandle>& outputStrides,
126
+ const std::optional<ScalarType>& outputType,
127
+ at::Device device);
128
+
129
+ TORCH_API Tensor computeDequantizeExternalCall(
130
+ const std::vector<ArgValue>& inputs,
131
+ const std::vector<ExprHandle>& outputShape,
132
+ const std::vector<ExprHandle>& outputStrides,
133
+ const std::optional<ScalarType>& outputType,
134
+ at::Device device);
135
+
136
+ TORCH_API Tensor computeUpsampleNearest2d(
137
+ const std::vector<ArgValue>& inputs,
138
+ const std::vector<ExprHandle>& outputShape,
139
+ const std::vector<ExprHandle>& outputStrides,
140
+ const std::optional<ScalarType>& outputType,
141
+ at::Device device);
142
+
143
+ TORCH_API Tensor computeUpsampleNearest2dExternalCall(
144
+ const std::vector<ArgValue>& inputs,
145
+ const std::vector<ExprHandle>& outputShape,
146
+ const std::vector<ExprHandle>& outputStrides,
147
+ const std::optional<ScalarType>& outputType,
148
+ at::Device device);
149
+
150
+ TORCH_API Tensor computeQuantizedSigmoidExternalCall(
151
+ const std::vector<ArgValue>& inputs,
152
+ const std::vector<ExprHandle>& outputShape,
153
+ const std::vector<ExprHandle>& outputStrides,
154
+ const std::optional<ScalarType>& outputType,
155
+ at::Device);
156
+ } // namespace torch::jit::tensorexpr
archive/.venv/Lib/site-packages/torch/include/torch/csrc/jit/tensorexpr/operators/reduction.h ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #pragma once
2
+
3
+ #include <torch/csrc/jit/tensorexpr/kernel.h>
4
+
5
+ namespace torch::jit::tensorexpr {
6
+
7
+ TORCH_API Tensor computeSum(
8
+ const std::vector<ArgValue>& inputs,
9
+ const std::vector<ExprHandle>& outputShape,
10
+ const std::vector<ExprHandle>& outputStrides,
11
+ const std::optional<ScalarType>& outputType,
12
+ at::Device device);
13
+ TORCH_API Tensor computeMean(
14
+ const std::vector<ArgValue>& inputs,
15
+ const std::vector<ExprHandle>& outputShape,
16
+ const std::vector<ExprHandle>& outputStrides,
17
+ const std::optional<ScalarType>& outputType,
18
+ at::Device device);
19
+ TORCH_API Tensor computeAdaptiveAvgPool2d(
20
+ const std::vector<ArgValue>& inputs,
21
+ const std::vector<ExprHandle>& outputShape,
22
+ const std::vector<ExprHandle>& outputStrides,
23
+ const std::optional<ScalarType>& outputType,
24
+ at::Device device);
25
+ Tensor computeMax(
26
+ const std::vector<ArgValue>& inputs,
27
+ const std::vector<ExprHandle>& outputShape,
28
+ const std::vector<ExprHandle>& outputStrides,
29
+ const std::optional<ScalarType>& outputType,
30
+ at::Device device);
31
+
32
+ } // namespace torch::jit::tensorexpr
archive/.venv/Lib/site-packages/torch/include/torch/csrc/jit/tensorexpr/operators/softmax.h ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #pragma once
2
+
3
+ #include <torch/csrc/jit/tensorexpr/kernel.h>
4
+
5
+ namespace torch::jit::tensorexpr {
6
+
7
+ Tensor computeSoftmax(
8
+ const std::vector<ArgValue>& inputs,
9
+ const std::vector<ExprHandle>& outputShape,
10
+ const std::vector<ExprHandle>& outputStrides,
11
+ bool log_softmax);
12
+
13
+ } // namespace torch::jit::tensorexpr
archive/.venv/Lib/site-packages/torch/include/torch/csrc/jit/testing/catch_utils.hpp ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ #pragma once
2
+
3
+ #define CATCH_CONFIG_PREFIX_ALL
4
+ #include <catch.hpp>
5
+
6
+ // CATCH_REQUIRE_THROWS is not defined identically to REQUIRE_THROWS and causes
7
+ // warning; define our own version that doesn't warn.
8
+ #define _CATCH_REQUIRE_THROWS(...) \
9
+ INTERNAL_CATCH_THROWS( \
10
+ "CATCH_REQUIRE_THROWS", Catch::ResultDisposition::Normal, __VA_ARGS__)
archive/.venv/Lib/site-packages/wheel/cli/__init__.py ADDED
@@ -0,0 +1,155 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Wheel command-line utility.
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ import argparse
8
+ import os
9
+ import sys
10
+ from argparse import ArgumentTypeError
11
+
12
+
13
class WheelError(Exception):
    """Base error raised by the wheel CLI for user-facing failures."""
15
+
16
+
17
def unpack_f(args: argparse.Namespace) -> None:
    """Handle the ``unpack`` subcommand: extract a wheel into ``args.dest``."""
    # Imported lazily so the other subcommands don't pay the import cost.
    from .unpack import unpack

    unpack(args.wheelfile, args.dest)
21
+
22
+
23
def pack_f(args: argparse.Namespace) -> None:
    """Handle the ``pack`` subcommand: repack an unpacked wheel directory."""
    # Imported lazily so the other subcommands don't pay the import cost.
    from .pack import pack

    pack(args.directory, args.dest_dir, args.build_number)
27
+
28
+
29
def convert_f(args: argparse.Namespace) -> None:
    """Handle the ``convert`` subcommand: turn eggs/wininst installers into wheels."""
    # Imported lazily so the other subcommands don't pay the import cost.
    from .convert import convert

    convert(args.files, args.dest_dir, args.verbose)
33
+
34
+
35
def tags_f(args: argparse.Namespace) -> None:
    """Handle the ``tags`` subcommand: retag each wheel and print the new name(s)."""
    from .tags import tags

    # Lazy generator: each wheel is retagged only as its name is printed.
    names = (
        tags(
            wheel,
            args.python_tag,
            args.abi_tag,
            args.platform_tag,
            args.build,
            args.remove,
        )
        for wheel in args.wheel
    )

    for name in names:
        print(name)
52
+
53
+
54
def version_f(args: argparse.Namespace) -> None:
    """Handle the ``version`` subcommand: print the installed wheel version."""
    from .. import __version__

    print(f"wheel {__version__}")
58
+
59
+
60
def parse_build_tag(build_tag: str) -> str:
    """argparse ``type=`` validator for build tags.

    A non-empty build tag must start with a digit and may not contain a
    ``-`` (which would break wheel filename parsing). The empty string is
    accepted unchanged.

    :raises ArgumentTypeError: if the tag is malformed
    """
    if build_tag:
        if not build_tag[0].isdigit():
            raise ArgumentTypeError("build tag must begin with a digit")
        if "-" in build_tag:
            raise ArgumentTypeError("invalid character ('-') in build tag")

    return build_tag
67
+
68
+
69
TAGS_HELP = """\
Make a new wheel with given tags. Any tags unspecified will remain the same.
Starting the tags with a "+" will append to the existing tags. Starting with a
"-" will remove a tag (use --option=-TAG syntax). Multiple tags can be
separated by ".". The original file will remain unless --remove is given. The
output filename(s) will be displayed on stdout for further processing.
"""


def parser():
    """Build the top-level argparse parser for the ``wheel`` command.

    Each subcommand registers its handler via ``set_defaults(func=...)``;
    ``main`` dispatches on that attribute.

    :return: the configured :class:`argparse.ArgumentParser`
    """
    p = argparse.ArgumentParser()
    s = p.add_subparsers(help="commands")

    # wheel unpack
    unpack_parser = s.add_parser("unpack", help="Unpack wheel")
    unpack_parser.add_argument(
        "--dest", "-d", help="Destination directory", default="."
    )
    unpack_parser.add_argument("wheelfile", help="Wheel file")
    unpack_parser.set_defaults(func=unpack_f)

    # wheel pack
    repack_parser = s.add_parser("pack", help="Repack wheel")
    repack_parser.add_argument("directory", help="Root directory of the unpacked wheel")
    repack_parser.add_argument(
        "--dest-dir",
        "-d",
        default=os.path.curdir,
        help="Directory to store the wheel (default %(default)s)",
    )
    repack_parser.add_argument(
        "--build-number", help="Build tag to use in the wheel name"
    )
    repack_parser.set_defaults(func=pack_f)

    # wheel convert
    convert_parser = s.add_parser("convert", help="Convert egg or wininst to wheel")
    convert_parser.add_argument("files", nargs="*", help="Files to convert")
    convert_parser.add_argument(
        "--dest-dir",
        "-d",
        default=os.path.curdir,
        help="Directory to store wheels (default %(default)s)",
    )
    convert_parser.add_argument("--verbose", "-v", action="store_true")
    convert_parser.set_defaults(func=convert_f)

    # wheel tags
    tags_parser = s.add_parser(
        "tags", help="Add or replace the tags on a wheel", description=TAGS_HELP
    )
    tags_parser.add_argument("wheel", nargs="*", help="Existing wheel(s) to retag")
    tags_parser.add_argument(
        "--remove",
        action="store_true",
        help="Remove the original files, keeping only the renamed ones",
    )
    tags_parser.add_argument(
        "--python-tag", metavar="TAG", help="Specify an interpreter tag(s)"
    )
    tags_parser.add_argument("--abi-tag", metavar="TAG", help="Specify an ABI tag(s)")
    tags_parser.add_argument(
        "--platform-tag", metavar="TAG", help="Specify a platform tag(s)"
    )
    tags_parser.add_argument(
        "--build", type=parse_build_tag, metavar="BUILD", help="Specify a build tag"
    )
    tags_parser.set_defaults(func=tags_f)

    # wheel version / help
    version_parser = s.add_parser("version", help="Print version and exit")
    version_parser.set_defaults(func=version_f)

    help_parser = s.add_parser("help", help="Show this help")
    help_parser.set_defaults(func=lambda args: p.print_help())

    return p
141
+
142
+
143
def main():
    """CLI entry point: parse arguments and dispatch to the chosen subcommand.

    :return: 0 on success, 1 if the handler raised :class:`WheelError`, and
        None (treated as success by ``sys.exit``) when no subcommand was given
        — in that case the help text is printed instead.
    """
    p = parser()
    args = p.parse_args()
    if not hasattr(args, "func"):
        p.print_help()
    else:
        try:
            args.func(args)
            return 0
        except WheelError as e:
            # User-facing failure: report on stderr, no traceback.
            print(e, file=sys.stderr)

        return 1
archive/.venv/Lib/site-packages/wheel/cli/convert.py ADDED
@@ -0,0 +1,332 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import os.path
4
+ import re
5
+ from abc import ABCMeta, abstractmethod
6
+ from collections import defaultdict
7
+ from collections.abc import Iterator
8
+ from email.message import Message
9
+ from email.parser import Parser
10
+ from email.policy import EmailPolicy
11
+ from glob import iglob
12
+ from pathlib import Path
13
+ from textwrap import dedent
14
+ from zipfile import ZipFile
15
+
16
+ from .. import __version__
17
+ from ..metadata import generate_requirements
18
+ from ..vendored.packaging.tags import parse_tag
19
+ from ..wheelfile import WheelFile
20
+
21
# Filename/metadata patterns for the legacy formats we can convert.
# NOTE: the "." before "egg"/"egg-info" was previously unescaped (matching
# any character, so e.g. "foo-1.0Xegg" parsed as an egg); it is escaped here.
egg_filename_re = re.compile(
    r"""
    (?P<name>.+?)-(?P<ver>.+?)
    (-(?P<pyver>py\d\.\d+)
    (-(?P<arch>.+?))?
    )?\.egg$""",
    re.VERBOSE,
)
egg_info_re = re.compile(
    r"""
    ^(?P<name>.+?)-(?P<ver>.+?)
    (-(?P<pyver>py\d\.\d+)
    )?\.egg-info/""",
    re.VERBOSE,
)
wininst_re = re.compile(
    r"\.(?P<platform>win32|win-amd64)(?:-(?P<pyver>py\d\.\d))?\.exe$"
)
pyd_re = re.compile(r"\.(?P<abi>[a-z0-9]+)-(?P<platform>win32|win_amd64)\.pyd$")
# UTF-8, no From-mangling, unlimited line length: serialize METADATA/WHEEL
# without wrapping or escaping header values.
serialization_policy = EmailPolicy(
    utf8=True,
    mangle_from_=False,
    max_line_length=0,
)
GENERATOR = f"wheel {__version__}"
46
+
47
+
48
def convert_requires(requires: str, metadata: Message) -> None:
    """Translate an egg-style ``requires.txt`` into Requires-Dist headers.

    ``requires.txt`` groups requirements under ``[extra]`` section markers;
    lines before any marker belong to the unconditional (None) group.

    :param requires: the raw requires.txt contents
    :param metadata: the METADATA message to add headers to
    """
    extra: str | None = None
    requirements: dict[str | None, list[str]] = defaultdict(list)
    for line in requires.splitlines():
        line = line.strip()
        if not line:
            continue

        # A "[name]" line switches the current extra for subsequent lines.
        if line.startswith("[") and line.endswith("]"):
            extra = line[1:-1]
            continue

        requirements[extra].append(line)

    for key, value in generate_requirements(requirements):
        metadata.add_header(key, value)
64
+
65
+
66
def convert_pkg_info(pkginfo: str, metadata: Message):
    """Copy metadata from a legacy PKG-INFO document into ``metadata``.

    Headers valued "UNKNOWN" are dropped; the Description header is moved into
    the message payload (dedented past its first line); Home-page and
    Download-URL become Project-URL entries; Metadata-Version is bumped to 2.4.

    :param pkginfo: the raw PKG-INFO text
    :param metadata: the METADATA message to populate
    """
    parsed_message = Parser().parsestr(pkginfo)
    for key, value in parsed_message.items():
        key_lower = key.lower()
        if value == "UNKNOWN":
            continue

        if key_lower == "description":
            # The first description line is unindented; the continuation
            # lines carry the RFC 822 folding indent, so dedent them as one.
            description_lines = value.splitlines()
            value = "\n".join(
                (
                    description_lines[0].lstrip(),
                    dedent("\n".join(description_lines[1:])),
                    "\n",
                )
            )
            metadata.set_payload(value)
        elif key_lower == "home-page":
            metadata.add_header("Project-URL", f"Homepage, {value}")
        elif key_lower == "download-url":
            metadata.add_header("Project-URL", f"Download, {value}")
        else:
            metadata.add_header(key, value)

    metadata.replace_header("Metadata-Version", "2.4")
91
+
92
+
93
def normalize(name: str) -> str:
    """Normalize a distribution name for use in a wheel filename.

    Runs of ``-``, ``_`` and ``.`` collapse to a single separator, the result
    is lowercased, and the separator is rendered as ``_``.
    """
    collapsed = re.sub(r"[-_.]+", "-", name)
    return collapsed.lower().replace("-", "_")
95
+
96
+
97
class ConvertSource(metaclass=ABCMeta):
    """Abstract source archive being converted to a wheel.

    Concrete subclasses fill in the wheel-name components (``name``,
    ``version``, ``pyver``, ``abi``, ``platform``), accumulate METADATA
    headers in ``metadata``, and stream the archive members through
    ``generate_contents``.
    """

    name: str
    version: str
    # Defaults describe a pure-Python universal wheel; binary sources override.
    pyver: str = "py2.py3"
    abi: str = "none"
    platform: str = "any"
    metadata: Message

    @property
    def dist_info_dir(self) -> str:
        """Name of the ``.dist-info`` directory inside the target wheel."""
        return f"{self.name}-{self.version}.dist-info"

    @abstractmethod
    def generate_contents(self) -> Iterator[tuple[str, bytes]]:
        """Yield ``(archive_path, data)`` pairs for the wheel contents."""
112
+
113
+
114
class EggFileSource(ConvertSource):
    """Converts a ``.egg`` archive (a zip file) into wheel contents.

    The name, version and optional pyver/arch components are parsed from the
    egg filename; metadata is harvested from the EGG-INFO directory while
    streaming the archive.
    """

    def __init__(self, path: Path):
        if not (match := egg_filename_re.match(path.name)):
            raise ValueError(f"Invalid egg file name: {path.name}")

        # Binary wheels are assumed to be for CPython
        self.path = path
        self.name = normalize(match.group("name"))
        self.version = match.group("ver")
        if pyver := match.group("pyver"):
            self.pyver = pyver.replace(".", "")
        if arch := match.group("arch"):
            self.abi = self.pyver.replace("py", "cp")
            self.platform = normalize(arch)

        self.metadata = Message()

    def generate_contents(self) -> Iterator[tuple[str, bytes]]:
        """Yield wheel members; EGG-INFO files are converted or relocated."""
        with ZipFile(self.path, "r") as zip_file:
            for filename in sorted(zip_file.namelist()):
                # Skip pure directory entries
                if filename.endswith("/"):
                    continue

                # Handle files in the egg-info directory specially, selectively moving
                # them to the dist-info directory while converting as needed
                if filename.startswith("EGG-INFO/"):
                    if filename == "EGG-INFO/requires.txt":
                        requires = zip_file.read(filename).decode("utf-8")
                        convert_requires(requires, self.metadata)
                    elif filename == "EGG-INFO/PKG-INFO":
                        pkginfo = zip_file.read(filename).decode("utf-8")
                        convert_pkg_info(pkginfo, self.metadata)
                    elif filename == "EGG-INFO/entry_points.txt":
                        yield (
                            f"{self.dist_info_dir}/entry_points.txt",
                            zip_file.read(filename),
                        )

                    continue

                # For any other file, just pass it through
                yield filename, zip_file.read(filename)
157
+
158
+
159
class EggDirectorySource(EggFileSource):
    """Like :class:`EggFileSource`, but for an unpacked ``.egg`` directory.

    Walks the directory tree instead of reading a zip; when PKG-INFO provides
    Name/Version they override the values parsed from the directory name.
    """

    def generate_contents(self) -> Iterator[tuple[str, bytes]]:
        for dirpath, _, filenames in os.walk(self.path):
            for filename in sorted(filenames):
                path = Path(dirpath, filename)
                if path.parent.name == "EGG-INFO":
                    if path.name == "requires.txt":
                        requires = path.read_text("utf-8")
                        convert_requires(requires, self.metadata)
                    elif path.name == "PKG-INFO":
                        pkginfo = path.read_text("utf-8")
                        convert_pkg_info(pkginfo, self.metadata)
                        # PKG-INFO is authoritative; prefer it over the
                        # directory-name parse done in __init__.
                        if name := self.metadata.get("Name"):
                            self.name = normalize(name)

                        if version := self.metadata.get("Version"):
                            self.version = version
                    elif path.name == "entry_points.txt":
                        yield (
                            f"{self.dist_info_dir}/entry_points.txt",
                            path.read_bytes(),
                        )

                    continue

                # For any other file, just pass it through
                yield str(path.relative_to(self.path)), path.read_bytes()
186
+
187
+
188
class WininstFileSource(ConvertSource):
    """
    Handles distributions created with ``bdist_wininst``.

    The egginfo filename has the format::

        name-ver(-pyver)(-arch).egg-info

    The installer filename has the format::

        name-ver.arch(-pyver).exe

    Some things to note:

    1. The installer filename is not definitive. An installer can be renamed
       and work perfectly well as an installer. So more reliable data should
       be used whenever possible.
    2. The egg-info data should be preferred for the name and version, because
       these come straight from the distutils metadata, and are mandatory.
    3. The pyver from the egg-info data should be ignored, as it is
       constructed from the version of Python used to build the installer,
       which is irrelevant - the installer filename is correct here (even to
       the point that when it's not there, any version is implied).
    4. The architecture must be taken from the installer filename, as it is
       not included in the egg-info data.
    5. Architecture-neutral installers still have an architecture because the
       installer format itself (being executable) is architecture-specific. We
       should therefore ignore the architecture if the content is pure-python.
    """

    def __init__(self, path: Path):
        """Parse name/version/tags from the installer filename and contents."""
        self.path = path
        self.metadata = Message()

        # Determine the initial architecture and Python version from the file name
        # (if possible)
        if match := wininst_re.search(path.name):
            self.platform = normalize(match.group("platform"))
            if pyver := match.group("pyver"):
                self.pyver = pyver.replace(".", "")

        # Look for an .egg-info directory and any .pyd files for more precise info
        egg_info_found = pyd_found = False
        with ZipFile(self.path) as zip_file:
            for filename in zip_file.namelist():
                prefix, filename = filename.split("/", 1)
                if not egg_info_found and (match := egg_info_re.match(filename)):
                    egg_info_found = True
                    self.name = normalize(match.group("name"))
                    self.version = match.group("ver")
                    if pyver := match.group("pyver"):
                        self.pyver = pyver.replace(".", "")
                elif not pyd_found and (match := pyd_re.search(filename)):
                    pyd_found = True
                    self.abi = match.group("abi")
                    self.platform = match.group("platform")

                if egg_info_found and pyd_found:
                    break

    def generate_contents(self) -> Iterator[tuple[str, bytes]]:
        """Yield wheel members; egg-info files are converted, SCRIPTS relocated."""
        dist_info_dir = f"{self.name}-{self.version}.dist-info"
        data_dir = f"{self.name}-{self.version}.data"
        with ZipFile(self.path, "r") as zip_file:
            for filename in sorted(zip_file.namelist()):
                # Skip pure directory entries
                if filename.endswith("/"):
                    continue

                # Handle files in the egg-info directory specially, selectively moving
                # them to the dist-info directory while converting as needed
                prefix, target_filename = filename.split("/", 1)
                if egg_info_re.search(target_filename):
                    basename = target_filename.rsplit("/", 1)[-1]
                    if basename == "requires.txt":
                        requires = zip_file.read(filename).decode("utf-8")
                        convert_requires(requires, self.metadata)
                    elif basename == "PKG-INFO":
                        pkginfo = zip_file.read(filename).decode("utf-8")
                        convert_pkg_info(pkginfo, self.metadata)
                    elif basename == "entry_points.txt":
                        yield (
                            f"{dist_info_dir}/entry_points.txt",
                            zip_file.read(filename),
                        )

                    continue
                elif prefix == "SCRIPTS":
                    target_filename = f"{data_dir}/scripts/{target_filename}"

                # For any other file, just pass it through
                yield target_filename, zip_file.read(filename)
280
+
281
+
282
def convert(files: list[str], dest_dir: str, verbose: bool) -> None:
    """Convert egg or wininst archives matching ``files`` into wheels.

    :param files: glob patterns selecting .egg files/directories or .exe
        installers
    :param dest_dir: directory the resulting .whl files are written to
    :param verbose: print per-archive progress to stdout
    """
    for pat in files:
        for archive in iglob(pat):
            path = Path(archive)
            # Pick the source type: .egg (file or unpacked dir), else wininst.
            if path.suffix == ".egg":
                if path.is_dir():
                    source: ConvertSource = EggDirectorySource(path)
                else:
                    source = EggFileSource(path)
            else:
                source = WininstFileSource(path)

            if verbose:
                print(f"{archive}...", flush=True, end="")

            dest_path = Path(dest_dir) / (
                f"{source.name}-{source.version}-{source.pyver}-{source.abi}"
                f"-{source.platform}.whl"
            )
            with WheelFile(dest_path, "w") as wheelfile:
                for name_or_zinfo, contents in source.generate_contents():
                    wheelfile.writestr(name_or_zinfo, contents)

                # Write the METADATA file
                wheelfile.writestr(
                    f"{source.dist_info_dir}/METADATA",
                    source.metadata.as_string(policy=serialization_policy).encode(
                        "utf-8"
                    ),
                )

                # Write the WHEEL file
                wheel_message = Message()
                wheel_message.add_header("Wheel-Version", "1.0")
                wheel_message.add_header("Generator", GENERATOR)
                wheel_message.add_header(
                    "Root-Is-Purelib", str(source.platform == "any").lower()
                )
                tags = parse_tag(f"{source.pyver}-{source.abi}-{source.platform}")
                for tag in sorted(tags, key=lambda tag: tag.interpreter):
                    wheel_message.add_header("Tag", str(tag))

                wheelfile.writestr(
                    f"{source.dist_info_dir}/WHEEL",
                    wheel_message.as_string(policy=serialization_policy).encode(
                        "utf-8"
                    ),
                )

            if verbose:
                print("OK")
archive/.venv/Lib/site-packages/wheel/cli/pack.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import email.policy
4
+ import os.path
5
+ import re
6
+ from email.generator import BytesGenerator
7
+ from email.parser import BytesParser
8
+
9
+ from wheel.cli import WheelError
10
+ from wheel.wheelfile import WheelFile
11
+
12
# Matches "<name>-<version>.dist-info"; version must start with a digit.
DIST_INFO_RE = re.compile(r"^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))\.dist-info$")


def pack(directory: str, dest_dir: str, build_number: str | None) -> None:
    """Repack a previously unpacked wheel directory into a new wheel file.

    The .dist-info/WHEEL file must contain one or more tags so that the target
    wheel file name can be determined.

    :param directory: The unpacked wheel directory
    :param dest_dir: Destination directory (defaults to the current directory)
    :param build_number: Build tag to use in the wheel name (overrides any
        Build header already present in WHEEL; an empty string removes it)
    :raises WheelError: if zero or multiple .dist-info directories are found,
        or WHEEL contains no tags
    """
    # Find the .dist-info directory
    dist_info_dirs = [
        fn
        for fn in os.listdir(directory)
        if os.path.isdir(os.path.join(directory, fn)) and DIST_INFO_RE.match(fn)
    ]
    if len(dist_info_dirs) > 1:
        raise WheelError(f"Multiple .dist-info directories found in {directory}")
    elif not dist_info_dirs:
        raise WheelError(f"No .dist-info directories found in {directory}")

    # Determine the target wheel filename
    dist_info_dir = dist_info_dirs[0]
    name_version = DIST_INFO_RE.match(dist_info_dir).group("namever")

    # Read the tags and the existing build number from .dist-info/WHEEL
    wheel_file_path = os.path.join(directory, dist_info_dir, "WHEEL")
    with open(wheel_file_path, "rb") as f:
        info = BytesParser(policy=email.policy.compat32).parse(f)
        tags: list[str] = info.get_all("Tag", [])
        existing_build_number = info.get("Build")

        if not tags:
            raise WheelError(
                f"No tags present in {dist_info_dir}/WHEEL; cannot determine target "
                f"wheel filename"
            )

    # Set the wheel file name and add/replace/remove the Build tag in .dist-info/WHEEL
    build_number = build_number if build_number is not None else existing_build_number
    if build_number is not None:
        del info["Build"]
        if build_number:
            info["Build"] = build_number
            name_version += "-" + build_number

        # Only rewrite WHEEL when the Build header actually changed.
        if build_number != existing_build_number:
            with open(wheel_file_path, "wb") as f:
                BytesGenerator(f, maxheaderlen=0).flatten(info)

    # Reassemble the tags for the wheel file
    tagline = compute_tagline(tags)

    # Repack the wheel
    wheel_path = os.path.join(dest_dir, f"{name_version}-{tagline}.whl")
    with WheelFile(wheel_path, "w") as wf:
        print(f"Repacking wheel as {wheel_path}...", end="", flush=True)
        wf.write_files(directory)

    print("OK")
74
+
75
+
76
def compute_tagline(tags: list[str]) -> str:
    """Compute a tagline from a list of tags.

    Each tag has the form ``interpreter-abi-platform``; the result joins the
    sorted, de-duplicated values of each field with "." and the three fields
    with "-".

    :param tags: A list of tags
    :return: A tagline
    """
    fields = [sorted({tag.split("-")[idx] for tag in tags}) for idx in range(3)]
    return "-".join(".".join(field) for field in fields)
archive/.venv/Lib/site-packages/wheel/cli/tags.py ADDED
@@ -0,0 +1,139 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import email.policy
4
+ import itertools
5
+ import os
6
+ from collections.abc import Iterable
7
+ from email.parser import BytesParser
8
+
9
+ from ..wheelfile import WheelFile
10
+
11
+
12
+ def _compute_tags(original_tags: Iterable[str], new_tags: str | None) -> set[str]:
13
+ """Add or replace tags. Supports dot-separated tags"""
14
+ if new_tags is None:
15
+ return set(original_tags)
16
+
17
+ if new_tags.startswith("+"):
18
+ return {*original_tags, *new_tags[1:].split(".")}
19
+
20
+ if new_tags.startswith("-"):
21
+ return set(original_tags) - set(new_tags[1:].split("."))
22
+
23
+ return set(new_tags.split("."))
24
+
25
+
26
def tags(
    wheel: str,
    python_tags: str | None = None,
    abi_tags: str | None = None,
    platform_tags: str | None = None,
    build_tag: str | None = None,
    remove: bool = False,
) -> str:
    """Change the tags on a wheel file.

    The tags are left unchanged if they are not specified. To specify "none",
    use ["none"]. To append to the previous tags, a tag should start with a
    "+". If a tag starts with "-", it will be removed from existing tags.
    Processing is done left to right.

    :param wheel: The paths to the wheels
    :param python_tags: The Python tags to set
    :param abi_tags: The ABI tags to set
    :param platform_tags: The platform tags to set
    :param build_tag: The build tag to set
    :param remove: Remove the original wheel
    :return: The file name of the (possibly renamed) result wheel
    :raises AssertionError: if the tags inside ``.dist-info/WHEEL`` disagree
        with the tags encoded in the wheel's filename
    """
    with WheelFile(wheel, "r") as f:
        assert f.filename, f"{f.filename} must be available"

        # .dist-info/WHEEL is an RFC 822-style message; "Tag" and "Build"
        # headers carry the tag set and optional build number.
        wheel_info = f.read(f.dist_info_path + "/WHEEL")
        info = BytesParser(policy=email.policy.compat32).parsebytes(wheel_info)

        # Decompose the current filename into its components.
        original_wheel_name = os.path.basename(f.filename)
        namever = f.parsed_filename.group("namever")
        build = f.parsed_filename.group("build")
        original_python_tags = f.parsed_filename.group("pyver").split(".")
        original_abi_tags = f.parsed_filename.group("abi").split(".")
        original_plat_tags = f.parsed_filename.group("plat").split(".")

        tags: list[str] = info.get_all("Tag", [])
        existing_build_tag = info.get("Build")

        # Cross-check: the tags recorded inside WHEEL must agree with the
        # tags encoded in the filename before anything is changed.
        impls = {tag.split("-")[0] for tag in tags}
        abivers = {tag.split("-")[1] for tag in tags}
        platforms = {tag.split("-")[2] for tag in tags}

        if impls != set(original_python_tags):
            msg = f"Wheel internal tags {impls!r} != filename tags {original_python_tags!r}"
            raise AssertionError(msg)

        if abivers != set(original_abi_tags):
            msg = f"Wheel internal tags {abivers!r} != filename tags {original_abi_tags!r}"
            raise AssertionError(msg)

        if platforms != set(original_plat_tags):
            msg = (
                f"Wheel internal tags {platforms!r} != filename tags {original_plat_tags!r}"
            )
            raise AssertionError(msg)

        # The build number in the filename must match the WHEEL header too.
        if existing_build_tag != build:
            msg = (
                f"Incorrect filename '{build}' "
                f"& *.dist-info/WHEEL '{existing_build_tag}' build numbers"
            )
            raise AssertionError(msg)

        # Start changing as needed
        if build_tag is not None:
            build = build_tag

        # Apply the +add / -remove / replace semantics per tag category.
        final_python_tags = sorted(_compute_tags(original_python_tags, python_tags))
        final_abi_tags = sorted(_compute_tags(original_abi_tags, abi_tags))
        final_plat_tags = sorted(_compute_tags(original_plat_tags, platform_tags))

        final_tags = [
            namever,
            ".".join(final_python_tags),
            ".".join(final_abi_tags),
            ".".join(final_plat_tags),
        ]
        if build:
            # The optional build number goes right after name-version.
            final_tags.insert(1, build)

        final_wheel_name = "-".join(final_tags) + ".whl"

    # Only rewrite the archive when the computed name actually differs.
    if original_wheel_name != final_wheel_name:
        # Replace the Tag/Build headers with the new cartesian product of tags.
        del info["Tag"], info["Build"]
        for a, b, c in itertools.product(
            final_python_tags, final_abi_tags, final_plat_tags
        ):
            info["Tag"] = f"{a}-{b}-{c}"
        if build:
            info["Build"] = build

        original_wheel_path = os.path.join(
            os.path.dirname(f.filename), original_wheel_name
        )
        final_wheel_path = os.path.join(os.path.dirname(f.filename), final_wheel_name)

        # Copy every member into the renamed wheel, substituting the updated
        # WHEEL metadata and dropping RECORD (WheelFile regenerates it).
        with WheelFile(original_wheel_path, "r") as fin, WheelFile(
            final_wheel_path, "w"
        ) as fout:
            fout.comment = fin.comment  # preserve the comment
            for item in fin.infolist():
                if item.is_dir():
                    continue
                if item.filename == f.dist_info_path + "/RECORD":
                    continue
                if item.filename == f.dist_info_path + "/WHEEL":
                    fout.writestr(item, info.as_bytes())
                else:
                    fout.writestr(item, fin.read(item))

        if remove:
            os.remove(original_wheel_path)

    return final_wheel_name
archive/.venv/Lib/site-packages/wheel/cli/unpack.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from pathlib import Path
4
+
5
+ from ..wheelfile import WheelFile
6
+
7
+
8
def unpack(path: str, dest: str = ".") -> None:
    """Unpack a wheel.

    Wheel content will be unpacked to {dest}/{name}-{ver}, where {name}
    is the package name and {ver} its version.

    :param path: The path to the wheel.
    :param dest: Destination directory (default to current directory).
    """
    with WheelFile(path) as wheel:
        name_version = wheel.parsed_filename.group("namever")
        destination = Path(dest) / name_version
        print(f"Unpacking to: {destination}...", end="", flush=True)
        for member in wheel.filelist:
            wheel.extract(member, destination)

            # ZipFile.extract does not restore file modes
            # (https://github.com/python/cpython/issues/59999), so re-apply
            # the permission bits recorded in the archive entry.
            mode = member.external_attr >> 16 & 0o777
            destination.joinpath(member.filename).chmod(mode)

    print("OK")
archive/.venv/Lib/site-packages/wheel/vendored/__init__.py ADDED
File without changes
archive/.venv/Lib/site-packages/wheel/vendored/packaging/LICENSE ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ This software is made available under the terms of *either* of the licenses
2
+ found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made
3
+ under the terms of *both* these licenses.
archive/.venv/Lib/site-packages/wheel/vendored/packaging/LICENSE.APACHE ADDED
@@ -0,0 +1,177 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ Apache License
3
+ Version 2.0, January 2004
4
+ http://www.apache.org/licenses/
5
+
6
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7
+
8
+ 1. Definitions.
9
+
10
+ "License" shall mean the terms and conditions for use, reproduction,
11
+ and distribution as defined by Sections 1 through 9 of this document.
12
+
13
+ "Licensor" shall mean the copyright owner or entity authorized by
14
+ the copyright owner that is granting the License.
15
+
16
+ "Legal Entity" shall mean the union of the acting entity and all
17
+ other entities that control, are controlled by, or are under common
18
+ control with that entity. For the purposes of this definition,
19
+ "control" means (i) the power, direct or indirect, to cause the
20
+ direction or management of such entity, whether by contract or
21
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
22
+ outstanding shares, or (iii) beneficial ownership of such entity.
23
+
24
+ "You" (or "Your") shall mean an individual or Legal Entity
25
+ exercising permissions granted by this License.
26
+
27
+ "Source" form shall mean the preferred form for making modifications,
28
+ including but not limited to software source code, documentation
29
+ source, and configuration files.
30
+
31
+ "Object" form shall mean any form resulting from mechanical
32
+ transformation or translation of a Source form, including but
33
+ not limited to compiled object code, generated documentation,
34
+ and conversions to other media types.
35
+
36
+ "Work" shall mean the work of authorship, whether in Source or
37
+ Object form, made available under the License, as indicated by a
38
+ copyright notice that is included in or attached to the work
39
+ (an example is provided in the Appendix below).
40
+
41
+ "Derivative Works" shall mean any work, whether in Source or Object
42
+ form, that is based on (or derived from) the Work and for which the
43
+ editorial revisions, annotations, elaborations, or other modifications
44
+ represent, as a whole, an original work of authorship. For the purposes
45
+ of this License, Derivative Works shall not include works that remain
46
+ separable from, or merely link (or bind by name) to the interfaces of,
47
+ the Work and Derivative Works thereof.
48
+
49
+ "Contribution" shall mean any work of authorship, including
50
+ the original version of the Work and any modifications or additions
51
+ to that Work or Derivative Works thereof, that is intentionally
52
+ submitted to Licensor for inclusion in the Work by the copyright owner
53
+ or by an individual or Legal Entity authorized to submit on behalf of
54
+ the copyright owner. For the purposes of this definition, "submitted"
55
+ means any form of electronic, verbal, or written communication sent
56
+ to the Licensor or its representatives, including but not limited to
57
+ communication on electronic mailing lists, source code control systems,
58
+ and issue tracking systems that are managed by, or on behalf of, the
59
+ Licensor for the purpose of discussing and improving the Work, but
60
+ excluding communication that is conspicuously marked or otherwise
61
+ designated in writing by the copyright owner as "Not a Contribution."
62
+
63
+ "Contributor" shall mean Licensor and any individual or Legal Entity
64
+ on behalf of whom a Contribution has been received by Licensor and
65
+ subsequently incorporated within the Work.
66
+
67
+ 2. Grant of Copyright License. Subject to the terms and conditions of
68
+ this License, each Contributor hereby grants to You a perpetual,
69
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70
+ copyright license to reproduce, prepare Derivative Works of,
71
+ publicly display, publicly perform, sublicense, and distribute the
72
+ Work and such Derivative Works in Source or Object form.
73
+
74
+ 3. Grant of Patent License. Subject to the terms and conditions of
75
+ this License, each Contributor hereby grants to You a perpetual,
76
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77
+ (except as stated in this section) patent license to make, have made,
78
+ use, offer to sell, sell, import, and otherwise transfer the Work,
79
+ where such license applies only to those patent claims licensable
80
+ by such Contributor that are necessarily infringed by their
81
+ Contribution(s) alone or by combination of their Contribution(s)
82
+ with the Work to which such Contribution(s) was submitted. If You
83
+ institute patent litigation against any entity (including a
84
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
85
+ or a Contribution incorporated within the Work constitutes direct
86
+ or contributory patent infringement, then any patent licenses
87
+ granted to You under this License for that Work shall terminate
88
+ as of the date such litigation is filed.
89
+
90
+ 4. Redistribution. You may reproduce and distribute copies of the
91
+ Work or Derivative Works thereof in any medium, with or without
92
+ modifications, and in Source or Object form, provided that You
93
+ meet the following conditions:
94
+
95
+ (a) You must give any other recipients of the Work or
96
+ Derivative Works a copy of this License; and
97
+
98
+ (b) You must cause any modified files to carry prominent notices
99
+ stating that You changed the files; and
100
+
101
+ (c) You must retain, in the Source form of any Derivative Works
102
+ that You distribute, all copyright, patent, trademark, and
103
+ attribution notices from the Source form of the Work,
104
+ excluding those notices that do not pertain to any part of
105
+ the Derivative Works; and
106
+
107
+ (d) If the Work includes a "NOTICE" text file as part of its
108
+ distribution, then any Derivative Works that You distribute must
109
+ include a readable copy of the attribution notices contained
110
+ within such NOTICE file, excluding those notices that do not
111
+ pertain to any part of the Derivative Works, in at least one
112
+ of the following places: within a NOTICE text file distributed
113
+ as part of the Derivative Works; within the Source form or
114
+ documentation, if provided along with the Derivative Works; or,
115
+ within a display generated by the Derivative Works, if and
116
+ wherever such third-party notices normally appear. The contents
117
+ of the NOTICE file are for informational purposes only and
118
+ do not modify the License. You may add Your own attribution
119
+ notices within Derivative Works that You distribute, alongside
120
+ or as an addendum to the NOTICE text from the Work, provided
121
+ that such additional attribution notices cannot be construed
122
+ as modifying the License.
123
+
124
+ You may add Your own copyright statement to Your modifications and
125
+ may provide additional or different license terms and conditions
126
+ for use, reproduction, or distribution of Your modifications, or
127
+ for any such Derivative Works as a whole, provided Your use,
128
+ reproduction, and distribution of the Work otherwise complies with
129
+ the conditions stated in this License.
130
+
131
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
132
+ any Contribution intentionally submitted for inclusion in the Work
133
+ by You to the Licensor shall be under the terms and conditions of
134
+ this License, without any additional terms or conditions.
135
+ Notwithstanding the above, nothing herein shall supersede or modify
136
+ the terms of any separate license agreement you may have executed
137
+ with Licensor regarding such Contributions.
138
+
139
+ 6. Trademarks. This License does not grant permission to use the trade
140
+ names, trademarks, service marks, or product names of the Licensor,
141
+ except as required for reasonable and customary use in describing the
142
+ origin of the Work and reproducing the content of the NOTICE file.
143
+
144
+ 7. Disclaimer of Warranty. Unless required by applicable law or
145
+ agreed to in writing, Licensor provides the Work (and each
146
+ Contributor provides its Contributions) on an "AS IS" BASIS,
147
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148
+ implied, including, without limitation, any warranties or conditions
149
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150
+ PARTICULAR PURPOSE. You are solely responsible for determining the
151
+ appropriateness of using or redistributing the Work and assume any
152
+ risks associated with Your exercise of permissions under this License.
153
+
154
+ 8. Limitation of Liability. In no event and under no legal theory,
155
+ whether in tort (including negligence), contract, or otherwise,
156
+ unless required by applicable law (such as deliberate and grossly
157
+ negligent acts) or agreed to in writing, shall any Contributor be
158
+ liable to You for damages, including any direct, indirect, special,
159
+ incidental, or consequential damages of any character arising as a
160
+ result of this License or out of the use or inability to use the
161
+ Work (including but not limited to damages for loss of goodwill,
162
+ work stoppage, computer failure or malfunction, or any and all
163
+ other commercial damages or losses), even if such Contributor
164
+ has been advised of the possibility of such damages.
165
+
166
+ 9. Accepting Warranty or Additional Liability. While redistributing
167
+ the Work or Derivative Works thereof, You may choose to offer,
168
+ and charge a fee for, acceptance of support, warranty, indemnity,
169
+ or other liability obligations and/or rights consistent with this
170
+ License. However, in accepting such obligations, You may act only
171
+ on Your own behalf and on Your sole responsibility, not on behalf
172
+ of any other Contributor, and only if You agree to indemnify,
173
+ defend, and hold each Contributor harmless for any liability
174
+ incurred by, or claims asserted against, such Contributor by reason
175
+ of your accepting any such warranty or additional liability.
176
+
177
+ END OF TERMS AND CONDITIONS
archive/.venv/Lib/site-packages/wheel/vendored/packaging/LICENSE.BSD ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Copyright (c) Donald Stufft and individual contributors.
2
+ All rights reserved.
3
+
4
+ Redistribution and use in source and binary forms, with or without
5
+ modification, are permitted provided that the following conditions are met:
6
+
7
+ 1. Redistributions of source code must retain the above copyright notice,
8
+ this list of conditions and the following disclaimer.
9
+
10
+ 2. Redistributions in binary form must reproduce the above copyright
11
+ notice, this list of conditions and the following disclaimer in the
12
+ documentation and/or other materials provided with the distribution.
13
+
14
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
15
+ ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
16
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
17
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
18
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
19
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
20
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
21
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
22
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
archive/.venv/Lib/site-packages/wheel/vendored/packaging/markers.py ADDED
@@ -0,0 +1,253 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import operator
6
+ import os
7
+ import platform
8
+ import sys
9
+ from typing import Any, Callable, Dict, List, Optional, Tuple, Union
10
+
11
+ from ._parser import (
12
+ MarkerAtom,
13
+ MarkerList,
14
+ Op,
15
+ Value,
16
+ Variable,
17
+ )
18
+ from ._parser import (
19
+ parse_marker as _parse_marker,
20
+ )
21
+ from ._tokenizer import ParserSyntaxError
22
+ from .specifiers import InvalidSpecifier, Specifier
23
+ from .utils import canonicalize_name
24
+
25
+ __all__ = [
26
+ "InvalidMarker",
27
+ "UndefinedComparison",
28
+ "UndefinedEnvironmentName",
29
+ "Marker",
30
+ "default_environment",
31
+ ]
32
+
33
+ Operator = Callable[[str, str], bool]
34
+
35
+
36
class InvalidMarker(ValueError):
    """
    An invalid marker was found, users should refer to PEP 508.

    Raised by :class:`Marker` when the marker string cannot be parsed.
    """
40
+
41
+
42
class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.

    Raised during marker evaluation when the operator is neither a valid
    version specifier nor one of the plain comparison operators.
    """
46
+
47
+
48
class UndefinedEnvironmentName(ValueError):
    """
    A name was attempted to be used that does not exist inside of the
    environment.
    """

    # NOTE(review): not raised anywhere in this module; presumably kept for
    # backwards compatibility with older public API — confirm against callers.
53
+
54
+
55
def _normalize_extra_values(results: Any) -> Any:
    """
    Normalize extra values.
    """
    # Only the first element of the parsed marker list is inspected: when it
    # is an (lhs, op, rhs) comparison involving the `extra` variable, the
    # literal on the other side is canonicalized (PEP 503/685 name rules) so
    # that extra-name comparisons are normalization-insensitive.
    if isinstance(results[0], tuple):
        lhs, op, rhs = results[0]
        if isinstance(lhs, Variable) and lhs.value == "extra":
            normalized_extra = canonicalize_name(rhs.value)
            rhs = Value(normalized_extra)
        elif isinstance(rhs, Variable) and rhs.value == "extra":
            normalized_extra = canonicalize_name(lhs.value)
            lhs = Value(normalized_extra)
        results[0] = lhs, op, rhs
    return results
69
+
70
+
71
def _format_marker(
    marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
) -> str:
    # Recursively serialize a parsed marker structure back into marker
    # syntax. `first` is True only for the outermost call, where surrounding
    # parentheses are not wanted.
    assert isinstance(marker, (list, tuple, str))

    # Sometimes we have a structure like [[...]] which is a single item list
    # where the single item is itself it's own list. In that case we want skip
    # the rest of this function so that we don't get extraneous () on the
    # outside.
    if (
        isinstance(marker, list)
        and len(marker) == 1
        and isinstance(marker[0], (list, tuple))
    ):
        return _format_marker(marker[0])

    if isinstance(marker, list):
        inner = (_format_marker(m, first=False) for m in marker)
        if first:
            return " ".join(inner)
        else:
            return "(" + " ".join(inner) + ")"
    elif isinstance(marker, tuple):
        # An (lhs, op, rhs) atom: serialize each node.
        return " ".join([m.serialize() for m in marker])
    else:
        # A bare "and" / "or" connective string.
        return marker
97
+
98
+
99
# Fallback comparison operators used by _eval_op when the right-hand side is
# not a valid PEP 440 version specifier: plain string / containment semantics.
_operators: Dict[str, Operator] = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}
109
+
110
+
111
def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
    # First try PEP 440 version-specifier semantics (e.g. `>= "1.0"`); if the
    # operator+value pair does not form a valid specifier, fall back to the
    # plain string comparison table above.
    try:
        spec = Specifier("".join([op.serialize(), rhs]))
    except InvalidSpecifier:
        pass
    else:
        # Prereleases are deliberately included when matching markers.
        return spec.contains(lhs, prereleases=True)

    oper: Optional[Operator] = _operators.get(op.serialize())
    if oper is None:
        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")

    return oper(lhs, rhs)
124
+
125
+
126
+ def _normalize(*values: str, key: str) -> Tuple[str, ...]:
127
+ # PEP 685 – Comparison of extra names for optional distribution dependencies
128
+ # https://peps.python.org/pep-0685/
129
+ # > When comparing extra names, tools MUST normalize the names being
130
+ # > compared using the semantics outlined in PEP 503 for names
131
+ if key == "extra":
132
+ return tuple(canonicalize_name(v) for v in values)
133
+
134
+ # other environment markers don't have such standards
135
+ return values
136
+
137
+
138
def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
    # Evaluate a parsed marker expression. `groups` holds one list of boolean
    # results per "or"-separated clause; results within a clause are combined
    # with "and" (disjunctive normal form over the flat token stream).
    groups: List[List[bool]] = [[]]

    for marker in markers:
        assert isinstance(marker, (list, tuple, str))

        if isinstance(marker, list):
            # Parenthesized sub-expression: evaluate recursively.
            groups[-1].append(_evaluate_markers(marker, environment))
        elif isinstance(marker, tuple):
            lhs, op, rhs = marker

            # Exactly one side is an environment Variable; look its value up
            # in `environment` and keep the other side's literal value.
            if isinstance(lhs, Variable):
                environment_key = lhs.value
                lhs_value = environment[environment_key]
                rhs_value = rhs.value
            else:
                lhs_value = lhs.value
                environment_key = rhs.value
                rhs_value = environment[environment_key]

            # PEP 685 name normalization applies when comparing "extra".
            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
        else:
            assert marker in ["and", "or"]
            if marker == "or":
                # Start a fresh "and"-clause for the next disjunct.
                groups.append([])

    return any(all(item) for item in groups)
166
+
167
+
168
def format_full_version(info: "sys._version_info") -> str:
    """Render a version-info tuple as ``major.minor.micro`` plus an optional
    pre-release suffix (e.g. ``3.12.0b1`` for a beta)."""
    version = f"{info.major}.{info.minor}.{info.micro}"
    release_kind = info.releaselevel
    if release_kind != "final":
        # Non-final releases append the first letter of the level + serial.
        version += release_kind[0] + str(info.serial)
    return version
174
+
175
+
176
def default_environment() -> Dict[str, str]:
    """Build the PEP 508 marker environment for the running interpreter."""
    implementation_version = format_full_version(sys.implementation.version)
    python_major_minor = ".".join(platform.python_version_tuple()[:2])
    return {
        "implementation_name": sys.implementation.name,
        "implementation_version": implementation_version,
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        "python_version": python_major_minor,
        "sys_platform": sys.platform,
    }
192
+
193
+
194
class Marker:
    """A parsed PEP 508 environment marker that can be stringified, compared,
    and evaluated against an environment dictionary."""

    def __init__(self, marker: str) -> None:
        # Note: We create a Marker object without calling this constructor in
        # packaging.requirements.Requirement. If any additional logic is
        # added here, make sure to mirror/adapt Requirement.
        try:
            self._markers = _normalize_extra_values(_parse_marker(marker))
            # The attribute `_markers` can be described in terms of a recursive type:
            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
            #
            # For example, the following expression:
            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
            #
            # is parsed into:
            # [
            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
            #     'and',
            #     [
            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
            #         'or',
            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
            #     ]
            # ]
        except ParserSyntaxError as e:
            raise InvalidMarker(str(e)) from e

    def __str__(self) -> str:
        # Serialize the parsed structure back into canonical marker syntax.
        return _format_marker(self._markers)

    def __repr__(self) -> str:
        return f"<Marker('{self}')>"

    def __hash__(self) -> int:
        # Hash on the canonical string form so equal markers hash equally.
        return hash((self.__class__.__name__, str(self)))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Marker):
            return NotImplemented

        # Equality is defined on the canonical serialized form.
        return str(self) == str(other)

    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = default_environment()
        # "extra" defaults to the empty string so bare markers evaluate sanely.
        current_environment["extra"] = ""
        if environment is not None:
            current_environment.update(environment)
            # The API used to allow setting extra to None. We need to handle this
            # case for backwards compatibility.
            if current_environment["extra"] is None:
                current_environment["extra"] = ""

        return _evaluate_markers(self._markers, current_environment)
archive/.venv/Lib/site-packages/wheel/vendored/packaging/requirements.py ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ from typing import Any, Iterator, Optional, Set
6
+
7
+ from ._parser import parse_requirement as _parse_requirement
8
+ from ._tokenizer import ParserSyntaxError
9
+ from .markers import Marker, _normalize_extra_values
10
+ from .specifiers import SpecifierSet
11
+ from .utils import canonicalize_name
12
+
13
+
14
class InvalidRequirement(ValueError):
    """
    An invalid requirement was found, users should refer to PEP 508.

    Raised by :class:`Requirement` when the requirement string cannot be
    parsed.
    """
18
+
19
+
20
class Requirement:
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string: str) -> None:
        try:
            parsed = _parse_requirement(requirement_string)
        except ParserSyntaxError as e:
            raise InvalidRequirement(str(e)) from e

        self.name: str = parsed.name
        # URL is stored as None (not "") when absent.
        self.url: Optional[str] = parsed.url or None
        self.extras: Set[str] = set(parsed.extras or [])
        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
        self.marker: Optional[Marker] = None
        if parsed.marker is not None:
            # Build the Marker without re-parsing: bypass Marker.__init__ and
            # attach the already-parsed (extra-normalized) marker structure.
            self.marker = Marker.__new__(Marker)
            self.marker._markers = _normalize_extra_values(parsed.marker)

    def _iter_parts(self, name: str) -> Iterator[str]:
        # Yield the canonical string fragments of the requirement in order:
        # name, [extras], specifier, "@ url", "; marker".
        yield name

        if self.extras:
            formatted_extras = ",".join(sorted(self.extras))
            yield f"[{formatted_extras}]"

        if self.specifier:
            yield str(self.specifier)

        if self.url:
            yield f"@ {self.url}"
            if self.marker:
                # A URL followed by a marker needs a separating space
                # before the ";".
                yield " "

        if self.marker:
            yield f"; {self.marker}"

    def __str__(self) -> str:
        return "".join(self._iter_parts(self.name))

    def __repr__(self) -> str:
        return f"<Requirement('{self}')>"

    def __hash__(self) -> int:
        # Hash over the canonical parts with the name normalized, so that
        # requirements differing only in name spelling hash equally.
        return hash(
            (
                self.__class__.__name__,
                *self._iter_parts(canonicalize_name(self.name)),
            )
        )

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Requirement):
            return NotImplemented

        # Names compare after canonicalization; everything else exactly.
        return (
            canonicalize_name(self.name) == canonicalize_name(other.name)
            and self.extras == other.extras
            and self.specifier == other.specifier
            and self.url == other.url
            and self.marker == other.marker
        )
archive/.venv/Lib/site-packages/wheel/vendored/packaging/specifiers.py ADDED
@@ -0,0 +1,1011 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+ """
5
+ .. testsetup::
6
+
7
+ from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
8
+ from packaging.version import Version
9
+ """
10
+
11
+ import abc
12
+ import itertools
13
+ import re
14
+ from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
15
+
16
+ from .utils import canonicalize_version
17
+ from .version import Version
18
+
19
# Versions may be supplied either pre-parsed or as raw strings.
UnparsedVersion = Union[Version, str]
# TypeVar so filter() can yield the same concrete type (str vs Version) it was given.
UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
# Signature of the ``_compare_*`` methods looked up by Specifier._get_operator().
CallableOperator = Callable[[Version, str], bool]
22
+
23
+
24
def _coerce_version(version: UnparsedVersion) -> Version:
    """Return ``version`` as a :class:`Version`, parsing it when given a string."""
    if isinstance(version, Version):
        return version
    return Version(version)
28
+
29
+
30
# Subclasses ValueError so callers may catch either exception type.
class InvalidSpecifier(ValueError):
    """
    Raised when attempting to create a :class:`Specifier` with a specifier
    string that is invalid.

    >>> Specifier("lolwat")
    Traceback (most recent call last):
        ...
    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
    """
40
+
41
+
42
class BaseSpecifier(metaclass=abc.ABCMeta):
    # Abstract interface implemented by both Specifier and SpecifierSet below.
    @abc.abstractmethod
    def __str__(self) -> str:
        """
        Returns the str representation of this Specifier-like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self) -> int:
        """
        Returns a hash value for this Specifier-like object.
        """

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        """
        Returns a boolean representing whether or not the two Specifier-like
        objects are equal.

        :param other: The other object to check against.
        """

    @property
    @abc.abstractmethod
    def prereleases(self) -> Optional[bool]:
        """Whether or not pre-releases as a whole are allowed.

        This can be set to either ``True`` or ``False`` to explicitly enable or disable
        prereleases or it can be set to ``None`` (the default) to use default semantics.
        """

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        """Setter for :attr:`prereleases`.

        :param value: The value to set.
        """

    @abc.abstractmethod
    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
    ) -> Iterator[UnparsedVersionVar]:
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """
95
+
96
+
97
class Specifier(BaseSpecifier):
    """This class abstracts handling of version specifiers.

    .. tip::

        It is generally not required to instantiate this manually. You should instead
        prefer to work with :class:`SpecifierSet` instead, which can parse
        comma-separated version specifiers (which is what package metadata contains).
    """

    _operator_regex_str = r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        """
    _version_regex_str = r"""
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s;)]*  # The arbitrary version can be just about anything,
                          # we match everything except for whitespace, a
                          # semi-colon for marker support, and a closing paren
                          # since versions can be enclosed in them.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release

                # You cannot use a wild card and a pre-release, post-release, a dev or
                # local version together so group them with a | and make them optional.
                (?:
                    \.\*  # Wild card syntax of .*
                    |
                    (?:                                  # pre release
                        [-_\.]?
                        (alpha|beta|preview|pre|a|b|c|rc)
                        [-_\.]?
                        [0-9]*
                    )?
                    (?:                                  # post release
                        (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                    )?
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (alpha|beta|preview|pre|a|b|c|rc)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (alpha|beta|preview|pre|a|b|c|rc)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
        )
        """

    # Verbose, case-insensitive pattern matching "<operator><version>" with
    # optional surrounding whitespace.
    _regex = re.compile(
        r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
        re.VERBOSE | re.IGNORECASE,
    )

    # Operator token -> suffix of the ``_compare_*`` method implementing it
    # (resolved dynamically by _get_operator()).
    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
        """Initialize a Specifier instance.

        :param spec:
            The string representation of a specifier which will be parsed and
            normalized before use.
        :param prereleases:
            This tells the specifier if it should accept prerelease versions if
            applicable or not. The default of ``None`` will autodetect it from the
            given specifiers.
        :raises InvalidSpecifier:
            If the given specifier is invalid (i.e. bad syntax).
        """
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")

        self._spec: Tuple[str, str] = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )

        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases

    # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
    @property  # type: ignore[override]
    def prereleases(self) -> bool:
        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are if they are including an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release than this
            # specifier allows pre-releases.
            if Version(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        # Explicit override; takes precedence over the auto-detection above.
        self._prereleases = value

    @property
    def operator(self) -> str:
        """The operator of this specifier.

        >>> Specifier("==1.2.3").operator
        '=='
        """
        return self._spec[0]

    @property
    def version(self) -> str:
        """The version of this specifier.

        >>> Specifier("==1.2.3").version
        '1.2.3'
        """
        return self._spec[1]

    def __repr__(self) -> str:
        """A representation of the Specifier that shows all internal state.

        >>> Specifier('>=1.0.0')
        <Specifier('>=1.0.0')>
        >>> Specifier('>=1.0.0', prereleases=False)
        <Specifier('>=1.0.0', prereleases=False)>
        >>> Specifier('>=1.0.0', prereleases=True)
        <Specifier('>=1.0.0', prereleases=True)>
        """
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"

    def __str__(self) -> str:
        """A string representation of the Specifier that can be round-tripped.

        >>> str(Specifier('>=1.0.0'))
        '>=1.0.0'
        >>> str(Specifier('>=1.0.0', prereleases=False))
        '>=1.0.0'
        """
        return "{}{}".format(*self._spec)

    @property
    def _canonical_spec(self) -> Tuple[str, str]:
        # Normalized (operator, version) pair used for __hash__/__eq__ so that
        # e.g. "==1.2.3" and "== 1.2.3.0" compare equal.
        canonical_version = canonicalize_version(
            self._spec[1],
            strip_trailing_zero=(self._spec[0] != "~="),
        )
        return self._spec[0], canonical_version

    def __hash__(self) -> int:
        return hash(self._canonical_spec)

    def __eq__(self, other: object) -> bool:
        """Whether or not the two Specifier-like objects are equal.

        :param other: The other object to check against.

        The value of :attr:`prereleases` is ignored.

        >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
        True
        >>> (Specifier("==1.2.3", prereleases=False) ==
        ...  Specifier("==1.2.3", prereleases=True))
        True
        >>> Specifier("==1.2.3") == "==1.2.3"
        True
        >>> Specifier("==1.2.3") == Specifier("==1.2.4")
        False
        >>> Specifier("==1.2.3") == Specifier("~=1.2.3")
        False
        """
        if isinstance(other, str):
            try:
                other = self.__class__(str(other))
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._canonical_spec == other._canonical_spec

    def _get_operator(self, op: str) -> CallableOperator:
        # Resolve the operator token to its ``_compare_*`` implementation.
        operator_callable: CallableOperator = getattr(
            self, f"_compare_{self._operators[op]}"
        )
        return operator_callable

    def _compare_compatible(self, prospective: Version, spec: str) -> bool:
        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore suffix segments.
        prefix = _version_join(
            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
            prospective, prefix
        )

    def _compare_equal(self, prospective: Version, spec: str) -> bool:
        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            normalized_prospective = canonicalize_version(
                prospective.public, strip_trailing_zero=False
            )
            # Get the normalized version string ignoring the trailing .*
            normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
            # Split the spec out by bangs and dots, and pretend that there is
            # an implicit dot in between a release segment and a pre-release segment.
            split_spec = _version_split(normalized_spec)

            # Split the prospective version out by bangs and dots, and pretend
            # that there is an implicit dot in between a release segment and
            # a pre-release segment.
            split_prospective = _version_split(normalized_prospective)

            # 0-pad the prospective version before shortening it to get the correct
            # shortened version.
            padded_prospective, _ = _pad_version(split_prospective, split_spec)

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            shortened_prospective = padded_prospective[: len(split_spec)]

            return shortened_prospective == split_spec
        else:
            # Convert our spec string into a Version
            spec_version = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec_version.local:
                prospective = Version(prospective.public)

            return prospective == spec_version

    def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
        return not self._compare_equal(prospective, spec)

    def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return Version(prospective.public) <= Version(spec)

    def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return Version(prospective.public) >= Version(spec)

    def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec_str)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a pre-release version, that we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True

    def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec_str)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a post-release version, that we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is technically greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True

    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
        # "===" is a plain case-insensitive string comparison, per PEP 440.
        return str(prospective).lower() == str(spec).lower()

    def __contains__(self, item: Union[str, Version]) -> bool:
        """Return whether or not the item is contained in this specifier.

        :param item: The item to check for.

        This is used for the ``in`` operator and behaves the same as
        :meth:`contains` with no ``prereleases`` argument passed.

        >>> "1.2.3" in Specifier(">=1.2.3")
        True
        >>> Version("1.2.3") in Specifier(">=1.2.3")
        True
        >>> "1.0.0" in Specifier(">=1.2.3")
        False
        >>> "1.3.0a1" in Specifier(">=1.2.3")
        False
        >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
        True
        """
        return self.contains(item)

    def contains(
        self, item: UnparsedVersion, prereleases: Optional[bool] = None
    ) -> bool:
        """Return whether or not the item is contained in this specifier.

        :param item:
            The item to check for, which can be a version string or a
            :class:`Version` instance.
        :param prereleases:
            Whether or not to match prereleases with this Specifier. If set to
            ``None`` (the default), it uses :attr:`prereleases` to determine
            whether or not prereleases are allowed.

        >>> Specifier(">=1.2.3").contains("1.2.3")
        True
        >>> Specifier(">=1.2.3").contains(Version("1.2.3"))
        True
        >>> Specifier(">=1.2.3").contains("1.0.0")
        False
        >>> Specifier(">=1.2.3").contains("1.3.0a1")
        False
        >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")
        True
        >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
        True
        """

        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version, this allows us to have a shortcut for
        # "2.0" in Specifier(">=2")
        normalized_item = _coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not, if we do not support prereleases than we can short circuit
        # logic if this version is a prereleases.
        if normalized_item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        operator_callable: CallableOperator = self._get_operator(self.operator)
        return operator_callable(normalized_item, self.version)

    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
    ) -> Iterator[UnparsedVersionVar]:
        """Filter items in the given iterable, that match the specifier.

        :param iterable:
            An iterable that can contain version strings and :class:`Version` instances.
            The items in the iterable will be filtered according to the specifier.
        :param prereleases:
            Whether or not to allow prereleases in the returned iterator. If set to
            ``None`` (the default), it will be intelligently decide whether to allow
            prereleases or not (based on the :attr:`prereleases` attribute, and
            whether the only versions matching are prereleases).

        This method is smarter than just ``filter(Specifier().contains, [...])``
        because it implements the rule from :pep:`440` that a prerelease item
        SHOULD be accepted if no other versions match the given specifier.

        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
        ['1.3']
        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
        ['1.2.3', '1.3', <Version('1.4')>]
        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
        ['1.5a1']
        >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']
        """

        yielded = False
        found_prereleases = []

        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = _coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
                # else matches this specifier.
                if parsed_version.is_prerelease and not (
                    prereleases or self.prereleases
                ):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the beginning.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version
625
+
626
+
627
+ _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
628
+
629
+
630
+ def _version_split(version: str) -> List[str]:
631
+ """Split version into components.
632
+
633
+ The split components are intended for version comparison. The logic does
634
+ not attempt to retain the original version string, so joining the
635
+ components back with :func:`_version_join` may not produce the original
636
+ version string.
637
+ """
638
+ result: List[str] = []
639
+
640
+ epoch, _, rest = version.rpartition("!")
641
+ result.append(epoch or "0")
642
+
643
+ for item in rest.split("."):
644
+ match = _prefix_regex.search(item)
645
+ if match:
646
+ result.extend(match.groups())
647
+ else:
648
+ result.append(item)
649
+ return result
650
+
651
+
652
+ def _version_join(components: List[str]) -> str:
653
+ """Join split version components into a version string.
654
+
655
+ This function assumes the input came from :func:`_version_split`, where the
656
+ first component must be the epoch (either empty or numeric), and all other
657
+ components numeric.
658
+ """
659
+ epoch, *rest = components
660
+ return f"{epoch}!{'.'.join(rest)}"
661
+
662
+
663
+ def _is_not_suffix(segment: str) -> bool:
664
+ return not any(
665
+ segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
666
+ )
667
+
668
+
669
+ def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
670
+ left_split, right_split = [], []
671
+
672
+ # Get the release segment of our versions
673
+ left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
674
+ right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
675
+
676
+ # Get the rest of our versions
677
+ left_split.append(left[len(left_split[0]) :])
678
+ right_split.append(right[len(right_split[0]) :])
679
+
680
+ # Insert our padding
681
+ left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
682
+ right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
683
+
684
+ return (
685
+ list(itertools.chain.from_iterable(left_split)),
686
+ list(itertools.chain.from_iterable(right_split)),
687
+ )
688
+
689
+
690
+ class SpecifierSet(BaseSpecifier):
691
+ """This class abstracts handling of a set of version specifiers.
692
+
693
+ It can be passed a single specifier (``>=3.0``), a comma-separated list of
694
+ specifiers (``>=3.0,!=3.1``), or no specifier at all.
695
+ """
696
+
697
+ def __init__(
698
+ self, specifiers: str = "", prereleases: Optional[bool] = None
699
+ ) -> None:
700
+ """Initialize a SpecifierSet instance.
701
+
702
+ :param specifiers:
703
+ The string representation of a specifier or a comma-separated list of
704
+ specifiers which will be parsed and normalized before use.
705
+ :param prereleases:
706
+ This tells the SpecifierSet if it should accept prerelease versions if
707
+ applicable or not. The default of ``None`` will autodetect it from the
708
+ given specifiers.
709
+
710
+ :raises InvalidSpecifier:
711
+ If the given ``specifiers`` are not parseable than this exception will be
712
+ raised.
713
+ """
714
+
715
+ # Split on `,` to break each individual specifier into it's own item, and
716
+ # strip each item to remove leading/trailing whitespace.
717
+ split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
718
+
719
+ # Make each individual specifier a Specifier and save in a frozen set for later.
720
+ self._specs = frozenset(map(Specifier, split_specifiers))
721
+
722
+ # Store our prereleases value so we can use it later to determine if
723
+ # we accept prereleases or not.
724
+ self._prereleases = prereleases
725
+
726
+ @property
727
+ def prereleases(self) -> Optional[bool]:
728
+ # If we have been given an explicit prerelease modifier, then we'll
729
+ # pass that through here.
730
+ if self._prereleases is not None:
731
+ return self._prereleases
732
+
733
+ # If we don't have any specifiers, and we don't have a forced value,
734
+ # then we'll just return None since we don't know if this should have
735
+ # pre-releases or not.
736
+ if not self._specs:
737
+ return None
738
+
739
+ # Otherwise we'll see if any of the given specifiers accept
740
+ # prereleases, if any of them do we'll return True, otherwise False.
741
+ return any(s.prereleases for s in self._specs)
742
+
743
+ @prereleases.setter
744
+ def prereleases(self, value: bool) -> None:
745
+ self._prereleases = value
746
+
747
+ def __repr__(self) -> str:
748
+ """A representation of the specifier set that shows all internal state.
749
+
750
+ Note that the ordering of the individual specifiers within the set may not
751
+ match the input string.
752
+
753
+ >>> SpecifierSet('>=1.0.0,!=2.0.0')
754
+ <SpecifierSet('!=2.0.0,>=1.0.0')>
755
+ >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
756
+ <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
757
+ >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
758
+ <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
759
+ """
760
+ pre = (
761
+ f", prereleases={self.prereleases!r}"
762
+ if self._prereleases is not None
763
+ else ""
764
+ )
765
+
766
+ return f"<SpecifierSet({str(self)!r}{pre})>"
767
+
768
+ def __str__(self) -> str:
769
+ """A string representation of the specifier set that can be round-tripped.
770
+
771
+ Note that the ordering of the individual specifiers within the set may not
772
+ match the input string.
773
+
774
+ >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
775
+ '!=1.0.1,>=1.0.0'
776
+ >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
777
+ '!=1.0.1,>=1.0.0'
778
+ """
779
+ return ",".join(sorted(str(s) for s in self._specs))
780
+
781
+ def __hash__(self) -> int:
782
+ return hash(self._specs)
783
+
784
+ def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
785
+ """Return a SpecifierSet which is a combination of the two sets.
786
+
787
+ :param other: The other object to combine with.
788
+
789
+ >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
790
+ <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
791
+ >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
792
+ <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
793
+ """
794
+ if isinstance(other, str):
795
+ other = SpecifierSet(other)
796
+ elif not isinstance(other, SpecifierSet):
797
+ return NotImplemented
798
+
799
+ specifier = SpecifierSet()
800
+ specifier._specs = frozenset(self._specs | other._specs)
801
+
802
+ if self._prereleases is None and other._prereleases is not None:
803
+ specifier._prereleases = other._prereleases
804
+ elif self._prereleases is not None and other._prereleases is None:
805
+ specifier._prereleases = self._prereleases
806
+ elif self._prereleases == other._prereleases:
807
+ specifier._prereleases = self._prereleases
808
+ else:
809
+ raise ValueError(
810
+ "Cannot combine SpecifierSets with True and False prerelease "
811
+ "overrides."
812
+ )
813
+
814
+ return specifier
815
+
816
+ def __eq__(self, other: object) -> bool:
817
+ """Whether or not the two SpecifierSet-like objects are equal.
818
+
819
+ :param other: The other object to check against.
820
+
821
+ The value of :attr:`prereleases` is ignored.
822
+
823
+ >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
824
+ True
825
+ >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
826
+ ... SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
827
+ True
828
+ >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
829
+ True
830
+ >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
831
+ False
832
+ >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
833
+ False
834
+ """
835
+ if isinstance(other, (str, Specifier)):
836
+ other = SpecifierSet(str(other))
837
+ elif not isinstance(other, SpecifierSet):
838
+ return NotImplemented
839
+
840
+ return self._specs == other._specs
841
+
842
+ def __len__(self) -> int:
843
+ """Returns the number of specifiers in this specifier set."""
844
+ return len(self._specs)
845
+
846
+ def __iter__(self) -> Iterator[Specifier]:
847
+ """
848
+ Returns an iterator over all the underlying :class:`Specifier` instances
849
+ in this specifier set.
850
+
851
+ >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
852
+ [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
853
+ """
854
+ return iter(self._specs)
855
+
856
def __contains__(self, item: "UnparsedVersion") -> bool:
    """Support ``version in specifier_set``.

    Equivalent to calling :meth:`contains` with its default arguments,
    i.e. the set's own ``prereleases`` policy is applied.
    """
    return self.contains(item)
876
+
877
def contains(
    self,
    item: "UnparsedVersion",
    prereleases: Optional[bool] = None,
    installed: Optional[bool] = None,
) -> bool:
    """Check whether ``item`` satisfies every specifier in this set.

    :param item: a version string or :class:`Version` instance.
    :param prereleases: explicit prerelease policy for this call; when
        ``None`` the set's own :attr:`prereleases` setting is used.
    :param installed: when truthy, a prerelease ``item`` is reduced to
        its base version before matching (used for already-installed
        distributions).
    """
    # Work on a Version instance regardless of what was passed in.
    version = item if isinstance(item, Version) else Version(item)

    # Resolve the effective prerelease policy for this call.
    effective_prereleases = (
        self.prereleases if prereleases is None else prereleases
    )

    # A prerelease can be rejected outright when prereleases are not
    # allowed -- no need to consult the individual specifiers.
    if version.is_prerelease and not effective_prereleases:
        return False

    if installed and version.is_prerelease:
        version = Version(version.base_version)

    # Every specifier must accept the version.  Note that all() over an
    # empty set of specifiers is True -- an explicit design decision:
    # an empty SpecifierSet accepts everything.
    return all(
        spec.contains(version, prereleases=effective_prereleases)
        for spec in self._specs
    )
933
+
934
def filter(
    self, iterable: "Iterable[UnparsedVersionVar]", prereleases: Optional[bool] = None
) -> "Iterator[UnparsedVersionVar]":
    """Yield the items of ``iterable`` accepted by every specifier.

    :param iterable: version strings and/or :class:`Version` instances.
    :param prereleases: explicit prerelease policy; ``None`` (default)
        defers to :attr:`prereleases` and, for an empty specifier set,
        to the PEP 440 rule that prereleases are only returned when no
        final release matches.
    """
    if prereleases is None:
        prereleases = self.prereleases

    # With at least one specifier, chain each specifier's own filter;
    # the composition acts as a logical AND across all of them.
    if self._specs:
        for spec in self._specs:
            iterable = spec.filter(iterable, prereleases=bool(prereleases))
        return iter(iterable)

    # No specifiers: apply the PEP 440 prerelease rule by hand.  Keep
    # final releases; stash prereleases and fall back to them only when
    # no final release was seen and no explicit policy was given.
    finals = []
    prerelease_only = []
    for candidate in iterable:
        version = _coerce_version(candidate)
        if version.is_prerelease and not prereleases:
            # Remember prereleases only while no final release exists.
            if not finals:
                prerelease_only.append(candidate)
        else:
            finals.append(candidate)

    if not finals and prerelease_only and prereleases is None:
        return iter(prerelease_only)
    return iter(finals)
archive/.venv/Lib/site-packages/wheel/vendored/packaging/tags.py ADDED
@@ -0,0 +1,571 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import logging
6
+ import platform
7
+ import re
8
+ import struct
9
+ import subprocess
10
+ import sys
11
+ import sysconfig
12
+ from importlib.machinery import EXTENSION_SUFFIXES
13
+ from typing import (
14
+ Dict,
15
+ FrozenSet,
16
+ Iterable,
17
+ Iterator,
18
+ List,
19
+ Optional,
20
+ Sequence,
21
+ Tuple,
22
+ Union,
23
+ cast,
24
+ )
25
+
26
+ from . import _manylinux, _musllinux
27
+
28
+ logger = logging.getLogger(__name__)
29
+
30
+ PythonVersion = Sequence[int]
31
+ MacVersion = Tuple[int, int]
32
+
33
+ INTERPRETER_SHORT_NAMES: Dict[str, str] = {
34
+ "python": "py", # Generic.
35
+ "cpython": "cp",
36
+ "pypy": "pp",
37
+ "ironpython": "ip",
38
+ "jython": "jy",
39
+ }
40
+
41
+
42
+ _32_BIT_INTERPRETER = struct.calcsize("P") == 4
43
+
44
+
45
class Tag:
    """Immutable (interpreter, abi, platform) triple identifying a wheel tag.

    All three components are normalised to lower case.  Instances are
    hashable and comparable, so they can be stored in sets and dicts.
    """

    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # Hashing happens extremely often (e.g. Set[Tag].isdisjoint()
        # while scanning a page of package links), so compute the hash
        # once up front instead of on every call.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented
        # Cached-hash comparison first: a cheap reject for the common
        # "not equal" case before comparing the three strings.
        if self._hash != other._hash:
            return False
        return (
            self._platform == other._platform
            and self._abi == other._abi
            and self._interpreter == other._interpreter
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return "-".join((self._interpreter, self._abi, self._platform))

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"
97
+
98
+
99
def parse_tag(tag: str) -> "FrozenSet[Tag]":
    """Expand a (possibly compressed) tag string into a frozenset of Tags.

    A compressed tag set such as ``py2.py3-none-any`` produces one
    ``Tag`` per combination, hence the set return type.
    """
    interpreters, abis, platforms = tag.split("-")
    return frozenset(
        Tag(interpreter, abi, platform_)
        for interpreter in interpreters.split(".")
        for abi in abis.split(".")
        for platform_ in platforms.split(".")
    )
113
+
114
+
115
def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
    """Fetch a sysconfig variable, optionally logging when it is unset."""
    value: Union[int, str, None] = sysconfig.get_config_var(name)
    if warn and value is None:
        logger.debug(
            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
        )
    return value
122
+
123
+
124
+ def _normalize_string(string: str) -> str:
125
+ return string.replace(".", "_").replace("-", "_").replace(" ", "_")
126
+
127
+
128
+ def _is_threaded_cpython(abis: List[str]) -> bool:
129
+ """
130
+ Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
131
+
132
+ The threaded builds are indicated by a "t" in the abiflags.
133
+ """
134
+ if len(abis) == 0:
135
+ return False
136
+ # expect e.g., cp313
137
+ m = re.match(r"cp\d+(.*)", abis[0])
138
+ if not m:
139
+ return False
140
+ abiflags = m.group(1)
141
+ return "t" in abiflags
142
+
143
+
144
+ def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
145
+ """
146
+ Determine if the Python version supports abi3.
147
+
148
+ PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
149
+ builds do not support abi3.
150
+ """
151
+ return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading
152
+
153
+
154
def _cpython_abis(py_version: "PythonVersion", warn: bool = False) -> List[str]:
    """Compute the ordered list of CPython ABI tags for ``py_version``.

    The most specific ABI (with debug/pymalloc/ucs4/threading flags)
    comes first; for debug builds on 3.8+ the plain ABI is appended as
    well, because those builds can load regular extension modules.
    """
    py_version = tuple(py_version)  # allow tuple comparisons below
    version = _version_nodot(py_version[:2])
    threading = debug = pymalloc = ucs4 = ""

    with_debug = _get_config_var("Py_DEBUG", warn)
    # Windows doesn't set Py_DEBUG, so the presence of "_d.pyd" in the
    # extension suffixes (or sys.gettotalrefcount) is the best signal
    # for a debug build there.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    is_debug_build = bool(with_debug) or (
        with_debug is None
        and (hasattr(sys, "gettotalrefcount") or "_d.pyd" in EXTENSION_SUFFIXES)
    )
    if is_debug_build:
        debug = "d"
    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
        threading = "t"

    abis: List[str] = []
    if py_version < (3, 8):
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds (3.8+) can also load "normal" extension modules,
        # and carry no pymalloc/ucs4 markers.
        abis.append(f"cp{version}{threading}")
    abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
    return abis
185
+
186
+
187
def cpython_tags(
    python_version: Optional["PythonVersion"] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> "Iterator[Tag]":
    """Yield wheel tags for a CPython interpreter, most specific first.

    Order of emission:
      1. cp<version>-<abi>-<platform> for each computed/supplied ABI
      2. cp<version>-abi3-<platform> (when the stable ABI applies)
      3. cp<version>-none-<platform>
      4. cp<older>-abi3-<platform> for every minor back to 3.2

    A major-only ``python_version`` suppresses the computed ABIs.  An
    explicit 'abi3' or 'none' in ``abis`` keeps its given position and
    is not re-emitted in its canonical slot.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        abis = _cpython_abis(python_version, warn) if len(python_version) > 1 else []
    abis = list(abis)
    # Pull out one occurrence each of "abi3"/"none"; they are handled
    # explicitly in their canonical positions below.
    for special in ("abi3", "none"):
        if special in abis:
            abis.remove(special)

    platforms = list(platforms or platform_tags())
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)

    threading = _is_threaded_cpython(abis)
    use_abi3 = _abi3_applies(python_version, threading)
    if use_abi3:
        for platform_ in platforms:
            yield Tag(interpreter, "abi3", platform_)
    for platform_ in platforms:
        yield Tag(interpreter, "none", platform_)

    if use_abi3:
        # Wheels built against the stable ABI of any older 3.x release
        # also work on this interpreter.
        for minor_version in range(python_version[1] - 1, 1, -1):
            older = f"cp{_version_nodot((python_version[0], minor_version))}"
            for platform_ in platforms:
                yield Tag(older, "abi3", platform_)
245
+
246
+
247
def _generic_abi() -> List[str]:
    """Derive the ABI tag from sysconfig's ``EXT_SUFFIX``.

    EXT_SUFFIX examples and the ABI they map to:
      .cpython-310-x86_64-linux-gnu.so       -> cp310
      .cpython-310-darwin.so                 -> cp310
      .cp310-win_amd64.pyd                   -> cp310
      .pyd                                   -> cp37 (via _cpython_abis)
      .pypy38-pp73-x86_64-linux-gnu.so       -> pypy38_pp73
      .graalpy-38-native-x86_64-darwin.dylib -> graalpy_38_native
    """
    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
    parts = ext_suffix.split(".")
    if len(parts) < 3:
        # CPython <= 3.7 used a bare ".pyd" on Windows.
        return _cpython_abis(sys.version_info[:2])
    soabi = parts[1]
    if soabi.startswith("cpython"):
        abi = "cp" + soabi.split("-")[1]  # non-Windows CPython
    elif soabi.startswith("cp"):
        abi = soabi.split("-")[0]  # Windows CPython
    elif soabi.startswith("pypy"):
        abi = "-".join(soabi.split("-")[:2])
    elif soabi.startswith("graalpy"):
        abi = "-".join(soabi.split("-")[:3])
    elif soabi:
        abi = soabi  # pyston, ironpython, others?
    else:
        return []
    return [_normalize_string(abi)]
286
+
287
+
288
def generic_tags(
    interpreter: Optional[str] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> "Iterator[Tag]":
    """Yield tags for a generic (non-CPython-specific) interpreter.

    Emits ``<interpreter>-<abi>-<platform>`` for every combination,
    appending the "none" ABI when it was not supplied explicitly.
    """
    if not interpreter:
        interpreter = interpreter_name() + interpreter_version(warn=warn)
    abis = _generic_abi() if abis is None else list(abis)
    platforms = list(platforms or platform_tags())
    if "none" not in abis:
        abis.append("none")
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
317
+
318
+
319
+ def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
320
+ """
321
+ Yields Python versions in descending order.
322
+
323
+ After the latest version, the major-only version will be yielded, and then
324
+ all previous versions of that major version.
325
+ """
326
+ if len(py_version) > 1:
327
+ yield f"py{_version_nodot(py_version[:2])}"
328
+ yield f"py{py_version[0]}"
329
+ if len(py_version) > 1:
330
+ for minor in range(py_version[1] - 1, -1, -1):
331
+ yield f"py{_version_nodot((py_version[0], minor))}"
332
+
333
+
334
def compatible_tags(
    python_version: Optional["PythonVersion"] = None,
    interpreter: Optional[str] = None,
    platforms: Optional[Iterable[str]] = None,
) -> "Iterator[Tag]":
    """Yield the fallback tags compatible with a Python version.

    Order: ``py*-none-<platform>``, then ``<interpreter>-none-any``
    (only when an interpreter is given), then ``py*-none-any``.
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platforms = list(platforms or platform_tags())
    for version in _py_interpreter_range(python_version):
        for platform_ in platforms:
            yield Tag(version, "none", platform_)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    for version in _py_interpreter_range(python_version):
        yield Tag(version, "none", "any")
357
+
358
+
359
def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    """Map the reported macOS arch to what a 32-bit interpreter can run."""
    if is_32bit:
        # 32-bit processes run "ppc" code on PowerPC, "i386" elsewhere.
        return "ppc" if arch.startswith("ppc") else "i386"
    return arch
367
+
368
+
369
+ def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
370
+ formats = [cpu_arch]
371
+ if cpu_arch == "x86_64":
372
+ if version < (10, 4):
373
+ return []
374
+ formats.extend(["intel", "fat64", "fat32"])
375
+
376
+ elif cpu_arch == "i386":
377
+ if version < (10, 4):
378
+ return []
379
+ formats.extend(["intel", "fat32", "fat"])
380
+
381
+ elif cpu_arch == "ppc64":
382
+ # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
383
+ if version > (10, 5) or version < (10, 4):
384
+ return []
385
+ formats.append("fat64")
386
+
387
+ elif cpu_arch == "ppc":
388
+ if version > (10, 6):
389
+ return []
390
+ formats.extend(["fat32", "fat"])
391
+
392
+ if cpu_arch in {"arm64", "x86_64"}:
393
+ formats.append("universal2")
394
+
395
+ if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
396
+ formats.append("universal")
397
+
398
+ return formats
399
+
400
+
401
def mac_platforms(
    version: Optional["MacVersion"] = None, arch: Optional[str] = None
) -> Iterator[str]:
    """Yield macOS platform tags, newest/most specific first.

    :param version: (major, minor) macOS version to target; defaults to
        the running system's version.
    :param arch: CPU architecture to target; defaults to the running
        system's architecture.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
        if version == (10, 16):
            # Built against an older SDK, Python reports macOS 10.16
            # instead of the real version; ask a fresh interpreter with
            # SYSTEM_VERSION_COMPAT disabled for the truth.
            version_str = subprocess.run(
                [
                    sys.executable,
                    "-sS",
                    "-c",
                    "import platform; print(platform.mac_ver()[0])",
                ],
                check=True,
                env={"SYSTEM_VERSION_COMPAT": "0"},
                stdout=subprocess.PIPE,
                text=True,
            ).stdout
            version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    if arch is None:
        arch = _mac_arch(cpu_arch)

    if (10, 0) <= version < (11, 0):
        # Through 10.x, each yearly release only bumped the minor
        # number; the major version was always 10.
        for minor_version in range(version[1], -1, -1):
            compat_version = 10, minor_version
            for binary_format in _mac_binary_formats(compat_version, arch):
                yield f"macosx_10_{minor_version}_{binary_format}"

    if version >= (11, 0):
        # From macOS 11 on, the major number is bumped yearly and the
        # minor versions are midyear updates.
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, 0
            for binary_format in _mac_binary_formats(compat_version, arch):
                yield f"macosx_{major_version}_0_{binary_format}"

        # x86_64 binaries from earlier releases still run on macOS 11+;
        # arm64 only exists from 11.0 so has no earlier tags.  The
        # "universal2" format may also carry a pre-11 version when its
        # x86_64 half supports it.
        if arch == "x86_64":
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                for binary_format in _mac_binary_formats(compat_version, arch):
                    yield f"macosx_10_{minor_version}_{binary_format}"
        else:
            for minor_version in range(16, 3, -1):
                yield f"macosx_10_{minor_version}_universal2"
+ )
487
+
488
+
489
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    """Yield Linux platform tags: manylinux, musllinux, then linux_*."""
    linux = _normalize_string(sysconfig.get_platform())
    if not linux.startswith("linux_"):
        # Should never happen on Linux; report sysconfig's value verbatim.
        yield linux
        return
    # A 32-bit interpreter on a 64-bit kernel reports the kernel arch.
    if is_32bit:
        linux = {
            "linux_x86_64": "linux_i686",
            "linux_aarch64": "linux_armv8l",
        }.get(linux, linux)
    _, arch = linux.split("_", 1)
    # armv8l processes can also run armv7l code.
    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
    yield from _manylinux.platform_tags(archs)
    yield from _musllinux.platform_tags(archs)
    for arch in archs:
        yield f"linux_{arch}"
506
+
507
+
508
def _generic_platforms() -> Iterator[str]:
    """Yield the single sysconfig-derived platform tag, normalised."""
    yield _normalize_string(sysconfig.get_platform())
510
+
511
+
512
def platform_tags() -> Iterator[str]:
    """Return the platform tags for the running system."""
    system = platform.system()
    if system == "Darwin":
        return mac_platforms()
    if system == "Linux":
        return _linux_platforms()
    return _generic_platforms()
522
+
523
+
524
def interpreter_name() -> str:
    """Short name of the running interpreter (e.g. "cp" for CPython).

    Falls back to ``sys.implementation.name`` for implementations
    without a reserved two-letter abbreviation.
    """
    name = sys.implementation.name
    return INTERPRETER_SHORT_NAMES.get(name) or name
533
+
534
+
535
def interpreter_version(*, warn: bool = False) -> str:
    """Dot-free version string of the running interpreter (e.g. "311").

    Prefers sysconfig's ``py_version_nodot``, falling back to
    ``sys.version_info`` when that variable is unset.
    """
    version = _get_config_var("py_version_nodot", warn=warn)
    if version:
        return str(version)
    return _version_nodot(sys.version_info[:2])
545
+
546
+
547
+ def _version_nodot(version: PythonVersion) -> str:
548
+ return "".join(map(str, version))
549
+
550
+
551
def sys_tags(*, warn: bool = False) -> "Iterator[Tag]":
    """Yield every tag supported by the running interpreter, in
    priority order (most specific / most preferred first)."""
    interp_name = interpreter_name()
    if interp_name == "cp":
        yield from cpython_tags(warn=warn)
    else:
        yield from generic_tags()

    # Trailing compatible tags: pp gets a generic "pp3", CPython its
    # versioned "cp3XY", everything else no interpreter-specific tag.
    if interp_name == "pp":
        interp = "pp3"
    elif interp_name == "cp":
        interp = "cp" + interpreter_version(warn=warn)
    else:
        interp = None
    yield from compatible_tags(interpreter=interp)
archive/.venv/Lib/site-packages/wheel/vendored/packaging/utils.py ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import re
6
+ from typing import FrozenSet, NewType, Tuple, Union, cast
7
+
8
+ from .tags import Tag, parse_tag
9
+ from .version import InvalidVersion, Version
10
+
11
+ BuildTag = Union[Tuple[()], Tuple[int, str]]
12
+ NormalizedName = NewType("NormalizedName", str)
13
+
14
+
15
class InvalidName(ValueError):
    """Raised for a distribution name that violates the core metadata
    spec; see the packaging user guide for the naming rules."""
19
+
20
+
21
class InvalidWheelFilename(ValueError):
    """Raised for a wheel filename that does not follow PEP 427."""
25
+
26
+
27
class InvalidSdistFilename(ValueError):
    """Raised for an sdist filename that does not follow the packaging
    user guide's conventions."""
31
+
32
+
33
+ # Core metadata spec for `Name`
34
+ _validate_regex = re.compile(
35
+ r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
36
+ )
37
+ _canonicalize_regex = re.compile(r"[-_.]+")
38
+ _normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
39
+ # PEP 427: The build number must start with a digit.
40
+ _build_tag_regex = re.compile(r"(\d+)(.*)")
41
+
42
+
43
def canonicalize_name(name: str, *, validate: bool = False) -> "NormalizedName":
    """Normalize a project name per PEP 503 (lowercase, runs of ``-_.``
    collapsed to a single ``-``).

    :param validate: when True, first reject names that violate the core
        metadata ``Name`` field, raising :class:`InvalidName`.
    """
    if validate and not _validate_regex.match(name):
        raise InvalidName(f"name is invalid: {name!r}")
    normalized = _canonicalize_regex.sub("-", name).lower()
    return cast(NormalizedName, normalized)
49
+
50
+
51
def is_normalized_name(name: str) -> bool:
    """True when ``name`` is already in PEP 503 normalized form."""
    return bool(_normalized_regex.match(name))
53
+
54
+
55
def canonicalize_version(
    version: Union["Version", str], *, strip_trailing_zero: bool = True
) -> str:
    """Render ``version`` in canonical form.

    Very similar to ``str(Version(...))`` except that, by default,
    trailing ``.0`` components of the release segment are stripped.
    Strings that are not valid PEP 440 versions (legacy versions)
    are returned unchanged.
    """
    if isinstance(version, str):
        try:
            parsed = Version(version)
        except InvalidVersion:
            return version  # legacy versions cannot be normalized
    else:
        parsed = version

    parts = []

    # Epoch
    if parsed.epoch != 0:
        parts.append(f"{parsed.epoch}!")

    # Release segment, optionally with trailing '.0's removed.
    release = ".".join(str(component) for component in parsed.release)
    if strip_trailing_zero:
        release = re.sub(r"(\.0)+$", "", release)
    parts.append(release)

    # Pre-release
    if parsed.pre is not None:
        parts.append("".join(str(component) for component in parsed.pre))

    # Post-release
    if parsed.post is not None:
        parts.append(f".post{parsed.post}")

    # Development release
    if parsed.dev is not None:
        parts.append(f".dev{parsed.dev}")

    # Local version segment
    if parsed.local is not None:
        parts.append(f"+{parsed.local}")

    return "".join(parts)
101
+
102
+
103
def parse_wheel_filename(
    filename: str,
) -> "Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]":
    """Parse a wheel filename into (name, version, build tag, tags).

    :raises InvalidWheelFilename: when the filename violates PEP 427
        (wrong extension, wrong number of parts, bad project name,
        invalid version, or malformed build number).
    """
    if not filename.endswith(".whl"):
        # Bug fix: these messages were f-strings that never interpolated
        # the offending filename; include it for actionable errors.
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename!r}"
        )

    stem = filename[:-4]
    dashes = stem.count("-")
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename!r}"
        )

    parts = stem.split("-", dashes - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name.
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {filename!r}")
    name = canonicalize_name(name_part)

    try:
        version = Version(parts[1])
    except InvalidVersion as e:
        raise InvalidWheelFilename(
            f"Invalid wheel filename (invalid version): {filename!r}"
        ) from e

    if dashes == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in {filename!r}"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
    else:
        build = ()
    tags = parse_tag(parts[-1])
    return (name, version, build, tags)
144
+
145
+
146
def parse_sdist_filename(filename: str) -> "Tuple[NormalizedName, Version]":
    """Parse an sdist filename into (normalized name, version).

    :raises InvalidSdistFilename: for an unknown extension, a missing
        name/version separator, or an invalid PEP 440 version.
    """
    if filename.endswith(".tar.gz"):
        file_stem = filename[: -len(".tar.gz")]
    elif filename.endswith(".zip"):
        file_stem = filename[: -len(".zip")]
    else:
        # Bug fix: these messages were f-strings that never interpolated
        # the offending filename; include it for actionable errors.
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename!r}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename!r}")

    name = canonicalize_name(name_part)

    try:
        version = Version(version_part)
    except InvalidVersion as e:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (invalid version): {filename!r}"
        ) from e

    return (name, version)
archive/.venv/Lib/site-packages/wheel/vendored/vendor.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ packaging==24.0
backend/core/ag4masses/outputs/solved/imo-2004p1.jpg ADDED

Git LFS Details

  • SHA256: 1399b08da9ad93fd0a9711a338f619a637bb7fdd3b30fb8e907e80c2d6568e34
  • Pointer size: 131 Bytes
  • Size of remote file: 212 kB