Mingke977 commited on
Commit
81d78fb
·
verified ·
1 Parent(s): 29f8576

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. venv/lib/python3.10/site-packages/markdown_it/rules_core/__pycache__/replacements.cpython-310.pyc +0 -0
  2. venv/lib/python3.10/site-packages/markdown_it/rules_core/__pycache__/smartquotes.cpython-310.pyc +0 -0
  3. venv/lib/python3.10/site-packages/markdown_it/rules_core/__pycache__/state_core.cpython-310.pyc +0 -0
  4. venv/lib/python3.10/site-packages/markdown_it/rules_core/__pycache__/text_join.cpython-310.pyc +0 -0
  5. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__init__.py +31 -0
  6. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/__init__.cpython-310.pyc +0 -0
  7. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/autolink.cpython-310.pyc +0 -0
  8. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/backticks.cpython-310.pyc +0 -0
  9. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/balance_pairs.cpython-310.pyc +0 -0
  10. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/emphasis.cpython-310.pyc +0 -0
  11. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/entity.cpython-310.pyc +0 -0
  12. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/escape.cpython-310.pyc +0 -0
  13. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/fragments_join.cpython-310.pyc +0 -0
  14. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/html_inline.cpython-310.pyc +0 -0
  15. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/image.cpython-310.pyc +0 -0
  16. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/link.cpython-310.pyc +0 -0
  17. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/linkify.cpython-310.pyc +0 -0
  18. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/newline.cpython-310.pyc +0 -0
  19. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/state_inline.cpython-310.pyc +0 -0
  20. venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/strikethrough.cpython-310.pyc +0 -0
  21. venv/lib/python3.10/site-packages/markdown_it/rules_inline/autolink.py +77 -0
  22. venv/lib/python3.10/site-packages/markdown_it/rules_inline/backticks.py +72 -0
  23. venv/lib/python3.10/site-packages/markdown_it/rules_inline/balance_pairs.py +138 -0
  24. venv/lib/python3.10/site-packages/markdown_it/rules_inline/emphasis.py +102 -0
  25. venv/lib/python3.10/site-packages/markdown_it/rules_inline/entity.py +53 -0
  26. venv/lib/python3.10/site-packages/markdown_it/rules_inline/escape.py +93 -0
  27. venv/lib/python3.10/site-packages/markdown_it/rules_inline/fragments_join.py +43 -0
  28. venv/lib/python3.10/site-packages/markdown_it/rules_inline/html_inline.py +43 -0
  29. venv/lib/python3.10/site-packages/markdown_it/rules_inline/image.py +148 -0
  30. venv/lib/python3.10/site-packages/markdown_it/rules_inline/link.py +149 -0
  31. venv/lib/python3.10/site-packages/markdown_it/rules_inline/linkify.py +62 -0
  32. venv/lib/python3.10/site-packages/markdown_it/rules_inline/newline.py +44 -0
  33. venv/lib/python3.10/site-packages/markdown_it/rules_inline/state_inline.py +165 -0
  34. venv/lib/python3.10/site-packages/markdown_it/rules_inline/strikethrough.py +127 -0
  35. venv/lib/python3.10/site-packages/markdown_it/rules_inline/text.py +62 -0
  36. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  37. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-310.pyc +0 -0
  38. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-310.pyc +0 -0
  39. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-310.pyc +0 -0
  40. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-310.pyc +0 -0
  41. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main.cpython-310.pyc +0 -0
  42. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-310.pyc +0 -0
  43. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/parser.cpython-310.pyc +0 -0
  44. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-310.pyc +0 -0
  45. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-310.pyc +0 -0
  46. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-310.pyc +0 -0
  47. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-310.pyc +0 -0
  48. venv/lib/python3.10/site-packages/pip/_internal/cli/req_command.py +506 -0
  49. venv/lib/python3.10/site-packages/pip/_internal/cli/spinners.py +157 -0
  50. venv/lib/python3.10/site-packages/pip/_internal/cli/status_codes.py +6 -0
venv/lib/python3.10/site-packages/markdown_it/rules_core/__pycache__/replacements.cpython-310.pyc ADDED
Binary file (2.84 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_core/__pycache__/smartquotes.cpython-310.pyc ADDED
Binary file (3.39 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_core/__pycache__/state_core.cpython-310.pyc ADDED
Binary file (1.06 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_core/__pycache__/text_join.cpython-310.pyc ADDED
Binary file (1.2 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__init__.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ __all__ = (
2
+ "StateInline",
3
+ "autolink",
4
+ "backtick",
5
+ "emphasis",
6
+ "entity",
7
+ "escape",
8
+ "fragments_join",
9
+ "html_inline",
10
+ "image",
11
+ "link",
12
+ "link_pairs",
13
+ "linkify",
14
+ "newline",
15
+ "strikethrough",
16
+ "text",
17
+ )
18
+ from . import emphasis, strikethrough
19
+ from .autolink import autolink
20
+ from .backticks import backtick
21
+ from .balance_pairs import link_pairs
22
+ from .entity import entity
23
+ from .escape import escape
24
+ from .fragments_join import fragments_join
25
+ from .html_inline import html_inline
26
+ from .image import image
27
+ from .link import link
28
+ from .linkify import linkify
29
+ from .newline import newline
30
+ from .state_inline import StateInline
31
+ from .text import text
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (884 Bytes). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/autolink.cpython-310.pyc ADDED
Binary file (1.56 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/backticks.cpython-310.pyc ADDED
Binary file (1.39 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/balance_pairs.cpython-310.pyc ADDED
Binary file (1.98 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/emphasis.cpython-310.pyc ADDED
Binary file (2.23 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/entity.cpython-310.pyc ADDED
Binary file (1.39 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/escape.cpython-310.pyc ADDED
Binary file (1.35 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/fragments_join.cpython-310.pyc ADDED
Binary file (1.31 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/html_inline.cpython-310.pyc ADDED
Binary file (1.26 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/image.cpython-310.pyc ADDED
Binary file (2.13 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/link.cpython-310.pyc ADDED
Binary file (1.99 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/linkify.cpython-310.pyc ADDED
Binary file (1.54 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/newline.cpython-310.pyc ADDED
Binary file (1.07 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/state_inline.cpython-310.pyc ADDED
Binary file (4.03 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/__pycache__/strikethrough.cpython-310.pyc ADDED
Binary file (2.41 kB). View file
 
venv/lib/python3.10/site-packages/markdown_it/rules_inline/autolink.py ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Process autolinks '<protocol:...>'
import re

from .state_inline import StateInline

EMAIL_RE = re.compile(
    r"^([a-zA-Z0-9.!#$%&\'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*)$"
)
AUTOLINK_RE = re.compile(r"^([a-zA-Z][a-zA-Z0-9+.\-]{1,31}):([^<>\x00-\x20]*)$")


def _emit_autolink(state: StateInline, full_url: str, url: str) -> None:
    """Push the link_open / text / link_close token triple for one autolink."""
    token = state.push("link_open", "a", 1)
    token.attrs = {"href": full_url}
    token.markup = "autolink"
    token.info = "auto"

    token = state.push("text", "", 0)
    token.content = state.md.normalizeLinkText(url)

    token = state.push("link_close", "a", -1)
    token.markup = "autolink"
    token.info = "auto"


def autolink(state: StateInline, silent: bool) -> bool:
    """Parse an autolink: ``<protocol:...>`` or ``<address@host>``."""
    begin = state.pos

    if state.src[begin] != "<":
        return False

    limit = state.posMax

    # Advance to the closing '>'; a nested '<' or running off the end aborts.
    scan = begin + 1
    while scan < limit and state.src[scan] not in "<>":
        scan += 1
    if scan >= limit or state.src[scan] == "<":
        return False

    url = state.src[begin + 1 : scan]

    # Try a protocol autolink first, then an e-mail autolink.
    for pattern, prefix in ((AUTOLINK_RE, ""), (EMAIL_RE, "mailto:")):
        if pattern.search(url) is None:
            continue
        full_url = state.md.normalizeLink(prefix + url)
        if not state.md.validateLink(full_url):
            return False
        if not silent:
            _emit_autolink(state, full_url, url)
        state.pos += len(url) + 2  # step over '<', the url, and '>'
        return True

    return False
venv/lib/python3.10/site-packages/markdown_it/rules_inline/backticks.py ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Parse backticks
import re

from .state_inline import StateInline

regex = re.compile("^ (.+) $")


def backtick(state: StateInline, silent: bool) -> bool:
    """Parse an inline code span delimited by a run of backticks."""
    opener_start = state.pos

    if state.src[opener_start] != "`":
        return False

    limit = state.posMax

    # Measure the opening marker run.
    cursor = opener_start + 1
    while cursor < limit and state.src[cursor] == "`":
        cursor += 1

    marker = state.src[opener_start:cursor]
    opener_len = len(marker)

    # The cache proves no closer of this length exists after this position,
    # so the marker is emitted as literal text.
    if state.backticksScanned and state.backticks.get(opener_len, 0) <= opener_start:
        if not silent:
            state.pending += marker
        state.pos += opener_len
        return True

    match_start = match_end = cursor

    # Nothing usable in the cache; scan forward for a matching closer run.
    while True:
        match_start = state.src.find("`", match_end)
        if match_start == -1:
            break
        match_end = match_start + 1

        # Measure the candidate closer run.
        while match_end < limit and state.src[match_end] == "`":
            match_end += 1

        closer_len = match_end - match_start

        if closer_len == opener_len:
            # Found a closer of matching length: emit the code span.
            if not silent:
                token = state.push("code_inline", "code", 0)
                token.markup = marker
                token.content = state.src[cursor:match_start].replace("\n", " ")
                if (
                    token.content.startswith(" ")
                    and token.content.endswith(" ")
                    and len(token.content.strip()) > 0
                ):
                    # Strip exactly one leading and trailing space.
                    token.content = token.content[1:-1]
            state.pos = match_end
            return True

        # Different length: cache it as the upper bound for that length.
        state.backticks[closer_len] = match_start

    # Scanned to the end without finding any closer.
    state.backticksScanned = True

    if not silent:
        state.pending += marker
    state.pos += opener_len
    return True
venv/lib/python3.10/site-packages/markdown_it/rules_inline/balance_pairs.py ADDED
@@ -0,0 +1,138 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""Balance paired characters (*, _, etc) in inline tokens."""

from __future__ import annotations

from .state_inline import Delimiter, StateInline


def processDelimiters(state: StateInline, delimiters: list[Delimiter]) -> None:
    """For each opening emphasis-like marker find a matching closing one."""
    if not delimiters:
        return

    # openersBottom[marker] caches, per (can-open flag, length % 3) bucket,
    # the lowest delimiter index below which no opener can possibly match.
    openersBottom: dict[int, list[int]] = {}
    maximum = len(delimiters)

    # headerIdx is the first delimiter of the current (where closer is) delimiter run
    headerIdx = 0
    lastTokenIdx = -2  # needs any value lower than -1
    # jumps[i]: how many extra delimiters to skip backwards from index i.
    jumps: list[int] = []
    closerIdx = 0
    while closerIdx < maximum:
        closer = delimiters[closerIdx]

        jumps.append(0)

        # markers belong to same delimiter run if:
        # - they have adjacent tokens
        # - AND markers are the same
        #
        if (
            delimiters[headerIdx].marker != closer.marker
            or lastTokenIdx != closer.token - 1
        ):
            headerIdx = closerIdx

        lastTokenIdx = closer.token

        # Length is only used for emphasis-specific "rule of 3",
        # if it's not defined (in strikethrough or 3rd party plugins),
        # we can default it to 0 to disable those checks.
        #
        closer.length = closer.length or 0

        if not closer.close:
            closerIdx += 1
            continue

        # Previously calculated lower bounds (previous fails)
        # for each marker, each delimiter length modulo 3,
        # and for whether this closer can be an opener;
        # https://github.com/commonmark/cmark/commit/34250e12ccebdc6372b8b49c44fab57c72443460
        if closer.marker not in openersBottom:
            openersBottom[closer.marker] = [-1, -1, -1, -1, -1, -1]

        minOpenerIdx = openersBottom[closer.marker][
            (3 if closer.open else 0) + (closer.length % 3)
        ]

        # Start scanning just before the current delimiter run.
        openerIdx = headerIdx - jumps[headerIdx] - 1

        newMinOpenerIdx = openerIdx

        while openerIdx > minOpenerIdx:
            opener = delimiters[openerIdx]

            if opener.marker != closer.marker:
                openerIdx -= jumps[openerIdx] + 1
                continue

            if opener.open and opener.end < 0:
                isOddMatch = False

                # from spec:
                #
                # If one of the delimiters can both open and close emphasis, then the
                # sum of the lengths of the delimiter runs containing the opening and
                # closing delimiters must not be a multiple of 3 unless both lengths
                # are multiples of 3.
                #
                if (
                    (opener.close or closer.open)
                    and ((opener.length + closer.length) % 3 == 0)
                    and (opener.length % 3 != 0 or closer.length % 3 != 0)
                ):
                    isOddMatch = True

                if not isOddMatch:
                    # If previous delimiter cannot be an opener, we can safely skip
                    # the entire sequence in future checks. This is required to make
                    # sure algorithm has linear complexity (see *_*_*_*_*_... case).
                    #
                    if openerIdx > 0 and not delimiters[openerIdx - 1].open:
                        lastJump = jumps[openerIdx - 1] + 1
                    else:
                        lastJump = 0

                    jumps[closerIdx] = closerIdx - openerIdx + lastJump
                    jumps[openerIdx] = lastJump

                    # Mark the pair as matched.
                    closer.open = False
                    opener.end = closerIdx
                    opener.close = False
                    newMinOpenerIdx = -1

                    # treat next token as start of run,
                    # it optimizes skips in **<...>**a**<...>** pathological case
                    lastTokenIdx = -2

                    break

            openerIdx -= jumps[openerIdx] + 1

        if newMinOpenerIdx != -1:
            # If match for this delimiter run failed, we want to set lower bound for
            # future lookups. This is required to make sure algorithm has linear
            # complexity.
            #
            # See details here:
            # https://github.com/commonmark/cmark/issues/178#issuecomment-270417442
            #
            openersBottom[closer.marker][
                (3 if closer.open else 0) + ((closer.length or 0) % 3)
            ] = newMinOpenerIdx

        closerIdx += 1


def link_pairs(state: StateInline) -> None:
    """Balance the top-level delimiter list and every nested delimiter list
    stored in ``state.tokens_meta`` (e.g. delimiters found inside links)."""
    tokens_meta = state.tokens_meta
    maximum = len(state.tokens_meta)

    processDelimiters(state, state.delimiters)

    curr = 0
    while curr < maximum:
        curr_meta = tokens_meta[curr]
        if curr_meta and "delimiters" in curr_meta:
            processDelimiters(state, curr_meta["delimiters"])
        curr += 1
venv/lib/python3.10/site-packages/markdown_it/rules_inline/emphasis.py ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Process *this* and _that_
#
from __future__ import annotations

from .state_inline import Delimiter, StateInline


def tokenize(state: StateInline, silent: bool) -> bool:
    """Insert each marker as a separate text token, and add it to delimiter list"""
    start = state.pos
    marker = state.src[start]

    # Emphasis tokens are never produced in silent (validation) mode.
    if silent:
        return False

    if marker not in ("_", "*"):
        return False

    # '*' may open/close intraword emphasis; '_' may not — hence the flag.
    scanned = state.scanDelims(state.pos, marker == "*")

    for _ in range(scanned.length):
        token = state.push("text", "", 0)
        token.content = marker
        state.delimiters.append(
            Delimiter(
                marker=ord(marker),  # delimiters store the marker's code point
                length=scanned.length,
                token=len(state.tokens) - 1,
                end=-1,  # index of matching closer; filled in by balance_pairs
                open=scanned.can_open,
                close=scanned.can_close,
            )
        )

    state.pos += scanned.length

    return True


def _postProcess(state: StateInline, delimiters: list[Delimiter]) -> None:
    # Walk backwards so inner pairs are converted before outer ones.
    i = len(delimiters) - 1
    while i >= 0:
        startDelim = delimiters[i]

        # /* _ */ /* * */
        if startDelim.marker != 0x5F and startDelim.marker != 0x2A:
            i -= 1
            continue

        # Process only opening markers
        if startDelim.end == -1:
            i -= 1
            continue

        endDelim = delimiters[startDelim.end]

        # If the previous delimiter has the same marker and is adjacent to this one,
        # merge those into one strong delimiter.
        #
        # `<em><em>whatever</em></em>` -> `<strong>whatever</strong>`
        #
        isStrong = (
            i > 0
            and delimiters[i - 1].end == startDelim.end + 1
            # check that first two markers match and adjacent
            and delimiters[i - 1].marker == startDelim.marker
            and delimiters[i - 1].token == startDelim.token - 1
            # check that last two markers are adjacent (we can safely assume they match)
            and delimiters[startDelim.end + 1].token == endDelim.token + 1
        )

        ch = chr(startDelim.marker)

        # Rewrite the opener's text token into an opening tag.
        token = state.tokens[startDelim.token]
        token.type = "strong_open" if isStrong else "em_open"
        token.tag = "strong" if isStrong else "em"
        token.nesting = 1
        token.markup = ch + ch if isStrong else ch
        token.content = ""

        # Rewrite the closer's text token into a closing tag.
        token = state.tokens[endDelim.token]
        token.type = "strong_close" if isStrong else "em_close"
        token.tag = "strong" if isStrong else "em"
        token.nesting = -1
        token.markup = ch + ch if isStrong else ch
        token.content = ""

        if isStrong:
            # Blank out the two inner marker tokens absorbed into the strong
            # pair, and skip the extra opener delimiter we consumed.
            state.tokens[delimiters[i - 1].token].content = ""
            state.tokens[delimiters[startDelim.end + 1].token].content = ""
            i -= 1

        i -= 1


def postProcess(state: StateInline) -> None:
    """Walk through delimiter list and replace text tokens with tags."""
    _postProcess(state, state.delimiters)

    for token in state.tokens_meta:
        if token and "delimiters" in token:
            _postProcess(state, token["delimiters"])
venv/lib/python3.10/site-packages/markdown_it/rules_inline/entity.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Process html entity - &#123;, &#xAF;, &quot;, ...
import re

from ..common.entities import entities
from ..common.utils import fromCodePoint, isValidEntityCode
from .state_inline import StateInline

DIGITAL_RE = re.compile(r"^&#((?:x[a-f0-9]{1,6}|[0-9]{1,7}));", re.IGNORECASE)
NAMED_RE = re.compile(r"^&([a-z][a-z0-9]{1,31});", re.IGNORECASE)


def entity(state: StateInline, silent: bool) -> bool:
    """Parse an HTML entity (numeric or named) at the current position."""
    start = state.pos
    end = state.posMax

    if state.src[start] != "&":
        return False

    if start + 1 >= end:
        return False

    tail = state.src[start:]

    if state.src[start + 1] == "#":
        # Numeric entity: decimal (&#123;) or hexadecimal (&#xAF;).
        numeric = DIGITAL_RE.search(tail)
        if numeric:
            if not silent:
                body = numeric.group(1)
                if body[0].lower() == "x":
                    code = int(body[1:], 16)
                else:
                    code = int(body, 10)

                token = state.push("text_special", "", 0)
                if isValidEntityCode(code):
                    token.content = fromCodePoint(code)
                else:
                    # Out-of-range code points become U+FFFD (replacement char).
                    token.content = fromCodePoint(0xFFFD)
                token.markup = numeric.group(0)
                token.info = "entity"

            state.pos += len(numeric.group(0))
            return True

    else:
        # Named entity: accepted only if the name is a known HTML entity.
        named = NAMED_RE.search(tail)
        if named and named.group(1) in entities:
            if not silent:
                token = state.push("text_special", "", 0)
                token.content = entities[named.group(1)]
                token.markup = named.group(0)
                token.info = "entity"

            state.pos += len(named.group(0))
            return True

    return False
venv/lib/python3.10/site-packages/markdown_it/rules_inline/escape.py ADDED
@@ -0,0 +1,93 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""
Process escaped chars and hardbreaks
"""

from ..common.utils import isStrSpace
from .state_inline import StateInline

# All ASCII punctuation characters a backslash may escape.
_ESCAPED = set("!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~")


def escape(state: StateInline, silent: bool) -> bool:
    """Process escaped chars and hardbreaks."""
    cursor = state.pos
    limit = state.posMax

    if state.src[cursor] != "\\":
        return False

    cursor += 1

    # '\' at the end of the inline block
    if cursor >= limit:
        return False

    first = state.src[cursor]
    first_ord = ord(first)

    if first == "\n":
        # Backslash immediately before a newline is a hard break.
        if not silent:
            state.push("hardbreak", "br", 0)
        cursor += 1
        # Skip leading whitespace on the following line.
        while cursor < limit and isStrSpace(state.src[cursor]):
            cursor += 1

        state.pos = cursor
        return True

    escaped = state.src[cursor]

    # Keep a surrogate pair together: high surrogate followed by low surrogate.
    if 0xD800 <= first_ord <= 0xDBFF and cursor + 1 < limit:
        second_ord = ord(state.src[cursor + 1])
        if 0xDC00 <= second_ord <= 0xDFFF:
            escaped += state.src[cursor + 1]
            cursor += 1

    original = "\\" + escaped

    if not silent:
        token = state.push("text_special", "", 0)
        # Escapable punctuation loses the backslash; anything else keeps it.
        token.content = escaped if first in _ESCAPED else original
        token.markup = original
        token.info = "escape"

    state.pos = cursor + 1
    return True
venv/lib/python3.10/site-packages/markdown_it/rules_inline/fragments_join.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
from .state_inline import StateInline


def fragments_join(state: StateInline) -> None:
    """
    Clean up tokens after emphasis and strikethrough postprocessing:
    merge adjacent text nodes into one and re-calculate all token levels

    This is necessary because initially emphasis delimiter markers (``*, _, ~``)
    are treated as their own separate text tokens. Then emphasis rule either
    leaves them as text (needed to merge with adjacent text) or turns them
    into opening/closing tags (which messes up levels inside).
    """
    tokens = state.tokens
    total = len(tokens)
    depth = 0

    read = write = 0
    while read < total:
        token = tokens[read]

        # Re-calculate nesting levels, since emphasis/strikethrough may have
        # turned some text tokens into opening/closing tags.
        if token.nesting < 0:
            depth -= 1  # closing tag
        token.level = depth
        if token.nesting > 0:
            depth += 1  # opening tag

        if (
            token.type == "text"
            and read + 1 < total
            and tokens[read + 1].type == "text"
        ):
            # Fold this text token into its successor; this one gets dropped.
            tokens[read + 1].content = token.content + tokens[read + 1].content
        else:
            # Keep this token, compacting the list in place.
            if read != write:
                tokens[write] = token
            write += 1
        read += 1

    # Trim the tail left over by the in-place compaction.
    if read != write:
        del tokens[write:]
venv/lib/python3.10/site-packages/markdown_it/rules_inline/html_inline.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Process html tags
from ..common.html_re import HTML_TAG_RE
from ..common.utils import isLinkClose, isLinkOpen
from .state_inline import StateInline


def isLetter(ch: int) -> bool:
    """True if code point ``ch`` is an ASCII letter (a-z or A-Z)."""
    # Setting bit 0x20 folds upper-case onto lower-case.
    return 0x61 <= (ch | 0x20) <= 0x7A  # 'a' .. 'z'


def html_inline(state: StateInline, silent: bool) -> bool:
    """Parse a raw inline HTML tag at the current position."""
    begin = state.pos

    # Raw HTML must be enabled in the parser options.
    if not state.md.options.get("html", None):
        return False

    # Must start with '<' and leave room for at least '<x>'.
    end = state.posMax
    if state.src[begin] != "<" or begin + 2 >= end:
        return False

    # Cheap rejection based on the second character.
    second = state.src[begin + 1]
    if second not in ("!", "?", "/") and not isLetter(ord(second)):
        return False

    match = HTML_TAG_RE.search(state.src[begin:])
    if match is None:
        return False

    matched_len = len(match.group(0))

    if not silent:
        token = state.push("html_inline", "", 0)
        token.content = state.src[begin : begin + matched_len]

        # Track <a> nesting so the link rule can refuse links inside links.
        if isLinkOpen(token.content):
            state.linkLevel += 1
        if isLinkClose(token.content):
            state.linkLevel -= 1

    state.pos += matched_len
    return True
venv/lib/python3.10/site-packages/markdown_it/rules_inline/image.py ADDED
@@ -0,0 +1,148 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Process ![image](<src> "title")
from __future__ import annotations

from ..common.utils import isStrSpace, normalizeReference
from ..token import Token
from .state_inline import StateInline


def image(state: StateInline, silent: bool) -> bool:
    """Parse an image: inline form ``![alt](<src> "title")`` or the
    reference forms ``![alt][label]`` / ``![alt]`` resolved through
    ``state.env["references"]``.

    Pushes a single ``image`` token (with the alt text parsed into child
    tokens) unless ``silent`` is set; returns True when an image was parsed.
    """
    label = None
    href = ""
    oldPos = state.pos
    max = state.posMax  # NOTE(review): shadows the builtin ``max``; kept as-is

    if state.src[state.pos] != "!":
        return False

    if state.pos + 1 < state.posMax and state.src[state.pos + 1] != "[":
        return False

    labelStart = state.pos + 2
    labelEnd = state.md.helpers.parseLinkLabel(state, state.pos + 1, False)

    # parser failed to find ']', so it's not a valid link
    if labelEnd < 0:
        return False

    pos = labelEnd + 1

    if pos < max and state.src[pos] == "(":
        #
        # Inline link
        #

        # [link]( <href> "title" )
        #        ^^ skipping these spaces
        pos += 1
        while pos < max:
            ch = state.src[pos]
            if not isStrSpace(ch) and ch != "\n":
                break
            pos += 1

        if pos >= max:
            return False

        # [link]( <href> "title" )
        #         ^^^^^^ parsing link destination
        start = pos
        res = state.md.helpers.parseLinkDestination(state.src, pos, state.posMax)
        if res.ok:
            href = state.md.normalizeLink(res.str)
            if state.md.validateLink(href):
                pos = res.pos
            else:
                # Destination rejected by the validator: drop it, keep parsing.
                href = ""

        # [link]( <href> "title" )
        #               ^^ skipping these spaces
        start = pos
        while pos < max:
            ch = state.src[pos]
            if not isStrSpace(ch) and ch != "\n":
                break
            pos += 1

        # [link]( <href> "title" )
        #                ^^^^^^^ parsing link title
        res = state.md.helpers.parseLinkTitle(state.src, pos, state.posMax, None)
        # A title only counts if some whitespace separated it from the href.
        if pos < max and start != pos and res.ok:
            title = res.str
            pos = res.pos

            # [link]( <href> "title" )
            #                       ^^ skipping these spaces
            while pos < max:
                ch = state.src[pos]
                if not isStrSpace(ch) and ch != "\n":
                    break
                pos += 1
        else:
            title = ""

        if pos >= max or state.src[pos] != ")":
            # No closing ')': not an inline image; restore position.
            state.pos = oldPos
            return False

        pos += 1

    else:
        #
        # Link reference
        #
        if "references" not in state.env:
            return False

        # /* [ */
        if pos < max and state.src[pos] == "[":
            start = pos + 1
            pos = state.md.helpers.parseLinkLabel(state, pos)
            if pos >= 0:
                label = state.src[start:pos]
                pos += 1
            else:
                pos = labelEnd + 1
        else:
            pos = labelEnd + 1

        # covers label == '' and label == undefined
        # (collapsed reference link and shortcut reference link respectively)
        if not label:
            label = state.src[labelStart:labelEnd]

        label = normalizeReference(label)

        ref = state.env["references"].get(label, None)
        if not ref:
            # Unknown reference label: restore position and bail out.
            state.pos = oldPos
            return False

        href = ref["href"]
        title = ref["title"]

    #
    # We found the end of the link, and know for a fact it's a valid link
    # so all that's left to do is to call tokenizer.
    #
    if not silent:
        content = state.src[labelStart:labelEnd]

        # The alt text is itself parsed as inline markdown.
        tokens: list[Token] = []
        state.md.inline.parse(content, state.md, state.env, tokens)

        token = state.push("image", "img", 0)
        token.attrs = {"src": href, "alt": ""}
        token.children = tokens or None
        token.content = content

        if title:
            token.attrSet("title", title)

        # note, this is not part of markdown-it JS, but is useful for renderers
        if label and state.md.options.get("store_labels", False):
            token.meta["label"] = label

    state.pos = pos
    state.posMax = max
    return True
venv/lib/python3.10/site-packages/markdown_it/rules_inline/link.py ADDED
@@ -0,0 +1,149 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Process [link](<to> "stuff")
2
+
3
+ from ..common.utils import isStrSpace, normalizeReference
4
+ from .state_inline import StateInline
5
+
6
+
7
def link(state: StateInline, silent: bool) -> bool:
    """Parse an inline link at ``state.pos``: ``[text](href "title")`` or a
    reference link (``[text][label]``, collapsed ``[text][]``, shortcut ``[text]``).

    On success pushes ``link_open``/``link_close`` tokens (unless *silent*),
    advances ``state.pos`` past the link, and returns True. On failure
    restores ``state.pos`` and returns False.
    """
    href = ""
    title = ""
    label = None
    oldPos = state.pos
    maximum = state.posMax
    start = state.pos
    # Assume a reference link until an inline "(...)" part parses cleanly.
    parseReference = True

    if state.src[state.pos] != "[":
        return False

    labelStart = state.pos + 1
    labelEnd = state.md.helpers.parseLinkLabel(state, state.pos, True)

    # parser failed to find ']', so it's not a valid link
    if labelEnd < 0:
        return False

    pos = labelEnd + 1

    if pos < maximum and state.src[pos] == "(":
        #
        # Inline link
        #

        # might have found a valid shortcut link, disable reference parsing
        parseReference = False

        # [link](  <href>  "title"  )
        #        ^^ skipping these spaces
        pos += 1
        while pos < maximum:
            ch = state.src[pos]
            if not isStrSpace(ch) and ch != "\n":
                break
            pos += 1

        if pos >= maximum:
            return False

        # [link](  <href>  "title"  )
        #          ^^^^^^ parsing link destination
        start = pos
        res = state.md.helpers.parseLinkDestination(state.src, pos, state.posMax)
        if res.ok:
            href = state.md.normalizeLink(res.str)
            if state.md.validateLink(href):
                pos = res.pos
            else:
                # destination rejected by the validator: treat as empty href
                href = ""

            # [link](  <href>  "title"  )
            #                ^^ skipping these spaces
            start = pos
            while pos < maximum:
                ch = state.src[pos]
                if not isStrSpace(ch) and ch != "\n":
                    break
                pos += 1

            # [link](  <href>  "title"  )
            #                  ^^^^^^^ parsing link title
            res = state.md.helpers.parseLinkTitle(state.src, pos, state.posMax)
            # `start != pos` ensures at least one space separated href and title
            if pos < maximum and start != pos and res.ok:
                title = res.str
                pos = res.pos

                # [link](  <href>  "title"  )
                #                         ^^ skipping these spaces
                while pos < maximum:
                    ch = state.src[pos]
                    if not isStrSpace(ch) and ch != "\n":
                        break
                    pos += 1

        if pos >= maximum or state.src[pos] != ")":
            # parsing a valid shortcut link failed, fallback to reference
            parseReference = True

        pos += 1

    if parseReference:
        #
        # Link reference
        #
        if "references" not in state.env:
            return False

        if pos < maximum and state.src[pos] == "[":
            start = pos + 1
            pos = state.md.helpers.parseLinkLabel(state, pos)
            if pos >= 0:
                label = state.src[start:pos]
                pos += 1
            else:
                pos = labelEnd + 1

        else:
            pos = labelEnd + 1

        # covers label == '' and label == undefined
        # (collapsed reference link and shortcut reference link respectively)
        if not label:
            label = state.src[labelStart:labelEnd]

        label = normalizeReference(label)

        ref = state.env["references"].get(label, None)
        if not ref:
            state.pos = oldPos
            return False

        href = ref["href"]
        title = ref["title"]

    #
    # We found the end of the link, and know for a fact it's a valid link
    # so all that's left to do is to call tokenizer.
    #
    if not silent:
        # temporarily narrow parsing to the label text for the child tokens
        state.pos = labelStart
        state.posMax = labelEnd

        token = state.push("link_open", "a", 1)
        token.attrs = {"href": href}

        if title:
            token.attrSet("title", title)

        # note, this is not part of markdown-it JS, but is useful for renderers
        if label and state.md.options.get("store_labels", False):
            token.meta["label"] = label

        # linkLevel guard prevents linkify from firing inside link text
        state.linkLevel += 1
        state.md.inline.tokenize(state)
        state.linkLevel -= 1

        token = state.push("link_close", "a", -1)

    state.pos = pos
    state.posMax = maximum
    return True
venv/lib/python3.10/site-packages/markdown_it/rules_inline/linkify.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Process links like https://example.org/"""
2
+
3
+ import re
4
+
5
+ from .state_inline import StateInline
6
+
7
+ # RFC3986: scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
8
+ SCHEME_RE = re.compile(r"(?:^|[^a-z0-9.+-])([a-z][a-z0-9.+-]*)$", re.IGNORECASE)
9
+
10
+
11
def linkify(state: StateInline, silent: bool) -> bool:
    """Rule for identifying plain-text links.

    Triggered at the ``://`` of a URL: the scheme characters have already been
    consumed into ``state.pending`` by the text rule, so they are recovered via
    SCHEME_RE and trimmed off the pending buffer before tokens are pushed.
    """
    if not state.md.options.linkify:
        return False
    if state.linkLevel > 0:
        # inside a markdown link / <a>; linkify is disabled there
        return False
    if not state.md.linkify:
        raise ModuleNotFoundError("Linkify enabled but not installed.")

    pos = state.pos
    maximum = state.posMax

    # require the literal "://" sequence starting at the current ':'
    if (
        (pos + 3) > maximum
        or state.src[pos] != ":"
        or state.src[pos + 1] != "/"
        or state.src[pos + 2] != "/"
    ):
        return False

    # recover the scheme (e.g. "https") from the tail of the pending buffer
    if not (match := SCHEME_RE.search(state.pending)):
        return False

    proto = match.group(1)
    if not (link := state.md.linkify.match_at_start(state.src[pos - len(proto) :])):
        return False
    url: str = link.url

    # disallow '*' at the end of the link (conflicts with emphasis)
    url = url.rstrip("*")

    full_url = state.md.normalizeLink(url)
    if not state.md.validateLink(full_url):
        return False

    if not silent:
        # drop the scheme from pending; it is re-emitted as part of the link text
        state.pending = state.pending[: -len(proto)]

        token = state.push("link_open", "a", 1)
        token.attrs = {"href": full_url}
        token.markup = "linkify"
        token.info = "auto"

        token = state.push("text", "", 0)
        token.content = state.md.normalizeLinkText(url)

        token = state.push("link_close", "a", -1)
        token.markup = "linkify"
        token.info = "auto"

    # advance past the matched URL, minus the scheme already consumed
    state.pos += len(url) - len(proto)
    return True
venv/lib/python3.10/site-packages/markdown_it/rules_inline/newline.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Proceess '\n'."""
2
+
3
+ from ..common.utils import charStrAt, isStrSpace
4
+ from .state_inline import StateInline
5
+
6
+
7
def newline(state: StateInline, silent: bool) -> bool:
    """Handle a literal '\\n': emit a hardbreak (two+ trailing spaces) or a
    softbreak, trim the trailing spaces from pending, and skip the next
    line's leading whitespace.
    """
    cursor = state.pos

    if state.src[cursor] != "\n":
        return False

    tail = len(state.pending) - 1
    limit = state.posMax

    # '  \n' -> hardbreak
    # Lookup in pending chars is bad practice! Don't copy to other rules!
    # Pending string is stored in concat mode, indexed lookups will cause
    # conversion to flat mode.
    if not silent:
        ends_with_space = tail >= 0 and charStrAt(state.pending, tail) == " "
        if ends_with_space and tail >= 1 and charStrAt(state.pending, tail - 1) == " ":
            # Strip the whole run of trailing spaces before the hardbreak.
            trim = tail - 1
            while trim >= 1 and charStrAt(state.pending, trim - 1) == " ":
                trim -= 1
            state.pending = state.pending[:trim]
            state.push("hardbreak", "br", 0)
        elif ends_with_space:
            state.pending = state.pending[:-1]
            state.push("softbreak", "br", 0)
        else:
            state.push("softbreak", "br", 0)

    cursor += 1

    # skip heading spaces for next line
    while cursor < limit and isStrSpace(state.src[cursor]):
        cursor += 1

    state.pos = cursor
    return True
venv/lib/python3.10/site-packages/markdown_it/rules_inline/state_inline.py ADDED
@@ -0,0 +1,165 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from collections import namedtuple
4
+ from dataclasses import dataclass
5
+ from typing import TYPE_CHECKING, Any, Literal
6
+
7
+ from ..common.utils import isMdAsciiPunct, isPunctChar, isWhiteSpace
8
+ from ..ruler import StateBase
9
+ from ..token import Token
10
+ from ..utils import EnvType
11
+
12
+ if TYPE_CHECKING:
13
+ from markdown_it import MarkdownIt
14
+
15
+
16
@dataclass(slots=True)
class Delimiter:
    """One emphasis-like delimiter discovered during inline parsing."""

    # Char code of the starting marker (number).
    marker: int

    # Total length of these series of delimiters.
    length: int

    # A position of the token this delimiter corresponds to.
    token: int

    # If this delimiter is matched as a valid opener, `end` will be
    # equal to its position, otherwise it's `-1`.
    end: int

    # Boolean flags that determine if this delimiter could open or close
    # an emphasis.
    open: bool
    close: bool

    # NOTE(review): annotated `bool | None` but never set by the visible
    # rules; presumably a legacy nesting-level slot — confirm before use.
    level: bool | None = None
37
+
38
+
39
# Result of StateInline.scanDelims: whether the scanned delimiter run can
# open and/or close emphasis, plus how many marker characters were scanned.
Scanned = namedtuple("Scanned", ["can_open", "can_close", "length"])
40
+
41
+
42
class StateInline(StateBase):
    """Mutable state shared by all inline rules while parsing one inline stream."""

    def __init__(
        self, src: str, md: MarkdownIt, env: EnvType, outTokens: list[Token]
    ) -> None:
        self.src = src
        self.env = env
        self.md = md
        self.tokens = outTokens
        # Per-token metadata, kept index-aligned with `self.tokens`.
        self.tokens_meta: list[dict[str, Any] | None] = [None] * len(outTokens)

        # Current parse position and upper bound within `src`.
        self.pos = 0
        self.posMax = len(self.src)
        # Current tag nesting level; copied onto every pushed token.
        self.level = 0
        # Accumulated plain text not yet flushed as a "text" token.
        self.pending = ""
        self.pendingLevel = 0

        # Stores { start: end } pairs. Useful for backtrack
        # optimization of pairs parse (emphasis, strikes).
        self.cache: dict[int, int] = {}

        # List of emphasis-like delimiters for current tag
        self.delimiters: list[Delimiter] = []

        # Stack of delimiter lists for upper level tags
        self._prev_delimiters: list[list[Delimiter]] = []

        # backticklength => last seen position
        self.backticks: dict[int, int] = {}
        self.backticksScanned = False

        # Counter used to disable inline linkify-it execution
        # inside <a> and markdown links
        self.linkLevel = 0

    def __repr__(self) -> str:
        return (
            f"{self.__class__.__name__}"
            f"(pos=[{self.pos} of {self.posMax}], token={len(self.tokens)})"
        )

    def pushPending(self) -> Token:
        """Flush the accumulated `pending` text as a "text" token."""
        token = Token("text", "", 0)
        token.content = self.pending
        token.level = self.pendingLevel
        self.tokens.append(token)
        self.pending = ""
        return token

    def push(self, ttype: str, tag: str, nesting: Literal[-1, 0, 1]) -> Token:
        """Push new token to "stream".
        If pending text exists - flush it as text token
        """
        if self.pending:
            self.pushPending()

        token = Token(ttype, tag, nesting)
        token_meta = None

        if nesting < 0:
            # closing tag: restore the enclosing tag's delimiter list
            self.level -= 1
            self.delimiters = self._prev_delimiters.pop()

        token.level = self.level

        if nesting > 0:
            # opening tag: start a fresh delimiter list for the nested content
            self.level += 1
            self._prev_delimiters.append(self.delimiters)
            self.delimiters = []
            token_meta = {"delimiters": self.delimiters}

        self.pendingLevel = self.level
        self.tokens.append(token)
        self.tokens_meta.append(token_meta)
        return token

    def scanDelims(self, start: int, canSplitWord: bool) -> Scanned:
        """
        Scan a sequence of emphasis-like markers, and determine whether
        it can start an emphasis sequence or end an emphasis sequence.

        - start - position to scan from (it should point at a valid marker);
        - canSplitWord - determine if these markers can be found inside a word

        """
        pos = start
        maximum = self.posMax
        marker = self.src[start]

        # treat beginning of the line as a whitespace
        lastChar = self.src[start - 1] if start > 0 else " "

        while pos < maximum and self.src[pos] == marker:
            pos += 1

        count = pos - start

        # treat end of the line as a whitespace
        nextChar = self.src[pos] if pos < maximum else " "

        isLastPunctChar = isMdAsciiPunct(ord(lastChar)) or isPunctChar(lastChar)
        isNextPunctChar = isMdAsciiPunct(ord(nextChar)) or isPunctChar(nextChar)

        isLastWhiteSpace = isWhiteSpace(ord(lastChar))
        isNextWhiteSpace = isWhiteSpace(ord(nextChar))

        # CommonMark left-/right-flanking delimiter-run rules.
        left_flanking = not (
            isNextWhiteSpace
            or (isNextPunctChar and not (isLastWhiteSpace or isLastPunctChar))
        )
        right_flanking = not (
            isLastWhiteSpace
            or (isLastPunctChar and not (isNextWhiteSpace or isNextPunctChar))
        )

        can_open = left_flanking and (
            canSplitWord or (not right_flanking) or isLastPunctChar
        )
        can_close = right_flanking and (
            canSplitWord or (not left_flanking) or isNextPunctChar
        )

        return Scanned(can_open, can_close, count)
venv/lib/python3.10/site-packages/markdown_it/rules_inline/strikethrough.py ADDED
@@ -0,0 +1,127 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # ~~strike through~~
2
+ from __future__ import annotations
3
+
4
+ from .state_inline import Delimiter, StateInline
5
+
6
+
7
def tokenize(state: StateInline, silent: bool) -> bool:
    """Insert each '~~' marker pair as a separate text token, and record it
    in the delimiter list for the post-processing pass.
    """
    marker_pos = state.pos
    marker = state.src[marker_pos]

    if silent or marker != "~":
        return False

    scanned = state.scanDelims(marker_pos, True)
    run = scanned.length

    if run < 2:
        return False

    if run % 2:
        # Odd run: emit the leftover single '~' as plain text first.
        lone = state.push("text", "", 0)
        lone.content = marker
        run -= 1

    for _ in range(0, run, 2):
        pair = state.push("text", "", 0)
        pair.content = marker + marker
        state.delimiters.append(
            Delimiter(
                marker=ord(marker),
                length=0,  # disable "rule of 3" length checks meant for emphasis
                token=len(state.tokens) - 1,
                end=-1,
                open=scanned.can_open,
                close=scanned.can_close,
            )
        )

    state.pos += scanned.length

    return True
49
+
50
+
51
def _postProcess(state: StateInline, delimiters: list[Delimiter]) -> None:
    """Rewrite matched '~~' delimiter text tokens into s_open/s_close tags."""
    loneMarkers = []
    maximum = len(delimiters)

    i = 0
    while i < maximum:
        startDelim = delimiters[i]

        if startDelim.marker != 0x7E:  # /* ~ */
            i += 1
            continue

        # skip delimiters that were never matched with a closer
        if startDelim.end == -1:
            i += 1
            continue

        endDelim = delimiters[startDelim.end]

        token = state.tokens[startDelim.token]
        token.type = "s_open"
        token.tag = "s"
        token.nesting = 1
        token.markup = "~~"
        token.content = ""

        token = state.tokens[endDelim.token]
        token.type = "s_close"
        token.tag = "s"
        token.nesting = -1
        token.markup = "~~"
        token.content = ""

        # Remember a stray single '~' text token sitting just before this
        # closer; it must be relocated after the s_close run (see below).
        if (
            state.tokens[endDelim.token - 1].type == "text"
            and state.tokens[endDelim.token - 1].content == "~"
        ):
            loneMarkers.append(endDelim.token - 1)

        i += 1

    # If a marker sequence has an odd number of characters, it's split
    # like this: `~~~~~` -> `~` + `~~` + `~~`, leaving one marker at the
    # start of the sequence.
    #
    # So, we have to move all those markers after subsequent s_close tags.
    #
    while loneMarkers:
        i = loneMarkers.pop()
        j = i + 1

        # find the last token of the consecutive s_close run
        while (j < len(state.tokens)) and (state.tokens[j].type == "s_close"):
            j += 1

        j -= 1

        if i != j:
            # swap the lone '~' text token past the s_close run
            token = state.tokens[j]
            state.tokens[j] = state.tokens[i]
            state.tokens[i] = token
111
+
112
def postProcess(state: StateInline) -> None:
    """Walk through delimiter list and replace text tokens with tags."""
    meta_list = state.tokens_meta
    total = len(state.tokens_meta)

    # top-level delimiters first
    _postProcess(state, state.delimiters)

    # then delimiter lists captured for each nested tag
    for idx in range(total):
        try:
            meta = meta_list[idx]
        except IndexError:
            continue
        if meta and "delimiters" in meta:
            _postProcess(state, meta["delimiters"])
venv/lib/python3.10/site-packages/markdown_it/rules_inline/text.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import re
3
+
4
+ # Skip text characters for text token, place those to pending buffer
5
+ # and increment current pos
6
+ from .state_inline import StateInline
7
+
8
+ # Rule to skip pure text
9
+ # '{}$%@~+=:' reserved for extensions
10
+
11
+ # !!!! Don't confuse with "Markdown ASCII Punctuation" chars
12
+ # http://spec.commonmark.org/0.15/#ascii-punctuation-character
13
+
14
+
15
# Characters that end a plain-text run: each one can start another inline
# rule (or a line break), so the `text` rule scans ahead until it hits one.
_TerminatorChars = {
    "\n",
    "!",
    "#",
    "$",
    "%",
    "&",
    "*",
    "+",
    "-",
    ":",
    "<",
    "=",
    ">",
    "@",
    "[",
    "\\",
    "]",
    "^",
    "_",
    "`",
    "{",
    "}",
    "~",
}
40
+
41
+
42
@functools.cache
def _terminator_char_regex() -> re.Pattern[str]:
    """Compile (once, cached) a character class matching any terminator char."""
    escaped = re.escape("".join(_TerminatorChars))
    return re.compile(f"[{escaped}]")
45
+
46
+
47
def text(state: StateInline, silent: bool) -> bool:
    """Consume a run of plain characters up to the next terminator char,
    appending it to the pending buffer.
    """
    begin = state.pos

    hit = _terminator_char_regex().search(state.src, begin)
    end = hit.start() if hit else state.posMax

    # nothing consumed: a terminator sits right at the current position
    if end == begin:
        return False

    if not silent:
        state.pending += state.src[begin:end]

    state.pos = end

    return True
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (371 Bytes). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-310.pyc ADDED
Binary file (5.4 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-310.pyc ADDED
Binary file (6.35 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-310.pyc ADDED
Binary file (22.6 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-310.pyc ADDED
Binary file (1.41 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main.cpython-310.pyc ADDED
Binary file (1.47 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-310.pyc ADDED
Binary file (2.26 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/parser.cpython-310.pyc ADDED
Binary file (10 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-310.pyc ADDED
Binary file (9.33 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-310.pyc ADDED
Binary file (13.6 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-310.pyc ADDED
Binary file (5.05 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-310.pyc ADDED
Binary file (450 Bytes). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/req_command.py ADDED
@@ -0,0 +1,506 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Contains the Command base classes that depend on PipSession.
2
+
3
+ The classes in this module are in a separate module so the commands not
4
+ needing download / PackageFinder capability don't unnecessarily import the
5
+ PackageFinder machinery and all its vendored dependencies, etc.
6
+ """
7
+
8
+ import logging
9
+ import os
10
+ import sys
11
+ from functools import partial
12
+ from optparse import Values
13
+ from typing import Any, List, Optional, Tuple
14
+
15
+ from pip._internal.cache import WheelCache
16
+ from pip._internal.cli import cmdoptions
17
+ from pip._internal.cli.base_command import Command
18
+ from pip._internal.cli.command_context import CommandContextMixIn
19
+ from pip._internal.exceptions import CommandError, PreviousBuildDirError
20
+ from pip._internal.index.collector import LinkCollector
21
+ from pip._internal.index.package_finder import PackageFinder
22
+ from pip._internal.models.selection_prefs import SelectionPreferences
23
+ from pip._internal.models.target_python import TargetPython
24
+ from pip._internal.network.session import PipSession
25
+ from pip._internal.operations.prepare import RequirementPreparer
26
+ from pip._internal.req.constructors import (
27
+ install_req_from_editable,
28
+ install_req_from_line,
29
+ install_req_from_parsed_requirement,
30
+ install_req_from_req_string,
31
+ )
32
+ from pip._internal.req.req_file import parse_requirements
33
+ from pip._internal.req.req_install import InstallRequirement
34
+ from pip._internal.req.req_tracker import RequirementTracker
35
+ from pip._internal.resolution.base import BaseResolver
36
+ from pip._internal.self_outdated_check import pip_self_version_check
37
+ from pip._internal.utils.deprecation import deprecated
38
+ from pip._internal.utils.temp_dir import (
39
+ TempDirectory,
40
+ TempDirectoryTypeRegistry,
41
+ tempdir_kinds,
42
+ )
43
+ from pip._internal.utils.virtualenv import running_under_virtualenv
44
+
45
+ logger = logging.getLogger(__name__)
46
+
47
+
48
class SessionCommandMixin(CommandContextMixIn):

    """
    A class mixin for command classes needing _build_session().
    """

    def __init__(self) -> None:
        super().__init__()
        # Lazily created session shared through get_default_session().
        self._session: Optional[PipSession] = None

    @classmethod
    def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
        """Return a list of index urls from user-provided options."""
        index_urls = []
        if not getattr(options, "no_index", False):
            url = getattr(options, "index_url", None)
            if url:
                index_urls.append(url)
        urls = getattr(options, "extra_index_urls", None)
        if urls:
            index_urls.extend(urls)
        # Return None rather than an empty list
        return index_urls or None

    def get_default_session(self, options: Values) -> PipSession:
        """Get a default-managed session."""
        if self._session is None:
            self._session = self.enter_context(self._build_session(options))
            # there's no type annotation on requests.Session, so it's
            # automatically ContextManager[Any] and self._session becomes Any,
            # then https://github.com/python/mypy/issues/7696 kicks in
            assert self._session is not None
        return self._session

    def _build_session(
        self,
        options: Values,
        retries: Optional[int] = None,
        timeout: Optional[int] = None,
    ) -> PipSession:
        """Create a PipSession from *options*; explicit *retries*/*timeout*
        override the corresponding option values."""
        assert not options.cache_dir or os.path.isabs(options.cache_dir)
        session = PipSession(
            cache=(
                os.path.join(options.cache_dir, "http") if options.cache_dir else None
            ),
            retries=retries if retries is not None else options.retries,
            trusted_hosts=options.trusted_hosts,
            index_urls=self._get_index_urls(options),
        )

        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert

        # Handle SSL client certificate
        if options.client_cert:
            session.cert = options.client_cert

        # Handle timeouts
        if options.timeout or timeout:
            session.timeout = timeout if timeout is not None else options.timeout

        # Handle configured proxies
        if options.proxy:
            session.proxies = {
                "http": options.proxy,
                "https": options.proxy,
            }

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input

        return session
121
+
122
+
123
class IndexGroupCommand(Command, SessionCommandMixin):

    """
    Abstract base class for commands with the index_group options.

    This also corresponds to the commands that permit the pip version check.
    """

    def handle_pip_version_check(self, options: Values) -> None:
        """
        Do the pip version check if not disabled.

        This overrides the default behavior of not doing the check.
        """
        # Make sure the index_group options are present.
        assert hasattr(options, "no_index")

        if options.disable_pip_version_check or options.no_index:
            return

        # Otherwise, check if we're using the latest version of pip available.
        # Short timeout and no retries so the check can't stall the command.
        session = self._build_session(
            options, retries=0, timeout=min(5, options.timeout)
        )
        with session:
            pip_self_version_check(session, options)
149
+
150
+
151
# Temporary-directory kinds whose deletion may be disabled (e.g. with
# --no-clean, or when a pre-existing build directory must be preserved).
KEEPABLE_TEMPDIR_TYPES = [
    tempdir_kinds.BUILD_ENV,
    tempdir_kinds.EPHEM_WHEEL_CACHE,
    tempdir_kinds.REQ_BUILD,
]
156
+
157
+
158
def warn_if_run_as_root() -> None:
    """Output a warning for sudo users on Unix.

    In a virtual environment, sudo pip still writes to virtualenv.
    On Windows, users may run pip as Administrator without issues.
    This warning only applies to Unix root users outside of virtualenv.
    """
    # Inside a virtualenv root writes only touch the virtualenv itself.
    if running_under_virtualenv():
        return
    # Platforms without os.getuid can't be running as Unix root.
    if not hasattr(os, "getuid"):
        return
    # On Windows, there are no "system managed" Python packages. Installing as
    # Administrator via pip is the correct way of updating system environments.
    #
    # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
    # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
    if sys.platform in ("win32", "cygwin"):
        return
    if os.getuid() != 0:
        return

    logger.warning(
        "Running pip as the 'root' user can result in broken permissions and "
        "conflicting behaviour with the system package manager. "
        "It is recommended to use a virtual environment instead: "
        "https://pip.pypa.io/warnings/venv"
    )
186
+
187
+
188
def with_cleanup(func: Any) -> Any:
    """Decorator for common logic related to managing temporary
    directories.
    """

    def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
        # Mark all keepable temp-dir kinds as not-to-delete.
        for t in KEEPABLE_TEMPDIR_TYPES:
            registry.set_delete(t, False)

    def wrapper(
        self: RequirementCommand, options: Values, args: List[Any]
    ) -> Optional[int]:
        assert self.tempdir_registry is not None
        if options.no_clean:
            configure_tempdir_registry(self.tempdir_registry)

        try:
            return func(self, options, args)
        except PreviousBuildDirError:
            # This kind of conflict can occur when the user passes an explicit
            # build directory with a pre-existing folder. In that case we do
            # not want to accidentally remove it.
            configure_tempdir_registry(self.tempdir_registry)
            raise

    return wrapper
214
+
215
+
216
+ class RequirementCommand(IndexGroupCommand):
217
+ def __init__(self, *args: Any, **kw: Any) -> None:
218
+ super().__init__(*args, **kw)
219
+
220
+ self.cmd_opts.add_option(cmdoptions.no_clean())
221
+
222
+ @staticmethod
223
+ def determine_resolver_variant(options: Values) -> str:
224
+ """Determines which resolver should be used, based on the given options."""
225
+ if "legacy-resolver" in options.deprecated_features_enabled:
226
+ return "legacy"
227
+
228
+ return "2020-resolver"
229
+
230
+ @staticmethod
231
+ def determine_build_failure_suppression(options: Values) -> bool:
232
+ """Determines whether build failures should be suppressed and backtracked on."""
233
+ if "backtrack-on-build-failures" not in options.deprecated_features_enabled:
234
+ return False
235
+
236
+ if "legacy-resolver" in options.deprecated_features_enabled:
237
+ raise CommandError("Cannot backtrack with legacy resolver.")
238
+
239
+ deprecated(
240
+ reason=(
241
+ "Backtracking on build failures can mask issues related to how "
242
+ "a package generates metadata or builds a wheel. This flag will "
243
+ "be removed in pip 22.2."
244
+ ),
245
+ gone_in=None,
246
+ replacement=(
247
+ "avoiding known-bad versions by explicitly telling pip to ignore them "
248
+ "(either directly as requirements, or via a constraints file)"
249
+ ),
250
+ feature_flag=None,
251
+ issue=10655,
252
+ )
253
+ return True
254
+
255
+ @classmethod
256
+ def make_requirement_preparer(
257
+ cls,
258
+ temp_build_dir: TempDirectory,
259
+ options: Values,
260
+ req_tracker: RequirementTracker,
261
+ session: PipSession,
262
+ finder: PackageFinder,
263
+ use_user_site: bool,
264
+ download_dir: Optional[str] = None,
265
+ verbosity: int = 0,
266
+ ) -> RequirementPreparer:
267
+ """
268
+ Create a RequirementPreparer instance for the given parameters.
269
+ """
270
+ temp_build_dir_path = temp_build_dir.path
271
+ assert temp_build_dir_path is not None
272
+
273
+ resolver_variant = cls.determine_resolver_variant(options)
274
+ if resolver_variant == "2020-resolver":
275
+ lazy_wheel = "fast-deps" in options.features_enabled
276
+ if lazy_wheel:
277
+ logger.warning(
278
+ "pip is using lazily downloaded wheels using HTTP "
279
+ "range requests to obtain dependency information. "
280
+ "This experimental feature is enabled through "
281
+ "--use-feature=fast-deps and it is not ready for "
282
+ "production."
283
+ )
284
+ else:
285
+ lazy_wheel = False
286
+ if "fast-deps" in options.features_enabled:
287
+ logger.warning(
288
+ "fast-deps has no effect when used with the legacy resolver."
289
+ )
290
+
291
+ in_tree_build = "out-of-tree-build" not in options.deprecated_features_enabled
292
+ if "in-tree-build" in options.features_enabled:
293
+ deprecated(
294
+ reason="In-tree builds are now the default.",
295
+ replacement="to remove the --use-feature=in-tree-build flag",
296
+ gone_in="22.1",
297
+ )
298
+ if "out-of-tree-build" in options.deprecated_features_enabled:
299
+ deprecated(
300
+ reason="Out-of-tree builds are deprecated.",
301
+ replacement=None,
302
+ gone_in="22.1",
303
+ )
304
+
305
+ if options.progress_bar not in {"on", "off"}:
306
+ deprecated(
307
+ reason="Custom progress bar styles are deprecated",
308
+ replacement="to use the default progress bar style.",
309
+ gone_in="22.1",
310
+ )
311
+
312
+ return RequirementPreparer(
313
+ build_dir=temp_build_dir_path,
314
+ src_dir=options.src_dir,
315
+ download_dir=download_dir,
316
+ build_isolation=options.build_isolation,
317
+ req_tracker=req_tracker,
318
+ session=session,
319
+ progress_bar=options.progress_bar,
320
+ finder=finder,
321
+ require_hashes=options.require_hashes,
322
+ use_user_site=use_user_site,
323
+ lazy_wheel=lazy_wheel,
324
+ verbosity=verbosity,
325
+ in_tree_build=in_tree_build,
326
+ )
327
+
328
    @classmethod
    def make_resolver(
        cls,
        preparer: RequirementPreparer,
        finder: PackageFinder,
        options: Values,
        wheel_cache: Optional[WheelCache] = None,
        use_user_site: bool = False,
        ignore_installed: bool = True,
        ignore_requires_python: bool = False,
        force_reinstall: bool = False,
        upgrade_strategy: str = "to-satisfy-only",
        use_pep517: Optional[bool] = None,
        py_version_info: Optional[Tuple[int, ...]] = None,
    ) -> BaseResolver:
        """
        Create a Resolver instance for the given parameters.

        Depending on the variant chosen by ``determine_resolver_variant``
        (driven by ``options``), this returns either the resolvelib-based
        "2020" resolver or the legacy resolver; both implement the
        ``BaseResolver`` interface.
        """
        # Pre-bind the options that apply to every requirement, so the
        # resolver can turn requirement strings into InstallRequirements.
        make_install_req = partial(
            install_req_from_req_string,
            isolated=options.isolated_mode,
            use_pep517=use_pep517,
        )
        suppress_build_failures = cls.determine_build_failure_suppression(options)
        resolver_variant = cls.determine_resolver_variant(options)
        # The long import name and duplicated invocation is needed to convince
        # Mypy into correctly typechecking. Otherwise it would complain the
        # "Resolver" class being redefined.
        if resolver_variant == "2020-resolver":
            import pip._internal.resolution.resolvelib.resolver

            return pip._internal.resolution.resolvelib.resolver.Resolver(
                preparer=preparer,
                finder=finder,
                wheel_cache=wheel_cache,
                make_install_req=make_install_req,
                use_user_site=use_user_site,
                ignore_dependencies=options.ignore_dependencies,
                ignore_installed=ignore_installed,
                ignore_requires_python=ignore_requires_python,
                force_reinstall=force_reinstall,
                upgrade_strategy=upgrade_strategy,
                py_version_info=py_version_info,
                suppress_build_failures=suppress_build_failures,
            )
        import pip._internal.resolution.legacy.resolver

        # NOTE: the legacy resolver takes no suppress_build_failures argument;
        # build-failure suppression only applies to the 2020 resolver above.
        return pip._internal.resolution.legacy.resolver.Resolver(
            preparer=preparer,
            finder=finder,
            wheel_cache=wheel_cache,
            make_install_req=make_install_req,
            use_user_site=use_user_site,
            ignore_dependencies=options.ignore_dependencies,
            ignore_installed=ignore_installed,
            ignore_requires_python=ignore_requires_python,
            force_reinstall=force_reinstall,
            upgrade_strategy=upgrade_strategy,
            py_version_info=py_version_info,
        )
388
+
389
+ def get_requirements(
390
+ self,
391
+ args: List[str],
392
+ options: Values,
393
+ finder: PackageFinder,
394
+ session: PipSession,
395
+ ) -> List[InstallRequirement]:
396
+ """
397
+ Parse command-line arguments into the corresponding requirements.
398
+ """
399
+ requirements: List[InstallRequirement] = []
400
+ for filename in options.constraints:
401
+ for parsed_req in parse_requirements(
402
+ filename,
403
+ constraint=True,
404
+ finder=finder,
405
+ options=options,
406
+ session=session,
407
+ ):
408
+ req_to_add = install_req_from_parsed_requirement(
409
+ parsed_req,
410
+ isolated=options.isolated_mode,
411
+ user_supplied=False,
412
+ )
413
+ requirements.append(req_to_add)
414
+
415
+ for req in args:
416
+ req_to_add = install_req_from_line(
417
+ req,
418
+ None,
419
+ isolated=options.isolated_mode,
420
+ use_pep517=options.use_pep517,
421
+ user_supplied=True,
422
+ )
423
+ requirements.append(req_to_add)
424
+
425
+ for req in options.editables:
426
+ req_to_add = install_req_from_editable(
427
+ req,
428
+ user_supplied=True,
429
+ isolated=options.isolated_mode,
430
+ use_pep517=options.use_pep517,
431
+ )
432
+ requirements.append(req_to_add)
433
+
434
+ # NOTE: options.require_hashes may be set if --require-hashes is True
435
+ for filename in options.requirements:
436
+ for parsed_req in parse_requirements(
437
+ filename, finder=finder, options=options, session=session
438
+ ):
439
+ req_to_add = install_req_from_parsed_requirement(
440
+ parsed_req,
441
+ isolated=options.isolated_mode,
442
+ use_pep517=options.use_pep517,
443
+ user_supplied=True,
444
+ )
445
+ requirements.append(req_to_add)
446
+
447
+ # If any requirement has hash options, enable hash checking.
448
+ if any(req.has_hash_options for req in requirements):
449
+ options.require_hashes = True
450
+
451
+ if not (args or options.editables or options.requirements):
452
+ opts = {"name": self.name}
453
+ if options.find_links:
454
+ raise CommandError(
455
+ "You must give at least one requirement to {name} "
456
+ '(maybe you meant "pip {name} {links}"?)'.format(
457
+ **dict(opts, links=" ".join(options.find_links))
458
+ )
459
+ )
460
+ else:
461
+ raise CommandError(
462
+ "You must give at least one requirement to {name} "
463
+ '(see "pip help {name}")'.format(**opts)
464
+ )
465
+
466
+ return requirements
467
+
468
+ @staticmethod
469
+ def trace_basic_info(finder: PackageFinder) -> None:
470
+ """
471
+ Trace basic information about the provided objects.
472
+ """
473
+ # Display where finder is looking for packages
474
+ search_scope = finder.search_scope
475
+ locations = search_scope.get_formatted_locations()
476
+ if locations:
477
+ logger.info(locations)
478
+
479
+ def _build_package_finder(
480
+ self,
481
+ options: Values,
482
+ session: PipSession,
483
+ target_python: Optional[TargetPython] = None,
484
+ ignore_requires_python: Optional[bool] = None,
485
+ ) -> PackageFinder:
486
+ """
487
+ Create a package finder appropriate to this requirement command.
488
+
489
+ :param ignore_requires_python: Whether to ignore incompatible
490
+ "Requires-Python" values in links. Defaults to False.
491
+ """
492
+ link_collector = LinkCollector.create(session, options=options)
493
+ selection_prefs = SelectionPreferences(
494
+ allow_yanked=True,
495
+ format_control=options.format_control,
496
+ allow_all_prereleases=options.pre,
497
+ prefer_binary=options.prefer_binary,
498
+ ignore_requires_python=ignore_requires_python,
499
+ )
500
+
501
+ return PackageFinder.create(
502
+ link_collector=link_collector,
503
+ selection_prefs=selection_prefs,
504
+ target_python=target_python,
505
+ use_deprecated_html5lib="html5lib" in options.deprecated_features_enabled,
506
+ )
venv/lib/python3.10/site-packages/pip/_internal/cli/spinners.py ADDED
@@ -0,0 +1,157 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import contextlib
import itertools
import logging
import sys
import time
from typing import IO, Iterator, Optional

from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR

from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.logging import get_indentation
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
class SpinnerInterface:
    """Interface shared by the interactive and non-interactive spinners."""

    def spin(self) -> None:
        """Advance the spinner by one tick (subclasses must implement)."""
        raise NotImplementedError()

    def finish(self, final_status: str) -> None:
        """Stop spinning and show/log *final_status* (subclasses must implement)."""
        raise NotImplementedError()
22
+
23
+
24
class InteractiveSpinner(SpinnerInterface):
    """Spinner for interactive terminals.

    Writes ``"<message> ... "`` once on construction, then repeatedly
    overwrites a short status at the end of the same line (via backspaces)
    while work is in progress.
    """

    def __init__(
        self,
        message: str,
        # Fixed: the default is None, so the annotation must be Optional.
        file: Optional[IO[str]] = None,
        spin_chars: str = "-\\|/",
        # Empirically, 8 updates/second looks nice
        min_update_interval_seconds: float = 0.125,
    ):
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        self._spin_cycle = itertools.cycle(spin_chars)

        self._file.write(" " * get_indentation() + self._message + " ... ")
        # Width of the status text currently on screen (what _write must erase).
        self._width = 0

    def _write(self, status: str) -> None:
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self) -> None:
        """Show the next spin character, throttled by the rate limiter."""
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status: str) -> None:
        """Replace the spin character with *final_status* and end the line."""
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True
71
+
72
+
73
+ # Used for dumb terminals, non-interactive installs (no tty), etc.
74
+ # We still print updates occasionally (once every 60 seconds by default) to
75
+ # act as a keep-alive for systems like Travis-CI that take lack-of-output as
76
+ # an indication that a task has frozen.
77
# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(SpinnerInterface):
    """Spinner that emits periodic log lines instead of terminal animation."""

    def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
        self._finished = False
        self._message = message
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status: str) -> None:
        # A finished spinner must never emit again.
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self) -> None:
        # Log only while active and only once per rate-limit window.
        if not self._finished and self._rate_limiter.ready():
            self._update("still running...")

    def finish(self, final_status: str) -> None:
        if self._finished:
            return
        self._update(f"finished with status '{final_status}'")
        self._finished = True
101
+
102
+
103
class RateLimiter:
    """Throttle helper: ``ready()`` is True once at least
    ``min_update_interval_seconds`` have elapsed since the last ``reset()``.
    """

    def __init__(self, min_update_interval_seconds: float) -> None:
        self._min_update_interval_seconds = min_update_interval_seconds
        # Seed far enough in the past that the first ready() is always True,
        # regardless of the monotonic clock's (platform-defined) epoch.
        self._last_update: float = -min_update_interval_seconds

    def ready(self) -> bool:
        # time.monotonic() cannot jump backwards (NTP/DST adjustments),
        # unlike time.time(), so intervals are measured reliably.
        now = time.monotonic()
        delta = now - self._last_update
        return delta >= self._min_update_interval_seconds

    def reset(self) -> None:
        self._last_update = time.monotonic()
115
+
116
+
117
@contextlib.contextmanager
def open_spinner(message: str) -> Iterator[SpinnerInterface]:
    """Yield a spinner for *message*, finishing it appropriately on exit."""
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    interactive = sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO
    spinner: SpinnerInterface
    if interactive:
        spinner = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    # Both handlers above re-raise, so reaching this line means success.
    spinner.finish("done")
140
+
141
@contextlib.contextmanager
def hidden_cursor(file: IO[str]) -> Iterator[None]:
    """Hide the terminal cursor on *file* for the duration of the block."""
    # Skip the ANSI control characters when any of these hold (checked in
    # this order so Windows never touches file.isatty()):
    #   - Windows terminals don't support hide/show cursor codes, even via
    #     colorama;
    #   - the stream is not a tty (e.g. output redirected to a file);
    #   - the user runs with --quiet (don't clutter quiet output).
    # See https://github.com/pypa/pip/issues/3418
    if WINDOWS or not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
        return
    file.write(HIDE_CURSOR)
    try:
        yield
    finally:
        # Always restore the cursor, even if the body raised.
        file.write(SHOW_CURSOR)
venv/lib/python3.10/site-packages/pip/_internal/cli/status_codes.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
# Process exit codes used by pip commands (0 = success, non-zero = failure).
SUCCESS = 0
ERROR = 1  # generic command failure
UNKNOWN_ERROR = 2  # unexpected/unhandled failure
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
# NOTE(review): 23 presumably follows the grep/rsync-style "no matches found"
# convention — confirm against the commands that return it.
NO_MATCHES_FOUND = 23