prasb committed on
Commit
f0d9c1e
·
verified ·
1 Parent(s): dae6971

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50) hide show
  1. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-38.pyc +0 -0
  2. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-38.pyc +0 -0
  3. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-38.pyc +0 -0
  4. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/cache.cpython-38.pyc +0 -0
  5. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/check.cpython-38.pyc +0 -0
  6. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/completion.cpython-38.pyc +0 -0
  7. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-38.pyc +0 -0
  8. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/debug.cpython-38.pyc +0 -0
  9. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/download.cpython-38.pyc +0 -0
  10. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-38.pyc +0 -0
  11. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/hash.cpython-38.pyc +0 -0
  12. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/help.cpython-38.pyc +0 -0
  13. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/index.cpython-38.pyc +0 -0
  14. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-38.pyc +0 -0
  15. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/install.cpython-38.pyc +0 -0
  16. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/list.cpython-38.pyc +0 -0
  17. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/search.cpython-38.pyc +0 -0
  18. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/show.cpython-38.pyc +0 -0
  19. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-38.pyc +0 -0
  20. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-38.pyc +0 -0
  21. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/base.cpython-38.pyc +0 -0
  22. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-38.pyc +0 -0
  23. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/__pycache__/__init__.cpython-38.pyc +0 -0
  24. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/__pycache__/collector.cpython-38.pyc +0 -0
  25. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-38.pyc +0 -0
  26. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/__pycache__/sources.cpython-38.pyc +0 -0
  27. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-38.pyc +0 -0
  28. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-38.pyc +0 -0
  29. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-38.pyc +0 -0
  30. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/locations/__pycache__/base.cpython-38.pyc +0 -0
  31. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/__init__.cpython-38.pyc +0 -0
  32. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/candidate.cpython-38.pyc +0 -0
  33. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/format_control.cpython-38.pyc +0 -0
  34. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/index.cpython-38.pyc +0 -0
  35. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/link.cpython-38.pyc +0 -0
  36. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/scheme.cpython-38.pyc +0 -0
  37. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-38.pyc +0 -0
  38. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/wheel.cpython-38.pyc +0 -0
  39. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__init__.py +2 -0
  40. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__pycache__/__init__.cpython-38.pyc +0 -0
  41. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__pycache__/auth.cpython-38.pyc +0 -0
  42. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__pycache__/cache.cpython-38.pyc +0 -0
  43. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-38.pyc +0 -0
  44. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__pycache__/session.cpython-38.pyc +0 -0
  45. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__pycache__/utils.cpython-38.pyc +0 -0
  46. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-38.pyc +0 -0
  47. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/download.py +186 -0
  48. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/lazy_wheel.py +210 -0
  49. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/session.py +520 -0
  50. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/utils.py +96 -0
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-38.pyc ADDED
Binary file (6.48 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-38.pyc ADDED
Binary file (310 Bytes). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-38.pyc ADDED
Binary file (3.12 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/cache.cpython-38.pyc ADDED
Binary file (6.38 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/check.cpython-38.pyc ADDED
Binary file (1.59 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/completion.cpython-38.pyc ADDED
Binary file (4.28 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-38.pyc ADDED
Binary file (8.84 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/debug.cpython-38.pyc ADDED
Binary file (6.78 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/download.cpython-38.pyc ADDED
Binary file (4.22 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-38.pyc ADDED
Binary file (2.93 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/hash.cpython-38.pyc ADDED
Binary file (2.08 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/help.cpython-38.pyc ADDED
Binary file (1.26 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/index.cpython-38.pyc ADDED
Binary file (4.5 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-38.pyc ADDED
Binary file (2.94 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/install.cpython-38.pyc ADDED
Binary file (17.6 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/list.cpython-38.pyc ADDED
Binary file (10.2 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/search.cpython-38.pyc ADDED
Binary file (5.28 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/show.cpython-38.pyc ADDED
Binary file (6.39 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-38.pyc ADDED
Binary file (3.29 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-38.pyc ADDED
Binary file (4.96 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/base.cpython-38.pyc ADDED
Binary file (2.46 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-38.pyc ADDED
Binary file (5.34 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/__pycache__/__init__.cpython-38.pyc ADDED
Binary file (185 Bytes). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/__pycache__/collector.cpython-38.pyc ADDED
Binary file (15.3 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-38.pyc ADDED
Binary file (29.4 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/__pycache__/sources.cpython-38.pyc ADDED
Binary file (8.96 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-38.pyc ADDED
Binary file (11 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-38.pyc ADDED
Binary file (4.58 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-38.pyc ADDED
Binary file (5.99 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/locations/__pycache__/base.cpython-38.pyc ADDED
Binary file (2.37 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/__init__.cpython-38.pyc ADDED
Binary file (219 Bytes). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/candidate.cpython-38.pyc ADDED
Binary file (1.41 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/format_control.cpython-38.pyc ADDED
Binary file (2.67 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/index.cpython-38.pyc ADDED
Binary file (1.2 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/link.cpython-38.pyc ADDED
Binary file (18.1 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/scheme.cpython-38.pyc ADDED
Binary file (991 Bytes). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-38.pyc ADDED
Binary file (3.5 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/wheel.cpython-38.pyc ADDED
Binary file (4.43 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ """Contains purely network-related utilities.
2
+ """
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__pycache__/__init__.cpython-38.pyc ADDED
Binary file (207 Bytes). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__pycache__/auth.cpython-38.pyc ADDED
Binary file (14.4 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__pycache__/cache.cpython-38.pyc ADDED
Binary file (4.42 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-38.pyc ADDED
Binary file (8.32 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__pycache__/session.cpython-38.pyc ADDED
Binary file (12.5 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__pycache__/utils.cpython-38.pyc ADDED
Binary file (1.4 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-38.pyc ADDED
Binary file (2.07 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/download.py ADDED
@@ -0,0 +1,186 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Download files with progress indicators.
2
+ """
3
+ import email.message
4
+ import logging
5
+ import mimetypes
6
+ import os
7
+ from typing import Iterable, Optional, Tuple
8
+
9
+ from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
10
+
11
+ from pip._internal.cli.progress_bars import get_download_progress_renderer
12
+ from pip._internal.exceptions import NetworkConnectionError
13
+ from pip._internal.models.index import PyPI
14
+ from pip._internal.models.link import Link
15
+ from pip._internal.network.cache import is_from_cache
16
+ from pip._internal.network.session import PipSession
17
+ from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
18
+ from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext
19
+
20
+ logger = logging.getLogger(__name__)
21
+
22
+
23
+ def _get_http_response_size(resp: Response) -> Optional[int]:
24
+ try:
25
+ return int(resp.headers["content-length"])
26
+ except (ValueError, KeyError, TypeError):
27
+ return None
28
+
29
+
30
def _prepare_download(
    resp: Response,
    link: Link,
    progress_bar: str,
) -> Iterable[bytes]:
    """Log the start of a download and return the response's byte chunks,
    wrapped in a progress renderer when a progress bar should be shown."""
    total_length = _get_http_response_size(resp)

    # PyPI's file host gets its tidier display form; elsewhere just drop
    # the URL fragment.
    if link.netloc == PyPI.file_storage_domain:
        url = link.show_url
    else:
        url = link.url_without_fragment

    logged_url = redact_auth_from_url(url)
    if total_length:
        logged_url = f"{logged_url} ({format_size(total_length)})"

    cached = is_from_cache(resp)
    logger.info("Using cached %s" if cached else "Downloading %s", logged_url)

    if logger.getEffectiveLevel() > logging.INFO or cached:
        show_progress = False
    else:
        # Unknown sizes always get a bar; known sizes only when big enough.
        show_progress = not total_length or total_length > (40 * 1000)

    chunks = response_chunks(resp, CONTENT_CHUNK_SIZE)
    if not show_progress:
        return chunks

    renderer = get_download_progress_renderer(bar_type=progress_bar, size=total_length)
    return renderer(chunks)
70
+
71
+
72
def sanitize_content_filename(filename: str) -> str:
    """Reduce a Content-Disposition "filename" value to its basename so a
    crafted header cannot smuggle directory components (e.g. "..")."""
    return os.path.basename(filename)
77
+
78
+
79
def parse_content_disposition(content_disposition: str, default_filename: str) -> str:
    """Extract the "filename" parameter from a Content-Disposition header,
    returning *default_filename* when no usable name is present."""
    message = email.message.Message()
    message["content-type"] = content_disposition
    filename = message.get_param("filename")
    if not filename:
        return default_filename
    # Keep only the basename: a crafted header must not be able to inject
    # ".." path parts and escape the download directory.
    return os.path.basename(str(filename)) or default_filename
92
+
93
+
94
def _get_http_response_filename(resp: Response, link: Link) -> str:
    """Choose a filename for the downloaded file.

    Preference order: the Content-Disposition header, then the link's own
    filename; when the chosen name lacks an extension, one is appended
    from the Content-Type header or from the final (possibly redirected)
    response URL.
    """
    filename = link.filename  # fallback when no better hint exists
    content_disposition = resp.headers.get("content-disposition")
    if content_disposition:
        filename = parse_content_disposition(content_disposition, filename)

    ext: Optional[str] = splitext(filename)[1]
    if not ext:
        # No extension yet: try the declared media type first...
        ext = mimetypes.guess_extension(resp.headers.get("content-type", ""))
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        # ...and failing that, whatever the response URL ends with.
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    return filename
113
+
114
+
115
def _http_get_download(session: PipSession, link: Link) -> Response:
    """Issue a streaming GET for *link* (URL fragment stripped) and raise
    on an error status."""
    url, _, _ = link.url.partition("#")
    resp = session.get(url, headers=HEADERS, stream=True)
    raise_for_status(resp)
    return resp
120
+
121
+
122
class Downloader:
    """Download a single link into a directory, with progress logging."""

    def __init__(
        self,
        session: PipSession,
        progress_bar: str,
    ) -> None:
        self._session = session
        self._progress_bar = progress_bar

    def __call__(self, link: Link, location: str) -> Tuple[str, str]:
        """Download *link* into *location*; return (filepath, content type)."""
        try:
            response = _http_get_download(self._session, link)
        except NetworkConnectionError as exc:
            assert exc.response is not None
            logger.critical(
                "HTTP error %s while getting %s", exc.response.status_code, link
            )
            raise

        target = os.path.join(location, _get_http_response_filename(response, link))
        body = _prepare_download(response, link, self._progress_bar)
        with open(target, "wb") as out:
            for piece in body:
                out.write(piece)
        return target, response.headers.get("Content-Type", "")
151
+
152
+
153
class BatchDownloader:
    """Download many links into one directory, yielding each result as it
    completes."""

    def __init__(
        self,
        session: PipSession,
        progress_bar: str,
    ) -> None:
        self._session = session
        self._progress_bar = progress_bar

    def __call__(
        self, links: Iterable[Link], location: str
    ) -> Iterable[Tuple[Link, Tuple[str, str]]]:
        """Download each link into *location*, yielding
        (link, (filepath, content type)) pairs."""
        for link in links:
            try:
                response = _http_get_download(self._session, link)
            except NetworkConnectionError as exc:
                assert exc.response is not None
                logger.critical(
                    "HTTP error %s while getting %s",
                    exc.response.status_code,
                    link,
                )
                raise

            target = os.path.join(
                location, _get_http_response_filename(response, link)
            )
            body = _prepare_download(response, link, self._progress_bar)
            with open(target, "wb") as out:
                for piece in body:
                    out.write(piece)
            yield link, (target, response.headers.get("Content-Type", ""))
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/lazy_wheel.py ADDED
@@ -0,0 +1,210 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Lazy ZIP over HTTP"""
2
+
3
+ __all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"]
4
+
5
+ from bisect import bisect_left, bisect_right
6
+ from contextlib import contextmanager
7
+ from tempfile import NamedTemporaryFile
8
+ from typing import Any, Dict, Generator, List, Optional, Tuple
9
+ from zipfile import BadZipFile, ZipFile
10
+
11
+ from pip._vendor.packaging.utils import canonicalize_name
12
+ from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
13
+
14
+ from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution
15
+ from pip._internal.network.session import PipSession
16
+ from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
17
+
18
+
19
class HTTPRangeRequestUnsupported(Exception):
    """Raised when the remote server does not honour HTTP range requests."""
21
+
22
+
23
def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution:
    """Return a distribution object for the wheel at *url*.

    Only the portion of the wheel holding its metadata is fetched, via
    HTTP range requests; HTTPRangeRequestUnsupported is raised when the
    server cannot serve ranges.
    """
    with LazyZipOverHTTP(url, session) as lazy_file:
        # ZipFile-style consumers only need read/seek/seekable/tell,
        # which LazyZipOverHTTP provides.
        wheel = MemoryWheel(lazy_file.name, lazy_file)  # type: ignore
        # wheel.name deliberately stops being a valid path once the
        # context manager exits.
        return get_wheel_distribution(wheel, canonicalize_name(name))
38
+
39
+
40
class LazyZipOverHTTP:
    """File-like object mapped to a ZIP file over HTTP.

    Bytes are fetched lazily with HTTP range requests and spooled into a
    temporary file, which is what ZipFile ultimately reads. If the server
    does not support range requests, HTTPRangeRequestUnsupported is
    raised during initialization.
    """

    def __init__(
        self, url: str, session: PipSession, chunk_size: int = CONTENT_CHUNK_SIZE
    ) -> None:
        head = session.head(url, headers=HEADERS)
        raise_for_status(head)
        assert head.status_code == 200
        self._session, self._url, self._chunk_size = session, url, chunk_size
        self._length = int(head.headers["Content-Length"])
        self._file = NamedTemporaryFile()
        self.truncate(self._length)
        # Sorted, disjoint list of already-downloaded inclusive ranges:
        # self._left[i]..self._right[i].
        self._left: List[int] = []
        self._right: List[int] = []
        if "bytes" not in head.headers.get("Accept-Ranges", "none"):
            raise HTTPRangeRequestUnsupported("range request is not supported")
        self._check_zip()

    @property
    def mode(self) -> str:
        """Opening mode, which is always rb."""
        return "rb"

    @property
    def name(self) -> str:
        """Path of the backing temporary file."""
        return self._file.name

    def seekable(self) -> bool:
        """Random access is always supported."""
        return True

    def close(self) -> None:
        """Close (and thereby delete) the backing temporary file."""
        self._file.close()

    @property
    def closed(self) -> bool:
        """Whether the backing file is closed."""
        return self._file.closed

    def read(self, size: int = -1) -> bytes:
        """Read and return up to *size* bytes (all remaining bytes when
        *size* is negative or omitted), downloading whatever is missing.

        Fewer than *size* bytes may be returned at EOF.
        """
        fetch_size = max(size, self._chunk_size)
        cursor, total = self.tell(), self._length
        stop = total if size < 0 else min(cursor + fetch_size, total)
        begin = max(0, stop - fetch_size)
        self._download(begin, stop - 1)
        return self._file.read(size)

    def readable(self) -> bool:
        """The file is always readable."""
        return True

    def seek(self, offset: int, whence: int = 0) -> int:
        """Change stream position and return the new absolute position.

        *whence* follows the usual convention: 0 from the start
        (offset >= 0), 1 relative to the current position, 2 relative to
        the end (offset usually negative).
        """
        return self._file.seek(offset, whence)

    def tell(self) -> int:
        """Return the current stream position."""
        return self._file.tell()

    def truncate(self, size: Optional[int] = None) -> int:
        """Resize the backing file to *size* bytes (current position when
        unspecified) without moving the stream position; return the new
        size."""
        return self._file.truncate(size)

    def writable(self) -> bool:
        """Writing through this object is not supported."""
        return False

    def __enter__(self) -> "LazyZipOverHTTP":
        self._file.__enter__()
        return self

    def __exit__(self, *exc: Any) -> None:
        self._file.__exit__(*exc)

    @contextmanager
    def _stay(self) -> Generator[None, None, None]:
        """Context manager that restores the stream position on exit."""
        saved = self.tell()
        try:
            yield
        finally:
            self.seek(saved)

    def _check_zip(self) -> None:
        """Download from the tail until the buffer parses as a valid ZIP."""
        end = self._length - 1
        for start in reversed(range(0, end, self._chunk_size)):
            self._download(start, end)
            with self._stay():
                try:
                    # Read-only ZipFile access only needs read, seek,
                    # seekable and tell.
                    ZipFile(self)  # type: ignore
                except BadZipFile:
                    pass
                else:
                    break

    def _stream_response(
        self, start: int, end: int, base_headers: Dict[str, str] = HEADERS
    ) -> Response:
        """Issue a range request for bytes start-end and return the response."""
        headers = base_headers.copy()
        headers["Range"] = f"bytes={start}-{end}"
        # TODO: Get range requests to be correctly cached
        headers["Cache-Control"] = "no-cache"
        return self._session.get(self._url, headers=headers, stream=True)

    def _merge(
        self, start: int, end: int, left: int, right: int
    ) -> Generator[Tuple[int, int], None, None]:
        """Yield the still-missing sub-ranges of [start, end] and collapse
        the overlapping downloaded ranges at indices [left, right) into one.

        Args:
            start (int): Start of needed interval
            end (int): End of needed interval
            left (int): Index of first overlapping downloaded data
            right (int): Index after last overlapping downloaded data
        """
        lslice, rslice = self._left[left:right], self._right[left:right]
        i = start = min([start] + lslice[:1])
        end = max([end] + rslice[-1:])
        for j, k in zip(lslice, rslice):
            if j > i:
                yield i, j - 1
            i = k + 1
        if i <= end:
            yield i, end
        self._left[left:right], self._right[left:right] = [start], [end]

    def _download(self, start: int, end: int) -> None:
        """Download bytes start through end inclusive into the backing file."""
        with self._stay():
            left = bisect_left(self._right, start)
            right = bisect_right(self._left, end)
            for start, end in self._merge(start, end, left, right):
                response = self._stream_response(start, end)
                response.raise_for_status()
                self.seek(start)
                for chunk in response_chunks(response, self._chunk_size):
                    self._file.write(chunk)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/session.py ADDED
@@ -0,0 +1,520 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """PipSession and supporting code, containing all pip-specific
2
+ network request configuration and behavior.
3
+ """
4
+
5
+ import email.utils
6
+ import io
7
+ import ipaddress
8
+ import json
9
+ import logging
10
+ import mimetypes
11
+ import os
12
+ import platform
13
+ import shutil
14
+ import subprocess
15
+ import sys
16
+ import urllib.parse
17
+ import warnings
18
+ from typing import (
19
+ TYPE_CHECKING,
20
+ Any,
21
+ Dict,
22
+ Generator,
23
+ List,
24
+ Mapping,
25
+ Optional,
26
+ Sequence,
27
+ Tuple,
28
+ Union,
29
+ )
30
+
31
+ from pip._vendor import requests, urllib3
32
+ from pip._vendor.cachecontrol import CacheControlAdapter as _BaseCacheControlAdapter
33
+ from pip._vendor.requests.adapters import DEFAULT_POOLBLOCK, BaseAdapter
34
+ from pip._vendor.requests.adapters import HTTPAdapter as _BaseHTTPAdapter
35
+ from pip._vendor.requests.models import PreparedRequest, Response
36
+ from pip._vendor.requests.structures import CaseInsensitiveDict
37
+ from pip._vendor.urllib3.connectionpool import ConnectionPool
38
+ from pip._vendor.urllib3.exceptions import InsecureRequestWarning
39
+
40
+ from pip import __version__
41
+ from pip._internal.metadata import get_default_environment
42
+ from pip._internal.models.link import Link
43
+ from pip._internal.network.auth import MultiDomainBasicAuth
44
+ from pip._internal.network.cache import SafeFileCache
45
+
46
+ # Import ssl from compat so the initial import occurs in only one place.
47
+ from pip._internal.utils.compat import has_tls
48
+ from pip._internal.utils.glibc import libc_ver
49
+ from pip._internal.utils.misc import build_url_from_netloc, parse_netloc
50
+ from pip._internal.utils.urls import url_to_path
51
+
52
+ if TYPE_CHECKING:
53
+ from ssl import SSLContext
54
+
55
+ from pip._vendor.urllib3.poolmanager import PoolManager
56
+
57
+
58
logger = logging.getLogger(__name__)

# An origin pip is allowed to talk to, as a (protocol, host, port) triple.
# The host may be a hostname, an IP network in CIDR notation, or "*";
# the port may be an int, "*", or None (meaning "no port component").
SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]


# Ignore warning raised when using --trusted-host.
# (pip deliberately makes unverified HTTPS requests to trusted hosts, so
# urllib3's InsecureRequestWarning would only be noise.)
warnings.filterwarnings("ignore", category=InsecureRequestWarning)


SECURE_ORIGINS: List[SecureOrigin] = [
    # protocol, hostname, port
    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
    ("https", "*", "*"),
    ("*", "localhost", "*"),
    ("*", "127.0.0.0/8", "*"),
    ("*", "::1/128", "*"),
    ("file", "*", None),
    # ssh is always secure.
    ("ssh", "*", "*"),
]
78
+
79
+
80
# Environment variables that are present when running under various CI
# systems. For each variable, some CI systems that set it are indicated.
# The collection was chosen so that for each of a number of popular
# systems, at least one of the environment variables is used. This list
# is used to provide some indication of (and a lower bound for) CI
# traffic to PyPI, so it is okay if it is not comprehensive.
# For more background, see: https://github.com/pypa/pip/issues/5499
CI_ENVIRONMENT_VARIABLES = (
    # Azure Pipelines
    "BUILD_BUILDID",
    # Jenkins
    "BUILD_ID",
    # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
    "CI",
    # Explicit environment variable.
    "PIP_IS_CI",
)


def looks_like_ci() -> bool:
    """
    Return whether it looks like pip is running under CI.
    """
    # Checking for a tty (e.g. via isatty()) is deliberately avoided:
    # some CI systems mimic a tty (e.g. Travis CI), so that method is
    # not definitive in either direction. Presence of a known CI
    # environment variable is the signal used instead.
    for variable in CI_ENVIRONMENT_VARIABLES:
        if variable in os.environ:
            return True
    return False
107
+
108
+
109
def user_agent() -> str:
    """
    Return a string representing the user agent.

    The string has the form ``pip/<version> <json>`` where ``<json>`` is a
    compact, sorted JSON document describing the interpreter, OS, libc,
    OpenSSL, setuptools/rustc versions and whether pip appears to run under
    CI. It is sent with every request pip makes.
    """
    data: Dict[str, Any] = {
        "installer": {"name": "pip", "version": __version__},
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }

    # Fill in the implementation version per interpreter flavor.
    if data["implementation"]["name"] == "CPython":
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == "PyPy":
        pypy_version_info = sys.pypy_version_info  # type: ignore
        # Drop the release-level suffix for final releases (e.g. 7.3.9
        # rather than 7.3.9-final-0).
        if pypy_version_info.releaselevel == "final":
            pypy_version_info = pypy_version_info[:3]
        data["implementation"]["version"] = ".".join(
            [str(x) for x in pypy_version_info]
        )
    elif data["implementation"]["name"] == "Jython":
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == "IronPython":
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        from pip._vendor import distro

        # Keep only the non-empty fields of the distro / libc information.
        linux_distribution = distro.name(), distro.version(), distro.codename()
        distro_infos: Dict[str, Any] = dict(
            filter(
                lambda x: x[1],
                zip(["name", "version", "id"], linux_distribution),
            )
        )
        libc = dict(
            filter(
                lambda x: x[1],
                zip(["lib", "version"], libc_ver()),
            )
        )
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()

    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()

    if platform.machine():
        data["cpu"] = platform.machine()

    if has_tls():
        import _ssl as ssl

        data["openssl_version"] = ssl.OPENSSL_VERSION

    setuptools_dist = get_default_environment().get_distribution("setuptools")
    if setuptools_dist is not None:
        data["setuptools_version"] = str(setuptools_dist.version)

    if shutil.which("rustc") is not None:
        # If for any reason `rustc --version` fails, silently ignore it
        try:
            rustc_output = subprocess.check_output(
                ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5
            )
        except Exception:
            pass
        else:
            if rustc_output.startswith(b"rustc "):
                # The format of `rustc --version` is:
                # `b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n'`
                # We extract just the middle (1.52.1) part
                data["rustc_version"] = rustc_output.split(b" ")[1].decode()

    # Use None rather than False so as not to give the impression that
    # pip knows it is not being run under CI.  Rather, it is a null or
    # inconclusive result.  Also, we include some value rather than no
    # value to make it easier to know that the check has been run.
    data["ci"] = True if looks_like_ci() else None

    # Allow callers to append arbitrary data via the environment.
    user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
    if user_data is not None:
        data["user_data"] = user_data

    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )
208
+
209
+
210
class LocalFSAdapter(BaseAdapter):
    """Transport adapter that answers ``file://`` requests from the local
    filesystem instead of the network."""

    def send(
        self,
        request: PreparedRequest,
        stream: bool = False,
        timeout: Optional[Union[float, Tuple[float, float]]] = None,
        verify: Union[bool, str] = True,
        cert: Optional[Union[str, Tuple[str, str]]] = None,
        proxies: Optional[Mapping[str, str]] = None,
    ) -> Response:
        """Build a Response whose body is the file named by ``request.url``.

        A missing or unreadable file is reported as a 404 whose body
        carries the OS error text.
        """
        file_path = url_to_path(request.url)

        response = Response()
        response.status_code = 200
        response.url = request.url

        try:
            stat_info = os.stat(file_path)
        except OSError as exc:
            # Turn the OS-level failure into a readable 404 body.
            response.status_code = 404
            response.reason = type(exc).__name__
            error_body = f"{response.reason}: {exc}".encode("utf8")
            response.raw = io.BytesIO(error_body)
        else:
            last_modified = email.utils.formatdate(stat_info.st_mtime, usegmt=True)
            guessed_type = mimetypes.guess_type(file_path)[0] or "text/plain"
            response.headers = CaseInsensitiveDict(
                {
                    "Content-Type": guessed_type,
                    "Content-Length": stat_info.st_size,
                    "Last-Modified": last_modified,
                }
            )

            response.raw = open(file_path, "rb")
            # Closing the response must close the underlying file handle.
            response.close = response.raw.close

        return response

    def close(self) -> None:
        """No connections are held, so there is nothing to release."""
252
+
253
+
254
class _SSLContextAdapterMixin:
    """Mixin that adds an ``ssl_context`` constructor argument to HTTP
    adapters.

    The context is forwarded straight to the adapter's pool manager, which
    lets the SSL store be chosen at runtime — this is how the optional
    ``truststore`` backend is implemented.
    """

    def __init__(
        self,
        *,
        ssl_context: Optional["SSLContext"] = None,
        **kwargs: Any,
    ) -> None:
        # Remember the context; it is applied lazily in init_poolmanager().
        self._ssl_context = ssl_context
        super().__init__(**kwargs)

    def init_poolmanager(
        self,
        connections: int,
        maxsize: int,
        block: bool = DEFAULT_POOLBLOCK,
        **pool_kwargs: Any,
    ) -> "PoolManager":
        context = self._ssl_context
        if context is not None:
            # setdefault so an explicitly passed ssl_context still wins.
            pool_kwargs.setdefault("ssl_context", context)
        return super().init_poolmanager(  # type: ignore[misc]
            connections=connections,
            maxsize=maxsize,
            block=block,
            **pool_kwargs,
        )
286
+
287
+
288
class HTTPAdapter(_SSLContextAdapterMixin, _BaseHTTPAdapter):
    """requests' stock HTTPAdapter extended with ``ssl_context`` support."""
290
+
291
+
292
class CacheControlAdapter(_SSLContextAdapterMixin, _BaseCacheControlAdapter):
    """Caching adapter (cachecontrol) extended with ``ssl_context`` support."""
294
+
295
+
296
class InsecureHTTPAdapter(HTTPAdapter):
    """HTTPAdapter that never verifies TLS certificates.

    Mounted for plain http:// URLs and for hosts the user marked as
    trusted (``--trusted-host``).
    """

    def cert_verify(
        self,
        conn: ConnectionPool,
        url: str,
        verify: Union[bool, str],
        cert: Optional[Union[str, Tuple[str, str]]],
    ) -> None:
        # Deliberately discard the caller-supplied ``verify`` value and
        # force certificate verification off.
        super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
305
+
306
+
307
class InsecureCacheControlAdapter(CacheControlAdapter):
    """CacheControlAdapter that never verifies TLS certificates.

    Used for trusted hosts when an HTTP cache directory is configured.
    """

    def cert_verify(
        self,
        conn: ConnectionPool,
        url: str,
        verify: Union[bool, str],
        cert: Optional[Union[str, Tuple[str, str]]],
    ) -> None:
        # Deliberately discard the caller-supplied ``verify`` value and
        # force certificate verification off.
        super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
316
+
317
+
318
class PipSession(requests.Session):
    """A requests Session preconfigured for pip: user agent, retries,
    optional caching, multi-index authentication, ``file://`` support and
    trusted-host handling."""

    # Default timeout (seconds) applied to every request made through this
    # session; ``None`` means no default timeout.
    timeout: Optional[int] = None

    def __init__(
        self,
        *args: Any,
        retries: int = 0,
        cache: Optional[str] = None,
        trusted_hosts: Sequence[str] = (),
        index_urls: Optional[List[str]] = None,
        ssl_context: Optional["SSLContext"] = None,
        **kwargs: Any,
    ) -> None:
        """
        :param retries: Total number of retries for a failed request.
        :param cache: Directory for the HTTP cache; caching is disabled
            when falsy.
        :param trusted_hosts: Domains not to emit warnings for when not using
            HTTPS.
        :param index_urls: Index URLs forwarded to the authentication
            handler.
        :param ssl_context: Optional SSL context forwarded to the secure
            adapters.
        """
        super().__init__(*args, **kwargs)

        # Namespace the attribute with "pip_" just in case to prevent
        # possible conflicts with the base class.
        self.pip_trusted_origins: List[Tuple[str, Optional[int]]] = []

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth(index_urls=index_urls)

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,
            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            # A 500 may indicate transient error in Amazon S3
            # A 502 may be a transient error from a CDN like CloudFlare or CloudFront
            # A 520 or 527 - may indicate transient error in CloudFlare
            status_forcelist=[500, 502, 503, 520, 527],
            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )  # type: ignore

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching so we'll use it for all http:// URLs.
        # If caching is disabled, we will also use it for
        # https:// hosts that we've marked as ignoring
        # TLS errors for (trusted-hosts).
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        # We want to _only_ cache responses on securely fetched origins or when
        # the host is specified as trusted. We do this because
        # we can't validate the response of an insecurely/untrusted fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache),
                max_retries=retries,
                ssl_context=ssl_context,
            )
            self._trusted_host_adapter = InsecureCacheControlAdapter(
                cache=SafeFileCache(cache),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries, ssl_context=ssl_context)
            self._trusted_host_adapter = insecure_adapter

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        for host in trusted_hosts:
            self.add_trusted_host(host, suppress_logging=True)

    def update_index_urls(self, new_index_urls: List[str]) -> None:
        """
        :param new_index_urls: New index urls to update the authentication
            handler with.
        """
        self.auth.index_urls = new_index_urls

    def add_trusted_host(
        self, host: str, source: Optional[str] = None, suppress_logging: bool = False
    ) -> None:
        """Register ``host`` as trusted and mount the non-verifying adapter
        for it (HTTP and HTTPS, with and without an explicit port).

        :param host: It is okay to provide a host that has previously been
            added.
        :param source: An optional source string, for logging where the host
            string came from.
        :raises ValueError: If ``host`` has no host part.
        """
        if not suppress_logging:
            msg = f"adding trusted host: {host!r}"
            if source is not None:
                msg += f" (from {source})"
            logger.info(msg)

        parsed_host, parsed_port = parse_netloc(host)
        if parsed_host is None:
            raise ValueError(f"Trusted host URL must include a host part: {host!r}")
        if (parsed_host, parsed_port) not in self.pip_trusted_origins:
            self.pip_trusted_origins.append((parsed_host, parsed_port))

        # Mount the trusted-host adapter for both schemes; the trailing "/"
        # keeps e.g. "example.com" from matching "example.com.evil.org".
        self.mount(
            build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter
        )
        self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter)
        if not parsed_port:
            self.mount(
                build_url_from_netloc(host, scheme="http") + ":",
                self._trusted_host_adapter,
            )
            # Mount wildcard ports for the same host.
            self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter)

    def iter_secure_origins(self) -> Generator[SecureOrigin, None, None]:
        """Yield the built-in secure origins followed by the user-added
        trusted hosts (as wildcard-protocol origins)."""
        yield from SECURE_ORIGINS
        for host, port in self.pip_trusted_origins:
            yield ("*", host, "*" if port is None else port)

    def is_secure_origin(self, location: Link) -> bool:
        """Return True if ``location`` is served from a secure origin or a
        trusted host; otherwise log a warning and return False."""
        # Determine if this url used a secure transport mechanism
        parsed = urllib.parse.urlparse(str(location))
        origin_protocol, origin_host, origin_port = (
            parsed.scheme,
            parsed.hostname,
            parsed.port,
        )

        # The protocol to use to see if the protocol matches.
        # Don't count the repository type as part of the protocol: in
        # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
        # the last scheme.)
        origin_protocol = origin_protocol.rsplit("+", 1)[-1]

        # Determine if our origin is a secure origin by looking through our
        # hardcoded list of secure origins, as well as any additional ones
        # configured on this PackageFinder instance.
        for secure_origin in self.iter_secure_origins():
            secure_protocol, secure_host, secure_port = secure_origin
            if origin_protocol != secure_protocol and secure_protocol != "*":
                continue

            try:
                addr = ipaddress.ip_address(origin_host or "")
                network = ipaddress.ip_network(secure_host)
            except ValueError:
                # We don't have both a valid address or a valid network, so
                # we'll check this origin against hostnames.
                if (
                    origin_host
                    and origin_host.lower() != secure_host.lower()
                    and secure_host != "*"
                ):
                    continue
            else:
                # We have a valid address and network, so see if the address
                # is contained within the network.
                if addr not in network:
                    continue

            # Check to see if the port matches.
            if (
                origin_port != secure_port
                and secure_port != "*"
                and secure_port is not None
            ):
                continue

            # If we've gotten here, then this origin matches the current
            # secure origin and we should return True
            return True

        # If we've gotten to this point, then the origin isn't secure and we
        # will not accept it as a valid location to search. We will however
        # log a warning that we are ignoring it.
        logger.warning(
            "The repository located at %s is not a trusted or secure host and "
            "is being ignored. If this repository is available via HTTPS we "
            "recommend you use HTTPS instead, otherwise you may silence "
            "this warning and allow it anyway with '--trusted-host %s'.",
            origin_host,
            origin_host,
        )

        return False

    def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response:
        """Dispatch a request, filling in the session-level timeout and
        proxies when the caller did not supply them."""
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)
        # Allow setting a default proxies on a session
        kwargs.setdefault("proxies", self.proxies)

        # Dispatch the actual request
        return super().request(method, url, *args, **kwargs)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/utils.py ADDED
@@ -0,0 +1,96 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Dict, Generator
2
+
3
+ from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
4
+
5
+ from pip._internal.exceptions import NetworkConnectionError
6
+
7
# Default headers for pip's download requests: ask for the bytes exactly as
# stored, with no transfer compression.
#
# The following comments and HTTP headers were originally added by
# Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03.
#
# We use Accept-Encoding: identity here because requests defaults to
# accepting compressed responses. This breaks in a variety of ways
# depending on how the server is configured.
# - Some servers will notice that the file isn't a compressible file
#   and will leave the file alone and with an empty Content-Encoding
# - Some servers will notice that the file is already compressed and
#   will leave the file alone, adding a Content-Encoding: gzip header
# - Some servers won't notice anything at all and will take a file
#   that's already been compressed and compress it again, and set
#   the Content-Encoding: gzip header
# By setting this to request only the identity encoding we're hoping
# to eliminate the third case.  Hopefully there does not exist a server
# which when given a file will notice it is already compressed and that
# you're not asking for a compressed file and will then decompress it
# before sending because if that's the case I don't think it'll ever be
# possible to make this work.
HEADERS: Dict[str, str] = {"Accept-Encoding": "identity"}
27
+
28
+
29
def raise_for_status(resp: Response) -> None:
    """Raise :class:`NetworkConnectionError` if *resp* carries a 4xx or 5xx
    status code; otherwise return silently."""
    if isinstance(resp.reason, bytes):
        # Some servers localize their reason strings, so the bytes may not
        # be ASCII.  Try utf-8 first and fall back to iso-8859-1 (which
        # cannot fail) for every other encoding.
        try:
            reason = resp.reason.decode("utf-8")
        except UnicodeDecodeError:
            reason = resp.reason.decode("iso-8859-1")
    else:
        reason = resp.reason

    if 400 <= resp.status_code < 500:
        category = "Client Error"
    elif 500 <= resp.status_code < 600:
        category = "Server Error"
    else:
        # Informational / success / redirect codes are not errors.
        return

    raise NetworkConnectionError(
        f"{resp.status_code} {category}: {reason} for url: {resp.url}",
        response=resp,
    )
55
+
56
+
57
def response_chunks(
    response: Response, chunk_size: int = CONTENT_CHUNK_SIZE
) -> Generator[bytes, None, None]:
    """Yield the body of a requests Response in chunks of at most
    *chunk_size* bytes."""
    try:
        # urllib3 raw responses expose ``stream``; prefer it so we receive
        # the payload exactly as it came off the wire.
        #
        # ``decode_content=False`` stops urllib3 from transparently
        # decompressing the body.  pip verifies checksums against the bytes
        # as served, so if a server sets a Content-Encoding header on an
        # already-compressed file (which happens — some servers recompress,
        # some merely label existing compression), decompressing here would
        # make the hash of what we yield differ from the hash of the file.
        raw_stream = response.raw.stream(
            chunk_size,
            decode_content=False,
        )
        for piece in raw_stream:
            yield piece
    except AttributeError:
        # ``response.raw`` is a plain file-like object (e.g. from
        # LocalFSAdapter): read fixed-size chunks until EOF.
        while piece := response.raw.read(chunk_size):
            yield piece