ZTWHHH commited on
Commit
7431fdb
·
verified ·
1 Parent(s): 6cc620d

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. falcon/lib/python3.10/site-packages/httpcore-0.17.3.dist-info/INSTALLER +1 -0
  3. falcon/lib/python3.10/site-packages/httpcore-0.17.3.dist-info/LICENSE.md +27 -0
  4. falcon/lib/python3.10/site-packages/httpcore-0.17.3.dist-info/METADATA +542 -0
  5. falcon/lib/python3.10/site-packages/httpcore-0.17.3.dist-info/top_level.txt +4 -0
  6. falcon/lib/python3.10/site-packages/pandas/__pycache__/__init__.cpython-310.pyc +0 -0
  7. falcon/lib/python3.10/site-packages/pandas/__pycache__/_typing.cpython-310.pyc +0 -0
  8. falcon/lib/python3.10/site-packages/pandas/__pycache__/_version.cpython-310.pyc +0 -0
  9. falcon/lib/python3.10/site-packages/pandas/__pycache__/_version_meson.cpython-310.pyc +0 -0
  10. falcon/lib/python3.10/site-packages/pandas/__pycache__/conftest.cpython-310.pyc +0 -0
  11. falcon/lib/python3.10/site-packages/pandas/__pycache__/testing.cpython-310.pyc +0 -0
  12. falcon/lib/python3.10/site-packages/pandas/_config/__pycache__/config.cpython-310.pyc +0 -0
  13. falcon/lib/python3.10/site-packages/pandas/_testing/__init__.py +639 -0
  14. falcon/lib/python3.10/site-packages/pandas/_testing/__pycache__/__init__.cpython-310.pyc +0 -0
  15. falcon/lib/python3.10/site-packages/pandas/_testing/__pycache__/_hypothesis.cpython-310.pyc +0 -0
  16. falcon/lib/python3.10/site-packages/pandas/_testing/__pycache__/_io.cpython-310.pyc +0 -0
  17. falcon/lib/python3.10/site-packages/pandas/_testing/__pycache__/_warnings.cpython-310.pyc +0 -0
  18. falcon/lib/python3.10/site-packages/pandas/_testing/__pycache__/asserters.cpython-310.pyc +0 -0
  19. falcon/lib/python3.10/site-packages/pandas/_testing/__pycache__/compat.cpython-310.pyc +0 -0
  20. falcon/lib/python3.10/site-packages/pandas/_testing/__pycache__/contexts.cpython-310.pyc +0 -0
  21. falcon/lib/python3.10/site-packages/pandas/_testing/_hypothesis.py +93 -0
  22. falcon/lib/python3.10/site-packages/pandas/_testing/_io.py +170 -0
  23. falcon/lib/python3.10/site-packages/pandas/_testing/_warnings.py +232 -0
  24. falcon/lib/python3.10/site-packages/pandas/_testing/asserters.py +1435 -0
  25. falcon/lib/python3.10/site-packages/pandas/_testing/contexts.py +257 -0
  26. falcon/lib/python3.10/site-packages/pandas/api/__init__.py +16 -0
  27. falcon/lib/python3.10/site-packages/pandas/api/__pycache__/__init__.cpython-310.pyc +0 -0
  28. falcon/lib/python3.10/site-packages/pandas/api/extensions/__init__.py +33 -0
  29. falcon/lib/python3.10/site-packages/pandas/api/extensions/__pycache__/__init__.cpython-310.pyc +0 -0
  30. falcon/lib/python3.10/site-packages/pandas/api/indexers/__init__.py +17 -0
  31. falcon/lib/python3.10/site-packages/pandas/api/indexers/__pycache__/__init__.cpython-310.pyc +0 -0
  32. falcon/lib/python3.10/site-packages/pandas/api/interchange/__init__.py +8 -0
  33. falcon/lib/python3.10/site-packages/pandas/api/interchange/__pycache__/__init__.cpython-310.pyc +0 -0
  34. falcon/lib/python3.10/site-packages/pandas/api/types/__init__.py +23 -0
  35. falcon/lib/python3.10/site-packages/pandas/api/types/__pycache__/__init__.cpython-310.pyc +0 -0
  36. falcon/lib/python3.10/site-packages/pandas/api/typing/__init__.py +55 -0
  37. falcon/lib/python3.10/site-packages/pandas/api/typing/__pycache__/__init__.cpython-310.pyc +0 -0
  38. falcon/lib/python3.10/site-packages/pandas/compat/__init__.py +199 -0
  39. falcon/lib/python3.10/site-packages/pandas/compat/__pycache__/__init__.cpython-310.pyc +0 -0
  40. falcon/lib/python3.10/site-packages/pandas/compat/__pycache__/_constants.cpython-310.pyc +0 -0
  41. falcon/lib/python3.10/site-packages/pandas/compat/__pycache__/_optional.cpython-310.pyc +0 -0
  42. falcon/lib/python3.10/site-packages/pandas/compat/__pycache__/compressors.cpython-310.pyc +0 -0
  43. falcon/lib/python3.10/site-packages/pandas/compat/__pycache__/pickle_compat.cpython-310.pyc +0 -0
  44. falcon/lib/python3.10/site-packages/pandas/compat/__pycache__/pyarrow.cpython-310.pyc +0 -0
  45. falcon/lib/python3.10/site-packages/pandas/compat/_constants.py +30 -0
  46. falcon/lib/python3.10/site-packages/pandas/compat/_optional.py +168 -0
  47. falcon/lib/python3.10/site-packages/pandas/compat/compressors.py +77 -0
  48. falcon/lib/python3.10/site-packages/pandas/compat/numpy/__init__.py +53 -0
  49. falcon/lib/python3.10/site-packages/pandas/compat/numpy/__pycache__/__init__.cpython-310.pyc +0 -0
  50. falcon/lib/python3.10/site-packages/pandas/compat/numpy/__pycache__/function.cpython-310.pyc +0 -0
.gitattributes CHANGED
@@ -566,3 +566,4 @@ falcon/lib/python3.10/site-packages/sklearn/cluster/_k_means_minibatch.cpython-3
566
  evalkit_tf437/lib/python3.10/site-packages/sympy/polys/matrices/__pycache__/domainmatrix.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
567
  falcon/lib/python3.10/site-packages/sklearn/preprocessing/_csr_polynomial_expansion.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
568
  falcon/lib/python3.10/site-packages/PIL/_imagingmath.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
 
 
566
  evalkit_tf437/lib/python3.10/site-packages/sympy/polys/matrices/__pycache__/domainmatrix.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
567
  falcon/lib/python3.10/site-packages/sklearn/preprocessing/_csr_polynomial_expansion.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
568
  falcon/lib/python3.10/site-packages/PIL/_imagingmath.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
569
+ falcon/lib/python3.10/site-packages/pandas/tests/tools/__pycache__/test_to_datetime.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
falcon/lib/python3.10/site-packages/httpcore-0.17.3.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
falcon/lib/python3.10/site-packages/httpcore-0.17.3.dist-info/LICENSE.md ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/).
2
+ All rights reserved.
3
+
4
+ Redistribution and use in source and binary forms, with or without
5
+ modification, are permitted provided that the following conditions are met:
6
+
7
+ * Redistributions of source code must retain the above copyright notice, this
8
+ list of conditions and the following disclaimer.
9
+
10
+ * Redistributions in binary form must reproduce the above copyright notice,
11
+ this list of conditions and the following disclaimer in the documentation
12
+ and/or other materials provided with the distribution.
13
+
14
+ * Neither the name of the copyright holder nor the names of its
15
+ contributors may be used to endorse or promote products derived from
16
+ this software without specific prior written permission.
17
+
18
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
22
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
23
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
24
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
25
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
26
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
falcon/lib/python3.10/site-packages/httpcore-0.17.3.dist-info/METADATA ADDED
@@ -0,0 +1,542 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: httpcore
3
+ Version: 0.17.3
4
+ Summary: A minimal low-level HTTP client.
5
+ Home-page: https://github.com/encode/httpcore
6
+ Author: Tom Christie
7
+ Author-email: tom@tomchristie.com
8
+ License: BSD
9
+ Project-URL: Documentation, https://www.encode.io/httpcore
10
+ Project-URL: Source, https://github.com/encode/httpcore
11
+ Classifier: Development Status :: 3 - Alpha
12
+ Classifier: Environment :: Web Environment
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: License :: OSI Approved :: BSD License
15
+ Classifier: Operating System :: OS Independent
16
+ Classifier: Topic :: Internet :: WWW/HTTP
17
+ Classifier: Framework :: AsyncIO
18
+ Classifier: Framework :: Trio
19
+ Classifier: Programming Language :: Python :: 3
20
+ Classifier: Programming Language :: Python :: 3.7
21
+ Classifier: Programming Language :: Python :: 3.8
22
+ Classifier: Programming Language :: Python :: 3.9
23
+ Classifier: Programming Language :: Python :: 3.10
24
+ Classifier: Programming Language :: Python :: 3.11
25
+ Classifier: Programming Language :: Python :: 3 :: Only
26
+ Requires-Python: >=3.7
27
+ Description-Content-Type: text/markdown
28
+ License-File: LICENSE.md
29
+ Requires-Dist: h11 (<0.15,>=0.13)
30
+ Requires-Dist: sniffio (==1.*)
31
+ Requires-Dist: anyio (<5.0,>=3.0)
32
+ Requires-Dist: certifi
33
+ Provides-Extra: http2
34
+ Requires-Dist: h2 (<5,>=3) ; extra == 'http2'
35
+ Provides-Extra: socks
36
+ Requires-Dist: socksio (==1.*) ; extra == 'socks'
37
+
38
+ # HTTP Core
39
+
40
+ [![Test Suite](https://github.com/encode/httpcore/workflows/Test%20Suite/badge.svg)](https://github.com/encode/httpcore/actions)
41
+ [![Package version](https://badge.fury.io/py/httpcore.svg)](https://pypi.org/project/httpcore/)
42
+
43
+ > *Do one thing, and do it well.*
44
+
45
+ The HTTP Core package provides a minimal low-level HTTP client, which does
46
+ one thing only. Sending HTTP requests.
47
+
48
+ It does not provide any high level model abstractions over the API,
49
+ does not handle redirects, multipart uploads, building authentication headers,
50
+ transparent HTTP caching, URL parsing, session cookie handling,
51
+ content or charset decoding, handling JSON, environment based configuration
52
+ defaults, or any of that Jazz.
53
+
54
+ Some things HTTP Core does do:
55
+
56
+ * Sending HTTP requests.
57
+ * Thread-safe / task-safe connection pooling.
58
+ * HTTP(S) proxy & SOCKS proxy support.
59
+ * Supports HTTP/1.1 and HTTP/2.
60
+ * Provides both sync and async interfaces.
61
+ * Async backend support for `asyncio` and `trio`.
62
+
63
+ ## Requirements
64
+
65
+ Python 3.7+
66
+
67
+ ## Installation
68
+
69
+ For HTTP/1.1 only support, install with:
70
+
71
+ ```shell
72
+ $ pip install httpcore
73
+ ```
74
+
75
+ For HTTP/1.1 and HTTP/2 support, install with:
76
+
77
+ ```shell
78
+ $ pip install httpcore[http2]
79
+ ```
80
+
81
+ For SOCKS proxy support, install with:
82
+
83
+ ```shell
84
+ $ pip install httpcore[socks]
85
+ ```
86
+
87
+ # Sending requests
88
+
89
+ Send an HTTP request:
90
+
91
+ ```python
92
+ import httpcore
93
+
94
+ response = httpcore.request("GET", "https://www.example.com/")
95
+
96
+ print(response)
97
+ # <Response [200]>
98
+ print(response.status)
99
+ # 200
100
+ print(response.headers)
101
+ # [(b'Accept-Ranges', b'bytes'), (b'Age', b'557328'), (b'Cache-Control', b'max-age=604800'), ...]
102
+ print(response.content)
103
+ # b'<!doctype html>\n<html>\n<head>\n<title>Example Domain</title>\n\n<meta charset="utf-8"/>\n ...'
104
+ ```
105
+
106
+ The top-level `httpcore.request()` function is provided for convenience. In practice whenever you're working with `httpcore` you'll want to use the connection pooling functionality that it provides.
107
+
108
+ ```python
109
+ import httpcore
110
+
111
+ http = httpcore.ConnectionPool()
112
+ response = http.request("GET", "https://www.example.com/")
113
+ ```
114
+
115
+ Once you're ready to get going, [head over to the documentation](https://www.encode.io/httpcore/).
116
+
117
+ ## Motivation
118
+
119
+ You *probably* don't want to be using HTTP Core directly. It might make sense if
120
+ you're writing something like a proxy service in Python, and you just want
121
+ something at the lowest possible level, but more typically you'll want to use
122
+ a higher level client library, such as `httpx`.
123
+
124
+ The motivation for `httpcore` is:
125
+
126
+ * To provide a reusable low-level client library, that other packages can then build on top of.
127
+ * To provide a *really clear interface split* between the networking code and client logic,
128
+ so that each is easier to understand and reason about in isolation.
129
+
130
+
131
+ # Changelog
132
+
133
+ All notable changes to this project will be documented in this file.
134
+
135
+ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
136
+
137
+ ## 0.17.3 (5th July 2023)
138
+
139
+ - Support async cancellations, ensuring that the connection pool is left in a clean state when cancellations occur. (#726)
140
+ - The networking backend interface has [been added to the public API](https://www.encode.io/httpcore/network-backends). Some classes which were previously private implementation detail are now part of the top-level public API. (#699)
141
+ - Graceful handling of HTTP/2 GoAway frames, with requests being transparently retried on a new connection. (#730)
142
+ - Add exceptions when a synchronous `trace callback` is passed to an asynchronous request or an asynchronous `trace callback` is passed to a synchronous request. (#717)
143
+
144
+ ## 0.17.2 (May 23th, 2023)
145
+
146
+ - Add `socket_options` argument to `ConnectionPool` and `HTTProxy` classes. (#668)
147
+ - Improve logging with per-module logger names. (#690)
148
+ - Add `sni_hostname` request extension. (#696)
149
+ - Resolve race condition during import of `anyio` package. (#692)
150
+ - Enable TCP_NODELAY for all synchronous sockets. (#651)
151
+
152
+ ## 0.17.1 (May 17th, 2023)
153
+
154
+ - If 'retries' is set, then allow retries if an SSL handshake error occurs. (#669)
155
+ - Improve correctness of tracebacks on network exceptions, by raising properly chained exceptions. (#678)
156
+ - Prevent connection-hanging behaviour when HTTP/2 connections are closed by a server-sent 'GoAway' frame. (#679)
157
+ - Fix edge-case exception when removing requests from the connection pool. (#680)
158
+ - Fix pool timeout edge-case. (#688)
159
+
160
+ ## 0.17.0 (March 16th, 2023)
161
+
162
+ - Add DEBUG level logging. (#648)
163
+ - Respect HTTP/2 max concurrent streams when settings updates are sent by server. (#652)
164
+ - Increase the allowable HTTP header size to 100kB. (#647)
165
+ - Add `retries` option to SOCKS proxy classes. (#643)
166
+
167
+ ## 0.16.3 (December 20th, 2022)
168
+
169
+ - Allow `ws` and `wss` schemes. Allows us to properly support websocket upgrade connections. (#625)
170
+ - Forwarding HTTP proxies use a connection-per-remote-host. Required by some proxy implementations. (#637)
171
+ - Don't raise `RuntimeError` when closing a connection pool with active connections. Removes some error cases when cancellations are used. (#631)
172
+ - Lazy import `anyio`, so that it's no longer a hard dependancy, and isn't imported if unused. (#639)
173
+
174
+ ## 0.16.2 (November 25th, 2022)
175
+
176
+ - Revert 'Fix async cancellation behaviour', which introduced race conditions. (#627)
177
+ - Raise `RuntimeError` if attempting to us UNIX domain sockets on Windows. (#619)
178
+
179
+ ## 0.16.1 (November 17th, 2022)
180
+
181
+ - Fix HTTP/1.1 interim informational responses, such as "100 Continue". (#605)
182
+
183
+ ## 0.16.0 (October 11th, 2022)
184
+
185
+ - Support HTTP/1.1 informational responses. (#581)
186
+ - Fix async cancellation behaviour. (#580)
187
+ - Support `h11` 0.14. (#579)
188
+
189
+ ## 0.15.0 (May 17th, 2022)
190
+
191
+ - Drop Python 3.6 support (#535)
192
+ - Ensure HTTP proxy CONNECT requests include `timeout` configuration. (#506)
193
+ - Switch to explicit `typing.Optional` for type hints. (#513)
194
+ - For `trio` map OSError exceptions to `ConnectError`. (#543)
195
+
196
+ ## 0.14.7 (February 4th, 2022)
197
+
198
+ - Requests which raise a PoolTimeout need to be removed from the pool queue. (#502)
199
+ - Fix AttributeError that happened when Socks5Connection were terminated. (#501)
200
+
201
+ ## 0.14.6 (February 1st, 2022)
202
+
203
+ - Fix SOCKS support for `http://` URLs. (#492)
204
+ - Resolve race condition around exceptions during streaming a response. (#491)
205
+
206
+ ## 0.14.5 (January 18th, 2022)
207
+
208
+ - SOCKS proxy support. (#478)
209
+ - Add proxy_auth argument to HTTPProxy. (#481)
210
+ - Improve error message on 'RemoteProtocolError' exception when server disconnects without sending a response. (#479)
211
+
212
+ ## 0.14.4 (January 5th, 2022)
213
+
214
+ - Support HTTP/2 on HTTPS tunnelling proxies. (#468)
215
+ - Fix proxy headers missing on HTTP forwarding. (#456)
216
+ - Only instantiate SSL context if required. (#457)
217
+ - More robust HTTP/2 handling. (#253, #439, #440, #441)
218
+
219
+ ## 0.14.3 (November 17th, 2021)
220
+
221
+ - Fix race condition when removing closed connections from the pool. (#437)
222
+
223
+ ## 0.14.2 (November 16th, 2021)
224
+
225
+ - Failed connections no longer remain in the pool. (Pull #433)
226
+
227
+ ## 0.14.1 (November 12th, 2021)
228
+
229
+ - `max_connections` becomes optional. (Pull #429)
230
+ - `certifi` is now included in the install dependancies. (Pull #428)
231
+ - `h2` is now strictly optional. (Pull #428)
232
+
233
+ ## 0.14.0 (November 11th, 2021)
234
+
235
+ The 0.14 release is a complete reworking of `httpcore`, comprehensively addressing some underlying issues in the connection pooling, as well as substantially redesigning the API to be more user friendly.
236
+
237
+ Some of the lower-level API design also makes the components more easily testable in isolation, and the package now has 100% test coverage.
238
+
239
+ See [discussion #419](https://github.com/encode/httpcore/discussions/419) for a little more background.
240
+
241
+ There's some other neat bits in there too, such as the "trace" extension, which gives a hook into inspecting the internal events that occur during the request/response cycle. This extension is needed for the HTTPX cli, in order to...
242
+
243
+ * Log the point at which the connection is established, and the IP/port on which it is made.
244
+ * Determine if the outgoing request should log as HTTP/1.1 or HTTP/2, rather than having to assume it's HTTP/2 if the --http2 flag was passed. (Which may not actually be true.)
245
+ * Log SSL version info / certificate info.
246
+
247
+ Note that `curio` support is not currently available in 0.14.0. If you're using `httpcore` with `curio` please get in touch, so we can assess if we ought to prioritize it as a feature or not.
248
+
249
+ ## 0.13.7 (September 13th, 2021)
250
+
251
+ - Fix broken error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #403)
252
+
253
+ ## 0.13.6 (June 15th, 2021)
254
+
255
+ ### Fixed
256
+
257
+ - Close sockets when read or write timeouts occur. (Pull #365)
258
+
259
+ ## 0.13.5 (June 14th, 2021)
260
+
261
+ ### Fixed
262
+
263
+ - Resolved niggles with AnyIO EOF behaviours. (Pull #358, #362)
264
+
265
+ ## 0.13.4 (June 9th, 2021)
266
+
267
+ ### Added
268
+
269
+ - Improved error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #354)
270
+
271
+ ### Fixed
272
+
273
+ - Switched to `anyio` as the default backend implementation when running with `asyncio`. Resolves some awkward [TLS timeout issues](https://github.com/encode/httpx/discussions/1511).
274
+
275
+ ## 0.13.3 (May 6th, 2021)
276
+
277
+ ### Added
278
+
279
+ - Support HTTP/2 prior knowledge, using `httpcore.SyncConnectionPool(http1=False)`. (Pull #333)
280
+
281
+ ### Fixed
282
+
283
+ - Handle cases where environment does not provide `select.poll` support. (Pull #331)
284
+
285
+ ## 0.13.2 (April 29th, 2021)
286
+
287
+ ### Added
288
+
289
+ - Improve error message for specific case of `RemoteProtocolError` where server disconnects without sending a response. (Pull #313)
290
+
291
+ ## 0.13.1 (April 28th, 2021)
292
+
293
+ ### Fixed
294
+
295
+ - More resiliant testing for closed connections. (Pull #311)
296
+ - Don't raise exceptions on ungraceful connection closes. (Pull #310)
297
+
298
+ ## 0.13.0 (April 21st, 2021)
299
+
300
+ The 0.13 release updates the core API in order to match the HTTPX Transport API,
301
+ introduced in HTTPX 0.18 onwards.
302
+
303
+ An example of making requests with the new interface is:
304
+
305
+ ```python
306
+ with httpcore.SyncConnectionPool() as http:
307
+ status_code, headers, stream, extensions = http.handle_request(
308
+ method=b'GET',
309
+ url=(b'https', b'example.org', 443, b'/'),
310
+ headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')]
311
+ stream=httpcore.ByteStream(b''),
312
+ extensions={}
313
+ )
314
+ body = stream.read()
315
+ print(status_code, body)
316
+ ```
317
+
318
+ ### Changed
319
+
320
+ - The `.request()` method is now `handle_request()`. (Pull #296)
321
+ - The `.arequest()` method is now `.handle_async_request()`. (Pull #296)
322
+ - The `headers` argument is no longer optional. (Pull #296)
323
+ - The `stream` argument is no longer optional. (Pull #296)
324
+ - The `ext` argument is now named `extensions`, and is no longer optional. (Pull #296)
325
+ - The `"reason"` extension keyword is now named `"reason_phrase"`. (Pull #296)
326
+ - The `"reason_phrase"` and `"http_version"` extensions now use byte strings for their values. (Pull #296)
327
+ - The `httpcore.PlainByteStream()` class becomes `httpcore.ByteStream()`. (Pull #296)
328
+
329
+ ### Added
330
+
331
+ - Streams now support a `.read()` interface. (Pull #296)
332
+
333
+ ### Fixed
334
+
335
+ - Task cancellation no longer leaks connections from the connection pool. (Pull #305)
336
+
337
+ ## 0.12.3 (December 7th, 2020)
338
+
339
+ ### Fixed
340
+
341
+ - Abort SSL connections on close rather than waiting for remote EOF when using `asyncio`. (Pull #167)
342
+ - Fix exception raised in case of connect timeouts when using the `anyio` backend. (Pull #236)
343
+ - Fix `Host` header precedence for `:authority` in HTTP/2. (Pull #241, #243)
344
+ - Handle extra edge case when detecting for socket readability when using `asyncio`. (Pull #242, #244)
345
+ - Fix `asyncio` SSL warning when using proxy tunneling. (Pull #249)
346
+
347
+ ## 0.12.2 (November 20th, 2020)
348
+
349
+ ### Fixed
350
+
351
+ - Properly wrap connect errors on the asyncio backend. (Pull #235)
352
+ - Fix `ImportError` occurring on Python 3.9 when using the HTTP/1.1 sync client in a multithreaded context. (Pull #237)
353
+
354
+ ## 0.12.1 (November 7th, 2020)
355
+
356
+ ### Added
357
+
358
+ - Add connect retries. (Pull #221)
359
+
360
+ ### Fixed
361
+
362
+ - Tweak detection of dropped connections, resolving an issue with open files limits on Linux. (Pull #185)
363
+ - Avoid leaking connections when establishing an HTTP tunnel to a proxy has failed. (Pull #223)
364
+ - Properly wrap OS errors when using `trio`. (Pull #225)
365
+
366
+ ## 0.12.0 (October 6th, 2020)
367
+
368
+ ### Changed
369
+
370
+ - HTTP header casing is now preserved, rather than always sent in lowercase. (#216 and python-hyper/h11#104)
371
+
372
+ ### Added
373
+
374
+ - Add Python 3.9 to officially supported versions.
375
+
376
+ ### Fixed
377
+
378
+ - Gracefully handle a stdlib asyncio bug when a connection is closed while it is in a paused-for-reading state. (#201)
379
+
380
+ ## 0.11.1 (September 28nd, 2020)
381
+
382
+ ### Fixed
383
+
384
+ - Add await to async semaphore release() coroutine (#197)
385
+ - Drop incorrect curio classifier (#192)
386
+
387
+ ## 0.11.0 (September 22nd, 2020)
388
+
389
+ The Transport API with 0.11.0 has a couple of significant changes.
390
+
391
+ Firstly we've moved changed the request interface in order to allow extensions, which will later enable us to support features
392
+ such as trailing headers, HTTP/2 server push, and CONNECT/Upgrade connections.
393
+
394
+ The interface changes from:
395
+
396
+ ```python
397
+ def request(method, url, headers, stream, timeout):
398
+ return (http_version, status_code, reason, headers, stream)
399
+ ```
400
+
401
+ To instead including an optional dictionary of extensions on the request and response:
402
+
403
+ ```python
404
+ def request(method, url, headers, stream, ext):
405
+ return (status_code, headers, stream, ext)
406
+ ```
407
+
408
+ Having an open-ended extensions point will allow us to add later support for various optional features, that wouldn't otherwise be supported without these API changes.
409
+
410
+ In particular:
411
+
412
+ * Trailing headers support.
413
+ * HTTP/2 Server Push
414
+ * sendfile.
415
+ * Exposing raw connection on CONNECT, Upgrade, HTTP/2 bi-di streaming.
416
+ * Exposing debug information out of the API, including template name, template context.
417
+
418
+ Currently extensions are limited to:
419
+
420
+ * request: `timeout` - Optional. Timeout dictionary.
421
+ * response: `http_version` - Optional. Include the HTTP version used on the response.
422
+ * response: `reason` - Optional. Include the reason phrase used on the response. Only valid with HTTP/1.*.
423
+
424
+ See https://github.com/encode/httpx/issues/1274#issuecomment-694884553 for the history behind this.
425
+
426
+ Secondly, the async version of `request` is now namespaced as `arequest`.
427
+
428
+ This allows concrete transports to support both sync and async implementations on the same class.
429
+
430
+ ### Added
431
+
432
+ - Add curio support. (Pull #168)
433
+ - Add anyio support, with `backend="anyio"`. (Pull #169)
434
+
435
+ ### Changed
436
+
437
+ - Update the Transport API to use 'ext' for optional extensions. (Pull #190)
438
+ - Update the Transport API to use `.request` and `.arequest` so implementations can support both sync and async. (Pull #189)
439
+
440
+ ## 0.10.2 (August 20th, 2020)
441
+
442
+ ### Added
443
+
444
+ - Added Unix Domain Socket support. (Pull #139)
445
+
446
+ ### Fixed
447
+
448
+ - Always include the port on proxy CONNECT requests. (Pull #154)
449
+ - Fix `max_keepalive_connections` configuration. (Pull #153)
450
+ - Fixes behaviour in HTTP/1.1 where server disconnects can be used to signal the end of the response body. (Pull #164)
451
+
452
+ ## 0.10.1 (August 7th, 2020)
453
+
454
+ - Include `max_keepalive_connections` on `AsyncHTTPProxy`/`SyncHTTPProxy` classes.
455
+
456
+ ## 0.10.0 (August 7th, 2020)
457
+
458
+ The most notable change in the 0.10.0 release is that HTTP/2 support is now fully optional.
459
+
460
+ Use either `pip install httpcore` for HTTP/1.1 support only, or `pip install httpcore[http2]` for HTTP/1.1 and HTTP/2 support.
461
+
462
+ ### Added
463
+
464
+ - HTTP/2 support becomes optional. (Pull #121, #130)
465
+ - Add `local_address=...` support. (Pull #100, #134)
466
+ - Add `PlainByteStream`, `IteratorByteStream`, `AsyncIteratorByteStream`. The `AsyncByteSteam` and `SyncByteStream` classes are now pure interface classes. (#133)
467
+ - Add `LocalProtocolError`, `RemoteProtocolError` exceptions. (Pull #129)
468
+ - Add `UnsupportedProtocol` exception. (Pull #128)
469
+ - Add `.get_connection_info()` method. (Pull #102, #137)
470
+ - Add better TRACE logs. (Pull #101)
471
+
472
+ ### Changed
473
+
474
+ - `max_keepalive` is deprecated in favour of `max_keepalive_connections`. (Pull #140)
475
+
476
+ ### Fixed
477
+
478
+ - Improve handling of server disconnects. (Pull #112)
479
+
480
+ ## 0.9.1 (May 27th, 2020)
481
+
482
+ ### Fixed
483
+
484
+ - Proper host resolution for sync case, including IPv6 support. (Pull #97)
485
+ - Close outstanding connections when connection pool is closed. (Pull #98)
486
+
487
+ ## 0.9.0 (May 21th, 2020)
488
+
489
+ ### Changed
490
+
491
+ - URL port becomes an `Optional[int]` instead of `int`. (Pull #92)
492
+
493
+ ### Fixed
494
+
495
+ - Honor HTTP/2 max concurrent streams settings. (Pull #89, #90)
496
+ - Remove incorrect debug log. (Pull #83)
497
+
498
+ ## 0.8.4 (May 11th, 2020)
499
+
500
+ ### Added
501
+
502
+ - Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables
503
+ and TRACE level logging. (Pull #79)
504
+
505
+ ### Fixed
506
+
507
+ - Reuse of connections on HTTP/2 in close concurrency situations. (Pull #81)
508
+
509
+ ## 0.8.3 (May 6rd, 2020)
510
+
511
+ ### Fixed
512
+
513
+ - Include `Host` and `Accept` headers on proxy "CONNECT" requests.
514
+ - De-duplicate any headers also contained in proxy_headers.
515
+ - HTTP/2 flag not being passed down to proxy connections.
516
+
517
+ ## 0.8.2 (May 3rd, 2020)
518
+
519
+ ### Fixed
520
+
521
+ - Fix connections using proxy forwarding requests not being added to the
522
+ connection pool properly. (Pull #70)
523
+
524
+ ## 0.8.1 (April 30th, 2020)
525
+
526
+ ### Changed
527
+
528
+ - Allow inherintance of both `httpcore.AsyncByteStream`, `httpcore.SyncByteStream` without type conflicts.
529
+
530
+ ## 0.8.0 (April 30th, 2020)
531
+
532
+ ### Fixed
533
+
534
+ - Fixed tunnel proxy support.
535
+
536
+ ### Added
537
+
538
+ - New `TimeoutException` base class.
539
+
540
+ ## 0.7.0 (March 5th, 2020)
541
+
542
+ - First integration with HTTPX.
falcon/lib/python3.10/site-packages/httpcore-0.17.3.dist-info/top_level.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ httpcore
2
+ httpcore/_async
3
+ httpcore/_backends
4
+ httpcore/_sync
falcon/lib/python3.10/site-packages/pandas/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (6.95 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/__pycache__/_typing.cpython-310.pyc ADDED
Binary file (11.5 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/__pycache__/_version.cpython-310.pyc ADDED
Binary file (14.5 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/__pycache__/_version_meson.cpython-310.pyc ADDED
Binary file (250 Bytes). View file
 
falcon/lib/python3.10/site-packages/pandas/__pycache__/conftest.cpython-310.pyc ADDED
Binary file (46.1 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/__pycache__/testing.cpython-310.pyc ADDED
Binary file (406 Bytes). View file
 
falcon/lib/python3.10/site-packages/pandas/_config/__pycache__/config.cpython-310.pyc ADDED
Binary file (26.4 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/_testing/__init__.py ADDED
@@ -0,0 +1,639 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from decimal import Decimal
4
+ import operator
5
+ import os
6
+ from sys import byteorder
7
+ from typing import (
8
+ TYPE_CHECKING,
9
+ Callable,
10
+ ContextManager,
11
+ cast,
12
+ )
13
+ import warnings
14
+
15
+ import numpy as np
16
+
17
+ from pandas._config.localization import (
18
+ can_set_locale,
19
+ get_locales,
20
+ set_locale,
21
+ )
22
+
23
+ from pandas.compat import pa_version_under10p1
24
+
25
+ from pandas.core.dtypes.common import is_string_dtype
26
+
27
+ import pandas as pd
28
+ from pandas import (
29
+ ArrowDtype,
30
+ DataFrame,
31
+ Index,
32
+ MultiIndex,
33
+ RangeIndex,
34
+ Series,
35
+ )
36
+ from pandas._testing._io import (
37
+ round_trip_localpath,
38
+ round_trip_pathlib,
39
+ round_trip_pickle,
40
+ write_to_compressed,
41
+ )
42
+ from pandas._testing._warnings import (
43
+ assert_produces_warning,
44
+ maybe_produces_warning,
45
+ )
46
+ from pandas._testing.asserters import (
47
+ assert_almost_equal,
48
+ assert_attr_equal,
49
+ assert_categorical_equal,
50
+ assert_class_equal,
51
+ assert_contains_all,
52
+ assert_copy,
53
+ assert_datetime_array_equal,
54
+ assert_dict_equal,
55
+ assert_equal,
56
+ assert_extension_array_equal,
57
+ assert_frame_equal,
58
+ assert_index_equal,
59
+ assert_indexing_slices_equivalent,
60
+ assert_interval_array_equal,
61
+ assert_is_sorted,
62
+ assert_is_valid_plot_return_object,
63
+ assert_metadata_equivalent,
64
+ assert_numpy_array_equal,
65
+ assert_period_array_equal,
66
+ assert_series_equal,
67
+ assert_sp_array_equal,
68
+ assert_timedelta_array_equal,
69
+ raise_assert_detail,
70
+ )
71
+ from pandas._testing.compat import (
72
+ get_dtype,
73
+ get_obj,
74
+ )
75
+ from pandas._testing.contexts import (
76
+ assert_cow_warning,
77
+ decompress_file,
78
+ ensure_clean,
79
+ raises_chained_assignment_error,
80
+ set_timezone,
81
+ use_numexpr,
82
+ with_csv_dialect,
83
+ )
84
+ from pandas.core.arrays import (
85
+ BaseMaskedArray,
86
+ ExtensionArray,
87
+ NumpyExtensionArray,
88
+ )
89
+ from pandas.core.arrays._mixins import NDArrayBackedExtensionArray
90
+ from pandas.core.construction import extract_array
91
+
92
+ if TYPE_CHECKING:
93
+ from pandas._typing import (
94
+ Dtype,
95
+ NpDtype,
96
+ )
97
+
98
+ from pandas.core.arrays import ArrowExtensionArray
99
+
100
+ UNSIGNED_INT_NUMPY_DTYPES: list[NpDtype] = ["uint8", "uint16", "uint32", "uint64"]
101
+ UNSIGNED_INT_EA_DTYPES: list[Dtype] = ["UInt8", "UInt16", "UInt32", "UInt64"]
102
+ SIGNED_INT_NUMPY_DTYPES: list[NpDtype] = [int, "int8", "int16", "int32", "int64"]
103
+ SIGNED_INT_EA_DTYPES: list[Dtype] = ["Int8", "Int16", "Int32", "Int64"]
104
+ ALL_INT_NUMPY_DTYPES = UNSIGNED_INT_NUMPY_DTYPES + SIGNED_INT_NUMPY_DTYPES
105
+ ALL_INT_EA_DTYPES = UNSIGNED_INT_EA_DTYPES + SIGNED_INT_EA_DTYPES
106
+ ALL_INT_DTYPES: list[Dtype] = [*ALL_INT_NUMPY_DTYPES, *ALL_INT_EA_DTYPES]
107
+
108
+ FLOAT_NUMPY_DTYPES: list[NpDtype] = [float, "float32", "float64"]
109
+ FLOAT_EA_DTYPES: list[Dtype] = ["Float32", "Float64"]
110
+ ALL_FLOAT_DTYPES: list[Dtype] = [*FLOAT_NUMPY_DTYPES, *FLOAT_EA_DTYPES]
111
+
112
+ COMPLEX_DTYPES: list[Dtype] = [complex, "complex64", "complex128"]
113
+ STRING_DTYPES: list[Dtype] = [str, "str", "U"]
114
+ COMPLEX_FLOAT_DTYPES: list[Dtype] = [*COMPLEX_DTYPES, *FLOAT_NUMPY_DTYPES]
115
+
116
+ DATETIME64_DTYPES: list[Dtype] = ["datetime64[ns]", "M8[ns]"]
117
+ TIMEDELTA64_DTYPES: list[Dtype] = ["timedelta64[ns]", "m8[ns]"]
118
+
119
+ BOOL_DTYPES: list[Dtype] = [bool, "bool"]
120
+ BYTES_DTYPES: list[Dtype] = [bytes, "bytes"]
121
+ OBJECT_DTYPES: list[Dtype] = [object, "object"]
122
+
123
+ ALL_REAL_NUMPY_DTYPES = FLOAT_NUMPY_DTYPES + ALL_INT_NUMPY_DTYPES
124
+ ALL_REAL_EXTENSION_DTYPES = FLOAT_EA_DTYPES + ALL_INT_EA_DTYPES
125
+ ALL_REAL_DTYPES: list[Dtype] = [*ALL_REAL_NUMPY_DTYPES, *ALL_REAL_EXTENSION_DTYPES]
126
+ ALL_NUMERIC_DTYPES: list[Dtype] = [*ALL_REAL_DTYPES, *COMPLEX_DTYPES]
127
+
128
+ ALL_NUMPY_DTYPES = (
129
+ ALL_REAL_NUMPY_DTYPES
130
+ + COMPLEX_DTYPES
131
+ + STRING_DTYPES
132
+ + DATETIME64_DTYPES
133
+ + TIMEDELTA64_DTYPES
134
+ + BOOL_DTYPES
135
+ + OBJECT_DTYPES
136
+ + BYTES_DTYPES
137
+ )
138
+
139
+ NARROW_NP_DTYPES = [
140
+ np.float16,
141
+ np.float32,
142
+ np.int8,
143
+ np.int16,
144
+ np.int32,
145
+ np.uint8,
146
+ np.uint16,
147
+ np.uint32,
148
+ ]
149
+
150
+ PYTHON_DATA_TYPES = [
151
+ str,
152
+ int,
153
+ float,
154
+ complex,
155
+ list,
156
+ tuple,
157
+ range,
158
+ dict,
159
+ set,
160
+ frozenset,
161
+ bool,
162
+ bytes,
163
+ bytearray,
164
+ memoryview,
165
+ ]
166
+
167
+ ENDIAN = {"little": "<", "big": ">"}[byteorder]
168
+
169
+ NULL_OBJECTS = [None, np.nan, pd.NaT, float("nan"), pd.NA, Decimal("NaN")]
170
+ NP_NAT_OBJECTS = [
171
+ cls("NaT", unit)
172
+ for cls in [np.datetime64, np.timedelta64]
173
+ for unit in [
174
+ "Y",
175
+ "M",
176
+ "W",
177
+ "D",
178
+ "h",
179
+ "m",
180
+ "s",
181
+ "ms",
182
+ "us",
183
+ "ns",
184
+ "ps",
185
+ "fs",
186
+ "as",
187
+ ]
188
+ ]
189
+
190
+ if not pa_version_under10p1:
191
+ import pyarrow as pa
192
+
193
+ UNSIGNED_INT_PYARROW_DTYPES = [pa.uint8(), pa.uint16(), pa.uint32(), pa.uint64()]
194
+ SIGNED_INT_PYARROW_DTYPES = [pa.int8(), pa.int16(), pa.int32(), pa.int64()]
195
+ ALL_INT_PYARROW_DTYPES = UNSIGNED_INT_PYARROW_DTYPES + SIGNED_INT_PYARROW_DTYPES
196
+ ALL_INT_PYARROW_DTYPES_STR_REPR = [
197
+ str(ArrowDtype(typ)) for typ in ALL_INT_PYARROW_DTYPES
198
+ ]
199
+
200
+ # pa.float16 doesn't seem supported
201
+ # https://github.com/apache/arrow/blob/master/python/pyarrow/src/arrow/python/helpers.cc#L86
202
+ FLOAT_PYARROW_DTYPES = [pa.float32(), pa.float64()]
203
+ FLOAT_PYARROW_DTYPES_STR_REPR = [
204
+ str(ArrowDtype(typ)) for typ in FLOAT_PYARROW_DTYPES
205
+ ]
206
+ DECIMAL_PYARROW_DTYPES = [pa.decimal128(7, 3)]
207
+ STRING_PYARROW_DTYPES = [pa.string()]
208
+ BINARY_PYARROW_DTYPES = [pa.binary()]
209
+
210
+ TIME_PYARROW_DTYPES = [
211
+ pa.time32("s"),
212
+ pa.time32("ms"),
213
+ pa.time64("us"),
214
+ pa.time64("ns"),
215
+ ]
216
+ DATE_PYARROW_DTYPES = [pa.date32(), pa.date64()]
217
+ DATETIME_PYARROW_DTYPES = [
218
+ pa.timestamp(unit=unit, tz=tz)
219
+ for unit in ["s", "ms", "us", "ns"]
220
+ for tz in [None, "UTC", "US/Pacific", "US/Eastern"]
221
+ ]
222
+ TIMEDELTA_PYARROW_DTYPES = [pa.duration(unit) for unit in ["s", "ms", "us", "ns"]]
223
+
224
+ BOOL_PYARROW_DTYPES = [pa.bool_()]
225
+
226
+ # TODO: Add container like pyarrow types:
227
+ # https://arrow.apache.org/docs/python/api/datatypes.html#factory-functions
228
+ ALL_PYARROW_DTYPES = (
229
+ ALL_INT_PYARROW_DTYPES
230
+ + FLOAT_PYARROW_DTYPES
231
+ + DECIMAL_PYARROW_DTYPES
232
+ + STRING_PYARROW_DTYPES
233
+ + BINARY_PYARROW_DTYPES
234
+ + TIME_PYARROW_DTYPES
235
+ + DATE_PYARROW_DTYPES
236
+ + DATETIME_PYARROW_DTYPES
237
+ + TIMEDELTA_PYARROW_DTYPES
238
+ + BOOL_PYARROW_DTYPES
239
+ )
240
+ ALL_REAL_PYARROW_DTYPES_STR_REPR = (
241
+ ALL_INT_PYARROW_DTYPES_STR_REPR + FLOAT_PYARROW_DTYPES_STR_REPR
242
+ )
243
+ else:
244
+ FLOAT_PYARROW_DTYPES_STR_REPR = []
245
+ ALL_INT_PYARROW_DTYPES_STR_REPR = []
246
+ ALL_PYARROW_DTYPES = []
247
+ ALL_REAL_PYARROW_DTYPES_STR_REPR = []
248
+
249
+ ALL_REAL_NULLABLE_DTYPES = (
250
+ FLOAT_NUMPY_DTYPES + ALL_REAL_EXTENSION_DTYPES + ALL_REAL_PYARROW_DTYPES_STR_REPR
251
+ )
252
+
253
+ arithmetic_dunder_methods = [
254
+ "__add__",
255
+ "__radd__",
256
+ "__sub__",
257
+ "__rsub__",
258
+ "__mul__",
259
+ "__rmul__",
260
+ "__floordiv__",
261
+ "__rfloordiv__",
262
+ "__truediv__",
263
+ "__rtruediv__",
264
+ "__pow__",
265
+ "__rpow__",
266
+ "__mod__",
267
+ "__rmod__",
268
+ ]
269
+
270
+ comparison_dunder_methods = ["__eq__", "__ne__", "__le__", "__lt__", "__ge__", "__gt__"]
271
+
272
+
273
+ # -----------------------------------------------------------------------------
274
+ # Comparators
275
+
276
+
277
def box_expected(expected, box_cls, transpose: bool = True):
    """
    Helper function to wrap the expected output of a test in a given box_class.

    Parameters
    ----------
    expected : np.ndarray, Index, Series
        The raw expected values.
    box_cls : {pd.array, Index, Series, DataFrame, np.ndarray, np.array, to_array}
        The container class (or callable) to wrap ``expected`` in.
    transpose : bool, default True
        Only used for the DataFrame case: transpose to a row-shaped frame
        and duplicate it to two rows.

    Returns
    -------
    subclass of box_cls

    Raises
    ------
    NotImplementedError
        If ``box_cls`` is not one of the supported containers.
    """
    if box_cls is pd.array:
        if isinstance(expected, RangeIndex):
            # pd.array would return an IntegerArray
            expected = NumpyExtensionArray(np.asarray(expected._values))
        else:
            expected = pd.array(expected, copy=False)
    elif box_cls is Index:
        with warnings.catch_warnings():
            # Suppress the "Dtype inference" FutureWarning the constructor
            # may emit.
            warnings.filterwarnings("ignore", "Dtype inference", category=FutureWarning)
            expected = Index(expected)
    elif box_cls is Series:
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", "Dtype inference", category=FutureWarning)
            expected = Series(expected)
    elif box_cls is DataFrame:
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", "Dtype inference", category=FutureWarning)
            expected = Series(expected).to_frame()
        if transpose:
            # for vector operations, we need a DataFrame to be a single-row,
            # not a single-column, in order to operate against non-DataFrame
            # vectors of the same length. But convert to two rows to avoid
            # single-row special cases in datetime arithmetic
            expected = expected.T
            expected = pd.concat([expected] * 2, ignore_index=True)
    elif box_cls is np.ndarray or box_cls is np.array:
        expected = np.array(expected)
    elif box_cls is to_array:
        expected = to_array(expected)
    else:
        raise NotImplementedError(box_cls)
    return expected
322
+
323
+
324
def to_array(obj):
    """
    Similar to pd.array, but does not cast numpy dtypes to nullable dtypes.
    """
    # Inputs without a ``dtype`` attribute (plain lists/tuples/scalars
    # sequences) simply become numpy arrays.
    if getattr(obj, "dtype", None) is None:
        return np.asarray(obj)

    # temporary implementation until we get pd.array in place
    return extract_array(obj, extract_numpy=True)
335
+
336
+
337
class SubclassedSeries(Series):
    """
    Series subclass used in tests to check that pandas operations preserve
    subclasses and propagate ``_metadata`` attributes.
    """

    # "testattr" (and "name") are listed in _metadata so pandas carries
    # them through operations.
    _metadata = ["testattr", "name"]

    @property
    def _constructor(self):
        # For testing, those properties return a generic callable, and not
        # the actual class. In this case that is equivalent, but it is to
        # ensure we don't rely on the property returning a class
        # See https://github.com/pandas-dev/pandas/pull/46018 and
        # https://github.com/pandas-dev/pandas/issues/32638 and linked issues
        return lambda *args, **kwargs: SubclassedSeries(*args, **kwargs)

    @property
    def _constructor_expanddim(self):
        # Dimension-expanding ops (e.g. ``to_frame``) build the companion
        # DataFrame subclass.
        return lambda *args, **kwargs: SubclassedDataFrame(*args, **kwargs)
352
+
353
+
354
class SubclassedDataFrame(DataFrame):
    """
    DataFrame subclass used in tests to check that pandas operations preserve
    subclasses and propagate ``_metadata`` attributes.
    """

    # "testattr" is listed in _metadata so pandas carries it through
    # operations.
    _metadata = ["testattr"]

    @property
    def _constructor(self):
        # Generic callable instead of the class itself; see the rationale
        # documented on SubclassedSeries._constructor.
        return lambda *args, **kwargs: SubclassedDataFrame(*args, **kwargs)

    @property
    def _constructor_sliced(self):
        # Slicing ops (e.g. selecting a single column) build the companion
        # Series subclass.
        return lambda *args, **kwargs: SubclassedSeries(*args, **kwargs)
364
+
365
+
366
def convert_rows_list_to_csv_str(rows_list: list[str]) -> str:
    """
    Convert list of CSV rows to single CSV-formatted string for current OS.

    This method is used for creating expected value of to_csv() method.

    Parameters
    ----------
    rows_list : List[str]
        Each element represents the row of csv.

    Returns
    -------
    str
        Expected output of to_csv() in current OS.
    """
    # Every row, including the last one, is terminated with the
    # platform-specific line separator.
    line_end = os.linesep
    return line_end.join(rows_list) + line_end
384
+
385
+
386
def external_error_raised(expected_exception: type[Exception]) -> ContextManager:
    """
    Helper function to mark pytest.raises that have an external error message.

    Parameters
    ----------
    expected_exception : Exception
        Expected error to raise.

    Returns
    -------
    Callable
        Regular `pytest.raises` function with `match` equal to `None`.
    """
    # Imported lazily so importing this module does not require pytest.
    import pytest

    # match=None means the exception message is not checked — appropriate
    # when the message originates outside pandas.
    return pytest.raises(expected_exception, match=None)
403
+
404
+
405
+ cython_table = pd.core.common._cython_table.items()
406
+
407
+
408
def get_cython_table_params(ndframe, func_names_and_expected):
    """
    Combine frame, functions from com._cython_table
    keys and expected result.

    Parameters
    ----------
    ndframe : DataFrame or Series
    func_names_and_expected : Sequence of two items
        The first item is a name of a NDFrame method ('sum', 'prod') etc.
        The second item is the expected return value.

    Returns
    -------
    list
        List of three items (DataFrame, function, expected result)
    """
    results = []
    for func_name, expected in func_names_and_expected:
        # Parametrize by the string name itself ...
        results.append((ndframe, func_name, expected))
        # ... and additionally by every cython-table callable that maps to
        # that name.
        results += [
            (ndframe, func, expected)
            for func, name in cython_table
            if name == func_name
        ]
    return results
434
+
435
+
436
def get_op_from_name(op_name: str) -> Callable:
    """
    The operator function for a given op name.

    Parameters
    ----------
    op_name : str
        The op name, in form of "add" or "__add__".

    Returns
    -------
    function
        A function performing the operation.
    """
    trimmed = op_name.strip("_")
    direct = getattr(operator, trimmed, None)
    if direct is not None:
        return direct

    # No such operator attribute; assume a reflected ("r"-prefixed) name
    # such as "radd" and swap the argument order.
    reflected = getattr(operator, trimmed[1:])

    def swapped(x, y):
        return reflected(y, x)

    return swapped
459
+
460
+
461
+ # -----------------------------------------------------------------------------
462
+ # Indexing test helpers
463
+
464
+
465
def getitem(x):
    # Identity wrapper: the object itself, for direct ``obj[...]`` access.
    return x


def setitem(x):
    # Identity wrapper: the object itself, used where ``obj[...] = value``
    # style access is exercised.
    return x


def loc(x):
    # Label-based indexer of the object.
    return x.loc


def iloc(x):
    # Position-based indexer of the object.
    return x.iloc


def at(x):
    # Scalar label-based indexer of the object.
    return x.at


def iat(x):
    # Scalar position-based indexer of the object.
    return x.iat
487
+
488
+
489
+ # -----------------------------------------------------------------------------
490
+
491
# Supported datetime64 resolutions, ordered coarse -> fine.
_UNITS = ["s", "ms", "us", "ns"]


def get_finest_unit(left: str, right: str):
    """
    Find the higher of two datetime64 units.
    """
    # Later entries in _UNITS are finer, so the larger index wins;
    # ties resolve to ``left``.
    return max(left, right, key=_UNITS.index)
501
+
502
+
503
def shares_memory(left, right) -> bool:
    """
    Pandas-compat for np.shares_memory.

    Recursively unwraps pandas containers on ``left`` until the check can
    be delegated to ``np.shares_memory`` (or to a backend-specific
    comparison), then reports whether the two objects view the same
    underlying data.

    Parameters
    ----------
    left, right : array-like or pandas object

    Returns
    -------
    bool

    Raises
    ------
    NotImplementedError
        For type combinations this helper does not know how to unwrap.
    """
    if isinstance(left, np.ndarray) and isinstance(right, np.ndarray):
        return np.shares_memory(left, right)
    elif isinstance(left, np.ndarray):
        # Call with reversed args to get to unpacking logic below.
        return shares_memory(right, left)

    if isinstance(left, RangeIndex):
        # RangeIndex is treated as never sharing memory.
        return False
    if isinstance(left, MultiIndex):
        return shares_memory(left._codes, right)
    if isinstance(left, (Index, Series)):
        return shares_memory(left._values, right)

    if isinstance(left, NDArrayBackedExtensionArray):
        return shares_memory(left._ndarray, right)
    if isinstance(left, pd.core.arrays.SparseArray):
        return shares_memory(left.sp_values, right)
    if isinstance(left, pd.core.arrays.IntervalArray):
        # Intervals share memory if either edge array does.
        return shares_memory(left._left, right) or shares_memory(left._right, right)

    if (
        isinstance(left, ExtensionArray)
        and is_string_dtype(left.dtype)
        and left.dtype.storage in ("pyarrow", "pyarrow_numpy")  # type: ignore[attr-defined]
    ):
        # https://github.com/pandas-dev/pandas/pull/43930#discussion_r736862669
        left = cast("ArrowExtensionArray", left)
        if (
            isinstance(right, ExtensionArray)
            and is_string_dtype(right.dtype)
            and right.dtype.storage in ("pyarrow", "pyarrow_numpy")  # type: ignore[attr-defined]
        ):
            right = cast("ArrowExtensionArray", right)
            left_pa_data = left._pa_array
            right_pa_data = right._pa_array
            # NOTE: only the first chunk's buffer on each side is compared,
            # so this assumes effectively single-chunk arrays.
            left_buf1 = left_pa_data.chunk(0).buffers()[1]
            right_buf1 = right_pa_data.chunk(0).buffers()[1]
            return left_buf1 == right_buf1

    if isinstance(left, BaseMaskedArray) and isinstance(right, BaseMaskedArray):
        # By convention, we'll say these share memory if they share *either*
        # the _data or the _mask
        return np.shares_memory(left._data, right._data) or np.shares_memory(
            left._mask, right._mask
        )

    if isinstance(left, DataFrame) and len(left._mgr.arrays) == 1:
        # Single-block DataFrame: compare against its lone backing array.
        arr = left._mgr.arrays[0]
        return shares_memory(arr, right)

    raise NotImplementedError(type(left), type(right))
558
+
559
+
560
+ __all__ = [
561
+ "ALL_INT_EA_DTYPES",
562
+ "ALL_INT_NUMPY_DTYPES",
563
+ "ALL_NUMPY_DTYPES",
564
+ "ALL_REAL_NUMPY_DTYPES",
565
+ "assert_almost_equal",
566
+ "assert_attr_equal",
567
+ "assert_categorical_equal",
568
+ "assert_class_equal",
569
+ "assert_contains_all",
570
+ "assert_copy",
571
+ "assert_datetime_array_equal",
572
+ "assert_dict_equal",
573
+ "assert_equal",
574
+ "assert_extension_array_equal",
575
+ "assert_frame_equal",
576
+ "assert_index_equal",
577
+ "assert_indexing_slices_equivalent",
578
+ "assert_interval_array_equal",
579
+ "assert_is_sorted",
580
+ "assert_is_valid_plot_return_object",
581
+ "assert_metadata_equivalent",
582
+ "assert_numpy_array_equal",
583
+ "assert_period_array_equal",
584
+ "assert_produces_warning",
585
+ "assert_series_equal",
586
+ "assert_sp_array_equal",
587
+ "assert_timedelta_array_equal",
588
+ "assert_cow_warning",
589
+ "at",
590
+ "BOOL_DTYPES",
591
+ "box_expected",
592
+ "BYTES_DTYPES",
593
+ "can_set_locale",
594
+ "COMPLEX_DTYPES",
595
+ "convert_rows_list_to_csv_str",
596
+ "DATETIME64_DTYPES",
597
+ "decompress_file",
598
+ "ENDIAN",
599
+ "ensure_clean",
600
+ "external_error_raised",
601
+ "FLOAT_EA_DTYPES",
602
+ "FLOAT_NUMPY_DTYPES",
603
+ "get_cython_table_params",
604
+ "get_dtype",
605
+ "getitem",
606
+ "get_locales",
607
+ "get_finest_unit",
608
+ "get_obj",
609
+ "get_op_from_name",
610
+ "iat",
611
+ "iloc",
612
+ "loc",
613
+ "maybe_produces_warning",
614
+ "NARROW_NP_DTYPES",
615
+ "NP_NAT_OBJECTS",
616
+ "NULL_OBJECTS",
617
+ "OBJECT_DTYPES",
618
+ "raise_assert_detail",
619
+ "raises_chained_assignment_error",
620
+ "round_trip_localpath",
621
+ "round_trip_pathlib",
622
+ "round_trip_pickle",
623
+ "setitem",
624
+ "set_locale",
625
+ "set_timezone",
626
+ "shares_memory",
627
+ "SIGNED_INT_EA_DTYPES",
628
+ "SIGNED_INT_NUMPY_DTYPES",
629
+ "STRING_DTYPES",
630
+ "SubclassedDataFrame",
631
+ "SubclassedSeries",
632
+ "TIMEDELTA64_DTYPES",
633
+ "to_array",
634
+ "UNSIGNED_INT_EA_DTYPES",
635
+ "UNSIGNED_INT_NUMPY_DTYPES",
636
+ "use_numexpr",
637
+ "with_csv_dialect",
638
+ "write_to_compressed",
639
+ ]
falcon/lib/python3.10/site-packages/pandas/_testing/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (14.3 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/_testing/__pycache__/_hypothesis.cpython-310.pyc ADDED
Binary file (1.75 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/_testing/__pycache__/_io.cpython-310.pyc ADDED
Binary file (4.38 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/_testing/__pycache__/_warnings.cpython-310.pyc ADDED
Binary file (6.49 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/_testing/__pycache__/asserters.cpython-310.pyc ADDED
Binary file (32.9 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/_testing/__pycache__/compat.cpython-310.pyc ADDED
Binary file (937 Bytes). View file
 
falcon/lib/python3.10/site-packages/pandas/_testing/__pycache__/contexts.cpython-310.pyc ADDED
Binary file (6.23 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/_testing/_hypothesis.py ADDED
@@ -0,0 +1,93 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Hypothesis data generator helpers.
3
+ """
4
+ from datetime import datetime
5
+
6
+ from hypothesis import strategies as st
7
+ from hypothesis.extra.dateutil import timezones as dateutil_timezones
8
+ from hypothesis.extra.pytz import timezones as pytz_timezones
9
+
10
+ from pandas.compat import is_platform_windows
11
+
12
+ import pandas as pd
13
+
14
+ from pandas.tseries.offsets import (
15
+ BMonthBegin,
16
+ BMonthEnd,
17
+ BQuarterBegin,
18
+ BQuarterEnd,
19
+ BYearBegin,
20
+ BYearEnd,
21
+ MonthBegin,
22
+ MonthEnd,
23
+ QuarterBegin,
24
+ QuarterEnd,
25
+ YearBegin,
26
+ YearEnd,
27
+ )
28
+
29
+ OPTIONAL_INTS = st.lists(st.one_of(st.integers(), st.none()), max_size=10, min_size=3)
30
+
31
+ OPTIONAL_FLOATS = st.lists(st.one_of(st.floats(), st.none()), max_size=10, min_size=3)
32
+
33
+ OPTIONAL_TEXT = st.lists(st.one_of(st.none(), st.text()), max_size=10, min_size=3)
34
+
35
+ OPTIONAL_DICTS = st.lists(
36
+ st.one_of(st.none(), st.dictionaries(st.text(), st.integers())),
37
+ max_size=10,
38
+ min_size=3,
39
+ )
40
+
41
+ OPTIONAL_LISTS = st.lists(
42
+ st.one_of(st.none(), st.lists(st.text(), max_size=10, min_size=3)),
43
+ max_size=10,
44
+ min_size=3,
45
+ )
46
+
47
+ OPTIONAL_ONE_OF_ALL = st.one_of(
48
+ OPTIONAL_DICTS, OPTIONAL_FLOATS, OPTIONAL_INTS, OPTIONAL_LISTS, OPTIONAL_TEXT
49
+ )
50
+
51
+ if is_platform_windows():
52
+ DATETIME_NO_TZ = st.datetimes(min_value=datetime(1900, 1, 1))
53
+ else:
54
+ DATETIME_NO_TZ = st.datetimes()
55
+
56
+ DATETIME_JAN_1_1900_OPTIONAL_TZ = st.datetimes(
57
+ min_value=pd.Timestamp(
58
+ 1900, 1, 1
59
+ ).to_pydatetime(), # pyright: ignore[reportGeneralTypeIssues]
60
+ max_value=pd.Timestamp(
61
+ 1900, 1, 1
62
+ ).to_pydatetime(), # pyright: ignore[reportGeneralTypeIssues]
63
+ timezones=st.one_of(st.none(), dateutil_timezones(), pytz_timezones()),
64
+ )
65
+
66
+ DATETIME_IN_PD_TIMESTAMP_RANGE_NO_TZ = st.datetimes(
67
+ min_value=pd.Timestamp.min.to_pydatetime(warn=False),
68
+ max_value=pd.Timestamp.max.to_pydatetime(warn=False),
69
+ )
70
+
71
+ INT_NEG_999_TO_POS_999 = st.integers(-999, 999)
72
+
73
+ # The strategy for each type is registered in conftest.py, as they don't carry
74
+ # enough runtime information (e.g. type hints) to infer how to build them.
75
+ YQM_OFFSET = st.one_of(
76
+ *map(
77
+ st.from_type,
78
+ [
79
+ MonthBegin,
80
+ MonthEnd,
81
+ BMonthBegin,
82
+ BMonthEnd,
83
+ QuarterBegin,
84
+ QuarterEnd,
85
+ BQuarterBegin,
86
+ BQuarterEnd,
87
+ YearBegin,
88
+ YearEnd,
89
+ BYearBegin,
90
+ BYearEnd,
91
+ ],
92
+ )
93
+ )
falcon/lib/python3.10/site-packages/pandas/_testing/_io.py ADDED
@@ -0,0 +1,170 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import gzip
4
+ import io
5
+ import pathlib
6
+ import tarfile
7
+ from typing import (
8
+ TYPE_CHECKING,
9
+ Any,
10
+ Callable,
11
+ )
12
+ import uuid
13
+ import zipfile
14
+
15
+ from pandas.compat import (
16
+ get_bz2_file,
17
+ get_lzma_file,
18
+ )
19
+ from pandas.compat._optional import import_optional_dependency
20
+
21
+ import pandas as pd
22
+ from pandas._testing.contexts import ensure_clean
23
+
24
+ if TYPE_CHECKING:
25
+ from pandas._typing import (
26
+ FilePath,
27
+ ReadPickleBuffer,
28
+ )
29
+
30
+ from pandas import (
31
+ DataFrame,
32
+ Series,
33
+ )
34
+
35
+ # ------------------------------------------------------------------
36
+ # File-IO
37
+
38
+
39
def round_trip_pickle(
    obj: Any, path: FilePath | ReadPickleBuffer | None = None
) -> DataFrame | Series:
    """
    Pickle an object and then read it again.

    Parameters
    ----------
    obj : any object
        The object to pickle and then re-read.
    path : str, path object or file-like object, default None
        The path where the pickled object is written and then read.

    Returns
    -------
    pandas object
        The original object that was pickled and then re-read.
    """
    # Default to a unique temp filename so parallel tests cannot collide.
    target = f"__{uuid.uuid4()}__.pickle" if path is None else path
    with ensure_clean(target) as temp_path:
        pd.to_pickle(obj, temp_path)
        return pd.read_pickle(temp_path)
63
+
64
+
65
def round_trip_pathlib(writer, reader, path: str | None = None):
    """
    Write an object to file specified by a pathlib.Path and read it back

    Parameters
    ----------
    writer : callable bound to pandas object
        IO writing function (e.g. DataFrame.to_csv )
    reader : callable
        IO reading function (e.g. pd.read_csv )
    path : str, default None
        The path where the object is written and then read.

    Returns
    -------
    pandas object
        The original object that was serialized and then re-read.
    """
    target = "___pathlib___" if path is None else path
    with ensure_clean(target) as tmp:
        as_path = pathlib.Path(tmp)
        writer(as_path)
        result = reader(as_path)
    return result
90
+
91
+
92
def round_trip_localpath(writer, reader, path: str | None = None):
    """
    Write an object to file specified by a py.path LocalPath and read it back.

    Parameters
    ----------
    writer : callable bound to pandas object
        IO writing function (e.g. DataFrame.to_csv )
    reader : callable
        IO reading function (e.g. pd.read_csv )
    path : str, default None
        The path where the object is written and then read.

    Returns
    -------
    pandas object
        The original object that was serialized and then re-read.
    """
    # Imported lazily so importing this module does not require pytest.
    import pytest

    # importorskip skips the calling test when the optional ``py`` package
    # is not installed, instead of erroring out.
    LocalPath = pytest.importorskip("py.path").local
    if path is None:
        path = "___localpath___"
    with ensure_clean(path) as path:
        writer(LocalPath(path))
        obj = reader(LocalPath(path))
    return obj
119
+
120
+
121
+ def write_to_compressed(compression, path, data, dest: str = "test") -> None:
122
+ """
123
+ Write data to a compressed file.
124
+
125
+ Parameters
126
+ ----------
127
+ compression : {'gzip', 'bz2', 'zip', 'xz', 'zstd'}
128
+ The compression type to use.
129
+ path : str
130
+ The file path to write the data.
131
+ data : str
132
+ The data to write.
133
+ dest : str, default "test"
134
+ The destination file (for ZIP only)
135
+
136
+ Raises
137
+ ------
138
+ ValueError : An invalid compression value was passed in.
139
+ """
140
+ args: tuple[Any, ...] = (data,)
141
+ mode = "wb"
142
+ method = "write"
143
+ compress_method: Callable
144
+
145
+ if compression == "zip":
146
+ compress_method = zipfile.ZipFile
147
+ mode = "w"
148
+ args = (dest, data)
149
+ method = "writestr"
150
+ elif compression == "tar":
151
+ compress_method = tarfile.TarFile
152
+ mode = "w"
153
+ file = tarfile.TarInfo(name=dest)
154
+ bytes = io.BytesIO(data)
155
+ file.size = len(data)
156
+ args = (file, bytes)
157
+ method = "addfile"
158
+ elif compression == "gzip":
159
+ compress_method = gzip.GzipFile
160
+ elif compression == "bz2":
161
+ compress_method = get_bz2_file()
162
+ elif compression == "zstd":
163
+ compress_method = import_optional_dependency("zstandard").open
164
+ elif compression == "xz":
165
+ compress_method = get_lzma_file()
166
+ else:
167
+ raise ValueError(f"Unrecognized compression type: {compression}")
168
+
169
+ with compress_method(path, mode=mode) as f:
170
+ getattr(f, method)(*args)
falcon/lib/python3.10/site-packages/pandas/_testing/_warnings.py ADDED
@@ -0,0 +1,232 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from contextlib import (
4
+ contextmanager,
5
+ nullcontext,
6
+ )
7
+ import inspect
8
+ import re
9
+ import sys
10
+ from typing import (
11
+ TYPE_CHECKING,
12
+ Literal,
13
+ cast,
14
+ )
15
+ import warnings
16
+
17
+ from pandas.compat import PY311
18
+
19
+ if TYPE_CHECKING:
20
+ from collections.abc import (
21
+ Generator,
22
+ Sequence,
23
+ )
24
+
25
+
26
@contextmanager
def assert_produces_warning(
    expected_warning: type[Warning] | bool | tuple[type[Warning], ...] | None = Warning,
    filter_level: Literal[
        "error", "ignore", "always", "default", "module", "once"
    ] = "always",
    check_stacklevel: bool = True,
    raise_on_extra_warnings: bool = True,
    match: str | None = None,
) -> Generator[list[warnings.WarningMessage], None, None]:
    """
    Context manager for running code expected to either raise a specific warning,
    multiple specific warnings, or not raise any warnings. Verifies that the code
    raises the expected warning(s), and that it does not raise any other unexpected
    warnings. It is basically a wrapper around ``warnings.catch_warnings``.

    Parameters
    ----------
    expected_warning : {Warning, False, tuple[Warning, ...], None}, default Warning
        The type of Exception raised. ``exception.Warning`` is the base
        class for all warnings. To raise multiple types of exceptions,
        pass them as a tuple. To check that no warning is returned,
        specify ``False`` or ``None``.
    filter_level : str or None, default "always"
        Specifies whether warnings are ignored, displayed, or turned
        into errors.
        Valid values are:

        * "error" - turns matching warnings into exceptions
        * "ignore" - discard the warning
        * "always" - always emit a warning
        * "default" - print the warning the first time it is generated
          from each location
        * "module" - print the warning the first time it is generated
          from each module
        * "once" - print the warning the first time it is generated

    check_stacklevel : bool, default True
        If True, displays the line that called the function containing
        the warning to show were the function is called. Otherwise, the
        line that implements the function is displayed.
    raise_on_extra_warnings : bool, default True
        Whether extra warnings not of the type `expected_warning` should
        cause the test to fail.
    match : str, optional
        Match warning message.

    Examples
    --------
    >>> import warnings
    >>> with assert_produces_warning():
    ...     warnings.warn(UserWarning())
    ...
    >>> with assert_produces_warning(False):
    ...     warnings.warn(RuntimeWarning())
    ...
    Traceback (most recent call last):
        ...
    AssertionError: Caused unexpected warning(s): ['RuntimeWarning'].
    >>> with assert_produces_warning(UserWarning):
    ...     warnings.warn(RuntimeWarning())
    Traceback (most recent call last):
        ...
    AssertionError: Did not see expected warning of class 'UserWarning'.

    ..warn:: This is *not* thread-safe.
    """
    # Hide this frame from pytest failure tracebacks.
    __tracebackhide__ = True

    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter(filter_level)
        try:
            yield w
        finally:
            # Validation runs in ``finally`` so the captured warnings are
            # checked even if the ``with`` body itself raised.
            if expected_warning:
                expected_warning = cast(type[Warning], expected_warning)
                _assert_caught_expected_warning(
                    caught_warnings=w,
                    expected_warning=expected_warning,
                    match=match,
                    check_stacklevel=check_stacklevel,
                )
            if raise_on_extra_warnings:
                _assert_caught_no_extra_warnings(
                    caught_warnings=w,
                    expected_warning=expected_warning,
                )
+
114
+
115
def maybe_produces_warning(warning: type[Warning], condition: bool, **kwargs):
    """
    Return a context manager that possibly checks a warning based on the condition
    """
    # When the condition does not hold, hand back a do-nothing context
    # manager so callers can always use a ``with`` statement; otherwise
    # delegate the actual checking to assert_produces_warning.
    if not condition:
        return nullcontext()
    return assert_produces_warning(warning, **kwargs)
123
+
124
+
125
def _assert_caught_expected_warning(
    *,
    caught_warnings: Sequence[warnings.WarningMessage],
    expected_warning: type[Warning],
    match: str | None,
    check_stacklevel: bool,
) -> None:
    """Assert that there was the expected warning among the caught warnings."""
    found_category = False
    found_match = False
    non_matching_messages = []

    for caught in caught_warnings:
        # Only warnings of (a subclass of) the expected category count.
        if not issubclass(caught.category, expected_warning):
            continue
        found_category = True

        if check_stacklevel:
            _assert_raised_with_correct_stacklevel(caught)

        if match is None:
            continue
        if re.search(match, str(caught.message)):
            found_match = True
        else:
            # Remember non-matching messages so the failure is informative.
            non_matching_messages.append(caught.message)

    if not found_category:
        raise AssertionError(
            f"Did not see expected warning of class "
            f"{repr(expected_warning.__name__)}"
        )

    if match and not found_match:
        raise AssertionError(
            f"Did not see warning {repr(expected_warning.__name__)} "
            f"matching '{match}'. The emitted warning messages are "
            f"{non_matching_messages}"
        )
162
+
163
+
164
def _assert_caught_no_extra_warnings(
    *,
    caught_warnings: Sequence[warnings.WarningMessage],
    expected_warning: type[Warning] | bool | tuple[type[Warning], ...] | None,
) -> None:
    """Assert that no extra warnings apart from the expected ones are caught."""
    unexpected = []

    for caught in caught_warnings:
        if not _is_unexpected_warning(caught, expected_warning):
            continue
        # GH#38630 pytest.filterwarnings does not suppress these.
        if caught.category == ResourceWarning:
            # GH 44732: Don't make the CI flaky by filtering SSL-related
            # ResourceWarning from dependencies
            if "unclosed <ssl.SSLSocket" in str(caught.message):
                continue
            # GH 44844: Matplotlib leaves font files open during the entire
            # process upon import. Don't make CI flaky if ResourceWarning
            # raised due to these open files.
            if any("matplotlib" in mod for mod in sys.modules):
                continue
        if PY311 and caught.category == EncodingWarning:
            # EncodingWarnings are checked in the CI
            # pyproject.toml errors on EncodingWarnings in pandas
            # Ignore EncodingWarnings from other libraries
            continue
        unexpected.append(
            (
                caught.category.__name__,
                caught.message,
                caught.filename,
                caught.lineno,
            )
        )

    if unexpected:
        raise AssertionError(f"Caused unexpected warning(s): {repr(unexpected)}")
201
+
202
+
203
def _is_unexpected_warning(
    actual_warning: warnings.WarningMessage,
    expected_warning: type[Warning] | bool | tuple[type[Warning], ...] | None,
) -> bool:
    """Check if the actual warning issued is unexpected."""
    # Any warning at all is unexpected when none were expected.
    if actual_warning and not expected_warning:
        return True
    # Otherwise a warning is unexpected when its category is not a subclass
    # of (one of) the expected categories.
    expected_warning = cast(type[Warning], expected_warning)
    return not issubclass(actual_warning.category, expected_warning)
212
+
213
+
214
def _assert_raised_with_correct_stacklevel(
    actual_warning: warnings.WarningMessage,
) -> None:
    """
    Assert that ``actual_warning`` was emitted with a stacklevel pointing at
    the code that entered ``assert_produces_warning``: the warning's recorded
    filename must equal the filename of the frame four levels above this one.
    """
    # Walking frames manually is much faster than inspect.stack():
    # https://stackoverflow.com/questions/17407119/python-inspect-stack-is-slow
    frame = inspect.currentframe()
    # NOTE(review): the fixed depth of 4 assumes the exact call chain
    # user code -> contextmanager machinery -> assert_produces_warning ->
    # _assert_caught_expected_warning -> here; confirm if callers change.
    for _ in range(4):
        frame = frame.f_back  # type: ignore[union-attr]
    try:
        caller_filename = inspect.getfile(frame)  # type: ignore[arg-type]
    finally:
        # Drop the frame reference to break the reference cycle it creates.
        # See note in
        # https://docs.python.org/3/library/inspect.html#inspect.Traceback
        del frame
    msg = (
        "Warning not set with correct stacklevel. "
        f"File where warning is raised: {actual_warning.filename} != "
        f"{caller_filename}. Warning message: {actual_warning.message}"
    )
    assert actual_warning.filename == caller_filename, msg
falcon/lib/python3.10/site-packages/pandas/_testing/asserters.py ADDED
@@ -0,0 +1,1435 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import operator
4
+ from typing import (
5
+ TYPE_CHECKING,
6
+ Literal,
7
+ NoReturn,
8
+ cast,
9
+ )
10
+
11
+ import numpy as np
12
+
13
+ from pandas._libs import lib
14
+ from pandas._libs.missing import is_matching_na
15
+ from pandas._libs.sparse import SparseIndex
16
+ import pandas._libs.testing as _testing
17
+ from pandas._libs.tslibs.np_datetime import compare_mismatched_resolutions
18
+
19
+ from pandas.core.dtypes.common import (
20
+ is_bool,
21
+ is_float_dtype,
22
+ is_integer_dtype,
23
+ is_number,
24
+ is_numeric_dtype,
25
+ needs_i8_conversion,
26
+ )
27
+ from pandas.core.dtypes.dtypes import (
28
+ CategoricalDtype,
29
+ DatetimeTZDtype,
30
+ ExtensionDtype,
31
+ NumpyEADtype,
32
+ )
33
+ from pandas.core.dtypes.missing import array_equivalent
34
+
35
+ import pandas as pd
36
+ from pandas import (
37
+ Categorical,
38
+ DataFrame,
39
+ DatetimeIndex,
40
+ Index,
41
+ IntervalDtype,
42
+ IntervalIndex,
43
+ MultiIndex,
44
+ PeriodIndex,
45
+ RangeIndex,
46
+ Series,
47
+ TimedeltaIndex,
48
+ )
49
+ from pandas.core.arrays import (
50
+ DatetimeArray,
51
+ ExtensionArray,
52
+ IntervalArray,
53
+ PeriodArray,
54
+ TimedeltaArray,
55
+ )
56
+ from pandas.core.arrays.datetimelike import DatetimeLikeArrayMixin
57
+ from pandas.core.arrays.string_ import StringDtype
58
+ from pandas.core.indexes.api import safe_sort_index
59
+
60
+ from pandas.io.formats.printing import pprint_thing
61
+
62
+ if TYPE_CHECKING:
63
+ from pandas._typing import DtypeObj
64
+
65
+
66
def assert_almost_equal(
    left,
    right,
    check_dtype: bool | Literal["equiv"] = "equiv",
    rtol: float = 1.0e-5,
    atol: float = 1.0e-8,
    **kwargs,
) -> None:
    """
    Check that the left and right objects are approximately equal.

    By approximately equal, we refer to objects that are numbers or that
    contain numbers which may be equivalent to specific levels of precision.

    Parameters
    ----------
    left : object
    right : object
    check_dtype : bool or {'equiv'}, default 'equiv'
        Check dtype if both a and b are the same type. If 'equiv' is passed in,
        then `RangeIndex` and `Index` with int64 dtype are also considered
        equivalent when doing type checking.
    rtol : float, default 1e-5
        Relative tolerance.
    atol : float, default 1e-8
        Absolute tolerance.
    """
    # Dispatch on the pandas container type; each branch delegates to the
    # dedicated assertion helper configured for approximate comparison.
    if isinstance(left, Index):
        assert_index_equal(
            left,
            right,
            check_exact=False,
            exact=check_dtype,
            rtol=rtol,
            atol=atol,
            **kwargs,
        )
    elif isinstance(left, Series):
        assert_series_equal(
            left,
            right,
            check_exact=False,
            check_dtype=check_dtype,
            rtol=rtol,
            atol=atol,
            **kwargs,
        )
    elif isinstance(left, DataFrame):
        assert_frame_equal(
            left,
            right,
            check_exact=False,
            check_dtype=check_dtype,
            rtol=rtol,
            atol=atol,
            **kwargs,
        )
    else:
        # Other sequences / scalars.
        if check_dtype:
            both_numbers = is_number(left) and is_number(right)
            both_bools = is_bool(left) and is_bool(right)
            # Numeric and bool classes (e.g. np.float64 vs float, np.bool_
            # vs bool) are deliberately not compared; anything else must
            # have matching classes.
            if not both_numbers and not both_bools:
                if isinstance(left, np.ndarray) or isinstance(right, np.ndarray):
                    obj = "numpy array"
                else:
                    obj = "Input"
                assert_class_equal(left, right, obj=obj)

        # if we have "equiv", this becomes True
        _testing.assert_almost_equal(
            left, right, check_dtype=bool(check_dtype), rtol=rtol, atol=atol, **kwargs
        )
146
+
147
+
148
def _check_isinstance(left, right, cls) -> None:
    """
    Helper method for our assert_* methods that ensures that
    the two objects being compared have the right type before
    proceeding with the comparison.

    Parameters
    ----------
    left : The first object being compared.
    right : The second object being compared.
    cls : The class type to check against.

    Raises
    ------
    AssertionError : Either `left` or `right` is not an instance of `cls`.
    """
    cls_name = cls.__name__

    # Check left first, then right, so the error names the first offender.
    for candidate in (left, right):
        if not isinstance(candidate, cls):
            raise AssertionError(
                f"{cls_name} Expected type {cls}, found {type(candidate)} instead"
            )
174
+
175
+
176
def assert_dict_equal(left, right, compare_keys: bool = True) -> None:
    """
    Check that left and right dicts are equal.

    Both inputs must be dict instances; the element-wise comparison is
    delegated to the C helper in ``pandas._libs.testing``.
    """
    _check_isinstance(left, right, dict)
    _testing.assert_dict_equal(left, right, compare_keys=compare_keys)
179
+
180
+
181
def assert_index_equal(
    left: Index,
    right: Index,
    exact: bool | str = "equiv",
    check_names: bool = True,
    check_exact: bool = True,
    check_categorical: bool = True,
    check_order: bool = True,
    rtol: float = 1.0e-5,
    atol: float = 1.0e-8,
    obj: str = "Index",
) -> None:
    """
    Check that left and right Index are equal.

    Parameters
    ----------
    left : Index
    right : Index
    exact : bool or {'equiv'}, default 'equiv'
        Whether to check the Index class, dtype and inferred_type
        are identical. If 'equiv', then RangeIndex can be substituted for
        Index with an int64 dtype as well.
    check_names : bool, default True
        Whether to check the names attribute.
    check_exact : bool, default True
        Whether to compare number exactly.
    check_categorical : bool, default True
        Whether to compare internal Categorical exactly.
    check_order : bool, default True
        Whether to compare the order of index entries as well as their values.
        If True, both indexes must contain the same elements, in the same order.
        If False, both indexes must contain the same elements, but in any order.
    rtol : float, default 1e-5
        Relative tolerance. Only used when check_exact is False.
    atol : float, default 1e-8
        Absolute tolerance. Only used when check_exact is False.
    obj : str, default 'Index'
        Specify object name being compared, internally used to show appropriate
        assertion message.

    Examples
    --------
    >>> from pandas import testing as tm
    >>> a = pd.Index([1, 2, 3])
    >>> b = pd.Index([1, 2, 3])
    >>> tm.assert_index_equal(a, b)
    """
    __tracebackhide__ = True

    # Nested so it closes over `exact`/`check_categorical` from the caller.
    def _check_types(left, right, obj: str = "Index") -> None:
        if not exact:
            return

        assert_class_equal(left, right, exact=exact, obj=obj)
        assert_attr_equal("inferred_type", left, right, obj=obj)

        # Skip exact dtype checking when `check_categorical` is False
        if isinstance(left.dtype, CategoricalDtype) and isinstance(
            right.dtype, CategoricalDtype
        ):
            if check_categorical:
                assert_attr_equal("dtype", left, right, obj=obj)
                assert_index_equal(left.categories, right.categories, exact=exact)
            return

        assert_attr_equal("dtype", left, right, obj=obj)

    # instance validation
    _check_isinstance(left, right, Index)

    # class / dtype comparison
    _check_types(left, right, obj=obj)

    # level comparison
    if left.nlevels != right.nlevels:
        msg1 = f"{obj} levels are different"
        msg2 = f"{left.nlevels}, {left}"
        msg3 = f"{right.nlevels}, {right}"
        raise_assert_detail(obj, msg1, msg2, msg3)

    # length comparison
    if len(left) != len(right):
        msg1 = f"{obj} length are different"
        msg2 = f"{len(left)}, {left}"
        msg3 = f"{len(right)}, {right}"
        raise_assert_detail(obj, msg1, msg2, msg3)

    # If order doesn't matter then sort the index entries
    if not check_order:
        left = safe_sort_index(left)
        right = safe_sort_index(right)

    # MultiIndex special comparison for little-friendly error messages
    if isinstance(left, MultiIndex):
        right = cast(MultiIndex, right)

        for level in range(left.nlevels):
            lobj = f"MultiIndex level [{level}]"
            try:
                # try comparison on levels/codes to avoid densifying MultiIndex
                assert_index_equal(
                    left.levels[level],
                    right.levels[level],
                    exact=exact,
                    check_names=check_names,
                    check_exact=check_exact,
                    check_categorical=check_categorical,
                    rtol=rtol,
                    atol=atol,
                    obj=lobj,
                )
                assert_numpy_array_equal(left.codes[level], right.codes[level])
            except AssertionError:
                # Fall back to comparing the densified level values, which
                # gives a friendlier error message at the cost of memory.
                llevel = left.get_level_values(level)
                rlevel = right.get_level_values(level)

                assert_index_equal(
                    llevel,
                    rlevel,
                    exact=exact,
                    check_names=check_names,
                    check_exact=check_exact,
                    check_categorical=check_categorical,
                    rtol=rtol,
                    atol=atol,
                    obj=lobj,
                )
            # get_level_values may change dtype
            _check_types(left.levels[level], right.levels[level], obj=obj)

    # skip exact index checking when `check_categorical` is False
    elif check_exact and check_categorical:
        if not left.equals(right):
            # Report the percentage of mismatching positions.
            mismatch = left._values != right._values

            if not isinstance(mismatch, np.ndarray):
                # NA positions compare as NA; treat them as mismatches.
                mismatch = cast("ExtensionArray", mismatch).fillna(True)

            diff = np.sum(mismatch.astype(int)) * 100.0 / len(left)
            msg = f"{obj} values are different ({np.round(diff, 5)} %)"
            raise_assert_detail(obj, msg, left, right)
    else:
        # if we have "equiv", this becomes True
        exact_bool = bool(exact)
        _testing.assert_almost_equal(
            left.values,
            right.values,
            rtol=rtol,
            atol=atol,
            check_dtype=exact_bool,
            obj=obj,
            lobj=left,
            robj=right,
        )

    # metadata comparison
    if check_names:
        assert_attr_equal("names", left, right, obj=obj)
    if isinstance(left, PeriodIndex) or isinstance(right, PeriodIndex):
        assert_attr_equal("dtype", left, right, obj=obj)
    if isinstance(left, IntervalIndex) or isinstance(right, IntervalIndex):
        assert_interval_array_equal(left._values, right._values)

    if check_categorical:
        if isinstance(left.dtype, CategoricalDtype) or isinstance(
            right.dtype, CategoricalDtype
        ):
            assert_categorical_equal(left._values, right._values, obj=f"{obj} category")
350
+
351
+
352
def assert_class_equal(
    left, right, exact: bool | str = True, obj: str = "Input"
) -> None:
    """
    Checks classes are equal.
    """
    __tracebackhide__ = True

    def _describe(x):
        # Show the full Index (with values) in error messages; otherwise
        # only the class name is useful.
        if isinstance(x, Index):
            return x
        return type(x).__name__

    def _is_class_equiv(idx: Index) -> bool:
        """Classes that are a RangeIndex (sub-)instance or exactly an `Index` .

        This only checks class equivalence. There is a separate check that the
        dtype is int64.
        """
        return type(idx) is Index or isinstance(idx, RangeIndex)

    if type(left) == type(right):
        return

    if exact == "equiv" and _is_class_equiv(left) and _is_class_equiv(right):
        return

    msg = f"{obj} classes are different"
    raise_assert_detail(obj, msg, _describe(left), _describe(right))
384
+
385
+
386
def assert_attr_equal(attr: str, left, right, obj: str = "Attributes") -> None:
    """
    Check attributes are equal. Both objects must have attribute.

    Parameters
    ----------
    attr : str
        Attribute name being compared.
    left : object
    right : object
    obj : str, default 'Attributes'
        Specify object name being compared, internally used to show appropriate
        assertion message
    """
    __tracebackhide__ = True

    lval = getattr(left, attr)
    rval = getattr(right, attr)

    # Identical objects, or matching NA values (e.g. both np.nan, both NaT,
    # both pd.NA, ...) are considered equal.
    if lval is rval or is_matching_na(lval, rval):
        return None

    try:
        result = lval == rval
    except TypeError:
        # datetimetz on rhs may raise TypeError
        result = False
    if (lval is pd.NA) ^ (rval is pd.NA):
        result = False
    elif not isinstance(result, bool):
        # Array-valued comparison: require every element to match.
        result = result.all()

    if not result:
        msg = f'Attribute "{attr}" are different'
        raise_assert_detail(obj, msg, lval, rval)
    return None
423
+
424
+
425
def assert_is_valid_plot_return_object(objs) -> None:
    """
    Assert that a plotting call returned valid matplotlib objects.

    Parameters
    ----------
    objs : object
        Either an ndarray/Series whose elements must each be a matplotlib
        ``Axes`` (or a dict of them), or a single ``Artist``, tuple, or dict.

    Raises
    ------
    AssertionError
        If `objs` (or any of its elements) is not an acceptable plot
        return object.
    """
    # Imported lazily so that merely importing the testing module does not
    # require matplotlib.
    from matplotlib.artist import Artist
    from matplotlib.axes import Axes

    if isinstance(objs, (Series, np.ndarray)):
        if isinstance(objs, Series):
            objs = objs._values
        for el in objs.ravel():
            msg = (
                "one of 'objs' is not a matplotlib Axes instance, "
                f"type encountered {repr(type(el).__name__)}"
            )
            assert isinstance(el, (Axes, dict)), msg
    else:
        # Fix: the message previously contained the typo "ArtistArtist".
        msg = (
            "objs is neither an ndarray of Artist instances nor a single "
            "Artist instance, tuple, or dict, 'objs' is a "
            f"{repr(type(objs).__name__)}"
        )
        assert isinstance(objs, (Artist, tuple, dict)), msg
445
+
446
+
447
def assert_is_sorted(seq) -> None:
    """Assert that the sequence is sorted."""
    # Work on the underlying array for Index/Series inputs.
    values = seq.values if isinstance(seq, (Index, Series)) else seq
    # sorting does not change precisions
    if isinstance(values, np.ndarray):
        expected = np.sort(np.array(values))
        assert_numpy_array_equal(values, expected)
    else:
        expected = values[values.argsort()]
        assert_extension_array_equal(values, expected)
456
+
457
+
458
def assert_categorical_equal(
    left,
    right,
    check_dtype: bool = True,
    check_category_order: bool = True,
    obj: str = "Categorical",
) -> None:
    """
    Test that Categoricals are equivalent.

    Parameters
    ----------
    left : Categorical
    right : Categorical
    check_dtype : bool, default True
        Check that integer dtype of the codes are the same.
    check_category_order : bool, default True
        Whether the order of the categories should be compared, which
        implies identical integer codes. If False, only the resulting
        values are compared. The ordered attribute is
        checked regardless.
    obj : str, default 'Categorical'
        Specify object name being compared, internally used to show appropriate
        assertion message.
    """
    _check_isinstance(left, right, Categorical)

    exact: bool | str
    if isinstance(left.categories, RangeIndex) or isinstance(
        right.categories, RangeIndex
    ):
        # Allow RangeIndex/Index-int64 equivalence for the categories.
        exact = "equiv"
    else:
        # We still want to require exact matches for Index
        exact = True

    if check_category_order:
        assert_index_equal(
            left.categories, right.categories, obj=f"{obj}.categories", exact=exact
        )
        assert_numpy_array_equal(
            left.codes, right.codes, check_dtype=check_dtype, obj=f"{obj}.codes"
        )
    else:
        # Order-insensitive path: compare sorted categories, then the
        # densified values (categories taken by codes).
        try:
            lc = left.categories.sort_values()
            rc = right.categories.sort_values()
        except TypeError:
            # e.g. '<' not supported between instances of 'int' and 'str'
            lc, rc = left.categories, right.categories
        assert_index_equal(lc, rc, obj=f"{obj}.categories", exact=exact)
        assert_index_equal(
            left.categories.take(left.codes),
            right.categories.take(right.codes),
            obj=f"{obj}.values",
            exact=exact,
        )

    assert_attr_equal("ordered", left, right, obj=obj)
517
+
518
+
519
def assert_interval_array_equal(
    left, right, exact: bool | Literal["equiv"] = "equiv", obj: str = "IntervalArray"
) -> None:
    """
    Test that two IntervalArrays are equivalent.

    Parameters
    ----------
    left, right : IntervalArray
        The IntervalArrays to compare.
    exact : bool or {'equiv'}, default 'equiv'
        Whether to check the Index class, dtype and inferred_type
        are identical. If 'equiv', then RangeIndex can be substituted for
        Index with an int64 dtype as well.
    obj : str, default 'IntervalArray'
        Specify object name being compared, internally used to show appropriate
        assertion message
    """
    _check_isinstance(left, right, IntervalArray)

    kwargs = {}
    if left._left.dtype.kind in "mM":
        # We have a DatetimeArray or TimedeltaArray
        kwargs["check_freq"] = False

    assert_equal(left._left, right._left, obj=f"{obj}.left", **kwargs)
    # Fix: the right endpoints were previously labeled "{obj}.left" in
    # assertion messages, which mislabeled which side actually differed.
    assert_equal(left._right, right._right, obj=f"{obj}.right", **kwargs)

    assert_attr_equal("closed", left, right, obj=obj)
548
+
549
+
550
def assert_period_array_equal(left, right, obj: str = "PeriodArray") -> None:
    """
    Check that left and right PeriodArrays are equal.

    Compares the backing ``_ndarray`` values first, then the dtype (which
    carries the period frequency).
    """
    _check_isinstance(left, right, PeriodArray)

    assert_numpy_array_equal(left._ndarray, right._ndarray, obj=f"{obj}._ndarray")
    assert_attr_equal("dtype", left, right, obj=obj)
555
+
556
+
557
def assert_datetime_array_equal(
    left, right, obj: str = "DatetimeArray", check_freq: bool = True
) -> None:
    """
    Check that left and right DatetimeArrays are equal.

    Compares the backing ``_ndarray`` values, the ``freq`` attribute
    (unless ``check_freq`` is False) and the timezone.
    """
    __tracebackhide__ = True
    _check_isinstance(left, right, DatetimeArray)

    assert_numpy_array_equal(left._ndarray, right._ndarray, obj=f"{obj}._ndarray")
    if check_freq:
        assert_attr_equal("freq", left, right, obj=obj)
    assert_attr_equal("tz", left, right, obj=obj)
567
+
568
+
569
def assert_timedelta_array_equal(
    left, right, obj: str = "TimedeltaArray", check_freq: bool = True
) -> None:
    """
    Check that left and right TimedeltaArrays are equal.

    Compares the backing ``_ndarray`` values and, unless ``check_freq`` is
    False, the ``freq`` attribute.
    """
    __tracebackhide__ = True
    _check_isinstance(left, right, TimedeltaArray)
    assert_numpy_array_equal(left._ndarray, right._ndarray, obj=f"{obj}._ndarray")
    if check_freq:
        assert_attr_equal("freq", left, right, obj=obj)
577
+
578
+
579
+ def raise_assert_detail(
580
+ obj, message, left, right, diff=None, first_diff=None, index_values=None
581
+ ) -> NoReturn:
582
+ __tracebackhide__ = True
583
+
584
+ msg = f"""{obj} are different
585
+
586
+ {message}"""
587
+
588
+ if isinstance(index_values, Index):
589
+ index_values = np.asarray(index_values)
590
+
591
+ if isinstance(index_values, np.ndarray):
592
+ msg += f"\n[index]: {pprint_thing(index_values)}"
593
+
594
+ if isinstance(left, np.ndarray):
595
+ left = pprint_thing(left)
596
+ elif isinstance(left, (CategoricalDtype, NumpyEADtype, StringDtype)):
597
+ left = repr(left)
598
+
599
+ if isinstance(right, np.ndarray):
600
+ right = pprint_thing(right)
601
+ elif isinstance(right, (CategoricalDtype, NumpyEADtype, StringDtype)):
602
+ right = repr(right)
603
+
604
+ msg += f"""
605
+ [left]: {left}
606
+ [right]: {right}"""
607
+
608
+ if diff is not None:
609
+ msg += f"\n[diff]: {diff}"
610
+
611
+ if first_diff is not None:
612
+ msg += f"\n{first_diff}"
613
+
614
+ raise AssertionError(msg)
615
+
616
+
617
def assert_numpy_array_equal(
    left,
    right,
    strict_nan: bool = False,
    check_dtype: bool | Literal["equiv"] = True,
    err_msg=None,
    check_same=None,
    obj: str = "numpy array",
    index_values=None,
) -> None:
    """
    Check that 'np.ndarray' is equivalent.

    Parameters
    ----------
    left, right : numpy.ndarray or iterable
        The two arrays to be compared.
    strict_nan : bool, default False
        If True, consider NaN and None to be different.
    check_dtype : bool, default True
        Check dtype if both a and b are np.ndarray.
    err_msg : str, default None
        If provided, used as assertion message.
    check_same : None|'copy'|'same', default None
        Ensure left and right refer/do not refer to the same memory area.
    obj : str, default 'numpy array'
        Specify object name being compared, internally used to show appropriate
        assertion message.
    index_values : Index | numpy.ndarray, default None
        optional index (shared by both left and right), used in output.
    """
    __tracebackhide__ = True

    # instance validation
    # Show a detailed error message when classes are different
    assert_class_equal(left, right, obj=obj)
    # both classes must be an np.ndarray
    _check_isinstance(left, right, np.ndarray)

    def _get_base(obj):
        # A view's .base is the array owning the memory; compare bases so
        # that views over the same buffer count as "same".
        return obj.base if getattr(obj, "base", None) is not None else obj

    left_base = _get_base(left)
    right_base = _get_base(right)

    if check_same == "same":
        if left_base is not right_base:
            raise AssertionError(f"{repr(left_base)} is not {repr(right_base)}")
    elif check_same == "copy":
        if left_base is right_base:
            raise AssertionError(f"{repr(left_base)} is {repr(right_base)}")

    # Closure used only on failure; builds the detailed error message.
    def _raise(left, right, err_msg) -> NoReturn:
        if err_msg is None:
            if left.shape != right.shape:
                raise_assert_detail(
                    obj, f"{obj} shapes are different", left.shape, right.shape
                )

            diff = 0
            for left_arr, right_arr in zip(left, right):
                # count up differences
                if not array_equivalent(left_arr, right_arr, strict_nan=strict_nan):
                    diff += 1

            # Report the percentage of differing elements along axis 0.
            diff = diff * 100.0 / left.size
            msg = f"{obj} values are different ({np.round(diff, 5)} %)"
            raise_assert_detail(obj, msg, left, right, index_values=index_values)

        raise AssertionError(err_msg)

    # compare shape and values
    if not array_equivalent(left, right, strict_nan=strict_nan):
        _raise(left, right, err_msg)

    if check_dtype:
        if isinstance(left, np.ndarray) and isinstance(right, np.ndarray):
            assert_attr_equal("dtype", left, right, obj=obj)
695
+
696
+
697
def assert_extension_array_equal(
    left,
    right,
    check_dtype: bool | Literal["equiv"] = True,
    index_values=None,
    check_exact: bool | lib.NoDefault = lib.no_default,
    rtol: float | lib.NoDefault = lib.no_default,
    atol: float | lib.NoDefault = lib.no_default,
    obj: str = "ExtensionArray",
) -> None:
    """
    Check that left and right ExtensionArrays are equal.

    Parameters
    ----------
    left, right : ExtensionArray
        The two arrays to compare.
    check_dtype : bool, default True
        Whether to check if the ExtensionArray dtypes are identical.
    index_values : Index | numpy.ndarray, default None
        Optional index (shared by both left and right), used in output.
    check_exact : bool, default False
        Whether to compare number exactly.

        .. versionchanged:: 2.2.0

            Defaults to True for integer dtypes if none of
            ``check_exact``, ``rtol`` and ``atol`` are specified.
    rtol : float, default 1e-5
        Relative tolerance. Only used when check_exact is False.
    atol : float, default 1e-8
        Absolute tolerance. Only used when check_exact is False.
    obj : str, default 'ExtensionArray'
        Specify object name being compared, internally used to show appropriate
        assertion message.

        .. versionadded:: 2.0.0

    Notes
    -----
    Missing values are checked separately from valid values.
    A mask of missing values is computed for each and checked to match.
    The remaining all-valid values are cast to object dtype and checked.

    Examples
    --------
    >>> from pandas import testing as tm
    >>> a = pd.Series([1, 2, 3, 4])
    >>> b, c = a.array, a.array
    >>> tm.assert_extension_array_equal(b, c)
    """
    # When the caller specified none of check_exact/rtol/atol, default to
    # exact comparison for non-float numeric dtypes (GH-backed 2.2.0 change
    # described in the docstring above).
    if (
        check_exact is lib.no_default
        and rtol is lib.no_default
        and atol is lib.no_default
    ):
        check_exact = (
            is_numeric_dtype(left.dtype)
            and not is_float_dtype(left.dtype)
            or is_numeric_dtype(right.dtype)
            and not is_float_dtype(right.dtype)
        )
    elif check_exact is lib.no_default:
        check_exact = False

    rtol = rtol if rtol is not lib.no_default else 1.0e-5
    atol = atol if atol is not lib.no_default else 1.0e-8

    assert isinstance(left, ExtensionArray), "left is not an ExtensionArray"
    assert isinstance(right, ExtensionArray), "right is not an ExtensionArray"
    if check_dtype:
        assert_attr_equal("dtype", left, right, obj=f"Attributes of {obj}")

    # Fast path for datetime-like arrays of the same class: compare the
    # underlying int64 representation instead of object-dtype elements.
    if (
        isinstance(left, DatetimeLikeArrayMixin)
        and isinstance(right, DatetimeLikeArrayMixin)
        and type(right) == type(left)
    ):
        # GH 52449
        # With check_dtype=False, datetime/timedelta arrays may differ only
        # in resolution (unit); treat them as equal if the values compare
        # equal across the mismatched resolutions.
        if not check_dtype and left.dtype.kind in "mM":
            if not isinstance(left.dtype, np.dtype):
                l_unit = cast(DatetimeTZDtype, left.dtype).unit
            else:
                l_unit = np.datetime_data(left.dtype)[0]
            if not isinstance(right.dtype, np.dtype):
                r_unit = cast(DatetimeTZDtype, right.dtype).unit
            else:
                r_unit = np.datetime_data(right.dtype)[0]
            if (
                l_unit != r_unit
                and compare_mismatched_resolutions(
                    left._ndarray, right._ndarray, operator.eq
                ).all()
            ):
                return
        # Avoid slow object-dtype comparisons
        # np.asarray for case where we have a np.MaskedArray
        assert_numpy_array_equal(
            np.asarray(left.asi8),
            np.asarray(right.asi8),
            index_values=index_values,
            obj=obj,
        )
        return

    # Compare NA masks first, then the remaining valid values (see Notes).
    left_na = np.asarray(left.isna())
    right_na = np.asarray(right.isna())
    assert_numpy_array_equal(
        left_na, right_na, obj=f"{obj} NA mask", index_values=index_values
    )

    # Valid values are compared as object-dtype NumPy arrays.
    left_valid = left[~left_na].to_numpy(dtype=object)
    right_valid = right[~right_na].to_numpy(dtype=object)
    if check_exact:
        assert_numpy_array_equal(
            left_valid, right_valid, obj=obj, index_values=index_values
        )
    else:
        _testing.assert_almost_equal(
            left_valid,
            right_valid,
            check_dtype=bool(check_dtype),
            rtol=rtol,
            atol=atol,
            obj=obj,
            index_values=index_values,
        )
824
+
825
+
826
# This could be refactored to use the NDFrame.equals method
def assert_series_equal(
    left,
    right,
    check_dtype: bool | Literal["equiv"] = True,
    check_index_type: bool | Literal["equiv"] = "equiv",
    check_series_type: bool = True,
    check_names: bool = True,
    check_exact: bool | lib.NoDefault = lib.no_default,
    check_datetimelike_compat: bool = False,
    check_categorical: bool = True,
    check_category_order: bool = True,
    check_freq: bool = True,
    check_flags: bool = True,
    rtol: float | lib.NoDefault = lib.no_default,
    atol: float | lib.NoDefault = lib.no_default,
    obj: str = "Series",
    *,
    check_index: bool = True,
    check_like: bool = False,
) -> None:
    """
    Check that left and right Series are equal.

    Parameters
    ----------
    left : Series
    right : Series
    check_dtype : bool, default True
        Whether to check the Series dtype is identical.
    check_index_type : bool or {'equiv'}, default 'equiv'
        Whether to check the Index class, dtype and inferred_type
        are identical.
    check_series_type : bool, default True
        Whether to check the Series class is identical.
    check_names : bool, default True
        Whether to check the Series and Index names attribute.
    check_exact : bool, default False
        Whether to compare number exactly.

        .. versionchanged:: 2.2.0

            Defaults to True for integer dtypes if none of
            ``check_exact``, ``rtol`` and ``atol`` are specified.
    check_datetimelike_compat : bool, default False
        Compare datetime-like which is comparable ignoring dtype.
    check_categorical : bool, default True
        Whether to compare internal Categorical exactly.
    check_category_order : bool, default True
        Whether to compare category order of internal Categoricals.
    check_freq : bool, default True
        Whether to check the `freq` attribute on a DatetimeIndex or TimedeltaIndex.
    check_flags : bool, default True
        Whether to check the `flags` attribute.
    rtol : float, default 1e-5
        Relative tolerance. Only used when check_exact is False.
    atol : float, default 1e-8
        Absolute tolerance. Only used when check_exact is False.
    obj : str, default 'Series'
        Specify object name being compared, internally used to show appropriate
        assertion message.
    check_index : bool, default True
        Whether to check index equivalence. If False, then compare only values.

        .. versionadded:: 1.3.0
    check_like : bool, default False
        If True, ignore the order of the index. Must be False if check_index is False.
        Note: same labels must be with the same data.

        .. versionadded:: 1.5.0

    Examples
    --------
    >>> from pandas import testing as tm
    >>> a = pd.Series([1, 2, 3, 4])
    >>> b = pd.Series([1, 2, 3, 4])
    >>> tm.assert_series_equal(a, b)
    """
    # Hide this frame from pytest tracebacks so failures point at the caller.
    __tracebackhide__ = True
    # The index comparison keeps the pre-2.2.0 default (inexact) unless the
    # caller explicitly passed check_exact.
    check_exact_index = False if check_exact is lib.no_default else check_exact
    # For the values: default to exact comparison for non-float numeric
    # dtypes when none of check_exact/rtol/atol were specified (2.2.0).
    if (
        check_exact is lib.no_default
        and rtol is lib.no_default
        and atol is lib.no_default
    ):
        check_exact = (
            is_numeric_dtype(left.dtype)
            and not is_float_dtype(left.dtype)
            or is_numeric_dtype(right.dtype)
            and not is_float_dtype(right.dtype)
        )
    elif check_exact is lib.no_default:
        check_exact = False

    rtol = rtol if rtol is not lib.no_default else 1.0e-5
    atol = atol if atol is not lib.no_default else 1.0e-8

    if not check_index and check_like:
        raise ValueError("check_like must be False if check_index is False")

    # instance validation
    _check_isinstance(left, right, Series)

    if check_series_type:
        assert_class_equal(left, right, obj=obj)

    # length comparison
    if len(left) != len(right):
        msg1 = f"{len(left)}, {left.index}"
        msg2 = f"{len(right)}, {right.index}"
        raise_assert_detail(obj, "Series length are different", msg1, msg2)

    if check_flags:
        assert left.flags == right.flags, f"{repr(left.flags)} != {repr(right.flags)}"

    if check_index:
        # GH #38183
        assert_index_equal(
            left.index,
            right.index,
            exact=check_index_type,
            check_names=check_names,
            check_exact=check_exact_index,
            check_categorical=check_categorical,
            check_order=not check_like,
            rtol=rtol,
            atol=atol,
            obj=f"{obj}.index",
        )

    if check_like:
        # Align left to right's label order before comparing values.
        left = left.reindex_like(right)

    if check_freq and isinstance(left.index, (DatetimeIndex, TimedeltaIndex)):
        lidx = left.index
        ridx = right.index
        assert lidx.freq == ridx.freq, (lidx.freq, ridx.freq)

    if check_dtype:
        # We want to skip exact dtype checking when `check_categorical`
        # is False. We'll still raise if only one is a `Categorical`,
        # regardless of `check_categorical`
        if (
            isinstance(left.dtype, CategoricalDtype)
            and isinstance(right.dtype, CategoricalDtype)
            and not check_categorical
        ):
            pass
        else:
            assert_attr_equal("dtype", left, right, obj=f"Attributes of {obj}")
    # Value comparison: dispatch on check_exact / dtype combinations.
    # The branch order matters — exact comparison wins over all the
    # dtype-specific paths below.
    if check_exact:
        left_values = left._values
        right_values = right._values
        # Only check exact if dtype is numeric
        if isinstance(left_values, ExtensionArray) and isinstance(
            right_values, ExtensionArray
        ):
            assert_extension_array_equal(
                left_values,
                right_values,
                check_dtype=check_dtype,
                index_values=left.index,
                obj=str(obj),
            )
        else:
            # convert both to NumPy if not, check_dtype would raise earlier
            lv, rv = left_values, right_values
            if isinstance(left_values, ExtensionArray):
                lv = left_values.to_numpy()
            if isinstance(right_values, ExtensionArray):
                rv = right_values.to_numpy()
            assert_numpy_array_equal(
                lv,
                rv,
                check_dtype=check_dtype,
                obj=str(obj),
                index_values=left.index,
            )
    elif check_datetimelike_compat and (
        needs_i8_conversion(left.dtype) or needs_i8_conversion(right.dtype)
    ):
        # we want to check only if we have compat dtypes
        # e.g. integer and M|m are NOT compat, but we can simply check
        # the values in that case

        # datetimelike may have different objects (e.g. datetime.datetime
        # vs Timestamp) but will compare equal
        if not Index(left._values).equals(Index(right._values)):
            msg = (
                f"[datetimelike_compat=True] {left._values} "
                f"is not equal to {right._values}."
            )
            raise AssertionError(msg)
    elif isinstance(left.dtype, IntervalDtype) and isinstance(
        right.dtype, IntervalDtype
    ):
        assert_interval_array_equal(left.array, right.array)
    elif isinstance(left.dtype, CategoricalDtype) or isinstance(
        right.dtype, CategoricalDtype
    ):
        _testing.assert_almost_equal(
            left._values,
            right._values,
            rtol=rtol,
            atol=atol,
            check_dtype=bool(check_dtype),
            obj=str(obj),
            index_values=left.index,
        )
    elif isinstance(left.dtype, ExtensionDtype) and isinstance(
        right.dtype, ExtensionDtype
    ):
        assert_extension_array_equal(
            left._values,
            right._values,
            rtol=rtol,
            atol=atol,
            check_dtype=check_dtype,
            index_values=left.index,
            obj=str(obj),
        )
    elif is_extension_array_dtype_and_needs_i8_conversion(
        left.dtype, right.dtype
    ) or is_extension_array_dtype_and_needs_i8_conversion(right.dtype, left.dtype):
        # Mixed EA / datetime-like combination (GH #37609).
        assert_extension_array_equal(
            left._values,
            right._values,
            check_dtype=check_dtype,
            index_values=left.index,
            obj=str(obj),
        )
    elif needs_i8_conversion(left.dtype) and needs_i8_conversion(right.dtype):
        # DatetimeArray or TimedeltaArray
        assert_extension_array_equal(
            left._values,
            right._values,
            check_dtype=check_dtype,
            index_values=left.index,
            obj=str(obj),
        )
    else:
        # Fallback: approximate comparison for plain NumPy-backed values.
        _testing.assert_almost_equal(
            left._values,
            right._values,
            rtol=rtol,
            atol=atol,
            check_dtype=bool(check_dtype),
            obj=str(obj),
            index_values=left.index,
        )

    # metadata comparison
    if check_names:
        assert_attr_equal("name", left, right, obj=obj)

    if check_categorical:
        if isinstance(left.dtype, CategoricalDtype) or isinstance(
            right.dtype, CategoricalDtype
        ):
            assert_categorical_equal(
                left._values,
                right._values,
                obj=f"{obj} category",
                check_category_order=check_category_order,
            )
1091
+
1092
+
1093
# This could be refactored to use the NDFrame.equals method
def assert_frame_equal(
    left,
    right,
    check_dtype: bool | Literal["equiv"] = True,
    check_index_type: bool | Literal["equiv"] = "equiv",
    check_column_type: bool | Literal["equiv"] = "equiv",
    check_frame_type: bool = True,
    check_names: bool = True,
    by_blocks: bool = False,
    check_exact: bool | lib.NoDefault = lib.no_default,
    check_datetimelike_compat: bool = False,
    check_categorical: bool = True,
    check_like: bool = False,
    check_freq: bool = True,
    check_flags: bool = True,
    rtol: float | lib.NoDefault = lib.no_default,
    atol: float | lib.NoDefault = lib.no_default,
    obj: str = "DataFrame",
) -> None:
    """
    Check that left and right DataFrame are equal.

    This function is intended to compare two DataFrames and output any
    differences. It is mostly intended for use in unit tests.
    Additional parameters allow varying the strictness of the
    equality checks performed.

    Parameters
    ----------
    left : DataFrame
        First DataFrame to compare.
    right : DataFrame
        Second DataFrame to compare.
    check_dtype : bool, default True
        Whether to check the DataFrame dtype is identical.
    check_index_type : bool or {'equiv'}, default 'equiv'
        Whether to check the Index class, dtype and inferred_type
        are identical.
    check_column_type : bool or {'equiv'}, default 'equiv'
        Whether to check the columns class, dtype and inferred_type
        are identical. Is passed as the ``exact`` argument of
        :func:`assert_index_equal`.
    check_frame_type : bool, default True
        Whether to check the DataFrame class is identical.
    check_names : bool, default True
        Whether to check that the `names` attribute for both the `index`
        and `column` attributes of the DataFrame is identical.
    by_blocks : bool, default False
        Specify how to compare internal data. If False, compare by columns.
        If True, compare by blocks.
    check_exact : bool, default False
        Whether to compare number exactly.

        .. versionchanged:: 2.2.0

            Defaults to True for integer dtypes if none of
            ``check_exact``, ``rtol`` and ``atol`` are specified.
    check_datetimelike_compat : bool, default False
        Compare datetime-like which is comparable ignoring dtype.
    check_categorical : bool, default True
        Whether to compare internal Categorical exactly.
    check_like : bool, default False
        If True, ignore the order of index & columns.
        Note: index labels must match their respective rows
        (same as in columns) - same labels must be with the same data.
    check_freq : bool, default True
        Whether to check the `freq` attribute on a DatetimeIndex or TimedeltaIndex.
    check_flags : bool, default True
        Whether to check the `flags` attribute.
    rtol : float, default 1e-5
        Relative tolerance. Only used when check_exact is False.
    atol : float, default 1e-8
        Absolute tolerance. Only used when check_exact is False.
    obj : str, default 'DataFrame'
        Specify object name being compared, internally used to show appropriate
        assertion message.

    See Also
    --------
    assert_series_equal : Equivalent method for asserting Series equality.
    DataFrame.equals : Check DataFrame equality.

    Examples
    --------
    This example shows comparing two DataFrames that are equal
    but with columns of differing dtypes.

    >>> from pandas.testing import assert_frame_equal
    >>> df1 = pd.DataFrame({'a': [1, 2], 'b': [3, 4]})
    >>> df2 = pd.DataFrame({'a': [1, 2], 'b': [3.0, 4.0]})

    df1 equals itself.

    >>> assert_frame_equal(df1, df1)

    df1 differs from df2 as column 'b' is of a different type.

    >>> assert_frame_equal(df1, df2)
    Traceback (most recent call last):
    ...
    AssertionError: Attributes of DataFrame.iloc[:, 1] (column name="b") are different

    Attribute "dtype" are different
    [left]:  int64
    [right]: float64

    Ignore differing dtypes in columns with check_dtype.

    >>> assert_frame_equal(df1, df2, check_dtype=False)
    """
    # Hide this frame from pytest tracebacks so failures point at the caller.
    __tracebackhide__ = True
    # Resolve no_default sentinels for the axis comparisons only; the raw
    # (possibly no_default) check_exact/rtol/atol are forwarded to
    # assert_series_equal so the per-dtype defaulting happens per column.
    _rtol = rtol if rtol is not lib.no_default else 1.0e-5
    _atol = atol if atol is not lib.no_default else 1.0e-8
    _check_exact = check_exact if check_exact is not lib.no_default else False

    # instance validation
    _check_isinstance(left, right, DataFrame)

    if check_frame_type:
        assert isinstance(left, type(right))
        # assert_class_equal(left, right, obj=obj)

    # shape comparison
    if left.shape != right.shape:
        raise_assert_detail(
            obj, f"{obj} shape mismatch", f"{repr(left.shape)}", f"{repr(right.shape)}"
        )

    if check_flags:
        assert left.flags == right.flags, f"{repr(left.flags)} != {repr(right.flags)}"

    # index comparison
    assert_index_equal(
        left.index,
        right.index,
        exact=check_index_type,
        check_names=check_names,
        check_exact=_check_exact,
        check_categorical=check_categorical,
        check_order=not check_like,
        rtol=_rtol,
        atol=_atol,
        obj=f"{obj}.index",
    )

    # column comparison
    assert_index_equal(
        left.columns,
        right.columns,
        exact=check_column_type,
        check_names=check_names,
        check_exact=_check_exact,
        check_categorical=check_categorical,
        check_order=not check_like,
        rtol=_rtol,
        atol=_atol,
        obj=f"{obj}.columns",
    )

    if check_like:
        # Align left to right's row/column label order before value checks.
        left = left.reindex_like(right)

    # compare by blocks
    if by_blocks:
        rblocks = right._to_dict_of_blocks()
        lblocks = left._to_dict_of_blocks()
        for dtype in list(set(list(lblocks.keys()) + list(rblocks.keys()))):
            assert dtype in lblocks
            assert dtype in rblocks
            assert_frame_equal(
                lblocks[dtype], rblocks[dtype], check_dtype=check_dtype, obj=obj
            )

    # compare by columns
    else:
        for i, col in enumerate(left.columns):
            # We have already checked that columns match, so we can do
            # fast location-based lookups
            lcol = left._ixs(i, axis=1)
            rcol = right._ixs(i, axis=1)

            # GH #38183
            # use check_index=False, because we do not want to run
            # assert_index_equal for each column,
            # as we already checked it for the whole dataframe before.
            assert_series_equal(
                lcol,
                rcol,
                check_dtype=check_dtype,
                check_index_type=check_index_type,
                check_exact=check_exact,
                check_names=check_names,
                check_datetimelike_compat=check_datetimelike_compat,
                check_categorical=check_categorical,
                check_freq=check_freq,
                obj=f'{obj}.iloc[:, {i}] (column name="{col}")',
                rtol=rtol,
                atol=atol,
                check_index=False,
                check_flags=False,
            )
1295
+
1296
+
1297
def assert_equal(left, right, **kwargs) -> None:
    """
    Wrapper for tm.assert_*_equal to dispatch to the appropriate test function.

    Parameters
    ----------
    left, right : Index, Series, DataFrame, ExtensionArray, or np.ndarray
        The two items to be compared.
    **kwargs
        All keyword arguments are passed through to the underlying assert method.

    Raises
    ------
    AssertionError
        If the two objects are not equal (raised by the dispatched assert_*).
    """
    # Hide this frame from pytest tracebacks so failures point at the caller.
    __tracebackhide__ = True

    # Dispatch on the type of `left`; most-specific classes must be checked
    # before their bases (e.g. DatetimeArray before ExtensionArray).
    if isinstance(left, Index):
        assert_index_equal(left, right, **kwargs)
        # assert_index_equal does not compare freq, so check it here.
        if isinstance(left, (DatetimeIndex, TimedeltaIndex)):
            assert left.freq == right.freq, (left.freq, right.freq)
    elif isinstance(left, Series):
        assert_series_equal(left, right, **kwargs)
    elif isinstance(left, DataFrame):
        assert_frame_equal(left, right, **kwargs)
    elif isinstance(left, IntervalArray):
        assert_interval_array_equal(left, right, **kwargs)
    elif isinstance(left, PeriodArray):
        assert_period_array_equal(left, right, **kwargs)
    elif isinstance(left, DatetimeArray):
        assert_datetime_array_equal(left, right, **kwargs)
    elif isinstance(left, TimedeltaArray):
        assert_timedelta_array_equal(left, right, **kwargs)
    elif isinstance(left, ExtensionArray):
        assert_extension_array_equal(left, right, **kwargs)
    elif isinstance(left, np.ndarray):
        assert_numpy_array_equal(left, right, **kwargs)
    elif isinstance(left, str):
        # Scalar comparisons accept no extra keyword arguments.
        assert kwargs == {}
        assert left == right
    else:
        assert kwargs == {}
        assert_almost_equal(left, right)
1336
+
1337
+
1338
def assert_sp_array_equal(left, right) -> None:
    """
    Check that the left and right SparseArray are equal.

    Compares the sparse values, the SparseIndex, the fill value, the dtype,
    and finally the fully densified values.

    Parameters
    ----------
    left : SparseArray
    right : SparseArray

    Raises
    ------
    AssertionError
        If any of the compared attributes or values differ.
    """
    _check_isinstance(left, right, pd.arrays.SparseArray)

    assert_numpy_array_equal(left.sp_values, right.sp_values)

    # SparseIndex comparison
    assert isinstance(left.sp_index, SparseIndex)
    assert isinstance(right.sp_index, SparseIndex)

    left_index = left.sp_index
    right_index = right.sp_index

    if not left_index.equals(right_index):
        raise_assert_detail(
            "SparseArray.index", "index are not equal", left_index, right_index
        )
    else:
        # Indexes matched; nothing further to check for the sparse layout.
        pass

    assert_attr_equal("fill_value", left, right)
    assert_attr_equal("dtype", left, right)
    # Densify both sides and compare element-wise as a final sanity check.
    assert_numpy_array_equal(left.to_dense(), right.to_dense())
1369
+
1370
+
1371
def assert_contains_all(iterable, dic) -> None:
    """
    Assert that every element produced by ``iterable`` is contained in ``dic``.

    ``dic`` may be any object supporting the ``in`` operator (dict, set,
    string, ...). Raises ``AssertionError`` naming the first missing item.
    """
    for k in iterable:
        contained = k in dic
        assert contained, f"Did not contain item: {repr(k)}"
1374
+
1375
+
1376
def assert_copy(iter1, iter2, **eql_kwargs) -> None:
    """
    Check pairwise that elements are equal but not identical.

    iter1, iter2: iterables that produce elements
    comparable with assert_almost_equal

    Checks that the elements are equal, but not
    the same object. (Does not check that items
    in sequences are also not the same object)

    Parameters
    ----------
    iter1, iter2 : iterable
        Iterables compared pairwise (zipped; extra elements in the longer
        iterable are ignored).
    **eql_kwargs
        Passed through to assert_almost_equal for the equality check.
    """
    for elem1, elem2 in zip(iter1, iter2):
        # Value equality first ...
        assert_almost_equal(elem1, elem2, **eql_kwargs)
        msg = (
            f"Expected object {repr(type(elem1))} and object {repr(type(elem2))} to be "
            "different objects, but they were the same object."
        )
        # ... then identity: the elements must be distinct objects (a copy).
        assert elem1 is not elem2, msg
1392
+
1393
+
1394
def is_extension_array_dtype_and_needs_i8_conversion(
    left_dtype: DtypeObj, right_dtype: DtypeObj
) -> bool:
    """
    Check whether we have the combination of an ExtensionArray dtype on the
    left and a dtype on the right that should be converted to int64.

    Parameters
    ----------
    left_dtype, right_dtype : DtypeObj
        The dtypes of the two objects being compared.

    Returns
    -------
    bool

    Related to issue #37609
    """
    if not isinstance(left_dtype, ExtensionDtype):
        return False
    return needs_i8_conversion(right_dtype)
1408
+
1409
+
1410
def assert_indexing_slices_equivalent(ser: Series, l_slc: slice, i_slc: slice) -> None:
    """
    Check that ser.iloc[i_slc] matches ser.loc[l_slc] and, if applicable,
    ser[l_slc].

    Parameters
    ----------
    ser : Series
        The Series to slice.
    l_slc : slice
        Label-based slice, applied via ``.loc`` (and plain ``[]`` where valid).
    i_slc : slice
        Position-based slice, applied via ``.iloc``; defines the expected result.
    """
    expected = ser.iloc[i_slc]

    assert_series_equal(ser.loc[l_slc], expected)

    if not is_integer_dtype(ser.index):
        # For integer indices, .loc and plain getitem are position-based.
        # (so plain getitem with a label slice is only checked for
        # non-integer indexes)
        assert_series_equal(ser[l_slc], expected)
1422
+
1423
+
1424
def assert_metadata_equivalent(
    left: DataFrame | Series, right: DataFrame | Series | None = None
) -> None:
    """
    Check that ._metadata attributes are equivalent.

    Walks every attribute name listed in ``left._metadata``. When ``right``
    is None, each of those attributes on ``left`` must be None; otherwise
    the attribute values on both objects must compare equal (missing
    attributes are treated as None on either side).
    """
    for name in left._metadata:
        left_val = getattr(left, name, None)
        if right is None:
            assert left_val is None
        else:
            assert left_val == getattr(right, name, None)
falcon/lib/python3.10/site-packages/pandas/_testing/contexts.py ADDED
@@ -0,0 +1,257 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from contextlib import contextmanager
4
+ import os
5
+ from pathlib import Path
6
+ import tempfile
7
+ from typing import (
8
+ IO,
9
+ TYPE_CHECKING,
10
+ Any,
11
+ )
12
+ import uuid
13
+
14
+ from pandas._config import using_copy_on_write
15
+
16
+ from pandas.compat import PYPY
17
+ from pandas.errors import ChainedAssignmentError
18
+
19
+ from pandas import set_option
20
+
21
+ from pandas.io.common import get_handle
22
+
23
+ if TYPE_CHECKING:
24
+ from collections.abc import Generator
25
+
26
+ from pandas._typing import (
27
+ BaseBuffer,
28
+ CompressionOptions,
29
+ FilePath,
30
+ )
31
+
32
+
33
@contextmanager
def decompress_file(
    path: FilePath | BaseBuffer, compression: CompressionOptions
) -> Generator[IO[bytes], None, None]:
    """
    Open a compressed file and yield a binary file object.

    Parameters
    ----------
    path : str
        The path where the file is read from.

    compression : {'gzip', 'bz2', 'zip', 'xz', 'zstd', None}
        Name of the decompression to use

    Yields
    ------
    file object
        A binary handle that transparently decompresses; closed automatically
        when the context exits.
    """
    # get_handle takes care of choosing the decompressor and of cleanup.
    with get_handle(path, "rb", compression=compression, is_text=False) as handle:
        yield handle.handle
54
+
55
+
56
@contextmanager
def set_timezone(tz: str) -> Generator[None, None, None]:
    """
    Context manager for temporarily setting a timezone.

    Sets the ``TZ`` environment variable and calls ``time.tzset()`` on
    entry; restores the previous value (or unsets ``TZ``) on exit.

    Parameters
    ----------
    tz : str
        A string representing a valid timezone.

    Examples
    --------
    >>> from datetime import datetime
    >>> from dateutil.tz import tzlocal
    >>> tzlocal().tzname(datetime(2021, 1, 1)) # doctest: +SKIP
    'IST'

    >>> with set_timezone('US/Eastern'):
    ...     tzlocal().tzname(datetime(2021, 1, 1))
    ...
    'EST'
    """
    import time

    def _apply(zone) -> None:
        # None means "no TZ was set": remove the variable entirely.
        if zone is None:
            os.environ.pop("TZ", None)
        else:
            os.environ["TZ"] = zone
        # NOTE: time.tzset() is only available on Unix.
        time.tzset()

    previous = os.environ.get("TZ")
    _apply(tz)
    try:
        yield
    finally:
        _apply(previous)
96
+
97
+
98
@contextmanager
def ensure_clean(
    filename=None, return_filelike: bool = False, **kwargs: Any
) -> Generator[Any, None, None]:
    """
    Gets a temporary path and agrees to remove on close.

    This implementation does not use tempfile.mkstemp to avoid having a file handle.
    If the code using the returned path wants to delete the file itself, windows
    requires that no program has a file handle to it.

    Parameters
    ----------
    filename : str (optional)
        suffix of the created file.
    return_filelike : bool (default False)
        if True, returns a file-like which is *always* cleaned. Necessary for
        savefig and other functions which want to append extensions.
    **kwargs
        Additional keywords are passed to open().

    """
    suffix = "" if filename is None else filename
    # Prefix with a uuid so concurrent tests never collide on the same path.
    path = Path(tempfile.gettempdir()) / (str(uuid.uuid4()) + suffix)
    path.touch()

    encoding = kwargs.pop("encoding", None)
    result: str | IO = str(path)
    if return_filelike:
        kwargs.setdefault("mode", "w+b")
        # Text-mode handles default to UTF-8 unless the caller overrides it.
        if encoding is None and "b" not in kwargs["mode"]:
            encoding = "utf-8"
        result = open(path, encoding=encoding, **kwargs)

    try:
        yield result
    finally:
        if not isinstance(result, str):
            result.close()
        if path.is_file():
            path.unlink()
144
+
145
+
146
@contextmanager
def with_csv_dialect(name: str, **kwargs) -> Generator[None, None, None]:
    """
    Context manager to temporarily register a CSV dialect for parsing CSV.

    The dialect is registered on entry and unregistered on exit, even if the
    body raises.

    Parameters
    ----------
    name : str
        The name of the dialect.
    kwargs : mapping
        The parameters for the dialect.

    Raises
    ------
    ValueError : the name of the dialect conflicts with a builtin one.

    See Also
    --------
    csv : Python's CSV library.
    """
    import csv

    builtin_dialects = {"excel", "excel-tab", "unix"}
    if name in builtin_dialects:
        raise ValueError("Cannot override builtin dialect.")

    csv.register_dialect(name, **kwargs)
    try:
        yield
    finally:
        csv.unregister_dialect(name)
178
+
179
+
180
@contextmanager
def use_numexpr(use, min_elements=None) -> Generator[None, None, None]:
    """
    Temporarily toggle the "compute.use_numexpr" option.

    Also overrides ``expressions._MIN_ELEMENTS`` (the minimum array size at
    which numexpr is used) for the duration of the context; both settings
    are restored on exit.

    Parameters
    ----------
    use : bool
        Value for the "compute.use_numexpr" option inside the context.
    min_elements : int, optional
        Override for ``expressions._MIN_ELEMENTS``; defaults to the current
        value (i.e. no change).
    """
    from pandas.core.computation import expressions as expr

    if min_elements is None:
        min_elements = expr._MIN_ELEMENTS

    # Save the current state so it can be restored in the finally block.
    olduse = expr.USE_NUMEXPR
    oldmin = expr._MIN_ELEMENTS
    set_option("compute.use_numexpr", use)
    expr._MIN_ELEMENTS = min_elements
    try:
        yield
    finally:
        expr._MIN_ELEMENTS = oldmin
        set_option("compute.use_numexpr", olduse)
196
+
197
+
198
def raises_chained_assignment_error(warn=True, extra_warnings=(), extra_match=()):
    """
    Return a context manager asserting the chained-assignment warning.

    Under copy-on-write the expected warning is ``ChainedAssignmentError``;
    otherwise a ``FutureWarning`` is expected. On PyPy no warning is expected
    (unless ``extra_warnings`` are given, in which case only those are
    checked).

    Parameters
    ----------
    warn : bool, default True
        If False, return a no-op context manager (no warning expected).
    extra_warnings : tuple of warning classes, default ()
        Additional warning classes to expect alongside the main one.
    extra_match : tuple of str, default ()
        Additional regex fragments OR-ed into the match pattern.
    """
    from pandas._testing import assert_produces_warning

    if not warn:
        from contextlib import nullcontext

        return nullcontext()

    # PyPy does not emit the chained-assignment warning itself.
    if PYPY and not extra_warnings:
        from contextlib import nullcontext

        return nullcontext()
    elif PYPY and extra_warnings:
        return assert_produces_warning(
            extra_warnings,
            match="|".join(extra_match),
        )
    else:
        if using_copy_on_write():
            warning = ChainedAssignmentError
            match = (
                "A value is trying to be set on a copy of a DataFrame or Series "
                "through chained assignment"
            )
        else:
            warning = FutureWarning  # type: ignore[assignment]
            # TODO update match
            match = "ChainedAssignmentError"
        if extra_warnings:
            warning = (warning, *extra_warnings)  # type: ignore[assignment]
        return assert_produces_warning(
            warning,
            match="|".join((match, *extra_match)),
        )
232
+
233
+
234
def assert_cow_warning(warn=True, match=None, **kwargs):
    """
    Assert that a warning is raised in the CoW warning mode.

    Parameters
    ----------
    warn : bool, default True
        By default, check that a warning is raised. Can be turned off by passing False.
    match : str
        The warning message to match against, if different from the default.
    kwargs
        Passed through to assert_produces_warning

    Returns
    -------
    context manager
        Either a no-op context (``warn=False``) or an
        ``assert_produces_warning`` context expecting a FutureWarning.
    """
    from pandas._testing import assert_produces_warning

    if not warn:
        from contextlib import nullcontext

        return nullcontext()

    # Default message emitted by the copy-on-write warning machinery.
    if not match:
        match = "Setting a value on a view"

    return assert_produces_warning(FutureWarning, match=match, **kwargs)
falcon/lib/python3.10/site-packages/pandas/api/__init__.py ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ public toolkit API """
2
+ from pandas.api import (
3
+ extensions,
4
+ indexers,
5
+ interchange,
6
+ types,
7
+ typing,
8
+ )
9
+
10
+ __all__ = [
11
+ "interchange",
12
+ "extensions",
13
+ "indexers",
14
+ "types",
15
+ "typing",
16
+ ]
falcon/lib/python3.10/site-packages/pandas/api/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (364 Bytes). View file
 
falcon/lib/python3.10/site-packages/pandas/api/extensions/__init__.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Public API for extending pandas objects.
3
+ """
4
+
5
+ from pandas._libs.lib import no_default
6
+
7
+ from pandas.core.dtypes.base import (
8
+ ExtensionDtype,
9
+ register_extension_dtype,
10
+ )
11
+
12
+ from pandas.core.accessor import (
13
+ register_dataframe_accessor,
14
+ register_index_accessor,
15
+ register_series_accessor,
16
+ )
17
+ from pandas.core.algorithms import take
18
+ from pandas.core.arrays import (
19
+ ExtensionArray,
20
+ ExtensionScalarOpsMixin,
21
+ )
22
+
23
+ __all__ = [
24
+ "no_default",
25
+ "ExtensionDtype",
26
+ "register_extension_dtype",
27
+ "register_dataframe_accessor",
28
+ "register_index_accessor",
29
+ "register_series_accessor",
30
+ "take",
31
+ "ExtensionArray",
32
+ "ExtensionScalarOpsMixin",
33
+ ]
falcon/lib/python3.10/site-packages/pandas/api/extensions/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (729 Bytes). View file
 
falcon/lib/python3.10/site-packages/pandas/api/indexers/__init__.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Public API for Rolling Window Indexers.
3
+ """
4
+
5
+ from pandas.core.indexers import check_array_indexer
6
+ from pandas.core.indexers.objects import (
7
+ BaseIndexer,
8
+ FixedForwardWindowIndexer,
9
+ VariableOffsetWindowIndexer,
10
+ )
11
+
12
+ __all__ = [
13
+ "check_array_indexer",
14
+ "BaseIndexer",
15
+ "FixedForwardWindowIndexer",
16
+ "VariableOffsetWindowIndexer",
17
+ ]
falcon/lib/python3.10/site-packages/pandas/api/indexers/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (472 Bytes). View file
 
falcon/lib/python3.10/site-packages/pandas/api/interchange/__init__.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Public API for DataFrame interchange protocol.
3
+ """
4
+
5
+ from pandas.core.interchange.dataframe_protocol import DataFrame
6
+ from pandas.core.interchange.from_dataframe import from_dataframe
7
+
8
+ __all__ = ["from_dataframe", "DataFrame"]
falcon/lib/python3.10/site-packages/pandas/api/interchange/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (421 Bytes). View file
 
falcon/lib/python3.10/site-packages/pandas/api/types/__init__.py ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Public toolkit API.
3
+ """
4
+
5
+ from pandas._libs.lib import infer_dtype
6
+
7
+ from pandas.core.dtypes.api import * # noqa: F403
8
+ from pandas.core.dtypes.concat import union_categoricals
9
+ from pandas.core.dtypes.dtypes import (
10
+ CategoricalDtype,
11
+ DatetimeTZDtype,
12
+ IntervalDtype,
13
+ PeriodDtype,
14
+ )
15
+
16
+ __all__ = [
17
+ "infer_dtype",
18
+ "union_categoricals",
19
+ "CategoricalDtype",
20
+ "DatetimeTZDtype",
21
+ "IntervalDtype",
22
+ "PeriodDtype",
23
+ ]
falcon/lib/python3.10/site-packages/pandas/api/types/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (554 Bytes). View file
 
falcon/lib/python3.10/site-packages/pandas/api/typing/__init__.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Public API classes that store intermediate results useful for type-hinting.
3
+ """
4
+
5
+ from pandas._libs import NaTType
6
+ from pandas._libs.missing import NAType
7
+
8
+ from pandas.core.groupby import (
9
+ DataFrameGroupBy,
10
+ SeriesGroupBy,
11
+ )
12
+ from pandas.core.resample import (
13
+ DatetimeIndexResamplerGroupby,
14
+ PeriodIndexResamplerGroupby,
15
+ Resampler,
16
+ TimedeltaIndexResamplerGroupby,
17
+ TimeGrouper,
18
+ )
19
+ from pandas.core.window import (
20
+ Expanding,
21
+ ExpandingGroupby,
22
+ ExponentialMovingWindow,
23
+ ExponentialMovingWindowGroupby,
24
+ Rolling,
25
+ RollingGroupby,
26
+ Window,
27
+ )
28
+
29
+ # TODO: Can't import Styler without importing jinja2
30
+ # from pandas.io.formats.style import Styler
31
+ from pandas.io.json._json import JsonReader
32
+ from pandas.io.stata import StataReader
33
+
34
+ __all__ = [
35
+ "DataFrameGroupBy",
36
+ "DatetimeIndexResamplerGroupby",
37
+ "Expanding",
38
+ "ExpandingGroupby",
39
+ "ExponentialMovingWindow",
40
+ "ExponentialMovingWindowGroupby",
41
+ "JsonReader",
42
+ "NaTType",
43
+ "NAType",
44
+ "PeriodIndexResamplerGroupby",
45
+ "Resampler",
46
+ "Rolling",
47
+ "RollingGroupby",
48
+ "SeriesGroupBy",
49
+ "StataReader",
50
+ # See TODO above
51
+ # "Styler",
52
+ "TimedeltaIndexResamplerGroupby",
53
+ "TimeGrouper",
54
+ "Window",
55
+ ]
falcon/lib/python3.10/site-packages/pandas/api/typing/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.07 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/compat/__init__.py ADDED
@@ -0,0 +1,199 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ compat
3
+ ======
4
+
5
+ Cross-compatible functions for different versions of Python.
6
+
7
+ Other items:
8
+ * platform checker
9
+ """
10
+ from __future__ import annotations
11
+
12
+ import os
13
+ import platform
14
+ import sys
15
+ from typing import TYPE_CHECKING
16
+
17
+ from pandas.compat._constants import (
18
+ IS64,
19
+ ISMUSL,
20
+ PY310,
21
+ PY311,
22
+ PY312,
23
+ PYPY,
24
+ )
25
+ import pandas.compat.compressors
26
+ from pandas.compat.numpy import is_numpy_dev
27
+ from pandas.compat.pyarrow import (
28
+ pa_version_under10p1,
29
+ pa_version_under11p0,
30
+ pa_version_under13p0,
31
+ pa_version_under14p0,
32
+ pa_version_under14p1,
33
+ pa_version_under16p0,
34
+ pa_version_under17p0,
35
+ )
36
+
37
+ if TYPE_CHECKING:
38
+ from pandas._typing import F
39
+
40
+
41
def set_function_name(f: F, name: str, cls: type) -> F:
    """
    Rebind a function's identity attributes so it presents as a method of *cls*.

    Sets ``__name__``, ``__qualname__`` and ``__module__`` on *f* in place and
    returns the same function object for convenience.
    """
    f.__qualname__ = f"{cls.__name__}.{name}"
    f.__name__ = name
    f.__module__ = cls.__module__
    return f
49
+
50
+
51
def is_platform_little_endian() -> bool:
    """
    Determine whether the running platform is little endian.

    Returns
    -------
    bool
        True if the running platform is little endian.
    """
    return "little" == sys.byteorder
61
+
62
+
63
def is_platform_windows() -> bool:
    """
    Determine whether the interpreter is running on Windows.

    Returns
    -------
    bool
        True if the running platform is windows (native or Cygwin).
    """
    return sys.platform in ("win32", "cygwin")
73
+
74
+
75
def is_platform_linux() -> bool:
    """
    Determine whether the interpreter is running on Linux.

    Returns
    -------
    bool
        True if the running platform is linux.
    """
    return "linux" == sys.platform
85
+
86
+
87
def is_platform_mac() -> bool:
    """
    Determine whether the interpreter is running on macOS.

    Returns
    -------
    bool
        True if the running platform is mac.
    """
    return "darwin" == sys.platform
97
+
98
+
99
def is_platform_arm() -> bool:
    """
    Determine whether the running platform uses an ARM architecture.

    Returns
    -------
    bool
        True if the running platform uses ARM architecture (``arm64``,
        ``aarch64``, or any 32-bit ``armv*`` machine string).
    """
    machine = platform.machine()
    return machine in ("arm64", "aarch64") or machine.startswith("armv")
111
+
112
+
113
def is_platform_power() -> bool:
    """
    Checking if the running platform use Power architecture.

    Returns
    -------
    bool
        True if the running platform uses Power architecture.
    """
    # Docstring fixed: it previously claimed this returned True for ARM,
    # copied from is_platform_arm; the check is for ppc64/ppc64le machines.
    return platform.machine() in ("ppc64", "ppc64le")
123
+
124
+
125
def is_ci_environment() -> bool:
    """
    Determine whether we are running in a continuous-integration environment
    by inspecting the ``PANDAS_CI`` environment variable.

    Returns
    -------
    bool
        True if running in a continuous integration environment.
    """
    flag = os.environ.get("PANDAS_CI", "0")
    return flag == "1"
136
+
137
+
138
def get_lzma_file() -> type[pandas.compat.compressors.LZMAFile]:
    """
    Importing the `LZMAFile` class from the `lzma` module.

    Returns
    -------
    class
        The `LZMAFile` class from the `lzma` module.

    Raises
    ------
    RuntimeError
        If the `lzma` module was not imported correctly, or didn't exist.
    """
    if pandas.compat.compressors.has_lzma:
        return pandas.compat.compressors.LZMAFile
    raise RuntimeError(
        "lzma module not available. "
        "A Python re-install with the proper dependencies, "
        "might be required to solve this issue."
    )
159
+
160
+
161
+ def get_bz2_file() -> type[pandas.compat.compressors.BZ2File]:
162
+ """
163
+ Importing the `BZ2File` class from the `bz2` module.
164
+
165
+ Returns
166
+ -------
167
+ class
168
+ The `BZ2File` class from the `bz2` module.
169
+
170
+ Raises
171
+ ------
172
+ RuntimeError
173
+ If the `bz2` module was not imported correctly, or didn't exist.
174
+ """
175
+ if not pandas.compat.compressors.has_bz2:
176
+ raise RuntimeError(
177
+ "bz2 module not available. "
178
+ "A Python re-install with the proper dependencies, "
179
+ "might be required to solve this issue."
180
+ )
181
+ return pandas.compat.compressors.BZ2File
182
+
183
+
184
+ __all__ = [
185
+ "is_numpy_dev",
186
+ "pa_version_under10p1",
187
+ "pa_version_under11p0",
188
+ "pa_version_under13p0",
189
+ "pa_version_under14p0",
190
+ "pa_version_under14p1",
191
+ "pa_version_under16p0",
192
+ "pa_version_under17p0",
193
+ "IS64",
194
+ "ISMUSL",
195
+ "PY310",
196
+ "PY311",
197
+ "PY312",
198
+ "PYPY",
199
+ ]
falcon/lib/python3.10/site-packages/pandas/compat/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (4.74 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/compat/__pycache__/_constants.cpython-310.pyc ADDED
Binary file (703 Bytes). View file
 
falcon/lib/python3.10/site-packages/pandas/compat/__pycache__/_optional.cpython-310.pyc ADDED
Binary file (4.36 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/compat/__pycache__/compressors.cpython-310.pyc ADDED
Binary file (1.73 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/compat/__pycache__/pickle_compat.cpython-310.pyc ADDED
Binary file (5.66 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/compat/__pycache__/pyarrow.cpython-310.pyc ADDED
Binary file (894 Bytes). View file
 
falcon/lib/python3.10/site-packages/pandas/compat/_constants.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ _constants
3
+ ======
4
+
5
+ Constants relevant for the Python implementation.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import platform
11
+ import sys
12
+ import sysconfig
13
+
14
+ IS64 = sys.maxsize > 2**32
15
+
16
+ PY310 = sys.version_info >= (3, 10)
17
+ PY311 = sys.version_info >= (3, 11)
18
+ PY312 = sys.version_info >= (3, 12)
19
+ PYPY = platform.python_implementation() == "PyPy"
20
+ ISMUSL = "musl" in (sysconfig.get_config_var("HOST_GNU_TYPE") or "")
21
+ REF_COUNT = 2 if PY311 else 3
22
+
23
+ __all__ = [
24
+ "IS64",
25
+ "ISMUSL",
26
+ "PY310",
27
+ "PY311",
28
+ "PY312",
29
+ "PYPY",
30
+ ]
falcon/lib/python3.10/site-packages/pandas/compat/_optional.py ADDED
@@ -0,0 +1,168 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import importlib
4
+ import sys
5
+ from typing import TYPE_CHECKING
6
+ import warnings
7
+
8
+ from pandas.util._exceptions import find_stack_level
9
+
10
+ from pandas.util.version import Version
11
+
12
+ if TYPE_CHECKING:
13
+ import types
14
+
15
+ # Update install.rst & setup.cfg when updating versions!
16
+
17
# Minimum supported versions of optional dependencies, keyed by import name.
VERSIONS = {
    "adbc-driver-postgresql": "0.8.0",
    "adbc-driver-sqlite": "0.8.0",
    "bs4": "4.11.2",
    "blosc": "1.21.3",
    "bottleneck": "1.3.6",
    "dataframe-api-compat": "0.1.7",
    "fastparquet": "2022.12.0",
    "fsspec": "2022.11.0",
    "html5lib": "1.1",
    "hypothesis": "6.46.1",
    "gcsfs": "2022.11.0",
    "jinja2": "3.1.2",
    "lxml.etree": "4.9.2",
    "matplotlib": "3.6.3",
    "numba": "0.56.4",
    "numexpr": "2.8.4",
    "odfpy": "1.4.1",
    "openpyxl": "3.1.0",
    "pandas_gbq": "0.19.0",
    "psycopg2": "2.9.6",  # (dt dec pq3 ext lo64)
    "pymysql": "1.0.2",
    "pyarrow": "10.0.1",
    "pyreadstat": "1.2.0",
    "pytest": "7.3.2",
    "python-calamine": "0.1.7",
    "pyxlsb": "1.0.10",
    "s3fs": "2022.11.0",
    "scipy": "1.10.0",
    "sqlalchemy": "2.0.0",
    "tables": "3.8.0",
    "tabulate": "0.9.0",
    "xarray": "2022.12.0",
    "xlrd": "2.0.1",
    "xlsxwriter": "3.0.5",
    "zstandard": "0.19.0",
    "tzdata": "2022.7",
    "qtpy": "2.3.0",
    "pyqt5": "5.15.9",
}

# A mapping from import name to package name (on PyPI) for packages where
# these two names are different.
INSTALL_MAPPING = {
    "bs4": "beautifulsoup4",
    "bottleneck": "Bottleneck",
    "jinja2": "Jinja2",
    "lxml.etree": "lxml",
    "odf": "odfpy",
    "pandas_gbq": "pandas-gbq",
    "python_calamine": "python-calamine",
    "sqlalchemy": "SQLAlchemy",
    "tables": "pytables",
}
72
+
73
+
74
def get_version(module: types.ModuleType) -> str:
    """
    Return the version string of *module*.

    Raises
    ------
    ImportError
        If the module exposes no ``__version__`` attribute.
    """
    version = getattr(module, "__version__", None)
    if version is None:
        raise ImportError(f"Can't determine version for {module.__name__}")
    if module.__name__ == "psycopg2":
        # psycopg2 appends " (dt dec pq3 ext lo64)" to it's version
        version = version.split()[0]
    return version
83
+
84
+
85
def import_optional_dependency(
    name: str,
    extra: str = "",
    errors: str = "raise",
    min_version: str | None = None,
):
    """
    Import an optional dependency.

    By default, if a dependency is missing an ImportError with a nice
    message will be raised. If a dependency is present, but too old,
    we raise.

    Parameters
    ----------
    name : str
        The module name.
    extra : str
        Additional text to include in the ImportError message.
    errors : str {'raise', 'warn', 'ignore'}
        What to do when a dependency is not found or its version is too old.

        * raise : Raise an ImportError
        * warn : Only applicable when a module's version is too old.
          Warns that the version is too old and returns None
        * ignore: If the module is not installed, return None, otherwise,
          return the module, even if the version is too old.
          It's expected that users validate the version locally when
          using ``errors="ignore"`` (see. ``io/html.py``)
    min_version : str, default None
        Specify a minimum version that is different from the global pandas
        minimum version required.

    Returns
    -------
    maybe_module : Optional[ModuleType]
        The imported module, when found and the version is correct.
        None is returned when the package is not found and `errors`
        is ``'ignore'``, or when the package's version is too old and
        `errors` is ``'warn'`` or ``'ignore'``.
    """
    # Docstring fixed: "version is to old" typo and the stale claim that
    # None is returned when ``errors`` "is False" — the actual sentinel
    # value is the string 'ignore'.
    assert errors in {"warn", "raise", "ignore"}

    # Resolve the PyPI install name when it differs from the import name.
    package_name = INSTALL_MAPPING.get(name)
    install_name = package_name if package_name is not None else name

    msg = (
        f"Missing optional dependency '{install_name}'. {extra} "
        f"Use pip or conda to install {install_name}."
    )
    try:
        module = importlib.import_module(name)
    except ImportError:
        if errors == "raise":
            raise ImportError(msg)
        return None

    # Handle submodules: if we have a submodule, grab the parent module from
    # sys.modules — version metadata lives on the top-level package.
    parent = name.split(".")[0]
    if parent != name:
        install_name = parent
        module_to_get = sys.modules[install_name]
    else:
        module_to_get = module
    minimum_version = min_version if min_version is not None else VERSIONS.get(parent)
    if minimum_version:
        version = get_version(module_to_get)
        if version and Version(version) < Version(minimum_version):
            msg = (
                f"Pandas requires version '{minimum_version}' or newer of '{parent}' "
                f"(version '{version}' currently installed)."
            )
            if errors == "warn":
                warnings.warn(
                    msg,
                    UserWarning,
                    stacklevel=find_stack_level(),
                )
                return None
            elif errors == "raise":
                raise ImportError(msg)
            else:
                # errors == "ignore": hand back the module even though it
                # fails the version check; callers opt into validating.
                return None

    return module
falcon/lib/python3.10/site-packages/pandas/compat/compressors.py ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Patched ``BZ2File`` and ``LZMAFile`` to handle pickle protocol 5.
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ from pickle import PickleBuffer
8
+
9
+ from pandas.compat._constants import PY310
10
+
11
# Feature-detect the optional compression modules; either can be missing
# when CPython was built without the corresponding library.
has_bz2 = True
try:
    import bz2
except ImportError:
    has_bz2 = False

has_lzma = True
try:
    import lzma
except ImportError:
    has_lzma = False
24
+
25
+
26
def flatten_buffer(
    b: bytes | bytearray | memoryview | PickleBuffer,
) -> bytes | bytearray | memoryview:
    """
    Return some 1-D `uint8` typed buffer.

    ``bytes``/``bytearray`` inputs are returned untouched; anything else is
    coerced through ``PickleBuffer`` without copying when possible (a copy is
    made only for non-contiguous buffers).
    """
    if isinstance(b, (bytes, bytearray)):
        return b

    pb = b if isinstance(b, PickleBuffer) else PickleBuffer(b)

    try:
        # Zero-copy path: 1-D uint8 C-contiguous memoryview.
        return pb.raw()
    except BufferError:
        # Non-contiguous buffer: fall back to an in-memory copy.
        return memoryview(pb).tobytes("A")
48
+
49
+
50
+ if has_bz2:
51
+
52
+ class BZ2File(bz2.BZ2File):
53
+ if not PY310:
54
+
55
+ def write(self, b) -> int:
56
+ # Workaround issue where `bz2.BZ2File` expects `len`
57
+ # to return the number of bytes in `b` by converting
58
+ # `b` into something that meets that constraint with
59
+ # minimal copying.
60
+ #
61
+ # Note: This is fixed in Python 3.10.
62
+ return super().write(flatten_buffer(b))
63
+
64
+
65
if has_lzma:

    class LZMAFile(lzma.LZMAFile):
        if not PY310:

            def write(self, b) -> int:
                # On Python < 3.10 `lzma.LZMAFile` uses `len(b)` as the byte
                # count, which is wrong for buffer-protocol objects whose
                # len() is not a byte count; flatten the buffer first with
                # minimal copying. (Fixed upstream in Python 3.10.)
                data = flatten_buffer(b)
                return super().write(data)
falcon/lib/python3.10/site-packages/pandas/compat/numpy/__init__.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ support numpy compatibility across versions """
2
+ import warnings
3
+
4
+ import numpy as np
5
+
6
+ from pandas.util.version import Version
7
+
8
+ # numpy versioning
9
+ _np_version = np.__version__
10
+ _nlv = Version(_np_version)
11
+ np_version_lt1p23 = _nlv < Version("1.23")
12
+ np_version_gte1p24 = _nlv >= Version("1.24")
13
+ np_version_gte1p24p3 = _nlv >= Version("1.24.3")
14
+ np_version_gte1p25 = _nlv >= Version("1.25")
15
+ np_version_gt2 = _nlv >= Version("2.0.0")
16
+ is_numpy_dev = _nlv.dev is not None
17
+ _min_numpy_ver = "1.22.4"
18
+
19
+
20
+ if _nlv < Version(_min_numpy_ver):
21
+ raise ImportError(
22
+ f"this version of pandas is incompatible with numpy < {_min_numpy_ver}\n"
23
+ f"your numpy version is {_np_version}.\n"
24
+ f"Please upgrade numpy to >= {_min_numpy_ver} to use this pandas version"
25
+ )
26
+
27
+
28
+ np_long: type
29
+ np_ulong: type
30
+
31
+ if np_version_gt2:
32
+ try:
33
+ with warnings.catch_warnings():
34
+ warnings.filterwarnings(
35
+ "ignore",
36
+ r".*In the future `np\.long` will be defined as.*",
37
+ FutureWarning,
38
+ )
39
+ np_long = np.long # type: ignore[attr-defined]
40
+ np_ulong = np.ulong # type: ignore[attr-defined]
41
+ except AttributeError:
42
+ np_long = np.int_
43
+ np_ulong = np.uint
44
+ else:
45
+ np_long = np.int_
46
+ np_ulong = np.uint
47
+
48
+
49
+ __all__ = [
50
+ "np",
51
+ "_np_version",
52
+ "is_numpy_dev",
53
+ ]
falcon/lib/python3.10/site-packages/pandas/compat/numpy/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.25 kB). View file
 
falcon/lib/python3.10/site-packages/pandas/compat/numpy/__pycache__/function.cpython-310.pyc ADDED
Binary file (10.5 kB). View file