koichi12 commited on
Commit
fdaa370
·
verified ·
1 Parent(s): 8d80d71

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +2 -0
  2. .venv/lib/python3.11/site-packages/aiohttp_cors-0.7.0.dist-info/DESCRIPTION.rst +614 -0
  3. .venv/lib/python3.11/site-packages/cpuinfo/__pycache__/cpuinfo.cpython-311.pyc +3 -0
  4. .venv/lib/python3.11/site-packages/google/auth/_credentials_base.py +75 -0
  5. .venv/lib/python3.11/site-packages/google/auth/_default.py +719 -0
  6. .venv/lib/python3.11/site-packages/google/auth/_default_async.py +282 -0
  7. .venv/lib/python3.11/site-packages/google/auth/_helpers.py +273 -0
  8. .venv/lib/python3.11/site-packages/google/auth/_jwt_async.py +164 -0
  9. .venv/lib/python3.11/site-packages/google/auth/_oauth2client.py +167 -0
  10. .venv/lib/python3.11/site-packages/google/auth/_refresh_worker.py +109 -0
  11. .venv/lib/python3.11/site-packages/google/auth/_service_account_info.py +80 -0
  12. .venv/lib/python3.11/site-packages/google/auth/app_engine.py +180 -0
  13. .venv/lib/python3.11/site-packages/google/auth/credentials.py +522 -0
  14. .venv/lib/python3.11/site-packages/google/auth/environment_vars.py +84 -0
  15. .venv/lib/python3.11/site-packages/google/auth/exceptions.py +108 -0
  16. .venv/lib/python3.11/site-packages/google/auth/external_account.py +628 -0
  17. .venv/lib/python3.11/site-packages/google/auth/iam.py +136 -0
  18. .venv/lib/python3.11/site-packages/google/auth/identity_pool.py +439 -0
  19. .venv/lib/python3.11/site-packages/google/auth/impersonated_credentials.py +579 -0
  20. .venv/lib/python3.11/site-packages/google/auth/jwt.py +878 -0
  21. .venv/lib/python3.11/site-packages/google/auth/metrics.py +154 -0
  22. .venv/lib/python3.11/site-packages/google/auth/pluggable.py +429 -0
  23. .venv/lib/python3.11/site-packages/google/auth/py.typed +2 -0
  24. .venv/lib/python3.11/site-packages/google/auth/version.py +15 -0
  25. .venv/lib/python3.11/site-packages/h11/__init__.py +62 -0
  26. .venv/lib/python3.11/site-packages/h11/__pycache__/__init__.cpython-311.pyc +0 -0
  27. .venv/lib/python3.11/site-packages/h11/__pycache__/_abnf.cpython-311.pyc +0 -0
  28. .venv/lib/python3.11/site-packages/h11/__pycache__/_connection.cpython-311.pyc +0 -0
  29. .venv/lib/python3.11/site-packages/h11/__pycache__/_events.cpython-311.pyc +0 -0
  30. .venv/lib/python3.11/site-packages/h11/__pycache__/_headers.cpython-311.pyc +0 -0
  31. .venv/lib/python3.11/site-packages/h11/__pycache__/_readers.cpython-311.pyc +0 -0
  32. .venv/lib/python3.11/site-packages/h11/__pycache__/_receivebuffer.cpython-311.pyc +0 -0
  33. .venv/lib/python3.11/site-packages/h11/__pycache__/_state.cpython-311.pyc +0 -0
  34. .venv/lib/python3.11/site-packages/h11/__pycache__/_util.cpython-311.pyc +0 -0
  35. .venv/lib/python3.11/site-packages/h11/__pycache__/_version.cpython-311.pyc +0 -0
  36. .venv/lib/python3.11/site-packages/h11/__pycache__/_writers.cpython-311.pyc +0 -0
  37. .venv/lib/python3.11/site-packages/h11/_abnf.py +132 -0
  38. .venv/lib/python3.11/site-packages/h11/_connection.py +633 -0
  39. .venv/lib/python3.11/site-packages/h11/_events.py +369 -0
  40. .venv/lib/python3.11/site-packages/h11/_headers.py +278 -0
  41. .venv/lib/python3.11/site-packages/h11/_readers.py +247 -0
  42. .venv/lib/python3.11/site-packages/h11/_receivebuffer.py +153 -0
  43. .venv/lib/python3.11/site-packages/h11/_state.py +367 -0
  44. .venv/lib/python3.11/site-packages/h11/_util.py +135 -0
  45. .venv/lib/python3.11/site-packages/h11/_version.py +16 -0
  46. .venv/lib/python3.11/site-packages/h11/_writers.py +145 -0
  47. .venv/lib/python3.11/site-packages/h11/py.typed +1 -0
  48. .venv/lib/python3.11/site-packages/h11/tests/__init__.py +0 -0
  49. .venv/lib/python3.11/site-packages/h11/tests/__pycache__/__init__.cpython-311.pyc +0 -0
  50. .venv/lib/python3.11/site-packages/h11/tests/__pycache__/helpers.cpython-311.pyc +0 -0
.gitattributes CHANGED
@@ -184,3 +184,5 @@ tuning-competition-baseline/.venv/lib/python3.11/site-packages/torch/_inductor/_
184
  .venv/lib/python3.11/site-packages/ray/core/src/ray/gcs/gcs_server filter=lfs diff=lfs merge=lfs -text
185
  .venv/lib/python3.11/site-packages/ray/thirdparty_files/psutil/__pycache__/_pslinux.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
186
  .venv/lib/python3.11/site-packages/ray/scripts/__pycache__/scripts.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
 
 
 
184
  .venv/lib/python3.11/site-packages/ray/core/src/ray/gcs/gcs_server filter=lfs diff=lfs merge=lfs -text
185
  .venv/lib/python3.11/site-packages/ray/thirdparty_files/psutil/__pycache__/_pslinux.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
186
  .venv/lib/python3.11/site-packages/ray/scripts/__pycache__/scripts.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
187
+ .venv/lib/python3.11/site-packages/cpuinfo/__pycache__/cpuinfo.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
188
+ .venv/lib/python3.11/site-packages/yaml/_yaml.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
.venv/lib/python3.11/site-packages/aiohttp_cors-0.7.0.dist-info/DESCRIPTION.rst ADDED
@@ -0,0 +1,614 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ========================
2
+ CORS support for aiohttp
3
+ ========================
4
+
5
+ ``aiohttp_cors`` library implements
6
+ `Cross Origin Resource Sharing (CORS) <cors_>`__
7
+ support for `aiohttp <aiohttp_>`__
8
+ asyncio-powered asynchronous HTTP server.
9
+
10
+ Jump directly to `Usage`_ part to see how to use ``aiohttp_cors``.
11
+
12
+ Same-origin policy
13
+ ==================
14
+
15
+ Web security model is tightly connected to
16
+ `Same-origin policy (SOP) <sop_>`__.
17
+ In short: web pages cannot *Read* resources which origin
18
+ doesn't match origin of requested page, but can *Embed* (or *Execute*)
19
+ resources and have limited ability to *Write* resources.
20
+
21
+ Origin of a page is defined in the `Standard <cors_>`__ as tuple
22
+ ``(schema, host, port)``
23
+ (there is a notable exception with Internet Explorer: it doesn't use port to
24
+ define origin, but uses it's own
25
+ `Security Zones <https://msdn.microsoft.com/en-us/library/ms537183.aspx>`__).
26
+
27
+ Can *Embed* means that resource from other origin can be embedded into
28
+ the page,
29
+ e.g. by using ``<script src="...">``, ``<img src="...">``,
30
+ ``<iframe src="...">``.
31
+
32
+ Cannot *Read* means that resource from other origin *source* cannot be
33
+ obtained by page
34
+ (*source* — any information that would allow to reconstruct resource).
35
+ E.g. the page can *Embed* image with ``<img src="...">``,
36
+ but it can't get information about specific pixels, so page can't reconstruct
37
+ original image
38
+ (though some information from the other resource may still be leaked:
39
+ e.g. the page can read embedded image dimensions).
40
+
41
+ Limited ability to *Write* means, that the page can send POST requests to
42
+ other origin with limited set of ``Content-Type`` values and headers.
43
+
44
+ Restriction to *Read* resource from other origin is related to authentication
45
+ mechanism that is used by browsers:
46
+ when browser reads (downloads) resource he automatically sends all security
47
+ credentials that user previously authorized for that resource
48
+ (e.g. cookies, HTTP Basic Authentication).
49
+
50
+ For example, if *Read* would be allowed and user is authenticated
51
+ in some internet banking,
52
+ malicious page would be able to embed internet banking page with ``iframe``
53
+ (since authentication is done by the browser it may be embedded as if
54
+ user is directly navigated to internet banking page),
55
+ then read user private information by reading *source* of the embedded page
56
+ (which may be not only source code, but, for example,
57
+ screenshot of the embedded internet banking page).
58
+
59
+ Cross-origin resource sharing
60
+ =============================
61
+
62
+ `Cross-origin Resource Sharing (CORS) <cors_>`__ allows to override
63
+ SOP for specific resources.
64
+
65
+ In short, CORS works in the following way.
66
+
67
+ When page ``https://client.example.com`` request (*Read*) resource
68
+ ``https://server.example.com/resource`` that have other origin,
69
+ browser implicitly appends ``Origin: https://client.example.com`` header
70
+ to the HTTP request,
71
+ effectively requesting server to give read permission for
72
+ the resource to the ``https://client.example.com`` page::
73
+
74
+ GET /resource HTTP/1.1
75
+ Origin: https://client.example.com
76
+ Host: server.example.com
77
+
78
+ If server allows access from the page to the resource, it responds with
79
+ resource with ``Access-Control-Allow-Origin: https://client.example.com``
80
+ HTTP header
81
+ (optionally allowing exposing custom server headers to the page and
82
+ enabling use of the user credentials on the server resource)::
83
+
84
+ Access-Control-Allow-Origin: https://client.example.com
85
+ Access-Control-Allow-Credentials: true
86
+ Access-Control-Expose-Headers: X-Server-Header
87
+
88
+ Browser checks, if server responded with proper
89
+ ``Access-Control-Allow-Origin`` header and accordingly allows or denies
90
+ access for the obtained resource to the page.
91
+
92
+ CORS specification designed in a way that servers that are not aware
93
+ of CORS will not expose any additional information, except allowed by the
94
+ SOP.
95
+
96
+ To request resources with custom headers or using custom HTTP methods
97
+ (e.g. ``PUT``, ``DELETE``) that are not allowed by SOP,
98
+ CORS-enabled browser first send *preflight request* to the
99
+ resource using ``OPTIONS`` method, in which it queries access to the resource
100
+ with specific method and headers::
101
+
102
+ OPTIONS / HTTP/1.1
103
+ Origin: https://client.example.com
104
+ Access-Control-Request-Method: PUT
105
+ Access-Control-Request-Headers: X-Client-Header
106
+
107
+ CORS-enabled server responds is requested method is allowed and which of
108
+ the specified headers are allowed::
109
+
110
+ Access-Control-Allow-Origin: https://client.example.com
111
+ Access-Control-Allow-Credentials: true
112
+ Access-Control-Allow-Methods: PUT
113
+ Access-Control-Allow-Headers: X-Client-Header
114
+ Access-Control-Max-Age: 3600
115
+
116
+ Browser checks response to preflight request, and, if actual request allowed,
117
+ does actual request.
118
+
119
+ Installation
120
+ ============
121
+
122
+ You can install ``aiohttp_cors`` as a typical Python library from PyPI or
123
+ from git:
124
+
125
+ .. code-block:: bash
126
+
127
+ $ pip install aiohttp_cors
128
+
129
+ Note that ``aiohttp_cors`` requires versions of Python >= 3.4.1 and
130
+ ``aiohttp`` >= 1.1.
131
+
132
+ Usage
133
+ =====
134
+
135
+ To use ``aiohttp_cors`` you need to configure the application and
136
+ enable CORS on
137
+ `resources and routes <https://aiohttp.readthedocs.org/en/stable/web.html#resources-and-routes>`__
138
+ that you want to expose:
139
+
140
+ .. code-block:: python
141
+
142
+ import asyncio
143
+ from aiohttp import web
144
+ import aiohttp_cors
145
+
146
+ @asyncio.coroutine
147
+ def handler(request):
148
+ return web.Response(
149
+ text="Hello!",
150
+ headers={
151
+ "X-Custom-Server-Header": "Custom data",
152
+ })
153
+
154
+ app = web.Application()
155
+
156
+ # `aiohttp_cors.setup` returns `aiohttp_cors.CorsConfig` instance.
157
+ # The `cors` instance will store CORS configuration for the
158
+ # application.
159
+ cors = aiohttp_cors.setup(app)
160
+
161
+ # To enable CORS processing for specific route you need to add
162
+ # that route to the CORS configuration object and specify its
163
+ # CORS options.
164
+ resource = cors.add(app.router.add_resource("/hello"))
165
+ route = cors.add(
166
+ resource.add_route("GET", handler), {
167
+ "http://client.example.org": aiohttp_cors.ResourceOptions(
168
+ allow_credentials=True,
169
+ expose_headers=("X-Custom-Server-Header",),
170
+ allow_headers=("X-Requested-With", "Content-Type"),
171
+ max_age=3600,
172
+ )
173
+ })
174
+
175
+ Each route has its own CORS configuration passed in ``CorsConfig.add()``
176
+ method.
177
+
178
+ CORS configuration is a mapping from origins to options for that origins.
179
+
180
+ In the example above CORS is configured for the resource under path ``/hello``
181
+ and HTTP method ``GET``, and in the context of CORS:
182
+
183
+ * This resource will be available using CORS only to
184
+ ``http://client.example.org`` origin.
185
+
186
+ * Passing of credentials to this resource will be allowed.
187
+
188
+ * The resource will expose to the client ``X-Custom-Server-Header``
189
+ server header.
190
+
191
+ * The client will be allowed to pass ``X-Requested-With`` and
192
+ ``Content-Type`` headers to the server.
193
+
194
+ * Preflight requests will be allowed to be cached by client for ``3600``
195
+ seconds.
196
+
197
+ Resource will be available only to the explicitly specified origins.
198
+ You can specify "all other origins" using special ``*`` origin:
199
+
200
+ .. code-block:: python
201
+
202
+ cors.add(route, {
203
+ "*":
204
+ aiohttp_cors.ResourceOptions(allow_credentials=False),
205
+ "http://client.example.org":
206
+ aiohttp_cors.ResourceOptions(allow_credentials=True),
207
+ })
208
+
209
+ Here the resource specified by ``route`` will be available to all origins with
210
+ disallowed credentials passing, and with allowed credentials passing only to
211
+ ``http://client.example.org``.
212
+
213
+ By default ``ResourceOptions`` will be constructed without any allowed CORS
214
+ options.
215
+ This means, that resource will be available using CORS to specified origin,
216
+ but client will not be allowed to send either credentials,
217
+ or send non-simple headers, or read from server non-simple headers.
218
+
219
+ To enable sending or receiving all headers you can specify special value
220
+ ``*`` instead of sequence of headers:
221
+
222
+ .. code-block:: python
223
+
224
+ cors.add(route, {
225
+ "http://client.example.org":
226
+ aiohttp_cors.ResourceOptions(
227
+ expose_headers="*",
228
+ allow_headers="*"),
229
+ })
230
+
231
+ You can specify default CORS-enabled resource options using
232
+ ``aiohttp_cors.setup()``'s ``defaults`` argument:
233
+
234
+ .. code-block:: python
235
+
236
+ cors = aiohttp_cors.setup(app, defaults={
237
+ # Allow all to read all CORS-enabled resources from
238
+ # http://client.example.org.
239
+ "http://client.example.org": aiohttp_cors.ResourceOptions(),
240
+ })
241
+
242
+ # Enable CORS on routes.
243
+
244
+ # According to defaults POST and PUT will be available only to
245
+ # "http://client.example.org".
246
+ hello_resource = cors.add(app.router.add_resource("/hello"))
247
+ cors.add(hello_resource.add_route("POST", handler_post))
248
+ cors.add(hello_resource.add_route("PUT", handler_put))
249
+
250
+ # In addition to "http://client.example.org", GET request will be
251
+ # allowed from "http://other-client.example.org" origin.
252
+ cors.add(hello_resource.add_route("GET", handler), {
253
+ "http://other-client.example.org":
254
+ aiohttp_cors.ResourceOptions(),
255
+ })
256
+
257
+ # CORS will be enabled only on the resources added to `CorsConfig`,
258
+ # so following resource will be NOT CORS-enabled.
259
+ app.router.add_route("GET", "/private", handler)
260
+
261
+ Also you can specify default options for resources:
262
+
263
+ .. code-block:: python
264
+
265
+ # Allow POST and PUT requests from "http://client.example.org" origin.
266
+ hello_resource = cors.add(app.router.add_resource("/hello"), {
267
+ "http://client.example.org": aiohttp_cors.ResourceOptions(),
268
+ })
269
+ cors.add(hello_resource.add_route("POST", handler_post))
270
+ cors.add(hello_resource.add_route("PUT", handler_put))
271
+
272
+ Resource CORS configuration allows to use ``allow_methods`` option that
273
+ explicitly specifies list of allowed HTTP methods for origin
274
+ (or ``*`` for all HTTP methods).
275
+ By using this option it is not required to add all resource routes to
276
+ CORS configuration object:
277
+
278
+ .. code-block:: python
279
+
280
+ # Allow POST and PUT requests from "http://client.example.org" origin.
281
+ hello_resource = cors.add(app.router.add_resource("/hello"), {
282
+ "http://client.example.org":
283
+ aiohttp_cors.ResourceOptions(allow_methods=["POST", "PUT"]),
284
+ })
285
+ # No need to add POST and PUT routes into CORS configuration object.
286
+ hello_resource.add_route("POST", handler_post)
287
+ hello_resource.add_route("PUT", handler_put)
288
+ # Still you can add additional methods to CORS configuration object:
289
+ cors.add(hello_resource.add_route("DELETE", handler_delete))
290
+
291
+ Here is an example of how to enable CORS for all origins with all CORS
292
+ features:
293
+
294
+ .. code-block:: python
295
+
296
+ cors = aiohttp_cors.setup(app, defaults={
297
+ "*": aiohttp_cors.ResourceOptions(
298
+ allow_credentials=True,
299
+ expose_headers="*",
300
+ allow_headers="*",
301
+ )
302
+ })
303
+
304
+ # Add all resources to `CorsConfig`.
305
+ resource = cors.add(app.router.add_resource("/hello"))
306
+ cors.add(resource.add_route("GET", handler_get))
307
+ cors.add(resource.add_route("PUT", handler_put))
308
+ cors.add(resource.add_route("POST", handler_put))
309
+ cors.add(resource.add_route("DELETE", handler_delete))
310
+
311
+ Old routes API is supported — you can use ``router.add_router`` and
312
+ ``router.register_route`` as before, though this usage is discouraged:
313
+
314
+ .. code-block:: python
315
+
316
+ cors.add(
317
+ app.router.add_route("GET", "/hello", handler), {
318
+ "http://client.example.org": aiohttp_cors.ResourceOptions(
319
+ allow_credentials=True,
320
+ expose_headers=("X-Custom-Server-Header",),
321
+ allow_headers=("X-Requested-With", "Content-Type"),
322
+ max_age=3600,
323
+ )
324
+ })
325
+
326
+ You can enable CORS for all added routes by accessing routes list
327
+ in the router:
328
+
329
+ .. code-block:: python
330
+
331
+ # Setup application routes.
332
+ app.router.add_route("GET", "/hello", handler_get)
333
+ app.router.add_route("PUT", "/hello", handler_put)
334
+ app.router.add_route("POST", "/hello", handler_put)
335
+ app.router.add_route("DELETE", "/hello", handler_delete)
336
+
337
+ # Configure default CORS settings.
338
+ cors = aiohttp_cors.setup(app, defaults={
339
+ "*": aiohttp_cors.ResourceOptions(
340
+ allow_credentials=True,
341
+ expose_headers="*",
342
+ allow_headers="*",
343
+ )
344
+ })
345
+
346
+ # Configure CORS on all routes.
347
+ for route in list(app.router.routes()):
348
+ cors.add(route)
349
+
350
+ You can also use ``CorsViewMixin`` on ``web.View``:
351
+
352
+ .. code-block:: python
353
+
354
+ class CorsView(web.View, CorsViewMixin):
355
+
356
+ cors_config = {
357
+ "*": ResourceOption(
358
+ allow_credentials=True,
359
+ allow_headers="X-Request-ID",
360
+ )
361
+ }
362
+
363
+ @asyncio.coroutine
364
+ def get(self):
365
+ return web.Response(text="Done")
366
+
367
+ @custom_cors({
368
+ "*": ResourceOption(
369
+ allow_credentials=True,
370
+ allow_headers="*",
371
+ )
372
+ })
373
+ @asyncio.coroutine
374
+ def post(self):
375
+ return web.Response(text="Done")
376
+
377
+ cors = aiohttp_cors.setup(app, defaults={
378
+ "*": aiohttp_cors.ResourceOptions(
379
+ allow_credentials=True,
380
+ expose_headers="*",
381
+ allow_headers="*",
382
+ )
383
+ })
384
+
385
+ cors.add(
386
+ app.router.add_route("*", "/resource", CorsView),
387
+ webview=True)
388
+
389
+
390
+ Security
391
+ ========
392
+
393
+ TODO: fill this
394
+
395
+ Development
396
+ ===========
397
+
398
+ To setup development environment:
399
+
400
+ .. code-block:: bash
401
+
402
+ # Clone sources repository:
403
+ git clone https://github.com/aio-libs/aiohttp_cors.git .
404
+ # Create and activate virtual Python environment:
405
+ python3 -m venv env
406
+ source env/bin/activate
407
+ # Install requirements and aiohttp_cors into virtual environment
408
+ pip install -r requirements-dev.txt
409
+
410
+ To run tests:
411
+
412
+ .. code-block:: bash
413
+
414
+ tox
415
+
416
+ To run only runtime tests in current environment:
417
+
418
+ .. code-block:: bash
419
+
420
+ py.test
421
+
422
+ To run only static code analysis checks:
423
+
424
+ .. code-block:: bash
425
+
426
+ tox -e check
427
+
428
+ Running Selenium tests
429
+ ----------------------
430
+
431
+ To run Selenium tests with Firefox web driver you need to install Firefox.
432
+
433
+ To run Selenium tests with Chromium web driver you need to:
434
+
435
+ 1. Install Chrome driver. On Ubuntu 14.04 it's in ``chromium-chromedriver``
436
+ package.
437
+
438
+ 2. Either add ``chromedriver`` to PATH or set ``WEBDRIVER_CHROMEDRIVER_PATH``
439
+ environment variable to ``chromedriver``, e.g. on Ubuntu 14.04
440
+ ``WEBDRIVER_CHROMEDRIVER_PATH=/usr/lib/chromium-browser/chromedriver``.
441
+
442
+ Release process
443
+ ---------------
444
+
445
+ To release version ``vA.B.C`` from the current version of ``master`` branch
446
+ you need to:
447
+
448
+ 1. Create local branch ``vA.B.C``.
449
+ 2. In ``CHANGES.rst`` set release date to today.
450
+ 3. In ``aiohttp_cors/__about__.py`` change version from ``A.B.Ca0`` to
451
+ ``A.B.C``.
452
+ 4. Create pull request with ``vA.B.C`` branch, wait for all checks to
453
+ successfully finish (Travis and Appveyor).
454
+ 5. Merge pull request to master.
455
+ 6. Update and checkout ``master`` branch.
456
+
457
+ 7. Create and push tag for release version to GitHub:
458
+
459
+ .. code-block:: bash
460
+
461
+ git tag vA.B.C
462
+ git push --tags
463
+
464
+ Now Travis should run tests again, and build and deploy the wheel on PyPI.
465
+
466
+ If Travis release doesn't work for some reason, use following steps
467
+ for manual release upload.
468
+
469
+ 1. Install fresh versions of setuptools and pip.
470
+ Install ``wheel`` for building wheels.
471
+ Install ``twine`` for uploading to PyPI.
472
+
473
+ .. code-block:: bash
474
+
475
+ pip install -U pip setuptools twine wheel
476
+
477
+ 2. Configure PyPI credentials in ``~/.pypirc``.
478
+
479
+ 3. Build distribution:
480
+
481
+ .. code-block:: bash
482
+
483
+ rm -rf build dist; python setup.py sdist bdist_wheel
484
+
485
+ 4. Upload new release to PyPI:
486
+
487
+ .. code-block:: bash
488
+
489
+ twine upload dist/*
490
+
491
+ 8. Edit release description on GitHub if needed.
492
+ 9. Announce new release on the *aio-libs* mailing list:
493
+ https://groups.google.com/forum/#!forum/aio-libs.
494
+
495
+ Post release steps:
496
+
497
+ 1. In ``CHANGES.rst`` add template for the next release.
498
+ 2. In ``aiohttp_cors/__about__.py`` change version from ``A.B.C`` to
499
+ ``A.(B + 1).0a0``.
500
+
501
+ Bugs
502
+ ====
503
+
504
+ Please report bugs, issues, feature requests, etc. on
505
+ `GitHub <https://github.com/aio-libs/aiohttp_cors/issues>`__.
506
+
507
+
508
+ License
509
+ =======
510
+
511
+ Copyright 2015 Vladimir Rutsky <vladimir@rutsky.org>.
512
+
513
+ Licensed under the
514
+ `Apache License, Version 2.0 <https://www.apache.org/licenses/LICENSE-2.0>`__,
515
+ see ``LICENSE`` file for details.
516
+
517
+ .. _cors: http://www.w3.org/TR/cors/
518
+ .. _aiohttp: https://github.com/KeepSafe/aiohttp/
519
+ .. _sop: https://en.wikipedia.org/wiki/Same-origin_policy
520
+
521
+
522
+ =========
523
+ CHANGES
524
+ =========
525
+
526
+ 0.7.0 (2018-03-05)
527
+ ==================
528
+
529
+ - Make web view check implicit and type based (#159)
530
+
531
+ - Disable Python 3.4 support (#156)
532
+
533
+ - Support aiohttp 3.0+ (#155)
534
+
535
+ 0.6.0 (2017-12-21)
536
+ ==================
537
+
538
+ - Support aiohttp views by ``CorsViewMixin`` (#145)
539
+
540
+ 0.5.3 (2017-04-21)
541
+ ==================
542
+
543
+ - Fix ``typing`` being installed on Python 3.6.
544
+
545
+ 0.5.2 (2017-03-28)
546
+ ==================
547
+
548
+ - Fix tests compatibility with ``aiohttp`` 2.0.
549
+ This release and release v0.5.0 should work on ``aiohttp`` 2.0.
550
+
551
+
552
+ 0.5.1 (2017-03-23)
553
+ ==================
554
+
555
+ - Enforce ``aiohttp`` version to be less than 2.0.
556
+ Newer ``aiohttp`` releases will be supported in the next release.
557
+
558
+ 0.5.0 (2016-11-18)
559
+ ==================
560
+
561
+ - Fix compatibility with ``aiohttp`` 1.1
562
+
563
+
564
+ 0.4.0 (2016-04-04)
565
+ ==================
566
+
567
+ - Fixed support with new Resources objects introduced in ``aiohttp`` 0.21.0.
568
+ Minimum supported version of ``aiohttp`` is 0.21.4 now.
569
+
570
+ - New Resources objects are supported.
571
+ You can specify default configuration for a Resource and use
572
+ ``allow_methods`` to explicitly list allowed methods (or ``*`` for all
573
+ HTTP methods):
574
+
575
+ .. code-block:: python
576
+
577
+ # Allow POST and PUT requests from "http://client.example.org" origin.
578
+ hello_resource = cors.add(app.router.add_resource("/hello"), {
579
+ "http://client.example.org":
580
+ aiohttp_cors.ResourceOptions(
581
+ allow_methods=["POST", "PUT"]),
582
+ })
583
+ # No need to add POST and PUT routes into CORS configuration object.
584
+ hello_resource.add_route("POST", handler_post)
585
+ hello_resource.add_route("PUT", handler_put)
586
+ # Still you can add additional methods to CORS configuration object:
587
+ cors.add(hello_resource.add_route("DELETE", handler_delete))
588
+
589
+ - ``AbstractRouterAdapter`` was completely rewritten to be more Router
590
+ agnostic.
591
+
592
+ 0.3.0 (2016-02-06)
593
+ ==================
594
+
595
+ - Rename ``UrlDistatcherRouterAdapter`` to ``UrlDispatcherRouterAdapter``.
596
+
597
+ - Set maximum supported ``aiohttp`` version to ``0.20.2``, see bug #30 for
598
+ details.
599
+
600
+ 0.2.0 (2015-11-30)
601
+ ==================
602
+
603
+ - Move ABCs from ``aiohttp_cors.router_adapter`` to ``aiohttp_cors.abc``.
604
+
605
+ - Rename ``RouterAdapter`` to ``AbstractRouterAdapter``.
606
+
607
+ - Fix bug with configuring CORS for named routes.
608
+
609
+ 0.1.0 (2015-11-05)
610
+ ==================
611
+
612
+ * Initial release.
613
+
614
+
.venv/lib/python3.11/site-packages/cpuinfo/__pycache__/cpuinfo.cpython-311.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1532742cdb6d2b769cbe73f41cda078dd546a2a4cc2ec93ad7ad8282e4fe52b7
3
+ size 129984
.venv/lib/python3.11/site-packages/google/auth/_credentials_base.py ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2024 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
+ """Interface for base credentials."""
17
+
18
+ import abc
19
+
20
+ from google.auth import _helpers
21
+
22
+
23
+ class _BaseCredentials(metaclass=abc.ABCMeta):
24
+ """Base class for all credentials.
25
+
26
+ All credentials have a :attr:`token` that is used for authentication and
27
+ may also optionally set an :attr:`expiry` to indicate when the token will
28
+ no longer be valid.
29
+
30
+ Most credentials will be :attr:`invalid` until :meth:`refresh` is called.
31
+ Credentials can do this automatically before the first HTTP request in
32
+ :meth:`before_request`.
33
+
34
+ Although the token and expiration will change as the credentials are
35
+ :meth:`refreshed <refresh>` and used, credentials should be considered
36
+ immutable. Various credentials will accept configuration such as private
37
+ keys, scopes, and other options. These options are not changeable after
38
+ construction. Some classes will provide mechanisms to copy the credentials
39
+ with modifications such as :meth:`ScopedCredentials.with_scopes`.
40
+
41
+ Attributes:
42
+ token (Optional[str]): The bearer token that can be used in HTTP headers to make
43
+ authenticated requests.
44
+ """
45
+
46
+ def __init__(self):
47
+ self.token = None
48
+
49
+ @abc.abstractmethod
50
+ def refresh(self, request):
51
+ """Refreshes the access token.
52
+
53
+ Args:
54
+ request (google.auth.transport.Request): The object used to make
55
+ HTTP requests.
56
+
57
+ Raises:
58
+ google.auth.exceptions.RefreshError: If the credentials could
59
+ not be refreshed.
60
+ """
61
+ # pylint: disable=missing-raises-doc
62
+ # (pylint doesn't recognize that this is abstract)
63
+ raise NotImplementedError("Refresh must be implemented")
64
+
65
+ def _apply(self, headers, token=None):
66
+ """Apply the token to the authentication header.
67
+
68
+ Args:
69
+ headers (Mapping): The HTTP request headers.
70
+ token (Optional[str]): If specified, overrides the current access
71
+ token.
72
+ """
73
+ headers["authorization"] = "Bearer {}".format(
74
+ _helpers.from_bytes(token or self.token)
75
+ )
.venv/lib/python3.11/site-packages/google/auth/_default.py ADDED
@@ -0,0 +1,719 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 Google Inc.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Application default credentials.
16
+
17
+ Implements application default credentials and project ID detection.
18
+ """
19
+
20
+ import io
21
+ import json
22
+ import logging
23
+ import os
24
+ import warnings
25
+
26
+ from google.auth import environment_vars
27
+ from google.auth import exceptions
28
+ import google.auth.transport._http_client
29
+
30
+ _LOGGER = logging.getLogger(__name__)
31
+
32
+ # Valid types accepted for file-based credentials.
33
+ _AUTHORIZED_USER_TYPE = "authorized_user"
34
+ _SERVICE_ACCOUNT_TYPE = "service_account"
35
+ _EXTERNAL_ACCOUNT_TYPE = "external_account"
36
+ _EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = "external_account_authorized_user"
37
+ _IMPERSONATED_SERVICE_ACCOUNT_TYPE = "impersonated_service_account"
38
+ _GDCH_SERVICE_ACCOUNT_TYPE = "gdch_service_account"
39
+ _VALID_TYPES = (
40
+ _AUTHORIZED_USER_TYPE,
41
+ _SERVICE_ACCOUNT_TYPE,
42
+ _EXTERNAL_ACCOUNT_TYPE,
43
+ _EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE,
44
+ _IMPERSONATED_SERVICE_ACCOUNT_TYPE,
45
+ _GDCH_SERVICE_ACCOUNT_TYPE,
46
+ )
47
+
48
+ # Help message when no credentials can be found.
49
+ _CLOUD_SDK_MISSING_CREDENTIALS = """\
50
+ Your default credentials were not found. To set up Application Default Credentials, \
51
+ see https://cloud.google.com/docs/authentication/external/set-up-adc for more information.\
52
+ """
53
+
54
+ # Warning when using Cloud SDK user credentials
55
+ _CLOUD_SDK_CREDENTIALS_WARNING = """\
56
+ Your application has authenticated using end user credentials from Google \
57
+ Cloud SDK without a quota project. You might receive a "quota exceeded" \
58
+ or "API not enabled" error. See the following page for troubleshooting: \
59
+ https://cloud.google.com/docs/authentication/adc-troubleshooting/user-creds. \
60
+ """
61
+
62
+ # The subject token type used for AWS external_account credentials.
63
+ _AWS_SUBJECT_TOKEN_TYPE = "urn:ietf:params:aws:token-type:aws4_request"
64
+
65
+
66
+ def _warn_about_problematic_credentials(credentials):
67
+ """Determines if the credentials are problematic.
68
+
69
+ Credentials from the Cloud SDK that are associated with Cloud SDK's project
70
+ are problematic because they may not have APIs enabled and have limited
71
+ quota. If this is the case, warn about it.
72
+ """
73
+ from google.auth import _cloud_sdk
74
+
75
+ if credentials.client_id == _cloud_sdk.CLOUD_SDK_CLIENT_ID:
76
+ warnings.warn(_CLOUD_SDK_CREDENTIALS_WARNING)
77
+
78
+
79
def load_credentials_from_file(
    filename, scopes=None, default_scopes=None, quota_project_id=None, request=None
):
    """Loads Google credentials from a file.

    The credentials file must be a service account key, stored authorized
    user credentials, external account credentials, or impersonated service
    account credentials.

    .. warning::
        Important: If you accept a credential configuration (credential JSON/File/Stream)
        from an external source for authentication to Google Cloud Platform, you must
        validate it before providing it to any Google API or client library. Providing an
        unvalidated credential configuration to Google APIs or libraries can compromise
        the security of your systems and data. For more information, refer to
        `Validate credential configurations from external sources`_.

    .. _Validate credential configurations from external sources:
        https://cloud.google.com/docs/authentication/external/externally-sourced-credentials

    Args:
        filename (str): The full path to the credentials file.
        scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
            specified, the credentials will automatically be scoped if
            necessary
        default_scopes (Optional[Sequence[str]]): Default scopes passed by a
            Google client library. Use 'scopes' for user-defined scopes.
        quota_project_id (Optional[str]): The project ID used for
            quota and billing.
        request (Optional[google.auth.transport.Request]): An object used to make
            HTTP requests. This is used to determine the associated project ID
            for a workload identity pool resource (external account credentials).
            If not specified, then it will use a
            google.auth.transport.requests.Request client to make requests.

    Returns:
        Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
        credentials and the project ID. Authorized user credentials do not
        have the project ID information. External account credentials project
        IDs may not always be determined.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if the file is in the
            wrong format or is missing.
    """
    # Fail fast with the library's own error type rather than letting
    # open() raise FileNotFoundError.
    if not os.path.exists(filename):
        raise exceptions.DefaultCredentialsError(
            "File {} was not found.".format(filename)
        )

    with io.open(filename, "r") as file_obj:
        try:
            info = json.load(file_obj)
        except ValueError as caught_exc:
            # Wrap the JSON parse error in DefaultCredentialsError while
            # preserving the original cause for debugging.
            new_exc = exceptions.DefaultCredentialsError(
                "File {} is not a valid json file.".format(filename), caught_exc
            )
            raise new_exc from caught_exc
    # Dispatch on the parsed "type" field; filename is only used for messages.
    return _load_credentials_from_info(
        filename, info, scopes, default_scopes, quota_project_id, request
    )
140
+
141
+
142
def load_credentials_from_dict(
    info, scopes=None, default_scopes=None, quota_project_id=None, request=None
):
    """Loads Google credentials from a dict.

    The credentials file must be a service account key, stored authorized
    user credentials, external account credentials, or impersonated service
    account credentials.

    .. warning::
        Important: If you accept a credential configuration (credential JSON/File/Stream)
        from an external source for authentication to Google Cloud Platform, you must
        validate it before providing it to any Google API or client library. Providing an
        unvalidated credential configuration to Google APIs or libraries can compromise
        the security of your systems and data. For more information, refer to
        `Validate credential configurations from external sources`_.

    .. _Validate credential configurations from external sources:
        https://cloud.google.com/docs/authentication/external/externally-sourced-credentials

    Args:
        info (Dict[str, Any]): A dict object containing the credentials
        scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
            specified, the credentials will automatically be scoped if
            necessary
        default_scopes (Optional[Sequence[str]]): Default scopes passed by a
            Google client library. Use 'scopes' for user-defined scopes.
        quota_project_id (Optional[str]): The project ID used for
            quota and billing.
        request (Optional[google.auth.transport.Request]): An object used to make
            HTTP requests. This is used to determine the associated project ID
            for a workload identity pool resource (external account credentials).
            If not specified, then it will use a
            google.auth.transport.requests.Request client to make requests.

    Returns:
        Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
        credentials and the project ID. Authorized user credentials do not
        have the project ID information. External account credentials project
        IDs may not always be determined.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if the file is in the
            wrong format or is missing.
    """
    # Guard against callers passing e.g. a JSON string instead of a dict.
    if not isinstance(info, dict):
        raise exceptions.DefaultCredentialsError(
            "info object was of type {} but dict type was expected.".format(type(info))
        )

    # "dict object" stands in for the filename in downstream error messages.
    return _load_credentials_from_info(
        "dict object", info, scopes, default_scopes, quota_project_id, request
    )
195
+
196
+
197
def _load_credentials_from_info(
    filename, info, scopes, default_scopes, quota_project_id, request
):
    """Dispatches parsed credential info to the loader for its ``"type"`` key.

    Args:
        filename (str): Used only in error messages (may be "dict object").
        info (Mapping[str, Any]): The parsed credential configuration.
        scopes, default_scopes, quota_project_id, request: Forwarded to the
            type-specific loader as applicable.

    Returns:
        Tuple[google.auth.credentials.Credentials, Optional[str]]: The loaded
        credentials and the project ID, if determinable.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if the ``"type"`` key
            is missing or not one of ``_VALID_TYPES``.
    """
    from google.auth.credentials import CredentialsWithQuotaProject

    credential_type = info.get("type")

    if credential_type == _AUTHORIZED_USER_TYPE:
        credentials, project_id = _get_authorized_user_credentials(
            filename, info, scopes
        )

    elif credential_type == _SERVICE_ACCOUNT_TYPE:
        credentials, project_id = _get_service_account_credentials(
            filename, info, scopes, default_scopes
        )

    elif credential_type == _EXTERNAL_ACCOUNT_TYPE:
        credentials, project_id = _get_external_account_credentials(
            info,
            filename,
            scopes=scopes,
            default_scopes=default_scopes,
            request=request,
        )

    elif credential_type == _EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE:
        # NOTE(review): ``request`` lands in the helper's ``scopes`` slot
        # positionally; the helper ignores both today, but consider passing
        # request=request for clarity — confirm against the helper signature.
        credentials, project_id = _get_external_account_authorized_user_credentials(
            filename, info, request
        )

    elif credential_type == _IMPERSONATED_SERVICE_ACCOUNT_TYPE:
        credentials, project_id = _get_impersonated_service_account_credentials(
            filename, info, scopes
        )
    elif credential_type == _GDCH_SERVICE_ACCOUNT_TYPE:
        credentials, project_id = _get_gdch_service_account_credentials(filename, info)
    else:
        raise exceptions.DefaultCredentialsError(
            "The file {file} does not have a valid type. "
            "Type is {type}, expected one of {valid_types}.".format(
                file=filename, type=credential_type, valid_types=_VALID_TYPES
            )
        )
    # Quota projects only apply to credential classes that support them.
    if isinstance(credentials, CredentialsWithQuotaProject):
        credentials = _apply_quota_project_id(credentials, quota_project_id)
    return credentials, project_id
244
+
245
+
246
def _get_gcloud_sdk_credentials(quota_project_id=None):
    """Gets the credentials and project ID from the Cloud SDK.

    Returns ``(None, None)`` when no ADC file has been written by
    ``gcloud auth application-default login``.
    """
    from google.auth import _cloud_sdk

    _LOGGER.debug("Checking Cloud SDK credentials as part of auth process...")

    # Check if application default credentials exist.
    credentials_filename = _cloud_sdk.get_application_default_credentials_path()

    if not os.path.isfile(credentials_filename):
        _LOGGER.debug("Cloud SDK credentials not found on disk; not using them")
        return None, None

    credentials, project_id = load_credentials_from_file(
        credentials_filename, quota_project_id=quota_project_id
    )
    # Record where the credentials came from, for diagnostics.
    credentials._cred_file_path = credentials_filename

    # The ADC file may not carry a project; fall back to the SDK's active
    # project ("gcloud config set project").
    if not project_id:
        project_id = _cloud_sdk.get_project_id()

    return credentials, project_id
268
+
269
+
270
def _get_explicit_environ_credentials(quota_project_id=None):
    """Gets credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
    variable.

    Returns ``(None, None)`` when the variable is unset.
    """
    from google.auth import _cloud_sdk

    cloud_sdk_adc_path = _cloud_sdk.get_application_default_credentials_path()
    explicit_file = os.environ.get(environment_vars.CREDENTIALS)

    _LOGGER.debug(
        "Checking %s for explicit credentials as part of auth process...", explicit_file
    )

    if explicit_file is not None and explicit_file == cloud_sdk_adc_path:
        # Cloud sdk flow calls gcloud to fetch project id, so if the explicit
        # file path is cloud sdk credentials path, then we should fall back
        # to cloud sdk flow, otherwise project id cannot be obtained.
        _LOGGER.debug(
            "Explicit credentials path %s is the same as Cloud SDK credentials path, fall back to Cloud SDK credentials flow...",
            explicit_file,
        )
        return _get_gcloud_sdk_credentials(quota_project_id=quota_project_id)

    if explicit_file is not None:
        credentials, project_id = load_credentials_from_file(
            os.environ[environment_vars.CREDENTIALS], quota_project_id=quota_project_id
        )
        # Record the credential source for diagnostics.
        credentials._cred_file_path = f"{explicit_file} file via the GOOGLE_APPLICATION_CREDENTIALS environment variable"

        return credentials, project_id

    else:
        return None, None
302
+
303
+
304
def _get_gae_credentials():
    """Gets Google App Engine App Identity credentials and project ID.

    Returns:
        Tuple[Optional[google.auth.app_engine.Credentials], Optional[str]]:
            The credentials and project ID, or ``(None, None)`` when not
            running on GAE first generation or when the App Engine auth
            library/APIs are unavailable.
    """
    # If not GAE gen1, prefer the metadata service even if the GAE APIs are
    # available as per https://google.aip.dev/auth/4115.
    if os.environ.get(environment_vars.LEGACY_APPENGINE_RUNTIME) != "python27":
        return None, None

    # While this library is normally bundled with app_engine, there are
    # some cases where it's not available, so we tolerate ImportError.
    try:
        _LOGGER.debug("Checking for App Engine runtime as part of auth process...")
        import google.auth.app_engine as app_engine
    except ImportError:
        _LOGGER.warning("Import of App Engine auth library failed.")
        return None, None

    try:
        credentials = app_engine.Credentials()
        project_id = app_engine.get_project_id()
        return credentials, project_id
    except EnvironmentError:
        # Message grammar fixed ("cannot authentication" -> "cannot
        # authenticate"); behavior is unchanged.
        _LOGGER.debug(
            "No App Engine library was found so cannot authenticate via App Engine Identity Credentials."
        )
        return None, None
329
+
330
+
331
def _get_gce_credentials(request=None, quota_project_id=None):
    """Gets credentials and project ID from the GCE Metadata Service.

    Returns ``(None, None)`` when the Compute Engine library cannot be
    imported or the metadata server is unreachable.
    """
    # Ping requires a transport, but we want application default credentials
    # to require no arguments. So, we'll use the _http_client transport which
    # uses http.client. This is only acceptable because the metadata server
    # doesn't do SSL and never requires proxies.

    # While this library is normally bundled with compute_engine, there are
    # some cases where it's not available, so we tolerate ImportError.
    try:
        from google.auth import compute_engine
        from google.auth.compute_engine import _metadata
    except ImportError:
        _LOGGER.warning("Import of Compute Engine auth library failed.")
        return None, None

    if request is None:
        request = google.auth.transport._http_client.Request()

    if _metadata.is_on_gce(request=request):
        # Get the project ID.
        try:
            project_id = _metadata.get_project_id(request=request)
        except exceptions.TransportError:
            # Credentials can still work without a project ID.
            project_id = None

        cred = compute_engine.Credentials()
        cred = _apply_quota_project_id(cred, quota_project_id)

        return cred, project_id
    else:
        _LOGGER.warning(
            "Authentication failed using Compute Engine authentication due to unavailable metadata server."
        )
        return None, None
366
+
367
+
368
def _get_external_account_credentials(
    info, filename, scopes=None, default_scopes=None, request=None
):
    """Loads external account Credentials from the parsed external account info.

    The credentials information must correspond to a supported external account
    credentials.

    Args:
        info (Mapping[str, str]): The external account info in Google format.
        filename (str): The full path to the credentials file.
        scopes (Optional[Sequence[str]]): The list of scopes for the credentials. If
            specified, the credentials will automatically be scoped if
            necessary.
        default_scopes (Optional[Sequence[str]]): Default scopes passed by a
            Google client library. Use 'scopes' for user-defined scopes.
        request (Optional[google.auth.transport.Request]): An object used to make
            HTTP requests. This is used to determine the associated project ID
            for a workload identity pool resource (external account credentials).
            If not specified, then it will use a
            google.auth.transport.requests.Request client to make requests.

    Returns:
        Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
        credentials and the project ID. External account credentials project
        IDs may not always be determined.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if the info dictionary
            is in the wrong format or is missing required information.
    """
    # There are currently 3 types of external_account credentials.
    if info.get("subject_token_type") == _AWS_SUBJECT_TOKEN_TYPE:
        # Check if configuration corresponds to an AWS credentials.
        from google.auth import aws

        credentials = aws.Credentials.from_info(
            info, scopes=scopes, default_scopes=default_scopes
        )
    elif (
        info.get("credential_source") is not None
        and info.get("credential_source").get("executable") is not None
    ):
        # An executable credential source selects pluggable credentials.
        from google.auth import pluggable

        credentials = pluggable.Credentials.from_info(
            info, scopes=scopes, default_scopes=default_scopes
        )
    else:
        try:
            # Check if configuration corresponds to an Identity Pool credentials.
            from google.auth import identity_pool

            credentials = identity_pool.Credentials.from_info(
                info, scopes=scopes, default_scopes=default_scopes
            )
        except ValueError:
            # If the configuration is invalid or does not correspond to any
            # supported external_account credentials, raise an error.
            raise exceptions.DefaultCredentialsError(
                "Failed to load external account credentials from {}".format(filename)
            )
    if request is None:
        import google.auth.transport.requests

        request = google.auth.transport.requests.Request()

    # Project ID determination requires an HTTP request against the STS.
    return credentials, credentials.get_project_id(request=request)
436
+
437
+
438
def _get_external_account_authorized_user_credentials(
    filename, info, scopes=None, default_scopes=None, request=None
):
    """Loads external account authorized user credentials from parsed info.

    Only ``info`` is consumed; ``scopes``, ``default_scopes`` and ``request``
    are accepted but unused here. ``filename`` appears only in error messages.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if ``info`` is not a
            valid external account authorized user configuration.
    """
    try:
        from google.auth import external_account_authorized_user

        credentials = external_account_authorized_user.Credentials.from_info(info)
    except ValueError:
        raise exceptions.DefaultCredentialsError(
            "Failed to load external account authorized user credentials from {}".format(
                filename
            )
        )

    # These credentials never carry a project ID.
    return credentials, None
453
+
454
+
455
def _get_authorized_user_credentials(filename, info, scopes=None):
    """Loads authorized user (OAuth 2.0 user) credentials from parsed info.

    ``filename`` is used only in error messages. The project ID is always
    ``None`` for authorized user credentials.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if ``info`` is not a
            valid authorized user configuration.
    """
    from google.oauth2 import credentials

    try:
        credentials = credentials.Credentials.from_authorized_user_info(
            info, scopes=scopes
        )
    except ValueError as caught_exc:
        msg = "Failed to load authorized user credentials from {}".format(filename)
        new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
        raise new_exc from caught_exc
    return credentials, None
467
+
468
+
469
def _get_service_account_credentials(filename, info, scopes=None, default_scopes=None):
    """Loads service account credentials from parsed service account key info.

    ``filename`` is used only in error messages. The project ID comes from the
    key file's ``project_id`` field when present.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if ``info`` is not a
            valid service account key.
    """
    from google.oauth2 import service_account

    try:
        credentials = service_account.Credentials.from_service_account_info(
            info, scopes=scopes, default_scopes=default_scopes
        )
    except ValueError as caught_exc:
        msg = "Failed to load service account credentials from {}".format(filename)
        new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
        raise new_exc from caught_exc
    return credentials, info.get("project_id")
481
+
482
+
483
def _get_impersonated_service_account_credentials(filename, info, scopes):
    """Loads impersonated service account credentials from parsed info.

    The target principal is extracted from the
    ``service_account_impersonation_url`` between the last ``/`` and
    ``:generateAccessToken``.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if the source
            credentials or the impersonation URL are invalid.
    """
    from google.auth import impersonated_credentials

    try:
        source_credentials_info = info.get("source_credentials")
        source_credentials_type = source_credentials_info.get("type")
        if source_credentials_type == _AUTHORIZED_USER_TYPE:
            source_credentials, _ = _get_authorized_user_credentials(
                filename, source_credentials_info
            )
        elif source_credentials_type == _SERVICE_ACCOUNT_TYPE:
            source_credentials, _ = _get_service_account_credentials(
                filename, source_credentials_info
            )
        elif source_credentials_type == _EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE:
            source_credentials, _ = _get_external_account_authorized_user_credentials(
                filename, source_credentials_info
            )
        else:
            raise exceptions.InvalidType(
                "source credential of type {} is not supported.".format(
                    source_credentials_type
                )
            )
        impersonation_url = info.get("service_account_impersonation_url")
        start_index = impersonation_url.rfind("/")
        end_index = impersonation_url.find(":generateAccessToken")
        if start_index == -1 or end_index == -1 or start_index > end_index:
            raise exceptions.InvalidValue(
                "Cannot extract target principal from {}".format(impersonation_url)
            )
        target_principal = impersonation_url[start_index + 1 : end_index]
        delegates = info.get("delegates")
        quota_project_id = info.get("quota_project_id")
        credentials = impersonated_credentials.Credentials(
            source_credentials,
            target_principal,
            scopes,
            delegates,
            quota_project_id=quota_project_id,
        )
    except ValueError as caught_exc:
        # NOTE(review): the InvalidType/InvalidValue raises above are caught
        # here only if they subclass ValueError — confirm in
        # google.auth.exceptions.
        msg = "Failed to load impersonated service account credentials from {}".format(
            filename
        )
        new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
        raise new_exc from caught_exc
    return credentials, None
531
+
532
+
533
def _get_gdch_service_account_credentials(filename, info):
    """Loads GDCH (Google Distributed Cloud Hosted) service account credentials.

    ``filename`` is used only in error messages. The project ID comes from the
    file's ``project`` field when present.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if ``info`` is not a
            valid GDCH service account configuration.
    """
    from google.oauth2 import gdch_credentials

    try:
        credentials = gdch_credentials.ServiceAccountCredentials.from_service_account_info(
            info
        )
    except ValueError as caught_exc:
        msg = "Failed to load GDCH service account credentials from {}".format(filename)
        new_exc = exceptions.DefaultCredentialsError(msg, caught_exc)
        raise new_exc from caught_exc
    return credentials, info.get("project")
545
+
546
+
547
def get_api_key_credentials(key):
    """Return credentials with the given API key.

    Args:
        key (str): The API key string.

    Returns:
        google.auth.api_key.Credentials: API key credentials wrapping ``key``.
    """
    from google.auth import api_key

    return api_key.Credentials(key)
552
+
553
+
554
def _apply_quota_project_id(credentials, quota_project_id):
    """Attaches a quota project to ``credentials``.

    Uses the explicit ``quota_project_id`` when given; otherwise lets the
    credentials pick one up from the environment. Also warns when the result
    is Cloud SDK user credentials without any quota project.
    """
    if quota_project_id:
        credentials = credentials.with_quota_project(quota_project_id)
    else:
        credentials = credentials.with_quota_project_from_environment()

    from google.oauth2 import credentials as authorized_user_credentials

    # Only user (authorized_user) credentials without a quota project are
    # considered problematic.
    if isinstance(credentials, authorized_user_credentials.Credentials) and (
        not credentials.quota_project_id
    ):
        _warn_about_problematic_credentials(credentials)
    return credentials
567
+
568
+
569
def default(scopes=None, request=None, quota_project_id=None, default_scopes=None):
    """Gets the default credentials for the current environment.

    `Application Default Credentials`_ provides an easy way to obtain
    credentials to call Google APIs for server-to-server or local applications.
    This function acquires credentials from the environment in the following
    order:

    1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
       to the path of a valid service account JSON private key file, then it is
       loaded and returned. The project ID returned is the project ID defined
       in the service account file if available (some older files do not
       contain project ID information).

       If the environment variable is set to the path of a valid external
       account JSON configuration file (workload identity federation), then the
       configuration file is used to determine and retrieve the external
       credentials from the current environment (AWS, Azure, etc).
       These will then be exchanged for Google access tokens via the Google STS
       endpoint.
       The project ID returned in this case is the one corresponding to the
       underlying workload identity pool resource if determinable.

       If the environment variable is set to the path of a valid GDCH service
       account JSON file (`Google Distributed Cloud Hosted`_), then a GDCH
       credential will be returned. The project ID returned is the project
       specified in the JSON file.
    2. If the `Google Cloud SDK`_ is installed and has application default
       credentials set they are loaded and returned.

       To enable application default credentials with the Cloud SDK run::

            gcloud auth application-default login

       If the Cloud SDK has an active project, the project ID is returned. The
       active project can be set using::

            gcloud config set project

    3. If the application is running in the `App Engine standard environment`_
       (first generation) then the credentials and project ID from the
       `App Identity Service`_ are used.
    4. If the application is running in `Compute Engine`_ or `Cloud Run`_ or
       the `App Engine flexible environment`_ or the `App Engine standard
       environment`_ (second generation) then the credentials and project ID
       are obtained from the `Metadata Service`_.
    5. If no credentials are found,
       :class:`~google.auth.exceptions.DefaultCredentialsError` will be raised.

    .. _Application Default Credentials: https://developers.google.com\
            /identity/protocols/application-default-credentials
    .. _Google Cloud SDK: https://cloud.google.com/sdk
    .. _App Engine standard environment: https://cloud.google.com/appengine
    .. _App Identity Service: https://cloud.google.com/appengine/docs/python\
            /appidentity/
    .. _Compute Engine: https://cloud.google.com/compute
    .. _App Engine flexible environment: https://cloud.google.com\
            /appengine/flexible
    .. _Metadata Service: https://cloud.google.com/compute/docs\
            /storing-retrieving-metadata
    .. _Cloud Run: https://cloud.google.com/run
    .. _Google Distributed Cloud Hosted: https://cloud.google.com/blog/topics\
            /hybrid-cloud/announcing-google-distributed-cloud-edge-and-hosted

    Example::

        import google.auth

        credentials, project_id = google.auth.default()

    Args:
        scopes (Sequence[str]): The list of scopes for the credentials. If
            specified, the credentials will automatically be scoped if
            necessary.
        request (Optional[google.auth.transport.Request]): An object used to make
            HTTP requests. This is used to either detect whether the application
            is running on Compute Engine or to determine the associated project
            ID for a workload identity pool resource (external account
            credentials). If not specified, then it will either use the standard
            library http client to make requests for Compute Engine credentials
            or a google.auth.transport.requests.Request client for external
            account credentials.
        quota_project_id (Optional[str]): The project ID used for
            quota and billing.
        default_scopes (Optional[Sequence[str]]): Default scopes passed by a
            Google client library. Use 'scopes' for user-defined scopes.
    Returns:
        Tuple[~google.auth.credentials.Credentials, Optional[str]]:
            the current environment's credentials and project ID. Project ID
            may be None, which indicates that the Project ID could not be
            ascertained from the environment.

    Raises:
        ~google.auth.exceptions.DefaultCredentialsError:
            If no credentials were found, or if the credentials found were
            invalid.
    """
    from google.auth.credentials import with_scopes_if_required
    from google.auth.credentials import CredentialsWithQuotaProject

    # An explicitly configured project (GOOGLE_CLOUD_PROJECT or its legacy
    # alias) always wins over anything a checker discovers.
    explicit_project_id = os.environ.get(
        environment_vars.PROJECT, os.environ.get(environment_vars.LEGACY_PROJECT)
    )

    checkers = (
        # Avoid passing scopes here to prevent passing scopes to user credentials.
        # with_scopes_if_required() below will ensure scopes/default scopes are
        # safely set on the returned credentials since requires_scopes will
        # guard against setting scopes on user credentials.
        lambda: _get_explicit_environ_credentials(quota_project_id=quota_project_id),
        lambda: _get_gcloud_sdk_credentials(quota_project_id=quota_project_id),
        _get_gae_credentials,
        lambda: _get_gce_credentials(request, quota_project_id=quota_project_id),
    )

    # First checker to return non-None credentials wins.
    for checker in checkers:
        credentials, project_id = checker()
        if credentials is not None:
            credentials = with_scopes_if_required(
                credentials, scopes, default_scopes=default_scopes
            )

            effective_project_id = explicit_project_id or project_id

            # For external account credentials, scopes are required to determine
            # the project ID. Try to get the project ID again if not yet
            # determined.
            if not effective_project_id and callable(
                getattr(credentials, "get_project_id", None)
            ):
                if request is None:
                    import google.auth.transport.requests

                    request = google.auth.transport.requests.Request()
                effective_project_id = credentials.get_project_id(request=request)

            if quota_project_id and isinstance(
                credentials, CredentialsWithQuotaProject
            ):
                credentials = credentials.with_quota_project(quota_project_id)

            if not effective_project_id:
                _LOGGER.warning(
                    "No project ID could be determined. Consider running "
                    "`gcloud config set project` or setting the %s "
                    "environment variable",
                    environment_vars.PROJECT,
                )
            return credentials, effective_project_id

    raise exceptions.DefaultCredentialsError(_CLOUD_SDK_MISSING_CREDENTIALS)
.venv/lib/python3.11/site-packages/google/auth/_default_async.py ADDED
@@ -0,0 +1,282 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google Inc.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Application default credentials.
16
+
17
+ Implements application default credentials and project ID detection.
18
+ """
19
+
20
+ import io
21
+ import json
22
+ import os
23
+
24
+ from google.auth import _default
25
+ from google.auth import environment_vars
26
+ from google.auth import exceptions
27
+
28
+
29
def load_credentials_from_file(filename, scopes=None, quota_project_id=None):
    """Load Google credentials from a file on disk.

    The file must contain either a service account key or stored authorized
    user credentials.

    Args:
        filename (str): The full path to the credentials file.
        scopes (Optional[Sequence[str]]): The list of scopes for the
            credentials. If specified, the credentials will automatically be
            scoped if necessary.
        quota_project_id (Optional[str]): The project ID used for
            quota and billing.

    Returns:
        Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
            credentials and the project ID. Authorized user credentials do not
            have the project ID information, so ``None`` is returned for them.

    Raises:
        google.auth.exceptions.DefaultCredentialsError: if the file is in the
            wrong format or is missing.
    """
    if not os.path.exists(filename):
        raise exceptions.DefaultCredentialsError(
            "File {} was not found.".format(filename)
        )

    with io.open(filename, "r") as file_obj:
        try:
            info = json.load(file_obj)
        except ValueError as caught_exc:
            new_exc = exceptions.DefaultCredentialsError(
                "File {} is not a valid json file.".format(filename), caught_exc
            )
            raise new_exc from caught_exc

    # The "type" key distinguishes authorized user files from service
    # account key files.
    credential_type = info.get("type")

    if credential_type == _default._AUTHORIZED_USER_TYPE:
        # Alias renamed from "credentials" to avoid shadowing the result
        # variable below.
        from google.oauth2 import _credentials_async as oauth2_credentials

        try:
            loaded = oauth2_credentials.Credentials.from_authorized_user_info(
                info, scopes=scopes
            )
        except ValueError as caught_exc:
            msg = "Failed to load authorized user credentials from {}".format(filename)
            raise exceptions.DefaultCredentialsError(msg, caught_exc) from caught_exc
        if quota_project_id:
            loaded = loaded.with_quota_project(quota_project_id)
        if not loaded.quota_project_id:
            _default._warn_about_problematic_credentials(loaded)
        # Authorized user files carry no project information.
        return loaded, None

    if credential_type == _default._SERVICE_ACCOUNT_TYPE:
        from google.oauth2 import _service_account_async as service_account

        try:
            loaded = service_account.Credentials.from_service_account_info(
                info, scopes=scopes
            ).with_quota_project(quota_project_id)
        except ValueError as caught_exc:
            msg = "Failed to load service account credentials from {}".format(filename)
            raise exceptions.DefaultCredentialsError(msg, caught_exc) from caught_exc
        return loaded, info.get("project_id")

    raise exceptions.DefaultCredentialsError(
        "The file {file} does not have a valid type. "
        "Type is {type}, expected one of {valid_types}.".format(
            file=filename, type=credential_type, valid_types=_default._VALID_TYPES
        )
    )
107
+
108
+
109
def _get_gcloud_sdk_credentials(quota_project_id=None):
    """Gets the credentials and project ID from the Cloud SDK."""
    from google.auth import _cloud_sdk

    # Only proceed when the ADC file written by gcloud actually exists.
    adc_path = _cloud_sdk.get_application_default_credentials_path()
    if not os.path.isfile(adc_path):
        return None, None

    credentials, project_id = load_credentials_from_file(
        adc_path, quota_project_id=quota_project_id
    )

    # The ADC file may omit the project; fall back to gcloud's active project.
    return credentials, project_id or _cloud_sdk.get_project_id()
127
+
128
+
129
def _get_explicit_environ_credentials(quota_project_id=None):
    """Gets credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
    variable."""
    from google.auth import _cloud_sdk

    cloud_sdk_adc_path = _cloud_sdk.get_application_default_credentials_path()
    explicit_file = os.environ.get(environment_vars.CREDENTIALS)

    if explicit_file is None:
        return None, None

    if explicit_file == cloud_sdk_adc_path:
        # The env var points at the Cloud SDK's own ADC file; defer to the
        # Cloud SDK flow, which can additionally obtain the project ID by
        # calling gcloud.
        return _get_gcloud_sdk_credentials(quota_project_id=quota_project_id)

    return load_credentials_from_file(
        explicit_file, quota_project_id=quota_project_id
    )
152
+
153
+
154
def _get_gae_credentials():
    """Gets Google App Engine App Identity credentials and project ID."""
    # Delegate to the synchronous implementation in google.auth._default,
    # which already tolerates the app_engine module being unavailable.
    credentials, project_id = _default._get_gae_credentials()
    return credentials, project_id
160
+
161
+
162
def _get_gce_credentials(request=None):
    """Gets credentials and project ID from the GCE Metadata Service."""
    # Delegate to the synchronous implementation in google.auth._default.
    # When request is None it falls back to the _http_client transport,
    # which is acceptable because the metadata server does not use SSL and
    # never requires proxies; it also tolerates a missing compute_engine
    # module.
    credentials, project_id = _default._get_gce_credentials(request)
    return credentials, project_id
173
+
174
+
175
def default_async(scopes=None, request=None, quota_project_id=None):
    """Gets the default credentials for the current environment.

    `Application Default Credentials`_ provides an easy way to obtain
    credentials to call Google APIs for server-to-server or local applications.
    This function acquires credentials from the environment in the following
    order:

    1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
    to the path of a valid service account JSON private key file, then it is
    loaded and returned. The project ID returned is the project ID defined
    in the service account file if available (some older files do not
    contain project ID information).
    2. If the `Google Cloud SDK`_ is installed and has application default
    credentials set they are loaded and returned.

    To enable application default credentials with the Cloud SDK run::

        gcloud auth application-default login

    If the Cloud SDK has an active project, the project ID is returned. The
    active project can be set using::

        gcloud config set project

    3. If the application is running in the `App Engine standard environment`_
    (first generation) then the credentials and project ID from the
    `App Identity Service`_ are used.
    4. If the application is running in `Compute Engine`_ or `Cloud Run`_ or
    the `App Engine flexible environment`_ or the `App Engine standard
    environment`_ (second generation) then the credentials and project ID
    are obtained from the `Metadata Service`_.
    5. If no credentials are found,
    :class:`~google.auth.exceptions.DefaultCredentialsError` will be raised.

    .. _Application Default Credentials: https://developers.google.com\
            /identity/protocols/application-default-credentials
    .. _Google Cloud SDK: https://cloud.google.com/sdk
    .. _App Engine standard environment: https://cloud.google.com/appengine
    .. _App Identity Service: https://cloud.google.com/appengine/docs/python\
            /appidentity/
    .. _Compute Engine: https://cloud.google.com/compute
    .. _App Engine flexible environment: https://cloud.google.com\
            /appengine/flexible
    .. _Metadata Service: https://cloud.google.com/compute/docs\
            /storing-retrieving-metadata
    .. _Cloud Run: https://cloud.google.com/run

    Example::

        import google.auth

        credentials, project_id = google.auth.default_async()

    Args:
        scopes (Sequence[str]): The list of scopes for the credentials. If
            specified, the credentials will automatically be scoped if
            necessary.
        request (google.auth.transport.Request): An object used to make
            HTTP requests. This is used to detect whether the application
            is running on Compute Engine. If not specified, then it will
            use the standard library http client to make requests.
        quota_project_id (Optional[str]): The project ID used for
            quota and billing.
    Returns:
        Tuple[~google.auth.credentials.Credentials, Optional[str]]:
            the current environment's credentials and project ID. Project ID
            may be None, which indicates that the Project ID could not be
            ascertained from the environment.

    Raises:
        ~google.auth.exceptions.DefaultCredentialsError:
            If no credentials were found, or if the credentials found were
            invalid.
    """
    from google.auth._credentials_async import with_scopes_if_required
    from google.auth.credentials import CredentialsWithQuotaProject

    # An explicitly-set project env var takes precedence over any project ID
    # discovered alongside the credentials below.
    explicit_project_id = os.environ.get(
        environment_vars.PROJECT, os.environ.get(environment_vars.LEGACY_PROJECT)
    )

    # Checkers run in ADC precedence order; the first one that yields
    # credentials wins.
    checkers = (
        lambda: _get_explicit_environ_credentials(quota_project_id=quota_project_id),
        lambda: _get_gcloud_sdk_credentials(quota_project_id=quota_project_id),
        _get_gae_credentials,
        lambda: _get_gce_credentials(request),
    )

    for checker in checkers:
        credentials, project_id = checker()
        if credentials is not None:
            credentials = with_scopes_if_required(credentials, scopes)
            # Only credential types that support a quota project can carry
            # one; other types are returned unchanged.
            if quota_project_id and isinstance(
                credentials, CredentialsWithQuotaProject
            ):
                credentials = credentials.with_quota_project(quota_project_id)
            effective_project_id = explicit_project_id or project_id
            if not effective_project_id:
                _default._LOGGER.warning(
                    "No project ID could be determined. Consider running "
                    "`gcloud config set project` or setting the %s "
                    "environment variable",
                    environment_vars.PROJECT,
                )
            return credentials, effective_project_id

    raise exceptions.DefaultCredentialsError(_default._CLOUD_SDK_MISSING_CREDENTIALS)
.venv/lib/python3.11/site-packages/google/auth/_helpers.py ADDED
@@ -0,0 +1,273 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2015 Google Inc.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Helper functions for commonly used utilities."""
16
+
17
+ import base64
18
+ import calendar
19
+ import datetime
20
+ from email.message import Message
21
+ import sys
22
+ import urllib
23
+
24
+ from google.auth import exceptions
25
+
26
+ # The smallest MDS cache used by this library stores tokens until 4 minutes from
27
+ # expiry.
28
+ REFRESH_THRESHOLD = datetime.timedelta(minutes=3, seconds=45)
29
+
30
+
31
def copy_docstring(source_class):
    """Decorator that copies a method's docstring from another class.

    Args:
        source_class (type): The class that has the documented method.

    Returns:
        Callable: A decorator that will copy the docstring of the same
            named method in the source class to the decorated method.
    """

    def decorator(method):
        """Copy the docstring onto *method* and return it.

        Args:
            method (Callable): The method to copy the docstring to.

        Returns:
            Callable: the same method passed in with an updated docstring.

        Raises:
            google.auth.exceptions.InvalidOperation: if the method already
                has a docstring.
        """
        # Refuse to clobber an existing docstring: that would silently hide
        # the method's own documentation.
        if method.__doc__:
            raise exceptions.InvalidOperation("Method already has a docstring.")

        method.__doc__ = getattr(source_class, method.__name__).__doc__
        return method

    return decorator
63
+
64
+
65
def parse_content_type(header_value):
    """Parse a 'content-type' header value down to the bare media-type.

    Parsing is done with :class:`email.message.Message` as suggested in
    PEP 594, because the ``cgi`` module is deprecated and will be removed
    in Python 3.13 (see https://peps.python.org/pep-0594/#cgi).

    Args:
        header_value (str): The value of a 'content-type' header as a string.

    Returns:
        str: The lowercase media-type portion of the parsed 'content-type'
            header, without any parameters. If the provided content-type is
            not parsable, returns 'text/plain', the default value for
            textual files.
    """
    message = Message()
    message["content-type"] = header_value
    # Despite its name, get_content_type() returns only the media-type part.
    return message.get_content_type()
85
+
86
+
87
def utcnow():
    """Returns the current UTC datetime.

    Returns:
        datetime: The current time in UTC, as an offset-naive value.
    """
    # datetime.utcnow() is deprecated since Python 3.12, so the value is built
    # from the timezone-aware now() instead. Callers historically received an
    # offset-naive datetime, so the tzinfo is stripped to keep comparisons
    # with other naive datetimes working.
    return datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
101
+
102
+
103
def datetime_to_secs(value):
    """Convert a datetime object to the number of seconds since the UNIX epoch.

    Args:
        value (datetime): The datetime to convert.

    Returns:
        int: The number of seconds since the UNIX epoch.
    """
    # calendar.timegm interprets the time tuple as UTC (unlike time.mktime,
    # which assumes local time).
    utc_tuple = value.utctimetuple()
    return calendar.timegm(utc_tuple)
113
+
114
+
115
def to_bytes(value, encoding="utf-8"):
    """Converts a string value to bytes, if necessary.

    Args:
        value (Union[str, bytes]): The value to be converted.
        encoding (str): The encoding to use to convert unicode to bytes.
            Defaults to "utf-8".

    Returns:
        bytes: The original value converted to bytes (if unicode) or as
            passed in if it started out as bytes.

    Raises:
        google.auth.exceptions.InvalidValue: If the value could not be
            converted to bytes.
    """
    if isinstance(value, str):
        return value.encode(encoding)
    if isinstance(value, bytes):
        return value
    # Anything that is neither str nor bytes cannot be converted.
    raise exceptions.InvalidValue(
        "{0!r} could not be converted to bytes".format(value)
    )
137
+
138
+
139
def from_bytes(value):
    """Converts bytes to a string value, if necessary.

    Args:
        value (Union[str, bytes]): The value to be converted.

    Returns:
        str: The original value converted to unicode (if bytes) or as passed
            in if it started out as unicode.

    Raises:
        google.auth.exceptions.InvalidValue: If the value could not be
            converted to unicode.
    """
    if isinstance(value, bytes):
        return value.decode("utf-8")
    if isinstance(value, str):
        return value
    # Anything that is neither bytes nor str cannot be converted.
    raise exceptions.InvalidValue(
        "{0!r} could not be converted to unicode".format(value)
    )
159
+
160
+
161
def update_query(url, params, remove=None):
    """Updates a URL's query parameters.

    Replaces any current values if they are already present in the URL.

    Args:
        url (str): The URL to update.
        params (Mapping[str, str]): A mapping of query parameter
            keys to values.
        remove (Sequence[str]): Parameters to remove from the query string.

    Returns:
        str: The URL with updated query parameters.

    Examples:

        >>> url = 'http://example.com?a=1'
        >>> update_query(url, {'a': '2'})
        http://example.com?a=2
        >>> update_query(url, {'b': '3'})
        http://example.com?a=1&b=3
        >> update_query(url, {'b': '3'}, remove=['a'])
        http://example.com?b=3

    """
    removed_keys = frozenset(remove or [])

    # Decompose the URL so only the query component is rewritten.
    parts = urllib.parse.urlparse(url)
    query_params = urllib.parse.parse_qs(parts.query)
    # New values replace any existing ones for the same key.
    query_params.update(params)
    filtered = {
        key: value for key, value in query_params.items() if key not in removed_keys
    }
    # doseq=True re-expands the list values produced by parse_qs.
    new_query = urllib.parse.urlencode(filtered, doseq=True)
    return urllib.parse.urlunparse(parts._replace(query=new_query))
204
+
205
+
206
def scopes_to_string(scopes):
    """Converts scope value to a string suitable for sending to OAuth 2.0
    authorization servers.

    Args:
        scopes (Sequence[str]): The sequence of scopes to convert.

    Returns:
        str: The scopes formatted as a single string.
    """
    # OAuth 2.0 servers expect scopes as one space-delimited string.
    joined = " ".join(scopes)
    return joined
217
+
218
+
219
def string_to_scopes(scopes):
    """Converts stringifed scopes value to a list.

    Args:
        scopes (Union[Sequence, str]): The string of space-separated scopes
            to convert.
    Returns:
        Sequence(str): The separated scopes.
    """
    # split(" ") (rather than split()) preserves the historical behavior for
    # inputs containing consecutive spaces.
    return scopes.split(" ") if scopes else []
232
+
233
+
234
def padded_urlsafe_b64decode(value):
    """Decodes base64 strings lacking padding characters.

    Google infrastructure tends to omit the base64 padding characters.

    Args:
        value (Union[str, bytes]): The encoded value.

    Returns:
        bytes: The decoded value
    """
    raw = to_bytes(value)
    # Restore the stripped padding: base64 input length must be a multiple
    # of four.
    missing = -len(raw) % 4
    return base64.urlsafe_b64decode(raw + b"=" * missing)
248
+
249
+
250
def unpadded_urlsafe_b64encode(value):
    """Encodes base64 strings removing any padding characters.

    `rfc 7515`_ defines Base64url to NOT include any padding
    characters, but the stdlib doesn't do that by default.

    _rfc7515: https://tools.ietf.org/html/rfc7515#page-6

    Args:
        value (Union[str|bytes]): The bytes-like value to encode

    Returns:
        Union[str|bytes]: The encoded value
    """
    encoded = base64.urlsafe_b64encode(value)
    # Strip the '=' padding to match the Base64url form from RFC 7515.
    return encoded.rstrip(b"=")
265
+
266
+
267
def is_python_3():
    """Check if the Python interpreter is Python 2 or 3.

    Returns:
        bool: True if the Python interpreter is Python 3 and False otherwise.
    """
    # Any 3.x (or later) version_info tuple compares greater than (3, 0).
    version = sys.version_info
    return version > (3, 0)
.venv/lib/python3.11/site-packages/google/auth/_jwt_async.py ADDED
@@ -0,0 +1,164 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """JSON Web Tokens
16
+
17
+ Provides support for creating (encoding) and verifying (decoding) JWTs,
18
+ especially JWTs generated and consumed by Google infrastructure.
19
+
20
+ See `rfc7519`_ for more details on JWTs.
21
+
22
+ To encode a JWT use :func:`encode`::
23
+
24
+ from google.auth import crypt
25
+ from google.auth import jwt_async
26
+
27
+ signer = crypt.Signer(private_key)
28
+ payload = {'some': 'payload'}
29
+ encoded = jwt_async.encode(signer, payload)
30
+
31
+ To decode a JWT and verify claims use :func:`decode`::
32
+
33
+ claims = jwt_async.decode(encoded, certs=public_certs)
34
+
35
+ You can also skip verification::
36
+
37
+ claims = jwt_async.decode(encoded, verify=False)
38
+
39
+ .. _rfc7519: https://tools.ietf.org/html/rfc7519
40
+
41
+
42
+ NOTE: This async support is experimental and marked internal. This surface may
43
+ change in minor releases.
44
+ """
45
+
46
+ from google.auth import _credentials_async
47
+ from google.auth import jwt
48
+
49
+
50
def encode(signer, payload, header=None, key_id=None):
    """Make a signed JWT.

    Args:
        signer (google.auth.crypt.Signer): The signer used to sign the JWT.
        payload (Mapping[str, str]): The JWT payload.
        header (Mapping[str, str]): Additional JWT header payload.
        key_id (str): The key id to add to the JWT header. If the
            signer has a key id it will be used as the default. If this is
            specified it will override the signer's key id.

    Returns:
        bytes: The encoded JWT.
    """
    # Encoding is pure computation, so the synchronous implementation is
    # reused directly.
    return jwt.encode(signer, payload, header=header, key_id=key_id)
65
+
66
+
67
def decode(token, certs=None, verify=True, audience=None):
    """Decode and verify a JWT.

    Args:
        token (str): The encoded JWT.
        certs (Union[str, bytes, Mapping[str, Union[str, bytes]]]): The
            certificate used to validate the JWT signature. If bytes or
            string, it must be the public key certificate in PEM format.
            If a mapping, it must map key IDs to public key certificates
            in PEM format, and must contain the key ID specified in the
            token's header.
        verify (bool): Whether to perform signature and claim validation.
            Verification is done by default.
        audience (str): The audience claim, 'aud', that this JWT should
            contain. If None then the JWT's 'aud' parameter is not verified.

    Returns:
        Mapping[str, str]: The deserialized JSON payload in the JWT.

    Raises:
        ValueError: if any verification checks failed.
    """
    # Verification is pure computation, so the synchronous implementation is
    # reused directly.
    return jwt.decode(token, certs=certs, verify=verify, audience=audience)
91
+
92
+
93
class Credentials(
    jwt.Credentials, _credentials_async.Signing, _credentials_async.Credentials
):
    """Credentials that use a JWT as the bearer token.

    These credentials require an "audience" claim. This claim identifies the
    intended recipient of the bearer token.

    The constructor arguments determine the claims for the JWT that is
    sent with requests. Usually, you'll construct these credentials with
    one of the helper constructors as shown in the next section.

    To create JWT credentials using a Google service account private key
    JSON file::

        audience = 'https://pubsub.googleapis.com/google.pubsub.v1.Publisher'
        credentials = jwt_async.Credentials.from_service_account_file(
            'service-account.json',
            audience=audience)

    If you already have the service account file loaded and parsed::

        service_account_info = json.load(open('service_account.json'))
        credentials = jwt_async.Credentials.from_service_account_info(
            service_account_info,
            audience=audience)

    Both helper methods pass on arguments to the constructor, so you can
    specify the JWT claims::

        credentials = jwt_async.Credentials.from_service_account_file(
            'service-account.json',
            audience=audience,
            additional_claims={'meta': 'data'})

    You can also construct the credentials directly if you have a
    :class:`~google.auth.crypt.Signer` instance::

        credentials = jwt_async.Credentials(
            signer,
            issuer='your-issuer',
            subject='your-subject',
            audience=audience)

    The claims are considered immutable. If you want to modify the claims,
    you can easily create another instance using :meth:`with_claims`::

        new_audience = (
            'https://pubsub.googleapis.com/google.pubsub.v1.Subscriber')
        new_credentials = credentials.with_claims(audience=new_audience)

    NOTE: The class body is intentionally empty. All behavior is inherited
    from :class:`google.auth.jwt.Credentials`, with the async ``Signing``
    and ``Credentials`` base classes mixed in to provide the async
    credential interface.
    """
+
145
+
146
class OnDemandCredentials(
    jwt.OnDemandCredentials, _credentials_async.Signing, _credentials_async.Credentials
):
    """On-demand JWT credentials.

    Like :class:`Credentials`, this class uses a JWT as the bearer token for
    authentication. However, this class does not require the audience at
    construction time. Instead, it will generate a new token on-demand for
    each request using the request URI as the audience. It caches tokens
    so that multiple requests to the same URI do not incur the overhead
    of generating a new token every time.

    This behavior is especially useful for `gRPC`_ clients. A gRPC service may
    have multiple audience and gRPC clients may not know all of the audiences
    required for accessing a particular service. With these credentials,
    no knowledge of the audiences is required ahead of time.

    .. _grpc: http://www.grpc.io/

    NOTE: The class body is intentionally empty. All behavior is inherited
    from :class:`google.auth.jwt.OnDemandCredentials`, with the async
    ``Signing`` and ``Credentials`` base classes mixed in to provide the
    async credential interface.
    """
.venv/lib/python3.11/site-packages/google/auth/_oauth2client.py ADDED
@@ -0,0 +1,167 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Helpers for transitioning from oauth2client to google-auth.
16
+
17
+ .. warning::
18
+ This module is private as it is intended to assist first-party downstream
19
+ clients with the transition from oauth2client to google-auth.
20
+ """
21
+
22
+ from __future__ import absolute_import
23
+
24
+ from google.auth import _helpers
25
+ import google.auth.app_engine
26
+ import google.auth.compute_engine
27
+ import google.oauth2.credentials
28
+ import google.oauth2.service_account
29
+
30
+ try:
31
+ import oauth2client.client # type: ignore
32
+ import oauth2client.contrib.gce # type: ignore
33
+ import oauth2client.service_account # type: ignore
34
+ except ImportError as caught_exc:
35
+ raise ImportError("oauth2client is not installed.") from caught_exc
36
+
37
+ try:
38
+ import oauth2client.contrib.appengine # type: ignore
39
+
40
+ _HAS_APPENGINE = True
41
+ except ImportError:
42
+ _HAS_APPENGINE = False
43
+
44
+
45
+ _CONVERT_ERROR_TMPL = "Unable to convert {} to a google-auth credentials class."
46
+
47
+
48
def _convert_oauth2_credentials(credentials):
    """Converts to :class:`google.oauth2.credentials.Credentials`.

    Args:
        credentials (Union[oauth2client.client.OAuth2Credentials,
            oauth2client.client.GoogleCredentials]): The credentials to
            convert.

    Returns:
        google.oauth2.credentials.Credentials: The converted credentials.
    """
    converted = google.oauth2.credentials.Credentials(
        token=credentials.access_token,
        refresh_token=credentials.refresh_token,
        token_uri=credentials.token_uri,
        client_id=credentials.client_id,
        client_secret=credentials.client_secret,
        scopes=credentials.scopes,
    )

    # The constructor does not accept an expiry, so copy it onto the private
    # attribute directly.
    converted._expires = credentials.token_expiry

    return converted
+
72
+
73
def _convert_service_account_credentials(credentials):
    """Converts to :class:`google.oauth2.service_account.Credentials`.

    Args:
        credentials (Union[
            oauth2client.service_account.ServiceAccountCredentials,
            oauth2client.service_account._JWTAccessCredentials]): The
            credentials to convert.

    Returns:
        google.oauth2.service_account.Credentials: The converted credentials.
    """
    # Copy so the original credentials' serialization data is not mutated.
    info = dict(credentials.serialization_data)
    info["token_uri"] = credentials.token_uri
    return google.oauth2.service_account.Credentials.from_service_account_info(info)
88
+
89
+
90
def _convert_gce_app_assertion_credentials(credentials):
    """Converts to :class:`google.auth.compute_engine.Credentials`.

    Args:
        credentials (oauth2client.contrib.gce.AppAssertionCredentials): The
            credentials to convert.

    Returns:
        google.auth.compute_engine.Credentials: The converted credentials.
    """
    # Only the service account email carries over; tokens are re-fetched from
    # the metadata server by the new credentials.
    email = credentials.service_account_email
    return google.auth.compute_engine.Credentials(service_account_email=email)
103
+
104
+
105
def _convert_appengine_app_assertion_credentials(credentials):
    """Converts to :class:`google.auth.app_engine.Credentials`.

    Args:
        credentials (oauth2client.contrib.app_engine.AppAssertionCredentials):
            The credentials to convert.

    Returns:
        google.auth.app_engine.Credentials: The converted credentials.
    """
    # pylint: disable=invalid-name
    # oauth2client stores scopes as a single space-separated string.
    scope_list = _helpers.string_to_scopes(credentials.scope)
    return google.auth.app_engine.Credentials(
        scopes=scope_list,
        service_account_id=credentials.service_account_id,
    )
120
+
121
+
122
# Maps each supported oauth2client credential class to its converter
# function. convert() dispatches on type(credentials) via an exact lookup,
# so subclasses not listed here are not matched.
_CLASS_CONVERSION_MAP = {
    oauth2client.client.OAuth2Credentials: _convert_oauth2_credentials,
    oauth2client.client.GoogleCredentials: _convert_oauth2_credentials,
    oauth2client.service_account.ServiceAccountCredentials: _convert_service_account_credentials,
    oauth2client.service_account._JWTAccessCredentials: _convert_service_account_credentials,
    oauth2client.contrib.gce.AppAssertionCredentials: _convert_gce_app_assertion_credentials,
}

# The App Engine credential class is registered only when oauth2client's
# appengine contrib module imported successfully (see _HAS_APPENGINE above).
if _HAS_APPENGINE:
    _CLASS_CONVERSION_MAP[
        oauth2client.contrib.appengine.AppAssertionCredentials
    ] = _convert_appengine_app_assertion_credentials
134
+
135
+
136
def convert(credentials):
    """Convert oauth2client credentials to google-auth credentials.

    This class converts:

    - :class:`oauth2client.client.OAuth2Credentials` to
      :class:`google.oauth2.credentials.Credentials`.
    - :class:`oauth2client.client.GoogleCredentials` to
      :class:`google.oauth2.credentials.Credentials`.
    - :class:`oauth2client.service_account.ServiceAccountCredentials` to
      :class:`google.oauth2.service_account.Credentials`.
    - :class:`oauth2client.service_account._JWTAccessCredentials` to
      :class:`google.oauth2.service_account.Credentials`.
    - :class:`oauth2client.contrib.gce.AppAssertionCredentials` to
      :class:`google.auth.compute_engine.Credentials`.
    - :class:`oauth2client.contrib.appengine.AppAssertionCredentials` to
      :class:`google.auth.app_engine.Credentials`.

    Returns:
        google.auth.credentials.Credentials: The converted credentials.

    Raises:
        ValueError: If the credentials could not be converted.
    """

    credentials_class = type(credentials)

    # Keep the try body limited to the lookup itself so that a KeyError
    # raised *inside* a converter function is not masked as an
    # "unable to convert" ValueError.
    try:
        converter = _CLASS_CONVERSION_MAP[credentials_class]
    except KeyError as caught_exc:
        new_exc = ValueError(_CONVERT_ERROR_TMPL.format(credentials_class))
        raise new_exc from caught_exc

    return converter(credentials)
.venv/lib/python3.11/site-packages/google/auth/_refresh_worker.py ADDED
@@ -0,0 +1,109 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2023 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import copy
16
+ import logging
17
+ import threading
18
+
19
+ import google.auth.exceptions as e
20
+
21
+ _LOGGER = logging.getLogger(__name__)
22
+
23
+
24
class RefreshThreadManager:
    """Coordinates at most one background token-refresh job at a time."""

    def __init__(self):
        """Initializes the manager with no active worker."""
        self._worker = None
        # Guards every read and write of self._worker.
        self._lock = threading.Lock()

    def start_refresh(self, cred, request):
        """Starts a refresh thread for the given credentials.
        The credentials are refreshed using the request parameter.
        request and cred MUST not be None

        Returns True if a background refresh was kicked off. False otherwise.

        Args:
            cred: A credentials object.
            request: A request object.
        Returns:
            bool
        """
        if cred is None or request is None:
            raise e.InvalidValue(
                "Unable to start refresh. cred and request must be valid and instantiated objects."
            )

        with self._lock:
            worker = self._worker
            if worker is not None and worker._error_info is not None:
                # A previous background refresh failed; report False so the
                # caller falls back to a blocking refresh.
                return False

            if worker is None or not worker.is_alive():  # pragma: NO COVER
                # Deep-copy the request so the worker thread owns its state.
                self._worker = RefreshThread(cred=cred, request=copy.deepcopy(request))
                self._worker.start()
            return True

    def clear_error(self):
        """
        Removes any errors that were stored from previous background refreshes.
        """
        with self._lock:
            worker = self._worker
            if worker:
                worker._error_info = None

    def __getstate__(self):
        """Pickle helper: locks are not picklable, so drop the lock."""
        state = dict(self.__dict__)
        state["_lock"] = None
        return state

    def __setstate__(self, state):
        """Pickle helper: recreate the lock removed by __getstate__."""
        state["_lock"] = threading.Lock()
        self.__dict__.update(state)
80
+
81
+
82
class RefreshThread(threading.Thread):
    """Worker thread that refreshes a single credential exactly once."""

    def __init__(self, cred, request, **kwargs):
        """Initializes the thread.

        Args:
            cred: A Credential object to refresh.
            request: A Request object used to perform a credential refresh.
            **kwargs: Forwarded to :class:`threading.Thread`.
        """
        super().__init__(**kwargs)
        self._cred = cred
        self._request = request
        # Exception raised by the most recent refresh attempt, if any.
        self._error_info = None

    def run(self):
        """Refresh the credential, recording any failure instead of raising."""
        try:
            self._cred.refresh(self._request)
        except Exception as err:  # pragma: NO COVER
            # Background failures must not kill the process; store the error
            # so RefreshThreadManager can fall back to blocking refreshes.
            _LOGGER.error(f"Background refresh failed due to: {err}")
            self._error_info = err
.venv/lib/python3.11/site-packages/google/auth/_service_account_info.py ADDED
@@ -0,0 +1,80 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Helper functions for loading data from a Google service account file."""
16
+
17
+ import io
18
+ import json
19
+
20
+ from google.auth import crypt
21
+ from google.auth import exceptions
22
+
23
+
24
def from_dict(data, require=None, use_rsa_signer=True):
    """Validates a dictionary containing Google service account data.

    Creates and returns a :class:`google.auth.crypt.Signer` instance from the
    private key specified in the data.

    Args:
        data (Mapping[str, str]): The service account data
        require (Sequence[str]): List of keys required to be present in the
            info.
        use_rsa_signer (Optional[bool]): Whether to use RSA signer or EC signer.
            We use RSA signer by default.

    Returns:
        google.auth.crypt.Signer: A signer created from the private key in the
            service account file.

    Raises:
        MalformedError: if the data was in the wrong format, or if one of the
            required keys is missing.
    """
    required = set() if require is None else set(require)
    absent = required.difference(data.keys())

    if absent:
        raise exceptions.MalformedError(
            "Service account info was not in the expected format, missing "
            "fields {}.".format(", ".join(absent))
        )

    # Select the signer implementation based on the requested key type.
    signer_cls = crypt.RSASigner if use_rsa_signer else crypt.ES256Signer
    return signer_cls.from_service_account_info(data)
62
+
63
+
64
def from_filename(filename, require=None, use_rsa_signer=True):
    """Reads a Google service account JSON file and returns its parsed info.

    Args:
        filename (str): The path to the service account .json file.
        require (Sequence[str]): List of keys required to be present in the
            info.
        use_rsa_signer (Optional[bool]): Whether to use RSA signer or EC signer.
            We use RSA signer by default.

    Returns:
        Tuple[ Mapping[str, str], google.auth.crypt.Signer ]: The verified
            info and a signer instance.
    """
    with io.open(filename, "r", encoding="utf-8") as source:
        info = json.load(source)
    signer = from_dict(info, require=require, use_rsa_signer=use_rsa_signer)
    return info, signer
.venv/lib/python3.11/site-packages/google/auth/app_engine.py ADDED
@@ -0,0 +1,180 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Google App Engine standard environment support.
16
+
17
+ This module provides authentication and signing for applications running on App
18
+ Engine in the standard environment using the `App Identity API`_.
19
+
20
+
21
+ .. _App Identity API:
22
+ https://cloud.google.com/appengine/docs/python/appidentity/
23
+ """
24
+
25
+ import datetime
26
+
27
+ from google.auth import _helpers
28
+ from google.auth import credentials
29
+ from google.auth import crypt
30
+ from google.auth import exceptions
31
+
32
+ # pytype: disable=import-error
33
+ try:
34
+ from google.appengine.api import app_identity # type: ignore
35
+ except ImportError:
36
+ app_identity = None # type: ignore
37
+ # pytype: enable=import-error
38
+
39
+
40
class Signer(crypt.Signer):
    """Signs messages using the App Engine App Identity service.

    This can be used in place of :class:`google.auth.crypt.Signer` when
    running in the App Engine standard environment.
    """

    @property
    def key_id(self):
        """Optional[str]: The key ID used to identify this private key.

        .. warning::
            This is always ``None``. The key ID used by App Engine can not
            be reliably determined ahead of time.
        """
        return None

    @_helpers.copy_docstring(crypt.Signer)
    def sign(self, message):
        # The key name returned by sign_blob is discarded: it cannot be
        # mapped to a stable key ID (see key_id above).
        payload = _helpers.to_bytes(message)
        _, signature = app_identity.sign_blob(payload)
        return signature
62
+
63
+
64
def get_project_id():
    """Gets the project ID for the current App Engine application.

    Returns:
        str: The project ID

    Raises:
        google.auth.exceptions.OSError: If the App Engine APIs are unavailable.
    """
    # pylint: disable=missing-raises-doc
    # google.auth.exceptions.OSError is a valid alias of the builtin OSError,
    # even though pylint flags it.
    if app_identity is None:
        raise exceptions.OSError("The App Engine APIs are not available.")
    return app_identity.get_application_id()
79
+
80
+
81
class Credentials(
    credentials.Scoped, credentials.Signing, credentials.CredentialsWithQuotaProject
):
    """App Engine standard environment credentials.

    These credentials use the App Engine App Identity API to obtain access
    tokens.
    """

    def __init__(
        self,
        scopes=None,
        default_scopes=None,
        service_account_id=None,
        quota_project_id=None,
    ):
        """
        Args:
            scopes (Sequence[str]): Scopes to request from the App Identity
                API.
            default_scopes (Sequence[str]): Default scopes passed by a
                Google client library. Use 'scopes' for user-defined scopes.
            service_account_id (str): The service account ID passed into
                :func:`google.appengine.api.app_identity.get_access_token`.
                If not specified, the default application service account
                ID will be used.
            quota_project_id (Optional[str]): The project ID used for quota
                and billing.

        Raises:
            google.auth.exceptions.OSError: If the App Engine APIs are unavailable.
        """
        # pylint: disable=missing-raises-doc
        # google.auth.exceptions.OSError is a valid alias of the builtin
        # OSError, even though pylint flags it.
        if app_identity is None:
            raise exceptions.OSError("The App Engine APIs are not available.")

        super(Credentials, self).__init__()
        self._scopes = scopes
        self._default_scopes = default_scopes
        self._service_account_id = service_account_id
        self._signer = Signer()
        self._quota_project_id = quota_project_id

    @_helpers.copy_docstring(credentials.Credentials)
    def refresh(self, request):
        # pylint: disable=unused-argument
        # `request` is unused: the App Identity API performs its own RPC.
        scopes = self._scopes if self._scopes is not None else self._default_scopes
        token, ttl = app_identity.get_access_token(scopes, self._service_account_id)
        expiry = datetime.datetime.utcfromtimestamp(ttl)

        self.token, self.expiry = token, expiry

    @property
    def service_account_email(self):
        """The service account email."""
        if self._service_account_id is None:
            # Lazily resolve and cache the default service account name.
            self._service_account_id = app_identity.get_service_account_name()
        return self._service_account_id

    @property
    def requires_scopes(self):
        """Checks if the credentials requires scopes.

        Returns:
            bool: True if there are no scopes set otherwise False.
        """
        return not self._scopes and not self._default_scopes

    @_helpers.copy_docstring(credentials.Scoped)
    def with_scopes(self, scopes, default_scopes=None):
        return self.__class__(
            scopes=scopes,
            default_scopes=default_scopes,
            service_account_id=self._service_account_id,
            quota_project_id=self.quota_project_id,
        )

    @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
    def with_quota_project(self, quota_project_id):
        # Fix: propagate default_scopes so the copy keeps the same effective
        # scopes as the original. Previously default_scopes was silently
        # dropped here, unlike in with_scopes above.
        return self.__class__(
            scopes=self._scopes,
            default_scopes=self._default_scopes,
            service_account_id=self._service_account_id,
            quota_project_id=quota_project_id,
        )

    @_helpers.copy_docstring(credentials.Signing)
    def sign_bytes(self, message):
        return self._signer.sign(message)

    @property  # type: ignore
    @_helpers.copy_docstring(credentials.Signing)
    def signer_email(self):
        return self.service_account_email

    @property  # type: ignore
    @_helpers.copy_docstring(credentials.Signing)
    def signer(self):
        return self._signer
.venv/lib/python3.11/site-packages/google/auth/credentials.py ADDED
@@ -0,0 +1,522 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
+ """Interfaces for credentials."""
17
+
18
+ import abc
19
+ from enum import Enum
20
+ import os
21
+
22
+ from google.auth import _helpers, environment_vars
23
+ from google.auth import exceptions
24
+ from google.auth import metrics
25
+ from google.auth._credentials_base import _BaseCredentials
26
+ from google.auth._refresh_worker import RefreshThreadManager
27
+
28
+ DEFAULT_UNIVERSE_DOMAIN = "googleapis.com"
29
+
30
+
31
class Credentials(_BaseCredentials):
    """Base class for all credentials.

    Every credential carries a :attr:`token` used for authentication and may
    also set an :attr:`expiry` marking when that token stops being valid.

    Most credentials are :attr:`invalid` until :meth:`refresh` is called;
    :meth:`before_request` performs that refresh automatically before the
    first HTTP request.

    Although the token and expiry change as the credentials are refreshed
    and used, credentials should be considered immutable: construction-time
    configuration (private keys, scopes, and other options) cannot be
    changed afterwards. Some subclasses provide copy-with-modification
    helpers such as :meth:`ScopedCredentials.with_scopes`.
    """

    def __init__(self):
        super(Credentials, self).__init__()

        self.expiry = None
        """Optional[datetime]: When the token expires and is no longer valid.
        If this is None, the token is assumed to never expire."""
        self._quota_project_id = None
        """Optional[str]: Project to use for quota and billing purposes."""
        self._trust_boundary = None
        """Optional[dict]: Cache of a trust boundary response which has a list
        of allowed regions and an encoded string representation of credentials
        trust boundary."""
        self._universe_domain = DEFAULT_UNIVERSE_DOMAIN
        """Optional[str]: The universe domain value, default is googleapis.com
        """

        self._use_non_blocking_refresh = False
        self._refresh_worker = RefreshThreadManager()

    @property
    def expired(self):
        """Checks if the credentials are expired.

        Note that credentials can be invalid but not expired because
        Credentials with :attr:`expiry` set to None is considered to never
        expire.

        .. deprecated:: v2.24.0
            Prefer checking :attr:`token_state` instead.
        """
        if not self.expiry:
            return False
        # Shift the expiry earlier by the refresh threshold so expiration is
        # reported ahead of time, avoiding a 401-refresh-retry loop.
        return _helpers.utcnow() >= self.expiry - _helpers.REFRESH_THRESHOLD

    @property
    def valid(self):
        """Checks the validity of the credentials.

        This is True if the credentials have a :attr:`token` and the token
        is not :attr:`expired`.

        .. deprecated:: v2.24.0
            Prefer checking :attr:`token_state` instead.
        """
        return self.token is not None and not self.expired

    @property
    def token_state(self):
        """
        See `:obj:`TokenState`
        """
        if self.token is None:
            return TokenState.INVALID

        # Tokens without an expiry are always treated as fresh.
        if self.expiry is None:
            return TokenState.FRESH

        if _helpers.utcnow() >= self.expiry:
            return TokenState.INVALID

        if _helpers.utcnow() >= self.expiry - _helpers.REFRESH_THRESHOLD:
            return TokenState.STALE

        return TokenState.FRESH

    @property
    def quota_project_id(self):
        """Project to use for quota and billing purposes."""
        return self._quota_project_id

    @property
    def universe_domain(self):
        """The universe domain value."""
        return self._universe_domain

    def get_cred_info(self):
        """The credential information JSON.

        The credential information will be added to auth related error messages
        by client library.

        Returns:
            Mapping[str, str]: The credential information JSON.
        """
        return None

    @abc.abstractmethod
    def refresh(self, request):
        """Refreshes the access token.

        Args:
            request (google.auth.transport.Request): The object used to make
                HTTP requests.

        Raises:
            google.auth.exceptions.RefreshError: If the credentials could
                not be refreshed.
        """
        # pylint: disable=missing-raises-doc
        # (pylint doesn't recognize that this is abstract)
        raise NotImplementedError("Refresh must be implemented")

    def _metric_header_for_usage(self):
        """The x-goog-api-client header for token usage metric.

        This header is added to API service requests by before_request. For
        example, "cred-type/sa-jwt" means a service account self-signed JWT
        access token was used in the request's authorization header.
        Subclasses override this method to provide a value when the token
        usage metric is needed.

        Returns:
            str: The x-goog-api-client header value.
        """
        return None

    def apply(self, headers, token=None):
        """Apply the token to the authentication header.

        Args:
            headers (Mapping): The HTTP request headers.
            token (Optional[str]): If specified, overrides the current access
                token.
        """
        self._apply(headers, token=token)
        # The trust boundary is a cached global-lookup response holding a
        # list of allowed regions plus a hex-encoded representation, e.g.:
        #   {"locations": ["us-central1", ...], "encoded_locations": "0xA30"}
        if self._trust_boundary is not None:
            headers["x-allowed-locations"] = self._trust_boundary["encoded_locations"]
        if self.quota_project_id:
            headers["x-goog-user-project"] = self.quota_project_id

    def _blocking_refresh(self, request):
        # Synchronous path: refresh inline whenever the token is unusable.
        if not self.valid:
            self.refresh(request)

    def _non_blocking_refresh(self, request):
        fall_back_to_blocking = False

        if self.token_state == TokenState.STALE:
            # A stale token is still usable, so try refreshing in the
            # background; fall back to blocking if the worker refuses.
            started = self._refresh_worker.start_refresh(self, request)
            fall_back_to_blocking = not started

        if self.token_state == TokenState.INVALID or fall_back_to_blocking:
            self.refresh(request)
            # A successful blocking refresh clears any stored worker error
            # so future refreshes can again run in a background thread.
            self._refresh_worker.clear_error()

    def before_request(self, request, method, url, headers):
        """Performs credential-specific before request logic.

        Refreshes the credentials if necessary, then calls :meth:`apply` to
        apply the token to the authentication header.

        Args:
            request (google.auth.transport.Request): The object used to make
                HTTP requests.
            method (str): The request's HTTP method or the RPC method being
                invoked.
            url (str): The request's URI or the RPC service's URI.
            headers (Mapping): The request's headers.
        """
        # pylint: disable=unused-argument
        # (Subclasses may use these arguments to ascertain information about
        # the http request.)
        if self._use_non_blocking_refresh:
            self._non_blocking_refresh(request)
        else:
            self._blocking_refresh(request)

        metrics.add_metric_header(headers, self._metric_header_for_usage())
        self.apply(headers)

    def with_non_blocking_refresh(self):
        """Opt this credential into background (non-blocking) refreshes."""
        self._use_non_blocking_refresh = True
246
+
247
+
248
class CredentialsWithQuotaProject(Credentials):
    """Abstract base for credentials supporting ``with_quota_project`` factory"""

    def with_quota_project(self, quota_project_id):
        """Returns a copy of these credentials with a modified quota project.

        Args:
            quota_project_id (str): The project to use for quota and
                billing purposes

        Returns:
            google.auth.credentials.Credentials: A new credentials instance.
        """
        raise NotImplementedError("This credential does not support quota project.")

    def with_quota_project_from_environment(self):
        """Applies the quota project from the environment, when one is set."""
        env_quota = os.environ.get(environment_vars.GOOGLE_CLOUD_QUOTA_PROJECT)
        return self.with_quota_project(env_quota) if env_quota else self
268
+
269
+
270
class CredentialsWithTokenUri(Credentials):
    """Abstract base for credentials supporting ``with_token_uri`` factory"""

    def with_token_uri(self, token_uri):
        """Returns a copy of these credentials with a modified token uri.

        Args:
            token_uri (str): The uri to use for fetching/exchanging tokens

        Returns:
            google.auth.credentials.Credentials: A new credentials instance.
        """
        # Subclasses that exchange tokens must override this factory.
        raise NotImplementedError("This credential does not use token uri.")
283
+
284
+
285
class CredentialsWithUniverseDomain(Credentials):
    """Abstract base for credentials supporting ``with_universe_domain`` factory"""

    def with_universe_domain(self, universe_domain):
        """Returns a copy of these credentials with a modified universe domain.

        Args:
            universe_domain (str): The universe domain to use

        Returns:
            google.auth.credentials.Credentials: A new credentials instance.
        """
        # Subclasses supporting non-default universes override this factory.
        raise NotImplementedError(
            "This credential does not support with_universe_domain."
        )
300
+
301
+
302
class AnonymousCredentials(Credentials):
    """Credentials that do not provide any authentication information.

    These are useful in the case of services that support anonymous access or
    local service emulators that do not use credentials.
    """

    @property
    def expired(self):
        """Returns `False`, anonymous credentials never expire."""
        return False

    @property
    def valid(self):
        """Returns `True`, anonymous credentials are always valid."""
        return True

    def refresh(self, request):
        """Raises :class:``InvalidOperation``, anonymous credentials cannot be
        refreshed."""
        raise exceptions.InvalidOperation("Anonymous credentials cannot be refreshed.")

    def apply(self, headers, token=None):
        """Anonymous credentials do nothing to the request.

        The optional ``token`` argument is not supported.

        Raises:
            google.auth.exceptions.InvalidValue: If a token was specified.
        """
        # No header is added; only validate that no token override was given.
        if token is not None:
            raise exceptions.InvalidValue("Anonymous credentials don't support tokens.")

    def before_request(self, request, method, url, headers):
        """Anonymous credentials do nothing to the request."""
337
+
338
+
339
class ReadOnlyScoped(metaclass=abc.ABCMeta):
    """Interface for credentials whose scopes can be queried.

    OAuth 2.0-based credentials allow limiting access using scopes as
    described in `RFC6749 Section 3.3`_. A credential class implementing this
    interface uses scopes in its implementation.

    Some credentials require scopes in order to obtain a token. Check whether
    scoping is necessary with :attr:`requires_scopes`::

        if credentials.requires_scopes:
            # Scoping is required.
            credentials = credentials.with_scopes(scopes=['one', 'two'])

    Credentials that require scopes must either be constructed with scopes::

        credentials = SomeScopedCredentials(scopes=['one', 'two'])

    Or must copy an existing instance using :meth:`with_scopes`::

        scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])

    Some credentials have scopes but do not allow or require scopes to be
    set; those can be used as-is.

    .. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
    """

    def __init__(self):
        super(ReadOnlyScoped, self).__init__()
        self._scopes = None
        self._default_scopes = None

    @property
    def scopes(self):
        """Sequence[str]: the credentials' current set of scopes."""
        return self._scopes

    @property
    def default_scopes(self):
        """Sequence[str]: the credentials' current set of default scopes."""
        return self._default_scopes

    @abc.abstractproperty
    def requires_scopes(self):
        """True if these credentials require scopes to obtain an access token.
        """
        return False

    def has_scopes(self, scopes):
        """Checks if the credentials have the given scopes.

        .. warning: This method is not guaranteed to be accurate if the
            credentials are :attr:`~Credentials.invalid`.

        Args:
            scopes (Sequence[str]): The list of scopes to check.

        Returns:
            bool: True if the credentials have the given scopes.
        """
        # Explicit scopes take precedence; default scopes are the fallback.
        effective = self._default_scopes if self._scopes is None else self._scopes
        return set(scopes) <= set(effective or [])
405
+
406
+
407
class Scoped(ReadOnlyScoped):
    """Interface for credentials whose scopes can be replaced while copying.

    OAuth 2.0-based credentials allow limiting access using scopes as
    described in `RFC6749 Section 3.3`_. A credential class implementing this
    interface uses scopes in its implementation.

    Some credentials require scopes in order to obtain a token. Check whether
    scoping is necessary with :attr:`requires_scopes`::

        if credentials.requires_scopes:
            # Scoping is required.
            credentials = credentials.create_scoped(['one', 'two'])

    Credentials that require scopes must either be constructed with scopes::

        credentials = SomeScopedCredentials(scopes=['one', 'two'])

    Or must copy an existing instance using :meth:`with_scopes`::

        scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])

    Some credentials have scopes but do not allow or require scopes to be
    set; those can be used as-is.

    .. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
    """

    @abc.abstractmethod
    def with_scopes(self, scopes, default_scopes=None):
        """Create a copy of these credentials with the specified scopes.

        Args:
            scopes (Sequence[str]): The list of scopes to attach to the
                current credentials.

        Raises:
            NotImplementedError: If the credentials' scopes can not be changed.
                This can be avoided by checking :attr:`requires_scopes` before
                calling this method.
        """
        raise NotImplementedError("This class does not require scoping.")
450
+
451
+
452
def with_scopes_if_required(credentials, scopes, default_scopes=None):
    """Creates a copy of the credentials with scopes if scoping is required.

    This helper function is useful when you do not know (or care to know) the
    specific type of credentials you are using (such as when you use
    :func:`google.auth.default`). This function will call
    :meth:`Scoped.with_scopes` if the credentials are scoped credentials and if
    the credentials require scoping. Otherwise, it will return the credentials
    as-is.

    Args:
        credentials (google.auth.credentials.Credentials): The credentials to
            scope if necessary.
        scopes (Sequence[str]): The list of scopes to use.
        default_scopes (Sequence[str]): Default scopes passed by a
            Google client library. Use 'scopes' for user-defined scopes.

    Returns:
        google.auth.credentials.Credentials: Either a new set of scoped
            credentials, or the passed in credentials instance if no scoping
            was required.
    """
    needs_scoping = isinstance(credentials, Scoped) and credentials.requires_scopes
    if not needs_scoping:
        return credentials
    return credentials.with_scopes(scopes, default_scopes=default_scopes)
478
+
479
+
480
class Signing(metaclass=abc.ABCMeta):
    """Interface for credentials that can cryptographically sign messages."""

    @abc.abstractmethod
    def sign_bytes(self, message):
        """Signs the given message.

        Args:
            message (bytes): The message to sign.

        Returns:
            bytes: The message's cryptographic signature.
        """
        # pylint: disable=missing-raises-doc,redundant-returns-doc
        # (pylint doesn't recognize that this is abstract)
        raise NotImplementedError("Sign bytes must be implemented.")

    @abc.abstractproperty
    def signer_email(self):
        """Optional[str]: An email address that identifies the signer."""
        # pylint: disable=missing-raises-doc
        # (pylint doesn't recognize that this is abstract)
        raise NotImplementedError("Signer email must be implemented.")

    @abc.abstractproperty
    def signer(self):
        """google.auth.crypt.Signer: The signer used to sign bytes."""
        # pylint: disable=missing-raises-doc
        # (pylint doesn't recognize that this is abstract)
        raise NotImplementedError("Signer must be implemented.")
510
+
511
+
512
class TokenState(Enum):
    """
    Tracks the state of a token.
    FRESH: The token is valid. It is not expired or close to expired, or the token has no expiry.
    STALE: The token is close to expired, and should be refreshed. The token can be used normally.
    INVALID: The token is expired or invalid. The token cannot be used for a normal operation.
    """

    # Values are explicit (not auto()) so they stay stable across releases.
    FRESH = 1
    STALE = 2
    INVALID = 3
.venv/lib/python3.11/site-packages/google/auth/environment_vars.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Environment variables used by :mod:`google.auth`."""
16
+
17
+
18
+ PROJECT = "GOOGLE_CLOUD_PROJECT"
19
+ """Environment variable defining default project.
20
+
21
+ This used by :func:`google.auth.default` to explicitly set a project ID. This
22
+ environment variable is also used by the Google Cloud Python Library.
23
+ """
24
+
25
+ LEGACY_PROJECT = "GCLOUD_PROJECT"
26
+ """Previously used environment variable defining the default project.
27
+
28
+ This environment variable is used instead of the current one in some
29
+ situations (such as Google App Engine).
30
+ """
31
+
32
+ GOOGLE_CLOUD_QUOTA_PROJECT = "GOOGLE_CLOUD_QUOTA_PROJECT"
33
+ """Environment variable defining the project to be used for
34
+ quota and billing."""
35
+
36
+ CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS"
37
+ """Environment variable defining the location of Google application default
38
+ credentials."""
39
+
40
+ # The environment variable name which can replace ~/.config if set.
41
+ CLOUD_SDK_CONFIG_DIR = "CLOUDSDK_CONFIG"
42
+ """Environment variable defines the location of Google Cloud SDK's config
43
+ files."""
44
+
45
+ # These two variables allow for customization of the addresses used when
46
+ # contacting the GCE metadata service.
47
+ GCE_METADATA_HOST = "GCE_METADATA_HOST"
48
+ """Environment variable providing an alternate hostname or host:port to be
49
+ used for GCE metadata requests.
50
+
51
+ This environment variable was originally named GCE_METADATA_ROOT. The system will
52
+ check this environemnt variable first; should there be no value present,
53
+ the system will fall back to the old variable.
54
+ """
55
+
56
+ GCE_METADATA_ROOT = "GCE_METADATA_ROOT"
57
+ """Old environment variable for GCE_METADATA_HOST."""
58
+
59
+ GCE_METADATA_IP = "GCE_METADATA_IP"
60
+ """Environment variable providing an alternate ip:port to be used for ip-only
61
+ GCE metadata requests."""
62
+
63
+ GOOGLE_API_USE_CLIENT_CERTIFICATE = "GOOGLE_API_USE_CLIENT_CERTIFICATE"
64
+ """Environment variable controlling whether to use client certificate or not.
65
+
66
+ The default value is false. Users have to explicitly set this value to true
67
+ in order to use client certificate to establish a mutual TLS channel."""
68
+
69
+ LEGACY_APPENGINE_RUNTIME = "APPENGINE_RUNTIME"
70
+ """Gen1 environment variable defining the App Engine Runtime.
71
+
72
+ Used to distinguish between GAE gen1 and GAE gen2+.
73
+ """
74
+
75
+ # AWS environment variables used with AWS workload identity pools to retrieve
76
+ # AWS security credentials and the AWS region needed to create a serialized
77
+ # signed requests to the AWS STS GetCalledIdentity API that can be exchanged
78
+ # for a Google access tokens via the GCP STS endpoint.
79
+ # When not available the AWS metadata server is used to retrieve these values.
80
+ AWS_ACCESS_KEY_ID = "AWS_ACCESS_KEY_ID"
81
+ AWS_SECRET_ACCESS_KEY = "AWS_SECRET_ACCESS_KEY"
82
+ AWS_SESSION_TOKEN = "AWS_SESSION_TOKEN"
83
+ AWS_REGION = "AWS_REGION"
84
+ AWS_DEFAULT_REGION = "AWS_DEFAULT_REGION"
.venv/lib/python3.11/site-packages/google/auth/exceptions.py ADDED
@@ -0,0 +1,108 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Exceptions used in the google.auth package."""
16
+
17
+
18
class GoogleAuthError(Exception):
    """Base class for all google.auth errors.

    Accepts an optional ``retryable`` keyword argument (default False)
    indicating whether the failed operation is transient and may be retried.
    """

    def __init__(self, *args, **kwargs):
        super(GoogleAuthError, self).__init__(*args)
        retryable = kwargs.get("retryable", False)
        self._retryable = retryable

    @property
    def retryable(self):
        # bool: Whether the failed operation may be retried.
        return self._retryable


class TransportError(GoogleAuthError):
    """Used to indicate an error occurred during an HTTP request."""


class RefreshError(GoogleAuthError):
    """Used to indicate that refreshing the credentials' access token
    failed."""


class UserAccessTokenError(GoogleAuthError):
    """Used to indicate ``gcloud auth print-access-token`` command failed."""


class DefaultCredentialsError(GoogleAuthError):
    """Used to indicate that acquiring default credentials failed."""


class MutualTLSChannelError(GoogleAuthError):
    """Used to indicate that mutual TLS channel creation failed, or that the
    mutual TLS channel credentials are missing or invalid."""


class ClientCertError(GoogleAuthError):
    """Used to indicate that client certificate is missing or invalid."""

    @property
    def retryable(self):
        # Deliberately overrides the base property: a certificate problem is
        # never transient, regardless of any ``retryable`` constructor kwarg.
        return False


class OAuthError(GoogleAuthError):
    """Used to indicate an error occurred during an OAuth related HTTP
    request."""


class ReauthFailError(RefreshError):
    """An exception for when reauth failed."""

    def __init__(self, message=None, **kwargs):
        super(ReauthFailError, self).__init__(
            "Reauthentication failed. {0}".format(message), **kwargs
        )


class ReauthSamlChallengeFailError(ReauthFailError):
    """An exception for SAML reauth challenge failures."""


class MalformedError(DefaultCredentialsError, ValueError):
    """An exception for malformed data."""


class InvalidResource(DefaultCredentialsError, ValueError):
    """An exception for URL error."""


class InvalidOperation(DefaultCredentialsError, ValueError):
    """An exception for invalid operation."""


class InvalidValue(DefaultCredentialsError, ValueError):
    """Used to wrap general ValueError of python."""


class InvalidType(DefaultCredentialsError, TypeError):
    """Used to wrap general TypeError of python."""


class OSError(DefaultCredentialsError, EnvironmentError):
    """Used to wrap EnvironmentError(OSError after python3.3).

    NOTE: Intentionally shadows the ``OSError`` builtin within this module's
    namespace; refer to it as ``exceptions.OSError`` from other modules.
    """


class TimeoutError(GoogleAuthError):
    """Used to indicate a timeout error occurred during an HTTP request.

    NOTE: Intentionally shadows the ``TimeoutError`` builtin within this
    module's namespace.
    """


class ResponseError(GoogleAuthError):
    """Used to indicate an error occurred when reading an HTTP response."""
.venv/lib/python3.11/site-packages/google/auth/external_account.py ADDED
@@ -0,0 +1,628 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """External Account Credentials.
16
+
17
+ This module provides credentials that exchange workload identity pool external
18
+ credentials for Google access tokens. This facilitates accessing Google Cloud
19
+ Platform resources from on-prem and non-Google Cloud platforms (e.g. AWS,
20
+ Microsoft Azure, OIDC identity providers), using native credentials retrieved
21
+ from the current environment without the need to copy, save and manage
22
+ long-lived service account credentials.
23
+
24
+ Specifically, this is intended to use access tokens acquired using the GCP STS
25
+ token exchange endpoint following the `OAuth 2.0 Token Exchange`_ spec.
26
+
27
+ .. _OAuth 2.0 Token Exchange: https://tools.ietf.org/html/rfc8693
28
+ """
29
+
30
+ import abc
31
+ import copy
32
+ from dataclasses import dataclass
33
+ import datetime
34
+ import functools
35
+ import io
36
+ import json
37
+ import re
38
+
39
+ from google.auth import _helpers
40
+ from google.auth import credentials
41
+ from google.auth import exceptions
42
+ from google.auth import impersonated_credentials
43
+ from google.auth import metrics
44
+ from google.oauth2 import sts
45
+ from google.oauth2 import utils
46
+
47
+ # External account JSON type identifier.
48
+ _EXTERNAL_ACCOUNT_JSON_TYPE = "external_account"
49
+ # The token exchange grant_type used for exchanging credentials.
50
+ _STS_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:token-exchange"
51
+ # The token exchange requested_token_type. This is always an access_token.
52
+ _STS_REQUESTED_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:access_token"
53
+ # Cloud resource manager URL used to retrieve project information.
54
+ _CLOUD_RESOURCE_MANAGER = "https://cloudresourcemanager.googleapis.com/v1/projects/"
55
+ # Default Google sts token url.
56
+ _DEFAULT_TOKEN_URL = "https://sts.{universe_domain}/v1/token"
57
+
58
+
59
+ @dataclass
60
+ class SupplierContext:
61
+ """A context class that contains information about the requested third party credential that is passed
62
+ to AWS security credential and subject token suppliers.
63
+
64
+ Attributes:
65
+ subject_token_type (str): The requested subject token type based on the Oauth2.0 token exchange spec.
66
+ Expected values include::
67
+
68
+ “urn:ietf:params:oauth:token-type:jwt”
69
+ “urn:ietf:params:oauth:token-type:id-token”
70
+ “urn:ietf:params:oauth:token-type:saml2”
71
+ “urn:ietf:params:aws:token-type:aws4_request”
72
+
73
+ audience (str): The requested audience for the subject token.
74
+ """
75
+
76
+ subject_token_type: str
77
+ audience: str
78
+
79
+
80
+ class Credentials(
81
+ credentials.Scoped,
82
+ credentials.CredentialsWithQuotaProject,
83
+ credentials.CredentialsWithTokenUri,
84
+ metaclass=abc.ABCMeta,
85
+ ):
86
+ """Base class for all external account credentials.
87
+
88
+ This is used to instantiate Credentials for exchanging external account
89
+ credentials for Google access token and authorizing requests to Google APIs.
90
+ The base class implements the common logic for exchanging external account
91
+ credentials for Google access tokens.
92
+ """
93
+
94
+ def __init__(
95
+ self,
96
+ audience,
97
+ subject_token_type,
98
+ token_url,
99
+ credential_source,
100
+ service_account_impersonation_url=None,
101
+ service_account_impersonation_options=None,
102
+ client_id=None,
103
+ client_secret=None,
104
+ token_info_url=None,
105
+ quota_project_id=None,
106
+ scopes=None,
107
+ default_scopes=None,
108
+ workforce_pool_user_project=None,
109
+ universe_domain=credentials.DEFAULT_UNIVERSE_DOMAIN,
110
+ trust_boundary=None,
111
+ ):
112
+ """Instantiates an external account credentials object.
113
+
114
+ Args:
115
+ audience (str): The STS audience field.
116
+ subject_token_type (str): The subject token type based on the Oauth2.0 token exchange spec.
117
+ Expected values include::
118
+
119
+ “urn:ietf:params:oauth:token-type:jwt”
120
+ “urn:ietf:params:oauth:token-type:id-token”
121
+ “urn:ietf:params:oauth:token-type:saml2”
122
+ “urn:ietf:params:aws:token-type:aws4_request”
123
+
124
+ token_url (str): The STS endpoint URL.
125
+ credential_source (Mapping): The credential source dictionary.
126
+ service_account_impersonation_url (Optional[str]): The optional service account
127
+ impersonation generateAccessToken URL.
128
+ client_id (Optional[str]): The optional client ID.
129
+ client_secret (Optional[str]): The optional client secret.
130
+ token_info_url (str): The optional STS endpoint URL for token introspection.
131
+ quota_project_id (Optional[str]): The optional quota project ID.
132
+ scopes (Optional[Sequence[str]]): Optional scopes to request during the
133
+ authorization grant.
134
+ default_scopes (Optional[Sequence[str]]): Default scopes passed by a
135
+ Google client library. Use 'scopes' for user-defined scopes.
136
+ workforce_pool_user_project (Optona[str]): The optional workforce pool user
137
+ project number when the credential corresponds to a workforce pool and not
138
+ a workload identity pool. The underlying principal must still have
139
+ serviceusage.services.use IAM permission to use the project for
140
+ billing/quota.
141
+ universe_domain (str): The universe domain. The default universe
142
+ domain is googleapis.com.
143
+ trust_boundary (str): String representation of trust boundary meta.
144
+ Raises:
145
+ google.auth.exceptions.RefreshError: If the generateAccessToken
146
+ endpoint returned an error.
147
+ """
148
+ super(Credentials, self).__init__()
149
+ self._audience = audience
150
+ self._subject_token_type = subject_token_type
151
+ self._universe_domain = universe_domain
152
+ self._token_url = token_url
153
+ if self._token_url == _DEFAULT_TOKEN_URL:
154
+ self._token_url = self._token_url.replace(
155
+ "{universe_domain}", self._universe_domain
156
+ )
157
+ self._token_info_url = token_info_url
158
+ self._credential_source = credential_source
159
+ self._service_account_impersonation_url = service_account_impersonation_url
160
+ self._service_account_impersonation_options = (
161
+ service_account_impersonation_options or {}
162
+ )
163
+ self._client_id = client_id
164
+ self._client_secret = client_secret
165
+ self._quota_project_id = quota_project_id
166
+ self._scopes = scopes
167
+ self._default_scopes = default_scopes
168
+ self._workforce_pool_user_project = workforce_pool_user_project
169
+ self._trust_boundary = {
170
+ "locations": [],
171
+ "encoded_locations": "0x0",
172
+ } # expose a placeholder trust boundary value.
173
+
174
+ if self._client_id:
175
+ self._client_auth = utils.ClientAuthentication(
176
+ utils.ClientAuthType.basic, self._client_id, self._client_secret
177
+ )
178
+ else:
179
+ self._client_auth = None
180
+ self._sts_client = sts.Client(self._token_url, self._client_auth)
181
+
182
+ self._metrics_options = self._create_default_metrics_options()
183
+
184
+ self._impersonated_credentials = None
185
+ self._project_id = None
186
+ self._supplier_context = SupplierContext(
187
+ self._subject_token_type, self._audience
188
+ )
189
+ self._cred_file_path = None
190
+
191
+ if not self.is_workforce_pool and self._workforce_pool_user_project:
192
+ # Workload identity pools do not support workforce pool user projects.
193
+ raise exceptions.InvalidValue(
194
+ "workforce_pool_user_project should not be set for non-workforce pool "
195
+ "credentials"
196
+ )
197
+
198
+ @property
199
+ def info(self):
200
+ """Generates the dictionary representation of the current credentials.
201
+
202
+ Returns:
203
+ Mapping: The dictionary representation of the credentials. This is the
204
+ reverse of "from_info" defined on the subclasses of this class. It is
205
+ useful for serializing the current credentials so it can deserialized
206
+ later.
207
+ """
208
+ config_info = self._constructor_args()
209
+ config_info.update(
210
+ type=_EXTERNAL_ACCOUNT_JSON_TYPE,
211
+ service_account_impersonation=config_info.pop(
212
+ "service_account_impersonation_options", None
213
+ ),
214
+ )
215
+ config_info.pop("scopes", None)
216
+ config_info.pop("default_scopes", None)
217
+ return {key: value for key, value in config_info.items() if value is not None}
218
+
219
+ def _constructor_args(self):
220
+ args = {
221
+ "audience": self._audience,
222
+ "subject_token_type": self._subject_token_type,
223
+ "token_url": self._token_url,
224
+ "token_info_url": self._token_info_url,
225
+ "service_account_impersonation_url": self._service_account_impersonation_url,
226
+ "service_account_impersonation_options": copy.deepcopy(
227
+ self._service_account_impersonation_options
228
+ )
229
+ or None,
230
+ "credential_source": copy.deepcopy(self._credential_source),
231
+ "quota_project_id": self._quota_project_id,
232
+ "client_id": self._client_id,
233
+ "client_secret": self._client_secret,
234
+ "workforce_pool_user_project": self._workforce_pool_user_project,
235
+ "scopes": self._scopes,
236
+ "default_scopes": self._default_scopes,
237
+ "universe_domain": self._universe_domain,
238
+ }
239
+ if not self.is_workforce_pool:
240
+ args.pop("workforce_pool_user_project")
241
+ return args
242
+
243
+ @property
244
+ def service_account_email(self):
245
+ """Returns the service account email if service account impersonation is used.
246
+
247
+ Returns:
248
+ Optional[str]: The service account email if impersonation is used. Otherwise
249
+ None is returned.
250
+ """
251
+ if self._service_account_impersonation_url:
252
+ # Parse email from URL. The formal looks as follows:
253
+ # https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken
254
+ url = self._service_account_impersonation_url
255
+ start_index = url.rfind("/")
256
+ end_index = url.find(":generateAccessToken")
257
+ if start_index != -1 and end_index != -1 and start_index < end_index:
258
+ start_index = start_index + 1
259
+ return url[start_index:end_index]
260
+ return None
261
+
262
+ @property
263
+ def is_user(self):
264
+ """Returns whether the credentials represent a user (True) or workload (False).
265
+ Workloads behave similarly to service accounts. Currently workloads will use
266
+ service account impersonation but will eventually not require impersonation.
267
+ As a result, this property is more reliable than the service account email
268
+ property in determining if the credentials represent a user or workload.
269
+
270
+ Returns:
271
+ bool: True if the credentials represent a user. False if they represent a
272
+ workload.
273
+ """
274
+ # If service account impersonation is used, the credentials will always represent a
275
+ # service account.
276
+ if self._service_account_impersonation_url:
277
+ return False
278
+ return self.is_workforce_pool
279
+
280
+ @property
281
+ def is_workforce_pool(self):
282
+ """Returns whether the credentials represent a workforce pool (True) or
283
+ workload (False) based on the credentials' audience.
284
+
285
+ This will also return True for impersonated workforce pool credentials.
286
+
287
+ Returns:
288
+ bool: True if the credentials represent a workforce pool. False if they
289
+ represent a workload.
290
+ """
291
+ # Workforce pools representing users have the following audience format:
292
+ # //iam.googleapis.com/locations/$location/workforcePools/$poolId/providers/$providerId
293
+ p = re.compile(r"//iam\.googleapis\.com/locations/[^/]+/workforcePools/")
294
+ return p.match(self._audience or "") is not None
295
+
296
+ @property
297
+ def requires_scopes(self):
298
+ """Checks if the credentials requires scopes.
299
+
300
+ Returns:
301
+ bool: True if there are no scopes set otherwise False.
302
+ """
303
+ return not self._scopes and not self._default_scopes
304
+
305
+ @property
306
+ def project_number(self):
307
+ """Optional[str]: The project number corresponding to the workload identity pool."""
308
+
309
+ # STS audience pattern:
310
+ # //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/...
311
+ components = self._audience.split("/")
312
+ try:
313
+ project_index = components.index("projects")
314
+ if project_index + 1 < len(components):
315
+ return components[project_index + 1] or None
316
+ except ValueError:
317
+ return None
318
+
319
+ @property
320
+ def token_info_url(self):
321
+ """Optional[str]: The STS token introspection endpoint."""
322
+
323
+ return self._token_info_url
324
+
325
+ @_helpers.copy_docstring(credentials.Credentials)
326
+ def get_cred_info(self):
327
+ if self._cred_file_path:
328
+ cred_info_json = {
329
+ "credential_source": self._cred_file_path,
330
+ "credential_type": "external account credentials",
331
+ }
332
+ if self.service_account_email:
333
+ cred_info_json["principal"] = self.service_account_email
334
+ return cred_info_json
335
+ return None
336
+
337
+ @_helpers.copy_docstring(credentials.Scoped)
338
+ def with_scopes(self, scopes, default_scopes=None):
339
+ kwargs = self._constructor_args()
340
+ kwargs.update(scopes=scopes, default_scopes=default_scopes)
341
+ scoped = self.__class__(**kwargs)
342
+ scoped._cred_file_path = self._cred_file_path
343
+ scoped._metrics_options = self._metrics_options
344
+ return scoped
345
+
346
+ @abc.abstractmethod
347
+ def retrieve_subject_token(self, request):
348
+ """Retrieves the subject token using the credential_source object.
349
+
350
+ Args:
351
+ request (google.auth.transport.Request): A callable used to make
352
+ HTTP requests.
353
+ Returns:
354
+ str: The retrieved subject token.
355
+ """
356
+ # pylint: disable=missing-raises-doc
357
+ # (pylint doesn't recognize that this is abstract)
358
+ raise NotImplementedError("retrieve_subject_token must be implemented")
359
+
360
+ def get_project_id(self, request):
361
+ """Retrieves the project ID corresponding to the workload identity or workforce pool.
362
+ For workforce pool credentials, it returns the project ID corresponding to
363
+ the workforce_pool_user_project.
364
+
365
+ When not determinable, None is returned.
366
+
367
+ This is introduced to support the current pattern of using the Auth library:
368
+
369
+ credentials, project_id = google.auth.default()
370
+
371
+ The resource may not have permission (resourcemanager.projects.get) to
372
+ call this API or the required scopes may not be selected:
373
+ https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes
374
+
375
+ Args:
376
+ request (google.auth.transport.Request): A callable used to make
377
+ HTTP requests.
378
+ Returns:
379
+ Optional[str]: The project ID corresponding to the workload identity pool
380
+ or workforce pool if determinable.
381
+ """
382
+ if self._project_id:
383
+ # If already retrieved, return the cached project ID value.
384
+ return self._project_id
385
+ scopes = self._scopes if self._scopes is not None else self._default_scopes
386
+ # Scopes are required in order to retrieve a valid access token.
387
+ project_number = self.project_number or self._workforce_pool_user_project
388
+ if project_number and scopes:
389
+ headers = {}
390
+ url = _CLOUD_RESOURCE_MANAGER + project_number
391
+ self.before_request(request, "GET", url, headers)
392
+ response = request(url=url, method="GET", headers=headers)
393
+
394
+ response_body = (
395
+ response.data.decode("utf-8")
396
+ if hasattr(response.data, "decode")
397
+ else response.data
398
+ )
399
+ response_data = json.loads(response_body)
400
+
401
+ if response.status == 200:
402
+ # Cache result as this field is immutable.
403
+ self._project_id = response_data.get("projectId")
404
+ return self._project_id
405
+
406
+ return None
407
+
408
+ @_helpers.copy_docstring(credentials.Credentials)
409
+ def refresh(self, request):
410
+ scopes = self._scopes if self._scopes is not None else self._default_scopes
411
+
412
+ # Inject client certificate into request.
413
+ if self._mtls_required():
414
+ request = functools.partial(
415
+ request, cert=self._get_mtls_cert_and_key_paths()
416
+ )
417
+
418
+ if self._should_initialize_impersonated_credentials():
419
+ self._impersonated_credentials = self._initialize_impersonated_credentials()
420
+
421
+ if self._impersonated_credentials:
422
+ self._impersonated_credentials.refresh(request)
423
+ self.token = self._impersonated_credentials.token
424
+ self.expiry = self._impersonated_credentials.expiry
425
+ else:
426
+ now = _helpers.utcnow()
427
+ additional_options = None
428
+ # Do not pass workforce_pool_user_project when client authentication
429
+ # is used. The client ID is sufficient for determining the user project.
430
+ if self._workforce_pool_user_project and not self._client_id:
431
+ additional_options = {"userProject": self._workforce_pool_user_project}
432
+ additional_headers = {
433
+ metrics.API_CLIENT_HEADER: metrics.byoid_metrics_header(
434
+ self._metrics_options
435
+ )
436
+ }
437
+ response_data = self._sts_client.exchange_token(
438
+ request=request,
439
+ grant_type=_STS_GRANT_TYPE,
440
+ subject_token=self.retrieve_subject_token(request),
441
+ subject_token_type=self._subject_token_type,
442
+ audience=self._audience,
443
+ scopes=scopes,
444
+ requested_token_type=_STS_REQUESTED_TOKEN_TYPE,
445
+ additional_options=additional_options,
446
+ additional_headers=additional_headers,
447
+ )
448
+ self.token = response_data.get("access_token")
449
+ expires_in = response_data.get("expires_in")
450
+ # Some services do not respect the OAUTH2.0 RFC and send expires_in as a
451
+ # JSON String.
452
+ if isinstance(expires_in, str):
453
+ expires_in = int(expires_in)
454
+
455
+ lifetime = datetime.timedelta(seconds=expires_in)
456
+
457
+ self.expiry = now + lifetime
458
+
459
+ def _make_copy(self):
460
+ kwargs = self._constructor_args()
461
+ new_cred = self.__class__(**kwargs)
462
+ new_cred._cred_file_path = self._cred_file_path
463
+ new_cred._metrics_options = self._metrics_options
464
+ return new_cred
465
+
466
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
467
+ def with_quota_project(self, quota_project_id):
468
+ # Return copy of instance with the provided quota project ID.
469
+ cred = self._make_copy()
470
+ cred._quota_project_id = quota_project_id
471
+ return cred
472
+
473
+ @_helpers.copy_docstring(credentials.CredentialsWithTokenUri)
474
+ def with_token_uri(self, token_uri):
475
+ cred = self._make_copy()
476
+ cred._token_url = token_uri
477
+ return cred
478
+
479
+ @_helpers.copy_docstring(credentials.CredentialsWithUniverseDomain)
480
+ def with_universe_domain(self, universe_domain):
481
+ cred = self._make_copy()
482
+ cred._universe_domain = universe_domain
483
+ return cred
484
+
485
+ def _should_initialize_impersonated_credentials(self):
486
+ return (
487
+ self._service_account_impersonation_url is not None
488
+ and self._impersonated_credentials is None
489
+ )
490
+
491
+ def _initialize_impersonated_credentials(self):
492
+ """Generates an impersonated credentials.
493
+
494
+ For more details, see `projects.serviceAccounts.generateAccessToken`_.
495
+
496
+ .. _projects.serviceAccounts.generateAccessToken: https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateAccessToken
497
+
498
+ Returns:
499
+ impersonated_credentials.Credential: The impersonated credentials
500
+ object.
501
+
502
+ Raises:
503
+ google.auth.exceptions.RefreshError: If the generateAccessToken
504
+ endpoint returned an error.
505
+ """
506
+ # Return copy of instance with no service account impersonation.
507
+ kwargs = self._constructor_args()
508
+ kwargs.update(
509
+ service_account_impersonation_url=None,
510
+ service_account_impersonation_options={},
511
+ )
512
+ source_credentials = self.__class__(**kwargs)
513
+ source_credentials._metrics_options = self._metrics_options
514
+
515
+ # Determine target_principal.
516
+ target_principal = self.service_account_email
517
+ if not target_principal:
518
+ raise exceptions.RefreshError(
519
+ "Unable to determine target principal from service account impersonation URL."
520
+ )
521
+
522
+ scopes = self._scopes if self._scopes is not None else self._default_scopes
523
+ # Initialize and return impersonated credentials.
524
+ return impersonated_credentials.Credentials(
525
+ source_credentials=source_credentials,
526
+ target_principal=target_principal,
527
+ target_scopes=scopes,
528
+ quota_project_id=self._quota_project_id,
529
+ iam_endpoint_override=self._service_account_impersonation_url,
530
+ lifetime=self._service_account_impersonation_options.get(
531
+ "token_lifetime_seconds"
532
+ ),
533
+ )
534
+
535
+ def _create_default_metrics_options(self):
536
+ metrics_options = {}
537
+ if self._service_account_impersonation_url:
538
+ metrics_options["sa-impersonation"] = "true"
539
+ else:
540
+ metrics_options["sa-impersonation"] = "false"
541
+ if self._service_account_impersonation_options.get("token_lifetime_seconds"):
542
+ metrics_options["config-lifetime"] = "true"
543
+ else:
544
+ metrics_options["config-lifetime"] = "false"
545
+
546
+ return metrics_options
547
+
548
+ def _mtls_required(self):
549
+ """Returns a boolean representing whether the current credential is configured
550
+ for mTLS and should add a certificate to the outgoing calls to the sts and service
551
+ account impersonation endpoint.
552
+
553
+ Returns:
554
+ bool: True if the credential is configured for mTLS, False if it is not.
555
+ """
556
+ return False
557
+
558
+ def _get_mtls_cert_and_key_paths(self):
559
+ """Gets the file locations for a certificate and private key file
560
+ to be used for configuring mTLS for the sts and service account
561
+ impersonation calls. Currently only expected to return a value when using
562
+ X509 workload identity federation.
563
+
564
+ Returns:
565
+ Tuple[str, str]: The cert and key file locations as strings in a tuple.
566
+
567
+ Raises:
568
+ NotImplementedError: When the current credential is not configured for
569
+ mTLS.
570
+ """
571
+ raise NotImplementedError(
572
+ "_get_mtls_cert_and_key_location must be implemented."
573
+ )
574
+
575
+ @classmethod
576
+ def from_info(cls, info, **kwargs):
577
+ """Creates a Credentials instance from parsed external account info.
578
+
579
+ Args:
580
+ info (Mapping[str, str]): The external account info in Google
581
+ format.
582
+ kwargs: Additional arguments to pass to the constructor.
583
+
584
+ Returns:
585
+ google.auth.identity_pool.Credentials: The constructed
586
+ credentials.
587
+
588
+ Raises:
589
+ InvalidValue: For invalid parameters.
590
+ """
591
+ return cls(
592
+ audience=info.get("audience"),
593
+ subject_token_type=info.get("subject_token_type"),
594
+ token_url=info.get("token_url"),
595
+ token_info_url=info.get("token_info_url"),
596
+ service_account_impersonation_url=info.get(
597
+ "service_account_impersonation_url"
598
+ ),
599
+ service_account_impersonation_options=info.get(
600
+ "service_account_impersonation"
601
+ )
602
+ or {},
603
+ client_id=info.get("client_id"),
604
+ client_secret=info.get("client_secret"),
605
+ credential_source=info.get("credential_source"),
606
+ quota_project_id=info.get("quota_project_id"),
607
+ workforce_pool_user_project=info.get("workforce_pool_user_project"),
608
+ universe_domain=info.get(
609
+ "universe_domain", credentials.DEFAULT_UNIVERSE_DOMAIN
610
+ ),
611
+ **kwargs
612
+ )
613
+
614
+ @classmethod
615
+ def from_file(cls, filename, **kwargs):
616
+ """Creates a Credentials instance from an external account json file.
617
+
618
+ Args:
619
+ filename (str): The path to the external account json file.
620
+ kwargs: Additional arguments to pass to the constructor.
621
+
622
+ Returns:
623
+ google.auth.identity_pool.Credentials: The constructed
624
+ credentials.
625
+ """
626
+ with io.open(filename, "r", encoding="utf-8") as json_file:
627
+ data = json.load(json_file)
628
+ return cls.from_info(data, **kwargs)
.venv/lib/python3.11/site-packages/google/auth/iam.py ADDED
@@ -0,0 +1,136 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2017 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Tools for using the Google `Cloud Identity and Access Management (IAM)
16
+ API`_'s auth-related functionality.
17
+
18
+ .. _Cloud Identity and Access Management (IAM) API:
19
+ https://cloud.google.com/iam/docs/
20
+ """
21
+
22
+ import base64
23
+ import http.client as http_client
24
+ import json
25
+
26
+ from google.auth import _exponential_backoff
27
+ from google.auth import _helpers
28
+ from google.auth import credentials
29
+ from google.auth import crypt
30
+ from google.auth import exceptions
31
+
32
+ IAM_RETRY_CODES = {
33
+ http_client.INTERNAL_SERVER_ERROR,
34
+ http_client.BAD_GATEWAY,
35
+ http_client.SERVICE_UNAVAILABLE,
36
+ http_client.GATEWAY_TIMEOUT,
37
+ }
38
+
39
+ _IAM_SCOPE = ["https://www.googleapis.com/auth/iam"]
40
+
41
+ _IAM_ENDPOINT = (
42
+ "https://iamcredentials.googleapis.com/v1/projects/-"
43
+ + "/serviceAccounts/{}:generateAccessToken"
44
+ )
45
+
46
+ _IAM_SIGN_ENDPOINT = (
47
+ "https://iamcredentials.googleapis.com/v1/projects/-"
48
+ + "/serviceAccounts/{}:signBlob"
49
+ )
50
+
51
+ _IAM_SIGNJWT_ENDPOINT = (
52
+ "https://iamcredentials.googleapis.com/v1/projects/-"
53
+ + "/serviceAccounts/{}:signJwt"
54
+ )
55
+
56
+ _IAM_IDTOKEN_ENDPOINT = (
57
+ "https://iamcredentials.googleapis.com/v1/"
58
+ + "projects/-/serviceAccounts/{}:generateIdToken"
59
+ )
60
+
61
+
62
+ class Signer(crypt.Signer):
63
+ """Signs messages using the IAM `signBlob API`_.
64
+
65
+ This is useful when you need to sign bytes but do not have access to the
66
+ credential's private key file.
67
+
68
+ .. _signBlob API:
69
+ https://cloud.google.com/iam/reference/rest/v1/projects.serviceAccounts
70
+ /signBlob
71
+ """
72
+
73
+ def __init__(self, request, credentials, service_account_email):
74
+ """
75
+ Args:
76
+ request (google.auth.transport.Request): The object used to make
77
+ HTTP requests.
78
+ credentials (google.auth.credentials.Credentials): The credentials
79
+ that will be used to authenticate the request to the IAM API.
80
+ The credentials must have of one the following scopes:
81
+
82
+ - https://www.googleapis.com/auth/iam
83
+ - https://www.googleapis.com/auth/cloud-platform
84
+ service_account_email (str): The service account email identifying
85
+ which service account to use to sign bytes. Often, this can
86
+ be the same as the service account email in the given
87
+ credentials.
88
+ """
89
+ self._request = request
90
+ self._credentials = credentials
91
+ self._service_account_email = service_account_email
92
+
93
+ def _make_signing_request(self, message):
94
+ """Makes a request to the API signBlob API."""
95
+ message = _helpers.to_bytes(message)
96
+
97
+ method = "POST"
98
+ url = _IAM_SIGN_ENDPOINT.replace(
99
+ credentials.DEFAULT_UNIVERSE_DOMAIN, self._credentials.universe_domain
100
+ ).format(self._service_account_email)
101
+ headers = {"Content-Type": "application/json"}
102
+ body = json.dumps(
103
+ {"payload": base64.b64encode(message).decode("utf-8")}
104
+ ).encode("utf-8")
105
+
106
+ retries = _exponential_backoff.ExponentialBackoff()
107
+ for _ in retries:
108
+ self._credentials.before_request(self._request, method, url, headers)
109
+
110
+ response = self._request(url=url, method=method, body=body, headers=headers)
111
+
112
+ if response.status in IAM_RETRY_CODES:
113
+ continue
114
+
115
+ if response.status != http_client.OK:
116
+ raise exceptions.TransportError(
117
+ "Error calling the IAM signBlob API: {}".format(response.data)
118
+ )
119
+
120
+ return json.loads(response.data.decode("utf-8"))
121
+ raise exceptions.TransportError("exhausted signBlob endpoint retries")
122
+
123
+ @property
124
+ def key_id(self):
125
+ """Optional[str]: The key ID used to identify this private key.
126
+
127
+ .. warning::
128
+ This is always ``None``. The key ID used by IAM can not
129
+ be reliably determined ahead of time.
130
+ """
131
+ return None
132
+
133
+ @_helpers.copy_docstring(crypt.Signer)
134
+ def sign(self, message):
135
+ response = self._make_signing_request(message)
136
+ return base64.b64decode(response["signedBlob"])
.venv/lib/python3.11/site-packages/google/auth/identity_pool.py ADDED
@@ -0,0 +1,439 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Identity Pool Credentials.
16
+
17
+ This module provides credentials to access Google Cloud resources from on-prem
18
+ or non-Google Cloud platforms which support external credentials (e.g. OIDC ID
19
+ tokens) retrieved from local file locations or local servers. This includes
20
+ Microsoft Azure and OIDC identity providers (e.g. K8s workloads registered with
21
+ Hub with Hub workload identity enabled).
22
+
23
+ These credentials are recommended over the use of service account credentials
24
+ in on-prem/non-Google Cloud platforms as they do not involve the management of
25
+ long-live service account private keys.
26
+
27
+ Identity Pool Credentials are initialized using external_account
28
+ arguments which are typically loaded from an external credentials file or
29
+ an external credentials URL.
30
+
31
+ This module also provides a definition for an abstract subject token supplier.
32
+ This supplier can be implemented to return a valid OIDC or SAML2.0 subject token
33
+ and used to create Identity Pool credentials. The credentials will then call the
34
+ supplier instead of using pre-defined methods such as reading a local file or
35
+ calling a URL.
36
+ """
37
+
38
+ try:
39
+ from collections.abc import Mapping
40
+ # Python 2.7 compatibility
41
+ except ImportError: # pragma: NO COVER
42
+ from collections import Mapping # type: ignore
43
+ import abc
44
+ import json
45
+ import os
46
+ from typing import NamedTuple
47
+
48
+ from google.auth import _helpers
49
+ from google.auth import exceptions
50
+ from google.auth import external_account
51
+ from google.auth.transport import _mtls_helper
52
+
53
+
54
+ class SubjectTokenSupplier(metaclass=abc.ABCMeta):
55
+ """Base class for subject token suppliers. This can be implemented with custom logic to retrieve
56
+ a subject token to exchange for a Google Cloud access token when using Workload or
57
+ Workforce Identity Federation. The identity pool credential does not cache the subject token,
58
+ so caching logic should be added in the implementation.
59
+ """
60
+
61
+ @abc.abstractmethod
62
+ def get_subject_token(self, context, request):
63
+ """Returns the requested subject token. The subject token must be valid.
64
+
65
+ .. warning: This is not cached by the calling Google credential, so caching logic should be implemented in the supplier.
66
+
67
+ Args:
68
+ context (google.auth.externalaccount.SupplierContext): The context object
69
+ containing information about the requested audience and subject token type.
70
+ request (google.auth.transport.Request): The object used to make
71
+ HTTP requests.
72
+
73
+ Raises:
74
+ google.auth.exceptions.RefreshError: If an error is encountered during
75
+ subject token retrieval logic.
76
+
77
+ Returns:
78
+ str: The requested subject token string.
79
+ """
80
+ raise NotImplementedError("")
81
+
82
+
83
+ class _TokenContent(NamedTuple):
84
+ """Models the token content response from file and url internal suppliers.
85
+ Attributes:
86
+ content (str): The string content of the file or URL response.
87
+ location (str): The location the content was retrieved from. This will either be a file location or a URL.
88
+ """
89
+
90
+ content: str
91
+ location: str
92
+
93
+
94
+ class _FileSupplier(SubjectTokenSupplier):
95
+ """ Internal implementation of subject token supplier which supports reading a subject token from a file."""
96
+
97
+ def __init__(self, path, format_type, subject_token_field_name):
98
+ self._path = path
99
+ self._format_type = format_type
100
+ self._subject_token_field_name = subject_token_field_name
101
+
102
+ @_helpers.copy_docstring(SubjectTokenSupplier)
103
+ def get_subject_token(self, context, request):
104
+ if not os.path.exists(self._path):
105
+ raise exceptions.RefreshError("File '{}' was not found.".format(self._path))
106
+
107
+ with open(self._path, "r", encoding="utf-8") as file_obj:
108
+ token_content = _TokenContent(file_obj.read(), self._path)
109
+
110
+ return _parse_token_data(
111
+ token_content, self._format_type, self._subject_token_field_name
112
+ )
113
+
114
+
115
+ class _UrlSupplier(SubjectTokenSupplier):
116
+ """ Internal implementation of subject token supplier which supports retrieving a subject token by calling a URL endpoint."""
117
+
118
+ def __init__(self, url, format_type, subject_token_field_name, headers):
119
+ self._url = url
120
+ self._format_type = format_type
121
+ self._subject_token_field_name = subject_token_field_name
122
+ self._headers = headers
123
+
124
+ @_helpers.copy_docstring(SubjectTokenSupplier)
125
+ def get_subject_token(self, context, request):
126
+ response = request(url=self._url, method="GET", headers=self._headers)
127
+
128
+ # support both string and bytes type response.data
129
+ response_body = (
130
+ response.data.decode("utf-8")
131
+ if hasattr(response.data, "decode")
132
+ else response.data
133
+ )
134
+
135
+ if response.status != 200:
136
+ raise exceptions.RefreshError(
137
+ "Unable to retrieve Identity Pool subject token", response_body
138
+ )
139
+ token_content = _TokenContent(response_body, self._url)
140
+ return _parse_token_data(
141
+ token_content, self._format_type, self._subject_token_field_name
142
+ )
143
+
144
+
145
+ class _X509Supplier(SubjectTokenSupplier):
146
+ """Internal supplier for X509 workload credentials. This class is used internally and always returns an empty string as the subject token."""
147
+
148
+ @_helpers.copy_docstring(SubjectTokenSupplier)
149
+ def get_subject_token(self, context, request):
150
+ return ""
151
+
152
+
153
+ def _parse_token_data(token_content, format_type="text", subject_token_field_name=None):
154
+ if format_type == "text":
155
+ token = token_content.content
156
+ else:
157
+ try:
158
+ # Parse file content as JSON.
159
+ response_data = json.loads(token_content.content)
160
+ # Get the subject_token.
161
+ token = response_data[subject_token_field_name]
162
+ except (KeyError, ValueError):
163
+ raise exceptions.RefreshError(
164
+ "Unable to parse subject_token from JSON file '{}' using key '{}'".format(
165
+ token_content.location, subject_token_field_name
166
+ )
167
+ )
168
+ if not token:
169
+ raise exceptions.RefreshError(
170
+ "Missing subject_token in the credential_source file"
171
+ )
172
+ return token
173
+
174
+
175
+ class Credentials(external_account.Credentials):
176
+ """External account credentials sourced from files and URLs."""
177
+
178
+ def __init__(
179
+ self,
180
+ audience,
181
+ subject_token_type,
182
+ token_url=external_account._DEFAULT_TOKEN_URL,
183
+ credential_source=None,
184
+ subject_token_supplier=None,
185
+ *args,
186
+ **kwargs
187
+ ):
188
+ """Instantiates an external account credentials object from a file/URL.
189
+
190
+ Args:
191
+ audience (str): The STS audience field.
192
+ subject_token_type (str): The subject token type based on the Oauth2.0 token exchange spec.
193
+ Expected values include::
194
+
195
+ “urn:ietf:params:oauth:token-type:jwt”
196
+ “urn:ietf:params:oauth:token-type:id-token”
197
+ “urn:ietf:params:oauth:token-type:saml2”
198
+
199
+ token_url (Optional [str]): The STS endpoint URL. If not provided, will default to "https://sts.googleapis.com/v1/token".
200
+ credential_source (Optional [Mapping]): The credential source dictionary used to
201
+ provide instructions on how to retrieve external credential to be
202
+ exchanged for Google access tokens. Either a credential source or
203
+ a subject token supplier must be provided.
204
+
205
+ Example credential_source for url-sourced credential::
206
+
207
+ {
208
+ "url": "http://www.example.com",
209
+ "format": {
210
+ "type": "json",
211
+ "subject_token_field_name": "access_token",
212
+ },
213
+ "headers": {"foo": "bar"},
214
+ }
215
+
216
+ Example credential_source for file-sourced credential::
217
+
218
+ {
219
+ "file": "/path/to/token/file.txt"
220
+ }
221
+ subject_token_supplier (Optional [SubjectTokenSupplier]): Optional subject token supplier.
222
+ This will be called to supply a valid subject token which will then
223
+ be exchanged for Google access tokens. Either a subject token supplier
224
+ or a credential source must be provided.
225
+ args (List): Optional positional arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
226
+ kwargs (Mapping): Optional keyword arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
227
+
228
+ Raises:
229
+ google.auth.exceptions.RefreshError: If an error is encountered during
230
+ access token retrieval logic.
231
+ ValueError: For invalid parameters.
232
+
233
+ .. note:: Typically one of the helper constructors
234
+ :meth:`from_file` or
235
+ :meth:`from_info` are used instead of calling the constructor directly.
236
+ """
237
+
238
+ super(Credentials, self).__init__(
239
+ audience=audience,
240
+ subject_token_type=subject_token_type,
241
+ token_url=token_url,
242
+ credential_source=credential_source,
243
+ *args,
244
+ **kwargs
245
+ )
246
+ if credential_source is None and subject_token_supplier is None:
247
+ raise exceptions.InvalidValue(
248
+ "A valid credential source or a subject token supplier must be provided."
249
+ )
250
+ if credential_source is not None and subject_token_supplier is not None:
251
+ raise exceptions.InvalidValue(
252
+ "Identity pool credential cannot have both a credential source and a subject token supplier."
253
+ )
254
+
255
+ if subject_token_supplier is not None:
256
+ self._subject_token_supplier = subject_token_supplier
257
+ self._credential_source_file = None
258
+ self._credential_source_url = None
259
+ self._credential_source_certificate = None
260
+ else:
261
+ if not isinstance(credential_source, Mapping):
262
+ self._credential_source_executable = None
263
+ raise exceptions.MalformedError(
264
+ "Invalid credential_source. The credential_source is not a dict."
265
+ )
266
+ self._credential_source_file = credential_source.get("file")
267
+ self._credential_source_url = credential_source.get("url")
268
+ self._credential_source_certificate = credential_source.get("certificate")
269
+
270
+ # environment_id is only supported in AWS or dedicated future external
271
+ # account credentials.
272
+ if "environment_id" in credential_source:
273
+ raise exceptions.MalformedError(
274
+ "Invalid Identity Pool credential_source field 'environment_id'"
275
+ )
276
+
277
+ # check that only one of file, url, or certificate are provided.
278
+ self._validate_single_source()
279
+
280
+ if self._credential_source_certificate:
281
+ self._validate_certificate_config()
282
+ else:
283
+ self._validate_file_or_url_config(credential_source)
284
+
285
+ if self._credential_source_file:
286
+ self._subject_token_supplier = _FileSupplier(
287
+ self._credential_source_file,
288
+ self._credential_source_format_type,
289
+ self._credential_source_field_name,
290
+ )
291
+ elif self._credential_source_url:
292
+ self._subject_token_supplier = _UrlSupplier(
293
+ self._credential_source_url,
294
+ self._credential_source_format_type,
295
+ self._credential_source_field_name,
296
+ self._credential_source_headers,
297
+ )
298
+ else: # self._credential_source_certificate
299
+ self._subject_token_supplier = _X509Supplier()
300
+
301
+ @_helpers.copy_docstring(external_account.Credentials)
302
+ def retrieve_subject_token(self, request):
303
+ return self._subject_token_supplier.get_subject_token(
304
+ self._supplier_context, request
305
+ )
306
+
307
+ def _get_mtls_cert_and_key_paths(self):
308
+ if self._credential_source_certificate is None:
309
+ raise exceptions.RefreshError(
310
+ 'The credential is not configured to use mtls requests. The credential should include a "certificate" section in the credential source.'
311
+ )
312
+ else:
313
+ return _mtls_helper._get_workload_cert_and_key_paths(
314
+ self._certificate_config_location
315
+ )
316
+
317
+ def _mtls_required(self):
318
+ return self._credential_source_certificate is not None
319
+
320
+ def _create_default_metrics_options(self):
321
+ metrics_options = super(Credentials, self)._create_default_metrics_options()
322
+ # Check that credential source is a dict before checking for credential type. This check needs to be done
323
+ # here because the external_account credential constructor needs to pass the metrics options to the
324
+ # impersonated credential object before the identity_pool credentials are validated.
325
+ if isinstance(self._credential_source, Mapping):
326
+ if self._credential_source.get("file"):
327
+ metrics_options["source"] = "file"
328
+ elif self._credential_source.get("url"):
329
+ metrics_options["source"] = "url"
330
+ else:
331
+ metrics_options["source"] = "x509"
332
+ else:
333
+ metrics_options["source"] = "programmatic"
334
+ return metrics_options
335
+
336
+ def _has_custom_supplier(self):
337
+ return self._credential_source is None
338
+
339
+ def _constructor_args(self):
340
+ args = super(Credentials, self)._constructor_args()
341
+ # If a custom supplier was used, append it to the args dict.
342
+ if self._has_custom_supplier():
343
+ args.update({"subject_token_supplier": self._subject_token_supplier})
344
+ return args
345
+
346
+ def _validate_certificate_config(self):
347
+ self._certificate_config_location = self._credential_source_certificate.get(
348
+ "certificate_config_location"
349
+ )
350
+ use_default = self._credential_source_certificate.get(
351
+ "use_default_certificate_config"
352
+ )
353
+ if self._certificate_config_location and use_default:
354
+ raise exceptions.MalformedError(
355
+ "Invalid certificate configuration, certificate_config_location cannot be specified when use_default_certificate_config = true."
356
+ )
357
+ if not self._certificate_config_location and not use_default:
358
+ raise exceptions.MalformedError(
359
+ "Invalid certificate configuration, use_default_certificate_config should be true if no certificate_config_location is provided."
360
+ )
361
+
362
+ def _validate_file_or_url_config(self, credential_source):
363
+ self._credential_source_headers = credential_source.get("headers")
364
+ credential_source_format = credential_source.get("format", {})
365
+ # Get credential_source format type. When not provided, this
366
+ # defaults to text.
367
+ self._credential_source_format_type = (
368
+ credential_source_format.get("type") or "text"
369
+ )
370
+ if self._credential_source_format_type not in ["text", "json"]:
371
+ raise exceptions.MalformedError(
372
+ "Invalid credential_source format '{}'".format(
373
+ self._credential_source_format_type
374
+ )
375
+ )
376
+ # For JSON types, get the required subject_token field name.
377
+ if self._credential_source_format_type == "json":
378
+ self._credential_source_field_name = credential_source_format.get(
379
+ "subject_token_field_name"
380
+ )
381
+ if self._credential_source_field_name is None:
382
+ raise exceptions.MalformedError(
383
+ "Missing subject_token_field_name for JSON credential_source format"
384
+ )
385
+ else:
386
+ self._credential_source_field_name = None
387
+
388
+ def _validate_single_source(self):
389
+ credential_sources = [
390
+ self._credential_source_file,
391
+ self._credential_source_url,
392
+ self._credential_source_certificate,
393
+ ]
394
+ valid_credential_sources = list(
395
+ filter(lambda source: source is not None, credential_sources)
396
+ )
397
+
398
+ if len(valid_credential_sources) > 1:
399
+ raise exceptions.MalformedError(
400
+ "Ambiguous credential_source. 'file', 'url', and 'certificate' are mutually exclusive.."
401
+ )
402
+ if len(valid_credential_sources) != 1:
403
+ raise exceptions.MalformedError(
404
+ "Missing credential_source. A 'file', 'url', or 'certificate' must be provided."
405
+ )
406
+
407
+ @classmethod
408
+ def from_info(cls, info, **kwargs):
409
+ """Creates an Identity Pool Credentials instance from parsed external account info.
410
+
411
+ Args:
412
+ info (Mapping[str, str]): The Identity Pool external account info in Google
413
+ format.
414
+ kwargs: Additional arguments to pass to the constructor.
415
+
416
+ Returns:
417
+ google.auth.identity_pool.Credentials: The constructed
418
+ credentials.
419
+
420
+ Raises:
421
+ ValueError: For invalid parameters.
422
+ """
423
+ subject_token_supplier = info.get("subject_token_supplier")
424
+ kwargs.update({"subject_token_supplier": subject_token_supplier})
425
+ return super(Credentials, cls).from_info(info, **kwargs)
426
+
427
+ @classmethod
428
+ def from_file(cls, filename, **kwargs):
429
+ """Creates an IdentityPool Credentials instance from an external account json file.
430
+
431
+ Args:
432
+ filename (str): The path to the IdentityPool external account json file.
433
+ kwargs: Additional arguments to pass to the constructor.
434
+
435
+ Returns:
436
+ google.auth.identity_pool.Credentials: The constructed
437
+ credentials.
438
+ """
439
+ return super(Credentials, cls).from_file(filename, **kwargs)
.venv/lib/python3.11/site-packages/google/auth/impersonated_credentials.py ADDED
@@ -0,0 +1,579 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2018 Google Inc.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Google Cloud Impersonated credentials.
16
+
17
+ This module provides authentication for applications where local credentials
18
+ impersonates a remote service account using `IAM Credentials API`_.
19
+
20
+ This class can be used to impersonate a service account as long as the original
21
+ Credential object has the "Service Account Token Creator" role on the target
22
+ service account.
23
+
24
+ .. _IAM Credentials API:
25
+ https://cloud.google.com/iam/credentials/reference/rest/
26
+ """
27
+
28
+ import base64
29
+ import copy
30
+ from datetime import datetime
31
+ import http.client as http_client
32
+ import json
33
+
34
+ from google.auth import _exponential_backoff
35
+ from google.auth import _helpers
36
+ from google.auth import credentials
37
+ from google.auth import exceptions
38
+ from google.auth import iam
39
+ from google.auth import jwt
40
+ from google.auth import metrics
41
+ from google.oauth2 import _client
42
+
43
+
44
+ _REFRESH_ERROR = "Unable to acquire impersonated credentials"
45
+
46
+ _DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
47
+
48
+ _GOOGLE_OAUTH2_TOKEN_ENDPOINT = "https://oauth2.googleapis.com/token"
49
+
50
+
51
+ def _make_iam_token_request(
52
+ request,
53
+ principal,
54
+ headers,
55
+ body,
56
+ universe_domain=credentials.DEFAULT_UNIVERSE_DOMAIN,
57
+ iam_endpoint_override=None,
58
+ ):
59
+ """Makes a request to the Google Cloud IAM service for an access token.
60
+ Args:
61
+ request (Request): The Request object to use.
62
+ principal (str): The principal to request an access token for.
63
+ headers (Mapping[str, str]): Map of headers to transmit.
64
+ body (Mapping[str, str]): JSON Payload body for the iamcredentials
65
+ API call.
66
+ iam_endpoint_override (Optiona[str]): The full IAM endpoint override
67
+ with the target_principal embedded. This is useful when supporting
68
+ impersonation with regional endpoints.
69
+
70
+ Raises:
71
+ google.auth.exceptions.TransportError: Raised if there is an underlying
72
+ HTTP connection error
73
+ google.auth.exceptions.RefreshError: Raised if the impersonated
74
+ credentials are not available. Common reasons are
75
+ `iamcredentials.googleapis.com` is not enabled or the
76
+ `Service Account Token Creator` is not assigned
77
+ """
78
+ iam_endpoint = iam_endpoint_override or iam._IAM_ENDPOINT.replace(
79
+ credentials.DEFAULT_UNIVERSE_DOMAIN, universe_domain
80
+ ).format(principal)
81
+
82
+ body = json.dumps(body).encode("utf-8")
83
+
84
+ response = request(url=iam_endpoint, method="POST", headers=headers, body=body)
85
+
86
+ # support both string and bytes type response.data
87
+ response_body = (
88
+ response.data.decode("utf-8")
89
+ if hasattr(response.data, "decode")
90
+ else response.data
91
+ )
92
+
93
+ if response.status != http_client.OK:
94
+ raise exceptions.RefreshError(_REFRESH_ERROR, response_body)
95
+
96
+ try:
97
+ token_response = json.loads(response_body)
98
+ token = token_response["accessToken"]
99
+ expiry = datetime.strptime(token_response["expireTime"], "%Y-%m-%dT%H:%M:%SZ")
100
+
101
+ return token, expiry
102
+
103
+ except (KeyError, ValueError) as caught_exc:
104
+ new_exc = exceptions.RefreshError(
105
+ "{}: No access token or invalid expiration in response.".format(
106
+ _REFRESH_ERROR
107
+ ),
108
+ response_body,
109
+ )
110
+ raise new_exc from caught_exc
111
+
112
+
113
+ class Credentials(
114
+ credentials.Scoped, credentials.CredentialsWithQuotaProject, credentials.Signing
115
+ ):
116
+ """This module defines impersonated credentials which are essentially
117
+ impersonated identities.
118
+
119
+ Impersonated Credentials allows credentials issued to a user or
120
+ service account to impersonate another. The target service account must
121
+ grant the originating credential principal the
122
+ `Service Account Token Creator`_ IAM role:
123
+
124
+ For more information about Token Creator IAM role and
125
+ IAMCredentials API, see
126
+ `Creating Short-Lived Service Account Credentials`_.
127
+
128
+ .. _Service Account Token Creator:
129
+ https://cloud.google.com/iam/docs/service-accounts#the_service_account_token_creator_role
130
+
131
+ .. _Creating Short-Lived Service Account Credentials:
132
+ https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials
133
+
134
+ Usage:
135
+
136
+ First grant source_credentials the `Service Account Token Creator`
137
+ role on the target account to impersonate. In this example, the
138
+ service account represented by svc_account.json has the
139
+ token creator role on
140
+ `impersonated-account@_project_.iam.gserviceaccount.com`.
141
+
142
+ Enable the IAMCredentials API on the source project:
143
+ `gcloud services enable iamcredentials.googleapis.com`.
144
+
145
+ Initialize a source credential which does not have access to
146
+ list bucket::
147
+
148
+ from google.oauth2 import service_account
149
+
150
+ target_scopes = [
151
+ 'https://www.googleapis.com/auth/devstorage.read_only']
152
+
153
+ source_credentials = (
154
+ service_account.Credentials.from_service_account_file(
155
+ '/path/to/svc_account.json',
156
+ scopes=target_scopes))
157
+
158
+ Now use the source credentials to acquire credentials to impersonate
159
+ another service account::
160
+
161
+ from google.auth import impersonated_credentials
162
+
163
+ target_credentials = impersonated_credentials.Credentials(
164
+ source_credentials=source_credentials,
165
+ target_principal='impersonated-account@_project_.iam.gserviceaccount.com',
166
+ target_scopes = target_scopes,
167
+ lifetime=500)
168
+
169
+ Resource access is granted::
170
+
171
+ client = storage.Client(credentials=target_credentials)
172
+ buckets = client.list_buckets(project='your_project')
173
+ for bucket in buckets:
174
+ print(bucket.name)
175
+ """
176
+
177
    def __init__(
        self,
        source_credentials,
        target_principal,
        target_scopes,
        delegates=None,
        subject=None,
        lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
        quota_project_id=None,
        iam_endpoint_override=None,
    ):
        """
        Args:
            source_credentials (google.auth.Credentials): The source credential
                used to acquire the impersonated credentials.
            target_principal (str): The service account to impersonate.
            target_scopes (Sequence[str]): Scopes to request during the
                authorization grant.
            delegates (Sequence[str]): The chained list of delegates required
                to grant the final access_token. If set, the sequence of
                identities must have "Service Account Token Creator" capability
                granted to the preceding identity. For example, if set to
                [serviceAccountB, serviceAccountC], the source_credential
                must have the Token Creator role on serviceAccountB.
                serviceAccountB must have the Token Creator on
                serviceAccountC.
                Finally, C must have Token Creator on target_principal.
                If left unset, source_credential must have that role on
                target_principal.
            lifetime (int): Number of seconds the delegated credential should
                be valid for (up to 3600).
            quota_project_id (Optional[str]): The project ID used for quota and billing.
                This project may be different from the project used to
                create the credentials.
            iam_endpoint_override (Optional[str]): The full IAM endpoint override
                with the target_principal embedded. This is useful when supporting
                impersonation with regional endpoints.
            subject (Optional[str]): sub field of a JWT. This field should only be set
                if you wish to impersonate as a user. This feature is useful when
                using domain wide delegation.
        """

        super(Credentials, self).__init__()

        # Copy so the scope adjustment below never mutates the caller's object.
        self._source_credentials = copy.copy(source_credentials)
        # Service account source credentials must have the _IAM_SCOPE
        # added to refresh correctly. User credentials cannot have
        # their original scopes modified.
        if isinstance(self._source_credentials, credentials.Scoped):
            self._source_credentials = self._source_credentials.with_scopes(
                iam._IAM_SCOPE
            )
        # If the source credential is service account and self signed jwt
        # is needed, we need to create a jwt credential inside it
        if (
            hasattr(self._source_credentials, "_create_self_signed_jwt")
            and self._source_credentials._always_use_jwt_access
        ):
            self._source_credentials._create_self_signed_jwt(None)

        self._universe_domain = source_credentials.universe_domain
        self._target_principal = target_principal
        self._target_scopes = target_scopes
        self._delegates = delegates
        self._subject = subject
        self._lifetime = lifetime or _DEFAULT_TOKEN_LIFETIME_SECS
        self.token = None
        # Expiry starts at "now": with token=None this leaves the credentials
        # in a state that requires a refresh before first use.
        self.expiry = _helpers.utcnow()
        self._quota_project_id = quota_project_id
        self._iam_endpoint_override = iam_endpoint_override
        self._cred_file_path = None
248
+
249
    def _metric_header_for_usage(self):
        """Return the metrics header value identifying these credentials as
        service-account impersonation credentials."""
        return metrics.CRED_TYPE_SA_IMPERSONATE
251
+
252
    @_helpers.copy_docstring(credentials.Credentials)
    def refresh(self, request):
        # All of the work (source refresh, IAM token exchange or the
        # domain-wide-delegation JWT flow) happens in _update_token.
        self._update_token(request)
255
+
256
    def _update_token(self, request):
        """Updates credentials with a new access_token representing
        the impersonated account.

        Args:
            request (google.auth.transport.requests.Request): Request object
                to use for refreshing credentials.
        """

        # Refresh our source credentials if it is not valid.
        if (
            self._source_credentials.token_state == credentials.TokenState.STALE
            or self._source_credentials.token_state == credentials.TokenState.INVALID
        ):
            self._source_credentials.refresh(request)

        # Lifetime is sent as a duration string, e.g. "500s".
        body = {
            "delegates": self._delegates,
            "scope": self._target_scopes,
            "lifetime": str(self._lifetime) + "s",
        }

        headers = {
            "Content-Type": "application/json",
            metrics.API_CLIENT_HEADER: metrics.token_request_access_token_impersonate(),
        }

        # Apply the source credentials authentication info.
        self._source_credentials.apply(headers)

        # If a subject is specified a domain-wide delegation auth-flow is initiated
        # to impersonate as the provided subject (user).
        if self._subject:
            if self.universe_domain != credentials.DEFAULT_UNIVERSE_DOMAIN:
                raise exceptions.GoogleAuthError(
                    "Domain-wide delegation is not supported in universes other "
                    + "than googleapis.com"
                )

            now = _helpers.utcnow()
            # Claims for the assertion signed on behalf of target_principal;
            # "sub" carries the user to impersonate.
            payload = {
                "iss": self._target_principal,
                "scope": _helpers.scopes_to_string(self._target_scopes or ()),
                "sub": self._subject,
                "aud": _GOOGLE_OAUTH2_TOKEN_ENDPOINT,
                "iat": _helpers.datetime_to_secs(now),
                "exp": _helpers.datetime_to_secs(now) + _DEFAULT_TOKEN_LIFETIME_SECS,
            }

            # Have the IAM signJwt endpoint sign the assertion with the
            # target principal's system-managed key.
            assertion = _sign_jwt_request(
                request=request,
                principal=self._target_principal,
                headers=headers,
                payload=payload,
                delegates=self._delegates,
            )

            # Exchange the signed assertion for an access token.
            self.token, self.expiry, _ = _client.jwt_grant(
                request, _GOOGLE_OAUTH2_TOKEN_ENDPOINT, assertion
            )

            return

        # Plain impersonation path: call generateAccessToken via IAM.
        self.token, self.expiry = _make_iam_token_request(
            request=request,
            principal=self._target_principal,
            headers=headers,
            body=body,
            universe_domain=self.universe_domain,
            iam_endpoint_override=self._iam_endpoint_override,
        )
327
+
328
    def sign_bytes(self, message):
        """Signs ``message`` using the impersonated account's system-managed
        private key via the IAM Credentials ``signBlob`` endpoint.

        Args:
            message (bytes): The message to sign.

        Returns:
            bytes: The decoded signature returned by the endpoint.

        Raises:
            google.auth.exceptions.TransportError: If the endpoint returns a
                non-retryable error, or if all retries are exhausted.
        """
        # Imported here to avoid a circular import at module load time.
        from google.auth.transport.requests import AuthorizedSession

        iam_sign_endpoint = iam._IAM_SIGN_ENDPOINT.replace(
            credentials.DEFAULT_UNIVERSE_DOMAIN, self.universe_domain
        ).format(self._target_principal)

        body = {
            "payload": base64.b64encode(message).decode("utf-8"),
            "delegates": self._delegates,
        }

        headers = {"Content-Type": "application/json"}

        # The request is authenticated with the *source* credentials.
        authed_session = AuthorizedSession(self._source_credentials)

        try:
            retries = _exponential_backoff.ExponentialBackoff()
            for _ in retries:
                response = authed_session.post(
                    url=iam_sign_endpoint, headers=headers, json=body
                )
                if response.status_code in iam.IAM_RETRY_CODES:
                    # Retryable status: back off and try again.
                    continue
                if response.status_code != http_client.OK:
                    raise exceptions.TransportError(
                        "Error calling sign_bytes: {}".format(response.json())
                    )

                return base64.b64decode(response.json()["signedBlob"])
        finally:
            authed_session.close()
        # Reached only when every attempt returned a retryable status.
        raise exceptions.TransportError("exhausted signBlob endpoint retries")
361
+
362
    @property
    def signer_email(self):
        # The impersonated service account is the signing identity.
        return self._target_principal
365
+
366
    @property
    def service_account_email(self):
        # Reported service account is the impersonation target, not the source.
        return self._target_principal
369
+
370
    @property
    def signer(self):
        # These credentials act as their own signer via sign_bytes().
        return self
373
+
374
    @property
    def requires_scopes(self):
        # Scopes are required only when none were supplied at construction.
        return not self._target_scopes
377
+
378
    @_helpers.copy_docstring(credentials.Credentials)
    def get_cred_info(self):
        # Only meaningful when the credential was loaded from a file;
        # _cred_file_path is set externally (defaults to None in __init__).
        if self._cred_file_path:
            return {
                "credential_source": self._cred_file_path,
                "credential_type": "impersonated credentials",
                "principal": self._target_principal,
            }
        return None
387
+
388
+ def _make_copy(self):
389
+ cred = self.__class__(
390
+ self._source_credentials,
391
+ target_principal=self._target_principal,
392
+ target_scopes=self._target_scopes,
393
+ delegates=self._delegates,
394
+ lifetime=self._lifetime,
395
+ quota_project_id=self._quota_project_id,
396
+ iam_endpoint_override=self._iam_endpoint_override,
397
+ )
398
+ cred._cred_file_path = self._cred_file_path
399
+ return cred
400
+
401
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
402
+ def with_quota_project(self, quota_project_id):
403
+ cred = self._make_copy()
404
+ cred._quota_project_id = quota_project_id
405
+ return cred
406
+
407
+ @_helpers.copy_docstring(credentials.Scoped)
408
+ def with_scopes(self, scopes, default_scopes=None):
409
+ cred = self._make_copy()
410
+ cred._target_scopes = scopes or default_scopes
411
+ return cred
412
+
413
+
414
class IDTokenCredentials(credentials.CredentialsWithQuotaProject):
    """Open ID Connect ID Token-based service account credentials.

    Wraps impersonated :class:`Credentials` and exchanges them for OIDC ID
    tokens via the IAM ``generateIdToken`` endpoint.
    """

    def __init__(
        self,
        target_credentials,
        target_audience=None,
        include_email=False,
        quota_project_id=None,
    ):
        """
        Args:
            target_credentials (google.auth.Credentials): The target
                credential used to acquire the ID tokens for.
            target_audience (string): Audience to issue the token for.
            include_email (bool): Include email in IdToken
            quota_project_id (Optional[str]): The project ID used for
                quota and billing.

        Raises:
            google.auth.exceptions.GoogleAuthError: If ``target_credentials``
                is not an impersonated ``Credentials`` instance.
        """
        super(IDTokenCredentials, self).__init__()

        if not isinstance(target_credentials, Credentials):
            raise exceptions.GoogleAuthError(
                "Provided Credential must be " "impersonated_credentials"
            )
        self._target_credentials = target_credentials
        self._target_audience = target_audience
        self._include_email = include_email
        self._quota_project_id = quota_project_id

    def from_credentials(self, target_credentials, target_audience=None):
        """Returns a copy bound to new target credentials (and optionally a
        new audience), preserving include_email and the quota project."""
        return self.__class__(
            target_credentials=target_credentials,
            target_audience=target_audience,
            include_email=self._include_email,
            quota_project_id=self._quota_project_id,
        )

    def with_target_audience(self, target_audience):
        """Returns a copy of these credentials with a new target audience."""
        return self.__class__(
            target_credentials=self._target_credentials,
            target_audience=target_audience,
            include_email=self._include_email,
            quota_project_id=self._quota_project_id,
        )

    def with_include_email(self, include_email):
        """Returns a copy of these credentials with a new include_email flag."""
        return self.__class__(
            target_credentials=self._target_credentials,
            target_audience=self._target_audience,
            include_email=include_email,
            quota_project_id=self._quota_project_id,
        )

    @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
    def with_quota_project(self, quota_project_id):
        return self.__class__(
            target_credentials=self._target_credentials,
            target_audience=self._target_audience,
            include_email=self._include_email,
            quota_project_id=quota_project_id,
        )

    @_helpers.copy_docstring(credentials.Credentials)
    def refresh(self, request):
        # Imported here to avoid a circular import at module load time.
        from google.auth.transport.requests import AuthorizedSession

        # Build the generateIdToken URL for the target universe domain.
        iam_sign_endpoint = iam._IAM_IDTOKEN_ENDPOINT.replace(
            credentials.DEFAULT_UNIVERSE_DOMAIN,
            self._target_credentials.universe_domain,
        ).format(self._target_credentials.signer_email)

        body = {
            "audience": self._target_audience,
            "delegates": self._target_credentials._delegates,
            "includeEmail": self._include_email,
        }

        headers = {
            "Content-Type": "application/json",
            metrics.API_CLIENT_HEADER: metrics.token_request_id_token_impersonate(),
        }

        # Authenticate the call with the *source* credentials of the
        # impersonated credential chain.
        authed_session = AuthorizedSession(
            self._target_credentials._source_credentials, auth_request=request
        )

        try:
            response = authed_session.post(
                url=iam_sign_endpoint,
                headers=headers,
                data=json.dumps(body).encode("utf-8"),
            )
        finally:
            authed_session.close()

        if response.status_code != http_client.OK:
            raise exceptions.RefreshError(
                "Error getting ID token: {}".format(response.json())
            )

        id_token = response.json()["token"]
        self.token = id_token
        # Expiry is taken from the token's own `exp` claim; the token was
        # just minted by the IAM endpoint so it is decoded unverified.
        self.expiry = datetime.utcfromtimestamp(
            jwt.decode(id_token, verify=False)["exp"]
        )
522
+
523
+
524
def _sign_jwt_request(request, principal, headers, payload, delegates=None):
    """Makes a request to the Google Cloud IAM service to sign a JWT using a
    service account's system-managed private key.

    Args:
        request (Request): The Request object to use.
        principal (str): The principal to request an access token for.
        headers (Mapping[str, str]): Map of headers to transmit.
        payload (Mapping[str, str]): The JWT payload to sign. Must be a
            serialized JSON object that contains a JWT Claims Set.
        delegates (Sequence[str]): The chained list of delegates required
            to grant the final access_token. If set, the sequence of
            identities must have "Service Account Token Creator" capability
            granted to the preceding identity. For example, if set to
            [serviceAccountB, serviceAccountC], the source_credential
            must have the Token Creator role on serviceAccountB.
            serviceAccountB must have the Token Creator on
            serviceAccountC.
            Finally, C must have Token Creator on target_principal.
            If left unset, source_credential must have that role on
            target_principal.

    Returns:
        str: The signed JWT returned by the endpoint.

    Raises:
        google.auth.exceptions.TransportError: Raised if there is an underlying
            HTTP connection error
        google.auth.exceptions.RefreshError: Raised if the impersonated
            credentials are not available. Common reasons are
            `iamcredentials.googleapis.com` is not enabled or the
            `Service Account Token Creator` is not assigned
    """
    # Bug fix: the default was previously a shared mutable list (`[]`).
    # `None` now means "no delegates" and serializes identically.
    if delegates is None:
        delegates = []

    iam_endpoint = iam._IAM_SIGNJWT_ENDPOINT.format(principal)

    body = {"delegates": delegates, "payload": json.dumps(payload)}
    body = json.dumps(body).encode("utf-8")

    response = request(url=iam_endpoint, method="POST", headers=headers, body=body)

    # support both string and bytes type response.data
    response_body = (
        response.data.decode("utf-8")
        if hasattr(response.data, "decode")
        else response.data
    )

    if response.status != http_client.OK:
        raise exceptions.RefreshError(_REFRESH_ERROR, response_body)

    try:
        jwt_response = json.loads(response_body)
        signed_jwt = jwt_response["signedJwt"]
        return signed_jwt

    except (KeyError, ValueError) as caught_exc:
        new_exc = exceptions.RefreshError(
            "{}: No signed JWT in response.".format(_REFRESH_ERROR), response_body
        )
        raise new_exc from caught_exc
.venv/lib/python3.11/site-packages/google/auth/jwt.py ADDED
@@ -0,0 +1,878 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """JSON Web Tokens
16
+
17
+ Provides support for creating (encoding) and verifying (decoding) JWTs,
18
+ especially JWTs generated and consumed by Google infrastructure.
19
+
20
+ See `rfc7519`_ for more details on JWTs.
21
+
22
+ To encode a JWT use :func:`encode`::
23
+
24
+ from google.auth import crypt
25
+ from google.auth import jwt
26
+
27
+ signer = crypt.Signer(private_key)
28
+ payload = {'some': 'payload'}
29
+ encoded = jwt.encode(signer, payload)
30
+
31
+ To decode a JWT and verify claims use :func:`decode`::
32
+
33
+ claims = jwt.decode(encoded, certs=public_certs)
34
+
35
+ You can also skip verification::
36
+
37
+ claims = jwt.decode(encoded, verify=False)
38
+
39
+ .. _rfc7519: https://tools.ietf.org/html/rfc7519
40
+
41
+ """
42
+
43
+ try:
44
+ from collections.abc import Mapping
45
+ # Python 2.7 compatibility
46
+ except ImportError: # pragma: NO COVER
47
+ from collections import Mapping # type: ignore
48
+ import copy
49
+ import datetime
50
+ import json
51
+ import urllib
52
+
53
+ import cachetools
54
+
55
+ from google.auth import _helpers
56
+ from google.auth import _service_account_info
57
+ from google.auth import crypt
58
+ from google.auth import exceptions
59
+ import google.auth.credentials
60
+
61
+ try:
62
+ from google.auth.crypt import es256
63
+ except ImportError: # pragma: NO COVER
64
+ es256 = None # type: ignore
65
+
66
+ _DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
67
+ _DEFAULT_MAX_CACHE_SIZE = 10
68
+ _ALGORITHM_TO_VERIFIER_CLASS = {"RS256": crypt.RSAVerifier}
69
+ _CRYPTOGRAPHY_BASED_ALGORITHMS = frozenset(["ES256"])
70
+
71
+ if es256 is not None: # pragma: NO COVER
72
+ _ALGORITHM_TO_VERIFIER_CLASS["ES256"] = es256.ES256Verifier # type: ignore
73
+
74
+
75
def encode(signer, payload, header=None, key_id=None):
    """Make a signed JWT.

    Args:
        signer (google.auth.crypt.Signer): The signer used to sign the JWT.
        payload (Mapping[str, str]): The JWT payload.
        header (Mapping[str, str]): Additional JWT header payload.
        key_id (str): The key id to add to the JWT header. If the
            signer has a key id it will be used as the default. If this is
            specified it will override the signer's key id.

    Returns:
        bytes: The encoded JWT.
    """
    # Bug fix: work on a copy so the caller's mapping is never mutated
    # (the "typ"/"alg"/"kid" entries used to be written into it in place).
    header = {} if header is None else dict(header)

    if key_id is None:
        key_id = signer.key_id

    header.update({"typ": "JWT"})

    # Default the algorithm from the signer type unless the caller set one.
    if "alg" not in header:
        if es256 is not None and isinstance(signer, es256.ES256Signer):
            header.update({"alg": "ES256"})
        else:
            header.update({"alg": "RS256"})

    if key_id is not None:
        header["kid"] = key_id

    segments = [
        _helpers.unpadded_urlsafe_b64encode(json.dumps(header).encode("utf-8")),
        _helpers.unpadded_urlsafe_b64encode(json.dumps(payload).encode("utf-8")),
    ]

    # The signature covers "header.payload".
    signing_input = b".".join(segments)
    signature = signer.sign(signing_input)
    segments.append(_helpers.unpadded_urlsafe_b64encode(signature))

    return b".".join(segments)
116
+
117
+
118
def _decode_jwt_segment(encoded_section):
    """Decodes a single JWT segment (base64url-encoded JSON)."""
    raw = _helpers.padded_urlsafe_b64decode(encoded_section)
    try:
        parsed = json.loads(raw.decode("utf-8"))
    except ValueError as parse_error:
        raise exceptions.MalformedError(
            "Can't parse segment: {0}".format(raw)
        ) from parse_error
    return parsed
+ raise new_exc from caught_exc
128
+
129
+
130
def _unverified_decode(token):
    """Splits and parses a JWT without any signature or claim checks.

    Args:
        token (Union[str, bytes]): The encoded JWT.

    Returns:
        Tuple[Mapping, Mapping, str, str]: header, payload, signed_section,
        and signature.

    Raises:
        google.auth.exceptions.MalformedError: if the token does not contain
            exactly three segments or if a segment is not a JSON object.
    """
    token = _helpers.to_bytes(token)

    if token.count(b".") != 2:
        raise exceptions.MalformedError(
            "Wrong number of segments in token: {0}".format(token)
        )

    encoded_header, encoded_payload, encoded_signature = token.split(b".")

    # Parse the two JSON segments first.
    header = _decode_jwt_segment(encoded_header)
    payload = _decode_jwt_segment(encoded_payload)

    # The signature covers "header.payload" exactly as transmitted.
    signed_section = b".".join((encoded_header, encoded_payload))
    signature = _helpers.padded_urlsafe_b64decode(encoded_signature)

    if not isinstance(header, Mapping):
        raise exceptions.MalformedError(
            "Header segment should be a JSON object: {0}".format(encoded_header)
        )

    if not isinstance(payload, Mapping):
        raise exceptions.MalformedError(
            "Payload segment should be a JSON object: {0}".format(encoded_payload)
        )

    return header, payload, signed_section, signature
169
+
170
+
171
def decode_header(token):
    """Return the decoded header of a token.

    No verification is done. This is useful to extract the key id from
    the header in order to acquire the appropriate certificate to verify
    the token.

    Args:
        token (Union[str, bytes]): the encoded JWT.

    Returns:
        Mapping: The decoded JWT header.
    """
    # Only the first element (the header) of the unverified decode is needed.
    return _unverified_decode(token)[0]
186
+
187
+
188
def _verify_iat_and_exp(payload, clock_skew_in_seconds=0):
    """Verifies the ``iat`` (Issued At) and ``exp`` (Expires) claims in a token
    payload.

    Args:
        payload (Mapping[str, str]): The JWT payload.
        clock_skew_in_seconds (int): The clock skew used for `iat` and `exp`
            validation.

    Raises:
        google.auth.exceptions.InvalidValue: if value validation failed.
        google.auth.exceptions.MalformedError: if schema validation failed.
    """
    now = _helpers.datetime_to_secs(_helpers.utcnow())

    # Make sure the iat and exp claims are present.
    for key in ("iat", "exp"):
        if key not in payload:
            raise exceptions.MalformedError(
                "Token does not contain required claim {}".format(key)
            )

    # Make sure the token wasn't issued in the future.
    iat = payload["iat"]
    # Err on the side of accepting a token that is slightly early to account
    # for clock skew.
    earliest = iat - clock_skew_in_seconds
    if now < earliest:
        # NOTE(review): the message reports the raw `iat`, not the
        # skew-adjusted `earliest` bound used in the comparison.
        raise exceptions.InvalidValue(
            "Token used too early, {} < {}. Check that your computer's clock is set correctly.".format(
                now, iat
            )
        )

    # Make sure the token wasn't issued in the past.
    exp = payload["exp"]
    # Err on the side of accepting a token that is slightly out of date
    # to account for clock skew.
    latest = exp + clock_skew_in_seconds
    if latest < now:
        raise exceptions.InvalidValue("Token expired, {} < {}".format(latest, now))
229
+
230
+
231
def decode(token, certs=None, verify=True, audience=None, clock_skew_in_seconds=0):
    """Decode and verify a JWT.

    Args:
        token (str): The encoded JWT.
        certs (Union[str, bytes, Mapping[str, Union[str, bytes]]]): The
            certificate used to validate the JWT signature. If bytes or string,
            it must be the public key certificate in PEM format. If a mapping,
            it must be a mapping of key IDs to public key certificates in PEM
            format. The mapping must contain the same key ID that's specified
            in the token's header.
        verify (bool): Whether to perform signature and claim validation.
            Verification is done by default.
        audience (str or list): The audience claim, 'aud', that this JWT should
            contain. Or a list of audience claims. If None then the JWT's 'aud'
            parameter is not verified.
        clock_skew_in_seconds (int): The clock skew used for `iat` and `exp`
            validation.

    Returns:
        Mapping[str, str]: The deserialized JSON payload in the JWT.

    Raises:
        google.auth.exceptions.InvalidValue: if value validation failed.
        google.auth.exceptions.MalformedError: if schema validation failed.
    """
    header, payload, signed_section, signature = _unverified_decode(token)

    # Skipping verification returns the payload as-is, unvalidated.
    if not verify:
        return payload

    # Pluck the key id and algorithm from the header and make sure we have
    # a verifier that can support it.
    key_alg = header.get("alg")
    key_id = header.get("kid")

    try:
        verifier_cls = _ALGORITHM_TO_VERIFIER_CLASS[key_alg]
    except KeyError as exc:
        # ES256 is only registered when the cryptography package is present,
        # so distinguish "missing dependency" from "unknown algorithm".
        if key_alg in _CRYPTOGRAPHY_BASED_ALGORITHMS:
            raise exceptions.InvalidValue(
                "The key algorithm {} requires the cryptography package to be installed.".format(
                    key_alg
                )
            ) from exc
        else:
            raise exceptions.InvalidValue(
                "Unsupported signature algorithm {}".format(key_alg)
            ) from exc
    # If certs is specified as a dictionary of key IDs to certificates, then
    # use the certificate identified by the key ID in the token header.
    if isinstance(certs, Mapping):
        if key_id:
            if key_id not in certs:
                raise exceptions.MalformedError(
                    "Certificate for key id {} not found.".format(key_id)
                )
            certs_to_check = [certs[key_id]]
        # If there's no key id in the header, check against all of the certs.
        else:
            certs_to_check = certs.values()
    else:
        certs_to_check = certs

    # Verify that the signature matches the message.
    if not crypt.verify_signature(
        signed_section, signature, certs_to_check, verifier_cls
    ):
        raise exceptions.MalformedError("Could not verify token signature.")

    # Verify the issued at and created times in the payload.
    _verify_iat_and_exp(payload, clock_skew_in_seconds)

    # Check audience.
    if audience is not None:
        claim_audience = payload.get("aud")
        # A single expected audience is normalized to a one-element list.
        if isinstance(audience, str):
            audience = [audience]
        if claim_audience not in audience:
            raise exceptions.InvalidValue(
                "Token has wrong audience {}, expected one of {}".format(
                    claim_audience, audience
                )
            )

    return payload
317
+
318
+
319
+ class Credentials(
320
+ google.auth.credentials.Signing, google.auth.credentials.CredentialsWithQuotaProject
321
+ ):
322
+ """Credentials that use a JWT as the bearer token.
323
+
324
+ These credentials require an "audience" claim. This claim identifies the
325
+ intended recipient of the bearer token.
326
+
327
+ The constructor arguments determine the claims for the JWT that is
328
+ sent with requests. Usually, you'll construct these credentials with
329
+ one of the helper constructors as shown in the next section.
330
+
331
+ To create JWT credentials using a Google service account private key
332
+ JSON file::
333
+
334
+ audience = 'https://pubsub.googleapis.com/google.pubsub.v1.Publisher'
335
+ credentials = jwt.Credentials.from_service_account_file(
336
+ 'service-account.json',
337
+ audience=audience)
338
+
339
+ If you already have the service account file loaded and parsed::
340
+
341
+ service_account_info = json.load(open('service_account.json'))
342
+ credentials = jwt.Credentials.from_service_account_info(
343
+ service_account_info,
344
+ audience=audience)
345
+
346
+ Both helper methods pass on arguments to the constructor, so you can
347
+ specify the JWT claims::
348
+
349
+ credentials = jwt.Credentials.from_service_account_file(
350
+ 'service-account.json',
351
+ audience=audience,
352
+ additional_claims={'meta': 'data'})
353
+
354
+ You can also construct the credentials directly if you have a
355
+ :class:`~google.auth.crypt.Signer` instance::
356
+
357
+ credentials = jwt.Credentials(
358
+ signer,
359
+ issuer='your-issuer',
360
+ subject='your-subject',
361
+ audience=audience)
362
+
363
+ The claims are considered immutable. If you want to modify the claims,
364
+ you can easily create another instance using :meth:`with_claims`::
365
+
366
+ new_audience = (
367
+ 'https://pubsub.googleapis.com/google.pubsub.v1.Subscriber')
368
+ new_credentials = credentials.with_claims(audience=new_audience)
369
+ """
370
+
371
+ def __init__(
372
+ self,
373
+ signer,
374
+ issuer,
375
+ subject,
376
+ audience,
377
+ additional_claims=None,
378
+ token_lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
379
+ quota_project_id=None,
380
+ ):
381
+ """
382
+ Args:
383
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
384
+ issuer (str): The `iss` claim.
385
+ subject (str): The `sub` claim.
386
+ audience (str): the `aud` claim. The intended audience for the
387
+ credentials.
388
+ additional_claims (Mapping[str, str]): Any additional claims for
389
+ the JWT payload.
390
+ token_lifetime (int): The amount of time in seconds for
391
+ which the token is valid. Defaults to 1 hour.
392
+ quota_project_id (Optional[str]): The project ID used for quota
393
+ and billing.
394
+ """
395
+ super(Credentials, self).__init__()
396
+ self._signer = signer
397
+ self._issuer = issuer
398
+ self._subject = subject
399
+ self._audience = audience
400
+ self._token_lifetime = token_lifetime
401
+ self._quota_project_id = quota_project_id
402
+
403
+ if additional_claims is None:
404
+ additional_claims = {}
405
+
406
+ self._additional_claims = additional_claims
407
+
408
+ @classmethod
409
+ def _from_signer_and_info(cls, signer, info, **kwargs):
410
+ """Creates a Credentials instance from a signer and service account
411
+ info.
412
+
413
+ Args:
414
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
415
+ info (Mapping[str, str]): The service account info.
416
+ kwargs: Additional arguments to pass to the constructor.
417
+
418
+ Returns:
419
+ google.auth.jwt.Credentials: The constructed credentials.
420
+
421
+ Raises:
422
+ google.auth.exceptions.MalformedError: If the info is not in the expected format.
423
+ """
424
+ kwargs.setdefault("subject", info["client_email"])
425
+ kwargs.setdefault("issuer", info["client_email"])
426
+ return cls(signer, **kwargs)
427
+
428
+ @classmethod
429
+ def from_service_account_info(cls, info, **kwargs):
430
+ """Creates an Credentials instance from a dictionary.
431
+
432
+ Args:
433
+ info (Mapping[str, str]): The service account info in Google
434
+ format.
435
+ kwargs: Additional arguments to pass to the constructor.
436
+
437
+ Returns:
438
+ google.auth.jwt.Credentials: The constructed credentials.
439
+
440
+ Raises:
441
+ google.auth.exceptions.MalformedError: If the info is not in the expected format.
442
+ """
443
+ signer = _service_account_info.from_dict(info, require=["client_email"])
444
+ return cls._from_signer_and_info(signer, info, **kwargs)
445
+
446
+ @classmethod
447
+ def from_service_account_file(cls, filename, **kwargs):
448
+ """Creates a Credentials instance from a service account .json file
449
+ in Google format.
450
+
451
+ Args:
452
+ filename (str): The path to the service account .json file.
453
+ kwargs: Additional arguments to pass to the constructor.
454
+
455
+ Returns:
456
+ google.auth.jwt.Credentials: The constructed credentials.
457
+ """
458
+ info, signer = _service_account_info.from_filename(
459
+ filename, require=["client_email"]
460
+ )
461
+ return cls._from_signer_and_info(signer, info, **kwargs)
462
+
463
+ @classmethod
464
+ def from_signing_credentials(cls, credentials, audience, **kwargs):
465
+ """Creates a new :class:`google.auth.jwt.Credentials` instance from an
466
+ existing :class:`google.auth.credentials.Signing` instance.
467
+
468
+ The new instance will use the same signer as the existing instance and
469
+ will use the existing instance's signer email as the issuer and
470
+ subject by default.
471
+
472
+ Example::
473
+
474
+ svc_creds = service_account.Credentials.from_service_account_file(
475
+ 'service_account.json')
476
+ audience = (
477
+ 'https://pubsub.googleapis.com/google.pubsub.v1.Publisher')
478
+ jwt_creds = jwt.Credentials.from_signing_credentials(
479
+ svc_creds, audience=audience)
480
+
481
+ Args:
482
+ credentials (google.auth.credentials.Signing): The credentials to
483
+ use to construct the new credentials.
484
+ audience (str): the `aud` claim. The intended audience for the
485
+ credentials.
486
+ kwargs: Additional arguments to pass to the constructor.
487
+
488
+ Returns:
489
+ google.auth.jwt.Credentials: A new Credentials instance.
490
+ """
491
+ kwargs.setdefault("issuer", credentials.signer_email)
492
+ kwargs.setdefault("subject", credentials.signer_email)
493
+ return cls(credentials.signer, audience=audience, **kwargs)
494
+
495
+ def with_claims(
496
+ self, issuer=None, subject=None, audience=None, additional_claims=None
497
+ ):
498
+ """Returns a copy of these credentials with modified claims.
499
+
500
+ Args:
501
+ issuer (str): The `iss` claim. If unspecified the current issuer
502
+ claim will be used.
503
+ subject (str): The `sub` claim. If unspecified the current subject
504
+ claim will be used.
505
+ audience (str): the `aud` claim. If unspecified the current
506
+ audience claim will be used.
507
+ additional_claims (Mapping[str, str]): Any additional claims for
508
+ the JWT payload. This will be merged with the current
509
+ additional claims.
510
+
511
+ Returns:
512
+ google.auth.jwt.Credentials: A new credentials instance.
513
+ """
514
+ new_additional_claims = copy.deepcopy(self._additional_claims)
515
+ new_additional_claims.update(additional_claims or {})
516
+
517
+ return self.__class__(
518
+ self._signer,
519
+ issuer=issuer if issuer is not None else self._issuer,
520
+ subject=subject if subject is not None else self._subject,
521
+ audience=audience if audience is not None else self._audience,
522
+ additional_claims=new_additional_claims,
523
+ quota_project_id=self._quota_project_id,
524
+ )
525
+
526
+ @_helpers.copy_docstring(google.auth.credentials.CredentialsWithQuotaProject)
527
+ def with_quota_project(self, quota_project_id):
528
+ return self.__class__(
529
+ self._signer,
530
+ issuer=self._issuer,
531
+ subject=self._subject,
532
+ audience=self._audience,
533
+ additional_claims=self._additional_claims,
534
+ quota_project_id=quota_project_id,
535
+ )
536
+
537
+ def _make_jwt(self):
538
+ """Make a signed JWT.
539
+
540
+ Returns:
541
+ Tuple[bytes, datetime]: The encoded JWT and the expiration.
542
+ """
543
+ now = _helpers.utcnow()
544
+ lifetime = datetime.timedelta(seconds=self._token_lifetime)
545
+ expiry = now + lifetime
546
+
547
+ payload = {
548
+ "iss": self._issuer,
549
+ "sub": self._subject,
550
+ "iat": _helpers.datetime_to_secs(now),
551
+ "exp": _helpers.datetime_to_secs(expiry),
552
+ }
553
+ if self._audience:
554
+ payload["aud"] = self._audience
555
+
556
+ payload.update(self._additional_claims)
557
+
558
+ jwt = encode(self._signer, payload)
559
+
560
+ return jwt, expiry
561
+
562
+ def refresh(self, request):
563
+ """Refreshes the access token.
564
+
565
+ Args:
566
+ request (Any): Unused.
567
+ """
568
+ # pylint: disable=unused-argument
569
+ # (pylint doesn't correctly recognize overridden methods.)
570
+ self.token, self.expiry = self._make_jwt()
571
+
572
    @_helpers.copy_docstring(google.auth.credentials.Signing)
    def sign_bytes(self, message):
        # No docstring here: copy_docstring supplies one from the Signing ABC.
        return self._signer.sign(message)
575
+
576
    @property # type: ignore
    @_helpers.copy_docstring(google.auth.credentials.Signing)
    def signer_email(self):
        # The issuer claim is the email of the signing service account.
        return self._issuer
580
+
581
    @property # type: ignore
    @_helpers.copy_docstring(google.auth.credentials.Signing)
    def signer(self):
        # The google.auth.crypt.Signer used to sign JWTs.
        return self._signer
585
+
586
    @property # type: ignore
    def additional_claims(self):
        """Additional claims the JWT object was created with."""
        return self._additional_claims
590
+
591
+
592
class OnDemandCredentials(
    google.auth.credentials.Signing, google.auth.credentials.CredentialsWithQuotaProject
):
    """On-demand JWT credentials.

    Like :class:`Credentials`, this class uses a JWT as the bearer token for
    authentication. However, this class does not require the audience at
    construction time. Instead, it will generate a new token on-demand for
    each request using the request URI as the audience. It caches tokens
    so that multiple requests to the same URI do not incur the overhead
    of generating a new token every time.

    This behavior is especially useful for `gRPC`_ clients. A gRPC service may
    have multiple audiences and gRPC clients may not know all of the audiences
    required for accessing a particular service. With these credentials,
    no knowledge of the audiences is required ahead of time.

    .. _grpc: http://www.grpc.io/
    """

    def __init__(
        self,
        signer,
        issuer,
        subject,
        additional_claims=None,
        token_lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
        max_cache_size=_DEFAULT_MAX_CACHE_SIZE,
        quota_project_id=None,
    ):
        """
        Args:
            signer (google.auth.crypt.Signer): The signer used to sign JWTs.
            issuer (str): The `iss` claim.
            subject (str): The `sub` claim.
            additional_claims (Mapping[str, str]): Any additional claims for
                the JWT payload.
            token_lifetime (int): The amount of time in seconds for
                which the token is valid. Defaults to 1 hour.
            max_cache_size (int): The maximum number of JWT tokens to keep in
                cache. Tokens are cached using :class:`cachetools.LRUCache`.
            quota_project_id (Optional[str]): The project ID used for quota
                and billing.

        """
        super(OnDemandCredentials, self).__init__()
        self._signer = signer
        self._issuer = issuer
        self._subject = subject
        self._token_lifetime = token_lifetime
        self._quota_project_id = quota_project_id

        if additional_claims is None:
            additional_claims = {}

        self._additional_claims = additional_claims
        # Maps audience -> (token, expiry); least-recently-used audiences are
        # evicted once the cache reaches max_cache_size entries.
        self._cache = cachetools.LRUCache(maxsize=max_cache_size)

    @classmethod
    def _from_signer_and_info(cls, signer, info, **kwargs):
        """Creates an OnDemandCredentials instance from a signer and service
        account info.

        Args:
            signer (google.auth.crypt.Signer): The signer used to sign JWTs.
            info (Mapping[str, str]): The service account info.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.jwt.OnDemandCredentials: The constructed credentials.

        Raises:
            google.auth.exceptions.MalformedError: If the info is not in the expected format.
        """
        # The service account email doubles as both issuer and subject.
        kwargs.setdefault("subject", info["client_email"])
        kwargs.setdefault("issuer", info["client_email"])
        return cls(signer, **kwargs)

    @classmethod
    def from_service_account_info(cls, info, **kwargs):
        """Creates an OnDemandCredentials instance from a dictionary.

        Args:
            info (Mapping[str, str]): The service account info in Google
                format.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.jwt.OnDemandCredentials: The constructed credentials.

        Raises:
            google.auth.exceptions.MalformedError: If the info is not in the expected format.
        """
        signer = _service_account_info.from_dict(info, require=["client_email"])
        return cls._from_signer_and_info(signer, info, **kwargs)

    @classmethod
    def from_service_account_file(cls, filename, **kwargs):
        """Creates an OnDemandCredentials instance from a service account .json
        file in Google format.

        Args:
            filename (str): The path to the service account .json file.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.jwt.OnDemandCredentials: The constructed credentials.
        """
        info, signer = _service_account_info.from_filename(
            filename, require=["client_email"]
        )
        return cls._from_signer_and_info(signer, info, **kwargs)

    @classmethod
    def from_signing_credentials(cls, credentials, **kwargs):
        """Creates a new :class:`google.auth.jwt.OnDemandCredentials` instance
        from an existing :class:`google.auth.credentials.Signing` instance.

        The new instance will use the same signer as the existing instance and
        will use the existing instance's signer email as the issuer and
        subject by default.

        Example::

            svc_creds = service_account.Credentials.from_service_account_file(
                'service_account.json')
            jwt_creds = jwt.OnDemandCredentials.from_signing_credentials(
                svc_creds)

        Args:
            credentials (google.auth.credentials.Signing): The credentials to
                use to construct the new credentials.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.jwt.OnDemandCredentials: A new credentials instance.
        """
        kwargs.setdefault("issuer", credentials.signer_email)
        kwargs.setdefault("subject", credentials.signer_email)
        return cls(credentials.signer, **kwargs)

    def with_claims(self, issuer=None, subject=None, additional_claims=None):
        """Returns a copy of these credentials with modified claims.

        Args:
            issuer (str): The `iss` claim. If unspecified the current issuer
                claim will be used.
            subject (str): The `sub` claim. If unspecified the current subject
                claim will be used.
            additional_claims (Mapping[str, str]): Any additional claims for
                the JWT payload. This will be merged with the current
                additional claims.

        Returns:
            google.auth.jwt.OnDemandCredentials: A new credentials instance.
        """
        # Deep copy so nested claim values are not shared between instances.
        new_additional_claims = copy.deepcopy(self._additional_claims)
        new_additional_claims.update(additional_claims or {})

        return self.__class__(
            self._signer,
            issuer=issuer if issuer is not None else self._issuer,
            subject=subject if subject is not None else self._subject,
            additional_claims=new_additional_claims,
            max_cache_size=self._cache.maxsize,
            quota_project_id=self._quota_project_id,
        )

    @_helpers.copy_docstring(google.auth.credentials.CredentialsWithQuotaProject)
    def with_quota_project(self, quota_project_id):

        return self.__class__(
            self._signer,
            issuer=self._issuer,
            subject=self._subject,
            additional_claims=self._additional_claims,
            max_cache_size=self._cache.maxsize,
            quota_project_id=quota_project_id,
        )

    @property
    def valid(self):
        """Checks the validity of the credentials.

        These credentials are always valid because it generates tokens on
        demand.
        """
        return True

    def _make_jwt_for_audience(self, audience):
        """Make a new JWT for the given audience.

        Args:
            audience (str): The intended audience.

        Returns:
            Tuple[bytes, datetime]: The encoded JWT and the expiration.
        """
        now = _helpers.utcnow()
        lifetime = datetime.timedelta(seconds=self._token_lifetime)
        expiry = now + lifetime

        # Unlike Credentials._make_jwt, the audience is always set here: it
        # is derived from the request URI.
        payload = {
            "iss": self._issuer,
            "sub": self._subject,
            "iat": _helpers.datetime_to_secs(now),
            "exp": _helpers.datetime_to_secs(expiry),
            "aud": audience,
        }

        payload.update(self._additional_claims)

        jwt = encode(self._signer, payload)

        return jwt, expiry

    def _get_jwt_for_audience(self, audience):
        """Get a JWT For a given audience.

        If there is already an existing, non-expired token in the cache for
        the audience, that token is used. Otherwise, a new token will be
        created.

        Args:
            audience (str): The intended audience.

        Returns:
            bytes: The encoded JWT.
        """
        token, expiry = self._cache.get(audience, (None, None))

        # Cache miss or expired entry: mint a fresh token and cache it.
        if token is None or expiry < _helpers.utcnow():
            token, expiry = self._make_jwt_for_audience(audience)
            self._cache[audience] = token, expiry

        return token

    def refresh(self, request):
        """Raises an exception, these credentials can not be directly
        refreshed.

        Args:
            request (Any): Unused.

        Raises:
            google.auth.RefreshError
        """
        # pylint: disable=unused-argument
        # (pylint doesn't correctly recognize overridden methods.)
        raise exceptions.RefreshError(
            "OnDemandCredentials can not be directly refreshed."
        )

    def before_request(self, request, method, url, headers):
        """Performs credential-specific before request logic.

        Args:
            request (Any): Unused. JWT credentials do not need to make an
                HTTP request to refresh.
            method (str): The request's HTTP method.
            url (str): The request's URI. This is used as the audience claim
                when generating the JWT.
            headers (Mapping): The request's headers.
        """
        # pylint: disable=unused-argument
        # (pylint doesn't correctly recognize overridden methods.)
        parts = urllib.parse.urlsplit(url)
        # Strip query string and fragment so tokens are shared across all
        # requests to the same scheme/host/path.
        audience = urllib.parse.urlunsplit(
            (parts.scheme, parts.netloc, parts.path, "", "")
        )
        token = self._get_jwt_for_audience(audience)
        self.apply(headers, token=token)

    @_helpers.copy_docstring(google.auth.credentials.Signing)
    def sign_bytes(self, message):
        return self._signer.sign(message)

    @property # type: ignore
    @_helpers.copy_docstring(google.auth.credentials.Signing)
    def signer_email(self):
        # The issuer claim is the email of the signing service account.
        return self._issuer

    @property # type: ignore
    @_helpers.copy_docstring(google.auth.credentials.Signing)
    def signer(self):
        return self._signer
.venv/lib/python3.11/site-packages/google/auth/metrics.py ADDED
@@ -0,0 +1,154 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2023 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """ We use x-goog-api-client header to report metrics. This module provides
16
+ the constants and helper methods to construct x-goog-api-client header.
17
+ """
18
+
19
+ import platform
20
+
21
+ from google.auth import version
22
+
23
+
24
# Header name used to report auth metrics to Google APIs.
API_CLIENT_HEADER = "x-goog-api-client"

# BYOID Specific consts
BYOID_HEADER_SECTION = "google-byoid-sdk"

# Auth request type: identifies what kind of token/operation the request is for.
REQUEST_TYPE_ACCESS_TOKEN = "auth-request-type/at"
REQUEST_TYPE_ID_TOKEN = "auth-request-type/it"
REQUEST_TYPE_MDS_PING = "auth-request-type/mds"
REQUEST_TYPE_REAUTH_START = "auth-request-type/re-start"
REQUEST_TYPE_REAUTH_CONTINUE = "auth-request-type/re-cont"

# Credential type: identifies which credential class issued the request.
CRED_TYPE_USER = "cred-type/u"
CRED_TYPE_SA_ASSERTION = "cred-type/sa"
CRED_TYPE_SA_JWT = "cred-type/jwt"
CRED_TYPE_SA_MDS = "cred-type/mds"
CRED_TYPE_SA_IMPERSONATE = "cred-type/imp"
42
+
43
+
44
# Versions
def python_and_auth_lib_version():
    """Return the runtime/library version section, e.g. "gl-python/3.7 auth/1.1"."""
    runtime_version = platform.python_version()
    library_version = version.__version__
    return "gl-python/{} auth/{}".format(runtime_version, library_version)
47
+
48
+
49
+ # Token request metric header values
50
+
51
# x-goog-api-client header value for access token request via metadata server.
# Example: "gl-python/3.7 auth/1.1 auth-request-type/at cred-type/mds"
def token_request_access_token_mds():
    return " ".join(
        (python_and_auth_lib_version(), REQUEST_TYPE_ACCESS_TOKEN, CRED_TYPE_SA_MDS)
    )
57
+
58
+
59
# x-goog-api-client header value for ID token request via metadata server.
# Example: "gl-python/3.7 auth/1.1 auth-request-type/it cred-type/mds"
def token_request_id_token_mds():
    return " ".join(
        (python_and_auth_lib_version(), REQUEST_TYPE_ID_TOKEN, CRED_TYPE_SA_MDS)
    )
65
+
66
+
67
# x-goog-api-client header value for impersonated credentials access token request.
# Example: "gl-python/3.7 auth/1.1 auth-request-type/at cred-type/imp"
def token_request_access_token_impersonate():
    return " ".join(
        (
            python_and_auth_lib_version(),
            REQUEST_TYPE_ACCESS_TOKEN,
            CRED_TYPE_SA_IMPERSONATE,
        )
    )
75
+
76
+
77
# x-goog-api-client header value for impersonated credentials ID token request.
# Example: "gl-python/3.7 auth/1.1 auth-request-type/it cred-type/imp"
def token_request_id_token_impersonate():
    return " ".join(
        (python_and_auth_lib_version(), REQUEST_TYPE_ID_TOKEN, CRED_TYPE_SA_IMPERSONATE)
    )
83
+
84
+
85
# x-goog-api-client header value for service account credentials access token
# request (assertion flow).
# Example: "gl-python/3.7 auth/1.1 auth-request-type/at cred-type/sa"
def token_request_access_token_sa_assertion():
    return " ".join(
        (
            python_and_auth_lib_version(),
            REQUEST_TYPE_ACCESS_TOKEN,
            CRED_TYPE_SA_ASSERTION,
        )
    )
92
+
93
+
94
# x-goog-api-client header value for service account credentials ID token
# request (assertion flow).
# Example: "gl-python/3.7 auth/1.1 auth-request-type/it cred-type/sa"
def token_request_id_token_sa_assertion():
    return " ".join(
        (python_and_auth_lib_version(), REQUEST_TYPE_ID_TOKEN, CRED_TYPE_SA_ASSERTION)
    )
101
+
102
+
103
# x-goog-api-client header value for user credentials token request.
# Example: "gl-python/3.7 auth/1.1 cred-type/u"
def token_request_user():
    return " ".join((python_and_auth_lib_version(), CRED_TYPE_USER))
107
+
108
+
109
# Miscellaneous metrics
110
+
111
# x-goog-api-client header value for metadata server ping.
# Example: "gl-python/3.7 auth/1.1 auth-request-type/mds"
def mds_ping():
    return " ".join((python_and_auth_lib_version(), REQUEST_TYPE_MDS_PING))
115
+
116
+
117
# x-goog-api-client header value for reauth start endpoint calls.
# Example: "gl-python/3.7 auth/1.1 auth-request-type/re-start"
def reauth_start():
    return " ".join((python_and_auth_lib_version(), REQUEST_TYPE_REAUTH_START))
121
+
122
+
123
# x-goog-api-client header value for reauth continue endpoint calls.
# Example: "gl-python/3.7 auth/1.1 auth-request-type/re-cont"
def reauth_continue():
    return "{} {}".format(python_and_auth_lib_version(), REQUEST_TYPE_REAUTH_CONTINUE)
127
+
128
+
129
# x-goog-api-client header value for BYOID calls to the Security Token Service
# exchange token endpoint. Each metrics option is appended as "key/value".
# Example: "gl-python/3.7 auth/1.1 google-byoid-sdk source/aws sa-impersonation/true"
def byoid_metrics_header(metrics_options):
    sections = [python_and_auth_lib_version(), BYOID_HEADER_SECTION]
    for option_name, option_value in metrics_options.items():
        sections.append("{}/{}".format(option_name, option_value))
    return " ".join(sections)
136
+
137
+
138
def add_metric_header(headers, metric_header_value):
    """Add x-goog-api-client header with the given value.

    Args:
        headers (Mapping[str, str]): The headers to which we will add the
            metric header.
        metric_header_value (Optional[str]): If value is None, do nothing;
            if headers already has a x-goog-api-client header, append the value
            to the existing header; otherwise add a new x-goog-api-client
            header with the given value.
    """
    # Nothing to report.
    if not metric_header_value:
        return
    if API_CLIENT_HEADER in headers:
        # Append to the existing metric header, space-separated.
        headers[API_CLIENT_HEADER] = " ".join(
            (headers[API_CLIENT_HEADER], metric_header_value)
        )
    else:
        headers[API_CLIENT_HEADER] = metric_header_value
.venv/lib/python3.11/site-packages/google/auth/pluggable.py ADDED
@@ -0,0 +1,429 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2022 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Pluggable Credentials.
16
+ Pluggable Credentials are initialized using external_account arguments which
17
+ are typically loaded from third-party executables. Unlike other
18
+ credentials that can be initialized with a list of explicit arguments, secrets
19
+ or credentials, external account clients use the environment and hints/guidelines
20
+ provided by the external_account JSON file to retrieve credentials and exchange
21
+ them for Google access tokens.
22
+
23
+ Example credential_source for pluggable credential:
24
+ {
25
+ "executable": {
26
+ "command": "/path/to/get/credentials.sh --arg1=value1 --arg2=value2",
27
+ "timeout_millis": 5000,
28
+ "output_file": "/path/to/generated/cached/credentials"
29
+ }
30
+ }
31
+ """
32
+
33
+ try:
34
+ from collections.abc import Mapping
35
+ # Python 2.7 compatibility
36
+ except ImportError: # pragma: NO COVER
37
+ from collections import Mapping # type: ignore
38
+ import json
39
+ import os
40
+ import subprocess
41
+ import sys
42
+ import time
43
+
44
+ from google.auth import _helpers
45
+ from google.auth import exceptions
46
+ from google.auth import external_account
47
+
48
# The max supported executable spec version.
EXECUTABLE_SUPPORTED_MAX_VERSION = 1

# Bounds for the non-interactive executable timeout, in milliseconds.
EXECUTABLE_TIMEOUT_MILLIS_DEFAULT = 30 * 1000  # 30 seconds
EXECUTABLE_TIMEOUT_MILLIS_LOWER_BOUND = 5 * 1000  # 5 seconds
EXECUTABLE_TIMEOUT_MILLIS_UPPER_BOUND = 120 * 1000  # 2 minutes

# Bounds for the interactive-mode timeout (longer, since a human responds).
EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_LOWER_BOUND = 30 * 1000  # 30 seconds
EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_UPPER_BOUND = 30 * 60 * 1000  # 30 minutes
57
+
58
+
59
+ class Credentials(external_account.Credentials):
60
+ """External account credentials sourced from executables."""
61
+
62
    def __init__(
        self,
        audience,
        subject_token_type,
        token_url,
        credential_source,
        *args,
        **kwargs
    ):
        """Instantiates an external account credentials object from a executables.

        Args:
            audience (str): The STS audience field.
            subject_token_type (str): The subject token type.
            token_url (str): The STS endpoint URL.
            credential_source (Mapping): The credential source dictionary used to
                provide instructions on how to retrieve external credential to be
                exchanged for Google access tokens.

                Example credential_source for pluggable credential:

                    {
                        "executable": {
                            "command": "/path/to/get/credentials.sh --arg1=value1 --arg2=value2",
                            "timeout_millis": 5000,
                            "output_file": "/path/to/generated/cached/credentials"
                        }
                    }
            args (List): Optional positional arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.
            kwargs (Mapping): Optional keyword arguments passed into the underlying :meth:`~external_account.Credentials.__init__` method.

        Raises:
            google.auth.exceptions.RefreshError: If an error is encountered during
                access token retrieval logic.
            google.auth.exceptions.InvalidValue: For invalid parameters.
            google.auth.exceptions.MalformedError: For invalid parameters.

        .. note:: Typically one of the helper constructors
            :meth:`from_file` or
            :meth:`from_info` are used instead of calling the constructor directly.
        """

        # "interactive" is popped before delegating so the parent __init__
        # never sees the unknown keyword.
        self.interactive = kwargs.pop("interactive", False)
        super(Credentials, self).__init__(
            audience=audience,
            subject_token_type=subject_token_type,
            token_url=token_url,
            credential_source=credential_source,
            *args,
            **kwargs
        )
        if not isinstance(credential_source, Mapping):
            # Attribute is set before raising so partially-constructed
            # instances still have a defined value.
            self._credential_source_executable = None
            raise exceptions.MalformedError(
                "Missing credential_source. The credential_source is not a dict."
            )
        self._credential_source_executable = credential_source.get("executable")
        if not self._credential_source_executable:
            raise exceptions.MalformedError(
                "Missing credential_source. An 'executable' must be provided."
            )
        self._credential_source_executable_command = self._credential_source_executable.get(
            "command"
        )
        self._credential_source_executable_timeout_millis = self._credential_source_executable.get(
            "timeout_millis"
        )
        self._credential_source_executable_interactive_timeout_millis = self._credential_source_executable.get(
            "interactive_timeout_millis"
        )
        self._credential_source_executable_output_file = self._credential_source_executable.get(
            "output_file"
        )

        # Dummy value. This variable is only used via injection, not exposed to ctor
        self._tokeninfo_username = ""

        if not self._credential_source_executable_command:
            raise exceptions.MalformedError(
                "Missing command field. Executable command must be provided."
            )
        # Missing timeout falls back to the default; an explicit value must
        # be within the allowed bounds.
        if not self._credential_source_executable_timeout_millis:
            self._credential_source_executable_timeout_millis = (
                EXECUTABLE_TIMEOUT_MILLIS_DEFAULT
            )
        elif (
            self._credential_source_executable_timeout_millis
            < EXECUTABLE_TIMEOUT_MILLIS_LOWER_BOUND
            or self._credential_source_executable_timeout_millis
            > EXECUTABLE_TIMEOUT_MILLIS_UPPER_BOUND
        ):
            raise exceptions.InvalidValue("Timeout must be between 5 and 120 seconds.")

        # The interactive timeout has no default; it is only validated when
        # supplied.
        if self._credential_source_executable_interactive_timeout_millis:
            if (
                self._credential_source_executable_interactive_timeout_millis
                < EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_LOWER_BOUND
                or self._credential_source_executable_interactive_timeout_millis
                > EXECUTABLE_INTERACTIVE_TIMEOUT_MILLIS_UPPER_BOUND
            ):
                raise exceptions.InvalidValue(
                    "Interactive timeout must be between 30 seconds and 30 minutes."
                )
165
+
166
+ @_helpers.copy_docstring(external_account.Credentials)
167
+ def retrieve_subject_token(self, request):
168
+ self._validate_running_mode()
169
+
170
+ # Check output file.
171
+ if self._credential_source_executable_output_file is not None:
172
+ try:
173
+ with open(
174
+ self._credential_source_executable_output_file, encoding="utf-8"
175
+ ) as output_file:
176
+ response = json.load(output_file)
177
+ except Exception:
178
+ pass
179
+ else:
180
+ try:
181
+ # If the cached response is expired, _parse_subject_token will raise an error which will be ignored and we will call the executable again.
182
+ subject_token = self._parse_subject_token(response)
183
+ if (
184
+ "expiration_time" not in response
185
+ ): # Always treat missing expiration_time as expired and proceed to executable run.
186
+ raise exceptions.RefreshError
187
+ except (exceptions.MalformedError, exceptions.InvalidValue):
188
+ raise
189
+ except exceptions.RefreshError:
190
+ pass
191
+ else:
192
+ return subject_token
193
+
194
+ if not _helpers.is_python_3():
195
+ raise exceptions.RefreshError(
196
+ "Pluggable auth is only supported for python 3.7+"
197
+ )
198
+
199
+ # Inject env vars.
200
+ env = os.environ.copy()
201
+ self._inject_env_variables(env)
202
+ env["GOOGLE_EXTERNAL_ACCOUNT_REVOKE"] = "0"
203
+
204
+ # Run executable.
205
+ exe_timeout = (
206
+ self._credential_source_executable_interactive_timeout_millis / 1000
207
+ if self.interactive
208
+ else self._credential_source_executable_timeout_millis / 1000
209
+ )
210
+ exe_stdin = sys.stdin if self.interactive else None
211
+ exe_stdout = sys.stdout if self.interactive else subprocess.PIPE
212
+ exe_stderr = sys.stdout if self.interactive else subprocess.STDOUT
213
+
214
+ result = subprocess.run(
215
+ self._credential_source_executable_command.split(),
216
+ timeout=exe_timeout,
217
+ stdin=exe_stdin,
218
+ stdout=exe_stdout,
219
+ stderr=exe_stderr,
220
+ env=env,
221
+ )
222
+ if result.returncode != 0:
223
+ raise exceptions.RefreshError(
224
+ "Executable exited with non-zero return code {}. Error: {}".format(
225
+ result.returncode, result.stdout
226
+ )
227
+ )
228
+
229
+ # Handle executable output.
230
+ response = json.loads(result.stdout.decode("utf-8")) if result.stdout else None
231
+ if not response and self._credential_source_executable_output_file is not None:
232
+ response = json.load(
233
+ open(self._credential_source_executable_output_file, encoding="utf-8")
234
+ )
235
+
236
+ subject_token = self._parse_subject_token(response)
237
+ return subject_token
238
+
239
    def revoke(self, request):
        """Revokes the subject token using the credential_source object.

        Args:
            request (google.auth.transport.Request): A callable used to make
                HTTP requests.
        Raises:
            google.auth.exceptions.RefreshError: If the executable revocation
                not properly executed.

        """
        if not self.interactive:
            raise exceptions.InvalidValue(
                "Revoke is only enabled under interactive mode."
            )
        self._validate_running_mode()

        if not _helpers.is_python_3():
            raise exceptions.RefreshError(
                "Pluggable auth is only supported for python 3.7+"
            )

        # Inject variables
        env = os.environ.copy()
        self._inject_env_variables(env)
        # Signal the executable that this call is a revocation, not a token
        # retrieval.
        env["GOOGLE_EXTERNAL_ACCOUNT_REVOKE"] = "1"

        # Run executable
        # NOTE(review): this always divides the interactive timeout, which may
        # be None when "interactive_timeout_millis" was absent from the
        # credential source -- confirm callers guarantee it is set in
        # interactive mode.
        result = subprocess.run(
            self._credential_source_executable_command.split(),
            timeout=self._credential_source_executable_interactive_timeout_millis
            / 1000,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            env=env,
        )

        if result.returncode != 0:
            raise exceptions.RefreshError(
                "Auth revoke failed on executable. Exit with non-zero return code {}. Error: {}".format(
                    result.returncode, result.stdout
                )
            )

        response = json.loads(result.stdout.decode("utf-8"))
        self._validate_revoke_response(response)
285
+
286
+ @property
287
+ def external_account_id(self):
288
+ """Returns the external account identifier.
289
+
290
+ When service account impersonation is used the identifier is the service
291
+ account email.
292
+
293
+ Without service account impersonation, this returns None, unless it is
294
+ being used by the Google Cloud CLI which populates this field.
295
+ """
296
+
297
+ return self.service_account_email or self._tokeninfo_username
298
+
299
+ @classmethod
300
+ def from_info(cls, info, **kwargs):
301
+ """Creates a Pluggable Credentials instance from parsed external account info.
302
+
303
+ Args:
304
+ info (Mapping[str, str]): The Pluggable external account info in Google
305
+ format.
306
+ kwargs: Additional arguments to pass to the constructor.
307
+
308
+ Returns:
309
+ google.auth.pluggable.Credentials: The constructed
310
+ credentials.
311
+
312
+ Raises:
313
+ google.auth.exceptions.InvalidValue: For invalid parameters.
314
+ google.auth.exceptions.MalformedError: For invalid parameters.
315
+ """
316
+ return super(Credentials, cls).from_info(info, **kwargs)
317
+
318
+ @classmethod
319
+ def from_file(cls, filename, **kwargs):
320
+ """Creates an Pluggable Credentials instance from an external account json file.
321
+
322
+ Args:
323
+ filename (str): The path to the Pluggable external account json file.
324
+ kwargs: Additional arguments to pass to the constructor.
325
+
326
+ Returns:
327
+ google.auth.pluggable.Credentials: The constructed
328
+ credentials.
329
+ """
330
+ return super(Credentials, cls).from_file(filename, **kwargs)
331
+
332
+ def _inject_env_variables(self, env):
333
+ env["GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE"] = self._audience
334
+ env["GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE"] = self._subject_token_type
335
+ env["GOOGLE_EXTERNAL_ACCOUNT_ID"] = self.external_account_id
336
+ env["GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE"] = "1" if self.interactive else "0"
337
+
338
+ if self._service_account_impersonation_url is not None:
339
+ env[
340
+ "GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL"
341
+ ] = self.service_account_email
342
+ if self._credential_source_executable_output_file is not None:
343
+ env[
344
+ "GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE"
345
+ ] = self._credential_source_executable_output_file
346
+
347
+ def _parse_subject_token(self, response):
348
+ self._validate_response_schema(response)
349
+ if not response["success"]:
350
+ if "code" not in response or "message" not in response:
351
+ raise exceptions.MalformedError(
352
+ "Error code and message fields are required in the response."
353
+ )
354
+ raise exceptions.RefreshError(
355
+ "Executable returned unsuccessful response: code: {}, message: {}.".format(
356
+ response["code"], response["message"]
357
+ )
358
+ )
359
+ if "expiration_time" in response and response["expiration_time"] < time.time():
360
+ raise exceptions.RefreshError(
361
+ "The token returned by the executable is expired."
362
+ )
363
+ if "token_type" not in response:
364
+ raise exceptions.MalformedError(
365
+ "The executable response is missing the token_type field."
366
+ )
367
+ if (
368
+ response["token_type"] == "urn:ietf:params:oauth:token-type:jwt"
369
+ or response["token_type"] == "urn:ietf:params:oauth:token-type:id_token"
370
+ ): # OIDC
371
+ return response["id_token"]
372
+ elif response["token_type"] == "urn:ietf:params:oauth:token-type:saml2": # SAML
373
+ return response["saml_response"]
374
+ else:
375
+ raise exceptions.RefreshError("Executable returned unsupported token type.")
376
+
377
+ def _validate_revoke_response(self, response):
378
+ self._validate_response_schema(response)
379
+ if not response["success"]:
380
+ raise exceptions.RefreshError("Revoke failed with unsuccessful response.")
381
+
382
+ def _validate_response_schema(self, response):
383
+ if "version" not in response:
384
+ raise exceptions.MalformedError(
385
+ "The executable response is missing the version field."
386
+ )
387
+ if response["version"] > EXECUTABLE_SUPPORTED_MAX_VERSION:
388
+ raise exceptions.RefreshError(
389
+ "Executable returned unsupported version {}.".format(
390
+ response["version"]
391
+ )
392
+ )
393
+
394
+ if "success" not in response:
395
+ raise exceptions.MalformedError(
396
+ "The executable response is missing the success field."
397
+ )
398
+
399
+ def _validate_running_mode(self):
400
+ env_allow_executables = os.environ.get(
401
+ "GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES"
402
+ )
403
+ if env_allow_executables != "1":
404
+ raise exceptions.MalformedError(
405
+ "Executables need to be explicitly allowed (set GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES to '1') to run."
406
+ )
407
+
408
+ if self.interactive and not self._credential_source_executable_output_file:
409
+ raise exceptions.MalformedError(
410
+ "An output_file must be specified in the credential configuration for interactive mode."
411
+ )
412
+
413
+ if (
414
+ self.interactive
415
+ and not self._credential_source_executable_interactive_timeout_millis
416
+ ):
417
+ raise exceptions.InvalidOperation(
418
+ "Interactive mode cannot run without an interactive timeout."
419
+ )
420
+
421
+ if self.interactive and not self.is_workforce_pool:
422
+ raise exceptions.InvalidValue(
423
+ "Interactive mode is only enabled for workforce pool."
424
+ )
425
+
426
+ def _create_default_metrics_options(self):
427
+ metrics_options = super(Credentials, self)._create_default_metrics_options()
428
+ metrics_options["source"] = "executable"
429
+ return metrics_options
.venv/lib/python3.11/site-packages/google/auth/py.typed ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ # Marker file for PEP 561.
2
+ # The google-auth package uses inline types.
.venv/lib/python3.11/site-packages/google/auth/version.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Single source of truth for the google-auth package version string.
__version__ = "2.38.0"
.venv/lib/python3.11/site-packages/h11/__init__.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230),
2
+ # containing no networking code at all, loosely modelled on hyper-h2's generic
3
+ # implementation of HTTP/2 (and in particular the h2.connection.H2Connection
4
+ # class). There's still a bunch of subtle details you need to get right if you
5
+ # want to make this actually useful, because it doesn't implement all the
6
+ # semantics to check that what you're asking to write to the wire is sensible,
7
+ # but at least it gets you out of dealing with the wire itself.
8
+
9
+ from h11._connection import Connection, NEED_DATA, PAUSED
10
+ from h11._events import (
11
+ ConnectionClosed,
12
+ Data,
13
+ EndOfMessage,
14
+ Event,
15
+ InformationalResponse,
16
+ Request,
17
+ Response,
18
+ )
19
+ from h11._state import (
20
+ CLIENT,
21
+ CLOSED,
22
+ DONE,
23
+ ERROR,
24
+ IDLE,
25
+ MIGHT_SWITCH_PROTOCOL,
26
+ MUST_CLOSE,
27
+ SEND_BODY,
28
+ SEND_RESPONSE,
29
+ SERVER,
30
+ SWITCHED_PROTOCOL,
31
+ )
32
+ from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError
33
+ from h11._version import __version__
34
+
35
# Identifier advertised in e.g. User-Agent/Server strings, tied to the
# installed h11 version.
PRODUCT_ID = "python-h11/" + __version__


# Everything listed here is re-exported as part of the stable public h11 API.
__all__ = (
    "Connection",
    "NEED_DATA",
    "PAUSED",
    "ConnectionClosed",
    "Data",
    "EndOfMessage",
    "Event",
    "InformationalResponse",
    "Request",
    "Response",
    "CLIENT",
    "CLOSED",
    "DONE",
    "ERROR",
    "IDLE",
    "MUST_CLOSE",
    "SEND_BODY",
    "SEND_RESPONSE",
    "SERVER",
    "SWITCHED_PROTOCOL",
    "ProtocolError",
    "LocalProtocolError",
    "RemoteProtocolError",
)
.venv/lib/python3.11/site-packages/h11/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (1.31 kB). View file
 
.venv/lib/python3.11/site-packages/h11/__pycache__/_abnf.cpython-311.pyc ADDED
Binary file (1.81 kB). View file
 
.venv/lib/python3.11/site-packages/h11/__pycache__/_connection.cpython-311.pyc ADDED
Binary file (24.3 kB). View file
 
.venv/lib/python3.11/site-packages/h11/__pycache__/_events.cpython-311.pyc ADDED
Binary file (15.2 kB). View file
 
.venv/lib/python3.11/site-packages/h11/__pycache__/_headers.cpython-311.pyc ADDED
Binary file (9.11 kB). View file
 
.venv/lib/python3.11/site-packages/h11/__pycache__/_readers.cpython-311.pyc ADDED
Binary file (10.8 kB). View file
 
.venv/lib/python3.11/site-packages/h11/__pycache__/_receivebuffer.cpython-311.pyc ADDED
Binary file (5.14 kB). View file
 
.venv/lib/python3.11/site-packages/h11/__pycache__/_state.cpython-311.pyc ADDED
Binary file (9.78 kB). View file
 
.venv/lib/python3.11/site-packages/h11/__pycache__/_util.cpython-311.pyc ADDED
Binary file (5.36 kB). View file
 
.venv/lib/python3.11/site-packages/h11/__pycache__/_version.cpython-311.pyc ADDED
Binary file (199 Bytes). View file
 
.venv/lib/python3.11/site-packages/h11/__pycache__/_writers.cpython-311.pyc ADDED
Binary file (7.3 kB). View file
 
.venv/lib/python3.11/site-packages/h11/_abnf.py ADDED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Regex fragments transcribed from the HTTP/1.1 ABNF (RFC 7230 / RFC 5234).

These are assembled via str.format(**globals()) into the composite patterns
(header_field, request_line, status_line, chunk_header) that the h11 readers
compile.
"""
# We use native strings for all the re patterns, to take advantage of string
# formatting, and then convert to bytestrings when compiling the final re
# objects.

# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace
#  OWS            = *( SP / HTAB )
#                 ; optional whitespace
OWS = r"[ \t]*"

# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators
#   token          = 1*tchar
#
#   tchar          = "!" / "#" / "$" / "%" / "&" / "'" / "*"
#                  / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
#                  / DIGIT / ALPHA
#                  ; any VCHAR, except delimiters
token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+"

# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields
#  field-name     = token
field_name = token

# The standard says:
#
#  field-value    = *( field-content / obs-fold )
#  field-content  = field-vchar [ 1*( SP / HTAB ) field-vchar ]
#  field-vchar    = VCHAR / obs-text
#  obs-fold       = CRLF 1*( SP / HTAB )
#                 ; obsolete line folding
#                 ; see Section 3.2.4
#
# https://tools.ietf.org/html/rfc5234#appendix-B.1
#
#   VCHAR          =  %x21-7E
#                  ; visible (printing) characters
#
# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string
#   obs-text       = %x80-FF
#
# However, the standard definition of field-content is WRONG! It disallows
# fields containing a single visible character surrounded by whitespace,
# e.g. "foo  a  bar".
#
# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189
#
# So our definition of field_content attempts to fix it up...
#
# Also, we allow lots of control characters, because apparently people assume
# that they're legal in practice (e.g., google analytics makes cookies with
# \x01 in them!):
#   https://github.com/python-hyper/h11/issues/57
# We still don't allow NUL or whitespace, because those are often treated as
# meta-characters and letting them through can lead to nasty issues like SSRF.
vchar = r"[\x21-\x7e]"
vchar_or_obs_text = r"[^\x00\s]"
field_vchar = vchar_or_obs_text
field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals())

# We handle obs-fold at a different level, and our fixed-up field_content
# already grows to swallow the whole value, so ? instead of *
field_value = r"({field_content})?".format(**globals())

#  header-field   = field-name ":" OWS field-value OWS
header_field = (
    r"(?P<field_name>{field_name})"
    r":"
    r"{OWS}"
    r"(?P<field_value>{field_value})"
    r"{OWS}".format(**globals())
)

# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line
#
#   request-line   = method SP request-target SP HTTP-version CRLF
#   method         = token
#   HTTP-version   = HTTP-name "/" DIGIT "." DIGIT
#   HTTP-name      = %x48.54.54.50 ; "HTTP", case-sensitive
#
# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full
# URL, host+port (for connect), or even "*", but in any case we are guaranteed
# that it contists of the visible printing characters.
method = token
request_target = r"{vchar}+".format(**globals())
http_version = r"HTTP/(?P<http_version>[0-9]\.[0-9])"
request_line = (
    r"(?P<method>{method})"
    r" "
    r"(?P<target>{request_target})"
    r" "
    r"{http_version}".format(**globals())
)

# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line
#
#   status-line = HTTP-version SP status-code SP reason-phrase CRLF
#   status-code    = 3DIGIT
#   reason-phrase  = *( HTAB / SP / VCHAR / obs-text )
status_code = r"[0-9]{3}"
reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals())
status_line = (
    r"{http_version}"
    r" "
    r"(?P<status_code>{status_code})"
    # However, there are apparently a few too many servers out there that just
    # leave out the reason phrase:
    #   https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036
    #   https://github.com/seanmonstar/httparse/issues/29
    # so make it optional. ?: is a non-capturing group.
    r"(?: (?P<reason>{reason_phrase}))?".format(**globals())
)

HEXDIG = r"[0-9A-Fa-f]"
# Actually
#
#      chunk-size     = 1*HEXDIG
#
# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20
chunk_size = r"({HEXDIG}){{1,20}}".format(**globals())
# Actually
#
#     chunk-ext      = *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
#
# but we aren't parsing the things so we don't really care.
chunk_ext = r";.*"
chunk_header = (
    r"(?P<chunk_size>{chunk_size})"
    r"(?P<chunk_ext>{chunk_ext})?"
    r"{OWS}\r\n".format(
        **globals()
    )  # Even though the specification does not allow for extra whitespaces,
    # we are lenient with trailing whitespaces because some servers on the wild use it.
)
.venv/lib/python3.11/site-packages/h11/_connection.py ADDED
@@ -0,0 +1,633 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This contains the main Connection class. Everything in h11 revolves around
2
+ # this.
3
+ from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Type, Union
4
+
5
+ from ._events import (
6
+ ConnectionClosed,
7
+ Data,
8
+ EndOfMessage,
9
+ Event,
10
+ InformationalResponse,
11
+ Request,
12
+ Response,
13
+ )
14
+ from ._headers import get_comma_header, has_expect_100_continue, set_comma_header
15
+ from ._readers import READERS, ReadersType
16
+ from ._receivebuffer import ReceiveBuffer
17
+ from ._state import (
18
+ _SWITCH_CONNECT,
19
+ _SWITCH_UPGRADE,
20
+ CLIENT,
21
+ ConnectionState,
22
+ DONE,
23
+ ERROR,
24
+ MIGHT_SWITCH_PROTOCOL,
25
+ SEND_BODY,
26
+ SERVER,
27
+ SWITCHED_PROTOCOL,
28
+ )
29
+ from ._util import ( # Import the internal things we need
30
+ LocalProtocolError,
31
+ RemoteProtocolError,
32
+ Sentinel,
33
+ )
34
+ from ._writers import WRITERS, WritersType
35
+
36
+ # Everything in __all__ gets re-exported as part of the h11 public API.
37
+ __all__ = ["Connection", "NEED_DATA", "PAUSED"]
38
+
39
+
40
class NEED_DATA(Sentinel, metaclass=Sentinel):
    # Sentinel returned by Connection.next_event() when more bytes must be
    # fed to receive_data() before another event can be parsed.
    pass


class PAUSED(Sentinel, metaclass=Sentinel):
    # Sentinel returned by Connection.next_event() when the connection is in
    # a state where incoming data cannot be processed right now.
    pass


# If we ever have this much buffered without it making a complete parseable
# event, we error out. The only time we really buffer is when reading the
# request/response line + headers together, so this is effectively the limit on
# the size of that.
#
# Some precedents for defaults:
# - node.js: 80 * 1024
# - tomcat: 8 * 1024
# - IIS: 16 * 1024
# - Apache: <8 KiB per line>
DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024
59
+
60
+ # RFC 7230's rules for connection lifecycles:
61
+ # - If either side says they want to close the connection, then the connection
62
+ # must close.
63
+ # - HTTP/1.1 defaults to keep-alive unless someone says Connection: close
64
+ # - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive
65
+ # (and even this is a mess -- e.g. if you're implementing a proxy then
66
+ # sending Connection: keep-alive is forbidden).
67
+ #
68
+ # We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So
69
+ # our rule is:
70
+ # - If someone says Connection: close, we will close
71
+ # - If someone uses HTTP/1.0, we will close.
72
def _keep_alive(event: Union[Request, Response]) -> bool:
    """Return True if this request/response allows the connection to be reused.

    Per the simplified policy described above: any "close" token in the
    Connection header disables reuse, and so does an HTTP version below 1.1
    (requests with no http_version attribute are treated as 1.1).
    """
    if b"close" in get_comma_header(event.headers, b"connection"):
        return False
    return getattr(event, "http_version", b"1.1") >= b"1.1"
79
+
80
+
81
def _body_framing(
    request_method: bytes, event: Union[Request, Response]
) -> Tuple[str, Union[Tuple[()], Tuple[int]]]:
    """Decide how the body is framed when entering SEND_BODY.

    Returns a (lookup key, args) pair used to construct a body reader/writer:
    ("content-length", (count,)), ("chunked", ()) or ("http/1.0", ()).
    Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3
    """
    # Only these two event types can move a role into SEND_BODY.
    assert type(event) in (Request, Response)

    if type(event) is Response:
        # Step 1: certain responses never carry a body regardless of headers:
        # 204/304, any response to a HEAD, and a 2xx response to CONNECT.
        always_empty = (
            event.status_code in (204, 304)
            or request_method == b"HEAD"
            or (request_method == b"CONNECT" and 200 <= event.status_code < 300)
        )
        if always_empty:
            return ("content-length", (0,))
        # Section 3.3.3 also covers status codes < 200, but those are
        # InformationalResponses for us and never reach this function.
        assert event.status_code >= 200

    # Step 2: Transfer-Encoding wins over Content-Length.
    transfer_encodings = get_comma_header(event.headers, b"transfer-encoding")
    if transfer_encodings:
        assert transfer_encodings == [b"chunked"]
        return ("chunked", ())

    # Step 3: explicit Content-Length.
    content_lengths = get_comma_header(event.headers, b"content-length")
    if content_lengths:
        return ("content-length", (int(content_lengths[0]),))

    # Step 4: no framing headers. Requests default to an empty body;
    # responses fall back to read-until-EOF HTTP/1.0 framing.
    if type(event) is Request:
        return ("content-length", (0,))
    return ("http/1.0", ())
130
+
131
+
132
+ ################################################################
133
+ #
134
+ # The main Connection class
135
+ #
136
+ ################################################################
137
+
138
+
139
+ class Connection:
140
+ """An object encapsulating the state of an HTTP connection.
141
+
142
+ Args:
143
+ our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If
144
+ you're implementing a server, pass :data:`h11.SERVER`.
145
+
146
+ max_incomplete_event_size (int):
147
+ The maximum number of bytes we're willing to buffer of an
148
+ incomplete event. In practice this mostly sets a limit on the
149
+ maximum size of the request/response line + headers. If this is
150
+ exceeded, then :meth:`next_event` will raise
151
+ :exc:`RemoteProtocolError`.
152
+
153
+ """
154
+
155
    def __init__(
        self,
        our_role: Type[Sentinel],
        max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE,
    ) -> None:
        self._max_incomplete_event_size = max_incomplete_event_size
        # State and role tracking
        if our_role not in (CLIENT, SERVER):
            raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role))
        self.our_role = our_role
        # The peer necessarily plays whichever role we do not.
        self.their_role: Type[Sentinel]
        if our_role is CLIENT:
            self.their_role = SERVER
        else:
            self.their_role = CLIENT
        self._cstate = ConnectionState()

        # Callables for converting data->events or vice-versa given the
        # current state
        self._writer = self._get_io_object(self.our_role, None, WRITERS)
        self._reader = self._get_io_object(self.their_role, None, READERS)

        # Holds any unprocessed received data
        self._receive_buffer = ReceiveBuffer()
        # If this is true, then it indicates that the incoming connection was
        # closed *after* the end of whatever's in self._receive_buffer:
        self._receive_buffer_closed = False

        # Extra bits of state that don't fit into the state machine.
        #
        # These two are only used to interpret framing headers for figuring
        # out how to read/write response bodies. their_http_version is also
        # made available as a convenient public API.
        self.their_http_version: Optional[bytes] = None
        self._request_method: Optional[bytes] = None
        # This is pure flow-control and doesn't at all affect the set of legal
        # transitions, so no need to bother ConnectionState with it:
        self.client_is_waiting_for_100_continue = False
193
+
194
+ @property
195
+ def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]:
196
+ """A dictionary like::
197
+
198
+ {CLIENT: <client state>, SERVER: <server state>}
199
+
200
+ See :ref:`state-machine` for details.
201
+
202
+ """
203
+ return dict(self._cstate.states)
204
+
205
+ @property
206
+ def our_state(self) -> Type[Sentinel]:
207
+ """The current state of whichever role we are playing. See
208
+ :ref:`state-machine` for details.
209
+ """
210
+ return self._cstate.states[self.our_role]
211
+
212
+ @property
213
+ def their_state(self) -> Type[Sentinel]:
214
+ """The current state of whichever role we are NOT playing. See
215
+ :ref:`state-machine` for details.
216
+ """
217
+ return self._cstate.states[self.their_role]
218
+
219
+ @property
220
+ def they_are_waiting_for_100_continue(self) -> bool:
221
+ return self.their_role is CLIENT and self.client_is_waiting_for_100_continue
222
+
223
+ def start_next_cycle(self) -> None:
224
+ """Attempt to reset our connection state for a new request/response
225
+ cycle.
226
+
227
+ If both client and server are in :data:`DONE` state, then resets them
228
+ both to :data:`IDLE` state in preparation for a new request/response
229
+ cycle on this same connection. Otherwise, raises a
230
+ :exc:`LocalProtocolError`.
231
+
232
+ See :ref:`keepalive-and-pipelining`.
233
+
234
+ """
235
+ old_states = dict(self._cstate.states)
236
+ self._cstate.start_next_cycle()
237
+ self._request_method = None
238
+ # self.their_http_version gets left alone, since it presumably lasts
239
+ # beyond a single request/response cycle
240
+ assert not self.client_is_waiting_for_100_continue
241
+ self._respond_to_state_changes(old_states)
242
+
243
+ def _process_error(self, role: Type[Sentinel]) -> None:
244
+ old_states = dict(self._cstate.states)
245
+ self._cstate.process_error(role)
246
+ self._respond_to_state_changes(old_states)
247
+
248
+ def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]:
249
+ if type(event) is InformationalResponse and event.status_code == 101:
250
+ return _SWITCH_UPGRADE
251
+ if type(event) is Response:
252
+ if (
253
+ _SWITCH_CONNECT in self._cstate.pending_switch_proposals
254
+ and 200 <= event.status_code < 300
255
+ ):
256
+ return _SWITCH_CONNECT
257
+ return None
258
+
259
    # All events go through here
    def _process_event(self, role: Type[Sentinel], event: Event) -> None:
        """Run *event* through the state machine, then update derived state
        (request method, peer HTTP version, keep-alive, 100-continue) and
        refresh the reader/writer callables."""
        # First, pass the event through the state machine to make sure it
        # succeeds.
        old_states = dict(self._cstate.states)
        if role is CLIENT and type(event) is Request:
            if event.method == b"CONNECT":
                # CONNECT proposes switching the connection into tunnel mode.
                self._cstate.process_client_switch_proposal(_SWITCH_CONNECT)
            if get_comma_header(event.headers, b"upgrade"):
                self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE)
        server_switch_event = None
        if role is SERVER:
            server_switch_event = self._server_switch_event(event)
        self._cstate.process_event(role, type(event), server_switch_event)

        # Then perform the updates triggered by it.

        if type(event) is Request:
            self._request_method = event.method

        if role is self.their_role and type(event) in (
            Request,
            Response,
            InformationalResponse,
        ):
            event = cast(Union[Request, Response, InformationalResponse], event)
            self.their_http_version = event.http_version

        # Keep alive handling
        #
        # RFC 7230 doesn't really say what one should do if Connection: close
        # shows up on a 1xx InformationalResponse. I think the idea is that
        # this is not supposed to happen. In any case, if it does happen, we
        # ignore it.
        if type(event) in (Request, Response) and not _keep_alive(
            cast(Union[Request, Response], event)
        ):
            self._cstate.process_keep_alive_disabled()

        # 100-continue
        if type(event) is Request and has_expect_100_continue(event):
            self.client_is_waiting_for_100_continue = True
        if type(event) in (InformationalResponse, Response):
            self.client_is_waiting_for_100_continue = False
        if role is CLIENT and type(event) in (Data, EndOfMessage):
            self.client_is_waiting_for_100_continue = False

        self._respond_to_state_changes(old_states, event)
307
+
308
    def _get_io_object(
        self,
        role: Type[Sentinel],
        event: Optional[Event],
        io_dict: Union[ReadersType, WritersType],
    ) -> Optional[Callable[..., Any]]:
        """Look up the reader/writer callable for *role*'s current state,
        or None if the state has no associated reader/writer."""
        # event may be None; it's only used when entering SEND_BODY
        state = self._cstate.states[role]
        if state is SEND_BODY:
            # Special case: the io_dict has a dict of reader/writer factories
            # that depend on the request/response framing.
            framing_type, args = _body_framing(
                cast(bytes, self._request_method), cast(Union[Request, Response], event)
            )
            return io_dict[SEND_BODY][framing_type](*args)  # type: ignore[index]
        else:
            # General case: the io_dict just has the appropriate reader/writer
            # for this state
            return io_dict.get((role, state))  # type: ignore[return-value]
327
+
328
+ # This must be called after any action that might have caused
329
+ # self._cstate.states to change.
330
+ def _respond_to_state_changes(
331
+ self,
332
+ old_states: Dict[Type[Sentinel], Type[Sentinel]],
333
+ event: Optional[Event] = None,
334
+ ) -> None:
335
+ # Update reader/writer
336
+ if self.our_state != old_states[self.our_role]:
337
+ self._writer = self._get_io_object(self.our_role, event, WRITERS)
338
+ if self.their_state != old_states[self.their_role]:
339
+ self._reader = self._get_io_object(self.their_role, event, READERS)
340
+
341
+ @property
342
+ def trailing_data(self) -> Tuple[bytes, bool]:
343
+ """Data that has been received, but not yet processed, represented as
344
+ a tuple with two elements, where the first is a byte-string containing
345
+ the unprocessed data itself, and the second is a bool that is True if
346
+ the receive connection was closed.
347
+
348
+ See :ref:`switching-protocols` for discussion of why you'd want this.
349
+ """
350
+ return (bytes(self._receive_buffer), self._receive_buffer_closed)
351
+
352
+ def receive_data(self, data: bytes) -> None:
353
+ """Add data to our internal receive buffer.
354
+
355
+ This does not actually do any processing on the data, just stores
356
+ it. To trigger processing, you have to call :meth:`next_event`.
357
+
358
+ Args:
359
+ data (:term:`bytes-like object`):
360
+ The new data that was just received.
361
+
362
+ Special case: If *data* is an empty byte-string like ``b""``,
363
+ then this indicates that the remote side has closed the
364
+ connection (end of file). Normally this is convenient, because
365
+ standard Python APIs like :meth:`file.read` or
366
+ :meth:`socket.recv` use ``b""`` to indicate end-of-file, while
367
+ other failures to read are indicated using other mechanisms
368
+ like raising :exc:`TimeoutError`. When using such an API you
369
+ can just blindly pass through whatever you get from ``read``
370
+ to :meth:`receive_data`, and everything will work.
371
+
372
+ But, if you have an API where reading an empty string is a
373
+ valid non-EOF condition, then you need to be aware of this and
374
+ make sure to check for such strings and avoid passing them to
375
+ :meth:`receive_data`.
376
+
377
+ Returns:
378
+ Nothing, but after calling this you should call :meth:`next_event`
379
+ to parse the newly received data.
380
+
381
+ Raises:
382
+ RuntimeError:
383
+ Raised if you pass an empty *data*, indicating EOF, and then
384
+ pass a non-empty *data*, indicating more data that somehow
385
+ arrived after the EOF.
386
+
387
+ (Calling ``receive_data(b"")`` multiple times is fine,
388
+ and equivalent to calling it once.)
389
+
390
+ """
391
+ if data:
392
+ if self._receive_buffer_closed:
393
+ raise RuntimeError("received close, then received more data?")
394
+ self._receive_buffer += data
395
+ else:
396
+ self._receive_buffer_closed = True
397
+
398
    def _extract_next_receive_event(
        self,
    ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]:
        """Try to parse one event out of the receive buffer, returning
        NEED_DATA or PAUSED when no event can be produced right now."""
        state = self.their_state
        # We don't pause immediately when they enter DONE, because even in
        # DONE state we can still process a ConnectionClosed() event. But
        # if we have data in our buffer, then we definitely aren't getting
        # a ConnectionClosed() immediately and we need to pause.
        if state is DONE and self._receive_buffer:
            return PAUSED
        if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL:
            return PAUSED
        assert self._reader is not None
        event = self._reader(self._receive_buffer)
        if event is None:
            if not self._receive_buffer and self._receive_buffer_closed:
                # In some unusual cases (basically just HTTP/1.0 bodies), EOF
                # triggers an actual protocol event; in that case, we want to
                # return that event, and then the state will change and we'll
                # get called again to generate the actual ConnectionClosed().
                if hasattr(self._reader, "read_eof"):
                    event = self._reader.read_eof()  # type: ignore[attr-defined]
                else:
                    event = ConnectionClosed()
        if event is None:
            event = NEED_DATA
        return event  # type: ignore[no-any-return]
425
+
426
+ def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]:
427
+ """Parse the next event out of our receive buffer, update our internal
428
+ state, and return it.
429
+
430
+ This is a mutating operation -- think of it like calling :func:`next`
431
+ on an iterator.
432
+
433
+ Returns:
434
+ : One of three things:
435
+
436
+ 1) An event object -- see :ref:`events`.
437
+
438
+ 2) The special constant :data:`NEED_DATA`, which indicates that
439
+ you need to read more data from your socket and pass it to
440
+ :meth:`receive_data` before this method will be able to return
441
+ any more events.
442
+
443
+ 3) The special constant :data:`PAUSED`, which indicates that we
444
+ are not in a state where we can process incoming data (usually
445
+ because the peer has finished their part of the current
446
+ request/response cycle, and you have not yet called
447
+ :meth:`start_next_cycle`). See :ref:`flow-control` for details.
448
+
449
+ Raises:
450
+ RemoteProtocolError:
451
+ The peer has misbehaved. You should close the connection
452
+ (possibly after sending some kind of 4xx response).
453
+
454
+ Once this method returns :class:`ConnectionClosed` once, then all
455
+ subsequent calls will also return :class:`ConnectionClosed`.
456
+
457
+ If this method raises any exception besides :exc:`RemoteProtocolError`
458
+ then that's a bug -- if it happens please file a bug report!
459
+
460
+ If this method raises any exception then it also sets
461
+ :attr:`Connection.their_state` to :data:`ERROR` -- see
462
+ :ref:`error-handling` for discussion.
463
+
464
+ """
465
+
466
+ if self.their_state is ERROR:
467
+ raise RemoteProtocolError("Can't receive data when peer state is ERROR")
468
+ try:
469
+ event = self._extract_next_receive_event()
470
+ if event not in [NEED_DATA, PAUSED]:
471
+ self._process_event(self.their_role, cast(Event, event))
472
+ if event is NEED_DATA:
473
+ if len(self._receive_buffer) > self._max_incomplete_event_size:
474
+ # 431 is "Request header fields too large" which is pretty
475
+ # much the only situation where we can get here
476
+ raise RemoteProtocolError(
477
+ "Receive buffer too long", error_status_hint=431
478
+ )
479
+ if self._receive_buffer_closed:
480
+ # We're still trying to complete some event, but that's
481
+ # never going to happen because no more data is coming
482
+ raise RemoteProtocolError("peer unexpectedly closed connection")
483
+ return event
484
+ except BaseException as exc:
485
+ self._process_error(self.their_role)
486
+ if isinstance(exc, LocalProtocolError):
487
+ exc._reraise_as_remote_protocol_error()
488
+ else:
489
+ raise
490
+
491
+ def send(self, event: Event) -> Optional[bytes]:
492
+ """Convert a high-level event into bytes that can be sent to the peer,
493
+ while updating our internal state machine.
494
+
495
+ Args:
496
+ event: The :ref:`event <events>` to send.
497
+
498
+ Returns:
499
+ If ``type(event) is ConnectionClosed``, then returns
500
+ ``None``. Otherwise, returns a :term:`bytes-like object`.
501
+
502
+ Raises:
503
+ LocalProtocolError:
504
+ Sending this event at this time would violate our
505
+ understanding of the HTTP/1.1 protocol.
506
+
507
+ If this method raises any exception then it also sets
508
+ :attr:`Connection.our_state` to :data:`ERROR` -- see
509
+ :ref:`error-handling` for discussion.
510
+
511
+ """
512
+ data_list = self.send_with_data_passthrough(event)
513
+ if data_list is None:
514
+ return None
515
+ else:
516
+ return b"".join(data_list)
517
+
518
+ def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]:
519
+ """Identical to :meth:`send`, except that in situations where
520
+ :meth:`send` returns a single :term:`bytes-like object`, this instead
521
+ returns a list of them -- and when sending a :class:`Data` event, this
522
+ list is guaranteed to contain the exact object you passed in as
523
+ :attr:`Data.data`. See :ref:`sendfile` for discussion.
524
+
525
+ """
526
+ if self.our_state is ERROR:
527
+ raise LocalProtocolError("Can't send data when our state is ERROR")
528
+ try:
529
+ if type(event) is Response:
530
+ event = self._clean_up_response_headers_for_sending(event)
531
+ # We want to call _process_event before calling the writer,
532
+ # because if someone tries to do something invalid then this will
533
+ # give a sensible error message, while our writers all just assume
534
+ # they will only receive valid events. But, _process_event might
535
+ # change self._writer. So we have to do a little dance:
536
+ writer = self._writer
537
+ self._process_event(self.our_role, event)
538
+ if type(event) is ConnectionClosed:
539
+ return None
540
+ else:
541
+ # In any situation where writer is None, process_event should
542
+ # have raised ProtocolError
543
+ assert writer is not None
544
+ data_list: List[bytes] = []
545
+ writer(event, data_list.append)
546
+ return data_list
547
+ except:
548
+ self._process_error(self.our_role)
549
+ raise
550
+
551
+ def send_failed(self) -> None:
552
+ """Notify the state machine that we failed to send the data it gave
553
+ us.
554
+
555
+ This causes :attr:`Connection.our_state` to immediately become
556
+ :data:`ERROR` -- see :ref:`error-handling` for discussion.
557
+
558
+ """
559
+ self._process_error(self.our_role)
560
+
561
+ # When sending a Response, we take responsibility for a few things:
562
+ #
563
+ # - Sometimes you MUST set Connection: close. We take care of those
564
+ # times. (You can also set it yourself if you want, and if you do then
565
+ # we'll respect that and close the connection at the right time. But you
566
+ # don't have to worry about that unless you want to.)
567
+ #
568
+ # - The user has to set Content-Length if they want it. Otherwise, for
569
+ # responses that have bodies (e.g. not HEAD), then we will automatically
570
+ # select the right mechanism for streaming a body of unknown length,
571
+ # which depends on depending on the peer's HTTP version.
572
+ #
573
+ # This function's *only* responsibility is making sure headers are set up
574
+ # right -- everything downstream just looks at the headers. There are no
575
+ # side channels.
576
+ def _clean_up_response_headers_for_sending(self, response: Response) -> Response:
577
+ assert type(response) is Response
578
+
579
+ headers = response.headers
580
+ need_close = False
581
+
582
+ # HEAD requests need some special handling: they always act like they
583
+ # have Content-Length: 0, and that's how _body_framing treats
584
+ # them. But their headers are supposed to match what we would send if
585
+ # the request was a GET. (Technically there is one deviation allowed:
586
+ # we're allowed to leave out the framing headers -- see
587
+ # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as
588
+ # easy to get them right.)
589
+ method_for_choosing_headers = cast(bytes, self._request_method)
590
+ if method_for_choosing_headers == b"HEAD":
591
+ method_for_choosing_headers = b"GET"
592
+ framing_type, _ = _body_framing(method_for_choosing_headers, response)
593
+ if framing_type in ("chunked", "http/1.0"):
594
+ # This response has a body of unknown length.
595
+ # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked
596
+ # If our peer is HTTP/1.0, we use no framing headers, and close the
597
+ # connection afterwards.
598
+ #
599
+ # Make sure to clear Content-Length (in principle user could have
600
+ # set both and then we ignored Content-Length b/c
601
+ # Transfer-Encoding overwrote it -- this would be naughty of them,
602
+ # but the HTTP spec says that if our peer does this then we have
603
+ # to fix it instead of erroring out, so we'll accord the user the
604
+ # same respect).
605
+ headers = set_comma_header(headers, b"content-length", [])
606
+ if self.their_http_version is None or self.their_http_version < b"1.1":
607
+ # Either we never got a valid request and are sending back an
608
+ # error (their_http_version is None), so we assume the worst;
609
+ # or else we did get a valid HTTP/1.0 request, so we know that
610
+ # they don't understand chunked encoding.
611
+ headers = set_comma_header(headers, b"transfer-encoding", [])
612
+ # This is actually redundant ATM, since currently we
613
+ # unconditionally disable keep-alive when talking to HTTP/1.0
614
+ # peers. But let's be defensive just in case we add
615
+ # Connection: keep-alive support later:
616
+ if self._request_method != b"HEAD":
617
+ need_close = True
618
+ else:
619
+ headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"])
620
+
621
+ if not self._cstate.keep_alive or need_close:
622
+ # Make sure Connection: close is set
623
+ connection = set(get_comma_header(headers, b"connection"))
624
+ connection.discard(b"keep-alive")
625
+ connection.add(b"close")
626
+ headers = set_comma_header(headers, b"connection", sorted(connection))
627
+
628
+ return Response(
629
+ headers=headers,
630
+ status_code=response.status_code,
631
+ http_version=response.http_version,
632
+ reason=response.reason,
633
+ )
.venv/lib/python3.11/site-packages/h11/_events.py ADDED
@@ -0,0 +1,369 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # High level events that make up HTTP/1.1 conversations. Loosely inspired by
2
+ # the corresponding events in hyper-h2:
3
+ #
4
+ # http://python-hyper.org/h2/en/stable/api.html#events
5
+ #
6
+ # Don't subclass these. Stuff will break.
7
+
8
+ import re
9
+ from abc import ABC
10
+ from dataclasses import dataclass, field
11
+ from typing import Any, cast, Dict, List, Tuple, Union
12
+
13
+ from ._abnf import method, request_target
14
+ from ._headers import Headers, normalize_and_validate
15
+ from ._util import bytesify, LocalProtocolError, validate
16
+
17
+ # Everything in __all__ gets re-exported as part of the h11 public API.
18
+ __all__ = [
19
+ "Event",
20
+ "Request",
21
+ "InformationalResponse",
22
+ "Response",
23
+ "Data",
24
+ "EndOfMessage",
25
+ "ConnectionClosed",
26
+ ]
27
+
28
+ method_re = re.compile(method.encode("ascii"))
29
+ request_target_re = re.compile(request_target.encode("ascii"))
30
+
31
+
32
+ class Event(ABC):
33
+ """
34
+ Base class for h11 events.
35
+ """
36
+
37
+ __slots__ = ()
38
+
39
+
40
+ @dataclass(init=False, frozen=True)
41
+ class Request(Event):
42
+ """The beginning of an HTTP request.
43
+
44
+ Fields:
45
+
46
+ .. attribute:: method
47
+
48
+ An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte
49
+ string. :term:`Bytes-like objects <bytes-like object>` and native
50
+ strings containing only ascii characters will be automatically
51
+ converted to byte strings.
52
+
53
+ .. attribute:: target
54
+
55
+ The target of an HTTP request, e.g. ``b"/index.html"``, or one of the
56
+ more exotic formats described in `RFC 7320, section 5.3
57
+ <https://tools.ietf.org/html/rfc7230#section-5.3>`_. Always a byte
58
+ string. :term:`Bytes-like objects <bytes-like object>` and native
59
+ strings containing only ascii characters will be automatically
60
+ converted to byte strings.
61
+
62
+ .. attribute:: headers
63
+
64
+ Request headers, represented as a list of (name, value) pairs. See
65
+ :ref:`the header normalization rules <headers-format>` for details.
66
+
67
+ .. attribute:: http_version
68
+
69
+ The HTTP protocol version, represented as a byte string like
70
+ ``b"1.1"``. See :ref:`the HTTP version normalization rules
71
+ <http_version-format>` for details.
72
+
73
+ """
74
+
75
+ __slots__ = ("method", "headers", "target", "http_version")
76
+
77
+ method: bytes
78
+ headers: Headers
79
+ target: bytes
80
+ http_version: bytes
81
+
82
+ def __init__(
83
+ self,
84
+ *,
85
+ method: Union[bytes, str],
86
+ headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]],
87
+ target: Union[bytes, str],
88
+ http_version: Union[bytes, str] = b"1.1",
89
+ _parsed: bool = False,
90
+ ) -> None:
91
+ super().__init__()
92
+ if isinstance(headers, Headers):
93
+ object.__setattr__(self, "headers", headers)
94
+ else:
95
+ object.__setattr__(
96
+ self, "headers", normalize_and_validate(headers, _parsed=_parsed)
97
+ )
98
+ if not _parsed:
99
+ object.__setattr__(self, "method", bytesify(method))
100
+ object.__setattr__(self, "target", bytesify(target))
101
+ object.__setattr__(self, "http_version", bytesify(http_version))
102
+ else:
103
+ object.__setattr__(self, "method", method)
104
+ object.__setattr__(self, "target", target)
105
+ object.__setattr__(self, "http_version", http_version)
106
+
107
+ # "A server MUST respond with a 400 (Bad Request) status code to any
108
+ # HTTP/1.1 request message that lacks a Host header field and to any
109
+ # request message that contains more than one Host header field or a
110
+ # Host header field with an invalid field-value."
111
+ # -- https://tools.ietf.org/html/rfc7230#section-5.4
112
+ host_count = 0
113
+ for name, value in self.headers:
114
+ if name == b"host":
115
+ host_count += 1
116
+ if self.http_version == b"1.1" and host_count == 0:
117
+ raise LocalProtocolError("Missing mandatory Host: header")
118
+ if host_count > 1:
119
+ raise LocalProtocolError("Found multiple Host: headers")
120
+
121
+ validate(method_re, self.method, "Illegal method characters")
122
+ validate(request_target_re, self.target, "Illegal target characters")
123
+
124
+ # This is an unhashable type.
125
+ __hash__ = None # type: ignore
126
+
127
+
128
+ @dataclass(init=False, frozen=True)
129
+ class _ResponseBase(Event):
130
+ __slots__ = ("headers", "http_version", "reason", "status_code")
131
+
132
+ headers: Headers
133
+ http_version: bytes
134
+ reason: bytes
135
+ status_code: int
136
+
137
+ def __init__(
138
+ self,
139
+ *,
140
+ headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]],
141
+ status_code: int,
142
+ http_version: Union[bytes, str] = b"1.1",
143
+ reason: Union[bytes, str] = b"",
144
+ _parsed: bool = False,
145
+ ) -> None:
146
+ super().__init__()
147
+ if isinstance(headers, Headers):
148
+ object.__setattr__(self, "headers", headers)
149
+ else:
150
+ object.__setattr__(
151
+ self, "headers", normalize_and_validate(headers, _parsed=_parsed)
152
+ )
153
+ if not _parsed:
154
+ object.__setattr__(self, "reason", bytesify(reason))
155
+ object.__setattr__(self, "http_version", bytesify(http_version))
156
+ if not isinstance(status_code, int):
157
+ raise LocalProtocolError("status code must be integer")
158
+ # Because IntEnum objects are instances of int, but aren't
159
+ # duck-compatible (sigh), see gh-72.
160
+ object.__setattr__(self, "status_code", int(status_code))
161
+ else:
162
+ object.__setattr__(self, "reason", reason)
163
+ object.__setattr__(self, "http_version", http_version)
164
+ object.__setattr__(self, "status_code", status_code)
165
+
166
+ self.__post_init__()
167
+
168
+ def __post_init__(self) -> None:
169
+ pass
170
+
171
+ # This is an unhashable type.
172
+ __hash__ = None # type: ignore
173
+
174
+
175
+ @dataclass(init=False, frozen=True)
176
+ class InformationalResponse(_ResponseBase):
177
+ """An HTTP informational response.
178
+
179
+ Fields:
180
+
181
+ .. attribute:: status_code
182
+
183
+ The status code of this response, as an integer. For an
184
+ :class:`InformationalResponse`, this is always in the range [100,
185
+ 200).
186
+
187
+ .. attribute:: headers
188
+
189
+ Request headers, represented as a list of (name, value) pairs. See
190
+ :ref:`the header normalization rules <headers-format>` for
191
+ details.
192
+
193
+ .. attribute:: http_version
194
+
195
+ The HTTP protocol version, represented as a byte string like
196
+ ``b"1.1"``. See :ref:`the HTTP version normalization rules
197
+ <http_version-format>` for details.
198
+
199
+ .. attribute:: reason
200
+
201
+ The reason phrase of this response, as a byte string. For example:
202
+ ``b"OK"``, or ``b"Not Found"``.
203
+
204
+ """
205
+
206
+ def __post_init__(self) -> None:
207
+ if not (100 <= self.status_code < 200):
208
+ raise LocalProtocolError(
209
+ "InformationalResponse status_code should be in range "
210
+ "[100, 200), not {}".format(self.status_code)
211
+ )
212
+
213
+ # This is an unhashable type.
214
+ __hash__ = None # type: ignore
215
+
216
+
217
+ @dataclass(init=False, frozen=True)
218
+ class Response(_ResponseBase):
219
+ """The beginning of an HTTP response.
220
+
221
+ Fields:
222
+
223
+ .. attribute:: status_code
224
+
225
+ The status code of this response, as an integer. For an
226
+ :class:`Response`, this is always in the range [200,
227
+ 1000).
228
+
229
+ .. attribute:: headers
230
+
231
+ Request headers, represented as a list of (name, value) pairs. See
232
+ :ref:`the header normalization rules <headers-format>` for details.
233
+
234
+ .. attribute:: http_version
235
+
236
+ The HTTP protocol version, represented as a byte string like
237
+ ``b"1.1"``. See :ref:`the HTTP version normalization rules
238
+ <http_version-format>` for details.
239
+
240
+ .. attribute:: reason
241
+
242
+ The reason phrase of this response, as a byte string. For example:
243
+ ``b"OK"``, or ``b"Not Found"``.
244
+
245
+ """
246
+
247
+ def __post_init__(self) -> None:
248
+ if not (200 <= self.status_code < 1000):
249
+ raise LocalProtocolError(
250
+ "Response status_code should be in range [200, 1000), not {}".format(
251
+ self.status_code
252
+ )
253
+ )
254
+
255
+ # This is an unhashable type.
256
+ __hash__ = None # type: ignore
257
+
258
+
259
+ @dataclass(init=False, frozen=True)
260
+ class Data(Event):
261
+ """Part of an HTTP message body.
262
+
263
+ Fields:
264
+
265
+ .. attribute:: data
266
+
267
+ A :term:`bytes-like object` containing part of a message body. Or, if
268
+ using the ``combine=False`` argument to :meth:`Connection.send`, then
269
+ any object that your socket writing code knows what to do with, and for
270
+ which calling :func:`len` returns the number of bytes that will be
271
+ written -- see :ref:`sendfile` for details.
272
+
273
+ .. attribute:: chunk_start
274
+
275
+ A marker that indicates whether this data object is from the start of a
276
+ chunked transfer encoding chunk. This field is ignored when when a Data
277
+ event is provided to :meth:`Connection.send`: it is only valid on
278
+ events emitted from :meth:`Connection.next_event`. You probably
279
+ shouldn't use this attribute at all; see
280
+ :ref:`chunk-delimiters-are-bad` for details.
281
+
282
+ .. attribute:: chunk_end
283
+
284
+ A marker that indicates whether this data object is the last for a
285
+ given chunked transfer encoding chunk. This field is ignored when when
286
+ a Data event is provided to :meth:`Connection.send`: it is only valid
287
+ on events emitted from :meth:`Connection.next_event`. You probably
288
+ shouldn't use this attribute at all; see
289
+ :ref:`chunk-delimiters-are-bad` for details.
290
+
291
+ """
292
+
293
+ __slots__ = ("data", "chunk_start", "chunk_end")
294
+
295
+ data: bytes
296
+ chunk_start: bool
297
+ chunk_end: bool
298
+
299
+ def __init__(
300
+ self, data: bytes, chunk_start: bool = False, chunk_end: bool = False
301
+ ) -> None:
302
+ object.__setattr__(self, "data", data)
303
+ object.__setattr__(self, "chunk_start", chunk_start)
304
+ object.__setattr__(self, "chunk_end", chunk_end)
305
+
306
+ # This is an unhashable type.
307
+ __hash__ = None # type: ignore
308
+
309
+
310
+ # XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that
311
+ # are forbidden to be sent in a trailer, since processing them as if they were
312
+ # present in the header section might bypass external security filters."
313
+ # https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part
314
+ # Unfortunately, the list of forbidden fields is long and vague :-/
315
+ @dataclass(init=False, frozen=True)
316
+ class EndOfMessage(Event):
317
+ """The end of an HTTP message.
318
+
319
+ Fields:
320
+
321
+ .. attribute:: headers
322
+
323
+ Default value: ``[]``
324
+
325
+ Any trailing headers attached to this message, represented as a list of
326
+ (name, value) pairs. See :ref:`the header normalization rules
327
+ <headers-format>` for details.
328
+
329
+ Must be empty unless ``Transfer-Encoding: chunked`` is in use.
330
+
331
+ """
332
+
333
+ __slots__ = ("headers",)
334
+
335
+ headers: Headers
336
+
337
+ def __init__(
338
+ self,
339
+ *,
340
+ headers: Union[
341
+ Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None
342
+ ] = None,
343
+ _parsed: bool = False,
344
+ ) -> None:
345
+ super().__init__()
346
+ if headers is None:
347
+ headers = Headers([])
348
+ elif not isinstance(headers, Headers):
349
+ headers = normalize_and_validate(headers, _parsed=_parsed)
350
+
351
+ object.__setattr__(self, "headers", headers)
352
+
353
+ # This is an unhashable type.
354
+ __hash__ = None # type: ignore
355
+
356
+
357
+ @dataclass(frozen=True)
358
+ class ConnectionClosed(Event):
359
+ """This event indicates that the sender has closed their outgoing
360
+ connection.
361
+
362
+ Note that this does not necessarily mean that they can't *receive* further
363
+ data, because TCP connections are composed to two one-way channels which
364
+ can be closed independently. See :ref:`closing` for details.
365
+
366
+ No fields.
367
+ """
368
+
369
+ pass
.venv/lib/python3.11/site-packages/h11/_headers.py ADDED
@@ -0,0 +1,278 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+ from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union
3
+
4
+ from ._abnf import field_name, field_value
5
+ from ._util import bytesify, LocalProtocolError, validate
6
+
7
+ if TYPE_CHECKING:
8
+ from ._events import Request
9
+
10
+ try:
11
+ from typing import Literal
12
+ except ImportError:
13
+ from typing_extensions import Literal # type: ignore
14
+
15
+
16
+ # Facts
17
+ # -----
18
+ #
19
+ # Headers are:
20
+ # keys: case-insensitive ascii
21
+ # values: mixture of ascii and raw bytes
22
+ #
23
+ # "Historically, HTTP has allowed field content with text in the ISO-8859-1
24
+ # charset [ISO-8859-1], supporting other charsets only through use of
25
+ # [RFC2047] encoding. In practice, most HTTP header field values use only a
26
+ # subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD
27
+ # limit their field values to US-ASCII octets. A recipient SHOULD treat other
28
+ # octets in field content (obs-text) as opaque data."
29
+ # And it deprecates all non-ascii values
30
+ #
31
+ # Leading/trailing whitespace in header names is forbidden
32
+ #
33
+ # Values get leading/trailing whitespace stripped
34
+ #
35
+ # Content-Disposition actually needs to contain unicode semantically; to
36
+ # accomplish this it has a terrifically weird way of encoding the filename
37
+ # itself as ascii (and even this still has lots of cross-browser
38
+ # incompatibilities)
39
+ #
40
+ # Order is important:
41
+ # "a proxy MUST NOT change the order of these field values when forwarding a
42
+ # message"
43
+ # (and there are several headers where the order indicates a preference)
44
+ #
45
+ # Multiple occurences of the same header:
46
+ # "A sender MUST NOT generate multiple header fields with the same field name
47
+ # in a message unless either the entire field value for that header field is
48
+ # defined as a comma-separated list [or the header is Set-Cookie which gets a
49
+ # special exception]" - RFC 7230. (cookies are in RFC 6265)
50
+ #
51
+ # So every header aside from Set-Cookie can be merged by b", ".join if it
52
+ # occurs repeatedly. But, of course, they can't necessarily be split by
53
+ # .split(b","), because quoting.
54
+ #
55
+ # Given all this mess (case insensitive, duplicates allowed, order is
56
+ # important, ...), there doesn't appear to be any standard way to handle
57
+ # headers in Python -- they're almost like dicts, but... actually just
58
+ # aren't. For now we punt and just use a super simple representation: headers
59
+ # are a list of pairs
60
+ #
61
+ # [(name1, value1), (name2, value2), ...]
62
+ #
63
+ # where all entries are bytestrings, names are lowercase and have no
64
+ # leading/trailing whitespace, and values are bytestrings with no
65
+ # leading/trailing whitespace. Searching and updating are done via naive O(n)
66
+ # methods.
67
+ #
68
+ # Maybe a dict-of-lists would be better?
69
+
70
+ _content_length_re = re.compile(rb"[0-9]+")
71
+ _field_name_re = re.compile(field_name.encode("ascii"))
72
+ _field_value_re = re.compile(field_value.encode("ascii"))
73
+
74
+
75
+ class Headers(Sequence[Tuple[bytes, bytes]]):
76
+ """
77
+ A list-like interface that allows iterating over headers as byte-pairs
78
+ of (lowercased-name, value).
79
+
80
+ Internally we actually store the representation as three-tuples,
81
+ including both the raw original casing, in order to preserve casing
82
+ over-the-wire, and the lowercased name, for case-insensitive comparisions.
83
+
84
+ r = Request(
85
+ method="GET",
86
+ target="/",
87
+ headers=[("Host", "example.org"), ("Connection", "keep-alive")],
88
+ http_version="1.1",
89
+ )
90
+ assert r.headers == [
91
+ (b"host", b"example.org"),
92
+ (b"connection", b"keep-alive")
93
+ ]
94
+ assert r.headers.raw_items() == [
95
+ (b"Host", b"example.org"),
96
+ (b"Connection", b"keep-alive")
97
+ ]
98
+ """
99
+
100
+ __slots__ = "_full_items"
101
+
102
+ def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None:
103
+ self._full_items = full_items
104
+
105
+ def __bool__(self) -> bool:
106
+ return bool(self._full_items)
107
+
108
+ def __eq__(self, other: object) -> bool:
109
+ return list(self) == list(other) # type: ignore
110
+
111
+ def __len__(self) -> int:
112
+ return len(self._full_items)
113
+
114
+ def __repr__(self) -> str:
115
+ return "<Headers(%s)>" % repr(list(self))
116
+
117
+ def __getitem__(self, idx: int) -> Tuple[bytes, bytes]: # type: ignore[override]
118
+ _, name, value = self._full_items[idx]
119
+ return (name, value)
120
+
121
+ def raw_items(self) -> List[Tuple[bytes, bytes]]:
122
+ return [(raw_name, value) for raw_name, _, value in self._full_items]
123
+
124
+
125
+ HeaderTypes = Union[
126
+ List[Tuple[bytes, bytes]],
127
+ List[Tuple[bytes, str]],
128
+ List[Tuple[str, bytes]],
129
+ List[Tuple[str, str]],
130
+ ]
131
+
132
+
133
+ @overload
134
+ def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers:
135
+ ...
136
+
137
+
138
+ @overload
139
+ def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers:
140
+ ...
141
+
142
+
143
+ @overload
144
+ def normalize_and_validate(
145
+ headers: Union[Headers, HeaderTypes], _parsed: bool = False
146
+ ) -> Headers:
147
+ ...
148
+
149
+
150
+ def normalize_and_validate(
151
+ headers: Union[Headers, HeaderTypes], _parsed: bool = False
152
+ ) -> Headers:
153
+ new_headers = []
154
+ seen_content_length = None
155
+ saw_transfer_encoding = False
156
+ for name, value in headers:
157
+ # For headers coming out of the parser, we can safely skip some steps,
158
+ # because it always returns bytes and has already run these regexes
159
+ # over the data:
160
+ if not _parsed:
161
+ name = bytesify(name)
162
+ value = bytesify(value)
163
+ validate(_field_name_re, name, "Illegal header name {!r}", name)
164
+ validate(_field_value_re, value, "Illegal header value {!r}", value)
165
+ assert isinstance(name, bytes)
166
+ assert isinstance(value, bytes)
167
+
168
+ raw_name = name
169
+ name = name.lower()
170
+ if name == b"content-length":
171
+ lengths = {length.strip() for length in value.split(b",")}
172
+ if len(lengths) != 1:
173
+ raise LocalProtocolError("conflicting Content-Length headers")
174
+ value = lengths.pop()
175
+ validate(_content_length_re, value, "bad Content-Length")
176
+ if seen_content_length is None:
177
+ seen_content_length = value
178
+ new_headers.append((raw_name, name, value))
179
+ elif seen_content_length != value:
180
+ raise LocalProtocolError("conflicting Content-Length headers")
181
+ elif name == b"transfer-encoding":
182
+ # "A server that receives a request message with a transfer coding
183
+ # it does not understand SHOULD respond with 501 (Not
184
+ # Implemented)."
185
+ # https://tools.ietf.org/html/rfc7230#section-3.3.1
186
+ if saw_transfer_encoding:
187
+ raise LocalProtocolError(
188
+ "multiple Transfer-Encoding headers", error_status_hint=501
189
+ )
190
+ # "All transfer-coding names are case-insensitive"
191
+ # -- https://tools.ietf.org/html/rfc7230#section-4
192
+ value = value.lower()
193
+ if value != b"chunked":
194
+ raise LocalProtocolError(
195
+ "Only Transfer-Encoding: chunked is supported",
196
+ error_status_hint=501,
197
+ )
198
+ saw_transfer_encoding = True
199
+ new_headers.append((raw_name, name, value))
200
+ else:
201
+ new_headers.append((raw_name, name, value))
202
+ return Headers(new_headers)
203
+
204
+
205
+ def get_comma_header(headers: Headers, name: bytes) -> List[bytes]:
206
+ # Should only be used for headers whose value is a list of
207
+ # comma-separated, case-insensitive values.
208
+ #
209
+ # The header name `name` is expected to be lower-case bytes.
210
+ #
211
+ # Connection: meets these criteria (including cast insensitivity).
212
+ #
213
+ # Content-Length: technically is just a single value (1*DIGIT), but the
214
+ # standard makes reference to implementations that do multiple values, and
215
+ # using this doesn't hurt. Ditto, case insensitivity doesn't things either
216
+ # way.
217
+ #
218
+ # Transfer-Encoding: is more complex (allows for quoted strings), so
219
+ # splitting on , is actually wrong. For example, this is legal:
220
+ #
221
+ # Transfer-Encoding: foo; options="1,2", chunked
222
+ #
223
+ # and should be parsed as
224
+ #
225
+ # foo; options="1,2"
226
+ # chunked
227
+ #
228
+ # but this naive function will parse it as
229
+ #
230
+ # foo; options="1
231
+ # 2"
232
+ # chunked
233
+ #
234
+ # However, this is okay because the only thing we are going to do with
235
+ # any Transfer-Encoding is reject ones that aren't just "chunked", so
236
+ # both of these will be treated the same anyway.
237
+ #
238
+ # Expect: the only legal value is the literal string
239
+ # "100-continue". Splitting on commas is harmless. Case insensitive.
240
+ #
241
+ out: List[bytes] = []
242
+ for _, found_name, found_raw_value in headers._full_items:
243
+ if found_name == name:
244
+ found_raw_value = found_raw_value.lower()
245
+ for found_split_value in found_raw_value.split(b","):
246
+ found_split_value = found_split_value.strip()
247
+ if found_split_value:
248
+ out.append(found_split_value)
249
+ return out
250
+
251
+
252
+ def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers:
253
+ # The header name `name` is expected to be lower-case bytes.
254
+ #
255
+ # Note that when we store the header we use title casing for the header
256
+ # names, in order to match the conventional HTTP header style.
257
+ #
258
+ # Simply calling `.title()` is a blunt approach, but it's correct
259
+ # here given the cases where we're using `set_comma_header`...
260
+ #
261
+ # Connection, Content-Length, Transfer-Encoding.
262
+ new_headers: List[Tuple[bytes, bytes]] = []
263
+ for found_raw_name, found_name, found_raw_value in headers._full_items:
264
+ if found_name != name:
265
+ new_headers.append((found_raw_name, found_raw_value))
266
+ for new_value in new_values:
267
+ new_headers.append((name.title(), new_value))
268
+ return normalize_and_validate(new_headers)
269
+
270
+
271
+ def has_expect_100_continue(request: "Request") -> bool:
272
+ # https://tools.ietf.org/html/rfc7231#section-5.1.1
273
+ # "A server that receives a 100-continue expectation in an HTTP/1.0 request
274
+ # MUST ignore that expectation."
275
+ if request.http_version < b"1.1":
276
+ return False
277
+ expect = get_comma_header(request.headers, b"expect")
278
+ return b"100-continue" in expect
.venv/lib/python3.11/site-packages/h11/_readers.py ADDED
@@ -0,0 +1,247 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Code to read HTTP data
2
+ #
3
+ # Strategy: each reader is a callable which takes a ReceiveBuffer object, and
4
+ # either:
5
+ # 1) consumes some of it and returns an Event
6
+ # 2) raises a LocalProtocolError (for consistency -- e.g. we call validate()
7
+ # and it might raise a LocalProtocolError, so simpler just to always use
8
+ # this)
9
+ # 3) returns None, meaning "I need more data"
10
+ #
11
+ # If they have a .read_eof attribute, then this will be called if an EOF is
12
+ # received -- but this is optional. Either way, the actual ConnectionClosed
13
+ # event will be generated afterwards.
14
+ #
15
+ # READERS is a dict describing how to pick a reader. It maps states to either:
16
+ # - a reader
17
+ # - or, for body readers, a dict of per-framing reader factories
18
+
19
+ import re
20
+ from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union
21
+
22
+ from ._abnf import chunk_header, header_field, request_line, status_line
23
+ from ._events import Data, EndOfMessage, InformationalResponse, Request, Response
24
+ from ._receivebuffer import ReceiveBuffer
25
+ from ._state import (
26
+ CLIENT,
27
+ CLOSED,
28
+ DONE,
29
+ IDLE,
30
+ MUST_CLOSE,
31
+ SEND_BODY,
32
+ SEND_RESPONSE,
33
+ SERVER,
34
+ )
35
+ from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate
36
+
37
+ __all__ = ["READERS"]
38
+
39
+ header_field_re = re.compile(header_field.encode("ascii"))
40
+ obs_fold_re = re.compile(rb"[ \t]+")
41
+
42
+
43
def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]:
    """Undo obsolete line folding: join continuation lines onto their header.

    A line starting with SP/HTAB (matched by ``obs_fold_re``) is a
    continuation of the previous header line; it is glued on with a single
    space. Raises LocalProtocolError if the very first line is a
    continuation.
    """
    pending: Optional[bytes] = None
    for raw_line in lines:
        fold = obs_fold_re.match(raw_line)
        if fold is None:
            # Normal line: flush whatever we were accumulating first.
            if pending is not None:
                yield pending
            pending = raw_line
            continue
        # Continuation line: append to the previous header line.
        if pending is None:
            raise LocalProtocolError("continuation line at start of headers")
        if not isinstance(pending, bytearray):
            # Switch to a mutable type so repeated appends stay O(n) overall.
            pending = bytearray(pending)
        pending += b" "
        pending += raw_line[fold.end() :]
    if pending is not None:
        yield pending
62
+
63
+
64
def _decode_header_lines(
    lines: Iterable[bytes],
) -> Iterable[Tuple[bytes, bytes]]:
    """Parse raw (possibly folded) header lines into (name, value) pairs.

    Raises LocalProtocolError (via ``validate``) on a malformed header line.
    """
    for unfolded in _obsolete_line_fold(lines):
        fields = validate(
            header_field_re, unfolded, "illegal header line: {!r}", unfolded
        )
        yield fields["field_name"], fields["field_value"]
70
+
71
+
72
+ request_line_re = re.compile(request_line.encode("ascii"))
73
+
74
+
75
def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]:
    """Try to parse a complete request line + headers out of ``buf``.

    Returns None when more data is needed; raises LocalProtocolError on
    malformed or obviously-invalid input.
    """
    lines = buf.maybe_extract_lines()
    if lines is None:
        # Not enough data yet -- but bail out early on input that clearly
        # is not an HTTP request line (cheap sanity check, see ReceiveBuffer).
        if buf.is_next_line_obviously_invalid_request_line():
            raise LocalProtocolError("illegal request line")
        return None
    if not lines:
        raise LocalProtocolError("no request line received")
    request_line_fields = validate(
        request_line_re, lines[0], "illegal request line: {!r}", lines[0]
    )
    header_list = list(_decode_header_lines(lines[1:]))
    return Request(headers=header_list, _parsed=True, **request_line_fields)
89
+
90
+
91
+ status_line_re = re.compile(status_line.encode("ascii"))
92
+
93
+
94
def maybe_read_from_SEND_RESPONSE_server(
    buf: ReceiveBuffer,
) -> Union[InformationalResponse, Response, None]:
    """Try to parse a complete status line + headers out of ``buf``.

    Returns None when more data is needed, an InformationalResponse for 1xx
    status codes, and a Response otherwise. Raises LocalProtocolError on
    malformed data.
    """
    lines = buf.maybe_extract_lines()
    if lines is None:
        # Not enough data yet -- but abort early on input that clearly isn't
        # HTTP (e.g. a TLS handshake where plain HTTP was expected).
        # Fix: this is the *response* parser, so say "response line" instead
        # of the message copy-pasted from the request parser.
        if buf.is_next_line_obviously_invalid_request_line():
            raise LocalProtocolError("illegal response line")
        return None
    if not lines:
        raise LocalProtocolError("no response line received")
    matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0])
    # The status-line regex tolerates a missing version/reason; fill in the
    # documented defaults here.
    http_version = (
        b"1.1" if matches["http_version"] is None else matches["http_version"]
    )
    reason = b"" if matches["reason"] is None else matches["reason"]
    status_code = int(matches["status_code"])
    # 1xx responses are informational and do not complete the cycle.
    class_: Union[Type[InformationalResponse], Type[Response]] = (
        InformationalResponse if status_code < 200 else Response
    )
    return class_(
        headers=list(_decode_header_lines(lines[1:])),
        _parsed=True,
        status_code=status_code,
        reason=reason,
        http_version=http_version,
    )
120
+
121
+
122
class ContentLengthReader:
    """Body reader for messages framed by a Content-Length header."""

    def __init__(self, length: int) -> None:
        # Total declared body size, and how much of it we still expect.
        self._length = length
        self._remaining = length

    def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]:
        """Return the next Data chunk, EndOfMessage when done, or None."""
        if self._remaining == 0:
            return EndOfMessage()
        chunk = buf.maybe_extract_at_most(self._remaining)
        if chunk is None:
            return None
        self._remaining -= len(chunk)
        return Data(data=chunk)

    def read_eof(self) -> NoReturn:
        """EOF before the declared length is a protocol error."""
        received = self._length - self._remaining
        raise RemoteProtocolError(
            "peer closed connection without sending complete message body "
            "(received {} bytes, expected {})".format(received, self._length)
        )
143
+
144
+
145
+ chunk_header_re = re.compile(chunk_header.encode("ascii"))
146
+
147
+
148
class ChunkedReader:
    """Body reader for messages framed with Transfer-Encoding: chunked.

    State is tracked across calls: how many bytes are left in the current
    chunk, how many trailing framing bytes to discard, and whether we have
    reached the trailer section after the terminating zero-size chunk.
    """

    def __init__(self) -> None:
        # Bytes still unread from the current chunk's payload.
        self._bytes_in_chunk = 0
        # After reading a chunk, we have to throw away the trailing \r\n; if
        # this is >0 then we discard that many bytes before resuming regular
        # de-chunkification.
        self._bytes_to_discard = 0
        # True once the zero-size terminating chunk was seen; we then read
        # trailer lines until a blank line.
        self._reading_trailer = False

    def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]:
        """Return the next Data chunk, EndOfMessage at the end, or None."""
        if self._reading_trailer:
            lines = buf.maybe_extract_lines()
            if lines is None:
                return None
            # Trailer lines become headers on the EndOfMessage event.
            return EndOfMessage(headers=list(_decode_header_lines(lines)))
        if self._bytes_to_discard > 0:
            data = buf.maybe_extract_at_most(self._bytes_to_discard)
            if data is None:
                return None
            self._bytes_to_discard -= len(data)
            if self._bytes_to_discard > 0:
                return None
            # else, fall through and read some more
        assert self._bytes_to_discard == 0
        if self._bytes_in_chunk == 0:
            # We need to refill our chunk count
            chunk_header = buf.maybe_extract_next_line()
            if chunk_header is None:
                return None
            matches = validate(
                chunk_header_re,
                chunk_header,
                "illegal chunk header: {!r}",
                chunk_header,
            )
            # XX FIXME: we discard chunk extensions. Does anyone care?
            # Chunk size is hexadecimal per the chunked-coding grammar.
            self._bytes_in_chunk = int(matches["chunk_size"], base=16)
            if self._bytes_in_chunk == 0:
                # Zero-size chunk terminates the body; switch to trailer mode
                # and recurse to parse whatever trailer data is buffered.
                self._reading_trailer = True
                return self(buf)
            chunk_start = True
        else:
            chunk_start = False
        assert self._bytes_in_chunk > 0
        data = buf.maybe_extract_at_most(self._bytes_in_chunk)
        if data is None:
            return None
        self._bytes_in_chunk -= len(data)
        if self._bytes_in_chunk == 0:
            # Chunk payload finished: its trailing \r\n (2 bytes) must be
            # discarded before the next chunk header.
            self._bytes_to_discard = 2
            chunk_end = True
        else:
            chunk_end = False
        return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end)

    def read_eof(self) -> NoReturn:
        """EOF in the middle of a chunked body is a protocol error."""
        raise RemoteProtocolError(
            "peer closed connection without sending complete message body "
            "(incomplete chunked read)"
        )
208
+
209
+
210
class Http10Reader:
    """Body reader for HTTP/1.0-style framing: the body runs until EOF."""

    def __call__(self, buf: ReceiveBuffer) -> Optional[Data]:
        # The huge cap effectively means "everything currently buffered".
        available = buf.maybe_extract_at_most(999999999)
        if available is None:
            return None
        return Data(data=available)

    def read_eof(self) -> EndOfMessage:
        # EOF is the only way this kind of body ends, so it is not an error.
        return EndOfMessage()
219
+
220
+
221
def expect_nothing(buf: ReceiveBuffer) -> None:
    """Reader for states in which the peer must not send anything at all."""
    if not buf:
        return None
    raise LocalProtocolError("Got data when expecting EOF")
225
+
226
+
227
# Keys are either a (role, state) pair or the bare SEND_BODY state; values
# are either a reader callable, or (for SEND_BODY) a dict mapping framing
# names to reader factories.
ReadersType = Dict[
    Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]],
    Union[Callable[..., Any], Dict[str, Callable[..., Any]]],
]

READERS: ReadersType = {
    (CLIENT, IDLE): maybe_read_from_IDLE_client,
    (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server,
    (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server,
    # In these states the peer is not allowed to send anything.
    (CLIENT, DONE): expect_nothing,
    (CLIENT, MUST_CLOSE): expect_nothing,
    (CLIENT, CLOSED): expect_nothing,
    (SERVER, DONE): expect_nothing,
    (SERVER, MUST_CLOSE): expect_nothing,
    (SERVER, CLOSED): expect_nothing,
    # Body readers are picked per framing mechanism; these are factories,
    # not readers (ContentLengthReader takes the declared length).
    SEND_BODY: {
        "chunked": ChunkedReader,
        "content-length": ContentLengthReader,
        "http/1.0": Http10Reader,
    },
}
.venv/lib/python3.11/site-packages/h11/_receivebuffer.py ADDED
@@ -0,0 +1,153 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+ import sys
3
+ from typing import List, Optional, Union
4
+
5
+ __all__ = ["ReceiveBuffer"]
6
+
7
+
8
+ # Operations we want to support:
9
+ # - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable),
10
+ # or wait until there is one
11
+ # - read at-most-N bytes
12
+ # Goals:
13
+ # - on average, do this fast
14
+ # - worst case, do this in O(n) where n is the number of bytes processed
15
+ # Plan:
16
+ # - store bytearray, offset, how far we've searched for a separator token
17
+ # - use the how-far-we've-searched data to avoid rescanning
18
+ # - while doing a stream of uninterrupted processing, advance offset instead
19
+ # of constantly copying
20
+ # WARNING:
21
+ # - I haven't benchmarked or profiled any of this yet.
22
+ #
23
+ # Note that starting in Python 3.4, deleting the initial n bytes from a
24
+ # bytearray is amortized O(n), thanks to some excellent work by Antoine
25
+ # Martin:
26
+ #
27
+ # https://bugs.python.org/issue19087
28
+ #
29
+ # This means that if we only supported 3.4+, we could get rid of the code here
30
+ # involving self._start and self.compress, because it's doing exactly the same
31
+ # thing that bytearray now does internally.
32
+ #
33
+ # BUT unfortunately, we still support 2.7, and reading short segments out of a
34
+ # long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually
35
+ # delete this code. Yet:
36
+ #
37
+ # https://pythonclock.org/
38
+ #
39
+ # (Two things to double-check first though: make sure PyPy also has the
40
+ # optimization, and benchmark to make sure it's a win, since we do have a
41
+ # slightly clever thing where we delay calling compress() until we've
42
+ # processed a whole event, which could in theory be slightly more efficient
43
+ # than the internal bytearray support.)
44
+ blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE)
45
+
46
+
47
class ReceiveBuffer:
    """Incrementally-fed byte buffer with cheap line/blank-line extraction.

    Data is appended with ``+=``; parsers pull it back out through the
    ``maybe_extract_*`` methods, which return None when the buffer does not
    yet contain enough data. The ``*_search`` offsets remember how far
    earlier scans got, so repeated calls never rescan the same bytes.
    """

    def __init__(self) -> None:
        # Raw buffered bytes.
        self._data = bytearray()
        # How far we have already scanned for "\r\n" (maybe_extract_next_line).
        self._next_line_search = 0
        # How far we have already scanned for a blank line (maybe_extract_lines).
        self._multiple_lines_search = 0

    def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer":
        self._data += byteslike
        return self

    def __bool__(self) -> bool:
        # Truthy iff any unprocessed bytes remain.
        return bool(len(self))

    def __len__(self) -> int:
        return len(self._data)

    # for @property unprocessed_data
    def __bytes__(self) -> bytes:
        return bytes(self._data)

    def _extract(self, count: int) -> bytearray:
        # extracting an initial slice of the data buffer and return it
        out = self._data[:count]
        del self._data[:count]

        # Cached scan positions referred to the removed prefix, so reset them.
        self._next_line_search = 0
        self._multiple_lines_search = 0

        return out

    def maybe_extract_at_most(self, count: int) -> Optional[bytearray]:
        """
        Extract a fixed number of bytes from the buffer.

        Returns None (rather than b"") when the buffer is empty.
        """
        out = self._data[:count]
        if not out:
            return None

        return self._extract(count)

    def maybe_extract_next_line(self) -> Optional[bytearray]:
        """
        Extract the first line, if it is completed in the buffer.

        The returned line includes its trailing b"\\r\\n".
        """
        # Only search in buffer space that we've not already looked at.
        # The -1 backs up one byte so a "\r" sitting at the previous search
        # frontier can pair with a newly arrived "\n".
        search_start_index = max(0, self._next_line_search - 1)
        partial_idx = self._data.find(b"\r\n", search_start_index)

        if partial_idx == -1:
            self._next_line_search = len(self._data)
            return None

        # + 2 is to compensate len(b"\r\n")
        idx = partial_idx + 2

        return self._extract(idx)

    def maybe_extract_lines(self) -> Optional[List[bytearray]]:
        """
        Extract everything up to the first blank line, and return a list of lines.

        Line terminators are stripped from the returned lines; the blank
        terminator lines themselves are consumed but not returned.
        """
        # Handle the case where we have an immediate empty line.
        if self._data[:1] == b"\n":
            self._extract(1)
            return []

        if self._data[:2] == b"\r\n":
            self._extract(2)
            return []

        # Only search in buffer space that we've not already looked at.
        match = blank_line_regex.search(self._data, self._multiple_lines_search)
        if match is None:
            # Back up 2 bytes: the terminator is at most 3 bytes ("\n\r\n"),
            # so the buffer may end with an incomplete prefix of it that must
            # be rescanned once more data arrives.
            self._multiple_lines_search = max(0, len(self._data) - 2)
            return None

        # Truncate the buffer and return it.
        idx = match.span(0)[-1]
        out = self._extract(idx)
        lines = out.split(b"\n")

        # Strip trailing "\r" in place (split on a bytearray yields mutable
        # bytearrays).
        for line in lines:
            if line.endswith(b"\r"):
                del line[-1]

        # The match guarantees the data ended with a blank line, so the last
        # two split results are empty.
        assert lines[-2] == lines[-1] == b""

        del lines[-2:]

        return lines

    # In theory we should wait until `\r\n` before starting to validate
    # incoming data. However it's interesting to detect (very) invalid data
    # early given they might not even contain `\r\n` at all (hence only
    # timeout will get rid of them).
    # This is not a 100% effective detection but more of a cheap sanity check
    # allowing for early abort in some useful cases.
    # This is especially interesting when peer is messing up with HTTPS and
    # sent us a TLS stream where we were expecting plain HTTP given all
    # versions of TLS so far start handshake with a 0x16 message type code.
    def is_next_line_obviously_invalid_request_line(self) -> bool:
        try:
            # HTTP header line must not contain non-printable characters
            # and should not start with a space
            return self._data[0] < 0x21
        except IndexError:
            # Empty buffer: nothing to judge yet.
            return False
.venv/lib/python3.11/site-packages/h11/_state.py ADDED
@@ -0,0 +1,367 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ################################################################
2
+ # The core state machine
3
+ ################################################################
4
+ #
5
+ # Rule 1: everything that affects the state machine and state transitions must
6
+ # live here in this file. As much as possible goes into the table-based
7
+ # representation, but for the bits that don't quite fit, the actual code and
8
+ # state must nonetheless live here.
9
+ #
10
+ # Rule 2: this file does not know about what role we're playing; it only knows
11
+ # about HTTP request/response cycles in the abstract. This ensures that we
12
+ # don't cheat and apply different rules to local and remote parties.
13
+ #
14
+ #
15
+ # Theory of operation
16
+ # ===================
17
+ #
18
+ # Possibly the simplest way to think about this is that we actually have 5
19
+ # different state machines here. Yes, 5. These are:
20
+ #
21
+ # 1) The client state, with its complicated automaton (see the docs)
22
+ # 2) The server state, with its complicated automaton (see the docs)
23
+ # 3) The keep-alive state, with possible states {True, False}
24
+ # 4) The SWITCH_CONNECT state, with possible states {False, True}
25
+ # 5) The SWITCH_UPGRADE state, with possible states {False, True}
26
+ #
27
+ # For (3)-(5), the first state listed is the initial state.
28
+ #
29
+ # (1)-(3) are stored explicitly in member variables. The last
30
+ # two are stored implicitly in the pending_switch_proposals set as:
31
+ # (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals)
32
+ # (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals)
33
+ #
34
+ # And each of these machines has two different kinds of transitions:
35
+ #
36
+ # a) Event-triggered
37
+ # b) State-triggered
38
+ #
39
+ # Event triggered is the obvious thing that you'd think it is: some event
40
+ # happens, and if it's the right event at the right time then a transition
41
+ # happens. But there are somewhat complicated rules for which machines can
42
+ # "see" which events. (As a rule of thumb, if a machine "sees" an event, this
43
+ # means two things: the event can affect the machine, and if the machine is
44
+ # not in a state where it expects that event then it's an error.) These rules
45
+ # are:
46
+ #
47
+ # 1) The client machine sees all h11.events objects emitted by the client.
48
+ #
49
+ # 2) The server machine sees all h11.events objects emitted by the server.
50
+ #
51
+ # It also sees the client's Request event.
52
+ #
53
+ # And sometimes, server events are annotated with a _SWITCH_* event. For
54
+ # example, we can have a (Response, _SWITCH_CONNECT) event, which is
55
+ # different from a regular Response event.
56
+ #
57
+ # 3) The keep-alive machine sees the process_keep_alive_disabled() event
58
+ # (which is derived from Request/Response events), and this event
59
+ # transitions it from True -> False, or from False -> False. There's no way
60
+ # to transition back.
61
+ #
62
+ # 4&5) The _SWITCH_* machines transition from False->True when we get a
63
+ # Request that proposes the relevant type of switch (via
64
+ # process_client_switch_proposals), and they go from True->False when we
65
+ # get a Response that has no _SWITCH_* annotation.
66
+ #
67
+ # So that's event-triggered transitions.
68
+ #
69
+ # State-triggered transitions are less standard. What they do here is couple
70
+ # the machines together. The way this works is, when certain *joint*
71
+ # configurations of states are achieved, then we automatically transition to a
72
+ # new *joint* state. So, for example, if we're ever in a joint state with
73
+ #
74
+ # client: DONE
75
+ # keep-alive: False
76
+ #
77
+ # then the client state immediately transitions to:
78
+ #
79
+ # client: MUST_CLOSE
80
+ #
81
+ # This is fundamentally different from an event-based transition, because it
82
+ # doesn't matter how we arrived at the {client: DONE, keep-alive: False} state
83
+ # -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive
84
+ # transitioned True -> False. Either way, once this precondition is satisfied,
85
+ # this transition is immediately triggered.
86
+ #
87
+ # What if two conflicting state-based transitions get enabled at the same
88
+ # time? In practice there's only one case where this arises (client DONE ->
89
+ # MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by
90
+ # explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition.
91
+ #
92
+ # Implementation
93
+ # --------------
94
+ #
95
+ # The event-triggered transitions for the server and client machines are all
96
+ # stored explicitly in a table. Ditto for the state-triggered transitions that
97
+ # involve just the server and client state.
98
+ #
99
+ # The transitions for the other machines, and the state-triggered transitions
100
+ # that involve the other machines, are written out as explicit Python code.
101
+ #
102
+ # It'd be nice if there were some cleaner way to do all this. This isn't
103
+ # *too* terrible, but I feel like it could probably be better.
104
+ #
105
+ # WARNING
106
+ # -------
107
+ #
108
+ # The script that generates the state machine diagrams for the docs knows how
109
+ # to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS
110
+ # tables. But it can't automatically read the transitions that are written
111
+ # directly in Python code. So if you touch those, you need to also update the
112
+ # script to keep it in sync!
113
+ from typing import cast, Dict, Optional, Set, Tuple, Type, Union
114
+
115
+ from ._events import *
116
+ from ._util import LocalProtocolError, Sentinel
117
+
118
+ # Everything in __all__ gets re-exported as part of the h11 public API.
119
+ __all__ = [
120
+ "CLIENT",
121
+ "SERVER",
122
+ "IDLE",
123
+ "SEND_RESPONSE",
124
+ "SEND_BODY",
125
+ "DONE",
126
+ "MUST_CLOSE",
127
+ "CLOSED",
128
+ "MIGHT_SWITCH_PROTOCOL",
129
+ "SWITCHED_PROTOCOL",
130
+ "ERROR",
131
+ ]
132
+
133
+
134
# Connection roles (used as keys into ConnectionState.states).
class CLIENT(Sentinel, metaclass=Sentinel):
    pass


class SERVER(Sentinel, metaclass=Sentinel):
    pass


# States
class IDLE(Sentinel, metaclass=Sentinel):
    pass


class SEND_RESPONSE(Sentinel, metaclass=Sentinel):
    pass


class SEND_BODY(Sentinel, metaclass=Sentinel):
    pass


class DONE(Sentinel, metaclass=Sentinel):
    pass


class MUST_CLOSE(Sentinel, metaclass=Sentinel):
    pass


class CLOSED(Sentinel, metaclass=Sentinel):
    pass


class ERROR(Sentinel, metaclass=Sentinel):
    pass


# Switch types
class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel):
    pass


class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel):
    pass


# Internal annotations marking which kind of protocol switch a server event
# corresponds to (underscore prefix: not re-exported in __all__ above).
class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel):
    pass


class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel):
    pass
186
+
187
+
188
# Maps: role -> current state -> {event type (possibly annotated with a
# switch sentinel) -> new state}.
EventTransitionType = Dict[
    Type[Sentinel],
    Dict[
        Type[Sentinel],
        Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]],
    ],
]

EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = {
    CLIENT: {
        IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED},
        SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE},
        DONE: {ConnectionClosed: CLOSED},
        MUST_CLOSE: {ConnectionClosed: CLOSED},
        CLOSED: {ConnectionClosed: CLOSED},
        # Terminal / special states: no legal client events.
        MIGHT_SWITCH_PROTOCOL: {},
        SWITCHED_PROTOCOL: {},
        ERROR: {},
    },
    SERVER: {
        IDLE: {
            ConnectionClosed: CLOSED,
            Response: SEND_BODY,
            # Special case: server sees client Request events, in this form
            (Request, CLIENT): SEND_RESPONSE,
        },
        SEND_RESPONSE: {
            InformationalResponse: SEND_RESPONSE,
            Response: SEND_BODY,
            # Annotated events: the server accepted a protocol switch.
            (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL,
            (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL,
        },
        SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE},
        DONE: {ConnectionClosed: CLOSED},
        MUST_CLOSE: {ConnectionClosed: CLOSED},
        CLOSED: {ConnectionClosed: CLOSED},
        SWITCHED_PROTOCOL: {},
        ERROR: {},
    },
}

# Maps: (client state, server state) -> {role -> new state for that role}.
StateTransitionType = Dict[
    Tuple[Type[Sentinel], Type[Sentinel]], Dict[Type[Sentinel], Type[Sentinel]]
]

# NB: there are also some special-case state-triggered transitions hard-coded
# into _fire_state_triggered_transitions below.
STATE_TRIGGERED_TRANSITIONS: StateTransitionType = {
    # (Client state, Server state) -> new states
    # Protocol negotiation
    (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL},
    # Socket shutdown
    (CLOSED, DONE): {SERVER: MUST_CLOSE},
    (CLOSED, IDLE): {SERVER: MUST_CLOSE},
    (ERROR, DONE): {SERVER: MUST_CLOSE},
    (DONE, CLOSED): {CLIENT: MUST_CLOSE},
    (IDLE, CLOSED): {CLIENT: MUST_CLOSE},
    (DONE, ERROR): {CLIENT: MUST_CLOSE},
}
247
+
248
+
249
class ConnectionState:
    """Joint client/server/keep-alive/protocol-switch state machine.

    See the long module comment above for the theory of operation: event
    transitions come from EVENT_TRIGGERED_TRANSITIONS; state-triggered
    transitions come from STATE_TRIGGERED_TRANSITIONS plus the special cases
    coded in _fire_state_triggered_transitions.
    """

    def __init__(self) -> None:
        # Extra bits of state that don't quite fit into the state model.

        # If this is False then it enables the automatic DONE -> MUST_CLOSE
        # transition. Don't set this directly; call .keep_alive_disabled()
        self.keep_alive = True

        # This is a subset of {UPGRADE, CONNECT}, containing the proposals
        # made by the client for switching protocols.
        self.pending_switch_proposals: Set[Type[Sentinel]] = set()

        self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE}

    def process_error(self, role: Type[Sentinel]) -> None:
        """Move ``role`` into the terminal ERROR state."""
        self.states[role] = ERROR
        self._fire_state_triggered_transitions()

    def process_keep_alive_disabled(self) -> None:
        """Record that keep-alive is off (one-way: True -> False only)."""
        self.keep_alive = False
        self._fire_state_triggered_transitions()

    def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None:
        """Record a client proposal to switch protocols (_SWITCH_* sentinel)."""
        self.pending_switch_proposals.add(switch_event)
        self._fire_state_triggered_transitions()

    def process_event(
        self,
        role: Type[Sentinel],
        event_type: Type[Event],
        server_switch_event: Optional[Type[Sentinel]] = None,
    ) -> None:
        """Feed one event into the machine for ``role``.

        ``server_switch_event`` annotates a server event that accepts a
        pending protocol-switch proposal. Raises LocalProtocolError when the
        event is illegal in the current state or no matching proposal is
        pending.
        """
        _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type
        if server_switch_event is not None:
            assert role is SERVER
            if server_switch_event not in self.pending_switch_proposals:
                raise LocalProtocolError(
                    "Received server {} event without a pending proposal".format(
                        server_switch_event
                    )
                )
            _event_type = (event_type, server_switch_event)
        if server_switch_event is None and _event_type is Response:
            # An unannotated Response implicitly denies any pending proposals.
            self.pending_switch_proposals = set()
        self._fire_event_triggered_transitions(role, _event_type)
        # Special case: the server state does get to see Request
        # events.
        if _event_type is Request:
            assert role is CLIENT
            self._fire_event_triggered_transitions(SERVER, (Request, CLIENT))
        self._fire_state_triggered_transitions()

    def _fire_event_triggered_transitions(
        self,
        role: Type[Sentinel],
        event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]],
    ) -> None:
        # Look the transition up in the table; a missing entry means the
        # event is illegal in the current state.
        state = self.states[role]
        try:
            new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type]
        except KeyError:
            event_type = cast(Type[Event], event_type)
            raise LocalProtocolError(
                "can't handle event type {} when role={} and state={}".format(
                    event_type.__name__, role, self.states[role]
                )
            ) from None
        self.states[role] = new_state

    def _fire_state_triggered_transitions(self) -> None:
        # We apply these rules repeatedly until converging on a fixed point
        while True:
            start_states = dict(self.states)

            # It could happen that both these special-case transitions are
            # enabled at the same time:
            #
            #    DONE -> MIGHT_SWITCH_PROTOCOL
            #    DONE -> MUST_CLOSE
            #
            # For example, this will always be true of a HTTP/1.0 client
            # requesting CONNECT.  If this happens, the protocol switch takes
            # priority. From there the client will either go to
            # SWITCHED_PROTOCOL, in which case it's none of our business when
            # they close the connection, or else the server will deny the
            # request, in which case the client will go back to DONE and then
            # from there to MUST_CLOSE.
            if self.pending_switch_proposals:
                if self.states[CLIENT] is DONE:
                    self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL

            if not self.pending_switch_proposals:
                if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL:
                    self.states[CLIENT] = DONE

            if not self.keep_alive:
                for role in (CLIENT, SERVER):
                    if self.states[role] is DONE:
                        self.states[role] = MUST_CLOSE

            # Tabular state-triggered transitions
            joint_state = (self.states[CLIENT], self.states[SERVER])
            changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {})
            self.states.update(changes)

            if self.states == start_states:
                # Fixed point reached
                return

    def start_next_cycle(self) -> None:
        """Reset for the next request/response cycle on a kept-alive connection."""
        if self.states != {CLIENT: DONE, SERVER: DONE}:
            raise LocalProtocolError(
                "not in a reusable state. self.states={}".format(self.states)
            )
        # Can't reach DONE/DONE with any of these active, but still, let's be
        # sure.
        assert self.keep_alive
        assert not self.pending_switch_proposals
        self.states = {CLIENT: IDLE, SERVER: IDLE}
.venv/lib/python3.11/site-packages/h11/_util.py ADDED
@@ -0,0 +1,135 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union
2
+
3
+ __all__ = [
4
+ "ProtocolError",
5
+ "LocalProtocolError",
6
+ "RemoteProtocolError",
7
+ "validate",
8
+ "bytesify",
9
+ ]
10
+
11
+
12
class ProtocolError(Exception):
    """Exception indicating a violation of the HTTP/1.1 protocol.

    This is an abstract base class with two concrete subclasses:
    :exc:`LocalProtocolError`, raised when *you* tried to do something that
    HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, raised when the
    remote peer did. See :ref:`error-handling` for details.

    In addition to the normal :exc:`Exception` features, it has one attribute:

    .. attribute:: error_status_hint

       Suggested HTTP status code a server might use if this error occurred
       while handling a request: a hint for responding to a misbehaving peer
       (:exc:`RemoteProtocolError`), or for how your peer might have
       responded to you (:exc:`LocalProtocolError`). Defaults to 400 Bad
       Request, the generic catch-all for protocol violations.
    """

    def __init__(self, msg: str, error_status_hint: int = 400) -> None:
        # Abstract base: only the concrete subclasses may be instantiated.
        if type(self) is ProtocolError:
            raise TypeError("tried to directly instantiate ProtocolError")
        super().__init__(msg)
        self.error_status_hint = error_status_hint
46
+
47
+
48
+ # Strategy: there are a number of public APIs where a LocalProtocolError can
49
+ # be raised (send(), all the different event constructors, ...), and only one
50
+ # public API where RemoteProtocolError can be raised
51
+ # (receive_data()). Therefore we always raise LocalProtocolError internally,
52
+ # and then receive_data will translate this into a RemoteProtocolError.
53
+ #
54
+ # Internally:
55
+ # LocalProtocolError is the generic "ProtocolError".
56
+ # Externally:
57
+ # LocalProtocolError is for local errors and RemoteProtocolError is for
58
+ # remote errors.
59
class LocalProtocolError(ProtocolError):
    """Protocol violation attributed to the local side.

    Per the error-handling strategy described in the comment above, h11
    always raises this internally; receive_data() converts it to a
    RemoteProtocolError when the remote peer was actually at fault.
    """

    def _reraise_as_remote_protocol_error(self) -> NoReturn:
        # After catching a LocalProtocolError, use this method to re-raise it
        # as a RemoteProtocolError. This method must be called from inside an
        # except: block.
        #
        # An easy way to get an equivalent RemoteProtocolError is just to
        # modify 'self' in place.
        self.__class__ = RemoteProtocolError  # type: ignore
        # But the re-raising is somewhat non-trivial -- you might think that
        # now that we've modified the in-flight exception object, that just
        # doing 'raise' to re-raise it would be enough. But it turns out that
        # this doesn't work, because Python tracks the exception type
        # (exc_info[0]) separately from the exception object (exc_info[1]),
        # and we only modified the latter. So we really do need to re-raise
        # the new type explicitly.
        # On py3, the traceback is part of the exception object, so our
        # in-place modification preserved it and we can just re-raise:
        raise self
78
+
79
+
80
class RemoteProtocolError(ProtocolError):
    """Protocol violation attributed to the remote peer.

    Per the strategy comment above, this is only surfaced from
    receive_data(), which translates internal LocalProtocolErrors into this
    type when the peer is at fault.
    """

    pass
82
+
83
+
84
def validate(
    regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any
) -> Dict[str, bytes]:
    """Fullmatch ``data`` against ``regex`` and return its named groups.

    Raises LocalProtocolError with ``msg`` (formatted with ``format_args``,
    if any were supplied) when the data does not match.
    """
    match = regex.fullmatch(data)
    if match is not None:
        return match.groupdict()
    error_message = msg.format(*format_args) if format_args else msg
    raise LocalProtocolError(error_message)
93
+
94
+
95
+ # Sentinel values
96
+ #
97
+ # - Inherit identity-based comparison and hashing from object
98
+ # - Have a nice repr
99
+ # - Have a *bonus property*: type(sentinel) is sentinel
100
+ #
101
+ # The bonus property is useful if you want to take the return value from
102
+ # next_event() and do some sort of dispatch based on type(event).
103
+
104
+ _T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel")
105
+
106
+
107
class Sentinel(type):
    """Metaclass for sentinel values.

    Sentinels inherit identity-based comparison and hashing from object,
    have a nice repr, and satisfy the bonus property
    ``type(sentinel) is sentinel`` -- handy for dispatching on
    ``type(event)`` values returned by next_event().
    """

    def __new__(
        cls: Type[_T_Sentinel],
        name: str,
        bases: Tuple[type, ...],
        namespace: Dict[str, Any],
        **kwds: Any
    ) -> _T_Sentinel:
        # Sentinels may only derive directly from Sentinel itself.
        assert bases == (Sentinel,)
        sentinel_class = super().__new__(cls, name, bases, namespace, **kwds)
        # Bonus property: make each sentinel class be its own type.
        sentinel_class.__class__ = sentinel_class  # type: ignore
        return sentinel_class

    def __repr__(self) -> str:
        return self.__name__
122
+
123
+
124
+ # Used for methods, request targets, HTTP versions, header names, and header
125
+ # values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always
126
+ # returns bytes.
127
def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes:
    """Coerce methods/targets/versions/header names/values to plain bytes.

    Accepts ascii-only str or any bytes-like object and always returns
    ``bytes``. Rejects ints explicitly (so e.g. a stray status code can't
    silently become ``b'\\x00' * n``).
    """
    # Fast path: already exactly bytes (subclasses still get normalized below).
    if type(s) is bytes:
        return s
    if isinstance(s, int):
        raise TypeError("expected bytes-like object, not int")
    if isinstance(s, str):
        return s.encode("ascii")
    # bytearray / memoryview / bytes subclass -> plain bytes copy.
    return bytes(s)
.venv/lib/python3.11/site-packages/h11/_version.py ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# This file must be kept very simple, because it is consumed from several
# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc.

# We use a simple scheme:
#   1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
# where the +dev versions are never released into the wild, they're just what
# we stick into the VCS in between releases.
#
# This is compatible with PEP 440:
#   http://legacy.python.org/dev/peps/pep-0440/
# via the use of the "local suffix" "+dev", which is disallowed on index
# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we
# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
# 1.0.0.)

# Current h11 release version (a PEP 440 public version string).
__version__ = "0.14.0"
.venv/lib/python3.11/site-packages/h11/_writers.py ADDED
@@ -0,0 +1,145 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Code to write HTTP data
#
# Strategy: each writer takes an event + a write-some-bytes function, which is
# called with the bytes to send.
#
# WRITERS is a dict describing how to pick a writer. It maps states to either:
# - a writer
# - or, for body writers, a dict of framing-dependent writer factories
9
+
10
+ from typing import Any, Callable, Dict, List, Tuple, Type, Union
11
+
12
+ from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response
13
+ from ._headers import Headers
14
+ from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER
15
+ from ._util import LocalProtocolError, Sentinel
16
+
17
+ __all__ = ["WRITERS"]
18
+
19
+ Writer = Callable[[bytes], Any]
20
+
21
+
22
def write_headers(headers: Headers, write: Writer) -> None:
    """Serialize *headers* to the wire, Host first, ending with a blank line.

    RFC 7230: "Since the Host field-value is critical information for
    handling a request, a user agent SHOULD generate Host as the first
    header field following the request-line."
    """
    items = headers._full_items
    # Two passes over the same items: Host header(s) first, then the rest.
    # raw_name preserves the original capitalization of each header name.
    for wants_host in (True, False):
        for raw_name, name, value in items:
            if (name == b"host") is wants_host:
                write(b"%s: %s\r\n" % (raw_name, value))
    write(b"\r\n")
34
+
35
+
36
def write_request(request: Request, write: Writer) -> None:
    """Serialize *request*'s request line and headers (HTTP/1.1 only).

    Raises LocalProtocolError for any other http_version.
    """
    if request.http_version != b"1.1":
        raise LocalProtocolError("I only send HTTP/1.1")
    request_line = b"%s %s HTTP/1.1\r\n" % (request.method, request.target)
    write(request_line)
    write_headers(request.headers, write)
41
+
42
+
43
def write_any_response(
    response: Union[InformationalResponse, Response], write: Writer
) -> None:
    """Serialize the status line and headers of a response-ish event.

    Shared between InformationalResponse and Response. HTTP/1.1 only;
    raises LocalProtocolError otherwise.
    """
    if response.http_version != b"1.1":
        raise LocalProtocolError("I only send HTTP/1.1")
    # We don't bother sending ascii status messages like "OK"; they're
    # optional and ignored by the protocol. (But the space after the numeric
    # status code is mandatory.)
    #
    # XX FIXME: could at least make an effort to pull out the status message
    # from stdlib's http.HTTPStatus table. Or maybe just steal their enums
    # (either by import or copy/paste). We already accept them as status codes
    # since they're of type IntEnum < int.
    status_line = b"HTTP/1.1 %s %s\r\n" % (
        str(response.status_code).encode("ascii"),
        response.reason,
    )
    write(status_line)
    write_headers(response.headers, write)
60
+
61
+
62
class BodyWriter:
    """Base class for body writers: dispatches Data/EndOfMessage events.

    Subclasses implement the framing by overriding send_data/send_eom.
    """

    def __call__(self, event: Event, write: Writer) -> None:
        # Exact-type dispatch (not isinstance): events are concrete classes.
        event_type = type(event)
        if event_type is Data:
            self.send_data(event.data, write)
        elif event_type is EndOfMessage:
            self.send_eom(event.headers, write)
        else:  # pragma: no cover
            assert False

    def send_data(self, data: bytes, write: Writer) -> None:
        # Overridden by subclasses; default is a no-op.
        pass

    def send_eom(self, headers: Headers, write: Writer) -> None:
        # Overridden by subclasses; default is a no-op.
        pass
76
+
77
+
78
+ #
79
+ # These are all careful not to do anything to 'data' except call len(data) and
80
+ # write(data). This allows us to transparently pass-through funny objects,
81
+ # like placeholder objects referring to files on disk that will be sent via
82
+ # sendfile(2).
83
+ #
84
class ContentLengthWriter(BodyWriter):
    """Body framed by a declared Content-Length: exactly *length* bytes."""

    def __init__(self, length: int) -> None:
        # Bytes still owed to the wire; counts down toward zero.
        self._length = length

    def send_data(self, data: bytes, write: Writer) -> None:
        # Careful: only len(data) and write(data) are applied to 'data', so
        # pass-through placeholder objects (e.g. for sendfile) keep working.
        self._length -= len(data)
        if self._length < 0:
            raise LocalProtocolError("Too much data for declared Content-Length")
        write(data)

    def send_eom(self, headers: Headers, write: Writer) -> None:
        if self._length != 0:
            raise LocalProtocolError("Too little data for declared Content-Length")
        if headers:
            raise LocalProtocolError("Content-Length and trailers don't mix")
99
+
100
+
101
class ChunkedWriter(BodyWriter):
    """Transfer-Encoding: chunked framing."""

    def send_data(self, data: bytes, write: Writer) -> None:
        # Encoding 0-length data the naive way would look like the
        # end-of-message marker, so silently skip empty writes instead.
        if not data:
            return
        write(b"%x\r\n" % len(data))
        write(data)
        write(b"\r\n")

    def send_eom(self, headers: Headers, write: Writer) -> None:
        # Terminal zero-length chunk, then trailers (if any) + blank line.
        write(b"0\r\n")
        write_headers(headers, write)
114
+
115
+
116
class Http10Writer(BodyWriter):
    """HTTP/1.0-style body: raw bytes, terminated by connection close."""

    def send_data(self, data: bytes, write: Writer) -> None:
        write(data)

    def send_eom(self, headers: Headers, write: Writer) -> None:
        if headers:
            raise LocalProtocolError("can't send trailers to HTTP/1.0 client")
        # no need to close the socket ourselves, that will be taken care of by
        # Connection: close machinery
125
+
126
+
127
# Type of the WRITERS table: keys are either a (role, state) pair or a bare
# state sentinel; values are either an event-writer function or (for
# SEND_BODY) a dict mapping framing names to BodyWriter subclasses.
WritersType = Dict[
    Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]],
    Union[
        Dict[str, Type[BodyWriter]],
        Callable[[Union[InformationalResponse, Response], Writer], None],
        Callable[[Request, Writer], None],
    ],
]

# Dispatch table consumed by the Connection machinery to pick a writer for
# the current (role, state); SEND_BODY is framing-dependent, so it maps to
# writer factories keyed by framing name instead of a single writer.
WRITERS: WritersType = {
    (CLIENT, IDLE): write_request,
    (SERVER, IDLE): write_any_response,
    (SERVER, SEND_RESPONSE): write_any_response,
    SEND_BODY: {
        "chunked": ChunkedWriter,
        "content-length": ContentLengthWriter,
        "http/1.0": Http10Writer,
    },
}
.venv/lib/python3.11/site-packages/h11/py.typed ADDED
@@ -0,0 +1 @@
 
 
1
+ Marker
.venv/lib/python3.11/site-packages/h11/tests/__init__.py ADDED
File without changes
.venv/lib/python3.11/site-packages/h11/tests/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (182 Bytes). View file
 
.venv/lib/python3.11/site-packages/h11/tests/__pycache__/helpers.cpython-311.pyc ADDED
Binary file (5.1 kB). View file