Add source batch 8/11
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +19 -0
- source/pydantic_extra_types-2.11.0.dist-info/INSTALLER +1 -0
- source/pydantic_extra_types-2.11.0.dist-info/METADATA +87 -0
- source/pydantic_extra_types-2.11.0.dist-info/RECORD +54 -0
- source/pydantic_extra_types-2.11.0.dist-info/WHEEL +4 -0
- source/pydantic_extra_types-2.11.0.dist-info/licenses/LICENSE +21 -0
- source/pydantic_settings-2.13.1.dist-info/INSTALLER +1 -0
- source/pydantic_settings-2.13.1.dist-info/METADATA +63 -0
- source/pydantic_settings-2.13.1.dist-info/RECORD +50 -0
- source/pydantic_settings-2.13.1.dist-info/WHEEL +4 -0
- source/pydantic_settings-2.13.1.dist-info/licenses/LICENSE +21 -0
- source/pydantic_settings/__init__.py +69 -0
- source/pydantic_settings/exceptions.py +4 -0
- source/pydantic_settings/main.py +901 -0
- source/pydantic_settings/py.typed +0 -0
- source/pydantic_settings/sources/__init__.py +84 -0
- source/pydantic_settings/sources/base.py +579 -0
- source/pydantic_settings/sources/providers/__init__.py +45 -0
- source/pydantic_settings/sources/providers/aws.py +86 -0
- source/pydantic_settings/sources/providers/azure.py +159 -0
- source/pydantic_settings/sources/providers/cli.py +1522 -0
- source/pydantic_settings/sources/providers/dotenv.py +170 -0
- source/pydantic_settings/sources/providers/env.py +310 -0
- source/pydantic_settings/sources/providers/gcp.py +241 -0
- source/pydantic_settings/sources/providers/json.py +48 -0
- source/pydantic_settings/sources/providers/nested_secrets.py +166 -0
- source/pydantic_settings/sources/providers/pyproject.py +62 -0
- source/pydantic_settings/sources/providers/secrets.py +132 -0
- source/pydantic_settings/sources/providers/toml.py +67 -0
- source/pydantic_settings/sources/providers/yaml.py +130 -0
- source/pydantic_settings/sources/types.py +99 -0
- source/pydantic_settings/sources/utils.py +283 -0
- source/pydantic_settings/utils.py +43 -0
- source/pydantic_settings/version.py +1 -0
- source/pygments-2.19.2.dist-info/INSTALLER +1 -0
- source/pygments-2.19.2.dist-info/METADATA +58 -0
- source/pygments-2.19.2.dist-info/RECORD +684 -0
- source/pygments-2.19.2.dist-info/WHEEL +4 -0
- source/pygments-2.19.2.dist-info/entry_points.txt +2 -0
- source/pygments-2.19.2.dist-info/licenses/AUTHORS +291 -0
- source/pygments-2.19.2.dist-info/licenses/LICENSE +25 -0
- source/pygments/__init__.py +82 -0
- source/pygments/__main__.py +17 -0
- source/pygments/cmdline.py +668 -0
- source/pygments/console.py +70 -0
- source/pygments/filter.py +70 -0
- source/pygments/filters/__init__.py +940 -0
- source/pygments/formatter.py +129 -0
- source/pygments/formatters/__init__.py +157 -0
- source/pygments/formatters/_mapping.py +23 -0
.gitattributes
CHANGED
|
@@ -227,3 +227,22 @@ source/pycountry/locales/uk/LC_MESSAGES/iso639-3.mo filter=lfs diff=lfs merge=lf
|
|
| 227 |
source/pycountry/locales/vi/LC_MESSAGES/iso3166-2.mo filter=lfs diff=lfs merge=lfs -text
|
| 228 |
source/pycountry/locales/zh_CN/LC_MESSAGES/iso3166-2.mo filter=lfs diff=lfs merge=lfs -text
|
| 229 |
source/pydantic_core/_pydantic_core.cpython-312-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 227 |
source/pycountry/locales/vi/LC_MESSAGES/iso3166-2.mo filter=lfs diff=lfs merge=lfs -text
|
| 228 |
source/pycountry/locales/zh_CN/LC_MESSAGES/iso3166-2.mo filter=lfs diff=lfs merge=lfs -text
|
| 229 |
source/pydantic_core/_pydantic_core.cpython-312-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 230 |
+
source/pyzmq.libs/libsodium-19479d6d.so.26.2.0 filter=lfs diff=lfs merge=lfs -text
|
| 231 |
+
source/pyzmq.libs/libzmq-7b073b3d.so.5.2.5 filter=lfs diff=lfs merge=lfs -text
|
| 232 |
+
source/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/_http_parser.cpython-312-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 233 |
+
source/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/_http_writer.cpython-312-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 234 |
+
source/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/_websocket/mask.cpython-312-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 235 |
+
source/ray/_private/runtime_env/agent/thirdparty_files/aiohttp/_websocket/reader_c.cpython-312-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 236 |
+
source/ray/_private/runtime_env/agent/thirdparty_files/frozenlist/_frozenlist.cpython-312-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 237 |
+
source/ray/_private/runtime_env/agent/thirdparty_files/multidict/_multidict.cpython-312-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 238 |
+
source/ray/_private/runtime_env/agent/thirdparty_files/propcache/_helpers_c.cpython-312-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 239 |
+
source/ray/_private/runtime_env/agent/thirdparty_files/yarl/_quoting_c.cpython-312-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 240 |
+
source/ray/_raylet.so filter=lfs diff=lfs merge=lfs -text
|
| 241 |
+
source/ray/core/libjemalloc.so filter=lfs diff=lfs merge=lfs -text
|
| 242 |
+
source/ray/core/src/ray/gcs/gcs_server filter=lfs diff=lfs merge=lfs -text
|
| 243 |
+
source/ray/core/src/ray/raylet/raylet filter=lfs diff=lfs merge=lfs -text
|
| 244 |
+
source/ray/jars/ray_dist.jar filter=lfs diff=lfs merge=lfs -text
|
| 245 |
+
source/ray/thirdparty_files/psutil/_psutil_linux.abi3.so filter=lfs diff=lfs merge=lfs -text
|
| 246 |
+
source/regex/_regex.cpython-312-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 247 |
+
source/rignore/rignore.cpython-312-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 248 |
+
source/rpds/rpds.cpython-312-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
source/pydantic_extra_types-2.11.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
source/pydantic_extra_types-2.11.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.4
|
| 2 |
+
Name: pydantic-extra-types
|
| 3 |
+
Version: 2.11.0
|
| 4 |
+
Summary: Extra Pydantic types.
|
| 5 |
+
Project-URL: Homepage, https://github.com/pydantic/pydantic-extra-types
|
| 6 |
+
Project-URL: Source, https://github.com/pydantic/pydantic-extra-types
|
| 7 |
+
Project-URL: Changelog, https://github.com/pydantic/pydantic-extra-types/releases
|
| 8 |
+
Project-URL: Documentation, https://docs.pydantic.dev/latest/
|
| 9 |
+
Author-email: Samuel Colvin <s@muelcolvin.com>, Yasser Tahiri <hello@yezz.me>
|
| 10 |
+
License-Expression: MIT
|
| 11 |
+
License-File: LICENSE
|
| 12 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 13 |
+
Classifier: Environment :: Console
|
| 14 |
+
Classifier: Environment :: MacOS X
|
| 15 |
+
Classifier: Framework :: Pydantic
|
| 16 |
+
Classifier: Framework :: Pydantic :: 2
|
| 17 |
+
Classifier: Intended Audience :: Developers
|
| 18 |
+
Classifier: Intended Audience :: Information Technology
|
| 19 |
+
Classifier: Intended Audience :: System Administrators
|
| 20 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 21 |
+
Classifier: Operating System :: POSIX :: Linux
|
| 22 |
+
Classifier: Operating System :: Unix
|
| 23 |
+
Classifier: Programming Language :: Python
|
| 24 |
+
Classifier: Programming Language :: Python :: 3
|
| 25 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 26 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 27 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 28 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 29 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 30 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 31 |
+
Classifier: Programming Language :: Python :: 3.14
|
| 32 |
+
Classifier: Topic :: Internet
|
| 33 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
| 34 |
+
Requires-Python: >=3.9
|
| 35 |
+
Requires-Dist: pydantic>=2.5.2
|
| 36 |
+
Requires-Dist: typing-extensions
|
| 37 |
+
Provides-Extra: all
|
| 38 |
+
Requires-Dist: cron-converter>=1.2.2; extra == 'all'
|
| 39 |
+
Requires-Dist: pendulum<4.0.0,>=3.0.0; extra == 'all'
|
| 40 |
+
Requires-Dist: phonenumbers<10,>=8; extra == 'all'
|
| 41 |
+
Requires-Dist: pycountry>=23; extra == 'all'
|
| 42 |
+
Requires-Dist: pymongo<5.0.0,>=4.0.0; extra == 'all'
|
| 43 |
+
Requires-Dist: python-ulid<2,>=1; (python_version < '3.9') and extra == 'all'
|
| 44 |
+
Requires-Dist: python-ulid<4,>=1; (python_version >= '3.9') and extra == 'all'
|
| 45 |
+
Requires-Dist: pytz>=2024.1; extra == 'all'
|
| 46 |
+
Requires-Dist: semver>=3.0.2; extra == 'all'
|
| 47 |
+
Requires-Dist: semver~=3.0.2; extra == 'all'
|
| 48 |
+
Requires-Dist: tzdata>=2024.1; extra == 'all'
|
| 49 |
+
Provides-Extra: cron
|
| 50 |
+
Requires-Dist: cron-converter>=1.2.2; extra == 'cron'
|
| 51 |
+
Provides-Extra: pendulum
|
| 52 |
+
Requires-Dist: pendulum<4.0.0,>=3.0.0; extra == 'pendulum'
|
| 53 |
+
Provides-Extra: phonenumbers
|
| 54 |
+
Requires-Dist: phonenumbers<10,>=8; extra == 'phonenumbers'
|
| 55 |
+
Provides-Extra: pycountry
|
| 56 |
+
Requires-Dist: pycountry>=23; extra == 'pycountry'
|
| 57 |
+
Provides-Extra: python-ulid
|
| 58 |
+
Requires-Dist: python-ulid<2,>=1; (python_version < '3.9') and extra == 'python-ulid'
|
| 59 |
+
Requires-Dist: python-ulid<4,>=1; (python_version >= '3.9') and extra == 'python-ulid'
|
| 60 |
+
Provides-Extra: semver
|
| 61 |
+
Requires-Dist: semver>=3.0.2; extra == 'semver'
|
| 62 |
+
Description-Content-Type: text/markdown
|
| 63 |
+
|
| 64 |
+
# Pydantic Extra Types
|
| 65 |
+
|
| 66 |
+
[](https://github.com/pydantic/pydantic-extra-types/actions/workflows/ci.yml)
|
| 67 |
+
[](https://codecov.io/gh/pydantic/pydantic-extra-types)
|
| 68 |
+
[](https://pypi.python.org/pypi/pydantic-extra-types)
|
| 69 |
+
[](https://github.com/pydantic/pydantic-extra-types/blob/main/LICENSE)
|
| 70 |
+
|
| 71 |
+
A place for pydantic types that probably shouldn't exist in the main pydantic lib.
|
| 72 |
+
|
| 73 |
+
See [pydantic/pydantic#5012](https://github.com/pydantic/pydantic/issues/5012) for more info.
|
| 74 |
+
|
| 75 |
+
## Installation
|
| 76 |
+
|
| 77 |
+
Install this library with the desired extras dependencies as listed in [project.optional-dependencies](./pyproject.toml).
|
| 78 |
+
|
| 79 |
+
For example, if pendulum support was desired:
|
| 80 |
+
|
| 81 |
+
```shell
|
| 82 |
+
# via uv
|
| 83 |
+
$ uv add "pydantic-extra-types[pendulum]"
|
| 84 |
+
|
| 85 |
+
# via pip
|
| 86 |
+
$ pip install -U "pydantic-extra-types[pendulum]"
|
| 87 |
+
```
|
source/pydantic_extra_types-2.11.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
pydantic_extra_types-2.11.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
pydantic_extra_types-2.11.0.dist-info/METADATA,sha256=l8kK8z1-Tyn3ZAT3jIIovFYHPCBV4TTtQGKhMwf5RNI,3991
|
| 3 |
+
pydantic_extra_types-2.11.0.dist-info/RECORD,,
|
| 4 |
+
pydantic_extra_types-2.11.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
| 5 |
+
pydantic_extra_types-2.11.0.dist-info/licenses/LICENSE,sha256=oJu7u0a-Y-ie85pab3LWENpHsqa2ug7vFEIyfxzyPus,1103
|
| 6 |
+
pydantic_extra_types/__init__.py,sha256=IEKg3RTc1FJK4LSR4R_Rck7Ox42HCTJNrGuBmZNMfYI,23
|
| 7 |
+
pydantic_extra_types/__pycache__/__init__.cpython-312.pyc,,
|
| 8 |
+
pydantic_extra_types/__pycache__/color.cpython-312.pyc,,
|
| 9 |
+
pydantic_extra_types/__pycache__/coordinate.cpython-312.pyc,,
|
| 10 |
+
pydantic_extra_types/__pycache__/country.cpython-312.pyc,,
|
| 11 |
+
pydantic_extra_types/__pycache__/cron.cpython-312.pyc,,
|
| 12 |
+
pydantic_extra_types/__pycache__/currency_code.cpython-312.pyc,,
|
| 13 |
+
pydantic_extra_types/__pycache__/domain.cpython-312.pyc,,
|
| 14 |
+
pydantic_extra_types/__pycache__/epoch.cpython-312.pyc,,
|
| 15 |
+
pydantic_extra_types/__pycache__/isbn.cpython-312.pyc,,
|
| 16 |
+
pydantic_extra_types/__pycache__/language_code.cpython-312.pyc,,
|
| 17 |
+
pydantic_extra_types/__pycache__/mac_address.cpython-312.pyc,,
|
| 18 |
+
pydantic_extra_types/__pycache__/mime_types.cpython-312.pyc,,
|
| 19 |
+
pydantic_extra_types/__pycache__/mongo_object_id.cpython-312.pyc,,
|
| 20 |
+
pydantic_extra_types/__pycache__/path.cpython-312.pyc,,
|
| 21 |
+
pydantic_extra_types/__pycache__/payment.cpython-312.pyc,,
|
| 22 |
+
pydantic_extra_types/__pycache__/pendulum_dt.cpython-312.pyc,,
|
| 23 |
+
pydantic_extra_types/__pycache__/phone_numbers.cpython-312.pyc,,
|
| 24 |
+
pydantic_extra_types/__pycache__/routing_number.cpython-312.pyc,,
|
| 25 |
+
pydantic_extra_types/__pycache__/s3.cpython-312.pyc,,
|
| 26 |
+
pydantic_extra_types/__pycache__/script_code.cpython-312.pyc,,
|
| 27 |
+
pydantic_extra_types/__pycache__/semantic_version.cpython-312.pyc,,
|
| 28 |
+
pydantic_extra_types/__pycache__/semver.cpython-312.pyc,,
|
| 29 |
+
pydantic_extra_types/__pycache__/timezone_name.cpython-312.pyc,,
|
| 30 |
+
pydantic_extra_types/__pycache__/ulid.cpython-312.pyc,,
|
| 31 |
+
pydantic_extra_types/color.py,sha256=QZFWt0SBuCPG9Xnljwwf6Uw4h9iqIguNbJSHBArzb94,20855
|
| 32 |
+
pydantic_extra_types/coordinate.py,sha256=uCTOiiPkgPdN3b7MY39946gwT0yn14OwzCtLQUcsFHw,7717
|
| 33 |
+
pydantic_extra_types/country.py,sha256=GPEDQzaV7kDaqeG8lM5iD6390QxetcU53i75d7yyP7w,9011
|
| 34 |
+
pydantic_extra_types/cron.py,sha256=zBBMrZgeRCbmehmQXhtzyp3fEhSAYI8ASqbfbh5nf4k,4851
|
| 35 |
+
pydantic_extra_types/currency_code.py,sha256=vaDTYuqG3-EpK_q9auJZGKAYMIqVX6Ja9Ao33yO8OjM,6218
|
| 36 |
+
pydantic_extra_types/domain.py,sha256=mXi0juayH22tHeXminEv0dW85kMCW_bnN4hkQpA5Kec,1978
|
| 37 |
+
pydantic_extra_types/epoch.py,sha256=9Fzn70W_vcicX9AV0ULPCO85QktUVqi7b2AAzvFtHGE,2710
|
| 38 |
+
pydantic_extra_types/isbn.py,sha256=yOukuBK-PHzqBt7jZviA1RS1uWaXeqp8ctRuL-BHSHI,4949
|
| 39 |
+
pydantic_extra_types/language_code.py,sha256=DH2knjn0BLz492sbEH_ltIi_bFTrWo_-SZQDVHVeMk8,12273
|
| 40 |
+
pydantic_extra_types/mac_address.py,sha256=0vMBDSuq3qUbxaE6Bry1Ozfs-7xU-wlzW5h4f96gBDY,3589
|
| 41 |
+
pydantic_extra_types/mime_types.py,sha256=_BRSO7cQX9b-_5WGIXSJ7zk0eVABENBqb4JxWsCN4-Y,151665
|
| 42 |
+
pydantic_extra_types/mongo_object_id.py,sha256=yfOjzTvCgXGO4VvKksPcPRAx8pFvhmNKZ3mVXxScFgU,2274
|
| 43 |
+
pydantic_extra_types/path.py,sha256=rcmKyebDIaiaKU7sR1kkJcaQ7KDpV_4kUQO04lzysxg,2049
|
| 44 |
+
pydantic_extra_types/payment.py,sha256=hSU1-vFL3-efCOoG_du4J6ZI8brjg85UMRs8V8LGooU,7397
|
| 45 |
+
pydantic_extra_types/pendulum_dt.py,sha256=QHGUlc1fq_ZL3wepT_ZQ24laoKPygN6DONWc5Fk32Q8,12937
|
| 46 |
+
pydantic_extra_types/phone_numbers.py,sha256=wmEW_F7JXjhz8l8DUB0_qizpFQjwNfkyFk0m8reI9ws,8573
|
| 47 |
+
pydantic_extra_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 48 |
+
pydantic_extra_types/routing_number.py,sha256=SkrixXaP4KPdZmJT8TOB-Pr5UDHKPPQQIJPS8LkM4UE,3077
|
| 49 |
+
pydantic_extra_types/s3.py,sha256=ulm_WRwUlauXWYEXvwcZdA-gco6dkLncuC_VxzY2s7s,2064
|
| 50 |
+
pydantic_extra_types/script_code.py,sha256=70dKnyaVU1uQqygBsU5ggEpbwF54XEYD1O4UzfQwQWs,2967
|
| 51 |
+
pydantic_extra_types/semantic_version.py,sha256=P4PKrHIsNYnl8J9j-uB06RUpcpfpCuUvy2F7aAY5bRc,2156
|
| 52 |
+
pydantic_extra_types/semver.py,sha256=jO_FTXafExXHEfUqDq8Ma_HQ-Jp4JALV1yqciqDhGJ0,2192
|
| 53 |
+
pydantic_extra_types/timezone_name.py,sha256=VcrNJCQONFjLwnOAi1OwqGxicAAfe1eB8yUA6I01qNA,6111
|
| 54 |
+
pydantic_extra_types/ulid.py,sha256=ODBJmeQ4o436ly5d8yFWSL6c7ZbSQ8P8kJXrh8HvVI8,2418
|
source/pydantic_extra_types-2.11.0.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: hatchling 1.28.0
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
source/pydantic_extra_types-2.11.0.dist-info/licenses/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
The MIT License (MIT)
|
| 2 |
+
|
| 3 |
+
Copyright (c) 2023 Samuel Colvin and other contributors
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 6 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 7 |
+
in the Software without restriction, including without limitation the rights
|
| 8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 9 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 10 |
+
furnished to do so, subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included in all
|
| 13 |
+
copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 21 |
+
SOFTWARE.
|
source/pydantic_settings-2.13.1.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
source/pydantic_settings-2.13.1.dist-info/METADATA
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.4
|
| 2 |
+
Name: pydantic-settings
|
| 3 |
+
Version: 2.13.1
|
| 4 |
+
Summary: Settings management using Pydantic
|
| 5 |
+
Project-URL: Homepage, https://github.com/pydantic/pydantic-settings
|
| 6 |
+
Project-URL: Funding, https://github.com/sponsors/samuelcolvin
|
| 7 |
+
Project-URL: Source, https://github.com/pydantic/pydantic-settings
|
| 8 |
+
Project-URL: Changelog, https://github.com/pydantic/pydantic-settings/releases
|
| 9 |
+
Project-URL: Documentation, https://docs.pydantic.dev/dev-v2/concepts/pydantic_settings/
|
| 10 |
+
Author-email: Samuel Colvin <s@muelcolvin.com>, Eric Jolibois <em.jolibois@gmail.com>, Hasan Ramezani <hasan.r67@gmail.com>
|
| 11 |
+
License-Expression: MIT
|
| 12 |
+
License-File: LICENSE
|
| 13 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 14 |
+
Classifier: Environment :: Console
|
| 15 |
+
Classifier: Environment :: MacOS X
|
| 16 |
+
Classifier: Framework :: Pydantic
|
| 17 |
+
Classifier: Framework :: Pydantic :: 2
|
| 18 |
+
Classifier: Intended Audience :: Developers
|
| 19 |
+
Classifier: Intended Audience :: Information Technology
|
| 20 |
+
Classifier: Intended Audience :: System Administrators
|
| 21 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 22 |
+
Classifier: Operating System :: POSIX :: Linux
|
| 23 |
+
Classifier: Operating System :: Unix
|
| 24 |
+
Classifier: Programming Language :: Python
|
| 25 |
+
Classifier: Programming Language :: Python :: 3
|
| 26 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 27 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 28 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 29 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 30 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 31 |
+
Classifier: Programming Language :: Python :: 3.14
|
| 32 |
+
Classifier: Topic :: Internet
|
| 33 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
| 34 |
+
Requires-Python: >=3.10
|
| 35 |
+
Requires-Dist: pydantic>=2.7.0
|
| 36 |
+
Requires-Dist: python-dotenv>=0.21.0
|
| 37 |
+
Requires-Dist: typing-inspection>=0.4.0
|
| 38 |
+
Provides-Extra: aws-secrets-manager
|
| 39 |
+
Requires-Dist: boto3-stubs[secretsmanager]; extra == 'aws-secrets-manager'
|
| 40 |
+
Requires-Dist: boto3>=1.35.0; extra == 'aws-secrets-manager'
|
| 41 |
+
Provides-Extra: azure-key-vault
|
| 42 |
+
Requires-Dist: azure-identity>=1.16.0; extra == 'azure-key-vault'
|
| 43 |
+
Requires-Dist: azure-keyvault-secrets>=4.8.0; extra == 'azure-key-vault'
|
| 44 |
+
Provides-Extra: gcp-secret-manager
|
| 45 |
+
Requires-Dist: google-cloud-secret-manager>=2.23.1; extra == 'gcp-secret-manager'
|
| 46 |
+
Provides-Extra: toml
|
| 47 |
+
Requires-Dist: tomli>=2.0.1; extra == 'toml'
|
| 48 |
+
Provides-Extra: yaml
|
| 49 |
+
Requires-Dist: pyyaml>=6.0.1; extra == 'yaml'
|
| 50 |
+
Description-Content-Type: text/markdown
|
| 51 |
+
|
| 52 |
+
# pydantic-settings
|
| 53 |
+
|
| 54 |
+
[](https://github.com/pydantic/pydantic-settings/actions/workflows/ci.yml?query=branch%3Amain)
|
| 55 |
+
[](https://codecov.io/gh/pydantic/pydantic-settings)
|
| 56 |
+
[](https://pypi.python.org/pypi/pydantic-settings)
|
| 57 |
+
[](https://github.com/pydantic/pydantic-settings/blob/main/LICENSE)
|
| 58 |
+
[](https://pepy.tech/project/pydantic-settings)
|
| 59 |
+
[](https://github.com/pydantic/pydantic-settings)
|
| 60 |
+
|
| 61 |
+
Settings management using Pydantic.
|
| 62 |
+
|
| 63 |
+
See [documentation](https://docs.pydantic.dev/latest/concepts/pydantic_settings/) for more details.
|
source/pydantic_settings-2.13.1.dist-info/RECORD
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
pydantic_settings-2.13.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
pydantic_settings-2.13.1.dist-info/METADATA,sha256=-DWfWghFDnoUbQ0oR7n_hp2GvwY_uKSogbfEwArRbHk,3395
|
| 3 |
+
pydantic_settings-2.13.1.dist-info/RECORD,,
|
| 4 |
+
pydantic_settings-2.13.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
| 5 |
+
pydantic_settings-2.13.1.dist-info/licenses/LICENSE,sha256=6zVadT4CA0bTPYO_l2kTW4n8YQVorFMaAcKVvO5_2Zg,1103
|
| 6 |
+
pydantic_settings/__init__.py,sha256=_2aYjmOukI0KsXbkdAsnnNCTcwxDl8qSi5X_djGJ9vc,1707
|
| 7 |
+
pydantic_settings/__pycache__/__init__.cpython-312.pyc,,
|
| 8 |
+
pydantic_settings/__pycache__/exceptions.cpython-312.pyc,,
|
| 9 |
+
pydantic_settings/__pycache__/main.cpython-312.pyc,,
|
| 10 |
+
pydantic_settings/__pycache__/utils.cpython-312.pyc,,
|
| 11 |
+
pydantic_settings/__pycache__/version.cpython-312.pyc,,
|
| 12 |
+
pydantic_settings/exceptions.py,sha256=SHLrIBHeFltPMc8abiQxw-MGqEadlYI-VdLELiZtWPU,97
|
| 13 |
+
pydantic_settings/main.py,sha256=IaXqxUvbcIboeOc6A5EGTz6Gy7Ua1ZqXuexrO98baV0,43312
|
| 14 |
+
pydantic_settings/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 15 |
+
pydantic_settings/sources/__init__.py,sha256=iCWayST4iRK5Qkzlc2n_OsIPjCf8WZQOkq30N7MQ7io,2306
|
| 16 |
+
pydantic_settings/sources/__pycache__/__init__.cpython-312.pyc,,
|
| 17 |
+
pydantic_settings/sources/__pycache__/base.cpython-312.pyc,,
|
| 18 |
+
pydantic_settings/sources/__pycache__/types.cpython-312.pyc,,
|
| 19 |
+
pydantic_settings/sources/__pycache__/utils.cpython-312.pyc,,
|
| 20 |
+
pydantic_settings/sources/base.py,sha256=JzekCIMHXP1_1UPbZh6WyqlJgCI2EXNO7j4-zwF3Hwk,23462
|
| 21 |
+
pydantic_settings/sources/providers/__init__.py,sha256=KfYerDF3UC-0aLPc29KLuIYomPxYksryLFSPBVrZXSg,1281
|
| 22 |
+
pydantic_settings/sources/providers/__pycache__/__init__.cpython-312.pyc,,
|
| 23 |
+
pydantic_settings/sources/providers/__pycache__/aws.cpython-312.pyc,,
|
| 24 |
+
pydantic_settings/sources/providers/__pycache__/azure.cpython-312.pyc,,
|
| 25 |
+
pydantic_settings/sources/providers/__pycache__/cli.cpython-312.pyc,,
|
| 26 |
+
pydantic_settings/sources/providers/__pycache__/dotenv.cpython-312.pyc,,
|
| 27 |
+
pydantic_settings/sources/providers/__pycache__/env.cpython-312.pyc,,
|
| 28 |
+
pydantic_settings/sources/providers/__pycache__/gcp.cpython-312.pyc,,
|
| 29 |
+
pydantic_settings/sources/providers/__pycache__/json.cpython-312.pyc,,
|
| 30 |
+
pydantic_settings/sources/providers/__pycache__/nested_secrets.cpython-312.pyc,,
|
| 31 |
+
pydantic_settings/sources/providers/__pycache__/pyproject.cpython-312.pyc,,
|
| 32 |
+
pydantic_settings/sources/providers/__pycache__/secrets.cpython-312.pyc,,
|
| 33 |
+
pydantic_settings/sources/providers/__pycache__/toml.cpython-312.pyc,,
|
| 34 |
+
pydantic_settings/sources/providers/__pycache__/yaml.cpython-312.pyc,,
|
| 35 |
+
pydantic_settings/sources/providers/aws.py,sha256=dj4fgS2R9bbCsqIi4vYbyssPxhe47FWkQSqprjT_tOA,2729
|
| 36 |
+
pydantic_settings/sources/providers/azure.py,sha256=Qhf7IR7p0177NBwiLih6vTiMwWatgpP5EfQu2EI3KiA,5584
|
| 37 |
+
pydantic_settings/sources/providers/cli.py,sha256=XdxBJHz60RbxsN41dM1vNEv_tfM_9-xjNQHRn3fk8tw,70951
|
| 38 |
+
pydantic_settings/sources/providers/dotenv.py,sha256=biseN85xsyiqgyFs_DCr16dmVI5ScPD0ANdnQT9uB3A,6062
|
| 39 |
+
pydantic_settings/sources/providers/env.py,sha256=N3rGQLlyMGWPqIlJkr8kH41sarwFn5HZIuD_KCuX-Mo,12482
|
| 40 |
+
pydantic_settings/sources/providers/gcp.py,sha256=Y_5sa0ig-zN6MwVwRzAABNj87bN9nDJsLhXGCBU94aE,9962
|
| 41 |
+
pydantic_settings/sources/providers/json.py,sha256=H0BpGTSkS0V9H59jr0ZTp_an2kLCSfef0TqwJuHY0iM,1492
|
| 42 |
+
pydantic_settings/sources/providers/nested_secrets.py,sha256=9vpesWyl4fssfbcalPqjjoiCr1hvi1ikexFwH2UqgPo,6622
|
| 43 |
+
pydantic_settings/sources/providers/pyproject.py,sha256=zSQsV3-jtZhiLm3YlrlYoE2__tZBazp0KjQyKLNyLr0,2052
|
| 44 |
+
pydantic_settings/sources/providers/secrets.py,sha256=k5CFjS6ImQH4mP_bTaVJ3Iq8RF_ul0l9FEUPJUY8YLk,4470
|
| 45 |
+
pydantic_settings/sources/providers/toml.py,sha256=xySqX4H--8E7yWq49SXzAK-BDPMaW-evfGCgQ3Bsq9g,1883
|
| 46 |
+
pydantic_settings/sources/providers/yaml.py,sha256=pCZ-YDsjVyQIePEIqoPplIVtL9Vrr6gNAvC7JaFaR2w,4777
|
| 47 |
+
pydantic_settings/sources/types.py,sha256=USO_neh9hE5tOWRdJE2mD9olIc7feiV6i4zYMbI-U9g,1921
|
| 48 |
+
pydantic_settings/sources/utils.py,sha256=Xo1dlxGgYuRmr_zdjDW_HMyoIT9MA7J1oglmPetn_Bg,10057
|
| 49 |
+
pydantic_settings/utils.py,sha256=8jXay93JWt26z12XORLcgGMPCndKVDJVPWtQNt4JZ3Q,1353
|
| 50 |
+
pydantic_settings/version.py,sha256=hCaqYhF5zWECzLn4r40RCAiIqvptDeoy_pjU9PLXces,19
|
source/pydantic_settings-2.13.1.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: hatchling 1.28.0
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
source/pydantic_settings-2.13.1.dist-info/licenses/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
The MIT License (MIT)
|
| 2 |
+
|
| 3 |
+
Copyright (c) 2022 Samuel Colvin and other contributors
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 6 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 7 |
+
in the Software without restriction, including without limitation the rights
|
| 8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 9 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 10 |
+
furnished to do so, subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included in all
|
| 13 |
+
copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 21 |
+
SOFTWARE.
|
source/pydantic_settings/__init__.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .exceptions import SettingsError
|
| 2 |
+
from .main import BaseSettings, CliApp, SettingsConfigDict
|
| 3 |
+
from .sources import (
|
| 4 |
+
CLI_SUPPRESS,
|
| 5 |
+
AWSSecretsManagerSettingsSource,
|
| 6 |
+
AzureKeyVaultSettingsSource,
|
| 7 |
+
CliDualFlag,
|
| 8 |
+
CliExplicitFlag,
|
| 9 |
+
CliImplicitFlag,
|
| 10 |
+
CliMutuallyExclusiveGroup,
|
| 11 |
+
CliPositionalArg,
|
| 12 |
+
CliSettingsSource,
|
| 13 |
+
CliSubCommand,
|
| 14 |
+
CliSuppress,
|
| 15 |
+
CliToggleFlag,
|
| 16 |
+
CliUnknownArgs,
|
| 17 |
+
DotEnvSettingsSource,
|
| 18 |
+
EnvSettingsSource,
|
| 19 |
+
ForceDecode,
|
| 20 |
+
GoogleSecretManagerSettingsSource,
|
| 21 |
+
InitSettingsSource,
|
| 22 |
+
JsonConfigSettingsSource,
|
| 23 |
+
NestedSecretsSettingsSource,
|
| 24 |
+
NoDecode,
|
| 25 |
+
PydanticBaseSettingsSource,
|
| 26 |
+
PyprojectTomlConfigSettingsSource,
|
| 27 |
+
SecretsSettingsSource,
|
| 28 |
+
TomlConfigSettingsSource,
|
| 29 |
+
YamlConfigSettingsSource,
|
| 30 |
+
get_subcommand,
|
| 31 |
+
)
|
| 32 |
+
from .version import VERSION
|
| 33 |
+
|
| 34 |
+
__all__ = (
|
| 35 |
+
'CLI_SUPPRESS',
|
| 36 |
+
'AWSSecretsManagerSettingsSource',
|
| 37 |
+
'AzureKeyVaultSettingsSource',
|
| 38 |
+
'BaseSettings',
|
| 39 |
+
'CliApp',
|
| 40 |
+
'CliExplicitFlag',
|
| 41 |
+
'CliImplicitFlag',
|
| 42 |
+
'CliToggleFlag',
|
| 43 |
+
'CliDualFlag',
|
| 44 |
+
'CliMutuallyExclusiveGroup',
|
| 45 |
+
'CliPositionalArg',
|
| 46 |
+
'CliSettingsSource',
|
| 47 |
+
'CliSubCommand',
|
| 48 |
+
'CliSuppress',
|
| 49 |
+
'CliUnknownArgs',
|
| 50 |
+
'DotEnvSettingsSource',
|
| 51 |
+
'EnvSettingsSource',
|
| 52 |
+
'ForceDecode',
|
| 53 |
+
'GoogleSecretManagerSettingsSource',
|
| 54 |
+
'InitSettingsSource',
|
| 55 |
+
'JsonConfigSettingsSource',
|
| 56 |
+
'NestedSecretsSettingsSource',
|
| 57 |
+
'NoDecode',
|
| 58 |
+
'PydanticBaseSettingsSource',
|
| 59 |
+
'PyprojectTomlConfigSettingsSource',
|
| 60 |
+
'SecretsSettingsSource',
|
| 61 |
+
'SettingsConfigDict',
|
| 62 |
+
'SettingsError',
|
| 63 |
+
'TomlConfigSettingsSource',
|
| 64 |
+
'YamlConfigSettingsSource',
|
| 65 |
+
'__version__',
|
| 66 |
+
'get_subcommand',
|
| 67 |
+
)
|
| 68 |
+
|
| 69 |
+
__version__ = VERSION
|
source/pydantic_settings/exceptions.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
class SettingsError(ValueError):
    """Base exception for settings-related errors.

    Raised by pydantic-settings sources and helpers when settings values
    cannot be loaded or parsed. Subclasses `ValueError` so existing callers
    that catch `ValueError` keep working.
    """
|
source/pydantic_settings/main.py
ADDED
|
@@ -0,0 +1,901 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations as _annotations
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import inspect
|
| 5 |
+
import re
|
| 6 |
+
import threading
|
| 7 |
+
import warnings
|
| 8 |
+
from argparse import Namespace
|
| 9 |
+
from collections.abc import Mapping
|
| 10 |
+
from types import SimpleNamespace
|
| 11 |
+
from typing import Any, ClassVar, Literal, TextIO, TypeVar, cast
|
| 12 |
+
|
| 13 |
+
from pydantic import ConfigDict
|
| 14 |
+
from pydantic._internal._config import config_keys
|
| 15 |
+
from pydantic._internal._signature import _field_name_for_signature
|
| 16 |
+
from pydantic._internal._utils import deep_update, is_model_class
|
| 17 |
+
from pydantic.dataclasses import is_pydantic_dataclass
|
| 18 |
+
from pydantic.main import BaseModel
|
| 19 |
+
|
| 20 |
+
from .exceptions import SettingsError
|
| 21 |
+
from .sources import (
|
| 22 |
+
ENV_FILE_SENTINEL,
|
| 23 |
+
CliSettingsSource,
|
| 24 |
+
DefaultSettingsSource,
|
| 25 |
+
DotEnvSettingsSource,
|
| 26 |
+
DotenvType,
|
| 27 |
+
EnvPrefixTarget,
|
| 28 |
+
EnvSettingsSource,
|
| 29 |
+
InitSettingsSource,
|
| 30 |
+
JsonConfigSettingsSource,
|
| 31 |
+
PathType,
|
| 32 |
+
PydanticBaseSettingsSource,
|
| 33 |
+
PydanticModel,
|
| 34 |
+
PyprojectTomlConfigSettingsSource,
|
| 35 |
+
SecretsSettingsSource,
|
| 36 |
+
TomlConfigSettingsSource,
|
| 37 |
+
YamlConfigSettingsSource,
|
| 38 |
+
get_subcommand,
|
| 39 |
+
)
|
| 40 |
+
from .sources.utils import _get_alias_names
|
| 41 |
+
|
| 42 |
+
T = TypeVar('T')
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class SettingsConfigDict(ConfigDict, total=False):
    """TypedDict of configuration options accepted via ``model_config`` on
    `BaseSettings` subclasses.

    Extends pydantic's `ConfigDict` with the pydantic-settings specific keys.
    ``total=False`` makes every key optional.
    """

    # Whether environment/CLI variable names are matched case-sensitively.
    case_sensitive: bool
    # Allow partial updates on nested model default object fields.
    nested_model_default_partial_update: bool | None
    # Prefix prepended to environment variable names.
    env_prefix: str
    # Which targets the env_prefix applies to (e.g. variables and/or dotenv).
    env_prefix_target: EnvPrefixTarget
    # Dotenv file(s) to load; None disables dotenv loading.
    env_file: DotenvType | None
    # Encoding used to read the dotenv file(s), e.g. 'latin-1'.
    env_file_encoding: str | None
    # Treat empty-string environment values as unset.
    env_ignore_empty: bool
    # Delimiter separating nested keys in env var names (e.g. '__').
    env_nested_delimiter: str | None
    # Maximum number of nested splits; None means unlimited.
    env_nested_max_split: int | None
    # String value parsed as None (e.g. 'null'); None disables the parsing.
    env_parse_none_str: str | None
    # Parse enum field names into enum values.
    env_parse_enums: bool | None
    # CLI program name shown in help text.
    cli_prog_name: str | None
    # CLI args to parse: True -> sys.argv[1:], or an explicit sequence.
    cli_parse_args: bool | list[str] | tuple[str, ...] | None
    # CLI string value parsed as None.
    cli_parse_none_str: str | None
    # Hide `None` in CLI help type hints.
    cli_hide_none_type: bool
    # Avoid complex JSON objects in CLI help text.
    cli_avoid_json: bool
    # Enforce required fields at the CLI.
    cli_enforce_required: bool
    # Use class docstrings (not field descriptions) for CLI group help.
    cli_use_class_docs_for_groups: bool
    # Whether the internal parser exits on error (argparse-style).
    cli_exit_on_error: bool
    # Prefix applied to root parser command line arguments.
    cli_prefix: str
    # Flag prefix character for optional CLI arguments (default '-').
    cli_flag_prefix_char: str
    # Boolean-field flag style: False, True/'dual', or 'toggle'.
    cli_implicit_flags: bool | Literal['dual', 'toggle'] | None
    # Ignore unknown CLI args instead of erroring.
    cli_ignore_unknown_args: bool | None
    # Kebab-case CLI argument names ('all' includes enum values).
    cli_kebab_case: bool | Literal['all', 'no_enums'] | None
    # Mapping of target field name to CLI alias name(s).
    cli_shortcuts: Mapping[str, str | list[str]] | None
    # Directory (or directories) containing secret files.
    secrets_dir: PathType | None
    # JSON settings file path(s) and encoding.
    json_file: PathType | None
    json_file_encoding: str | None
    # YAML settings file path(s) and encoding.
    yaml_file: PathType | None
    yaml_file_encoding: str | None
    yaml_config_section: str | None
    """
    Specifies the section in a YAML file from which to load the settings.
    Supports dot-notation for nested paths (e.g., 'config.app.settings').
    If provided, the settings will be loaded from the specified section.
    This is useful when the YAML file contains multiple configuration sections
    and you only want to load a specific subset into your settings model.
    """

    pyproject_toml_depth: int
    """
    Number of levels **up** from the current working directory to attempt to find a pyproject.toml
    file.

    This is only used when a pyproject.toml file is not found in the current working directory.
    """

    pyproject_toml_table_header: tuple[str, ...]
    """
    Header of the TOML table within a pyproject.toml file to use when filling variables.
    This is supplied as a `tuple[str, ...]` instead of a `str` to accommodate for headers
    containing a `.`.

    For example, `toml_table_header = ("tool", "my.tool", "foo")` can be used to fill variable
    values from a table with header `[tool."my.tool".foo]`.

    To use the root table, exclude this config setting or provide an empty tuple.
    """

    # TOML settings file path(s).
    toml_file: PathType | None
    # Enable/disable value decoding in file sources.
    enable_decoding: bool
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
# Extend `config_keys` by pydantic settings config keys to
# support setting config through class kwargs.
# Pydantic uses `config_keys` in `pydantic._internal._config.ConfigWrapper.for_model`
# to extract config keys from model kwargs. So, by adding the pydantic-settings keys to
# `config_keys`, they are considered valid config keys and are collected
# by Pydantic. NOTE: this mutates pydantic-internal state at import time.
config_keys |= set(SettingsConfigDict.__annotations__.keys())
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
class BaseSettings(BaseModel):
|
| 119 |
+
"""
|
| 120 |
+
Base class for settings, allowing values to be overridden by environment variables.
|
| 121 |
+
|
| 122 |
+
This is useful in production for secrets you do not wish to save in code, it plays nicely with docker(-compose),
|
| 123 |
+
Heroku and any 12 factor app design.
|
| 124 |
+
|
| 125 |
+
All the below attributes can be set via `model_config`.
|
| 126 |
+
|
| 127 |
+
Args:
|
| 128 |
+
_case_sensitive: Whether environment and CLI variable names should be read with case-sensitivity.
|
| 129 |
+
Defaults to `None`.
|
| 130 |
+
_nested_model_default_partial_update: Whether to allow partial updates on nested model default object fields.
|
| 131 |
+
Defaults to `False`.
|
| 132 |
+
_env_prefix: Prefix for all environment variables. Defaults to `None`.
|
| 133 |
+
_env_prefix_target: Targets to which `_env_prefix` is applied. Default: `variable`.
|
| 134 |
+
_env_file: The env file(s) to load settings values from. Defaults to `Path('')`, which
|
| 135 |
+
means that the value from `model_config['env_file']` should be used. You can also pass
|
| 136 |
+
`None` to indicate that environment variables should not be loaded from an env file.
|
| 137 |
+
_env_file_encoding: The env file encoding, e.g. `'latin-1'`. Defaults to `None`.
|
| 138 |
+
_env_ignore_empty: Ignore environment variables where the value is an empty string. Default to `False`.
|
| 139 |
+
_env_nested_delimiter: The nested env values delimiter. Defaults to `None`.
|
| 140 |
+
_env_nested_max_split: The nested env values maximum nesting. Defaults to `None`, which means no limit.
|
| 141 |
+
_env_parse_none_str: The env string value that should be parsed (e.g. "null", "void", "None", etc.)
|
| 142 |
+
into `None` type(None). Defaults to `None` type(None), which means no parsing should occur.
|
| 143 |
+
_env_parse_enums: Parse enum field names to values. Defaults to `None.`, which means no parsing should occur.
|
| 144 |
+
_cli_prog_name: The CLI program name to display in help text. Defaults to `None` if _cli_parse_args is `None`.
|
| 145 |
+
Otherwise, defaults to sys.argv[0].
|
| 146 |
+
_cli_parse_args: The list of CLI arguments to parse. Defaults to None.
|
| 147 |
+
If set to `True`, defaults to sys.argv[1:].
|
| 148 |
+
_cli_settings_source: Override the default CLI settings source with a user defined instance. Defaults to None.
|
| 149 |
+
_cli_parse_none_str: The CLI string value that should be parsed (e.g. "null", "void", "None", etc.) into
|
| 150 |
+
`None` type(None). Defaults to _env_parse_none_str value if set. Otherwise, defaults to "null" if
|
| 151 |
+
_cli_avoid_json is `False`, and "None" if _cli_avoid_json is `True`.
|
| 152 |
+
_cli_hide_none_type: Hide `None` values in CLI help text. Defaults to `False`.
|
| 153 |
+
_cli_avoid_json: Avoid complex JSON objects in CLI help text. Defaults to `False`.
|
| 154 |
+
_cli_enforce_required: Enforce required fields at the CLI. Defaults to `False`.
|
| 155 |
+
_cli_use_class_docs_for_groups: Use class docstrings in CLI group help text instead of field descriptions.
|
| 156 |
+
Defaults to `False`.
|
| 157 |
+
_cli_exit_on_error: Determines whether or not the internal parser exits with error info when an error occurs.
|
| 158 |
+
Defaults to `True`.
|
| 159 |
+
_cli_prefix: The root parser command line arguments prefix. Defaults to "".
|
| 160 |
+
_cli_flag_prefix_char: The flag prefix character to use for CLI optional arguments. Defaults to '-'.
|
| 161 |
+
_cli_implicit_flags: Controls how `bool` fields are exposed as CLI flags.
|
| 162 |
+
|
| 163 |
+
- False (default): no implicit flags are generated; booleans must be set explicitly (e.g. --flag=true).
|
| 164 |
+
- True / 'dual': optional boolean fields generate both positive and negative forms (--flag and --no-flag).
|
| 165 |
+
- 'toggle': required boolean fields remain in 'dual' mode, while optional boolean fields generate a single
|
| 166 |
+
flag aligned with the default value (if default=False, expose --flag; if default=True, expose --no-flag).
|
| 167 |
+
_cli_ignore_unknown_args: Whether to ignore unknown CLI args and parse only known ones. Defaults to `False`.
|
| 168 |
+
_cli_kebab_case: CLI args use kebab case. Defaults to `False`.
|
| 169 |
+
_cli_shortcuts: Mapping of target field name to alias names. Defaults to `None`.
|
| 170 |
+
_secrets_dir: The secret files directory or a sequence of directories. Defaults to `None`.
|
| 171 |
+
_build_sources: Pre-initialized sources and init kwargs to use for building instantiation values.
|
| 172 |
+
Defaults to `None`.
|
| 173 |
+
"""
|
| 174 |
+
|
| 175 |
+
    def __init__(
        __pydantic_self__,
        _case_sensitive: bool | None = None,
        _nested_model_default_partial_update: bool | None = None,
        _env_prefix: str | None = None,
        _env_prefix_target: EnvPrefixTarget | None = None,
        _env_file: DotenvType | None = ENV_FILE_SENTINEL,
        _env_file_encoding: str | None = None,
        _env_ignore_empty: bool | None = None,
        _env_nested_delimiter: str | None = None,
        _env_nested_max_split: int | None = None,
        _env_parse_none_str: str | None = None,
        _env_parse_enums: bool | None = None,
        _cli_prog_name: str | None = None,
        _cli_parse_args: bool | list[str] | tuple[str, ...] | None = None,
        _cli_settings_source: CliSettingsSource[Any] | None = None,
        _cli_parse_none_str: str | None = None,
        _cli_hide_none_type: bool | None = None,
        _cli_avoid_json: bool | None = None,
        _cli_enforce_required: bool | None = None,
        _cli_use_class_docs_for_groups: bool | None = None,
        _cli_exit_on_error: bool | None = None,
        _cli_prefix: str | None = None,
        _cli_flag_prefix_char: str | None = None,
        _cli_implicit_flags: bool | Literal['dual', 'toggle'] | None = None,
        _cli_ignore_unknown_args: bool | None = None,
        _cli_kebab_case: bool | Literal['all', 'no_enums'] | None = None,
        _cli_shortcuts: Mapping[str, str | list[str]] | None = None,
        _secrets_dir: PathType | None = None,
        _build_sources: tuple[tuple[PydanticBaseSettingsSource, ...], dict[str, Any]] | None = None,
        **values: Any,
    ) -> None:
        """Build the settings source chain, collect values, and validate.

        Underscore-prefixed parameters are per-instantiation overrides for
        the corresponding ``model_config`` keys; ``values`` are regular init
        kwargs forwarded to the init settings source. ``__pydantic_self__``
        is used instead of ``self`` so a field named ``self`` remains usable.
        """
        # Use pre-built sources/kwargs if supplied (e.g. by tooling that
        # already ran source resolution); otherwise resolve them now from
        # the per-call overrides merged with `model_config`.
        # NOTE: `_env_file` defaults to ENV_FILE_SENTINEL (not None) so that
        # an explicit `None` ("no env file") can be told apart from "unset".
        sources, init_kwargs = (
            _build_sources
            if _build_sources is not None
            else __pydantic_self__.__class__._settings_init_sources(
                _case_sensitive=_case_sensitive,
                _nested_model_default_partial_update=_nested_model_default_partial_update,
                _env_prefix=_env_prefix,
                _env_prefix_target=_env_prefix_target,
                _env_file=_env_file,
                _env_file_encoding=_env_file_encoding,
                _env_ignore_empty=_env_ignore_empty,
                _env_nested_delimiter=_env_nested_delimiter,
                _env_nested_max_split=_env_nested_max_split,
                _env_parse_none_str=_env_parse_none_str,
                _env_parse_enums=_env_parse_enums,
                _cli_prog_name=_cli_prog_name,
                _cli_parse_args=_cli_parse_args,
                _cli_settings_source=_cli_settings_source,
                _cli_parse_none_str=_cli_parse_none_str,
                _cli_hide_none_type=_cli_hide_none_type,
                _cli_avoid_json=_cli_avoid_json,
                _cli_enforce_required=_cli_enforce_required,
                _cli_use_class_docs_for_groups=_cli_use_class_docs_for_groups,
                _cli_exit_on_error=_cli_exit_on_error,
                _cli_prefix=_cli_prefix,
                _cli_flag_prefix_char=_cli_flag_prefix_char,
                _cli_implicit_flags=_cli_implicit_flags,
                _cli_ignore_unknown_args=_cli_ignore_unknown_args,
                _cli_kebab_case=_cli_kebab_case,
                _cli_shortcuts=_cli_shortcuts,
                _secrets_dir=_secrets_dir,
                **values,
            )
        )

        # Merge values from all sources and hand them to pydantic validation.
        super().__init__(**__pydantic_self__.__class__._settings_build_values(sources, init_kwargs))
|
| 243 |
+
|
| 244 |
+
@classmethod
|
| 245 |
+
def settings_customise_sources(
|
| 246 |
+
cls,
|
| 247 |
+
settings_cls: type[BaseSettings],
|
| 248 |
+
init_settings: PydanticBaseSettingsSource,
|
| 249 |
+
env_settings: PydanticBaseSettingsSource,
|
| 250 |
+
dotenv_settings: PydanticBaseSettingsSource,
|
| 251 |
+
file_secret_settings: PydanticBaseSettingsSource,
|
| 252 |
+
) -> tuple[PydanticBaseSettingsSource, ...]:
|
| 253 |
+
"""
|
| 254 |
+
Define the sources and their order for loading the settings values.
|
| 255 |
+
|
| 256 |
+
Args:
|
| 257 |
+
settings_cls: The Settings class.
|
| 258 |
+
init_settings: The `InitSettingsSource` instance.
|
| 259 |
+
env_settings: The `EnvSettingsSource` instance.
|
| 260 |
+
dotenv_settings: The `DotEnvSettingsSource` instance.
|
| 261 |
+
file_secret_settings: The `SecretsSettingsSource` instance.
|
| 262 |
+
|
| 263 |
+
Returns:
|
| 264 |
+
A tuple containing the sources and their order for loading the settings values.
|
| 265 |
+
"""
|
| 266 |
+
return init_settings, env_settings, dotenv_settings, file_secret_settings
|
| 267 |
+
|
| 268 |
+
    @classmethod
    def _settings_init_sources(
        cls,
        _case_sensitive: bool | None = None,
        _nested_model_default_partial_update: bool | None = None,
        _env_prefix: str | None = None,
        _env_prefix_target: EnvPrefixTarget | None = None,
        _env_file: DotenvType | None = None,
        _env_file_encoding: str | None = None,
        _env_ignore_empty: bool | None = None,
        _env_nested_delimiter: str | None = None,
        _env_nested_max_split: int | None = None,
        _env_parse_none_str: str | None = None,
        _env_parse_enums: bool | None = None,
        _cli_prog_name: str | None = None,
        _cli_parse_args: bool | list[str] | tuple[str, ...] | None = None,
        _cli_settings_source: CliSettingsSource[Any] | None = None,
        _cli_parse_none_str: str | None = None,
        _cli_hide_none_type: bool | None = None,
        _cli_avoid_json: bool | None = None,
        _cli_enforce_required: bool | None = None,
        _cli_use_class_docs_for_groups: bool | None = None,
        _cli_exit_on_error: bool | None = None,
        _cli_prefix: str | None = None,
        _cli_flag_prefix_char: str | None = None,
        _cli_implicit_flags: bool | Literal['dual', 'toggle'] | None = None,
        _cli_ignore_unknown_args: bool | None = None,
        _cli_kebab_case: bool | Literal['all', 'no_enums'] | None = None,
        _cli_shortcuts: Mapping[str, str | list[str]] | None = None,
        _secrets_dir: PathType | None = None,
        **init_kwargs: dict[str, Any],
    ) -> tuple[tuple[PydanticBaseSettingsSource, ...], dict[str, Any]]:
        """Resolve config values and construct the tuple of settings sources.

        Each underscore-prefixed argument, when not ``None``, overrides the
        corresponding key from ``cls.model_config``. Returns the ordered
        source tuple (highest priority first, with defaults appended last)
        together with the remaining init kwargs.
        """
        # Determine settings config values: per-call override wins, otherwise
        # fall back to the class-level model_config entry.
        case_sensitive = _case_sensitive if _case_sensitive is not None else cls.model_config.get('case_sensitive')
        env_prefix = _env_prefix if _env_prefix is not None else cls.model_config.get('env_prefix')
        env_prefix_target = (
            _env_prefix_target if _env_prefix_target is not None else cls.model_config.get('env_prefix_target')
        )
        nested_model_default_partial_update = (
            _nested_model_default_partial_update
            if _nested_model_default_partial_update is not None
            else cls.model_config.get('nested_model_default_partial_update')
        )
        # env_file is compared against ENV_FILE_SENTINEL (not None) so an
        # explicit `None` ("disable dotenv") passed by the caller is honored.
        env_file = _env_file if _env_file != ENV_FILE_SENTINEL else cls.model_config.get('env_file')
        env_file_encoding = (
            _env_file_encoding if _env_file_encoding is not None else cls.model_config.get('env_file_encoding')
        )
        env_ignore_empty = (
            _env_ignore_empty if _env_ignore_empty is not None else cls.model_config.get('env_ignore_empty')
        )
        env_nested_delimiter = (
            _env_nested_delimiter if _env_nested_delimiter is not None else cls.model_config.get('env_nested_delimiter')
        )
        env_nested_max_split = (
            _env_nested_max_split if _env_nested_max_split is not None else cls.model_config.get('env_nested_max_split')
        )
        env_parse_none_str = (
            _env_parse_none_str if _env_parse_none_str is not None else cls.model_config.get('env_parse_none_str')
        )
        env_parse_enums = _env_parse_enums if _env_parse_enums is not None else cls.model_config.get('env_parse_enums')

        cli_prog_name = _cli_prog_name if _cli_prog_name is not None else cls.model_config.get('cli_prog_name')
        cli_parse_args = _cli_parse_args if _cli_parse_args is not None else cls.model_config.get('cli_parse_args')
        cli_settings_source = (
            _cli_settings_source if _cli_settings_source is not None else cls.model_config.get('cli_settings_source')
        )
        cli_parse_none_str = (
            _cli_parse_none_str if _cli_parse_none_str is not None else cls.model_config.get('cli_parse_none_str')
        )
        # If an env none-string is configured, it takes precedence for CLI
        # parsing as well, keeping env and CLI None-parsing consistent.
        cli_parse_none_str = cli_parse_none_str if not env_parse_none_str else env_parse_none_str
        cli_hide_none_type = (
            _cli_hide_none_type if _cli_hide_none_type is not None else cls.model_config.get('cli_hide_none_type')
        )
        cli_avoid_json = _cli_avoid_json if _cli_avoid_json is not None else cls.model_config.get('cli_avoid_json')
        cli_enforce_required = (
            _cli_enforce_required if _cli_enforce_required is not None else cls.model_config.get('cli_enforce_required')
        )
        cli_use_class_docs_for_groups = (
            _cli_use_class_docs_for_groups
            if _cli_use_class_docs_for_groups is not None
            else cls.model_config.get('cli_use_class_docs_for_groups')
        )
        cli_exit_on_error = (
            _cli_exit_on_error if _cli_exit_on_error is not None else cls.model_config.get('cli_exit_on_error')
        )
        cli_prefix = _cli_prefix if _cli_prefix is not None else cls.model_config.get('cli_prefix')
        cli_flag_prefix_char = (
            _cli_flag_prefix_char if _cli_flag_prefix_char is not None else cls.model_config.get('cli_flag_prefix_char')
        )
        cli_implicit_flags = (
            _cli_implicit_flags if _cli_implicit_flags is not None else cls.model_config.get('cli_implicit_flags')
        )
        cli_ignore_unknown_args = (
            _cli_ignore_unknown_args
            if _cli_ignore_unknown_args is not None
            else cls.model_config.get('cli_ignore_unknown_args')
        )
        cli_kebab_case = _cli_kebab_case if _cli_kebab_case is not None else cls.model_config.get('cli_kebab_case')
        cli_shortcuts = _cli_shortcuts if _cli_shortcuts is not None else cls.model_config.get('cli_shortcuts')

        secrets_dir = _secrets_dir if _secrets_dir is not None else cls.model_config.get('secrets_dir')

        # Configure built-in sources
        default_settings = DefaultSettingsSource(
            cls, nested_model_default_partial_update=nested_model_default_partial_update
        )
        init_settings = InitSettingsSource(
            cls,
            init_kwargs=init_kwargs,
            nested_model_default_partial_update=nested_model_default_partial_update,
        )
        env_settings = EnvSettingsSource(
            cls,
            case_sensitive=case_sensitive,
            env_prefix=env_prefix,
            env_prefix_target=env_prefix_target,
            env_nested_delimiter=env_nested_delimiter,
            env_nested_max_split=env_nested_max_split,
            env_ignore_empty=env_ignore_empty,
            env_parse_none_str=env_parse_none_str,
            env_parse_enums=env_parse_enums,
        )
        dotenv_settings = DotEnvSettingsSource(
            cls,
            env_file=env_file,
            env_file_encoding=env_file_encoding,
            case_sensitive=case_sensitive,
            env_prefix=env_prefix,
            env_prefix_target=env_prefix_target,
            env_nested_delimiter=env_nested_delimiter,
            env_nested_max_split=env_nested_max_split,
            env_ignore_empty=env_ignore_empty,
            env_parse_none_str=env_parse_none_str,
            env_parse_enums=env_parse_enums,
        )

        file_secret_settings = SecretsSettingsSource(
            cls,
            secrets_dir=secrets_dir,
            case_sensitive=case_sensitive,
            env_prefix=env_prefix,
            env_prefix_target=env_prefix_target,
        )
        # Provide a hook to set built-in sources priority and add / remove sources.
        # The DefaultSettingsSource is always appended last (lowest priority).
        sources = cls.settings_customise_sources(
            cls,
            init_settings=init_settings,
            env_settings=env_settings,
            dotenv_settings=dotenv_settings,
            file_secret_settings=file_secret_settings,
        ) + (default_settings,)
        custom_cli_sources = [source for source in sources if isinstance(source, CliSettingsSource)]
        if not any(custom_cli_sources):
            # No user-provided CLI source in the chain: prepend one if the
            # caller passed an explicit instance, or build one on demand
            # when CLI parsing is requested.
            if isinstance(cli_settings_source, CliSettingsSource):
                sources = (cli_settings_source,) + sources
            elif cli_parse_args is not None:
                cli_settings = CliSettingsSource[Any](
                    cls,
                    cli_prog_name=cli_prog_name,
                    cli_parse_args=cli_parse_args,
                    cli_parse_none_str=cli_parse_none_str,
                    cli_hide_none_type=cli_hide_none_type,
                    cli_avoid_json=cli_avoid_json,
                    cli_enforce_required=cli_enforce_required,
                    cli_use_class_docs_for_groups=cli_use_class_docs_for_groups,
                    cli_exit_on_error=cli_exit_on_error,
                    cli_prefix=cli_prefix,
                    cli_flag_prefix_char=cli_flag_prefix_char,
                    cli_implicit_flags=cli_implicit_flags,
                    cli_ignore_unknown_args=cli_ignore_unknown_args,
                    cli_kebab_case=cli_kebab_case,
                    cli_shortcuts=cli_shortcuts,
                    case_sensitive=case_sensitive,
                )
                sources = (cli_settings,) + sources
        # We ensure that if command line arguments haven't been parsed yet, we do so.
        elif cli_parse_args not in (None, False) and not custom_cli_sources[0].env_vars:
            custom_cli_sources[0](args=cli_parse_args)  # type: ignore

        cls._settings_warn_unused_config_keys(sources, cls.model_config)

        return sources, init_kwargs
|
| 450 |
+
|
| 451 |
+
    @classmethod
    def _settings_build_values(
        cls, sources: tuple[PydanticBaseSettingsSource, ...], init_kwargs: dict[str, Any]
    ) -> dict[str, Any]:
        """Run every source and deep-merge their outputs into one dict.

        Sources earlier in ``sources`` take precedence: each new source's
        output is merged *under* the accumulated state. Values that merely
        echo the defaults from `DefaultSettingsSource` are stripped so
        pydantic applies its own default handling for them.
        """
        if sources:
            state: dict[str, Any] = {}
            defaults: dict[str, Any] = {}
            states: dict[str, dict[str, Any]] = {}
            for source in sources:
                if isinstance(source, PydanticBaseSettingsSource):
                    # Give the source visibility into the merged-so-far state
                    # and the per-source outputs collected so far.
                    source._set_current_state(state)
                    source._set_settings_sources_data(states)

                # Sources may be plain callables, which have no __name__ attr
                # on the instance; fall back to the class name.
                source_name = source.__name__ if hasattr(source, '__name__') else type(source).__name__
                source_state = source()

                if isinstance(source, DefaultSettingsSource):
                    defaults = source_state

                states[source_name] = source_state
                # Earlier sources win: the accumulated `state` is laid over
                # this source's output.
                state = deep_update(source_state, state)

            # Strip any default values not explicitly set before returning final state
            state = {key: val for key, val in state.items() if key not in defaults or defaults[key] != val}
            cls._settings_restore_init_kwarg_names(cls, init_kwargs, state)

            return state
        else:
            # no one should mean to do this, but I think returning an empty dict is marginally preferable
            # to an informative error and much better than a confusing error
            return {}
|
| 482 |
+
|
| 483 |
+
    @staticmethod
    def _settings_restore_init_kwarg_names(
        settings_cls: type[BaseSettings], init_kwargs: dict[str, Any], state: dict[str, Any]
    ) -> None:
        """
        Restore the init_kwarg key names to the final merged state dictionary.

        This function renames keys in state to match the original init_kwargs key names,
        preserving the merged values from the source priority order.

        Args:
            settings_cls: The settings class whose fields and aliases drive the key matching.
            init_kwargs: The keyword arguments originally passed to the settings initializer.
            state: The final merged settings state; mutated in place.
        """
        if init_kwargs and state:
            state_kwarg_names = set(state.keys())
            init_kwarg_names = set(init_kwargs.keys())
            for field_name, field_info in settings_cls.model_fields.items():
                alias_names, *_ = _get_alias_names(field_name, field_info)
                matchable_names = set(alias_names)
                # The plain field name only matches when the model config permits
                # populating/validating by name in addition to aliases.
                include_name = settings_cls.model_config.get(
                    'populate_by_name', False
                ) or settings_cls.model_config.get('validate_by_name', False)
                if include_name:
                    matchable_names.add(field_name)
                init_kwarg_name = init_kwarg_names & matchable_names
                state_kwarg_name = state_kwarg_names & matchable_names
                if init_kwarg_name and state_kwarg_name:
                    # Use deterministic selection for both keys.
                    # Target key: the key from init_kwargs that should be used in the final state.
                    target_key = next(iter(init_kwarg_name))
                    # Source key: prefer the alias (first in alias_names) if present in state,
                    # as InitSettingsSource normalizes to the preferred alias.
                    # This ensures we get the highest-priority value for this field.
                    source_key = None
                    for alias in alias_names:
                        if alias in state_kwarg_name:
                            source_key = alias
                            break
                    if source_key is None:
                        # Fall back to field_name if no alias found in state
                        source_key = field_name if field_name in state_kwarg_name else next(iter(state_kwarg_name))
                    # Get the value from the source key and remove all matching keys
                    value = state.pop(source_key)
                    for key in state_kwarg_name - {source_key}:
                        state.pop(key, None)
                    state[target_key] = value
|
| 526 |
+
|
| 527 |
+
@staticmethod
|
| 528 |
+
def _settings_warn_unused_config_keys(sources: tuple[object, ...], model_config: SettingsConfigDict) -> None:
|
| 529 |
+
"""
|
| 530 |
+
Warns if any values in model_config were set but the corresponding settings source has not been initialised.
|
| 531 |
+
|
| 532 |
+
The list alternative sources and their config keys can be found here:
|
| 533 |
+
https://docs.pydantic.dev/latest/concepts/pydantic_settings/#other-settings-source
|
| 534 |
+
|
| 535 |
+
Args:
|
| 536 |
+
sources: The tuple of configured sources
|
| 537 |
+
model_config: The model config to check for unused config keys
|
| 538 |
+
"""
|
| 539 |
+
|
| 540 |
+
def warn_if_not_used(source_type: type[PydanticBaseSettingsSource], keys: tuple[str, ...]) -> None:
|
| 541 |
+
if not any(isinstance(source, source_type) for source in sources):
|
| 542 |
+
for key in keys:
|
| 543 |
+
if model_config.get(key) is not None:
|
| 544 |
+
warnings.warn(
|
| 545 |
+
f'Config key `{key}` is set in model_config but will be ignored because no '
|
| 546 |
+
f'{source_type.__name__} source is configured. To use this config key, add a '
|
| 547 |
+
f'{source_type.__name__} source to the settings sources via the '
|
| 548 |
+
'settings_customise_sources hook.',
|
| 549 |
+
UserWarning,
|
| 550 |
+
stacklevel=3,
|
| 551 |
+
)
|
| 552 |
+
|
| 553 |
+
warn_if_not_used(JsonConfigSettingsSource, ('json_file', 'json_file_encoding'))
|
| 554 |
+
warn_if_not_used(PyprojectTomlConfigSettingsSource, ('pyproject_toml_depth', 'pyproject_toml_table_header'))
|
| 555 |
+
warn_if_not_used(TomlConfigSettingsSource, ('toml_file',))
|
| 556 |
+
warn_if_not_used(YamlConfigSettingsSource, ('yaml_file', 'yaml_file_encoding', 'yaml_config_section'))
|
| 557 |
+
|
| 558 |
+
    # Default configuration for all BaseSettings subclasses. Any key here can be
    # overridden per-model via `model_config = SettingsConfigDict(...)`; the
    # `cli_*`, `env_*`, and file-related keys correspond to the `_`-prefixed
    # init kwargs accepted at instantiation time.
    model_config: ClassVar[SettingsConfigDict] = SettingsConfigDict(
        extra='forbid',
        arbitrary_types_allowed=True,
        validate_default=True,
        case_sensitive=False,
        env_prefix='',
        env_prefix_target='variable',
        nested_model_default_partial_update=False,
        env_file=None,
        env_file_encoding=None,
        env_ignore_empty=False,
        env_nested_delimiter=None,
        env_nested_max_split=None,
        env_parse_none_str=None,
        env_parse_enums=None,
        cli_prog_name=None,
        cli_parse_args=None,
        cli_parse_none_str=None,
        cli_hide_none_type=False,
        cli_avoid_json=False,
        cli_enforce_required=False,
        cli_use_class_docs_for_groups=False,
        cli_exit_on_error=True,
        cli_prefix='',
        cli_flag_prefix_char='-',
        cli_implicit_flags=False,
        cli_ignore_unknown_args=False,
        cli_kebab_case=False,
        cli_shortcuts=None,
        json_file=None,
        json_file_encoding=None,
        yaml_file=None,
        yaml_file_encoding=None,
        yaml_config_section=None,
        toml_file=None,
        secrets_dir=None,
        protected_namespaces=('model_validate', 'model_dump', 'settings_customise_sources'),
        enable_decoding=True,
    )
|
| 597 |
+
|
| 598 |
+
|
| 599 |
+
class CliApp:
    """
    A utility class for running Pydantic `BaseSettings`, `BaseModel`, or `pydantic.dataclasses.dataclass` as
    CLI applications.
    """

    # Maps id() of a currently-running command model to its (CLI settings source,
    # parser, subcommand dest) so that nested run_subcommand calls can locate the
    # parser context of their parent invocation. Entries are removed in a finally
    # block once the command finishes.
    _subcommand_stack: ClassVar[dict[int, tuple[CliSettingsSource[Any], Any, str]]] = {}
    # Matches ANSI SGR color escape sequences, for optional stripping of help text.
    _ansi_color: ClassVar[re.Pattern[str]] = re.compile(r'\x1b\[[0-9;]*m')

    @staticmethod
    def _get_base_settings_cls(model_cls: type[Any]) -> type[BaseSettings]:
        # Return model_cls unchanged when it is already a BaseSettings subclass;
        # otherwise wrap it in an ad-hoc BaseSettings subclass with CLI-friendly defaults.
        if issubclass(model_cls, BaseSettings):
            return model_cls

        class CliAppBaseSettings(BaseSettings, model_cls):  # type: ignore
            __doc__ = model_cls.__doc__
            model_config = SettingsConfigDict(
                nested_model_default_partial_update=True,
                case_sensitive=True,
                cli_hide_none_type=True,
                cli_avoid_json=True,
                cli_enforce_required=True,
                cli_implicit_flags=True,
                cli_kebab_case=True,
            )

        return CliAppBaseSettings

    @staticmethod
    def _run_cli_cmd(model: Any, cli_cmd_method_name: str, is_required: bool) -> Any:
        """Invoke `cli_cmd_method_name` on `model` (sync or async) and return the model."""
        # Look the command up on the class (not the instance) so a bound method is
        # not created yet; it is called below with the model as its argument.
        command = getattr(type(model), cli_cmd_method_name, None)
        if command is None:
            if is_required:
                raise SettingsError(f'Error: {type(model).__name__} class is missing {cli_cmd_method_name} entrypoint')
            return model

        # If the method is asynchronous, we handle its execution based on the current event loop status.
        if inspect.iscoroutinefunction(command):
            # For asynchronous methods, we have two execution scenarios:
            # 1. If no event loop is running in the current thread, run the coroutine directly with asyncio.run().
            # 2. If an event loop is already running in the current thread, run the coroutine in a separate thread to avoid conflicts.
            try:
                # Check if an event loop is currently running in this thread.
                loop = asyncio.get_running_loop()
            except RuntimeError:
                loop = None

            if loop and loop.is_running():
                # We're in a context with an active event loop (e.g., Jupyter Notebook).
                # Running asyncio.run() here would cause conflicts, so we use a separate thread.
                exception_container = []

                def run_coro() -> None:
                    try:
                        # Execute the coroutine in a new event loop in this separate thread.
                        asyncio.run(command(model))
                    except Exception as e:
                        exception_container.append(e)

                thread = threading.Thread(target=run_coro)
                thread.start()
                thread.join()
                if exception_container:
                    # Propagate exceptions from the separate thread.
                    raise exception_container[0]
            else:
                # No event loop is running; safe to run the coroutine directly.
                asyncio.run(command(model))
        else:
            # For synchronous methods, call them directly.
            command(model)

        return model

    @staticmethod
    def run(
        model_cls: type[T],
        cli_args: list[str] | Namespace | SimpleNamespace | dict[str, Any] | None = None,
        cli_settings_source: CliSettingsSource[Any] | None = None,
        cli_exit_on_error: bool | None = None,
        cli_cmd_method_name: str = 'cli_cmd',
        **model_init_data: Any,
    ) -> T:
        """
        Runs a Pydantic `BaseSettings`, `BaseModel`, or `pydantic.dataclasses.dataclass` as a CLI application.
        Running a model as a CLI application requires the `cli_cmd` method to be defined in the model class.

        Args:
            model_cls: The model class to run as a CLI application.
            cli_args: The list of CLI arguments to parse. If `cli_settings_source` is specified, this may
                also be a namespace or dictionary of pre-parsed CLI arguments. Defaults to `sys.argv[1:]`.
            cli_settings_source: Override the default CLI settings source with a user defined instance.
                Defaults to `None`.
            cli_exit_on_error: Determines whether this function exits on error. If model is subclass of
                `BaseSettings`, defaults to BaseSettings `cli_exit_on_error` value. Otherwise, defaults to
                `True`.
            cli_cmd_method_name: The CLI command method name to run. Defaults to "cli_cmd".
            model_init_data: The model init data.

        Returns:
            The ran instance of model.

        Raises:
            SettingsError: If model_cls is not subclass of `BaseModel` or `pydantic.dataclasses.dataclass`.
            SettingsError: If model_cls does not have a `cli_cmd` entrypoint defined.
        """

        if not (is_pydantic_dataclass(model_cls) or is_model_class(model_cls)):
            raise SettingsError(
                f'Error: {model_cls.__name__} is not subclass of BaseModel or pydantic.dataclasses.dataclass'
            )

        cli_settings = None
        # cli_parse_args=True means "parse sys.argv[1:]" downstream.
        cli_parse_args = True if cli_args is None else cli_args
        if cli_settings_source is not None:
            # A user-supplied source may accept either pre-parsed args or a raw arg list.
            if isinstance(cli_parse_args, (Namespace, SimpleNamespace, dict)):
                cli_settings = cli_settings_source(parsed_args=cli_parse_args)
            else:
                cli_settings = cli_settings_source(args=cli_parse_args)
        elif isinstance(cli_parse_args, (Namespace, SimpleNamespace, dict)):
            raise SettingsError('Error: `cli_args` must be list[str] or None when `cli_settings_source` is not used')

        model_init_data['_cli_parse_args'] = cli_parse_args
        model_init_data['_cli_exit_on_error'] = cli_exit_on_error
        model_init_data['_cli_settings_source'] = cli_settings
        if not issubclass(model_cls, BaseSettings):
            # Build the values through a synthesized BaseSettings wrapper, then
            # construct the original (non-settings) model from the resolved fields.
            base_settings_cls = CliApp._get_base_settings_cls(model_cls)
            sources, init_kwargs = base_settings_cls._settings_init_sources(**model_init_data)
            model = base_settings_cls(**base_settings_cls._settings_build_values(sources, init_kwargs))
            model_init_data = {}
            for field_name, field_info in base_settings_cls.model_fields.items():
                model_init_data[_field_name_for_signature(field_name, field_info)] = getattr(model, field_name)
            command = model_cls(**model_init_data)
        else:
            sources, init_kwargs = model_cls._settings_init_sources(**model_init_data)
            command = model_cls(_build_sources=(sources, init_kwargs))

        subcommand_dest = ':subcommand'
        cli_settings_source = [source for source in sources if isinstance(source, CliSettingsSource)][0]
        # Register the command so run_subcommand can resolve its parser context,
        # and always unregister once the command completes.
        CliApp._subcommand_stack[id(command)] = (cli_settings_source, cli_settings_source.root_parser, subcommand_dest)
        try:
            data_model = CliApp._run_cli_cmd(command, cli_cmd_method_name, is_required=False)
        finally:
            del CliApp._subcommand_stack[id(command)]
        return data_model

    @staticmethod
    def run_subcommand(
        model: PydanticModel, cli_exit_on_error: bool | None = None, cli_cmd_method_name: str = 'cli_cmd'
    ) -> PydanticModel:
        """
        Runs the model subcommand. Running a model subcommand requires the `cli_cmd` method to be defined in
        the nested model subcommand class.

        Args:
            model: The model to run the subcommand from.
            cli_exit_on_error: Determines whether this function exits with error if no subcommand is found.
                Defaults to model_config `cli_exit_on_error` value if set. Otherwise, defaults to `True`.
            cli_cmd_method_name: The CLI command method name to run. Defaults to "cli_cmd".

        Returns:
            The ran subcommand model.

        Raises:
            SystemExit: When no subcommand is found and cli_exit_on_error=`True` (the default).
            SettingsError: When no subcommand is found and cli_exit_on_error=`False`.
        """

        # Reuse the parser context registered by CliApp.run when available; otherwise
        # build a fresh CLI settings source for the model's class.
        if id(model) in CliApp._subcommand_stack:
            cli_settings_source, parser, subcommand_dest = CliApp._subcommand_stack[id(model)]
        else:
            cli_settings_source = CliSettingsSource[Any](CliApp._get_base_settings_cls(type(model)))
            parser = cli_settings_source.root_parser
            subcommand_dest = ':subcommand'

        cli_exit_on_error = cli_settings_source.cli_exit_on_error if cli_exit_on_error is None else cli_exit_on_error

        errors: list[SettingsError | SystemExit] = []
        subcommand = get_subcommand(
            model, is_required=True, cli_exit_on_error=cli_exit_on_error, _suppress_errors=errors
        )
        if errors:
            err = errors[0]
            # Only append usage help when the error did not arise from another
            # exception and a help formatter is available.
            if err.__context__ is None and err.__cause__ is None and cli_settings_source._format_help is not None:
                error_message = f'{err}\n{cli_settings_source._format_help(parser)}'
                raise type(err)(error_message) from None
            else:
                raise err

        # Resolve the nested parser context for the selected subcommand, register it,
        # and run the subcommand's CLI entrypoint (required at this level).
        subcommand_cls = cast(type[BaseModel], type(subcommand))
        subcommand_arg = cli_settings_source._parser_map[subcommand_dest][subcommand_cls]
        subcommand_alias = subcommand_arg.subcommand_alias(subcommand_cls)
        subcommand_dest = f'{subcommand_dest.split(":")[0]}{subcommand_alias}.:subcommand'
        subcommand_parser = subcommand_arg.parser
        CliApp._subcommand_stack[id(subcommand)] = (cli_settings_source, subcommand_parser, subcommand_dest)
        try:
            data_model = CliApp._run_cli_cmd(subcommand, cli_cmd_method_name, is_required=True)
        finally:
            del CliApp._subcommand_stack[id(subcommand)]
        return data_model

    @staticmethod
    def serialize(
        model: PydanticModel,
        list_style: Literal['json', 'argparse', 'lazy'] = 'json',
        dict_style: Literal['json', 'env'] = 'json',
        positionals_first: bool = False,
    ) -> list[str]:
        """
        Serializes the CLI arguments for a Pydantic data model.

        Args:
            model: The data model to serialize.
            list_style:
                Controls how list-valued fields are serialized on the command line.
                - 'json' (default):
                    Lists are encoded as a single JSON array.
                    Example: `--tags '["a","b","c"]'`
                - 'argparse':
                    Each list element becomes its own repeated flag, following
                    typical `argparse` conventions.
                    Example: `--tags a --tags b --tags c`
                - 'lazy':
                    Lists are emitted as a single comma-separated string without JSON
                    quoting or escaping.
                    Example: `--tags a,b,c`
            dict_style:
                Controls how dictionary-valued fields are serialized.
                - 'json' (default):
                    The entire dictionary is emitted as a single JSON object.
                    Example: `--config '{"host": "localhost", "port": 5432}'`
                - 'env':
                    The dictionary is flattened into multiple CLI flags using
                    environment-variable-style assignment.
                    Example: `--config host=localhost --config port=5432`
            positionals_first: Controls whether positional arguments should be serialized
                first compared to optional arguments. Defaults to `False`.

        Returns:
            The serialized CLI arguments for the data model.
        """

        base_settings_cls = CliApp._get_base_settings_cls(type(model))
        serialized_args = CliSettingsSource[Any](base_settings_cls)._serialized_args(
            model,
            list_style=list_style,
            dict_style=dict_style,
            positionals_first=positionals_first,
        )
        return CliSettingsSource._flatten_serialized_args(serialized_args, positionals_first)

    @staticmethod
    def format_help(
        model: PydanticModel | type[T],
        cli_settings_source: CliSettingsSource[Any] | None = None,
        strip_ansi_color: bool = False,
    ) -> str:
        """
        Return a string containing a help message for a Pydantic model.

        Args:
            model: The model or model class.
            cli_settings_source: Override the default CLI settings source with a user defined instance.
                Defaults to `None`.
            strip_ansi_color: Strips ANSI color codes from the help message when set to `True`.

        Returns:
            The help message string for the model.
        """
        model_cls = model if isinstance(model, type) else type(model)
        if cli_settings_source is None:
            # Prefer the source registered for a currently-running instance so the
            # help reflects its actual parser context.
            if not isinstance(model, type) and id(model) in CliApp._subcommand_stack:
                cli_settings_source, *_ = CliApp._subcommand_stack[id(model)]
            else:
                cli_settings_source = CliSettingsSource(CliApp._get_base_settings_cls(model_cls))
        help_message = cli_settings_source._format_help(cli_settings_source.root_parser)
        return help_message if not strip_ansi_color else CliApp._ansi_color.sub('', help_message)

    @staticmethod
    def print_help(
        model: PydanticModel | type[T],
        cli_settings_source: CliSettingsSource[Any] | None = None,
        file: TextIO | None = None,
        strip_ansi_color: bool = False,
    ) -> None:
        """
        Print a help message for a Pydantic model.

        Args:
            model: The model or model class.
            cli_settings_source: Override the default CLI settings source with a user defined instance.
                Defaults to `None`.
            file: A text stream to which the help message is written. If `None`, the output is sent to sys.stdout.
            strip_ansi_color: Strips ANSI color codes from the help message when set to `True`.
        """
        print(
            CliApp.format_help(
                model,
                cli_settings_source=cli_settings_source,
                strip_ansi_color=strip_ansi_color,
            ),
            file=file,
        )
|
source/pydantic_settings/py.typed
ADDED
|
File without changes
|
source/pydantic_settings/sources/__init__.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Package for handling configuration sources in pydantic-settings."""
|
| 2 |
+
|
| 3 |
+
from .base import (
|
| 4 |
+
ConfigFileSourceMixin,
|
| 5 |
+
DefaultSettingsSource,
|
| 6 |
+
InitSettingsSource,
|
| 7 |
+
PydanticBaseEnvSettingsSource,
|
| 8 |
+
PydanticBaseSettingsSource,
|
| 9 |
+
get_subcommand,
|
| 10 |
+
)
|
| 11 |
+
from .providers.aws import AWSSecretsManagerSettingsSource
|
| 12 |
+
from .providers.azure import AzureKeyVaultSettingsSource
|
| 13 |
+
from .providers.cli import (
|
| 14 |
+
CLI_SUPPRESS,
|
| 15 |
+
CliDualFlag,
|
| 16 |
+
CliExplicitFlag,
|
| 17 |
+
CliImplicitFlag,
|
| 18 |
+
CliMutuallyExclusiveGroup,
|
| 19 |
+
CliPositionalArg,
|
| 20 |
+
CliSettingsSource,
|
| 21 |
+
CliSubCommand,
|
| 22 |
+
CliSuppress,
|
| 23 |
+
CliToggleFlag,
|
| 24 |
+
CliUnknownArgs,
|
| 25 |
+
)
|
| 26 |
+
from .providers.dotenv import DotEnvSettingsSource, read_env_file
|
| 27 |
+
from .providers.env import EnvSettingsSource
|
| 28 |
+
from .providers.gcp import GoogleSecretManagerSettingsSource
|
| 29 |
+
from .providers.json import JsonConfigSettingsSource
|
| 30 |
+
from .providers.nested_secrets import NestedSecretsSettingsSource
|
| 31 |
+
from .providers.pyproject import PyprojectTomlConfigSettingsSource
|
| 32 |
+
from .providers.secrets import SecretsSettingsSource
|
| 33 |
+
from .providers.toml import TomlConfigSettingsSource
|
| 34 |
+
from .providers.yaml import YamlConfigSettingsSource
|
| 35 |
+
from .types import (
|
| 36 |
+
DEFAULT_PATH,
|
| 37 |
+
ENV_FILE_SENTINEL,
|
| 38 |
+
DotenvType,
|
| 39 |
+
EnvPrefixTarget,
|
| 40 |
+
ForceDecode,
|
| 41 |
+
NoDecode,
|
| 42 |
+
PathType,
|
| 43 |
+
PydanticModel,
|
| 44 |
+
)
|
| 45 |
+
|
| 46 |
+
# Public API of the sources package; mirrors the names imported above.
__all__ = [
    'CLI_SUPPRESS',
    'ENV_FILE_SENTINEL',
    'DEFAULT_PATH',
    'AWSSecretsManagerSettingsSource',
    'AzureKeyVaultSettingsSource',
    'CliExplicitFlag',
    'CliImplicitFlag',
    'CliToggleFlag',
    'CliDualFlag',
    'CliMutuallyExclusiveGroup',
    'CliPositionalArg',
    'CliSettingsSource',
    'CliSubCommand',
    'CliSuppress',
    'CliUnknownArgs',
    'DefaultSettingsSource',
    'DotEnvSettingsSource',
    'DotenvType',
    'EnvPrefixTarget',
    'EnvSettingsSource',
    'ForceDecode',
    'GoogleSecretManagerSettingsSource',
    'InitSettingsSource',
    'JsonConfigSettingsSource',
    'NestedSecretsSettingsSource',
    'NoDecode',
    'PathType',
    'PydanticBaseEnvSettingsSource',
    'PydanticBaseSettingsSource',
    'ConfigFileSourceMixin',
    'PydanticModel',
    'PyprojectTomlConfigSettingsSource',
    'SecretsSettingsSource',
    'TomlConfigSettingsSource',
    'YamlConfigSettingsSource',
    'get_subcommand',
    'read_env_file',
]
|
source/pydantic_settings/sources/base.py
ADDED
|
@@ -0,0 +1,579 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Base classes and core functionality for pydantic-settings sources."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations as _annotations
|
| 4 |
+
|
| 5 |
+
import json
|
| 6 |
+
from abc import ABC, abstractmethod
|
| 7 |
+
from collections.abc import Sequence
|
| 8 |
+
from dataclasses import asdict, is_dataclass
|
| 9 |
+
from pathlib import Path
|
| 10 |
+
from typing import TYPE_CHECKING, Any, cast, get_args
|
| 11 |
+
|
| 12 |
+
from pydantic import AliasChoices, AliasPath, BaseModel, TypeAdapter
|
| 13 |
+
from pydantic._internal._typing_extra import ( # type: ignore[attr-defined]
|
| 14 |
+
get_origin,
|
| 15 |
+
)
|
| 16 |
+
from pydantic._internal._utils import deep_update, is_model_class
|
| 17 |
+
from pydantic.fields import FieldInfo
|
| 18 |
+
from typing_inspection.introspection import is_union_origin
|
| 19 |
+
|
| 20 |
+
from ..exceptions import SettingsError
|
| 21 |
+
from ..utils import _lenient_issubclass
|
| 22 |
+
from .types import EnvNoneType, EnvPrefixTarget, ForceDecode, NoDecode, PathType, PydanticModel, _CliSubCommand
|
| 23 |
+
from .utils import (
|
| 24 |
+
_annotation_is_complex,
|
| 25 |
+
_get_alias_names,
|
| 26 |
+
_get_field_metadata,
|
| 27 |
+
_get_model_fields,
|
| 28 |
+
_resolve_type_alias,
|
| 29 |
+
_strip_annotated,
|
| 30 |
+
_union_is_complex,
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
if TYPE_CHECKING:
|
| 34 |
+
from pydantic_settings.main import BaseSettings
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def get_subcommand(
    model: PydanticModel,
    is_required: bool = True,
    cli_exit_on_error: bool | None = None,
    _suppress_errors: list[SettingsError | SystemExit] | None = None,
) -> PydanticModel | None:
    """
    Get the subcommand from a model.

    Args:
        model: The model to get the subcommand from.
        is_required: Determines whether a model must have subcommand set and raises error if not
            found. Defaults to `True`.
        cli_exit_on_error: Determines whether this function exits with error if no subcommand is found.
            Defaults to model_config `cli_exit_on_error` value if set. Otherwise, defaults to `True`.

    Returns:
        The subcommand model if found, otherwise `None`.

    Raises:
        SystemExit: When no subcommand is found and is_required=`True` and cli_exit_on_error=`True`
            (the default).
        SettingsError: When no subcommand is found and is_required=`True` and
            cli_exit_on_error=`False`.
    """

    model_cls = type(model)

    # Resolve the exit-on-error policy: the explicit argument wins, then the model
    # config (only if it holds a real bool), and finally a default of True.
    if cli_exit_on_error is None and is_model_class(model_cls):
        config_value = model_cls.model_config.get('cli_exit_on_error')
        if isinstance(config_value, bool):
            cli_exit_on_error = config_value
    exit_on_error = True if cli_exit_on_error is None else cli_exit_on_error

    # Return the first subcommand field that is actually set; collect the names of
    # all subcommand fields for the error message below.
    subcommand_fields: list[str] = []
    for name, info in _get_model_fields(model_cls).items():
        if _CliSubCommand not in info.metadata:
            continue
        value = getattr(model, name)
        if value is not None:
            return value
        subcommand_fields.append(name)

    if not is_required:
        return None

    if subcommand_fields:
        error_message = f'Error: CLI subcommand is required {{{", ".join(subcommand_fields)}}}'
    else:
        error_message = 'Error: CLI subcommand is required but no subcommands were found.'
    err: SettingsError | SystemExit = SystemExit(error_message) if exit_on_error else SettingsError(error_message)
    if _suppress_errors is not None:
        # Internal hook: collect the error for the caller instead of raising it.
        _suppress_errors.append(err)
        return None
    raise err
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
class PydanticBaseSettingsSource(ABC):
|
| 93 |
+
"""
|
| 94 |
+
Abstract base class for settings sources, every settings source classes should inherit from it.
|
| 95 |
+
"""
|
| 96 |
+
|
| 97 |
+
    def __init__(self, settings_cls: type[BaseSettings]):
        """
        Args:
            settings_cls: The `BaseSettings` subclass this source provides values for.
        """
        self.settings_cls = settings_cls
        # Direct reference to the model config for convenient access in subclasses.
        self.config = settings_cls.model_config
        # Merged state from higher-priority sources; populated via _set_current_state.
        self._current_state: dict[str, Any] = {}
        # Per-source raw states; populated via _set_settings_sources_data.
        self._settings_sources_data: dict[str, dict[str, Any]] = {}
|
| 102 |
+
|
| 103 |
+
def _set_current_state(self, state: dict[str, Any]) -> None:
|
| 104 |
+
"""
|
| 105 |
+
Record the state of settings from the previous settings sources. This should
|
| 106 |
+
be called right before __call__.
|
| 107 |
+
"""
|
| 108 |
+
self._current_state = state
|
| 109 |
+
|
| 110 |
+
def _set_settings_sources_data(self, states: dict[str, dict[str, Any]]) -> None:
|
| 111 |
+
"""
|
| 112 |
+
Record the state of settings from all previous settings sources. This should
|
| 113 |
+
be called right before __call__.
|
| 114 |
+
"""
|
| 115 |
+
self._settings_sources_data = states
|
| 116 |
+
|
| 117 |
+
@property
|
| 118 |
+
def current_state(self) -> dict[str, Any]:
|
| 119 |
+
"""
|
| 120 |
+
The current state of the settings, populated by the previous settings sources.
|
| 121 |
+
"""
|
| 122 |
+
return self._current_state
|
| 123 |
+
|
| 124 |
+
@property
|
| 125 |
+
def settings_sources_data(self) -> dict[str, dict[str, Any]]:
|
| 126 |
+
"""
|
| 127 |
+
The state of all previous settings sources.
|
| 128 |
+
"""
|
| 129 |
+
return self._settings_sources_data
|
| 130 |
+
|
| 131 |
+
@abstractmethod
|
| 132 |
+
def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
|
| 133 |
+
"""
|
| 134 |
+
Gets the value, the key for model creation, and a flag to determine whether value is complex.
|
| 135 |
+
|
| 136 |
+
This is an abstract method that should be overridden in every settings source classes.
|
| 137 |
+
|
| 138 |
+
Args:
|
| 139 |
+
field: The field.
|
| 140 |
+
field_name: The field name.
|
| 141 |
+
|
| 142 |
+
Returns:
|
| 143 |
+
A tuple that contains the value, key and a flag to determine whether value is complex.
|
| 144 |
+
"""
|
| 145 |
+
pass
|
| 146 |
+
|
| 147 |
+
def field_is_complex(self, field: FieldInfo) -> bool:
|
| 148 |
+
"""
|
| 149 |
+
Checks whether a field is complex, in which case it will attempt to be parsed as JSON.
|
| 150 |
+
|
| 151 |
+
Args:
|
| 152 |
+
field: The field.
|
| 153 |
+
|
| 154 |
+
Returns:
|
| 155 |
+
Whether the field is complex.
|
| 156 |
+
"""
|
| 157 |
+
return _annotation_is_complex(field.annotation, field.metadata)
|
| 158 |
+
|
| 159 |
+
def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any:
|
| 160 |
+
"""
|
| 161 |
+
Prepares the value of a field.
|
| 162 |
+
|
| 163 |
+
Args:
|
| 164 |
+
field_name: The field name.
|
| 165 |
+
field: The field.
|
| 166 |
+
value: The value of the field that has to be prepared.
|
| 167 |
+
value_is_complex: A flag to determine whether value is complex.
|
| 168 |
+
|
| 169 |
+
Returns:
|
| 170 |
+
The prepared value.
|
| 171 |
+
"""
|
| 172 |
+
if value is not None and (self.field_is_complex(field) or value_is_complex):
|
| 173 |
+
return self.decode_complex_value(field_name, field, value)
|
| 174 |
+
return value
|
| 175 |
+
|
| 176 |
+
def decode_complex_value(self, field_name: str, field: FieldInfo, value: Any) -> Any:
|
| 177 |
+
"""
|
| 178 |
+
Decode the value for a complex field
|
| 179 |
+
|
| 180 |
+
Args:
|
| 181 |
+
field_name: The field name.
|
| 182 |
+
field: The field.
|
| 183 |
+
value: The value of the field that has to be prepared.
|
| 184 |
+
|
| 185 |
+
Returns:
|
| 186 |
+
The decoded value for further preparation
|
| 187 |
+
"""
|
| 188 |
+
if field and (
|
| 189 |
+
NoDecode in _get_field_metadata(field)
|
| 190 |
+
or (self.config.get('enable_decoding') is False and ForceDecode not in field.metadata)
|
| 191 |
+
):
|
| 192 |
+
return value
|
| 193 |
+
|
| 194 |
+
return json.loads(value)
|
| 195 |
+
|
| 196 |
+
@abstractmethod
|
| 197 |
+
def __call__(self) -> dict[str, Any]:
|
| 198 |
+
pass
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
class ConfigFileSourceMixin(ABC):
    """Mixin for settings sources that merge values from one or more config files."""

    def _read_files(self, files: PathType | None, deep_merge: bool = False) -> dict[str, Any]:
        """Read every existing file in `files` and merge the results in order.

        Args:
            files: A single path or a sequence of paths; `None` yields an empty dict.
            deep_merge: Merge nested dictionaries recursively instead of replacing
                top-level keys wholesale.

        Returns:
            The merged contents of all files that exist; missing files are skipped.
        """
        if files is None:
            return {}
        # A lone path (including a plain string, which is itself a Sequence) is
        # normalized to a one-element list.
        if isinstance(files, str) or not isinstance(files, Sequence):
            files = [files]
        merged: dict[str, Any] = {}
        for entry in files:
            path = Path(entry) if isinstance(entry, str) else entry
            if isinstance(path, Path):
                path = path.expanduser()

            if not path.is_file():
                continue

            loaded = self._read_file(path)
            if deep_merge:
                merged = deep_update(merged, loaded)
            else:
                merged.update(loaded)
        return merged

    @abstractmethod
    def _read_file(self, path: Path) -> dict[str, Any]:
        """Parse a single config file into a dict. Implemented per file format."""
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
class DefaultSettingsSource(PydanticBaseSettingsSource):
    """
    Source class for loading default object values.

    Args:
        settings_cls: The Settings class.
        nested_model_default_partial_update: Whether to allow partial updates on nested model default object fields.
            Defaults to `False`.
    """

    def __init__(self, settings_cls: type[BaseSettings], nested_model_default_partial_update: bool | None = None):
        super().__init__(settings_cls)
        self.defaults: dict[str, Any] = {}
        if nested_model_default_partial_update is None:
            nested_model_default_partial_update = self.config.get('nested_model_default_partial_update', False)
        self.nested_model_default_partial_update = nested_model_default_partial_update
        if not self.nested_model_default_partial_update:
            return
        # Pre-serialize nested dataclass / model defaults so later sources can
        # partially override individual keys inside them.
        for field_name, field_info in settings_cls.model_fields.items():
            alias_names, *_ = _get_alias_names(field_name, field_info)
            preferred_alias = alias_names[0]
            default = field_info.default
            if is_dataclass(type(default)):
                self.defaults[preferred_alias] = asdict(default)
            elif is_model_class(type(default)):
                self.defaults[preferred_alias] = default.model_dump()

    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
        # No per-field lookup here; implemented only to satisfy the abstract interface.
        return None, '', False

    def __call__(self) -> dict[str, Any]:
        return self.defaults

    def __repr__(self) -> str:
        return (
            f'{self.__class__.__name__}(nested_model_default_partial_update={self.nested_model_default_partial_update})'
        )
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
class InitSettingsSource(PydanticBaseSettingsSource):
    """
    Source class for loading values provided during settings class initialization.
    """

    def __init__(
        self,
        settings_cls: type[BaseSettings],
        init_kwargs: dict[str, Any],
        nested_model_default_partial_update: bool | None = None,
    ):
        self.init_kwargs = {}
        remaining_keys = set(init_kwargs)
        # When populate_by_name/validate_by_name is on, the field name itself is
        # also an acceptable input key; keys are normalized to the preferred alias
        # to stay consistent across sources.
        allow_field_name = settings_cls.model_config.get('populate_by_name', False) or settings_cls.model_config.get(
            'validate_by_name', False
        )
        for field_name, field_info in settings_cls.model_fields.items():
            alias_names, *_ = _get_alias_names(field_name, field_info)
            candidates = set(alias_names)
            if allow_field_name:
                candidates.add(field_name)
            matched = remaining_keys & candidates
            if not matched:
                continue
            preferred_alias = alias_names[0] if alias_names else field_name
            # Deterministic choice of the provided key: first alias in alias_names
            # order, falling back to the field name when that is allowed.
            provided_key = next((alias for alias in alias_names if alias in remaining_keys), None)
            if provided_key is None and allow_field_name and field_name in remaining_keys:
                provided_key = field_name
            # `matched` is non-empty, so a provided key must exist.
            assert provided_key is not None
            remaining_keys -= matched
            self.init_kwargs[preferred_alias] = init_kwargs[provided_key]
        # Pass through any unmatched kwargs (e.g. extras) unchanged. If the provided
        # key is a field name with no alias and populate_by_name is off, it stays
        # as-is so it can be processed as extra if allowed.
        self.init_kwargs.update({key: val for key, val in init_kwargs.items() if key in remaining_keys})

        super().__init__(settings_cls)
        self.nested_model_default_partial_update = (
            nested_model_default_partial_update
            if nested_model_default_partial_update is not None
            else self.config.get('nested_model_default_partial_update', False)
        )

    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
        # No per-field lookup here; implemented only to satisfy the abstract interface.
        return None, '', False

    def __call__(self) -> dict[str, Any]:
        if self.nested_model_default_partial_update:
            return TypeAdapter(dict[str, Any]).dump_python(self.init_kwargs)
        return self.init_kwargs

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}(init_kwargs={self.init_kwargs!r})'
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
class PydanticBaseEnvSettingsSource(PydanticBaseSettingsSource):
    """Shared base for sources that resolve field values from a flat string mapping.

    Centralizes case-sensitivity handling, env-prefix application, alias
    extraction, and replacement of the configured "None" sentinel value.
    """

    def __init__(
        self,
        settings_cls: type[BaseSettings],
        case_sensitive: bool | None = None,
        env_prefix: str | None = None,
        env_prefix_target: EnvPrefixTarget | None = None,
        env_ignore_empty: bool | None = None,
        env_parse_none_str: str | None = None,
        env_parse_enums: bool | None = None,
    ) -> None:
        super().__init__(settings_cls)
        # Explicit constructor arguments take precedence over model_config values.
        self.case_sensitive = case_sensitive if case_sensitive is not None else self.config.get('case_sensitive', False)
        self.env_prefix = env_prefix if env_prefix is not None else self.config.get('env_prefix', '')
        self.env_prefix_target = (
            env_prefix_target if env_prefix_target is not None else self.config.get('env_prefix_target', 'variable')
        )
        self.env_ignore_empty = (
            env_ignore_empty if env_ignore_empty is not None else self.config.get('env_ignore_empty', False)
        )
        self.env_parse_none_str = (
            env_parse_none_str if env_parse_none_str is not None else self.config.get('env_parse_none_str')
        )
        self.env_parse_enums = env_parse_enums if env_parse_enums is not None else self.config.get('env_parse_enums')

    def _apply_case_sensitive(self, value: str) -> str:
        # Lower-case lookup keys when matching is case-insensitive.
        return value.lower() if not self.case_sensitive else value

    def _extract_field_info(self, field: FieldInfo, field_name: str) -> list[tuple[str, str, bool]]:
        """
        Extracts field info. This info is used to get the value of field from environment variables.

        It returns a list of tuples, each tuple contains:
            * field_key: The key of field that has to be used in model creation.
            * env_name: The environment variable name of the field.
            * value_is_complex: A flag to determine whether the value from environment variable
              is complex and has to be parsed.

        Args:
            field (FieldInfo): The field.
            field_name (str): The field name.

        Returns:
            list[tuple[str, str, bool]]: List of tuples, each tuple contains field_key, env_name, and value_is_complex.
        """
        field_info: list[tuple[str, str, bool]] = []
        if isinstance(field.validation_alias, (AliasChoices, AliasPath)):
            v_alias: str | list[str | int] | list[list[str | int]] | None = field.validation_alias.convert_to_aliases()
        else:
            v_alias = field.validation_alias

        if v_alias:
            # The prefix is applied to aliases only when configured to target them.
            env_prefix = self.env_prefix if self.env_prefix_target in ('alias', 'all') else ''
            if isinstance(v_alias, list):  # AliasChoices, AliasPath
                for alias in v_alias:
                    if isinstance(alias, str):  # AliasPath
                        # NOTE(review): `len(alias) > 1` measures the *string* length here,
                        # so any multi-character alias is flagged complex — looks like
                        # `len(v_alias) > 1` (path length) may have been intended; confirm.
                        field_info.append(
                            (alias, self._apply_case_sensitive(env_prefix + alias), True if len(alias) > 1 else False)
                        )
                    elif isinstance(alias, list):  # AliasChoices
                        first_arg = cast(str, alias[0])  # first item of an AliasChoices must be a str
                        field_info.append(
                            (
                                first_arg,
                                self._apply_case_sensitive(env_prefix + first_arg),
                                True if len(alias) > 1 else False,
                            )
                        )
            else:  # string validation alias
                field_info.append((v_alias, self._apply_case_sensitive(env_prefix + v_alias), False))

        if not v_alias or self.config.get('populate_by_name', False) or self.config.get('validate_by_name', False):
            annotation = _strip_annotated(_resolve_type_alias(field.annotation))
            env_prefix = self.env_prefix if self.env_prefix_target in ('variable', 'all') else ''
            if is_union_origin(get_origin(annotation)) and _union_is_complex(annotation, field.metadata):
                field_info.append((field_name, self._apply_case_sensitive(env_prefix + field_name), True))
            else:
                field_info.append((field_name, self._apply_case_sensitive(env_prefix + field_name), False))

        return field_info

    def _replace_field_names_case_insensitively(self, field: FieldInfo, field_values: dict[str, Any]) -> dict[str, Any]:
        """
        Replace field names in values dict by looking in models fields insensitively.

        By having the following models:

        ```py
        class SubSubSub(BaseModel):
            VaL3: str

        class SubSub(BaseModel):
            Val2: str
            SUB_sub_SuB: SubSubSub

        class Sub(BaseModel):
            VAL1: str
            SUB_sub: SubSub

        class Settings(BaseSettings):
            nested: Sub

            model_config = SettingsConfigDict(env_nested_delimiter='__')
        ```

        Then:
            _replace_field_names_case_insensitively(
                field,
                {"val1": "v1", "sub_SUB": {"VAL2": "v2", "sub_SUB_sUb": {"vAl3": "v3"}}}
            )
            Returns {'VAL1': 'v1', 'SUB_sub': {'Val2': 'v2', 'SUB_sub_SuB': {'VaL3': 'v3'}}}
        """
        values: dict[str, Any] = {}

        for name, value in field_values.items():
            sub_model_field: FieldInfo | None = None

            annotation = field.annotation

            # If field is Optional, we need to find the actual type
            if is_union_origin(get_origin(field.annotation)):
                args = get_args(annotation)
                if len(args) == 2 and type(None) in args:
                    for arg in args:
                        # NOTE(review): `arg is not None` is always true for type objects
                        # (`NoneType` is a class, not `None`), so this picks the first
                        # union member unconditionally — presumably `arg is not type(None)`
                        # was intended; confirm against upstream behavior.
                        if arg is not None:
                            annotation = arg
                            break

            # This is here to make mypy happy
            # Item "None" of "Optional[Type[Any]]" has no attribute "model_fields"
            if not annotation or not hasattr(annotation, 'model_fields'):
                values[name] = value
                continue
            else:
                model_fields: dict[str, FieldInfo] = annotation.model_fields

            # Find field in sub model by looking in fields case insensitively
            field_key: str | None = None
            for sub_model_field_name, sub_model_field in model_fields.items():
                aliases, _ = _get_alias_names(sub_model_field_name, sub_model_field)
                _search = (alias for alias in aliases if alias.lower() == name.lower())
                if field_key := next(_search, None):
                    break

            if not field_key:
                values[name] = value
                continue

            if (
                sub_model_field is not None
                and _lenient_issubclass(sub_model_field.annotation, BaseModel)
                and isinstance(value, dict)
            ):
                # Recurse into nested models so deeply nested keys are fixed too.
                values[field_key] = self._replace_field_names_case_insensitively(sub_model_field, value)
            else:
                values[field_key] = value

        return values

    def _replace_env_none_type_values(self, field_value: dict[str, Any]) -> dict[str, Any]:
        """
        Recursively parse values that are of "None" type(EnvNoneType) to `None` type(None).
        """
        values: dict[str, Any] = {}

        for key, value in field_value.items():
            if not isinstance(value, EnvNoneType):
                values[key] = value if not isinstance(value, dict) else self._replace_env_none_type_values(value)
            else:
                values[key] = None

        return values

    def _get_resolved_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
        """
        Gets the value, the preferred alias key for model creation, and a flag to determine whether value
        is complex.

        Note:
            In V3, this method should either be made public, or, this method should be removed and the
            abstract method get_field_value should be updated to include a "use_preferred_alias" flag.

        Args:
            field: The field.
            field_name: The field name.

        Returns:
            A tuple that contains the value, preferred key and a flag to determine whether value is complex.
        """
        field_value, field_key, value_is_complex = self.get_field_value(field, field_name)
        # Only use preferred_key when no value was found; otherwise preserve the key that matched
        if field_value is None and not (
            value_is_complex
            or (
                (self.config.get('populate_by_name', False) or self.config.get('validate_by_name', False))
                and (field_key == field_name)
            )
        ):
            field_infos = self._extract_field_info(field, field_name)
            preferred_key, *_ = field_infos[0]
            return field_value, preferred_key, value_is_complex
        return field_value, field_key, value_is_complex

    def __call__(self) -> dict[str, Any]:
        """Collect the prepared value of every model field into a single dict."""
        data: dict[str, Any] = {}

        for field_name, field in self.settings_cls.model_fields.items():
            try:
                field_value, field_key, value_is_complex = self._get_resolved_field_value(field, field_name)
            except Exception as e:
                raise SettingsError(
                    f'error getting value for field "{field_name}" from source "{self.__class__.__name__}"'
                ) from e

            try:
                field_value = self.prepare_field_value(field_name, field, field_value, value_is_complex)
            except ValueError as e:
                raise SettingsError(
                    f'error parsing value for field "{field_name}" from source "{self.__class__.__name__}"'
                ) from e

            if field_value is not None:
                if self.env_parse_none_str is not None:
                    # Swap the EnvNoneType sentinel (possibly nested) for real None.
                    if isinstance(field_value, dict):
                        field_value = self._replace_env_none_type_values(field_value)
                    elif isinstance(field_value, EnvNoneType):
                        field_value = None
                if (
                    not self.case_sensitive
                    # and _lenient_issubclass(field.annotation, BaseModel)
                    and isinstance(field_value, dict)
                ):
                    data[field_key] = self._replace_field_names_case_insensitively(field, field_value)
                else:
                    data[field_key] = field_value

        return data
|
| 570 |
+
|
| 571 |
+
|
| 572 |
+
# Public names exported by this module.
__all__ = [
    'ConfigFileSourceMixin',
    'DefaultSettingsSource',
    'InitSettingsSource',
    'PydanticBaseEnvSettingsSource',
    'PydanticBaseSettingsSource',
    'SettingsError',
]
|
source/pydantic_settings/sources/providers/__init__.py
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Package containing individual source implementations."""
|
| 2 |
+
|
| 3 |
+
from .aws import AWSSecretsManagerSettingsSource
|
| 4 |
+
from .azure import AzureKeyVaultSettingsSource
|
| 5 |
+
from .cli import (
|
| 6 |
+
CliDualFlag,
|
| 7 |
+
CliExplicitFlag,
|
| 8 |
+
CliImplicitFlag,
|
| 9 |
+
CliMutuallyExclusiveGroup,
|
| 10 |
+
CliPositionalArg,
|
| 11 |
+
CliSettingsSource,
|
| 12 |
+
CliSubCommand,
|
| 13 |
+
CliSuppress,
|
| 14 |
+
CliToggleFlag,
|
| 15 |
+
)
|
| 16 |
+
from .dotenv import DotEnvSettingsSource
|
| 17 |
+
from .env import EnvSettingsSource
|
| 18 |
+
from .gcp import GoogleSecretManagerSettingsSource
|
| 19 |
+
from .json import JsonConfigSettingsSource
|
| 20 |
+
from .pyproject import PyprojectTomlConfigSettingsSource
|
| 21 |
+
from .secrets import SecretsSettingsSource
|
| 22 |
+
from .toml import TomlConfigSettingsSource
|
| 23 |
+
from .yaml import YamlConfigSettingsSource
|
| 24 |
+
|
| 25 |
+
# Names re-exported by the `pydantic_settings.sources.providers` package.
__all__ = [
    'AWSSecretsManagerSettingsSource',
    'AzureKeyVaultSettingsSource',
    'CliExplicitFlag',
    'CliImplicitFlag',
    'CliToggleFlag',
    'CliDualFlag',
    'CliMutuallyExclusiveGroup',
    'CliPositionalArg',
    'CliSettingsSource',
    'CliSubCommand',
    'CliSuppress',
    'DotEnvSettingsSource',
    'EnvSettingsSource',
    'GoogleSecretManagerSettingsSource',
    'JsonConfigSettingsSource',
    'PyprojectTomlConfigSettingsSource',
    'SecretsSettingsSource',
    'TomlConfigSettingsSource',
    'YamlConfigSettingsSource',
]
|
source/pydantic_settings/sources/providers/aws.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations as _annotations # important for BaseSettings import to work
|
| 2 |
+
|
| 3 |
+
import json
|
| 4 |
+
from collections.abc import Mapping
|
| 5 |
+
from typing import TYPE_CHECKING
|
| 6 |
+
|
| 7 |
+
from ..utils import parse_env_vars
|
| 8 |
+
from .env import EnvSettingsSource
|
| 9 |
+
|
| 10 |
+
if TYPE_CHECKING:
|
| 11 |
+
from pydantic_settings.main import BaseSettings
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
boto3_client = None
SecretsManagerClient = None


def import_aws_secrets_manager() -> None:
    """Import the optional AWS Secrets Manager dependencies into module globals.

    Raises:
        ImportError: If the `aws-secrets-manager` extra is not installed.
    """
    global boto3_client, SecretsManagerClient

    try:
        from boto3 import client as boto3_client
        from mypy_boto3_secretsmanager.client import SecretsManagerClient
    except ImportError as e:  # pragma: no cover
        raise ImportError(
            'AWS Secrets Manager dependencies are not installed, run `pip install pydantic-settings[aws-secrets-manager]`'
        ) from e
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class AWSSecretsManagerSettingsSource(EnvSettingsSource):
    """Settings source that loads fields from a single AWS Secrets Manager secret.

    The secret payload is JSON-decoded and its keys are treated like environment
    variables; `env_nested_delimiter` (default `'--'`) is forwarded to the env
    source machinery for nested keys.
    """

    _secret_id: str
    _secretsmanager_client: SecretsManagerClient  # type: ignore

    def __init__(
        self,
        settings_cls: type[BaseSettings],
        secret_id: str,
        region_name: str | None = None,
        endpoint_url: str | None = None,
        case_sensitive: bool | None = True,
        env_prefix: str | None = None,
        env_nested_delimiter: str | None = '--',
        env_parse_none_str: str | None = None,
        env_parse_enums: bool | None = None,
        version_id: str | None = None,
    ) -> None:
        # Fails fast with a helpful message when the optional extra is missing.
        import_aws_secrets_manager()
        self._secretsmanager_client = boto3_client('secretsmanager', region_name=region_name, endpoint_url=endpoint_url)  # type: ignore
        self._secret_id = secret_id
        self._version_id = version_id
        super().__init__(
            settings_cls,
            case_sensitive=case_sensitive,
            env_prefix=env_prefix,
            env_nested_delimiter=env_nested_delimiter,
            env_ignore_empty=False,
            env_parse_none_str=env_parse_none_str,
            env_parse_enums=env_parse_enums,
        )

    def _load_env_vars(self) -> Mapping[str, str | None]:
        """Fetch the secret value and expose its JSON keys like env vars."""
        request: dict[str, str] = {'SecretId': self._secret_id}
        if self._version_id:
            request['VersionId'] = self._version_id

        response = self._secretsmanager_client.get_secret_value(**request)  # type: ignore
        secret_data = json.loads(response['SecretString'])
        return parse_env_vars(secret_data, self.case_sensitive, self.env_ignore_empty, self.env_parse_none_str)

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}(secret_id={self._secret_id!r}, env_nested_delimiter={self.env_nested_delimiter!r})'
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
# Public names exported by this module.
__all__ = [
    'AWSSecretsManagerSettingsSource',
]
|
source/pydantic_settings/sources/providers/azure.py
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Azure Key Vault settings source."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations as _annotations
|
| 4 |
+
|
| 5 |
+
from collections.abc import Iterator, Mapping
|
| 6 |
+
from typing import TYPE_CHECKING
|
| 7 |
+
|
| 8 |
+
from pydantic.alias_generators import to_snake
|
| 9 |
+
from pydantic.fields import FieldInfo
|
| 10 |
+
|
| 11 |
+
from .env import EnvSettingsSource
|
| 12 |
+
|
| 13 |
+
if TYPE_CHECKING:
|
| 14 |
+
from azure.core.credentials import TokenCredential
|
| 15 |
+
from azure.core.exceptions import ResourceNotFoundError
|
| 16 |
+
from azure.keyvault.secrets import SecretClient
|
| 17 |
+
|
| 18 |
+
from pydantic_settings.main import BaseSettings
|
| 19 |
+
else:
|
| 20 |
+
TokenCredential = None
|
| 21 |
+
ResourceNotFoundError = None
|
| 22 |
+
SecretClient = None
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def import_azure_key_vault() -> None:
    """Import the optional Azure Key Vault dependencies into module globals.

    Raises:
        ImportError: If the `azure-key-vault` extra is not installed.
    """
    global TokenCredential, SecretClient, ResourceNotFoundError

    try:
        from azure.core.credentials import TokenCredential
        from azure.core.exceptions import ResourceNotFoundError
        from azure.keyvault.secrets import SecretClient
    except ImportError as e:  # pragma: no cover
        raise ImportError(
            'Azure Key Vault dependencies are not installed, run `pip install pydantic-settings[azure-key-vault]`'
        ) from e
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class AzureKeyVaultMapping(Mapping[str, str | None]):
|
| 41 |
+
_loaded_secrets: dict[str, str | None]
|
| 42 |
+
_secret_client: SecretClient
|
| 43 |
+
_secret_names: list[str]
|
| 44 |
+
|
| 45 |
+
def __init__(
|
| 46 |
+
self,
|
| 47 |
+
secret_client: SecretClient,
|
| 48 |
+
case_sensitive: bool,
|
| 49 |
+
snake_case_conversion: bool,
|
| 50 |
+
env_prefix: str | None,
|
| 51 |
+
) -> None:
|
| 52 |
+
self._loaded_secrets = {}
|
| 53 |
+
self._secret_client = secret_client
|
| 54 |
+
self._case_sensitive = case_sensitive
|
| 55 |
+
self._snake_case_conversion = snake_case_conversion
|
| 56 |
+
self._env_prefix = env_prefix if env_prefix else ''
|
| 57 |
+
self._secret_map: dict[str, str] = self._load_remote()
|
| 58 |
+
|
| 59 |
+
def _load_remote(self) -> dict[str, str]:
|
| 60 |
+
secret_names: Iterator[str] = (
|
| 61 |
+
secret.name for secret in self._secret_client.list_properties_of_secrets() if secret.name and secret.enabled
|
| 62 |
+
)
|
| 63 |
+
|
| 64 |
+
if self._snake_case_conversion:
|
| 65 |
+
name_map: dict[str, str] = {}
|
| 66 |
+
for name in secret_names:
|
| 67 |
+
if name.startswith(self._env_prefix):
|
| 68 |
+
name_map[f'{self._env_prefix}{to_snake(name[len(self._env_prefix) :])}'] = name
|
| 69 |
+
else:
|
| 70 |
+
name_map[to_snake(name)] = name
|
| 71 |
+
return name_map
|
| 72 |
+
|
| 73 |
+
if self._case_sensitive:
|
| 74 |
+
return {name: name for name in secret_names}
|
| 75 |
+
|
| 76 |
+
return {name.lower(): name for name in secret_names}
|
| 77 |
+
|
| 78 |
+
def __getitem__(self, key: str) -> str | None:
|
| 79 |
+
new_key = key
|
| 80 |
+
|
| 81 |
+
if self._snake_case_conversion:
|
| 82 |
+
if key.startswith(self._env_prefix):
|
| 83 |
+
new_key = f'{self._env_prefix}{to_snake(key[len(self._env_prefix) :])}'
|
| 84 |
+
else:
|
| 85 |
+
new_key = to_snake(key)
|
| 86 |
+
|
| 87 |
+
elif not self._case_sensitive:
|
| 88 |
+
new_key = key.lower()
|
| 89 |
+
|
| 90 |
+
if new_key not in self._loaded_secrets:
|
| 91 |
+
if new_key in self._secret_map:
|
| 92 |
+
self._loaded_secrets[new_key] = self._secret_client.get_secret(self._secret_map[new_key]).value
|
| 93 |
+
else:
|
| 94 |
+
raise KeyError(key)
|
| 95 |
+
|
| 96 |
+
return self._loaded_secrets[new_key]
|
| 97 |
+
|
| 98 |
+
def __len__(self) -> int:
|
| 99 |
+
return len(self._secret_map)
|
| 100 |
+
|
| 101 |
+
def __iter__(self) -> Iterator[str]:
|
| 102 |
+
return iter(self._secret_map.keys())
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
class AzureKeyVaultSettingsSource(EnvSettingsSource):
    """Settings source that reads field values from an Azure Key Vault.

    Secrets are exposed through an `AzureKeyVaultMapping`, which lists secret
    names eagerly but fetches individual values lazily.
    """

    _url: str
    _credential: TokenCredential

    def __init__(
        self,
        settings_cls: type[BaseSettings],
        url: str,
        credential: TokenCredential,
        dash_to_underscore: bool = False,
        case_sensitive: bool | None = None,
        snake_case_conversion: bool = False,
        env_prefix: str | None = None,
        env_parse_none_str: str | None = None,
        env_parse_enums: bool | None = None,
    ) -> None:
        # Fail fast if the azure-keyvault/azure-identity packages are not installed.
        import_azure_key_vault()
        self._url = url
        self._credential = credential
        self._dash_to_underscore = dash_to_underscore
        self._snake_case_conversion = snake_case_conversion
        # Snake-case conversion forces case-sensitive matching and a '__' nested
        # delimiter; otherwise '--' is used (Key Vault names cannot contain '_').
        effective_case_sensitive = True if snake_case_conversion else case_sensitive
        nested_delimiter = '__' if snake_case_conversion else '--'
        super().__init__(
            settings_cls,
            case_sensitive=effective_case_sensitive,
            env_prefix=env_prefix,
            env_nested_delimiter=nested_delimiter,
            env_ignore_empty=False,
            env_parse_none_str=env_parse_none_str,
            env_parse_enums=env_parse_enums,
        )

    def _load_env_vars(self) -> Mapping[str, str | None]:
        """Return a lazy mapping over the vault's secrets."""
        client = SecretClient(vault_url=self._url, credential=self._credential)
        return AzureKeyVaultMapping(
            secret_client=client,
            case_sensitive=self.case_sensitive,
            snake_case_conversion=self._snake_case_conversion,
            env_prefix=self.env_prefix,
        )

    def _extract_field_info(self, field: FieldInfo, field_name: str) -> list[tuple[str, str, bool]]:
        """Return field info entries, translating '_' to '-' in names when `dash_to_underscore` is set."""
        entries = super()._extract_field_info(field, field_name)
        if self._snake_case_conversion:
            # Names are already normalized by the snake-case mapping; pass entries through.
            return [(field_key, env_name, value_is_complex) for field_key, env_name, value_is_complex in entries]

        if self._dash_to_underscore:
            return [
                (field_key, env_name.replace('_', '-'), value_is_complex)
                for field_key, env_name, value_is_complex in entries
            ]

        return entries

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}(url={self._url!r}, env_nested_delimiter={self.env_nested_delimiter!r})'
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
__all__ = ['AzureKeyVaultMapping', 'AzureKeyVaultSettingsSource']
|
source/pydantic_settings/sources/providers/cli.py
ADDED
|
@@ -0,0 +1,1522 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Command-line interface settings source."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations as _annotations
|
| 4 |
+
|
| 5 |
+
import copy
|
| 6 |
+
import json
|
| 7 |
+
import re
|
| 8 |
+
import shlex
|
| 9 |
+
import sys
|
| 10 |
+
import typing
|
| 11 |
+
from argparse import (
|
| 12 |
+
SUPPRESS,
|
| 13 |
+
ArgumentParser,
|
| 14 |
+
BooleanOptionalAction,
|
| 15 |
+
Namespace,
|
| 16 |
+
RawDescriptionHelpFormatter,
|
| 17 |
+
_SubParsersAction,
|
| 18 |
+
)
|
| 19 |
+
from collections import defaultdict
|
| 20 |
+
from collections.abc import Callable, Mapping, Sequence
|
| 21 |
+
from enum import Enum
|
| 22 |
+
from functools import cached_property
|
| 23 |
+
from itertools import chain
|
| 24 |
+
from textwrap import dedent
|
| 25 |
+
from types import SimpleNamespace
|
| 26 |
+
from typing import (
|
| 27 |
+
TYPE_CHECKING,
|
| 28 |
+
Annotated,
|
| 29 |
+
Any,
|
| 30 |
+
Generic,
|
| 31 |
+
Literal,
|
| 32 |
+
NoReturn,
|
| 33 |
+
TypeVar,
|
| 34 |
+
cast,
|
| 35 |
+
get_args,
|
| 36 |
+
get_origin,
|
| 37 |
+
overload,
|
| 38 |
+
)
|
| 39 |
+
|
| 40 |
+
from pydantic import AliasChoices, AliasPath, BaseModel, Field, PrivateAttr, TypeAdapter, ValidationError
|
| 41 |
+
from pydantic._internal._repr import Representation
|
| 42 |
+
from pydantic._internal._utils import is_model_class
|
| 43 |
+
from pydantic.dataclasses import is_pydantic_dataclass
|
| 44 |
+
from pydantic.fields import FieldInfo
|
| 45 |
+
from pydantic_core import PydanticUndefined
|
| 46 |
+
from typing_inspection import typing_objects
|
| 47 |
+
from typing_inspection.introspection import is_union_origin
|
| 48 |
+
|
| 49 |
+
from ...exceptions import SettingsError
|
| 50 |
+
from ...utils import _lenient_issubclass, _typing_base, _WithArgsTypes
|
| 51 |
+
from ..types import (
|
| 52 |
+
ForceDecode,
|
| 53 |
+
NoDecode,
|
| 54 |
+
PydanticModel,
|
| 55 |
+
_CliDualFlag,
|
| 56 |
+
_CliExplicitFlag,
|
| 57 |
+
_CliImplicitFlag,
|
| 58 |
+
_CliPositionalArg,
|
| 59 |
+
_CliSubCommand,
|
| 60 |
+
_CliToggleFlag,
|
| 61 |
+
_CliUnknownArgs,
|
| 62 |
+
)
|
| 63 |
+
from ..utils import (
|
| 64 |
+
_annotation_contains_types,
|
| 65 |
+
_annotation_enum_val_to_name,
|
| 66 |
+
_get_alias_names,
|
| 67 |
+
_get_model_fields,
|
| 68 |
+
_is_function,
|
| 69 |
+
_strip_annotated,
|
| 70 |
+
parse_env_vars,
|
| 71 |
+
)
|
| 72 |
+
from .env import EnvSettingsSource
|
| 73 |
+
|
| 74 |
+
if TYPE_CHECKING:
|
| 75 |
+
from pydantic_settings.main import BaseSettings
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class _CliInternalArgParser(ArgumentParser):
|
| 79 |
+
def __init__(self, cli_exit_on_error: bool = True, **kwargs: Any) -> None:
|
| 80 |
+
super().__init__(**kwargs)
|
| 81 |
+
self._cli_exit_on_error = cli_exit_on_error
|
| 82 |
+
|
| 83 |
+
def error(self, message: str) -> NoReturn:
|
| 84 |
+
if not self._cli_exit_on_error:
|
| 85 |
+
raise SettingsError(f'error parsing CLI: {message}')
|
| 86 |
+
super().error(message)
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
class CliMutuallyExclusiveGroup(BaseModel):
    # Marker base class: per its name, models deriving from it are intended to be
    # rendered as a mutually exclusive CLI argument group. The handling lives
    # elsewhere in this module — confirm against the parser-construction code.
    pass
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
class _CliArg(BaseModel):
    """Internal record describing one CLI argument derived from a model field.

    On construction it registers itself (and per-alias-path / per-subcommand
    copies) into ``parser_map`` so that parsed values can later be routed back
    to the originating field.
    """

    model: Any  # the model class that owns the field
    parser: Any  # the (sub)parser this argument is added to
    field_name: str
    arg_prefix: str  # dotted prefix accumulated from nesting (e.g. 'parent.child.')
    case_sensitive: bool
    hide_none_type: bool
    kebab_case: bool | Literal['all', 'no_enums'] | None
    enable_decoding: bool | None
    env_prefix_len: int  # length of the env prefix, used to strip it in `dest`
    # NOTE: mutable defaults are safe on pydantic models (copied per instance).
    args: list[str] = []
    kwargs: dict[str, Any] = {}

    _alias_names: tuple[str, ...] = PrivateAttr(())
    _alias_paths: dict[str, int | None] = PrivateAttr({})
    _is_alias_path_only: bool = PrivateAttr(False)
    _field_info: FieldInfo = PrivateAttr()

    def __init__(
        self,
        field_info: FieldInfo,
        parser_map: defaultdict[str | FieldInfo, dict[int | None | str | type[BaseModel], _CliArg]],
        **values: Any,
    ) -> None:
        super().__init__(**values)
        self._field_info = field_info
        # _get_alias_names also populates self._alias_paths in place via alias_path_args.
        self._alias_names, self._is_alias_path_only = _get_alias_names(
            self.field_name, self.field_info, alias_path_args=self._alias_paths, case_sensitive=self.case_sensitive
        )

        alias_path_dests = {f'{self.arg_prefix}{name}': index for name, index in self._alias_paths.items()}
        if self.subcommand_dest:
            # Subcommand field: register one copy per sub-model, keyed three ways
            # (by subcommand alias, by sub-model class, and under the field info).
            for sub_model in self.sub_models:
                subcommand_alias = self.subcommand_alias(sub_model)
                parser_map[self.subcommand_dest][subcommand_alias] = self.model_copy(update={'args': [], 'kwargs': {}})
                parser_map[self.subcommand_dest][sub_model] = parser_map[self.subcommand_dest][subcommand_alias]
                parser_map[self.field_info][subcommand_alias] = parser_map[self.subcommand_dest][subcommand_alias]
        elif self.dest not in alias_path_dests:
            # Plain argument: register this instance itself under its dest.
            parser_map[self.dest][None] = self
            parser_map[self.field_info][None] = parser_map[self.dest][None]
        # Alias paths always get their own (index-keyed) copies, independent of the above.
        for alias_path_dest, index in alias_path_dests.items():
            parser_map[alias_path_dest][index] = self.model_copy(update={'args': [], 'kwargs': {}})
            parser_map[self.field_info][index] = parser_map[alias_path_dest][index]

    @classmethod
    def get_kebab_case(cls, name: str, kebab_case: bool | Literal['all', 'no_enums'] | None) -> str:
        # Any truthy mode ('all', 'no_enums', True) converts underscores to dashes.
        return name.replace('_', '-') if kebab_case not in (None, False) else name

    @classmethod
    def get_enum_names(
        cls, annotation: type[Any], kebab_case: bool | Literal['all', 'no_enums'] | None
    ) -> tuple[str, ...]:
        """Recursively collect enum member names reachable from *annotation*.

        Names are kebab-cased only when ``kebab_case == 'all'``.
        """
        enum_names: tuple[str, ...] = ()
        annotation = _strip_annotated(annotation)
        # Recurse into generic/union type arguments first.
        for type_ in get_args(annotation):
            enum_names += cls.get_enum_names(type_, kebab_case)
        if annotation and _lenient_issubclass(annotation, Enum):
            enum_names += tuple(cls.get_kebab_case(name, kebab_case == 'all') for name in annotation.__members__.keys())
        return enum_names

    def subcommand_alias(self, sub_model: type[BaseModel]) -> str:
        # With multiple sub-models the class name disambiguates; with one, the
        # field's preferred alias is used directly.
        return self.get_kebab_case(
            sub_model.__name__ if len(self.sub_models) > 1 else self.preferred_alias, self.kebab_case
        )

    @cached_property
    def field_info(self) -> FieldInfo:
        return self._field_info

    @cached_property
    def subcommand_dest(self) -> str | None:
        # Non-None only for fields annotated with CliSubCommand.
        return f'{self.arg_prefix}:subcommand' if _CliSubCommand in self.field_info.metadata else None

    @cached_property
    def dest(self) -> str:
        if (
            not self.subcommand_dest
            and self.arg_prefix
            and self.field_info.validation_alias is not None
            and not self.is_parser_submodel
        ):
            # Strip prefix if validation alias is set and value is not complex.
            # Related https://github.com/pydantic/pydantic-settings/pull/25
            return f'{self.arg_prefix}{self.preferred_alias}'[self.env_prefix_len :]
        return f'{self.arg_prefix}{self.preferred_alias}'

    @cached_property
    def preferred_arg_name(self) -> str:
        # First registered argparse flag/name, kebab-cased when configured.
        return self.args[0].replace('_', '-') if self.kebab_case else self.args[0]

    @cached_property
    def sub_models(self) -> list[type[BaseModel]]:
        """Return the pydantic model/dataclass types found in the field's annotation."""
        # A bare annotation is treated as a one-element tuple; otherwise inspect its args.
        field_types: tuple[Any, ...] = (
            (self.field_info.annotation,)
            if not get_args(self.field_info.annotation)
            else get_args(self.field_info.annotation)
        )
        if self.hide_none_type:
            field_types = tuple([type_ for type_ in field_types if type_ is not type(None)])

        sub_models: list[type[BaseModel]] = []
        for type_ in field_types:
            # CliSubCommand / CliPositionalArg markers must wrap the whole annotation,
            # not appear nested inside it.
            if _annotation_contains_types(type_, (_CliSubCommand,), is_include_origin=False):
                raise SettingsError(
                    f'CliSubCommand is not outermost annotation for {self.model.__name__}.{self.field_name}'
                )
            elif _annotation_contains_types(type_, (_CliPositionalArg,), is_include_origin=False):
                raise SettingsError(
                    f'CliPositionalArg is not outermost annotation for {self.model.__name__}.{self.field_name}'
                )
            if is_model_class(_strip_annotated(type_)) or is_pydantic_dataclass(_strip_annotated(type_)):
                sub_models.append(_strip_annotated(type_))
        return sub_models

    @cached_property
    def alias_names(self) -> tuple[str, ...]:
        return self._alias_names

    @cached_property
    def alias_paths(self) -> dict[str, int | None]:
        return self._alias_paths

    @cached_property
    def preferred_alias(self) -> str:
        # The first alias is the preferred one, per _get_alias_names ordering.
        return self._alias_names[0]

    @cached_property
    def is_alias_path_only(self) -> bool:
        return self._is_alias_path_only

    @cached_property
    def is_append_action(self) -> bool:
        # Collection-typed fields (list/set/dict/Sequence/Mapping) are collected
        # with an append-style action rather than a single value.
        return not self.subcommand_dest and _annotation_contains_types(
            self.field_info.annotation, (list, set, dict, Sequence, Mapping), is_strip_annotated=True
        )

    @cached_property
    def is_parser_submodel(self) -> bool:
        # A nested model rendered as a group of arguments (not a subcommand, not a collection).
        return not self.subcommand_dest and bool(self.sub_models) and not self.is_append_action

    @cached_property
    def is_no_decode(self) -> bool:
        # Decoding is skipped when the field opts out via NoDecode, or when decoding
        # is globally disabled and the field does not opt back in via ForceDecode.
        return self.field_info is not None and (
            NoDecode in self.field_info.metadata
            or (self.enable_decoding is False and ForceDecode not in self.field_info.metadata)
        )
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
T = TypeVar('T')
# Public annotated aliases: each pairs a user-facing type with an internal
# marker object that the CLI settings source detects in field metadata.
CliSubCommand = Annotated[T | None, _CliSubCommand]  # field is a CLI subcommand (optional)
CliPositionalArg = Annotated[T, _CliPositionalArg]  # field is a positional argument
_CliBoolFlag = TypeVar('_CliBoolFlag', bound=bool)  # flag aliases are restricted to bool fields
CliImplicitFlag = Annotated[_CliBoolFlag, _CliImplicitFlag]
CliExplicitFlag = Annotated[_CliBoolFlag, _CliExplicitFlag]
CliToggleFlag = Annotated[_CliBoolFlag, _CliToggleFlag]
CliDualFlag = Annotated[_CliBoolFlag, _CliDualFlag]
CLI_SUPPRESS = SUPPRESS  # re-export of argparse.SUPPRESS for hiding entries from help text
CliSuppress = Annotated[T, CLI_SUPPRESS]  # field is hidden from CLI help
CliUnknownArgs = Annotated[list[str], Field(default=[]), _CliUnknownArgs, NoDecode]  # collects unrecognized args verbatim
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
class CliSettingsSource(EnvSettingsSource, Generic[T]):
|
| 255 |
+
"""
|
| 256 |
+
Source class for loading settings values from CLI.
|
| 257 |
+
|
| 258 |
+
Note:
|
| 259 |
+
A `CliSettingsSource` connects with a `root_parser` object by using the parser methods to add
|
| 260 |
+
`settings_cls` fields as command line arguments. The `CliSettingsSource` internal parser representation
|
| 261 |
+
is based upon the `argparse` parsing library, and therefore, requires the parser methods to support
|
| 262 |
+
the same attributes as their `argparse` library counterparts.
|
| 263 |
+
|
| 264 |
+
Args:
|
| 265 |
+
cli_prog_name: The CLI program name to display in help text. Defaults to `None` if cli_parse_args is `None`.
|
| 266 |
+
Otherwise, defaults to sys.argv[0].
|
| 267 |
+
cli_parse_args: The list of CLI arguments to parse. Defaults to None.
|
| 268 |
+
If set to `True`, defaults to sys.argv[1:].
|
| 269 |
+
cli_parse_none_str: The CLI string value that should be parsed (e.g. "null", "void", "None", etc.) into `None`
|
| 270 |
+
type(None). Defaults to "null" if cli_avoid_json is `False`, and "None" if cli_avoid_json is `True`.
|
| 271 |
+
cli_hide_none_type: Hide `None` values in CLI help text. Defaults to `False`.
|
| 272 |
+
cli_avoid_json: Avoid complex JSON objects in CLI help text. Defaults to `False`.
|
| 273 |
+
cli_enforce_required: Enforce required fields at the CLI. Defaults to `False`.
|
| 274 |
+
cli_use_class_docs_for_groups: Use class docstrings in CLI group help text instead of field descriptions.
|
| 275 |
+
Defaults to `False`.
|
| 276 |
+
cli_exit_on_error: Determines whether or not the internal parser exits with error info when an error occurs.
|
| 277 |
+
Defaults to `True`.
|
| 278 |
+
cli_prefix: Prefix for command line arguments added under the root parser. Defaults to "".
|
| 279 |
+
cli_flag_prefix_char: The flag prefix character to use for CLI optional arguments. Defaults to '-'.
|
| 280 |
+
cli_implicit_flags: Controls how `bool` fields are exposed as CLI flags.
|
| 281 |
+
|
| 282 |
+
- False (default): no implicit flags are generated; booleans must be set explicitly (e.g. --flag=true).
|
| 283 |
+
- True / 'dual': optional boolean fields generate both positive and negative forms (--flag and --no-flag).
|
| 284 |
+
- 'toggle': required boolean fields remain in 'dual' mode, while optional boolean fields generate a single
|
| 285 |
+
flag aligned with the default value (if default=False, expose --flag; if default=True, expose --no-flag).
|
| 286 |
+
cli_ignore_unknown_args: Whether to ignore unknown CLI args and parse only known ones. Defaults to `False`.
|
| 287 |
+
cli_kebab_case: CLI args use kebab case. Defaults to `False`.
|
| 288 |
+
cli_shortcuts: Mapping of target field name to alias names. Defaults to `None`.
|
| 289 |
+
case_sensitive: Whether CLI "--arg" names should be read with case-sensitivity. Defaults to `True`.
|
| 290 |
+
Note: Case-insensitive matching is only supported on the internal root parser and does not apply to CLI
|
| 291 |
+
subcommands.
|
| 292 |
+
root_parser: The root parser object.
|
| 293 |
+
parse_args_method: The root parser parse args method. Defaults to `argparse.ArgumentParser.parse_args`.
|
| 294 |
+
add_argument_method: The root parser add argument method. Defaults to `argparse.ArgumentParser.add_argument`.
|
| 295 |
+
add_argument_group_method: The root parser add argument group method.
|
| 296 |
+
Defaults to `argparse.ArgumentParser.add_argument_group`.
|
| 297 |
+
add_parser_method: The root parser add new parser (sub-command) method.
|
| 298 |
+
Defaults to `argparse._SubParsersAction.add_parser`.
|
| 299 |
+
add_subparsers_method: The root parser add subparsers (sub-commands) method.
|
| 300 |
+
Defaults to `argparse.ArgumentParser.add_subparsers`.
|
| 301 |
+
format_help_method: The root parser format help method. Defaults to `argparse.ArgumentParser.format_help`.
|
| 302 |
+
formatter_class: A class for customizing the root parser help text. Defaults to `argparse.RawDescriptionHelpFormatter`.
|
| 303 |
+
"""
|
| 304 |
+
|
| 305 |
+
def __init__(
|
| 306 |
+
self,
|
| 307 |
+
settings_cls: type[BaseSettings],
|
| 308 |
+
cli_prog_name: str | None = None,
|
| 309 |
+
cli_parse_args: bool | list[str] | tuple[str, ...] | None = None,
|
| 310 |
+
cli_parse_none_str: str | None = None,
|
| 311 |
+
cli_hide_none_type: bool | None = None,
|
| 312 |
+
cli_avoid_json: bool | None = None,
|
| 313 |
+
cli_enforce_required: bool | None = None,
|
| 314 |
+
cli_use_class_docs_for_groups: bool | None = None,
|
| 315 |
+
cli_exit_on_error: bool | None = None,
|
| 316 |
+
cli_prefix: str | None = None,
|
| 317 |
+
cli_flag_prefix_char: str | None = None,
|
| 318 |
+
cli_implicit_flags: bool | Literal['dual', 'toggle'] | None = None,
|
| 319 |
+
cli_ignore_unknown_args: bool | None = None,
|
| 320 |
+
cli_kebab_case: bool | Literal['all', 'no_enums'] | None = None,
|
| 321 |
+
cli_shortcuts: Mapping[str, str | list[str]] | None = None,
|
| 322 |
+
case_sensitive: bool | None = True,
|
| 323 |
+
root_parser: Any = None,
|
| 324 |
+
parse_args_method: Callable[..., Any] | None = None,
|
| 325 |
+
add_argument_method: Callable[..., Any] | None = ArgumentParser.add_argument,
|
| 326 |
+
add_argument_group_method: Callable[..., Any] | None = ArgumentParser.add_argument_group,
|
| 327 |
+
add_parser_method: Callable[..., Any] | None = _SubParsersAction.add_parser,
|
| 328 |
+
add_subparsers_method: Callable[..., Any] | None = ArgumentParser.add_subparsers,
|
| 329 |
+
format_help_method: Callable[..., Any] | None = ArgumentParser.format_help,
|
| 330 |
+
formatter_class: Any = RawDescriptionHelpFormatter,
|
| 331 |
+
) -> None:
|
| 332 |
+
self.cli_prog_name = (
|
| 333 |
+
cli_prog_name if cli_prog_name is not None else settings_cls.model_config.get('cli_prog_name', sys.argv[0])
|
| 334 |
+
)
|
| 335 |
+
self.cli_hide_none_type = (
|
| 336 |
+
cli_hide_none_type
|
| 337 |
+
if cli_hide_none_type is not None
|
| 338 |
+
else settings_cls.model_config.get('cli_hide_none_type', False)
|
| 339 |
+
)
|
| 340 |
+
self.cli_avoid_json = (
|
| 341 |
+
cli_avoid_json if cli_avoid_json is not None else settings_cls.model_config.get('cli_avoid_json', False)
|
| 342 |
+
)
|
| 343 |
+
if not cli_parse_none_str:
|
| 344 |
+
cli_parse_none_str = 'None' if self.cli_avoid_json is True else 'null'
|
| 345 |
+
self.cli_parse_none_str = cli_parse_none_str
|
| 346 |
+
self.cli_enforce_required = (
|
| 347 |
+
cli_enforce_required
|
| 348 |
+
if cli_enforce_required is not None
|
| 349 |
+
else settings_cls.model_config.get('cli_enforce_required', False)
|
| 350 |
+
)
|
| 351 |
+
self.cli_use_class_docs_for_groups = (
|
| 352 |
+
cli_use_class_docs_for_groups
|
| 353 |
+
if cli_use_class_docs_for_groups is not None
|
| 354 |
+
else settings_cls.model_config.get('cli_use_class_docs_for_groups', False)
|
| 355 |
+
)
|
| 356 |
+
self.cli_exit_on_error = (
|
| 357 |
+
cli_exit_on_error
|
| 358 |
+
if cli_exit_on_error is not None
|
| 359 |
+
else settings_cls.model_config.get('cli_exit_on_error', True)
|
| 360 |
+
)
|
| 361 |
+
self.cli_prefix = cli_prefix if cli_prefix is not None else settings_cls.model_config.get('cli_prefix', '')
|
| 362 |
+
self.cli_flag_prefix_char = (
|
| 363 |
+
cli_flag_prefix_char
|
| 364 |
+
if cli_flag_prefix_char is not None
|
| 365 |
+
else settings_cls.model_config.get('cli_flag_prefix_char', '-')
|
| 366 |
+
)
|
| 367 |
+
self._cli_flag_prefix = self.cli_flag_prefix_char * 2
|
| 368 |
+
if self.cli_prefix:
|
| 369 |
+
if cli_prefix.startswith('.') or cli_prefix.endswith('.') or not cli_prefix.replace('.', '').isidentifier(): # type: ignore
|
| 370 |
+
raise SettingsError(f'CLI settings source prefix is invalid: {cli_prefix}')
|
| 371 |
+
self.cli_prefix += '.'
|
| 372 |
+
self.cli_implicit_flags = (
|
| 373 |
+
cli_implicit_flags
|
| 374 |
+
if cli_implicit_flags is not None
|
| 375 |
+
else settings_cls.model_config.get('cli_implicit_flags', False)
|
| 376 |
+
)
|
| 377 |
+
self.cli_ignore_unknown_args = (
|
| 378 |
+
cli_ignore_unknown_args
|
| 379 |
+
if cli_ignore_unknown_args is not None
|
| 380 |
+
else settings_cls.model_config.get('cli_ignore_unknown_args', False)
|
| 381 |
+
)
|
| 382 |
+
self.cli_kebab_case = (
|
| 383 |
+
cli_kebab_case if cli_kebab_case is not None else settings_cls.model_config.get('cli_kebab_case', False)
|
| 384 |
+
)
|
| 385 |
+
self.cli_shortcuts = (
|
| 386 |
+
cli_shortcuts if cli_shortcuts is not None else settings_cls.model_config.get('cli_shortcuts', None)
|
| 387 |
+
)
|
| 388 |
+
|
| 389 |
+
case_sensitive = case_sensitive if case_sensitive is not None else True
|
| 390 |
+
if not case_sensitive and root_parser is not None:
|
| 391 |
+
raise SettingsError('Case-insensitive matching is only supported on the internal root parser')
|
| 392 |
+
|
| 393 |
+
super().__init__(
|
| 394 |
+
settings_cls,
|
| 395 |
+
env_nested_delimiter='.',
|
| 396 |
+
env_parse_none_str=self.cli_parse_none_str,
|
| 397 |
+
env_parse_enums=True,
|
| 398 |
+
env_prefix=self.cli_prefix,
|
| 399 |
+
case_sensitive=case_sensitive,
|
| 400 |
+
)
|
| 401 |
+
|
| 402 |
+
root_parser = (
|
| 403 |
+
_CliInternalArgParser(
|
| 404 |
+
cli_exit_on_error=self.cli_exit_on_error,
|
| 405 |
+
prog=self.cli_prog_name,
|
| 406 |
+
description=None if settings_cls.__doc__ is None else dedent(settings_cls.__doc__),
|
| 407 |
+
formatter_class=formatter_class,
|
| 408 |
+
prefix_chars=self.cli_flag_prefix_char,
|
| 409 |
+
allow_abbrev=False,
|
| 410 |
+
add_help=False,
|
| 411 |
+
)
|
| 412 |
+
if root_parser is None
|
| 413 |
+
else root_parser
|
| 414 |
+
)
|
| 415 |
+
self._connect_root_parser(
|
| 416 |
+
root_parser=root_parser,
|
| 417 |
+
parse_args_method=parse_args_method,
|
| 418 |
+
add_argument_method=add_argument_method,
|
| 419 |
+
add_argument_group_method=add_argument_group_method,
|
| 420 |
+
add_parser_method=add_parser_method,
|
| 421 |
+
add_subparsers_method=add_subparsers_method,
|
| 422 |
+
format_help_method=format_help_method,
|
| 423 |
+
formatter_class=formatter_class,
|
| 424 |
+
)
|
| 425 |
+
|
| 426 |
+
if cli_parse_args not in (None, False):
|
| 427 |
+
if cli_parse_args is True:
|
| 428 |
+
cli_parse_args = sys.argv[1:]
|
| 429 |
+
elif not isinstance(cli_parse_args, (list, tuple)):
|
| 430 |
+
raise SettingsError(
|
| 431 |
+
f'cli_parse_args must be a list or tuple of strings, received {type(cli_parse_args)}'
|
| 432 |
+
)
|
| 433 |
+
self._load_env_vars(parsed_args=self._parse_args(self.root_parser, cli_parse_args))
|
| 434 |
+
|
| 435 |
+
@overload
def __call__(self) -> dict[str, Any]: ...

@overload
def __call__(self, *, args: list[str] | tuple[str, ...] | bool) -> CliSettingsSource[T]:
    """
    Parse and load the command line arguments list into the CLI settings source.

    Args:
        args:
            The command line arguments to parse and load. Defaults to `None`, which means do not parse
            command line arguments. If set to `True`, defaults to sys.argv[1:]. If set to `False`, does
            not parse command line arguments.

    Returns:
        CliSettingsSource: The object instance itself.
    """
    ...

@overload
def __call__(self, *, parsed_args: Namespace | SimpleNamespace | dict[str, Any]) -> CliSettingsSource[T]:
    """
    Loads parsed command line arguments into the CLI settings source.

    Note:
        The parsed args must be in `argparse.Namespace`, `SimpleNamespace`, or vars dictionary
        (e.g., vars(argparse.Namespace)) format.

    Args:
        parsed_args: The parsed args to load.

    Returns:
        CliSettingsSource: The object instance itself.
    """
    ...

def __call__(
    self,
    *,
    args: list[str] | tuple[str, ...] | bool | None = None,
    parsed_args: Namespace | SimpleNamespace | dict[str, list[str] | str] | None = None,
) -> dict[str, Any] | CliSettingsSource[T]:
    # `args` and `parsed_args` are two alternative entry points, so supplying both is ambiguous.
    if args is not None and parsed_args is not None:
        raise SettingsError('`args` and `parsed_args` are mutually exclusive')
    elif args is not None:
        if args is False:
            # Parsing explicitly disabled: load an empty parse result (resets any prior CLI state).
            return self._load_env_vars(parsed_args={})
        if args is True:
            # `True` means "use the process's own command line".
            args = sys.argv[1:]
        return self._load_env_vars(parsed_args=self._parse_args(self.root_parser, args))
    elif parsed_args is not None:
        # Shallow-copy so the caller's namespace/dict is not mutated during resolution.
        return self._load_env_vars(parsed_args=copy.copy(parsed_args))
    else:
        # No CLI input at all: behave like the plain environment settings source.
        return super().__call__()
|
| 489 |
+
|
| 490 |
+
@overload
def _load_env_vars(self) -> Mapping[str, str | None]: ...

@overload
def _load_env_vars(self, *, parsed_args: Namespace | SimpleNamespace | dict[str, Any]) -> CliSettingsSource[T]:
    """
    Loads the parsed command line arguments into the CLI environment settings variables.

    Note:
        The parsed args must be in `argparse.Namespace`, `SimpleNamespace`, or vars dictionary
        (e.g., vars(argparse.Namespace)) format.

    Args:
        parsed_args: The parsed args to load.

    Returns:
        CliSettingsSource: The object instance itself.
    """
    ...

def _load_env_vars(
    self, *, parsed_args: Namespace | SimpleNamespace | dict[str, list[str] | str] | None = None
) -> Mapping[str, str | None] | CliSettingsSource[T]:
    if parsed_args is None:
        return {}

    # Normalize namespace objects into a plain dest -> value dict.
    if isinstance(parsed_args, (Namespace, SimpleNamespace)):
        parsed_args = vars(parsed_args)

    selected_subcommands = self._resolve_parsed_args(parsed_args)
    # Mark every subcommand that was *not* selected with the "none" sentinel string so
    # downstream parsing treats it as explicitly unset.
    for arg_dest, arg_map in self._parser_map.items():
        if isinstance(arg_dest, str) and arg_dest.endswith(':subcommand'):
            for subcommand_dest in [arg.dest for arg in arg_map.values()]:
                if subcommand_dest not in selected_subcommands:
                    parsed_args[subcommand_dest] = self.cli_parse_none_str

    # Drop internal ":subcommand" bookkeeping keys and values that were never set.
    parsed_args = {
        key: val
        for key, val in parsed_args.items()
        if not key.endswith(':subcommand') and val is not PydanticUndefined
    }
    if selected_subcommands:
        # If the most deeply nested selected subcommand received no field values of its
        # own, seed it with an empty JSON object so its model still gets instantiated.
        last_selected_subcommand = max(selected_subcommands, key=len)
        if not any(field_name for field_name in parsed_args.keys() if f'{last_selected_subcommand}.' in field_name):
            parsed_args[last_selected_subcommand] = '{}'

    # Fold in any unknown args captured for CliUnknownArgs fields.
    parsed_args.update(self._cli_unknown_args)

    self.env_vars = parse_env_vars(
        cast(Mapping[str, str], parsed_args),
        self.case_sensitive,
        self.env_ignore_empty,
        self.cli_parse_none_str,
    )

    return self
|
| 546 |
+
|
| 547 |
+
def _resolve_parsed_args(self, parsed_args: dict[str, list[str] | str]) -> list[str]:
    """Normalize raw parsed values in place and collect the selected subcommand dests.

    List values are merged into single JSON-ish strings (or comma-joined for NoDecode
    fields), ":subcommand" entries record which subcommands were chosen, and in
    ``cli_kebab_case == 'all'`` mode kebab-cased enum values are mapped back to their
    snake_case names.

    Args:
        parsed_args: The parsed args dict; mutated in place.

    Returns:
        The dests of all selected subcommands.
    """
    selected_subcommands: list[str] = []
    # Iterate over a snapshot since entries may be deleted/added while resolving.
    for field_name, val in list(parsed_args.items()):
        if isinstance(val, list):
            if self._is_nested_alias_path_only_workaround(parsed_args, field_name, val):
                # Workaround for nested alias path environment variables not being handled.
                # See https://github.com/pydantic/pydantic-settings/issues/670
                continue

            cli_arg = self._parser_map.get(field_name, {}).get(None)
            if cli_arg and cli_arg.is_no_decode:
                # NoDecode fields keep their raw comma-joined form; skip JSON-style merging.
                parsed_args[field_name] = ','.join(val)
                continue

            parsed_args[field_name] = self._merge_parsed_list(val, field_name)
        elif field_name.endswith(':subcommand') and val is not None:
            selected_subcommands.append(self._parser_map[field_name][val].dest)
        elif self.cli_kebab_case == 'all' and isinstance(val, str):
            # Enum-like values arrive kebab-cased in 'all' mode; translate back to the
            # snake_case member name, rejecting input the user wrote in snake_case.
            snake_val = val.replace('-', '_')
            cli_arg = self._parser_map.get(field_name, {}).get(None)
            if (
                cli_arg
                and cli_arg.field_info.annotation
                and (snake_val in cli_arg.get_enum_names(cli_arg.field_info.annotation, False))
            ):
                if '_' in val:
                    raise ValueError(f'Input should be kebab-case "{val.replace("_", "-")}", not "{val}"')
                parsed_args[field_name] = snake_val

    return selected_subcommands
|
| 577 |
+
|
| 578 |
+
def _is_nested_alias_path_only_workaround(
    self, parsed_args: dict[str, list[str] | str], field_name: str, val: list[str]
) -> bool:
    """
    Workaround for nested alias path environment variables not being handled.
    See https://github.com/pydantic/pydantic-settings/issues/670

    When the field is an alias-path-only arg nested under a parent (its ``arg_prefix``
    ends with '.'), its merged value is folded into the parent's JSON object entry in
    ``parsed_args`` instead of being kept as a separate key.

    Returns:
        True if the workaround was applied (``parsed_args`` mutated), False otherwise.
    """
    known_arg = self._parser_map.get(field_name, {}).values()
    if not known_arg:
        return False
    arg = next(iter(known_arg))
    if arg.is_alias_path_only and arg.arg_prefix.endswith('.'):
        del parsed_args[field_name]
        # The parent dest is the prefix without its trailing '.'.
        nested_dest = arg.arg_prefix[:-1]
        nested_val = f'"{arg.preferred_alias}": {self._merge_parsed_list(val, field_name)}'
        # Either start a new JSON object for the parent, or splice this key/value pair
        # into the parent's existing object (replace trailing '}' with ', ...}').
        parsed_args[nested_dest] = (
            f'{{{nested_val}}}'
            if nested_dest not in parsed_args
            else f'{parsed_args[nested_dest][:-1]}, {nested_val}}}'
        )
        return True
    return False
|
| 600 |
+
|
| 601 |
+
def _get_merge_parsed_list_types(self, parsed_list: list[str], field_name: str) -> tuple[type | None, type | None]:
    """Determine the declared merge type for a repeated CLI value and an inferred fallback.

    ``merge_type`` comes from ``_cli_dict_args`` (``list`` when the field is not a
    registered dict-like arg). ``inferred_type`` equals ``merge_type`` unless the
    annotation is a union that also contains non-dict members, in which case the shape
    of the actual parsed input decides between ``list`` and ``str``.
    """
    merge_type = self._cli_dict_args.get(field_name, list)
    if (
        merge_type is list
        # Not a union annotation: nothing to disambiguate.
        or not is_union_origin(get_origin(merge_type))
        # Union whose non-None members are all dict/Mapping-like: keep the dict merge.
        or not any(
            type_
            for type_ in get_args(merge_type)
            if type_ is not type(None) and get_origin(type_) not in (dict, Mapping)
        )
    ):
        inferred_type = merge_type
    else:
        # Ambiguous union: multiple values or a bracketed first value implies a list,
        # otherwise treat the input as a plain string.
        inferred_type = list if parsed_list and (len(parsed_list) > 1 or parsed_list[0].startswith('[')) else str

    return merge_type, inferred_type
|
| 617 |
+
|
| 618 |
+
def _merged_list_to_str(self, merged_list: list[str], field_name: str) -> str:
    """Render merged list tokens as a single string suitable for env-var style parsing.

    Decoded tokens are normalized into JSON scalars and wrapped in ``[...]``; NoDecode
    tokens are unquoted and comma-joined as-is. Mixing Decode and NoDecode across the
    alias-path entries of one field raises SettingsError.
    """
    decode_list: list[str] = []
    is_use_decode: bool | None = None
    cli_arg_map = self._parser_map.get(field_name, {})
    try:
        # Probe the field's annotation: if validating ['1'] yields a str element, the
        # element type is string-like and numeric-looking tokens must stay quoted.
        list_adapter: Any = TypeAdapter(next(iter(cli_arg_map.values())).field_info.annotation)
        is_num_type_str = type(next(iter(list_adapter.validate_python(['1'])))) is str
    except (StopIteration, ValidationError):
        is_num_type_str = None
    for index, item in enumerate(merged_list):
        cli_arg = cli_arg_map.get(index)
        # Entries with no per-index arg default to decoding.
        is_decode = cli_arg is None or not cli_arg.is_no_decode
        if is_use_decode is None:
            is_use_decode = is_decode
        elif is_use_decode != is_decode:
            raise SettingsError('Mixing Decode and NoDecode across different AliasPath fields is not allowed')
        if is_use_decode:
            # Escape backslashes so the token survives a later JSON parse.
            item = item.replace('\\', '\\\\')
            try:
                unquoted_item = item[1:-1] if item.startswith('"') and item.endswith('"') else item
                float(unquoted_item)
                # Numeric token: quote only when the element type is string-like.
                item = f'"{unquoted_item}"' if is_num_type_str else unquoted_item
            except ValueError:
                pass
        elif item.startswith('"') and item.endswith('"'):
            # NoDecode: strip surrounding quotes, keep the raw text.
            item = item[1:-1]
        decode_list.append(item)
    merged_list_str = ','.join(decode_list)
    return f'[{merged_list_str}]' if is_use_decode else merged_list_str
|
| 647 |
+
|
| 648 |
+
def _merge_parsed_list(self, parsed_list: list[str], field_name: str) -> str:
    """Merge the repeated CLI values of one field into a single parseable string.

    Each raw value is tokenized into strings, numbers, objects/arrays, and commas;
    the tokens are then rendered according to the field's merge type: first token for
    ``str``, a bracketed list for ``list``, or a merged JSON object for dict-like
    fields. Any failure is re-raised as SettingsError naming the field.
    """
    try:
        merged_list: list[str] = []
        is_last_consumed_a_value = False
        merge_type, inferred_type = self._get_merge_parsed_list_types(parsed_list, field_name)
        for val in parsed_list:
            if not isinstance(val, str):
                # If val is not a string, it's from an external parser and we can ignore parsing the rest of the
                # list.
                break
            val = val.strip()
            # Strip one level of surrounding brackets so "[a,b]" merges like "a,b".
            if val.startswith('[') and val.endswith(']'):
                val = val[1:-1].strip()
            while val:
                val = val.strip()
                if val.startswith(','):
                    val = self._consume_comma(val, merged_list, is_last_consumed_a_value)
                    is_last_consumed_a_value = False
                else:
                    if val.startswith('{') or val.startswith('['):
                        val = self._consume_object_or_array(val, merged_list)
                    else:
                        try:
                            val = self._consume_string_or_number(val, merged_list, merge_type)
                        except ValueError as e:
                            # Retry once with the inferred fallback type before giving up.
                            if merge_type is inferred_type:
                                raise e
                            merge_type = inferred_type
                            val = self._consume_string_or_number(val, merged_list, merge_type)
                    is_last_consumed_a_value = True
            if not is_last_consumed_a_value:
                # Value ended without consuming anything (e.g. empty input): record "".
                val = self._consume_comma(val, merged_list, is_last_consumed_a_value)

        if merge_type is str:
            return merged_list[0]
        elif merge_type is list:
            return self._merged_list_to_str(merged_list, field_name)
        else:
            # Dict-like merge: each token is a one-entry JSON object; later keys win.
            merged_dict: dict[str, str] = {}
            for item in merged_list:
                merged_dict.update(json.loads(item))
            return json.dumps(merged_dict)
    except Exception as e:
        raise SettingsError(f'Parsing error encountered for {field_name}: {e}')
|
| 692 |
+
|
| 693 |
+
def _consume_comma(self, item: str, merged_list: list[str], is_last_consumed_a_value: bool) -> str:
|
| 694 |
+
if not is_last_consumed_a_value:
|
| 695 |
+
merged_list.append('""')
|
| 696 |
+
return item[1:]
|
| 697 |
+
|
| 698 |
+
def _consume_object_or_array(self, item: str, merged_list: list[str]) -> str:
|
| 699 |
+
count = 1
|
| 700 |
+
close_delim = '}' if item.startswith('{') else ']'
|
| 701 |
+
in_str = False
|
| 702 |
+
for consumed in range(1, len(item)):
|
| 703 |
+
if item[consumed] == '"' and item[consumed - 1] != '\\':
|
| 704 |
+
in_str = not in_str
|
| 705 |
+
elif in_str:
|
| 706 |
+
continue
|
| 707 |
+
elif item[consumed] in ('{', '['):
|
| 708 |
+
count += 1
|
| 709 |
+
elif item[consumed] in ('}', ']'):
|
| 710 |
+
count -= 1
|
| 711 |
+
if item[consumed] == close_delim and count == 0:
|
| 712 |
+
merged_list.append(item[: consumed + 1])
|
| 713 |
+
return item[consumed + 1 :]
|
| 714 |
+
raise SettingsError(f'Missing end delimiter "{close_delim}"')
|
| 715 |
+
|
| 716 |
+
def _consume_string_or_number(self, item: str, merged_list: list[str], merge_type: type[Any] | None) -> str:
|
| 717 |
+
consumed = 0 if merge_type is not str else len(item)
|
| 718 |
+
is_find_end_quote = False
|
| 719 |
+
while consumed < len(item):
|
| 720 |
+
if item[consumed] == '"' and (consumed == 0 or item[consumed - 1] != '\\'):
|
| 721 |
+
is_find_end_quote = not is_find_end_quote
|
| 722 |
+
if not is_find_end_quote and item[consumed] == ',':
|
| 723 |
+
break
|
| 724 |
+
consumed += 1
|
| 725 |
+
if is_find_end_quote:
|
| 726 |
+
raise SettingsError('Mismatched quotes')
|
| 727 |
+
val_string = item[:consumed].strip()
|
| 728 |
+
if merge_type in (list, str):
|
| 729 |
+
try:
|
| 730 |
+
float(val_string)
|
| 731 |
+
except ValueError:
|
| 732 |
+
if val_string == self.cli_parse_none_str:
|
| 733 |
+
val_string = 'null'
|
| 734 |
+
if val_string not in ('true', 'false', 'null') and not val_string.startswith('"'):
|
| 735 |
+
val_string = f'"{val_string}"'
|
| 736 |
+
merged_list.append(val_string)
|
| 737 |
+
else:
|
| 738 |
+
key, val = (kv for kv in val_string.split('=', 1))
|
| 739 |
+
if key.startswith('"') and not key.endswith('"') and not val.startswith('"') and val.endswith('"'):
|
| 740 |
+
raise ValueError(f'Dictionary key=val parameter is a quoted string: {val_string}')
|
| 741 |
+
key, val = key.strip('"'), val.strip('"')
|
| 742 |
+
merged_list.append(json.dumps({key: val}))
|
| 743 |
+
return item[consumed:]
|
| 744 |
+
|
| 745 |
+
def _verify_cli_flag_annotations(self, model: type[BaseModel], field_name: str, field_info: FieldInfo) -> None:
    """Validate CLI flag annotations on a field, raising SettingsError on misuse.

    Every flag kind requires a plain ``bool`` annotation; toggle flags additionally
    require a concrete bool default (the toggle inverts it). Fields without any flag
    annotation are ignored.
    """
    if _CliImplicitFlag in field_info.metadata:
        cli_flag_name = 'CliImplicitFlag'
    elif _CliExplicitFlag in field_info.metadata:
        cli_flag_name = 'CliExplicitFlag'
    elif _CliToggleFlag in field_info.metadata:
        cli_flag_name = 'CliToggleFlag'
        # A toggle flag stores the opposite of its default, so the default must be bool.
        if not isinstance(field_info.default, bool):
            raise SettingsError(
                f'{cli_flag_name} argument {model.__name__}.{field_name} must have a default bool value'
            )
    elif _CliDualFlag in field_info.metadata:
        cli_flag_name = 'CliDualFlag'
    else:
        # No CLI flag annotation present: nothing to verify.
        return

    if field_info.annotation is not bool:
        raise SettingsError(f'{cli_flag_name} argument {model.__name__}.{field_name} is not of type bool')
|
| 763 |
+
|
| 764 |
+
def _sort_arg_fields(self, model: type[BaseModel]) -> list[tuple[str, FieldInfo]]:
    """Partition and order a model's fields for CLI argument registration.

    Order: positional args, then variadic positional args, then subcommands, then
    optional args. Also validates per-category constraints (no defaults or multiple
    aliases on subcommands, subcommand types derive from BaseModel, at most one
    variadic positional, no mixing of variadic positionals with subcommands).

    Raises:
        SettingsError: When any of the above constraints is violated.
    """
    positional_variadic_arg = []
    positional_args, subcommand_args, optional_args = [], [], []
    for field_name, field_info in _get_model_fields(model).items():
        if _CliSubCommand in field_info.metadata:
            if not field_info.is_required():
                raise SettingsError(f'subcommand argument {model.__name__}.{field_name} has a default value')
            else:
                alias_names, *_ = _get_alias_names(field_name, field_info)
                if len(alias_names) > 1:
                    raise SettingsError(f'subcommand argument {model.__name__}.{field_name} has multiple aliases')
                # Every non-None member of the annotation must itself be a model.
                field_types = [type_ for type_ in get_args(field_info.annotation) if type_ is not type(None)]
                for field_type in field_types:
                    if not (is_model_class(field_type) or is_pydantic_dataclass(field_type)):
                        raise SettingsError(
                            f'subcommand argument {model.__name__}.{field_name} has type not derived from BaseModel'
                        )
                subcommand_args.append((field_name, field_info))
        elif _CliPositionalArg in field_info.metadata:
            alias_names, *_ = _get_alias_names(field_name, field_info)
            if len(alias_names) > 1:
                raise SettingsError(f'positional argument {model.__name__}.{field_name} has multiple aliases')
            # Sequence/mapping-like positionals consume multiple values (variadic).
            is_append_action = _annotation_contains_types(
                field_info.annotation, (list, set, dict, Sequence, Mapping), is_strip_annotated=True
            )
            if not is_append_action:
                positional_args.append((field_name, field_info))
            else:
                positional_variadic_arg.append((field_name, field_info))
        else:
            self._verify_cli_flag_annotations(model, field_name, field_info)
            optional_args.append((field_name, field_info))

    if positional_variadic_arg:
        if len(positional_variadic_arg) > 1:
            field_names = ', '.join([name for name, info in positional_variadic_arg])
            raise SettingsError(f'{model.__name__} has multiple variadic positional arguments: {field_names}')
        elif subcommand_args:
            # A variadic positional would greedily consume the subcommand token.
            field_names = ', '.join([name for name, info in positional_variadic_arg + subcommand_args])
            raise SettingsError(
                f'{model.__name__} has variadic positional arguments and subcommand arguments: {field_names}'
            )

    return positional_args + positional_variadic_arg + subcommand_args + optional_args
|
| 808 |
+
|
| 809 |
+
@property
def root_parser(self) -> T:
    """The connected root parser instance."""
    parser = self._root_parser
    return parser
|
| 813 |
+
|
| 814 |
+
def _connect_parser_method(
    self, parser_method: Callable[..., Any] | None, method_name: str, *args: Any, **kwargs: Any
) -> Callable[..., Any]:
    """Adapt a user/argparse parser method for use by this settings source.

    Three cases:
      * internal root parser + case-insensitive + parse_args: wrap the method so
        flag names are lowercased before parsing;
      * method is None: return a stub that raises SettingsError when invoked,
        naming the missing method;
      * otherwise: return the method unchanged.
    """
    if (
        parser_method is not None
        and self.case_sensitive is False
        and method_name == 'parse_args_method'
        and isinstance(self._root_parser, _CliInternalArgParser)
    ):

        def parse_args_insensitive_method(
            root_parser: _CliInternalArgParser,
            args: list[str] | tuple[str, ...] | None = None,
            namespace: Namespace | None = None,
        ) -> Any:
            insensitive_args = []
            # Round-trip through shlex so quoting in the arg list is preserved.
            for arg in shlex.split(shlex.join(args)) if args else []:
                flag_prefix = rf'\{self.cli_flag_prefix_char}{{1,2}}'
                # Lowercase only the flag name portion; leave any "=value" tail intact.
                matched = re.match(rf'^({flag_prefix}[^\s=]+)(.*)', arg)
                if matched:
                    arg = matched.group(1).lower() + matched.group(2)
                insensitive_args.append(arg)
            return parser_method(root_parser, insensitive_args, namespace)

        return parse_args_insensitive_method

    elif parser_method is None:

        def none_parser_method(*args: Any, **kwargs: Any) -> Any:
            raise SettingsError(
                f'cannot connect CLI settings source root parser: {method_name} is set to `None` but is needed for connecting'
            )

        return none_parser_method

    else:
        return parser_method
|
| 851 |
+
|
| 852 |
+
def _connect_group_method(self, add_argument_group_method: Callable[..., Any] | None) -> Callable[..., Any]:
    """Wrap the connected add_argument_group method to also support mutually exclusive groups.

    The returned callable consumes the internal ``_is_cli_mutually_exclusive_group``
    kwarg: plain groups are created directly, while mutually exclusive groups are
    created inside a titled wrapper group (title suffixed with '(mutually exclusive)').
    """
    add_argument_group = self._connect_parser_method(add_argument_group_method, 'add_argument_group_method')

    def add_group_method(parser: Any, **kwargs: Any) -> Any:
        if not kwargs.pop('_is_cli_mutually_exclusive_group'):
            # Plain groups do not accept 'required'; drop it before delegating.
            kwargs.pop('required')
            return add_argument_group(parser, **kwargs)
        else:
            # Create the outer titled group, then nest the exclusive group inside it.
            main_group_kwargs = {arg: kwargs.pop(arg) for arg in ['title', 'description'] if arg in kwargs}
            main_group_kwargs['title'] += ' (mutually exclusive)'
            group = add_argument_group(parser, **main_group_kwargs)
            if not hasattr(group, 'add_mutually_exclusive_group'):
                raise SettingsError(
                    'cannot connect CLI settings source root parser: '
                    'group object is missing add_mutually_exclusive_group but is needed for connecting'
                )
            return group.add_mutually_exclusive_group(**kwargs)

    return add_group_method
|
| 871 |
+
|
| 872 |
+
def _connect_root_parser(
    self,
    root_parser: T,
    parse_args_method: Callable[..., Any] | None,
    add_argument_method: Callable[..., Any] | None = ArgumentParser.add_argument,
    add_argument_group_method: Callable[..., Any] | None = ArgumentParser.add_argument_group,
    add_parser_method: Callable[..., Any] | None = _SubParsersAction.add_parser,
    add_subparsers_method: Callable[..., Any] | None = ArgumentParser.add_subparsers,
    format_help_method: Callable[..., Any] | None = ArgumentParser.format_help,
    formatter_class: Any = RawDescriptionHelpFormatter,
) -> None:
    """Bind the root parser and its methods, then build all CLI arguments from the model.

    Initializes the per-instance parser state (unknown-args capture, dict-arg registry,
    parser map), connects each parser method through `_connect_parser_method`, adds the
    default help flag, and recursively registers arguments for `settings_cls`.
    """
    # dest -> captured unknown args (populated only when cli_ignore_unknown_args is set).
    self._cli_unknown_args: dict[str, list[str]] = {}

    def _parse_known_args(*args: Any, **kwargs: Any) -> Namespace:
        # Variant of parse_args that tolerates unknown args and stores them for any
        # CliUnknownArgs fields.
        args, unknown_args = ArgumentParser.parse_known_args(*args, **kwargs)
        for dest in self._cli_unknown_args:
            self._cli_unknown_args[dest] = unknown_args
        return cast(Namespace, args)

    self._root_parser = root_parser
    if parse_args_method is None:
        parse_args_method = _parse_known_args if self.cli_ignore_unknown_args else ArgumentParser.parse_args
    self._parse_args = self._connect_parser_method(parse_args_method, 'parse_args_method')
    self._add_argument = self._connect_parser_method(add_argument_method, 'add_argument_method')
    self._add_group = self._connect_group_method(add_argument_group_method)
    self._add_parser = self._connect_parser_method(add_parser_method, 'add_parser_method')
    self._add_subparsers = self._connect_parser_method(add_subparsers_method, 'add_subparsers_method')
    self._format_help = self._connect_parser_method(format_help_method, 'format_help_method')
    self._formatter_class = formatter_class
    # field dest -> annotation, for fields whose repeated values merge as dicts.
    self._cli_dict_args: dict[str, type[Any] | None] = {}
    self._parser_map: defaultdict[str | FieldInfo, dict[int | None | str | type[BaseModel], _CliArg]] = defaultdict(
        dict
    )
    self._add_default_help()
    self._add_parser_args(
        parser=self.root_parser,
        model=self.settings_cls,
        added_args=[],
        arg_prefix=self.env_prefix,
        subcommand_prefix=self.env_prefix,
        group=None,
        alias_prefixes=[],
        model_default=PydanticUndefined,
    )
|
| 916 |
+
|
| 917 |
+
def _add_default_help(self) -> None:
    """Add the default -h/--help flag to the internal root parser.

    Skipped entirely for external root parsers, and skipped when an unprefixed
    settings field already claims the 'help' alias (to avoid an argparse conflict).
    """
    if isinstance(self._root_parser, _CliInternalArgParser):
        if not self.cli_prefix:
            # Without a CLI prefix a field aliased 'help' would collide with --help.
            for field_name, field_info in _get_model_fields(self.settings_cls).items():
                alias_names, *_ = _get_alias_names(field_name, field_info, case_sensitive=self.case_sensitive)
                if 'help' in alias_names:
                    return

        self._add_argument(
            self.root_parser,
            # Short flag uses one prefix char, long flag uses up to two.
            f'{self._cli_flag_prefix[:1]}h',
            f'{self._cli_flag_prefix[:2]}help',
            action='help',
            default=SUPPRESS,
            help='show this help message and exit',
        )
|
| 933 |
+
|
| 934 |
+
def _add_parser_args(
    self,
    parser: Any,
    model: type[BaseModel],
    added_args: list[str],
    arg_prefix: str,
    subcommand_prefix: str,
    group: Any,
    alias_prefixes: list[str],
    model_default: Any,
    is_model_suppressed: bool = False,
    # NOTE(review): mutable default argument — appears to be read-only here, but the
    # dict is shared across calls; confirm `_get_arg_names` never mutates it.
    discriminator_vals: dict[str, set[Any]] = {},
    is_last_discriminator: bool = True,
) -> ArgumentParser:
    """Recursively register a model's fields as arguments on *parser*.

    Subcommand fields become subparsers (recursing into each sub model); all other
    fields become argparse arguments, with submodels expanded via
    `_add_parser_submodels`, CliUnknownArgs fields registered for capture, and
    alias-path-only args handled separately at the end.

    Args:
        parser: The parser (root or subparser) to add arguments to.
        model: The model whose fields are being registered.
        added_args: Argument names already added at this parser level; mutated in place.
        arg_prefix: Dotted dest prefix for fields of this model.
        subcommand_prefix: Prefix stripped from names shown under a subcommand.
        group: Current argument group (None, a group object, or kwargs dict to create one).
        alias_prefixes: Extra alias prefixes to generate names under.
        model_default: Default model value from the parent, if any.
        is_model_suppressed: Whether help for this model's fields is suppressed.
        discriminator_vals: Discriminator values collected so far (passed through).
        is_last_discriminator: Whether this is the final discriminator branch.

    Returns:
        The parser that was populated.
    """
    subparsers: Any = None
    alias_path_args: dict[str, int | None] = {}
    # Ignore model default if the default is a model and not a subclass of the current model.
    model_default = (
        None
        if (
            (is_model_class(type(model_default)) or is_pydantic_dataclass(type(model_default)))
            and not issubclass(type(model_default), model)
        )
        else model_default
    )
    for field_name, field_info in self._sort_arg_fields(model):
        arg = _CliArg(
            parser=parser,
            field_info=field_info,
            parser_map=self._parser_map,
            model=model,
            field_name=field_name,
            arg_prefix=arg_prefix,
            case_sensitive=self.case_sensitive,
            hide_none_type=self.cli_hide_none_type,
            kebab_case=self.cli_kebab_case,
            enable_decoding=self.config.get('enable_decoding'),
            env_prefix_len=self.env_prefix_len,
        )
        alias_path_args.update(arg.alias_paths)

        if arg.subcommand_dest:
            # Subcommand field: create (or reuse) the subparsers action and add one
            # subparser per sub model, recursing into each.
            for sub_model in arg.sub_models:
                subcommand_alias = arg.subcommand_alias(sub_model)
                subcommand_arg = self._parser_map[arg.subcommand_dest][subcommand_alias]
                subcommand_arg.args = [subcommand_alias]
                subcommand_arg.kwargs['allow_abbrev'] = False
                subcommand_arg.kwargs['formatter_class'] = self._formatter_class
                subcommand_arg.kwargs['description'] = (
                    None if sub_model.__doc__ is None else dedent(sub_model.__doc__)
                )
                # With multiple sub models the field description goes on the subparsers
                # section instead of each individual entry.
                subcommand_arg.kwargs['help'] = None if len(arg.sub_models) > 1 else field_info.description
                if self.cli_use_class_docs_for_groups:
                    subcommand_arg.kwargs['help'] = None if sub_model.__doc__ is None else dedent(sub_model.__doc__)

                subparsers = (
                    self._add_subparsers(
                        parser,
                        title='subcommands',
                        dest=f'{arg_prefix}:subcommand',
                        description=field_info.description if len(arg.sub_models) > 1 else None,
                    )
                    if subparsers is None
                    else subparsers
                )

                if hasattr(subparsers, 'metavar'):
                    # Grow the "{a,b,...}" metavar as each subcommand is added.
                    subparsers.metavar = (
                        f'{subparsers.metavar[:-1]},{subcommand_alias}}}'
                        if subparsers.metavar
                        else f'{{{subcommand_alias}}}'
                    )

                subcommand_arg.parser = self._add_parser(subparsers, *subcommand_arg.args, **subcommand_arg.kwargs)
                self._add_parser_args(
                    parser=subcommand_arg.parser,
                    model=sub_model,
                    added_args=[],
                    arg_prefix=f'{arg.dest}.',
                    subcommand_prefix=f'{subcommand_prefix}{arg.preferred_alias}.',
                    group=None,
                    alias_prefixes=[],
                    model_default=PydanticUndefined,
                )
        else:
            # Regular field: build the argparse kwargs for this argument.
            flag_prefix: str = self._cli_flag_prefix
            arg.kwargs['dest'] = arg.dest
            arg.kwargs['default'] = CLI_SUPPRESS
            arg.kwargs['help'] = self._help_format(field_name, field_info, model_default, is_model_suppressed)
            arg.kwargs['metavar'] = self._metavar_format(field_info.annotation)
            arg.kwargs['required'] = (
                self.cli_enforce_required and field_info.is_required() and model_default is PydanticUndefined
            )

            arg_names = self._get_arg_names(
                arg,
                subcommand_prefix,
                alias_prefixes,
                added_args,
                discriminator_vals,
                is_last_discriminator,
            )
            # Skip args whose names were all taken or whose dest is already registered.
            if not arg_names or (arg.kwargs['dest'] in added_args):
                continue

            self._convert_append_action(arg.kwargs, field_info, arg.is_append_action)

            if _CliPositionalArg in field_info.metadata:
                arg_names, flag_prefix = self._convert_positional_arg(
                    arg.kwargs, field_info, arg.preferred_alias, model_default
                )

            self._convert_bool_flag(arg.kwargs, field_info, model_default)

            # Exclude self-referencing models to avoid infinite recursion.
            non_recursive_sub_models = [m for m in arg.sub_models if m is not model]
            if (
                arg.is_parser_submodel
                and not getattr(field_info.annotation, '__pydantic_root_model__', False)
                and non_recursive_sub_models
            ):
                self._add_parser_submodels(
                    parser,
                    model,
                    non_recursive_sub_models,
                    added_args,
                    arg_prefix,
                    subcommand_prefix,
                    flag_prefix,
                    arg_names,
                    arg.kwargs,
                    field_name,
                    field_info,
                    arg.alias_names,
                    model_default=model_default,
                    is_model_suppressed=is_model_suppressed,
                )
            elif _CliUnknownArgs in field_info.metadata:
                # Register this dest to receive whatever parse_known_args leaves over.
                self._cli_unknown_args[arg.kwargs['dest']] = []
            elif not arg.is_alias_path_only:
                if isinstance(group, dict):
                    # Lazily create the group the first time an argument needs it.
                    group = self._add_group(parser, **group)
                context = parser if group is None else group
                if arg.kwargs.get('action') == 'store_false':
                    # Toggle flags with a True default are exposed as --no-<name>.
                    flag_prefix += 'no-'
                # Single-char names get the short (one-char) flag prefix.
                arg.args = [f'{flag_prefix[: 1 if len(name) == 1 else None]}{name}' for name in arg_names]
                self._add_argument(context, *arg.args, **arg.kwargs)
                added_args += list(arg_names)

    self._add_parser_alias_paths(parser, alias_path_args, added_args, arg_prefix, subcommand_prefix, group)
    return parser
|
| 1084 |
+
|
| 1085 |
+
def _convert_append_action(self, kwargs: dict[str, Any], field_info: FieldInfo, is_append_action: bool) -> None:
    """Switch an argparse argument to the 'append' action for sequence-like fields.

    Dict/Mapping-like append fields are additionally recorded in ``_cli_dict_args``
    so their repeated CLI values are later merged as dictionaries rather than lists.
    """
    if not is_append_action:
        return
    kwargs['action'] = 'append'
    is_dict_like = _annotation_contains_types(field_info.annotation, (dict, Mapping), is_strip_annotated=True)
    if is_dict_like:
        self._cli_dict_args[kwargs['dest']] = field_info.annotation
|
| 1090 |
+
|
| 1091 |
+
def _convert_bool_flag(self, kwargs: dict[str, Any], field_info: FieldInfo, model_default: Any) -> None:
    """Convert a plain bool argument into its configured CLI flag form.

    Resolves the field's flag annotation (falling back to the source-level
    ``cli_implicit_flags`` setting): dual flags become `BooleanOptionalAction`
    (--x / --no-x), toggle flags become store_true/store_false inverting the
    default, and explicit flags keep the value-taking form.
    """
    if kwargs['metavar'] == 'bool':
        meta_bool_flags = [
            meta
            for meta in field_info.metadata
            if isinstance(meta, type) and issubclass(meta, _CliImplicitFlag | _CliExplicitFlag)
        ]
        if not meta_bool_flags and self.cli_implicit_flags:
            # No per-field annotation: apply the source-wide implicit flag setting.
            meta_bool_flags = [_CliImplicitFlag]
        if meta_bool_flags:
            bool_flag = meta_bool_flags.pop()
            if bool_flag is _CliImplicitFlag:
                # Implicit resolves to toggle (needs a bool default) or dual form.
                bool_flag = (
                    _CliToggleFlag
                    if self.cli_implicit_flags == 'toggle' and isinstance(field_info.default, bool)
                    else _CliDualFlag
                )
            if bool_flag is _CliDualFlag:
                del kwargs['metavar']
                kwargs['action'] = BooleanOptionalAction
            elif bool_flag is _CliToggleFlag:
                del kwargs['metavar']
                # The flag stores the opposite of the field's default.
                kwargs['action'] = 'store_false' if field_info.default else 'store_true'
|
| 1114 |
+
|
| 1115 |
+
def _convert_positional_arg(
    self, kwargs: dict[str, Any], field_info: FieldInfo, preferred_alias: str, model_default: Any
) -> tuple[list[str], str]:
    """Rewrite argparse kwargs so the field is added as a positional argument.

    Returns the positional argument names (the original ``dest``) and an empty
    flag prefix, since positionals take no ``--`` prefix.
    """
    flag_prefix = ''
    arg_names = [kwargs['dest']]
    kwargs['default'] = PydanticUndefined
    kwargs['metavar'] = _CliArg.get_kebab_case(preferred_alias.upper(), self.cli_kebab_case)

    # Note: CLI positional args are always strictly required at the CLI. Therefore, use field_info.is_required in
    # conjunction with model_default instead of the derived kwargs['required'].
    is_required = field_info.is_required() and model_default is PydanticUndefined
    if kwargs.get('action') == 'append':
        # argparse positionals express multiplicity via nargs, not an append action.
        del kwargs['action']
        kwargs['nargs'] = '+' if is_required else '*'
    elif not is_required:
        kwargs['nargs'] = '?'

    # argparse rejects 'dest'/'required' keywords on positional arguments.
    del kwargs['dest']
    del kwargs['required']
    return arg_names, flag_prefix
|
| 1135 |
+
|
| 1136 |
+
def _get_arg_names(
    self,
    arg: _CliArg,
    subcommand_prefix: str,
    alias_prefixes: list[str],
    added_args: list[str],
    discriminator_vals: dict[str, set[Any]],
    is_last_discriminator: bool,
) -> list[str]:
    """Collect the CLI argument names (including aliases and shortcuts) for `arg`.

    For discriminator fields across a tagged union, Literal tag values are
    accumulated into `discriminator_vals`; only the last union member emits the
    argument, with a metavar listing all collected tags. Returns an empty list
    for earlier union members.
    """
    arg_names: list[str] = []
    for prefix in [arg.arg_prefix] + alias_prefixes:
        for name in arg.alias_names:
            # Inside a subcommand, strip the subcommand prefix from the arg name.
            arg_name = _CliArg.get_kebab_case(
                f'{prefix}{name}'
                if subcommand_prefix == self.env_prefix
                else f'{prefix.replace(subcommand_prefix, "", 1)}{name}',
                self.cli_kebab_case,
            )
            if arg_name not in added_args:
                arg_names.append(arg_name)

    # User-configured shortcut aliases for specific target argument names.
    if self.cli_shortcuts:
        for target, aliases in self.cli_shortcuts.items():
            if target in arg_names:
                alias_list = [aliases] if isinstance(aliases, str) else aliases
                arg_names.extend(alias for alias in alias_list if alias not in added_args)

    tags: set[Any] = set()
    discriminators = discriminator_vals.get(arg.dest)
    if discriminators is not None:
        # Collect Literal annotations into `tags`, then merge their values into
        # the shared discriminator set for this dest.
        _annotation_contains_types(
            arg.field_info.annotation,
            (Literal,),
            is_include_origin=True,
            collect=tags,
        )
        discriminators.update(chain.from_iterable(get_args(tag) for tag in tags))
        if not is_last_discriminator:
            return []
        arg.kwargs['metavar'] = self._metavar_format(Literal[tuple(sorted(discriminators))])

    return arg_names
|
| 1178 |
+
|
| 1179 |
+
def _add_parser_submodels(
    self,
    parser: Any,
    model: type[BaseModel],
    sub_models: list[type[BaseModel]],
    added_args: list[str],
    arg_prefix: str,
    subcommand_prefix: str,
    flag_prefix: str,
    arg_names: list[str],
    kwargs: dict[str, Any],
    field_name: str,
    field_info: FieldInfo,
    alias_names: tuple[str, ...],
    model_default: Any,
    is_model_suppressed: bool,
) -> None:
    """Add a nested-model field to the parser as an argument group.

    Creates a group for the submodel(s), adds a JSON-string top-level argument
    for the field itself, then recurses into each submodel's fields via
    `_add_parser_args`.
    """
    if issubclass(model, CliMutuallyExclusiveGroup):
        # Argparse has deprecated "calling add_argument_group() or add_mutually_exclusive_group() on a
        # mutually exclusive group" (https://docs.python.org/3/library/argparse.html#mutual-exclusion).
        # Since nested models result in a group add, raise an exception for nested models in a mutually
        # exclusive group.
        raise SettingsError('cannot have nested models in a CliMutuallyExclusiveGroup')

    model_group_kwargs: dict[str, Any] = {}
    model_group_kwargs['title'] = f'{arg_names[0]} options'
    model_group_kwargs['description'] = field_info.description
    model_group_kwargs['required'] = kwargs['required']
    model_group_kwargs['_is_cli_mutually_exclusive_group'] = any(
        issubclass(model, CliMutuallyExclusiveGroup) for model in sub_models
    )
    if model_group_kwargs['_is_cli_mutually_exclusive_group'] and len(sub_models) > 1:
        raise SettingsError('cannot use union with CliMutuallyExclusiveGroup')
    if self.cli_use_class_docs_for_groups and len(sub_models) == 1:
        model_group_kwargs['description'] = None if sub_models[0].__doc__ is None else dedent(sub_models[0].__doc__)

    # Resolve the effective default for the nested model.
    if model_default is not PydanticUndefined:
        if is_model_class(type(model_default)) or is_pydantic_dataclass(type(model_default)):
            model_default = getattr(model_default, field_name)
    else:
        if field_info.default is not PydanticUndefined:
            model_default = field_info.default
        elif field_info.default_factory is not None:
            model_default = field_info.default_factory
    if model_default is None:
        # Surface the None default in the group description header.
        desc_header = f'default: {self.cli_parse_none_str} (undefined)'
        if model_group_kwargs['description'] is not None:
            model_group_kwargs['description'] = dedent(f'{desc_header}\n{model_group_kwargs["description"]}')
        else:
            model_group_kwargs['description'] = desc_header

    preferred_alias = alias_names[0]
    # Suppression is inherited: a suppressed parent suppresses all children.
    is_model_suppressed = self._is_field_suppressed(field_info) or is_model_suppressed
    if is_model_suppressed:
        model_group_kwargs['description'] = CLI_SUPPRESS
    added_args.append(arg_names[0])
    # The model field itself is parsed as an optional JSON string ('{}' when bare).
    kwargs['required'] = False
    kwargs['nargs'] = '?'
    kwargs['const'] = '{}'
    kwargs['help'] = (
        CLI_SUPPRESS
        if is_model_suppressed or self.cli_avoid_json
        else f'set {arg_names[0]} from JSON string (default: {{}})'
    )
    model_group = self._add_group(parser, **model_group_kwargs)
    self._add_argument(model_group, *(f'{flag_prefix}{name}' for name in arg_names), **kwargs)
    # Seed discriminator tag collection when this field is a tagged union.
    discriminator_vals: dict[str, set[Any]] = (
        {f'{arg_prefix}{preferred_alias}.{field_info.discriminator}': set()} if field_info.discriminator else {}
    )
    for model in sub_models:
        self._add_parser_args(
            parser=parser,
            model=model,
            added_args=added_args,
            arg_prefix=f'{arg_prefix}{preferred_alias}.',
            subcommand_prefix=subcommand_prefix,
            group=model_group,
            alias_prefixes=[f'{arg_prefix}{name}.' for name in alias_names[1:]],
            model_default=model_default,
            is_model_suppressed=is_model_suppressed,
            discriminator_vals=discriminator_vals,
            is_last_discriminator=model is sub_models[-1],
        )
|
| 1262 |
+
|
| 1263 |
+
def _add_parser_alias_paths(
    self,
    parser: Any,
    alias_path_args: dict[str, int | None],
    added_args: list[str],
    arg_prefix: str,
    subcommand_prefix: str,
    group: Any,
) -> None:
    """Add hidden arguments for pydantic AliasPath targets.

    Each alias-path name becomes an append-style argument (list metavar, or
    dict when `index` is None) so indexed alias values can be collected.
    """
    if alias_path_args:
        context = parser
        if group is not None:
            # `group` may be pre-built or a kwargs dict describing a group to create.
            context = self._add_group(parser, **group) if isinstance(group, dict) else group
        for name, index in alias_path_args.items():
            # Strip the subcommand prefix from the name when inside a subcommand.
            arg_name = (
                f'{arg_prefix}{name}'
                if subcommand_prefix == self.env_prefix
                else f'{arg_prefix.replace(subcommand_prefix, "", 1)}{name}'
            )
            kwargs: dict[str, Any] = {}
            kwargs['default'] = CLI_SUPPRESS
            kwargs['help'] = 'pydantic alias path'
            kwargs['action'] = 'append'
            kwargs['metavar'] = 'list'
            if index is None:
                # No numeric index means the alias path addresses a dict key.
                kwargs['metavar'] = 'dict'
                self._cli_dict_args[arg_name] = dict
            args = [f'{self._cli_flag_prefix}{arg_name}']
            # Record the final args/kwargs on every parser-map entry for this name.
            for key, arg in self._parser_map[arg_name].items():
                arg.args, arg.kwargs = args, kwargs
            self._add_argument(context, *args, **kwargs)
            added_args.append(arg_name)
|
| 1295 |
+
|
| 1296 |
+
def _get_modified_args(self, obj: Any) -> tuple[str, ...]:
|
| 1297 |
+
if not self.cli_hide_none_type:
|
| 1298 |
+
return get_args(obj)
|
| 1299 |
+
else:
|
| 1300 |
+
return tuple([type_ for type_ in get_args(obj) if type_ is not type(None)])
|
| 1301 |
+
|
| 1302 |
+
def _metavar_format_choices(self, args: list[str], obj_qualname: str | None = None) -> str:
|
| 1303 |
+
if 'JSON' in args:
|
| 1304 |
+
args = args[: args.index('JSON') + 1] + [arg for arg in args[args.index('JSON') + 1 :] if arg != 'JSON']
|
| 1305 |
+
metavar = ','.join(args)
|
| 1306 |
+
if obj_qualname:
|
| 1307 |
+
return f'{obj_qualname}[{metavar}]'
|
| 1308 |
+
else:
|
| 1309 |
+
return metavar if len(args) == 1 else f'{{{metavar}}}'
|
| 1310 |
+
|
| 1311 |
+
def _metavar_format_recurse(self, obj: Any) -> str:
    """Pretty metavar representation of a type. Adapts logic from `pydantic._repr.display_as_type`."""
    obj = _strip_annotated(obj)
    if _is_function(obj):
        # If function is locally defined use __name__ instead of __qualname__
        return obj.__name__ if '<locals>' in obj.__qualname__ else obj.__qualname__
    elif obj is ...:
        return '...'
    elif isinstance(obj, Representation):
        return repr(obj)
    elif isinstance(obj, typing.ForwardRef) or typing_objects.is_typealiastype(obj):
        return str(obj)

    # Non-type objects (plain instances) are displayed via their class.
    if not isinstance(obj, (_typing_base, _WithArgsTypes, type)):
        obj = obj.__class__

    origin = get_origin(obj)
    if is_union_origin(origin):
        # Unions render as a choice set of their (possibly None-stripped) members.
        return self._metavar_format_choices(list(map(self._metavar_format_recurse, self._get_modified_args(obj))))
    elif typing_objects.is_literal(origin):
        return self._metavar_format_choices(list(map(str, self._get_modified_args(obj))))
    elif _lenient_issubclass(obj, Enum):
        # Enum members render by name, kebab-cased only in 'all' kebab mode.
        return self._metavar_format_choices(
            [_CliArg.get_kebab_case(name, self.cli_kebab_case == 'all') for name in obj.__members__.keys()]
        )
    elif isinstance(obj, _WithArgsTypes):
        # Parameterized generics: qualname[...inner metavars...].
        return self._metavar_format_choices(
            list(map(self._metavar_format_recurse, self._get_modified_args(obj))),
            obj_qualname=obj.__qualname__ if hasattr(obj, '__qualname__') else str(obj),
        )
    elif obj is type(None):
        return self.cli_parse_none_str
    elif is_model_class(obj) or is_pydantic_dataclass(obj):
        # Root models display as their root annotation; other models as 'JSON'.
        return (
            self._metavar_format_recurse(_get_model_fields(obj)['root'].annotation)
            if getattr(obj, '__pydantic_root_model__', False)
            else 'JSON'
        )
    elif isinstance(obj, type):
        return obj.__qualname__
    else:
        return repr(obj).replace('typing.', '').replace('typing_extensions.', '')
|
| 1353 |
+
|
| 1354 |
+
def _metavar_format(self, obj: Any) -> str:
    """Render obj as a compact metavar string with no space after commas."""
    pretty = self._metavar_format_recurse(obj)
    return pretty.replace(', ', ',')
|
| 1356 |
+
|
| 1357 |
+
def _help_format(
    self, field_name: str, field_info: FieldInfo, model_default: Any, is_model_suppressed: bool
) -> str:
    """Build the help text for a field, appending required/default annotations.

    Returns CLI_SUPPRESS for suppressed fields. '%' is escaped when the root
    parser is a real argparse.ArgumentParser (argparse treats '%' specially).
    """
    _help = field_info.description if field_info.description else ''
    if is_model_suppressed or self._is_field_suppressed(field_info):
        return CLI_SUPPRESS

    if field_info.is_required() and model_default in (PydanticUndefined, None):
        # Positional args are inherently required, so skip the '(required)' marker.
        if _CliPositionalArg not in field_info.metadata:
            ifdef = 'ifdef: ' if model_default is None else ''
            _help += f' ({ifdef}required)' if _help else f'({ifdef}required)'
    else:
        default = f'(default: {self.cli_parse_none_str})'
        if is_model_class(type(model_default)) or is_pydantic_dataclass(type(model_default)):
            default = f'(default: {getattr(model_default, field_name)})'
        elif model_default not in (PydanticUndefined, None) and _is_function(model_default):
            default = f'(default factory: {self._metavar_format(model_default)})'
        elif field_info.default not in (PydanticUndefined, None):
            # Show enum defaults by member name when the annotation maps to one.
            enum_name = _annotation_enum_val_to_name(field_info.annotation, field_info.default)
            default = f'(default: {field_info.default if enum_name is None else enum_name})'
        elif field_info.default_factory is not None:
            default = f'(default factory: {self._metavar_format(field_info.default_factory)})'

        # Toggle flags encode the default in their flag form, so omit it here.
        if _CliToggleFlag not in field_info.metadata:
            _help += f' {default}' if _help else default
    return _help.replace('%', '%%') if issubclass(type(self._root_parser), ArgumentParser) else _help
|
| 1383 |
+
|
| 1384 |
+
def _is_field_suppressed(self, field_info: FieldInfo) -> bool:
    """Return True when the field's description or metadata marks it as CLI-suppressed."""
    description = field_info.description or ''
    return description == CLI_SUPPRESS or CLI_SUPPRESS in field_info.metadata
|
| 1387 |
+
|
| 1388 |
+
def _update_alias_path_only_default(
    self, arg_name: str, value: Any, field_info: FieldInfo, alias_path_only_defaults: dict[str, Any]
) -> list[Any] | dict[str, Any]:
    """Insert `value` at its alias-path position in the shared defaults structure.

    Builds/extends the nested container in `alias_path_only_defaults[arg_name]`
    according to the field's AliasPath, padding lists with '' up to the target
    index. Returns the (mutable) top-level entry so callers hold a live
    reference that later inserts will continue to update.
    """
    # First alias on the field that is an AliasPath (directly, or as the first
    # choice of an AliasChoices).
    alias_path: AliasPath = [
        alias if isinstance(alias, AliasPath) else cast(AliasPath, alias.choices[0])
        for alias in (field_info.alias, field_info.validation_alias)
        if isinstance(alias, (AliasPath, AliasChoices))
    ][0]

    # Middle path segments (the first segment is the arg name itself, the last
    # is the list index handled below).
    alias_nested_paths: list[str] = alias_path.path[1:-1]  # type: ignore
    if not alias_nested_paths:
        alias_path_only_defaults.setdefault(arg_name, [])
        alias_default = alias_path_only_defaults[arg_name]
    else:
        alias_path_only_defaults.setdefault(arg_name, {})
        current_path = alias_path_only_defaults[arg_name]

        # Walk/build nested dicts down to the list that holds the indexed value.
        for nested_path in alias_nested_paths[:-1]:
            current_path.setdefault(nested_path, {})
            current_path = current_path[nested_path]
        current_path.setdefault(alias_nested_paths[-1], [])
        alias_default = current_path[alias_nested_paths[-1]]

    # Pad with placeholders so the target index exists, then set the value.
    alias_path_index = cast(int, alias_path.path[-1])
    alias_default.extend([''] * max(alias_path_index + 1 - len(alias_default), 0))
    alias_default[alias_path_index] = value
    return alias_path_only_defaults[arg_name]
|
| 1415 |
+
|
| 1416 |
+
def _coerce_value_styles(
|
| 1417 |
+
self,
|
| 1418 |
+
model_default: Any,
|
| 1419 |
+
value: str | list[Any] | dict[str, Any],
|
| 1420 |
+
list_style: Literal['json', 'argparse', 'lazy'] = 'json',
|
| 1421 |
+
dict_style: Literal['json', 'env'] = 'json',
|
| 1422 |
+
) -> list[str | list[Any] | dict[str, Any]]:
|
| 1423 |
+
values = [value]
|
| 1424 |
+
if isinstance(value, str):
|
| 1425 |
+
if isinstance(model_default, list):
|
| 1426 |
+
if list_style == 'lazy':
|
| 1427 |
+
values = [','.join(f'{v}' for v in json.loads(value))]
|
| 1428 |
+
elif list_style == 'argparse':
|
| 1429 |
+
values = [f'{v}' for v in json.loads(value)]
|
| 1430 |
+
elif isinstance(model_default, dict):
|
| 1431 |
+
if dict_style == 'env':
|
| 1432 |
+
values = [f'{k}={v}' for k, v in json.loads(value).items()]
|
| 1433 |
+
return values
|
| 1434 |
+
|
| 1435 |
+
@staticmethod
|
| 1436 |
+
def _flatten_serialized_args(
|
| 1437 |
+
serialized_args: dict[str, list[str]],
|
| 1438 |
+
positionals_first: bool,
|
| 1439 |
+
) -> list[str]:
|
| 1440 |
+
return (
|
| 1441 |
+
serialized_args['optional'] + serialized_args['positional']
|
| 1442 |
+
if not positionals_first
|
| 1443 |
+
else serialized_args['positional'] + serialized_args['optional']
|
| 1444 |
+
) + serialized_args['subcommand']
|
| 1445 |
+
|
| 1446 |
+
def _serialized_args(
    self,
    model: PydanticModel,
    list_style: Literal['json', 'argparse', 'lazy'] = 'json',
    dict_style: Literal['json', 'env'] = 'json',
    positionals_first: bool = False,
    _is_submodel: bool = False,
) -> dict[str, list[str]]:
    """Serialize a model instance back into CLI argument strings.

    Only fields whose current value differs from the field default are emitted.
    Returns the args grouped as 'optional', 'positional', and 'subcommand'
    lists; nested models and subcommands are handled recursively.
    """
    alias_path_only_defaults: dict[str, Any] = {}
    optional_args: list[str | list[Any] | dict[str, Any]] = []
    positional_args: list[str | list[Any] | dict[str, Any]] = []
    subcommand_args: list[str] = []
    for field_name, field_info in _get_model_fields(type(model) if _is_submodel else self.settings_cls).items():
        model_default = getattr(model, field_name)
        # Skip values that match the field default — nothing to serialize.
        if field_info.default == model_default:
            continue
        # Skip unselected subcommands.
        if _CliSubCommand in field_info.metadata and model_default is None:
            continue
        arg = next(iter(self._parser_map[field_info].values()))
        if arg.subcommand_dest:
            # Emit the subcommand name, then its own serialized args.
            subcommand_args.append(arg.subcommand_alias(type(model_default)))
            sub_args = self._serialized_args(
                model_default,
                list_style=list_style,
                dict_style=dict_style,
                positionals_first=positionals_first,
                _is_submodel=True,
            )
            subcommand_args += self._flatten_serialized_args(sub_args, positionals_first)
            continue
        if is_model_class(type(model_default)) or is_pydantic_dataclass(type(model_default)):
            # Nested model values are merged into the parent's groups.
            sub_args = self._serialized_args(
                model_default,
                list_style=list_style,
                dict_style=dict_style,
                positionals_first=positionals_first,
                _is_submodel=True,
            )
            optional_args += sub_args['optional']
            positional_args += sub_args['positional']
            subcommand_args += sub_args['subcommand']
            continue

        # Split optional flag prefix dashes from the bare argument name.
        matched = re.match(r'(-*)(.+)', arg.preferred_arg_name)
        flag_chars, arg_name = matched.groups() if matched else ('', '')
        value: str | list[Any] | dict[str, Any] = (
            json.dumps(model_default) if isinstance(model_default, (dict, list, set)) else str(model_default)
        )

        if arg.is_alias_path_only:
            # For alias path only, we wont know the complete value until we've finished parsing the entire class. In
            # this case, insert value as a non-string reference pointing to the relevant alias_path_only_defaults
            # entry and convert into completed string value later.
            value = self._update_alias_path_only_default(arg_name, value, field_info, alias_path_only_defaults)

        if _CliPositionalArg in field_info.metadata:
            for value in model_default if isinstance(model_default, list) else [model_default]:
                value = json.dumps(value) if isinstance(value, (dict, list, set)) else str(value)
                positional_args.append(value)
            continue

        # Note: prepend 'no-' for boolean optional action flag if model_default value is False and flag is not a short option
        if arg.kwargs.get('action') == BooleanOptionalAction and model_default is False and flag_chars == '--':
            flag_chars += 'no-'

        for value in self._coerce_value_styles(model_default, value, list_style=list_style, dict_style=dict_style):
            optional_args.append(f'{flag_chars}{arg_name}')

            # If implicit bool flag, do not add a value
            if arg.kwargs.get('action') not in (BooleanOptionalAction, 'store_true', 'store_false'):
                optional_args.append(value)

    return {
        # Non-string entries are the deferred alias-path references collected
        # above; serialize them to JSON now that the class is fully processed.
        'optional': [json.dumps(value) if not isinstance(value, str) else value for value in optional_args],
        'positional': [json.dumps(value) if not isinstance(value, str) else value for value in positional_args],
        'subcommand': subcommand_args,
    }
|
source/pydantic_settings/sources/providers/dotenv.py
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Dotenv file settings source."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations as _annotations
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import warnings
|
| 7 |
+
from collections.abc import Mapping
|
| 8 |
+
from pathlib import Path
|
| 9 |
+
from typing import TYPE_CHECKING, Any
|
| 10 |
+
|
| 11 |
+
from dotenv import dotenv_values
|
| 12 |
+
from pydantic._internal._typing_extra import ( # type: ignore[attr-defined]
|
| 13 |
+
get_origin,
|
| 14 |
+
)
|
| 15 |
+
from typing_inspection.introspection import is_union_origin
|
| 16 |
+
|
| 17 |
+
from ..types import ENV_FILE_SENTINEL, DotenvType, EnvPrefixTarget
|
| 18 |
+
from ..utils import (
|
| 19 |
+
_annotation_is_complex,
|
| 20 |
+
_union_is_complex,
|
| 21 |
+
parse_env_vars,
|
| 22 |
+
)
|
| 23 |
+
from .env import EnvSettingsSource
|
| 24 |
+
|
| 25 |
+
if TYPE_CHECKING:
|
| 26 |
+
from pydantic_settings.main import BaseSettings
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class DotEnvSettingsSource(EnvSettingsSource):
    """
    Source class for loading settings values from env files.
    """

    def __init__(
        self,
        settings_cls: type[BaseSettings],
        env_file: DotenvType | None = ENV_FILE_SENTINEL,
        env_file_encoding: str | None = None,
        case_sensitive: bool | None = None,
        env_prefix: str | None = None,
        env_prefix_target: EnvPrefixTarget | None = None,
        env_nested_delimiter: str | None = None,
        env_nested_max_split: int | None = None,
        env_ignore_empty: bool | None = None,
        env_parse_none_str: str | None = None,
        env_parse_enums: bool | None = None,
    ) -> None:
        # Explicit arguments win over the settings class's model_config values.
        self.env_file = env_file if env_file != ENV_FILE_SENTINEL else settings_cls.model_config.get('env_file')
        self.env_file_encoding = (
            env_file_encoding if env_file_encoding is not None else settings_cls.model_config.get('env_file_encoding')
        )
        super().__init__(
            settings_cls,
            case_sensitive,
            env_prefix,
            env_prefix_target,
            env_nested_delimiter,
            env_nested_max_split,
            env_ignore_empty,
            env_parse_none_str,
            env_parse_enums,
        )

    def _load_env_vars(self) -> Mapping[str, str | None]:
        """Load variables from the configured dotenv file(s) instead of os.environ."""
        return self._read_env_files()

    @staticmethod
    def _static_read_env_file(
        file_path: Path,
        *,
        encoding: str | None = None,
        case_sensitive: bool = False,
        ignore_empty: bool = False,
        parse_none_str: str | None = None,
    ) -> Mapping[str, str | None]:
        """Read one dotenv file and normalize its variables via parse_env_vars."""
        file_vars: dict[str, str | None] = dotenv_values(file_path, encoding=encoding or 'utf8')
        return parse_env_vars(file_vars, case_sensitive, ignore_empty, parse_none_str)

    def _read_env_file(
        self,
        file_path: Path,
    ) -> Mapping[str, str | None]:
        """Read one dotenv file using this source's configured options."""
        return self._static_read_env_file(
            file_path,
            encoding=self.env_file_encoding,
            case_sensitive=self.case_sensitive,
            ignore_empty=self.env_ignore_empty,
            parse_none_str=self.env_parse_none_str,
        )

    def _read_env_files(self) -> Mapping[str, str | None]:
        """Read all configured dotenv files; later files override earlier ones."""
        env_files = self.env_file
        if env_files is None:
            return {}

        if isinstance(env_files, (str, os.PathLike)):
            env_files = [env_files]

        dotenv_vars: dict[str, str | None] = {}
        for env_file in env_files:
            env_path = Path(env_file).expanduser()
            # Missing files are silently skipped.
            if env_path.is_file():
                dotenv_vars.update(self._read_env_file(env_path))

        return dotenv_vars

    def __call__(self) -> dict[str, Any]:
        """Build the settings dict, merging in extra dotenv variables when allowed."""
        data: dict[str, Any] = super().__call__()
        is_extra_allowed = self.config.get('extra') != 'forbid'

        # As `extra` config is allowed in dotenv settings source, We have to
        # update data with extra env variables from dotenv file.
        for env_name, env_value in self.env_vars.items():
            if not env_value or env_name in data or (self.env_prefix and env_name in self.settings_cls.model_fields):
                continue
            # Determine whether this env var was already consumed by some field
            # (exact match, or a nested key of a complex-typed field).
            env_used = False
            for field_name, field in self.settings_cls.model_fields.items():
                for _, field_env_name, _ in self._extract_field_info(field, field_name):
                    if env_name == field_env_name or (
                        (
                            _annotation_is_complex(field.annotation, field.metadata)
                            or (
                                is_union_origin(get_origin(field.annotation))
                                and _union_is_complex(field.annotation, field.metadata)
                            )
                        )
                        and env_name.startswith(field_env_name)
                    ):
                        env_used = True
                        break
                if env_used:
                    break
            if not env_used:
                if is_extra_allowed and env_name.startswith(self.env_prefix):
                    # env_prefix should be respected and removed from the env_name
                    normalized_env_name = env_name[len(self.env_prefix) :]
                    data[normalized_env_name] = env_value
                else:
                    data[env_name] = env_value
        return data

    def __repr__(self) -> str:
        return (
            f'{self.__class__.__name__}(env_file={self.env_file!r}, env_file_encoding={self.env_file_encoding!r}, '
            f'env_nested_delimiter={self.env_nested_delimiter!r}, env_prefix_len={self.env_prefix_len!r})'
        )
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
def read_env_file(
    file_path: Path,
    *,
    encoding: str | None = None,
    case_sensitive: bool = False,
    ignore_empty: bool = False,
    parse_none_str: str | None = None,
) -> Mapping[str, str | None]:
    """Deprecated module-level wrapper around `DotEnvSettingsSource._static_read_env_file`."""
    message = (
        'read_env_file will be removed in the next version, use DotEnvSettingsSource._static_read_env_file if you must'
    )
    warnings.warn(message, DeprecationWarning)
    return DotEnvSettingsSource._static_read_env_file(
        file_path,
        encoding=encoding,
        case_sensitive=case_sensitive,
        ignore_empty=ignore_empty,
        parse_none_str=parse_none_str,
    )
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
__all__ = ['DotEnvSettingsSource', 'read_env_file']
|
source/pydantic_settings/sources/providers/env.py
ADDED
|
@@ -0,0 +1,310 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations as _annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
from collections.abc import Mapping
|
| 5 |
+
from typing import (
|
| 6 |
+
TYPE_CHECKING,
|
| 7 |
+
Any,
|
| 8 |
+
get_args,
|
| 9 |
+
get_origin,
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
from pydantic import Json, TypeAdapter, ValidationError
|
| 13 |
+
from pydantic._internal._utils import deep_update, is_model_class
|
| 14 |
+
from pydantic.dataclasses import is_pydantic_dataclass
|
| 15 |
+
from pydantic.fields import FieldInfo
|
| 16 |
+
from typing_inspection.introspection import is_union_origin
|
| 17 |
+
|
| 18 |
+
from ...utils import _lenient_issubclass
|
| 19 |
+
from ..base import PydanticBaseEnvSettingsSource
|
| 20 |
+
from ..types import EnvNoneType, EnvPrefixTarget
|
| 21 |
+
from ..utils import (
|
| 22 |
+
_annotation_contains_types,
|
| 23 |
+
_annotation_enum_name_to_val,
|
| 24 |
+
_annotation_is_complex,
|
| 25 |
+
_get_model_fields,
|
| 26 |
+
_union_is_complex,
|
| 27 |
+
parse_env_vars,
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
if TYPE_CHECKING:
|
| 31 |
+
from pydantic_settings.main import BaseSettings
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class EnvSettingsSource(PydanticBaseEnvSettingsSource):
    """
    Source class for loading settings values from environment variables.
    """

    def __init__(
        self,
        settings_cls: type[BaseSettings],
        case_sensitive: bool | None = None,
        env_prefix: str | None = None,
        env_prefix_target: EnvPrefixTarget | None = None,
        env_nested_delimiter: str | None = None,
        env_nested_max_split: int | None = None,
        env_ignore_empty: bool | None = None,
        env_parse_none_str: str | None = None,
        env_parse_enums: bool | None = None,
    ) -> None:
        super().__init__(
            settings_cls,
            case_sensitive,
            env_prefix,
            env_prefix_target,
            env_ignore_empty,
            env_parse_none_str,
            env_parse_enums,
        )
        # Explicit constructor arguments win over the values in model_config.
        self.env_nested_delimiter = (
            env_nested_delimiter if env_nested_delimiter is not None else self.config.get('env_nested_delimiter')
        )
        self.env_nested_max_split = (
            env_nested_max_split if env_nested_max_split is not None else self.config.get('env_nested_max_split')
        )
        # When env_nested_max_split is unset this evaluates to -1, which
        # str.split() treats as "no limit on the number of splits".
        self.maxsplit = (self.env_nested_max_split or 0) - 1
        self.env_prefix_len = len(self.env_prefix)

        self.env_vars = self._load_env_vars()

    def _load_env_vars(self) -> Mapping[str, str | None]:
        """Read `os.environ`, normalized per the case/empty/none-string settings."""
        return parse_env_vars(os.environ, self.case_sensitive, self.env_ignore_empty, self.env_parse_none_str)

    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
        """
        Get the value for a field from environment variables and a flag to determine whether the value is complex.

        Candidate environment names (from aliases and the field name) are tried
        in order; the first one present in `self.env_vars` wins.

        Args:
            field: The field.
            field_name: The field name.

        Returns:
            A tuple that contains the value (`None` if not found), the key used to
            populate the model, and a flag to determine whether the value is complex.
        """

        env_val: str | None = None
        for field_key, env_name, value_is_complex in self._extract_field_info(field, field_name):
            env_val = self.env_vars.get(env_name)
            if env_val is not None:
                break

        # NOTE(review): field_key/value_is_complex come from the last loop
        # iteration; assumes _extract_field_info always yields at least one
        # candidate — confirm against the base-class implementation.
        return env_val, field_key, value_is_complex

    def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any:
        """
        Prepare value for the field.

        * Extract value for nested field.
        * Deserialize value to python object for complex field.

        Args:
            field_name: The field name.
            field: The field.
            value: The raw value found for the field, if any.
            value_is_complex: Whether the value was flagged as complex during extraction.

        Returns:
            The prepared value for the field (may be `None` for a simple field with no value).

        Raises:
            ValueError: when there is an error in deserializing the value for a complex field.
        """
        is_complex, allow_parse_failure = self._field_is_complex(field)
        if self.env_parse_enums:
            # Translate an enum *member name* string into the member's value, if it matches.
            enum_val = _annotation_enum_name_to_val(field.annotation, value)
            value = value if enum_val is None else enum_val

        if is_complex or value_is_complex:
            if isinstance(value, EnvNoneType):
                # Sentinel produced by env_parse_none_str: pass through untouched.
                return value
            elif value is None:
                # field is complex but no value found so far, try explode_env_vars
                env_val_built = self.explode_env_vars(field_name, field, self.env_vars)
                if env_val_built:
                    return env_val_built
            else:
                # field is complex and there's a value, decode that as JSON, then add explode_env_vars
                try:
                    value = self.decode_complex_value(field_name, field, value)
                except ValueError as e:
                    if not allow_parse_failure:
                        raise e

                if isinstance(value, dict):
                    # Nested env vars override/extend the JSON-decoded dict.
                    return deep_update(value, self.explode_env_vars(field_name, field, self.env_vars))
                else:
                    return value
        elif value is not None:
            # simplest case, field is not complex, we only need to add the value if it was found
            return self._coerce_env_val_strict(field, value)

    def _field_is_complex(self, field: FieldInfo) -> tuple[bool, bool]:
        """
        Find out if a field is complex, and if so whether JSON errors should be ignored.

        Returns:
            A `(is_complex, allow_parse_failure)` tuple. Parse failures are only
            tolerated for union annotations that contain a complex member (the
            value may validate against a non-complex member instead).
        """
        if self.field_is_complex(field):
            allow_parse_failure = False
        elif is_union_origin(get_origin(field.annotation)) and _union_is_complex(field.annotation, field.metadata):
            allow_parse_failure = True
        else:
            return False, False

        return True, allow_parse_failure

    # Default value of `case_sensitive` is `None`, because we don't want to break existing behavior.
    # We have to change the method to a non-static method and use
    # `self.case_sensitive` instead in V3.
    def next_field(
        self, field: FieldInfo | Any | None, key: str, case_sensitive: bool | None = None
    ) -> FieldInfo | None:
        """
        Find the field in a sub model by key(env name)

        By having the following models:

        ```py
        class SubSubModel(BaseSettings):
            dvals: Dict

        class SubModel(BaseSettings):
            vals: list[str]
            sub_sub_model: SubSubModel

        class Cfg(BaseSettings):
            sub_model: SubModel
        ```

        Then:
        next_field(sub_model, 'vals') Returns the `vals` field of `SubModel` class
        next_field(sub_model, 'sub_sub_model') Returns `sub_sub_model` field of `SubModel` class

        Args:
            field: The field.
            key: The key (env name).
            case_sensitive: Whether to search for key case sensitively.

        Returns:
            Field if it finds the next field otherwise `None`.
        """
        if not field:
            return None

        annotation = field.annotation if isinstance(field, FieldInfo) else field
        # Recurse into type arguments first (e.g. Optional[SubModel], unions).
        for type_ in get_args(annotation):
            type_has_key = self.next_field(type_, key, case_sensitive)
            if type_has_key:
                return type_has_key
        if _lenient_issubclass(get_origin(annotation), dict):
            # get value type if it's a dict
            return get_args(annotation)[-1]
        elif is_model_class(annotation) or is_pydantic_dataclass(annotation):  # type: ignore[arg-type]
            fields = _get_model_fields(annotation)
            # `case_sensitive is None` is here to be compatible with the old behavior.
            # Has to be removed in V3.
            for field_name, f in fields.items():
                for _, env_name, _ in self._extract_field_info(f, field_name):
                    if case_sensitive is None or case_sensitive:
                        if field_name == key or env_name == key:
                            return f
                    elif field_name.lower() == key.lower() or env_name.lower() == key.lower():
                        return f
        return None

    def explode_env_vars(self, field_name: str, field: FieldInfo, env_vars: Mapping[str, str | None]) -> dict[str, Any]:  # noqa: C901
        """
        Process env_vars and extract the values of keys containing env_nested_delimiter into nested dictionaries.

        This is applied to a single field, hence filtering by env_var prefix.

        Args:
            field_name: The field name.
            field: The field.
            env_vars: Environment variables.

        Returns:
            A dictionary containing extracted values from nested env values.
        """
        if not self.env_nested_delimiter:
            return {}

        ann = field.annotation
        is_dict = ann is dict or _lenient_issubclass(get_origin(ann), dict)

        prefixes = [
            f'{env_name}{self.env_nested_delimiter}' for _, env_name, _ in self._extract_field_info(field, field_name)
        ]
        result: dict[str, Any] = {}
        for env_name, env_val in env_vars.items():
            try:
                prefix = next(prefix for prefix in prefixes if env_name.startswith(prefix))
            except StopIteration:
                # This env var does not belong to the field being processed.
                continue
            # we remove the prefix before splitting in case the prefix has characters in common with the delimiter
            env_name_without_prefix = env_name[len(prefix) :]
            *keys, last_key = env_name_without_prefix.split(self.env_nested_delimiter, self.maxsplit)
            env_var = result
            target_field: FieldInfo | None = field
            # Walk down the nested model structure, creating intermediate dicts.
            for key in keys:
                target_field = self.next_field(target_field, key, self.case_sensitive)
                if isinstance(env_var, dict):
                    env_var = env_var.setdefault(key, {})

            # get proper field with last_key
            target_field = self.next_field(target_field, last_key, self.case_sensitive)

            # check if env_val maps to a complex field and if so, parse the env_val
            if (target_field or is_dict) and env_val:
                if isinstance(target_field, FieldInfo):
                    is_complex, allow_json_failure = self._field_is_complex(target_field)
                    if self.env_parse_enums:
                        enum_val = _annotation_enum_name_to_val(target_field.annotation, env_val)
                        env_val = env_val if enum_val is None else enum_val
                elif target_field:
                    # target_field is a raw type (e.g. from dict value type annotation)
                    is_complex = _annotation_is_complex(target_field, [])
                    allow_json_failure = True
                else:
                    # nested field type is dict
                    is_complex, allow_json_failure = True, True
                if is_complex:
                    try:
                        field_info = target_field if isinstance(target_field, FieldInfo) else None
                        env_val = self.decode_complex_value(last_key, field_info, env_val)  # type: ignore
                    except ValueError as e:
                        if not allow_json_failure:
                            raise e
            if isinstance(env_var, dict):
                # Don't let an explicit "none" sentinel clobber an already-built
                # non-empty nested dict for the same key.
                if last_key not in env_var or not isinstance(env_val, EnvNoneType) or env_var[last_key] == {}:
                    env_var[last_key] = self._coerce_env_val_strict(target_field, env_val)
        return result

    def _coerce_env_val_strict(self, field: FieldInfo | None, value: Any) -> Any:
        """
        Coerce environment string values based on field annotation if model config is `strict=True`.

        Args:
            field: The field.
            value: The value to coerce.

        Returns:
            The coerced value if successful, otherwise the original value.
        """
        try:
            if self.config.get('strict') and isinstance(value, str) and field is not None:
                if value == self.env_parse_none_str:
                    # The configured "none" marker string is handled elsewhere.
                    return value
                if not _annotation_contains_types(field.annotation, (Json,), is_instance=True):
                    return TypeAdapter(field.annotation).validate_python(value)
        except ValidationError:
            # Allow validation error to be raised at time of instantiation
            pass
        return value

    def __repr__(self) -> str:
        return (
            f'{self.__class__.__name__}(env_nested_delimiter={self.env_nested_delimiter!r}, '
            f'env_prefix_len={self.env_prefix_len!r})'
        )
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
# Explicit public API of this module.
__all__ = ['EnvSettingsSource']
|
source/pydantic_settings/sources/providers/gcp.py
ADDED
|
@@ -0,0 +1,241 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations as _annotations
|
| 2 |
+
|
| 3 |
+
import warnings
|
| 4 |
+
from collections.abc import Iterator, Mapping
|
| 5 |
+
from functools import cached_property
|
| 6 |
+
from typing import TYPE_CHECKING, Any
|
| 7 |
+
|
| 8 |
+
from pydantic.fields import FieldInfo
|
| 9 |
+
|
| 10 |
+
from ..types import SecretVersion
|
| 11 |
+
from .env import EnvSettingsSource
|
| 12 |
+
|
| 13 |
+
if TYPE_CHECKING:
|
| 14 |
+
from google.auth import default as google_auth_default
|
| 15 |
+
from google.auth.credentials import Credentials
|
| 16 |
+
from google.cloud.secretmanager import SecretManagerServiceClient
|
| 17 |
+
|
| 18 |
+
from pydantic_settings.main import BaseSettings
|
| 19 |
+
else:
|
| 20 |
+
Credentials = None
|
| 21 |
+
SecretManagerServiceClient = None
|
| 22 |
+
google_auth_default = None
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def import_gcp_secret_manager() -> None:
    """Bind the optional Google Cloud dependencies to this module's globals.

    Replaces the `None` sentinels set at import time, so callers can test
    whether the import has already happened.

    Raises:
        ImportError: if the `gcp-secret-manager` extra is not installed.
    """
    global Credentials, SecretManagerServiceClient, google_auth_default

    try:
        from google.auth import default as google_auth_default
        from google.auth.credentials import Credentials

        # Suppress any FutureWarning emitted while importing the client library.
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', category=FutureWarning)
            from google.cloud.secretmanager import SecretManagerServiceClient
    except ImportError as e:  # pragma: no cover
        raise ImportError(
            'GCP Secret Manager dependencies are not installed, run `pip install pydantic-settings[gcp-secret-manager]`'
        ) from e
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class GoogleSecretManagerMapping(Mapping[str, str | None]):
|
| 44 |
+
_loaded_secrets: dict[str, str | None]
|
| 45 |
+
_secret_client: SecretManagerServiceClient
|
| 46 |
+
|
| 47 |
+
def __init__(self, secret_client: SecretManagerServiceClient, project_id: str, case_sensitive: bool) -> None:
|
| 48 |
+
self._loaded_secrets = {}
|
| 49 |
+
self._secret_client = secret_client
|
| 50 |
+
self._project_id = project_id
|
| 51 |
+
self._case_sensitive = case_sensitive
|
| 52 |
+
|
| 53 |
+
@property
|
| 54 |
+
def _gcp_project_path(self) -> str:
|
| 55 |
+
return self._secret_client.common_project_path(self._project_id)
|
| 56 |
+
|
| 57 |
+
def _select_case_insensitive_secret(self, lower_name: str, candidates: list[str]) -> str:
|
| 58 |
+
if len(candidates) == 1:
|
| 59 |
+
return candidates[0]
|
| 60 |
+
|
| 61 |
+
# Sort to ensure deterministic selection (prefer lowercase / ASCII last)
|
| 62 |
+
candidates.sort()
|
| 63 |
+
winner = candidates[-1]
|
| 64 |
+
warnings.warn(
|
| 65 |
+
f"Secret collision: Found multiple secrets {candidates} normalizing to '{lower_name}'. "
|
| 66 |
+
f"Using '{winner}' for case-insensitive lookup.",
|
| 67 |
+
UserWarning,
|
| 68 |
+
stacklevel=2,
|
| 69 |
+
)
|
| 70 |
+
return winner
|
| 71 |
+
|
| 72 |
+
@cached_property
|
| 73 |
+
def _secret_name_map(self) -> dict[str, str]:
|
| 74 |
+
mapping: dict[str, str] = {}
|
| 75 |
+
# Group secrets by normalized name to detect collisions
|
| 76 |
+
normalized_groups: dict[str, list[str]] = {}
|
| 77 |
+
|
| 78 |
+
secrets = self._secret_client.list_secrets(parent=self._gcp_project_path)
|
| 79 |
+
for secret in secrets:
|
| 80 |
+
name = self._secret_client.parse_secret_path(secret.name).get('secret', '')
|
| 81 |
+
mapping[name] = name
|
| 82 |
+
|
| 83 |
+
if not self._case_sensitive:
|
| 84 |
+
lower_name = name.lower()
|
| 85 |
+
if lower_name not in normalized_groups:
|
| 86 |
+
normalized_groups[lower_name] = []
|
| 87 |
+
normalized_groups[lower_name].append(name)
|
| 88 |
+
|
| 89 |
+
if not self._case_sensitive:
|
| 90 |
+
for lower_name, candidates in normalized_groups.items():
|
| 91 |
+
mapping[lower_name] = self._select_case_insensitive_secret(lower_name, candidates)
|
| 92 |
+
|
| 93 |
+
return mapping
|
| 94 |
+
|
| 95 |
+
@property
|
| 96 |
+
def _secret_names(self) -> list[str]:
|
| 97 |
+
return list(self._secret_name_map.keys())
|
| 98 |
+
|
| 99 |
+
def _secret_version_path(self, key: str, version: str = 'latest') -> str:
|
| 100 |
+
return self._secret_client.secret_version_path(self._project_id, key, version)
|
| 101 |
+
|
| 102 |
+
def _get_secret_value(self, gcp_secret_name: str, version: str = 'latest') -> str | None:
|
| 103 |
+
try:
|
| 104 |
+
return self._secret_client.access_secret_version(
|
| 105 |
+
name=self._secret_version_path(gcp_secret_name, version)
|
| 106 |
+
).payload.data.decode('UTF-8')
|
| 107 |
+
except Exception:
|
| 108 |
+
return None
|
| 109 |
+
|
| 110 |
+
def __getitem__(self, key: str) -> str | None:
|
| 111 |
+
if key in self._loaded_secrets:
|
| 112 |
+
return self._loaded_secrets[key]
|
| 113 |
+
|
| 114 |
+
gcp_secret_name = self._secret_name_map.get(key)
|
| 115 |
+
if gcp_secret_name is None and not self._case_sensitive:
|
| 116 |
+
gcp_secret_name = self._secret_name_map.get(key.lower())
|
| 117 |
+
|
| 118 |
+
if gcp_secret_name:
|
| 119 |
+
self._loaded_secrets[key] = self._get_secret_value(gcp_secret_name)
|
| 120 |
+
else:
|
| 121 |
+
raise KeyError(key)
|
| 122 |
+
|
| 123 |
+
return self._loaded_secrets[key]
|
| 124 |
+
|
| 125 |
+
def __len__(self) -> int:
|
| 126 |
+
return len(self._secret_names)
|
| 127 |
+
|
| 128 |
+
def __iter__(self) -> Iterator[str]:
|
| 129 |
+
return iter(self._secret_names)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
class GoogleSecretManagerSettingsSource(EnvSettingsSource):
    """Settings source that reads values from GCP Secret Manager via a lazy mapping."""

    _credentials: Credentials
    _secret_client: SecretManagerServiceClient
    _project_id: str

    def __init__(
        self,
        settings_cls: type[BaseSettings],
        credentials: Credentials | None = None,
        project_id: str | None = None,
        env_prefix: str | None = None,
        env_parse_none_str: str | None = None,
        env_parse_enums: bool | None = None,
        secret_client: SecretManagerServiceClient | None = None,
        case_sensitive: bool | None = True,
    ) -> None:
        # Import Google Packages if they haven't already been imported
        if SecretManagerServiceClient is None or Credentials is None or google_auth_default is None:
            import_gcp_secret_manager()

        # If credentials or project_id are not passed, then
        # try to get them from the default function
        if not credentials or not project_id:
            _creds, _project_id = google_auth_default()

            # Set the credentials and/or project id if they weren't specified
            if credentials is None:
                credentials = _creds

            if project_id is None:
                if isinstance(_project_id, str):
                    project_id = _project_id
                else:
                    raise AttributeError(
                        'project_id is required to be specified either as an argument or from the google.auth.default. See https://google-auth.readthedocs.io/en/master/reference/google.auth.html#google.auth.default'
                    )

        self._credentials: Credentials = credentials
        self._project_id: str = project_id

        if secret_client:
            self._secret_client = secret_client
        else:
            self._secret_client = SecretManagerServiceClient(credentials=self._credentials)

        # NOTE: _secret_client and _project_id must be assigned *before* this
        # call: EnvSettingsSource.__init__ invokes self._load_env_vars(), which
        # is overridden below and reads both attributes.
        super().__init__(
            settings_cls,
            case_sensitive=case_sensitive,
            env_prefix=env_prefix,
            env_ignore_empty=False,
            env_parse_none_str=env_parse_none_str,
            env_parse_enums=env_parse_enums,
        )

    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
        """Override get_field_value to get the secret value from GCP Secret Manager.

        Look for a SecretVersion metadata field to specify a particular SecretVersion.

        Args:
            field: The field to get the value for
            field_name: The declared name of the field

        Returns:
            A tuple of (value, key, value_is_complex), where `key` is the identifier used
            to populate the model (either the field name or an alias, depending on
            configuration).
        """

        # An explicit version pinned via SecretVersion annotation metadata, if any.
        secret_version = next((m.version for m in field.metadata if isinstance(m, SecretVersion)), None)

        # If a secret version is specified, try to get that specific version of the secret from
        # GCP Secret Manager via the GoogleSecretManagerMapping. This allows different versions
        # of the same secret name to be retrieved independently and cached in the GoogleSecretManagerMapping
        if secret_version and isinstance(self.env_vars, GoogleSecretManagerMapping):
            for field_key, env_name, value_is_complex in self._extract_field_info(field, field_name):
                gcp_secret_name = self.env_vars._secret_name_map.get(env_name)
                if gcp_secret_name is None and not self.case_sensitive:
                    gcp_secret_name = self.env_vars._secret_name_map.get(env_name.lower())

                if gcp_secret_name:
                    env_val = self.env_vars._get_secret_value(gcp_secret_name, secret_version)
                    if env_val is not None:
                        # If populate_by_name is enabled, return field_name to allow multiple fields
                        # with the same alias but different versions to be distinguished
                        if self.settings_cls.model_config.get('populate_by_name'):
                            return env_val, field_name, value_is_complex
                        return env_val, field_key, value_is_complex

            # If a secret version is specified but not found, we should not fall back to "latest" (default behavior)
            # as that would be incorrect. We return None to indicate the value was not found.
            return None, field_name, False

        val, key, is_complex = super().get_field_value(field, field_name)

        # If populate_by_name is enabled, we need to return the field_name as the key
        # without this being enabled, you cannot load two secrets with the same name but different versions
        if self.settings_cls.model_config.get('populate_by_name') and val is not None:
            return val, field_name, is_complex
        return val, key, is_complex

    def _load_env_vars(self) -> Mapping[str, str | None]:
        """Provide the lazy secret mapping in place of `os.environ`."""
        return GoogleSecretManagerMapping(
            self._secret_client, project_id=self._project_id, case_sensitive=self.case_sensitive
        )

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}(project_id={self._project_id!r}, env_nested_delimiter={self.env_nested_delimiter!r})'
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
# Explicit public API of this module.
__all__ = ['GoogleSecretManagerSettingsSource', 'GoogleSecretManagerMapping']
|
source/pydantic_settings/sources/providers/json.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""JSON file settings source."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations as _annotations
|
| 4 |
+
|
| 5 |
+
import json
|
| 6 |
+
from pathlib import Path
|
| 7 |
+
from typing import (
|
| 8 |
+
TYPE_CHECKING,
|
| 9 |
+
Any,
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
from ..base import ConfigFileSourceMixin, InitSettingsSource
|
| 13 |
+
from ..types import DEFAULT_PATH, PathType
|
| 14 |
+
|
| 15 |
+
if TYPE_CHECKING:
|
| 16 |
+
from pydantic_settings.main import BaseSettings
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class JsonConfigSettingsSource(InitSettingsSource, ConfigFileSourceMixin):
    """Settings source that loads variables from one or more JSON files."""

    def __init__(
        self,
        settings_cls: type[BaseSettings],
        json_file: PathType | None = DEFAULT_PATH,
        json_file_encoding: str | None = None,
        deep_merge: bool = False,
    ):
        config = settings_cls.model_config
        # Explicit constructor arguments win over model_config values.
        if json_file != DEFAULT_PATH:
            self.json_file_path = json_file
        else:
            self.json_file_path = config.get('json_file')
        if json_file_encoding is not None:
            self.json_file_encoding = json_file_encoding
        else:
            self.json_file_encoding = config.get('json_file_encoding')
        self.json_data = self._read_files(self.json_file_path, deep_merge=deep_merge)
        super().__init__(settings_cls, self.json_data)

    def _read_file(self, file_path: Path) -> dict[str, Any]:
        """Parse a single JSON file using the configured encoding (None -> platform default)."""
        with file_path.open(encoding=self.json_file_encoding) as handle:
            return json.load(handle)

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}(json_file={self.json_file_path})'
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
# Explicit public API of this module.
__all__ = ['JsonConfigSettingsSource']
|
source/pydantic_settings/sources/providers/nested_secrets.py
ADDED
|
@@ -0,0 +1,166 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import warnings
|
| 3 |
+
from functools import reduce
|
| 4 |
+
from glob import iglob
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
from typing import TYPE_CHECKING, Any, Literal, Optional
|
| 7 |
+
|
| 8 |
+
from ...exceptions import SettingsError
|
| 9 |
+
from ...utils import path_type_label
|
| 10 |
+
from ..base import PydanticBaseSettingsSource
|
| 11 |
+
from ..utils import parse_env_vars
|
| 12 |
+
from .env import EnvSettingsSource
|
| 13 |
+
from .secrets import SecretsSettingsSource
|
| 14 |
+
|
| 15 |
+
if TYPE_CHECKING:
|
| 16 |
+
from ...main import BaseSettings
|
| 17 |
+
from ...sources import PathType
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
# Default upper bound on the combined size of all files under a secrets
# directory; enforced in NestedSecretsSettingsSource.validate_secrets_path.
SECRETS_DIR_MAX_SIZE = 16 * 2**20  # 16 MiB seems to be a reasonable default
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class NestedSecretsSettingsSource(EnvSettingsSource):
    """Secrets-directory source that supports nested keys.

    Secret files are loaded into a flat dict and then fed through the
    EnvSettingsSource machinery, so nested values can be expressed either with
    a delimiter in the file name or (with `secrets_nested_subdir`) as
    subdirectories.
    """

    def __init__(
        self,
        file_secret_settings: PydanticBaseSettingsSource | SecretsSettingsSource,
        secrets_dir: Optional['PathType'] = None,
        secrets_dir_missing: Literal['ok', 'warn', 'error'] | None = None,
        secrets_dir_max_size: int | None = None,
        secrets_case_sensitive: bool | None = None,
        secrets_prefix: str | None = None,
        secrets_nested_delimiter: str | None = None,
        secrets_nested_subdir: bool | None = None,
        # args for compatibility with SecretsSettingsSource, don't use directly
        case_sensitive: bool | None = None,
        env_prefix: str | None = None,
    ) -> None:
        # We allow the first argument to be settings_cls like original
        # SecretsSettingsSource. However, it is recommended to pass
        # SecretsSettingsSource instance instead (as it is shown in usage examples),
        # otherwise `_secrets_dir` arg passed to Settings() constructor will be ignored.
        settings_cls: type[BaseSettings] = getattr(
            file_secret_settings,
            'settings_cls',
            file_secret_settings,  # type: ignore[arg-type]
        )
        # config options — each option resolves through a precedence chain:
        # explicit argument > model_config value > hard-coded default.
        conf = settings_cls.model_config
        self.secrets_dir: PathType | None = first_not_none(
            getattr(file_secret_settings, 'secrets_dir', None),
            secrets_dir,
            conf.get('secrets_dir'),
        )
        self.secrets_dir_missing: Literal['ok', 'warn', 'error'] = first_not_none(
            secrets_dir_missing,
            conf.get('secrets_dir_missing'),
            'warn',
        )
        if self.secrets_dir_missing not in ('ok', 'warn', 'error'):
            raise SettingsError(f'invalid secrets_dir_missing value: {self.secrets_dir_missing}')
        self.secrets_dir_max_size: int = first_not_none(
            secrets_dir_max_size,
            conf.get('secrets_dir_max_size'),
            SECRETS_DIR_MAX_SIZE,
        )
        self.case_sensitive: bool = first_not_none(
            secrets_case_sensitive,
            conf.get('secrets_case_sensitive'),
            case_sensitive,
            conf.get('case_sensitive'),
            False,
        )
        self.secrets_prefix: str = first_not_none(
            secrets_prefix,
            conf.get('secrets_prefix'),
            env_prefix,
            conf.get('env_prefix'),
            '',
        )

        # nested options
        self.secrets_nested_delimiter: str | None = first_not_none(
            secrets_nested_delimiter,
            conf.get('secrets_nested_delimiter'),
            conf.get('env_nested_delimiter'),
        )
        self.secrets_nested_subdir: bool = first_not_none(
            secrets_nested_subdir,
            conf.get('secrets_nested_subdir'),
            False,
        )
        if self.secrets_nested_subdir:
            if secrets_nested_delimiter or conf.get('secrets_nested_delimiter'):
                raise SettingsError('Options secrets_nested_delimiter and secrets_nested_subdir are mutually exclusive')
            else:
                # Subdirectory mode: the path separator acts as the delimiter.
                self.secrets_nested_delimiter = os.sep

        # ensure valid secrets_path
        if self.secrets_dir is None:
            paths = []
        elif isinstance(self.secrets_dir, (Path, str)):
            paths = [self.secrets_dir]
        else:
            paths = list(self.secrets_dir)
        self.secrets_paths: list[Path] = [Path(p).expanduser().resolve() for p in paths]
        for path in self.secrets_paths:
            self.validate_secrets_path(path)

        # construct parent
        super().__init__(
            settings_cls,
            case_sensitive=self.case_sensitive,
            env_prefix=self.secrets_prefix,
            env_nested_delimiter=self.secrets_nested_delimiter,
            env_ignore_empty=False,  # match SecretsSettingsSource behaviour
            env_parse_enums=True,  # we can pass everything here, it will still behave as "True"
            env_parse_none_str=None,  # match SecretsSettingsSource behaviour
        )
        self.env_parse_none_str = None  # update manually because of None

        # update parent members — replace the os.environ snapshot set by
        # EnvSettingsSource.__init__ with the secrets read from disk.
        if not len(self.secrets_paths):
            self.env_vars = {}
        else:
            # Merge all directories; later directories override earlier ones on key clashes.
            secrets = reduce(
                lambda d1, d2: dict((*d1.items(), *d2.items())),
                (self.load_secrets(p) for p in self.secrets_paths),
            )
            self.env_vars = parse_env_vars(
                secrets,
                self.case_sensitive,
                self.env_ignore_empty,
                self.env_parse_none_str,
            )

    def validate_secrets_path(self, path: Path) -> None:
        """Check that a secrets path is an existing directory within the size limit.

        A missing path is handled per `secrets_dir_missing` ('ok' | 'warn' | 'error').

        Raises:
            SettingsError: if the path is not a directory, exceeds
                `secrets_dir_max_size`, or is missing with mode 'error'.
        """
        if not path.exists():
            if self.secrets_dir_missing == 'ok':
                pass
            elif self.secrets_dir_missing == 'warn':
                warnings.warn(f'directory "{path}" does not exist', stacklevel=2)
            elif self.secrets_dir_missing == 'error':
                raise SettingsError(f'directory "{path}" does not exist')
            else:
                raise ValueError  # unreachable, checked before
        else:
            if not path.is_dir():
                raise SettingsError(f'secrets_dir must reference a directory, not a {path_type_label(path)}')
            secrets_dir_size = sum(f.stat().st_size for f in path.glob('**/*') if f.is_file())
            if secrets_dir_size > self.secrets_dir_max_size:
                raise SettingsError(f'secrets_dir size is above {self.secrets_dir_max_size} bytes')

    @staticmethod
    def load_secrets(path: Path) -> dict[str, str]:
        """Read every file under `path` (recursively) into a {relative-path: stripped-text} dict.

        NOTE(review): the directory path is interpolated into a glob pattern, so a
        secrets_dir containing glob metacharacters (e.g. `[`, `*`) may not be
        enumerated correctly — confirm whether such paths need support.
        """
        return {
            str(p.relative_to(path)): p.read_text().strip()
            for p in map(Path, iglob(f'{path}/**/*', recursive=True))
            if p.is_file()
        }

    def __repr__(self) -> str:
        return f'NestedSecretsSettingsSource(secrets_dir={self.secrets_dir!r})'
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
def first_not_none(*objs: Any) -> Any:
    """Return the first argument that is not ``None``, or ``None`` if all are."""
    for obj in objs:
        if obj is not None:
            return obj
    return None
|
source/pydantic_settings/sources/providers/pyproject.py
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Pyproject TOML file settings source."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations as _annotations
|
| 4 |
+
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
from typing import (
|
| 7 |
+
TYPE_CHECKING,
|
| 8 |
+
)
|
| 9 |
+
|
| 10 |
+
from .toml import TomlConfigSettingsSource
|
| 11 |
+
|
| 12 |
+
if TYPE_CHECKING:
|
| 13 |
+
from pydantic_settings.main import BaseSettings
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class PyprojectTomlConfigSettingsSource(TomlConfigSettingsSource):
    """
    A source class that loads variables from a `pyproject.toml` file.
    """

    def __init__(
        self,
        settings_cls: type[BaseSettings],
        toml_file: Path | None = None,
    ) -> None:
        """Load the configured table (default `[tool.pydantic-settings]`) from pyproject.toml.

        Args:
            settings_cls: The settings class being populated.
            toml_file: Explicit pyproject.toml path; when omitted, the file is
                discovered from the current working directory, walking up at most
                `model_config['pyproject_toml_depth']` parent directories.
        """
        self.toml_file_path = self._pick_pyproject_toml_file(
            toml_file, settings_cls.model_config.get('pyproject_toml_depth', 0)
        )
        # Key path of the table to read, e.g. ('tool', 'pydantic-settings').
        self.toml_table_header: tuple[str, ...] = settings_cls.model_config.get(
            'pyproject_toml_table_header', ('tool', 'pydantic-settings')
        )
        self.toml_data = self._read_files(self.toml_file_path)
        # Drill into the configured table; any missing key yields an empty dict.
        for key in self.toml_table_header:
            self.toml_data = self.toml_data.get(key, {})
        # Deliberately skip TomlConfigSettingsSource.__init__ (it would re-read
        # the file) and initialize the InitSettingsSource grandparent directly
        # with the already-extracted table data.
        super(TomlConfigSettingsSource, self).__init__(settings_cls, self.toml_data)

    @staticmethod
    def _pick_pyproject_toml_file(provided: Path | None, depth: int) -> Path:
        """Pick a `pyproject.toml` file path to use.

        Args:
            provided: Explicit path provided when instantiating this class.
            depth: Number of directories up the tree to check of a pyproject.toml.

        """
        if provided:
            return provided.resolve()
        rv = Path.cwd() / 'pyproject.toml'
        count = 0
        if not rv.is_file():
            # Not in the CWD: start one directory up and walk toward the root,
            # checking at most `depth` ancestor directories.
            child = rv.parent.parent / 'pyproject.toml'
            while count < depth:
                if child.is_file():
                    return child
                if str(child.parent) == rv.root:
                    break  # end discovery after checking system root once
                child = child.parent.parent / 'pyproject.toml'
                count += 1
        # Fall back to the CWD candidate (possibly nonexistent) when nothing matched.
        return rv
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
__all__ = ['PyprojectTomlConfigSettingsSource']
|
source/pydantic_settings/sources/providers/secrets.py
ADDED
|
@@ -0,0 +1,132 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Secrets file settings source."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations as _annotations
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import warnings
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
from typing import (
|
| 9 |
+
TYPE_CHECKING,
|
| 10 |
+
Any,
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
from pydantic.fields import FieldInfo
|
| 14 |
+
|
| 15 |
+
from pydantic_settings.utils import path_type_label
|
| 16 |
+
|
| 17 |
+
from ...exceptions import SettingsError
|
| 18 |
+
from ..base import PydanticBaseEnvSettingsSource
|
| 19 |
+
from ..types import EnvPrefixTarget, PathType
|
| 20 |
+
|
| 21 |
+
if TYPE_CHECKING:
|
| 22 |
+
from pydantic_settings.main import BaseSettings
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class SecretsSettingsSource(PydanticBaseEnvSettingsSource):
    """
    Source class for loading settings values from secret files.

    Each secret is a single file whose name matches the field's env name and
    whose stripped text content is the field's value.
    """

    def __init__(
        self,
        settings_cls: type[BaseSettings],
        secrets_dir: PathType | None = None,
        case_sensitive: bool | None = None,
        env_prefix: str | None = None,
        env_prefix_target: EnvPrefixTarget | None = None,
        env_ignore_empty: bool | None = None,
        env_parse_none_str: str | None = None,
        env_parse_enums: bool | None = None,
    ) -> None:
        super().__init__(
            settings_cls,
            case_sensitive,
            env_prefix,
            env_prefix_target,
            env_ignore_empty,
            env_parse_none_str,
            env_parse_enums,
        )
        # Explicit argument wins; otherwise fall back to model_config['secrets_dir'].
        self.secrets_dir = secrets_dir if secrets_dir is not None else self.config.get('secrets_dir')

    def __call__(self) -> dict[str, Any]:
        """
        Build fields from "secrets" files.

        Returns an empty dict when no secrets directory is configured or none
        of the configured directories exist; a missing directory only emits a
        warning, while an existing non-directory path raises `SettingsError`.
        """
        secrets: dict[str, str | None] = {}

        if self.secrets_dir is None:
            return secrets

        # Normalize to a list: a single path-like becomes a one-element list.
        secrets_dirs = [self.secrets_dir] if isinstance(self.secrets_dir, (str, os.PathLike)) else self.secrets_dir
        secrets_paths = [Path(p).expanduser() for p in secrets_dirs]
        # Keep only the directories that actually exist; used by get_field_value.
        self.secrets_paths = []

        for path in secrets_paths:
            if not path.exists():
                warnings.warn(f'directory "{path}" does not exist')
            else:
                self.secrets_paths.append(path)

        if not len(self.secrets_paths):
            return secrets

        for path in self.secrets_paths:
            if not path.is_dir():
                raise SettingsError(f'secrets_dir must reference a directory, not a {path_type_label(path)}')

        return super().__call__()

    @classmethod
    def find_case_path(cls, dir_path: Path, file_name: str, case_sensitive: bool) -> Path | None:
        """
        Find a file within path's directory matching filename, optionally ignoring case.

        Args:
            dir_path: Directory path.
            file_name: File name.
            case_sensitive: Whether to search for file name case sensitively.

        Returns:
            Whether file path or `None` if file does not exist in directory.
        """
        # Exact matches are preferred; case-insensitive matches are accepted
        # only when case_sensitive is False.
        for f in dir_path.iterdir():
            if f.name == file_name:
                return f
            elif not case_sensitive and f.name.lower() == file_name.lower():
                return f
        return None

    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
        """
        Gets the value for field from secret file and a flag to determine whether value is complex.

        Args:
            field: The field.
            field_name: The field name.

        Returns:
            A tuple that contains the value (`None` if the file does not exist), key, and
            a flag to determine whether value is complex.
        """

        # NOTE(review): assumes _extract_field_info yields at least one entry;
        # otherwise field_key/value_is_complex below would be unbound — confirm.
        for field_key, env_name, value_is_complex in self._extract_field_info(field, field_name):
            # paths reversed to match the last-wins behaviour of `env_file`
            for secrets_path in reversed(self.secrets_paths):
                path = self.find_case_path(secrets_path, env_name, self.case_sensitive)
                if not path:
                    # path does not exist, we currently don't return a warning for this
                    continue

                if path.is_file():
                    return path.read_text().strip(), field_key, value_is_complex
                else:
                    warnings.warn(
                        f'attempted to load secret file "{path}" but found a {path_type_label(path)} instead.',
                        stacklevel=4,
                    )

        return None, field_key, value_is_complex

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}(secrets_dir={self.secrets_dir!r})'
|
source/pydantic_settings/sources/providers/toml.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TOML file settings source."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations as _annotations
|
| 4 |
+
|
| 5 |
+
import sys
|
| 6 |
+
from pathlib import Path
|
| 7 |
+
from typing import (
|
| 8 |
+
TYPE_CHECKING,
|
| 9 |
+
Any,
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
from ..base import ConfigFileSourceMixin, InitSettingsSource
|
| 13 |
+
from ..types import DEFAULT_PATH, PathType
|
| 14 |
+
|
| 15 |
+
if TYPE_CHECKING:
|
| 16 |
+
from pydantic_settings.main import BaseSettings
|
| 17 |
+
|
| 18 |
+
if sys.version_info >= (3, 11):
|
| 19 |
+
import tomllib
|
| 20 |
+
else:
|
| 21 |
+
tomllib = None
|
| 22 |
+
import tomli
|
| 23 |
+
else:
|
| 24 |
+
tomllib = None
|
| 25 |
+
tomli = None
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def import_toml() -> None:
    """Lazily import a TOML parser, caching it in the module globals.

    On Python 3.11+ the stdlib ``tomllib`` is used; earlier versions require
    the third-party ``tomli`` package.
    """
    global tomli
    global tomllib
    if sys.version_info >= (3, 11):
        if tomllib is None:
            import tomllib
        return
    if tomli is not None:
        return
    try:
        import tomli
    except ImportError as e:  # pragma: no cover
        raise ImportError('tomli is not installed, run `pip install pydantic-settings[toml]`') from e
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class TomlConfigSettingsSource(InitSettingsSource, ConfigFileSourceMixin):
    """
    A source class that loads variables from a TOML file
    """

    def __init__(
        self,
        settings_cls: type[BaseSettings],
        toml_file: PathType | None = DEFAULT_PATH,
        deep_merge: bool = False,
    ):
        """
        Args:
            settings_cls: The settings class being populated.
            toml_file: Path(s) to the TOML file(s); `DEFAULT_PATH` is a sentinel
                meaning "not provided", in which case `model_config['toml_file']` is used.
            deep_merge: Passed to `_read_files` to control how multiple files merge.
        """
        self.toml_file_path = toml_file if toml_file != DEFAULT_PATH else settings_cls.model_config.get('toml_file')
        self.toml_data = self._read_files(self.toml_file_path, deep_merge=deep_merge)
        super().__init__(settings_cls, self.toml_data)

    def _read_file(self, file_path: Path) -> dict[str, Any]:
        """Parse a single TOML file; opened in binary mode as tomllib/tomli require."""
        import_toml()
        with file_path.open(mode='rb') as toml_file:
            if sys.version_info < (3, 11):
                return tomli.load(toml_file)
            return tomllib.load(toml_file)

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}(toml_file={self.toml_file_path})'
|
source/pydantic_settings/sources/providers/yaml.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""YAML file settings source."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations as _annotations
|
| 4 |
+
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
from typing import (
|
| 7 |
+
TYPE_CHECKING,
|
| 8 |
+
Any,
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
from ..base import ConfigFileSourceMixin, InitSettingsSource
|
| 12 |
+
from ..types import DEFAULT_PATH, PathType
|
| 13 |
+
|
| 14 |
+
if TYPE_CHECKING:
|
| 15 |
+
import yaml
|
| 16 |
+
|
| 17 |
+
from pydantic_settings.main import BaseSettings
|
| 18 |
+
else:
|
| 19 |
+
yaml = None
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def import_yaml() -> None:
    """Import PyYAML on first use, caching the module in the global ``yaml`` name.

    Raises:
        ImportError: If PyYAML is not installed.
    """
    global yaml
    if yaml is not None:
        return
    try:
        import yaml
    except ImportError as e:
        raise ImportError('PyYAML is not installed, run `pip install pydantic-settings[yaml]`') from e
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class YamlConfigSettingsSource(InitSettingsSource, ConfigFileSourceMixin):
    """
    A source class that loads variables from a yaml file
    """

    def __init__(
        self,
        settings_cls: type[BaseSettings],
        yaml_file: PathType | None = DEFAULT_PATH,
        yaml_file_encoding: str | None = None,
        yaml_config_section: str | None = None,
        deep_merge: bool = False,
    ):
        """
        Args:
            settings_cls: The settings class being populated.
            yaml_file: Path(s) to the YAML file(s); `DEFAULT_PATH` is a sentinel
                meaning "not provided", in which case `model_config['yaml_file']` is used.
            yaml_file_encoding: Text encoding used to read the file(s); falls back
                to `model_config['yaml_file_encoding']`.
            yaml_config_section: Optional dot-notation path selecting a sub-section
                of the loaded data; falls back to `model_config['yaml_config_section']`.
            deep_merge: Passed to `_read_files` to control how multiple files merge.
        """
        self.yaml_file_path = yaml_file if yaml_file != DEFAULT_PATH else settings_cls.model_config.get('yaml_file')
        self.yaml_file_encoding = (
            yaml_file_encoding
            if yaml_file_encoding is not None
            else settings_cls.model_config.get('yaml_file_encoding')
        )
        self.yaml_config_section = (
            yaml_config_section
            if yaml_config_section is not None
            else settings_cls.model_config.get('yaml_config_section')
        )
        self.yaml_data = self._read_files(self.yaml_file_path, deep_merge=deep_merge)

        if self.yaml_config_section is not None:
            self.yaml_data = self._traverse_nested_section(
                self.yaml_data, self.yaml_config_section, self.yaml_config_section
            )
        super().__init__(settings_cls, self.yaml_data)

    def _read_file(self, file_path: Path) -> dict[str, Any]:
        """Parse a single YAML file with `safe_load`; an empty document becomes {}."""
        import_yaml()
        with file_path.open(encoding=self.yaml_file_encoding) as yaml_file:
            return yaml.safe_load(yaml_file) or {}

    def _traverse_nested_section(
        self, data: dict[str, Any], section_path: str, original_path: str | None = None
    ) -> dict[str, Any]:
        """
        Traverse nested YAML sections using dot-notation path.

        This method tries to match the longest possible key first before splitting on dots,
        allowing access to YAML keys that contain literal dot characters.

        For example, with section_path="a.b.c", it will try:
        1. "a.b.c" as a literal key
        2. "a.b" as a key, then traverse to "c"
        3. "a" as a key, then traverse to "b.c"
        4. "a" as a key, then "b" as a key, then "c" as a key

        Raises:
            ValueError: If `section_path` is empty.
            KeyError: If no key combination matches the path.
            TypeError: If an intermediate value along the path is not a dict.
        """
        # Track the original path for error messages
        if original_path is None:
            original_path = section_path

        # Only reject truly empty paths
        if not section_path:
            raise ValueError('yaml_config_section cannot be empty')

        # Try the full path as a literal key first (even with leading/trailing/consecutive dots)
        try:
            return data[section_path]
        except KeyError:
            pass  # Not a literal key, try splitting
        except TypeError:
            raise TypeError(
                f'yaml_config_section path "{original_path}" cannot be traversed in {self.yaml_file_path}. '
                f'An intermediate value is not a dictionary.'
            )

        # If path contains no dots, we already tried it as a literal key above
        if '.' not in section_path:
            raise KeyError(f'yaml_config_section key "{original_path}" not found in {self.yaml_file_path}')

        # Try progressively shorter prefixes (greedy left-to-right approach)
        parts = section_path.split('.')
        for i in range(len(parts) - 1, 0, -1):
            prefix = '.'.join(parts[:i])
            suffix = '.'.join(parts[i:])

            if prefix in data:
                # Found the prefix as a literal key, now recursively traverse the suffix
                try:
                    return self._traverse_nested_section(data[prefix], suffix, original_path)
                except TypeError:
                    raise TypeError(
                        f'yaml_config_section path "{original_path}" cannot be traversed in {self.yaml_file_path}. '
                        f'An intermediate value is not a dictionary.'
                    )

        # If we get here, no match was found
        raise KeyError(f'yaml_config_section key "{original_path}" not found in {self.yaml_file_path}')

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}(yaml_file={self.yaml_file_path})'
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
__all__ = ['YamlConfigSettingsSource']
|
source/pydantic_settings/sources/types.py
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Type definitions for pydantic-settings sources."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations as _annotations
|
| 4 |
+
|
| 5 |
+
from collections.abc import Sequence
|
| 6 |
+
from pathlib import Path
|
| 7 |
+
from typing import TYPE_CHECKING, Any, Literal
|
| 8 |
+
|
| 9 |
+
if TYPE_CHECKING:
|
| 10 |
+
from pydantic._internal._dataclasses import PydanticDataclass
|
| 11 |
+
from pydantic.main import BaseModel
|
| 12 |
+
|
| 13 |
+
PydanticModel = PydanticDataclass | BaseModel
|
| 14 |
+
else:
|
| 15 |
+
PydanticModel = Any
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class EnvNoneType(str):
    """Marker `str` subclass flagging an env value that matched the configured none-string."""

    pass


class NoDecode:
    """Annotation to prevent decoding of a field value."""

    pass


class ForceDecode:
    """Annotation to force decoding of a field value."""

    pass
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
# What an `env_prefix` is applied to: raw variable names, aliases, or both.
EnvPrefixTarget = Literal['variable', 'alias', 'all']
# One or more dotenv file locations.
DotenvType = Path | str | Sequence[Path | str]
# One or more generic file-system locations.
PathType = Path | str | Sequence[Path | str]
# Sentinel meaning "no explicit path was given" (sources fall back to model_config).
DEFAULT_PATH: PathType = Path('')

# This is used as default value for `_env_file` in the `BaseSettings` class and
# `env_file` in `DotEnvSettingsSource` so the default can be distinguished from `None`.
# See the docstring of `BaseSettings` for more details.
ENV_FILE_SENTINEL: DotenvType = Path('')
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class _CliSubCommand:
    """Internal marker annotation consumed by the CLI settings source."""

    pass


class _CliPositionalArg:
    """Internal marker annotation consumed by the CLI settings source."""

    pass


class _CliImplicitFlag:
    """Internal marker annotation consumed by the CLI settings source."""

    pass


class _CliToggleFlag(_CliImplicitFlag):
    """Internal marker annotation; specializes `_CliImplicitFlag`."""

    pass


class _CliDualFlag(_CliImplicitFlag):
    """Internal marker annotation; specializes `_CliImplicitFlag`."""

    pass


class _CliExplicitFlag:
    """Internal marker annotation consumed by the CLI settings source."""

    pass


class _CliUnknownArgs:
    """Internal marker annotation consumed by the CLI settings source."""

    pass
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class SecretVersion:
    """Simple holder for a secret's version string."""

    def __init__(self, version: str) -> None:
        self.version = version

    def __repr__(self) -> str:
        return f'{type(self).__name__}({self.version!r})'
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
__all__ = [
|
| 82 |
+
'DEFAULT_PATH',
|
| 83 |
+
'ENV_FILE_SENTINEL',
|
| 84 |
+
'EnvPrefixTarget',
|
| 85 |
+
'DotenvType',
|
| 86 |
+
'EnvNoneType',
|
| 87 |
+
'ForceDecode',
|
| 88 |
+
'NoDecode',
|
| 89 |
+
'PathType',
|
| 90 |
+
'PydanticModel',
|
| 91 |
+
'SecretVersion',
|
| 92 |
+
'_CliExplicitFlag',
|
| 93 |
+
'_CliImplicitFlag',
|
| 94 |
+
'_CliToggleFlag',
|
| 95 |
+
'_CliDualFlag',
|
| 96 |
+
'_CliPositionalArg',
|
| 97 |
+
'_CliSubCommand',
|
| 98 |
+
'_CliUnknownArgs',
|
| 99 |
+
]
|
source/pydantic_settings/sources/utils.py
ADDED
|
@@ -0,0 +1,283 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utility functions for pydantic-settings sources."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations as _annotations
|
| 4 |
+
|
| 5 |
+
from collections import deque
|
| 6 |
+
from collections.abc import Mapping, Sequence
|
| 7 |
+
from dataclasses import is_dataclass
|
| 8 |
+
from enum import Enum
|
| 9 |
+
from typing import Any, TypeVar, cast, get_args, get_origin
|
| 10 |
+
|
| 11 |
+
from pydantic import BaseModel, Json, RootModel, Secret
|
| 12 |
+
from pydantic._internal._utils import is_model_class
|
| 13 |
+
from pydantic.dataclasses import is_pydantic_dataclass
|
| 14 |
+
from pydantic.fields import FieldInfo
|
| 15 |
+
from typing_inspection import typing_objects
|
| 16 |
+
|
| 17 |
+
from ..exceptions import SettingsError
|
| 18 |
+
from ..utils import _lenient_issubclass
|
| 19 |
+
from .types import EnvNoneType
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def _get_env_var_key(key: str, case_sensitive: bool = False) -> str:
|
| 23 |
+
return key if case_sensitive else key.lower()
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def _parse_env_none_str(value: str | None, parse_none_str: str | None = None) -> str | None | EnvNoneType:
    """Wrap *value* in `EnvNoneType` when it equals the configured none-string."""
    if parse_none_str is not None and value == parse_none_str:
        return EnvNoneType(value)
    return value
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def parse_env_vars(
    env_vars: Mapping[str, str | None],
    case_sensitive: bool = False,
    ignore_empty: bool = False,
    parse_none_str: str | None = None,
) -> Mapping[str, str | None]:
    """Normalize a raw env-var mapping: key casing, empty filtering, none-string wrapping."""
    parsed: dict[str, str | None] = {}
    for key, value in env_vars.items():
        if ignore_empty and value == '':
            continue
        parsed[_get_env_var_key(key, case_sensitive)] = _parse_env_none_str(value, parse_none_str)
    return parsed
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def _substitute_typevars(tp: Any, param_map: dict[Any, Any]) -> Any:
|
| 44 |
+
"""Substitute TypeVars in a type annotation with concrete types from param_map."""
|
| 45 |
+
if isinstance(tp, TypeVar) and tp in param_map:
|
| 46 |
+
return param_map[tp]
|
| 47 |
+
args = get_args(tp)
|
| 48 |
+
if not args:
|
| 49 |
+
return tp
|
| 50 |
+
new_args = tuple(_substitute_typevars(arg, param_map) for arg in args)
|
| 51 |
+
if new_args == args:
|
| 52 |
+
return tp
|
| 53 |
+
origin = get_origin(tp)
|
| 54 |
+
if origin is not None:
|
| 55 |
+
try:
|
| 56 |
+
return origin[new_args]
|
| 57 |
+
except TypeError:
|
| 58 |
+
# types.UnionType and similar are not directly subscriptable,
|
| 59 |
+
# reconstruct using | operator
|
| 60 |
+
import functools
|
| 61 |
+
import operator
|
| 62 |
+
|
| 63 |
+
return functools.reduce(operator.or_, new_args)
|
| 64 |
+
return tp
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _resolve_type_alias(annotation: Any) -> Any:
    """Resolve a TypeAliasType to its underlying value, substituting type params if parameterized."""
    # Bare alias (`type X = ...` used without parameters): return its value directly.
    if typing_objects.is_typealiastype(annotation):
        return annotation.__value__
    origin = get_origin(annotation)
    # Parameterized alias (`X[int]`): map the alias' type params to the supplied
    # arguments inside the alias value.
    if typing_objects.is_typealiastype(origin):
        type_params = getattr(origin, '__type_params__', ())
        type_args = get_args(annotation)
        value = origin.__value__
        if type_params and type_args:
            return _substitute_typevars(value, dict(zip(type_params, type_args)))
        return value
    # Not a type alias: pass through unchanged.
    return annotation
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def _annotation_is_complex(annotation: Any, metadata: list[Any]) -> bool:
    """Return True when *annotation* denotes a "complex" (structured) value.

    Complex annotations include models, mappings, sequences and dataclasses;
    `Json`-annotated and `Secret` annotations are explicitly treated as simple.
    """
    # If the model is a root model, the root annotation should be used to
    # evaluate the complexity.
    annotation = _resolve_type_alias(annotation)
    if annotation is not None and _lenient_issubclass(annotation, RootModel) and annotation is not RootModel:
        annotation = cast('type[RootModel[Any]]', annotation)
        root_annotation = annotation.model_fields['root'].annotation
        if root_annotation is not None:  # pragma: no branch
            annotation = root_annotation

    # Fields carrying Json metadata are treated as non-complex.
    if any(isinstance(md, Json) for md in metadata):  # type: ignore[misc]
        return False

    origin = get_origin(annotation)

    # Check if annotation is of the form Annotated[type, metadata].
    if typing_objects.is_annotated(origin):
        # Return result of recursive call on inner type.
        inner, *meta = get_args(annotation)
        return _annotation_is_complex(inner, meta)

    if origin is Secret:
        return False

    # Complex when either the annotation or its origin is a container/model,
    # or the origin carries a pydantic core schema hook.
    return (
        _annotation_is_complex_inner(annotation)
        or _annotation_is_complex_inner(origin)
        or hasattr(origin, '__pydantic_core_schema__')
        or hasattr(origin, '__get_pydantic_core_schema__')
    )
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def _get_field_metadata(field: FieldInfo) -> list[Any]:
    """Collect a field's metadata, including metadata nested inside an `Annotated` annotation.

    Always returns a new list; the field's own ``metadata`` list is never mutated.
    """
    annotation = _resolve_type_alias(field.annotation)
    # Copy first: the previous implementation used `metadata += meta`, which
    # extends `field.metadata` IN PLACE (list `+=` mutates), so repeated calls
    # kept appending the Annotated metadata to the shared FieldInfo.
    metadata = list(field.metadata)
    origin = get_origin(annotation)
    if typing_objects.is_annotated(origin):
        _, *annotated_meta = get_args(annotation)
        metadata.extend(annotated_meta)
    return metadata
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
def _annotation_is_complex_inner(annotation: type[Any] | None) -> bool:
    """Return True for container/model-like annotations; str and bytes are simple."""
    if _lenient_issubclass(annotation, (str, bytes)):
        return False

    complex_bases = (BaseModel, Mapping, Sequence, tuple, set, frozenset, deque)
    return _lenient_issubclass(annotation, complex_bases) or is_dataclass(annotation)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def _union_is_complex(annotation: type[Any] | None, metadata: list[Any]) -> bool:
    """Check if a union type contains any complex types."""
    for member in get_args(annotation):
        if _annotation_is_complex(member, metadata):
            return True
    return False
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
def _annotation_contains_types(
    annotation: type[Any] | None,
    types: tuple[Any, ...],
    is_include_origin: bool = True,
    is_strip_annotated: bool = False,
    is_instance: bool = False,
    collect: set[Any] | None = None,
) -> bool:
    """Check if a type annotation contains any of the specified types.

    Args:
        annotation: Annotation to inspect, recursing through its type args.
        types: Types matched against the annotation, its origin, and its args.
        is_include_origin: Also match the annotation's `get_origin` result.
        is_strip_annotated: Strip an `Annotated[...]` wrapper before matching.
        is_instance: Additionally match by `isinstance` checks against *types*.
        collect: When provided, matching annotations are added to this set and
            the search does NOT short-circuit; only the final membership check
            at the bottom still returns True directly.
    """
    if is_strip_annotated:
        annotation = _strip_annotated(annotation)
    if is_include_origin is True:
        origin = get_origin(annotation)
        if origin in types:
            if collect is None:
                return True
            collect.add(annotation)
        if is_instance and any(isinstance(origin, type_) for type_ in types):
            if collect is None:
                return True
            collect.add(annotation)
    # Recurse into type arguments; with `collect` set, recursion runs purely
    # for its accumulation side effect (`and collect is None` blocks the early return).
    for type_ in get_args(annotation):
        if (
            _annotation_contains_types(
                type_,
                types,
                is_include_origin=True,
                is_strip_annotated=is_strip_annotated,
                is_instance=is_instance,
                collect=collect,
            )
            and collect is None
        ):
            return True
    if is_instance and any(isinstance(annotation, type_) for type_ in types):
        if collect is None:
            return True
        collect.add(annotation)
    if annotation in types:
        if collect is not None:
            collect.add(annotation)
        return True
    return False
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
def _strip_annotated(annotation: Any) -> Any:
    """Return the wrapped type of an ``Annotated[...]`` annotation, else the annotation unchanged."""
    wrapped_in_annotated = typing_objects.is_annotated(get_origin(annotation))
    return annotation.__origin__ if wrapped_in_annotated else annotation
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
def _annotation_enum_val_to_name(annotation: type[Any] | None, value: Any) -> str | None:
    """Map an enum *value* to its member name, searching the annotation, its origin, and its args."""
    candidates = (annotation, get_origin(annotation), *get_args(annotation))
    for candidate in candidates:
        if not _lenient_issubclass(candidate, Enum):
            continue
        if value in candidate.__members__.values():
            return candidate(value).name
    return None
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def _annotation_enum_name_to_val(annotation: type[Any] | None, name: Any) -> Any:
    """Map an enum member *name* to the member itself; ``None`` when nothing matches."""
    for candidate in (annotation, get_origin(annotation), *get_args(annotation)):
        if _lenient_issubclass(candidate, Enum) and name in candidate.__members__:
            return candidate[name]
    return None
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
def _get_model_fields(model_cls: type[Any]) -> dict[str, Any]:
    """Return the field map of a pydantic model class or pydantic dataclass.

    Raises:
        SettingsError: if *model_cls* is neither a ``BaseModel`` subclass nor a
            pydantic dataclass.
    """
    is_fielded_dataclass = is_pydantic_dataclass(model_cls) and hasattr(model_cls, '__pydantic_fields__')
    if is_fielded_dataclass:
        return model_cls.__pydantic_fields__
    if is_model_class(model_cls):
        return model_cls.model_fields
    raise SettingsError(f'Error: {model_cls.__name__} is not subclass of BaseModel or pydantic.dataclasses.dataclass')
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
def _get_alias_names(
    field_name: str,
    field_info: Any,
    alias_path_args: dict[str, int | None] | None = None,
    case_sensitive: bool = True,
) -> tuple[tuple[str, ...], bool]:
    """Get alias names for a field, handling alias paths and case sensitivity.

    Args:
        field_name: The field's name; used as the alias when no explicit
            alias or validation alias is set.
        field_info: Pydantic ``FieldInfo``-like object exposing ``alias`` and
            ``validation_alias`` attributes.
        alias_path_args: Optional mapping populated with each ``AliasPath``'s
            first path segment mapped to its integer second segment (or ``None``).
        case_sensitive: When ``False``, all collected names are lower-cased.

    Returns:
        A tuple of (unique alias names in first-seen order, whether the aliases
        came exclusively from ``AliasPath`` entries).
    """
    from pydantic import AliasChoices, AliasPath

    alias_names: list[str] = []
    is_alias_path_only: bool = True
    if not any((field_info.alias, field_info.validation_alias)):
        # No explicit alias configured: fall back to the field name itself.
        alias_names += [field_name]
        is_alias_path_only = False
    else:
        new_alias_paths: list[AliasPath] = []
        for alias in (field_info.alias, field_info.validation_alias):
            if alias is None:
                continue
            elif isinstance(alias, str):
                alias_names.append(alias)
                is_alias_path_only = False
            elif isinstance(alias, AliasChoices):
                # AliasChoices can mix plain strings and AliasPath entries.
                for name in alias.choices:
                    if isinstance(name, str):
                        alias_names.append(name)
                        is_alias_path_only = False
                    else:
                        new_alias_paths.append(name)
            else:
                new_alias_paths.append(alias)
        for alias_path in new_alias_paths:
            # The first path segment is the environment/source lookup key.
            name = cast(str, alias_path.path[0])
            name = name.lower() if not case_sensitive else name
            if alias_path_args is not None:
                # Record the integer index (second segment) when present, else None.
                alias_path_args[name] = (
                    alias_path.path[1] if len(alias_path.path) > 1 and isinstance(alias_path.path[1], int) else None
                )
            if not alias_names and is_alias_path_only:
                alias_names.append(name)
    if not case_sensitive:
        alias_names = [alias_name.lower() for alias_name in alias_names]
    # dict.fromkeys deduplicates while preserving insertion order.
    return tuple(dict.fromkeys(alias_names)), is_alias_path_only
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
def _is_function(obj: Any) -> bool:
|
| 262 |
+
"""Check if an object is a function."""
|
| 263 |
+
from types import BuiltinFunctionType, FunctionType
|
| 264 |
+
|
| 265 |
+
return isinstance(obj, (FunctionType, BuiltinFunctionType))
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
# Names exported via `from <this module> import *`; these underscore-prefixed
# helpers are deliberately listed so sibling modules can rely on them.
__all__ = [
    '_annotation_contains_types',
    '_annotation_enum_name_to_val',
    '_annotation_enum_val_to_name',
    '_annotation_is_complex',
    '_annotation_is_complex_inner',
    '_get_alias_names',
    '_get_env_var_key',
    '_get_model_fields',
    '_is_function',
    '_parse_env_none_str',
    '_resolve_type_alias',
    '_strip_annotated',
    '_union_is_complex',
    'parse_env_vars',
]
|
source/pydantic_settings/utils.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import types
|
| 2 |
+
from pathlib import Path
|
| 3 |
+
from typing import Any, _Final, _GenericAlias, get_origin # type: ignore [attr-defined]
|
| 4 |
+
|
| 5 |
+
# Maps a `Path` predicate method to a human-readable label for that kind of
# filesystem object. `path_type_label` checks these in insertion order and
# returns the label of the first predicate that holds.
_PATH_TYPE_LABELS = {
    Path.is_dir: 'directory',
    Path.is_file: 'file',
    Path.is_mount: 'mount point',
    Path.is_symlink: 'symlink',
    Path.is_block_device: 'block device',
    Path.is_char_device: 'char device',
    Path.is_fifo: 'FIFO',
    Path.is_socket: 'socket',
}
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def path_type_label(p: Path) -> str:
    """
    Find out what sort of thing a path is.
    """
    assert p.exists(), 'path does not exist'
    matched = (label for check, label in _PATH_TYPE_LABELS.items() if check(p))
    # Fallback is unreachable for ordinary paths (pragma kept from original).
    return next(matched, 'unknown')  # pragma: no cover
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
# TODO remove and replace usage by `isinstance(cls, type) and issubclass(cls, class_or_tuple)`
|
| 30 |
+
# once we drop support for Python 3.10.
|
| 31 |
+
def _lenient_issubclass(cls: Any, class_or_tuple: Any) -> bool: # pragma: no cover
|
| 32 |
+
try:
|
| 33 |
+
return isinstance(cls, type) and issubclass(cls, class_or_tuple)
|
| 34 |
+
except TypeError:
|
| 35 |
+
if get_origin(cls) is not None:
|
| 36 |
+
# Up until Python 3.10, isinstance(<generic_alias>, type) is True
|
| 37 |
+
# (e.g. list[int])
|
| 38 |
+
return False
|
| 39 |
+
raise
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
# Runtime classes representing parameterized generics/unions (e.g. list[int], int | str).
_WithArgsTypes = (_GenericAlias, types.GenericAlias, types.UnionType)
# NOTE(review): `_Final` is a private base class of typing constructs — it may
# change between Python versions; confirm before relying on it elsewhere.
_typing_base: Any = _Final  # pyright: ignore[reportAttributeAccessIssue]
|
source/pydantic_settings/version.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# Package version string; matches the installed distribution metadata (2.13.1).
VERSION = '2.13.1'
|
source/pygments-2.19.2.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
source/pygments-2.19.2.dist-info/METADATA
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.4
|
| 2 |
+
Name: Pygments
|
| 3 |
+
Version: 2.19.2
|
| 4 |
+
Summary: Pygments is a syntax highlighting package written in Python.
|
| 5 |
+
Project-URL: Homepage, https://pygments.org
|
| 6 |
+
Project-URL: Documentation, https://pygments.org/docs
|
| 7 |
+
Project-URL: Source, https://github.com/pygments/pygments
|
| 8 |
+
Project-URL: Bug Tracker, https://github.com/pygments/pygments/issues
|
| 9 |
+
Project-URL: Changelog, https://github.com/pygments/pygments/blob/master/CHANGES
|
| 10 |
+
Author-email: Georg Brandl <georg@python.org>
|
| 11 |
+
Maintainer: Matthäus G. Chajdas
|
| 12 |
+
Maintainer-email: Georg Brandl <georg@python.org>, Jean Abou Samra <jean@abou-samra.fr>
|
| 13 |
+
License: BSD-2-Clause
|
| 14 |
+
License-File: AUTHORS
|
| 15 |
+
License-File: LICENSE
|
| 16 |
+
Keywords: syntax highlighting
|
| 17 |
+
Classifier: Development Status :: 6 - Mature
|
| 18 |
+
Classifier: Intended Audience :: Developers
|
| 19 |
+
Classifier: Intended Audience :: End Users/Desktop
|
| 20 |
+
Classifier: Intended Audience :: System Administrators
|
| 21 |
+
Classifier: License :: OSI Approved :: BSD License
|
| 22 |
+
Classifier: Operating System :: OS Independent
|
| 23 |
+
Classifier: Programming Language :: Python
|
| 24 |
+
Classifier: Programming Language :: Python :: 3
|
| 25 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 26 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 27 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 28 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 29 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 30 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 31 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 32 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 33 |
+
Classifier: Topic :: Text Processing :: Filters
|
| 34 |
+
Classifier: Topic :: Utilities
|
| 35 |
+
Requires-Python: >=3.8
|
| 36 |
+
Provides-Extra: plugins
|
| 37 |
+
Provides-Extra: windows-terminal
|
| 38 |
+
Requires-Dist: colorama>=0.4.6; extra == 'windows-terminal'
|
| 39 |
+
Description-Content-Type: text/x-rst
|
| 40 |
+
|
| 41 |
+
Pygments
|
| 42 |
+
~~~~~~~~
|
| 43 |
+
|
| 44 |
+
Pygments is a syntax highlighting package written in Python.
|
| 45 |
+
|
| 46 |
+
It is a generic syntax highlighter suitable for use in code hosting, forums,
|
| 47 |
+
wikis or other applications that need to prettify source code. Highlights
|
| 48 |
+
are:
|
| 49 |
+
|
| 50 |
+
* a wide range of over 500 languages and other text formats is supported
|
| 51 |
+
* special attention is paid to details, increasing quality by a fair amount
|
| 52 |
+
* support for new languages and formats is added easily
|
| 53 |
+
* a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
|
| 54 |
+
formats that PIL supports and ANSI sequences
|
| 55 |
+
* it is usable as a command-line tool and as a library
|
| 56 |
+
|
| 57 |
+
Copyright 2006-2025 by the Pygments team, see ``AUTHORS``.
|
| 58 |
+
Licensed under the BSD, see ``LICENSE`` for details.
|
source/pygments-2.19.2.dist-info/RECORD
ADDED
|
@@ -0,0 +1,684 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../bin/pygmentize,sha256=0Ek1aUsGndfaDY0X6CNhSbklx2Y_C_tUQCb8kdWf4BI,215
|
| 2 |
+
pygments-2.19.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 3 |
+
pygments-2.19.2.dist-info/METADATA,sha256=euEA1n1nAGxkeYA92DX89HqbWfrHlEQeqOZqp_WYTYI,2512
|
| 4 |
+
pygments-2.19.2.dist-info/RECORD,,
|
| 5 |
+
pygments-2.19.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
| 6 |
+
pygments-2.19.2.dist-info/entry_points.txt,sha256=uUXw-XhMKBEX4pWcCtpuTTnPhL3h7OEE2jWi51VQsa8,53
|
| 7 |
+
pygments-2.19.2.dist-info/licenses/AUTHORS,sha256=BmDjGKbyFYAq3Icxq4XQxl_yfPzKP10oWX8wZHYZW9k,10824
|
| 8 |
+
pygments-2.19.2.dist-info/licenses/LICENSE,sha256=qdZvHVJt8C4p3Oc0NtNOVuhjL0bCdbvf_HBWnogvnxc,1331
|
| 9 |
+
pygments/__init__.py,sha256=_3UT86TGpHuW8FekdZ8uLidEZH1NhmcLiOy2KKNPCt4,2959
|
| 10 |
+
pygments/__main__.py,sha256=p8AJyoyCOMYGvzWHdnq0_A9qaaVqaj02nIu3xhJp1_4,348
|
| 11 |
+
pygments/__pycache__/__init__.cpython-312.pyc,,
|
| 12 |
+
pygments/__pycache__/__main__.cpython-312.pyc,,
|
| 13 |
+
pygments/__pycache__/cmdline.cpython-312.pyc,,
|
| 14 |
+
pygments/__pycache__/console.cpython-312.pyc,,
|
| 15 |
+
pygments/__pycache__/filter.cpython-312.pyc,,
|
| 16 |
+
pygments/__pycache__/formatter.cpython-312.pyc,,
|
| 17 |
+
pygments/__pycache__/lexer.cpython-312.pyc,,
|
| 18 |
+
pygments/__pycache__/modeline.cpython-312.pyc,,
|
| 19 |
+
pygments/__pycache__/plugin.cpython-312.pyc,,
|
| 20 |
+
pygments/__pycache__/regexopt.cpython-312.pyc,,
|
| 21 |
+
pygments/__pycache__/scanner.cpython-312.pyc,,
|
| 22 |
+
pygments/__pycache__/sphinxext.cpython-312.pyc,,
|
| 23 |
+
pygments/__pycache__/style.cpython-312.pyc,,
|
| 24 |
+
pygments/__pycache__/token.cpython-312.pyc,,
|
| 25 |
+
pygments/__pycache__/unistring.cpython-312.pyc,,
|
| 26 |
+
pygments/__pycache__/util.cpython-312.pyc,,
|
| 27 |
+
pygments/cmdline.py,sha256=4pL9Kpn2PUEKPobgrsQgg-vCx2NjsrapKzQ6LxQR7Q0,23536
|
| 28 |
+
pygments/console.py,sha256=AagDWqwea2yBWf10KC9ptBgMpMjxKp8yABAmh-NQOVk,1718
|
| 29 |
+
pygments/filter.py,sha256=YLtpTnZiu07nY3oK9nfR6E9Y1FBHhP5PX8gvkJWcfag,1910
|
| 30 |
+
pygments/filters/__init__.py,sha256=B00KqPCQh5E0XhzaDK74Qa1E4fDSTlD6b0Pvr1v-vEQ,40344
|
| 31 |
+
pygments/filters/__pycache__/__init__.cpython-312.pyc,,
|
| 32 |
+
pygments/formatter.py,sha256=H_4J-moKkKfRWUOW9J0u7hhw6n1LiO-2Xu1q2B0sE5w,4366
|
| 33 |
+
pygments/formatters/__init__.py,sha256=7OuvmoYLyoPzoOQV_brHG8GSKYB_wjFSkAQng6x2y9g,5349
|
| 34 |
+
pygments/formatters/__pycache__/__init__.cpython-312.pyc,,
|
| 35 |
+
pygments/formatters/__pycache__/_mapping.cpython-312.pyc,,
|
| 36 |
+
pygments/formatters/__pycache__/bbcode.cpython-312.pyc,,
|
| 37 |
+
pygments/formatters/__pycache__/groff.cpython-312.pyc,,
|
| 38 |
+
pygments/formatters/__pycache__/html.cpython-312.pyc,,
|
| 39 |
+
pygments/formatters/__pycache__/img.cpython-312.pyc,,
|
| 40 |
+
pygments/formatters/__pycache__/irc.cpython-312.pyc,,
|
| 41 |
+
pygments/formatters/__pycache__/latex.cpython-312.pyc,,
|
| 42 |
+
pygments/formatters/__pycache__/other.cpython-312.pyc,,
|
| 43 |
+
pygments/formatters/__pycache__/pangomarkup.cpython-312.pyc,,
|
| 44 |
+
pygments/formatters/__pycache__/rtf.cpython-312.pyc,,
|
| 45 |
+
pygments/formatters/__pycache__/svg.cpython-312.pyc,,
|
| 46 |
+
pygments/formatters/__pycache__/terminal.cpython-312.pyc,,
|
| 47 |
+
pygments/formatters/__pycache__/terminal256.cpython-312.pyc,,
|
| 48 |
+
pygments/formatters/_mapping.py,sha256=1Cw37FuQlNacnxRKmtlPX4nyLoX9_ttko5ZwscNUZZ4,4176
|
| 49 |
+
pygments/formatters/bbcode.py,sha256=s0Ka35OKuIchoSgEAGf6rj0rl2a9ym9L31JVNSRbZFQ,3296
|
| 50 |
+
pygments/formatters/groff.py,sha256=pLcIHj4jJS_lRAVFnyJODKDu1Xlyl9_AEIdOtbl3DT0,5082
|
| 51 |
+
pygments/formatters/html.py,sha256=FrHJ69FUliEyPY0zTfab0C1gPf7LXsKgeRlhwkniqIs,35953
|
| 52 |
+
pygments/formatters/img.py,sha256=aRpFo8mBmWTL3sBUjRCWkeS3rc6FZrSFC4EksDrl53g,23301
|
| 53 |
+
pygments/formatters/irc.py,sha256=R0Js0TYWySlI2yE9sW6tN4d4X-x3k9ZmudsijGPnLmU,4945
|
| 54 |
+
pygments/formatters/latex.py,sha256=BRYtbLeW_YD1kwhhnFInhJIKylurnri8CF1lP069KWE,19258
|
| 55 |
+
pygments/formatters/other.py,sha256=8pYW27sU_7XicLUqOEt2yWSO0h1IEUM3TIv34KODLwo,4986
|
| 56 |
+
pygments/formatters/pangomarkup.py,sha256=pcFvEC7K1Me0EjGeOZth4oCnEY85bfqc77XzZASEPpY,2206
|
| 57 |
+
pygments/formatters/rtf.py,sha256=kcKMCxTXu-2-hpgEftlGJRm7Ss-yA_Sy8OsHH_qzykA,11921
|
| 58 |
+
pygments/formatters/svg.py,sha256=R6A2ME6JsMQWFiyn8wcKwFUOD6vsu-HLwiIztLu-77E,7138
|
| 59 |
+
pygments/formatters/terminal.py,sha256=J_F_dFXwR9LHWvatIDnwqRYJyjVmSo1Zx8K_XDh6SyM,4626
|
| 60 |
+
pygments/formatters/terminal256.py,sha256=7GQFLE5cfmeu53CAzANO74-kBk2BFkXfn5phmZjYkhM,11717
|
| 61 |
+
pygments/lexer.py,sha256=ib-F_0GxHkwGpb6vWP0DeLMLc7EYgjo3hWFKN5IgOq0,35109
|
| 62 |
+
pygments/lexers/__init__.py,sha256=6YhzxGKlWk38P6JpIJUQ1rVvV0DEZjEmdYsdMQ58hSk,12067
|
| 63 |
+
pygments/lexers/__pycache__/__init__.cpython-312.pyc,,
|
| 64 |
+
pygments/lexers/__pycache__/_ada_builtins.cpython-312.pyc,,
|
| 65 |
+
pygments/lexers/__pycache__/_asy_builtins.cpython-312.pyc,,
|
| 66 |
+
pygments/lexers/__pycache__/_cl_builtins.cpython-312.pyc,,
|
| 67 |
+
pygments/lexers/__pycache__/_cocoa_builtins.cpython-312.pyc,,
|
| 68 |
+
pygments/lexers/__pycache__/_csound_builtins.cpython-312.pyc,,
|
| 69 |
+
pygments/lexers/__pycache__/_css_builtins.cpython-312.pyc,,
|
| 70 |
+
pygments/lexers/__pycache__/_googlesql_builtins.cpython-312.pyc,,
|
| 71 |
+
pygments/lexers/__pycache__/_julia_builtins.cpython-312.pyc,,
|
| 72 |
+
pygments/lexers/__pycache__/_lasso_builtins.cpython-312.pyc,,
|
| 73 |
+
pygments/lexers/__pycache__/_lilypond_builtins.cpython-312.pyc,,
|
| 74 |
+
pygments/lexers/__pycache__/_lua_builtins.cpython-312.pyc,,
|
| 75 |
+
pygments/lexers/__pycache__/_luau_builtins.cpython-312.pyc,,
|
| 76 |
+
pygments/lexers/__pycache__/_mapping.cpython-312.pyc,,
|
| 77 |
+
pygments/lexers/__pycache__/_mql_builtins.cpython-312.pyc,,
|
| 78 |
+
pygments/lexers/__pycache__/_mysql_builtins.cpython-312.pyc,,
|
| 79 |
+
pygments/lexers/__pycache__/_openedge_builtins.cpython-312.pyc,,
|
| 80 |
+
pygments/lexers/__pycache__/_php_builtins.cpython-312.pyc,,
|
| 81 |
+
pygments/lexers/__pycache__/_postgres_builtins.cpython-312.pyc,,
|
| 82 |
+
pygments/lexers/__pycache__/_qlik_builtins.cpython-312.pyc,,
|
| 83 |
+
pygments/lexers/__pycache__/_scheme_builtins.cpython-312.pyc,,
|
| 84 |
+
pygments/lexers/__pycache__/_scilab_builtins.cpython-312.pyc,,
|
| 85 |
+
pygments/lexers/__pycache__/_sourcemod_builtins.cpython-312.pyc,,
|
| 86 |
+
pygments/lexers/__pycache__/_sql_builtins.cpython-312.pyc,,
|
| 87 |
+
pygments/lexers/__pycache__/_stan_builtins.cpython-312.pyc,,
|
| 88 |
+
pygments/lexers/__pycache__/_stata_builtins.cpython-312.pyc,,
|
| 89 |
+
pygments/lexers/__pycache__/_tsql_builtins.cpython-312.pyc,,
|
| 90 |
+
pygments/lexers/__pycache__/_usd_builtins.cpython-312.pyc,,
|
| 91 |
+
pygments/lexers/__pycache__/_vbscript_builtins.cpython-312.pyc,,
|
| 92 |
+
pygments/lexers/__pycache__/_vim_builtins.cpython-312.pyc,,
|
| 93 |
+
pygments/lexers/__pycache__/actionscript.cpython-312.pyc,,
|
| 94 |
+
pygments/lexers/__pycache__/ada.cpython-312.pyc,,
|
| 95 |
+
pygments/lexers/__pycache__/agile.cpython-312.pyc,,
|
| 96 |
+
pygments/lexers/__pycache__/algebra.cpython-312.pyc,,
|
| 97 |
+
pygments/lexers/__pycache__/ambient.cpython-312.pyc,,
|
| 98 |
+
pygments/lexers/__pycache__/amdgpu.cpython-312.pyc,,
|
| 99 |
+
pygments/lexers/__pycache__/ampl.cpython-312.pyc,,
|
| 100 |
+
pygments/lexers/__pycache__/apdlexer.cpython-312.pyc,,
|
| 101 |
+
pygments/lexers/__pycache__/apl.cpython-312.pyc,,
|
| 102 |
+
pygments/lexers/__pycache__/archetype.cpython-312.pyc,,
|
| 103 |
+
pygments/lexers/__pycache__/arrow.cpython-312.pyc,,
|
| 104 |
+
pygments/lexers/__pycache__/arturo.cpython-312.pyc,,
|
| 105 |
+
pygments/lexers/__pycache__/asc.cpython-312.pyc,,
|
| 106 |
+
pygments/lexers/__pycache__/asm.cpython-312.pyc,,
|
| 107 |
+
pygments/lexers/__pycache__/asn1.cpython-312.pyc,,
|
| 108 |
+
pygments/lexers/__pycache__/automation.cpython-312.pyc,,
|
| 109 |
+
pygments/lexers/__pycache__/bare.cpython-312.pyc,,
|
| 110 |
+
pygments/lexers/__pycache__/basic.cpython-312.pyc,,
|
| 111 |
+
pygments/lexers/__pycache__/bdd.cpython-312.pyc,,
|
| 112 |
+
pygments/lexers/__pycache__/berry.cpython-312.pyc,,
|
| 113 |
+
pygments/lexers/__pycache__/bibtex.cpython-312.pyc,,
|
| 114 |
+
pygments/lexers/__pycache__/blueprint.cpython-312.pyc,,
|
| 115 |
+
pygments/lexers/__pycache__/boa.cpython-312.pyc,,
|
| 116 |
+
pygments/lexers/__pycache__/bqn.cpython-312.pyc,,
|
| 117 |
+
pygments/lexers/__pycache__/business.cpython-312.pyc,,
|
| 118 |
+
pygments/lexers/__pycache__/c_cpp.cpython-312.pyc,,
|
| 119 |
+
pygments/lexers/__pycache__/c_like.cpython-312.pyc,,
|
| 120 |
+
pygments/lexers/__pycache__/capnproto.cpython-312.pyc,,
|
| 121 |
+
pygments/lexers/__pycache__/carbon.cpython-312.pyc,,
|
| 122 |
+
pygments/lexers/__pycache__/cddl.cpython-312.pyc,,
|
| 123 |
+
pygments/lexers/__pycache__/chapel.cpython-312.pyc,,
|
| 124 |
+
pygments/lexers/__pycache__/clean.cpython-312.pyc,,
|
| 125 |
+
pygments/lexers/__pycache__/codeql.cpython-312.pyc,,
|
| 126 |
+
pygments/lexers/__pycache__/comal.cpython-312.pyc,,
|
| 127 |
+
pygments/lexers/__pycache__/compiled.cpython-312.pyc,,
|
| 128 |
+
pygments/lexers/__pycache__/configs.cpython-312.pyc,,
|
| 129 |
+
pygments/lexers/__pycache__/console.cpython-312.pyc,,
|
| 130 |
+
pygments/lexers/__pycache__/cplint.cpython-312.pyc,,
|
| 131 |
+
pygments/lexers/__pycache__/crystal.cpython-312.pyc,,
|
| 132 |
+
pygments/lexers/__pycache__/csound.cpython-312.pyc,,
|
| 133 |
+
pygments/lexers/__pycache__/css.cpython-312.pyc,,
|
| 134 |
+
pygments/lexers/__pycache__/d.cpython-312.pyc,,
|
| 135 |
+
pygments/lexers/__pycache__/dalvik.cpython-312.pyc,,
|
| 136 |
+
pygments/lexers/__pycache__/data.cpython-312.pyc,,
|
| 137 |
+
pygments/lexers/__pycache__/dax.cpython-312.pyc,,
|
| 138 |
+
pygments/lexers/__pycache__/devicetree.cpython-312.pyc,,
|
| 139 |
+
pygments/lexers/__pycache__/diff.cpython-312.pyc,,
|
| 140 |
+
pygments/lexers/__pycache__/dns.cpython-312.pyc,,
|
| 141 |
+
pygments/lexers/__pycache__/dotnet.cpython-312.pyc,,
|
| 142 |
+
pygments/lexers/__pycache__/dsls.cpython-312.pyc,,
|
| 143 |
+
pygments/lexers/__pycache__/dylan.cpython-312.pyc,,
|
| 144 |
+
pygments/lexers/__pycache__/ecl.cpython-312.pyc,,
|
| 145 |
+
pygments/lexers/__pycache__/eiffel.cpython-312.pyc,,
|
| 146 |
+
pygments/lexers/__pycache__/elm.cpython-312.pyc,,
|
| 147 |
+
pygments/lexers/__pycache__/elpi.cpython-312.pyc,,
|
| 148 |
+
pygments/lexers/__pycache__/email.cpython-312.pyc,,
|
| 149 |
+
pygments/lexers/__pycache__/erlang.cpython-312.pyc,,
|
| 150 |
+
pygments/lexers/__pycache__/esoteric.cpython-312.pyc,,
|
| 151 |
+
pygments/lexers/__pycache__/ezhil.cpython-312.pyc,,
|
| 152 |
+
pygments/lexers/__pycache__/factor.cpython-312.pyc,,
|
| 153 |
+
pygments/lexers/__pycache__/fantom.cpython-312.pyc,,
|
| 154 |
+
pygments/lexers/__pycache__/felix.cpython-312.pyc,,
|
| 155 |
+
pygments/lexers/__pycache__/fift.cpython-312.pyc,,
|
| 156 |
+
pygments/lexers/__pycache__/floscript.cpython-312.pyc,,
|
| 157 |
+
pygments/lexers/__pycache__/forth.cpython-312.pyc,,
|
| 158 |
+
pygments/lexers/__pycache__/fortran.cpython-312.pyc,,
|
| 159 |
+
pygments/lexers/__pycache__/foxpro.cpython-312.pyc,,
|
| 160 |
+
pygments/lexers/__pycache__/freefem.cpython-312.pyc,,
|
| 161 |
+
pygments/lexers/__pycache__/func.cpython-312.pyc,,
|
| 162 |
+
pygments/lexers/__pycache__/functional.cpython-312.pyc,,
|
| 163 |
+
pygments/lexers/__pycache__/futhark.cpython-312.pyc,,
|
| 164 |
+
pygments/lexers/__pycache__/gcodelexer.cpython-312.pyc,,
|
| 165 |
+
pygments/lexers/__pycache__/gdscript.cpython-312.pyc,,
|
| 166 |
+
pygments/lexers/__pycache__/gleam.cpython-312.pyc,,
|
| 167 |
+
pygments/lexers/__pycache__/go.cpython-312.pyc,,
|
| 168 |
+
pygments/lexers/__pycache__/grammar_notation.cpython-312.pyc,,
|
| 169 |
+
pygments/lexers/__pycache__/graph.cpython-312.pyc,,
|
| 170 |
+
pygments/lexers/__pycache__/graphics.cpython-312.pyc,,
|
| 171 |
+
pygments/lexers/__pycache__/graphql.cpython-312.pyc,,
|
| 172 |
+
pygments/lexers/__pycache__/graphviz.cpython-312.pyc,,
|
| 173 |
+
pygments/lexers/__pycache__/gsql.cpython-312.pyc,,
|
| 174 |
+
pygments/lexers/__pycache__/hare.cpython-312.pyc,,
|
| 175 |
+
pygments/lexers/__pycache__/haskell.cpython-312.pyc,,
|
| 176 |
+
pygments/lexers/__pycache__/haxe.cpython-312.pyc,,
|
| 177 |
+
pygments/lexers/__pycache__/hdl.cpython-312.pyc,,
|
| 178 |
+
pygments/lexers/__pycache__/hexdump.cpython-312.pyc,,
|
| 179 |
+
pygments/lexers/__pycache__/html.cpython-312.pyc,,
|
| 180 |
+
pygments/lexers/__pycache__/idl.cpython-312.pyc,,
|
| 181 |
+
pygments/lexers/__pycache__/igor.cpython-312.pyc,,
|
| 182 |
+
pygments/lexers/__pycache__/inferno.cpython-312.pyc,,
|
| 183 |
+
pygments/lexers/__pycache__/installers.cpython-312.pyc,,
|
| 184 |
+
pygments/lexers/__pycache__/int_fiction.cpython-312.pyc,,
|
| 185 |
+
pygments/lexers/__pycache__/iolang.cpython-312.pyc,,
|
| 186 |
+
pygments/lexers/__pycache__/j.cpython-312.pyc,,
|
| 187 |
+
pygments/lexers/__pycache__/javascript.cpython-312.pyc,,
|
| 188 |
+
pygments/lexers/__pycache__/jmespath.cpython-312.pyc,,
|
| 189 |
+
pygments/lexers/__pycache__/jslt.cpython-312.pyc,,
|
| 190 |
+
pygments/lexers/__pycache__/json5.cpython-312.pyc,,
|
| 191 |
+
pygments/lexers/__pycache__/jsonnet.cpython-312.pyc,,
|
| 192 |
+
pygments/lexers/__pycache__/jsx.cpython-312.pyc,,
|
| 193 |
+
pygments/lexers/__pycache__/julia.cpython-312.pyc,,
|
| 194 |
+
pygments/lexers/__pycache__/jvm.cpython-312.pyc,,
|
| 195 |
+
pygments/lexers/__pycache__/kuin.cpython-312.pyc,,
|
| 196 |
+
pygments/lexers/__pycache__/kusto.cpython-312.pyc,,
|
| 197 |
+
pygments/lexers/__pycache__/ldap.cpython-312.pyc,,
|
| 198 |
+
pygments/lexers/__pycache__/lean.cpython-312.pyc,,
|
| 199 |
+
pygments/lexers/__pycache__/lilypond.cpython-312.pyc,,
|
| 200 |
+
pygments/lexers/__pycache__/lisp.cpython-312.pyc,,
|
| 201 |
+
pygments/lexers/__pycache__/macaulay2.cpython-312.pyc,,
|
| 202 |
+
pygments/lexers/__pycache__/make.cpython-312.pyc,,
|
| 203 |
+
pygments/lexers/__pycache__/maple.cpython-312.pyc,,
|
| 204 |
+
pygments/lexers/__pycache__/markup.cpython-312.pyc,,
|
| 205 |
+
pygments/lexers/__pycache__/math.cpython-312.pyc,,
|
| 206 |
+
pygments/lexers/__pycache__/matlab.cpython-312.pyc,,
|
| 207 |
+
pygments/lexers/__pycache__/maxima.cpython-312.pyc,,
|
| 208 |
+
pygments/lexers/__pycache__/meson.cpython-312.pyc,,
|
| 209 |
+
pygments/lexers/__pycache__/mime.cpython-312.pyc,,
|
| 210 |
+
pygments/lexers/__pycache__/minecraft.cpython-312.pyc,,
|
| 211 |
+
pygments/lexers/__pycache__/mips.cpython-312.pyc,,
|
| 212 |
+
pygments/lexers/__pycache__/ml.cpython-312.pyc,,
|
| 213 |
+
pygments/lexers/__pycache__/modeling.cpython-312.pyc,,
|
| 214 |
+
pygments/lexers/__pycache__/modula2.cpython-312.pyc,,
|
| 215 |
+
pygments/lexers/__pycache__/mojo.cpython-312.pyc,,
|
| 216 |
+
pygments/lexers/__pycache__/monte.cpython-312.pyc,,
|
| 217 |
+
pygments/lexers/__pycache__/mosel.cpython-312.pyc,,
|
| 218 |
+
pygments/lexers/__pycache__/ncl.cpython-312.pyc,,
|
| 219 |
+
pygments/lexers/__pycache__/nimrod.cpython-312.pyc,,
|
| 220 |
+
pygments/lexers/__pycache__/nit.cpython-312.pyc,,
|
| 221 |
+
pygments/lexers/__pycache__/nix.cpython-312.pyc,,
|
| 222 |
+
pygments/lexers/__pycache__/numbair.cpython-312.pyc,,
|
| 223 |
+
pygments/lexers/__pycache__/oberon.cpython-312.pyc,,
|
| 224 |
+
pygments/lexers/__pycache__/objective.cpython-312.pyc,,
|
| 225 |
+
pygments/lexers/__pycache__/ooc.cpython-312.pyc,,
|
| 226 |
+
pygments/lexers/__pycache__/openscad.cpython-312.pyc,,
|
| 227 |
+
pygments/lexers/__pycache__/other.cpython-312.pyc,,
|
| 228 |
+
pygments/lexers/__pycache__/parasail.cpython-312.pyc,,
|
| 229 |
+
pygments/lexers/__pycache__/parsers.cpython-312.pyc,,
|
| 230 |
+
pygments/lexers/__pycache__/pascal.cpython-312.pyc,,
|
| 231 |
+
pygments/lexers/__pycache__/pawn.cpython-312.pyc,,
|
| 232 |
+
pygments/lexers/__pycache__/pddl.cpython-312.pyc,,
|
| 233 |
+
pygments/lexers/__pycache__/perl.cpython-312.pyc,,
|
| 234 |
+
pygments/lexers/__pycache__/phix.cpython-312.pyc,,
|
| 235 |
+
pygments/lexers/__pycache__/php.cpython-312.pyc,,
|
| 236 |
+
pygments/lexers/__pycache__/pointless.cpython-312.pyc,,
|
| 237 |
+
pygments/lexers/__pycache__/pony.cpython-312.pyc,,
|
| 238 |
+
pygments/lexers/__pycache__/praat.cpython-312.pyc,,
|
| 239 |
+
pygments/lexers/__pycache__/procfile.cpython-312.pyc,,
|
| 240 |
+
pygments/lexers/__pycache__/prolog.cpython-312.pyc,,
|
| 241 |
+
pygments/lexers/__pycache__/promql.cpython-312.pyc,,
|
| 242 |
+
pygments/lexers/__pycache__/prql.cpython-312.pyc,,
|
| 243 |
+
pygments/lexers/__pycache__/ptx.cpython-312.pyc,,
|
| 244 |
+
pygments/lexers/__pycache__/python.cpython-312.pyc,,
|
| 245 |
+
pygments/lexers/__pycache__/q.cpython-312.pyc,,
|
| 246 |
+
pygments/lexers/__pycache__/qlik.cpython-312.pyc,,
|
| 247 |
+
pygments/lexers/__pycache__/qvt.cpython-312.pyc,,
|
| 248 |
+
pygments/lexers/__pycache__/r.cpython-312.pyc,,
|
| 249 |
+
pygments/lexers/__pycache__/rdf.cpython-312.pyc,,
|
| 250 |
+
pygments/lexers/__pycache__/rebol.cpython-312.pyc,,
|
| 251 |
+
pygments/lexers/__pycache__/rego.cpython-312.pyc,,
|
| 252 |
+
pygments/lexers/__pycache__/resource.cpython-312.pyc,,
|
| 253 |
+
pygments/lexers/__pycache__/ride.cpython-312.pyc,,
|
| 254 |
+
pygments/lexers/__pycache__/rita.cpython-312.pyc,,
|
| 255 |
+
pygments/lexers/__pycache__/rnc.cpython-312.pyc,,
|
| 256 |
+
pygments/lexers/__pycache__/roboconf.cpython-312.pyc,,
|
| 257 |
+
pygments/lexers/__pycache__/robotframework.cpython-312.pyc,,
|
| 258 |
+
pygments/lexers/__pycache__/ruby.cpython-312.pyc,,
|
| 259 |
+
pygments/lexers/__pycache__/rust.cpython-312.pyc,,
|
| 260 |
+
pygments/lexers/__pycache__/sas.cpython-312.pyc,,
|
| 261 |
+
pygments/lexers/__pycache__/savi.cpython-312.pyc,,
|
| 262 |
+
pygments/lexers/__pycache__/scdoc.cpython-312.pyc,,
|
| 263 |
+
pygments/lexers/__pycache__/scripting.cpython-312.pyc,,
|
| 264 |
+
pygments/lexers/__pycache__/sgf.cpython-312.pyc,,
|
| 265 |
+
pygments/lexers/__pycache__/shell.cpython-312.pyc,,
|
| 266 |
+
pygments/lexers/__pycache__/sieve.cpython-312.pyc,,
|
| 267 |
+
pygments/lexers/__pycache__/slash.cpython-312.pyc,,
|
| 268 |
+
pygments/lexers/__pycache__/smalltalk.cpython-312.pyc,,
|
| 269 |
+
pygments/lexers/__pycache__/smithy.cpython-312.pyc,,
|
| 270 |
+
pygments/lexers/__pycache__/smv.cpython-312.pyc,,
|
| 271 |
+
pygments/lexers/__pycache__/snobol.cpython-312.pyc,,
|
| 272 |
+
pygments/lexers/__pycache__/solidity.cpython-312.pyc,,
|
| 273 |
+
pygments/lexers/__pycache__/soong.cpython-312.pyc,,
|
| 274 |
+
pygments/lexers/__pycache__/sophia.cpython-312.pyc,,
|
| 275 |
+
pygments/lexers/__pycache__/special.cpython-312.pyc,,
|
| 276 |
+
pygments/lexers/__pycache__/spice.cpython-312.pyc,,
|
| 277 |
+
pygments/lexers/__pycache__/sql.cpython-312.pyc,,
|
| 278 |
+
pygments/lexers/__pycache__/srcinfo.cpython-312.pyc,,
|
| 279 |
+
pygments/lexers/__pycache__/stata.cpython-312.pyc,,
|
| 280 |
+
pygments/lexers/__pycache__/supercollider.cpython-312.pyc,,
|
| 281 |
+
pygments/lexers/__pycache__/tablegen.cpython-312.pyc,,
|
| 282 |
+
pygments/lexers/__pycache__/tact.cpython-312.pyc,,
|
| 283 |
+
pygments/lexers/__pycache__/tal.cpython-312.pyc,,
|
| 284 |
+
pygments/lexers/__pycache__/tcl.cpython-312.pyc,,
|
| 285 |
+
pygments/lexers/__pycache__/teal.cpython-312.pyc,,
|
| 286 |
+
pygments/lexers/__pycache__/templates.cpython-312.pyc,,
|
| 287 |
+
pygments/lexers/__pycache__/teraterm.cpython-312.pyc,,
|
| 288 |
+
pygments/lexers/__pycache__/testing.cpython-312.pyc,,
|
| 289 |
+
pygments/lexers/__pycache__/text.cpython-312.pyc,,
|
| 290 |
+
pygments/lexers/__pycache__/textedit.cpython-312.pyc,,
|
| 291 |
+
pygments/lexers/__pycache__/textfmts.cpython-312.pyc,,
|
| 292 |
+
pygments/lexers/__pycache__/theorem.cpython-312.pyc,,
|
| 293 |
+
pygments/lexers/__pycache__/thingsdb.cpython-312.pyc,,
|
| 294 |
+
pygments/lexers/__pycache__/tlb.cpython-312.pyc,,
|
| 295 |
+
pygments/lexers/__pycache__/tls.cpython-312.pyc,,
|
| 296 |
+
pygments/lexers/__pycache__/tnt.cpython-312.pyc,,
|
| 297 |
+
pygments/lexers/__pycache__/trafficscript.cpython-312.pyc,,
|
| 298 |
+
pygments/lexers/__pycache__/typoscript.cpython-312.pyc,,
|
| 299 |
+
pygments/lexers/__pycache__/typst.cpython-312.pyc,,
|
| 300 |
+
pygments/lexers/__pycache__/ul4.cpython-312.pyc,,
|
| 301 |
+
pygments/lexers/__pycache__/unicon.cpython-312.pyc,,
|
| 302 |
+
pygments/lexers/__pycache__/urbi.cpython-312.pyc,,
|
| 303 |
+
pygments/lexers/__pycache__/usd.cpython-312.pyc,,
|
| 304 |
+
pygments/lexers/__pycache__/varnish.cpython-312.pyc,,
|
| 305 |
+
pygments/lexers/__pycache__/verification.cpython-312.pyc,,
|
| 306 |
+
pygments/lexers/__pycache__/verifpal.cpython-312.pyc,,
|
| 307 |
+
pygments/lexers/__pycache__/vip.cpython-312.pyc,,
|
| 308 |
+
pygments/lexers/__pycache__/vyper.cpython-312.pyc,,
|
| 309 |
+
pygments/lexers/__pycache__/web.cpython-312.pyc,,
|
| 310 |
+
pygments/lexers/__pycache__/webassembly.cpython-312.pyc,,
|
| 311 |
+
pygments/lexers/__pycache__/webidl.cpython-312.pyc,,
|
| 312 |
+
pygments/lexers/__pycache__/webmisc.cpython-312.pyc,,
|
| 313 |
+
pygments/lexers/__pycache__/wgsl.cpython-312.pyc,,
|
| 314 |
+
pygments/lexers/__pycache__/whiley.cpython-312.pyc,,
|
| 315 |
+
pygments/lexers/__pycache__/wowtoc.cpython-312.pyc,,
|
| 316 |
+
pygments/lexers/__pycache__/wren.cpython-312.pyc,,
|
| 317 |
+
pygments/lexers/__pycache__/x10.cpython-312.pyc,,
|
| 318 |
+
pygments/lexers/__pycache__/xorg.cpython-312.pyc,,
|
| 319 |
+
pygments/lexers/__pycache__/yang.cpython-312.pyc,,
|
| 320 |
+
pygments/lexers/__pycache__/yara.cpython-312.pyc,,
|
| 321 |
+
pygments/lexers/__pycache__/zig.cpython-312.pyc,,
|
| 322 |
+
pygments/lexers/_ada_builtins.py,sha256=CA_OnShtdc7wWh9oYcRlcrkDAQwYUKl6w7tdSbALQd4,1543
|
| 323 |
+
pygments/lexers/_asy_builtins.py,sha256=cd9M00YH19w5ZL7aqucmC3nwpJGTS04U-01NLy5E2_4,27287
|
| 324 |
+
pygments/lexers/_cl_builtins.py,sha256=kQeUIyZjP4kX0frkICDcKxBYQCLqzIDXa5WV5cevhDo,13994
|
| 325 |
+
pygments/lexers/_cocoa_builtins.py,sha256=Ka1lLJe7JfWtdho4IFIB82X9yBvrbfHCCmEG-peXXhQ,105173
|
| 326 |
+
pygments/lexers/_csound_builtins.py,sha256=qnQYKeI26ZHim316uqy_hDiRiCoHo2RHjD3sYBALyXs,18414
|
| 327 |
+
pygments/lexers/_css_builtins.py,sha256=aD-dhLFXVd1Atn_bZd7gEdQn7Mhe60_VHpvZ340WzDI,12446
|
| 328 |
+
pygments/lexers/_googlesql_builtins.py,sha256=IkrOk-T2v1yzbGzUEEQh5_Cf4uC_cmL_uuhwDpZlTug,16132
|
| 329 |
+
pygments/lexers/_julia_builtins.py,sha256=N2WdSw5zgI2fhDat_i4YeVqurRTC_P8x71ez00SCN6U,11883
|
| 330 |
+
pygments/lexers/_lasso_builtins.py,sha256=8q1gbsrMJeaeUhxIYKhaOxC9j_B-NBpq_XFj2Ze41X0,134510
|
| 331 |
+
pygments/lexers/_lilypond_builtins.py,sha256=XTbGL1z1oKMoqWLEktG33jx5GdGTI9CpeO5NheEi4Y0,108094
|
| 332 |
+
pygments/lexers/_lua_builtins.py,sha256=PhFdZV5-Tzz2j_q4lvG9lr84ELGfL41BhnrSDNNTaG4,8108
|
| 333 |
+
pygments/lexers/_luau_builtins.py,sha256=-IDrU04kUVfjXwSQzMMpXmMYhNsQxZVVZk8cuAA0Lo0,955
|
| 334 |
+
pygments/lexers/_mapping.py,sha256=9fv7xYOUAOr6LzfdFS4MDbPu78o4OQQH-2nsI1bNZf4,70438
|
| 335 |
+
pygments/lexers/_mql_builtins.py,sha256=ybRQjlb7Cul0sDstnzxJl3h0qS6Ieqsr811fqrxyumU,24713
|
| 336 |
+
pygments/lexers/_mysql_builtins.py,sha256=y0kAWZVAs0z2dTFJJV42OZpILgRnd8T3zSlBFv-g_oA,25838
|
| 337 |
+
pygments/lexers/_openedge_builtins.py,sha256=Sz4j9-CPWIaxMa-2fZgY66j7igcu1ob1GR2UtI8zAkg,49398
|
| 338 |
+
pygments/lexers/_php_builtins.py,sha256=Jd4BZpjMDELPi4EVoSxK1-8BFTc63HUwYfm1rLrGj0M,107922
|
| 339 |
+
pygments/lexers/_postgres_builtins.py,sha256=Pqh4z0RBRbnW6rCQtWUdzWCJxNyqpJ7_0HOktxHDxk4,13343
|
| 340 |
+
pygments/lexers/_qlik_builtins.py,sha256=xuJy9c9uZDXv6h8z582P5PrxqkxTZ_nS8gPl9OD9VN8,12595
|
| 341 |
+
pygments/lexers/_scheme_builtins.py,sha256=2hNtJOJmP21lUsikpqMJ2gAmLT3Rwn_KEeqhXwCjgfk,32564
|
| 342 |
+
pygments/lexers/_scilab_builtins.py,sha256=oZYPB1XPdIEz3pII11pFDe6extRRyWGA7pY06X8KZ8w,52411
|
| 343 |
+
pygments/lexers/_sourcemod_builtins.py,sha256=H8AFLsNDdEpymIWOpDwbDJGCP1w-x-1gSlzPDioMF4o,26777
|
| 344 |
+
pygments/lexers/_sql_builtins.py,sha256=oe8F9wWuO2iS6nEsZAdJtCUChBTjgM1Sq_aipu74jXM,6767
|
| 345 |
+
pygments/lexers/_stan_builtins.py,sha256=dwi1hllM_NsaCv-aXJy7lEi57X5Hh5gSD97aCQyT9KM,13445
|
| 346 |
+
pygments/lexers/_stata_builtins.py,sha256=Hqrr6j77zWU3cGGpBPohwexZci43YA4_sVYE4E1sNow,27227
|
| 347 |
+
pygments/lexers/_tsql_builtins.py,sha256=Pi2RhTXcLE3glI9oxNhyVsOMn-fK_1TRxJ-EsYP5LcI,15460
|
| 348 |
+
pygments/lexers/_usd_builtins.py,sha256=c9hbU1cwqBUCFIhNfu_Dob8ywv1rlPhi9w2OTj3kR8s,1658
|
| 349 |
+
pygments/lexers/_vbscript_builtins.py,sha256=MqJ2ABywD21aSRtWYZRG64CCbGstC1kfsiHGJmZzxiw,4225
|
| 350 |
+
pygments/lexers/_vim_builtins.py,sha256=bA4mH8t1mPPQfEiUCKEqRO1O0rL2DUG0Ux1Bt8ZSu0E,57066
|
| 351 |
+
pygments/lexers/actionscript.py,sha256=JBngCe5UhYT_0dLD2j7PnPO0xRRJhmypEuQ-C5in8pY,11727
|
| 352 |
+
pygments/lexers/ada.py,sha256=58k5ra1vGS4iLpW3h1ItY9ftzF3WevaeAAXzAYTiYkQ,5353
|
| 353 |
+
pygments/lexers/agile.py,sha256=DN-7AVIqtG1MshA94rtSGYI_884hVHgzq405wD0_dl8,896
|
| 354 |
+
pygments/lexers/algebra.py,sha256=yGTu9Tt-cQzAISQYIC5MS5a3z4QmL-tGcXnd_pkWGbk,9952
|
| 355 |
+
pygments/lexers/ambient.py,sha256=UnzKpIlfSm3iitHvMd7XTMSY8TjZYYhKOC3AiARS_cE,2605
|
| 356 |
+
pygments/lexers/amdgpu.py,sha256=S8qjn2UMLhBFm3Yn_c06XAGf8cl5x_ZeluelWG_-JAw,1723
|
| 357 |
+
pygments/lexers/ampl.py,sha256=ZBRfDXm760gR1a1gqItnsHuoO3JdUcTBjJ5tFY9UtPA,4176
|
| 358 |
+
pygments/lexers/apdlexer.py,sha256=Zr5-jgjxC8PKzRlEeclakZXPHci7FHBZghQ6wwiuT7A,30800
|
| 359 |
+
pygments/lexers/apl.py,sha256=PTQMp-bxT5P-DbrEvFha10HBTcsDJ5srL3I1s9ljz58,3404
|
| 360 |
+
pygments/lexers/archetype.py,sha256=pQVlP1Fb5OA8nn7QwmFaaaOSvvpoIsQVw43FVCQCve4,11538
|
| 361 |
+
pygments/lexers/arrow.py,sha256=2PKdbWq3xQLF1KoDbWvSxpjwKRrznnDiArTflRGZzBo,3564
|
| 362 |
+
pygments/lexers/arturo.py,sha256=U5MtRNHJtnBn4ZOeWmW6MKlVRG7SX6KhTRamDqzn9tA,11414
|
| 363 |
+
pygments/lexers/asc.py,sha256=-DgZl9jccBDHPlDmjCsrEqx0-Q7ap7XVdNKtxLNWG1w,1693
|
| 364 |
+
pygments/lexers/asm.py,sha256=xm2Y5mcT-sF3oQvair4SWs9EWTyndoaUoSsDy5v6shI,41967
|
| 365 |
+
pygments/lexers/asn1.py,sha256=BlcloIX2bu6Q7BxGcksuhYFHGsXLVKyB4B9mFd4Pj6E,4262
|
| 366 |
+
pygments/lexers/automation.py,sha256=Q61qon8EwpfakMh_2MS2E2zUUT16rG3UNIKPYjITeTs,19831
|
| 367 |
+
pygments/lexers/bare.py,sha256=tWoei86JJX1k-ADhaXd5TgX6ItDTici9yFWpkTPhnfM,3020
|
| 368 |
+
pygments/lexers/basic.py,sha256=qpVe5h8Fa7NJo1EihN-4R_UZpHO6my2Ssgkb-BktkKs,27989
|
| 369 |
+
pygments/lexers/bdd.py,sha256=yysefcOFAEyk9kJ2y4EXmzJTecgLYUHlWixt_3YzPMU,1641
|
| 370 |
+
pygments/lexers/berry.py,sha256=zxGowFb8HMIyN15-m8nmWnW6bPRR4esKtSEVugc9uXM,3209
|
| 371 |
+
pygments/lexers/bibtex.py,sha256=yuNoPxwrJf9DCGUT17hxfDzbq_HtCLkQkRbBtiTVmeQ,4811
|
| 372 |
+
pygments/lexers/blueprint.py,sha256=NzvWHMxCLDWt8hc6gB5jokltxVJgNa7Jwh4c61ng388,6188
|
| 373 |
+
pygments/lexers/boa.py,sha256=dOot1XWNZThPIio2UyAX67K6EpISjSRCFjotD7dcnwE,3921
|
| 374 |
+
pygments/lexers/bqn.py,sha256=nJiwrPKKbRF-qdai5tfqipwBkkko2P3weiZAjHUMimY,3671
|
| 375 |
+
pygments/lexers/business.py,sha256=lRtekOJfsDkb12AGbuz10-G67OJrVJgCBtihTQ8_aoY,28345
|
| 376 |
+
pygments/lexers/c_cpp.py,sha256=D7ZIswaHASlGBgoTlwnSqTQHf8_JyvvSt2L2q1W-F6g,18059
|
| 377 |
+
pygments/lexers/c_like.py,sha256=FTGp17ds6X2rDZOHup2hH6BEn3gKK4nLm9pydNEhm0E,32021
|
| 378 |
+
pygments/lexers/capnproto.py,sha256=XQJAh1WS-0ulqbTn9TdzR6gEgWLcuBqb4sj3jNsrhsY,2174
|
| 379 |
+
pygments/lexers/carbon.py,sha256=av12YuTGZGpOa1Cmxp3lppx3LfSJUWbvOu0ixmUVll0,3211
|
| 380 |
+
pygments/lexers/cddl.py,sha256=MKa70IwABgjBjYu15_Q9v8rsu2sr1a-i2jkiaPTI6sM,5076
|
| 381 |
+
pygments/lexers/chapel.py,sha256=0n_fL3ehLC4pw4YKnmq9jxIXOJcxGPka1Wr1t1zsXPc,5156
|
| 382 |
+
pygments/lexers/clean.py,sha256=dkDPAwF5BTALPeuKFoRKOSD3RfsKcGWbaRo6_G8LHng,6418
|
| 383 |
+
pygments/lexers/codeql.py,sha256=ebvghn2zbrnETV4buVozMDmRCVKSdGiIN8ycLlHpGsE,2576
|
| 384 |
+
pygments/lexers/comal.py,sha256=TC3NzcJ58ew5jw7qwK0kJ-okTA47psZje0yAIS39HR4,3179
|
| 385 |
+
pygments/lexers/compiled.py,sha256=Slfo1sjWqcPawUwf0dIIZLBCL5pkOIoAX2S8Lxs02Mc,1426
|
| 386 |
+
pygments/lexers/configs.py,sha256=wW8pY0Sa5a10pnAeTLGf48HhixQTVageIyHEf1aYMCc,50913
|
| 387 |
+
pygments/lexers/console.py,sha256=-jAG120dupvV3kG3zC70brLJvSLwTFqMubBQuj_GVnU,4180
|
| 388 |
+
pygments/lexers/cplint.py,sha256=DkbyE5EKydLgf6BRr1FhQrK-IeQPL7Zmjk0DVdlRFnQ,1389
|
| 389 |
+
pygments/lexers/crystal.py,sha256=xU-RnpIkpjrquoxtOuOcP8fcesSJl4xhU7kO9m42LZY,15754
|
| 390 |
+
pygments/lexers/csound.py,sha256=ioSw4Q04wdwjUAbnTZ1qLhUq1vxdWFxhh3QtEl5RAJc,16998
|
| 391 |
+
pygments/lexers/css.py,sha256=JN1RBYsee-jrpHWrSmhN3TKc4TkOBn-_BEGpgTCzcqE,25376
|
| 392 |
+
pygments/lexers/d.py,sha256=piOy0EJeiAwPHugiM3gVv0z7HNh3u2gZQoCUSASRbY4,9920
|
| 393 |
+
pygments/lexers/dalvik.py,sha256=deFg2JPBktJ9mEGb9EgxNkmd6vaMjJFQVzUHo8NKIa8,4606
|
| 394 |
+
pygments/lexers/data.py,sha256=o0x0SmB5ms_CPUPljEEEenOON4IQWn86DkwFjkJYCOg,27026
|
| 395 |
+
pygments/lexers/dax.py,sha256=ASi73qmr7OA7cVZXF2GTYGt01Ly1vY8CgD_Pnpm8k-4,8098
|
| 396 |
+
pygments/lexers/devicetree.py,sha256=RecSQCidt8DRE1QFCPUbwwR0hiRlNtsFihdGldeUn3k,4019
|
| 397 |
+
pygments/lexers/diff.py,sha256=F6vxZ64wm5Nag_97de1H_3F700ZwCVnYjKvtT5jilww,5382
|
| 398 |
+
pygments/lexers/dns.py,sha256=Hh5hJ7MXfrq36KgfyIRwK3X8o1LdR98IKERcV4eZ7HY,3891
|
| 399 |
+
pygments/lexers/dotnet.py,sha256=NDE0kOmpe96GLO-zwNLazmj77E9ORGmKpa4ZMCXDXxQ,39441
|
| 400 |
+
pygments/lexers/dsls.py,sha256=GnHKhGL5GxsRFnqC7-65NTPZLOZdmnllNrGP86x_fQE,36746
|
| 401 |
+
pygments/lexers/dylan.py,sha256=7zZ1EbHWXeVHqTD36AqykKqo3fhuIh4sM-whcxUaH_Y,10409
|
| 402 |
+
pygments/lexers/ecl.py,sha256=vhmpa2LBrHxsPkYcf3kPZ1ItVaLRDTebi186wY0xGZA,6371
|
| 403 |
+
pygments/lexers/eiffel.py,sha256=5ydYIEFcgcMoEj4BlK31hZ0aJb8OX0RdAvuCNdlxwqw,2690
|
| 404 |
+
pygments/lexers/elm.py,sha256=uRCddU8jK5vVkH6Y66y8KOsDJprIfrOgeYq3hv1PxAM,3152
|
| 405 |
+
pygments/lexers/elpi.py,sha256=O9j_WKBPyvNFjCRuPciVpW4etVSnILm_T79BhCPZYmo,6877
|
| 406 |
+
pygments/lexers/email.py,sha256=ZZL6yvwCRl1CEQyysuOu0lbabp5tjMutS7f3efFKGR4,4804
|
| 407 |
+
pygments/lexers/erlang.py,sha256=bU11eVHvooLwmVknzN6Xkb2DMk7HbenqdNlYSzhThDM,19147
|
| 408 |
+
pygments/lexers/esoteric.py,sha256=Jfp8UUKyKYsqLaqXRZT3GSM9dzkF65zduwfnH1GoGhU,10500
|
| 409 |
+
pygments/lexers/ezhil.py,sha256=22r-xjvvBVpExTqCI-HycAwunDb1p5gY4tIfDmM0vDw,3272
|
| 410 |
+
pygments/lexers/factor.py,sha256=urZ4En4uKFCLXdEkXLWg9EYUFGHQTTDCwNXtyq-ngok,19530
|
| 411 |
+
pygments/lexers/fantom.py,sha256=JJ13-NwykD-iIESnuzCefCYeQDO95cHMJA8TasF4gHA,10231
|
| 412 |
+
pygments/lexers/felix.py,sha256=F-v0si4zPtRelqzDQWXI1-tarCE-BvawziODxRU7378,9655
|
| 413 |
+
pygments/lexers/fift.py,sha256=rOCwp3v5ocK5YOWvt7Td3Md--97_8e-7Sonx52uS8mA,1644
|
| 414 |
+
pygments/lexers/floscript.py,sha256=aHh82k52jMuDuzl9LatrcSANJiXTCyjGU3SO53bwbb0,2667
|
| 415 |
+
pygments/lexers/forth.py,sha256=ZMtsHdNbnS_0IdSYlfAlfTSPEr0MEsRo-YZriQNueTQ,7193
|
| 416 |
+
pygments/lexers/fortran.py,sha256=1PE5dTxf4Df6LUeXFcmNtyeXWsC8tSiK5dYwPHIJeeQ,10382
|
| 417 |
+
pygments/lexers/foxpro.py,sha256=CBkW62Fuibz3yfyelZCaEO8GGdFJWsuRhqwtsSeBwLM,26295
|
| 418 |
+
pygments/lexers/freefem.py,sha256=LFBQk-m1-nNCgrl-VDH3QwnVWurvb7W29i06LoT207A,26913
|
| 419 |
+
pygments/lexers/func.py,sha256=OR2rkM7gf9fKvad5WcFQln-_U_pb-RUCM9eQatToF4A,3700
|
| 420 |
+
pygments/lexers/functional.py,sha256=fYT2AGZ642cRkIAId0rnXFBsx1c8LLEDRN_VuCEkUyM,693
|
| 421 |
+
pygments/lexers/futhark.py,sha256=Vf1i4t-tR3zqaktVjhTzFNg_ts_9CcyA4ZDfDizbCmk,3743
|
| 422 |
+
pygments/lexers/gcodelexer.py,sha256=4Xs9ax4-JZGupW_qSnHon39wQGpb-tNA3xorMKg841E,874
|
| 423 |
+
pygments/lexers/gdscript.py,sha256=Ws7JKxy0M0IyZ_1iMfRvJPrizEwmeCNLDoeMIFaM-CU,7566
|
| 424 |
+
pygments/lexers/gleam.py,sha256=XIlTcq6cB743pCqbNYo8PocSkjZyDPR6hHgdaJNJ1Vc,2392
|
| 425 |
+
pygments/lexers/go.py,sha256=4LezefgyuqZWHzLZHieUkKTi-ssY6aHJxx7Z-LFaLK0,3783
|
| 426 |
+
pygments/lexers/grammar_notation.py,sha256=LvzhRQHgwZzq9oceukZS_hwnKK58ee7Z5d0cwXOR734,8043
|
| 427 |
+
pygments/lexers/graph.py,sha256=WFqoPA1c_hHYrV0i_F7-eUw3Co4_HmZY3GJ-TyDr670,4108
|
| 428 |
+
pygments/lexers/graphics.py,sha256=tmF9NNALnvPnax8ywYC3pLOla45YXtp9UA0H-5EiTQY,39145
|
| 429 |
+
pygments/lexers/graphql.py,sha256=O_zcrGrBaDaKTlUoJGRruxqk7CJi-NR92Y0Cs-KkCvw,5601
|
| 430 |
+
pygments/lexers/graphviz.py,sha256=mzdXOMpwz9_V-be1eTAMyhkKCBl6UxCIXuq6C2yrtsw,1934
|
| 431 |
+
pygments/lexers/gsql.py,sha256=VPZk9sb26-DumRkWfEaSTeoc0lx5xt5n-6eDDLezMtc,3990
|
| 432 |
+
pygments/lexers/hare.py,sha256=PGCOuILktJsmtTpCZZKkMFtObfJuBpei8HM8HHuq1Tw,2649
|
| 433 |
+
pygments/lexers/haskell.py,sha256=MYr74-PAC8kGJRX-dZmvZsHTc7a2u6yFS2B19LfDD7g,33262
|
| 434 |
+
pygments/lexers/haxe.py,sha256=WHCy_nrXHnfLITfbdp3Ji3lqQU4HAsTUpXsLCp2_4sk,30974
|
| 435 |
+
pygments/lexers/hdl.py,sha256=MOWxhmAuE4Ei0CKDqqaON7T8tl43geancrNYM136Z0U,22738
|
| 436 |
+
pygments/lexers/hexdump.py,sha256=1lj9oJ-KiZXSVYvTMfGmEAQzNEW08WlMcC2I5aYvHK4,3653
|
| 437 |
+
pygments/lexers/html.py,sha256=MxYTI4EeT7QxoGleCAyQq-8n_Sgly6tD95H5zanCNmk,21977
|
| 438 |
+
pygments/lexers/idl.py,sha256=rcihUAGhfuGEaSW6pgFq6NzplT_pv0DagUoefg4zAmk,15449
|
| 439 |
+
pygments/lexers/igor.py,sha256=wVefbUjb3ftaW3LCKGtX1JgLgiY4EmRor5gVOn8vQA8,31633
|
| 440 |
+
pygments/lexers/inferno.py,sha256=ChE_5y5SLH_75Uv7D2dKWQMk2dlN6z1gY1IDjlJZ8rU,3135
|
| 441 |
+
pygments/lexers/installers.py,sha256=ZHliit4Pxz1tYKOIjKkDXI5djTkpzYUMVIPR1xvUrL8,14435
|
| 442 |
+
pygments/lexers/int_fiction.py,sha256=0ZzIa1sZDUQsltd1oHuS-BoNiOF8zKQfcVuDyK1Ttv8,56544
|
| 443 |
+
pygments/lexers/iolang.py,sha256=L6dNDCLH0kxkIUi00fI4Z14QnRu79UcNDrgv02c5Zw8,1905
|
| 444 |
+
pygments/lexers/j.py,sha256=DqNdwQGFLiZW3mCNLRg81gpmsy4Hgcai_9NP3LbWhNU,4853
|
| 445 |
+
pygments/lexers/javascript.py,sha256=TGKQLSrCprCKfhLLGAq_0EOdvqvJKX9pOdKo7tCRurQ,63243
|
| 446 |
+
pygments/lexers/jmespath.py,sha256=R5yA5LJ2nTIaDwnFIpSNGAThd0sAYFccwawA9xBptlg,2082
|
| 447 |
+
pygments/lexers/jslt.py,sha256=OeYQf8O2_9FCaf9W6Q3a7rPdAFLthePCtVSgCrOTcl8,3700
|
| 448 |
+
pygments/lexers/json5.py,sha256=8JZbc8EiTEZdKaIdQg3hXEh0mHWSzPlwd473a0nUuT0,2502
|
| 449 |
+
pygments/lexers/jsonnet.py,sha256=bx2G6J4tJqGrJV1PyZrIWzWHXcoefCX-4lIxxtbn2gw,5636
|
| 450 |
+
pygments/lexers/jsx.py,sha256=wGsoGSB40qAJrVfXwRPtan7OcK0O87RVsHHk0m6gogk,2693
|
| 451 |
+
pygments/lexers/julia.py,sha256=0ZDJ9X83V5GqJzA6T6p0TTN8WHy2JAjvu-FSBXvfXdc,11710
|
| 452 |
+
pygments/lexers/jvm.py,sha256=Yt1iQ3QodXRY-x_HUOGedhyuBBHn5jYH-I8NzOzHTlE,72667
|
| 453 |
+
pygments/lexers/kuin.py,sha256=3dKKJVJlskgrvMKv2tY9NOsFfDjyo-3MLcJ1lFKdXSg,11405
|
| 454 |
+
pygments/lexers/kusto.py,sha256=kaxkoPpEBDsBTCvCOkZZx7oGfv0jk_UNIRIRbfVAsBE,3477
|
| 455 |
+
pygments/lexers/ldap.py,sha256=77vF4t_19x9V522cxRCM5d3HW8Ne3giYsFsMPVYYBw4,6551
|
| 456 |
+
pygments/lexers/lean.py,sha256=7HWRgxFsxS1N9XKqw0vfKwaxl27s5YiVYtZeRUoTHFo,8570
|
| 457 |
+
pygments/lexers/lilypond.py,sha256=yd2Tuv67um6EyCIr-VwBnlPhTHxMaQsBJ4nGgO5fjIk,9752
|
| 458 |
+
pygments/lexers/lisp.py,sha256=EHUy1g4pzEsYPE-zGj2rAXm3YATE1j9dCQOr5-JPSkU,157668
|
| 459 |
+
pygments/lexers/macaulay2.py,sha256=zkV-vxjQYa0Jj9TGfFP1iMgpTZ4ApQuAAIdJVGWb2is,33366
|
| 460 |
+
pygments/lexers/make.py,sha256=YMI5DBCrxWca-pz9cVXcyfuHLcikPx9R_3pW_98Myqo,7831
|
| 461 |
+
pygments/lexers/maple.py,sha256=Rs0dEmOMD3C1YQPd0mntN-vzReq4XfHegH6xV4lvJWo,7960
|
| 462 |
+
pygments/lexers/markup.py,sha256=zWtxsyIx_1OxQzS6wLe8bEqglePv4RqvJjbia8AvV5c,65088
|
| 463 |
+
pygments/lexers/math.py,sha256=P3ZK1ePd8ZnLdlmHezo2irCA8T2-nlHBoSaBoT5mEVI,695
|
| 464 |
+
pygments/lexers/matlab.py,sha256=F9KO4qowIhfP8oVhCRRzE_1sqg4zmQbsB2NZH193PiM,133027
|
| 465 |
+
pygments/lexers/maxima.py,sha256=a0h9Ggs9JEovTrzbJT-BLVbOqI29yPnaMZlkU5f_FeY,2715
|
| 466 |
+
pygments/lexers/meson.py,sha256=BMrsDo6BH2lzTFw7JDwQ9SDNMTrRkXCNRDVf4aFHdsI,4336
|
| 467 |
+
pygments/lexers/mime.py,sha256=yGrf3h37LK4b6ERBpFiL_qzn3JgOfGR5KLagnbWFl6c,7582
|
| 468 |
+
pygments/lexers/minecraft.py,sha256=Nu88snDDPzM0D-742fFdUriczL-EE911pAd4_I4-pAw,13696
|
| 469 |
+
pygments/lexers/mips.py,sha256=STKiZT67b3QERXXn7XKVxlPBu7vwbPC5EyCpuf3Jfbw,4656
|
| 470 |
+
pygments/lexers/ml.py,sha256=t8sCv4BjvuBq6AihKKUwStEONIgdXCC2RMtO0RopNbM,35390
|
| 471 |
+
pygments/lexers/modeling.py,sha256=M7B58bGB-Zwd1EmPxKqtRvg7TgNCyem3MVUHv0_H2SQ,13683
|
| 472 |
+
pygments/lexers/modula2.py,sha256=NtpXBRoUCeHfflgB39LknSkCwhBHBKv2Er_pinjVsNE,53072
|
| 473 |
+
pygments/lexers/mojo.py,sha256=8JRVoftN1E-W2woG0K-4n8PQXTUM9iY6Sl5sWb2uGNg,24233
|
| 474 |
+
pygments/lexers/monte.py,sha256=baWU6zlXloenw9MO1MtEVGE9i3CfiXAYhqU621MIjRk,6289
|
| 475 |
+
pygments/lexers/mosel.py,sha256=gjRdedhA1jTjoYoM1Gpaoog_I9o7TRbYMHk97N1TXwg,9297
|
| 476 |
+
pygments/lexers/ncl.py,sha256=zJ6ahlitit4S0pBXc7Wu96PB7xOn59MwfR2HdY5_C60,63999
|
| 477 |
+
pygments/lexers/nimrod.py,sha256=Q1NSqEkLC5wWt7xJyKC-vzWw_Iw2SfDNP_pyMFBuIfA,6413
|
| 478 |
+
pygments/lexers/nit.py,sha256=p_hVD8GzMRl3CABVKHtYgnXFUQk0i5F2FbWFA6WXm6s,2725
|
| 479 |
+
pygments/lexers/nix.py,sha256=NOrv20gdq-2A7eZ6c2gElPHv1Xx2pvv20-qOymL9GMg,4421
|
| 480 |
+
pygments/lexers/numbair.py,sha256=fxkp2CXeXWKBMewfi1H4JSYkmm4kU58wZ2Sh9BDYAWQ,1758
|
| 481 |
+
pygments/lexers/oberon.py,sha256=jw403qUUs7zpTHAs5CbLjb8qiuwtxLk0spDIYqGZwAw,4210
|
| 482 |
+
pygments/lexers/objective.py,sha256=Fo1WB3JMj8sNeYnvB84H4_qwhOt4WNJtJWjVEOwrJGk,23297
|
| 483 |
+
pygments/lexers/ooc.py,sha256=kD1XaJZaihDF_s-Vyu1Bx68S_9zFt2rhox7NF8LpOZM,3002
|
| 484 |
+
pygments/lexers/openscad.py,sha256=h9I1k8kiuQmhX5vZm6VDSr2fa5Finy0sN8ZDIE-jx1c,3700
|
| 485 |
+
pygments/lexers/other.py,sha256=WLVyqPsvm9oSXIbZwbfyJloS6HGgoFW5nVTaU1uQpTw,1763
|
| 486 |
+
pygments/lexers/parasail.py,sha256=DWMGhtyQgGTXbIgQl_mID6CKqi-Dhbvs_dTkmvrZXfE,2719
|
| 487 |
+
pygments/lexers/parsers.py,sha256=feNgxroPoWRf0NEsON2mtmKDUfslIQppukw6ndEsQ3M,26596
|
| 488 |
+
pygments/lexers/pascal.py,sha256=N2tRAjlXnTxggAzzk2tOOAVzeC2MBzrXy97_HQl5n44,30989
|
| 489 |
+
pygments/lexers/pawn.py,sha256=LWUYQYsebMMt2d5oxX1HYWvBqbakR1h7Av_z8Vw94Wg,8253
|
| 490 |
+
pygments/lexers/pddl.py,sha256=Mk4_BzlROJCd0xR4KKRRSrbj0F7LLQcBRjmsmtWmrCg,2989
|
| 491 |
+
pygments/lexers/perl.py,sha256=9BXn3tyHMA49NvzbM9E2czSCHjeU7bvaPLUcoZrhz-4,39192
|
| 492 |
+
pygments/lexers/phix.py,sha256=hZqychqo5sFMBDESzDPXg1DYHQe_9sn294UfbjihaFk,23249
|
| 493 |
+
pygments/lexers/php.py,sha256=l4hzQrlm0525i5dSw9Vmjcai3TzbPT6DkjzxPg9l6Zc,13061
|
| 494 |
+
pygments/lexers/pointless.py,sha256=WSDjqQyGrNIGmTCdaMxl4zk7OZTlJAMzeUZ02kfgcTI,1974
|
| 495 |
+
pygments/lexers/pony.py,sha256=EXrMkacqMZblI7v4AvBRQe-3Py8__bx5FOgjCLdfXxQ,3279
|
| 496 |
+
pygments/lexers/praat.py,sha256=4UFK-nbC6WkZBhJgcQqEGqq9CocJkW7AmT_OJQbjWzk,12676
|
| 497 |
+
pygments/lexers/procfile.py,sha256=05W2fyofLTP-FbEdSXD1eles-PPqVNfF6RWXjQdW2us,1155
|
| 498 |
+
pygments/lexers/prolog.py,sha256=9Kc5YNUFqkfWu2sYoyzC3RX65abf1bm7oHr86z1s4kQ,12866
|
| 499 |
+
pygments/lexers/promql.py,sha256=n-0vo-o8-ZasqP3Va4ujs562UfZSLfZF-RzT71yL0Tk,4738
|
| 500 |
+
pygments/lexers/prql.py,sha256=PFReuvhbv4K5aeu6lvDfw4m-3hULkB3r43bKAy948os,8747
|
| 501 |
+
pygments/lexers/ptx.py,sha256=KSHAvbiNVUntKilQ6EPYoLFocmJpRsBy_7fW6_Nrs1Y,4501
|
| 502 |
+
pygments/lexers/python.py,sha256=WZe7fBAHKZ_BxPg8qIU26UGhk8qwUYyENJ3IyPW64mc,53805
|
| 503 |
+
pygments/lexers/q.py,sha256=WQFUh3JrpK2j-VGW_Ytn3uJ5frUNmQIFnLtMVGRA9DI,6936
|
| 504 |
+
pygments/lexers/qlik.py,sha256=2wqwdfIjrAz6RNBsP4MyeLX8Z7QpIGzxtf1CvaOlr_g,3693
|
| 505 |
+
pygments/lexers/qvt.py,sha256=XMBnsWRrvCDf989OuDeb-KpszAkeETiACyaghZeL1ns,6103
|
| 506 |
+
pygments/lexers/r.py,sha256=B6WgrD9SY1UTCV1fQBSlZbezPfpYsARn3FQIHcFYOiM,6474
|
| 507 |
+
pygments/lexers/rdf.py,sha256=qUzxLna9v071bHhZAjdsBi8dKaJNk_h9g1ZRUAYCfoo,16056
|
| 508 |
+
pygments/lexers/rebol.py,sha256=4u3N4kzui55HapopXDu3Kt0jczxDZ4buzwR7Mt4tQiM,18259
|
| 509 |
+
pygments/lexers/rego.py,sha256=Rx5Gphbktr9ojg5DbqlyxHeQqqtF7g8W-oF0rmloDNY,1748
|
| 510 |
+
pygments/lexers/resource.py,sha256=ioEzgWksB5HCjoz85XNkQPSd7n5kL0SZiuPkJP1hunQ,2927
|
| 511 |
+
pygments/lexers/ride.py,sha256=kCWdxuR3PclVi4wiA0uUx4CYEFwuTqoMsKjhSW4X3yg,5035
|
| 512 |
+
pygments/lexers/rita.py,sha256=Mj1QNxx1sWAZYC02kw8piVckaiw9B0MqQtiIiDFH0pA,1127
|
| 513 |
+
pygments/lexers/rnc.py,sha256=g7ZD334PMGUqy_Ij64laSN1vJerwHqVkegfMCa3E-y8,1972
|
| 514 |
+
pygments/lexers/roboconf.py,sha256=HbYuK5CqmQdd63SRY2nle01r7-p7mil0SnoauYDmEOY,2074
|
| 515 |
+
pygments/lexers/robotframework.py,sha256=c4U1B9Q9ITBCTohqJTZOvkfyeVbenN4xhzSWIoZh5eU,18448
|
| 516 |
+
pygments/lexers/ruby.py,sha256=uG617E5abBZcECRCqkhIfc-IbZcRb5cGuUZq_xpax90,22753
|
| 517 |
+
pygments/lexers/rust.py,sha256=ZY-9vtsreBP0NfDd0WCouLSp_9MChAL8U8Abe-m9PB8,8260
|
| 518 |
+
pygments/lexers/sas.py,sha256=C1Uz2s9DU6_s2kL-cB_PAGPtpyK5THlmhNmCumC1l48,9456
|
| 519 |
+
pygments/lexers/savi.py,sha256=jrmruK0GnXktgBTWXW3oN3TXtofn3HBbkMlHnR84cko,4878
|
| 520 |
+
pygments/lexers/scdoc.py,sha256=DXRmFDmYuc7h3gPAAVhfcL1OEbNBK5RdPpJqQzF3ZTk,2524
|
| 521 |
+
pygments/lexers/scripting.py,sha256=eaYlkDK-_cAwTcCBHP6QXBCz8n6OzbhzdkRe0uV0xWY,81814
|
| 522 |
+
pygments/lexers/sgf.py,sha256=w6C513ENaO2YCnqrduK7k03NaMDf-pgygvfzq2NaSRk,1985
|
| 523 |
+
pygments/lexers/shell.py,sha256=dCS1zwkf5KwTog4__MnMC7h3Xmwv4_d3fnEV29tSwXI,36381
|
| 524 |
+
pygments/lexers/sieve.py,sha256=eob-L84yf2jmhdNyYZUlbUJozdcd6GXcHW68lmAe8WE,2514
|
| 525 |
+
pygments/lexers/slash.py,sha256=I-cRepmaxhL1SgYvD1hHX3gNBFI8NPszdU7hn1o5JlA,8484
|
| 526 |
+
pygments/lexers/smalltalk.py,sha256=ue2PmqDK2sw0j75WdseiiENJBdZ1OwysH2Op1QN1r24,7204
|
| 527 |
+
pygments/lexers/smithy.py,sha256=VREWoeuz7ANap_Uiopn7rs0Tnsfc-xBisDJKRGQY_y8,2659
|
| 528 |
+
pygments/lexers/smv.py,sha256=He_VBSMbWONMWZmkrB5RYR0cfHVnMyKIXz68IFYl-a8,2805
|
| 529 |
+
pygments/lexers/snobol.py,sha256=qDzb41xQQWMNmjB2MtZs23pFoFgZ2gbRZhK_Ir03r7I,2778
|
| 530 |
+
pygments/lexers/solidity.py,sha256=Tixfnwku4Yezj6nNm8xVaw7EdV1qgAgdwahdTFP0St8,3163
|
| 531 |
+
pygments/lexers/soong.py,sha256=Vm18vV4g6T8UPgjjY2yTRlSXGDpZowmuqQUBFfm4A9A,2339
|
| 532 |
+
pygments/lexers/sophia.py,sha256=2YtYIT8iwAoW0B7TZuuoG_ZILhJV-2A7oBGat-98naE,3376
|
| 533 |
+
pygments/lexers/special.py,sha256=8JuR2Vex8X-RWnC36S0HXTHWp2qmZclc90-TrLUWyaY,3585
|
| 534 |
+
pygments/lexers/spice.py,sha256=m4nK0q4Sq_OFQez7kGWfki0No4ZV24YrONfHVj1Piqs,2790
|
| 535 |
+
pygments/lexers/sql.py,sha256=WSG6vOsR87EEEwSQefP_Z7TauUG_BjqMHUFmPaSOVj4,41476
|
| 536 |
+
pygments/lexers/srcinfo.py,sha256=B8vDs-sJogG3mWa5Hp_7JfHHUMyYRwGvKv6cKbFQXLM,1746
|
| 537 |
+
pygments/lexers/stata.py,sha256=Zr9BC52D5O_3BbdW0N-tzoUmy0NTguL2sC-saXRVM-c,6415
|
| 538 |
+
pygments/lexers/supercollider.py,sha256=_H5wDrn0DiGnlhB_cz6Rt_lo2TvqjSm0o6NPTd9R4Ko,3697
|
| 539 |
+
pygments/lexers/tablegen.py,sha256=1JjedXYY18BNiY9JtNGLOtGfiwduNDZpQLBGTeQ6jAw,3987
|
| 540 |
+
pygments/lexers/tact.py,sha256=X_lsxjFUMaC1TmYysXJq9tmAGifRnil83Bt1zA86Xdo,10809
|
| 541 |
+
pygments/lexers/tal.py,sha256=xS9PlaWQOPj8MVr56fUNq31vUQKRWoLTlyWj9ZHm8AM,2904
|
| 542 |
+
pygments/lexers/tcl.py,sha256=lK97ju4nikkt-oGOzIeyFEM98yq4dZSI8uEmYsq0R6c,5512
|
| 543 |
+
pygments/lexers/teal.py,sha256=t3dqy_Arwv8_yExbX_xiFxv1TqJLPv4vh1MVKjKwS4Y,3522
|
| 544 |
+
pygments/lexers/templates.py,sha256=BVdjYeoacIUuFyHTG39j4PxeNCe5E1oUURjH1rITrI4,75731
|
| 545 |
+
pygments/lexers/teraterm.py,sha256=ciwztagW5Drg2gr17Qykrh6GwMsKy7e4xdQshX95GyQ,9718
|
| 546 |
+
pygments/lexers/testing.py,sha256=YZgDgUEaLEYKSKEqpDsUi3Bn-Db_D42IlyiSsr1oX8U,10810
|
| 547 |
+
pygments/lexers/text.py,sha256=nOCQPssIlKdVWU3PKxZiBPkf_KFM2V48IOssSyqhFY8,1068
|
| 548 |
+
pygments/lexers/textedit.py,sha256=ttT4Ph-hIdgFLG6maRy_GskkziTFK0Wcg28yU0s6lek,7760
|
| 549 |
+
pygments/lexers/textfmts.py,sha256=mi9KLEq4mrzDJbEc8G3VM-mSki_Tylkzodu47yH6z84,15524
|
| 550 |
+
pygments/lexers/theorem.py,sha256=51ppBAEdhJmwU_lC916zMyjEoKLXqf89VAE_Lr0PNCc,17855
|
| 551 |
+
pygments/lexers/thingsdb.py,sha256=x_fHNkLA-hIJyeIs6rg_X8n5OLYvFqaSu1FhI3apI5Y,6017
|
| 552 |
+
pygments/lexers/tlb.py,sha256=ue2gqm45BI512lM13O8skAky9zAb7pLMrxZ8pbt5zRU,1450
|
| 553 |
+
pygments/lexers/tls.py,sha256=_uQUVuMRDOhN-XUyGR5DIlVCk1CUZ1fIOSN4_WQYPKk,1540
|
| 554 |
+
pygments/lexers/tnt.py,sha256=pK4LgoKON7u1xF66JYFncAPSbD8DZaeI_WTZ9HqEFlY,10456
|
| 555 |
+
pygments/lexers/trafficscript.py,sha256=X3B8kgxS54ecuok9ic6Hkp-UMn5DvOmCK0p70Tz27Cw,1506
|
| 556 |
+
pygments/lexers/typoscript.py,sha256=mBuePiVZUoAORPKsHwrx6fBWiy3fAIqG-2O67QmMiFI,8332
|
| 557 |
+
pygments/lexers/typst.py,sha256=zIJBEhUXtWp5OiyAmvFA5m8d1EQG-ocwrJ677dvTUAk,7167
|
| 558 |
+
pygments/lexers/ul4.py,sha256=rCaw0J9j3cdql9lX_HTilg65k9-9S118zOA6TAYfxaM,10499
|
| 559 |
+
pygments/lexers/unicon.py,sha256=RAqoCnAAJBYOAGdR8ng0g6FtB39bGemLRlIqv5mcg9E,18625
|
| 560 |
+
pygments/lexers/urbi.py,sha256=ajNP70NJg32jNnFDZsLvr_-4TToSGqRGkFyAPIJLfCU,6082
|
| 561 |
+
pygments/lexers/usd.py,sha256=2eEGouolodYS402P_gtBrn4lLzpg1z8uHwPCKqjUb_k,3304
|
| 562 |
+
pygments/lexers/varnish.py,sha256=dSh0Ku9SrjmlB29Fi_mWdWavN7M0cMKeepR4a34sOyI,7473
|
| 563 |
+
pygments/lexers/verification.py,sha256=Qu433Q_h3EK3uS4bJoLRFZK0kIVwzX5AFKsa4Z-qnxA,3934
|
| 564 |
+
pygments/lexers/verifpal.py,sha256=buyOOzCo_dGnoC40h0tthylHVVpgDt8qXu4olLvYy_4,2661
|
| 565 |
+
pygments/lexers/vip.py,sha256=2lEV4cLV9p4E37wctBL7zkZ4ZU4p3HVsiLJFzB1bie0,5711
|
| 566 |
+
pygments/lexers/vyper.py,sha256=Zq6sQIUBk6mBdpgOVgu3A6swGoBne0kDlRyjZznm2BY,5615
|
| 567 |
+
pygments/lexers/web.py,sha256=4W9a7vcskrGJnxt4KmoE3SZydWB1qLq7lP2XS85J_m8,913
|
| 568 |
+
pygments/lexers/webassembly.py,sha256=zgcMouzLawcbeFr6w_SOvGoUR68ZtqnnsbOcWEVleLk,5698
|
| 569 |
+
pygments/lexers/webidl.py,sha256=ODtVmw4gVzI8HQWxuEckP6KMwm8WP2G2lSZEjagDXts,10516
|
| 570 |
+
pygments/lexers/webmisc.py,sha256=-_-INDVdk47e2jlj-9bFcuLtntqVorBqIjlnwPfZFdI,40564
|
| 571 |
+
pygments/lexers/wgsl.py,sha256=9igd9dzixGIgNewruv9mPnFms-c9BahkZcCCrZygv84,11880
|
| 572 |
+
pygments/lexers/whiley.py,sha256=lMr750lA4MZsB4xqzVsIRtVMJIC3_dArhFYTHvOPwvA,4017
|
| 573 |
+
pygments/lexers/wowtoc.py,sha256=8xxvf0xGeYtf4PE7KtkHZ_ly9xY_XXHrpCitdKE42Ro,4076
|
| 574 |
+
pygments/lexers/wren.py,sha256=goGXnAMKKa13LLL40ybT3aMGPrk3gCRwZQFYAkKB_w0,3229
|
| 575 |
+
pygments/lexers/x10.py,sha256=Q-AmgdF2E-N7mtOPpZ07CsxrTVnikyqC4uRRv6H75sk,1943
|
| 576 |
+
pygments/lexers/xorg.py,sha256=9ttrBd3_Y2nXANsqtMposSgblYmMYqWXQ-Iz5RH9RsU,925
|
| 577 |
+
pygments/lexers/yang.py,sha256=13CWbSaNr9giOHz4o0SXSklh0bfWt0ah14jJGpTvcn0,4499
|
| 578 |
+
pygments/lexers/yara.py,sha256=jUSv78KTDfguCoAoAZKbYzQERkkyxBBWv5dInVrkDxo,2427
|
| 579 |
+
pygments/lexers/zig.py,sha256=f-80MVOSp1KnczAMokQLVM-_wAEOD16EcGFnaCNlsN0,3976
|
| 580 |
+
pygments/modeline.py,sha256=K5eSkR8GS1r5OkXXTHOcV0aM_6xpk9eWNEIAW-OOJ2g,1005
|
| 581 |
+
pygments/plugin.py,sha256=tPx0rJCTIZ9ioRgLNYG4pifCbAwTRUZddvLw-NfAk2w,1891
|
| 582 |
+
pygments/regexopt.py,sha256=wXaP9Gjp_hKAdnICqoDkRxAOQJSc4v3X6mcxx3z-TNs,3072
|
| 583 |
+
pygments/scanner.py,sha256=nNcETRR1tRuiTaHmHSTTECVYFPcLf6mDZu1e4u91A9E,3092
|
| 584 |
+
pygments/sphinxext.py,sha256=VEe_oHNgLoEGMHc2ROfbee2mF2PPREFyE6_m_JN5FvQ,7898
|
| 585 |
+
pygments/style.py,sha256=Cpw9dCAyW3_JAwFRXOJXmtKb5ZwO2_5KSmlq6q4fZw4,6408
|
| 586 |
+
pygments/styles/__init__.py,sha256=f9KCQXN4uKbe8aI8-L3qTC-_XPfT563FwTg6VTGVfwI,2006
|
| 587 |
+
pygments/styles/__pycache__/__init__.cpython-312.pyc,,
|
| 588 |
+
pygments/styles/__pycache__/_mapping.cpython-312.pyc,,
|
| 589 |
+
pygments/styles/__pycache__/abap.cpython-312.pyc,,
|
| 590 |
+
pygments/styles/__pycache__/algol.cpython-312.pyc,,
|
| 591 |
+
pygments/styles/__pycache__/algol_nu.cpython-312.pyc,,
|
| 592 |
+
pygments/styles/__pycache__/arduino.cpython-312.pyc,,
|
| 593 |
+
pygments/styles/__pycache__/autumn.cpython-312.pyc,,
|
| 594 |
+
pygments/styles/__pycache__/borland.cpython-312.pyc,,
|
| 595 |
+
pygments/styles/__pycache__/bw.cpython-312.pyc,,
|
| 596 |
+
pygments/styles/__pycache__/coffee.cpython-312.pyc,,
|
| 597 |
+
pygments/styles/__pycache__/colorful.cpython-312.pyc,,
|
| 598 |
+
pygments/styles/__pycache__/default.cpython-312.pyc,,
|
| 599 |
+
pygments/styles/__pycache__/dracula.cpython-312.pyc,,
|
| 600 |
+
pygments/styles/__pycache__/emacs.cpython-312.pyc,,
|
| 601 |
+
pygments/styles/__pycache__/friendly.cpython-312.pyc,,
|
| 602 |
+
pygments/styles/__pycache__/friendly_grayscale.cpython-312.pyc,,
|
| 603 |
+
pygments/styles/__pycache__/fruity.cpython-312.pyc,,
|
| 604 |
+
pygments/styles/__pycache__/gh_dark.cpython-312.pyc,,
|
| 605 |
+
pygments/styles/__pycache__/gruvbox.cpython-312.pyc,,
|
| 606 |
+
pygments/styles/__pycache__/igor.cpython-312.pyc,,
|
| 607 |
+
pygments/styles/__pycache__/inkpot.cpython-312.pyc,,
|
| 608 |
+
pygments/styles/__pycache__/lightbulb.cpython-312.pyc,,
|
| 609 |
+
pygments/styles/__pycache__/lilypond.cpython-312.pyc,,
|
| 610 |
+
pygments/styles/__pycache__/lovelace.cpython-312.pyc,,
|
| 611 |
+
pygments/styles/__pycache__/manni.cpython-312.pyc,,
|
| 612 |
+
pygments/styles/__pycache__/material.cpython-312.pyc,,
|
| 613 |
+
pygments/styles/__pycache__/monokai.cpython-312.pyc,,
|
| 614 |
+
pygments/styles/__pycache__/murphy.cpython-312.pyc,,
|
| 615 |
+
pygments/styles/__pycache__/native.cpython-312.pyc,,
|
| 616 |
+
pygments/styles/__pycache__/nord.cpython-312.pyc,,
|
| 617 |
+
pygments/styles/__pycache__/onedark.cpython-312.pyc,,
|
| 618 |
+
pygments/styles/__pycache__/paraiso_dark.cpython-312.pyc,,
|
| 619 |
+
pygments/styles/__pycache__/paraiso_light.cpython-312.pyc,,
|
| 620 |
+
pygments/styles/__pycache__/pastie.cpython-312.pyc,,
|
| 621 |
+
pygments/styles/__pycache__/perldoc.cpython-312.pyc,,
|
| 622 |
+
pygments/styles/__pycache__/rainbow_dash.cpython-312.pyc,,
|
| 623 |
+
pygments/styles/__pycache__/rrt.cpython-312.pyc,,
|
| 624 |
+
pygments/styles/__pycache__/sas.cpython-312.pyc,,
|
| 625 |
+
pygments/styles/__pycache__/solarized.cpython-312.pyc,,
|
| 626 |
+
pygments/styles/__pycache__/staroffice.cpython-312.pyc,,
|
| 627 |
+
pygments/styles/__pycache__/stata_dark.cpython-312.pyc,,
|
| 628 |
+
pygments/styles/__pycache__/stata_light.cpython-312.pyc,,
|
| 629 |
+
pygments/styles/__pycache__/tango.cpython-312.pyc,,
|
| 630 |
+
pygments/styles/__pycache__/trac.cpython-312.pyc,,
|
| 631 |
+
pygments/styles/__pycache__/vim.cpython-312.pyc,,
|
| 632 |
+
pygments/styles/__pycache__/vs.cpython-312.pyc,,
|
| 633 |
+
pygments/styles/__pycache__/xcode.cpython-312.pyc,,
|
| 634 |
+
pygments/styles/__pycache__/zenburn.cpython-312.pyc,,
|
| 635 |
+
pygments/styles/_mapping.py,sha256=6lovFUE29tz6EsV3XYY4hgozJ7q1JL7cfO3UOlgnS8w,3312
|
| 636 |
+
pygments/styles/abap.py,sha256=64Uwr8uPdEdcT-tE-Y2VveTXfH3SkqH9qdMgY49YHQI,749
|
| 637 |
+
pygments/styles/algol.py,sha256=fCuk8ITTehvbJSufiaKlgnFsKbl-xFxxR82xhltc-cQ,2262
|
| 638 |
+
pygments/styles/algol_nu.py,sha256=Gv9WfHJvYegGcUk1zcufQgsdXPNjCUNk8sAHyrSGGh4,2283
|
| 639 |
+
pygments/styles/arduino.py,sha256=NoUB8xk7M1HGPoLfuySOLU0sVwoTuLcZqllXl2EO_iE,4557
|
| 640 |
+
pygments/styles/autumn.py,sha256=fLLfjHXjxCl6crBAxEsBLH372ALMkFacA2bG6KFbJi4,2195
|
| 641 |
+
pygments/styles/borland.py,sha256=_0ySKp4KGCSgtYjPe8uzD6gQhlmAIR4T43i-FoRYNOM,1611
|
| 642 |
+
pygments/styles/bw.py,sha256=vhk8Xoj64fLPdA9IQU6mUVsYMel255jR-FDU7BjIHtI,1406
|
| 643 |
+
pygments/styles/coffee.py,sha256=NqLt-fc7LONma1BGggbceVRY9uDE70WBuZXqK4zwaco,2308
|
| 644 |
+
pygments/styles/colorful.py,sha256=mYcSbehtH7itH_QV9NqJp4Wna1X4lrwl2wkVXS2u-5A,2832
|
| 645 |
+
pygments/styles/default.py,sha256=RTgG2zKWWUxPTDCFxhTnyZI_WZBIVgu5XsUpNvFisCA,2588
|
| 646 |
+
pygments/styles/dracula.py,sha256=vRJmixBoSKV9o8NVQhXGViQqchhIYugfikLmvX0DoBw,2182
|
| 647 |
+
pygments/styles/emacs.py,sha256=TiOG9oc83qToMCRMnJrXtWYqnzAqYycRz_50OoCKtxc,2535
|
| 648 |
+
pygments/styles/friendly.py,sha256=oAi-l9anQTs9STDmUzXGDlOegatEOH4hpD0j6o6dZGM,2604
|
| 649 |
+
pygments/styles/friendly_grayscale.py,sha256=a7Cqkzt6-uTiXvj6GoYBXzRvX5_zviCjjRB04Kf_-Q0,2828
|
| 650 |
+
pygments/styles/fruity.py,sha256=GfSUTG0stlJr5Ow_saCaxbI2IB4-34Dp2TuRTpfUJBs,1324
|
| 651 |
+
pygments/styles/gh_dark.py,sha256=ruNX3d4rf22rx-8HnwvGbNbXRQpXCNcHU1HNq6N4uNg,3590
|
| 652 |
+
pygments/styles/gruvbox.py,sha256=KrFoHEoVnZW6XM9udyXncPomeGyZgIDsNWOH3kCrxFQ,3387
|
| 653 |
+
pygments/styles/igor.py,sha256=fYYPhM0dRCvcDTMVrMVO5oFKnYm-8YVlsuVBoczFLtY,737
|
| 654 |
+
pygments/styles/inkpot.py,sha256=jggSeX9NV15eOL2oJaVmZ6vmV7LWRzXJQRUqcWEqGRs,2404
|
| 655 |
+
pygments/styles/lightbulb.py,sha256=Y8u1qdvlHfBqI2jJex55SkvVatVo_FjEUzE6h-X7m-0,3172
|
| 656 |
+
pygments/styles/lilypond.py,sha256=Y6fp_sEL-zESmxAaMxzjtrKk90cuDC_DalNdC8wj0nw,2066
|
| 657 |
+
pygments/styles/lovelace.py,sha256=cA9uhmbnzY04MccsiYSgMY7fvb4WMRbegWBUrGvXh1M,3178
|
| 658 |
+
pygments/styles/manni.py,sha256=g9FyO7plTwfMm2cU4iiKgdlkMlvQLG6l2Lwkgz5ITS4,2443
|
| 659 |
+
pygments/styles/material.py,sha256=LDmgomAbgtJDZhbv446_zIwgYh50UAqEEtgYNUns1rQ,4201
|
| 660 |
+
pygments/styles/monokai.py,sha256=lrxTJpkBarV9gTLkBQryZ6oNSjekAVheJueKJP5iEYA,5184
|
| 661 |
+
pygments/styles/murphy.py,sha256=-AKZiLkpiWej-otjHMsYCE-I-_IzCOLJY-_GBdKRZRw,2805
|
| 662 |
+
pygments/styles/native.py,sha256=l6tezGSQTB8p_SyOXJ0PWI7KzCeEdtsPmVc4Yn4_CwU,2043
|
| 663 |
+
pygments/styles/nord.py,sha256=GDt3WAaqaWsiCeqpIBPxd8TEUX708fGfwaA7S0w0oy0,5391
|
| 664 |
+
pygments/styles/onedark.py,sha256=k80cZEppCEF-HLoxy_FEA0QmQDZze68nHVMNGyUVa28,1719
|
| 665 |
+
pygments/styles/paraiso_dark.py,sha256=Jkrg4nUKIVNF8U4fPNV_Smq_g9NFbb9eiUrjYpVgQZg,5662
|
| 666 |
+
pygments/styles/paraiso_light.py,sha256=MxN964ZEpze3wF0ss-igaa2I7E684MHe-Zq0rWPH3wo,5668
|
| 667 |
+
pygments/styles/pastie.py,sha256=ZvAs9UpBNYFC-5PFrCRGYnm3FoPKb-eKR-ozbWZP-4g,2525
|
| 668 |
+
pygments/styles/perldoc.py,sha256=HSxB93e4UpQkZspReQ34FeJbZ-59ksGvdaH-hToehi8,2230
|
| 669 |
+
pygments/styles/rainbow_dash.py,sha256=4ugL18Or7aNtaLfPfCLFRiFy0Gu2RA4a9G2LQUE9SrM,2390
|
| 670 |
+
pygments/styles/rrt.py,sha256=fgzfpC0PC_SCcLOMCNEIQTjPUMOncRe7SR10GfSRbXY,1006
|
| 671 |
+
pygments/styles/sas.py,sha256=yzoXmbfQ2ND1WWq93b4vVGYkQSZHPqb4ymes9YYRT3w,1440
|
| 672 |
+
pygments/styles/solarized.py,sha256=qupILFZn02WspnAF5SPYb-W8guo9xnUtjb1HeLw3XgE,4247
|
| 673 |
+
pygments/styles/staroffice.py,sha256=CLbBeMoxay21Xyu3Af2p4xUXyG1_6ydCbvs5RJKYe5w,831
|
| 674 |
+
pygments/styles/stata_dark.py,sha256=vX8SwHV__sG92F4CKribG08MJfSVq98dgs7gEA_n9yc,1257
|
| 675 |
+
pygments/styles/stata_light.py,sha256=uV3GE-ylvffQ0yN3py1YAVqBB5wflIKZbceyK1Lqvrc,1289
|
| 676 |
+
pygments/styles/tango.py,sha256=O2wcM4hHuU1Yt071M9CK7JPtiiSCqyxtT9tbiQICV28,7137
|
| 677 |
+
pygments/styles/trac.py,sha256=9kMv1ZZyMKACWlx2fQVjRP0I2pgcRYCNrd7iGGZg9qk,1981
|
| 678 |
+
pygments/styles/vim.py,sha256=J7_TqvrGkTX_XuTHW0In5wqPLAUPRWyr1122XueZWmM,2019
|
| 679 |
+
pygments/styles/vs.py,sha256=s7YnzbIPuFU3LIke27mc4lAQSn2R3vbbHc1baMGSU_U,1130
|
| 680 |
+
pygments/styles/xcode.py,sha256=PbQdzgGaA4a9LAU1i58alY9kM4IFlQX5jHQwOYmf_Rk,1504
|
| 681 |
+
pygments/styles/zenburn.py,sha256=suZEKzBTCYdhf2cxNwcY7UATJK1tq5eYhGdBcXdf6MU,2203
|
| 682 |
+
pygments/token.py,sha256=WbdWGhYm_Vosb0DDxW9lHNPgITXfWTsQmHt6cy9RbcM,6226
|
| 683 |
+
pygments/unistring.py,sha256=al-_rBemRuGvinsrM6atNsHTmJ6DUbw24q2O2Ru1cBc,63208
|
| 684 |
+
pygments/util.py,sha256=oRtSpiAo5jM9ulntkvVbgXUdiAW57jnuYGB7t9fYuhc,10031
|
source/pygments-2.19.2.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: hatchling 1.27.0
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
source/pygments-2.19.2.dist-info/entry_points.txt
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[console_scripts]
|
| 2 |
+
pygmentize = pygments.cmdline:main
|
source/pygments-2.19.2.dist-info/licenses/AUTHORS
ADDED
|
@@ -0,0 +1,291 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Pygments is written and maintained by Georg Brandl <georg@python.org>.
|
| 2 |
+
|
| 3 |
+
Major developers are Tim Hatch <tim@timhatch.com> and Armin Ronacher
|
| 4 |
+
<armin.ronacher@active-4.com>.
|
| 5 |
+
|
| 6 |
+
Other contributors, listed alphabetically, are:
|
| 7 |
+
|
| 8 |
+
* Sam Aaron -- Ioke lexer
|
| 9 |
+
* Jean Abou Samra -- LilyPond lexer
|
| 10 |
+
* João Abecasis -- JSLT lexer
|
| 11 |
+
* Ali Afshar -- image formatter
|
| 12 |
+
* Thomas Aglassinger -- Easytrieve, JCL, Rexx, Transact-SQL and VBScript
|
| 13 |
+
lexers
|
| 14 |
+
* Maxence Ahlouche -- PostgreSQL Explain lexer
|
| 15 |
+
* Muthiah Annamalai -- Ezhil lexer
|
| 16 |
+
* Nikolay Antipov -- OpenSCAD lexer
|
| 17 |
+
* Kumar Appaiah -- Debian control lexer
|
| 18 |
+
* Andreas Amann -- AppleScript lexer
|
| 19 |
+
* Timothy Armstrong -- Dart lexer fixes
|
| 20 |
+
* Jeffrey Arnold -- R/S, Rd, BUGS, Jags, and Stan lexers
|
| 21 |
+
* Eiríkr Åsheim -- Uxntal lexer
|
| 22 |
+
* Jeremy Ashkenas -- CoffeeScript lexer
|
| 23 |
+
* José Joaquín Atria -- Praat lexer
|
| 24 |
+
* Stefan Matthias Aust -- Smalltalk lexer
|
| 25 |
+
* Lucas Bajolet -- Nit lexer
|
| 26 |
+
* Ben Bangert -- Mako lexers
|
| 27 |
+
* Max Battcher -- Darcs patch lexer
|
| 28 |
+
* Thomas Baruchel -- APL lexer
|
| 29 |
+
* Tim Baumann -- (Literate) Agda lexer
|
| 30 |
+
* Paul Baumgart, 280 North, Inc. -- Objective-J lexer
|
| 31 |
+
* Michael Bayer -- Myghty lexers
|
| 32 |
+
* Thomas Beale -- Archetype lexers
|
| 33 |
+
* John Benediktsson -- Factor lexer
|
| 34 |
+
* David Benjamin, Google LLC -- TLS lexer
|
| 35 |
+
* Trevor Bergeron -- mIRC formatter
|
| 36 |
+
* Vincent Bernat -- LessCSS lexer
|
| 37 |
+
* Christopher Bertels -- Fancy lexer
|
| 38 |
+
* Sébastien Bigaret -- QVT Operational lexer
|
| 39 |
+
* Jarrett Billingsley -- MiniD lexer
|
| 40 |
+
* Adam Blinkinsop -- Haskell, Redcode lexers
|
| 41 |
+
* Stéphane Blondon -- Procfile, SGF and Sieve lexers
|
| 42 |
+
* Frits van Bommel -- assembler lexers
|
| 43 |
+
* Pierre Bourdon -- bugfixes
|
| 44 |
+
* Martijn Braam -- Kernel log lexer, BARE lexer
|
| 45 |
+
* JD Browne, Google LLC -- GoogleSQL lexer
|
| 46 |
+
* Matthias Bussonnier -- ANSI style handling for terminal-256 formatter
|
| 47 |
+
* chebee7i -- Python traceback lexer improvements
|
| 48 |
+
* Hiram Chirino -- Scaml and Jade lexers
|
| 49 |
+
* Mauricio Caceres -- SAS and Stata lexers.
|
| 50 |
+
* Michael Camilleri, John Gabriele, sogaiu -- Janet lexer
|
| 51 |
+
* Daren Chandisingh -- Gleam lexer
|
| 52 |
+
* Ian Cooper -- VGL lexer
|
| 53 |
+
* David Corbett -- Inform, Jasmin, JSGF, Snowball, and TADS 3 lexers
|
| 54 |
+
* Leaf Corcoran -- MoonScript lexer
|
| 55 |
+
* Fraser Cormack -- TableGen lexer
|
| 56 |
+
* Gabriel Corona -- ASN.1 lexer
|
| 57 |
+
* Christopher Creutzig -- MuPAD lexer
|
| 58 |
+
* Daniël W. Crompton -- Pike lexer
|
| 59 |
+
* Pete Curry -- bugfixes
|
| 60 |
+
* Bryan Davis -- EBNF lexer
|
| 61 |
+
* Bruno Deferrari -- Shen lexer
|
| 62 |
+
* Walter Dörwald -- UL4 lexer
|
| 63 |
+
* Luke Drummond -- Meson lexer
|
| 64 |
+
* Giedrius Dubinskas -- HTML formatter improvements
|
| 65 |
+
* Owen Durni -- Haxe lexer
|
| 66 |
+
* Alexander Dutton, Oxford University Computing Services -- SPARQL lexer
|
| 67 |
+
* James Edwards -- Terraform lexer
|
| 68 |
+
* Nick Efford -- Python 3 lexer
|
| 69 |
+
* Sven Efftinge -- Xtend lexer
|
| 70 |
+
* Artem Egorkine -- terminal256 formatter
|
| 71 |
+
* Matthew Fernandez -- CAmkES lexer
|
| 72 |
+
* Paweł Fertyk -- GDScript lexer, HTML formatter improvements
|
| 73 |
+
* Michael Ficarra -- CPSA lexer
|
| 74 |
+
* James H. Fisher -- PostScript lexer
|
| 75 |
+
* Amanda Fitch, Google LLC -- GoogleSQL lexer
|
| 76 |
+
* William S. Fulton -- SWIG lexer
|
| 77 |
+
* Carlos Galdino -- Elixir and Elixir Console lexers
|
| 78 |
+
* Michael Galloy -- IDL lexer
|
| 79 |
+
* Naveen Garg -- Autohotkey lexer
|
| 80 |
+
* Simon Garnotel -- FreeFem++ lexer
|
| 81 |
+
* Laurent Gautier -- R/S lexer
|
| 82 |
+
* Alex Gaynor -- PyPy log lexer
|
| 83 |
+
* Richard Gerkin -- Igor Pro lexer
|
| 84 |
+
* Alain Gilbert -- TypeScript lexer
|
| 85 |
+
* Alex Gilding -- BlitzBasic lexer
|
| 86 |
+
* GitHub, Inc -- DASM16, Augeas, TOML, and Slash lexers
|
| 87 |
+
* Bertrand Goetzmann -- Groovy lexer
|
| 88 |
+
* Krzysiek Goj -- Scala lexer
|
| 89 |
+
* Rostyslav Golda -- FloScript lexer
|
| 90 |
+
* Andrey Golovizin -- BibTeX lexers
|
| 91 |
+
* Matt Good -- Genshi, Cheetah lexers
|
| 92 |
+
* Michał Górny -- vim modeline support
|
| 93 |
+
* Alex Gosse -- TrafficScript lexer
|
| 94 |
+
* Patrick Gotthardt -- PHP namespaces support
|
| 95 |
+
* Hubert Gruniaux -- C and C++ lexer improvements
|
| 96 |
+
* Olivier Guibe -- Asymptote lexer
|
| 97 |
+
* Phil Hagelberg -- Fennel lexer
|
| 98 |
+
* Florian Hahn -- Boogie lexer
|
| 99 |
+
* Martin Harriman -- SNOBOL lexer
|
| 100 |
+
* Matthew Harrison -- SVG formatter
|
| 101 |
+
* Steven Hazel -- Tcl lexer
|
| 102 |
+
* Dan Michael Heggø -- Turtle lexer
|
| 103 |
+
* Aslak Hellesøy -- Gherkin lexer
|
| 104 |
+
* Greg Hendershott -- Racket lexer
|
| 105 |
+
* Justin Hendrick -- ParaSail lexer
|
| 106 |
+
* Jordi Gutiérrez Hermoso -- Octave lexer
|
| 107 |
+
* David Hess, Fish Software, Inc. -- Objective-J lexer
|
| 108 |
+
* Ken Hilton -- Typographic Number Theory and Arrow lexers
|
| 109 |
+
* Varun Hiremath -- Debian control lexer
|
| 110 |
+
* Rob Hoelz -- Perl 6 lexer
|
| 111 |
+
* Doug Hogan -- Mscgen lexer
|
| 112 |
+
* Ben Hollis -- Mason lexer
|
| 113 |
+
* Max Horn -- GAP lexer
|
| 114 |
+
* Fred Hornsey -- OMG IDL Lexer
|
| 115 |
+
* Alastair Houghton -- Lexer inheritance facility
|
| 116 |
+
* Tim Howard -- BlitzMax lexer
|
| 117 |
+
* Dustin Howett -- Logos lexer
|
| 118 |
+
* Ivan Inozemtsev -- Fantom lexer
|
| 119 |
+
* Hiroaki Itoh -- Shell console rewrite, Lexers for PowerShell session,
|
| 120 |
+
MSDOS session, BC, WDiff
|
| 121 |
+
* Brian R. Jackson -- Tea lexer
|
| 122 |
+
* Christian Jann -- ShellSession lexer
|
| 123 |
+
* Jonas Camillus Jeppesen -- Line numbers and line highlighting for
|
| 124 |
+
RTF-formatter
|
| 125 |
+
* Dennis Kaarsemaker -- sources.list lexer
|
| 126 |
+
* Dmitri Kabak -- Inferno Limbo lexer
|
| 127 |
+
* Igor Kalnitsky -- vhdl lexer
|
| 128 |
+
* Colin Kennedy - USD lexer
|
| 129 |
+
* Alexander Kit -- MaskJS lexer
|
| 130 |
+
* Pekka Klärck -- Robot Framework lexer
|
| 131 |
+
* Gerwin Klein -- Isabelle lexer
|
| 132 |
+
* Eric Knibbe -- Lasso lexer
|
| 133 |
+
* Stepan Koltsov -- Clay lexer
|
| 134 |
+
* Oliver Kopp - Friendly grayscale style
|
| 135 |
+
* Adam Koprowski -- Opa lexer
|
| 136 |
+
* Benjamin Kowarsch -- Modula-2 lexer
|
| 137 |
+
* Domen Kožar -- Nix lexer
|
| 138 |
+
* Oleh Krekel -- Emacs Lisp lexer
|
| 139 |
+
* Alexander Kriegisch -- Kconfig and AspectJ lexers
|
| 140 |
+
* Marek Kubica -- Scheme lexer
|
| 141 |
+
* Jochen Kupperschmidt -- Markdown processor
|
| 142 |
+
* Gerd Kurzbach -- Modelica lexer
|
| 143 |
+
* Jon Larimer, Google Inc. -- Smali lexer
|
| 144 |
+
* Olov Lassus -- Dart lexer
|
| 145 |
+
* Matt Layman -- TAP lexer
|
| 146 |
+
* Dan Lazin, Google LLC -- GoogleSQL lexer
|
| 147 |
+
* Kristian Lyngstøl -- Varnish lexers
|
| 148 |
+
* Sylvestre Ledru -- Scilab lexer
|
| 149 |
+
* Chee Sing Lee -- Flatline lexer
|
| 150 |
+
* Mark Lee -- Vala lexer
|
| 151 |
+
* Thomas Linder Puls -- Visual Prolog lexer
|
| 152 |
+
* Pete Lomax -- Phix lexer
|
| 153 |
+
* Valentin Lorentz -- C++ lexer improvements
|
| 154 |
+
* Ben Mabey -- Gherkin lexer
|
| 155 |
+
* Angus MacArthur -- QML lexer
|
| 156 |
+
* Louis Mandel -- X10 lexer
|
| 157 |
+
* Louis Marchand -- Eiffel lexer
|
| 158 |
+
* Simone Margaritelli -- Hybris lexer
|
| 159 |
+
* Tim Martin - World of Warcraft TOC lexer
|
| 160 |
+
* Kirk McDonald -- D lexer
|
| 161 |
+
* Gordon McGregor -- SystemVerilog lexer
|
| 162 |
+
* Stephen McKamey -- Duel/JBST lexer
|
| 163 |
+
* Brian McKenna -- F# lexer
|
| 164 |
+
* Charles McLaughlin -- Puppet lexer
|
| 165 |
+
* Kurt McKee -- Tera Term macro lexer, PostgreSQL updates, MySQL overhaul, JSON lexer
|
| 166 |
+
* Joe Eli McIlvain -- Savi lexer
|
| 167 |
+
* Lukas Meuser -- BBCode formatter, Lua lexer
|
| 168 |
+
* Cat Miller -- Pig lexer
|
| 169 |
+
* Paul Miller -- LiveScript lexer
|
| 170 |
+
* Hong Minhee -- HTTP lexer
|
| 171 |
+
* Michael Mior -- Awk lexer
|
| 172 |
+
* Bruce Mitchener -- Dylan lexer rewrite
|
| 173 |
+
* Reuben Morais -- SourcePawn lexer
|
| 174 |
+
* Jon Morton -- Rust lexer
|
| 175 |
+
* Paulo Moura -- Logtalk lexer
|
| 176 |
+
* Mher Movsisyan -- DTD lexer
|
| 177 |
+
* Dejan Muhamedagic -- Crmsh lexer
|
| 178 |
+
* Adrien Nayrat -- PostgreSQL Explain lexer
|
| 179 |
+
* Ana Nelson -- Ragel, ANTLR, R console lexers
|
| 180 |
+
* David Neto, Google LLC -- WebGPU Shading Language lexer
|
| 181 |
+
* Kurt Neufeld -- Markdown lexer
|
| 182 |
+
* Nam T. Nguyen -- Monokai style
|
| 183 |
+
* Jesper Noehr -- HTML formatter "anchorlinenos"
|
| 184 |
+
* Mike Nolta -- Julia lexer
|
| 185 |
+
* Avery Nortonsmith -- Pointless lexer
|
| 186 |
+
* Jonas Obrist -- BBCode lexer
|
| 187 |
+
* Edward O'Callaghan -- Cryptol lexer
|
| 188 |
+
* David Oliva -- Rebol lexer
|
| 189 |
+
* Pat Pannuto -- nesC lexer
|
| 190 |
+
* Jon Parise -- Protocol buffers and Thrift lexers
|
| 191 |
+
* Benjamin Peterson -- Test suite refactoring
|
| 192 |
+
* Ronny Pfannschmidt -- BBCode lexer
|
| 193 |
+
* Dominik Picheta -- Nimrod lexer
|
| 194 |
+
* Andrew Pinkham -- RTF Formatter Refactoring
|
| 195 |
+
* Clément Prévost -- UrbiScript lexer
|
| 196 |
+
* Tanner Prynn -- cmdline -x option and loading lexers from files
|
| 197 |
+
* Oleh Prypin -- Crystal lexer (based on Ruby lexer)
|
| 198 |
+
* Nick Psaris -- K and Q lexers
|
| 199 |
+
* Xidorn Quan -- Web IDL lexer
|
| 200 |
+
* Elias Rabel -- Fortran fixed form lexer
|
| 201 |
+
* raichoo -- Idris lexer
|
| 202 |
+
* Daniel Ramirez -- GDScript lexer
|
| 203 |
+
* Kashif Rasul -- CUDA lexer
|
| 204 |
+
* Nathan Reed -- HLSL lexer
|
| 205 |
+
* Justin Reidy -- MXML lexer
|
| 206 |
+
* Jonathon Reinhart, Google LLC -- Soong lexer
|
| 207 |
+
* Norman Richards -- JSON lexer
|
| 208 |
+
* Corey Richardson -- Rust lexer updates
|
| 209 |
+
* Fabrizio Riguzzi -- cplint lexer
|
| 210 |
+
* Lubomir Rintel -- GoodData MAQL and CL lexers
|
| 211 |
+
* Andre Roberge -- Tango style
|
| 212 |
+
* Georg Rollinger -- HSAIL lexer
|
| 213 |
+
* Michiel Roos -- TypoScript lexer
|
| 214 |
+
* Konrad Rudolph -- LaTeX formatter enhancements
|
| 215 |
+
* Mario Ruggier -- Evoque lexers
|
| 216 |
+
* Miikka Salminen -- Lovelace style, Hexdump lexer, lexer enhancements
|
| 217 |
+
* Stou Sandalski -- NumPy, FORTRAN, tcsh and XSLT lexers
|
| 218 |
+
* Matteo Sasso -- Common Lisp lexer
|
| 219 |
+
* Joe Schafer -- Ada lexer
|
| 220 |
+
* Max Schillinger -- TiddlyWiki5 lexer
|
| 221 |
+
* Andrew Schmidt -- X++ lexer
|
| 222 |
+
* Ken Schutte -- Matlab lexers
|
| 223 |
+
* René Schwaiger -- Rainbow Dash style
|
| 224 |
+
* Sebastian Schweizer -- Whiley lexer
|
| 225 |
+
* Tassilo Schweyer -- Io, MOOCode lexers
|
| 226 |
+
* Pablo Seminario -- PromQL lexer
|
| 227 |
+
* Ted Shaw -- AutoIt lexer
|
| 228 |
+
* Joerg Sieker -- ABAP lexer
|
| 229 |
+
* Robert Simmons -- Standard ML lexer
|
| 230 |
+
* Kirill Simonov -- YAML lexer
|
| 231 |
+
* Corbin Simpson -- Monte lexer
|
| 232 |
+
* Ville Skyttä -- ASCII armored lexer
|
| 233 |
+
* Alexander Smishlajev -- Visual FoxPro lexer
|
| 234 |
+
* Steve Spigarelli -- XQuery lexer
|
| 235 |
+
* Jerome St-Louis -- eC lexer
|
| 236 |
+
* Camil Staps -- Clean and NuSMV lexers; Solarized style
|
| 237 |
+
* James Strachan -- Kotlin lexer
|
| 238 |
+
* Tom Stuart -- Treetop lexer
|
| 239 |
+
* Colin Sullivan -- SuperCollider lexer
|
| 240 |
+
* Ben Swift -- Extempore lexer
|
| 241 |
+
* tatt61880 -- Kuin lexer
|
| 242 |
+
* Edoardo Tenani -- Arduino lexer
|
| 243 |
+
* Tiberius Teng -- default style overhaul
|
| 244 |
+
* Jeremy Thurgood -- Erlang, Squid config lexers
|
| 245 |
+
* Brian Tiffin -- OpenCOBOL lexer
|
| 246 |
+
* Bob Tolbert -- Hy lexer
|
| 247 |
+
* Doug Torrance -- Macaulay2 lexer
|
| 248 |
+
* Matthias Trute -- Forth lexer
|
| 249 |
+
* Tuoa Spi T4 -- Bdd lexer
|
| 250 |
+
* Erick Tryzelaar -- Felix lexer
|
| 251 |
+
* Alexander Udalov -- Kotlin lexer improvements
|
| 252 |
+
* Thomas Van Doren -- Chapel lexer
|
| 253 |
+
* Dave Van Ee -- Uxntal lexer updates
|
| 254 |
+
* Daniele Varrazzo -- PostgreSQL lexers
|
| 255 |
+
* Abe Voelker -- OpenEdge ABL lexer
|
| 256 |
+
* Pepijn de Vos -- HTML formatter CTags support
|
| 257 |
+
* Matthias Vallentin -- Bro lexer
|
| 258 |
+
* Benoît Vinot -- AMPL lexer
|
| 259 |
+
* Linh Vu Hong -- RSL lexer
|
| 260 |
+
* Taavi Väänänen -- Debian control lexer
|
| 261 |
+
* Immanuel Washington -- Smithy lexer
|
| 262 |
+
* Nathan Weizenbaum -- Haml and Sass lexers
|
| 263 |
+
* Nathan Whetsell -- Csound lexers
|
| 264 |
+
* Dietmar Winkler -- Modelica lexer
|
| 265 |
+
* Nils Winter -- Smalltalk lexer
|
| 266 |
+
* Davy Wybiral -- Clojure lexer
|
| 267 |
+
* Whitney Young -- ObjectiveC lexer
|
| 268 |
+
* Diego Zamboni -- CFengine3 lexer
|
| 269 |
+
* Enrique Zamudio -- Ceylon lexer
|
| 270 |
+
* Alex Zimin -- Nemerle lexer
|
| 271 |
+
* Rob Zimmerman -- Kal lexer
|
| 272 |
+
* Evgenii Zheltonozhskii -- Maple lexer
|
| 273 |
+
* Vincent Zurczak -- Roboconf lexer
|
| 274 |
+
* Hubert Gruniaux -- C and C++ lexer improvements
|
| 275 |
+
* Thomas Symalla -- AMDGPU Lexer
|
| 276 |
+
* 15b3 -- Image Formatter improvements
|
| 277 |
+
* Fabian Neumann -- CDDL lexer
|
| 278 |
+
* Thomas Duboucher -- CDDL lexer
|
| 279 |
+
* Philipp Imhof -- Pango Markup formatter
|
| 280 |
+
* Thomas Voss -- Sed lexer
|
| 281 |
+
* Martin Fischer -- WCAG contrast testing
|
| 282 |
+
* Marc Auberer -- Spice lexer
|
| 283 |
+
* Amr Hesham -- Carbon lexer
|
| 284 |
+
* diskdance -- Wikitext lexer
|
| 285 |
+
* vanillajonathan -- PRQL lexer
|
| 286 |
+
* Nikolay Antipov -- OpenSCAD lexer
|
| 287 |
+
* Markus Meyer, Nextron Systems -- YARA lexer
|
| 288 |
+
* Hannes Römer -- Mojo lexer
|
| 289 |
+
* Jan Frederik Schaefer -- PDDL lexer
|
| 290 |
+
|
| 291 |
+
Many thanks for all contributions!
|
source/pygments-2.19.2.dist-info/licenses/LICENSE
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) 2006-2022 by the respective authors (see AUTHORS file).
|
| 2 |
+
All rights reserved.
|
| 3 |
+
|
| 4 |
+
Redistribution and use in source and binary forms, with or without
|
| 5 |
+
modification, are permitted provided that the following conditions are
|
| 6 |
+
met:
|
| 7 |
+
|
| 8 |
+
* Redistributions of source code must retain the above copyright
|
| 9 |
+
notice, this list of conditions and the following disclaimer.
|
| 10 |
+
|
| 11 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 12 |
+
notice, this list of conditions and the following disclaimer in the
|
| 13 |
+
documentation and/or other materials provided with the distribution.
|
| 14 |
+
|
| 15 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 16 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 17 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 18 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 19 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 20 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 21 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 22 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 23 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 24 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 25 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
source/pygments/__init__.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Pygments
|
| 3 |
+
~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Pygments is a syntax highlighting package written in Python.
|
| 6 |
+
|
| 7 |
+
It is a generic syntax highlighter for general use in all kinds of software
|
| 8 |
+
such as forum systems, wikis or other applications that need to prettify
|
| 9 |
+
source code. Highlights are:
|
| 10 |
+
|
| 11 |
+
* a wide range of common languages and markup formats is supported
|
| 12 |
+
* special attention is paid to details, increasing quality by a fair amount
|
| 13 |
+
* support for new languages and formats is added easily
|
| 14 |
+
* a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
|
| 15 |
+
formats that PIL supports, and ANSI sequences
|
| 16 |
+
* it is usable as a command-line tool and as a library
|
| 17 |
+
* ... and it highlights even Brainfuck!
|
| 18 |
+
|
| 19 |
+
The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.
|
| 20 |
+
|
| 21 |
+
.. _Pygments master branch:
|
| 22 |
+
https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
|
| 23 |
+
|
| 24 |
+
:copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
|
| 25 |
+
:license: BSD, see LICENSE for details.
|
| 26 |
+
"""
|
| 27 |
+
from io import StringIO, BytesIO
|
| 28 |
+
|
| 29 |
+
__version__ = '2.19.2'
|
| 30 |
+
__docformat__ = 'restructuredtext'
|
| 31 |
+
|
| 32 |
+
__all__ = ['lex', 'format', 'highlight']
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def lex(code, lexer):
    """
    Lex `code` with the `lexer` (must be a `Lexer` instance)
    and return an iterable of tokens. Currently, this only calls
    `lexer.get_tokens()`.
    """
    try:
        return lexer.get_tokens(code)
    except TypeError:
        # A frequent user error is passing the lexer *class* instead of an
        # instance; detect that case and raise a clearer message.
        from pygments.lexer import RegexLexer
        got_a_class = isinstance(lexer, type) and issubclass(lexer, RegexLexer)
        if got_a_class:
            raise TypeError('lex() argument must be a lexer instance, '
                            'not a class')
        raise
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def format(tokens, formatter, outfile=None):  # pylint: disable=redefined-builtin
    """
    Format ``tokens`` (an iterable of tokens) with the formatter ``formatter``
    (a `Formatter` instance).

    If ``outfile`` is given and a valid file object (an object with a
    ``write`` method), the result will be written to it, otherwise it
    is returned as a string.

    Raises a descriptive ``TypeError`` when a formatter *class* (rather than
    an instance) is passed in.
    """
    try:
        if not outfile:
            # Formatters that declare an ``encoding`` write bytes, so they
            # need a binary buffer; all others write text.  Use an explicit
            # conditional expression instead of the fragile ``and/or`` idiom.
            realoutfile = BytesIO() if getattr(formatter, 'encoding', None) else StringIO()
            formatter.format(tokens, realoutfile)
            return realoutfile.getvalue()
        else:
            formatter.format(tokens, outfile)
    except TypeError:
        # Heuristic to catch a common mistake.
        from pygments.formatter import Formatter
        if isinstance(formatter, type) and issubclass(formatter, Formatter):
            raise TypeError('format() argument must be a formatter instance, '
                            'not a class')
        raise
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def highlight(code, lexer, formatter, outfile=None):
    """
    This is the most high-level highlighting function. It combines `lex` and
    `format` in one function.
    """
    token_stream = lex(code, lexer)
    return format(token_stream, formatter, outfile)
|
source/pygments/__main__.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.__main__
|
| 3 |
+
~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Main entry point for ``python -m pygments``.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import sys
|
| 12 |
+
import pygments.cmdline
|
| 13 |
+
|
| 14 |
+
# Run the command-line interface; a Ctrl-C becomes a clean nonzero exit.
try:
    status = pygments.cmdline.main(sys.argv)
except KeyboardInterrupt:
    status = 1
sys.exit(status)
|
source/pygments/cmdline.py
ADDED
|
@@ -0,0 +1,668 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.cmdline
|
| 3 |
+
~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Command line interface.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import os
|
| 12 |
+
import sys
|
| 13 |
+
import shutil
|
| 14 |
+
import argparse
|
| 15 |
+
from textwrap import dedent
|
| 16 |
+
|
| 17 |
+
from pygments import __version__, highlight
|
| 18 |
+
from pygments.util import ClassNotFound, OptionError, docstring_headline, \
|
| 19 |
+
guess_decode, guess_decode_from_terminal, terminal_encoding, \
|
| 20 |
+
UnclosingTextIOWrapper
|
| 21 |
+
from pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
|
| 22 |
+
load_lexer_from_file, get_lexer_for_filename, find_lexer_class_for_filename
|
| 23 |
+
from pygments.lexers.special import TextLexer
|
| 24 |
+
from pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter
|
| 25 |
+
from pygments.formatters import get_all_formatters, get_formatter_by_name, \
|
| 26 |
+
load_formatter_from_file, get_formatter_for_filename, find_formatter_class
|
| 27 |
+
from pygments.formatters.terminal import TerminalFormatter
|
| 28 |
+
from pygments.formatters.terminal256 import Terminal256Formatter, TerminalTrueColorFormatter
|
| 29 |
+
from pygments.filters import get_all_filters, find_filter_class
|
| 30 |
+
from pygments.styles import get_all_styles, get_style_by_name
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def _parse_options(o_strs):
|
| 34 |
+
opts = {}
|
| 35 |
+
if not o_strs:
|
| 36 |
+
return opts
|
| 37 |
+
for o_str in o_strs:
|
| 38 |
+
if not o_str.strip():
|
| 39 |
+
continue
|
| 40 |
+
o_args = o_str.split(',')
|
| 41 |
+
for o_arg in o_args:
|
| 42 |
+
o_arg = o_arg.strip()
|
| 43 |
+
try:
|
| 44 |
+
o_key, o_val = o_arg.split('=', 1)
|
| 45 |
+
o_key = o_key.strip()
|
| 46 |
+
o_val = o_val.strip()
|
| 47 |
+
except ValueError:
|
| 48 |
+
opts[o_arg] = True
|
| 49 |
+
else:
|
| 50 |
+
opts[o_key] = o_val
|
| 51 |
+
return opts
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def _parse_filters(f_strs):
    """Parse ``-F`` filter specs into ``(name, options_dict)`` tuples.

    A spec is either ``name`` or ``name:opt=val,...``; options after the
    colon are parsed with :func:`_parse_options`.
    """
    parsed = []
    for spec in (f_strs or []):
        fname, sep, fopts = spec.partition(':')
        parsed.append((fname, _parse_options([fopts]) if sep else {}))
    return parsed
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _print_help(what, name):
    """Print the docstring help for the named lexer, formatter or filter.

    Returns 0 on success; 1 (with a message on stderr) when the lookup
    fails.  *what* must be one of ``'lexer'``, ``'formatter'`` or
    ``'filter'``; any other value is silently accepted with exit code 0.
    """
    try:
        if what == 'lexer':
            cls = get_lexer_by_name(name)
            label = cls.name
        elif what == 'formatter':
            cls = find_formatter_class(name)
            label = cls.name
        elif what == 'filter':
            cls = find_filter_class(name)
            # filters are looked up by plain name, not a class attribute
            label = name
        else:
            return 0
        print(f"Help on the {label} {what}:")
        print(dedent(cls.__doc__))
        return 0
    except (AttributeError, ValueError):
        # ClassNotFound is a ValueError subclass; a missing class surfaces
        # as AttributeError when its attributes are accessed above.
        print(f"{what} not found!", file=sys.stderr)
        return 1
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def _print_list(what):
    """Print a human-readable listing of all known lexers, formatters,
    filters or styles (selected by *what*) to stdout."""
    if what == 'lexer':
        print()
        print("Lexers:")
        print("~~~~~~~")

        rows = []
        for fullname, aliases, exts, _ in get_all_lexers():
            rows.append((', '.join(aliases) + ':', fullname,
                         '(filenames ' + ', '.join(exts) + ')' if exts else ''))
        for alias_col, name_col, file_col in sorted(rows):
            print(f'* {alias_col}\n {name_col} {file_col}')

    elif what == 'formatter':
        print()
        print("Formatters:")
        print("~~~~~~~~~~~")

        rows = []
        for cls in get_all_formatters():
            rows.append((', '.join(cls.aliases) + ':', docstring_headline(cls),
                         '(filenames ' + ', '.join(cls.filenames) + ')'
                         if cls.filenames else ''))
        for alias_col, doc_col, file_col in sorted(rows):
            print(f'* {alias_col}\n {doc_col} {file_col}')

    elif what == 'filter':
        print()
        print("Filters:")
        print("~~~~~~~~")

        for name in get_all_filters():
            cls = find_filter_class(name)
            print(f'* {name}:')
            print(f" {docstring_headline(cls)}")

    elif what == 'style':
        print()
        print("Styles:")
        print("~~~~~~~")

        for name in get_all_styles():
            cls = get_style_by_name(name)
            print(f'* {name}:')
            print(f" {docstring_headline(cls)}")
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
def _print_list_as_json(requested_items):
    """Dump metadata for the requested component kinds to stdout as JSON.

    *requested_items* is an iterable of singular kind names (``'lexer'``,
    ``'formatter'``, ``'filter'``, ``'style'``); each selected kind adds a
    top-level (pluralized) key to the emitted object.
    """
    import json
    result = {}
    if 'lexer' in requested_items:
        result['lexers'] = {
            fullname: {
                'aliases': names,
                'filenames': filenames,
                'mimetypes': mimetypes,
            }
            for fullname, names, filenames, mimetypes in get_all_lexers()
        }

    if 'formatter' in requested_items:
        result['formatters'] = {
            cls.name: {
                'aliases': cls.aliases,
                'filenames': cls.filenames,
                'doc': docstring_headline(cls),
            }
            for cls in get_all_formatters()
        }

    if 'filter' in requested_items:
        result['filters'] = {
            name: {'doc': docstring_headline(find_filter_class(name))}
            for name in get_all_filters()
        }

    if 'style' in requested_items:
        result['styles'] = {
            name: {'doc': docstring_headline(get_style_by_name(name))}
            for name in get_all_styles()
        }

    json.dump(result, sys.stdout)
|
| 181 |
+
|
| 182 |
+
def main_inner(parser, argns):
    """Execute one ``pygmentize`` invocation described by parsed *argns*.

    *parser* is used only for printing help/usage messages.  Returns a
    process exit code: 0 on success, 1 on a runtime error, 2 on a usage
    error.  Special modes (-h, -V, -L, -H, -N, -C, -S) are handled first
    and return early; otherwise input is read, a lexer and formatter are
    selected, and highlighting is performed.
    """
    # -h/--help is a plain store_true flag (parser built with add_help=False),
    # so help is printed manually here.
    if argns.help:
        parser.print_help()
        return 0

    if argns.V:
        print(f'Pygments version {__version__}, (c) 2006-2024 by Georg Brandl, Matthäus '
              'Chajdas and contributors.')
        return 0

    def is_only_option(opt):
        # True when no option other than *opt* carries a truthy value.
        return not any(v for (k, v) in vars(argns).items() if k != opt)

    # handle ``pygmentize -L``
    if argns.L is not None:
        # -L tolerates only --json alongside it; any other set option
        # is a usage error.
        arg_set = set()
        for k, v in vars(argns).items():
            if v:
                arg_set.add(k)

        arg_set.discard('L')
        arg_set.discard('json')

        if arg_set:
            parser.print_help(sys.stderr)
            return 2

        # print version
        if not argns.json:
            main(['', '-V'])
        allowed_types = {'lexer', 'formatter', 'filter', 'style'}
        # accept plural forms like "styles" by stripping a trailing 's'
        largs = [arg.rstrip('s') for arg in argns.L]
        if any(arg not in allowed_types for arg in largs):
            parser.print_help(sys.stderr)
            return 0
        if not largs:
            largs = allowed_types
        if not argns.json:
            for arg in largs:
                _print_list(arg)
        else:
            _print_list_as_json(largs)
        return 0

    # handle ``pygmentize -H``
    if argns.H:
        if not is_only_option('H'):
            parser.print_help(sys.stderr)
            return 2
        what, name = argns.H
        if what not in ('lexer', 'formatter', 'filter'):
            parser.print_help(sys.stderr)
            return 2
        return _print_help(what, name)

    # parse -O options
    parsed_opts = _parse_options(argns.O or [])

    # parse -P options (single key=value pairs; bare keys become True)
    for p_opt in argns.P or []:
        try:
            name, value = p_opt.split('=', 1)
        except ValueError:
            parsed_opts[p_opt] = True
        else:
            parsed_opts[name] = value

    # encodings: specific in/out settings fall back to the shared 'encoding'
    inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding'))
    outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))

    # handle ``pygmentize -N``
    if argns.N:
        lexer = find_lexer_class_for_filename(argns.N)
        if lexer is None:
            lexer = TextLexer

        print(lexer.aliases[0])
        return 0

    # handle ``pygmentize -C``
    if argns.C:
        inp = sys.stdin.buffer.read()
        try:
            lexer = guess_lexer(inp, inencoding=inencoding)
        except ClassNotFound:
            lexer = TextLexer

        print(lexer.aliases[0])
        return 0

    # handle ``pygmentize -S``
    S_opt = argns.S
    a_opt = argns.a
    if S_opt is not None:
        # -S requires -f and forbids -l / an input file
        f_opt = argns.f
        if not f_opt:
            parser.print_help(sys.stderr)
            return 2
        if argns.l or argns.INPUTFILE:
            parser.print_help(sys.stderr)
            return 2

        try:
            parsed_opts['style'] = S_opt
            fmter = get_formatter_by_name(f_opt, **parsed_opts)
        except ClassNotFound as err:
            print(err, file=sys.stderr)
            return 1

        print(fmter.get_style_defs(a_opt or ''))
        return 0

    # if no -S is given, -a is not allowed
    if argns.a is not None:
        parser.print_help(sys.stderr)
        return 2

    # parse -F options
    F_opts = _parse_filters(argns.F or [])

    # -x: allow custom (eXternal) lexers and formatters
    allow_custom_lexer_formatter = bool(argns.x)

    # select lexer
    lexer = None

    # given by name?
    lexername = argns.l
    if lexername:
        # custom lexer, located relative to user's cwd
        if allow_custom_lexer_formatter and '.py' in lexername:
            try:
                filename = None
                name = None
                if ':' in lexername:
                    filename, name = lexername.rsplit(':', 1)

                    if '.py' in name:
                        # This can happen on Windows: If the lexername is
                        # C:\lexer.py -- return to normal load path in that case
                        name = None

                if filename and name:
                    lexer = load_lexer_from_file(filename, name,
                                                 **parsed_opts)
                else:
                    lexer = load_lexer_from_file(lexername, **parsed_opts)
            except ClassNotFound as err:
                print('Error:', err, file=sys.stderr)
                return 1
        else:
            try:
                lexer = get_lexer_by_name(lexername, **parsed_opts)
            except (OptionError, ClassNotFound) as err:
                print('Error:', err, file=sys.stderr)
                return 1

    # read input code
    code = None

    if argns.INPUTFILE:
        if argns.s:
            print('Error: -s option not usable when input file specified',
                  file=sys.stderr)
            return 2

        infn = argns.INPUTFILE
        try:
            with open(infn, 'rb') as infp:
                code = infp.read()
        except Exception as err:
            print('Error: cannot read infile:', err, file=sys.stderr)
            return 1
        if not inencoding:
            code, inencoding = guess_decode(code)

        # do we have to guess the lexer?
        if not lexer:
            try:
                lexer = get_lexer_for_filename(infn, code, **parsed_opts)
            except ClassNotFound as err:
                if argns.g:
                    try:
                        lexer = guess_lexer(code, **parsed_opts)
                    except ClassNotFound:
                        lexer = TextLexer(**parsed_opts)
                else:
                    print('Error:', err, file=sys.stderr)
                    return 1
            except OptionError as err:
                print('Error:', err, file=sys.stderr)
                return 1

    elif not argns.s:  # treat stdin as full file (-s support is later)
        # read code from terminal, always in binary mode since we want to
        # decode ourselves and be tolerant with it
        code = sys.stdin.buffer.read()  # use .buffer to get a binary stream
        if not inencoding:
            code, inencoding = guess_decode_from_terminal(code, sys.stdin)
            # else the lexer will do the decoding
        if not lexer:
            try:
                lexer = guess_lexer(code, **parsed_opts)
            except ClassNotFound:
                lexer = TextLexer(**parsed_opts)

    else:  # -s option needs a lexer with -l
        if not lexer:
            print('Error: when using -s a lexer has to be selected with -l',
                  file=sys.stderr)
            return 2

    # process filters
    for fname, fopts in F_opts:
        try:
            lexer.add_filter(fname, **fopts)
        except ClassNotFound as err:
            print('Error:', err, file=sys.stderr)
            return 1

    # select formatter
    outfn = argns.o
    fmter = argns.f
    if fmter:
        # custom formatter, located relative to user's cwd
        if allow_custom_lexer_formatter and '.py' in fmter:
            try:
                filename = None
                name = None
                if ':' in fmter:
                    # Same logic as above for custom lexer
                    filename, name = fmter.rsplit(':', 1)

                    if '.py' in name:
                        name = None

                if filename and name:
                    fmter = load_formatter_from_file(filename, name,
                                                    **parsed_opts)
                else:
                    fmter = load_formatter_from_file(fmter, **parsed_opts)
            except ClassNotFound as err:
                print('Error:', err, file=sys.stderr)
                return 1
        else:
            try:
                fmter = get_formatter_by_name(fmter, **parsed_opts)
            except (OptionError, ClassNotFound) as err:
                print('Error:', err, file=sys.stderr)
                return 1

    if outfn:
        # no explicit -f: infer the formatter from the output filename
        if not fmter:
            try:
                fmter = get_formatter_for_filename(outfn, **parsed_opts)
            except (OptionError, ClassNotFound) as err:
                print('Error:', err, file=sys.stderr)
                return 1
        try:
            outfile = open(outfn, 'wb')
        except Exception as err:
            print('Error: cannot open outfile:', err, file=sys.stderr)
            return 1
    else:
        # writing to stdout: pick a terminal formatter based on the
        # COLORTERM/TERM environment when none was requested
        if not fmter:
            if os.environ.get('COLORTERM','') in ('truecolor', '24bit'):
                fmter = TerminalTrueColorFormatter(**parsed_opts)
            elif '256' in os.environ.get('TERM', ''):
                fmter = Terminal256Formatter(**parsed_opts)
            else:
                fmter = TerminalFormatter(**parsed_opts)
        outfile = sys.stdout.buffer

    # determine output encoding if not explicitly selected
    if not outencoding:
        if outfn:
            # output file? use lexer encoding for now (can still be None)
            fmter.encoding = inencoding
        else:
            # else use terminal encoding
            fmter.encoding = terminal_encoding(sys.stdout)

    # provide coloring under Windows, if possible
    if not outfn and sys.platform in ('win32', 'cygwin') and \
       fmter.name in ('Terminal', 'Terminal256'):  # pragma: no cover
        # unfortunately colorama doesn't support binary streams on Py3
        outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding)
        fmter.encoding = None
        try:
            import colorama.initialise
        except ImportError:
            pass
        else:
            outfile = colorama.initialise.wrap_stream(
                outfile, convert=None, strip=None, autoreset=False, wrap=True)

    # When using the LaTeX formatter and the option `escapeinside` is
    # specified, we need a special lexer which collects escaped text
    # before running the chosen language lexer.
    escapeinside = parsed_opts.get('escapeinside', '')
    if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter):
        left = escapeinside[0]
        right = escapeinside[1]
        lexer = LatexEmbeddedLexer(left, right, lexer)

    # ... and do it!
    if not argns.s:
        # process whole input as per normal...
        try:
            highlight(code, lexer, fmter, outfile)
        finally:
            if outfn:
                outfile.close()
        return 0
    else:
        # line by line processing of stdin (eg: for 'tail -f')...
        try:
            while 1:
                line = sys.stdin.buffer.readline()
                if not line:
                    break
                if not inencoding:
                    line = guess_decode_from_terminal(line, sys.stdin)[0]
                highlight(line, lexer, fmter, outfile)
                if hasattr(outfile, 'flush'):
                    outfile.flush()
            return 0
        except KeyboardInterrupt:  # pragma: no cover
            return 0
        finally:
            if outfn:
                outfile.close()
|
| 515 |
+
|
| 516 |
+
|
| 517 |
+
class HelpFormatter(argparse.HelpFormatter):
    """Argparse help formatter that sizes itself to the terminal.

    When no explicit *width* is given, the current terminal width minus
    two columns is used; failures to query the terminal fall back to the
    argparse default.
    """

    def __init__(self, prog, indent_increment=2, max_help_position=16, width=None):
        if width is None:
            try:
                width = shutil.get_terminal_size().columns - 2
            except Exception:
                # leave width as None; the base class applies its default
                pass
        super().__init__(prog, indent_increment, max_help_position, width)
|
| 526 |
+
|
| 527 |
+
|
| 528 |
+
def main(args=sys.argv):
    """
    Main command line entry point.

    *args* is the full argv vector (the first element is skipped as the
    program name).  Builds the ``pygmentize`` argument parser, delegates
    to :func:`main_inner`, and converts unhandled exceptions into a short
    stderr report (or a full traceback when -v was given).  Returns the
    process exit code.
    """
    desc = "Highlight an input file and write the result to an output file."
    # add_help=False: -h/--help is registered manually below so it can be
    # grouped with the other special modes.
    parser = argparse.ArgumentParser(description=desc, add_help=False,
                                     formatter_class=HelpFormatter)

    operation = parser.add_argument_group('Main operation')
    # -l and -g are mutually exclusive ways of choosing a lexer
    lexersel = operation.add_mutually_exclusive_group()
    lexersel.add_argument(
        '-l', metavar='LEXER',
        help='Specify the lexer to use. (Query names with -L.) If not '
        'given and -g is not present, the lexer is guessed from the filename.')
    lexersel.add_argument(
        '-g', action='store_true',
        help='Guess the lexer from the file contents, or pass through '
        'as plain text if nothing can be guessed.')
    operation.add_argument(
        '-F', metavar='FILTER[:options]', action='append',
        help='Add a filter to the token stream. (Query names with -L.) '
        'Filter options are given after a colon if necessary.')
    operation.add_argument(
        '-f', metavar='FORMATTER',
        help='Specify the formatter to use. (Query names with -L.) '
        'If not given, the formatter is guessed from the output filename, '
        'and defaults to the terminal formatter if the output is to the '
        'terminal or an unknown file extension.')
    operation.add_argument(
        '-O', metavar='OPTION=value[,OPTION=value,...]', action='append',
        help='Give options to the lexer and formatter as a comma-separated '
        'list of key-value pairs. '
        'Example: `-O bg=light,python=cool`.')
    operation.add_argument(
        '-P', metavar='OPTION=value', action='append',
        help='Give a single option to the lexer and formatter - with this '
        'you can pass options whose value contains commas and equal signs. '
        'Example: `-P "heading=Pygments, the Python highlighter"`.')
    operation.add_argument(
        '-o', metavar='OUTPUTFILE',
        help='Where to write the output. Defaults to standard output.')

    operation.add_argument(
        'INPUTFILE', nargs='?',
        help='Where to read the input. Defaults to standard input.')

    flags = parser.add_argument_group('Operation flags')
    flags.add_argument(
        '-v', action='store_true',
        help='Print a detailed traceback on unhandled exceptions, which '
        'is useful for debugging and bug reports.')
    flags.add_argument(
        '-s', action='store_true',
        help='Process lines one at a time until EOF, rather than waiting to '
        'process the entire file. This only works for stdin, only for lexers '
        'with no line-spanning constructs, and is intended for streaming '
        'input such as you get from `tail -f`. '
        'Example usage: `tail -f sql.log | pygmentize -s -l sql`.')
    flags.add_argument(
        '-x', action='store_true',
        help='Allow custom lexers and formatters to be loaded from a .py file '
        'relative to the current working directory. For example, '
        '`-l ./customlexer.py -x`. By default, this option expects a file '
        'with a class named CustomLexer or CustomFormatter; you can also '
        'specify your own class name with a colon (`-l ./lexer.py:MyLexer`). '
        'Users should be very careful not to use this option with untrusted '
        'files, because it will import and run them.')
    flags.add_argument('--json', help='Output as JSON. This can '
                       'be only used in conjunction with -L.',
                       default=False,
                       action='store_true')

    special_modes_group = parser.add_argument_group(
        'Special modes - do not do any highlighting')
    special_modes = special_modes_group.add_mutually_exclusive_group()
    special_modes.add_argument(
        '-S', metavar='STYLE -f formatter',
        help='Print style definitions for STYLE for a formatter '
        'given with -f. The argument given by -a is formatter '
        'dependent.')
    special_modes.add_argument(
        '-L', nargs='*', metavar='WHAT',
        help='List lexers, formatters, styles or filters -- '
        'give additional arguments for the thing(s) you want to list '
        '(e.g. "styles"), or omit them to list everything.')
    special_modes.add_argument(
        '-N', metavar='FILENAME',
        help='Guess and print out a lexer name based solely on the given '
        'filename. Does not take input or highlight anything. If no specific '
        'lexer can be determined, "text" is printed.')
    special_modes.add_argument(
        '-C', action='store_true',
        help='Like -N, but print out a lexer name based solely on '
        'a given content from standard input.')
    special_modes.add_argument(
        '-H', action='store', nargs=2, metavar=('NAME', 'TYPE'),
        help='Print detailed help for the object <name> of type <type>, '
        'where <type> is one of "lexer", "formatter" or "filter".')
    special_modes.add_argument(
        '-V', action='store_true',
        help='Print the package version.')
    special_modes.add_argument(
        '-h', '--help', action='store_true',
        help='Print this help.')
    # -a belongs to the special-modes group but is NOT mutually exclusive:
    # it accompanies -S.
    special_modes_group.add_argument(
        '-a', metavar='ARG',
        help='Formatter-specific additional argument for the -S (print '
        'style sheet) mode.')

    argns = parser.parse_args(args[1:])

    try:
        return main_inner(parser, argns)
    except BrokenPipeError:
        # someone closed our stdout, e.g. by quitting a pager.
        return 0
    except Exception:
        if argns.v:
            print(file=sys.stderr)
            print('*' * 65, file=sys.stderr)
            print('An unhandled exception occurred while highlighting.',
                  file=sys.stderr)
            print('Please report the whole traceback to the issue tracker at',
                  file=sys.stderr)
            print('<https://github.com/pygments/pygments/issues>.',
                  file=sys.stderr)
            print('*' * 65, file=sys.stderr)
            print(file=sys.stderr)
            raise
        import traceback
        info = traceback.format_exception(*sys.exc_info())
        msg = info[-1].strip()
        if len(info) >= 3:
            # extract relevant file and position info
            msg += '\n (f{})'.format(info[-2].split('\n')[0].strip()[1:])
        print(file=sys.stderr)
        print('*** Error while highlighting:', file=sys.stderr)
        print(msg, file=sys.stderr)
        print('*** If this is a bug you want to report, please rerun with -v.',
              file=sys.stderr)
        return 1
|
source/pygments/console.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.console
|
| 3 |
+
~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Format colored console output.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
# ANSI escape sequence introducer (CSI).
esc = "\x1b["

# Mapping from symbolic attribute/color names to ANSI escape sequences.
codes = {}
codes[""] = ""
codes["reset"] = esc + "39;49;00m"  # restore default fg/bg and intensity

codes["bold"] = esc + "01m"
codes["faint"] = esc + "02m"
codes["standout"] = esc + "03m"
codes["underline"] = esc + "04m"
codes["blink"] = esc + "05m"
codes["overline"] = esc + "06m"

# Foreground color names: dark colors map to SGR codes 30-37, the
# corresponding bright variants to 90-97 (60 + dark code).
dark_colors = ["black", "red", "green", "yellow", "blue",
               "magenta", "cyan", "gray"]
light_colors = ["brightblack", "brightred", "brightgreen", "brightyellow", "brightblue",
                "brightmagenta", "brightcyan", "white"]

x = 30
for dark, light in zip(dark_colors, light_colors):
    codes[dark] = esc + "%im" % x
    codes[light] = esc + "%im" % (60 + x)
    x += 1

# Remove the loop helpers from the module namespace.
del dark, light, x

# NOTE: "white" is deliberately remapped to bold, overriding the
# bright-color entry assigned in the loop above.
codes["white"] = codes["bold"]
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def reset_color():
    """Return the ANSI escape sequence that resets colors and intensity."""
    return codes["reset"]
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def colorize(color_key, text):
    """Wrap *text* in the escape sequence for *color_key*, followed by a
    reset sequence."""
    return ''.join((codes[color_key], text, codes["reset"]))
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def ansiformat(attr, text):
    """
    Format ``text`` with a color and/or some attributes::

        color       normal color
        *color*     bold color
        _color_     underlined color
        +color+     blinking color
    """
    pieces = []
    # Strip surrounding markers one layer at a time, in this fixed order,
    # collecting the corresponding attribute codes.
    for marker, code_name in (('+', 'blink'), ('*', 'bold'), ('_', 'underline')):
        if attr[:1] == attr[-1:] == marker:
            pieces.append(codes[code_name])
            attr = attr[1:-1]
    pieces.append(codes[attr])
    pieces.append(text)
    pieces.append(codes['reset'])
    return ''.join(pieces)
|
source/pygments/filter.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.filter
|
| 3 |
+
~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Module that implements the default filter.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def apply_filters(stream, filters, lexer=None):
    """
    Apply an iterable of filters to a token stream, lazily.

    Each filter's ``filter`` method is called with *lexer* (or ``None``)
    and the stream produced by the previous filter; the fully wrapped
    generator is returned without being consumed.
    """
    def _chain(flt, tokens):
        yield from flt.filter(lexer, tokens)

    wrapped = stream
    for flt in filters:
        wrapped = _chain(flt, wrapped)
    return wrapped
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def simplefilter(f):
    """
    Decorator that turns a generator function into a filter class.

    The function is stored as a class attribute and is therefore invoked
    as a method, i.e. with the signature ``(self, lexer, stream, options)``::

        @simplefilter
        def lowercase(self, lexer, stream, options):
            for ttype, value in stream:
                yield ttype, value.lower()
    """
    attrs = {
        '__module__': getattr(f, '__module__'),
        '__doc__': f.__doc__,
        'function': f,
    }
    return type(f.__name__, (FunctionFilter,), attrs)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class Filter:
    """
    Base class for token-stream filters.

    Subclass this and override :meth:`filter`, or use the `simplefilter`
    decorator to build a filter from a plain function.
    """

    def __init__(self, **options):
        # Keyword options are stored verbatim for subclasses to inspect.
        self.options = options

    def filter(self, lexer, stream):
        raise NotImplementedError()
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class FunctionFilter(Filter):
    """
    Abstract base used by `simplefilter`: concrete subclasses carry the
    wrapped function in the ``function`` class attribute and delegate
    filtering to it.
    """
    function = None

    def __init__(self, **options):
        # Guard against instantiation without a bound function.  (The class
        # attribute above means hasattr() is always true here; the check is
        # kept as-is to preserve historical behavior.)
        if not hasattr(self, 'function'):
            raise TypeError(f'{self.__class__.__name__!r} used without bound function')
        super().__init__(**options)

    def filter(self, lexer, stream):
        # 'function' is a plain function stored on the class, so attribute
        # access binds it as a method and self is passed implicitly.
        # pylint: disable=not-callable
        yield from self.function(lexer, stream, self.options)
|
source/pygments/filters/__init__.py
ADDED
|
@@ -0,0 +1,940 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.filters
|
| 3 |
+
~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Module containing filter lookup functions and default
|
| 6 |
+
filters.
|
| 7 |
+
|
| 8 |
+
:copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
|
| 9 |
+
:license: BSD, see LICENSE for details.
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
import re
|
| 13 |
+
|
| 14 |
+
from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
|
| 15 |
+
string_to_tokentype
|
| 16 |
+
from pygments.filter import Filter
|
| 17 |
+
from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
|
| 18 |
+
get_choice_opt, ClassNotFound, OptionError
|
| 19 |
+
from pygments.plugin import find_plugin_filters
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def find_filter_class(filtername):
    """Lookup a filter by name. Return None if not found."""
    # Built-in filters take precedence over plugin-provided ones.
    try:
        return FILTERS[filtername]
    except KeyError:
        pass
    for plugin_name, plugin_cls in find_plugin_filters():
        if plugin_name == filtername:
            return plugin_cls
    return None
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def get_filter_by_name(filtername, **options):
    """Return an instantiated filter.

    Options are passed to the filter initializer if wanted.
    Raise a ClassNotFound if not found.
    """
    cls = find_filter_class(filtername)
    if cls is None:
        raise ClassNotFound(f'filter {filtername!r} not found')
    return cls(**options)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def get_all_filters():
    """Return a generator of all filter names."""
    # Built-in filter names first, then any registered via plugins.
    for builtin_name in FILTERS:
        yield builtin_name
    for plugin_name, _cls in find_plugin_filters():
        yield plugin_name
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def _replace_special(ttype, value, regex, specialttype,
|
| 53 |
+
replacefunc=lambda x: x):
|
| 54 |
+
last = 0
|
| 55 |
+
for match in regex.finditer(value):
|
| 56 |
+
start, end = match.start(), match.end()
|
| 57 |
+
if start != last:
|
| 58 |
+
yield ttype, value[last:start]
|
| 59 |
+
yield specialttype, replacefunc(value[start:end])
|
| 60 |
+
last = end
|
| 61 |
+
if last != len(value):
|
| 62 |
+
yield ttype, value[last:]
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class CodeTagFilter(Filter):
    """Highlight special code tags in comments and docstrings.

    Options accepted:

    `codetags` : list of strings
        A list of strings that are flagged as code tags. The default is to
        highlight ``XXX``, ``TODO``, ``FIXME``, ``BUG`` and ``NOTE``.

    .. versionchanged:: 2.13
       Now recognizes ``FIXME`` by default.
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        codetags = get_list_opt(options, 'codetags',
                                ['XXX', 'TODO', 'FIXME', 'BUG', 'NOTE'])
        # Build one alternation over all (non-empty) tags, anchored on
        # word boundaries so substrings of identifiers don't match.
        pattern = '|'.join(re.escape(tag) for tag in codetags if tag)
        self.tag_re = re.compile(rf'\b({pattern})\b')

    def filter(self, lexer, stream):
        tag_re = self.tag_re
        for ttype, value in stream:
            in_docstring = ttype in String.Doc
            in_plain_comment = (ttype in Comment and
                                ttype not in Comment.Preproc)
            if in_docstring or in_plain_comment:
                # Split the token so matched tags come out as
                # Comment.Special while the rest keeps its type.
                yield from _replace_special(ttype, value, tag_re,
                                            Comment.Special)
            else:
                yield ttype, value
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
class SymbolFilter(Filter):
    """Convert mathematical symbols such as \\<longrightarrow> in Isabelle
    or \\longrightarrow in LaTeX into Unicode characters.

    This is mostly useful for HTML or console output when you want to
    approximate the source rendering you'd see in an IDE.

    Options accepted:

    `lang` : string
        The symbol language. Must be one of ``'isabelle'`` or
        ``'latex'``. The default is ``'isabelle'``.
    """

    # Map of LaTeX macro spellings (e.g. '\\alpha') to the Unicode
    # character they conventionally render as.
    latex_symbols = {
        '\\alpha' : '\U000003b1',
        '\\beta' : '\U000003b2',
        '\\gamma' : '\U000003b3',
        '\\delta' : '\U000003b4',
        '\\varepsilon' : '\U000003b5',
        '\\zeta' : '\U000003b6',
        '\\eta' : '\U000003b7',
        '\\vartheta' : '\U000003b8',
        '\\iota' : '\U000003b9',
        '\\kappa' : '\U000003ba',
        '\\lambda' : '\U000003bb',
        '\\mu' : '\U000003bc',
        '\\nu' : '\U000003bd',
        '\\xi' : '\U000003be',
        '\\pi' : '\U000003c0',
        '\\varrho' : '\U000003c1',
        '\\sigma' : '\U000003c3',
        '\\tau' : '\U000003c4',
        '\\upsilon' : '\U000003c5',
        '\\varphi' : '\U000003c6',
        '\\chi' : '\U000003c7',
        '\\psi' : '\U000003c8',
        '\\omega' : '\U000003c9',
        '\\Gamma' : '\U00000393',
        '\\Delta' : '\U00000394',
        '\\Theta' : '\U00000398',
        '\\Lambda' : '\U0000039b',
        '\\Xi' : '\U0000039e',
        '\\Pi' : '\U000003a0',
        '\\Sigma' : '\U000003a3',
        '\\Upsilon' : '\U000003a5',
        '\\Phi' : '\U000003a6',
        '\\Psi' : '\U000003a8',
        '\\Omega' : '\U000003a9',
        '\\leftarrow' : '\U00002190',
        '\\longleftarrow' : '\U000027f5',
        '\\rightarrow' : '\U00002192',
        '\\longrightarrow' : '\U000027f6',
        '\\Leftarrow' : '\U000021d0',
        '\\Longleftarrow' : '\U000027f8',
        '\\Rightarrow' : '\U000021d2',
        '\\Longrightarrow' : '\U000027f9',
        '\\leftrightarrow' : '\U00002194',
        '\\longleftrightarrow' : '\U000027f7',
        '\\Leftrightarrow' : '\U000021d4',
        '\\Longleftrightarrow' : '\U000027fa',
        '\\mapsto' : '\U000021a6',
        '\\longmapsto' : '\U000027fc',
        '\\relbar' : '\U00002500',
        '\\Relbar' : '\U00002550',
        '\\hookleftarrow' : '\U000021a9',
        '\\hookrightarrow' : '\U000021aa',
        '\\leftharpoondown' : '\U000021bd',
        '\\rightharpoondown' : '\U000021c1',
        '\\leftharpoonup' : '\U000021bc',
        '\\rightharpoonup' : '\U000021c0',
        '\\rightleftharpoons' : '\U000021cc',
        '\\leadsto' : '\U0000219d',
        '\\downharpoonleft' : '\U000021c3',
        '\\downharpoonright' : '\U000021c2',
        '\\upharpoonleft' : '\U000021bf',
        '\\upharpoonright' : '\U000021be',
        '\\restriction' : '\U000021be',
        '\\uparrow' : '\U00002191',
        '\\Uparrow' : '\U000021d1',
        '\\downarrow' : '\U00002193',
        '\\Downarrow' : '\U000021d3',
        '\\updownarrow' : '\U00002195',
        '\\Updownarrow' : '\U000021d5',
        '\\langle' : '\U000027e8',
        '\\rangle' : '\U000027e9',
        '\\lceil' : '\U00002308',
        '\\rceil' : '\U00002309',
        '\\lfloor' : '\U0000230a',
        '\\rfloor' : '\U0000230b',
        '\\flqq' : '\U000000ab',
        '\\frqq' : '\U000000bb',
        '\\bot' : '\U000022a5',
        '\\top' : '\U000022a4',
        '\\wedge' : '\U00002227',
        '\\bigwedge' : '\U000022c0',
        '\\vee' : '\U00002228',
        '\\bigvee' : '\U000022c1',
        '\\forall' : '\U00002200',
        '\\exists' : '\U00002203',
        '\\nexists' : '\U00002204',
        '\\neg' : '\U000000ac',
        '\\Box' : '\U000025a1',
        '\\Diamond' : '\U000025c7',
        '\\vdash' : '\U000022a2',
        '\\models' : '\U000022a8',
        '\\dashv' : '\U000022a3',
        '\\surd' : '\U0000221a',
        '\\le' : '\U00002264',
        '\\ge' : '\U00002265',
        '\\ll' : '\U0000226a',
        '\\gg' : '\U0000226b',
        '\\lesssim' : '\U00002272',
        '\\gtrsim' : '\U00002273',
        '\\lessapprox' : '\U00002a85',
        '\\gtrapprox' : '\U00002a86',
        '\\in' : '\U00002208',
        '\\notin' : '\U00002209',
        '\\subset' : '\U00002282',
        '\\supset' : '\U00002283',
        '\\subseteq' : '\U00002286',
        '\\supseteq' : '\U00002287',
        '\\sqsubset' : '\U0000228f',
        '\\sqsupset' : '\U00002290',
        '\\sqsubseteq' : '\U00002291',
        '\\sqsupseteq' : '\U00002292',
        '\\cap' : '\U00002229',
        '\\bigcap' : '\U000022c2',
        '\\cup' : '\U0000222a',
        '\\bigcup' : '\U000022c3',
        '\\sqcup' : '\U00002294',
        '\\bigsqcup' : '\U00002a06',
        '\\sqcap' : '\U00002293',
        '\\Bigsqcap' : '\U00002a05',
        '\\setminus' : '\U00002216',
        '\\propto' : '\U0000221d',
        '\\uplus' : '\U0000228e',
        '\\bigplus' : '\U00002a04',
        '\\sim' : '\U0000223c',
        '\\doteq' : '\U00002250',
        '\\simeq' : '\U00002243',
        '\\approx' : '\U00002248',
        '\\asymp' : '\U0000224d',
        '\\cong' : '\U00002245',
        '\\equiv' : '\U00002261',
        '\\Join' : '\U000022c8',
        '\\bowtie' : '\U00002a1d',
        '\\prec' : '\U0000227a',
        '\\succ' : '\U0000227b',
        '\\preceq' : '\U0000227c',
        '\\succeq' : '\U0000227d',
        '\\parallel' : '\U00002225',
        '\\mid' : '\U000000a6',
        '\\pm' : '\U000000b1',
        '\\mp' : '\U00002213',
        '\\times' : '\U000000d7',
        '\\div' : '\U000000f7',
        '\\cdot' : '\U000022c5',
        '\\star' : '\U000022c6',
        '\\circ' : '\U00002218',
        '\\dagger' : '\U00002020',
        '\\ddagger' : '\U00002021',
        '\\lhd' : '\U000022b2',
        '\\rhd' : '\U000022b3',
        '\\unlhd' : '\U000022b4',
        '\\unrhd' : '\U000022b5',
        '\\triangleleft' : '\U000025c3',
        '\\triangleright' : '\U000025b9',
        '\\triangle' : '\U000025b3',
        '\\triangleq' : '\U0000225c',
        '\\oplus' : '\U00002295',
        '\\bigoplus' : '\U00002a01',
        '\\otimes' : '\U00002297',
        '\\bigotimes' : '\U00002a02',
        '\\odot' : '\U00002299',
        '\\bigodot' : '\U00002a00',
        '\\ominus' : '\U00002296',
        '\\oslash' : '\U00002298',
        '\\dots' : '\U00002026',
        '\\cdots' : '\U000022ef',
        '\\sum' : '\U00002211',
        '\\prod' : '\U0000220f',
        '\\coprod' : '\U00002210',
        '\\infty' : '\U0000221e',
        '\\int' : '\U0000222b',
        '\\oint' : '\U0000222e',
        '\\clubsuit' : '\U00002663',
        '\\diamondsuit' : '\U00002662',
        '\\heartsuit' : '\U00002661',
        '\\spadesuit' : '\U00002660',
        '\\aleph' : '\U00002135',
        '\\emptyset' : '\U00002205',
        '\\nabla' : '\U00002207',
        '\\partial' : '\U00002202',
        '\\flat' : '\U0000266d',
        '\\natural' : '\U0000266e',
        '\\sharp' : '\U0000266f',
        '\\angle' : '\U00002220',
        '\\copyright' : '\U000000a9',
        '\\textregistered' : '\U000000ae',
        '\\textonequarter' : '\U000000bc',
        '\\textonehalf' : '\U000000bd',
        '\\textthreequarters' : '\U000000be',
        '\\textordfeminine' : '\U000000aa',
        '\\textordmasculine' : '\U000000ba',
        '\\euro' : '\U000020ac',
        '\\pounds' : '\U000000a3',
        '\\yen' : '\U000000a5',
        '\\textcent' : '\U000000a2',
        '\\textcurrency' : '\U000000a4',
        '\\textdegree' : '\U000000b0',
    }

    # Map of Isabelle symbol spellings (e.g. '\\<alpha>') to the Unicode
    # character they conventionally render as.
    isabelle_symbols = {
        '\\<zero>' : '\U0001d7ec',
        '\\<one>' : '\U0001d7ed',
        '\\<two>' : '\U0001d7ee',
        '\\<three>' : '\U0001d7ef',
        '\\<four>' : '\U0001d7f0',
        '\\<five>' : '\U0001d7f1',
        '\\<six>' : '\U0001d7f2',
        '\\<seven>' : '\U0001d7f3',
        '\\<eight>' : '\U0001d7f4',
        '\\<nine>' : '\U0001d7f5',
        '\\<A>' : '\U0001d49c',
        '\\<B>' : '\U0000212c',
        '\\<C>' : '\U0001d49e',
        '\\<D>' : '\U0001d49f',
        '\\<E>' : '\U00002130',
        '\\<F>' : '\U00002131',
        '\\<G>' : '\U0001d4a2',
        '\\<H>' : '\U0000210b',
        '\\<I>' : '\U00002110',
        '\\<J>' : '\U0001d4a5',
        '\\<K>' : '\U0001d4a6',
        '\\<L>' : '\U00002112',
        '\\<M>' : '\U00002133',
        '\\<N>' : '\U0001d4a9',
        '\\<O>' : '\U0001d4aa',
        '\\<P>' : '\U0001d4ab',
        '\\<Q>' : '\U0001d4ac',
        '\\<R>' : '\U0000211b',
        '\\<S>' : '\U0001d4ae',
        '\\<T>' : '\U0001d4af',
        '\\<U>' : '\U0001d4b0',
        '\\<V>' : '\U0001d4b1',
        '\\<W>' : '\U0001d4b2',
        '\\<X>' : '\U0001d4b3',
        '\\<Y>' : '\U0001d4b4',
        '\\<Z>' : '\U0001d4b5',
        '\\<a>' : '\U0001d5ba',
        '\\<b>' : '\U0001d5bb',
        '\\<c>' : '\U0001d5bc',
        '\\<d>' : '\U0001d5bd',
        '\\<e>' : '\U0001d5be',
        '\\<f>' : '\U0001d5bf',
        '\\<g>' : '\U0001d5c0',
        '\\<h>' : '\U0001d5c1',
        '\\<i>' : '\U0001d5c2',
        '\\<j>' : '\U0001d5c3',
        '\\<k>' : '\U0001d5c4',
        '\\<l>' : '\U0001d5c5',
        '\\<m>' : '\U0001d5c6',
        '\\<n>' : '\U0001d5c7',
        '\\<o>' : '\U0001d5c8',
        '\\<p>' : '\U0001d5c9',
        '\\<q>' : '\U0001d5ca',
        '\\<r>' : '\U0001d5cb',
        '\\<s>' : '\U0001d5cc',
        '\\<t>' : '\U0001d5cd',
        '\\<u>' : '\U0001d5ce',
        '\\<v>' : '\U0001d5cf',
        '\\<w>' : '\U0001d5d0',
        '\\<x>' : '\U0001d5d1',
        '\\<y>' : '\U0001d5d2',
        '\\<z>' : '\U0001d5d3',
        '\\<AA>' : '\U0001d504',
        '\\<BB>' : '\U0001d505',
        '\\<CC>' : '\U0000212d',
        '\\<DD>' : '\U0001d507',
        '\\<EE>' : '\U0001d508',
        '\\<FF>' : '\U0001d509',
        '\\<GG>' : '\U0001d50a',
        '\\<HH>' : '\U0000210c',
        '\\<II>' : '\U00002111',
        '\\<JJ>' : '\U0001d50d',
        '\\<KK>' : '\U0001d50e',
        '\\<LL>' : '\U0001d50f',
        '\\<MM>' : '\U0001d510',
        '\\<NN>' : '\U0001d511',
        '\\<OO>' : '\U0001d512',
        '\\<PP>' : '\U0001d513',
        '\\<QQ>' : '\U0001d514',
        '\\<RR>' : '\U0000211c',
        '\\<SS>' : '\U0001d516',
        '\\<TT>' : '\U0001d517',
        '\\<UU>' : '\U0001d518',
        '\\<VV>' : '\U0001d519',
        '\\<WW>' : '\U0001d51a',
        '\\<XX>' : '\U0001d51b',
        '\\<YY>' : '\U0001d51c',
        '\\<ZZ>' : '\U00002128',
        '\\<aa>' : '\U0001d51e',
        '\\<bb>' : '\U0001d51f',
        '\\<cc>' : '\U0001d520',
        '\\<dd>' : '\U0001d521',
        '\\<ee>' : '\U0001d522',
        '\\<ff>' : '\U0001d523',
        '\\<gg>' : '\U0001d524',
        '\\<hh>' : '\U0001d525',
        '\\<ii>' : '\U0001d526',
        '\\<jj>' : '\U0001d527',
        '\\<kk>' : '\U0001d528',
        '\\<ll>' : '\U0001d529',
        '\\<mm>' : '\U0001d52a',
        '\\<nn>' : '\U0001d52b',
        '\\<oo>' : '\U0001d52c',
        '\\<pp>' : '\U0001d52d',
        '\\<qq>' : '\U0001d52e',
        '\\<rr>' : '\U0001d52f',
        '\\<ss>' : '\U0001d530',
        '\\<tt>' : '\U0001d531',
        '\\<uu>' : '\U0001d532',
        '\\<vv>' : '\U0001d533',
        '\\<ww>' : '\U0001d534',
        '\\<xx>' : '\U0001d535',
        '\\<yy>' : '\U0001d536',
        '\\<zz>' : '\U0001d537',
        '\\<alpha>' : '\U000003b1',
        '\\<beta>' : '\U000003b2',
        '\\<gamma>' : '\U000003b3',
        '\\<delta>' : '\U000003b4',
        '\\<epsilon>' : '\U000003b5',
        '\\<zeta>' : '\U000003b6',
        '\\<eta>' : '\U000003b7',
        '\\<theta>' : '\U000003b8',
        '\\<iota>' : '\U000003b9',
        '\\<kappa>' : '\U000003ba',
        '\\<lambda>' : '\U000003bb',
        '\\<mu>' : '\U000003bc',
        '\\<nu>' : '\U000003bd',
        '\\<xi>' : '\U000003be',
        '\\<pi>' : '\U000003c0',
        '\\<rho>' : '\U000003c1',
        '\\<sigma>' : '\U000003c3',
        '\\<tau>' : '\U000003c4',
        '\\<upsilon>' : '\U000003c5',
        '\\<phi>' : '\U000003c6',
        '\\<chi>' : '\U000003c7',
        '\\<psi>' : '\U000003c8',
        '\\<omega>' : '\U000003c9',
        '\\<Gamma>' : '\U00000393',
        '\\<Delta>' : '\U00000394',
        '\\<Theta>' : '\U00000398',
        '\\<Lambda>' : '\U0000039b',
        '\\<Xi>' : '\U0000039e',
        '\\<Pi>' : '\U000003a0',
        '\\<Sigma>' : '\U000003a3',
        '\\<Upsilon>' : '\U000003a5',
        '\\<Phi>' : '\U000003a6',
        '\\<Psi>' : '\U000003a8',
        '\\<Omega>' : '\U000003a9',
        '\\<bool>' : '\U0001d539',
        '\\<complex>' : '\U00002102',
        '\\<nat>' : '\U00002115',
        '\\<rat>' : '\U0000211a',
        '\\<real>' : '\U0000211d',
        '\\<int>' : '\U00002124',
        '\\<leftarrow>' : '\U00002190',
        '\\<longleftarrow>' : '\U000027f5',
        '\\<rightarrow>' : '\U00002192',
        '\\<longrightarrow>' : '\U000027f6',
        '\\<Leftarrow>' : '\U000021d0',
        '\\<Longleftarrow>' : '\U000027f8',
        '\\<Rightarrow>' : '\U000021d2',
        '\\<Longrightarrow>' : '\U000027f9',
        '\\<leftrightarrow>' : '\U00002194',
        '\\<longleftrightarrow>' : '\U000027f7',
        '\\<Leftrightarrow>' : '\U000021d4',
        '\\<Longleftrightarrow>' : '\U000027fa',
        '\\<mapsto>' : '\U000021a6',
        '\\<longmapsto>' : '\U000027fc',
        '\\<midarrow>' : '\U00002500',
        '\\<Midarrow>' : '\U00002550',
        '\\<hookleftarrow>' : '\U000021a9',
        '\\<hookrightarrow>' : '\U000021aa',
        '\\<leftharpoondown>' : '\U000021bd',
        '\\<rightharpoondown>' : '\U000021c1',
        '\\<leftharpoonup>' : '\U000021bc',
        '\\<rightharpoonup>' : '\U000021c0',
        '\\<rightleftharpoons>' : '\U000021cc',
        '\\<leadsto>' : '\U0000219d',
        '\\<downharpoonleft>' : '\U000021c3',
        '\\<downharpoonright>' : '\U000021c2',
        '\\<upharpoonleft>' : '\U000021bf',
        '\\<upharpoonright>' : '\U000021be',
        '\\<restriction>' : '\U000021be',
        '\\<Colon>' : '\U00002237',
        '\\<up>' : '\U00002191',
        '\\<Up>' : '\U000021d1',
        '\\<down>' : '\U00002193',
        '\\<Down>' : '\U000021d3',
        '\\<updown>' : '\U00002195',
        '\\<Updown>' : '\U000021d5',
        '\\<langle>' : '\U000027e8',
        '\\<rangle>' : '\U000027e9',
        '\\<lceil>' : '\U00002308',
        '\\<rceil>' : '\U00002309',
        '\\<lfloor>' : '\U0000230a',
        '\\<rfloor>' : '\U0000230b',
        '\\<lparr>' : '\U00002987',
        '\\<rparr>' : '\U00002988',
        '\\<lbrakk>' : '\U000027e6',
        '\\<rbrakk>' : '\U000027e7',
        '\\<lbrace>' : '\U00002983',
        '\\<rbrace>' : '\U00002984',
        '\\<guillemotleft>' : '\U000000ab',
        '\\<guillemotright>' : '\U000000bb',
        '\\<bottom>' : '\U000022a5',
        '\\<top>' : '\U000022a4',
        '\\<and>' : '\U00002227',
        '\\<And>' : '\U000022c0',
        '\\<or>' : '\U00002228',
        '\\<Or>' : '\U000022c1',
        '\\<forall>' : '\U00002200',
        '\\<exists>' : '\U00002203',
        '\\<nexists>' : '\U00002204',
        '\\<not>' : '\U000000ac',
        '\\<box>' : '\U000025a1',
        '\\<diamond>' : '\U000025c7',
        '\\<turnstile>' : '\U000022a2',
        '\\<Turnstile>' : '\U000022a8',
        '\\<tturnstile>' : '\U000022a9',
        '\\<TTurnstile>' : '\U000022ab',
        '\\<stileturn>' : '\U000022a3',
        '\\<surd>' : '\U0000221a',
        '\\<le>' : '\U00002264',
        '\\<ge>' : '\U00002265',
        '\\<lless>' : '\U0000226a',
        '\\<ggreater>' : '\U0000226b',
        '\\<lesssim>' : '\U00002272',
        '\\<greatersim>' : '\U00002273',
        '\\<lessapprox>' : '\U00002a85',
        '\\<greaterapprox>' : '\U00002a86',
        '\\<in>' : '\U00002208',
        '\\<notin>' : '\U00002209',
        '\\<subset>' : '\U00002282',
        '\\<supset>' : '\U00002283',
        '\\<subseteq>' : '\U00002286',
        '\\<supseteq>' : '\U00002287',
        '\\<sqsubset>' : '\U0000228f',
        '\\<sqsupset>' : '\U00002290',
        '\\<sqsubseteq>' : '\U00002291',
        '\\<sqsupseteq>' : '\U00002292',
        '\\<inter>' : '\U00002229',
        '\\<Inter>' : '\U000022c2',
        '\\<union>' : '\U0000222a',
        '\\<Union>' : '\U000022c3',
        '\\<squnion>' : '\U00002294',
        '\\<Squnion>' : '\U00002a06',
        '\\<sqinter>' : '\U00002293',
        '\\<Sqinter>' : '\U00002a05',
        '\\<setminus>' : '\U00002216',
        '\\<propto>' : '\U0000221d',
        '\\<uplus>' : '\U0000228e',
        '\\<Uplus>' : '\U00002a04',
        '\\<noteq>' : '\U00002260',
        '\\<sim>' : '\U0000223c',
        '\\<doteq>' : '\U00002250',
        '\\<simeq>' : '\U00002243',
        '\\<approx>' : '\U00002248',
        '\\<asymp>' : '\U0000224d',
        '\\<cong>' : '\U00002245',
        '\\<smile>' : '\U00002323',
        '\\<equiv>' : '\U00002261',
        '\\<frown>' : '\U00002322',
        '\\<Join>' : '\U000022c8',
        '\\<bowtie>' : '\U00002a1d',
        '\\<prec>' : '\U0000227a',
        '\\<succ>' : '\U0000227b',
        '\\<preceq>' : '\U0000227c',
        '\\<succeq>' : '\U0000227d',
        '\\<parallel>' : '\U00002225',
        '\\<bar>' : '\U000000a6',
        '\\<plusminus>' : '\U000000b1',
        '\\<minusplus>' : '\U00002213',
        '\\<times>' : '\U000000d7',
        '\\<div>' : '\U000000f7',
        '\\<cdot>' : '\U000022c5',
        '\\<star>' : '\U000022c6',
        '\\<bullet>' : '\U00002219',
        '\\<circ>' : '\U00002218',
        '\\<dagger>' : '\U00002020',
        '\\<ddagger>' : '\U00002021',
        '\\<lhd>' : '\U000022b2',
        '\\<rhd>' : '\U000022b3',
        '\\<unlhd>' : '\U000022b4',
        '\\<unrhd>' : '\U000022b5',
        '\\<triangleleft>' : '\U000025c3',
        '\\<triangleright>' : '\U000025b9',
        '\\<triangle>' : '\U000025b3',
        '\\<triangleq>' : '\U0000225c',
        '\\<oplus>' : '\U00002295',
        '\\<Oplus>' : '\U00002a01',
        '\\<otimes>' : '\U00002297',
        '\\<Otimes>' : '\U00002a02',
        '\\<odot>' : '\U00002299',
        '\\<Odot>' : '\U00002a00',
        '\\<ominus>' : '\U00002296',
        '\\<oslash>' : '\U00002298',
        '\\<dots>' : '\U00002026',
        '\\<cdots>' : '\U000022ef',
        '\\<Sum>' : '\U00002211',
        '\\<Prod>' : '\U0000220f',
        '\\<Coprod>' : '\U00002210',
        '\\<infinity>' : '\U0000221e',
        '\\<integral>' : '\U0000222b',
        '\\<ointegral>' : '\U0000222e',
        '\\<clubsuit>' : '\U00002663',
        '\\<diamondsuit>' : '\U00002662',
        '\\<heartsuit>' : '\U00002661',
        '\\<spadesuit>' : '\U00002660',
        '\\<aleph>' : '\U00002135',
        '\\<emptyset>' : '\U00002205',
        '\\<nabla>' : '\U00002207',
        '\\<partial>' : '\U00002202',
        '\\<flat>' : '\U0000266d',
        '\\<natural>' : '\U0000266e',
        '\\<sharp>' : '\U0000266f',
        '\\<angle>' : '\U00002220',
        '\\<copyright>' : '\U000000a9',
        '\\<registered>' : '\U000000ae',
        '\\<hyphen>' : '\U000000ad',
        '\\<inverse>' : '\U000000af',
        '\\<onequarter>' : '\U000000bc',
        '\\<onehalf>' : '\U000000bd',
        '\\<threequarters>' : '\U000000be',
        '\\<ordfeminine>' : '\U000000aa',
        '\\<ordmasculine>' : '\U000000ba',
        '\\<section>' : '\U000000a7',
        '\\<paragraph>' : '\U000000b6',
        '\\<exclamdown>' : '\U000000a1',
        '\\<questiondown>' : '\U000000bf',
        '\\<euro>' : '\U000020ac',
        '\\<pounds>' : '\U000000a3',
        '\\<yen>' : '\U000000a5',
        '\\<cent>' : '\U000000a2',
        '\\<currency>' : '\U000000a4',
        '\\<degree>' : '\U000000b0',
        '\\<amalg>' : '\U00002a3f',
        '\\<mho>' : '\U00002127',
        '\\<lozenge>' : '\U000025ca',
        '\\<wp>' : '\U00002118',
        '\\<wrong>' : '\U00002240',
        '\\<struct>' : '\U000022c4',
        '\\<acute>' : '\U000000b4',
        '\\<index>' : '\U00000131',
        '\\<dieresis>' : '\U000000a8',
        '\\<cedilla>' : '\U000000b8',
        '\\<hungarumlaut>' : '\U000002dd',
        '\\<some>' : '\U000003f5',
        '\\<newline>' : '\U000023ce',
        '\\<open>' : '\U00002039',
        '\\<close>' : '\U0000203a',
        '\\<here>' : '\U00002302',
        '\\<^sub>' : '\U000021e9',
        '\\<^sup>' : '\U000021e7',
        '\\<^bold>' : '\U00002759',
        '\\<^bsub>' : '\U000021d8',
        '\\<^esub>' : '\U000021d9',
        '\\<^bsup>' : '\U000021d7',
        '\\<^esup>' : '\U000021d6',
    }

    # Dispatch from the `lang` option value to the symbol table to use.
    lang_map = {'isabelle' : isabelle_symbols, 'latex' : latex_symbols}

    def __init__(self, **options):
        # Select the symbol table; get_choice_opt rejects any `lang`
        # value other than the two keys of lang_map.
        Filter.__init__(self, **options)
        lang = get_choice_opt(options, 'lang',
                              ['isabelle', 'latex'], 'isabelle')
        self.symbols = self.lang_map[lang]

    def filter(self, lexer, stream):
        # Replace a token's text wholesale when its entire value is a
        # known symbol name; all other tokens pass through unchanged.
        for ttype, value in stream:
            if value in self.symbols:
                yield ttype, self.symbols[value]
            else:
                yield ttype, value
|
| 685 |
+
|
| 686 |
+
|
| 687 |
+
class KeywordCaseFilter(Filter):
    """Normalize the casing of keyword tokens.

    Every token in the `Keyword` domain is lower-cased, upper-cased or
    capitalized (first letter uppercase, rest lowercase) according to the
    `case` option. Useful e.g. when highlighting Pascal code that should
    follow a particular style guide.

    Options accepted:

    `case` : string
        The casing to convert keywords to. Must be one of ``'lower'``,
        ``'upper'`` or ``'capitalize'``. The default is ``'lower'``.
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        mode = get_choice_opt(options, 'case',
                              ['lower', 'upper', 'capitalize'], 'lower')
        # Bind the matching str method (str.lower / str.upper / str.capitalize).
        self.convert = getattr(str, mode)

    def filter(self, lexer, stream):
        recase = self.convert
        for ttype, value in stream:
            yield ttype, (recase(value) if ttype in Keyword else value)
|
| 713 |
+
|
| 714 |
+
|
| 715 |
+
class NameHighlightFilter(Filter):
    """Re-tag selected `Name` (and `Name.*`) tokens with a different token type.

    Example::

        filter = NameHighlightFilter(
            names=['foo', 'bar', 'baz'],
            tokentype=Name.Function,
        )

    This would highlight the names "foo", "bar" and "baz"
    as functions. `Name.Function` is the default token type.

    Options accepted:

    `names` : list of strings
        The identifiers that should receive the different token type.
        There is no default.
    `tokentype` : TokenType or string
        A token type, or the string name of one, applied to matching
        values. The default is `Name.Function`.
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        self.names = set(get_list_opt(options, 'names', []))
        requested = options.get('tokentype')
        # A string token-type name is resolved; otherwise fall back to the
        # default highlight type.
        self.tokentype = (string_to_tokentype(requested)
                          if requested else Name.Function)

    def filter(self, lexer, stream):
        wanted = self.names
        for ttype, value in stream:
            if ttype in Name and value in wanted:
                yield self.tokentype, value
            else:
                yield ttype, value
|
| 754 |
+
|
| 755 |
+
|
| 756 |
+
class ErrorToken(Exception):
    """Default exception raised by `RaiseOnErrorTokenFilter` when the lexer
    produces an error token."""
|
| 758 |
+
|
| 759 |
+
|
| 760 |
+
class RaiseOnErrorTokenFilter(Filter):
    """Raise an exception as soon as the lexer emits an `Error` token.

    Options accepted:

    `excclass` : Exception class
        The exception class to raise.
        The default is `pygments.filters.ErrorToken`.

    .. versionadded:: 0.8
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        exc = options.get('excclass', ErrorToken)
        # Reject anything that is not an exception class.  (Passing a
        # non-class to issubclass would raise TypeError, which we fold
        # into the same validation failure.)
        if not (isinstance(exc, type) and issubclass(exc, Exception)):
            raise OptionError('excclass option is not an exception class')
        self.exception = exc

    def filter(self, lexer, stream):
        for ttype, value in stream:
            if ttype is Error:
                raise self.exception(value)
            yield ttype, value
|
| 787 |
+
|
| 788 |
+
|
| 789 |
+
class VisibleWhitespaceFilter(Filter):
    """Convert tabs, newlines and/or spaces to visible characters.

    Options accepted:

    `spaces` : string or bool
      If this is a one-character string, spaces will be replaces by this string.
      If it is another true value, spaces will be replaced by ``·`` (unicode
      MIDDLE DOT). If it is a false value, spaces will not be replaced. The
      default is ``False``.
    `tabs` : string or bool
      The same as for `spaces`, but the default replacement character is ``»``
      (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value
      is ``False``. Note: this will not work if the `tabsize` option for the
      lexer is nonzero, as tabs will already have been expanded then.
    `tabsize` : int
      If tabs are to be replaced by this filter (see the `tabs` option), this
      is the total number of characters that a tab should be expanded to.
      The default is ``8``.
    `newlines` : string or bool
      The same as for `spaces`, but the default replacement character is ``¶``
      (unicode PILCROW SIGN). The default value is ``False``.
    `wstokentype` : bool
      If true, give whitespace the special `Whitespace` token type. This allows
      styling the visible whitespace differently (e.g. greyed out), but it can
      disrupt background colors. The default is ``True``.

    .. versionadded:: 0.8
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        # Resolve each of the three options to either the user's single
        # replacement character, the default glyph (if the option is any
        # other truthy value), or '' (disabled).
        for name, default in [('spaces', '·'),
                              ('tabs', '»'),
                              ('newlines', '¶')]:
            opt = options.get(name, False)
            if isinstance(opt, str) and len(opt) == 1:
                setattr(self, name, opt)
            else:
                # old-style conditional: truthy opt -> default glyph, else ''
                setattr(self, name, (opt and default or ''))
        tabsize = get_int_opt(options, 'tabsize', 8)
        if self.tabs:
            # Pad the tab marker so each tab still occupies `tabsize` columns.
            self.tabs += ' ' * (tabsize - 1)
        if self.newlines:
            # Keep the real newline after the marker so line breaks survive.
            self.newlines += '\n'
        self.wstt = get_bool_opt(options, 'wstokentype', True)

    def filter(self, lexer, stream):
        if self.wstt:
            # Whitespace gets its own token type: delegate per-character
            # splitting/re-tagging to _replace_special (defined elsewhere
            # in this module).
            spaces = self.spaces or ' '
            tabs = self.tabs or '\t'
            newlines = self.newlines or '\n'
            regex = re.compile(r'\s')

            def replacefunc(wschar):
                # Map each whitespace character to its visible stand-in.
                if wschar == ' ':
                    return spaces
                elif wschar == '\t':
                    return tabs
                elif wschar == '\n':
                    return newlines
                return wschar

            for ttype, value in stream:
                yield from _replace_special(ttype, value, regex, Whitespace,
                                            replacefunc)
        else:
            spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
            # simpler processing: plain in-place substitution, token types
            # left unchanged
            for ttype, value in stream:
                if spaces:
                    value = value.replace(' ', spaces)
                if tabs:
                    value = value.replace('\t', tabs)
                if newlines:
                    value = value.replace('\n', newlines)
                yield ttype, value
|
| 866 |
+
|
| 867 |
+
|
| 868 |
+
class GobbleFilter(Filter):
    """Eat a fixed number of characters from the start of every code line.

    Drops the first ``n`` characters off each line fed through the filter.
    Handy when the source given to the lexer carries a fixed indentation
    that should not appear in the output.

    Options accepted:

    `n` : int
        The number of characters to gobble.

    .. versionadded:: 1.2
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        self.n = get_int_opt(options, 'n', 0)

    def gobble(self, value, left):
        # Consume up to `left` characters from `value`; return the
        # remainder together with the unspent budget.
        if left < len(value):
            return value[left:], 0
        return '', left - len(value)

    def filter(self, lexer, stream):
        n = self.n
        budget = n  # budget carried over for the current (possibly split) line
        for ttype, value in stream:
            # First physical line continues the previous token's line, so it
            # uses the carried budget; every following line gets a fresh `n`.
            pieces = value.split('\n')
            pieces[0], budget = self.gobble(pieces[0], budget)
            for idx in range(1, len(pieces)):
                pieces[idx], budget = self.gobble(pieces[idx], n)
            value = '\n'.join(pieces)

            if value:
                yield ttype, value
|
| 905 |
+
|
| 906 |
+
|
| 907 |
+
class TokenMergeFilter(Filter):
    """Fuse consecutive tokens of the same token type into single tokens
    in a lexer's output stream.

    .. versionadded:: 1.2
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)

    def filter(self, lexer, stream):
        pending_type = None
        pending_value = None
        for ttype, value in stream:
            if ttype is pending_type:
                # Same type as the run in progress: keep accumulating.
                pending_value += value
                continue
            if pending_type is not None:
                yield pending_type, pending_value
            pending_type, pending_value = ttype, value
        # Flush the final run, if any tokens were seen at all.
        if pending_type is not None:
            yield pending_type, pending_value
|
| 929 |
+
|
| 930 |
+
|
| 931 |
+
# Registry of the built-in filters, keyed by their public filter names.
# NOTE(review): presumably consumed by the module's filter-lookup helpers
# (not visible in this chunk) — confirm before relying on key names.
FILTERS = {
    'codetagify': CodeTagFilter,
    'keywordcase': KeywordCaseFilter,
    'highlight': NameHighlightFilter,
    'raiseonerror': RaiseOnErrorTokenFilter,
    'whitespace': VisibleWhitespaceFilter,
    'gobble': GobbleFilter,
    'tokenmerge': TokenMergeFilter,
    'symbols': SymbolFilter,
}
|
source/pygments/formatter.py
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.formatter
|
| 3 |
+
~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Base formatter class.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import codecs
|
| 12 |
+
|
| 13 |
+
from pygments.util import get_bool_opt
|
| 14 |
+
from pygments.styles import get_style_by_name
|
| 15 |
+
|
| 16 |
+
__all__ = ['Formatter']
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def _lookup_style(style):
    """Resolve a style name to its Style class; non-strings pass through."""
    return get_style_by_name(style) if isinstance(style, str) else style
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class Formatter:
    """
    Converts a token stream to text.

    Formatters should have attributes to help selecting them. These
    are similar to the corresponding :class:`~pygments.lexer.Lexer`
    attributes.

    .. autoattribute:: name
       :no-value:

    .. autoattribute:: aliases
       :no-value:

    .. autoattribute:: filenames
       :no-value:

    You can pass options as keyword arguments to the constructor.
    All formatters accept these basic options:

    ``style``
        The style to use, can be a string or a Style subclass
        (default: "default"). Not used by e.g. the
        TerminalFormatter.
    ``full``
        Tells the formatter to output a "full" document, i.e.
        a complete self-contained document. This doesn't have
        any effect for some formatters (default: false).
    ``title``
        If ``full`` is true, the title that should be used to
        caption the document (default: '').
    ``encoding``
        If given, must be an encoding name. This will be used to
        convert the Unicode token strings to byte strings in the
        output. If it is "" or None, Unicode strings will be written
        to the output file, which most file-like objects do not
        support (default: None).
    ``outencoding``
        Overrides ``encoding`` if given.

    """

    #: Full name for the formatter, in human-readable form.
    name = None

    #: A list of short, unique identifiers that can be used to lookup
    #: the formatter from a list, e.g. using :func:`.get_formatter_by_name()`.
    aliases = []

    #: A list of fnmatch patterns that match filenames for which this
    #: formatter can produce output. The patterns in this list should be unique
    #: among all formatters.
    filenames = []

    #: If True, this formatter outputs Unicode strings when no encoding
    #: option is given.
    unicodeoutput = True

    def __init__(self, **options):
        """
        As with lexers, this constructor takes arbitrary optional arguments.
        Subclasses that override it should process their own options first,
        then call this base implementation.
        """
        self.style = _lookup_style(options.get('style', 'default'))
        self.full = get_bool_opt(options, 'full', False)
        self.title = options.get('title', '')
        # Normalize '' to None so a falsy encoding means "write str".
        encoding = options.get('encoding', None) or None
        if encoding in ('guess', 'chardet'):
            # can happen for e.g. pygmentize -O encoding=guess
            encoding = 'utf-8'
        # ``outencoding`` always wins over ``encoding``.
        self.encoding = options.get('outencoding') or encoding
        self.options = options

    def get_style_defs(self, arg=''):
        """
        This method must return statements or declarations suitable to define
        the current style for subsequent highlighted text (e.g. CSS classes
        in the `HTMLFormatter`).

        The optional argument `arg` can be used to modify the generation and
        is formatter dependent (it is standardized because it can be given on
        the command line).

        This method is called by the ``-S`` :doc:`command-line option <cmdline>`,
        the `arg` is then given by the ``-a`` option.
        """
        return ''

    def format(self, tokensource, outfile):
        """
        This method must format the tokens from the `tokensource` iterable and
        write the formatted version to the file object `outfile`.

        Formatter options can control how exactly the tokens are converted.
        """
        if self.encoding:
            # wrap the outfile in a StreamWriter so all writes get encoded
            writer_cls = codecs.lookup(self.encoding)[3]
            outfile = writer_cls(outfile)
        return self.format_unencoded(tokensource, outfile)

    # Allow writing Formatter[str] or Formatter[bytes]. That's equivalent to
    # Formatter. This helps when using third-party type stubs from typeshed.
    def __class_getitem__(cls, name):
        return cls
|
source/pygments/formatters/__init__.py
ADDED
|
@@ -0,0 +1,157 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.formatters
|
| 3 |
+
~~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Pygments formatters.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import re
|
| 12 |
+
import sys
|
| 13 |
+
import types
|
| 14 |
+
import fnmatch
|
| 15 |
+
from os.path import basename
|
| 16 |
+
|
| 17 |
+
from pygments.formatters._mapping import FORMATTERS
|
| 18 |
+
from pygments.plugin import find_plugin_formatters
|
| 19 |
+
from pygments.util import ClassNotFound
|
| 20 |
+
|
| 21 |
+
__all__ = ['get_formatter_by_name', 'get_formatter_for_filename',
|
| 22 |
+
'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS)
|
| 23 |
+
|
| 24 |
+
_formatter_cache = {} # classes by name
|
| 25 |
+
_pattern_cache = {}
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def _fn_matches(fn, glob):
    """Return whether the supplied file name fn matches pattern filename."""
    try:
        pattern = _pattern_cache[glob]
    except KeyError:
        # First time we see this glob: translate and memoize it.
        pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
    return pattern.match(fn)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def _load_formatters(module_name):
    """Import *module_name* and register every formatter class it exports."""
    module = __import__(module_name, None, None, ['__all__'])
    for exported_name in module.__all__:
        formatter_cls = getattr(module, exported_name)
        # Cache under the formatter's human-readable name, not the class name.
        _formatter_cache[formatter_cls.name] = formatter_cls
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def get_all_formatters():
    """Return a generator over all formatter classes, built-ins first,
    then plugin-provided ones.

    NB: this yields formatter *classes*, not info tuples like
    get_all_lexers().
    """
    for module_name, formatter_name, *_rest in FORMATTERS.values():
        if formatter_name not in _formatter_cache:
            _load_formatters(module_name)
        yield _formatter_cache[formatter_name]
    yield from (cls for _, cls in find_plugin_formatters())
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def find_formatter_class(alias):
    """Lookup a formatter class by alias.

    Returns None if not found.
    """
    for module_name, formatter_name, aliases, _patterns, _doc in FORMATTERS.values():
        if alias not in aliases:
            continue
        if formatter_name not in _formatter_cache:
            _load_formatters(module_name)
        return _formatter_cache[formatter_name]
    for _name, plugin_cls in find_plugin_formatters():
        if alias in plugin_cls.aliases:
            return plugin_cls
    return None
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def get_formatter_by_name(_alias, **options):
    """
    Return an instance of a :class:`.Formatter` subclass that has `alias` in its
    aliases list. The formatter is given the `options` at its instantiation.

    Will raise :exc:`pygments.util.ClassNotFound` if no formatter with that
    alias is found.
    """
    formatter_cls = find_formatter_class(_alias)
    if formatter_cls is None:
        raise ClassNotFound(f"no formatter found for name {_alias!r}")
    return formatter_cls(**options)
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def load_formatter_from_file(filename, formattername="CustomFormatter", **options):
    """
    Return a `Formatter` subclass instance loaded from the provided file, relative
    to the current directory.

    The file is expected to contain a Formatter class named ``formattername``
    (by default, CustomFormatter). Users should be very careful with the input, because
    this method is equivalent to running ``eval()`` on the input file. The formatter is
    given the `options` at its instantiation.

    :exc:`pygments.util.ClassNotFound` is raised if there are any errors loading
    the formatter.

    .. versionadded:: 2.2
    """
    try:
        # This empty dict will contain the namespace for the exec'd file
        custom_namespace = {}
        with open(filename, 'rb') as f:
            exec(f.read(), custom_namespace)
        # Retrieve the class `formattername` from that namespace
        if formattername not in custom_namespace:
            # Fix: report the actual file, not a hard-coded "(unknown)".
            raise ClassNotFound(f'no valid {formattername} class found in {filename}')
        formatter_class = custom_namespace[formattername]
        # And finally instantiate it with the options
        return formatter_class(**options)
    except OSError as err:
        # Fix: include the offending filename; chain the original cause.
        raise ClassNotFound(f'cannot read {filename}: {err}') from err
    except ClassNotFound:
        raise
    except Exception as err:
        raise ClassNotFound(f'error when loading custom formatter: {err}') from err
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def get_formatter_for_filename(fn, **options):
    """
    Return a :class:`.Formatter` subclass instance that has a filename pattern
    matching `fn`. The formatter is given the `options` at its instantiation.

    Will raise :exc:`pygments.util.ClassNotFound` if no formatter for that filename
    is found.
    """
    fn = basename(fn)
    for modname, formatter_name, _aliases, patterns, _doc in FORMATTERS.values():
        if any(_fn_matches(fn, pat) for pat in patterns):
            if formatter_name not in _formatter_cache:
                _load_formatters(modname)
            return _formatter_cache[formatter_name](**options)
    for _name, plugin_cls in find_plugin_formatters():
        if any(_fn_matches(fn, pat) for pat in plugin_cls.filenames):
            return plugin_cls(**options)
    raise ClassNotFound(f"no formatter found for file name {fn!r}")
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
class _automodule(types.ModuleType):
    """Module type that imports formatter classes lazily on attribute access."""

    def __getattr__(self, name):
        info = FORMATTERS.get(name)
        if info is None:
            raise AttributeError(name)
        _load_formatters(info[0])
        cls = _formatter_cache[info[1]]
        # Cache on the module so subsequent accesses skip __getattr__.
        setattr(self, name, cls)
        return cls
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
# Replace this module object in sys.modules with an _automodule instance so
# that attribute access (e.g. ``pygments.formatters.HtmlFormatter``) imports
# the formatter's submodule lazily, on first use.
oldmod = sys.modules[__name__]
newmod = _automodule(__name__)
newmod.__dict__.update(oldmod.__dict__)
sys.modules[__name__] = newmod
# Drop bookkeeping names from the replacement module's namespace.
del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
|
source/pygments/formatters/_mapping.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Automatically generated by scripts/gen_mapfiles.py.
|
| 2 |
+
# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead.
|
| 3 |
+
|
| 4 |
+
# Maps formatter class name to a 5-tuple:
# (module name, formatter name, aliases, filename patterns, one-line docstring).
FORMATTERS = {
    'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'),
    'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
    'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
    'GroffFormatter': ('pygments.formatters.groff', 'groff', ('groff', 'troff', 'roff'), (), 'Format tokens with groff escapes to change their color and font style.'),
    'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags. By default, the content is enclosed in a ``<pre>`` tag, itself wrapped in a ``<div>`` tag (but see the `nowrap` option). The ``<div>``'s CSS class can be set by the `cssclass` option."),
    'IRCFormatter': ('pygments.formatters.irc', 'IRC', ('irc', 'IRC'), (), 'Format tokens with IRC color sequences'),
    'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
    'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
    'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
    'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
    'PangoMarkupFormatter': ('pygments.formatters.pangomarkup', 'Pango Markup', ('pango', 'pangomarkup'), (), 'Format tokens as Pango Markup code. It can then be rendered to an SVG.'),
    'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
    'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft(R) Word(R) documents.'),
    'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
    'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
    'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'),
    'TerminalTrueColorFormatter': ('pygments.formatters.terminal256', 'TerminalTrueColor', ('terminal16m', 'console16m', '16m'), (), 'Format tokens with ANSI color sequences, for output in a true-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
    'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.'),
}
|