diff --git a/.gitattributes b/.gitattributes
index a6344aac8c09253b3b630fb776ae94478aa0275b..97ef41b07e8990cfcd0fd6773135c0dc9879f7f5 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -33,3 +33,22 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+figures/joyai-logo.png filter=lfs diff=lfs merge=lfs -text
+venv/bin/python filter=lfs diff=lfs merge=lfs -text
+venv/bin/python3 filter=lfs diff=lfs merge=lfs -text
+venv/bin/python3.10 filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/hf_xet/hf_xet.abi3.so filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/huggingface_hub/__pycache__/hf_api.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/_client.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/huggingface_hub/inference/_generated/__pycache__/_async_client.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/idna/__pycache__/idnadata.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/pkg_resources/__pycache__/__init__.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/pkg_resources/_vendor/__pycache__/pyparsing.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/lisp.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
diff --git a/figures/joyai-logo.png b/figures/joyai-logo.png
new file mode 100644
index 0000000000000000000000000000000000000000..ddf378cec23455d52aa283209499f50440487d6d
--- /dev/null
+++ b/figures/joyai-logo.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ea9d6a20a7707ca8dc427d6dcb5db6e2489f7730d5bffea26d8db20b1c54365
+size 249524
diff --git a/model-1-of-40.safetensors b/model-1-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f8454d5788c39ed29e6d8c8f97fbacb1ed248657
--- /dev/null
+++ b/model-1-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d7d2f4fca392ea83b71c94991028ce8c22108082e5853cd8b5d4bac9a6567a5d
+size 70417432
diff --git a/model-10-of-40.safetensors b/model-10-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9e0528cc2a9cea56712b9b27507fd73d80718170
--- /dev/null
+++ b/model-10-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3cf0ae859901c7edffb068245fbc6604d94b4d4891f825dc9c39e831bb7d8231
+size 1240579000
diff --git a/model-11-of-40.safetensors b/model-11-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..bb3e19a5d3050901b5f648f12388167ea4fc3b88
--- /dev/null
+++ b/model-11-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:adf0ebaa860a6d3540672da76461e417695b22d198ae37f7ce982ea130704225
+size 1240580552
diff --git a/model-12-of-40.safetensors b/model-12-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d441805c9f6796d97967e478b7ab4a81cc353597
--- /dev/null
+++ b/model-12-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dd4c356929aacd52345d948ba514d1959938add01290a1373adaced169a5615a
+size 1240580552
diff --git a/model-13-of-40.safetensors b/model-13-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b5d8244a6a718a64f61b14545b3825b4090431c6
--- /dev/null
+++ b/model-13-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:46e718a4c1f759e985beb479164851c20f80aab1ae673cb6339fd34601681c20
+size 1240580552
diff --git a/model-14-of-40.safetensors b/model-14-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..5a9b4de532ec27528b9674c356aad4f56b403282
--- /dev/null
+++ b/model-14-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c8412f9d9bb91e57e32821b7a97c0fff5418d6a9a845dc36041e451661faf15a
+size 1240580552
diff --git a/model-15-of-40.safetensors b/model-15-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..ec62c8a07dbe808024841694e18db5a21e42c53e
--- /dev/null
+++ b/model-15-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aa0fb469858a28e57990b479096b79108ae13cff06c6ac67c9b7caa235b7e5ca
+size 1240580552
diff --git a/model-16-of-40.safetensors b/model-16-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..27e7015ee6e93ef225bbb49ff3c74a29d717a58f
--- /dev/null
+++ b/model-16-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d764ca0f5a313145e07b99d029e87bb2cb473d63d431cd9557cd11eebdabc735
+size 1240580552
diff --git a/model-17-of-40.safetensors b/model-17-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..77798b8ff05a61f049cdf7ab52df36818d786de9
--- /dev/null
+++ b/model-17-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3484cdfbbfa11d7dd7fd3c6822ee76de89c15c524b0b3f600b9a8c43dd164971
+size 1240580552
diff --git a/model-18-of-40.safetensors b/model-18-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e5f40f66a66e6afb09fca01b7969ad287f45af7c
--- /dev/null
+++ b/model-18-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:20a4b4fdeea04a49aaf0bffbe1a83e9be2933ddd823132f65578346710c86ee7
+size 1240580552
diff --git a/model-19-of-40.safetensors b/model-19-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f01964da6ae75c6e1e044abc624f1fa4b32d415d
--- /dev/null
+++ b/model-19-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8a016c68e4c75beebe21973b5943fe6cd35496a3c278cfb14589834781357bef
+size 1240580552
diff --git a/model-2-of-40.safetensors b/model-2-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..596fef17939bf766654bf541b5f8be63fc23b79b
--- /dev/null
+++ b/model-2-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2ca44e797ad60b01c1389e28840fab385b787239d2fd4592eb614e9f0605f213
+size 1240579000
diff --git a/model-20-of-40.safetensors b/model-20-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..ee1e19933ade777a9ba53e2aedf1bb470cb5daef
--- /dev/null
+++ b/model-20-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dd2c7c6ca3658d53f7e3690334c8ceeba73d6e91fe44754e39a42f67eb560b7e
+size 1240580552
diff --git a/model-21-of-40.safetensors b/model-21-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..074c340c94f25953db172b7c2a22d39fc48cb628
--- /dev/null
+++ b/model-21-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f7f47f18b836de4cde8e212801a08ff00ce1bb9e728ecd62197b083f33463192
+size 1240580552
diff --git a/model-22-of-40.safetensors b/model-22-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..50e12977e576a57be352da444087ae3926b38645
--- /dev/null
+++ b/model-22-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c93488f761433d508fc79d57ca12c076497d0d76607b55e9b92a1e00857fdd06
+size 1240580552
diff --git a/model-23-of-40.safetensors b/model-23-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d431fe2b839be28c4cb154040fc71374206c23ee
--- /dev/null
+++ b/model-23-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a4a141d99d73779c8a41a07a414a040b3eb8ee2da921b3d39057db1268126b4f
+size 1240580552
diff --git a/model-24-of-40.safetensors b/model-24-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b3858f9ddfd025c99c1f278d20c122f55982158f
--- /dev/null
+++ b/model-24-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5b6cae888ae0203ab72ff226fca2f509a0065c8749149a3be49a894961e2e386
+size 1240580552
diff --git a/model-25-of-40.safetensors b/model-25-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d20ac6daf0a8642d213f8d7d10079b22faa8df50
--- /dev/null
+++ b/model-25-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0080dacaa12cca4be6a63399903c46cc17fe81e7362e0b6912d137c5e942197a
+size 1240580552
diff --git a/model-26-of-40.safetensors b/model-26-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9065bea30171b5e90e463ba72b189362be49e121
--- /dev/null
+++ b/model-26-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a21eb4f08465c03e0629f5deab8aefcb5041cf65be98c876d3648b61610ade59
+size 1240580552
diff --git a/model-27-of-40.safetensors b/model-27-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..592977526acb467192607f38416f3b108f383cd9
--- /dev/null
+++ b/model-27-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f77abe8bb7c8d48a46b074eae99bb136eed54b5457d0fd3fb5589ee3fe3cd40
+size 1240580552
diff --git a/model-28-of-40.safetensors b/model-28-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..01f0db5543189e0e0393d9dbb2add72773c531cf
--- /dev/null
+++ b/model-28-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fc0b237b6b5284710d2c32d84d516a2fa57511b65ecf815ce104a69f2562bf53
+size 1240580552
diff --git a/model-29-of-40.safetensors b/model-29-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..db8ad9d117983535e1327db16416cd928d1c0d73
--- /dev/null
+++ b/model-29-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:452f7f362ca865ef16d152502e30836391f24f9420b72b33123fbec7fb275815
+size 1240580552
diff --git a/model-3-of-40.safetensors b/model-3-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..012fa4e2aa439c19ef097be9f9922d97f0949619
--- /dev/null
+++ b/model-3-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8f3fd8abe76ba31fa3ee2b7a1bd2083fe3f4c89e10e2aed00e10aacd80c61df9
+size 1240579000
diff --git a/model-30-of-40.safetensors b/model-30-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..21598e7118989722b896b9abac4e6cf96fcfb1ee
--- /dev/null
+++ b/model-30-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dcd4999348779b34b203564c7403965bd6279ad10897a56e4b6acb7f1f7c5baf
+size 1240580552
diff --git a/model-31-of-40.safetensors b/model-31-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f1f639bdf1b32afde1700b137cc809bf5f9bf01f
--- /dev/null
+++ b/model-31-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2563e54f2a5536989dbab99d5aa1483488075ec467ee5758f92242b7358de61b
+size 1240580552
diff --git a/model-32-of-40.safetensors b/model-32-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..27b2124545710a32d015af879d7d729fa04962e9
--- /dev/null
+++ b/model-32-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8b6350967e09c581d1533c0d5bdbd7dafa49b102c438b4739010b5eb6d7f5938
+size 1240580552
diff --git a/model-33-of-40.safetensors b/model-33-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..3e4f16e5251873c2859b015d5f4840b01f24c717
--- /dev/null
+++ b/model-33-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:72f04cf97b30ed4f78c621a17a0baba64768daadf834dcbd40c2b34fa9ff35d0
+size 1240580552
diff --git a/model-34-of-40.safetensors b/model-34-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..ebace598ccbb48d97390e0493a4797e80e7e2c55
--- /dev/null
+++ b/model-34-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5e7c200d90d14deff777ad6173c89812f216465d121ca2ea1321b7eba6ff2bff
+size 1240580552
diff --git a/model-35-of-40.safetensors b/model-35-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7211aa68e863d05ee1a8780e63066de0b1900993
--- /dev/null
+++ b/model-35-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dc19f9f4a6ab0061b3d1a3745112e18e25ca20596bd4b2062e10eccf562c09f0
+size 1240580552
diff --git a/model-36-of-40.safetensors b/model-36-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..5b9aa3a8793534d2670625b2d85e78db087a5c23
--- /dev/null
+++ b/model-36-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e70a79a57258e4fced6967888ceaf461526e6e55dbb4499e03914d28211d7cbc
+size 1240580552
diff --git a/model-37-of-40.safetensors b/model-37-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..add2b945b4a93427e105d003131a65496e4d842d
--- /dev/null
+++ b/model-37-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9c46036a5be022ca09c88a83034e60d00620e16ad79c917e4f4309353b639bb5
+size 1240580552
diff --git a/model-38-of-40.safetensors b/model-38-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e99615ec1a1eb90c874010d7003c10460c03df4d
--- /dev/null
+++ b/model-38-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:90bf3a173dbf30d34e63ee1572130c0b5f32314179cb3673423be8762bd3bf2c
+size 1240580552
diff --git a/model-39-of-40.safetensors b/model-39-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..0da0428943723a4900f1cdb36723cc408df4c54b
--- /dev/null
+++ b/model-39-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6884d2526b5fbba4b6ac7f395ce57ae94d6db67f352c4612e80ed52dd0c5916e
+size 1240580552
diff --git a/model-4-of-40.safetensors b/model-4-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..bc68658adfaf07500aedce82d0cab8ba1c64d71b
--- /dev/null
+++ b/model-4-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2b4ce9efb22eb6ee73bb906316495f579ee857e7674b6a5f785e5e1aa2a50427
+size 1240579000
diff --git a/model-40-of-40.safetensors b/model-40-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b917f6ff476b73eeaf001971d475dc668d02b3dc
--- /dev/null
+++ b/model-40-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:315cb2bfd43a9002670978ba4a864bf541d4bd042e4a22ee7661aa03343bf42d
+size 1240580552
diff --git a/model-5-of-40.safetensors b/model-5-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..8506eeca35cef4fa8290b71bd855bc2f44991104
--- /dev/null
+++ b/model-5-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b7b45d4a132f074f147b83208193a5b93e99ad3b736cb1febd428ec8dc22f17a
+size 1240579000
diff --git a/model-6-of-40.safetensors b/model-6-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..95dfaf65b5a0018cbfd4c2ff751d353e15e6063d
--- /dev/null
+++ b/model-6-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:38ce3a586f486ea09aab0dacedf31688fb5aaf7ee5583337219f1d3815fd0585
+size 1240579000
diff --git a/model-7-of-40.safetensors b/model-7-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..64e0285b84256c0a686c47e680ed18f257be2c70
--- /dev/null
+++ b/model-7-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1f37d97a025c306b4807d3c9b7b12c9d8d422db6279d63eb2cdee623e432ab0f
+size 1240579000
diff --git a/model-8-of-40.safetensors b/model-8-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..617dde9169457f256628ff4032f51ac1c7517913
--- /dev/null
+++ b/model-8-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3ad8fdbaad07cb38452a17a44937f4208ab58b3ace70fabf423976535a6ac962
+size 1240579000
diff --git a/model-9-of-40.safetensors b/model-9-of-40.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..c9707bbf341c4044965902a4def23bd73ce3714e
--- /dev/null
+++ b/model-9-of-40.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5831a26448dee958644d5d9383d5748726e8931ebbee18e034f285ad1c169596
+size 1240579000
diff --git a/model-non-layer.safetensors b/model-non-layer.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..c1cf1f0dddcf7c7bf995648e7722aa999a0f4298
--- /dev/null
+++ b/model-non-layer.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f5b0f8e8ace4f013693d24123ee0db03d731a409cdc5401b8beb87b0c84b7b1f
+size 1059066152
diff --git a/mtp-1-of-1.safetensors b/mtp-1-of-1.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d5d3d72c5900bd26bc56b36720ee992336a213ed
--- /dev/null
+++ b/mtp-1-of-1.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2c03c864759878b2837e5aa10a9e0558c6bc0daa7ee0ec6b61b46854f63740f3
+size 616738408
diff --git a/venv/bin/python b/venv/bin/python
new file mode 100644
index 0000000000000000000000000000000000000000..84d6fe34b073ce9c78a61afab00b8a56ab8f9200
--- /dev/null
+++ b/venv/bin/python
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f54854453331ddc3cb58abb15d4d24cd4964d9474fc31c3932348351b991b398
+size 5937672
diff --git a/venv/bin/python3 b/venv/bin/python3
new file mode 100644
index 0000000000000000000000000000000000000000..84d6fe34b073ce9c78a61afab00b8a56ab8f9200
--- /dev/null
+++ b/venv/bin/python3
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f54854453331ddc3cb58abb15d4d24cd4964d9474fc31c3932348351b991b398
+size 5937672
diff --git a/venv/bin/python3.10 b/venv/bin/python3.10
new file mode 100644
index 0000000000000000000000000000000000000000..84d6fe34b073ce9c78a61afab00b8a56ab8f9200
--- /dev/null
+++ b/venv/bin/python3.10
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f54854453331ddc3cb58abb15d4d24cd4964d9474fc31c3932348351b991b398
+size 5937672
diff --git a/venv/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc b/venv/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..34d102061deb5be6563b1653a4d110b00eb87191
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:996f8c7f9bce309b7dddbfca49de5e2405f1927645499a2fe3207ed41a341208
+size 115714
diff --git a/venv/lib/python3.10/site-packages/distutils-precedence.pth b/venv/lib/python3.10/site-packages/distutils-precedence.pth
new file mode 100644
index 0000000000000000000000000000000000000000..10c404f6ad452c148c46a39e11ddd4bc58530d16
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/distutils-precedence.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7ea7ffef3fe2a117ee12c68ed6553617f0d7fd2f0590257c25c484959a3b7373
+size 152
diff --git a/venv/lib/python3.10/site-packages/hf_xet/hf_xet.abi3.so b/venv/lib/python3.10/site-packages/hf_xet/hf_xet.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..1edadd90c398d8d4116c88b9b0b0dda241284d15
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/hf_xet/hf_xet.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:acd592c734655b46338cec117cc020cc013cd075db016dff1d1f276acfb658e8
+size 10589672
diff --git a/venv/lib/python3.10/site-packages/huggingface_hub/__pycache__/hf_api.cpython-310.pyc b/venv/lib/python3.10/site-packages/huggingface_hub/__pycache__/hf_api.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8220a8853487f871cd24cd476318ddc220ae1313
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/huggingface_hub/__pycache__/hf_api.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:beef8ee2404a288f20bb5eabf4fad9123e82657f966c9d1c09207a649ffb54ec
+size 425424
diff --git a/venv/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/_client.cpython-310.pyc b/venv/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/_client.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..67c4703de5c4c54078b6f87f48b78137533abe09
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/_client.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:392aaa092ffd591172910dd1716386db9b3404a379f653b19f1512aae09b4d5a
+size 132560
diff --git a/venv/lib/python3.10/site-packages/huggingface_hub/inference/_generated/__pycache__/_async_client.cpython-310.pyc b/venv/lib/python3.10/site-packages/huggingface_hub/inference/_generated/__pycache__/_async_client.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2825911482607ea4be4f53413c22998f7134ab09
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/huggingface_hub/inference/_generated/__pycache__/_async_client.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ec4c8e57fd5b897d2124b8561f9472c111fba4696164ae0bad0c8d9e5438437e
+size 136165
diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/idnadata.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/idnadata.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..13002e74103e181fe88745f76406a379c0e0b7ac
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/idna/__pycache__/idnadata.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12694f99d6bfa73c54a0af2f8581f170ac63abb891341641b1f240a37fddbeaf
+size 198738
diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0853922af992810672ceda0d1291d75093545a13
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4e0b41e9d5d8f716eddb2c9766c76449d23a52291c50915ef2fcdc842dbd1823
+size 155307
diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c8960b501e5333b152491a4e26b0823bb936800a
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c49f86642ed45435706607b363984d8faa30163deab6060d3e089bc6725e4d9b
+size 161360
diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..29c5ea02e222018218683f4c4b58c354a49584d0
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:00e81be321cefb78a821522e965c97957773ed2aed357ebfd22a177c039cbabc
+size 194423
diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7cab949c2a06adca1937a7d53d1b891a2793386f
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f34f941bef05197c281c5d3b9b01d3a8ab621ee6bd335d7153a4a23c876638b5
+size 151045
diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4dd466b7eca5a773cdbe55ffcd9a5978f4407e2a
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b70d0ab44d20c41f6ad87d1094fc9dcedb3081ad6f1c008c1ddf82c795356a7c
+size 175334
diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..868b51bb7cb3a844023d9c3a670206806d4f0fab
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:34af3c286abaf4cd16be014b896e3d01e895b2896dc19cb5ee0789f322adc01a
+size 360153
diff --git a/venv/lib/python3.10/site-packages/pkg_resources/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/pkg_resources/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e6e58c8746c265ec42e0af2734e8c9d0bd283dd7
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pkg_resources/__pycache__/__init__.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:79cbbe5bc4e9e7854a928a4d70d700c45b97a7a2e8194c2efbd20ad568f5f035
+size 100700
diff --git a/venv/lib/python3.10/site-packages/pkg_resources/_vendor/__pycache__/pyparsing.cpython-310.pyc b/venv/lib/python3.10/site-packages/pkg_resources/_vendor/__pycache__/pyparsing.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a4326d1175c0ee40f05f8a4b53f5d20eb17720ab
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pkg_resources/_vendor/__pycache__/pyparsing.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7e6a96b367737f96fd456d79ee985f2bd8ed3b5a97b2f46b1d407f9d0e2da96e
+size 198845
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dc840f58ea5b079e52adcaa9cfa82a8070712a18
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/__init__.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_ada_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_ada_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..21dce10412fe91786acc26464f47079bcedfd00a
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_ada_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_asy_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_asy_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2b4668b13fd059e13ff269bb31842b8ead5f64ba
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_asy_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_cl_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_cl_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e798a3952948a732ce5a6b51c6470e17b38d5688
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_cl_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_cocoa_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_cocoa_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..41fafecef5423faf898dc118b4b710a3d382bddb
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_cocoa_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_csound_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_csound_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ded93eb689bea175f4fd3d4009697fb8f6b1570c
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_csound_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_css_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_css_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..551fe1fc47121879ebfe14a01ce5007cd60d863f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_css_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_googlesql_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_googlesql_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8536cea0bff4468db7fdbb41c4cced2ab324c373
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_googlesql_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_julia_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_julia_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ecf7e06716edc84feb8bc55e36aeb00ef03971ac
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_julia_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_lasso_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_lasso_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cb7e95caf9bc524ea50240bbf257617fb5c5b814
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_lasso_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_lilypond_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_lilypond_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0afbcf7ab69b3ba21817e607598032fd9b3a90c7
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_lilypond_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_lua_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_lua_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f88971d7f3e8254e9b78b26ba21137a1a904ac72
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_lua_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_luau_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_luau_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..eacb6640c4fff6ec0e83881e523e293f510a657d
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_luau_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_mapping.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_mapping.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bb0255eca4bb233b882c36d1f0b59ddf81a234ab
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_mapping.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_mql_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_mql_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f1a8d663a824b6912fe531ea63e659f94b0ba6eb
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_mql_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_mysql_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_mysql_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..762b939901a112cc42d474f92849df00625e344b
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_mysql_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_openedge_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_openedge_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d63b96741d482595ed54b42064114c54df1c3d8b
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_openedge_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_php_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_php_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a89cccc6876f8cb1972a6fc83aebfd19c28c575a
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_php_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_postgres_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_postgres_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4d75c1b1ca68b0190cb4f0ee4a840419ef879692
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_postgres_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_qlik_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_qlik_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bf587c64e2ab4c605c9c5c96b4060e6001408985
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_qlik_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_scheme_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_scheme_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d7e0e5dac322f1843287bcb7d00b30b19e1c8d94
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_scheme_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_scilab_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_scilab_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..efc25b3a1c4ce06b4a0065c7eb3f562ef44269b2
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_scilab_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_sourcemod_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_sourcemod_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f3f9201d5258b813c5734da116db4fb0b4c951ab
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_sourcemod_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_sql_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_sql_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3881b764093d840af277e4914f9801581a51c2d0
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_sql_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_stan_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_stan_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2ed34f1b3e21a5e1e99b821ae58fdf1f3f4bb7b2
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_stan_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_stata_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_stata_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..da6a422dc60f8ab241d043da723f4101ef0d88df
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_stata_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_tsql_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_tsql_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..575e059e1b5a1fb26fe6d8aef8806f171da8b0f2
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_tsql_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_usd_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_usd_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..386a6282f41bbe53736956acebdcc0a706913c3a
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_usd_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_vbscript_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_vbscript_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..adf2292360c2425abe74dcfd04d0125a52608784
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_vbscript_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_vim_builtins.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_vim_builtins.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2b20cd5023646923a73458f9018a76e89ab856b2
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/_vim_builtins.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/actionscript.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/actionscript.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f52a1d44cd55c02d9c0d33e9e0c82cbbca118146
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/actionscript.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ada.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ada.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2682442a293a236efef378e8c418f1935f31eaf5
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ada.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/agile.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/agile.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..45fdb93c15601e24968d36ef4e2220ae6adb75fc
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/agile.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/algebra.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/algebra.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4e80ec6c5af85728547b225eb5826fc532b029d1
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/algebra.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ambient.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ambient.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..95c0aae4f77bf1768f9c9954eefd5729fd1e8acc
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ambient.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/amdgpu.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/amdgpu.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..73232df0724eae05ea0dc62beb87fdec7179a884
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/amdgpu.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ampl.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ampl.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..148ed7201fb9f9697f9e21a26e454551d71e32f8
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ampl.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/apdlexer.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/apdlexer.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a3fa93870fe52a34f27483d44595a94ff7ab9150
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/apdlexer.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/apl.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/apl.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ea6a286f7883e43984aca2cd4beeea4ca98d9f70
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/apl.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/archetype.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/archetype.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..02079e2db749677ed9ff3c66a896d1f1e7c62f9a
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/archetype.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/arrow.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/arrow.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..977278f5047f4a8ce8bba6bde6fa7dc8a3794d3c
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/arrow.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/arturo.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/arturo.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8bc8ff03ce48c49e1a65439f44b796a7709b16aa
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/arturo.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/asc.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/asc.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1d44808e609456c4c641997371153f4b7cbe05ee
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/asc.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/asm.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/asm.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..96c26e4f47dc918ef8e0c0ee7cfd7d11764ccc01
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/asm.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/asn1.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/asn1.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ca845d1e204f980c014438db4d6a5d28debcab43
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/asn1.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/automation.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/automation.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..84fb1ec8dff6d3abce427f0b12e2d53d931405ee
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/automation.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/bare.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/bare.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e9688ce75a3ffa5c37a463093b2a31310b4d81f6
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/bare.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/basic.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/basic.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2f77d98d4501e8ebe67aa65a6718a7c301da4357
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/basic.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/bdd.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/bdd.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f0421d1820f6779c8ecde1fedc6527ed3d840bfc
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/bdd.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/berry.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/berry.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3d3680c0dc29f08dcba82715352b583c96fda768
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/berry.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/bibtex.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/bibtex.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..89fcdf006ad5f1485c496c09dc2c24cd812cb58c
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/bibtex.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/blueprint.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/blueprint.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bad3f6f0797d5a6ddf6bc7729554b9b418f4a159
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/blueprint.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/boa.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/boa.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0baa2faaa105b3de26e5542d478b9838c0e2dbcf
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/boa.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/bqn.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/bqn.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6711ff867727b3abd7c9a711115f849eaeae348c
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/bqn.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/business.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/business.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..622c32762e3352f6883c3f79febf467a60f22d7f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/business.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/c_cpp.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/c_cpp.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1e64976a87cd1ea26151662fb20924a5eb08bf44
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/c_cpp.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/c_like.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/c_like.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f589f8c9269bbef6386dcb0f3319cc1b5aa531dc
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/c_like.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/capnproto.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/capnproto.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d8d211c22eaf6052a3dfcadacd781de93ad3a25e
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/capnproto.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/carbon.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/carbon.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a8ffed1bab478840bf7b694b0f10d9acf8ecd325
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/carbon.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/cddl.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/cddl.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fd20fa031ac689fd630f666ab5085d4e644485b6
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/cddl.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/chapel.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/chapel.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2ffec888f6758d0a2d894ee494fe981a1c471785
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/chapel.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/clean.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/clean.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e4d3201fb7741a70a2c32a60b198001f1424b0c1
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/clean.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/codeql.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/codeql.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ad2bc781043f92a66d7828db8c62f2671acafdf1
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/codeql.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/comal.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/comal.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a36c9427cc8411309ece406920bd4c5e5da62dee
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/comal.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/compiled.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/compiled.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b9b3f70cc7c49c6510a8868d095f3d6a5ffd040c
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/compiled.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/configs.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/configs.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..18988ba0dcc4369f5a3e96005e5058cc30ab3fdf
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/configs.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/console.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/console.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c0a594968b4a4a827251b1f548234c8b9de075b8
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/console.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/cplint.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/cplint.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..513a33a6dab2075e34a69080825c10e2e3cbd366
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/cplint.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/crystal.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/crystal.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b2e5ba7b8083aee2d0a2acc9b89e50b79412fe96
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/crystal.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/csound.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/csound.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7ff7320df8be575233ed6235ef700f6678f7130c
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/csound.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/css.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/css.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..325e055db0ea7edee3af9cb523863b962dca9740
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/css.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/d.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/d.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3dd64ac3e1736d223930b8d8d10289ef8b181c2b
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/d.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dalvik.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dalvik.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1b2d9a19019a851eeab54bd6991ff341563b0c40
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dalvik.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/data.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/data.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2a94fa477d1e0d8b63d3032ff7ec290fcf527936
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/data.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dax.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dax.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0ea7fe67ee79cc5c89872c5d77c47f58cddbc9c3
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dax.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/devicetree.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/devicetree.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..45ca055a245373f6888e6d814545ea3f21880615
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/devicetree.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/diff.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/diff.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4313d7f99c3a1eefb2f791f621b41919afb57494
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/diff.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dns.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dns.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..811cbaa02159690f95f5ec826feeb3527ac26044
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dns.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dotnet.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dotnet.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c24b3ab9d2ab84bb46a7893f87cb31f73c0623a7
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dotnet.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dsls.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dsls.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2725445963a623aa73f23493a4df0f1170e0f906
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dsls.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dylan.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dylan.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ade728d6e0bc1c3563ec74cd531f932624d42289
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/dylan.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ecl.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ecl.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..675f1eaf8b0a1d00046ebf17e5a52a4c7110152f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ecl.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/eiffel.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/eiffel.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4f8fee5c898a74b531e3565ce7fff8fbc3e4b237
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/eiffel.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/elm.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/elm.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9ce8387517384df42e9e8252c14f119c20d16b52
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/elm.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/elpi.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/elpi.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a57e797e5db2706d179df9870a102e1a4f0bcef6
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/elpi.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/email.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/email.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..566873ebf57188736aed63035e4a15fd91aa21bb
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/email.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/erlang.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/erlang.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f5879b3ea4597adc0ef9a9e3c3cc772b24fc965e
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/erlang.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/esoteric.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/esoteric.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fded09719a20f8195d010b95a7f316d9b7d4af69
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/esoteric.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ezhil.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ezhil.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e40687b99141c7f569b33e93bdf9d568b4f47630
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ezhil.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/factor.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/factor.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dafe40256045c18ba6b30a678e8668313dcc0a3f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/factor.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/fantom.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/fantom.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8a5f5178da53b287ab11d5e81e00e1014bc1bdca
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/fantom.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/felix.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/felix.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d906ff1f36e7a806a26d9d81ed5a2e782e295723
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/felix.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/fift.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/fift.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6eb5c24cf29702a350de0e673510c6bbc1d5a894
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/fift.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/floscript.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/floscript.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c5f786e99663bb1eb4697f008529274613140812
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/floscript.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/forth.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/forth.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7f39f361a47601de6e4d100bda2388cba1d58f50
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/forth.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/fortran.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/fortran.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9b53c9c7c1f3a592a2e773bb716abcb5dfdc6634
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/fortran.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/foxpro.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/foxpro.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b1df586d65b2ac31e10116be466130cf59cd6766
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/foxpro.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/freefem.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/freefem.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a1cfff449ad9976f0d4e673a40b3d8435e843833
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/freefem.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/func.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/func.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..73dda3377ceec1ab6a6071b941ed968be1572622
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/func.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/functional.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/functional.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b3506d92002835d5ec381b13d4f1dd03e19052ab
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/functional.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/futhark.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/futhark.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e0bb24d25bc1cba556183e387faef71d7dea8f86
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/futhark.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/gcodelexer.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/gcodelexer.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1abffa711c876f0a047370a399039e20a5c9d6ec
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/gcodelexer.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/gdscript.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/gdscript.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f0af5b624cadadef2613005d7cc0b86c9deee771
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/gdscript.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/gleam.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/gleam.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..934281a7754e38e5e44390ddcd51faf47c512e79
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/gleam.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/go.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/go.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3a0717d1ede3c4e4db019bee833c2d717b9f92be
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/go.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/grammar_notation.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/grammar_notation.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a6b5d4afa176c0f073502985888f9d1ad54674f6
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/grammar_notation.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/graph.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/graph.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..eab83d48ff36c2c318937c95f1ec29ee698b0f55
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/graph.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/graphics.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/graphics.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..998843cb3e715e28b14719864137ef7a9d863a2a
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/graphics.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/graphql.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/graphql.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d8fce8e54120195d833256bfd9ae98bd1b07380c
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/graphql.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/graphviz.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/graphviz.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cb706dc11601de3fd95ba96124f32cd35116c1b8
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/graphviz.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/gsql.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/gsql.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..027fad3f817ff9c8397539f248f37a9c0cb85e35
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/gsql.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/hare.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/hare.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1459e12d5aa2f631c2444f6c6e99c2c5f7162775
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/hare.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/haskell.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/haskell.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a02bb696ec69a2f9df6e19d00c144aea1f746a15
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/haskell.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/haxe.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/haxe.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d07671211e1835a992a727369489f4eda0d16831
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/haxe.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/hdl.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/hdl.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4868005880622474e2dd8bb00a815a2f6f2b2cf1
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/hdl.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/hexdump.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/hexdump.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9d7e2e42e1b8b9a56925d1cb32040e42334c3e06
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/hexdump.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/html.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/html.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9420354ec333f47ce8d6a6f78f9fe6d998e75918
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/html.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/idl.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/idl.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8961bdddd9e9fcb737dc8308b583100aa9244d72
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/idl.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/igor.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/igor.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..51929b600926fa53193e3746f22a8d5283d9d83d
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/igor.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/inferno.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/inferno.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6c30df6696b794c7ec2a866f7e543cabbb92c5a7
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/inferno.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/installers.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/installers.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c9a0374091d210eaee8e503c1f10dc7f8466f35d
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/installers.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/int_fiction.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/int_fiction.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..545ded3f0b81f1eaf8706c499ef08b61f002234a
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/int_fiction.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/iolang.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/iolang.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..af90dde102ec66aed9f63cee7dd749a5cc89e6d9
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/iolang.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/j.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/j.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e73fcd93e9eda2bc8d5d7c12e4a41b7055bf5dee
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/j.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/javascript.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/javascript.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..60f7acec83bbf81627a97d5c2ecabcf1a0939dd8
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/javascript.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jmespath.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jmespath.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..50209adc5aa8e14762ac0e78969307456e7b1c27
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jmespath.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jslt.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jslt.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cc3f55f2dae0e633295575c12ff2d95e3004efab
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jslt.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/json5.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/json5.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4ffe767d816364c5f75d2851b5199312e5d93a6b
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/json5.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jsonnet.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jsonnet.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4d35a1984cd5eb2698afb5887454fcfd1cdb078a
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jsonnet.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jsx.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jsx.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f7f9c4bc9fff3f77ac7bafbbedef049c1f0c3b65
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jsx.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/julia.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/julia.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6f2a57de561f6efd7363f5e37fbd1e6fad7da0ee
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/julia.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jvm.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jvm.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..30bc50ac1037ae52f8da45b7978a19b92477537c
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/jvm.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/kuin.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/kuin.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d7a2355037199d81f71052b122a2b2ba8f2a7f8f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/kuin.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/kusto.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/kusto.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b40bb92b5f84ec6fd0cf92e81acac6902a3fcd1f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/kusto.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ldap.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ldap.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..51af5396d6dca6bb512514b4d9cab4be7b2d1cc2
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ldap.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/lean.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/lean.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0e41d5af63659aa6b705fe53a19d8851465e172b
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/lean.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/lilypond.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/lilypond.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4cfaa6072b7ad4b30546e1fdefb0b9d2c516fe89
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/lilypond.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/lisp.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/lisp.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ba9748b288a81b2290bde176afdcc0ad967a4c1e
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/lisp.cpython-310.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:857398ee6d70d1e3a9b8e39646f5477da1d80088522849dd7433a9a581809ca6
+size 107619
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/macaulay2.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/macaulay2.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3aef1d283ca7f827393dbd1231aef33863b99288
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/macaulay2.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/make.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/make.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a8531758483f9cf6cb98fa4b3e0cc14c4dc08f5a
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/make.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/maple.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/maple.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4f80161215f5819059409f5690cad3b546e5ed0f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/maple.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/markup.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/markup.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3790f61eca369b3d982661f2b1637732daac32d4
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/markup.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/math.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/math.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0a44f05a3017904bcb6921c5c7ca7c03bccafe3a
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/math.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/matlab.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/matlab.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0e1cbab9cc8932d338fdf4a63d05017786700c75
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/matlab.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/maxima.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/maxima.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e78f91bc484ca8ad9792638fa4603b2f7788a970
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/maxima.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/meson.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/meson.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..18797ec65d37fd2d090636880cde05e5124501ce
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/meson.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/mime.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/mime.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3af455181f66c60bd79e3a8051004695ede26bc9
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/mime.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/minecraft.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/minecraft.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1fd0fdae3c3ad948150fb088919996ff73c7291f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/minecraft.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/mips.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/mips.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..38cd41bec6df24c350a53eecf6c084a70faa8017
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/mips.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ml.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ml.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3707c754195484cf1a1f27c7cdfbb53030c8a42f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ml.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/modeling.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/modeling.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c7430491111b54c43ac9a4eb8bdd367916e6dba2
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/modeling.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/modula2.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/modula2.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..153f33d762f92851d3f6fbf59d3645dcf34ae8a0
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/modula2.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/mojo.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/mojo.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7296086d785825a6d97fba0c9d778546cf7a5e90
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/mojo.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/monte.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/monte.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..33e9649c56bcb07f726a7d3a965b07978794a45f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/monte.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/mosel.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/mosel.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4b1a0f7d75461f71e421b927a63aba3ddde84e61
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/mosel.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ncl.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ncl.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bf838b5ee4f9bd516610717090cede77ae2ffc49
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ncl.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/nimrod.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/nimrod.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8ea2680acf675dbd51bd67a793e98e15c4b84e06
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/nimrod.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/nit.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/nit.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..18037dfa634bd5ba5a905ceaaf5157bd3d2f37de
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/nit.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/nix.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/nix.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9f280416deebefd6115d50be5526015d79525faa
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/nix.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/numbair.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/numbair.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..87d83e05720b31fb778a98704749ad00363164cb
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/numbair.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/oberon.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/oberon.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..eceed89a78a581a6c7d5d05f4550094545dadb45
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/oberon.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/objective.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/objective.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f180d307116555666233c49d979f248ac3145a37
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/objective.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ooc.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ooc.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bb4ead19da895ccb8b7db2471789ab74cd0a7838
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ooc.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/openscad.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/openscad.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b1127e8c6872c364ef30bbac692d32e26c127599
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/openscad.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/other.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/other.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2a9faf3203e8e3706d14e7d4f9595b59026324ff
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/other.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/parasail.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/parasail.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4141da15af902e5365d0602e8d288cf6e239015b
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/parasail.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/parsers.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/parsers.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cde0a43b6830d2fd3ba3e7effce0141851297221
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/parsers.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pascal.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pascal.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..138d732e47c9504ecc4d94cf1efb347c3af843a2
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pascal.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pawn.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pawn.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e9bf021a2d6ebb1af734b3723d65c9e8afa65a10
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pawn.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pddl.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pddl.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a940a0308768bf5a20d906b731e273d3d6ae54b6
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pddl.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/perl.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/perl.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..be7b0751e5edd1da278ae35e3d966ae444ab1c77
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/perl.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/phix.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/phix.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..eb93585be83d8f8cded2a53595df1696b368d229
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/phix.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/php.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/php.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..069147da68974a701ed55a03611bd0f7cc1379d2
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/php.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pointless.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pointless.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6d8b4aeaebcb90e779c0cf479465c14efceb95e3
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pointless.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pony.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pony.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..163ac3f72319c5d734444d3f026c946d82a3bd10
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/pony.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/praat.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/praat.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fd29dec3ebda75f952369feb329cdb00cdcad8c6
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/praat.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/procfile.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/procfile.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..32ca1531fbd1f3614f30977e5fee0920624809cd
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/procfile.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/prolog.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/prolog.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..658973e642439bf0e3ad3cc5834ba1d97ed0a5ae
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/prolog.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/promql.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/promql.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f3e8c35d76e3b515d06b5601547585feb1a9caef
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/promql.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/prql.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/prql.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a568f01eb96b842db6ff6654a132d68bd1966537
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/prql.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ptx.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ptx.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cdfce7a73e36fb9a7ab0609afeba49eca029b7c5
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ptx.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/python.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/python.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1945f48a33889a5cc26bec6bfc9740396faee581
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/python.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/q.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/q.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7aedc43f28663fe04e781a858ed3b6f12fbd798b
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/q.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/qlik.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/qlik.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1808f75918c7c2819b26c9df05bd9fbf41f8825a
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/qlik.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/qvt.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/qvt.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a3a221e219774355fa4a7a8b8e9e48652967682c
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/qvt.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/r.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/r.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4ba87bbc6a33043e7aba396c37a32e8241e96b2f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/r.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rdf.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rdf.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1da60a9ef1d95c93beb2298d85a68bc376e23be0
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rdf.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rebol.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rebol.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7f120af8e34f8d249ede6d89b607186eb6c32061
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rebol.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rego.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rego.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5ee8b9771764367f03d285e5bae980ee7aca47e0
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rego.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/resource.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/resource.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..83853fac6162d0c1e861662acff0f270c6e393a2
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/resource.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ride.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ride.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..37a7a61b7f6c2b0f71cee665cb13d00c2d9fb6b4
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ride.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rita.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rita.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4d84cf4fdc817e82b5234b2735ed856946a74a3c
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rita.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rnc.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rnc.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0010b87ae9aa8a720b2710f4470acca6f92d4aae
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rnc.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/roboconf.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/roboconf.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..be5d9c815be820a7ea51c1cf318628477cf470cf
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/roboconf.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/robotframework.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/robotframework.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1dc6dd9144864a544add65e4cac167b360543a02
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/robotframework.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ruby.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ruby.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b30d18c666256428f1396f6e05cb207358e431c4
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ruby.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rust.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rust.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b90d1509728a8a11a250da980b70212d4d4701a4
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/rust.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sas.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sas.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..45f86c20bd7b64ae49598fa236d43111af9efb08
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sas.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/savi.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/savi.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a0d7c4fab831f2974fe08ae786f8ce5078210e54
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/savi.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/scdoc.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/scdoc.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f7e63818f6c49c36dacad96b76b49fa46b7d814b
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/scdoc.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/scripting.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/scripting.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3f6d5637c6c85967ac5561bcd7e96d401d93ad51
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/scripting.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sgf.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sgf.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..577183d4091746b2922fa5c405cc6a652622446a
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sgf.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/shell.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/shell.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bc5261471fe97979e6b061136dc8b981ddb17621
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/shell.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sieve.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sieve.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ba7a551138647003743d47f9b572f3026243b00f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sieve.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/slash.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/slash.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..131fb9b27a62459e4fb5fad92394f2f88ba862b6
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/slash.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/smalltalk.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/smalltalk.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7994480fb05810f4521ba69be1ca249ca6ed3918
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/smalltalk.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/smithy.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/smithy.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ec55d62d8be381b3168b2ecbb7e77e972edf4010
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/smithy.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/smv.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/smv.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f92c7bbe250c6f11c400f2138af7b01cac4a2afb
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/smv.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/snobol.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/snobol.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7aa7715e582f2a8e31c63c65d437ddbcb8b485fa
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/snobol.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/solidity.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/solidity.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..df06b60861bf6c8b8a745a8cd1e8abf053123332
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/solidity.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/soong.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/soong.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a93b0b512699ee7d751b4ef28074a253bbe40f15
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/soong.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sophia.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sophia.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e6bd44aecf5b947daf77ac10984ea097d188927e
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sophia.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/special.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/special.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..07b55c24ef3b5329e43b06973f7e39c7b9c48deb
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/special.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/spice.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/spice.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c7ac6b687839e8ad8128dd1eb578fb0a72262662
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/spice.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sql.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sql.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..866b613d2b42c8417d7be1389004d9d5fe72a75f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/sql.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/srcinfo.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/srcinfo.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2c9d70fbe3963e7e130bbe852d0f85a0867c6f38
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/srcinfo.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/stata.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/stata.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..27e7b1b0d40aed83dff092d04b9710550f6051df
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/stata.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/supercollider.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/supercollider.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e72b8d8553f424db1a2d4617288fff9755baad5a
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/supercollider.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tablegen.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tablegen.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fc52feb1fa4e0188d101fd3f846ab5ea40a70daa
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tablegen.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tact.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tact.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4e628dba9295092b303a49b631539997a7a10d37
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tact.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tal.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tal.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6f4e4a8f9ad3395be45eab750150a1e490740cbf
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tal.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tcl.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tcl.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3ce7e17e6405c628d1dee50699b3c8a876bacb46
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tcl.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/teal.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/teal.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6bc3fba0553036ab780849a024b62cf82bc0e7e9
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/teal.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/templates.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/templates.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2ce165ed274b1085a670a94e346fd554fad86b23
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/templates.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/teraterm.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/teraterm.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..55035c48922669bab165363ce283555dc6480a25
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/teraterm.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/testing.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/testing.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..174452c9a7bb6048ae1600b3fbfc63b27d04d958
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/testing.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/text.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/text.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f3bf641d855ea6b62a2e57cbb5882134cb8c2f15
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/text.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/textedit.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/textedit.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0a2e8a6a2b0216310e326956dccc3965dfa3811b
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/textedit.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/textfmts.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/textfmts.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1bfc0ffcc672d148d6218c99a2365ab99386c617
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/textfmts.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/theorem.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/theorem.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e81a9893353a0d29d15ad89f84c2af6ced0a389b
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/theorem.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/thingsdb.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/thingsdb.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9008360ed436354dccf2587c6cf430aab1bd4802
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/thingsdb.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tlb.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tlb.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f9b1c95c4627e9f9403ae6680b14cef5da71f0ba
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tlb.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tls.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tls.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..649c90b3ca3516289962bfaa97cc74948b4820e5
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tls.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tnt.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tnt.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a513f4199ffed84d138b0312f44a04c2830d3748
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/tnt.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/trafficscript.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/trafficscript.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1a008079d318f79dcb8f3a6803cb479da9ca87df
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/trafficscript.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/typoscript.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/typoscript.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5c09c44edb0b90d00821cebb2bb611c7dff75987
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/typoscript.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/typst.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/typst.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c770ef260882a90e1d918362cc877d4ebe57387f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/typst.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ul4.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ul4.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d99cab133ab0e1f8c34f44833cea7cd999e28f7c
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/ul4.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/unicon.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/unicon.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2ab033839a43e6ad384fe576fce3866dd5814298
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/unicon.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/urbi.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/urbi.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..db05a34daedcbc3ca26e4a4a7ce751dbf74989fd
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/urbi.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/usd.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/usd.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..847289899ef47456e000cf57d1b4339cd469a444
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/usd.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/verification.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/verification.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9947747b7d43ebd2b535a85e6e240c3bacdaf9b7
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/verification.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/vyper.cpython-310.pyc b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/vyper.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5ad9902a2d097b9fa635140e7577c71d177cd06d
Binary files /dev/null and b/venv/lib/python3.10/site-packages/pygments/lexers/__pycache__/vyper.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/praat.py b/venv/lib/python3.10/site-packages/pygments/lexers/praat.py
new file mode 100644
index 0000000000000000000000000000000000000000..054f5b61e56a53d2bf04d2bba2fc15c1fa85566d
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/praat.py
@@ -0,0 +1,303 @@
+"""
+ pygments.lexers.praat
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Praat
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, bygroups, include
+from pygments.token import Name, Text, Comment, Keyword, String, Punctuation, \
+ Number, Operator, Whitespace
+
+__all__ = ['PraatLexer']
+
+
+class PraatLexer(RegexLexer):
+ """
+ For Praat scripts.
+ """
+
+ name = 'Praat'
+ url = 'http://www.praat.org'
+ aliases = ['praat']
+ filenames = ['*.praat', '*.proc', '*.psc']
+ version_added = '2.1'
+
+ keywords = (
+ 'if', 'then', 'else', 'elsif', 'elif', 'endif', 'fi', 'for', 'from', 'to',
+ 'endfor', 'endproc', 'while', 'endwhile', 'repeat', 'until', 'select', 'plus',
+ 'minus', 'demo', 'assert', 'stopwatch', 'nocheck', 'nowarn', 'noprogress',
+ 'editor', 'endeditor', 'clearinfo',
+ )
+
+ functions_string = (
+ 'backslashTrigraphsToUnicode', 'chooseDirectory', 'chooseReadFile',
+ 'chooseWriteFile', 'date', 'demoKey', 'do', 'environment', 'extractLine',
+ 'extractWord', 'fixed', 'info', 'left', 'mid', 'percent', 'readFile', 'replace',
+ 'replace_regex', 'right', 'selected', 'string', 'unicodeToBackslashTrigraphs',
+ )
+
+ functions_numeric = (
+ 'abs', 'appendFile', 'appendFileLine', 'appendInfo', 'appendInfoLine', 'arccos',
+ 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'barkToHertz',
+ 'beginPause', 'beginSendPraat', 'besselI', 'besselK', 'beta', 'beta2',
+ 'binomialP', 'binomialQ', 'boolean', 'ceiling', 'chiSquareP', 'chiSquareQ',
+ 'choice', 'comment', 'cos', 'cosh', 'createDirectory', 'deleteFile',
+ 'demoClicked', 'demoClickedIn', 'demoCommandKeyPressed',
+ 'demoExtraControlKeyPressed', 'demoInput', 'demoKeyPressed',
+ 'demoOptionKeyPressed', 'demoShiftKeyPressed', 'demoShow', 'demoWaitForInput',
+ 'demoWindowTitle', 'demoX', 'demoY', 'differenceLimensToPhon', 'do', 'editor',
+ 'endPause', 'endSendPraat', 'endsWith', 'erb', 'erbToHertz', 'erf', 'erfc',
+ 'exitScript', 'exp', 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ',
+ 'floor', 'gaussP', 'gaussQ', 'hertzToBark', 'hertzToErb', 'hertzToMel',
+ 'hertzToSemitones', 'imax', 'imin', 'incompleteBeta', 'incompleteGammaP', 'index',
+ 'index_regex', 'integer', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ',
+ 'invGaussQ', 'invSigmoid', 'invStudentQ', 'length', 'ln', 'lnBeta', 'lnGamma',
+ 'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', 'natural', 'number',
+ 'numberOfColumns', 'numberOfRows', 'numberOfSelected', 'objectsAreIdentical',
+ 'option', 'optionMenu', 'pauseScript', 'phonToDifferenceLimens', 'plusObject',
+ 'positive', 'randomBinomial', 'randomGauss', 'randomInteger', 'randomPoisson',
+ 'randomUniform', 'real', 'readFile', 'removeObject', 'rindex', 'rindex_regex',
+ 'round', 'runScript', 'runSystem', 'runSystem_nocheck', 'selectObject',
+ 'selected', 'semitonesToHertz', 'sentence', 'sentencetext', 'sigmoid', 'sin', 'sinc',
+ 'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP',
+ 'studentQ', 'tan', 'tanh', 'text', 'variableExists', 'word', 'writeFile', 'writeFileLine',
+ 'writeInfo', 'writeInfoLine',
+ )
+
+ functions_array = (
+ 'linear', 'randomGauss', 'randomInteger', 'randomUniform', 'zero',
+ )
+
+ objects = (
+ 'Activation', 'AffineTransform', 'AmplitudeTier', 'Art', 'Artword',
+ 'Autosegment', 'BarkFilter', 'BarkSpectrogram', 'CCA', 'Categories',
+ 'Cepstrogram', 'Cepstrum', 'Cepstrumc', 'ChebyshevSeries', 'ClassificationTable',
+ 'Cochleagram', 'Collection', 'ComplexSpectrogram', 'Configuration', 'Confusion',
+ 'ContingencyTable', 'Corpus', 'Correlation', 'Covariance',
+ 'CrossCorrelationTable', 'CrossCorrelationTables', 'DTW', 'DataModeler',
+ 'Diagonalizer', 'Discriminant', 'Dissimilarity', 'Distance', 'Distributions',
+ 'DurationTier', 'EEG', 'ERP', 'ERPTier', 'EditCostsTable', 'EditDistanceTable',
+ 'Eigen', 'Excitation', 'Excitations', 'ExperimentMFC', 'FFNet', 'FeatureWeights',
+ 'FileInMemory', 'FilesInMemory', 'Formant', 'FormantFilter', 'FormantGrid',
+ 'FormantModeler', 'FormantPoint', 'FormantTier', 'GaussianMixture', 'HMM',
+ 'HMM_Observation', 'HMM_ObservationSequence', 'HMM_State', 'HMM_StateSequence',
+ 'Harmonicity', 'ISpline', 'Index', 'Intensity', 'IntensityTier', 'IntervalTier',
+ 'KNN', 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries',
+ 'LinearRegression', 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline',
+ 'ManPages', 'Manipulation', 'Matrix', 'MelFilter', 'MelSpectrogram',
+ 'MixingMatrix', 'Movie', 'Network', 'Object', 'OTGrammar', 'OTHistory', 'OTMulti',
+ 'PCA', 'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo',
+ 'Pitch', 'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial',
+ 'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier',
+ 'ResultsMFC', 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct',
+ 'Similarity', 'SimpleString', 'SortedSetOfString', 'Sound', 'Speaker',
+ 'Spectrogram', 'Spectrum', 'SpectrumTier', 'SpeechSynthesizer', 'SpellingChecker',
+ 'Strings', 'StringsIndex', 'Table', 'TableOfReal', 'TextGrid', 'TextInterval',
+ 'TextPoint', 'TextTier', 'Tier', 'Transition', 'VocalTract', 'VocalTractTier',
+ 'Weight', 'WordList',
+ )
+
+ variables_numeric = (
+ 'macintosh', 'windows', 'unix', 'praatVersion', 'pi', 'e', 'undefined',
+ )
+
+ variables_string = (
+ 'praatVersion', 'tab', 'shellDirectory', 'homeDirectory',
+ 'preferencesDirectory', 'newline', 'temporaryDirectory',
+ 'defaultDirectory',
+ )
+
+ object_attributes = (
+ 'ncol', 'nrow', 'xmin', 'ymin', 'xmax', 'ymax', 'nx', 'ny', 'dx', 'dy',
+ )
+
+ tokens = {
+ 'root': [
+ (r'(\s+)(#.*?$)', bygroups(Whitespace, Comment.Single)),
+ (r'^#.*?$', Comment.Single),
+ (r';[^\n]*', Comment.Single),
+ (r'\s+', Whitespace),
+
+ (r'\bprocedure\b', Keyword, 'procedure_definition'),
+ (r'\bcall\b', Keyword, 'procedure_call'),
+ (r'@', Name.Function, 'procedure_call'),
+
+ include('function_call'),
+
+ (words(keywords, suffix=r'\b'), Keyword),
+
+ (r'(\bform\b)(\s+)([^\n]+)',
+ bygroups(Keyword, Whitespace, String), 'old_form'),
+
+ (r'(print(?:line|tab)?|echo|exit|asserterror|pause|send(?:praat|socket)|'
+ r'include|execute|system(?:_nocheck)?)(\s+)',
+ bygroups(Keyword, Whitespace), 'string_unquoted'),
+
+ (r'(goto|label)(\s+)(\w+)', bygroups(Keyword, Whitespace, Name.Label)),
+
+ include('variable_name'),
+ include('number'),
+
+ (r'"', String, 'string'),
+
+ (words((objects), suffix=r'(?=\s+\S+\n)'), Name.Class, 'string_unquoted'),
+
+ (r'\b[A-Z]', Keyword, 'command'),
+ (r'(\.{3}|[)(,])', Punctuation),
+ ],
+ 'command': [
+ (r'( ?[\w()-]+ ?)', Keyword),
+
+ include('string_interpolated'),
+
+ (r'\.{3}', Keyword, ('#pop', 'old_arguments')),
+ (r':', Keyword, ('#pop', 'comma_list')),
+ (r'\s', Whitespace, '#pop'),
+ ],
+ 'procedure_call': [
+ (r'\s+', Whitespace),
+ (r'([\w.]+)(?:(:)|(?:(\s*)(\()))',
+ bygroups(Name.Function, Punctuation,
+ Text.Whitespace, Punctuation), '#pop'),
+ (r'([\w.]+)', Name.Function, ('#pop', 'old_arguments')),
+ ],
+ 'procedure_definition': [
+ (r'\s', Whitespace),
+ (r'([\w.]+)(\s*?[(:])',
+ bygroups(Name.Function, Whitespace), '#pop'),
+ (r'([\w.]+)([^\n]*)',
+ bygroups(Name.Function, Text), '#pop'),
+ ],
+ 'function_call': [
+ (words(functions_string, suffix=r'\$(?=\s*[:(])'), Name.Function, 'function'),
+ (words(functions_array, suffix=r'#(?=\s*[:(])'), Name.Function, 'function'),
+ (words(functions_numeric, suffix=r'(?=\s*[:(])'), Name.Function, 'function'),
+ ],
+ 'function': [
+ (r'\s+', Whitespace),
+ (r':', Punctuation, ('#pop', 'comma_list')),
+ (r'\s*\(', Punctuation, ('#pop', 'comma_list')),
+ ],
+ 'comma_list': [
+ (r'(\s*\n\s*)(\.{3})', bygroups(Whitespace, Punctuation)),
+
+ (r'(\s*)(?:([)\]])|(\n))', bygroups(
+ Whitespace, Punctuation, Whitespace), '#pop'),
+
+ (r'\s+', Whitespace),
+ (r'"', String, 'string'),
+ (r'\b(if|then|else|fi|endif)\b', Keyword),
+
+ include('function_call'),
+ include('variable_name'),
+ include('operator'),
+ include('number'),
+
+ (r'[()]', Text),
+ (r',', Punctuation),
+ ],
+ 'old_arguments': [
+ (r'\n', Whitespace, '#pop'),
+
+ include('variable_name'),
+ include('operator'),
+ include('number'),
+
+ (r'"', String, 'string'),
+ (r'[^\n]', Text),
+ ],
+ 'number': [
+ (r'\n', Whitespace, '#pop'),
+ (r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
+ ],
+ 'object_reference': [
+ include('string_interpolated'),
+ (r'([a-z][a-zA-Z0-9_]*|\d+)', Name.Builtin),
+
+ (words(object_attributes, prefix=r'\.'), Name.Builtin, '#pop'),
+
+ (r'\$', Name.Builtin),
+ (r'\[', Text, '#pop'),
+ ],
+ 'variable_name': [
+ include('operator'),
+ include('number'),
+
+ (words(variables_string, suffix=r'\$'), Name.Variable.Global),
+ (words(variables_numeric,
+ suffix=r'(?=[^a-zA-Z0-9_."\'$#\[:(]|\s|^|$)'),
+ Name.Variable.Global),
+
+ (words(objects, prefix=r'\b', suffix=r"(_)"),
+ bygroups(Name.Builtin, Name.Builtin),
+ 'object_reference'),
+
+ (r'\.?_?[a-z][\w.]*(\$|#)?', Text),
+ (r'[\[\]]', Punctuation, 'comma_list'),
+
+ include('string_interpolated'),
+ ],
+ 'operator': [
+ (r'([+\/*<>=!-]=?|[&*|][&*|]?|\^|<>)', Operator),
+ (r'(?', Punctuation),
+ (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
+ r'\\[0-7]+\\|\\["\\abcefnrstv]|[^\\"])*"', String.Double),
+ (r"'(?:''|[^'])*'", String.Atom), # quoted atom
+ # Needs to not be followed by an atom.
+ # (r'=(?=\s|[a-zA-Z\[])', Operator),
+ (r'is\b', Operator),
+ (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])',
+ Operator),
+ (r'(mod|div|not)\b', Operator),
+ (r'_', Keyword), # The don't-care variable
+ (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
+ (r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
+ r'(\s*)(:-|-->)',
+ bygroups(Name.Function, Text, Operator)), # function defn
+ (r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
+ r'(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
+ String.Atom), # atom, characters
+ # This one includes !
+ (r'[#&*+\-./:<=>?@\\^~\u00a1-\u00bf\u2010-\u303f]+',
+ String.Atom), # atom, graphics
+ (r'[A-Z_]\w*', Name.Variable),
+ (r'\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text),
+ ],
+ 'nested-comment': [
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'[^*/]+', Comment.Multiline),
+ (r'[*/]', Comment.Multiline),
+ ],
+ }
+
+ def analyse_text(text):
+ """Competes with IDL and Visual Prolog on *.pro"""
+ if ':-' in text:
+ # Visual Prolog also uses :-
+ return 0.5
+ else:
+ return 0
+
+
+class LogtalkLexer(RegexLexer):
+ """
+ For Logtalk source code.
+ """
+
+ name = 'Logtalk'
+ url = 'http://logtalk.org/'
+ aliases = ['logtalk']
+ filenames = ['*.lgt', '*.logtalk']
+ mimetypes = ['text/x-logtalk']
+ version_added = '0.10'
+
+ tokens = {
+ 'root': [
+ # Directives
+ (r'^\s*:-\s', Punctuation, 'directive'),
+ # Comments
+ (r'%.*?\n', Comment),
+ (r'/\*(.|\n)*?\*/', Comment),
+ # Whitespace
+ (r'\n', Text),
+ (r'\s+', Text),
+ # Numbers
+ (r"0'[\\]?.", Number),
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+ # Variables
+ (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
+ # Event handlers
+ (r'(after|before)(?=[(])', Keyword),
+ # Message forwarding handler
+ (r'forward(?=[(])', Keyword),
+ # Execution-context methods
+ (r'(context|parameter|this|se(lf|nder))(?=[(])', Keyword),
+ # Reflection
+ (r'(current_predicate|predicate_property)(?=[(])', Keyword),
+ # DCGs and term expansion
+ (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', Keyword),
+ # Entity
+ (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword),
+ (r'(object|protocol|category)_property(?=[(])', Keyword),
+ # Entity relations
+ (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword),
+ (r'extends_(object|protocol|category)(?=[(])', Keyword),
+ (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
+ (r'(instantiat|specializ)es_class(?=[(])', Keyword),
+ # Events
+ (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
+ # Flags
+ (r'(create|current|set)_logtalk_flag(?=[(])', Keyword),
+ # Compiling, loading, and library paths
+ (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make(_target_action)?)(?=[(])', Keyword),
+ (r'\blogtalk_make\b', Keyword),
+ # Database
+ (r'(clause|retract(all)?)(?=[(])', Keyword),
+ (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
+ # Control constructs
+ (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
+ (r'(fa(il|lse)|true|(instantiation|system)_error)\b', Keyword),
+ (r'(uninstantiation|type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword),
+ # All solutions
+ (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
+ # Multi-threading predicates
+ (r'threaded(_(ca(ll|ncel)|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
+ # Engine predicates
+ (r'threaded_engine(_(create|destroy|self|next|next_reified|yield|post|fetch))?(?=[(])', Keyword),
+ # Term unification
+ (r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword),
+ # Term creation and decomposition
+ (r'(functor|arg|copy_term|numbervars|term_variables)(?=[(])', Keyword),
+ # Evaluable functors
+ (r'(div|rem|m(ax|in|od)|abs|sign)(?=[(])', Keyword),
+ (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
+ (r'(floor|t(an|runcate)|round|ceiling)(?=[(])', Keyword),
+ # Other arithmetic functors
+ (r'(cos|a(cos|sin|tan|tan2)|exp|log|s(in|qrt)|xor)(?=[(])', Keyword),
+ # Term testing
+ (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword),
+ # Term comparison
+ (r'compare(?=[(])', Keyword),
+ # Stream selection and control
+ (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
+ (r'(open|close)(?=[(])', Keyword),
+ (r'flush_output(?=[(])', Keyword),
+ (r'(at_end_of_stream|flush_output)\b', Keyword),
+ (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword),
+ # Character and byte input/output
+ (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
+ (r'\bnl\b', Keyword),
+ # Term input/output
+ (r'read(_term)?(?=[(])', Keyword),
+ (r'write(q|_(canonical|term))?(?=[(])', Keyword),
+ (r'(current_)?op(?=[(])', Keyword),
+ (r'(current_)?char_conversion(?=[(])', Keyword),
+ # Atomic term processing
+ (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
+ (r'(char_code|sub_atom)(?=[(])', Keyword),
+ (r'number_c(har|ode)s(?=[(])', Keyword),
+ # Implementation defined hooks functions
+ (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
+ (r'\bhalt\b', Keyword),
+ (r'halt(?=[(])', Keyword),
+ # Message sending operators
+ (r'(::|:|\^\^)', Operator),
+ # External call
+ (r'[{}]', Keyword),
+ # Logic and control
+ (r'(ignore|once)(?=[(])', Keyword),
+ (r'\brepeat\b', Keyword),
+ # Sorting
+ (r'(key)?sort(?=[(])', Keyword),
+ # Bitwise functors
+ (r'(>>|<<|/\\|\\\\|\\)', Operator),
+ # Predicate aliases
+ (r'\bas\b', Operator),
+ # Arithmetic evaluation
+ (r'\bis\b', Keyword),
+ # Arithmetic comparison
+ (r'(=:=|=\\=|<|=<|>=|>)', Operator),
+ # Term creation and decomposition
+ (r'=\.\.', Operator),
+ # Term unification
+ (r'(=|\\=)', Operator),
+ # Term comparison
+ (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
+ # Evaluable functors
+ (r'(//|[-+*/])', Operator),
+ (r'\b(e|pi|div|mod|rem)\b', Operator),
+ # Other arithmetic functors
+ (r'\b\*\*\b', Operator),
+ # DCG rules
+ (r'-->', Operator),
+ # Control constructs
+ (r'([!;]|->)', Operator),
+ # Logic and control
+ (r'\\+', Operator),
+ # Mode operators
+ (r'[?@]', Operator),
+ # Existential quantifier
+ (r'\^', Operator),
+ # Punctuation
+ (r'[()\[\],.|]', Text),
+ # Atoms
+ (r"[a-z][a-zA-Z0-9_]*", Text),
+ (r"'", String, 'quoted_atom'),
+ # Double-quoted terms
+ (r'"', String, 'double_quoted_term'),
+ ],
+
+ 'quoted_atom': [
+ (r"''", String),
+ (r"'", String, '#pop'),
+ (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
+ (r"[^\\'\n]+", String),
+ (r'\\', String),
+ ],
+
+ 'double_quoted_term': [
+ (r'""', String),
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
+ (r'[^\\"\n]+', String),
+ (r'\\', String),
+ ],
+
+ 'directive': [
+ # Conditional compilation directives
+ (r'(el)?if(?=[(])', Keyword, 'root'),
+ (r'(e(lse|ndif))(?=[.])', Keyword, 'root'),
+ # Entity directives
+ (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
+ (r'(end_(category|object|protocol))(?=[.])', Keyword, 'root'),
+ # Predicate scope directives
+ (r'(public|protected|private)(?=[(])', Keyword, 'root'),
+ # Other directives
+ (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
+ (r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'),
+ (r'(built_in|dynamic|synchronized|threaded)(?=[.])', Keyword, 'root'),
+ (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
+ (r'op(?=[(])', Keyword, 'root'),
+ (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
+ (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
+ (r'[a-z][a-zA-Z0-9_]*(?=[.])', Text, 'root'),
+ ],
+
+ 'entityrelations': [
+ (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
+ # Numbers
+ (r"0'[\\]?.", Number),
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+ # Variables
+ (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
+ # Atoms
+ (r"[a-z][a-zA-Z0-9_]*", Text),
+ (r"'", String, 'quoted_atom'),
+ # Double-quoted terms
+ (r'"', String, 'double_quoted_term'),
+ # End of entity-opening directive
+ (r'([)]\.)', Text, 'root'),
+ # Scope operator
+ (r'(::)', Operator),
+ # Punctuation
+ (r'[()\[\],.|]', Text),
+ # Comments
+ (r'%.*?\n', Comment),
+ (r'/\*(.|\n)*?\*/', Comment),
+ # Whitespace
+ (r'\n', Text),
+ (r'\s+', Text),
+ ]
+ }
+
+ def analyse_text(text):
+ if ':- object(' in text:
+ return 1.0
+ elif ':- protocol(' in text:
+ return 1.0
+ elif ':- category(' in text:
+ return 1.0
+ elif re.search(r'^:-\s[a-z]', text, re.M):
+ return 0.9
+ else:
+ return 0.0
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/promql.py b/venv/lib/python3.10/site-packages/pygments/lexers/promql.py
new file mode 100644
index 0000000000000000000000000000000000000000..cad3c254a1202682f4a831d4c09472bce34c33f0
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/promql.py
@@ -0,0 +1,176 @@
+"""
+ pygments.lexers.promql
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Prometheus Query Language.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, default, words
+from pygments.token import Comment, Keyword, Name, Number, Operator, \
+ Punctuation, String, Whitespace
+
+__all__ = ["PromQLLexer"]
+
+
+class PromQLLexer(RegexLexer):
+ """
+ For PromQL queries.
+
+ For details about the grammar see:
+ https://github.com/prometheus/prometheus/tree/master/promql/parser
+
+ .. versionadded: 2.7
+ """
+
+ name = "PromQL"
+ url = 'https://prometheus.io/docs/prometheus/latest/querying/basics/'
+ aliases = ["promql"]
+ filenames = ["*.promql"]
+ version_added = ''
+
+ base_keywords = (
+ words(
+ (
+ "bool",
+ "by",
+ "group_left",
+ "group_right",
+ "ignoring",
+ "offset",
+ "on",
+ "without",
+ ),
+ suffix=r"\b",
+ ),
+ Keyword,
+ )
+
+ aggregator_keywords = (
+ words(
+ (
+ "sum",
+ "min",
+ "max",
+ "avg",
+ "group",
+ "stddev",
+ "stdvar",
+ "count",
+ "count_values",
+ "bottomk",
+ "topk",
+ "quantile",
+ ),
+ suffix=r"\b",
+ ),
+ Keyword,
+ )
+
+ function_keywords = (
+ words(
+ (
+ "abs",
+ "absent",
+ "absent_over_time",
+ "avg_over_time",
+ "ceil",
+ "changes",
+ "clamp_max",
+ "clamp_min",
+ "count_over_time",
+ "day_of_month",
+ "day_of_week",
+ "days_in_month",
+ "delta",
+ "deriv",
+ "exp",
+ "floor",
+ "histogram_quantile",
+ "holt_winters",
+ "hour",
+ "idelta",
+ "increase",
+ "irate",
+ "label_join",
+ "label_replace",
+ "ln",
+ "log10",
+ "log2",
+ "max_over_time",
+ "min_over_time",
+ "minute",
+ "month",
+ "predict_linear",
+ "quantile_over_time",
+ "rate",
+ "resets",
+ "round",
+ "scalar",
+ "sort",
+ "sort_desc",
+ "sqrt",
+ "stddev_over_time",
+ "stdvar_over_time",
+ "sum_over_time",
+ "time",
+ "timestamp",
+ "vector",
+ "year",
+ ),
+ suffix=r"\b",
+ ),
+ Keyword.Reserved,
+ )
+
+ tokens = {
+ "root": [
+ (r"\n", Whitespace),
+ (r"\s+", Whitespace),
+ (r",", Punctuation),
+ # Keywords
+ base_keywords,
+ aggregator_keywords,
+ function_keywords,
+ # Offsets
+ (r"[1-9][0-9]*[smhdwy]", String),
+ # Numbers
+ (r"-?[0-9]+\.[0-9]+", Number.Float),
+ (r"-?[0-9]+", Number.Integer),
+ # Comments
+ (r"#.*?$", Comment.Single),
+ # Operators
+ (r"(\+|\-|\*|\/|\%|\^)", Operator),
+ (r"==|!=|>=|<=|<|>", Operator),
+ (r"and|or|unless", Operator.Word),
+ # Metrics
+ (r"[_a-zA-Z][a-zA-Z0-9_]+", Name.Variable),
+ # Params
+ (r'(["\'])(.*?)(["\'])', bygroups(Punctuation, String, Punctuation)),
+ # Other states
+ (r"\(", Operator, "function"),
+ (r"\)", Operator),
+ (r"\{", Punctuation, "labels"),
+ (r"\[", Punctuation, "range"),
+ ],
+ "labels": [
+ (r"\}", Punctuation, "#pop"),
+ (r"\n", Whitespace),
+ (r"\s+", Whitespace),
+ (r",", Punctuation),
+ (r'([_a-zA-Z][a-zA-Z0-9_]*?)(\s*?)(=~|!=|=|!~)(\s*?)("|\')(.*?)("|\')',
+ bygroups(Name.Label, Whitespace, Operator, Whitespace,
+ Punctuation, String, Punctuation)),
+ ],
+ "range": [
+ (r"\]", Punctuation, "#pop"),
+ (r"[1-9][0-9]*[smhdwy]", String),
+ ],
+ "function": [
+ (r"\)", Operator, "#pop"),
+ (r"\(", Operator, "#push"),
+ default("#pop"),
+ ],
+ }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/prql.py b/venv/lib/python3.10/site-packages/pygments/lexers/prql.py
new file mode 100644
index 0000000000000000000000000000000000000000..ee95d2d47491a45479aa79a0e7c2c24a4cef01cf
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/prql.py
@@ -0,0 +1,251 @@
+"""
+ pygments.lexers.prql
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the PRQL query language.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, combined, words, include, bygroups
+from pygments.token import Comment, Literal, Keyword, Name, Number, Operator, \
+ Punctuation, String, Text, Whitespace
+
+__all__ = ['PrqlLexer']
+
+
+class PrqlLexer(RegexLexer):
+ """
+ For PRQL source code.
+
+ grammar: https://github.com/PRQL/prql/tree/main/grammars
+ """
+
+ name = 'PRQL'
+ url = 'https://prql-lang.org/'
+ aliases = ['prql']
+ filenames = ['*.prql']
+ mimetypes = ['application/prql', 'application/x-prql']
+ version_added = '2.17'
+
+ builtinTypes = words((
+ "bool",
+ "int",
+ "int8", "int16", "int32", "int64", "int128",
+ "float",
+ "text",
+ "set"), suffix=r'\b')
+
+ def innerstring_rules(ttype):
+ return [
+ # the new style '{}'.format(...) string formatting
+ (r'\{'
+ r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name
+ r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
+ r'\}', String.Interpol),
+
+ (r'[^\\\'"%{\n]+', ttype),
+ (r'[\'"\\]', ttype),
+ (r'%|(\{{1,2})', ttype)
+ ]
+
+ def fstring_rules(ttype):
+ return [
+ (r'\}', String.Interpol),
+ (r'\{', String.Interpol, 'expr-inside-fstring'),
+ (r'[^\\\'"{}\n]+', ttype),
+ (r'[\'"\\]', ttype),
+ ]
+
+ tokens = {
+ 'root': [
+
+ # Comments
+ (r'#!.*', String.Doc),
+ (r'#.*', Comment.Single),
+
+ # Whitespace
+ (r'\s+', Whitespace),
+
+ # Modules
+ (r'^(\s*)(module)(\s*)',
+ bygroups(Whitespace, Keyword.Namespace, Whitespace),
+ 'imports'),
+
+ (builtinTypes, Keyword.Type),
+
+ # Main
+ (r'^prql ', Keyword.Reserved),
+
+ ('let', Keyword.Declaration),
+
+ include('keywords'),
+ include('expr'),
+
+ # Transforms
+ (r'^[A-Za-z_][a-zA-Z0-9_]*', Keyword),
+ ],
+ 'expr': [
+ # non-raw f-strings
+ ('(f)(""")', bygroups(String.Affix, String.Double),
+ combined('fstringescape', 'tdqf')),
+ ("(f)(''')", bygroups(String.Affix, String.Single),
+ combined('fstringescape', 'tsqf')),
+ ('(f)(")', bygroups(String.Affix, String.Double),
+ combined('fstringescape', 'dqf')),
+ ("(f)(')", bygroups(String.Affix, String.Single),
+ combined('fstringescape', 'sqf')),
+
+ # non-raw s-strings
+ ('(s)(""")', bygroups(String.Affix, String.Double),
+ combined('stringescape', 'tdqf')),
+ ("(s)(''')", bygroups(String.Affix, String.Single),
+ combined('stringescape', 'tsqf')),
+ ('(s)(")', bygroups(String.Affix, String.Double),
+ combined('stringescape', 'dqf')),
+ ("(s)(')", bygroups(String.Affix, String.Single),
+ combined('stringescape', 'sqf')),
+
+ # raw strings
+ ('(?i)(r)(""")',
+ bygroups(String.Affix, String.Double), 'tdqs'),
+ ("(?i)(r)(''')",
+ bygroups(String.Affix, String.Single), 'tsqs'),
+ ('(?i)(r)(")',
+ bygroups(String.Affix, String.Double), 'dqs'),
+ ("(?i)(r)(')",
+ bygroups(String.Affix, String.Single), 'sqs'),
+
+ # non-raw strings
+ ('"""', String.Double, combined('stringescape', 'tdqs')),
+ ("'''", String.Single, combined('stringescape', 'tsqs')),
+ ('"', String.Double, combined('stringescape', 'dqs')),
+ ("'", String.Single, combined('stringescape', 'sqs')),
+
+ # Time and dates
+ (r'@\d{4}-\d{2}-\d{2}T\d{2}(:\d{2})?(:\d{2})?(\.\d{1,6})?(Z|[+-]\d{1,2}(:\d{1,2})?)?', Literal.Date),
+ (r'@\d{4}-\d{2}-\d{2}', Literal.Date),
+ (r'@\d{2}(:\d{2})?(:\d{2})?(\.\d{1,6})?(Z|[+-]\d{1,2}(:\d{1,2})?)?', Literal.Date),
+
+ (r'[^\S\n]+', Text),
+ include('numbers'),
+ (r'->|=>|==|!=|>=|<=|~=|&&|\|\||\?\?|\/\/', Operator),
+ (r'[-~+/*%=<>&^|.@]', Operator),
+ (r'[]{}:(),;[]', Punctuation),
+ include('functions'),
+
+ # Variable Names
+ (r'[A-Za-z_][a-zA-Z0-9_]*', Name.Variable),
+ ],
+ 'numbers': [
+ (r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)'
+ r'([eE][+-]?\d(?:_?\d)*)?', Number.Float),
+ (r'\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?', Number.Float),
+ (r'0[oO](?:_?[0-7])+', Number.Oct),
+ (r'0[bB](?:_?[01])+', Number.Bin),
+ (r'0[xX](?:_?[a-fA-F0-9])+', Number.Hex),
+ (r'\d(?:_?\d)*', Number.Integer),
+ ],
+ 'fstringescape': [
+ include('stringescape'),
+ ],
+ 'bytesescape': [
+ (r'\\([\\bfnrt"\']|\n|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'stringescape': [
+ (r'\\(N\{.*?\}|u\{[a-fA-F0-9]{1,6}\})', String.Escape),
+ include('bytesescape')
+ ],
+ 'fstrings-single': fstring_rules(String.Single),
+ 'fstrings-double': fstring_rules(String.Double),
+ 'strings-single': innerstring_rules(String.Single),
+ 'strings-double': innerstring_rules(String.Double),
+ 'dqf': [
+ (r'"', String.Double, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ include('fstrings-double')
+ ],
+ 'sqf': [
+ (r"'", String.Single, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
+ include('fstrings-single')
+ ],
+ 'dqs': [
+ (r'"', String.Double, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ include('strings-double')
+ ],
+ 'sqs': [
+ (r"'", String.Single, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
+ include('strings-single')
+ ],
+ 'tdqf': [
+ (r'"""', String.Double, '#pop'),
+ include('fstrings-double'),
+ (r'\n', String.Double)
+ ],
+ 'tsqf': [
+ (r"'''", String.Single, '#pop'),
+ include('fstrings-single'),
+ (r'\n', String.Single)
+ ],
+ 'tdqs': [
+ (r'"""', String.Double, '#pop'),
+ include('strings-double'),
+ (r'\n', String.Double)
+ ],
+ 'tsqs': [
+ (r"'''", String.Single, '#pop'),
+ include('strings-single'),
+ (r'\n', String.Single)
+ ],
+
+ 'expr-inside-fstring': [
+ (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
+ # without format specifier
+ (r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
+ r'\}', String.Interpol, '#pop'),
+ # with format specifier
+ # we'll catch the remaining '}' in the outer scope
+ (r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
+ r':', String.Interpol, '#pop'),
+ (r'\s+', Whitespace), # allow new lines
+ include('expr'),
+ ],
+ 'expr-inside-fstring-inner': [
+ (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
+ (r'[])}]', Punctuation, '#pop'),
+ (r'\s+', Whitespace), # allow new lines
+ include('expr'),
+ ],
+ 'keywords': [
+ (words((
+ 'into', 'case', 'type', 'module', 'internal',
+ ), suffix=r'\b'),
+ Keyword),
+ (words(('true', 'false', 'null'), suffix=r'\b'), Keyword.Constant),
+ ],
+ 'functions': [
+ (words((
+ "min", "max", "sum", "average", "stddev", "every", "any",
+ "concat_array", "count", "lag", "lead", "first", "last",
+ "rank", "rank_dense", "row_number", "round", "as", "in",
+ "tuple_every", "tuple_map", "tuple_zip", "_eq", "_is_null",
+ "from_text", "lower", "upper", "read_parquet", "read_csv"),
+ suffix=r'\b'),
+ Name.Function),
+ ],
+
+ 'comment': [
+ (r'-(?!\})', Comment.Multiline),
+ (r'\{-', Comment.Multiline, 'comment'),
+ (r'[^-}]', Comment.Multiline),
+ (r'-\}', Comment.Multiline, '#pop'),
+ ],
+
+ 'imports': [
+ (r'\w+(\.\w+)*', Name.Class, '#pop'),
+ ],
+ }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/ptx.py b/venv/lib/python3.10/site-packages/pygments/lexers/ptx.py
new file mode 100644
index 0000000000000000000000000000000000000000..784ca13a6f0c0ba5464706f68c84eeae9d36c9e9
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/ptx.py
@@ -0,0 +1,119 @@
+"""
+ pygments.lexers.ptx
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for other PTX language.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Comment, Keyword, Name, String, Number, \
+ Punctuation, Whitespace, Operator
+
+__all__ = ["PtxLexer"]
+
+
+class PtxLexer(RegexLexer):
+ """
+ For NVIDIA `PTX `_
+ source.
+ """
+ name = 'PTX'
+ url = "https://docs.nvidia.com/cuda/parallel-thread-execution/"
+ filenames = ['*.ptx']
+ aliases = ['ptx']
+ mimetypes = ['text/x-ptx']
+ version_added = '2.16'
+
+ #: optional Comment or Whitespace
+ string = r'"[^"]*?"'
+ followsym = r'[a-zA-Z0-9_$]'
+ identifier = r'([-a-zA-Z$._][\w\-$.]*|' + string + ')'
+ block_label = r'(' + identifier + r'|(\d+))'
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ (block_label + r'\s*:', Name.Label),
+
+ include('keyword'),
+
+ (r'%' + identifier, Name.Variable),
+ (r'%\d+', Name.Variable.Anonymous),
+ (r'c?' + string, String),
+ (identifier, Name.Variable),
+ (r';', Punctuation),
+ (r'[*+-/]', Operator),
+
+ (r'0[xX][a-fA-F0-9]+', Number),
+ (r'-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?', Number),
+
+ (r'[=<>{}\[\]()*.,!]|x\b', Punctuation)
+
+ ],
+ 'whitespace': [
+ (r'(\n|\s+)+', Whitespace),
+ (r'//.*?\n', Comment)
+ ],
+
+ 'keyword': [
+ # Instruction keywords
+ (words((
+ 'abs', 'discard', 'min', 'shf', 'vadd',
+ 'activemask', 'div', 'mma', 'shfl', 'vadd2',
+ 'add', 'dp2a', 'mov', 'shl', 'vadd4',
+ 'addc', 'dp4a', 'movmatrix', 'shr', 'vavrg2',
+ 'alloca', 'elect', 'mul', 'sin', 'vavrg4',
+ 'and', 'ex2', 'mul24', 'slct', 'vmad',
+ 'applypriority', 'exit', 'multimem', 'sqrt', 'vmax',
+ 'atom', 'fence', 'nanosleep', 'st', 'vmax2',
+ 'bar', 'fma', 'neg', 'stackrestore', 'vmax4',
+ 'barrier', 'fns', 'not', 'stacksave', 'vmin',
+ 'bfe', 'getctarank', 'or', 'stmatrix', 'vmin2',
+ 'bfi', 'griddepcontrol', 'pmevent', 'sub', 'vmin4',
+ 'bfind', 'isspacep', 'popc', 'subc', 'vote',
+ 'bmsk', 'istypep', 'prefetch', 'suld', 'vset',
+ 'bra', 'ld', 'prefetchu', 'suq', 'vset2',
+ 'brev', 'ldmatrix', 'prmt', 'sured', 'vset4',
+ 'brkpt', 'ldu', 'rcp', 'sust', 'vshl',
+ 'brx', 'lg2', 'red', 'szext', 'vshr',
+ 'call', 'lop3', 'redux', 'tanh', 'vsub',
+ 'clz', 'mad', 'rem', 'testp', 'vsub2',
+ 'cnot', 'mad24', 'ret', 'tex', 'vsub4',
+ 'copysign', 'madc', 'rsqrt', 'tld4', 'wgmma',
+ 'cos', 'mapa', 'sad', 'trap', 'wmma',
+ 'cp', 'match', 'selp', 'txq', 'xor',
+ 'createpolicy', 'max', 'set', 'vabsdiff', 'cvt',
+ 'mbarrier', 'setmaxnreg', 'vabsdiff2', 'cvta',
+ 'membar', 'setp', 'vabsdiff4')), Keyword),
+ # State Spaces and Suffixes
+ (words((
+ 'reg', '.sreg', '.const', '.global',
+ '.local', '.param', '.shared', '.tex',
+ '.wide', '.loc'
+ )), Keyword.Pseudo),
+ # PTX Directives
+ (words((
+ '.address_size', '.explicitcluster', '.maxnreg', '.section',
+ '.alias', '.extern', '.maxntid', '.shared',
+ '.align', '.file', '.minnctapersm', '.sreg',
+ '.branchtargets', '.func', '.noreturn', '.target',
+ '.callprototype', '.global', '.param', '.tex',
+ '.calltargets', '.loc', '.pragma', '.version',
+ '.common', '.local', '.reg', '.visible',
+ '.const', '.maxclusterrank', '.reqnctapercluster', '.weak',
+ '.entry', '.maxnctapersm', '.reqntid')), Keyword.Reserved),
+ # Fundamental Types
+ (words((
+ '.s8', '.s16', '.s32', '.s64',
+ '.u8', '.u16', '.u32', '.u64',
+ '.f16', '.f16x2', '.f32', '.f64',
+ '.b8', '.b16', '.b32', '.b64',
+ '.pred'
+ )), Keyword.Type)
+ ],
+
+ }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/python.py b/venv/lib/python3.10/site-packages/pygments/lexers/python.py
new file mode 100644
index 0000000000000000000000000000000000000000..805f6ff2ac778176926cbdb40eca17ef4c93ee70
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/python.py
@@ -0,0 +1,1201 @@
+"""
+ pygments.lexers.python
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Python and related languages.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import keyword
+
+from pygments.lexer import DelegatingLexer, RegexLexer, include, \
+ bygroups, using, default, words, combined, this
+from pygments.util import get_bool_opt, shebang_matches
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Other, Error, Whitespace
+from pygments import unistring as uni
+
+__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
+ 'Python2Lexer', 'Python2TracebackLexer',
+ 'CythonLexer', 'DgLexer', 'NumPyLexer']
+
+
+class PythonLexer(RegexLexer):
+ """
+ For Python source code (version 3.x).
+
+ .. versionchanged:: 2.5
+ This is now the default ``PythonLexer``. It is still available as the
+ alias ``Python3Lexer``.
+ """
+
+ name = 'Python'
+ url = 'https://www.python.org'
+ aliases = ['python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark', 'pyi']
+ filenames = [
+ '*.py',
+ '*.pyw',
+ # Type stubs
+ '*.pyi',
+ # Jython
+ '*.jy',
+ # Sage
+ '*.sage',
+ # SCons
+ '*.sc',
+ 'SConstruct',
+ 'SConscript',
+ # Skylark/Starlark (used by Bazel, Buck, and Pants)
+ '*.bzl',
+ 'BUCK',
+ 'BUILD',
+ 'BUILD.bazel',
+ 'WORKSPACE',
+ # Twisted Application infrastructure
+ '*.tac',
+ ]
+ mimetypes = ['text/x-python', 'application/x-python',
+ 'text/x-python3', 'application/x-python3']
+ version_added = '0.10'
+
+ uni_name = f"[{uni.xid_start}][{uni.xid_continue}]*"
+
+ def innerstring_rules(ttype):
+ return [
+ # the old style '%s' % (...) string formatting (still valid in Py3)
+ (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ '[hlL]?[E-GXc-giorsaux%]', String.Interpol),
+ # the new style '{}'.format(...) string formatting
+ (r'\{'
+ r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name
+ r'(\![sra])?' # conversion
+ r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
+ r'\}', String.Interpol),
+
+ # backslashes, quotes and formatting signs must be parsed one at a time
+ (r'[^\\\'"%{\n]+', ttype),
+ (r'[\'"\\]', ttype),
+ # unhandled string formatting sign
+ (r'%|(\{{1,2})', ttype)
+ # newlines are an error (use "nl" state)
+ ]
+
+ def fstring_rules(ttype):
+ return [
+ # Assuming that a '}' is the closing brace after format specifier.
+ # Sadly, this means that we won't detect syntax error. But it's
+ # more important to parse correct syntax correctly, than to
+ # highlight invalid syntax.
+ (r'\}', String.Interpol),
+ (r'\{', String.Interpol, 'expr-inside-fstring'),
+ # backslashes, quotes and formatting signs must be parsed one at a time
+ (r'[^\\\'"{}\n]+', ttype),
+ (r'[\'"\\]', ttype),
+ # newlines are an error (use "nl" state)
+ ]
+
+ tokens = {
+ 'root': [
+ (r'\n', Whitespace),
+ (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
+ bygroups(Whitespace, String.Affix, String.Doc)),
+ (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
+ bygroups(Whitespace, String.Affix, String.Doc)),
+ (r'\A#!.+$', Comment.Hashbang),
+ (r'#.*$', Comment.Single),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ include('keywords'),
+ include('soft-keywords'),
+ (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'funcname'),
+ (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'classname'),
+ (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace),
+ 'fromimport'),
+ (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace),
+ 'import'),
+ include('expr'),
+ ],
+ 'expr': [
+ # raw f-strings
+ ('(?i)(rf|fr)(""")',
+ bygroups(String.Affix, String.Double),
+ combined('rfstringescape', 'tdqf')),
+ ("(?i)(rf|fr)(''')",
+ bygroups(String.Affix, String.Single),
+ combined('rfstringescape', 'tsqf')),
+ ('(?i)(rf|fr)(")',
+ bygroups(String.Affix, String.Double),
+ combined('rfstringescape', 'dqf')),
+ ("(?i)(rf|fr)(')",
+ bygroups(String.Affix, String.Single),
+ combined('rfstringescape', 'sqf')),
+ # non-raw f-strings
+ ('([fF])(""")', bygroups(String.Affix, String.Double),
+ combined('fstringescape', 'tdqf')),
+ ("([fF])(''')", bygroups(String.Affix, String.Single),
+ combined('fstringescape', 'tsqf')),
+ ('([fF])(")', bygroups(String.Affix, String.Double),
+ combined('fstringescape', 'dqf')),
+ ("([fF])(')", bygroups(String.Affix, String.Single),
+ combined('fstringescape', 'sqf')),
+ # raw bytes and strings
+ ('(?i)(rb|br|r)(""")',
+ bygroups(String.Affix, String.Double), 'tdqs'),
+ ("(?i)(rb|br|r)(''')",
+ bygroups(String.Affix, String.Single), 'tsqs'),
+ ('(?i)(rb|br|r)(")',
+ bygroups(String.Affix, String.Double), 'dqs'),
+ ("(?i)(rb|br|r)(')",
+ bygroups(String.Affix, String.Single), 'sqs'),
+ # non-raw strings
+ ('([uU]?)(""")', bygroups(String.Affix, String.Double),
+ combined('stringescape', 'tdqs')),
+ ("([uU]?)(''')", bygroups(String.Affix, String.Single),
+ combined('stringescape', 'tsqs')),
+ ('([uU]?)(")', bygroups(String.Affix, String.Double),
+ combined('stringescape', 'dqs')),
+ ("([uU]?)(')", bygroups(String.Affix, String.Single),
+ combined('stringescape', 'sqs')),
+ # non-raw bytes
+ ('([bB])(""")', bygroups(String.Affix, String.Double),
+ combined('bytesescape', 'tdqs')),
+ ("([bB])(''')", bygroups(String.Affix, String.Single),
+ combined('bytesescape', 'tsqs')),
+ ('([bB])(")', bygroups(String.Affix, String.Double),
+ combined('bytesescape', 'dqs')),
+ ("([bB])(')", bygroups(String.Affix, String.Single),
+ combined('bytesescape', 'sqs')),
+
+ (r'[^\S\n]+', Text),
+ include('numbers'),
+ (r'!=|==|<<|>>|:=|[-~+/*%=<>&^|.]', Operator),
+ (r'[]{}:(),;[]', Punctuation),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ include('expr-keywords'),
+ include('builtins'),
+ include('magicfuncs'),
+ include('magicvars'),
+ include('name'),
+ ],
+ 'expr-inside-fstring': [
+ (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
+ # without format specifier
+ (r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
+ r'(\![sraf])?' # conversion
+ r'\}', String.Interpol, '#pop'),
+ # with format specifier
+ # we'll catch the remaining '}' in the outer scope
+ (r'(=\s*)?' # debug (https://bugs.python.org/issue36817)
+ r'(\![sraf])?' # conversion
+ r':', String.Interpol, '#pop'),
+ (r'\s+', Whitespace), # allow new lines
+ include('expr'),
+ ],
+ 'expr-inside-fstring-inner': [
+ (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
+ (r'[])}]', Punctuation, '#pop'),
+ (r'\s+', Whitespace), # allow new lines
+ include('expr'),
+ ],
+ 'expr-keywords': [
+ # Based on https://docs.python.org/3/reference/expressions.html
+ (words((
+ 'async for', 'await', 'else', 'for', 'if', 'lambda',
+ 'yield', 'yield from'), suffix=r'\b'),
+ Keyword),
+ (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
+ ],
+ 'keywords': [
+ (words((
+ 'assert', 'async', 'await', 'break', 'continue', 'del', 'elif',
+ 'else', 'except', 'finally', 'for', 'global', 'if', 'lambda',
+ 'pass', 'raise', 'nonlocal', 'return', 'try', 'while', 'yield',
+ 'yield from', 'as', 'with'), suffix=r'\b'),
+ Keyword),
+ (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
+ ],
+ 'soft-keywords': [
+ # `match`, `case` and `_` soft keywords
+ (r'(^[ \t]*)' # at beginning of line + possible indentation
+ r'(match|case)\b' # a possible keyword
+ r'(?![ \t]*(?:' # not followed by...
+ r'[:,;=^&|@~)\]}]|(?:' + # characters and keywords that mean this isn't
+ # pattern matching (but None/True/False is ok)
+ r'|'.join(k for k in keyword.kwlist if k[0].islower()) + r')\b))',
+ bygroups(Text, Keyword), 'soft-keywords-inner'),
+ ],
+ 'soft-keywords-inner': [
+ # optional `_` keyword
+ (r'(\s+)([^\n_]*)(_\b)', bygroups(Whitespace, using(this), Keyword)),
+ default('#pop')
+ ],
+ 'builtins': [
+ (words((
+ '__import__', 'abs', 'aiter', 'all', 'any', 'bin', 'bool', 'bytearray',
+ 'breakpoint', 'bytes', 'callable', 'chr', 'classmethod', 'compile',
+ 'complex', 'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval',
+ 'filter', 'float', 'format', 'frozenset', 'getattr', 'globals',
+ 'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'isinstance',
+ 'issubclass', 'iter', 'len', 'list', 'locals', 'map', 'max',
+ 'memoryview', 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow',
+ 'print', 'property', 'range', 'repr', 'reversed', 'round', 'set',
+ 'setattr', 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super',
+ 'tuple', 'type', 'vars', 'zip'), prefix=r'(?>|[-~+/*%=<>&^|.]', Operator),
+ include('keywords'),
+ (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'funcname'),
+ (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'classname'),
+ (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace),
+ 'fromimport'),
+ (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace),
+ 'import'),
+ include('builtins'),
+ include('magicfuncs'),
+ include('magicvars'),
+ include('backtick'),
+ ('([rR]|[uUbB][rR]|[rR][uUbB])(""")',
+ bygroups(String.Affix, String.Double), 'tdqs'),
+ ("([rR]|[uUbB][rR]|[rR][uUbB])(''')",
+ bygroups(String.Affix, String.Single), 'tsqs'),
+ ('([rR]|[uUbB][rR]|[rR][uUbB])(")',
+ bygroups(String.Affix, String.Double), 'dqs'),
+ ("([rR]|[uUbB][rR]|[rR][uUbB])(')",
+ bygroups(String.Affix, String.Single), 'sqs'),
+ ('([uUbB]?)(""")', bygroups(String.Affix, String.Double),
+ combined('stringescape', 'tdqs')),
+ ("([uUbB]?)(''')", bygroups(String.Affix, String.Single),
+ combined('stringescape', 'tsqs')),
+ ('([uUbB]?)(")', bygroups(String.Affix, String.Double),
+ combined('stringescape', 'dqs')),
+ ("([uUbB]?)(')", bygroups(String.Affix, String.Single),
+ combined('stringescape', 'sqs')),
+ include('name'),
+ include('numbers'),
+ ],
+ 'keywords': [
+ (words((
+ 'assert', 'break', 'continue', 'del', 'elif', 'else', 'except',
+ 'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass',
+ 'print', 'raise', 'return', 'try', 'while', 'yield',
+ 'yield from', 'as', 'with'), suffix=r'\b'),
+ Keyword),
+ ],
+ 'builtins': [
+ (words((
+ '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
+ 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod',
+ 'cmp', 'coerce', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod',
+ 'enumerate', 'eval', 'execfile', 'exit', 'file', 'filter', 'float',
+ 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',
+ 'input', 'int', 'intern', 'isinstance', 'issubclass', 'iter', 'len',
+ 'list', 'locals', 'long', 'map', 'max', 'min', 'next', 'object',
+ 'oct', 'open', 'ord', 'pow', 'property', 'range', 'raw_input', 'reduce',
+ 'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice',
+ 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type',
+ 'unichr', 'unicode', 'vars', 'xrange', 'zip'),
+ prefix=r'(?>> )(.*\n)', bygroups(Generic.Prompt, Other.Code), 'continuations'),
+ # This happens, e.g., when tracebacks are embedded in documentation;
+ # trailing whitespaces are often stripped in such contexts.
+ (r'(>>>)(\n)', bygroups(Generic.Prompt, Whitespace)),
+ (r'(\^C)?Traceback \(most recent call last\):\n', Other.Traceback, 'traceback'),
+ # SyntaxError starts with this
+ (r' File "[^"]+", line \d+', Other.Traceback, 'traceback'),
+ (r'.*\n', Generic.Output),
+ ],
+ 'continuations': [
+ (r'(\.\.\. )(.*\n)', bygroups(Generic.Prompt, Other.Code)),
+ # See above.
+ (r'(\.\.\.)(\n)', bygroups(Generic.Prompt, Whitespace)),
+ default('#pop'),
+ ],
+ 'traceback': [
+ # As soon as we see a traceback, consume everything until the next
+ # >>> prompt.
+ (r'(?=>>>( |$))', Text, '#pop'),
+ (r'(KeyboardInterrupt)(\n)', bygroups(Name.Class, Whitespace)),
+ (r'.*\n', Other.Traceback),
+ ],
+ }
+
+
+class PythonConsoleLexer(DelegatingLexer):
+ """
+ For Python console output or doctests, such as:
+
+ .. sourcecode:: pycon
+
+ >>> a = 'foo'
+ >>> print(a)
+ foo
+ >>> 1 / 0
+ Traceback (most recent call last):
+ File "", line 1, in
+ ZeroDivisionError: integer division or modulo by zero
+
+ Additional options:
+
+ `python3`
+ Use Python 3 lexer for code. Default is ``True``.
+
+ .. versionadded:: 1.0
+ .. versionchanged:: 2.5
+ Now defaults to ``True``.
+ """
+
+ name = 'Python console session'
+ aliases = ['pycon', 'python-console']
+ mimetypes = ['text/x-python-doctest']
+ url = 'https://python.org'
+ version_added = ''
+
+ def __init__(self, **options):
+ python3 = get_bool_opt(options, 'python3', True)
+ if python3:
+ pylexer = PythonLexer
+ tblexer = PythonTracebackLexer
+ else:
+ pylexer = Python2Lexer
+ tblexer = Python2TracebackLexer
+ # We have two auxiliary lexers. Use DelegatingLexer twice with
+ # different tokens. TODO: DelegatingLexer should support this
+ # directly, by accepting a tuplet of auxiliary lexers and a tuple of
+ # distinguishing tokens. Then we wouldn't need this intermediary
+ # class.
+ class _ReplaceInnerCode(DelegatingLexer):
+ def __init__(self, **options):
+ super().__init__(pylexer, _PythonConsoleLexerBase, Other.Code, **options)
+ super().__init__(tblexer, _ReplaceInnerCode, Other.Traceback, **options)
+
+
+class PythonTracebackLexer(RegexLexer):
+ """
+ For Python 3.x tracebacks, with support for chained exceptions.
+
+ .. versionchanged:: 2.5
+ This is now the default ``PythonTracebackLexer``. It is still available
+ as the alias ``Python3TracebackLexer``.
+ """
+
+ name = 'Python Traceback'
+ aliases = ['pytb', 'py3tb']
+ filenames = ['*.pytb', '*.py3tb']
+ mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback']
+ url = 'https://python.org'
+ version_added = '1.0'
+
+ tokens = {
+ 'root': [
+ (r'\n', Whitespace),
+ (r'^(\^C)?Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
+ (r'^During handling of the above exception, another '
+ r'exception occurred:\n\n', Generic.Traceback),
+ (r'^The above exception was the direct cause of the '
+ r'following exception:\n\n', Generic.Traceback),
+ (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
+ (r'^.*\n', Other),
+ ],
+ 'intb': [
+ (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text, Name, Whitespace)),
+ (r'^( File )("[^"]+")(, line )(\d+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Whitespace)),
+ (r'^( )(.+)(\n)',
+ bygroups(Whitespace, using(PythonLexer), Whitespace), 'markers'),
+ (r'^([ \t]*)(\.\.\.)(\n)',
+ bygroups(Whitespace, Comment, Whitespace)), # for doctests...
+ (r'^([^:]+)(: )(.+)(\n)',
+ bygroups(Generic.Error, Text, Name, Whitespace), '#pop'),
+ (r'^([a-zA-Z_][\w.]*)(:?\n)',
+ bygroups(Generic.Error, Whitespace), '#pop'),
+ default('#pop'),
+ ],
+ 'markers': [
+ # Either `PEP 657 `
+ # error locations in Python 3.11+, or single-caret markers
+ # for syntax errors before that.
+ (r'^( {4,})([~^]+)(\n)',
+ bygroups(Whitespace, Punctuation.Marker, Whitespace),
+ '#pop'),
+ default('#pop'),
+ ],
+ }
+
+
+Python3TracebackLexer = PythonTracebackLexer
+
+
+class Python2TracebackLexer(RegexLexer):
+ """
+ For Python tracebacks.
+
+ .. versionchanged:: 2.5
+ This class has been renamed from ``PythonTracebackLexer``.
+ ``PythonTracebackLexer`` now refers to the Python 3 variant.
+ """
+
+ name = 'Python 2.x Traceback'
+ aliases = ['py2tb']
+ filenames = ['*.py2tb']
+ mimetypes = ['text/x-python2-traceback']
+ url = 'https://python.org'
+ version_added = '0.7'
+
+ tokens = {
+ 'root': [
+ # Cover both (most recent call last) and (innermost last)
+ # The optional ^C allows us to catch keyboard interrupt signals.
+ (r'^(\^C)?(Traceback.*\n)',
+ bygroups(Text, Generic.Traceback), 'intb'),
+ # SyntaxError starts with this.
+ (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
+ (r'^.*\n', Other),
+ ],
+ 'intb': [
+ (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text, Name, Whitespace)),
+ (r'^( File )("[^"]+")(, line )(\d+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Whitespace)),
+ (r'^( )(.+)(\n)',
+ bygroups(Text, using(Python2Lexer), Whitespace), 'marker'),
+ (r'^([ \t]*)(\.\.\.)(\n)',
+ bygroups(Text, Comment, Whitespace)), # for doctests...
+ (r'^([^:]+)(: )(.+)(\n)',
+ bygroups(Generic.Error, Text, Name, Whitespace), '#pop'),
+ (r'^([a-zA-Z_]\w*)(:?\n)',
+ bygroups(Generic.Error, Whitespace), '#pop')
+ ],
+ 'marker': [
+ # For syntax errors.
+ (r'( {4,})(\^)', bygroups(Text, Punctuation.Marker), '#pop'),
+ default('#pop'),
+ ],
+ }
+
+
+class CythonLexer(RegexLexer):
+ """
+ For Pyrex and Cython source code.
+ """
+
+ name = 'Cython'
+ url = 'https://cython.org'
+ aliases = ['cython', 'pyx', 'pyrex']
+ filenames = ['*.pyx', '*.pxd', '*.pxi']
+ mimetypes = ['text/x-cython', 'application/x-cython']
+ version_added = '1.1'
+
+ tokens = {
+ 'root': [
+ (r'\n', Whitespace),
+ (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Whitespace, String.Doc)),
+ (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Whitespace, String.Doc)),
+ (r'[^\S\n]+', Text),
+ (r'#.*$', Comment),
+ (r'[]{}:(),;[]', Punctuation),
+ (r'\\\n', Whitespace),
+ (r'\\', Text),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'(<)([a-zA-Z0-9.?]+)(>)',
+ bygroups(Punctuation, Keyword.Type, Punctuation)),
+ (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
+ (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
+ bygroups(Keyword, Number.Integer, Operator, Whitespace, Operator,
+ Name, Punctuation)),
+ include('keywords'),
+ (r'(def|property)(\s+)', bygroups(Keyword, Whitespace), 'funcname'),
+ (r'(cp?def)(\s+)', bygroups(Keyword, Whitespace), 'cdef'),
+ # (should actually start a block with only cdefs)
+ (r'(cdef)(:)', bygroups(Keyword, Punctuation)),
+ (r'(class|struct)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
+ (r'(from)(\s+)', bygroups(Keyword, Whitespace), 'fromimport'),
+ (r'(c?import)(\s+)', bygroups(Keyword, Whitespace), 'import'),
+ include('builtins'),
+ include('backtick'),
+ ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
+ ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
+ ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
+ ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
+ ('[uU]?"""', String, combined('stringescape', 'tdqs')),
+ ("[uU]?'''", String, combined('stringescape', 'tsqs')),
+ ('[uU]?"', String, combined('stringescape', 'dqs')),
+ ("[uU]?'", String, combined('stringescape', 'sqs')),
+ include('name'),
+ include('numbers'),
+ ],
+ 'keywords': [
+ (words((
+ 'assert', 'async', 'await', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
+ 'else', 'except', 'except?', 'exec', 'finally', 'for', 'fused', 'gil',
+ 'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print',
+ 'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'),
+ Keyword),
+ (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc),
+ ],
+ 'builtins': [
+ (words((
+ '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', 'bint',
+ 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr',
+ 'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr',
+ 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit',
+ 'file', 'filter', 'float', 'frozenset', 'getattr', 'globals',
+ 'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance',
+ 'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max',
+ 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property', 'Py_ssize_t',
+ 'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed',
+ 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod',
+ 'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', 'unsigned',
+ 'vars', 'xrange', 'zip'), prefix=r'(??/\\:']?:)(\s*)(\{)",
+ bygroups(Name.Function, Whitespace, Operator, Whitespace, Punctuation),
+ "functions"),
+ # Variable Names
+ (r"([.]?[a-zA-Z][\w.]*)(\s*)([-.~=!@#$%^&*_+|,<>?/\\:']?:)",
+ bygroups(Name.Variable, Whitespace, Operator)),
+ # Functions
+ (r"\{", Punctuation, "functions"),
+ # Parentheses
+ (r"\(", Punctuation, "parentheses"),
+ # Brackets
+ (r"\[", Punctuation, "brackets"),
+ # Errors
+ (r"'`([a-zA-Z][\w.]*)?", Name.Exception),
+ # File Symbols
+ (r"`:([a-zA-Z/][\w./]*)?", String.Symbol),
+ # Symbols
+ (r"`([a-zA-Z][\w.]*)?", String.Symbol),
+ # Numbers
+ include("numbers"),
+ # Variable Names
+ (r"[a-zA-Z][\w.]*", Name),
+ # Operators
+ (r"[-=+*#$%@!~^&:.,<>'\\|/?_]", Operator),
+ # Punctuation
+ (r";", Punctuation),
+ ],
+ "functions": [
+ include("root"),
+ (r"\}", Punctuation, "#pop"),
+ ],
+ "parentheses": [
+ include("root"),
+ (r"\)", Punctuation, "#pop"),
+ ],
+ "brackets": [
+ include("root"),
+ (r"\]", Punctuation, "#pop"),
+ ],
+ "numbers": [
+ # Binary Values
+ (r"[01]+b", Number.Bin),
+ # Nulls/Infinities
+ (r"0[nNwW][cefghijmndzuvtp]?", Number),
+ # Timestamps
+ ((r"(?:[0-9]{4}[.][0-9]{2}[.][0-9]{2}|[0-9]+)"
+ "D(?:[0-9](?:[0-9](?::[0-9]{2}"
+ "(?::[0-9]{2}(?:[.][0-9]*)?)?)?)?)?"), Literal.Date),
+ # Datetimes
+ ((r"[0-9]{4}[.][0-9]{2}"
+ "(?:m|[.][0-9]{2}(?:T(?:[0-9]{2}:[0-9]{2}"
+ "(?::[0-9]{2}(?:[.][0-9]*)?)?)?)?)"), Literal.Date),
+ # Times
+ (r"[0-9]{2}:[0-9]{2}(?::[0-9]{2}(?:[.][0-9]{1,3})?)?",
+ Literal.Date),
+ # GUIDs
+ (r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}",
+ Number.Hex),
+ # Byte Vectors
+ (r"0x[0-9a-fA-F]+", Number.Hex),
+ # Floats
+ (r"([0-9]*[.]?[0-9]+|[0-9]+[.]?[0-9]*)[eE][+-]?[0-9]+[ef]?",
+ Number.Float),
+ (r"([0-9]*[.][0-9]+|[0-9]+[.][0-9]*)[ef]?", Number.Float),
+ (r"[0-9]+[ef]", Number.Float),
+ # Characters
+ (r"[0-9]+c", Number),
+ # Integers
+ (r"[0-9]+[ihtuv]", Number.Integer),
+ # Long Integers
+ (r"[0-9]+[jnp]?", Number.Integer.Long),
+ ],
+ "comments": [
+ (r"[^\\]+", Comment.Multiline),
+ (r"^\\", Comment.Multiline, "#pop"),
+ (r"\\", Comment.Multiline),
+ ],
+ "strings": [
+ (r'[^"\\]+', String.Double),
+ (r"\\.", String.Escape),
+ (r'"', String.Double, "#pop"),
+ ],
+ }
+
+
class QLexer(KLexer):
    """
    For Q source code.

    Q is layered on top of K, so this lexer subclasses :class:`KLexer`
    and only contributes the Q builtin function names; every other rule
    (numbers, symbols, operators, ...) comes from the parent lexer via
    ``inherit``.
    """

    name = "Q"
    aliases = ["q"]
    filenames = ["*.q"]
    version_added = '2.12'

    tokens = {
        "root": [
            # Q builtin function names, matched as whole words before the
            # inherited K rules get a chance to see them.
            (words(("aj", "aj0", "ajf", "ajf0", "all", "and", "any", "asc",
                    "asof", "attr", "avgs", "ceiling", "cols", "count", "cross",
                    "csv", "cut", "deltas", "desc", "differ", "distinct", "dsave",
                    "each", "ej", "ema", "eval", "except", "fby", "fills", "first",
                    "fkeys", "flip", "floor", "get", "group", "gtime", "hclose",
                    "hcount", "hdel", "hsym", "iasc", "idesc", "ij", "ijf",
                    "inter", "inv", "key", "keys", "lj", "ljf", "load", "lower",
                    "lsq", "ltime", "ltrim", "mavg", "maxs", "mcount", "md5",
                    "mdev", "med", "meta", "mins", "mmax", "mmin", "mmu", "mod",
                    "msum", "neg", "next", "not", "null", "or", "over", "parse",
                    "peach", "pj", "prds", "prior", "prev", "rand", "rank", "ratios",
                    "raze", "read0", "read1", "reciprocal", "reval", "reverse",
                    "rload", "rotate", "rsave", "rtrim", "save", "scan", "scov",
                    "sdev", "set", "show", "signum", "ssr", "string", "sublist",
                    "sums", "sv", "svar", "system", "tables", "til", "trim", "txf",
                    "type", "uj", "ujf", "ungroup", "union", "upper", "upsert",
                    "value", "view", "views", "vs", "where", "wj", "wj1", "ww",
                    "xasc", "xbar", "xcol", "xcols", "xdesc", "xgroup", "xkey",
                    "xlog", "xprev", "xrank"),
                   suffix=r"\b"), Name.Builtin,
             ),
            # Fall through to all of KLexer's 'root' rules.
            inherit,
        ],
    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/qlik.py b/venv/lib/python3.10/site-packages/pygments/lexers/qlik.py
new file mode 100644
index 0000000000000000000000000000000000000000..a29f89f35a9b1f4e8be514da1259e42feb2a9642
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/qlik.py
@@ -0,0 +1,117 @@
+"""
+ pygments.lexers.qlik
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the qlik scripting language
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, words
+from pygments.token import Comment, Keyword, Name, Number, Operator, \
+ Punctuation, String, Text
+from pygments.lexers._qlik_builtins import OPERATORS_LIST, STATEMENT_LIST, \
+ SCRIPT_FUNCTIONS, CONSTANT_LIST
+
+__all__ = ["QlikLexer"]
+
+
class QlikLexer(RegexLexer):
    """
    Lexer for qlik code, including .qvs files.

    The whole lexer is case-insensitive (``re.IGNORECASE``).  Strings,
    quoted/bracketed field names and function argument lists get their
    own states so that ``$(variable)`` interpolation is still recognised
    inside them.
    """

    name = "Qlik"
    aliases = ["qlik", "qlikview", "qliksense", "qlikscript"]
    filenames = ["*.qvs", "*.qvw"]
    url = "https://qlik.com"
    version_added = '2.12'

    flags = re.IGNORECASE

    tokens = {
        # Handle multi-line comments
        "comment": [
            (r"\*/", Comment.Multiline, "#pop"),
            (r"[^*]+", Comment.Multiline),
        ],
        # Handle numbers
        "numerics": [
            (r"\b\d+\.\d+(e\d+)?[fd]?\b", Number.Float),
            (r"\b\d+\b", Number.Integer),
        ],
        # Handle variable names in things: $(variableName)
        "interp": [
            (
                r"(\$\()(\w+)(\))",
                bygroups(String.Interpol, Name.Variable, String.Interpol),
            ),
        ],
        # Handle strings; '$' is matched separately so that an interpolation
        # gets first shot at '$(' before the literal fallback.
        "string": [
            (r"'", String, "#pop"),
            include("interp"),
            (r"[^'$]+", String),
            (r"\$", String),
        ],
        # Body of a let/set assignment, terminated by ';'.
        "assignment": [
            (r";", Punctuation, "#pop"),
            include("root"),
        ],
        "field_name_quote": [
            (r'"', String.Symbol, "#pop"),
            include("interp"),
            (r"[^\"$]+", String.Symbol),
            (r"\$", String.Symbol),
        ],
        "field_name_bracket": [
            (r"\]", String.Symbol, "#pop"),
            include("interp"),
            (r"[^\]$]+", String.Symbol),
            (r"\$", String.Symbol),
        ],
        "function": [(r"\)", Punctuation, "#pop"), include("root")],
        "root": [
            # Whitespace and comments
            (r"\s+", Text.Whitespace),
            (r"/\*", Comment.Multiline, "comment"),
            (r"//.*\n", Comment.Single),
            # variable assignment
            (r"(let|set)(\s+)", bygroups(Keyword.Declaration, Text.Whitespace),
             "assignment"),
            # Word operators
            (words(OPERATORS_LIST["words"], prefix=r"\b", suffix=r"\b"),
             Operator.Word),
            # Statements
            (words(STATEMENT_LIST, suffix=r"\b"), Keyword),
            # Table names
            (r"[a-z]\w*:", Keyword.Declaration),
            # Constants
            (words(CONSTANT_LIST, suffix=r"\b"), Keyword.Constant),
            # Functions
            (words(SCRIPT_FUNCTIONS, suffix=r"(?=\s*\()"), Name.Builtin,
             "function"),
            # interpolation - e.g. $(variableName)
            include("interp"),
            # Quotes denote a field/file name
            (r'"', String.Symbol, "field_name_quote"),
            # Square brackets denote a field/file name
            (r"\[", String.Symbol, "field_name_bracket"),
            # Strings
            (r"'", String, "string"),
            # Numbers
            include("numerics"),
            # Operator symbols
            (words(OPERATORS_LIST["symbols"]), Operator),
            # Strings denoted by single quotes
            (r"'.+?'", String),
            # Words as text
            (r"\b\w+\b", Text),
            # Basic punctuation
            (r"[,;.()\\/]", Punctuation),
        ],
    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/qvt.py b/venv/lib/python3.10/site-packages/pygments/lexers/qvt.py
new file mode 100644
index 0000000000000000000000000000000000000000..302d1b6ed894ef6a928fd09ba7b679791d4ddaa7
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/qvt.py
@@ -0,0 +1,153 @@
+"""
+ pygments.lexers.qvt
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for QVT Operational language.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, include, combined, default, \
+ words
+from pygments.token import Text, Comment, Operator, Keyword, Punctuation, \
+ Name, String, Number
+
+__all__ = ['QVToLexer']
+
+
class QVToLexer(RegexLexer):
    """
    For the QVT Operational Mapping language.

    Reference for implementing this: «Meta Object Facility (MOF) 2.0
    Query/View/Transformation Specification», Version 1.1 - January 2011
    (https://www.omg.org/spec/QVT/1.1/), see §8.4, «Concrete Syntax» in
    particular.

    Notable tokens assignments:

    - Name.Class is assigned to the identifier following any of the following
      keywords: metamodel, class, exception, primitive, enum, transformation
      or library

    - Name.Function is assigned to the names of mappings and queries

    - Name.Builtin.Pseudo is assigned to the pre-defined variables 'this',
      'self' and 'result'.
    """
    # With obvious borrowings & inspiration from the Java, Python and C lexers

    name = 'QVTO'
    aliases = ['qvto', 'qvt']
    filenames = ['*.qvto']
    url = 'https://www.omg.org/spec/QVT/1.1'
    version_added = ''

    tokens = {
        'root': [
            (r'\n', Text),
            (r'[^\S\n]+', Text),
            # Line comments ('--' or '//'); an optional "directive:" marker
            # is highlighted as a preprocessor-style token.
            (r'(--|//)(\s*)(directive:)?(.*)$',
             bygroups(Comment, Comment, Comment.Preproc, Comment)),
            # Uncomment the following if you want to distinguish between
            # '/*' and '/**', à la javadoc
            # (r'/[*]{2}(.|\n)*?[*]/', Comment.Multiline),
            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
            (r'\\\n', Text),
            (r'(and|not|or|xor|##?)\b', Operator.Word),
            (r'(:{1,2}=|[-+]=)\b', Operator.Word),
            (r'(@|<<|>>)\b', Keyword),  # stereotypes
            (r'!=|<>|==|=|!->|->|>=|<=|[.]{3}|[+/*%=<>&|.~]', Operator),
            (r'[]{}:(),;[]', Punctuation),
            (r'(true|false|unlimited|null)\b', Keyword.Constant),
            (r'(this|self|result)\b', Name.Builtin.Pseudo),
            (r'(var)\b', Keyword.Declaration),
            (r'(from|import)\b', Keyword.Namespace, 'fromimport'),
            (r'(metamodel|class|exception|primitive|enum|transformation|'
             r'library)(\s+)(\w+)',
             bygroups(Keyword.Word, Text, Name.Class)),
            (r'(exception)(\s+)(\w+)',
             bygroups(Keyword.Word, Text, Name.Exception)),
            (r'(main)\b', Name.Function),
            (r'(mapping|helper|query)(\s+)',
             bygroups(Keyword.Declaration, Text), 'operation'),
            (r'(assert)(\s+)\b', bygroups(Keyword, Text), 'assert'),
            (r'(Bag|Collection|Dict|OrderedSet|Sequence|Set|Tuple|List)\b',
             Keyword.Type),
            include('keywords'),
            ('"', String, combined('stringescape', 'dqs')),
            ("'", String, combined('stringescape', 'sqs')),
            include('name'),
            include('numbers'),
            # (r'([a-zA-Z_]\w*)(::)([a-zA-Z_]\w*)',
            # bygroups(Text, Text, Text)),
        ],

        'fromimport': [
            (r'(?:[ \t]|\\\n)+', Text),
            (r'[a-zA-Z_][\w.]*', Name.Namespace),
            default('#pop'),
        ],

        # After 'mapping'/'helper'/'query': everything up to '::' is the
        # (optional) context type; the name before '(' is the operation.
        'operation': [
            (r'::', Text),
            (r'(.*::)([a-zA-Z_]\w*)([ \t]*)(\()',
             bygroups(Text, Name.Function, Text, Punctuation), '#pop')
        ],

        'assert': [
            (r'(warning|error|fatal)\b', Keyword, '#pop'),
            default('#pop'),  # all else: go back
        ],

        'keywords': [
            (words((
                'abstract', 'access', 'any', 'assert', 'blackbox', 'break',
                'case', 'collect', 'collectNested', 'collectOne', 'collectselect',
                'collectselectOne', 'composes', 'compute', 'configuration',
                'constructor', 'continue', 'datatype', 'default', 'derived',
                'disjuncts', 'do', 'elif', 'else', 'end', 'endif', 'except',
                'exists', 'extends', 'forAll', 'forEach', 'forOne', 'from', 'if',
                'implies', 'in', 'inherits', 'init', 'inout', 'intermediate',
                'invresolve', 'invresolveIn', 'invresolveone', 'invresolveoneIn',
                'isUnique', 'iterate', 'late', 'let', 'literal', 'log', 'map',
                'merges', 'modeltype', 'new', 'object', 'one', 'ordered', 'out',
                'package', 'population', 'property', 'raise', 'readonly',
                'references', 'refines', 'reject', 'resolve', 'resolveIn',
                'resolveone', 'resolveoneIn', 'return', 'select', 'selectOne',
                'sortedBy', 'static', 'switch', 'tag', 'then', 'try', 'typedef',
                'unlimited', 'uses', 'when', 'where', 'while', 'with', 'xcollect',
                'xmap', 'xselect'), suffix=r'\b'), Keyword),
        ],

        # There is no need to distinguish between String.Single and
        # String.Double: 'strings' is factorised for 'dqs' and 'sqs'
        'strings': [
            (r'[^\\\'"\n]+', String),
            # quotes, percents and backslashes must be parsed one at a time
            (r'[\'"\\]', String),
        ],
        'stringescape': [
            (r'\\([\\btnfr"\']|u[0-3][0-7]{2}|u[0-7]{1,2})', String.Escape)
        ],
        'dqs': [  # double-quoted string
            (r'"', String, '#pop'),
            (r'\\\\|\\"', String.Escape),
            include('strings')
        ],
        'sqs': [  # single-quoted string
            (r"'", String, '#pop'),
            (r"\\\\|\\'", String.Escape),
            include('strings')
        ],
        'name': [
            (r'[a-zA-Z_]\w*', Name),
        ],
        # numbers: excerpt taken from the python lexer
        'numbers': [
            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
            (r'\d+[eE][+-]?[0-9]+', Number.Float),
            (r'\d+', Number.Integer)
        ],
    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/r.py b/venv/lib/python3.10/site-packages/pygments/lexers/r.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3f65ba2d8644a6faa4f50526d2e31b08ed4d339
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/r.py
@@ -0,0 +1,196 @@
+"""
+ pygments.lexers.r
+ ~~~~~~~~~~~~~~~~~
+
+ Lexers for the R/S languages.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, do_insertions
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Whitespace
+
+__all__ = ['RConsoleLexer', 'SLexer', 'RdLexer']
+
+
+line_re = re.compile('.*?\n')
+
+
class RConsoleLexer(Lexer):
    """
    For R console transcripts or R CMD BATCH output files.

    Lines beginning with ``>`` or ``+`` are prompt lines: the two-character
    prompt is emitted as ``Generic.Prompt`` and the rest is buffered and
    highlighted as R code with :class:`SLexer`.  All other lines are
    emitted verbatim as ``Generic.Output``.
    """

    name = 'RConsole'
    aliases = ['rconsole', 'rout']
    filenames = ['*.Rout']
    url = 'https://www.r-project.org'
    version_added = ''
    _example = "rconsole/r-console-transcript.Rout"

    def get_tokens_unprocessed(self, text):
        code_lexer = SLexer(**self.options)

        code_buffer = ''
        prompt_insertions = []

        for match in line_re.finditer(text):
            line = match.group()
            if line.startswith(('>', '+')):
                # Remember where the two-character prompt belongs, then
                # accumulate the code that follows it for later weaving.
                prompt_insertions.append(
                    (len(code_buffer), [(0, Generic.Prompt, line[:2])]))
                code_buffer += line[2:]
                continue
            # A non-prompt line: flush any pending code block first, ...
            if code_buffer:
                yield from do_insertions(
                    prompt_insertions,
                    code_lexer.get_tokens_unprocessed(code_buffer))
                code_buffer = ''
                prompt_insertions = []
            # ... then emit the line itself as plain R output.
            yield match.start(), Generic.Output, line

        # Transcript may end on a code block with no output after it;
        # flush it here so its tokens are not lost.
        if code_buffer:
            yield from do_insertions(
                prompt_insertions,
                code_lexer.get_tokens_unprocessed(code_buffer))
+
+
class SLexer(RegexLexer):
    """
    For S, S-plus, and R source code.
    """

    name = 'S'
    aliases = ['splus', 's', 'r']
    filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
    mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
                 'text/x-R', 'text/x-r-history', 'text/x-r-profile']
    url = 'https://www.r-project.org'
    version_added = '0.10'

    # An R identifier: a backquoted name (with backslash escapes), a name
    # starting with a letter or a dot followed by a non-digit, or a lone dot.
    valid_name = r'`[^`\\]*(?:\\.[^`\\]*)*`|(?:[a-zA-Z]|\.[A-Za-z_.])[\w.]*|\.'
    tokens = {
        'comments': [
            (r'#.*$', Comment.Single),
        ],
        'valid_name': [
            (valid_name, Name),
        ],
        'function_name': [
            # An identifier immediately followed by '(' is a function call;
            # the lookahead leaves the parenthesis for 'punctuation'.
            (rf'({valid_name})\s*(?=\()', Name.Function),
        ],
        'punctuation': [
            (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
        ],
        'keywords': [
            (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
             r'(?![\w.])',
             Keyword.Reserved),
        ],
        'operators': [
            (r'<-|->>?|-|==|<=|>=|\|>|<|>|&&?|!=|\|\|?|\?', Operator),
            (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator),
        ],
        'builtin_symbols': [
            (r'(NULL|NA(_(integer|real|complex|character)_)?|'
             r'letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))'
             r'(?![\w.])',
             Keyword.Constant),
            (r'(T|F)\b', Name.Builtin.Pseudo),
        ],
        'numbers': [
            # hex number
            (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
            # decimal number
            (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?',
             Number),
        ],
        'statements': [
            include('comments'),
            # whitespaces
            (r'\s+', Whitespace),
            (r'\'', String, 'string_squote'),
            (r'\"', String, 'string_dquote'),
            include('builtin_symbols'),
            include('keywords'),
            include('function_name'),
            include('valid_name'),
            include('numbers'),
            include('punctuation'),
            include('operators'),
        ],
        'root': [
            # calls:
            include('statements'),
            # blocks:
            (r'\{|\}', Punctuation),
            # (r'\{', Punctuation, 'block'),
            (r'.', Text),
        ],
        # 'block': [
        #     include('statements'),
        #     ('\{', Punctuation, '#push'),
        #     ('\}', Punctuation, '#pop')
        # ],
        'string_squote': [
            (r'([^\'\\]|\\.)*\'', String, '#pop'),
        ],
        'string_dquote': [
            (r'([^"\\]|\\.)*"', String, '#pop'),
        ],
    }

    def analyse_text(text):
        # The assignment arrow '<-' (not the start of '<--') after a value
        # or closing bracket is a reasonable hint that this is R/S.
        if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
            return 0.11
+
+
class RdLexer(RegexLexer):
    """
    Pygments Lexer for R documentation (Rd) files

    This is a very minimal implementation, highlighting little more
    than the macros. A description of Rd syntax is found in "Writing R
    Extensions" and "Parsing Rd files" on CRAN.
    """
    name = 'Rd'
    aliases = ['rd']
    filenames = ['*.Rd']
    mimetypes = ['text/x-r-doc']
    url = 'http://cran.r-project.org/doc/manuals/R-exts.html'
    version_added = '1.6'

    # To account for verbatim / LaTeX-like / and R-like areas
    # would require parsing.
    tokens = {
        'root': [
            # catch escaped brackets and percent sign
            (r'\\[\\{}%]', String.Escape),
            # comments
            (r'%.*$', Comment),
            # special macros with no arguments
            (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
            # macros
            (r'\\[a-zA-Z]+\b', Keyword),
            # special preprocessor macros
            (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
            # non-escaped brackets
            (r'[{}]', Name.Builtin),
            # everything else
            (r'[^\\%\n{}]+', Text),
            (r'.', Text),
        ]
    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/rdf.py b/venv/lib/python3.10/site-packages/pygments/lexers/rdf.py
new file mode 100644
index 0000000000000000000000000000000000000000..4930c1b387a6f098074fc15e26a1f7a62f3554c1
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/rdf.py
@@ -0,0 +1,468 @@
+"""
+ pygments.lexers.rdf
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for semantic web and RDF query languages and markup.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, default
+from pygments.token import Keyword, Punctuation, String, Number, Operator, \
+ Generic, Whitespace, Name, Literal, Comment, Text
+
+__all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer']
+
+
class SparqlLexer(RegexLexer):
    """
    Lexer for SPARQL query language.

    The character-class constants below follow the terminal productions of
    the SPARQL 1.1 grammar (PN_CHARS_BASE, PN_LOCAL, etc.).
    """
    name = 'SPARQL'
    aliases = ['sparql']
    filenames = ['*.rq', '*.sparql']
    mimetypes = ['application/sparql-query']
    url = 'https://www.w3.org/TR/sparql11-query'
    version_added = '2.0'

    # character group definitions ::

    PN_CHARS_BASE_GRP = ('a-zA-Z'
                         '\u00c0-\u00d6'
                         '\u00d8-\u00f6'
                         '\u00f8-\u02ff'
                         '\u0370-\u037d'
                         '\u037f-\u1fff'
                         '\u200c-\u200d'
                         '\u2070-\u218f'
                         '\u2c00-\u2fef'
                         '\u3001-\ud7ff'
                         '\uf900-\ufdcf'
                         '\ufdf0-\ufffd')

    PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')

    PN_CHARS_GRP = (PN_CHARS_U_GRP +
                    r'\-' +
                    r'0-9' +
                    '\u00b7' +
                    '\u0300-\u036f' +
                    '\u203f-\u2040')

    HEX_GRP = '0-9A-Fa-f'

    PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'

    # terminal productions ::

    PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'

    PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'

    PN_CHARS = '[' + PN_CHARS_GRP + ']'

    HEX = '[' + HEX_GRP + ']'

    PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'

    IRIREF = r'<(?:[^<>"{}|^`\\\x00-\x20])*>'

    BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
                       '.]*' + PN_CHARS + ')?'

    PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'

    VARNAME = '[0-9' + PN_CHARS_U_GRP + '][' + PN_CHARS_U_GRP + \
              '0-9\u00b7\u0300-\u036f\u203f-\u2040]*'

    PERCENT = '%' + HEX + HEX

    PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS

    PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'

    PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
                '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
                PN_CHARS_GRP + ':]|' + PLX + '))?')

    EXPONENT = r'[eE][+-]?\d+'

    # Lexer token definitions ::

    tokens = {
        'root': [
            (r'\s+', Text),
            # keywords ::
            (r'(?i)(select|construct|describe|ask|where|filter|group\s+by|minus|'
             r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|'
             r'offset|values|bindings|load|into|clear|drop|create|add|move|copy|'
             r'insert\s+data|delete\s+data|delete\s+where|with|delete|insert|'
             r'using\s+named|using|graph|default|named|all|optional|service|'
             r'silent|bind|undef|union|not\s+in|in|as|having|to|prefix|base)\b', Keyword),
            (r'(a)\b', Keyword),
            # IRIs ::
            ('(' + IRIREF + ')', Name.Label),
            # blank nodes ::
            ('(' + BLANK_NODE_LABEL + ')', Name.Label),
            # # variables ::
            ('[?$]' + VARNAME, Name.Variable),
            # prefixed names ::
            (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
             bygroups(Name.Namespace, Punctuation, Name.Tag)),
            # function names ::
            (r'(?i)(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
             r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
             r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
             r'hours|minutes|seconds|timezone|tz|now|uuid|struuid|md5|sha1|sha256|sha384|'
             r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
             r'isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|'
             r'count|sum|min|max|avg|sample|group_concat|separator)\b',
             Name.Function),
            # boolean literals ::
            (r'(true|false)', Keyword.Constant),
            # double literals ::
            (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
            # decimal literals ::
            (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
            # integer literals ::
            (r'[+\-]?\d+', Number.Integer),
            # operators ::
            (r'(\|\||&&|=|\*|\-|\+|/|!=|<=|>=|!|<|>)', Operator),
            # punctuation characters ::
            (r'[(){}.;,:^\[\]]', Punctuation),
            # line comments ::
            (r'#[^\n]*', Comment),
            # strings ::
            (r'"""', String, 'triple-double-quoted-string'),
            (r'"', String, 'single-double-quoted-string'),
            (r"'''", String, 'triple-single-quoted-string'),
            (r"'", String, 'single-single-quoted-string'),
        ],
        'triple-double-quoted-string': [
            (r'"""', String, 'end-of-string'),
            (r'[^\\]+', String),
            (r'\\', String, 'string-escape'),
        ],
        'single-double-quoted-string': [
            (r'"', String, 'end-of-string'),
            (r'[^"\\\n]+', String),
            (r'\\', String, 'string-escape'),
        ],
        'triple-single-quoted-string': [
            (r"'''", String, 'end-of-string'),
            (r'[^\\]+', String),
            (r'\\', String.Escape, 'string-escape'),
        ],
        'single-single-quoted-string': [
            (r"'", String, 'end-of-string'),
            (r"[^'\\\n]+", String),
            (r'\\', String, 'string-escape'),
        ],
        'string-escape': [
            (r'u' + HEX + '{4}', String.Escape, '#pop'),
            (r'U' + HEX + '{8}', String.Escape, '#pop'),
            (r'.', String.Escape, '#pop'),
        ],
        # After the closing quote: an optional language tag or datatype
        # operator; '#pop:2' leaves both this state and the string state.
        'end-of-string': [
            (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
             bygroups(Operator, Name.Function), '#pop:2'),
            (r'\^\^', Operator, '#pop:2'),
            default('#pop:2'),
        ],
    }
+
+
class TurtleLexer(RegexLexer):
    """
    Lexer for Turtle data language.

    The character-class constants follow the terminal productions of the
    W3C Turtle grammar (PN_CHARS_BASE, PN_LOCAL, etc.).
    """
    name = 'Turtle'
    aliases = ['turtle']
    filenames = ['*.ttl']
    mimetypes = ['text/turtle', 'application/x-turtle']
    url = 'https://www.w3.org/TR/turtle'
    version_added = '2.1'

    # character group definitions ::
    PN_CHARS_BASE_GRP = ('a-zA-Z'
                         '\u00c0-\u00d6'
                         '\u00d8-\u00f6'
                         '\u00f8-\u02ff'
                         '\u0370-\u037d'
                         '\u037f-\u1fff'
                         '\u200c-\u200d'
                         '\u2070-\u218f'
                         '\u2c00-\u2fef'
                         '\u3001-\ud7ff'
                         '\uf900-\ufdcf'
                         '\ufdf0-\ufffd')

    PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')

    PN_CHARS_GRP = (PN_CHARS_U_GRP +
                    r'\-' +
                    r'0-9' +
                    '\u00b7' +
                    '\u0300-\u036f' +
                    '\u203f-\u2040')

    PN_CHARS = '[' + PN_CHARS_GRP + ']'

    PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'

    PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'

    HEX_GRP = '0-9A-Fa-f'

    HEX = '[' + HEX_GRP + ']'

    PERCENT = '%' + HEX + HEX

    PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'

    PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'

    PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS

    PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'

    PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
                '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
                PN_CHARS_GRP + ':]|' + PLX + '))?')

    patterns = {
        'PNAME_NS': r'((?:[a-zA-Z][\w-]*)?\:)',  # Simplified character range
        'IRIREF': r'(<[^<>"{}|^`\\\x00-\x20]*>)'
    }

    tokens = {
        'root': [
            (r'\s+', Text),

            # Base / prefix
            (r'(@base|BASE)(\s+){IRIREF}(\s*)(\.?)'.format(**patterns),
             bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
                      Punctuation)),
            (r'(@prefix|PREFIX)(\s+){PNAME_NS}(\s+){IRIREF}(\s*)(\.?)'.format(**patterns),
             bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
                      Name.Variable, Whitespace, Punctuation)),

            # The shorthand predicate 'a'
            (r'(?<=\s)a(?=\s)', Keyword.Type),

            # IRIREF
            (r'{IRIREF}'.format(**patterns), Name.Variable),

            # PrefixedName
            (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
             bygroups(Name.Namespace, Punctuation, Name.Tag)),

            # BlankNodeLabel
            (r'(_)(:)([' + PN_CHARS_U_GRP + r'0-9]([' + PN_CHARS_GRP + r'.]*' + PN_CHARS + ')?)',
             bygroups(Name.Namespace, Punctuation, Name.Tag)),

            # Comment
            (r'#[^\n]+', Comment),

            (r'\b(true|false)\b', Literal),
            # Numeric literals.  The DOUBLE rule (with exponent) must come
            # before the plain DECIMAL rule: otherwise the mantissa of
            # "1.2e3" would be consumed as a decimal, leaving "e3" behind
            # as error tokens.  The non-capturing group was previously
            # mistyped as "(:?...)" and the exponent marker was
            # uppercase-only; the Turtle grammar allows both cases.
            (r'[+\-]?\d*(?:\.\d+)?[eE][+\-]?\d+', Number.Float),
            (r'[+\-]?\d*\.\d+', Number.Float),
            (r'[+\-]?\d+', Number.Integer),
            (r'[\[\](){}.;,:^]', Punctuation),

            (r'"""', String, 'triple-double-quoted-string'),
            (r'"', String, 'single-double-quoted-string'),
            (r"'''", String, 'triple-single-quoted-string'),
            (r"'", String, 'single-single-quoted-string'),
        ],
        'triple-double-quoted-string': [
            (r'"""', String, 'end-of-string'),
            (r'[^\\]+(?=""")', String),
            (r'\\', String, 'string-escape'),
        ],
        'single-double-quoted-string': [
            (r'"', String, 'end-of-string'),
            (r'[^"\\\n]+', String),
            (r'\\', String, 'string-escape'),
        ],
        'triple-single-quoted-string': [
            (r"'''", String, 'end-of-string'),
            (r"[^\\]+(?=''')", String),
            (r'\\', String, 'string-escape'),
        ],
        'single-single-quoted-string': [
            (r"'", String, 'end-of-string'),
            (r"[^'\\\n]+", String),
            (r'\\', String, 'string-escape'),
        ],
        'string-escape': [
            (r'.', String, '#pop'),
        ],
        # After the closing quote: optional language tag or '^^' datatype;
        # '#pop:2' leaves both this state and the string state.
        'end-of-string': [
            (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
             bygroups(Operator, Generic.Emph), '#pop:2'),

            (r'(\^\^){IRIREF}'.format(**patterns), bygroups(Operator, Generic.Emph), '#pop:2'),

            default('#pop:2'),

        ],
    }

    # Turtle and Tera Term macro files share the same file extension
    # but each has a recognizable and distinct syntax.
    def analyse_text(text):
        for t in ('@base ', 'BASE ', '@prefix ', 'PREFIX '):
            if re.search(rf'^\s*{t}', text):
                return 0.80
+
+
class ShExCLexer(RegexLexer):
    """
    Lexer for ShExC shape expressions language syntax.

    The character-class constants mirror the terminal productions shared
    with the other RDF grammars in this module (PN_CHARS_BASE, PN_LOCAL,
    UCHAR, etc.).
    """
    name = 'ShExC'
    aliases = ['shexc', 'shex']
    filenames = ['*.shex']
    mimetypes = ['text/shex']
    url = 'https://shex.io/shex-semantics/#shexc'
    version_added = ''

    # character group definitions ::

    PN_CHARS_BASE_GRP = ('a-zA-Z'
                         '\u00c0-\u00d6'
                         '\u00d8-\u00f6'
                         '\u00f8-\u02ff'
                         '\u0370-\u037d'
                         '\u037f-\u1fff'
                         '\u200c-\u200d'
                         '\u2070-\u218f'
                         '\u2c00-\u2fef'
                         '\u3001-\ud7ff'
                         '\uf900-\ufdcf'
                         '\ufdf0-\ufffd')

    PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')

    PN_CHARS_GRP = (PN_CHARS_U_GRP +
                    r'\-' +
                    r'0-9' +
                    '\u00b7' +
                    '\u0300-\u036f' +
                    '\u203f-\u2040')

    HEX_GRP = '0-9A-Fa-f'

    PN_LOCAL_ESC_CHARS_GRP = r"_~.\-!$&'()*+,;=/?#@%"

    # terminal productions ::

    PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'

    PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'

    PN_CHARS = '[' + PN_CHARS_GRP + ']'

    HEX = '[' + HEX_GRP + ']'

    PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'

    UCHAR_NO_BACKSLASH = '(?:u' + HEX + '{4}|U' + HEX + '{8})'

    UCHAR = r'\\' + UCHAR_NO_BACKSLASH

    IRIREF = r'<(?:[^\x00-\x20<>"{}|^`\\]|' + UCHAR + ')*>'

    BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
                       '.]*' + PN_CHARS + ')?'

    PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'

    PERCENT = '%' + HEX + HEX

    PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS

    PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'

    PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
                '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
                PN_CHARS_GRP + ':]|' + PLX + '))?')

    EXPONENT = r'[eE][+-]?\d+'

    # Lexer token definitions ::

    tokens = {
        'root': [
            (r'\s+', Text),
            # keywords ::
            (r'(?i)(base|prefix|start|external|'
             r'literal|iri|bnode|nonliteral|length|minlength|maxlength|'
             r'mininclusive|minexclusive|maxinclusive|maxexclusive|'
             r'totaldigits|fractiondigits|'
             r'closed|extra)\b', Keyword),
            (r'(a)\b', Keyword),
            # IRIs ::
            ('(' + IRIREF + ')', Name.Label),
            # blank nodes ::
            ('(' + BLANK_NODE_LABEL + ')', Name.Label),
            # prefixed names ::
            (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + ')?',
             bygroups(Name.Namespace, Punctuation, Name.Tag)),
            # boolean literals ::
            (r'(true|false)', Keyword.Constant),
            # double literals ::
            (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
            # decimal literals ::
            (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
            # integer literals ::
            (r'[+\-]?\d+', Number.Integer),
            # operators ::
            (r'[@|$&=*+?^\-~]', Operator),
            # operator keywords ::
            (r'(?i)(and|or|not)\b', Operator.Word),
            # punctuation characters ::
            (r'[(){}.;,:^\[\]]', Punctuation),
            # line comments ::
            (r'#[^\n]*', Comment),
            # strings ::
            (r'"""', String, 'triple-double-quoted-string'),
            (r'"', String, 'single-double-quoted-string'),
            (r"'''", String, 'triple-single-quoted-string'),
            (r"'", String, 'single-single-quoted-string'),
        ],
        'triple-double-quoted-string': [
            (r'"""', String, 'end-of-string'),
            (r'[^\\]+', String),
            (r'\\', String, 'string-escape'),
        ],
        'single-double-quoted-string': [
            (r'"', String, 'end-of-string'),
            (r'[^"\\\n]+', String),
            (r'\\', String, 'string-escape'),
        ],
        'triple-single-quoted-string': [
            (r"'''", String, 'end-of-string'),
            (r'[^\\]+', String),
            (r'\\', String.Escape, 'string-escape'),
        ],
        'single-single-quoted-string': [
            (r"'", String, 'end-of-string'),
            (r"[^'\\\n]+", String),
            (r'\\', String, 'string-escape'),
        ],
        'string-escape': [
            # \uXXXX / \UXXXXXXXX escapes first, then any single character.
            (UCHAR_NO_BACKSLASH, String.Escape, '#pop'),
            (r'.', String.Escape, '#pop'),
        ],
        # After the closing quote: optional language tag or '^^' datatype;
        # '#pop:2' leaves both this state and the string state.
        'end-of-string': [
            (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
             bygroups(Operator, Name.Function), '#pop:2'),
            (r'\^\^', Operator, '#pop:2'),
            default('#pop:2'),
        ],
    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/rebol.py b/venv/lib/python3.10/site-packages/pygments/lexers/rebol.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b37a749450472b4adf9c6a613bc1b8ab503f5a2
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/rebol.py
@@ -0,0 +1,419 @@
+"""
+ pygments.lexers.rebol
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the REBOL and related languages.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Generic, Whitespace
+
+__all__ = ['RebolLexer', 'RedLexer']
+
+
+class RebolLexer(RegexLexer):
+    """
+    A REBOL lexer.
+    """
+    name = 'REBOL'
+    aliases = ['rebol']
+    filenames = ['*.r', '*.r3', '*.reb']
+    mimetypes = ['text/x-rebol']
+    url = 'http://www.rebol.com'
+    version_added = '1.1'
+
+    flags = re.IGNORECASE | re.MULTILINE
+
+    # REBOL caret escape sequence, e.g. ^(00FF); reused by several states.
+    escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
+
+    def word_callback(lexer, match):
+        # Classify a matched bare word.  The elif chain is checked top to
+        # bottom, so the order of these categories is significant.
+        word = match.group()
+
+        if re.match(".*:$", word):
+            yield match.start(), Generic.Subheading, word  # set-word
+        elif re.match(
+                r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|'
+                r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|'
+                r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|'
+                r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|'
+                r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|'
+                r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|'
+                r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|'
+                r'while|compress|decompress|secure|open|close|read|read-io|'
+                r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|'
+                r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|'
+                r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|'
+                r'browse|launch|stats|get-modes|set-modes|to-local-file|'
+                r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|'
+                r'hide|draw|show|size-text|textinfo|offset-to-caret|'
+                r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|'
+                r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|'
+                r'dsa-make-key|dsa-generate-key|dsa-make-signature|'
+                r'dsa-verify-signature|rsa-make-key|rsa-generate-key|'
+                r'rsa-encrypt)$', word):
+            yield match.start(), Name.Builtin, word
+        elif re.match(
+                r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|'
+                r'minimum|maximum|negate|complement|absolute|random|head|tail|'
+                r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|'
+                r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|'
+                r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|'
+                r'copy)$', word):
+            yield match.start(), Name.Function, word
+        elif re.match(
+                r'(error|source|input|license|help|install|echo|Usage|with|func|'
+                r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|'
+                r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|'
+                r'remold|charset|array|replace|move|extract|forskip|forall|alter|'
+                r'first+|also|take|for|forever|dispatch|attempt|what-dir|'
+                r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|'
+                r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|'
+                r'build-tag|process-source|build-markup|decode-cgi|read-cgi|'
+                r'write-user|save-user|set-user-name|protect-system|parse-xml|'
+                r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|'
+                r'scroll-para|get-face|alert|set-face|uninstall|unfocus|'
+                r'request-dir|center-face|do-events|net-error|decode-url|'
+                r'parse-header|parse-header-date|parse-email-addrs|import-email|'
+                r'send|build-attach-body|resend|show-popup|hide-popup|open-events|'
+                r'find-key-face|do-face|viewtop|confine|find-window|'
+                r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|'
+                r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|'
+                r'read-thru|load-thru|do-thru|launch-thru|load-image|'
+                r'request-download|do-face-alt|set-font|set-para|get-style|'
+                r'set-style|make-face|stylize|choose|hilight-text|hilight-all|'
+                r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|'
+                r'resize-face|load-stock|load-stock-block|notify|request|flash|'
+                r'request-color|request-pass|request-text|request-list|'
+                r'request-date|request-file|dbug|editor|link-relative-path|'
+                r'emailer|parse-error)$', word):
+            yield match.start(), Keyword.Namespace, word
+        elif re.match(
+                r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|'
+                r'return|exit|break)$', word):
+            yield match.start(), Name.Exception, word
+        elif re.match('REBOL$', word):
+            yield match.start(), Generic.Heading, word
+        elif re.match("to-.*", word):
+            yield match.start(), Keyword, word
+        elif re.match(r'(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$',
+                      word):
+            yield match.start(), Operator, word
+        elif re.match(r".*\?$", word):
+            yield match.start(), Keyword, word
+        elif re.match(r".*\!$", word):
+            yield match.start(), Keyword.Type, word
+        elif re.match("'.*", word):
+            yield match.start(), Name.Variable.Instance, word  # lit-word
+        elif re.match("#.*", word):
+            yield match.start(), Name.Label, word  # issue
+        elif re.match("%.*", word):
+            yield match.start(), Name.Decorator, word  # file
+        else:
+            yield match.start(), Name.Variable, word
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'#"', String.Char, 'char'),
+            (r'#\{[0-9a-f]*\}', Number.Hex),
+            (r'2#\{', Number.Hex, 'bin2'),
+            (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
+            (r'"', String, 'string'),
+            (r'\{', String, 'string2'),
+            (r';#+.*\n', Comment.Special),
+            (r';\*+.*\n', Comment.Preproc),
+            (r';.*\n', Comment),
+            (r'%"', Name.Decorator, 'stringFile'),
+            (r'%[^(^{")\s\[\]]+', Name.Decorator),
+            (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float),  # money
+            (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other),    # time
+            (r'\d+[\-/][0-9a-z]+[\-/]\d+(\/\d+\:\d+((\:\d+)?'
+             r'([.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other),    # date
+            (r'\d+(\.\d+)+\.\d+', Keyword.Constant),             # tuple
+            (r'\d+X\d+', Keyword.Constant),                      # pair
+            (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
+            (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
+            (r'[+-]?\d+(\'\d+)?', Number),
+            (r'[\[\]()]', Generic.Strong),
+            (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator),  # url
+            (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator),  # url
+            (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator),         # email
+            (r'comment\s"', Comment, 'commentString1'),
+            (r'comment\s\{', Comment, 'commentString2'),
+            (r'comment\s\[', Comment, 'commentBlock'),
+            (r'comment\s[^(\s{"\[]+', Comment),
+            (r'/[^(^{")\s/[\]]*', Name.Attribute),
+            # bare words; the lookahead keeps the delimiter out of the match
+            (r'([^(^{")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+            (r'<[\w:.-]*>', Name.Tag),
+            (r'<[^(<>\s")]+', Name.Tag, 'tag'),
+            (r'([^(^{")\s]+)', Text),
+        ],
+        'string': [
+            (r'[^(^")]+', String),
+            (escape_re, String.Escape),
+            (r'[(|)]+', String),
+            (r'\^.', String.Escape),
+            (r'"', String, '#pop'),
+        ],
+        'string2': [
+            (r'[^(^{})]+', String),
+            (escape_re, String.Escape),
+            (r'[(|)]+', String),
+            (r'\^.', String.Escape),
+            (r'\{', String, '#push'),  # braces nest in brace strings
+            (r'\}', String, '#pop'),
+        ],
+        'stringFile': [
+            (r'[^(^")]+', Name.Decorator),
+            (escape_re, Name.Decorator),
+            (r'\^.', Name.Decorator),
+            (r'"', Name.Decorator, '#pop'),
+        ],
+        'char': [
+            (escape_re + '"', String.Char, '#pop'),
+            (r'\^."', String.Char, '#pop'),
+            (r'."', String.Char, '#pop'),
+        ],
+        'tag': [
+            (escape_re, Name.Tag),
+            (r'"', Name.Tag, 'tagString'),
+            (r'[^(<>\r\n")]+', Name.Tag),
+            (r'>', Name.Tag, '#pop'),
+        ],
+        'tagString': [
+            (r'[^(^")]+', Name.Tag),
+            (escape_re, Name.Tag),
+            (r'[(|)]+', Name.Tag),
+            (r'\^.', Name.Tag),
+            (r'"', Name.Tag, '#pop'),
+        ],
+        'tuple': [
+            (r'(\d+\.)+', Keyword.Constant),
+            (r'\d+', Keyword.Constant, '#pop'),
+        ],
+        'bin2': [
+            (r'\s+', Number.Hex),
+            (r'([01]\s*){8}', Number.Hex),
+            (r'\}', Number.Hex, '#pop'),
+        ],
+        'commentString1': [
+            (r'[^(^")]+', Comment),
+            (escape_re, Comment),
+            (r'[(|)]+', Comment),
+            (r'\^.', Comment),
+            (r'"', Comment, '#pop'),
+        ],
+        'commentString2': [
+            (r'[^(^{})]+', Comment),
+            (escape_re, Comment),
+            (r'[(|)]+', Comment),
+            (r'\^.', Comment),
+            (r'\{', Comment, '#push'),
+            (r'\}', Comment, '#pop'),
+        ],
+        'commentBlock': [
+            (r'\[', Comment, '#push'),
+            (r'\]', Comment, '#pop'),
+            (r'"', Comment, "commentString1"),
+            (r'\{', Comment, "commentString2"),
+            (r'[^(\[\]"{)]+', Comment),
+        ],
+    }
+
+    def analyse_text(text):
+        """
+        Check if code contains a REBOL header, in which case it is
+        probably not R code.
+        """
+        if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE):
+            # The code starts with REBOL header
+            return 1.0
+        elif re.search(r'\s*REBOL\s*\[', text, re.IGNORECASE):
+            # The code contains REBOL header but also some text before it
+            return 0.5
+
+
+class RedLexer(RegexLexer):
+    """
+    A Red-language lexer.
+    """
+    name = 'Red'
+    aliases = ['red', 'red/system']
+    filenames = ['*.red', '*.reds']
+    mimetypes = ['text/x-red', 'text/x-red-system']
+    url = 'https://www.red-lang.org'
+    version_added = '2.0'
+
+    flags = re.IGNORECASE | re.MULTILINE
+
+    # Caret escape sequence, e.g. ^(00FF); reused by several states.
+    escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
+
+    def word_callback(lexer, match):
+        # Classify a matched bare word.  The elif chain is checked top to
+        # bottom, so the order of these categories is significant.
+        word = match.group()
+
+        if re.match(".*:$", word):
+            yield match.start(), Generic.Subheading, word  # set-word
+        elif re.match(r'(if|unless|either|any|all|while|until|loop|repeat|'
+                      r'foreach|forall|func|function|does|has|switch|'
+                      r'case|reduce|compose|get|set|print|prin|equal\?|'
+                      r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|'
+                      r'greater-or-equal\?|same\?|not|type\?|stats|'
+                      r'bind|union|replace|charset|routine)$', word):
+            yield match.start(), Name.Builtin, word
+        elif re.match(r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|'
+                      r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|'
+                      r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|'
+                      r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|'
+                      r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|'
+                      r'update|write)$', word):
+            yield match.start(), Name.Function, word
+        elif re.match(r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|'
+                      r'none|crlf|dot|null-byte)$', word):
+            yield match.start(), Name.Builtin.Pseudo, word
+        elif re.match(r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|'
+                      r'#switch|#default|#get-definition)$', word):
+            yield match.start(), Keyword.Namespace, word
+        elif re.match(r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|'
+                      r'raise-error|return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|'
+                      r'quote|forever)$', word):
+            yield match.start(), Name.Exception, word
+        elif re.match(r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|'
+                      r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|'
+                      r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|'
+                      r'any-struct\?|none\?|word\?|any-series\?)$', word):
+            yield match.start(), Keyword, word
+        elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word):
+            yield match.start(), Keyword.Namespace, word
+        elif re.match("to-.*", word):
+            yield match.start(), Keyword, word
+        elif re.match(r'(\+|-\*\*|-|\*\*|//|/|\*|and|or|xor|=\?|===|==|=|<>|<=|>=|'
+                      r'<<<|>>>|<<|>>|<|>%)$', word):
+            yield match.start(), Operator, word
+        elif re.match(r".*\!$", word):
+            yield match.start(), Keyword.Type, word
+        elif re.match("'.*", word):
+            yield match.start(), Name.Variable.Instance, word  # lit-word
+        elif re.match("#.*", word):
+            yield match.start(), Name.Label, word  # issue
+        elif re.match("%.*", word):
+            yield match.start(), Name.Decorator, word  # file
+        elif re.match(":.*", word):
+            yield match.start(), Generic.Subheading, word  # get-word
+        else:
+            yield match.start(), Name.Variable, word
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'#"', String.Char, 'char'),
+            (r'#\{[0-9a-f\s]*\}', Number.Hex),
+            (r'2#\{', Number.Hex, 'bin2'),
+            (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
+            # Red/System hex literal such as FFh, delimited by whitespace
+            # or a bracketing character (matched via lookahead).
+            (r'([0-9a-f]+)(h)((\s)|(?=[\[\]{}"()]))',
+             bygroups(Number.Hex, Name.Variable, Whitespace)),
+            (r'"', String, 'string'),
+            (r'\{', String, 'string2'),
+            (r';#+.*\n', Comment.Special),
+            (r';\*+.*\n', Comment.Preproc),
+            (r';.*\n', Comment),
+            (r'%"', Name.Decorator, 'stringFile'),
+            (r'%[^(^{")\s\[\]]+', Name.Decorator),
+            (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float),  # money
+            (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other),    # time
+            (r'\d+[\-/][0-9a-z]+[\-/]\d+(/\d+:\d+((:\d+)?'
+             r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other),   # date
+            (r'\d+(\.\d+)+\.\d+', Keyword.Constant),             # tuple
+            (r'\d+X\d+', Keyword.Constant),                      # pair
+            (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
+            (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
+            (r'[+-]?\d+(\'\d+)?', Number),
+            (r'[\[\]()]', Generic.Strong),
+            (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator),  # url
+            (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator),  # url
+            (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator),         # email
+            (r'comment\s"', Comment, 'commentString1'),
+            (r'comment\s\{', Comment, 'commentString2'),
+            (r'comment\s\[', Comment, 'commentBlock'),
+            (r'comment\s[^(\s{"\[]+', Comment),
+            (r'/[^(^{^")\s/[\]]*', Name.Attribute),
+            # bare words; the lookahead keeps the delimiter out of the match
+            (r'([^(^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+            (r'<[\w:.-]*>', Name.Tag),
+            (r'<[^(<>\s")]+', Name.Tag, 'tag'),
+            (r'([^(^{")\s]+)', Text),
+        ],
+        'string': [
+            (r'[^(^")]+', String),
+            (escape_re, String.Escape),
+            (r'[(|)]+', String),
+            (r'\^.', String.Escape),
+            (r'"', String, '#pop'),
+        ],
+        'string2': [
+            (r'[^(^{})]+', String),
+            (escape_re, String.Escape),
+            (r'[(|)]+', String),
+            (r'\^.', String.Escape),
+            (r'\{', String, '#push'),  # braces nest in brace strings
+            (r'\}', String, '#pop'),
+        ],
+        'stringFile': [
+            (r'[^(^")]+', Name.Decorator),
+            (escape_re, Name.Decorator),
+            (r'\^.', Name.Decorator),
+            (r'"', Name.Decorator, '#pop'),
+        ],
+        'char': [
+            (escape_re + '"', String.Char, '#pop'),
+            (r'\^."', String.Char, '#pop'),
+            (r'."', String.Char, '#pop'),
+        ],
+        'tag': [
+            (escape_re, Name.Tag),
+            (r'"', Name.Tag, 'tagString'),
+            (r'[^(<>\r\n")]+', Name.Tag),
+            (r'>', Name.Tag, '#pop'),
+        ],
+        'tagString': [
+            (r'[^(^")]+', Name.Tag),
+            (escape_re, Name.Tag),
+            (r'[(|)]+', Name.Tag),
+            (r'\^.', Name.Tag),
+            (r'"', Name.Tag, '#pop'),
+        ],
+        'tuple': [
+            (r'(\d+\.)+', Keyword.Constant),
+            (r'\d+', Keyword.Constant, '#pop'),
+        ],
+        'bin2': [
+            (r'\s+', Number.Hex),
+            (r'([01]\s*){8}', Number.Hex),
+            (r'\}', Number.Hex, '#pop'),
+        ],
+        'commentString1': [
+            (r'[^(^")]+', Comment),
+            (escape_re, Comment),
+            (r'[(|)]+', Comment),
+            (r'\^.', Comment),
+            (r'"', Comment, '#pop'),
+        ],
+        'commentString2': [
+            (r'[^(^{})]+', Comment),
+            (escape_re, Comment),
+            (r'[(|)]+', Comment),
+            (r'\^.', Comment),
+            (r'\{', Comment, '#push'),
+            (r'\}', Comment, '#pop'),
+        ],
+        'commentBlock': [
+            (r'\[', Comment, '#push'),
+            (r'\]', Comment, '#pop'),
+            (r'"', Comment, "commentString1"),
+            (r'\{', Comment, "commentString2"),
+            (r'[^(\[\]"{)]+', Comment),
+        ],
+    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/rego.py b/venv/lib/python3.10/site-packages/pygments/lexers/rego.py
new file mode 100644
index 0000000000000000000000000000000000000000..6f2e3e9e669bcf30a05f6e0e8deef87d79f65bd2
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/rego.py
@@ -0,0 +1,57 @@
+"""
+ pygments.lexers.rego
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Rego policy languages.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Comment, Operator, Keyword, Name, String, Number, Punctuation, Whitespace
+
+class RegoLexer(RegexLexer):
+ """
+ For Rego source.
+ """
+ name = 'Rego'
+ url = 'https://www.openpolicyagent.org/docs/latest/policy-language/'
+ filenames = ['*.rego']
+ aliases = ['rego']
+ mimetypes = ['text/x-rego']
+ version_added = '2.19'
+
+ reserved_words = (
+ 'as', 'contains', 'data', 'default', 'else', 'every', 'false',
+ 'if', 'in', 'import', 'package', 'not', 'null',
+ 'some', 'true', 'with'
+ )
+
+ builtins = (
+ # https://www.openpolicyagent.org/docs/latest/philosophy/#the-opa-document-model
+ 'data', # Global variable for accessing base and virtual documents
+ 'input', # Represents synchronously pushed base documents
+ )
+
+ tokens = {
+ 'root': [
+ (r'\n', Whitespace),
+ (r'\s+', Whitespace),
+ (r'#.*?$', Comment.Single),
+ (words(reserved_words, suffix=r'\b'), Keyword),
+ (words(builtins, suffix=r'\b'), Name.Builtin),
+ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r'`[^`]*`', String.Backtick),
+ (r'-?\d+(\.\d+)?', Number),
+ (r'(==|!=|<=|>=|:=)', Operator), # Compound operators
+ (r'[=<>+\-*/%&|]', Operator), # Single-character operators
+ (r'[\[\]{}(),.:;]', Punctuation),
+ ]
+ }
+
+__all__ = ['RegoLexer']
+
+
+
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/resource.py b/venv/lib/python3.10/site-packages/pygments/lexers/resource.py
new file mode 100644
index 0000000000000000000000000000000000000000..9593c2124c1388aa2b7e821fada19347ab395d54
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/resource.py
@@ -0,0 +1,83 @@
+"""
+ pygments.lexers.resource
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for resource definition files.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Comment, String, Number, Operator, Text, \
+ Keyword, Name
+
+__all__ = ['ResourceLexer']
+
+
+class ResourceLexer(RegexLexer):
+    """Lexer for ICU Resource bundles.
+    """
+    name = 'ResourceBundle'
+    aliases = ['resourcebundle', 'resource']
+    filenames = []
+    url = 'https://unicode-org.github.io/icu/userguide/locale/resources.html'
+    version_added = '2.0'
+
+    # Type annotations that may follow a resource key, e.g. "key:table".
+    _types = (':table', ':array', ':string', ':bin', ':import', ':intvector',
+              ':int', ':alias')
+
+    flags = re.MULTILINE | re.IGNORECASE
+    tokens = {
+        'root': [
+            (r'//.*?$', Comment),
+            (r'"', String, 'string'),
+            (r'-?\d+', Number.Integer),
+            (r'[,{}]', Operator),
+            # Resource key optionally followed by one of the _types above.
+            # The doubled '{{' is a str.format escape for a literal '{'.
+            (r'([^\s{{:]+)(\s*)({}?)'.format('|'.join(_types)),
+             bygroups(Name, Text, Keyword)),
+            (r'\s+', Text),
+            (words(_types), Keyword),
+        ],
+        'string': [
+            # Runs of ordinary characters and backslash escapes; '{' starts
+            # an embedded message-format argument.
+            (r'(\\x[0-9a-f]{2}|\\u[0-9a-f]{4}|\\U00[0-9a-f]{6}|'
+             r'\\[0-7]{1,3}|\\c.|\\[abtnvfre\'"?\\]|\\\{|[^"{\\])+', String),
+            (r'\{', String.Escape, 'msgname'),
+            (r'"', String, '#pop')
+        ],
+        'msgname': [
+            (r'([^{},]+)(\s*)', bygroups(Name, String.Escape), ('#pop', 'message'))
+        ],
+        'message': [
+            (r'\{', String.Escape, 'msgname'),
+            (r'\}', String.Escape, '#pop'),
+            # "{arg, type}" short form
+            (r'(,)(\s*)([a-z]+)(\s*\})',
+             bygroups(Operator, String.Escape, Keyword, String.Escape), '#pop'),
+            # "{arg, type, offset:n ..." plural form with explicit offset
+            (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)(offset)(\s*)(:)(\s*)(-?\d+)(\s*)',
+             bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
+                      String.Escape, Operator.Word, String.Escape, Operator,
+                      String.Escape, Number.Integer, String.Escape), 'choice'),
+            # "{arg, type, ..." selector form
+            (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)',
+             bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
+                      String.Escape), 'choice'),
+            (r'\s+', String.Escape)
+        ],
+        'choice': [
+            (r'(=|<|>|<=|>=|!=)(-?\d+)(\s*\{)',
+             bygroups(Operator, Number.Integer, String.Escape), 'message'),
+            (r'([a-z]+)(\s*\{)', bygroups(Keyword.Type, String.Escape), 'str'),
+            # closing '}' leaves both 'choice' and the enclosing 'message'
+            (r'\}', String.Escape, ('#pop', '#pop')),
+            (r'\s+', String.Escape)
+        ],
+        'str': [
+            (r'\}', String.Escape, '#pop'),
+            (r'\{', String.Escape, 'msgname'),
+            (r'[^{}]+', String)
+        ]
+    }
+
+    def analyse_text(text):
+        if text.startswith('root:table'):
+            return 1.0
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/ride.py b/venv/lib/python3.10/site-packages/pygments/lexers/ride.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d60c29cbb259a7a1d70cbf0a3ec08438f1f55b2
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/ride.py
@@ -0,0 +1,138 @@
+"""
+ pygments.lexers.ride
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Ride programming language.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, include
+from pygments.token import Comment, Keyword, Name, Number, Punctuation, \
+ String, Text
+
+__all__ = ['RideLexer']
+
+
+class RideLexer(RegexLexer):
+    """
+    For Ride source code.
+    """
+
+    name = 'Ride'
+    aliases = ['ride']
+    filenames = ['*.ride']
+    mimetypes = ['text/x-ride']
+    url = 'https://docs.waves.tech/en/ride'
+    version_added = '2.6'
+
+    # Identifier: letters/digits/underscores, primes allowed after the head.
+    validName = r'[a-zA-Z_][a-zA-Z0-9_\']*'
+
+    builtinOps = (
+        '||', '|', '>=', '>', '==', '!',
+        '=', '<=', '<', '::', ':+', ':', '!=', '/',
+        '.', '=>', '-', '+', '*', '&&', '%', '++',
+    )
+
+    globalVariablesName = (
+        'NOALG', 'MD5', 'SHA1', 'SHA224', 'SHA256', 'SHA384', 'SHA512',
+        'SHA3224', 'SHA3256', 'SHA3384', 'SHA3512', 'nil', 'this', 'unit',
+        'height', 'lastBlock', 'Buy', 'Sell', 'CEILING', 'FLOOR', 'DOWN',
+        'HALFDOWN', 'HALFEVEN', 'HALFUP', 'UP',
+    )
+
+    typesName = (
+        'Unit', 'Int', 'Boolean', 'ByteVector', 'String', 'Address', 'Alias',
+        'Transfer', 'AssetPair', 'DataEntry', 'Order', 'Transaction',
+        'GenesisTransaction', 'PaymentTransaction', 'ReissueTransaction',
+        'BurnTransaction', 'MassTransferTransaction', 'ExchangeTransaction',
+        'TransferTransaction', 'SetAssetScriptTransaction',
+        'InvokeScriptTransaction', 'IssueTransaction', 'LeaseTransaction',
+        'LeaseCancelTransaction', 'CreateAliasTransaction',
+        'SetScriptTransaction', 'SponsorFeeTransaction', 'DataTransaction',
+        'WriteSet', 'AttachedPayment', 'ScriptTransfer', 'TransferSet',
+        'ScriptResult', 'Invocation', 'Asset', 'BlockInfo', 'Issue', 'Reissue',
+        'Burn', 'NoAlg', 'Md5', 'Sha1', 'Sha224', 'Sha256', 'Sha384', 'Sha512',
+        'Sha3224', 'Sha3256', 'Sha3384', 'Sha3512', 'BinaryEntry',
+        'BooleanEntry', 'IntegerEntry', 'StringEntry', 'List', 'Ceiling',
+        'Down', 'Floor', 'HalfDown', 'HalfEven', 'HalfUp', 'Up',
+    )
+
+    functionsName = (
+        'fraction', 'size', 'toBytes', 'take', 'drop', 'takeRight', 'dropRight',
+        'toString', 'isDefined', 'extract', 'throw', 'getElement', 'value',
+        'cons', 'toUtf8String', 'toInt', 'indexOf', 'lastIndexOf', 'split',
+        'parseInt', 'parseIntValue', 'keccak256', 'blake2b256', 'sha256',
+        'sigVerify', 'toBase58String', 'fromBase58String', 'toBase64String',
+        'fromBase64String', 'transactionById', 'transactionHeightById',
+        'getInteger', 'getBoolean', 'getBinary', 'getString',
+        'addressFromPublicKey', 'addressFromString', 'addressFromRecipient',
+        'assetBalance', 'wavesBalance', 'getIntegerValue', 'getBooleanValue',
+        'getBinaryValue', 'getStringValue', 'addressFromStringValue',
+        'assetInfo', 'rsaVerify', 'checkMerkleProof', 'median',
+        'valueOrElse', 'valueOrErrorMessage', 'contains', 'log', 'pow',
+        'toBase16String', 'fromBase16String', 'blockInfoByHeight',
+        'transferTransactionById',
+    )
+
+    reservedWords = words((
+        'match', 'case', 'else', 'func', 'if',
+        'let', 'then', '@Callable', '@Verifier',
+    ), suffix=r'\b')
+
+    tokens = {
+        'root': [
+            # Comments
+            (r'#.*', Comment.Single),
+            # Whitespace
+            (r'\s+', Text),
+            # Strings
+            (r'"', String, 'doublequote'),
+            (r'utf8\'', String, 'utf8quote'),
+            (r'base(58|64|16)\'', String, 'singlequote'),
+            # Keywords
+            (reservedWords, Keyword.Reserved),
+            (r'\{-#.*?#-\}', Keyword.Reserved),
+            (r'FOLD<\d+>', Keyword.Reserved),
+            # Types
+            (words(typesName), Keyword.Type),
+            # Main
+            # (specialName, Keyword.Reserved),
+            # Prefix Operators
+            (words(builtinOps, prefix=r'\(', suffix=r'\)'), Name.Function),
+            # Infix Operators
+            (words(builtinOps), Name.Function),
+            (words(globalVariablesName), Name.Function),
+            (words(functionsName), Name.Function),
+            # Numbers
+            include('numbers'),
+            # Variable Names
+            (validName, Name.Variable),
+            # Parens
+            (r'[,()\[\]{}]', Punctuation),
+        ],
+
+        'doublequote': [
+            (r'\\u[0-9a-fA-F]{4}', String.Escape),
+            (r'\\[nrfvb\\"]', String.Escape),
+            (r'[^"]', String),
+            (r'"', String, '#pop'),
+        ],
+
+        'utf8quote': [
+            (r'\\u[0-9a-fA-F]{4}', String.Escape),
+            (r'\\[nrfvb\\\']', String.Escape),
+            (r'[^\']', String),
+            (r'\'', String, '#pop'),
+        ],
+
+        # base58'/base64'/base16' literals: no escapes, runs to closing quote
+        'singlequote': [
+            (r'[^\']', String),
+            (r'\'', String, '#pop'),
+        ],
+
+        'numbers': [
+            (r'_?\d+', Number.Integer),
+        ],
+    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/rita.py b/venv/lib/python3.10/site-packages/pygments/lexers/rita.py
new file mode 100644
index 0000000000000000000000000000000000000000..536aafffd2e88c9a5a4b6b4ff9d6cd42872a2162
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/rita.py
@@ -0,0 +1,42 @@
+"""
+ pygments.lexers.rita
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for RITA language
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Comment, Operator, Keyword, Name, Literal, \
+ Punctuation, Whitespace
+
+__all__ = ['RitaLexer']
+
+
+class RitaLexer(RegexLexer):
+    """
+    Lexer for RITA.
+    """
+    name = 'Rita'
+    url = 'https://github.com/zaibacu/rita-dsl'
+    filenames = ['*.rita']
+    aliases = ['rita']
+    mimetypes = ['text/rita']
+    version_added = '2.11'
+
+    tokens = {
+        'root': [
+            (r'\n', Whitespace),
+            (r'\s+', Whitespace),
+            # Line comments
+            (r'#(.*?)\n', Comment.Single),
+            (r'@(.*?)\n', Operator),  # Yes, whole line as an operator
+            # Double- and single-quoted literals (with escaped quotes)
+            (r'"(\w|\d|\s|(\\")|[\'_\-./,\?\!])+?"', Literal),
+            (r'\'(\w|\d|\s|(\\\')|["_\-./,\?\!])+?\'', Literal),
+            # Upper-case words are keywords, lower-case ones are names
+            (r'([A-Z_]+)', Keyword),
+            (r'([a-z0-9_]+)', Name),
+            (r'((->)|[!?+*|=])', Operator),
+            (r'[\(\),\{\}]', Punctuation)
+        ]
+    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/rnc.py b/venv/lib/python3.10/site-packages/pygments/lexers/rnc.py
new file mode 100644
index 0000000000000000000000000000000000000000..b7a06bb918390f59753808c2f4f99009da5c51b1
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/rnc.py
@@ -0,0 +1,66 @@
+"""
+ pygments.lexers.rnc
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Relax-NG Compact syntax
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Punctuation
+
+__all__ = ['RNCCompactLexer']
+
+
+class RNCCompactLexer(RegexLexer):
+    """
+    For RelaxNG-compact syntax.
+    """
+
+    name = 'Relax-NG Compact'
+    url = 'http://relaxng.org'
+    aliases = ['rng-compact', 'rnc']
+    filenames = ['*.rnc']
+    version_added = '2.2'
+
+    tokens = {
+        'root': [
+            (r'namespace\b', Keyword.Namespace),
+            (r'(?:default|datatypes)\b', Keyword.Declaration),
+            # '##' documentation comments before plain '#' comments
+            (r'##.*$', Comment.Preproc),
+            (r'#.*$', Comment.Single),
+            (r'"[^"]*"', String.Double),
+            # TODO single quoted strings and escape sequences outside of
+            # double-quoted strings
+            (r'(?:element|attribute|mixed)\b', Keyword.Declaration, 'variable'),
+            (r'(text\b|xsd:[^ ]+)', Keyword.Type, 'maybe_xsdattributes'),
+            (r'[,?&*=|~]|>>', Operator),
+            (r'[(){}]', Punctuation),
+            (r'.', Text),
+        ],
+
+        # a variable has been declared using `element` or `attribute`
+        'variable': [
+            (r'[^{]+', Name.Variable),
+            (r'\{', Punctuation, '#pop'),
+        ],
+
+        # after an xsd: declaration there may be attributes
+        'maybe_xsdattributes': [
+            (r'\{', Punctuation, 'xsdattributes'),
+            (r'\}', Punctuation, '#pop'),
+            (r'.', Text),
+        ],
+
+        # attributes take the form { key1 = value1 key2 = value2 ... }
+        'xsdattributes': [
+            (r'[^ =}]', Name.Attribute),
+            (r'=', Operator),
+            (r'"[^"]*"', String.Double),
+            (r'\}', Punctuation, '#pop'),
+            (r'.', Text),
+        ],
+    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/roboconf.py b/venv/lib/python3.10/site-packages/pygments/lexers/roboconf.py
new file mode 100644
index 0000000000000000000000000000000000000000..31adba9f48f99a4f9c38941e3c0d73714051a994
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/roboconf.py
@@ -0,0 +1,81 @@
+"""
+ pygments.lexers.roboconf
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Roboconf DSL.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, re
+from pygments.token import Text, Operator, Keyword, Name, Comment
+
+__all__ = ['RoboconfGraphLexer', 'RoboconfInstancesLexer']
+
+
+class RoboconfGraphLexer(RegexLexer):
+    """
+    Lexer for Roboconf graph files.
+    """
+    name = 'Roboconf Graph'
+    aliases = ['roboconf-graph']
+    filenames = ['*.graph']
+    url = 'https://roboconf.github.io/en/user-guide/graph-definition.html'
+    version_added = '2.1'
+
+    flags = re.IGNORECASE | re.MULTILINE
+    tokens = {
+        'root': [
+            # Skip white spaces
+            (r'\s+', Text),
+
+            # There is one operator
+            (r'=', Operator),
+
+            # Keywords
+            (words(('facet', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
+            # Property names, optionally followed by a colon
+            (words((
+                'installer', 'extends', 'exports', 'imports', 'facets',
+                'children'), suffix=r'\s*:?', prefix=r'\b'), Name),
+
+            # Comments
+            (r'#.*\n', Comment),
+
+            # Default
+            (r'[^#]', Text),
+            (r'.*\n', Text)
+        ]
+    }
+
+
+class RoboconfInstancesLexer(RegexLexer):
+ """
+ Lexer for Roboconf instances files.
+ """
+ name = 'Roboconf Instances'
+ aliases = ['roboconf-instances']
+ filenames = ['*.instances']
+ url = 'https://roboconf.github.io'
+ version_added = '2.1'
+
+ flags = re.IGNORECASE | re.MULTILINE
+ tokens = {
+ 'root': [
+
+ # Skip white spaces
+ (r'\s+', Text),
+
+ # Keywords
+ (words(('instance of', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
+ (words(('name', 'count'), suffix=r's*:?', prefix=r'\b'), Name),
+ (r'\s*[\w.-]+\s*:', Name),
+
+ # Comments
+ (r'#.*\n', Comment),
+
+ # Default
+ (r'[^#]', Text),
+ (r'.*\n', Text)
+ ]
+ }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/robotframework.py b/venv/lib/python3.10/site-packages/pygments/lexers/robotframework.py
new file mode 100644
index 0000000000000000000000000000000000000000..f92d567503fbb371a224d531f504c59fbf77e1a8
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/robotframework.py
@@ -0,0 +1,551 @@
+"""
+ pygments.lexers.robotframework
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Robot Framework.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+# Copyright 2012 Nokia Siemens Networks Oyj
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+
+from pygments.lexer import Lexer
+from pygments.token import Token
+
+__all__ = ['RobotFrameworkLexer']
+
+
# Shortcuts mapping Robot Framework concepts to Pygments token types.
HEADING = Token.Generic.Heading        # '*** Table ***' headers
SETTING = Token.Keyword.Namespace      # setting names (Library, Setup, ...)
IMPORT = Token.Name.Namespace          # imported library/resource names
TC_KW_NAME = Token.Generic.Subheading  # test case / keyword names
KEYWORD = Token.Name.Function          # keyword calls
ARGUMENT = Token.String                # keyword arguments
VARIABLE = Token.Name.Variable         # ${...}/@{...}/&{...} variables
COMMENT = Token.Comment
SEPARATOR = Token.Punctuation          # cell separators (spaces/pipes)
SYNTAX = Token.Punctuation             # syntax markers ('...', '[', '=')
GHERKIN = Token.Generic.Emph           # Given/When/Then/And/But prefixes
ERROR = Token.Error
+
+
def normalize(string, remove=''):
    """Lower-case *string* and drop spaces plus any characters in *remove*."""
    result = string.lower()
    for unwanted in remove + ' ':
        result = result.replace(unwanted, '')
    return result
+
+
class RobotFrameworkLexer(Lexer):
    """
    For Robot Framework test data.

    Supports both space and pipe separated plain text formats.
    """
    name = 'RobotFramework'
    url = 'http://robotframework.org'
    aliases = ['robotframework']
    filenames = ['*.robot', '*.resource']
    mimetypes = ['text/x-robotframework']
    version_added = '1.6'

    def __init__(self, **options):
        # Robot Framework data uses two-space separators and UTF-8 encoding;
        # force these regardless of user-supplied options.
        options['tabsize'] = 2
        options['encoding'] = 'UTF-8'
        Lexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        """Yield ``(index, token, value)`` triples for *text*.

        Each line is split into cells/separators by RowTokenizer, then every
        resulting value is scanned for embedded variables by
        VariableTokenizer.
        """
        row_tokenizer = RowTokenizer()
        var_tokenizer = VariableTokenizer()
        index = 0
        for row in text.splitlines():
            for value, token in row_tokenizer.tokenize(row):
                for value, token in var_tokenizer.tokenize(value, token):
                    if value:
                        yield index, token, str(value)
                    # Advance offset even for empty values to stay in sync.
                    index += len(value)
+
+
class VariableTokenizer:
    """Highlights ``${...}``/``@{...}``/``%{...}``/``&{...}`` variables
    embedded in a value, leaving the surrounding text with its original
    token."""

    def tokenize(self, string, token):
        var = VariableSplitter(string, identifiers='$@%&')
        # No variable found, or inside comments/errors: pass through as-is.
        if var.start < 0 or token in (COMMENT, ERROR):
            yield string, token
            return
        for value, token in self._tokenize(var, string, token):
            if value:
                yield value, token

    def _tokenize(self, var, string, orig_token):
        # Text before the variable keeps the original token.
        before = string[:var.start]
        yield before, orig_token
        yield var.identifier + '{', SYNTAX
        # The base may itself contain variables, so recurse.
        yield from self.tokenize(var.base, VARIABLE)
        yield '}', SYNTAX
        if var.index is not None:
            # List/dict subscript, e.g. @{var}[2].
            yield '[', SYNTAX
            yield from self.tokenize(var.index, VARIABLE)
            yield ']', SYNTAX
        # Continue with whatever follows the variable.
        yield from self.tokenize(string[var.end:], orig_token)
+
+
class RowTokenizer:
    """Tokenizes one row of test data, delegating cells to the table that
    the most recent ``*** Header ***`` selected."""

    def __init__(self):
        self._table = UnknownTable()
        self._splitter = RowSplitter()
        testcases = TestCaseTable()
        # The Settings table forwards 'Test Template' values to the test
        # case table so templated tests are tokenized as plain arguments.
        settings = SettingTable(testcases.set_default_template)
        variables = VariableTable()
        keywords = KeywordTable()
        # Normalized header names -> table handlers (singular and plural).
        self._tables = {'settings': settings, 'setting': settings,
                        'metadata': settings,
                        'variables': variables, 'variable': variables,
                        'testcases': testcases, 'testcase': testcases,
                        'tasks': testcases, 'task': testcases,
                        'keywords': keywords, 'keyword': keywords,
                        'userkeywords': keywords, 'userkeyword': keywords}

    def tokenize(self, row):
        commented = False
        heading = False
        for index, value in enumerate(self._splitter.split(row)):
            # First value, and every second after that, is a separator.
            index, separator = divmod(index-1, 2)
            if value.startswith('#'):
                # '#' comments out the rest of the row.
                commented = True
            elif index == 0 and value.startswith('*'):
                # '*...' in the first cell starts a new table.
                self._table = self._start_table(value)
                heading = True
            yield from self._tokenize(value, index, commented,
                                      separator, heading)
        self._table.end_row()

    def _start_table(self, header):
        name = normalize(header, remove='*')
        return self._tables.get(name, UnknownTable())

    def _tokenize(self, value, index, commented, separator, heading):
        if commented:
            yield value, COMMENT
        elif separator:
            yield value, SEPARATOR
        elif heading:
            yield value, HEADING
        else:
            yield from self._table.tokenize(value, index)
+
+
class RowSplitter:
    """Splits a data row into an alternating cell/separator stream.

    A pseudo-separator is emitted first and a newline last, so consumers can
    rely on odd positions always being separators.
    """
    _space_splitter = re.compile('( {2,})')
    _pipe_splitter = re.compile(r'((?:^| +)\|(?: +|$))')

    def split(self, row):
        if row.startswith('| '):
            yield from self._split_from_pipes(row)
        else:
            yield from self._split_from_spaces(row)
        yield '\n'

    def _split_from_spaces(self, row):
        # Pseudo separator keeps the stream shape identical to pipe rows.
        yield ''
        yield from self._space_splitter.split(row)

    def _split_from_pipes(self, row):
        _, sep, remainder = self._pipe_splitter.split(row, 1)
        yield sep
        while True:
            parts = self._pipe_splitter.split(remainder, 1)
            if len(parts) == 1:
                break
            cell, sep, remainder = parts
            yield cell
            yield sep
        yield remainder
+
+
class Tokenizer:
    """Base class assigning tokens to cells by their position in a row."""
    # Per-position token types; set by subclasses.
    _tokens = None

    def __init__(self):
        self._index = 0

    def tokenize(self, value):
        values_and_tokens = self._tokenize(value, self._index)
        self._index += 1
        # _tokenize may return either a bare token type or an already split
        # list of (value, token) pairs.
        if isinstance(values_and_tokens, type(Token)):
            values_and_tokens = [(value, values_and_tokens)]
        return values_and_tokens

    def _tokenize(self, value, index):
        # Cells beyond the configured positions reuse the last token.
        index = min(index, len(self._tokens) - 1)
        return self._tokens[index]

    def _is_assign(self, value):
        # True if the whole cell is a variable assignment like '${var} ='.
        if value.endswith('='):
            value = value[:-1].strip()
        var = VariableSplitter(value, identifiers='$@&')
        return var.start == 0 and var.end == len(value)
+
+
class Comment(Tokenizer):
    """Everything before a recognized table header is a comment."""
    _tokens = (COMMENT,)
+
+
class Setting(Tokenizer):
    """Tokenizes a row of the Settings table: setting name, then values."""
    _tokens = (SETTING, ARGUMENT)
    # Settings whose value is a keyword call (name + arguments).
    _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown',
                         'suitepostcondition', 'testsetup', 'tasksetup', 'testprecondition',
                         'testteardown','taskteardown', 'testpostcondition', 'testtemplate', 'tasktemplate')
    # Settings that import libraries/resources/variable files.
    _import_settings = ('library', 'resource', 'variables')
    # Settings with plain-text values.
    _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags',
                       'testtimeout','tasktimeout')
    # Tokenizer delegated to for values after the setting name, if any.
    _custom_tokenizer = None

    def __init__(self, template_setter=None):
        Tokenizer.__init__(self)
        self._template_setter = template_setter

    def _tokenize(self, value, index):
        if index == 1 and self._template_setter:
            # Forward the template name (used by 'Test Template').
            self._template_setter(value)
        if index == 0:
            normalized = normalize(value)
            if normalized in self._keyword_settings:
                self._custom_tokenizer = KeywordCall(support_assign=False)
            elif normalized in self._import_settings:
                self._custom_tokenizer = ImportSetting()
            elif normalized not in self._other_settings:
                # Unrecognized setting name.
                return ERROR
        elif self._custom_tokenizer:
            return self._custom_tokenizer.tokenize(value)
        return Tokenizer._tokenize(self, value, index)
+
+
class ImportSetting(Tokenizer):
    """Tokenizes Library/Resource/Variables import values."""
    _tokens = (IMPORT, ARGUMENT)
+
+
class TestCaseSetting(Setting):
    """Tokenizes bracketed '[Setting]' rows inside a test case."""
    _keyword_settings = ('setup', 'precondition', 'teardown', 'postcondition',
                         'template')
    _import_settings = ()
    _other_settings = ('documentation', 'tags', 'timeout')

    def _tokenize(self, value, index):
        if index == 0:
            # Validate the name without its surrounding brackets, then emit
            # the brackets as separate syntax tokens.
            type = Setting._tokenize(self, value[1:-1], index)
            return [('[', SYNTAX), (value[1:-1], type), (']', SYNTAX)]
        return Setting._tokenize(self, value, index)
+
+
class KeywordSetting(TestCaseSetting):
    """Tokenizes '[Setting]' rows inside a user keyword."""
    _keyword_settings = ('teardown',)
    _other_settings = ('documentation', 'arguments', 'return', 'timeout', 'tags')
+
+
class Variable(Tokenizer):
    """Tokenizes rows of the Variables table."""
    _tokens = (SYNTAX, ARGUMENT)

    def _tokenize(self, value, index):
        # The first cell must be a variable being assigned.
        if index == 0 and not self._is_assign(value):
            return ERROR
        return Tokenizer._tokenize(self, value, index)
+
+
class KeywordCall(Tokenizer):
    """Tokenizes a keyword call: optional assignments, keyword, arguments."""
    _tokens = (KEYWORD, ARGUMENT)

    def __init__(self, support_assign=True):
        Tokenizer.__init__(self)
        # When assignment isn't supported, the first cell is the keyword.
        self._keyword_found = not support_assign
        self._assigns = 0

    def _tokenize(self, value, index):
        if not self._keyword_found and self._is_assign(value):
            self._assigns += 1
            return SYNTAX  # VariableTokenizer tokenizes this later.
        if self._keyword_found:
            # Shift the index so arguments start at position 1 regardless of
            # how many assignment cells preceded the keyword.
            return Tokenizer._tokenize(self, value, index - self._assigns)
        self._keyword_found = True
        return GherkinTokenizer().tokenize(value, KEYWORD)
+
+
class GherkinTokenizer:
    """Splits an optional Gherkin prefix (Given/When/Then/And/But) off a
    keyword name so it can be highlighted separately."""
    _gherkin_prefix = re.compile('^(Given|When|Then|And|But) ', re.IGNORECASE)

    def tokenize(self, value, token):
        match = self._gherkin_prefix.match(value)
        if match:
            split_at = match.end()
            return [(value[:split_at], GHERKIN), (value[split_at:], token)]
        return [(value, token)]
+
+
class TemplatedKeywordCall(Tokenizer):
    """With a template in effect, every cell is a plain argument."""
    _tokens = (ARGUMENT,)
+
+
class ForLoop(Tokenizer):
    """Tokenizes an old-style ':FOR' loop row."""

    def __init__(self):
        Tokenizer.__init__(self)
        self._in_arguments = False

    def _tokenize(self, value, index):
        token = self._in_arguments and ARGUMENT or SYNTAX
        # Cells before 'IN'/'IN RANGE' are loop variables (syntax); cells
        # after are the iterated values (arguments).
        if value.upper() in ('IN', 'IN RANGE'):
            self._in_arguments = True
        return token
+
+
class _Table:
    """Base class for table handlers; tracks row state and '...' rows that
    continue the previous row."""
    _tokenizer_class = None

    def __init__(self, prev_tokenizer=None):
        self._tokenizer = self._tokenizer_class()
        self._prev_tokenizer = prev_tokenizer
        self._prev_values_on_row = []

    def tokenize(self, value, index):
        if self._continues(value, index):
            # '...' continues the previous row: reuse its tokenizer so
            # positional token assignment carries over.
            self._tokenizer = self._prev_tokenizer
            yield value, SYNTAX
        else:
            yield from self._tokenize(value, index)
        self._prev_values_on_row.append(value)

    def _continues(self, value, index):
        # Only a '...' preceded by nothing but empty cells is a continuation.
        return value == '...' and all(self._is_empty(t)
                                      for t in self._prev_values_on_row)

    def _is_empty(self, value):
        return value in ('', '\\')

    def _tokenize(self, value, index):
        return self._tokenizer.tokenize(value)

    def end_row(self):
        # Re-initialize for the next row, but remember the current tokenizer
        # so a following '...' row can continue with it.
        self.__init__(prev_tokenizer=self._tokenizer)
+
+
class UnknownTable(_Table):
    """Fallback before/without a recognized header; rows are comments."""
    _tokenizer_class = Comment

    def _continues(self, value, index):
        return False
+
+
class VariableTable(_Table):
    """Handler for the '*** Variables ***' table."""
    _tokenizer_class = Variable
+
+
class SettingTable(_Table):
    """Handler for the '*** Settings ***' table."""
    _tokenizer_class = Setting

    def __init__(self, template_setter, prev_tokenizer=None):
        _Table.__init__(self, prev_tokenizer)
        # Callback informing TestCaseTable of a 'Test Template' value.
        self._template_setter = template_setter

    def _tokenize(self, value, index):
        if index == 0 and normalize(value) == 'testtemplate':
            self._tokenizer = Setting(self._template_setter)
        return _Table._tokenize(self, value, index)

    def end_row(self):
        self.__init__(self._template_setter, prev_tokenizer=self._tokenizer)
+
+
class TestCaseTable(_Table):
    """Handler for the '*** Test Cases ***' (and Tasks) table."""
    _setting_class = TestCaseSetting
    # None: not set for current test; False: explicitly overridden off;
    # truthy: a template is in use.
    _test_template = None
    _default_template = None

    @property
    def _tokenizer_class(self):
        # With a template in effect every cell is a plain argument.
        if self._test_template or (self._default_template and
                                   self._test_template is not False):
            return TemplatedKeywordCall
        return KeywordCall

    def _continues(self, value, index):
        # The first cell holds the test name, so it can never be '...'.
        return index > 0 and _Table._continues(self, value, index)

    def _tokenize(self, value, index):
        if index == 0:
            if value:
                # A new test starts; forget the per-test template.
                self._test_template = None
            return GherkinTokenizer().tokenize(value, TC_KW_NAME)
        if index == 1 and self._is_setting(value):
            if self._is_template(value):
                self._test_template = False
                self._tokenizer = self._setting_class(self.set_test_template)
            else:
                self._tokenizer = self._setting_class()
        if index == 1 and self._is_for_loop(value):
            self._tokenizer = ForLoop()
        if index == 1 and self._is_empty(value):
            return [(value, SYNTAX)]
        return _Table._tokenize(self, value, index)

    def _is_setting(self, value):
        return value.startswith('[') and value.endswith(']')

    def _is_template(self, value):
        return normalize(value) == '[template]'

    def _is_for_loop(self, value):
        return value.startswith(':') and normalize(value, remove=':') == 'for'

    def set_test_template(self, template):
        self._test_template = self._is_template_set(template)

    def set_default_template(self, template):
        self._default_template = self._is_template_set(template)

    def _is_template_set(self, template):
        # 'NONE'/'${EMPTY}'/'\\'/'' all mean "no template".
        return normalize(template) not in ('', '\\', 'none', '${empty}')
+
+
class KeywordTable(TestCaseTable):
    """Handler for the '*** Keywords ***' table; like test cases but
    without template support."""
    _tokenizer_class = KeywordCall
    _setting_class = KeywordSetting

    def _is_template(self, value):
        return False
+
+
+# Following code copied directly from Robot Framework 2.7.5.
+
class VariableSplitter:
    """Locates the first ``${...}``-style variable in a string.

    After construction ``start``/``end`` give the variable's span in the
    original string (-1 when none was found), ``identifier`` its sigil
    character, ``base`` the text inside the braces, and ``index`` a possible
    list/dict subscript.  Copied from Robot Framework 2.7.5 — keep in sync
    rather than restructuring.
    """

    def __init__(self, string, identifiers):
        self.identifier = None
        self.base = None
        self.index = None
        self.start = -1
        self.end = -1
        self._identifiers = identifiers
        self._may_have_internal_variables = False
        try:
            self._split(string)
        except ValueError:
            # No variable present; start/end remain -1.
            pass
        else:
            self._finalize()

    def get_replaced_base(self, variables):
        if self._may_have_internal_variables:
            return variables.replace_string(self.base)
        return self.base

    def _finalize(self):
        self.identifier = self._variable_chars[0]
        self.base = ''.join(self._variable_chars[2:-1])
        self.end = self.start + len(self._variable_chars)
        if self._has_list_or_dict_variable_index():
            self.index = ''.join(self._list_and_dict_variable_index_chars[1:-1])
            self.end += len(self._list_and_dict_variable_index_chars)

    def _has_list_or_dict_variable_index(self):
        return self._list_and_dict_variable_index_chars\
            and self._list_and_dict_variable_index_chars[-1] == ']'

    def _split(self, string):
        start_index, max_index = self._find_variable(string)
        self.start = start_index
        self._open_curly = 1
        # Character-by-character state machine; each state is a method.
        self._state = self._variable_state
        self._variable_chars = [string[start_index], '{']
        self._list_and_dict_variable_index_chars = []
        self._string = string
        start_index += 2
        for index, char in enumerate(string[start_index:]):
            index += start_index  # Giving start to enumerate only in Py 2.6+
            try:
                self._state(char, index)
            except StopIteration:
                # States raise StopIteration when the variable is complete.
                return
            if index == max_index and not self._scanning_list_variable_index():
                return

    def _scanning_list_variable_index(self):
        return self._state in [self._waiting_list_variable_index_state,
                               self._list_variable_index_state]

    def _find_variable(self, string):
        max_end_index = string.rfind('}')
        if max_end_index == -1:
            raise ValueError('No variable end found')
        if self._is_escaped(string, max_end_index):
            return self._find_variable(string[:max_end_index])
        start_index = self._find_start_index(string, 1, max_end_index)
        if start_index == -1:
            raise ValueError('No variable start found')
        return start_index, max_end_index

    def _find_start_index(self, string, start, end):
        index = string.find('{', start, end) - 1
        if index < 0:
            return -1
        if self._start_index_is_ok(string, index):
            return index
        return self._find_start_index(string, index+2, end)

    def _start_index_is_ok(self, string, index):
        return string[index] in self._identifiers\
            and not self._is_escaped(string, index)

    def _is_escaped(self, string, index):
        # An odd number of preceding backslashes means the char is escaped.
        escaped = False
        while index > 0 and string[index-1] == '\\':
            index -= 1
            escaped = not escaped
        return escaped

    def _variable_state(self, char, index):
        self._variable_chars.append(char)
        if char == '}' and not self._is_escaped(self._string, index):
            self._open_curly -= 1
            if self._open_curly == 0:
                if not self._is_list_or_dict_variable():
                    raise StopIteration
                # @/& variables may be followed by an '[index]' part.
                self._state = self._waiting_list_variable_index_state
        elif char in self._identifiers:
            self._state = self._internal_variable_start_state

    def _is_list_or_dict_variable(self):
        return self._variable_chars[0] in ('@','&')

    def _internal_variable_start_state(self, char, index):
        self._state = self._variable_state
        if char == '{':
            # Nested variable, e.g. ${outer${inner}}.
            self._variable_chars.append(char)
            self._open_curly += 1
            self._may_have_internal_variables = True
        else:
            self._variable_state(char, index)

    def _waiting_list_variable_index_state(self, char, index):
        if char != '[':
            raise StopIteration
        self._list_and_dict_variable_index_chars.append(char)
        self._state = self._list_variable_index_state

    def _list_variable_index_state(self, char, index):
        self._list_and_dict_variable_index_chars.append(char)
        if char == ']':
            raise StopIteration
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/ruby.py b/venv/lib/python3.10/site-packages/pygments/lexers/ruby.py
new file mode 100644
index 0000000000000000000000000000000000000000..72aaeb5fec08c7c14e3f827d9c297c485a087e67
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/ruby.py
@@ -0,0 +1,518 @@
+"""
+ pygments.lexers.ruby
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Ruby and related languages.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, include, \
+ bygroups, default, LexerContext, do_insertions, words, line_re
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Generic, Whitespace
+from pygments.util import shebang_matches
+
+__all__ = ['RubyLexer', 'RubyConsoleLexer', 'FancyLexer']
+
+
# Operator methods Ruby allows to be defined/overridden; used both for
# symbol literals (e.g. ':+') and for names after '.'/'::'.
RUBY_OPERATORS = (
    '*', '**', '-', '+', '-@', '+@', '/', '%', '&', '|', '^', '`', '~',
    '[]', '[]=', '<<', '>>', '<', '<>', '<=>', '>', '>=', '==', '==='
)
+
+
+class RubyLexer(ExtendedRegexLexer):
+ """
+ For Ruby source code.
+ """
+
+ name = 'Ruby'
+ url = 'http://www.ruby-lang.org'
+ aliases = ['ruby', 'rb', 'duby']
+ filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
+ '*.rbx', '*.duby', 'Gemfile', 'Vagrantfile']
+ mimetypes = ['text/x-ruby', 'application/x-ruby']
+ version_added = ''
+
+ flags = re.DOTALL | re.MULTILINE
+
+ def heredoc_callback(self, match, ctx):
+ # okay, this is the hardest part of parsing Ruby...
+ # match: 1 = <<[-~]?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
+
+ start = match.start(1)
+ yield start, Operator, match.group(1) # <<[-~]?
+ yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
+ yield match.start(3), String.Delimiter, match.group(3) # heredoc name
+ yield match.start(4), String.Heredoc, match.group(4) # quote again
+
+ heredocstack = ctx.__dict__.setdefault('heredocstack', [])
+ outermost = not bool(heredocstack)
+ heredocstack.append((match.group(1) in ('<<-', '<<~'), match.group(3)))
+
+ ctx.pos = match.start(5)
+ ctx.end = match.end(5)
+ # this may find other heredocs, so limit the recursion depth
+ if len(heredocstack) < 100:
+ yield from self.get_tokens_unprocessed(context=ctx)
+ else:
+ yield ctx.pos, String.Heredoc, match.group(5)
+ ctx.pos = match.end()
+
+ if outermost:
+ # this is the outer heredoc again, now we can process them all
+ for tolerant, hdname in heredocstack:
+ lines = []
+ for match in line_re.finditer(ctx.text, ctx.pos):
+ if tolerant:
+ check = match.group().strip()
+ else:
+ check = match.group().rstrip()
+ if check == hdname:
+ for amatch in lines:
+ yield amatch.start(), String.Heredoc, amatch.group()
+ yield match.start(), String.Delimiter, match.group()
+ ctx.pos = match.end()
+ break
+ else:
+ lines.append(match)
+ else:
+ # end of heredoc not found -- error!
+ for amatch in lines:
+ yield amatch.start(), Error, amatch.group()
+ ctx.end = len(ctx.text)
+ del heredocstack[:]
+
+ def gen_rubystrings_rules():
+ def intp_regex_callback(self, match, ctx):
+ yield match.start(1), String.Regex, match.group(1) # begin
+ nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
+ for i, t, v in self.get_tokens_unprocessed(context=nctx):
+ yield match.start(3)+i, t, v
+ yield match.start(4), String.Regex, match.group(4) # end[mixounse]*
+ ctx.pos = match.end()
+
+ def intp_string_callback(self, match, ctx):
+ yield match.start(1), String.Other, match.group(1)
+ nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
+ for i, t, v in self.get_tokens_unprocessed(context=nctx):
+ yield match.start(3)+i, t, v
+ yield match.start(4), String.Other, match.group(4) # end
+ ctx.pos = match.end()
+
+ states = {}
+ states['strings'] = [
+ # easy ones
+ (r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
+ (words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
+ (r":'(\\\\|\\[^\\]|[^'\\])*'", String.Symbol),
+ (r':"', String.Symbol, 'simple-sym'),
+ (r'([a-zA-Z_]\w*)(:)(?!:)',
+ bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9
+ (r'"', String.Double, 'simple-string-double'),
+ (r"'", String.Single, 'simple-string-single'),
+ (r'(?', '<>', 'ab'):
+ states[name+'-intp-string'] = [
+ (r'\\[\\' + bracecc + ']', String.Other),
+ (lbrace, String.Other, '#push'),
+ (rbrace, String.Other, '#pop'),
+ include('string-intp-escaped'),
+ (r'[\\#' + bracecc + ']', String.Other),
+ (r'[^\\#' + bracecc + ']+', String.Other),
+ ]
+ states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
+ name+'-intp-string'))
+ states[name+'-string'] = [
+ (r'\\[\\' + bracecc + ']', String.Other),
+ (lbrace, String.Other, '#push'),
+ (rbrace, String.Other, '#pop'),
+ (r'[\\#' + bracecc + ']', String.Other),
+ (r'[^\\#' + bracecc + ']+', String.Other),
+ ]
+ states['strings'].append((r'%[qsw]' + lbrace, String.Other,
+ name+'-string'))
+ states[name+'-regex'] = [
+ (r'\\[\\' + bracecc + ']', String.Regex),
+ (lbrace, String.Regex, '#push'),
+ (rbrace + '[mixounse]*', String.Regex, '#pop'),
+ include('string-intp'),
+ (r'[\\#' + bracecc + ']', String.Regex),
+ (r'[^\\#' + bracecc + ']+', String.Regex),
+ ]
+ states['strings'].append((r'%r' + lbrace, String.Regex,
+ name+'-regex'))
+
+ # these must come after %!
+ states['strings'] += [
+ # %r regex
+ (r'(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
+ intp_regex_callback),
+ # regular fancy strings with qsw
+ (r'%[qsw]([\W_])((?:\\\1|(?!\1).)*)\1', String.Other),
+ (r'(%[QWx]([\W_]))((?:\\\2|(?!\2).)*)(\2)',
+ intp_string_callback),
+ # special forms of fancy strings after operators or
+ # in method calls with braces
+ (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
+ bygroups(Whitespace, String.Other, None)),
+ # and because of fixed width lookbehinds the whole thing a
+ # second time for line startings...
+ (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
+ bygroups(Whitespace, String.Other, None)),
+ # all regular fancy strings without qsw
+ (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
+ intp_string_callback),
+ ]
+
+ return states
+
+ tokens = {
+ 'root': [
+ (r'\A#!.+?$', Comment.Hashbang),
+ (r'#.*?$', Comment.Single),
+ (r'=begin\s.*?\n=end.*?$', Comment.Multiline),
+ # keywords
+ (words((
+ 'BEGIN', 'END', 'alias', 'begin', 'break', 'case', 'defined?',
+ 'do', 'else', 'elsif', 'end', 'ensure', 'for', 'if', 'in', 'next', 'redo',
+ 'rescue', 'raise', 'retry', 'return', 'super', 'then', 'undef',
+ 'unless', 'until', 'when', 'while', 'yield'), suffix=r'\b'),
+ Keyword),
+ # start of function, class and module names
+ (r'(module)(\s+)([a-zA-Z_]\w*'
+ r'(?:::[a-zA-Z_]\w*)*)',
+ bygroups(Keyword, Whitespace, Name.Namespace)),
+ (r'(def)(\s+)', bygroups(Keyword, Whitespace), 'funcname'),
+ (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
+ (r'(class)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
+ # special methods
+ (words((
+ 'initialize', 'new', 'loop', 'include', 'extend', 'raise', 'attr_reader',
+ 'attr_writer', 'attr_accessor', 'attr', 'catch', 'throw', 'private',
+ 'module_function', 'public', 'protected', 'true', 'false', 'nil'),
+ suffix=r'\b'),
+ Keyword.Pseudo),
+ (r'(not|and|or)\b', Operator.Word),
+ (words((
+ 'autoload', 'block_given', 'const_defined', 'eql', 'equal', 'frozen', 'include',
+ 'instance_of', 'is_a', 'iterator', 'kind_of', 'method_defined', 'nil',
+ 'private_method_defined', 'protected_method_defined',
+ 'public_method_defined', 'respond_to', 'tainted'), suffix=r'\?'),
+ Name.Builtin),
+ (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
+ (words((
+ 'Array', 'Float', 'Integer', 'String', '__id__', '__send__', 'abort',
+ 'ancestors', 'at_exit', 'autoload', 'binding', 'callcc', 'caller',
+ 'catch', 'chomp', 'chop', 'class_eval', 'class_variables',
+ 'clone', 'const_defined?', 'const_get', 'const_missing', 'const_set',
+ 'constants', 'display', 'dup', 'eval', 'exec', 'exit', 'extend', 'fail', 'fork',
+ 'format', 'freeze', 'getc', 'gets', 'global_variables', 'gsub',
+ 'hash', 'id', 'included_modules', 'inspect', 'instance_eval',
+ 'instance_method', 'instance_methods',
+ 'instance_variable_get', 'instance_variable_set', 'instance_variables',
+ 'lambda', 'load', 'local_variables', 'loop',
+ 'method', 'method_missing', 'methods', 'module_eval', 'name',
+ 'object_id', 'open', 'p', 'print', 'printf', 'private_class_method',
+ 'private_instance_methods',
+ 'private_methods', 'proc', 'protected_instance_methods',
+ 'protected_methods', 'public_class_method',
+ 'public_instance_methods', 'public_methods',
+ 'putc', 'puts', 'raise', 'rand', 'readline', 'readlines', 'require',
+ 'scan', 'select', 'self', 'send', 'set_trace_func', 'singleton_methods', 'sleep',
+ 'split', 'sprintf', 'srand', 'sub', 'syscall', 'system', 'taint',
+ 'test', 'throw', 'to_a', 'to_s', 'trace_var', 'trap', 'untaint',
+ 'untrace_var', 'warn'), prefix=r'(?~!:])|'
+ r'(?<=(?:\s|;)when\s)|'
+ r'(?<=(?:\s|;)or\s)|'
+ r'(?<=(?:\s|;)and\s)|'
+ r'(?<=\.index\s)|'
+ r'(?<=\.scan\s)|'
+ r'(?<=\.sub\s)|'
+ r'(?<=\.sub!\s)|'
+ r'(?<=\.gsub\s)|'
+ r'(?<=\.gsub!\s)|'
+ r'(?<=\.match\s)|'
+ r'(?<=(?:\s|;)if\s)|'
+ r'(?<=(?:\s|;)elsif\s)|'
+ r'(?<=^when\s)|'
+ r'(?<=^index\s)|'
+ r'(?<=^scan\s)|'
+ r'(?<=^sub\s)|'
+ r'(?<=^gsub\s)|'
+ r'(?<=^sub!\s)|'
+ r'(?<=^gsub!\s)|'
+ r'(?<=^match\s)|'
+ r'(?<=^if\s)|'
+ r'(?<=^elsif\s)'
+ r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
+ # multiline regex (in method calls or subscripts)
+ (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
+ # multiline regex (this time the funny no whitespace rule)
+ (r'(\s+)(/)(?![\s=])', bygroups(Whitespace, String.Regex),
+ 'multiline-regex'),
+ # lex numbers and ignore following regular expressions which
+ # are division operators in fact (grrrr. i hate that. any
+ # better ideas?)
+ # since pygments 0.7 we also eat a "?" operator after numbers
+ # so that the char operator does not work. Chars are not allowed
+ # there so that you can use the ternary operator.
+ # stupid example:
+ # x>=0?n[x]:""
+ (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
+ bygroups(Number.Oct, Whitespace, Operator)),
+ (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
+ bygroups(Number.Hex, Whitespace, Operator)),
+ (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
+ bygroups(Number.Bin, Whitespace, Operator)),
+ (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
+ bygroups(Number.Integer, Whitespace, Operator)),
+ # Names
+ (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
+ (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
+ (r'\$\w+', Name.Variable.Global),
+ (r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
+ (r'\$-[0adFiIlpvw]', Name.Variable.Global),
+ (r'::', Operator),
+ include('strings'),
+ # chars
+ (r'\?(\\[MC]-)*' # modifiers
+ r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
+ r'(?!\w)',
+ String.Char),
+ (r'[A-Z]\w+', Name.Constant),
+ # this is needed because ruby attributes can look
+ # like keywords (class) or like this: ` ?!?
+ (words(RUBY_OPERATORS, prefix=r'(\.|::)'),
+ bygroups(Operator, Name.Operator)),
+ (r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
+ bygroups(Operator, Name)),
+ (r'[a-zA-Z_]\w*[!?]?', Name),
+ (r'(\[|\]|\*\*|<|>>?|>=|<=|<=>|=~|={3}|'
+ r'!~|&&?|\|\||\.{1,3})', Operator),
+ (r'[-+/*%=<>&!^|~]=?', Operator),
+ (r'[(){};,/?:\\]', Punctuation),
+ (r'\s+', Whitespace)
+ ],
+ 'funcname': [
+ (r'\(', Punctuation, 'defexpr'),
+ (r'(?:([a-zA-Z_]\w*)(\.))?' # optional scope name, like "self."
+ r'('
+ r'[a-zA-Z\u0080-\uffff][a-zA-Z0-9_\u0080-\uffff]*[!?=]?' # method name
+ r'|!=|!~|=~|\*\*?|[-+!~]@?|[/%&|^]|<=>|<[<=]?|>[>=]?|===?' # or operator override
+ r'|\[\]=?' # or element reference/assignment override
+ r'|`' # or the undocumented backtick override
+ r')',
+ bygroups(Name.Class, Operator, Name.Function), '#pop'),
+ default('#pop')
+ ],
+ 'classname': [
+ (r'\(', Punctuation, 'defexpr'),
+ (r'<<', Operator, '#pop'),
+ (r'[A-Z_]\w*', Name.Class, '#pop'),
+ default('#pop')
+ ],
+ 'defexpr': [
+ (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
+ (r'\(', Operator, '#push'),
+ include('root')
+ ],
+ 'in-intp': [
+ (r'\{', String.Interpol, '#push'),
+ (r'\}', String.Interpol, '#pop'),
+ include('root'),
+ ],
+ 'string-intp': [
+ (r'#\{', String.Interpol, 'in-intp'),
+ (r'#@@?[a-zA-Z_]\w*', String.Interpol),
+ (r'#\$[a-zA-Z_]\w*', String.Interpol)
+ ],
+ 'string-intp-escaped': [
+ include('string-intp'),
+ (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
+ String.Escape)
+ ],
+ 'interpolated-regex': [
+ include('string-intp'),
+ (r'[\\#]', String.Regex),
+ (r'[^\\#]+', String.Regex),
+ ],
+ 'interpolated-string': [
+ include('string-intp'),
+ (r'[\\#]', String.Other),
+ (r'[^\\#]+', String.Other),
+ ],
+ 'multiline-regex': [
+ include('string-intp'),
+ (r'\\\\', String.Regex),
+ (r'\\/', String.Regex),
+ (r'[\\#]', String.Regex),
+ (r'[^\\/#]+', String.Regex),
+ (r'/[mixounse]*', String.Regex, '#pop'),
+ ],
+ 'end-part': [
+ (r'.+', Comment.Preproc, '#pop')
+ ]
+ }
+ tokens.update(gen_rubystrings_rules())
+
+ def analyse_text(text):
+ return shebang_matches(text, r'ruby(1\.\d)?')
+
+
class RubyConsoleLexer(Lexer):
    """
    For Ruby interactive console (**irb**) output.

    Prompt lines are split into a prompt token plus Ruby code (lexed with
    RubyLexer); everything else is generic output.
    """
    name = 'Ruby irb session'
    aliases = ['rbcon', 'irb']
    mimetypes = ['text/x-ruby-shellsession']
    url = 'https://www.ruby-lang.org'
    version_added = ''
    _example = 'rbcon/console'

    # Matches classic irb prompts like "irb(main):001:0> " and the short
    # ">> " / "?> " forms.
    _prompt_re = re.compile(r'irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] '
                            r'|>> |\?> ')

    def get_tokens_unprocessed(self, text):
        rblexer = RubyLexer(**self.options)

        curcode = ''
        insertions = []
        for match in line_re.finditer(text):
            line = match.group()
            m = self._prompt_re.match(line)
            if m is not None:
                # Prompt line: buffer the code part for batch lexing.
                end = m.end()
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:end])]))
                curcode += line[end:]
            else:
                # Output line: flush any buffered code first.
                if curcode:
                    yield from do_insertions(
                        insertions, rblexer.get_tokens_unprocessed(curcode))
                    curcode = ''
                    insertions = []
                yield match.start(), Generic.Output, line
        if curcode:
            yield from do_insertions(
                insertions, rblexer.get_tokens_unprocessed(curcode))
+
+
class FancyLexer(RegexLexer):
    """
    Pygments Lexer For Fancy.

    Fancy is a self-hosted, pure object-oriented, dynamic,
    class-based, concurrent general-purpose programming language
    running on Rubinius, the Ruby VM.
    """
    name = 'Fancy'
    url = 'https://github.com/bakkdoor/fancy'
    filenames = ['*.fy', '*.fancypack']
    aliases = ['fancy', 'fy']
    mimetypes = ['text/x-fancysrc']
    version_added = '1.5'

    tokens = {
        # copied from PerlLexer:
        'balanced-regex': [
            (r'/(\\\\|\\[^\\]|[^/\\])*/[egimosx]*', String.Regex, '#pop'),
            (r'!(\\\\|\\[^\\]|[^!\\])*![egimosx]*', String.Regex, '#pop'),
            (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
            (r'\{(\\\\|\\[^\\]|[^}\\])*\}[egimosx]*', String.Regex, '#pop'),
            (r'<(\\\\|\\[^\\]|[^>\\])*>[egimosx]*', String.Regex, '#pop'),
            (r'\[(\\\\|\\[^\\]|[^\]\\])*\][egimosx]*', String.Regex, '#pop'),
            (r'\((\\\\|\\[^\\]|[^)\\])*\)[egimosx]*', String.Regex, '#pop'),
            (r'@(\\\\|\\[^\\]|[^@\\])*@[egimosx]*', String.Regex, '#pop'),
            (r'%(\\\\|\\[^\\]|[^%\\])*%[egimosx]*', String.Regex, '#pop'),
            (r'\$(\\\\|\\[^\\]|[^$\\])*\$[egimosx]*', String.Regex, '#pop'),
        ],
        'root': [
            (r'\s+', Whitespace),

            # balanced delimiters (copied from PerlLexer):
            (r's\{(\\\\|\\[^\\]|[^}\\])*\}\s*', String.Regex, 'balanced-regex'),
            (r's<(\\\\|\\[^\\]|[^>\\])*>\s*', String.Regex, 'balanced-regex'),
            (r's\[(\\\\|\\[^\\]|[^\]\\])*\]\s*', String.Regex, 'balanced-regex'),
            (r's\((\\\\|\\[^\\]|[^)\\])*\)\s*', String.Regex, 'balanced-regex'),
            (r'm?/(\\\\|\\[^\\]|[^///\n])*/[gcimosx]*', String.Regex),
            (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),

            # Comments
            (r'#(.*?)\n', Comment.Single),
            # Symbols
            (r'\'([^\'\s\[\](){}]+|\[\])', String.Symbol),
            # Multi-line DoubleQuotedString
            (r'"""(\\\\|\\[^\\]|[^\\])*?"""', String),
            # DoubleQuotedString
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
            # keywords
            (r'(def|class|try|catch|finally|retry|return|return_local|match|'
             r'case|->|=>)\b', Keyword),
            # constants
            (r'(self|super|nil|false|true)\b', Name.Constant),
            (r'[(){};,/?|:\\]', Punctuation),
            # names
            (words((
                'Object', 'Array', 'Hash', 'Directory', 'File', 'Class', 'String',
                'Number', 'Enumerable', 'FancyEnumerable', 'Block', 'TrueClass',
                'NilClass', 'FalseClass', 'Tuple', 'Symbol', 'Stack', 'Set',
                'FancySpec', 'Method', 'Package', 'Range'), suffix=r'\b'),
             Name.Builtin),
            # functions
            (r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function),
            # operators, must be below functions
            (r'[-+*/~,<>=&!?%^\[\].$]+', Operator),
            (r'[A-Z]\w*', Name.Constant),
            (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
            # NOTE(review): this rule is unreachable — the preceding
            # @[a-zA-Z_]\w* rule matches the first '@' before '@@' can.
            (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
            ('@@?', Operator),
            (r'[a-zA-Z_]\w*', Name),
            # numbers - / checks are necessary to avoid mismarking regexes,
            # see comment in RubyLexer
            (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
             bygroups(Number.Oct, Whitespace, Operator)),
            (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
             bygroups(Number.Hex, Whitespace, Operator)),
            (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
             bygroups(Number.Bin, Whitespace, Operator)),
            (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
             bygroups(Number.Integer, Whitespace, Operator)),
            (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
            (r'\d+', Number.Integer)
        ]
    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/rust.py b/venv/lib/python3.10/site-packages/pygments/lexers/rust.py
new file mode 100644
index 0000000000000000000000000000000000000000..63410475553d44d7aa080e075ac71180f3160d6c
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/rust.py
@@ -0,0 +1,222 @@
+"""
+ pygments.lexers.rust
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Rust language.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, words, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Whitespace
+
+__all__ = ['RustLexer']
+
+
class RustLexer(RegexLexer):
    """
    Lexer for the Rust programming language (version 1.47).
    """
    name = 'Rust'
    url = 'https://www.rust-lang.org/'
    filenames = ['*.rs', '*.rs.in']
    aliases = ['rust', 'rs']
    mimetypes = ['text/rust', 'text/x-rust']
    version_added = '1.6'

    # Primitive scalar and string types, emitted as Keyword.Type.
    keyword_types = (words((
        'u8', 'u16', 'u32', 'u64', 'u128', 'i8', 'i16', 'i32', 'i64', 'i128',
        'usize', 'isize', 'f32', 'f64', 'char', 'str', 'bool',
    ), suffix=r'\b'), Keyword.Type)

    # Traits, types and functions from the prelude, emitted as Name.Builtin.
    builtin_funcs_types = (words((
        'Copy', 'Send', 'Sized', 'Sync', 'Unpin',
        'Drop', 'Fn', 'FnMut', 'FnOnce', 'drop',
        'Box', 'ToOwned', 'Clone',
        'PartialEq', 'PartialOrd', 'Eq', 'Ord',
        'AsRef', 'AsMut', 'Into', 'From', 'Default',
        'Iterator', 'Extend', 'IntoIterator', 'DoubleEndedIterator',
        'ExactSizeIterator',
        'Option', 'Some', 'None',
        'Result', 'Ok', 'Err',
        'String', 'ToString', 'Vec',
    ), suffix=r'\b'), Name.Builtin)

    # Standard-library macros; the suffix requires the trailing '!'.
    builtin_macros = (words((
        'asm', 'assert', 'assert_eq', 'assert_ne', 'cfg', 'column',
        'compile_error', 'concat', 'concat_idents', 'dbg', 'debug_assert',
        'debug_assert_eq', 'debug_assert_ne', 'env', 'eprint', 'eprintln',
        'file', 'format', 'format_args', 'format_args_nl', 'global_asm',
        'include', 'include_bytes', 'include_str',
        'is_aarch64_feature_detected',
        'is_arm_feature_detected',
        'is_mips64_feature_detected',
        'is_mips_feature_detected',
        'is_powerpc64_feature_detected',
        'is_powerpc_feature_detected',
        'is_x86_feature_detected',
        'line', 'llvm_asm', 'log_syntax', 'macro_rules', 'matches',
        'module_path', 'option_env', 'panic', 'print', 'println', 'stringify',
        'thread_local', 'todo', 'trace_macros', 'unimplemented', 'unreachable',
        'vec', 'write', 'writeln',
    ), suffix=r'!'), Name.Function.Magic)

    tokens = {
        'root': [
            # rust allows a file to start with a shebang, but if the first line
            # starts with #![ then it's not a shebang but a crate attribute.
            (r'#![^[\r\n].*$', Comment.Preproc),
            default('base'),
        ],
        'base': [
            # Whitespace and Comments
            (r'\n', Whitespace),
            (r'\s+', Whitespace),
            (r'//!.*?\n', String.Doc),
            (r'///(\n|[^/].*?\n)', String.Doc),
            (r'//(.*?)\n', Comment.Single),
            (r'/\*\*(\n|[^/*])', String.Doc, 'doccomment'),
            (r'/\*!', String.Doc, 'doccomment'),
            (r'/\*', Comment.Multiline, 'comment'),

            # Macro parameters ($name, $( ... ) etc. in macro_rules! bodies)
            (r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
            # Keywords
            (words(('as', 'async', 'await', 'box', 'const', 'crate', 'dyn',
                    'else', 'extern', 'for', 'if', 'impl', 'in', 'loop',
                    'match', 'move', 'mut', 'pub', 'ref', 'return', 'static',
                    'super', 'trait', 'unsafe', 'use', 'where', 'while'),
                   suffix=r'\b'), Keyword),
            (words(('abstract', 'become', 'do', 'final', 'macro', 'override',
                    'priv', 'typeof', 'try', 'unsized', 'virtual', 'yield'),
                   suffix=r'\b'), Keyword.Reserved),
            (r'(true|false)\b', Keyword.Constant),
            (r'self\b', Name.Builtin.Pseudo),
            (r'mod\b', Keyword, 'modname'),
            (r'let\b', Keyword.Declaration),
            (r'fn\b', Keyword, 'funcname'),
            (r'(struct|enum|type|union)\b', Keyword, 'typename'),
            (r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Whitespace, Keyword)),
            keyword_types,
            (r'[sS]elf\b', Name.Builtin.Pseudo),
            # Prelude (taken from Rust's src/libstd/prelude.rs)
            builtin_funcs_types,
            builtin_macros,
            # Path separators, so types don't catch them.
            (r'::\b', Punctuation),
            # Types in positions.
            (r'(?::|->)', Punctuation, 'typename'),
            # Labels
            (r'(break|continue)(\b\s*)(\'[A-Za-z_]\w*)?',
             bygroups(Keyword, Text.Whitespace, Name.Label)),

            # Character literals
            (r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
             r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
             String.Char),
            (r"""b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0"""
             r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
             String.Char),

            # Binary literals
            (r'0b[01_]+', Number.Bin, 'number_lit'),
            # Octal literals
            (r'0o[0-7_]+', Number.Oct, 'number_lit'),
            # Hexadecimal literals
            (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
            # Decimal literals
            (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
             r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float,
             'number_lit'),
            (r'[0-9][0-9_]*', Number.Integer, 'number_lit'),

            # String literals
            (r'b"', String, 'bytestring'),
            (r'"', String, 'string'),
            (r'(?s)b?r(#*)".*?"\1', String),

            # Lifetime names
            (r"'", Operator, 'lifetime'),

            # Operators and Punctuation
            (r'\.\.=?', Operator),
            (r'[{}()\[\],.;]', Punctuation),
            (r'[+\-*/%&|<>^!~@=:?]', Operator),

            # Identifiers
            (r'[a-zA-Z_]\w*', Name),
            # Raw identifiers
            # NOTE(review): as ordered, this rule is unreachable — the plain
            # identifier rule above consumes the leading 'r' of 'r#name'
            # first; confirm the intended order against upstream pygments.
            (r'r#[a-zA-Z_]\w*', Name),

            # Attributes
            (r'#!?\[', Comment.Preproc, 'attribute['),

            # Misc
            # Lone hashes: not used in Rust syntax, but allowed in macro
            # arguments, most famously for quote::quote!()
            (r'#', Punctuation),
        ],
        # Plain block comments; '#push'/'#pop' track nesting depth.
        'comment': [
            (r'[^*/]+', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline),
        ],
        # Block doc comments (/** ... */ and /*! ... */), likewise nestable.
        'doccomment': [
            (r'[^*/]+', String.Doc),
            (r'/\*', String.Doc, '#push'),
            (r'\*/', String.Doc, '#pop'),
            (r'[*/]', String.Doc),
        ],
        # Name immediately following a 'mod' keyword.
        'modname': [
            (r'\s+', Whitespace),
            (r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
            default('#pop'),
        ],
        # Name immediately following an 'fn' keyword.
        'funcname': [
            (r'\s+', Whitespace),
            (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
            default('#pop'),
        ],
        # A type name, possibly preceded by '&' and/or a lifetime.
        'typename': [
            (r'\s+', Whitespace),
            (r'&', Keyword.Pseudo),
            (r"'", Operator, 'lifetime'),
            builtin_funcs_types,
            keyword_types,
            (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
            default('#pop'),
        ],
        # Lifetime name after a single quote (e.g. 'a, 'static, '_).
        'lifetime': [
            (r"(static|_)", Name.Builtin),
            (r"[a-zA-Z_]+\w*", Name.Attribute),
            default('#pop'),
        ],
        # Optional numeric type suffix directly after a number literal.
        'number_lit': [
            (r'[ui](8|16|32|64|size)', Keyword, '#pop'),
            (r'f(32|64)', Keyword, '#pop'),
            default('#pop'),
        ],
        'string': [
            (r'"', String, '#pop'),
            (r"""\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
             r"""|\\u\{[0-9a-fA-F]{1,6}\}""", String.Escape),
            (r'[^\\"]+', String),
            (r'\\', String),
        ],
        # Byte strings additionally allow \x escapes with a high first digit.
        'bytestring': [
            (r"""\\x[89a-fA-F][0-9a-fA-F]""", String.Escape),
            include('string'),
        ],
        'attribute_common': [
            (r'"', String, 'string'),
            (r'\[', Comment.Preproc, 'attribute['),
        ],
        # Inside #[...] / #![...]; the state name records the open bracket.
        'attribute[': [
            include('attribute_common'),
            (r'\]', Comment.Preproc, '#pop'),
            (r'[^"\]\[]+', Comment.Preproc),
        ],
    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/sas.py b/venv/lib/python3.10/site-packages/pygments/lexers/sas.py
new file mode 100644
index 0000000000000000000000000000000000000000..1b2ad432d231d21192c4f3653ef2e541588afd1d
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/sas.py
@@ -0,0 +1,227 @@
+"""
+ pygments.lexers.sas
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for SAS.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Comment, Keyword, Name, Number, String, Text, \
+ Other, Generic
+
+__all__ = ['SASLexer']
+
+
class SASLexer(RegexLexer):
    """
    For SAS files.
    """
    # Syntax from syntax/sas.vim by James Kidd

    name = 'SAS'
    aliases = ['sas']
    filenames = ['*.SAS', '*.sas']
    mimetypes = ['text/x-sas', 'text/sas', 'application/x-sas']
    url = 'https://en.wikipedia.org/wiki/SAS_(software)'
    version_added = '2.2'
    # SAS is case-insensitive; MULTILINE makes '^'/'$' anchor per line.
    flags = re.IGNORECASE | re.MULTILINE

    # Macro-language names, matched below with a '%' prefix.
    builtins_macros = (
        "bquote", "nrbquote", "cmpres", "qcmpres", "compstor", "datatyp",
        "display", "do", "else", "end", "eval", "global", "goto", "if",
        "index", "input", "keydef", "label", "left", "length", "let",
        "local", "lowcase", "macro", "mend", "nrquote",
        "nrstr", "put", "qleft", "qlowcase", "qscan",
        "qsubstr", "qsysfunc", "qtrim", "quote", "qupcase", "scan",
        "str", "substr", "superq", "syscall", "sysevalf", "sysexec",
        "sysfunc", "sysget", "syslput", "sysprod", "sysrc", "sysrput",
        "then", "to", "trim", "unquote", "until", "upcase", "verify",
        "while", "window"
    )

    builtins_conditionals = (
        "do", "if", "then", "else", "end", "until", "while"
    )

    builtins_statements = (
        "abort", "array", "attrib", "by", "call", "cards", "cards4",
        "catname", "continue", "datalines", "datalines4", "delete", "delim",
        "delimiter", "display", "dm", "drop", "endsas", "error", "file",
        "filename", "footnote", "format", "goto", "in", "infile", "informat",
        "input", "keep", "label", "leave", "length", "libname", "link",
        "list", "lostcard", "merge", "missing", "modify", "options", "output",
        "out", "page", "put", "redirect", "remove", "rename", "replace",
        "retain", "return", "select", "set", "skip", "startsas", "stop",
        "title", "update", "waitsas", "where", "window", "x", "systask"
    )

    builtins_sql = (
        "add", "and", "alter", "as", "cascade", "check", "create",
        "delete", "describe", "distinct", "drop", "foreign", "from",
        "group", "having", "index", "insert", "into", "in", "key", "like",
        "message", "modify", "msgtype", "not", "null", "on", "or",
        "order", "primary", "references", "reset", "restrict", "select",
        "set", "table", "unique", "update", "validate", "view", "where"
    )

    builtins_functions = (
        "abs", "addr", "airy", "arcos", "arsin", "atan", "attrc",
        "attrn", "band", "betainv", "blshift", "bnot", "bor",
        "brshift", "bxor", "byte", "cdf", "ceil", "cexist", "cinv",
        "close", "cnonct", "collate", "compbl", "compound",
        "compress", "cos", "cosh", "css", "curobs", "cv", "daccdb",
        "daccdbsl", "daccsl", "daccsyd", "dacctab", "dairy", "date",
        "datejul", "datepart", "datetime", "day", "dclose", "depdb",
        "depdbsl", "depsl", "depsyd",
        "deptab", "dequote", "dhms", "dif", "digamma",
        "dim", "dinfo", "dnum", "dopen", "doptname", "doptnum",
        "dread", "dropnote", "dsname", "erf", "erfc", "exist", "exp",
        "fappend", "fclose", "fcol", "fdelete", "fetch", "fetchobs",
        "fexist", "fget", "fileexist", "filename", "fileref",
        "finfo", "finv", "fipname", "fipnamel", "fipstate", "floor",
        "fnonct", "fnote", "fopen", "foptname", "foptnum", "fpoint",
        "fpos", "fput", "fread", "frewind", "frlen", "fsep", "fuzz",
        "fwrite", "gaminv", "gamma", "getoption", "getvarc", "getvarn",
        "hbound", "hms", "hosthelp", "hour", "ibessel", "index",
        "indexc", "indexw", "input", "inputc", "inputn", "int",
        "intck", "intnx", "intrr", "irr", "jbessel", "juldate",
        "kurtosis", "lag", "lbound", "left", "length", "lgamma",
        "libname", "libref", "log", "log10", "log2", "logpdf", "logpmf",
        "logsdf", "lowcase", "max", "mdy", "mean", "min", "minute",
        "mod", "month", "mopen", "mort", "n", "netpv", "nmiss",
        "normal", "note", "npv", "open", "ordinal", "pathname",
        "pdf", "peek", "peekc", "pmf", "point", "poisson", "poke",
        "probbeta", "probbnml", "probchi", "probf", "probgam",
        "probhypr", "probit", "probnegb", "probnorm", "probt",
        "put", "putc", "putn", "qtr", "quote", "ranbin", "rancau",
        "ranexp", "rangam", "range", "rank", "rannor", "ranpoi",
        "rantbl", "rantri", "ranuni", "repeat", "resolve", "reverse",
        "rewind", "right", "round", "saving", "scan", "sdf", "second",
        "sign", "sin", "sinh", "skewness", "soundex", "spedis",
        "sqrt", "std", "stderr", "stfips", "stname", "stnamel",
        "substr", "sum", "symget", "sysget", "sysmsg", "sysprod",
        "sysrc", "system", "tan", "tanh", "time", "timepart", "tinv",
        "tnonct", "today", "translate", "tranwrd", "trigamma",
        "trim", "trimn", "trunc", "uniform", "upcase", "uss", "var",
        "varfmt", "varinfmt", "varlabel", "varlen", "varname",
        "varnum", "varray", "varrayx", "vartype", "verify", "vformat",
        "vformatd", "vformatdx", "vformatn", "vformatnx", "vformatw",
        "vformatwx", "vformatx", "vinarray", "vinarrayx", "vinformat",
        "vinformatd", "vinformatdx", "vinformatn", "vinformatnx",
        "vinformatw", "vinformatwx", "vinformatx", "vlabel",
        "vlabelx", "vlength", "vlengthx", "vname", "vnamex", "vtype",
        "vtypex", "weekday", "year", "yyq", "zipfips", "zipname",
        "zipnamel", "zipstate"
    )

    tokens = {
        'root': [
            include('comments'),
            include('proc-data'),
            include('cards-datalines'),
            include('logs'),
            include('general'),
            (r'.', Text),
        ],
        # SAS is multi-line regardless, but * is ended by ;
        'comments': [
            (r'^\s*\*.*?;', Comment),
            (r'/\*.*?\*/', Comment),
            (r'^\s*\*(.|\n)*?;', Comment.Multiline),
            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
        ],
        # Special highlight for proc, data, quit, run
        'proc-data': [
            (r'(^|;)\s*(proc \w+|data|run|quit)[\s;]',
             Keyword.Reserved),
        ],
        # Special highlight cards and datalines
        'cards-datalines': [
            (r'^\s*(datalines|cards)\s*;\s*$', Keyword, 'data'),
        ],
        # Inline data is swallowed as Other until the terminating lone ';'.
        'data': [
            (r'(.|\n)*^\s*;\s*$', Other, '#pop'),
        ],
        # Special highlight for put NOTE|ERROR|WARNING (order matters)
        'logs': [
            (r'\n?^\s*%?put ', Keyword, 'log-messages'),
        ],
        'log-messages': [
            (r'NOTE(:|-).*', Generic, '#pop'),
            (r'WARNING(:|-).*', Generic.Emph, '#pop'),
            (r'ERROR(:|-).*', Generic.Error, '#pop'),
            include('general'),
        ],
        'general': [
            include('keywords'),
            include('vars-strings'),
            include('special'),
            include('numbers'),
        ],
        # Keywords, statements, functions, macros
        'keywords': [
            (words(builtins_statements,
                   prefix = r'\b',
                   suffix = r'\b'),
             Keyword),
            (words(builtins_sql,
                   prefix = r'\b',
                   suffix = r'\b'),
             Keyword),
            (words(builtins_conditionals,
                   prefix = r'\b',
                   suffix = r'\b'),
             Keyword),
            (words(builtins_macros,
                   prefix = r'%',
                   suffix = r'\b'),
             Name.Builtin),
            (words(builtins_functions,
                   prefix = r'\b',
                   suffix = r'\('),
             Name.Builtin),
        ],
        # Strings and user-defined variables and macros (order matters)
        # The {0,31} bound reflects the 32-character limit on SAS names.
        'vars-strings': [
            (r'&[a-z_]\w{0,31}\.?', Name.Variable),
            (r'%[a-z_]\w{0,31}', Name.Function),
            (r'\'', String, 'string_squote'),
            (r'"', String, 'string_dquote'),
        ],
        'string_squote': [
            ('\'', String, '#pop'),
            (r'\\\\|\\"|\\\n', String.Escape),
            # AFAIK, macro variables are not evaluated in single quotes
            # (r'&', Name.Variable, 'validvar'),
            (r'[^$\'\\]+', String),
            (r'[$\'\\]', String),
        ],
        'string_dquote': [
            (r'"', String, '#pop'),
            (r'\\\\|\\"|\\\n', String.Escape),
            (r'&', Name.Variable, 'validvar'),
            (r'[^$&"\\]+', String),
            (r'[$"\\]', String),
        ],
        # Macro-variable name right after '&' inside a double-quoted string.
        'validvar': [
            (r'[a-z_]\w{0,31}\.?', Name.Variable, '#pop'),
        ],
        # SAS numbers and special variables
        'numbers': [
            (r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)(E[+-]?[0-9]+)?i?\b',
             Number),
        ],
        'special': [
            (r'(null|missing|_all_|_automatic_|_character_|_n_|'
             r'_infile_|_name_|_null_|_numeric_|_user_|_webout_)',
             Keyword.Constant),
        ],
        # 'operators': [
        #     (r'(-|=|<=|>=|<|>|<>|&|!=|'
        #      r'\||\*|\+|\^|/|!|~|~=)', Operator)
        # ],
    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/savi.py b/venv/lib/python3.10/site-packages/pygments/lexers/savi.py
new file mode 100644
index 0000000000000000000000000000000000000000..1e443ae302a647b6c401d9c0e6fe451e12732dde
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/savi.py
@@ -0,0 +1,171 @@
+"""
+ pygments.lexers.savi
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Savi.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, include
+from pygments.token import Whitespace, Keyword, Name, String, Number, \
+ Operator, Punctuation, Comment, Generic, Error
+
+__all__ = ['SaviLexer']
+
+
+# The canonical version of this file can be found in the following repository,
+# where it is kept in sync with any language changes, as well as the other
+# pygments-like lexers that are maintained for use with other tools:
+# - https://github.com/savi-lang/savi/blob/main/tooling/pygments/lexers/savi.py
+#
+# If you're changing this file in the pygments repository, please ensure that
+# any changes you make are also propagated to the official Savi repository,
+# in order to avoid accidental clobbering of your changes later when an update
+# from the Savi repository flows forward into the pygments repository.
+#
+# If you're changing this file in the Savi repository, please ensure that
+# any changes you make are also reflected in the other pygments-like lexers
+# (rouge, vscode, etc) so that all of the lexers can be kept cleanly in sync.
+
class SaviLexer(RegexLexer):
    """
    For Savi source code.

    .. versionadded: 2.10
    """

    name = 'Savi'
    url = 'https://github.com/savi-lang/savi'
    aliases = ['savi']
    filenames = ['*.savi']
    version_added = ''

    tokens = {
        "root": [
            # Line Comment
            (r'//.*?$', Comment.Single),

            # Doc Comment
            (r'::.*?$', Comment.Single),

            # Capability Operator (e.g. 'iso, 'ref suffixes on a type)
            (r'(\')(\w+)(?=[^\'])', bygroups(Operator, Name)),

            # Double-Quote String (optionally prefixed, e.g. b"...")
            (r'\w?"', String.Double, "string.double"),

            # Single-Char String
            (r"'", String.Char, "string.char"),

            # Type Name
            (r'(_?[A-Z]\w*)', Name.Class),

            # Nested Type Name
            (r'(\.)(\s*)(_?[A-Z]\w*)', bygroups(Punctuation, Whitespace, Name.Class)),

            # Declare (a line-leading :keyword enters the 'decl' state)
            (r'^([ \t]*)(:\w+)',
             bygroups(Whitespace, Name.Tag),
             "decl"),

            # Error-Raising Calls/Names
            (r'((\w+|\+|\-|\*)\!)', Generic.Deleted),

            # Numeric Values
            (r'\b\d([\d_]*(\.[\d_]+)?)\b', Number),

            # Hex Numeric Values
            (r'\b0x([0-9a-fA-F_]+)\b', Number.Hex),

            # Binary Numeric Values
            (r'\b0b([01_]+)\b', Number.Bin),

            # Function Call (with braces)
            (r'\w+(?=\()', Name.Function),

            # Function Call (with receiver)
            (r'(\.)(\s*)(\w+)', bygroups(Punctuation, Whitespace, Name.Function)),

            # Function Call (with self receiver)
            (r'(@)(\w+)', bygroups(Punctuation, Name.Function)),

            # Parenthesis — each opener pushes 'root' again; the matching
            # closer pops it, so nesting is tracked by the state stack.
            (r'\(', Punctuation, "root"),
            (r'\)', Punctuation, "#pop"),

            # Brace
            (r'\{', Punctuation, "root"),
            (r'\}', Punctuation, "#pop"),

            # Bracket
            (r'\[', Punctuation, "root"),
            (r'(\])(\!)', bygroups(Punctuation, Generic.Deleted), "#pop"),
            (r'\]', Punctuation, "#pop"),

            # Punctuation
            (r'[,;:\.@]', Punctuation),

            # Piping Operators
            (r'(\|\>)', Operator),

            # Branching Operators
            (r'(\&\&|\|\||\?\?|\&\?|\|\?|\.\?)', Operator),

            # Comparison Operators
            (r'(\<\=\>|\=\~|\=\=|\<\=|\>\=|\<|\>)', Operator),

            # Arithmetic Operators
            (r'(\+|\-|\/|\*|\%)', Operator),

            # Assignment Operators
            (r'(\=)', Operator),

            # Other Operators
            (r'(\!|\<\<|\<|\&|\|)', Operator),

            # Identifiers
            (r'\b\w+\b', Name),

            # Whitespace
            (r'[ \t\r]+\n*|\n+', Whitespace),
        ],

        # Declare (nested rules); ends at ':' or end of line.
        "decl": [
            (r'\b[a-z_]\w*\b(?!\!)', Keyword.Declaration),
            (r':', Punctuation, "#pop"),
            (r'\n', Whitespace, "#pop"),
            include("root"),
        ],

        # Double-Quote String (nested rules)
        "string.double": [
            (r'\\\(', String.Interpol, "string.interpolation"),
            (r'\\u[0-9a-fA-F]{4}', String.Escape),
            (r'\\x[0-9a-fA-F]{2}', String.Escape),
            (r'\\[bfnrt\\\']', String.Escape),
            (r'\\"', String.Escape),
            (r'"', String.Double, "#pop"),
            (r'[^\\"]+', String.Double),
            (r'.', Error),
        ],

        # Single-Char String (nested rules)
        "string.char": [
            (r'\\u[0-9a-fA-F]{4}', String.Escape),
            (r'\\x[0-9a-fA-F]{2}', String.Escape),
            (r'\\[bfnrt\\\']', String.Escape),
            (r"\\'", String.Escape),
            (r"'", String.Char, "#pop"),
            (r"[^\\']+", String.Char),
            (r'.', Error),
        ],

        # Interpolation inside String (nested rules)
        "string.interpolation": [
            (r"\)", String.Interpol, "#pop"),
            include("root"),
        ]
    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/scdoc.py b/venv/lib/python3.10/site-packages/pygments/lexers/scdoc.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e850d02ed8ca77a90c9de217fac89dccd60b9ab
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/scdoc.py
@@ -0,0 +1,85 @@
+"""
+ pygments.lexers.scdoc
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for scdoc, a simple man page generator.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this
+from pygments.token import Text, Comment, Keyword, String, Generic
+
+__all__ = ['ScdocLexer']
+
+
class ScdocLexer(RegexLexer):
    """
    `scdoc` is a simple man page generator for POSIX systems written in C99.
    """
    name = 'scdoc'
    url = 'https://git.sr.ht/~sircmpwn/scdoc'
    aliases = ['scdoc', 'scd']
    filenames = ['*.scd', '*.scdoc']
    version_added = '2.5'
    # MULTILINE so '^'/'$' anchor to each line of the document.
    flags = re.MULTILINE

    tokens = {
        'root': [
            # comment
            (r'^(;.+\n)', bygroups(Comment)),

            # heading with pound prefix
            (r'^(#)([^#].+\n)', bygroups(Generic.Heading, Text)),
            (r'^(#{2})(.+\n)', bygroups(Generic.Subheading, Text)),
            # bulleted lists
            (r'^(\s*)([*-])(\s)(.+\n)',
             bygroups(Text, Keyword, Text, using(this, state='inline'))),
            # numbered lists
            (r'^(\s*)(\.+\.)( .+\n)',
             bygroups(Text, Keyword, using(this, state='inline'))),
            # quote
            (r'^(\s*>\s)(.+\n)', bygroups(Keyword, Generic.Emph)),
            # text block (fenced with ```)
            (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)),

            include('inline'),
        ],
        'inline': [
            # escape
            (r'\\.', Text),
            # underlines
            (r'(\s)(_[^_]+_)(\W|\n)', bygroups(Text, Generic.Emph, Text)),
            # bold
            (r'(\s)(\*[^*]+\*)(\W|\n)', bygroups(Text, Generic.Strong, Text)),
            # inline code
            (r'`[^`]+`', String.Backtick),

            # general text, must come last!
            (r'[^\\\s]+', Text),
            (r'.', Text),
        ],
    }

    # Pygments convention: analyse_text takes the raw text (no self) and
    # returns a float in [0, 1] indicating how likely the text is scdoc.
    def analyse_text(text):
        """Check for bold and underline text marked with * and _. Also,
        every scdoc file must start with a strictly defined first line."""
        result = 0

        if '*' in text:
            result += 0.01

        if '_' in text:
            result += 0.01

        # name(section) ["left_footer" ["center_header"]]
        first_line = text.partition('\n')[0]
        scdoc_preamble_pattern = r'^.*\([1-7]\)( "[^"]+"){0,2}$'

        if re.search(scdoc_preamble_pattern, first_line):
            result += 0.5

        return result
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/scripting.py b/venv/lib/python3.10/site-packages/pygments/lexers/scripting.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e494c33b8ec2c939c6e55921a03aba232f5a456
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/scripting.py
@@ -0,0 +1,1616 @@
+"""
+ pygments.lexers.scripting
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for scripting and embedded languages.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, combined, \
+ words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Whitespace, Other
+from pygments.util import get_bool_opt, get_list_opt
+
+__all__ = ['LuaLexer', 'LuauLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer',
+ 'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer', 'HybrisLexer',
+ 'EasytrieveLexer', 'JclLexer', 'MiniScriptLexer']
+
+
def all_lua_builtins():
    """Return every builtin name from all Lua builtin modules, flattened
    into a single list (module iteration order is preserved)."""
    from pygments.lexers._lua_builtins import MODULES
    names = []
    for module_names in MODULES.values():
        names.extend(module_names)
    return names
+
class LuaLexer(RegexLexer):
    """
    For Lua source code.

    Additional options accepted:

    `func_name_highlighting`
        If given and ``True``, highlight builtin function names
        (default: ``True``).
    `disabled_modules`
        If given, must be a list of module names whose function names
        should not be highlighted. By default all modules are highlighted.

        To get a list of allowed modules have a look into the
        `_lua_builtins` module:

        .. sourcecode:: pycon

            >>> from pygments.lexers._lua_builtins import MODULES
            >>> MODULES.keys()
            ['string', 'coroutine', 'modules', 'io', 'basic', ...]
    """

    name = 'Lua'
    url = 'https://www.lua.org/'
    aliases = ['lua']
    filenames = ['*.lua', '*.wlua']
    mimetypes = ['text/x-lua', 'application/x-lua']
    version_added = ''

    # Long-bracket comments: --[==[ ... ]==].  The run of '=' after the
    # first bracket must be mirrored at the close, so it is captured as the
    # named group 'level' and backreferenced with (?P=level).
    # BUGFIX: this previously read '(?P=*)', which is an invalid group
    # reference and left the later (?P=level) pointing at an undefined
    # group, so re raised an error as soon as the pattern was compiled.
    _comment_multiline = r'(?:--\[(?P<level>=*)\[[\w\W]*?\](?P=level)\])'
    _comment_single = r'(?:--.*$)'
    # A whitespace run; the lookahead stops it one short so adjacent
    # alternatives in _s can still match.
    _space = r'(?:\s+(?!\s))'
    # Anything skippable between meaningful tokens.
    _s = rf'(?:{_comment_multiline}|{_comment_single}|{_space})'
    _name = r'(?:[^\W\d]\w*)'

    tokens = {
        'root': [
            # Lua allows a file to start with a shebang.
            (r'#!.*', Comment.Preproc),
            default('base'),
        ],
        'ws': [
            (_comment_multiline, Comment.Multiline),
            (_comment_single, Comment.Single),
            (_space, Whitespace),
        ],
        'base': [
            include('ws'),

            (r'(?i)0x[\da-f]*(\.[\da-f]*)?(p[+-]?\d+)?', Number.Hex),
            (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
            (r'(?i)\d+e[+-]?\d+', Number.Float),
            (r'\d+', Number.Integer),

            # multiline strings
            (r'(?s)\[(=*)\[.*?\]\1\]', String),

            (r'::', Punctuation, 'label'),
            (r'\.{3}', Punctuation),
            (r'[=<>|~&+\-*/%#^]+|\.\.', Operator),
            (r'[\[\]{}().,:;]+', Punctuation),
            (r'(and|or|not)\b', Operator.Word),

            (words([
                'break', 'do', 'else', 'elseif', 'end', 'for', 'if', 'in',
                'repeat', 'return', 'then', 'until', 'while'
            ], suffix=r'\b'), Keyword.Reserved),
            (r'goto\b', Keyword.Reserved, 'goto'),
            (r'(local)\b', Keyword.Declaration),
            (r'(true|false|nil)\b', Keyword.Constant),

            (r'(function)\b', Keyword.Reserved, 'funcname'),

            (words(all_lua_builtins(), suffix=r"\b"), Name.Builtin),
            # Names followed by '.' or ':' are attribute chains; names
            # followed by '(' are calls.
            (fr'[A-Za-z_]\w*(?={_s}*[.:])', Name.Variable, 'varname'),
            (fr'[A-Za-z_]\w*(?={_s}*\()', Name.Function),
            (r'[A-Za-z_]\w*', Name.Variable),

            ("'", String.Single, combined('stringescape', 'sqs')),
            ('"', String.Double, combined('stringescape', 'dqs'))
        ],

        # Rest of a dotted/colon-separated attribute chain.
        'varname': [
            include('ws'),
            (r'\.\.', Operator, '#pop'),
            (r'[.:]', Punctuation),
            (rf'{_name}(?={_s}*[.:])', Name.Property),
            (rf'{_name}(?={_s}*\()', Name.Function, '#pop'),
            (_name, Name.Property, '#pop'),
        ],

        # Name(s) after a 'function' keyword; class-part vs. final name.
        'funcname': [
            include('ws'),
            (r'[.:]', Punctuation),
            (rf'{_name}(?={_s}*[.:])', Name.Class),
            (_name, Name.Function, '#pop'),
            # inline function
            (r'\(', Punctuation, '#pop'),
        ],

        'goto': [
            include('ws'),
            (_name, Name.Label, '#pop'),
        ],

        # ::label:: definitions.
        'label': [
            include('ws'),
            (r'::', Punctuation, '#pop'),
            (_name, Name.Label),
        ],

        'stringescape': [
            (r'\\([abfnrtv\\"\']|[\r\n]{1,2}|z\s*|x[0-9a-fA-F]{2}|\d{1,3}|'
             r'u\{[0-9a-fA-F]+\})', String.Escape),
        ],

        'sqs': [
            (r"'", String.Single, '#pop'),
            (r"[^\\']+", String.Single),
        ],

        'dqs': [
            (r'"', String.Double, '#pop'),
            (r'[^\\"]+', String.Double),
        ]
    }

    def __init__(self, **options):
        """Read the two lexer-specific options and collect the set of
        builtin function names that should stay highlighted."""
        self.func_name_highlighting = get_bool_opt(
            options, 'func_name_highlighting', True)
        self.disabled_modules = get_list_opt(options, 'disabled_modules', [])

        self._functions = set()
        if self.func_name_highlighting:
            from pygments.lexers._lua_builtins import MODULES
            for mod, func in MODULES.items():
                if mod not in self.disabled_modules:
                    self._functions.update(func)
        RegexLexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        """Demote builtin-looking names that are not in the enabled
        module set back to plain names (splitting dotted names so the
        '.' keeps its Punctuation token)."""
        for index, token, value in \
                RegexLexer.get_tokens_unprocessed(self, text):
            if token is Name.Builtin and value not in self._functions:
                if '.' in value:
                    a, b = value.split('.')
                    yield index, Name, a
                    yield index + len(a), Punctuation, '.'
                    yield index + len(a) + 1, Name, b
                else:
                    yield index, Name, value
                continue
            yield index, token, value
+
def _luau_make_expression(should_pop, _s):
    """Build the shared expression token rules for ``LuauLexer``.

    *_s* is the lexer's skippable-text (whitespace/comment) pattern and is
    interpolated into lookaheads.  When *should_pop* is true the rules pop
    the current state on match; otherwise the same rules are returned as
    plain ``(pattern, token)`` pairs.
    """
    temp_list = [
        (r'0[xX][\da-fA-F_]*', Number.Hex, '#pop'),
        (r'0[bB][\d_]*', Number.Bin, '#pop'),
        (r'\.?\d[\d_]*(?:\.[\d_]*)?(?:[eE][+-]?[\d_]+)?', Number.Float, '#pop'),

        (words((
            'true', 'false', 'nil'
        ), suffix=r'\b'), Keyword.Constant, '#pop'),

        # Long-bracket strings: [=*[ ... ]=*].
        # BUGFIX: the body pattern was '[.\n]*?', which matches only literal
        # dots and newlines, so any other character inside the long string
        # broke the match; use [\w\W] (any character) like the Lua lexer.
        (r'\[(=*)\[[\w\W]*?\]\1\]', String, '#pop'),

        # Member access; a following '(', '{' or quote marks it as a call.
        # BUGFIX: the first pattern contained a literal, never-interpolated
        # '%s', so the lookahead required a literal '%' character;
        # interpolate _s as the sibling rules below do.
        (rf'(\.)([a-zA-Z_]\w*)(?={_s}*[({{"\'])',
         bygroups(Punctuation, Name.Function), '#pop'),
        (r'(\.)([a-zA-Z_]\w*)', bygroups(Punctuation, Name.Variable), '#pop'),

        (rf'[a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*(?={_s}*[({{"\'])', Name.Other, '#pop'),
        (r'[a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*', Name, '#pop'),
    ]
    if should_pop:
        return temp_list
    # Drop the '#pop' action for embedding in states that must not pop.
    return [entry[:2] for entry in temp_list]
+
def _luau_make_expression_special(should_pop):
    """Build the ``LuauLexer`` rules for expression openers — tables,
    parentheses, type annotations and the three string forms.

    When *should_pop* is false, the leading '#pop' is stripped from each
    rule's target states so the rules can live in non-popping states.
    """
    rules = [
        (r'\{', Punctuation, ('#pop', 'closing_brace_base', 'expression')),
        (r'\(', Punctuation, ('#pop', 'closing_parenthesis_base', 'expression')),

        (r'::?', Punctuation, ('#pop', 'type_end', 'type_start')),

        (r"'", String.Single, ('#pop', 'string_single')),
        (r'"', String.Double, ('#pop', 'string_double')),
        (r'`', String.Backtick, ('#pop', 'string_interpolated')),
    ]
    if not should_pop:
        rules = [(pattern, token, targets[1:]) for pattern, token, targets in rules]
    return rules
+
+class LuauLexer(RegexLexer):
+ """
+ For Luau source code.
+
+ Additional options accepted:
+
+ `include_luau_builtins`
+ If given and ``True``, automatically highlight Luau builtins
+ (default: ``True``).
+ `include_roblox_builtins`
+ If given and ``True``, automatically highlight Roblox-specific builtins
+ (default: ``False``).
+ `additional_builtins`
+ If given, must be a list of additional builtins to highlight.
+ `disabled_builtins`
+ If given, must be a list of builtins that will not be highlighted.
+ """
+
+ name = 'Luau'
+ url = 'https://luau-lang.org/'
+ aliases = ['luau']
+ filenames = ['*.luau']
+ version_added = '2.18'
+
+ _comment_multiline = r'(?:--\[(?P=*)\[[\w\W]*?\](?P=level)\])'
+ _comment_single = r'(?:--.*$)'
+ _s = r'(?:{}|{}|{})'.format(_comment_multiline, _comment_single, r'\s+')
+
+ tokens = {
+ 'root': [
+ (r'#!.*', Comment.Hashbang, 'base'),
+ default('base'),
+ ],
+
+ 'ws': [
+ (_comment_multiline, Comment.Multiline),
+ (_comment_single, Comment.Single),
+ (r'\s+', Whitespace),
+ ],
+
+ 'base': [
+ include('ws'),
+
+ *_luau_make_expression_special(False),
+ (r'\.\.\.', Punctuation),
+
+ (rf'type\b(?={_s}+[a-zA-Z_])', Keyword.Reserved, 'type_declaration'),
+ (rf'export\b(?={_s}+[a-zA-Z_])', Keyword.Reserved),
+
+ (r'(?:\.\.|//|[+\-*\/%^<>=])=?', Operator, 'expression'),
+ (r'~=', Operator, 'expression'),
+
+ (words((
+ 'and', 'or', 'not'
+ ), suffix=r'\b'), Operator.Word, 'expression'),
+
+ (words((
+ 'elseif', 'for', 'if', 'in', 'repeat', 'return', 'until',
+ 'while'), suffix=r'\b'), Keyword.Reserved, 'expression'),
+ (r'local\b', Keyword.Declaration, 'expression'),
+
+ (r'function\b', Keyword.Reserved, ('expression', 'func_name')),
+
+ (r'[\])};]+', Punctuation),
+
+ include('expression_static'),
+ *_luau_make_expression(False, _s),
+
+ (r'[\[.,]', Punctuation, 'expression'),
+ ],
+ 'expression_static': [
+ (words((
+ 'break', 'continue', 'do', 'else', 'elseif', 'end', 'for',
+ 'if', 'in', 'repeat', 'return', 'then', 'until', 'while'),
+ suffix=r'\b'), Keyword.Reserved),
+ ],
+ 'expression': [
+ include('ws'),
+
+ (r'if\b', Keyword.Reserved, ('ternary', 'expression')),
+
+ (r'local\b', Keyword.Declaration),
+ *_luau_make_expression_special(True),
+ (r'\.\.\.', Punctuation, '#pop'),
+
+ (r'function\b', Keyword.Reserved, 'func_name'),
+
+ include('expression_static'),
+ *_luau_make_expression(True, _s),
+
+ default('#pop'),
+ ],
+ 'ternary': [
+ include('ws'),
+
+ (r'else\b', Keyword.Reserved, '#pop'),
+ (words((
+ 'then', 'elseif',
+ ), suffix=r'\b'), Operator.Reserved, 'expression'),
+
+ default('#pop'),
+ ],
+
+ 'closing_brace_pop': [
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'closing_parenthesis_pop': [
+ (r'\)', Punctuation, '#pop'),
+ ],
+ 'closing_gt_pop': [
+ (r'>', Punctuation, '#pop'),
+ ],
+
+ 'closing_parenthesis_base': [
+ include('closing_parenthesis_pop'),
+ include('base'),
+ ],
+ 'closing_parenthesis_type': [
+ include('closing_parenthesis_pop'),
+ include('type'),
+ ],
+ 'closing_brace_base': [
+ include('closing_brace_pop'),
+ include('base'),
+ ],
+ 'closing_brace_type': [
+ include('closing_brace_pop'),
+ include('type'),
+ ],
+ 'closing_gt_type': [
+ include('closing_gt_pop'),
+ include('type'),
+ ],
+
+ 'string_escape': [
+ (r'\\z\s*', String.Escape),
+ (r'\\(?:[abfnrtvz\\"\'`\{\n])|[\r\n]{1,2}|x[\da-fA-F]{2}|\d{1,3}|'
+ r'u\{\}[\da-fA-F]*\}', String.Escape),
+ ],
+ 'string_single': [
+ include('string_escape'),
+
+ (r"'", String.Single, "#pop"),
+ (r"[^\\']+", String.Single),
+ ],
+ 'string_double': [
+ include('string_escape'),
+
+ (r'"', String.Double, "#pop"),
+ (r'[^\\"]+', String.Double),
+ ],
+ 'string_interpolated': [
+ include('string_escape'),
+
+ (r'\{', Punctuation, ('closing_brace_base', 'expression')),
+
+ (r'`', String.Backtick, "#pop"),
+ (r'[^\\`\{]+', String.Backtick),
+ ],
+
+ 'func_name': [
+ include('ws'),
+
+ (r'[.:]', Punctuation),
+ (rf'[a-zA-Z_]\w*(?={_s}*[.:])', Name.Class),
+ (r'[a-zA-Z_]\w*', Name.Function),
+
+ (r'<', Punctuation, 'closing_gt_type'),
+
+ (r'\(', Punctuation, '#pop'),
+ ],
+
+ 'type': [
+ include('ws'),
+
+ (r'\(', Punctuation, 'closing_parenthesis_type'),
+ (r'\{', Punctuation, 'closing_brace_type'),
+ (r'<', Punctuation, 'closing_gt_type'),
+
+ (r"'", String.Single, 'string_single'),
+ (r'"', String.Double, 'string_double'),
+
+ (r'[|&\.,\[\]:=]+', Punctuation),
+ (r'->', Punctuation),
+
+ (r'typeof\(', Name.Builtin, ('closing_parenthesis_base',
+ 'expression')),
+ (r'[a-zA-Z_]\w*', Name.Class),
+ ],
+ 'type_start': [
+ include('ws'),
+
+ (r'\(', Punctuation, ('#pop', 'closing_parenthesis_type')),
+ (r'\{', Punctuation, ('#pop', 'closing_brace_type')),
+ (r'<', Punctuation, ('#pop', 'closing_gt_type')),
+
+ (r"'", String.Single, ('#pop', 'string_single')),
+ (r'"', String.Double, ('#pop', 'string_double')),
+
+ (r'typeof\(', Name.Builtin, ('#pop', 'closing_parenthesis_base',
+ 'expression')),
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ ],
+ 'type_end': [
+ include('ws'),
+
+ (r'[|&\.]', Punctuation, 'type_start'),
+ (r'->', Punctuation, 'type_start'),
+
+ (r'<', Punctuation, 'closing_gt_type'),
+
+ default('#pop'),
+ ],
+ 'type_declaration': [
+ include('ws'),
+
+ (r'[a-zA-Z_]\w*', Name.Class),
+ (r'<', Punctuation, 'closing_gt_type'),
+
+ (r'=', Punctuation, ('#pop', 'type_end', 'type_start')),
+ ],
+ }
+
+ def __init__(self, **options):
+ self.include_luau_builtins = get_bool_opt(
+ options, 'include_luau_builtins', True)
+ self.include_roblox_builtins = get_bool_opt(
+ options, 'include_roblox_builtins', False)
+ self.additional_builtins = get_list_opt(options, 'additional_builtins', [])
+ self.disabled_builtins = get_list_opt(options, 'disabled_builtins', [])
+
+ self._builtins = set(self.additional_builtins)
+ if self.include_luau_builtins:
+ from pygments.lexers._luau_builtins import LUAU_BUILTINS
+ self._builtins.update(LUAU_BUILTINS)
+ if self.include_roblox_builtins:
+ from pygments.lexers._luau_builtins import ROBLOX_BUILTINS
+ self._builtins.update(ROBLOX_BUILTINS)
+ if self.additional_builtins:
+ self._builtins.update(self.additional_builtins)
+ self._builtins.difference_update(self.disabled_builtins)
+
+ RegexLexer.__init__(self, **options)
+
    def get_tokens_unprocessed(self, text):
        """Re-tag dotted names against the builtin set.

        For each ``Name``/``Name.Other`` token, the longest leading
        dot-separated prefix present in ``self._builtins`` is emitted as
        ``Name.Builtin``.  Any remaining components are emitted as
        ``Name.Variable``, except that the final component of a
        ``Name.Other`` token is emitted as ``Name.Function``; components
        are separated by ``Punctuation`` dot tokens at recomputed offsets.
        """
        for index, token, value in \
                RegexLexer.get_tokens_unprocessed(self, text):
            if token is Name or token is Name.Other:
                split_value = value.split('.')
                # Components *after* the matched builtin prefix, in order.
                complete_value = []
                new_index = index
                # Try prefixes from longest to shortest; first hit wins.
                for position in range(len(split_value), 0, -1):
                    potential_string = '.'.join(split_value[:position])
                    if potential_string in self._builtins:
                        yield index, Name.Builtin, potential_string
                        new_index += len(potential_string)

                        if complete_value:
                            # Dot separating the builtin prefix from the
                            # remaining components.
                            yield new_index, Punctuation, '.'
                            new_index += 1
                        break
                    # Not a builtin at this length: remember the trailing
                    # component and retry with a shorter prefix.
                    complete_value.insert(0, split_value[position - 1])

                for position, substring in enumerate(complete_value):
                    if position + 1 == len(complete_value):
                        # Last component: plain Name stays a variable,
                        # Name.Other becomes a function name.
                        if token is Name:
                            yield new_index, Name.Variable, substring
                            continue
                        yield new_index, Name.Function, substring
                        continue
                    yield new_index, Name.Variable, substring
                    new_index += len(substring)
                    yield new_index, Punctuation, '.'
                    new_index += 1

                continue
            # Everything else passes through untouched.
            yield index, token, value
+
class MoonScriptLexer(LuaLexer):
    """
    For MoonScript source code.
    """

    name = 'MoonScript'
    url = 'http://moonscript.org'
    aliases = ['moonscript', 'moon']
    filenames = ['*.moon']
    mimetypes = ['text/x-moonscript', 'application/x-moonscript']
    version_added = '1.5'

    tokens = {
        'root': [
            # Shebang line; everything else is handled by 'base'.
            (r'#!(.*?)$', Comment.Preproc),
            default('base'),
        ],
        'base': [
            ('--.*$', Comment.Single),
            # Numbers: floats (optional exponent), exponent-only floats,
            # hex, then plain integers.
            (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
            (r'(?i)\d+e[+-]?\d+', Number.Float),
            (r'(?i)0x[0-9a-f]*', Number.Hex),
            (r'\d+', Number.Integer),
            (r'\n', Whitespace),
            (r'[^\S\n]+', Text),
            # Lua-style long strings: [[...]], [=[...]=], etc.
            (r'(?s)\[(=*)\[.*?\]\1\]', String),
            # Function arrows (thin and fat).
            (r'(->|=>)', Name.Function),
            (r':[a-zA-Z_]\w*', Name.Variable),
            (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator),
            (r'[;,]', Punctuation),
            (r'[\[\]{}()]', Keyword.Type),
            # Table keys: identifier immediately followed by a colon.
            (r'[a-zA-Z_]\w*:', Name.Variable),
            (words((
                'class', 'extends', 'if', 'then', 'super', 'do', 'with',
                'import', 'export', 'while', 'elseif', 'return', 'for', 'in',
                'from', 'when', 'using', 'else', 'and', 'or', 'not', 'switch',
                'break'), suffix=r'\b'),
             Keyword),
            (r'(true|false|nil)\b', Keyword.Constant),
            (r'(and|or|not)\b', Operator.Word),
            (r'(self)\b', Name.Builtin.Pseudo),
            # @instance and @@class variables.
            (r'@@?([a-zA-Z_]\w*)?', Name.Variable.Class),
            (r'[A-Z]\w*', Name.Class),  # proper name
            (words(all_lua_builtins(), suffix=r"\b"), Name.Builtin),
            (r'[A-Za-z_]\w*', Name),
            ("'", String.Single, combined('stringescape', 'sqs')),
            ('"', String.Double, combined('stringescape', 'dqs'))
        ],
        'stringescape': [
            (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
        ],
        'sqs': [
            ("'", String.Single, '#pop'),
            ("[^']+", String)
        ],
        'dqs': [
            ('"', String.Double, '#pop'),
            ('[^"]+', String)
        ]
    }

    def get_tokens_unprocessed(self, text):
        # set . as Operator instead of Punctuation
        for index, token, value in LuaLexer.get_tokens_unprocessed(self, text):
            if token == Punctuation and value == ".":
                token = Operator
            yield index, token, value
+
+
class ChaiscriptLexer(RegexLexer):
    """
    For ChaiScript source code.
    """

    name = 'ChaiScript'
    url = 'http://chaiscript.com/'
    aliases = ['chaiscript', 'chai']
    filenames = ['*.chai']
    mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
    version_added = '2.0'

    flags = re.DOTALL | re.MULTILINE

    tokens = {
        'commentsandwhitespace': [
            (r'\s+', Text),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'^\#.*?\n', Comment.Single)
        ],
        # After an operator or opening punctuation, a '/' begins a regex
        # literal rather than a division operator (JavaScript-style).
        'slashstartsregex': [
            include('commentsandwhitespace'),
            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
             r'([gim]+\b|\B)', String.Regex, '#pop'),
            (r'(?=/)', Text, ('#pop', 'badregex')),
            default('#pop')
        ],
        'badregex': [
            (r'\n', Text, '#pop')
        ],
        'root': [
            include('commentsandwhitespace'),
            (r'\n', Text),
            (r'[^\S\n]+', Text),
            # Fix: the two fragments previously concatenated into
            # r'...|\.\.(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', which required
            # '..' to be followed by another operator and prevented '<<',
            # '==', '!=' etc. from ever matching this rule.  The added '|'
            # after \.\. restores the intended alternation.
            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.|'
             r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
            (r'[})\].]', Punctuation),
            (r'[=+\-*/]', Operator),
            (r'(for|in|while|do|break|return|continue|if|else|'
             r'throw|try|catch'
             r')\b', Keyword, 'slashstartsregex'),
            (r'(var)\b', Keyword.Declaration, 'slashstartsregex'),
            (r'(attr|def|fun)\b', Keyword.Reserved),
            (r'(true|false)\b', Keyword.Constant),
            (r'(eval|throw)\b', Name.Builtin),
            # Back-quoted operator-function references, e.g. `+`.
            (r'`\S+`', Name.Builtin),
            (r'[$a-zA-Z_]\w*', Name.Other),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-fA-F]+', Number.Hex),
            (r'[0-9]+', Number.Integer),
            (r'"', String.Double, 'dqstring'),
            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
        ],
        # Double-quoted strings support ${...} interpolation.
        'dqstring': [
            (r'\$\{[^"}]+?\}', String.Interpol),
            (r'\$', String.Double),
            (r'\\\\', String.Double),
            (r'\\"', String.Double),
            (r'[^\\"$]+', String.Double),
            (r'"', String.Double, '#pop'),
        ],
    }
+
+
class LSLLexer(RegexLexer):
    """
    For Second Life's Linden Scripting Language source code.
    """

    name = 'LSL'
    aliases = ['lsl']
    filenames = ['*.lsl']
    mimetypes = ['text/x-lsl']
    url = 'https://wiki.secondlife.com/wiki/Linden_Scripting_Language'
    version_added = '2.0'

    flags = re.MULTILINE

    # Word-bounded regexes enumerating the fixed LSL vocabulary, grouped by
    # the token type they are mapped to in 'root' below.
    lsl_keywords = r'\b(?:do|else|for|if|jump|return|while)\b'
    lsl_types = r'\b(?:float|integer|key|list|quaternion|rotation|string|vector)\b'
    lsl_states = r'\b(?:(?:state)\s+\w+|default)\b'
    # Event handler names (state_entry, touch_start, timer, ...).
    lsl_events = r'\b(?:state_(?:entry|exit)|touch(?:_(?:start|end))?|(?:land_)?collision(?:_(?:start|end))?|timer|listen|(?:no_)?sensor|control|(?:not_)?at_(?:rot_)?target|money|email|run_time_permissions|changed|attach|dataserver|moving_(?:start|end)|link_message|(?:on|object)_rez|remote_data|http_re(?:sponse|quest)|path_update|transaction_result)\b'
    # The entire ll* builtin function library, factored into one regex.
    lsl_functions_builtin = r'\b(?:ll(?:ReturnObjectsBy(?:ID|Owner)|Json(?:2List|[GS]etValue|ValueType)|Sin|Cos|Tan|Atan2|Sqrt|Pow|Abs|Fabs|Frand|Floor|Ceil|Round|Vec(?:Mag|Norm|Dist)|Rot(?:Between|2(?:Euler|Fwd|Left|Up))|(?:Euler|Axes)2Rot|Whisper|(?:Region|Owner)?Say|Shout|Listen(?:Control|Remove)?|Sensor(?:Repeat|Remove)?|Detected(?:Name|Key|Owner|Type|Pos|Vel|Grab|Rot|Group|LinkNumber)|Die|Ground|Wind|(?:[GS]et)(?:AnimationOverride|MemoryLimit|PrimMediaParams|ParcelMusicURL|Object(?:Desc|Name)|PhysicsMaterial|Status|Scale|Color|Alpha|Texture|Pos|Rot|Force|Torque)|ResetAnimationOverride|(?:Scale|Offset|Rotate)Texture|(?:Rot)?Target(?:Remove)?|(?:Stop)?MoveToTarget|Apply(?:Rotational)?Impulse|Set(?:KeyframedMotion|ContentType|RegionPos|(?:Angular)?Velocity|Buoyancy|HoverHeight|ForceAndTorque|TimerEvent|ScriptState|Damage|TextureAnim|Sound(?:Queueing|Radius)|Vehicle(?:Type|(?:Float|Vector|Rotation)Param)|(?:Touch|Sit)?Text|Camera(?:Eye|At)Offset|PrimitiveParams|ClickAction|Link(?:Alpha|Color|PrimitiveParams(?:Fast)?|Texture(?:Anim)?|Camera|Media)|RemoteScriptAccessPin|PayPrice|LocalRot)|ScaleByFactor|Get(?:(?:Max|Min)ScaleFactor|ClosestNavPoint|StaticPath|SimStats|Env|PrimitiveParams|Link(?:PrimitiveParams|Number(?:OfSides)?|Key|Name|Media)|HTTPHeader|FreeURLs|Object(?:Details|PermMask|PrimCount)|Parcel(?:MaxPrims|Details|Prim(?:Count|Owners))|Attached|(?:SPMax|Free|Used)Memory|Region(?:Name|TimeDilation|FPS|Corner|AgentCount)|Root(?:Position|Rotation)|UnixTime|(?:Parcel|Region)Flags|(?:Wall|GMT)clock|SimulatorHostname|BoundingBox|GeometricCenter|Creator|NumberOf(?:Prims|NotecardLines|Sides)|Animation(?:List)?|(?:Camera|Local)(?:Pos|Rot)|Vel|Accel|Omega|Time(?:stamp|OfDay)|(?:Object|CenterOf)?Mass|MassMKS|Energy|Owner|(?:Owner)?Key|SunDirection|Texture(?:Offset|Scale|Rot)|Inventory(?:Number|Name|Key|Type|Creator|PermMask)|Permissions(?:Key)?|StartParameter|List(?:Length|EntryType)|Date|Agent(?:Size|Info|Language|List)|LandOwnerAt|NotecardLine|Script(?:Name|State))|(?:Get|Reset|GetAndReset)Time|PlaySound(?:Slave)?|LoopSound(?:Master|Slave)?|(?:Trigger|Stop|Preload)Sound|(?:(?:Get|Delete)Sub|Insert)String|To(?:Upper|Lower)|Give(?:InventoryList|Money)|RezObject|(?:Stop)?LookAt|Sleep|CollisionFilter|(?:Take|Release)Controls|DetachFromAvatar|AttachToAvatar(?:Temp)?|InstantMessage|(?:GetNext)?Email|StopHover|MinEventDelay|RotLookAt|String(?:Length|Trim)|(?:Start|Stop)Animation|TargetOmega|RequestPermissions|(?:Create|Break)Link|BreakAllLinks|(?:Give|Remove)Inventory|Water|PassTouches|Request(?:Agent|Inventory)Data|TeleportAgent(?:Home|GlobalCoords)?|ModifyLand|CollisionSound|ResetScript|MessageLinked|PushObject|PassCollisions|AxisAngle2Rot|Rot2(?:Axis|Angle)|A(?:cos|sin)|AngleBetween|AllowInventoryDrop|SubStringIndex|List2(?:CSV|Integer|Json|Float|String|Key|Vector|Rot|List(?:Strided)?)|DeleteSubList|List(?:Statistics|Sort|Randomize|(?:Insert|Find|Replace)List)|EdgeOfWorld|AdjustSoundVolume|Key2Name|TriggerSoundLimited|EjectFromLand|(?:CSV|ParseString)2List|OverMyLand|SameGroup|UnSit|Ground(?:Slope|Normal|Contour)|GroundRepel|(?:Set|Remove)VehicleFlags|(?:AvatarOn)?(?:Link)?SitTarget|Script(?:Danger|Profiler)|Dialog|VolumeDetect|ResetOtherScript|RemoteLoadScriptPin|(?:Open|Close)RemoteDataChannel|SendRemoteData|RemoteDataReply|(?:Integer|String)ToBase64|XorBase64|Log(?:10)?|Base64To(?:String|Integer)|ParseStringKeepNulls|RezAtRoot|RequestSimulatorData|ForceMouselook|(?:Load|Release|(?:E|Une)scape)URL|ParcelMedia(?:CommandList|Query)|ModPow|MapDestination|(?:RemoveFrom|AddTo|Reset)Land(?:Pass|Ban)List|(?:Set|Clear)CameraParams|HTTP(?:Request|Response)|TextBox|DetectedTouch(?:UV|Face|Pos|(?:N|Bin)ormal|ST)|(?:MD5|SHA1|DumpList2)String|Request(?:Secure)?URL|Clear(?:Prim|Link)Media|(?:Link)?ParticleSystem|(?:Get|Request)(?:Username|DisplayName)|RegionSayTo|CastRay|GenerateKey|TransferLindenDollars|ManageEstateAccess|(?:Create|Delete)Character|ExecCharacterCmd|Evade|FleeFrom|NavigateTo|PatrolPoints|Pursue|UpdateCharacter|WanderWithin))\b'
    # Builtin constants, grouped by the LSL type of the constant.
    lsl_constants_float = r'\b(?:DEG_TO_RAD|PI(?:_BY_TWO)?|RAD_TO_DEG|SQRT2|TWO_PI)\b'
    lsl_constants_integer = r'\b(?:JSON_APPEND|STATUS_(?:PHYSICS|ROTATE_[XYZ]|PHANTOM|SANDBOX|BLOCK_GRAB(?:_OBJECT)?|(?:DIE|RETURN)_AT_EDGE|CAST_SHADOWS|OK|MALFORMED_PARAMS|TYPE_MISMATCH|BOUNDS_ERROR|NOT_(?:FOUND|SUPPORTED)|INTERNAL_ERROR|WHITELIST_FAILED)|AGENT(?:_(?:BY_(?:LEGACY_|USER)NAME|FLYING|ATTACHMENTS|SCRIPTED|MOUSELOOK|SITTING|ON_OBJECT|AWAY|WALKING|IN_AIR|TYPING|CROUCHING|BUSY|ALWAYS_RUN|AUTOPILOT|LIST_(?:PARCEL(?:_OWNER)?|REGION)))?|CAMERA_(?:PITCH|DISTANCE|BEHINDNESS_(?:ANGLE|LAG)|(?:FOCUS|POSITION)(?:_(?:THRESHOLD|LOCKED|LAG))?|FOCUS_OFFSET|ACTIVE)|ANIM_ON|LOOP|REVERSE|PING_PONG|SMOOTH|ROTATE|SCALE|ALL_SIDES|LINK_(?:ROOT|SET|ALL_(?:OTHERS|CHILDREN)|THIS)|ACTIVE|PASSIVE|SCRIPTED|CONTROL_(?:FWD|BACK|(?:ROT_)?(?:LEFT|RIGHT)|UP|DOWN|(?:ML_)?LBUTTON)|PERMISSION_(?:RETURN_OBJECTS|DEBIT|OVERRIDE_ANIMATIONS|SILENT_ESTATE_MANAGEMENT|TAKE_CONTROLS|TRIGGER_ANIMATION|ATTACH|CHANGE_LINKS|(?:CONTROL|TRACK)_CAMERA|TELEPORT)|INVENTORY_(?:TEXTURE|SOUND|OBJECT|SCRIPT|LANDMARK|CLOTHING|NOTECARD|BODYPART|ANIMATION|GESTURE|ALL|NONE)|CHANGED_(?:INVENTORY|COLOR|SHAPE|SCALE|TEXTURE|LINK|ALLOWED_DROP|OWNER|REGION(?:_START)?|TELEPORT|MEDIA)|OBJECT_(?:(?:PHYSICS|SERVER|STREAMING)_COST|UNKNOWN_DETAIL|CHARACTER_TIME|PHANTOM|PHYSICS|TEMP_ON_REZ|NAME|DESC|POS|PRIM_EQUIVALENCE|RETURN_(?:PARCEL(?:_OWNER)?|REGION)|ROO?T|VELOCITY|OWNER|GROUP|CREATOR|ATTACHED_POINT|RENDER_WEIGHT|PATHFINDING_TYPE|(?:RUNNING|TOTAL)_SCRIPT_COUNT|SCRIPT_(?:MEMORY|TIME))|TYPE_(?:INTEGER|FLOAT|STRING|KEY|VECTOR|ROTATION|INVALID)|(?:DEBUG|PUBLIC)_CHANNEL|ATTACH_(?:AVATAR_CENTER|CHEST|HEAD|BACK|PELVIS|MOUTH|CHIN|NECK|NOSE|BELLY|[LR](?:SHOULDER|HAND|FOOT|EAR|EYE|[UL](?:ARM|LEG)|HIP)|(?:LEFT|RIGHT)_PEC|HUD_(?:CENTER_[12]|TOP_(?:RIGHT|CENTER|LEFT)|BOTTOM(?:_(?:RIGHT|LEFT))?))|LAND_(?:LEVEL|RAISE|LOWER|SMOOTH|NOISE|REVERT)|DATA_(?:ONLINE|NAME|BORN|SIM_(?:POS|STATUS|RATING)|PAYINFO)|PAYMENT_INFO_(?:ON_FILE|USED)|REMOTE_DATA_(?:CHANNEL|REQUEST|REPLY)|PSYS_(?:PART_(?:BF_(?:ZERO|ONE(?:_MINUS_(?:DEST_COLOR|SOURCE_(ALPHA|COLOR)))?|DEST_COLOR|SOURCE_(ALPHA|COLOR))|BLEND_FUNC_(DEST|SOURCE)|FLAGS|(?:START|END)_(?:COLOR|ALPHA|SCALE|GLOW)|MAX_AGE|(?:RIBBON|WIND|INTERP_(?:COLOR|SCALE)|BOUNCE|FOLLOW_(?:SRC|VELOCITY)|TARGET_(?:POS|LINEAR)|EMISSIVE)_MASK)|SRC_(?:MAX_AGE|PATTERN|ANGLE_(?:BEGIN|END)|BURST_(?:RATE|PART_COUNT|RADIUS|SPEED_(?:MIN|MAX))|ACCEL|TEXTURE|TARGET_KEY|OMEGA|PATTERN_(?:DROP|EXPLODE|ANGLE(?:_CONE(?:_EMPTY)?)?)))|VEHICLE_(?:REFERENCE_FRAME|TYPE_(?:NONE|SLED|CAR|BOAT|AIRPLANE|BALLOON)|(?:LINEAR|ANGULAR)_(?:FRICTION_TIMESCALE|MOTOR_DIRECTION)|LINEAR_MOTOR_OFFSET|HOVER_(?:HEIGHT|EFFICIENCY|TIMESCALE)|BUOYANCY|(?:LINEAR|ANGULAR)_(?:DEFLECTION_(?:EFFICIENCY|TIMESCALE)|MOTOR_(?:DECAY_)?TIMESCALE)|VERTICAL_ATTRACTION_(?:EFFICIENCY|TIMESCALE)|BANKING_(?:EFFICIENCY|MIX|TIMESCALE)|FLAG_(?:NO_DEFLECTION_UP|LIMIT_(?:ROLL_ONLY|MOTOR_UP)|HOVER_(?:(?:WATER|TERRAIN|UP)_ONLY|GLOBAL_HEIGHT)|MOUSELOOK_(?:STEER|BANK)|CAMERA_DECOUPLED))|PRIM_(?:TYPE(?:_(?:BOX|CYLINDER|PRISM|SPHERE|TORUS|TUBE|RING|SCULPT))?|HOLE_(?:DEFAULT|CIRCLE|SQUARE|TRIANGLE)|MATERIAL(?:_(?:STONE|METAL|GLASS|WOOD|FLESH|PLASTIC|RUBBER))?|SHINY_(?:NONE|LOW|MEDIUM|HIGH)|BUMP_(?:NONE|BRIGHT|DARK|WOOD|BARK|BRICKS|CHECKER|CONCRETE|TILE|STONE|DISKS|GRAVEL|BLOBS|SIDING|LARGETILE|STUCCO|SUCTION|WEAVE)|TEXGEN_(?:DEFAULT|PLANAR)|SCULPT_(?:TYPE_(?:SPHERE|TORUS|PLANE|CYLINDER|MASK)|FLAG_(?:MIRROR|INVERT))|PHYSICS(?:_(?:SHAPE_(?:CONVEX|NONE|PRIM|TYPE)))?|(?:POS|ROT)_LOCAL|SLICE|TEXT|FLEXIBLE|POINT_LIGHT|TEMP_ON_REZ|PHANTOM|POSITION|SIZE|ROTATION|TEXTURE|NAME|OMEGA|DESC|LINK_TARGET|COLOR|BUMP_SHINY|FULLBRIGHT|TEXGEN|GLOW|MEDIA_(?:ALT_IMAGE_ENABLE|CONTROLS|(?:CURRENT|HOME)_URL|AUTO_(?:LOOP|PLAY|SCALE|ZOOM)|FIRST_CLICK_INTERACT|(?:WIDTH|HEIGHT)_PIXELS|WHITELIST(?:_ENABLE)?|PERMS_(?:INTERACT|CONTROL)|PARAM_MAX|CONTROLS_(?:STANDARD|MINI)|PERM_(?:NONE|OWNER|GROUP|ANYONE)|MAX_(?:URL_LENGTH|WHITELIST_(?:SIZE|COUNT)|(?:WIDTH|HEIGHT)_PIXELS)))|MASK_(?:BASE|OWNER|GROUP|EVERYONE|NEXT)|PERM_(?:TRANSFER|MODIFY|COPY|MOVE|ALL)|PARCEL_(?:MEDIA_COMMAND_(?:STOP|PAUSE|PLAY|LOOP|TEXTURE|URL|TIME|AGENT|UNLOAD|AUTO_ALIGN|TYPE|SIZE|DESC|LOOP_SET)|FLAG_(?:ALLOW_(?:FLY|(?:GROUP_)?SCRIPTS|LANDMARK|TERRAFORM|DAMAGE|CREATE_(?:GROUP_)?OBJECTS)|USE_(?:ACCESS_(?:GROUP|LIST)|BAN_LIST|LAND_PASS_LIST)|LOCAL_SOUND_ONLY|RESTRICT_PUSHOBJECT|ALLOW_(?:GROUP|ALL)_OBJECT_ENTRY)|COUNT_(?:TOTAL|OWNER|GROUP|OTHER|SELECTED|TEMP)|DETAILS_(?:NAME|DESC|OWNER|GROUP|AREA|ID|SEE_AVATARS))|LIST_STAT_(?:MAX|MIN|MEAN|MEDIAN|STD_DEV|SUM(?:_SQUARES)?|NUM_COUNT|GEOMETRIC_MEAN|RANGE)|PAY_(?:HIDE|DEFAULT)|REGION_FLAG_(?:ALLOW_DAMAGE|FIXED_SUN|BLOCK_TERRAFORM|SANDBOX|DISABLE_(?:COLLISIONS|PHYSICS)|BLOCK_FLY|ALLOW_DIRECT_TELEPORT|RESTRICT_PUSHOBJECT)|HTTP_(?:METHOD|MIMETYPE|BODY_(?:MAXLENGTH|TRUNCATED)|CUSTOM_HEADER|PRAGMA_NO_CACHE|VERBOSE_THROTTLE|VERIFY_CERT)|STRING_(?:TRIM(?:_(?:HEAD|TAIL))?)|CLICK_ACTION_(?:NONE|TOUCH|SIT|BUY|PAY|OPEN(?:_MEDIA)?|PLAY|ZOOM)|TOUCH_INVALID_FACE|PROFILE_(?:NONE|SCRIPT_MEMORY)|RC_(?:DATA_FLAGS|DETECT_PHANTOM|GET_(?:LINK_NUM|NORMAL|ROOT_KEY)|MAX_HITS|REJECT_(?:TYPES|AGENTS|(?:NON)?PHYSICAL|LAND))|RCERR_(?:CAST_TIME_EXCEEDED|SIM_PERF_LOW|UNKNOWN)|ESTATE_ACCESS_(?:ALLOWED_(?:AGENT|GROUP)_(?:ADD|REMOVE)|BANNED_AGENT_(?:ADD|REMOVE))|DENSITY|FRICTION|RESTITUTION|GRAVITY_MULTIPLIER|KFM_(?:COMMAND|CMD_(?:PLAY|STOP|PAUSE|SET_MODE)|MODE|FORWARD|LOOP|PING_PONG|REVERSE|DATA|ROTATION|TRANSLATION)|ERR_(?:GENERIC|PARCEL_PERMISSIONS|MALFORMED_PARAMS|RUNTIME_PERMISSIONS|THROTTLED)|CHARACTER_(?:CMD_(?:(?:SMOOTH_)?STOP|JUMP)|DESIRED_(?:TURN_)?SPEED|RADIUS|STAY_WITHIN_PARCEL|LENGTH|ORIENTATION|ACCOUNT_FOR_SKIPPED_FRAMES|AVOIDANCE_MODE|TYPE(?:_(?:[A-D]|NONE))?|MAX_(?:DECEL|TURN_RADIUS|(?:ACCEL|SPEED)))|PURSUIT_(?:OFFSET|FUZZ_FACTOR|GOAL_TOLERANCE|INTERCEPT)|REQUIRE_LINE_OF_SIGHT|FORCE_DIRECT_PATH|VERTICAL|HORIZONTAL|AVOID_(?:CHARACTERS|DYNAMIC_OBSTACLES|NONE)|PU_(?:EVADE_(?:HIDDEN|SPOTTED)|FAILURE_(?:DYNAMIC_PATHFINDING_DISABLED|INVALID_(?:GOAL|START)|NO_(?:NAVMESH|VALID_DESTINATION)|OTHER|TARGET_GONE|(?:PARCEL_)?UNREACHABLE)|(?:GOAL|SLOWDOWN_DISTANCE)_REACHED)|TRAVERSAL_TYPE(?:_(?:FAST|NONE|SLOW))?|CONTENT_TYPE_(?:ATOM|FORM|HTML|JSON|LLSD|RSS|TEXT|XHTML|XML)|GCNP_(?:RADIUS|STATIC)|(?:PATROL|WANDER)_PAUSE_AT_WAYPOINTS|OPT_(?:AVATAR|CHARACTER|EXCLUSION_VOLUME|LEGACY_LINKSET|MATERIAL_VOLUME|OTHER|STATIC_OBSTACLE|WALKABLE)|SIM_STAT_PCT_CHARS_STEPPED)\b'
    lsl_constants_integer_boolean = r'\b(?:FALSE|TRUE)\b'
    lsl_constants_rotation = r'\b(?:ZERO_ROTATION)\b'
    lsl_constants_string = r'\b(?:EOF|JSON_(?:ARRAY|DELETE|FALSE|INVALID|NULL|NUMBER|OBJECT|STRING|TRUE)|NULL_KEY|TEXTURE_(?:BLANK|DEFAULT|MEDIA|PLYWOOD|TRANSPARENT)|URL_REQUEST_(?:GRANTED|DENIED))\b'
    lsl_constants_vector = r'\b(?:TOUCH_INVALID_(?:TEXCOORD|VECTOR)|ZERO_VECTOR)\b'
    # Names that exist but are broken/deprecated/illegal/unimplemented in
    # LSL; these are highlighted as errors in 'root'.
    lsl_invalid_broken = r'\b(?:LAND_(?:LARGE|MEDIUM|SMALL)_BRUSH)\b'
    lsl_invalid_deprecated = r'\b(?:ATTACH_[LR]PEC|DATA_RATING|OBJECT_ATTACHMENT_(?:GEOMETRY_BYTES|SURFACE_AREA)|PRIM_(?:CAST_SHADOWS|MATERIAL_LIGHT|TYPE_LEGACY)|PSYS_SRC_(?:INNER|OUTER)ANGLE|VEHICLE_FLAG_NO_FLY_UP|ll(?:Cloud|Make(?:Explosion|Fountain|Smoke|Fire)|RemoteDataSetRegion|Sound(?:Preload)?|XorBase64Strings(?:Correct)?))\b'
    lsl_invalid_illegal = r'\b(?:event)\b'
    lsl_invalid_unimplemented = r'\b(?:CHARACTER_(?:MAX_ANGULAR_(?:ACCEL|SPEED)|TURN_SPEED_MULTIPLIER)|PERMISSION_(?:CHANGE_(?:JOINTS|PERMISSIONS)|RELEASE_OWNERSHIP|REMAP_CONTROLS)|PRIM_PHYSICS_MATERIAL|PSYS_SRC_OBJ_REL_MASK|ll(?:CollisionSprite|(?:Stop)?PointAt|(?:(?:Refresh|Set)Prim)URL|(?:Take|Release)Camera|RemoteLoadScript))\b'
    lsl_reserved_godmode = r'\b(?:ll(?:GodLikeRezObject|Set(?:Inventory|Object)PermMask))\b'
    lsl_reserved_log = r'\b(?:print)\b'
    lsl_operators = r'\+\+|\-\-|<<|>>|&&?|\|\|?|\^|~|[!%<>=*+\-/]=?'

    tokens = {
        'root':
        [
            (r'//.*?\n', Comment.Single),
            (r'/\*', Comment.Multiline, 'comment'),
            (r'"', String.Double, 'string'),
            # Vocabulary regexes above, in priority order: keywords and
            # types before events/functions, constants before the generic
            # identifier rule below.
            (lsl_keywords, Keyword),
            (lsl_types, Keyword.Type),
            (lsl_states, Name.Class),
            (lsl_events, Name.Builtin),
            (lsl_functions_builtin, Name.Function),
            (lsl_constants_float, Keyword.Constant),
            (lsl_constants_integer, Keyword.Constant),
            (lsl_constants_integer_boolean, Keyword.Constant),
            (lsl_constants_rotation, Keyword.Constant),
            (lsl_constants_string, Keyword.Constant),
            (lsl_constants_vector, Keyword.Constant),
            (lsl_invalid_broken, Error),
            (lsl_invalid_deprecated, Error),
            (lsl_invalid_illegal, Error),
            (lsl_invalid_unimplemented, Error),
            (lsl_reserved_godmode, Keyword.Reserved),
            (lsl_reserved_log, Keyword.Reserved),
            (r'\b([a-zA-Z_]\w*)\b', Name.Variable),
            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d*', Number.Float),
            (r'(\d+\.\d*|\.\d+)', Number.Float),
            (r'0[xX][0-9a-fA-F]+', Number.Hex),
            (r'\d+', Number.Integer),
            (lsl_operators, Operator),
            # ':' / ':=' are not valid LSL; flag them as errors.
            (r':=?', Error),
            (r'[,;{}()\[\]]', Punctuation),
            (r'\n+', Whitespace),
            (r'\s+', Whitespace)
        ],
        # Nestable /* ... */ comments via #push/#pop.
        'comment':
        [
            (r'[^*/]+', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ],
        'string':
        [
            # Only \n \t \" \\ are valid escapes; any other backslash
            # sequence is flagged as an error.
            (r'\\([nt"\\])', String.Escape),
            (r'"', String.Double, '#pop'),
            (r'\\.', Error),
            (r'[^"\\]+', String.Double),
        ]
    }
+
+
+class AppleScriptLexer(RegexLexer):
+ """
+ For AppleScript source code,
+ including `AppleScript Studio
+ `_.
+ Contributed by Andreas Amann .
+ """
+
+ name = 'AppleScript'
+ url = 'https://developer.apple.com/library/archive/documentation/AppleScript/Conceptual/AppleScriptLangGuide/introduction/ASLR_intro.html'
+ aliases = ['applescript']
+ filenames = ['*.applescript']
+ version_added = '1.0'
+
+ flags = re.MULTILINE | re.DOTALL
+
+ Identifiers = r'[a-zA-Z]\w*'
+
+ # XXX: use words() for all of these
+ Literals = ('AppleScript', 'current application', 'false', 'linefeed',
+ 'missing value', 'pi', 'quote', 'result', 'return', 'space',
+ 'tab', 'text item delimiters', 'true', 'version')
+ Classes = ('alias ', 'application ', 'boolean ', 'class ', 'constant ',
+ 'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ',
+ 'real ', 'record ', 'reference ', 'RGB color ', 'script ',
+ 'text ', 'unit types', '(?:Unicode )?text', 'string')
+ BuiltIn = ('attachment', 'attribute run', 'character', 'day', 'month',
+ 'paragraph', 'word', 'year')
+ HandlerParams = ('about', 'above', 'against', 'apart from', 'around',
+ 'aside from', 'at', 'below', 'beneath', 'beside',
+ 'between', 'for', 'given', 'instead of', 'on', 'onto',
+ 'out of', 'over', 'since')
+ Commands = ('ASCII (character|number)', 'activate', 'beep', 'choose URL',
+ 'choose application', 'choose color', 'choose file( name)?',
+ 'choose folder', 'choose from list',
+ 'choose remote application', 'clipboard info',
+ 'close( access)?', 'copy', 'count', 'current date', 'delay',
+ 'delete', 'display (alert|dialog)', 'do shell script',
+ 'duplicate', 'exists', 'get eof', 'get volume settings',
+ 'info for', 'launch', 'list (disks|folder)', 'load script',
+ 'log', 'make', 'mount volume', 'new', 'offset',
+ 'open( (for access|location))?', 'path to', 'print', 'quit',
+ 'random number', 'read', 'round', 'run( script)?',
+ 'say', 'scripting components',
+ 'set (eof|the clipboard to|volume)', 'store script',
+ 'summarize', 'system attribute', 'system info',
+ 'the clipboard', 'time to GMT', 'write', 'quoted form')
+ References = ('(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)',
+ 'first', 'second', 'third', 'fourth', 'fifth', 'sixth',
+ 'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back',
+ 'before', 'behind', 'every', 'front', 'index', 'last',
+ 'middle', 'some', 'that', 'through', 'thru', 'where', 'whose')
+ Operators = ("and", "or", "is equal", "equals", "(is )?equal to", "is not",
+ "isn't", "isn't equal( to)?", "is not equal( to)?",
+ "doesn't equal", "does not equal", "(is )?greater than",
+ "comes after", "is not less than or equal( to)?",
+ "isn't less than or equal( to)?", "(is )?less than",
+ "comes before", "is not greater than or equal( to)?",
+ "isn't greater than or equal( to)?",
+ "(is )?greater than or equal( to)?", "is not less than",
+ "isn't less than", "does not come before",
+ "doesn't come before", "(is )?less than or equal( to)?",
+ "is not greater than", "isn't greater than",
+ "does not come after", "doesn't come after", "starts? with",
+ "begins? with", "ends? with", "contains?", "does not contain",
+ "doesn't contain", "is in", "is contained by", "is not in",
+ "is not contained by", "isn't contained by", "div", "mod",
+ "not", "(a )?(ref( to)?|reference to)", "is", "does")
+ Control = ('considering', 'else', 'error', 'exit', 'from', 'if',
+ 'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to',
+ 'try', 'until', 'using terms from', 'while', 'whith',
+ 'with timeout( of)?', 'with transaction', 'by', 'continue',
+ 'end', 'its?', 'me', 'my', 'return', 'of', 'as')
+ Declarations = ('global', 'local', 'prop(erty)?', 'set', 'get')
+ Reserved = ('but', 'put', 'returning', 'the')
+ StudioClasses = ('action cell', 'alert reply', 'application', 'box',
+ 'browser( cell)?', 'bundle', 'button( cell)?', 'cell',
+ 'clip view', 'color well', 'color-panel',
+ 'combo box( item)?', 'control',
+ 'data( (cell|column|item|row|source))?', 'default entry',
+ 'dialog reply', 'document', 'drag info', 'drawer',
+ 'event', 'font(-panel)?', 'formatter',
+ 'image( (cell|view))?', 'matrix', 'menu( item)?', 'item',
+ 'movie( view)?', 'open-panel', 'outline view', 'panel',
+ 'pasteboard', 'plugin', 'popup button',
+ 'progress indicator', 'responder', 'save-panel',
+ 'scroll view', 'secure text field( cell)?', 'slider',
+ 'sound', 'split view', 'stepper', 'tab view( item)?',
+ 'table( (column|header cell|header view|view))',
+ 'text( (field( cell)?|view))?', 'toolbar( item)?',
+ 'user-defaults', 'view', 'window')
+ StudioEvents = ('accept outline drop', 'accept table drop', 'action',
+ 'activated', 'alert ended', 'awake from nib', 'became key',
+ 'became main', 'begin editing', 'bounds changed',
+ 'cell value', 'cell value changed', 'change cell value',
+ 'change item value', 'changed', 'child of item',
+ 'choose menu item', 'clicked', 'clicked toolbar item',
+ 'closed', 'column clicked', 'column moved',
+ 'column resized', 'conclude drop', 'data representation',
+ 'deminiaturized', 'dialog ended', 'document nib name',
+ 'double clicked', 'drag( (entered|exited|updated))?',
+ 'drop', 'end editing', 'exposed', 'idle', 'item expandable',
+ 'item value', 'item value changed', 'items changed',
+ 'keyboard down', 'keyboard up', 'launched',
+ 'load data representation', 'miniaturized', 'mouse down',
+ 'mouse dragged', 'mouse entered', 'mouse exited',
+ 'mouse moved', 'mouse up', 'moved',
+ 'number of browser rows', 'number of items',
+ 'number of rows', 'open untitled', 'opened', 'panel ended',
+ 'parameters updated', 'plugin loaded', 'prepare drop',
+ 'prepare outline drag', 'prepare outline drop',
+ 'prepare table drag', 'prepare table drop',
+ 'read from file', 'resigned active', 'resigned key',
+ 'resigned main', 'resized( sub views)?',
+ 'right mouse down', 'right mouse dragged',
+ 'right mouse up', 'rows changed', 'scroll wheel',
+ 'selected tab view item', 'selection changed',
+ 'selection changing', 'should begin editing',
+ 'should close', 'should collapse item',
+ 'should end editing', 'should expand item',
+ 'should open( untitled)?',
+ 'should quit( after last window closed)?',
+ 'should select column', 'should select item',
+ 'should select row', 'should select tab view item',
+ 'should selection change', 'should zoom', 'shown',
+ 'update menu item', 'update parameters',
+ 'update toolbar item', 'was hidden', 'was miniaturized',
+ 'will become active', 'will close', 'will dismiss',
+ 'will display browser cell', 'will display cell',
+ 'will display item cell', 'will display outline cell',
+ 'will finish launching', 'will hide', 'will miniaturize',
+ 'will move', 'will open', 'will pop up', 'will quit',
+ 'will resign active', 'will resize( sub views)?',
+ 'will select tab view item', 'will show', 'will zoom',
+ 'write to file', 'zoomed')
+ StudioCommands = ('animate', 'append', 'call method', 'center',
+ 'close drawer', 'close panel', 'display',
+ 'display alert', 'display dialog', 'display panel', 'go',
+ 'hide', 'highlight', 'increment', 'item for',
+ 'load image', 'load movie', 'load nib', 'load panel',
+ 'load sound', 'localized string', 'lock focus', 'log',
+ 'open drawer', 'path for', 'pause', 'perform action',
+ 'play', 'register', 'resume', 'scroll', 'select( all)?',
+ 'show', 'size to fit', 'start', 'step back',
+ 'step forward', 'stop', 'synchronize', 'unlock focus',
+ 'update')
+ StudioProperties = ('accepts arrow key', 'action method', 'active',
+ 'alignment', 'allowed identifiers',
+ 'allows branch selection', 'allows column reordering',
+ 'allows column resizing', 'allows column selection',
+ 'allows customization',
+ 'allows editing text attributes',
+ 'allows empty selection', 'allows mixed state',
+ 'allows multiple selection', 'allows reordering',
+ 'allows undo', 'alpha( value)?', 'alternate image',
+ 'alternate increment value', 'alternate title',
+ 'animation delay', 'associated file name',
+ 'associated object', 'auto completes', 'auto display',
+ 'auto enables items', 'auto repeat',
+ 'auto resizes( outline column)?',
+ 'auto save expanded items', 'auto save name',
+ 'auto save table columns', 'auto saves configuration',
+ 'auto scroll', 'auto sizes all columns to fit',
+ 'auto sizes cells', 'background color', 'bezel state',
+ 'bezel style', 'bezeled', 'border rect', 'border type',
+ 'bordered', 'bounds( rotation)?', 'box type',
+ 'button returned', 'button type',
+ 'can choose directories', 'can choose files',
+ 'can draw', 'can hide',
+ 'cell( (background color|size|type))?', 'characters',
+ 'class', 'click count', 'clicked( data)? column',
+ 'clicked data item', 'clicked( data)? row',
+ 'closeable', 'collating', 'color( (mode|panel))',
+ 'command key down', 'configuration',
+ 'content(s| (size|view( margins)?))?', 'context',
+ 'continuous', 'control key down', 'control size',
+ 'control tint', 'control view',
+ 'controller visible', 'coordinate system',
+ 'copies( on scroll)?', 'corner view', 'current cell',
+ 'current column', 'current( field)? editor',
+ 'current( menu)? item', 'current row',
+ 'current tab view item', 'data source',
+ 'default identifiers', 'delta (x|y|z)',
+ 'destination window', 'directory', 'display mode',
+ 'displayed cell', 'document( (edited|rect|view))?',
+ 'double value', 'dragged column', 'dragged distance',
+ 'dragged items', 'draws( cell)? background',
+ 'draws grid', 'dynamically scrolls', 'echos bullets',
+ 'edge', 'editable', 'edited( data)? column',
+ 'edited data item', 'edited( data)? row', 'enabled',
+ 'enclosing scroll view', 'ending page',
+ 'error handling', 'event number', 'event type',
+ 'excluded from windows menu', 'executable path',
+ 'expanded', 'fax number', 'field editor', 'file kind',
+ 'file name', 'file type', 'first responder',
+ 'first visible column', 'flipped', 'floating',
+ 'font( panel)?', 'formatter', 'frameworks path',
+ 'frontmost', 'gave up', 'grid color', 'has data items',
+ 'has horizontal ruler', 'has horizontal scroller',
+ 'has parent data item', 'has resize indicator',
+ 'has shadow', 'has sub menu', 'has vertical ruler',
+ 'has vertical scroller', 'header cell', 'header view',
+ 'hidden', 'hides when deactivated', 'highlights by',
+ 'horizontal line scroll', 'horizontal page scroll',
+ 'horizontal ruler view', 'horizontally resizable',
+ 'icon image', 'id', 'identifier',
+ 'ignores multiple clicks',
+ 'image( (alignment|dims when disabled|frame style|scaling))?',
+ 'imports graphics', 'increment value',
+ 'indentation per level', 'indeterminate', 'index',
+ 'integer value', 'intercell spacing', 'item height',
+ 'key( (code|equivalent( modifier)?|window))?',
+ 'knob thickness', 'label', 'last( visible)? column',
+ 'leading offset', 'leaf', 'level', 'line scroll',
+ 'loaded', 'localized sort', 'location', 'loop mode',
+ 'main( (bunde|menu|window))?', 'marker follows cell',
+ 'matrix mode', 'maximum( content)? size',
+ 'maximum visible columns',
+ 'menu( form representation)?', 'miniaturizable',
+ 'miniaturized', 'minimized image', 'minimized title',
+ 'minimum column width', 'minimum( content)? size',
+ 'modal', 'modified', 'mouse down state',
+ 'movie( (controller|file|rect))?', 'muted', 'name',
+ 'needs display', 'next state', 'next text',
+ 'number of tick marks', 'only tick mark values',
+ 'opaque', 'open panel', 'option key down',
+ 'outline table column', 'page scroll', 'pages across',
+ 'pages down', 'palette label', 'pane splitter',
+ 'parent data item', 'parent window', 'pasteboard',
+ 'path( (names|separator))?', 'playing',
+ 'plays every frame', 'plays selection only', 'position',
+ 'preferred edge', 'preferred type', 'pressure',
+ 'previous text', 'prompt', 'properties',
+ 'prototype cell', 'pulls down', 'rate',
+ 'released when closed', 'repeated',
+ 'requested print time', 'required file type',
+ 'resizable', 'resized column', 'resource path',
+ 'returns records', 'reuses columns', 'rich text',
+ 'roll over', 'row height', 'rulers visible',
+ 'save panel', 'scripts path', 'scrollable',
+ 'selectable( identifiers)?', 'selected cell',
+ 'selected( data)? columns?', 'selected data items?',
+ 'selected( data)? rows?', 'selected item identifier',
+ 'selection by rect', 'send action on arrow key',
+ 'sends action when done editing', 'separates columns',
+ 'separator item', 'sequence number', 'services menu',
+ 'shared frameworks path', 'shared support path',
+ 'sheet', 'shift key down', 'shows alpha',
+ 'shows state by', 'size( mode)?',
+ 'smart insert delete enabled', 'sort case sensitivity',
+ 'sort column', 'sort order', 'sort type',
+ 'sorted( data rows)?', 'sound', 'source( mask)?',
+ 'spell checking enabled', 'starting page', 'state',
+ 'string value', 'sub menu', 'super menu', 'super view',
+ 'tab key traverses cells', 'tab state', 'tab type',
+ 'tab view', 'table view', 'tag', 'target( printer)?',
+ 'text color', 'text container insert',
+ 'text container origin', 'text returned',
+ 'tick mark position', 'time stamp',
+ 'title(d| (cell|font|height|position|rect))?',
+ 'tool tip', 'toolbar', 'trailing offset', 'transparent',
+ 'treat packages as directories', 'truncated labels',
+ 'types', 'unmodified characters', 'update views',
+ 'use sort indicator', 'user defaults',
+ 'uses data source', 'uses ruler',
+ 'uses threaded animation',
+ 'uses title from previous column', 'value wraps',
+ 'version',
+ 'vertical( (line scroll|page scroll|ruler view))?',
+ 'vertically resizable', 'view',
+ 'visible( document rect)?', 'volume', 'width', 'window',
+ 'windows menu', 'wraps', 'zoomable', 'zoomed')
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'¬\n', String.Escape),
+ (r"'s\s+", Text), # This is a possessive, consider moving
+ (r'(--|#).*?$', Comment),
+ (r'\(\*', Comment.Multiline, 'comment'),
+ (r'[(){}!,.:]', Punctuation),
+ (r'(«)([^»]+)(»)',
+ bygroups(Text, Name.Builtin, Text)),
+ (r'\b((?:considering|ignoring)\s*)'
+ r'(application responses|case|diacriticals|hyphens|'
+ r'numeric strings|punctuation|white space)',
+ bygroups(Keyword, Name.Builtin)),
+ (r'(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)', Operator),
+ (r"\b({})\b".format('|'.join(Operators)), Operator.Word),
+ (r'^(\s*(?:on|end)\s+)'
+ r'({})'.format('|'.join(StudioEvents[::-1])),
+ bygroups(Keyword, Name.Function)),
+ (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'\b(as )({})\b'.format('|'.join(Classes)),
+ bygroups(Keyword, Name.Class)),
+ (r'\b({})\b'.format('|'.join(Literals)), Name.Constant),
+ (r'\b({})\b'.format('|'.join(Commands)), Name.Builtin),
+ (r'\b({})\b'.format('|'.join(Control)), Keyword),
+ (r'\b({})\b'.format('|'.join(Declarations)), Keyword),
+ (r'\b({})\b'.format('|'.join(Reserved)), Name.Builtin),
+ (r'\b({})s?\b'.format('|'.join(BuiltIn)), Name.Builtin),
+ (r'\b({})\b'.format('|'.join(HandlerParams)), Name.Builtin),
+ (r'\b({})\b'.format('|'.join(StudioProperties)), Name.Attribute),
+ (r'\b({})s?\b'.format('|'.join(StudioClasses)), Name.Builtin),
+ (r'\b({})\b'.format('|'.join(StudioCommands)), Name.Builtin),
+ (r'\b({})\b'.format('|'.join(References)), Name.Builtin),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
+ (rf'\b({Identifiers})\b', Name.Variable),
+ (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
+ (r'[-+]?\d+', Number.Integer),
+ ],
+ 'comment': [
+ (r'\(\*', Comment.Multiline, '#push'),
+ (r'\*\)', Comment.Multiline, '#pop'),
+ ('[^*(]+', Comment.Multiline),
+ ('[*(]', Comment.Multiline),
+ ],
+ }
+
+
class RexxLexer(RegexLexer):
    """
    Rexx is a scripting language available for
    a wide range of different platforms with its roots found on mainframe
    systems. It is popular for I/O- and data based tasks and can act as glue
    language to bind different applications together.
    """
    name = 'Rexx'
    url = 'http://www.rexxinfo.org/'
    aliases = ['rexx', 'arexx']
    filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
    mimetypes = ['text/x-rexx']
    version_added = '2.0'
    # Rexx keywords and identifiers are case insensitive.
    flags = re.IGNORECASE

    tokens = {
        'root': [
            (r'\s+', Whitespace),
            (r'/\*', Comment.Multiline, 'comment'),
            (r'"', String, 'string_double'),
            (r"'", String, 'string_single'),
            (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
            # A label immediately followed by PROCEDURE declares a function.
            (r'([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b',
             bygroups(Name.Function, Whitespace, Operator, Whitespace,
                      Keyword.Declaration)),
            # Any other label.
            (r'([a-z_]\w*)(\s*)(:)',
             bygroups(Name.Label, Whitespace, Operator)),
            include('function'),
            include('keyword'),
            include('operator'),
            (r'[a-z_]\w*', Text),
        ],
        'function': [
            # Built-in functions are only recognized when directly followed
            # by an opening parenthesis (captured by the suffix).
            (words((
                'abbrev', 'abs', 'address', 'arg', 'b2x', 'bitand', 'bitor', 'bitxor',
                'c2d', 'c2x', 'center', 'charin', 'charout', 'chars', 'compare',
                'condition', 'copies', 'd2c', 'd2x', 'datatype', 'date', 'delstr',
                'delword', 'digits', 'errortext', 'form', 'format', 'fuzz', 'insert',
                'lastpos', 'left', 'length', 'linein', 'lineout', 'lines', 'max',
                'min', 'overlay', 'pos', 'queued', 'random', 'reverse', 'right', 'sign',
                'sourceline', 'space', 'stream', 'strip', 'substr', 'subword', 'symbol',
                'time', 'trace', 'translate', 'trunc', 'value', 'verify', 'word',
                'wordindex', 'wordlength', 'wordpos', 'words', 'x2b', 'x2c', 'x2d',
                'xrange'), suffix=r'(\s*)(\()'),
             bygroups(Name.Builtin, Whitespace, Operator)),
        ],
        'keyword': [
            (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
             r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
             r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
             r'while)\b', Keyword.Reserved),
        ],
        'operator': [
            (r'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
             r'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
             r'¬>>|¬>|¬|\.|,)', Operator),
        ],
        'string_double': [
            (r'[^"\n]+', String),
            (r'""', String),  # a doubled quote is an escaped quote
            (r'"', String, '#pop'),
            (r'\n', Text, '#pop'),  # Stray linefeed also terminates strings.
        ],
        'string_single': [
            (r'[^\'\n]+', String),
            (r'\'\'', String),  # a doubled quote is an escaped quote
            (r'\'', String, '#pop'),
            (r'\n', Text, '#pop'),  # Stray linefeed also terminates strings.
        ],
        'comment': [
            # No '#push' for '/*' here, so comments are not treated as nested.
            (r'[^*]+', Comment.Multiline),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'\*', Comment.Multiline),
        ]
    }

    # Helper to compile the heuristic patterns below.  It runs while the
    # class body executes, so no ``staticmethod`` decorator is needed.
    def _c(s):
        return re.compile(s, re.MULTILINE)
    # Constructs that hint at Rexx; each gets a weight in analyse_text.
    _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
    _ADDRESS_PATTERN = _c(r'^\s*address\s+')
    _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
    _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
    _PROCEDURE_PATTERN = _c(r'^\s*([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b')
    _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
    _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
    PATTERNS_AND_WEIGHTS = (
        (_ADDRESS_COMMAND_PATTERN, 0.2),
        (_ADDRESS_PATTERN, 0.05),
        (_DO_WHILE_PATTERN, 0.1),
        (_ELSE_DO_PATTERN, 0.1),
        (_IF_THEN_DO_PATTERN, 0.1),
        (_PROCEDURE_PATTERN, 0.5),
        (_PARSE_ARG_PATTERN, 0.2),
    )

    def analyse_text(text):
        """
        Check for initial comment and patterns that distinguish Rexx from other
        C-like languages.
        """
        if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
            # Header matches MVS Rexx requirements, this is certainly a Rexx
            # script.
            return 1.0
        elif text.startswith('/*'):
            # Header matches general Rexx requirements; the source code might
            # still be any language using C comments such as C++, C# or Java.
            lowerText = text.lower()
            result = sum(weight
                         for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
                         if pattern.search(lowerText)) + 0.01
            return min(result, 1.0)
        # Implicitly returns None when the text does not start with a comment.
+
+
class MOOCodeLexer(RegexLexer):
    """
    For MOOCode (the MOO scripting language).
    """
    name = 'MOOCode'
    url = 'http://www.moo.mud.org/'
    filenames = ['*.moo']
    aliases = ['moocode', 'moo']
    mimetypes = ['text/x-moocode']
    version_added = '0.9'

    tokens = {
        'root': [
            # Numbers
            (r'(0|[1-9][0-9_]*)', Number.Integer),
            # Strings
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
            # exceptions
            (r'(E_PERM|E_DIV)', Name.Exception),
            # db-refs
            (r'((#[-0-9]+)|(\$\w+))', Name.Entity),
            # Keywords
            (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while'
             r'|endwhile|break|continue|return|try'
             r'|except|endtry|finally|in)\b', Keyword),
            # builtins
            (r'(random|length)', Name.Builtin),
            # special variables
            (r'(player|caller|this|args)', Name.Variable.Instance),
            # skip whitespace
            (r'\s+', Text),
            (r'\n', Text),
            # other operators
            (r'([!;=,{}&|:.\[\]@()<>?]+)', Operator),
            # function call (name directly followed by an opening paren)
            (r'(\w+)(\()', bygroups(Name.Function, Operator)),
            # variables -- must stay last so the rules above win
            (r'(\w+)', Text),
        ]
    }
+
+
class HybrisLexer(RegexLexer):
    """
    For Hybris source code.
    """

    name = 'Hybris'
    aliases = ['hybris']
    filenames = ['*.hyb']
    mimetypes = ['text/x-hybris', 'application/x-hybris']
    url = 'https://github.com/evilsocket/hybris'
    version_added = '1.4'

    # DOTALL lets the /* ... */ rule below span multiple lines.
    flags = re.MULTILINE | re.DOTALL

    tokens = {
        'root': [
            # method names: one or more declaration keywords, then the name
            # and its opening parenthesis
            (r'^(\s*(?:function|method|operator\s+)+?)'
             r'([a-zA-Z_]\w*)'
             r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
            (r'[^\S\n]+', Text),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'@[a-zA-Z_][\w.]*', Name.Decorator),
            (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
             r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
            (r'(extends|private|protected|public|static|throws|function|method|'
             r'operator)\b', Keyword.Declaration),
            (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
             r'__INC_PATH__)\b', Keyword.Constant),
            (r'(class|struct)(\s+)',
             bygroups(Keyword.Declaration, Text), 'class'),
            (r'(import|include)(\s+)',
             bygroups(Keyword.Namespace, Text), 'import'),
            # Hybris standard-library functions.
            (words((
                'gc_collect', 'gc_mm_items', 'gc_mm_usage', 'gc_collect_threshold',
                'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32',
                'sha2', 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos',
                'cosh', 'exp', 'fabs', 'floor', 'fmod', 'log', 'log10', 'pow', 'sin',
                'sinh', 'sqrt', 'tan', 'tanh', 'isint', 'isfloat', 'ischar', 'isstring',
                'isarray', 'ismap', 'isalias', 'typeof', 'sizeof', 'toint', 'tostring',
                'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval', 'var_names',
                'var_values', 'user_functions', 'dyn_functions', 'methods', 'call',
                'call_method', 'mknod', 'mkfifo', 'mount', 'umount2', 'umount', 'ticks',
                'usleep', 'sleep', 'time', 'strtime', 'strdate', 'dllopen', 'dlllink',
                'dllcall', 'dllcall_argv', 'dllclose', 'env', 'exec', 'fork', 'getpid',
                'wait', 'popen', 'pclose', 'exit', 'kill', 'pthread_create',
                'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill',
                'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind',
                'listen', 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect',
                'server', 'recv', 'send', 'close', 'print', 'println', 'printf', 'input',
                'readline', 'serial_open', 'serial_fcntl', 'serial_get_attr',
                'serial_get_ispeed', 'serial_get_ospeed', 'serial_set_attr',
                'serial_set_ispeed', 'serial_set_ospeed', 'serial_write', 'serial_read',
                'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell',
                'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir',
                'pcre_replace', 'size', 'pop', 'unmap', 'has', 'keys', 'values',
                'length', 'find', 'substr', 'replace', 'split', 'trim', 'remove',
                'contains', 'join'), suffix=r'\b'),
             Name.Builtin),
            # Built-in types.
            (words((
                'MethodReference', 'Runner', 'Dll', 'Thread', 'Pipe', 'Process',
                'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket',
                'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'),
             Keyword.Type),
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
            (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
            (r'(\.)([a-zA-Z_]\w*)',
             bygroups(Operator, Name.Attribute)),
            (r'[a-zA-Z_]\w*:', Name.Label),
            (r'[a-zA-Z_$]\w*', Name),
            (r'[~^*!%&\[\](){}<>|+=:;,./?\-@]+', Operator),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-f]+', Number.Hex),
            (r'[0-9]+L?', Number.Integer),
            (r'\n', Text),
        ],
        # One-shot states entered after 'class'/'struct' and 'import'/'include'.
        'class': [
            (r'[a-zA-Z_]\w*', Name.Class, '#pop')
        ],
        'import': [
            (r'[\w.]+\*?', Name.Namespace, '#pop')
        ],
    }

    def analyse_text(text):
        """public method and private method don't seem to be quite common
        elsewhere."""
        result = 0
        if re.search(r'\b(?:public|private)\s+method\b', text):
            result += 0.01
        return result
+
+
+
class EasytrieveLexer(RegexLexer):
    """
    Easytrieve Plus is a programming language for extracting, filtering and
    converting sequential data. Furthermore it can layout data for reports.
    It is mainly used on mainframe platforms and can access several of the
    mainframe's native file formats. It is somewhat comparable to awk.
    """
    name = 'Easytrieve'
    aliases = ['easytrieve']
    filenames = ['*.ezt', '*.mac']
    mimetypes = ['text/x-easytrieve']
    url = 'https://www.broadcom.com/products/mainframe/application-development/easytrieve-report-generator'
    version_added = '2.1'
    flags = 0

    # Note: We cannot use r'\b' at the start and end of keywords because
    # Easytrieve Plus delimiter characters are:
    #
    # * space ( )
    # * apostrophe (')
    # * period (.)
    # * comma (,)
    # * parenthesis ( and )
    # * colon (:)
    #
    # Additionally words end once a '*' appears, indicating a comment.
    _DELIMITERS = r' \'.,():\n'
    # NOTE: the misspelling ("COMENT") is kept for backward compatibility.
    _DELIMITERS_OR_COMENT = _DELIMITERS + '*'
    _DELIMITER_PATTERN = '[' + _DELIMITERS + ']'
    _DELIMITER_PATTERN_CAPTURE = '(' + _DELIMITER_PATTERN + ')'
    _NON_DELIMITER_OR_COMMENT_PATTERN = '[^' + _DELIMITERS_OR_COMENT + ']'
    _OPERATORS_PATTERN = '[.+\\-/=\\[\\](){}<>;,&%¬]'
    _KEYWORDS = [
        'AFTER-BREAK', 'AFTER-LINE', 'AFTER-SCREEN', 'AIM', 'AND', 'ATTR',
        'BEFORE', 'BEFORE-BREAK', 'BEFORE-LINE', 'BEFORE-SCREEN', 'BUSHU',
        'BY', 'CALL', 'CASE', 'CHECKPOINT', 'CHKP', 'CHKP-STATUS', 'CLEAR',
        'CLOSE', 'COL', 'COLOR', 'COMMIT', 'CONTROL', 'COPY', 'CURSOR', 'D',
        'DECLARE', 'DEFAULT', 'DEFINE', 'DELETE', 'DENWA', 'DISPLAY', 'DLI',
        'DO', 'DUPLICATE', 'E', 'ELSE', 'ELSE-IF', 'END', 'END-CASE',
        'END-DO', 'END-IF', 'END-PROC', 'ENDPAGE', 'ENDTABLE', 'ENTER', 'EOF',
        'EQ', 'ERROR', 'EXIT', 'EXTERNAL', 'EZLIB', 'F1', 'F10', 'F11', 'F12',
        'F13', 'F14', 'F15', 'F16', 'F17', 'F18', 'F19', 'F2', 'F20', 'F21',
        'F22', 'F23', 'F24', 'F25', 'F26', 'F27', 'F28', 'F29', 'F3', 'F30',
        'F31', 'F32', 'F33', 'F34', 'F35', 'F36', 'F4', 'F5', 'F6', 'F7',
        'F8', 'F9', 'FETCH', 'FILE-STATUS', 'FILL', 'FINAL', 'FIRST',
        'FIRST-DUP', 'FOR', 'GE', 'GET', 'GO', 'GOTO', 'GQ', 'GR', 'GT',
        'HEADING', 'HEX', 'HIGH-VALUES', 'IDD', 'IDMS', 'IF', 'IN', 'INSERT',
        'JUSTIFY', 'KANJI-DATE', 'KANJI-DATE-LONG', 'KANJI-TIME', 'KEY',
        'KEY-PRESSED', 'KOKUGO', 'KUN', 'LAST-DUP', 'LE', 'LEVEL', 'LIKE',
        'LINE', 'LINE-COUNT', 'LINE-NUMBER', 'LINK', 'LIST', 'LOW-VALUES',
        'LQ', 'LS', 'LT', 'MACRO', 'MASK', 'MATCHED', 'MEND', 'MESSAGE',
        'MOVE', 'MSTART', 'NE', 'NEWPAGE', 'NOMASK', 'NOPRINT', 'NOT',
        'NOTE', 'NOVERIFY', 'NQ', 'NULL', 'OF', 'OR', 'OTHERWISE', 'PA1',
        'PA2', 'PA3', 'PAGE-COUNT', 'PAGE-NUMBER', 'PARM-REGISTER',
        'PATH-ID', 'PATTERN', 'PERFORM', 'POINT', 'POS', 'PRIMARY', 'PRINT',
        'PROCEDURE', 'PROGRAM', 'PUT', 'READ', 'RECORD', 'RECORD-COUNT',
        'RECORD-LENGTH', 'REFRESH', 'RELEASE', 'RENUM', 'REPEAT', 'REPORT',
        'REPORT-INPUT', 'RESHOW', 'RESTART', 'RETRIEVE', 'RETURN-CODE',
        'ROLLBACK', 'ROW', 'S', 'SCREEN', 'SEARCH', 'SECONDARY', 'SELECT',
        'SEQUENCE', 'SIZE', 'SKIP', 'SOKAKU', 'SORT', 'SQL', 'STOP', 'SUM',
        'SYSDATE', 'SYSDATE-LONG', 'SYSIN', 'SYSIPT', 'SYSLST', 'SYSPRINT',
        'SYSSNAP', 'SYSTIME', 'TALLY', 'TERM-COLUMNS', 'TERM-NAME',
        'TERM-ROWS', 'TERMINATION', 'TITLE', 'TO', 'TRANSFER', 'TRC',
        'UNIQUE', 'UNTIL', 'UPDATE', 'UPPERCASE', 'USER', 'USERID', 'VALUE',
        'VERIFY', 'W', 'WHEN', 'WHILE', 'WORK', 'WRITE', 'X', 'XDM', 'XRST'
    ]

    tokens = {
        'root': [
            (r'\*.*\n', Comment.Single),
            (r'\n+', Whitespace),
            # Macro argument
            (r'&' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+\.', Name.Variable,
             'after_macro_argument'),
            # Macro call
            (r'%' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Variable),
            (r'(FILE|MACRO|REPORT)(\s+)',
             bygroups(Keyword.Declaration, Whitespace), 'after_declaration'),
            (r'(JOB|PARM)' + r'(' + _DELIMITER_PATTERN + r')',
             bygroups(Keyword.Declaration, Operator)),
            (words(_KEYWORDS, suffix=_DELIMITER_PATTERN_CAPTURE),
             bygroups(Keyword.Reserved, Operator)),
            (_OPERATORS_PATTERN, Operator),
            # Procedure declaration
            (r'(' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+)(\s*)(\.?)(\s*)(PROC)(\s*\n)',
             bygroups(Name.Function, Whitespace, Operator, Whitespace,
                      Keyword.Declaration, Whitespace)),
            (r'[0-9]+\.[0-9]*', Number.Float),
            (r'[0-9]+', Number.Integer),
            (r"'(''|[^'])*'", String),
            (r'\s+', Whitespace),
            # Everything else just belongs to a name
            (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name),
        ],
        'after_declaration': [
            (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Function),
            default('#pop'),
        ],
        'after_macro_argument': [
            (r'\*.*\n', Comment.Single, '#pop'),
            (r'\s+', Whitespace, '#pop'),
            (_OPERATORS_PATTERN, Operator, '#pop'),
            (r"'(''|[^'])*'", String, '#pop'),
            # Everything else just belongs to a name
            (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name),
        ],
    }
    _COMMENT_LINE_REGEX = re.compile(r'^\s*\*')
    _MACRO_HEADER_REGEX = re.compile(r'^\s*MACRO')

    def analyse_text(text):
        """
        Perform a structural analysis for basic Easytrieve constructs.

        Returns a score between 0.0 and 1.0: a leading MACRO line or a
        JOB/PROC/END-PROC skeleton are the strongest indicators.
        """
        result = 0.0
        lines = text.split('\n')
        hasEndProc = False
        hasHeaderComment = False
        hasFile = False
        hasJob = False
        hasProc = False
        hasParm = False
        hasReport = False

        def isCommentLine(line):
            # Bug fix: test the line that was passed in.  Previously this
            # always examined lines[0], ignoring its argument.
            return EasytrieveLexer._COMMENT_LINE_REGEX.match(line) is not None

        def isEmptyLine(line):
            return not bool(line.strip())

        # Remove possible empty lines and header comments.
        while lines and (isEmptyLine(lines[0]) or isCommentLine(lines[0])):
            if not isEmptyLine(lines[0]):
                hasHeaderComment = True
            del lines[0]

        if not lines:
            # Bug fix: input consisting only of blank lines and comments used
            # to raise an IndexError on the lines[0] access below.  Such text
            # gives no evidence either way, so report no match.
            return result

        if EasytrieveLexer._MACRO_HEADER_REGEX.match(lines[0]):
            # Looks like an Easytrieve macro.
            result = 0.4
            if hasHeaderComment:
                result += 0.4
        else:
            # Scan the source for lines starting with indicators.
            for line in lines:
                words = line.split()
                if len(words) >= 2:
                    firstWord = words[0]
                    if not hasReport:
                        if not hasJob:
                            if not hasFile:
                                if not hasParm:
                                    if firstWord == 'PARM':
                                        hasParm = True
                                if firstWord == 'FILE':
                                    hasFile = True
                            if firstWord == 'JOB':
                                hasJob = True
                        elif firstWord == 'PROC':
                            hasProc = True
                        elif firstWord == 'END-PROC':
                            hasEndProc = True
                        elif firstWord == 'REPORT':
                            hasReport = True

            # Weight the findings.
            if hasJob and (hasProc == hasEndProc):
                if hasHeaderComment:
                    result += 0.1
                if hasParm:
                    if hasProc:
                        # Found PARM, JOB and PROC/END-PROC:
                        # pretty sure this is Easytrieve.
                        result += 0.8
                    else:
                        # Found PARAM and JOB: probably this is Easytrieve
                        result += 0.5
                else:
                    # Found JOB and possibly other keywords: might be Easytrieve
                    result += 0.11
                    if hasParm:
                        # Note: PARAM is not a proper English word, so this is
                        # regarded a much better indicator for Easytrieve than
                        # the other words.
                        result += 0.2
                    if hasFile:
                        result += 0.01
                    if hasReport:
                        result += 0.01
        assert 0.0 <= result <= 1.0
        return result
+
+
class JclLexer(RegexLexer):
    """
    Job Control Language (JCL)
    is a scripting language used on mainframe platforms to instruct the system
    on how to run a batch job or start a subsystem. It is somewhat
    comparable to MS DOS batch and Unix shell scripts.
    """
    name = 'JCL'
    aliases = ['jcl']
    filenames = ['*.jcl']
    mimetypes = ['text/x-jcl']
    url = 'https://en.wikipedia.org/wiki/Job_Control_Language'
    version_added = '2.1'

    flags = re.IGNORECASE

    tokens = {
        'root': [
            (r'//\*.*\n', Comment.Single),
            (r'//', Keyword.Pseudo, 'statement'),
            (r'/\*', Keyword.Pseudo, 'jes2_statement'),
            # TODO: JES3 statement
            (r'.*\n', Other)  # Input text or inline code in any language.
        ],
        # After the leading '//' of a JCL card.
        'statement': [
            (r'\s*\n', Whitespace, '#pop'),
            # Label followed by EXEC or JOB.
            (r'([a-z]\w*)(\s+)(exec|job)(\s*)',
             bygroups(Name.Label, Whitespace, Keyword.Reserved, Whitespace),
             'option'),
            (r'[a-z]\w*', Name.Variable, 'statement_command'),
            (r'\s+', Whitespace, 'statement_command'),
        ],
        # The command part of a labeled statement.
        'statement_command': [
            (r'\s+(command|cntl|dd|endctl|endif|else|include|jcllib|'
             r'output|pend|proc|set|then|xmit)\s+', Keyword.Reserved, 'option'),
            include('option')
        ],
        # After a leading '/*' (JES2 control statement).
        'jes2_statement': [
            (r'\s*\n', Whitespace, '#pop'),
            (r'\$', Keyword, 'option'),
            (r'\b(jobparam|message|netacct|notify|output|priority|route|'
             r'setup|signoff|xeq|xmit)\b', Keyword, 'option'),
        ],
        # Parameters/operands of a statement.
        'option': [
            # (r'\n', Text, 'root'),
            (r'\*', Name.Builtin),
            (r'[\[\](){}<>;,]', Punctuation),
            (r'[-+*/=&%]', Operator),
            (r'[a-z_]\w*', Name),
            (r'\d+\.\d*', Number.Float),
            (r'\.\d+', Number.Float),
            (r'\d+', Number.Integer),
            (r"'", String, 'option_string'),
            # Blank after the operands: the rest of the card is a comment.
            (r'[ \t]+', Whitespace, 'option_comment'),
            (r'\.', Punctuation),
        ],
        'option_string': [
            # A string continued on the next card via its leading '//'.
            (r"(\n)(//)", bygroups(Text, Keyword.Pseudo)),
            (r"''", String),  # doubled apostrophe is an escaped apostrophe
            (r"[^']", String),
            (r"'", String, '#pop'),
        ],
        'option_comment': [
            # (r'\n', Text, 'root'),
            (r'.+', Comment.Single),
        ]
    }

    # A job card: '//' + a 1-8 character name + the JOB keyword.
    _JOB_HEADER_PATTERN = re.compile(r'^//[a-z#$@][a-z0-9#$@]{0,7}\s+job(\s+.*)?$',
                                     re.IGNORECASE)

    def analyse_text(text):
        """
        Recognize JCL job by header.
        """
        result = 0.0
        lines = text.split('\n')
        if len(lines) > 0:
            if JclLexer._JOB_HEADER_PATTERN.match(lines[0]):
                result = 1.0
        assert 0.0 <= result <= 1.0
        return result
+
+
class MiniScriptLexer(RegexLexer):
    """
    For MiniScript source code.
    """

    name = 'MiniScript'
    url = 'https://miniscript.org'
    aliases = ['miniscript', 'ms']
    filenames = ['*.ms']
    # NOTE(review): 'minicript' looks like a typo, but this mimetype is
    # registered as-is; changing it could break existing lookups.
    mimetypes = ['text/x-minicript', 'application/x-miniscript']
    version_added = '2.6'

    tokens = {
        'root': [
            # An optional shebang line, then everything else in 'base'.
            (r'#!(.*?)$', Comment.Preproc),
            default('base'),
        ],
        'base': [
            ('//.*$', Comment.Single),
            (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number),
            (r'(?i)\d+e[+-]?\d+', Number),
            (r'\d+', Number),
            (r'\n', Text),
            (r'[^\S\n]+', Text),
            (r'"', String, 'string_double'),
            (r'(==|!=|<=|>=|[=+\-*/%^<>.:])', Operator),
            (r'[;,\[\]{}()]', Punctuation),
            (words((
                'break', 'continue', 'else', 'end', 'for', 'function', 'if',
                'in', 'isa', 'then', 'repeat', 'return', 'while'), suffix=r'\b'),
             Keyword),
            (words((
                'abs', 'acos', 'asin', 'atan', 'ceil', 'char', 'cos', 'floor',
                'log', 'round', 'rnd', 'pi', 'sign', 'sin', 'sqrt', 'str', 'tan',
                'hasIndex', 'indexOf', 'len', 'val', 'code', 'remove', 'lower',
                'upper', 'replace', 'split', 'indexes', 'values', 'join', 'sum',
                'sort', 'shuffle', 'push', 'pop', 'pull', 'range',
                'print', 'input', 'time', 'wait', 'locals', 'globals', 'outer',
                'yield'), suffix=r'\b'),
             Name.Builtin),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'(and|or|not|new)\b', Operator.Word),
            (r'(self|super|__isa)\b', Name.Builtin.Pseudo),
            (r'[a-zA-Z_]\w*', Name.Variable)
        ],
        'string_double': [
            (r'[^"\n]+', String),
            (r'""', String),  # doubled quote is an escaped quote
            (r'"', String, '#pop'),
            (r'\n', Text, '#pop'),  # Stray linefeed also terminates strings.
        ]
    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/sgf.py b/venv/lib/python3.10/site-packages/pygments/lexers/sgf.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0e56cba55242ba7874e1e71186be35986537407
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/sgf.py
@@ -0,0 +1,59 @@
+"""
+ pygments.lexers.sgf
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Smart Game Format (sgf) file format.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Name, Literal, String, Punctuation, Whitespace
+
+__all__ = ["SmartGameFormatLexer"]
+
+
class SmartGameFormatLexer(RegexLexer):
    """
    Lexer for Smart Game Format (sgf) file format.

    The format is used to store game records of board games for two players
    (mainly Go game).
    """
    name = 'SmartGameFormat'
    url = 'https://www.red-bean.com/sgf/'
    aliases = ['sgf']
    filenames = ['*.sgf']
    version_added = '2.4'

    tokens = {
        'root': [
            (r'[():;]+', Punctuation),
            # tokens (SGF property identifiers):
            (r'(A[BW]|AE|AN|AP|AR|AS|[BW]L|BM|[BW]R|[BW]S|[BW]T|CA|CH|CP|CR|'
             r'DD|DM|DO|DT|EL|EV|EX|FF|FG|G[BW]|GC|GM|GN|HA|HO|ID|IP|IT|IY|KM|'
             r'KO|LB|LN|LT|L|MA|MN|M|N|OB|OM|ON|OP|OT|OV|P[BW]|PC|PL|PM|RE|RG|'
             r'RO|RU|SO|SC|SE|SI|SL|SO|SQ|ST|SU|SZ|T[BW]|TC|TE|TM|TR|UC|US|VW|'
             r'V|[BW]|C)',
             Name.Builtin),
            # Property values in brackets; the more specific forms are tried
            # before the generic text rule below.
            # number:
            (r'(\[)([0-9.]+)(\])',
             bygroups(Punctuation, Literal.Number, Punctuation)),
            # date:
            (r'(\[)([0-9]{4}-[0-9]{2}-[0-9]{2})(\])',
             bygroups(Punctuation, Literal.Date, Punctuation)),
            # point:
            (r'(\[)([a-z]{2})(\])',
             bygroups(Punctuation, String, Punctuation)),
            # double points:
            (r'(\[)([a-z]{2})(:)([a-z]{2})(\])',
             bygroups(Punctuation, String, Punctuation, String, Punctuation)),

            (r'(\[)([\w\s#()+,\-.:?]+)(\])',
             bygroups(Punctuation, String, Punctuation)),
            (r'(\[)(\s.*)(\])',
             bygroups(Punctuation, Whitespace, Punctuation)),
            (r'\s+', Whitespace)
        ],
    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/shell.py b/venv/lib/python3.10/site-packages/pygments/lexers/shell.py
new file mode 100644
index 0000000000000000000000000000000000000000..744767a1d4464f838b7c062f63c850e3a2f18554
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/shell.py
@@ -0,0 +1,902 @@
+"""
+ pygments.lexers.shell
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various shells.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, \
+ include, default, this, using, words, line_re
+from pygments.token import Punctuation, Whitespace, \
+ Text, Comment, Operator, Keyword, Name, String, Number, Generic
+from pygments.util import shebang_matches
+
+__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
+ 'SlurmBashLexer', 'MSDOSSessionLexer', 'PowerShellLexer',
+ 'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer',
+ 'ExeclineLexer']
+
+
class BashLexer(RegexLexer):
    """
    Lexer for (ba|k|z|)sh shell scripts.
    """

    name = 'Bash'
    aliases = ['bash', 'sh', 'ksh', 'zsh', 'shell', 'openrc']
    filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
                 '*.exheres-0', '*.exlib', '*.zsh',
                 '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc',
                 '.kshrc', 'kshrc',
                 'PKGBUILD']
    mimetypes = ['application/x-sh', 'application/x-shellscript', 'text/x-shellscript']
    url = 'https://en.wikipedia.org/wiki/Unix_shell'
    version_added = '0.6'

    tokens = {
        'root': [
            include('basic'),
            (r'`', String.Backtick, 'backticks'),
            include('data'),
            include('interp'),
        ],
        # Everything introduced by '$': substitutions and variables.
        'interp': [
            (r'\$\(\(', Keyword, 'math'),  # arithmetic expansion $(( ))
            (r'\$\(', Keyword, 'paren'),  # command substitution $( )
            (r'\$\{#?', String.Interpol, 'curly'),  # parameter expansion ${ }
            (r'\$[a-zA-Z_]\w*', Name.Variable),  # user variable
            (r'\$(?:\d+|[#$?!_*@-])', Name.Variable),      # builtin
            (r'\$', Text),
        ],
        # Keywords, builtins, comments, assignments and redirections.
        'basic': [
            (r'\b(if|fi|else|while|in|do|done|for|then|return|function|case|'
             r'select|break|continue|until|esac|elif)(\s*)\b',
             bygroups(Keyword, Whitespace)),
            # Builtins only count when followed by whitespace, ')' or '`'.
            (r'\b(alias|bg|bind|builtin|caller|cd|command|compgen|'
             r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
             r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
             r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
             r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
             r'ulimit|umask|unalias|unset|wait)(?=[\s)`])',
             Name.Builtin),
            (r'\A#!.+\n', Comment.Hashbang),
            (r'#.*\n', Comment.Single),
            (r'\\[\w\W]', String.Escape),
            (r'(\b\w+)(\s*)(\+?=)', bygroups(Name.Variable, Whitespace, Operator)),
            (r'[\[\]{}()=]', Operator),
            (r'<<<', Operator),  # here-string
            # here-document, up to the repeated delimiter word (\2)
            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
            (r'&&|\|\|', Operator),
        ],
        # Strings, numbers and word separators.
        'data': [
            (r'(?s)\$?"(\\.|[^"\\$])*"', String.Double),
            (r'"', String.Double, 'string'),  # double quote with interpolation
            (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
            (r"(?s)'.*?'", String.Single),
            (r';', Punctuation),
            (r'&', Punctuation),
            (r'\|', Punctuation),
            (r'\s+', Whitespace),
            (r'\d+\b', Number),
            (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
            (r'<', Text),
        ],
        # Inside a double-quoted string: escapes, text, then interpolation.
        'string': [
            (r'"', String.Double, '#pop'),
            (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
            include('interp'),
        ],
        # Inside ${ ... } parameter expansion.
        'curly': [
            (r'\}', String.Interpol, '#pop'),
            (r':-', Keyword),
            (r'\w+', Name.Variable),
            (r'[^}:"\'`$\\]+', Punctuation),
            (r':', Punctuation),
            include('root'),
        ],
        # Inside $( ... ) command substitution.
        'paren': [
            (r'\)', Keyword, '#pop'),
            include('root'),
        ],
        # Inside $(( ... )) arithmetic expansion.
        'math': [
            (r'\)\)', Keyword, '#pop'),
            (r'\*\*|\|\||<<|>>|[-+*/%^|&<>]', Operator),
            (r'\d+#[\da-zA-Z]+', Number),  # base#number literal
            (r'\d+#(?! )', Number),
            (r'0[xX][\da-fA-F]+', Number),
            (r'\d+', Number),
            (r'[a-zA-Z_]\w*', Name.Variable),  # user variable
            include('root'),
        ],
        # Inside ` ... ` command substitution.
        'backticks': [
            (r'`', String.Backtick, '#pop'),
            include('root'),
        ],
    }

    def analyse_text(text):
        # A shebang is conclusive; a '$ ' prompt prefix is only a weak hint.
        if shebang_matches(text, r'(ba|z|)sh'):
            return 1
        if text.startswith('$ '):
            return 0.2
+
+
class SlurmBashLexer(BashLexer):
    """
    Lexer for (ba|k|z|)sh Slurm scripts.
    """

    name = 'Slurm'
    aliases = ['slurm', 'sbatch']
    filenames = ['*.sl']
    mimetypes = []
    version_added = '2.4'
    EXTRA_KEYWORDS = {'srun'}

    def get_tokens_unprocessed(self, text):
        # Post-process the plain Bash token stream: promote Slurm commands
        # (EXTRA_KEYWORDS) to builtins and '#SBATCH' directive comments to
        # pseudo keywords; everything else passes through unchanged.
        for pos, tok, val in BashLexer.get_tokens_unprocessed(self, text):
            if tok is Text and val in self.EXTRA_KEYWORDS:
                tok = Name.Builtin
            elif tok is Comment.Single and 'SBATCH' in val:
                tok = Keyword.Pseudo
            yield pos, tok, val
+
+
class ShellSessionBaseLexer(Lexer):
    """
    Base lexer for shell sessions.

    .. versionadded:: 2.1
    """

    # When True, a line starting with the continuation prompt (_ps2) is a
    # continuation even without a preceding trailing backslash.
    _bare_continuation = False
    # Matches a "(venv)"-style prefix that may precede the prompt.
    _venv = re.compile(r'^(\([^)]*\))(\s*)')

    def get_tokens_unprocessed(self, text):
        # Command text is delegated to the concrete shell lexer
        # (self._innerLexerCls); prompts and program output are handled here.
        innerlexer = self._innerLexerCls(**self.options)

        pos = 0
        curcode = ''      # accumulated command text awaiting the inner lexer
        insertions = []   # prompt tokens to splice back into the command stream
        backslash_continuation = False

        for match in line_re.finditer(text):
            line = match.group()

            # Strip and tokenize a virtualenv prefix such as "(venv) ".
            venv_match = self._venv.match(line)
            if venv_match:
                venv = venv_match.group(1)
                venv_whitespace = venv_match.group(2)
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt.VirtualEnv, venv)]))
                if venv_whitespace:
                    insertions.append((len(curcode),
                                       [(0, Text, venv_whitespace)]))
                line = line[venv_match.end():]

            m = self._ps1rgx.match(line)
            if m:
                # To support output lexers (say diff output), the output
                # needs to be broken by prompts whenever the output lexer
                # changes.
                if not insertions:
                    pos = match.start()

                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, m.group(1))]))
                curcode += m.group(2)
                backslash_continuation = curcode.endswith('\\\n')
            elif backslash_continuation:
                # The previous command line ended with a backslash: this line
                # continues the same command, with or without a PS2 prompt.
                if line.startswith(self._ps2):
                    insertions.append((len(curcode),
                                       [(0, Generic.Prompt,
                                         line[:len(self._ps2)])]))
                    curcode += line[len(self._ps2):]
                else:
                    curcode += line
                backslash_continuation = curcode.endswith('\\\n')
            elif self._bare_continuation and line.startswith(self._ps2):
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt,
                                     line[:len(self._ps2)])]))
                curcode += line[len(self._ps2):]
            else:
                # Plain output line: flush any pending command first so token
                # ordering is preserved.
                if insertions:
                    toks = innerlexer.get_tokens_unprocessed(curcode)
                    for i, t, v in do_insertions(insertions, toks):
                        yield pos+i, t, v
                yield match.start(), Generic.Output, line
                insertions = []
                curcode = ''
        # Flush a trailing command that was not followed by any output.
        if insertions:
            for i, t, v in do_insertions(insertions,
                                         innerlexer.get_tokens_unprocessed(curcode)):
                yield pos+i, t, v
+
+
+class BashSessionLexer(ShellSessionBaseLexer):
+    """
+    Lexer for Bash shell sessions, i.e. command lines, including a
+    prompt, interspersed with output.
+    """
+
+    name = 'Bash Session'
+    aliases = ['console', 'shell-session']
+    filenames = ['*.sh-session', '*.shell-session']
+    mimetypes = ['application/x-shell-session', 'application/x-sh-session']
+    url = 'https://en.wikipedia.org/wiki/Unix_shell'
+    version_added = '1.1'
+    _example = "console/example.sh-session"
+
+    _innerLexerCls = BashLexer
+    # Primary prompt: optional '[...]'/'(...)' venv/host prefix or a
+    # 'user@host'-style prefix, ending in one of '$', '#' or '%'.
+    # Group 1 is the prompt, group 2 the command text.
+    _ps1rgx = re.compile(
+        r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
+        r'?|\[\S+[@:][^\n]+\].+))\s*[$#%]\s*)(.*\n?)')
+    _ps2 = '> '  # secondary (continuation) prompt
+
+
+class BatchLexer(RegexLexer):
+ """
+ Lexer for the DOS/Windows Batch file format.
+ """
+ name = 'Batchfile'
+ aliases = ['batch', 'bat', 'dosbatch', 'winbatch']
+ filenames = ['*.bat', '*.cmd']
+ mimetypes = ['application/x-dos-batch']
+ url = 'https://en.wikipedia.org/wiki/Batch_file'
+ version_added = '0.7'
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ _nl = r'\n\x1a'
+ _punct = r'&<>|'
+ _ws = r'\t\v\f\r ,;=\xa0'
+ _nlws = r'\s\x1a\xa0,;='
+ _space = rf'(?:(?:(?:\^[{_nl}])?[{_ws}])+)'
+ _keyword_terminator = (rf'(?=(?:\^[{_nl}]?)?[{_ws}+./:[\\\]]|[{_nl}{_punct}(])')
+ _token_terminator = rf'(?=\^?[{_ws}]|[{_punct}{_nl}])'
+ _start_label = rf'((?:(?<=^[^:])|^[^:]?)[{_ws}]*)(:)'
+ _label = rf'(?:(?:[^{_nlws}{_punct}+:^]|\^[{_nl}]?[\w\W])*)'
+ _label_compound = rf'(?:(?:[^{_nlws}{_punct}+:^)]|\^[{_nl}]?[^)])*)'
+ _number = rf'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+){_token_terminator})'
+ _opword = r'(?:equ|geq|gtr|leq|lss|neq)'
+ _string = rf'(?:"[^{_nl}"]*(?:"|(?=[{_nl}])))'
+ _variable = (r'(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
+ rf'[^%:{_nl}]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%{_nl}^]|'
+ rf'\^[^%{_nl}])[^={_nl}]*=(?:[^%{_nl}^]|\^[^%{_nl}])*)?)?%))|'
+ rf'(?:\^?![^!:{_nl}]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
+ rf'[^!{_nl}^]|\^[^!{_nl}])[^={_nl}]*=(?:[^!{_nl}^]|\^[^!{_nl}])*)?)?\^?!))')
+ _core_token = rf'(?:(?:(?:\^[{_nl}]?)?[^"{_nlws}{_punct}])+)'
+ _core_token_compound = rf'(?:(?:(?:\^[{_nl}]?)?[^"{_nlws}{_punct})])+)'
+ _token = rf'(?:[{_punct}]+|{_core_token})'
+ _token_compound = rf'(?:[{_punct}]+|{_core_token_compound})'
+ _stoken = (rf'(?:[{_punct}]+|(?:{_string}|{_variable}|{_core_token})+)')
+
+ def _make_begin_state(compound, _core_token=_core_token,
+ _core_token_compound=_core_token_compound,
+ _keyword_terminator=_keyword_terminator,
+ _nl=_nl, _punct=_punct, _string=_string,
+ _space=_space, _start_label=_start_label,
+ _stoken=_stoken, _token_terminator=_token_terminator,
+ _variable=_variable, _ws=_ws):
+ rest = '(?:{}|{}|[^"%{}{}{}])*'.format(_string, _variable, _nl, _punct,
+ ')' if compound else '')
+ rest_of_line = rf'(?:(?:[^{_nl}^]|\^[{_nl}]?[\w\W])*)'
+ rest_of_line_compound = rf'(?:(?:[^{_nl}^)]|\^[{_nl}]?[^)])*)'
+ set_space = rf'((?:(?:\^[{_nl}]?)?[^\S\n])*)'
+ suffix = ''
+ if compound:
+ _keyword_terminator = rf'(?:(?=\))|{_keyword_terminator})'
+ _token_terminator = rf'(?:(?=\))|{_token_terminator})'
+ suffix = '/compound'
+ return [
+ ((r'\)', Punctuation, '#pop') if compound else
+ (rf'\)((?=\()|{_token_terminator}){rest_of_line}',
+ Comment.Single)),
+ (rf'(?={_start_label})', Text, f'follow{suffix}'),
+ (_space, using(this, state='text')),
+ include(f'redirect{suffix}'),
+ (rf'[{_nl}]+', Text),
+ (r'\(', Punctuation, 'root/compound'),
+ (r'@+', Punctuation),
+ (rf'((?:for|if|rem)(?:(?=(?:\^[{_nl}]?)?/)|(?:(?!\^)|'
+ rf'(?<=m))(?:(?=\()|{_token_terminator})))({_space}?{_core_token_compound if compound else _core_token}?(?:\^[{_nl}]?)?/(?:\^[{_nl}]?)?\?)',
+ bygroups(Keyword, using(this, state='text')),
+ f'follow{suffix}'),
+ (rf'(goto{_keyword_terminator})({rest}(?:\^[{_nl}]?)?/(?:\^[{_nl}]?)?\?{rest})',
+ bygroups(Keyword, using(this, state='text')),
+ f'follow{suffix}'),
+ (words(('assoc', 'break', 'cd', 'chdir', 'cls', 'color', 'copy',
+ 'date', 'del', 'dir', 'dpath', 'echo', 'endlocal', 'erase',
+ 'exit', 'ftype', 'keys', 'md', 'mkdir', 'mklink', 'move',
+ 'path', 'pause', 'popd', 'prompt', 'pushd', 'rd', 'ren',
+ 'rename', 'rmdir', 'setlocal', 'shift', 'start', 'time',
+ 'title', 'type', 'ver', 'verify', 'vol'),
+ suffix=_keyword_terminator), Keyword, f'follow{suffix}'),
+ (rf'(call)({_space}?)(:)',
+ bygroups(Keyword, using(this, state='text'), Punctuation),
+ f'call{suffix}'),
+ (rf'call{_keyword_terminator}', Keyword),
+ (rf'(for{_token_terminator}(?!\^))({_space})(/f{_token_terminator})',
+ bygroups(Keyword, using(this, state='text'), Keyword),
+ ('for/f', 'for')),
+ (rf'(for{_token_terminator}(?!\^))({_space})(/l{_token_terminator})',
+ bygroups(Keyword, using(this, state='text'), Keyword),
+ ('for/l', 'for')),
+ (rf'for{_token_terminator}(?!\^)', Keyword, ('for2', 'for')),
+ (rf'(goto{_keyword_terminator})({_space}?)(:?)',
+ bygroups(Keyword, using(this, state='text'), Punctuation),
+ f'label{suffix}'),
+ (rf'(if(?:(?=\()|{_token_terminator})(?!\^))({_space}?)((?:/i{_token_terminator})?)({_space}?)((?:not{_token_terminator})?)({_space}?)',
+ bygroups(Keyword, using(this, state='text'), Keyword,
+ using(this, state='text'), Keyword,
+ using(this, state='text')), ('(?', 'if')),
+ (rf'rem(((?=\()|{_token_terminator}){_space}?{_stoken}?.*|{_keyword_terminator}{rest_of_line_compound if compound else rest_of_line})',
+ Comment.Single, f'follow{suffix}'),
+ (rf'(set{_keyword_terminator}){set_space}(/a)',
+ bygroups(Keyword, using(this, state='text'), Keyword),
+ f'arithmetic{suffix}'),
+ (r'(set{}){}((?:/p)?){}((?:(?:(?:\^[{}]?)?[^"{}{}^={}]|'
+ r'\^[{}]?[^"=])+)?)((?:(?:\^[{}]?)?=)?)'.format(_keyword_terminator, set_space, set_space, _nl, _nl, _punct,
+ ')' if compound else '', _nl, _nl),
+ bygroups(Keyword, using(this, state='text'), Keyword,
+ using(this, state='text'), using(this, state='variable'),
+ Punctuation),
+ f'follow{suffix}'),
+ default(f'follow{suffix}')
+ ]
+
+ def _make_follow_state(compound, _label=_label,
+ _label_compound=_label_compound, _nl=_nl,
+ _space=_space, _start_label=_start_label,
+ _token=_token, _token_compound=_token_compound,
+ _ws=_ws):
+ suffix = '/compound' if compound else ''
+ state = []
+ if compound:
+ state.append((r'(?=\))', Text, '#pop'))
+ state += [
+ (rf'{_start_label}([{_ws}]*)({_label_compound if compound else _label})(.*)',
+ bygroups(Text, Punctuation, Text, Name.Label, Comment.Single)),
+ include(f'redirect{suffix}'),
+ (rf'(?=[{_nl}])', Text, '#pop'),
+ (r'\|\|?|&&?', Punctuation, '#pop'),
+ include('text')
+ ]
+ return state
+
+ def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct,
+ _string=_string, _variable=_variable,
+ _ws=_ws, _nlws=_nlws):
+ op = r'=+\-*/!~'
+ state = []
+ if compound:
+ state.append((r'(?=\))', Text, '#pop'))
+ state += [
+ (r'0[0-7]+', Number.Oct),
+ (r'0x[\da-f]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'[(),]+', Punctuation),
+ (rf'([{op}]|%|\^\^)+', Operator),
+ (r'({}|{}|(\^[{}]?)?[^(){}%\^"{}{}]|\^[{}]?{})+'.format(_string, _variable, _nl, op, _nlws, _punct, _nlws,
+ r'[^)]' if compound else r'[\w\W]'),
+ using(this, state='variable')),
+ (r'(?=[\x00|&])', Text, '#pop'),
+ include('follow')
+ ]
+ return state
+
+ def _make_call_state(compound, _label=_label,
+ _label_compound=_label_compound):
+ state = []
+ if compound:
+ state.append((r'(?=\))', Text, '#pop'))
+ state.append((r'(:?)(%s)' % (_label_compound if compound else _label),
+ bygroups(Punctuation, Name.Label), '#pop'))
+ return state
+
+ def _make_label_state(compound, _label=_label,
+ _label_compound=_label_compound, _nl=_nl,
+ _punct=_punct, _string=_string, _variable=_variable):
+ state = []
+ if compound:
+ state.append((r'(?=\))', Text, '#pop'))
+ state.append((r'({}?)((?:{}|{}|\^[{}]?{}|[^"%^{}{}{}])*)'.format(_label_compound if compound else _label, _string,
+ _variable, _nl, r'[^)]' if compound else r'[\w\W]', _nl,
+ _punct, r')' if compound else ''),
+ bygroups(Name.Label, Comment.Single), '#pop'))
+ return state
+
+ def _make_redirect_state(compound,
+ _core_token_compound=_core_token_compound,
+ _nl=_nl, _punct=_punct, _stoken=_stoken,
+ _string=_string, _space=_space,
+ _variable=_variable, _nlws=_nlws):
+ stoken_compound = (rf'(?:[{_punct}]+|(?:{_string}|{_variable}|{_core_token_compound})+)')
+ return [
+ (rf'((?:(?<=[{_nlws}])\d)?)(>>?&|<&)([{_nlws}]*)(\d)',
+ bygroups(Number.Integer, Punctuation, Text, Number.Integer)),
+ (rf'((?:(?<=[{_nlws}])(?>?|<)({_space}?{stoken_compound if compound else _stoken})',
+ bygroups(Number.Integer, Punctuation, using(this, state='text')))
+ ]
+
+ tokens = {
+ 'root': _make_begin_state(False),
+ 'follow': _make_follow_state(False),
+ 'arithmetic': _make_arithmetic_state(False),
+ 'call': _make_call_state(False),
+ 'label': _make_label_state(False),
+ 'redirect': _make_redirect_state(False),
+ 'root/compound': _make_begin_state(True),
+ 'follow/compound': _make_follow_state(True),
+ 'arithmetic/compound': _make_arithmetic_state(True),
+ 'call/compound': _make_call_state(True),
+ 'label/compound': _make_label_state(True),
+ 'redirect/compound': _make_redirect_state(True),
+ 'variable-or-escape': [
+ (_variable, Name.Variable),
+ (rf'%%|\^[{_nl}]?(\^!|[\w\W])', String.Escape)
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (_variable, Name.Variable),
+ (r'\^!|%%', String.Escape),
+ (rf'[^"%^{_nl}]+|[%^]', String.Double),
+ default('#pop')
+ ],
+ 'sqstring': [
+ include('variable-or-escape'),
+ (r'[^%]+|%', String.Single)
+ ],
+ 'bqstring': [
+ include('variable-or-escape'),
+ (r'[^%]+|%', String.Backtick)
+ ],
+ 'text': [
+ (r'"', String.Double, 'string'),
+ include('variable-or-escape'),
+ (rf'[^"%^{_nlws}{_punct}\d)]+|.', Text)
+ ],
+ 'variable': [
+ (r'"', String.Double, 'string'),
+ include('variable-or-escape'),
+ (rf'[^"%^{_nl}]+|.', Name.Variable)
+ ],
+ 'for': [
+ (rf'({_space})(in)({_space})(\()',
+ bygroups(using(this, state='text'), Keyword,
+ using(this, state='text'), Punctuation), '#pop'),
+ include('follow')
+ ],
+ 'for2': [
+ (r'\)', Punctuation),
+ (rf'({_space})(do{_token_terminator})',
+ bygroups(using(this, state='text'), Keyword), '#pop'),
+ (rf'[{_nl}]+', Text),
+ include('follow')
+ ],
+ 'for/f': [
+ (rf'(")((?:{_variable}|[^"])*?")([{_nlws}]*)(\))',
+ bygroups(String.Double, using(this, state='string'), Text,
+ Punctuation)),
+ (r'"', String.Double, ('#pop', 'for2', 'string')),
+ (rf"('(?:%%|{_variable}|[\w\W])*?')([{_nlws}]*)(\))",
+ bygroups(using(this, state='sqstring'), Text, Punctuation)),
+ (rf'(`(?:%%|{_variable}|[\w\W])*?`)([{_nlws}]*)(\))',
+ bygroups(using(this, state='bqstring'), Text, Punctuation)),
+ include('for2')
+ ],
+ 'for/l': [
+ (r'-?\d+', Number.Integer),
+ include('for2')
+ ],
+ 'if': [
+ (rf'((?:cmdextversion|errorlevel){_token_terminator})({_space})(\d+)',
+ bygroups(Keyword, using(this, state='text'),
+ Number.Integer), '#pop'),
+ (rf'(defined{_token_terminator})({_space})({_stoken})',
+ bygroups(Keyword, using(this, state='text'),
+ using(this, state='variable')), '#pop'),
+ (rf'(exist{_token_terminator})({_space}{_stoken})',
+ bygroups(Keyword, using(this, state='text')), '#pop'),
+ (rf'({_number}{_space})({_opword})({_space}{_number})',
+ bygroups(using(this, state='arithmetic'), Operator.Word,
+ using(this, state='arithmetic')), '#pop'),
+ (_stoken, using(this, state='text'), ('#pop', 'if2')),
+ ],
+ 'if2': [
+ (rf'({_space}?)(==)({_space}?{_stoken})',
+ bygroups(using(this, state='text'), Operator,
+ using(this, state='text')), '#pop'),
+ (rf'({_space})({_opword})({_space}{_stoken})',
+ bygroups(using(this, state='text'), Operator.Word,
+ using(this, state='text')), '#pop')
+ ],
+ '(?': [
+ (_space, using(this, state='text')),
+ (r'\(', Punctuation, ('#pop', 'else?', 'root/compound')),
+ default('#pop')
+ ],
+ 'else?': [
+ (_space, using(this, state='text')),
+ (rf'else{_token_terminator}', Keyword, '#pop'),
+ default('#pop')
+ ]
+ }
+
+
+class MSDOSSessionLexer(ShellSessionBaseLexer):
+    """
+    Lexer for MS DOS shell sessions, i.e. command lines, including a
+    prompt, interspersed with output.
+    """
+
+    name = 'MSDOS Session'
+    aliases = ['doscon']
+    filenames = []
+    mimetypes = []
+    url = 'https://en.wikipedia.org/wiki/MS-DOS'
+    version_added = '2.1'
+    _example = "doscon/session"
+
+    _innerLexerCls = BatchLexer
+    # Prompt is everything up to and including the first '>' (e.g. 'C:\>').
+    _ps1rgx = re.compile(r'^([^>]*>)(.*\n?)')
+    _ps2 = 'More? '  # cmd.exe continuation prompt
+
+
+class TcshLexer(RegexLexer):
+    """
+    Lexer for tcsh scripts.
+    """
+
+    name = 'Tcsh'
+    aliases = ['tcsh', 'csh']
+    filenames = ['*.tcsh', '*.csh']
+    mimetypes = ['application/x-csh']
+    url = 'https://www.tcsh.org'
+    version_added = '0.10'
+
+    tokens = {
+        'root': [
+            include('basic'),
+            (r'\$\(', Keyword, 'paren'),     # command substitution
+            (r'\$\{#?', Keyword, 'curly'),   # ${var} / ${#var} expansion
+            (r'`', String.Backtick, 'backticks'),
+            include('data'),
+        ],
+        'basic': [
+            # control-flow keywords
+            (r'\b(if|endif|else|while|then|foreach|case|default|'
+             r'break|continue|goto|breaksw|end|switch|endsw)\s*\b',
+             Keyword),
+            # tcsh builtin commands
+            (r'\b(alias|alloc|bg|bindkey|builtins|bye|caller|cd|chdir|'
+             r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
+             r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
+             r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
+             r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
+             r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
+             r'source|stop|suspend|source|suspend|telltc|time|'
+             r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
+             r'ver|wait|warp|watchlog|where|which)\s*\b',
+             Name.Builtin),
+            (r'#.*', Comment),
+            (r'\\[\w\W]', String.Escape),
+            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+            (r'[\[\]{}()=]+', Operator),
+            (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),  # here-document
+            (r';', Punctuation),
+        ],
+        'data': [
+            (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+            (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+            (r'\s+', Text),
+            (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
+            (r'\d+(?= |\Z)', Number),
+            (r'\$#?(\w+|.)', Name.Variable),
+        ],
+        'curly': [
+            (r'\}', Keyword, '#pop'),
+            (r':-', Keyword),   # default-value modifier
+            (r'\w+', Name.Variable),
+            (r'[^}:"\'`$]+', Punctuation),
+            (r':', Punctuation),
+            include('root'),
+        ],
+        'paren': [
+            (r'\)', Keyword, '#pop'),
+            include('root'),
+        ],
+        'backticks': [
+            (r'`', String.Backtick, '#pop'),
+            include('root'),
+        ],
+    }
+
+
+class TcshSessionLexer(ShellSessionBaseLexer):
+    """
+    Lexer for Tcsh sessions, i.e. command lines, including a
+    prompt, interspersed with output.
+    """
+
+    name = 'Tcsh Session'
+    aliases = ['tcshcon']
+    filenames = []
+    mimetypes = []
+    url = 'https://www.tcsh.org'
+    version_added = '2.1'
+    _example = "tcshcon/session"
+
+    _innerLexerCls = TcshLexer
+    # Prompt is a non-empty run of non-'>' characters followed by '>'.
+    _ps1rgx = re.compile(r'^([^>]+>)(.*\n?)')
+    _ps2 = '? '  # tcsh continuation prompt
+
+
+class PowerShellLexer(RegexLexer):
+ """
+ For Windows PowerShell code.
+ """
+ name = 'PowerShell'
+ aliases = ['powershell', 'pwsh', 'posh', 'ps1', 'psm1']
+ filenames = ['*.ps1', '*.psm1']
+ mimetypes = ['text/x-powershell']
+ url = 'https://learn.microsoft.com/en-us/powershell'
+ version_added = '1.5'
+
+ flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
+
+ keywords = (
+ 'while validateset validaterange validatepattern validatelength '
+ 'validatecount until trap switch return ref process param parameter in '
+ 'if global: local: function foreach for finally filter end elseif else '
+ 'dynamicparam do default continue cmdletbinding break begin alias \\? '
+ '% #script #private #local #global mandatory parametersetname position '
+ 'valuefrompipeline valuefrompipelinebypropertyname '
+ 'valuefromremainingarguments helpmessage try catch throw').split()
+
+ operators = (
+ 'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
+ 'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
+ 'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
+ 'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
+ 'lt match ne not notcontains notlike notmatch or regex replace '
+ 'wildcard').split()
+
+ verbs = (
+ 'write where watch wait use update unregister unpublish unprotect '
+ 'unlock uninstall undo unblock trace test tee take sync switch '
+ 'suspend submit stop step start split sort skip show set send select '
+ 'search scroll save revoke resume restore restart resolve resize '
+ 'reset request repair rename remove register redo receive read push '
+ 'publish protect pop ping out optimize open new move mount merge '
+ 'measure lock limit join invoke install initialize import hide group '
+ 'grant get format foreach find export expand exit enter enable edit '
+ 'dismount disconnect disable deny debug cxnew copy convertto '
+ 'convertfrom convert connect confirm compress complete compare close '
+ 'clear checkpoint block backup assert approve aggregate add').split()
+
+ aliases_ = (
+ 'ac asnp cat cd cfs chdir clc clear clhy cli clp cls clv cnsn '
+ 'compare copy cp cpi cpp curl cvpa dbp del diff dir dnsn ebp echo epal '
+ 'epcsv epsn erase etsn exsn fc fhx fl foreach ft fw gal gbp gc gci gcm '
+ 'gcs gdr ghy gi gjb gl gm gmo gp gps gpv group gsn gsnp gsv gu gv gwmi '
+ 'h history icm iex ihy ii ipal ipcsv ipmo ipsn irm ise iwmi iwr kill lp '
+ 'ls man md measure mi mount move mp mv nal ndr ni nmo npssc nsn nv ogv '
+ 'oh popd ps pushd pwd r rbp rcjb rcsn rd rdr ren ri rjb rm rmdir rmo '
+ 'rni rnp rp rsn rsnp rujb rv rvpa rwmi sajb sal saps sasv sbp sc select '
+ 'set shcm si sl sleep sls sort sp spjb spps spsv start sujb sv swmi tee '
+ 'trcm type wget where wjb write').split()
+
+ commenthelp = (
+ 'component description example externalhelp forwardhelpcategory '
+ 'forwardhelptargetname functionality inputs link '
+ 'notes outputs parameter remotehelprunspace role synopsis').split()
+
+ tokens = {
+ 'root': [
+ # we need to count pairs of parentheses for correct highlight
+ # of '$(...)' blocks in strings
+ (r'\(', Punctuation, 'child'),
+ (r'\s+', Text),
+ (r'^(\s*#[#\s]*)(\.(?:{}))([^\n]*$)'.format('|'.join(commenthelp)),
+ bygroups(Comment, String.Doc, Comment)),
+ (r'#[^\n]*?$', Comment),
+ (r'(<|<)#', Comment.Multiline, 'multline'),
+ (r'@"\n', String.Heredoc, 'heredoc-double'),
+ (r"@'\n.*?\n'@", String.Heredoc),
+ # escaped syntax
+ (r'`[\'"$@-]', Punctuation),
+ (r'"', String.Double, 'string'),
+ (r"'([^']|'')*'", String.Single),
+ (r'(\$|@@|@)((global|script|private|env):)?\w+',
+ Name.Variable),
+ (r'({})\b'.format('|'.join(keywords)), Keyword),
+ (r'-({})\b'.format('|'.join(operators)), Operator),
+ (r'({})-[a-z_]\w*\b'.format('|'.join(verbs)), Name.Builtin),
+ (r'({})\s'.format('|'.join(aliases_)), Name.Builtin),
+ (r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
+ (r'-[a-z_]\w*', Name),
+ (r'\w+', Name),
+ (r'[.,;:@{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
+ ],
+ 'child': [
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'multline': [
+ (r'[^#&.]+', Comment.Multiline),
+ (r'#(>|>)', Comment.Multiline, '#pop'),
+ (r'\.({})'.format('|'.join(commenthelp)), String.Doc),
+ (r'[#&.]', Comment.Multiline),
+ ],
+ 'string': [
+ (r"`[0abfnrtv'\"$`]", String.Escape),
+ (r'[^$`"]+', String.Double),
+ (r'\$\(', Punctuation, 'child'),
+ (r'""', String.Double),
+ (r'[`$]', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'heredoc-double': [
+ (r'\n"@', String.Heredoc, '#pop'),
+ (r'\$\(', Punctuation, 'child'),
+ (r'[^@\n]+"]', String.Heredoc),
+ (r".", String.Heredoc),
+ ]
+ }
+
+
+class PowerShellSessionLexer(ShellSessionBaseLexer):
+    """
+    Lexer for PowerShell sessions, i.e. command lines, including a
+    prompt, interspersed with output.
+    """
+
+    name = 'PowerShell Session'
+    aliases = ['pwsh-session', 'ps1con']
+    filenames = []
+    mimetypes = []
+    url = 'https://learn.microsoft.com/en-us/powershell'
+    version_added = '2.1'
+    _example = "pwsh-session/session"
+
+    _innerLexerCls = PowerShellLexer
+    # PowerShell emits '>> '-style continuations without a trailing
+    # backslash, so bare continuation lines must be recognised.
+    _bare_continuation = True
+    # Matches prompts like 'PS C:\> ' with an optional '[host]: ' prefix.
+    _ps1rgx = re.compile(r'^((?:\[[^]]+\]: )?PS[^>]*> ?)(.*\n?)')
+    _ps2 = '> '
+
+
+class FishShellLexer(RegexLexer):
+    """
+    Lexer for Fish shell scripts.
+    """
+
+    name = 'Fish'
+    aliases = ['fish', 'fishshell']
+    filenames = ['*.fish', '*.load']
+    mimetypes = ['application/x-fish']
+    url = 'https://fishshell.com'
+    version_added = '2.1'
+
+    tokens = {
+        'root': [
+            include('basic'),
+            include('data'),
+            include('interp'),
+        ],
+        'interp': [
+            (r'\$\(\(', Keyword, 'math'),   # arithmetic expansion
+            (r'\(', Keyword, 'paren'),      # command substitution
+            (r'\$#?(\w+|.)', Name.Variable),
+        ],
+        'basic': [
+            # keywords / flow control
+            (r'\b(begin|end|if|else|while|break|for|in|return|function|block|'
+             r'case|continue|switch|not|and|or|set|echo|exit|pwd|true|false|'
+             r'cd|count|test)(\s*)\b',
+             bygroups(Keyword, Text)),
+            # builtin commands
+            (r'\b(alias|bg|bind|breakpoint|builtin|command|commandline|'
+             r'complete|contains|dirh|dirs|emit|eval|exec|fg|fish|fish_config|'
+             r'fish_indent|fish_pager|fish_prompt|fish_right_prompt|'
+             r'fish_update_completions|fishd|funced|funcsave|functions|help|'
+             r'history|isatty|jobs|math|mimedb|nextd|open|popd|prevd|psub|'
+             r'pushd|random|read|set_color|source|status|trap|type|ulimit|'
+             r'umask|vared|fc|getopts|hash|kill|printf|time|wait)\s*\b(?!\.)',
+             Name.Builtin),
+            (r'#.*\n', Comment),
+            (r'\\[\w\W]', String.Escape),
+            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Whitespace, Operator)),
+            (r'[\[\]()=]', Operator),
+            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),  # here-document
+        ],
+        'data': [
+            # whole double-quoted string without interpolation
+            (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
+            (r'"', String.Double, 'string'),
+            (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+            (r"(?s)'.*?'", String.Single),
+            (r';', Punctuation),
+            (r'&|\||\^|<|>', Operator),
+            (r'\s+', Text),
+            (r'\d+(?= |\Z)', Number),
+            (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
+        ],
+        'string': [
+            (r'"', String.Double, '#pop'),
+            (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
+            include('interp'),
+        ],
+        'paren': [
+            (r'\)', Keyword, '#pop'),
+            include('root'),
+        ],
+        'math': [
+            (r'\)\)', Keyword, '#pop'),
+            (r'[-+*/%^|&]|\*\*|\|\|', Operator),
+            (r'\d+#\d+', Number),   # base#value literal
+            (r'\d+#(?! )', Number),
+            (r'\d+', Number),
+            include('root'),
+        ],
+    }
+
+class ExeclineLexer(RegexLexer):
+    """
+    Lexer for Laurent Bercot's execline language.
+    """
+
+    name = 'execline'
+    aliases = ['execline']
+    filenames = ['*.exec']
+    url = 'https://skarnet.org/software/execline'
+    version_added = '2.7'
+
+    tokens = {
+        'root': [
+            include('basic'),
+            include('data'),
+            include('interp')
+        ],
+        'interp': [
+            (r'\$\{', String.Interpol, 'curly'),  # ${var} substitution
+            (r'\$[\w@#]+', Name.Variable),  # user variable
+            (r'\$', Text),
+        ],
+        'basic': [
+            # execline builtin programs
+            (r'\b(background|backtick|cd|define|dollarat|elgetopt|'
+             r'elgetpositionals|elglob|emptyenv|envfile|exec|execlineb|'
+             r'exit|export|fdblock|fdclose|fdmove|fdreserve|fdswap|'
+             r'forbacktickx|foreground|forstdin|forx|getcwd|getpid|heredoc|'
+             r'homeof|if|ifelse|ifte|ifthenelse|importas|loopwhilex|'
+             r'multidefine|multisubstitute|pipeline|piperw|posix-cd|'
+             r'redirfd|runblock|shift|trap|tryexec|umask|unexport|wait|'
+             r'withstdinas)\b', Name.Builtin),
+            (r'\A#!.+\n', Comment.Hashbang),
+            (r'#.*\n', Comment.Single),
+            (r'[{}]', Operator),  # block delimiters
+        ],
+        'data': [
+            (r'(?s)"(\\.|[^"\\$])*"', String.Double),
+            (r'"', String.Double, 'string'),
+            (r'\s+', Text),
+            (r'[^\s{}$"\\]+', Text)
+        ],
+        'string': [
+            (r'"', String.Double, '#pop'),
+            (r'(?s)(\\\\|\\.|[^"\\$])+', String.Double),
+            include('interp'),
+        ],
+        'curly': [
+            (r'\}', String.Interpol, '#pop'),
+            (r'[\w#@]+', Name.Variable),
+            include('root')
+        ]
+
+    }
+
+    def analyse_text(text):
+        # Full confidence on an execlineb shebang; implicitly returns
+        # None (no match) otherwise.
+        if shebang_matches(text, r'execlineb'):
+            return 1
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/sieve.py b/venv/lib/python3.10/site-packages/pygments/lexers/sieve.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc48980c49921d811b5bed122c531cc8213be463
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/sieve.py
@@ -0,0 +1,78 @@
+"""
+ pygments.lexers.sieve
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Sieve file format.
+
+ https://tools.ietf.org/html/rfc5228
+ https://tools.ietf.org/html/rfc5173
+ https://tools.ietf.org/html/rfc5229
+ https://tools.ietf.org/html/rfc5230
+ https://tools.ietf.org/html/rfc5232
+ https://tools.ietf.org/html/rfc5235
+ https://tools.ietf.org/html/rfc5429
+ https://tools.ietf.org/html/rfc8580
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Comment, Name, Literal, String, Text, Punctuation, \
+ Keyword
+
+__all__ = ["SieveLexer"]
+
+
+class SieveLexer(RegexLexer):
+    """
+    Lexer for sieve format.
+
+    Covers the base RFC 5228 grammar plus the tags/tests added by the
+    extension RFCs listed in the module docstring.
+    """
+    name = 'Sieve'
+    filenames = ['*.siv', '*.sieve']
+    aliases = ['sieve']
+    url = 'https://en.wikipedia.org/wiki/Sieve_(mail_filtering_language)'
+    version_added = '2.6'
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            (r'[();,{}\[\]]', Punctuation),
+            # import:
+            (r'(?i)require',
+             Keyword.Namespace),
+            # tags:
+            (r'(?i)(:)(addresses|all|contains|content|create|copy|comparator|'
+             r'count|days|detail|domain|fcc|flags|from|handle|importance|is|'
+             r'localpart|length|lowerfirst|lower|matches|message|mime|options|'
+             r'over|percent|quotewildcard|raw|regex|specialuse|subject|text|'
+             r'under|upperfirst|upper|value)',
+             bygroups(Name.Tag, Name.Tag)),
+            # tokens:
+            (r'(?i)(address|addflag|allof|anyof|body|discard|elsif|else|envelope|'
+             r'ereject|exists|false|fileinto|if|hasflag|header|keep|'
+             r'notify_method_capability|notify|not|redirect|reject|removeflag|'
+             r'setflag|size|spamtest|stop|string|true|vacation|virustest)',
+             Name.Builtin),
+            (r'(?i)set',
+             Keyword.Declaration),
+            # number (with optional K/M/G size suffix):
+            (r'([0-9.]+)([kmgKMG])?',
+             bygroups(Literal.Number, Literal.Number)),
+            # comment:
+            (r'#.*$',
+             Comment.Single),
+            (r'/\*.*\*/',
+             Comment.Multiline),
+            # string:
+            (r'"[^"]*?"',
+             String),
+            # text block, terminated by a line consisting of a single dot:
+            (r'text:',
+             Name.Tag, 'text'),
+        ],
+        'text': [
+            (r'[^.].*?\n', String),
+            (r'^\.', Punctuation, "#pop"),
+        ]
+    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/slash.py b/venv/lib/python3.10/site-packages/pygments/lexers/slash.py
new file mode 100644
index 0000000000000000000000000000000000000000..1c439d0db0fd51d6870f1294b2eb276aa64e8a6c
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/slash.py
@@ -0,0 +1,183 @@
+"""
+ pygments.lexers.slash
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Slash programming language.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import ExtendedRegexLexer, bygroups, DelegatingLexer
+from pygments.token import Name, Number, String, Comment, Punctuation, \
+ Other, Keyword, Operator, Whitespace
+
+__all__ = ['SlashLexer']
+
+
+class SlashLanguageLexer(ExtendedRegexLexer):
+    """Tokenizer for the Slash language proper (the '<% ... %>' parts)."""
+
+    # Lookahead asserting the next char cannot extend an identifier.
+    _nkw = r'(?=[^a-zA-Z_0-9])'
+
+    def move_state(new_state):
+        # Replace the current state instead of stacking on top of it.
+        return ("#pop", new_state)
+
+    def right_angle_bracket(lexer, match, ctx):
+        # '}' closes a '#{...}' string interpolation when one is open on
+        # the state stack; otherwise it is left untokenized here.
+        if len(ctx.stack) > 1 and ctx.stack[-2] == "string":
+            ctx.stack.pop()
+        yield match.start(), String.Interpol, '}'
+        ctx.pos = match.end()
+        pass
+
+    tokens = {
+        "root": [
+            (r"<%=", Comment.Preproc, move_state("slash")),
+            (r"<%!!", Comment.Preproc, move_state("slash")),
+            (r"<%#.*?%>", Comment.Multiline),
+            (r"<%", Comment.Preproc, move_state("slash")),
+            (r".|\n", Other),   # everything outside '<% %>' is template text
+        ],
+        "string": [
+            (r"\\", String.Escape, move_state("string_e")),
+            (r"\"", String, move_state("slash")),
+            (r"#\{", String.Interpol, "slash"),  # interpolation opens code
+            (r'.|\n', String),
+        ],
+        "string_e": [
+            (r'n', String.Escape, move_state("string")),
+            (r't', String.Escape, move_state("string")),
+            (r'r', String.Escape, move_state("string")),
+            (r'e', String.Escape, move_state("string")),
+            (r'x[a-fA-F0-9]{2}', String.Escape, move_state("string")),
+            (r'.', String.Escape, move_state("string")),
+        ],
+        "regexp": [
+            (r'}[a-z]*', String.Regex, move_state("slash")),
+            (r'\\(.|\n)', String.Regex),
+            (r'{', String.Regex, "regexp_r"),   # track nested braces
+            (r'.|\n', String.Regex),
+        ],
+        "regexp_r": [
+            (r'}[a-z]*', String.Regex, "#pop"),
+            (r'\\(.|\n)', String.Regex),
+            (r'{', String.Regex, "regexp_r"),
+        ],
+        "slash": [
+            (r"%>", Comment.Preproc, move_state("root")),
+            (r"\"", String, move_state("string")),
+            (r"'[a-zA-Z0-9_]+", String),   # symbol-style literal
+            (r'%r{', String.Regex, move_state("regexp")),
+            (r'/\*.*?\*/', Comment.Multiline),
+            (r"(#|//).*?\n", Comment.Single),
+            (r'-?[0-9]+e[+-]?[0-9]+', Number.Float),
+            (r'-?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?', Number.Float),
+            (r'-?[0-9]+', Number.Integer),
+            (r'nil'+_nkw, Name.Builtin),
+            (r'true'+_nkw, Name.Builtin),
+            (r'false'+_nkw, Name.Builtin),
+            (r'self'+_nkw, Name.Builtin),
+            (r'(class)(\s+)([A-Z][a-zA-Z0-9_\']*)',
+             bygroups(Keyword, Whitespace, Name.Class)),
+            (r'class'+_nkw, Keyword),
+            (r'extends'+_nkw, Keyword),
+            # method definitions, including operator-method names
+            (r'(def)(\s+)(self)(\s*)(\.)(\s*)([a-z_][a-zA-Z0-9_\']*=?|<<|>>|==|<=>|<=|<|>=|>|\+|-(self)?|~(self)?|\*|/|%|^|&&|&|\||\[\]=?)',
+             bygroups(Keyword, Whitespace, Name.Builtin, Whitespace, Punctuation, Whitespace, Name.Function)),
+            (r'(def)(\s+)([a-z_][a-zA-Z0-9_\']*=?|<<|>>|==|<=>|<=|<|>=|>|\+|-(self)?|~(self)?|\*|/|%|^|&&|&|\||\[\]=?)',
+             bygroups(Keyword, Whitespace, Name.Function)),
+            (r'def'+_nkw, Keyword),
+            (r'if'+_nkw, Keyword),
+            (r'elsif'+_nkw, Keyword),
+            (r'else'+_nkw, Keyword),
+            (r'unless'+_nkw, Keyword),
+            (r'for'+_nkw, Keyword),
+            (r'in'+_nkw, Keyword),
+            (r'while'+_nkw, Keyword),
+            (r'until'+_nkw, Keyword),
+            (r'and'+_nkw, Keyword),
+            (r'or'+_nkw, Keyword),
+            (r'not'+_nkw, Keyword),
+            (r'lambda'+_nkw, Keyword),
+            (r'try'+_nkw, Keyword),
+            (r'catch'+_nkw, Keyword),
+            (r'return'+_nkw, Keyword),
+            (r'next'+_nkw, Keyword),
+            (r'last'+_nkw, Keyword),
+            (r'throw'+_nkw, Keyword),
+            (r'use'+_nkw, Keyword),
+            (r'switch'+_nkw, Keyword),
+            (r'\\', Keyword),
+            (r'λ', Keyword),
+            (r'__FILE__'+_nkw, Name.Builtin.Pseudo),
+            (r'__LINE__'+_nkw, Name.Builtin.Pseudo),
+            (r'[A-Z][a-zA-Z0-9_\']*'+_nkw, Name.Constant),
+            (r'[a-z_][a-zA-Z0-9_\']*'+_nkw, Name),
+            (r'@[a-z_][a-zA-Z0-9_\']*'+_nkw, Name.Variable.Instance),
+            (r'@@[a-z_][a-zA-Z0-9_\']*'+_nkw, Name.Variable.Class),
+            (r'\(', Punctuation),
+            (r'\)', Punctuation),
+            (r'\[', Punctuation),
+            (r'\]', Punctuation),
+            (r'\{', Punctuation),
+            (r'\}', right_angle_bracket),  # may close a '#{' interpolation
+            (r';', Punctuation),
+            (r',', Punctuation),
+            # operators, longest-match-first
+            (r'<<=', Operator),
+            (r'>>=', Operator),
+            (r'<<', Operator),
+            (r'>>', Operator),
+            (r'==', Operator),
+            (r'!=', Operator),
+            (r'=>', Operator),
+            (r'=', Operator),
+            (r'<=>', Operator),
+            (r'<=', Operator),
+            (r'>=', Operator),
+            (r'<', Operator),
+            (r'>', Operator),
+            (r'\+\+', Operator),
+            (r'\+=', Operator),
+            (r'-=', Operator),
+            (r'\*\*=', Operator),
+            (r'\*=', Operator),
+            (r'\*\*', Operator),
+            (r'\*', Operator),
+            (r'/=', Operator),
+            (r'\+', Operator),
+            (r'-', Operator),
+            (r'/', Operator),
+            (r'%=', Operator),
+            (r'%', Operator),
+            # NOTE(review): '^' below is a regex anchor, not a literal
+            # caret; this rule likely never matches mid-stream — possibly
+            # intended as r'\^=' — confirm against upstream before changing.
+            (r'^=', Operator),
+            (r'&&=', Operator),
+            (r'&=', Operator),
+            (r'&&', Operator),
+            (r'&', Operator),
+            (r'\|\|=', Operator),
+            (r'\|=', Operator),
+            (r'\|\|', Operator),
+            (r'\|', Operator),
+            (r'!', Operator),
+            (r'\.\.\.', Operator),
+            (r'\.\.', Operator),
+            (r'\.', Operator),
+            (r'::', Operator),
+            (r':', Operator),
+            (r'(\s|\n)+', Whitespace),
+            (r'[a-z_][a-zA-Z0-9_\']*', Name.Variable),
+        ],
+    }
+
+
+class SlashLexer(DelegatingLexer):
+    """
+    Lexer for the Slash programming language.
+
+    Delegates the template text (everything outside '<% %>') to the HTML
+    lexer and the embedded code to :class:`SlashLanguageLexer`.
+    """
+
+    name = 'Slash'
+    aliases = ['slash']
+    filenames = ['*.sla']
+    url = 'https://github.com/arturadib/Slash-A'
+    version_added = '2.4'
+
+    def __init__(self, **options):
+        # Imported lazily to avoid a circular import with the web lexers.
+        from pygments.lexers.web import HtmlLexer
+        super().__init__(HtmlLexer, SlashLanguageLexer, **options)
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/smalltalk.py b/venv/lib/python3.10/site-packages/pygments/lexers/smalltalk.py
new file mode 100644
index 0000000000000000000000000000000000000000..674b7b4b345a4b968f080bcd1552979201da6164
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/smalltalk.py
@@ -0,0 +1,194 @@
+"""
+ pygments.lexers.smalltalk
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Smalltalk and related languages.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['SmalltalkLexer', 'NewspeakLexer']
+
+
+class SmalltalkLexer(RegexLexer):
+    """
+    For Smalltalk syntax.
+    Contributed by Stefan Matthias Aust.
+    Rewritten by Nils Winter.
+    """
+    name = 'Smalltalk'
+    url = 'http://www.smalltalk.org/'
+    filenames = ['*.st']
+    aliases = ['smalltalk', 'squeak', 'st']
+    mimetypes = ['text/x-smalltalk']
+    version_added = '0.10'
+
+    # NOTE: rule order within each state is significant — earlier
+    # patterns take precedence at the same input position.
+    tokens = {
+        'root': [
+            (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)),
+            include('squeak fileout'),
+            include('whitespaces'),
+            include('method definition'),
+            (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)),
+            include('objects'),
+            (r'\^|\:=|\_', Operator),
+            # temporaries
+            (r'[\]({}.;!]', Text),
+        ],
+        'method definition': [
+            # Not perfect can't allow whitespaces at the beginning and the
+            # without breaking everything
+            (r'([a-zA-Z]+\w*:)(\s*)(\w+)',
+             bygroups(Name.Function, Text, Name.Variable)),
+            (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)),
+            (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$',
+             bygroups(Name.Function, Text, Name.Variable, Text)),
+        ],
+        'blockvariables': [
+            include('whitespaces'),
+            (r'(:)(\s*)(\w+)',
+             bygroups(Operator, Text, Name.Variable)),
+            (r'\|', Operator, '#pop'),
+            default('#pop'), # else pop
+        ],
+        # Literal rules push 'afterobject' so that a message send can
+        # follow the literal just as it can follow any other receiver.
+        'literals': [
+            (r"'(''|[^'])*'", String, 'afterobject'),
+            (r'\$.', String.Char, 'afterobject'),
+            (r'#\(', String.Symbol, 'parenth'),
+            (r'\)', Text, 'afterobject'),
+            (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
+        ],
+        '_parenth_helper': [
+            include('whitespaces'),
+            (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
+            (r'[-+*/\\~<>=|!?,@%\w:]+', String.Symbol),
+            # literals
+            (r"'(''|[^'])*'", String),
+            (r'\$.', String.Char),
+            (r'#*\(', String.Symbol, 'inner_parenth'),
+        ],
+        'parenth': [
+            # This state is a bit tricky since
+            # we can't just pop this state
+            (r'\)', String.Symbol, ('root', 'afterobject')),
+            include('_parenth_helper'),
+        ],
+        'inner_parenth': [
+            (r'\)', String.Symbol, '#pop'),
+            include('_parenth_helper'),
+        ],
+        'whitespaces': [
+            # skip whitespace and comments
+            (r'\s+', Text),
+            (r'"(""|[^"])*"', Comment),
+        ],
+        'objects': [
+            (r'\[', Text, 'blockvariables'),
+            (r'\]', Text, 'afterobject'),
+            (r'\b(self|super|true|false|nil|thisContext)\b',
+             Name.Builtin.Pseudo, 'afterobject'),
+            (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
+            (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
+            (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
+             String.Symbol, 'afterobject'),
+            include('literals'),
+        ],
+        # State entered after a receiver object has been scanned; it
+        # classifies what follows (selector, cascade, assignment, ...).
+        'afterobject': [
+            (r'! !$', Keyword, '#pop'), # squeak chunk delimiter
+            include('whitespaces'),
+            (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
+             Name.Builtin, '#pop'),
+            (r'\b(new\b(?!:))', Name.Builtin),
+            (r'\:=|\_', Operator, '#pop'),
+            (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'),
+            (r'\b[a-zA-Z]+\w*', Name.Function),
+            (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'),
+            (r'\.', Punctuation, '#pop'),
+            (r';', Punctuation),
+            (r'[\])}]', Text),
+            (r'[\[({]', Text, '#pop'),
+        ],
+        'squeak fileout': [
+            # Squeak fileout format (optional)
+            (r'^"(""|[^"])*"!', Keyword),
+            (r"^'(''|[^'])*'!", Keyword),
+            (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
+             bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
+            (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)",
+             bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
+            (r'^(\w+)( subclass: )(#\w+)'
+             r'(\s+instanceVariableNames: )(.*?)'
+             r'(\s+classVariableNames: )(.*?)'
+             r'(\s+poolDictionaries: )(.*?)'
+             r'(\s+category: )(.*?)(!)',
+             bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword,
+                      String, Keyword, String, Keyword, String, Keyword)),
+            (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)',
+             bygroups(Name.Class, Keyword, String, Keyword)),
+            (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)),
+            (r'! !$', Keyword),
+        ],
+    }
+
+
+class NewspeakLexer(RegexLexer):
+    """
+    For Newspeak syntax.
+    """
+    name = 'Newspeak'
+    url = 'http://newspeaklanguage.org/'
+    filenames = ['*.ns2']
+    aliases = ['newspeak', ]
+    mimetypes = ['text/x-newspeak']
+    version_added = '1.1'
+
+    tokens = {
+        'root': [
+            (r'\b(Newsqueak2)\b', Keyword.Declaration),
+            (r"'[^']*'", String),
+            (r'\b(class)(\s+)(\w+)(\s*)',
+             bygroups(Keyword.Declaration, Text, Name.Class, Text)),
+            (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b',
+             Keyword),
+            # keyword-message selector followed by its argument
+            (r'(\w+\:)(\s*)([a-zA-Z_]\w+)',
+             bygroups(Name.Function, Text, Name.Variable)),
+            # slot definition / assignment
+            (r'(\w+)(\s*)(=)',
+             bygroups(Name.Attribute, Text, Operator)),
+            (r'<\w+>', Comment.Special),
+            include('expressionstat'),
+            include('whitespace')
+        ],
+
+        'expressionstat': [
+            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+            (r'\d+', Number.Integer),
+            (r':\w+', Name.Variable),
+            (r'(\w+)(::)', bygroups(Name.Variable, Operator)),
+            (r'\w+:', Name.Function),
+            (r'\w+', Name.Variable),
+            (r'\(|\)', Punctuation),
+            (r'\[|\]', Punctuation),
+            (r'\{|\}', Punctuation),
+
+            (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator),
+            (r'\.|;', Punctuation),
+            include('whitespace'),
+            include('literals'),
+        ],
+        'literals': [
+            (r'\$.', String),
+            (r"'[^']*'", String),
+            (r"#'[^']*'", String.Symbol),
+            (r"#\w+:?", String.Symbol),
+            (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol)
+        ],
+        'whitespace': [
+            (r'\s+', Text),
+            # double-quoted text is a comment in Smalltalk-family syntax
+            (r'"[^"]*"', Comment)
+        ],
+    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/smithy.py b/venv/lib/python3.10/site-packages/pygments/lexers/smithy.py
new file mode 100644
index 0000000000000000000000000000000000000000..bd479aec40c596032921b5cd24f249052058e04e
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/smithy.py
@@ -0,0 +1,77 @@
+"""
+ pygments.lexers.smithy
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Smithy IDL.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Keyword, Name, String, \
+ Number, Whitespace, Punctuation
+
+__all__ = ['SmithyLexer']
+
+
+class SmithyLexer(RegexLexer):
+    """
+    For Smithy IDL
+    """
+    name = 'Smithy'
+    url = 'https://awslabs.github.io/smithy/'
+    filenames = ['*.smithy']
+    aliases = ['smithy']
+    version_added = '2.10'
+
+    # NOTE(review): 'unquoted' duplicates 'identifier' and is not
+    # referenced anywhere in this class — kept as-is (vendored code).
+    unquoted = r'[A-Za-z0-9_\.#$-]+'
+    identifier = r"[A-Za-z0-9_\.#$-]+"
+
+    simple_shapes = (
+        'use', 'byte', 'short', 'integer', 'long', 'float', 'document',
+        'double', 'bigInteger', 'bigDecimal', 'boolean', 'blob', 'string',
+        'timestamp',
+    )
+
+    aggregate_shapes = (
+        'apply', 'list', 'map', 'set', 'structure', 'union', 'resource',
+        'operation', 'service', 'trait'
+    )
+
+    tokens = {
+        'root': [
+            # documentation comments (///) before plain comments (//)
+            (r'///.*$', Comment.Multiline),
+            (r'//.*$', Comment),
+            (r'@[0-9a-zA-Z\.#-]*', Name.Decorator),
+            (r'(=)', Name.Decorator),
+            (r'^(\$version)(:)(.+)',
+             bygroups(Keyword.Declaration, Name.Decorator, Name.Class)),
+            (r'^(namespace)(\s+' + identifier + r')\b',
+             bygroups(Keyword.Declaration, Name.Class)),
+            (words(simple_shapes,
+                   prefix=r'^', suffix=r'(\s+' + identifier + r')\b'),
+             bygroups(Keyword.Declaration, Name.Class)),
+            (words(aggregate_shapes,
+                   prefix=r'^', suffix=r'(\s+' + identifier + r')'),
+             bygroups(Keyword.Declaration, Name.Class)),
+            (r'^(metadata)(\s+)((?:\S+)|(?:\"[^"]+\"))(\s*)(=)',
+             bygroups(Keyword.Declaration, Whitespace, Name.Class,
+                      Whitespace, Name.Decorator)),
+            (r"(true|false|null)", Keyword.Constant),
+            (r"(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)", Number),
+            (identifier + ":", Name.Label),
+            (identifier, Name.Variable.Class),
+            # brackets re-enter 'root' so nesting depth is tracked
+            (r'\[', Text, "#push"),
+            (r'\]', Text, "#pop"),
+            (r'\(', Text, "#push"),
+            (r'\)', Text, "#pop"),
+            (r'\{', Text, "#push"),
+            (r'\}', Text, "#pop"),
+            (r'"{3}(\\\\|\n|\\")*"{3}', String.Doc),
+            (r'"(\\\\|\n|\\"|[^"])*"', String.Double),
+            (r"'(\\\\|\n|\\'|[^'])*'", String.Single),
+            (r'[:,]+', Punctuation),
+            (r'\s+', Whitespace),
+        ]
+    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/smv.py b/venv/lib/python3.10/site-packages/pygments/lexers/smv.py
new file mode 100644
index 0000000000000000000000000000000000000000..bf97b52a5ce1953909b43b66255f0324fd8f051e
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/smv.py
@@ -0,0 +1,78 @@
+"""
+ pygments.lexers.smv
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the SMV languages.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Comment, Keyword, Name, Number, Operator, \
+ Punctuation, Text
+
+__all__ = ['NuSMVLexer']
+
+
+class NuSMVLexer(RegexLexer):
+    """
+    Lexer for the NuSMV language.
+    """
+
+    name = 'NuSMV'
+    aliases = ['nusmv']
+    filenames = ['*.smv']
+    mimetypes = []
+    url = 'https://nusmv.fbk.eu'
+    version_added = '2.2'
+
+    tokens = {
+        'root': [
+            # Comments
+            (r'(?s)\/\-\-.*?\-\-/', Comment),
+            (r'--.*\n', Comment),
+
+            # Reserved
+            (words(('MODULE', 'DEFINE', 'MDEFINE', 'CONSTANTS', 'VAR', 'IVAR',
+                    'FROZENVAR', 'INIT', 'TRANS', 'INVAR', 'SPEC', 'CTLSPEC',
+                    'LTLSPEC', 'PSLSPEC', 'COMPUTE', 'NAME', 'INVARSPEC',
+                    'FAIRNESS', 'JUSTICE', 'COMPASSION', 'ISA', 'ASSIGN',
+                    'CONSTRAINT', 'SIMPWFF', 'CTLWFF', 'LTLWFF', 'PSLWFF',
+                    'COMPWFF', 'IN', 'MIN', 'MAX', 'MIRROR', 'PRED',
+                    'PREDICATES'), suffix=r'(?![\w$#-])'),
+             Keyword.Declaration),
+            (r'process(?![\w$#-])', Keyword),
+            (words(('array', 'of', 'boolean', 'integer', 'real', 'word'),
+                   suffix=r'(?![\w$#-])'), Keyword.Type),
+            (words(('case', 'esac'), suffix=r'(?![\w$#-])'), Keyword),
+            (words(('word1', 'bool', 'signed', 'unsigned', 'extend', 'resize',
+                    'sizeof', 'uwconst', 'swconst', 'init', 'self', 'count',
+                    'abs', 'max', 'min'), suffix=r'(?![\w$#-])'),
+             Name.Builtin),
+            # temporal/modal operators and word-level operators
+            (words(('EX', 'AX', 'EF', 'AF', 'EG', 'AG', 'E', 'F', 'O', 'G',
+                    'H', 'X', 'Y', 'Z', 'A', 'U', 'S', 'V', 'T', 'BU', 'EBF',
+                    'ABF', 'EBG', 'ABG', 'next', 'mod', 'union', 'in', 'xor',
+                    'xnor'), suffix=r'(?![\w$#-])'),
+             Operator.Word),
+            (words(('TRUE', 'FALSE'), suffix=r'(?![\w$#-])'), Keyword.Constant),
+
+            # Names
+            (r'[a-zA-Z_][\w$#-]*', Name.Variable),
+
+            # Operators
+            (r':=', Operator),
+            (r'[-&|+*/<>!=]', Operator),
+
+            # Literals
+            (r'\-?\d+\b', Number.Integer),
+            (r'0[su][bB]\d*_[01_]+', Number.Bin),
+            (r'0[su][oO]\d*_[0-7_]+', Number.Oct),
+            (r'0[su][dD]\d*_[\d_]+', Number.Decimal),
+            (r'0[su][hH]\d*_[\da-fA-F_]+', Number.Hex),
+
+            # Whitespace, punctuation and the rest
+            (r'\s+', Text.Whitespace),
+            (r'[()\[\]{};?:.,]', Punctuation),
+        ],
+    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/snobol.py b/venv/lib/python3.10/site-packages/pygments/lexers/snobol.py
new file mode 100644
index 0000000000000000000000000000000000000000..bab51e9b1107c5e295febaa842c68ac93ee1b1fc
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/snobol.py
@@ -0,0 +1,82 @@
+"""
+ pygments.lexers.snobol
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the SNOBOL language.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['SnobolLexer']
+
+
+class SnobolLexer(RegexLexer):
+    """
+    Lexer for the SNOBOL4 programming language.
+
+    Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
+    Does not require spaces around binary operators.
+    """
+
+    name = "Snobol"
+    aliases = ["snobol"]
+    filenames = ['*.snobol']
+    mimetypes = ['text/x-snobol']
+    url = 'https://www.regressive.org/snobol4'
+    version_added = '1.5'
+
+    tokens = {
+        # root state, start of line
+        # comments, continuation lines, and directives start in column 1
+        # as do labels
+        'root': [
+            (r'\*.*\n', Comment),
+            (r'[+.] ', Punctuation, 'statement'),
+            (r'-.*\n', Comment),
+            (r'END\s*\n', Name.Label, 'heredoc'),
+            (r'[A-Za-z$][\w$]*', Name.Label, 'statement'),
+            (r'\s+', Text, 'statement'),
+        ],
+        # statement state, line after continuation or label
+        'statement': [
+            # end of line pops back to 'root' for the next line
+            (r'\s*\n', Text, '#pop'),
+            (r'\s+', Text),
+            (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
+             r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
+             r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
+             r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
+             Name.Builtin),
+            (r'[A-Za-z][\w.]*', Name),
+            # ASCII equivalents of original operators
+            # | for the EBCDIC equivalent, ! likewise
+            # \ for EBCDIC negation
+            (r'\*\*|[?$.!%*/#+\-@|&\\=]', Operator),
+            (r'"[^"]*"', String),
+            (r"'[^']*'", String),
+            # Accept SPITBOL syntax for real numbers
+            # as well as Macro SNOBOL4
+            (r'[0-9]+(?=[^.EeDd])', Number.Integer),
+            (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
+            # Goto
+            (r':', Punctuation, 'goto'),
+            (r'[()<>,;]', Punctuation),
+        ],
+        # Goto block
+        'goto': [
+            # newline pops both 'goto' and 'statement'
+            (r'\s*\n', Text, "#pop:2"),
+            (r'\s+', Text),
+            (r'F|S', Keyword),
+            (r'(\()([A-Za-z][\w.]*)(\))',
+             bygroups(Punctuation, Name.Label, Punctuation))
+        ],
+        # everything after the END statement is basically one
+        # big heredoc.
+        'heredoc': [
+            (r'.*\n', String.Heredoc)
+        ]
+    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/solidity.py b/venv/lib/python3.10/site-packages/pygments/lexers/solidity.py
new file mode 100644
index 0000000000000000000000000000000000000000..3182a148a3830c6b55e6b4f194010121dfe0abc4
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/solidity.py
@@ -0,0 +1,87 @@
+"""
+ pygments.lexers.solidity
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Solidity.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Whitespace
+
+__all__ = ['SolidityLexer']
+
+
+class SolidityLexer(RegexLexer):
+    """
+    For Solidity source code.
+    """
+
+    name = 'Solidity'
+    aliases = ['solidity']
+    filenames = ['*.sol']
+    mimetypes = []
+    url = 'https://soliditylang.org'
+    version_added = '2.5'
+
+    # matches the elementary types, including all sized variants
+    # (bytes8..bytes256, int8..int256, uint8..uint256, ...)
+    datatype = (
+        r'\b(address|bool|(?:(?:bytes|hash|int|string|uint)(?:8|16|24|32|40|48|56|64'
+        r'|72|80|88|96|104|112|120|128|136|144|152|160|168|176|184|192|200|208'
+        r'|216|224|232|240|248|256)?))\b'
+    )
+
+    tokens = {
+        'root': [
+            include('whitespace'),
+            include('comments'),
+            (r'\bpragma\s+solidity\b', Keyword, 'pragma'),
+            (r'\b(contract)(\s+)([a-zA-Z_]\w*)',
+             bygroups(Keyword, Whitespace, Name.Entity)),
+            # typed declaration with optional visibility modifier
+            (datatype + r'(\s+)((?:external|public|internal|private)\s+)?' +
+             r'([a-zA-Z_]\w*)',
+             bygroups(Keyword.Type, Whitespace, Keyword, Name.Variable)),
+            (r'\b(enum|event|function|struct)(\s+)([a-zA-Z_]\w*)',
+             bygroups(Keyword.Type, Whitespace, Name.Variable)),
+            (r'\b(msg|block|tx)\.([A-Za-z_][a-zA-Z0-9_]*)\b', Keyword),
+            (words((
+                'block', 'break', 'constant', 'constructor', 'continue',
+                'contract', 'do', 'else', 'external', 'false', 'for',
+                'function', 'if', 'import', 'inherited', 'internal', 'is',
+                'library', 'mapping', 'memory', 'modifier', 'msg', 'new',
+                'payable', 'private', 'public', 'require', 'return',
+                'returns', 'struct', 'suicide', 'throw', 'this', 'true',
+                'tx', 'var', 'while'), prefix=r'\b', suffix=r'\b'),
+             Keyword.Type),
+            (words(('keccak256',), prefix=r'\b', suffix=r'\b'), Name.Builtin),
+            (datatype, Keyword.Type),
+            include('constants'),
+            (r'[a-zA-Z_]\w*', Text),
+            (r'[~!%^&*+=|?:<>/-]', Operator),
+            (r'[.;{}(),\[\]]', Punctuation)
+        ],
+        'comments': [
+            (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
+            (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
+            # unterminated block comment runs to end of input
+            (r'/(\\\n)?[*][\w\W]*', Comment.Multiline)
+        ],
+        'constants': [
+            (r'("(\\"|.)*?")', String.Double),
+            (r"('(\\'|.)*?')", String.Single),
+            (r'\b0[xX][0-9a-fA-F]+\b', Number.Hex),
+            (r'\b\d+\b', Number.Decimal),
+        ],
+        # inside `pragma solidity ...;` — pops at the semicolon
+        'pragma': [
+            include('whitespace'),
+            include('comments'),
+            (r'(\^|>=|<)(\s*)(\d+\.\d+\.\d+)',
+             bygroups(Operator, Whitespace, Keyword)),
+            (r';', Punctuation, '#pop')
+        ],
+        'whitespace': [
+            (r'\s+', Whitespace),
+            (r'\n', Whitespace)
+        ]
+    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/soong.py b/venv/lib/python3.10/site-packages/pygments/lexers/soong.py
new file mode 100644
index 0000000000000000000000000000000000000000..bbf204dd22543f06c65849d54613c3a573dda899
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/soong.py
@@ -0,0 +1,78 @@
+"""
+ pygments.lexers.soong
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Soong (Android.bp Blueprint) files.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, include
+from pygments.token import Comment, Name, Number, Operator, Punctuation, \
+ String, Whitespace
+
+__all__ = ['SoongLexer']
+
+class SoongLexer(RegexLexer):
+    """
+    For Soong (Android.bp Blueprint) build files.
+    """
+    name = 'Soong'
+    version_added = '2.18'
+    url = 'https://source.android.com/docs/setup/reference/androidbp'
+    aliases = ['androidbp', 'bp', 'soong']
+    filenames = ['Android.bp']
+
+    tokens = {
+        'root': [
+            # A variable assignment
+            (r'(\w*)(\s*)(\+?=)(\s*)',
+             bygroups(Name.Variable, Whitespace, Operator, Whitespace),
+             'assign-rhs'),
+
+            # A top-level module
+            (r'(\w*)(\s*)(\{)',
+             bygroups(Name.Function, Whitespace, Punctuation),
+             'in-rule'),
+
+            # Everything else
+            include('comments'),
+            (r'\s+', Whitespace),  # newlines okay
+        ],
+        'assign-rhs': [
+            include('expr'),
+            # assignment ends at the newline
+            (r'\n', Whitespace, '#pop'),
+        ],
+        'in-list': [
+            include('expr'),
+            include('comments'),
+            (r'\s+', Whitespace),  # newlines okay in a list
+            (r',', Punctuation),
+            (r'\]', Punctuation, '#pop'),
+        ],
+        'in-map': [
+            # A map key
+            (r'(\w+)(:)(\s*)', bygroups(Name, Punctuation, Whitespace)),
+
+            include('expr'),
+            include('comments'),
+            (r'\s+', Whitespace),  # newlines okay in a map
+            (r',', Punctuation),
+            (r'\}', Punctuation, '#pop'),
+        ],
+        'in-rule': [
+            # Just re-use map syntax
+            include('in-map'),
+        ],
+        'comments': [
+            (r'//.*', Comment.Single),
+            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+        ],
+        'expr': [
+            (r'(true|false)\b', Name.Builtin),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
+            (r'\d+', Number.Integer),
+            (r'".*?"', String),
+            # nested maps and lists push their own states
+            (r'\{', Punctuation, 'in-map'),
+            (r'\[', Punctuation, 'in-list'),
+            (r'\w+', Name),
+        ],
+    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/sophia.py b/venv/lib/python3.10/site-packages/pygments/lexers/sophia.py
new file mode 100644
index 0000000000000000000000000000000000000000..37fcec5c39bfe4e8f52bf735618971684ab27739
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/sophia.py
@@ -0,0 +1,102 @@
+"""
+ pygments.lexers.sophia
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Sophia.
+
+ Derived from pygments/lexers/reason.py.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, default, words
+from pygments.token import Comment, Keyword, Name, Number, Operator, \
+ Punctuation, String, Text
+
+__all__ = ['SophiaLexer']
+
+class SophiaLexer(RegexLexer):
+    """
+    A Sophia lexer.
+    """
+
+    name = 'Sophia'
+    aliases = ['sophia']
+    filenames = ['*.aes']
+    mimetypes = []
+    url = 'https://docs.aeternity.com/aesophia'
+    version_added = '2.11'
+
+    keywords = (
+        'contract', 'include', 'let', 'switch', 'type', 'record', 'datatype',
+        'if', 'elif', 'else', 'function', 'stateful', 'payable', 'public',
+        'entrypoint', 'private', 'indexed', 'namespace', 'interface', 'main',
+        'using', 'as', 'for', 'hiding',
+    )
+
+    builtins = ('state', 'put', 'abort', 'require')
+
+    word_operators = ('mod', 'band', 'bor', 'bxor', 'bnot')
+
+    primitive_types = ('int', 'address', 'bool', 'bits', 'bytes', 'string',
+                       'list', 'option', 'char', 'unit', 'map', 'event',
+                       'hash', 'signature', 'oracle', 'oracle_query')
+
+    tokens = {
+        'escape-sequence': [
+            (r'\\[\\"\'ntbr]', String.Escape),
+            (r'\\[0-9]{3}', String.Escape),
+            (r'\\x[0-9a-fA-F]{2}', String.Escape),
+        ],
+        'root': [
+            (r'\s+', Text.Whitespace),
+            (r'(true|false)\b', Keyword.Constant),
+            # capitalized name followed by '.' starts a qualified path
+            (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Class, 'dotted'),
+            (r'\b([A-Z][\w\']*)', Name.Function),
+            (r'//.*?\n', Comment.Single),
+            (r'\/\*(?!/)', Comment.Multiline, 'comment'),
+
+            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+            (r'#[\da-fA-F][\da-fA-F_]*', Name.Label),
+            (r'\d[\d_]*', Number.Integer),
+
+            (words(keywords, suffix=r'\b'), Keyword),
+            (words(builtins, suffix=r'\b'), Name.Builtin),
+            (words(word_operators, prefix=r'\b', suffix=r'\b'), Operator.Word),
+            (words(primitive_types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+
+            (r'[=!<>+\\*/:&|?~@^-]', Operator.Word),
+            (r'[.;:{}(),\[\]]', Punctuation),
+
+            # address/oracle/query/contract literal prefixes
+            (r"(ak_|ok_|oq_|ct_)[\w']*", Name.Label),
+            (r"[^\W\d][\w']*", Name),
+
+            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+             String.Char),
+            (r"'.'", String.Char),
+            (r"'[a-z][\w]*", Name.Variable),
+
+            (r'"', String.Double, 'string')
+        ],
+        'comment': [
+            (r'[^/*]+', Comment.Multiline),
+            # nested comments push/pop so depth is tracked
+            (r'\/\*', Comment.Multiline, '#push'),
+            (r'\*\/', Comment.Multiline, '#pop'),
+            (r'\*', Comment.Multiline),
+        ],
+        'string': [
+            (r'[^\\"]+', String.Double),
+            include('escape-sequence'),
+            (r'\\\n', String.Double),
+            (r'"', String.Double, '#pop'),
+        ],
+        'dotted': [
+            (r'\s+', Text),
+            (r'\.', Punctuation),
+            (r'[A-Z][\w\']*(?=\s*\.)', Name.Function),
+            (r'[A-Z][\w\']*', Name.Function, '#pop'),
+            (r'[a-z_][\w\']*', Name, '#pop'),
+            default('#pop'),
+        ],
+    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/special.py b/venv/lib/python3.10/site-packages/pygments/lexers/special.py
new file mode 100644
index 0000000000000000000000000000000000000000..524946fc310a6acef08db483ac7f12f09a5e8706
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/special.py
@@ -0,0 +1,122 @@
+"""
+ pygments.lexers.special
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Special lexers.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import ast
+
+from pygments.lexer import Lexer, line_re
+from pygments.token import Token, Error, Text, Generic
+from pygments.util import get_choice_opt
+
+
+__all__ = ['TextLexer', 'OutputLexer', 'RawTokenLexer']
+
+
+class TextLexer(Lexer):
+    """
+    "Null" lexer, doesn't highlight anything.
+    """
+    name = 'Text only'
+    aliases = ['text']
+    filenames = ['*.txt']
+    mimetypes = ['text/plain']
+    url = ""
+    version_added = ''
+
+    # low, non-zero priority: selected only when no other lexer matches
+    priority = 0.01
+
+    def get_tokens_unprocessed(self, text):
+        # the whole input is one Text token
+        yield 0, Text, text
+
+    def analyse_text(text):
+        return TextLexer.priority
+
+
+class OutputLexer(Lexer):
+    """
+    Simple lexer that highlights everything as ``Token.Generic.Output``.
+    """
+    name = 'Text output'
+    aliases = ['output']
+    url = ""
+    version_added = '2.10'
+    _example = "output/output"
+
+    def get_tokens_unprocessed(self, text):
+        # the whole input is one Generic.Output token
+        yield 0, Generic.Output, text
+
+
+# maps token-type strings (e.g. "Token.Name.Class") to resolved token
+# types, so each string is parsed only once across all inputs
+_ttype_cache = {}
+
+
+class RawTokenLexer(Lexer):
+    """
+    Recreate a token stream formatted with the `RawTokenFormatter`.
+
+    Additional options accepted:
+
+    `compress`
+        If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
+        the given compression algorithm before lexing (default: ``""``).
+    """
+    name = 'Raw token data'
+    aliases = []
+    filenames = []
+    mimetypes = ['application/x-pygments-tokens']
+    url = 'https://pygments.org/docs/formatters/#RawTokenFormatter'
+    version_added = ''
+
+    def __init__(self, **options):
+        self.compress = get_choice_opt(options, 'compress',
+                                       ['', 'none', 'gz', 'bz2'], '')
+        Lexer.__init__(self, **options)
+
+    def get_tokens(self, text):
+        if self.compress:
+            # decompression operates on bytes; latin1 round-trips any byte
+            if isinstance(text, str):
+                text = text.encode('latin1')
+            try:
+                if self.compress == 'gz':
+                    import gzip
+                    text = gzip.decompress(text)
+                elif self.compress == 'bz2':
+                    import bz2
+                    text = bz2.decompress(text)
+            except OSError:
+                # corrupt stream: emit the raw input as a single Error token
+                yield Error, text.decode('latin1')
+        if isinstance(text, bytes):
+            text = text.decode('latin1')
+
+        # do not call Lexer.get_tokens() because stripping is not optional.
+        text = text.strip('\n') + '\n'
+        for i, t, v in self.get_tokens_unprocessed(text):
+            yield t, v
+
+    def get_tokens_unprocessed(self, text):
+        length = 0
+        for match in line_re.finditer(text):
+            try:
+                # each line is "<token type>\t<repr of value>"
+                ttypestr, val = match.group().rstrip().split('\t', 1)
+                ttype = _ttype_cache.get(ttypestr)
+                if not ttype:
+                    # resolve e.g. "Token.Name.Class" attribute by attribute
+                    ttype = Token
+                    ttypes = ttypestr.split('.')[1:]
+                    for ttype_ in ttypes:
+                        if not ttype_ or not ttype_[0].isupper():
+                            raise ValueError('malformed token name')
+                        ttype = getattr(ttype, ttype_)
+                    _ttype_cache[ttypestr] = ttype
+                # literal_eval (not eval) keeps untrusted input safe
+                val = ast.literal_eval(val)
+                if not isinstance(val, str):
+                    raise ValueError('expected str')
+            except (SyntaxError, ValueError):
+                # malformed line: pass it through as an Error token
+                val = match.group()
+                ttype = Error
+            yield length, ttype, val
+            length += len(val)
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/spice.py b/venv/lib/python3.10/site-packages/pygments/lexers/spice.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d2b1a1a81a47b9ac830b204df3ce015b8f66985
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/spice.py
@@ -0,0 +1,70 @@
+"""
+ pygments.lexers.spice
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Spice programming language.
+
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Whitespace
+
+__all__ = ['SpiceLexer']
+
+
+class SpiceLexer(RegexLexer):
+    """
+    For Spice source.
+    """
+    name = 'Spice'
+    url = 'https://www.spicelang.com'
+    filenames = ['*.spice']
+    aliases = ['spice', 'spicelang']
+    mimetypes = ['text/x-spice']
+    version_added = '2.11'
+
+    tokens = {
+        'root': [
+            (r'\n', Whitespace),
+            (r'\s+', Whitespace),
+            (r'\\\n', Text),
+            # comments
+            (r'//(.*?)\n', Comment.Single),
+            # /** ... */ doc comments before plain /* ... */ comments
+            (r'/(\\\n)?[*]{2}(.|\n)*?[*](\\\n)?/', String.Doc),
+            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+            # keywords
+            (r'(import|as)\b', Keyword.Namespace),
+            (r'(f|p|type|struct|interface|enum|alias|operator)\b', Keyword.Declaration),
+            (words(('if', 'else', 'switch', 'case', 'default', 'for', 'foreach', 'do',
+                    'while', 'break', 'continue', 'fallthrough', 'return', 'assert',
+                    'unsafe', 'ext'), suffix=r'\b'), Keyword),
+            (words(('const', 'signed', 'unsigned', 'inline', 'public', 'heap', 'compose'),
+                   suffix=r'\b'), Keyword.Pseudo),
+            (words(('new', 'yield', 'stash', 'pick', 'sync', 'class'), suffix=r'\b'),
+             Keyword.Reserved),
+            (r'(true|false|nil)\b', Keyword.Constant),
+            (words(('double', 'int', 'short', 'long', 'byte', 'char', 'string',
+                    'bool', 'dyn'), suffix=r'\b'), Keyword.Type),
+            (words(('printf', 'sizeof', 'alignof', 'len', 'panic'), suffix=r'\b(\()'),
+             bygroups(Name.Builtin, Punctuation)),
+            # numeric literals
+            (r'[-]?[0-9]*[.][0-9]+([eE][+-]?[0-9]+)?', Number.Double),
+            (r'0[bB][01]+[slu]?', Number.Bin),
+            (r'0[oO][0-7]+[slu]?', Number.Oct),
+            (r'0[xXhH][0-9a-fA-F]+[slu]?', Number.Hex),
+            (r'(0[dD])?[0-9]+[slu]?', Number.Integer),
+            # string literal
+            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+            # char literal
+            (r'\'(\\\\|\\[^\\]|[^\'\\])\'', String.Char),
+            # tokens
+            (r'<<=|>>=|<<|>>|<=|>=|\+=|-=|\*=|/=|\%=|\|=|&=|\^=|&&|\|\||&|\||'
+             r'\+\+|--|\%|\^|\~|==|!=|->|::|[.]{3}|#!|#|[+\-*/&]', Operator),
+            (r'[|<>=!()\[\]{}.,;:\?]', Punctuation),
+            # identifiers
+            (r'[^\W\d]\w*', Name.Other),
+        ]
+    }
diff --git a/venv/lib/python3.10/site-packages/pygments/lexers/sql.py b/venv/lib/python3.10/site-packages/pygments/lexers/sql.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3e6f17f39491b43cc621ca6981ff33ca90c380f
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pygments/lexers/sql.py
@@ -0,0 +1,1109 @@
+"""
+ pygments.lexers.sql
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various SQL dialects and related interactive sessions.
+
+ Postgres specific lexers:
+
+ `PostgresLexer`
+ A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
+ lexer are:
+
+ - keywords and data types list parsed from the PG docs (run the
+ `_postgres_builtins` module to update them);
+ - Content of $-strings parsed using a specific lexer, e.g. the content
+ of a PL/Python function is parsed using the Python lexer;
+ - parse PG specific constructs: E-strings, $-strings, U&-strings,
+ different operators and punctuation.
+
+ `PlPgsqlLexer`
+ A lexer for the PL/pgSQL language. Adds a few specific construct on
+ top of the PG SQL lexer (such as <