Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- parrot/lib/libquadmath.so.0.0.0 +3 -0
- parrot/lib/python3.10/site-packages/git/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/__pycache__/cmd.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/__pycache__/compat.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/__pycache__/db.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/__pycache__/exc.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/__pycache__/remote.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/__pycache__/types.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/config.py +944 -0
- parrot/lib/python3.10/site-packages/git/index/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/index/__pycache__/base.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/index/__pycache__/fun.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/index/fun.py +465 -0
- parrot/lib/python3.10/site-packages/git/index/typ.py +202 -0
- parrot/lib/python3.10/site-packages/git/objects/__init__.py +25 -0
- parrot/lib/python3.10/site-packages/git/objects/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/objects/__pycache__/blob.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/objects/__pycache__/commit.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/objects/__pycache__/tag.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/objects/__pycache__/tree.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/objects/base.py +301 -0
- parrot/lib/python3.10/site-packages/git/objects/blob.py +48 -0
- parrot/lib/python3.10/site-packages/git/objects/commit.py +899 -0
- parrot/lib/python3.10/site-packages/git/objects/fun.py +281 -0
- parrot/lib/python3.10/site-packages/git/objects/submodule/__pycache__/base.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/objects/submodule/root.py +467 -0
- parrot/lib/python3.10/site-packages/git/objects/tag.py +141 -0
- parrot/lib/python3.10/site-packages/git/objects/tree.py +414 -0
- parrot/lib/python3.10/site-packages/git/objects/util.py +700 -0
- parrot/lib/python3.10/site-packages/git/refs/__init__.py +21 -0
- parrot/lib/python3.10/site-packages/git/refs/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/refs/__pycache__/head.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/refs/__pycache__/log.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/refs/__pycache__/reference.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/refs/__pycache__/remote.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/refs/__pycache__/symbolic.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/refs/__pycache__/tag.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/refs/head.py +304 -0
- parrot/lib/python3.10/site-packages/git/refs/log.py +399 -0
- parrot/lib/python3.10/site-packages/git/refs/reference.py +176 -0
- parrot/lib/python3.10/site-packages/git/refs/remote.py +79 -0
- parrot/lib/python3.10/site-packages/git/refs/symbolic.py +926 -0
- parrot/lib/python3.10/site-packages/git/refs/tag.py +155 -0
- parrot/lib/python3.10/site-packages/git/repo/__init__.py +8 -0
- parrot/lib/python3.10/site-packages/git/repo/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/repo/__pycache__/base.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/repo/__pycache__/fun.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/git/repo/fun.py +419 -0
- parrot/lib/python3.10/site-packages/git/util.py +1344 -0
.gitattributes
CHANGED
|
@@ -155,3 +155,4 @@ parrot/lib/python3.10/site-packages/wandb/sdk/__pycache__/wandb_run.cpython-310.
|
|
| 155 |
parrot/lib/python3.10/site-packages/wandb/sdk/internal/__pycache__/internal_api.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 156 |
parrot/lib/libgomp.so.1 filter=lfs diff=lfs merge=lfs -text
|
| 157 |
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/__pycache__/_async_client.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 155 |
parrot/lib/python3.10/site-packages/wandb/sdk/internal/__pycache__/internal_api.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 156 |
parrot/lib/libgomp.so.1 filter=lfs diff=lfs merge=lfs -text
|
| 157 |
parrot/lib/python3.10/site-packages/huggingface_hub/inference/_generated/__pycache__/_async_client.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 158 |
+
parrot/lib/libquadmath.so.0.0.0 filter=lfs diff=lfs merge=lfs -text
|
parrot/lib/libquadmath.so.0.0.0
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:10c6fadba4c2f6d77e836a50aadbd92e95b137a85eb01b1ca183b50d8f39a2c6
|
| 3 |
+
size 1009408
|
parrot/lib/python3.10/site-packages/git/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (5.28 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/__pycache__/cmd.cpython-310.pyc
ADDED
|
Binary file (47.9 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/__pycache__/compat.cpython-310.pyc
ADDED
|
Binary file (3.26 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/__pycache__/db.cpython-310.pyc
ADDED
|
Binary file (2.6 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/__pycache__/exc.cpython-310.pyc
ADDED
|
Binary file (7.71 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/__pycache__/remote.cpython-310.pyc
ADDED
|
Binary file (34.8 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/__pycache__/types.cpython-310.pyc
ADDED
|
Binary file (4.78 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/config.py
ADDED
|
@@ -0,0 +1,944 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitPython and is released under the
|
| 4 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 5 |
+
|
| 6 |
+
"""Parser for reading and writing configuration files."""
|
| 7 |
+
|
| 8 |
+
__all__ = ["GitConfigParser", "SectionConstraint"]
|
| 9 |
+
|
| 10 |
+
import abc
|
| 11 |
+
import configparser as cp
|
| 12 |
+
import fnmatch
|
| 13 |
+
from functools import wraps
|
| 14 |
+
import inspect
|
| 15 |
+
from io import BufferedReader, IOBase
|
| 16 |
+
import logging
|
| 17 |
+
import os
|
| 18 |
+
import os.path as osp
|
| 19 |
+
import re
|
| 20 |
+
import sys
|
| 21 |
+
|
| 22 |
+
from git.compat import defenc, force_text
|
| 23 |
+
from git.util import LockFile
|
| 24 |
+
|
| 25 |
+
# typing-------------------------------------------------------
|
| 26 |
+
|
| 27 |
+
from typing import (
|
| 28 |
+
Any,
|
| 29 |
+
Callable,
|
| 30 |
+
Generic,
|
| 31 |
+
IO,
|
| 32 |
+
List,
|
| 33 |
+
Dict,
|
| 34 |
+
Sequence,
|
| 35 |
+
TYPE_CHECKING,
|
| 36 |
+
Tuple,
|
| 37 |
+
TypeVar,
|
| 38 |
+
Union,
|
| 39 |
+
cast,
|
| 40 |
+
)
|
| 41 |
+
|
| 42 |
+
from git.types import Lit_config_levels, ConfigLevels_Tup, PathLike, assert_never, _T
|
| 43 |
+
|
| 44 |
+
if TYPE_CHECKING:
|
| 45 |
+
from io import BytesIO
|
| 46 |
+
|
| 47 |
+
from git.repo.base import Repo
|
| 48 |
+
|
| 49 |
+
T_ConfigParser = TypeVar("T_ConfigParser", bound="GitConfigParser")
|
| 50 |
+
T_OMD_value = TypeVar("T_OMD_value", str, bytes, int, float, bool)
|
| 51 |
+
|
| 52 |
+
if sys.version_info[:3] < (3, 7, 2):
|
| 53 |
+
# typing.Ordereddict not added until Python 3.7.2.
|
| 54 |
+
from collections import OrderedDict
|
| 55 |
+
|
| 56 |
+
OrderedDict_OMD = OrderedDict
|
| 57 |
+
else:
|
| 58 |
+
from typing import OrderedDict
|
| 59 |
+
|
| 60 |
+
OrderedDict_OMD = OrderedDict[str, List[T_OMD_value]] # type: ignore[assignment, misc]
|
| 61 |
+
|
| 62 |
+
# -------------------------------------------------------------
|
| 63 |
+
|
| 64 |
+
_logger = logging.getLogger(__name__)
|
| 65 |
+
|
| 66 |
+
CONFIG_LEVELS: ConfigLevels_Tup = ("system", "user", "global", "repository")
|
| 67 |
+
"""The configuration level of a configuration file."""
|
| 68 |
+
|
| 69 |
+
CONDITIONAL_INCLUDE_REGEXP = re.compile(r"(?<=includeIf )\"(gitdir|gitdir/i|onbranch):(.+)\"")
|
| 70 |
+
"""Section pattern to detect conditional includes.
|
| 71 |
+
|
| 72 |
+
See: https://git-scm.com/docs/git-config#_conditional_includes
|
| 73 |
+
"""
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class MetaParserBuilder(abc.ABCMeta): # noqa: B024
|
| 77 |
+
"""Utility class wrapping base-class methods into decorators that assure read-only
|
| 78 |
+
properties."""
|
| 79 |
+
|
| 80 |
+
def __new__(cls, name: str, bases: Tuple, clsdict: Dict[str, Any]) -> "MetaParserBuilder":
|
| 81 |
+
"""Equip all base-class methods with a needs_values decorator, and all non-const
|
| 82 |
+
methods with a :func:`set_dirty_and_flush_changes` decorator in addition to
|
| 83 |
+
that.
|
| 84 |
+
"""
|
| 85 |
+
kmm = "_mutating_methods_"
|
| 86 |
+
if kmm in clsdict:
|
| 87 |
+
mutating_methods = clsdict[kmm]
|
| 88 |
+
for base in bases:
|
| 89 |
+
methods = (t for t in inspect.getmembers(base, inspect.isroutine) if not t[0].startswith("_"))
|
| 90 |
+
for name, method in methods:
|
| 91 |
+
if name in clsdict:
|
| 92 |
+
continue
|
| 93 |
+
method_with_values = needs_values(method)
|
| 94 |
+
if name in mutating_methods:
|
| 95 |
+
method_with_values = set_dirty_and_flush_changes(method_with_values)
|
| 96 |
+
# END mutating methods handling
|
| 97 |
+
|
| 98 |
+
clsdict[name] = method_with_values
|
| 99 |
+
# END for each name/method pair
|
| 100 |
+
# END for each base
|
| 101 |
+
# END if mutating methods configuration is set
|
| 102 |
+
|
| 103 |
+
new_type = super().__new__(cls, name, bases, clsdict)
|
| 104 |
+
return new_type
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def needs_values(func: Callable[..., _T]) -> Callable[..., _T]:
|
| 108 |
+
"""Return a method for ensuring we read values (on demand) before we try to access
|
| 109 |
+
them."""
|
| 110 |
+
|
| 111 |
+
@wraps(func)
|
| 112 |
+
def assure_data_present(self: "GitConfigParser", *args: Any, **kwargs: Any) -> _T:
|
| 113 |
+
self.read()
|
| 114 |
+
return func(self, *args, **kwargs)
|
| 115 |
+
|
| 116 |
+
# END wrapper method
|
| 117 |
+
return assure_data_present
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
def set_dirty_and_flush_changes(non_const_func: Callable[..., _T]) -> Callable[..., _T]:
|
| 121 |
+
"""Return a method that checks whether given non constant function may be called.
|
| 122 |
+
|
| 123 |
+
If so, the instance will be set dirty. Additionally, we flush the changes right to
|
| 124 |
+
disk.
|
| 125 |
+
"""
|
| 126 |
+
|
| 127 |
+
def flush_changes(self: "GitConfigParser", *args: Any, **kwargs: Any) -> _T:
|
| 128 |
+
rval = non_const_func(self, *args, **kwargs)
|
| 129 |
+
self._dirty = True
|
| 130 |
+
self.write()
|
| 131 |
+
return rval
|
| 132 |
+
|
| 133 |
+
# END wrapper method
|
| 134 |
+
flush_changes.__name__ = non_const_func.__name__
|
| 135 |
+
return flush_changes
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
class SectionConstraint(Generic[T_ConfigParser]):
|
| 139 |
+
"""Constrains a ConfigParser to only option commands which are constrained to
|
| 140 |
+
always use the section we have been initialized with.
|
| 141 |
+
|
| 142 |
+
It supports all ConfigParser methods that operate on an option.
|
| 143 |
+
|
| 144 |
+
:note:
|
| 145 |
+
If used as a context manager, will release the wrapped ConfigParser.
|
| 146 |
+
"""
|
| 147 |
+
|
| 148 |
+
__slots__ = ("_config", "_section_name")
|
| 149 |
+
|
| 150 |
+
_valid_attrs_ = (
|
| 151 |
+
"get_value",
|
| 152 |
+
"set_value",
|
| 153 |
+
"get",
|
| 154 |
+
"set",
|
| 155 |
+
"getint",
|
| 156 |
+
"getfloat",
|
| 157 |
+
"getboolean",
|
| 158 |
+
"has_option",
|
| 159 |
+
"remove_section",
|
| 160 |
+
"remove_option",
|
| 161 |
+
"options",
|
| 162 |
+
)
|
| 163 |
+
|
| 164 |
+
def __init__(self, config: T_ConfigParser, section: str) -> None:
|
| 165 |
+
self._config = config
|
| 166 |
+
self._section_name = section
|
| 167 |
+
|
| 168 |
+
def __del__(self) -> None:
|
| 169 |
+
# Yes, for some reason, we have to call it explicitly for it to work in PY3 !
|
| 170 |
+
# Apparently __del__ doesn't get call anymore if refcount becomes 0
|
| 171 |
+
# Ridiculous ... .
|
| 172 |
+
self._config.release()
|
| 173 |
+
|
| 174 |
+
def __getattr__(self, attr: str) -> Any:
|
| 175 |
+
if attr in self._valid_attrs_:
|
| 176 |
+
return lambda *args, **kwargs: self._call_config(attr, *args, **kwargs)
|
| 177 |
+
return super().__getattribute__(attr)
|
| 178 |
+
|
| 179 |
+
def _call_config(self, method: str, *args: Any, **kwargs: Any) -> Any:
|
| 180 |
+
"""Call the configuration at the given method which must take a section name as
|
| 181 |
+
first argument."""
|
| 182 |
+
return getattr(self._config, method)(self._section_name, *args, **kwargs)
|
| 183 |
+
|
| 184 |
+
@property
|
| 185 |
+
def config(self) -> T_ConfigParser:
|
| 186 |
+
"""return: ConfigParser instance we constrain"""
|
| 187 |
+
return self._config
|
| 188 |
+
|
| 189 |
+
def release(self) -> None:
|
| 190 |
+
"""Equivalent to :meth:`GitConfigParser.release`, which is called on our
|
| 191 |
+
underlying parser instance."""
|
| 192 |
+
return self._config.release()
|
| 193 |
+
|
| 194 |
+
def __enter__(self) -> "SectionConstraint[T_ConfigParser]":
|
| 195 |
+
self._config.__enter__()
|
| 196 |
+
return self
|
| 197 |
+
|
| 198 |
+
def __exit__(self, exception_type: str, exception_value: str, traceback: str) -> None:
|
| 199 |
+
self._config.__exit__(exception_type, exception_value, traceback)
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
class _OMD(OrderedDict_OMD):
|
| 203 |
+
"""Ordered multi-dict."""
|
| 204 |
+
|
| 205 |
+
def __setitem__(self, key: str, value: _T) -> None:
|
| 206 |
+
super().__setitem__(key, [value])
|
| 207 |
+
|
| 208 |
+
def add(self, key: str, value: Any) -> None:
|
| 209 |
+
if key not in self:
|
| 210 |
+
super().__setitem__(key, [value])
|
| 211 |
+
return
|
| 212 |
+
|
| 213 |
+
super().__getitem__(key).append(value)
|
| 214 |
+
|
| 215 |
+
def setall(self, key: str, values: List[_T]) -> None:
|
| 216 |
+
super().__setitem__(key, values)
|
| 217 |
+
|
| 218 |
+
def __getitem__(self, key: str) -> Any:
|
| 219 |
+
return super().__getitem__(key)[-1]
|
| 220 |
+
|
| 221 |
+
def getlast(self, key: str) -> Any:
|
| 222 |
+
return super().__getitem__(key)[-1]
|
| 223 |
+
|
| 224 |
+
def setlast(self, key: str, value: Any) -> None:
|
| 225 |
+
if key not in self:
|
| 226 |
+
super().__setitem__(key, [value])
|
| 227 |
+
return
|
| 228 |
+
|
| 229 |
+
prior = super().__getitem__(key)
|
| 230 |
+
prior[-1] = value
|
| 231 |
+
|
| 232 |
+
def get(self, key: str, default: Union[_T, None] = None) -> Union[_T, None]:
|
| 233 |
+
return super().get(key, [default])[-1]
|
| 234 |
+
|
| 235 |
+
def getall(self, key: str) -> List[_T]:
|
| 236 |
+
return super().__getitem__(key)
|
| 237 |
+
|
| 238 |
+
def items(self) -> List[Tuple[str, _T]]: # type: ignore[override]
|
| 239 |
+
"""List of (key, last value for key)."""
|
| 240 |
+
return [(k, self[k]) for k in self]
|
| 241 |
+
|
| 242 |
+
def items_all(self) -> List[Tuple[str, List[_T]]]:
|
| 243 |
+
"""List of (key, list of values for key)."""
|
| 244 |
+
return [(k, self.getall(k)) for k in self]
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
def get_config_path(config_level: Lit_config_levels) -> str:
|
| 248 |
+
# We do not support an absolute path of the gitconfig on Windows.
|
| 249 |
+
# Use the global config instead.
|
| 250 |
+
if sys.platform == "win32" and config_level == "system":
|
| 251 |
+
config_level = "global"
|
| 252 |
+
|
| 253 |
+
if config_level == "system":
|
| 254 |
+
return "/etc/gitconfig"
|
| 255 |
+
elif config_level == "user":
|
| 256 |
+
config_home = os.environ.get("XDG_CONFIG_HOME") or osp.join(os.environ.get("HOME", "~"), ".config")
|
| 257 |
+
return osp.normpath(osp.expanduser(osp.join(config_home, "git", "config")))
|
| 258 |
+
elif config_level == "global":
|
| 259 |
+
return osp.normpath(osp.expanduser("~/.gitconfig"))
|
| 260 |
+
elif config_level == "repository":
|
| 261 |
+
raise ValueError("No repo to get repository configuration from. Use Repo._get_config_path")
|
| 262 |
+
else:
|
| 263 |
+
# Should not reach here. Will raise ValueError if does. Static typing will warn
|
| 264 |
+
# about missing elifs.
|
| 265 |
+
assert_never( # type: ignore[unreachable]
|
| 266 |
+
config_level,
|
| 267 |
+
ValueError(f"Invalid configuration level: {config_level!r}"),
|
| 268 |
+
)
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
|
| 272 |
+
"""Implements specifics required to read git style configuration files.
|
| 273 |
+
|
| 274 |
+
This variation behaves much like the :manpage:`git-config(1)` command, such that the
|
| 275 |
+
configuration will be read on demand based on the filepath given during
|
| 276 |
+
initialization.
|
| 277 |
+
|
| 278 |
+
The changes will automatically be written once the instance goes out of scope, but
|
| 279 |
+
can be triggered manually as well.
|
| 280 |
+
|
| 281 |
+
The configuration file will be locked if you intend to change values preventing
|
| 282 |
+
other instances to write concurrently.
|
| 283 |
+
|
| 284 |
+
:note:
|
| 285 |
+
The config is case-sensitive even when queried, hence section and option names
|
| 286 |
+
must match perfectly.
|
| 287 |
+
|
| 288 |
+
:note:
|
| 289 |
+
If used as a context manager, this will release the locked file.
|
| 290 |
+
"""
|
| 291 |
+
|
| 292 |
+
# { Configuration
|
| 293 |
+
t_lock = LockFile
|
| 294 |
+
"""The lock type determines the type of lock to use in new configuration readers.
|
| 295 |
+
|
| 296 |
+
They must be compatible to the :class:`~git.util.LockFile` interface.
|
| 297 |
+
A suitable alternative would be the :class:`~git.util.BlockingLockFile`.
|
| 298 |
+
"""
|
| 299 |
+
|
| 300 |
+
re_comment = re.compile(r"^\s*[#;]")
|
| 301 |
+
# } END configuration
|
| 302 |
+
|
| 303 |
+
optvalueonly_source = r"\s*(?P<option>[^:=\s][^:=]*)"
|
| 304 |
+
|
| 305 |
+
OPTVALUEONLY = re.compile(optvalueonly_source)
|
| 306 |
+
|
| 307 |
+
OPTCRE = re.compile(optvalueonly_source + r"\s*(?P<vi>[:=])\s*" + r"(?P<value>.*)$")
|
| 308 |
+
|
| 309 |
+
del optvalueonly_source
|
| 310 |
+
|
| 311 |
+
_mutating_methods_ = ("add_section", "remove_section", "remove_option", "set")
|
| 312 |
+
"""Names of :class:`~configparser.RawConfigParser` methods able to change the
|
| 313 |
+
instance."""
|
| 314 |
+
|
| 315 |
+
def __init__(
|
| 316 |
+
self,
|
| 317 |
+
file_or_files: Union[None, PathLike, "BytesIO", Sequence[Union[PathLike, "BytesIO"]]] = None,
|
| 318 |
+
read_only: bool = True,
|
| 319 |
+
merge_includes: bool = True,
|
| 320 |
+
config_level: Union[Lit_config_levels, None] = None,
|
| 321 |
+
repo: Union["Repo", None] = None,
|
| 322 |
+
) -> None:
|
| 323 |
+
"""Initialize a configuration reader to read the given `file_or_files` and to
|
| 324 |
+
possibly allow changes to it by setting `read_only` False.
|
| 325 |
+
|
| 326 |
+
:param file_or_files:
|
| 327 |
+
A file path or file object, or a sequence of possibly more than one of them.
|
| 328 |
+
|
| 329 |
+
:param read_only:
|
| 330 |
+
If ``True``, the ConfigParser may only read the data, but not change it.
|
| 331 |
+
If ``False``, only a single file path or file object may be given. We will
|
| 332 |
+
write back the changes when they happen, or when the ConfigParser is
|
| 333 |
+
released. This will not happen if other configuration files have been
|
| 334 |
+
included.
|
| 335 |
+
|
| 336 |
+
:param merge_includes:
|
| 337 |
+
If ``True``, we will read files mentioned in ``[include]`` sections and
|
| 338 |
+
merge their contents into ours. This makes it impossible to write back an
|
| 339 |
+
individual configuration file. Thus, if you want to modify a single
|
| 340 |
+
configuration file, turn this off to leave the original dataset unaltered
|
| 341 |
+
when reading it.
|
| 342 |
+
|
| 343 |
+
:param repo:
|
| 344 |
+
Reference to repository to use if ``[includeIf]`` sections are found in
|
| 345 |
+
configuration files.
|
| 346 |
+
"""
|
| 347 |
+
cp.RawConfigParser.__init__(self, dict_type=_OMD)
|
| 348 |
+
self._dict: Callable[..., _OMD]
|
| 349 |
+
self._defaults: _OMD
|
| 350 |
+
self._sections: _OMD
|
| 351 |
+
|
| 352 |
+
# Used in Python 3. Needs to stay in sync with sections for underlying
|
| 353 |
+
# implementation to work.
|
| 354 |
+
if not hasattr(self, "_proxies"):
|
| 355 |
+
self._proxies = self._dict()
|
| 356 |
+
|
| 357 |
+
if file_or_files is not None:
|
| 358 |
+
self._file_or_files: Union[PathLike, "BytesIO", Sequence[Union[PathLike, "BytesIO"]]] = file_or_files
|
| 359 |
+
else:
|
| 360 |
+
if config_level is None:
|
| 361 |
+
if read_only:
|
| 362 |
+
self._file_or_files = [
|
| 363 |
+
get_config_path(cast(Lit_config_levels, f)) for f in CONFIG_LEVELS if f != "repository"
|
| 364 |
+
]
|
| 365 |
+
else:
|
| 366 |
+
raise ValueError("No configuration level or configuration files specified")
|
| 367 |
+
else:
|
| 368 |
+
self._file_or_files = [get_config_path(config_level)]
|
| 369 |
+
|
| 370 |
+
self._read_only = read_only
|
| 371 |
+
self._dirty = False
|
| 372 |
+
self._is_initialized = False
|
| 373 |
+
self._merge_includes = merge_includes
|
| 374 |
+
self._repo = repo
|
| 375 |
+
self._lock: Union["LockFile", None] = None
|
| 376 |
+
self._acquire_lock()
|
| 377 |
+
|
| 378 |
+
def _acquire_lock(self) -> None:
|
| 379 |
+
if not self._read_only:
|
| 380 |
+
if not self._lock:
|
| 381 |
+
if isinstance(self._file_or_files, (str, os.PathLike)):
|
| 382 |
+
file_or_files = self._file_or_files
|
| 383 |
+
elif isinstance(self._file_or_files, (tuple, list, Sequence)):
|
| 384 |
+
raise ValueError(
|
| 385 |
+
"Write-ConfigParsers can operate on a single file only, multiple files have been passed"
|
| 386 |
+
)
|
| 387 |
+
else:
|
| 388 |
+
file_or_files = self._file_or_files.name
|
| 389 |
+
|
| 390 |
+
# END get filename from handle/stream
|
| 391 |
+
# Initialize lock base - we want to write.
|
| 392 |
+
self._lock = self.t_lock(file_or_files)
|
| 393 |
+
# END lock check
|
| 394 |
+
|
| 395 |
+
self._lock._obtain_lock()
|
| 396 |
+
# END read-only check
|
| 397 |
+
|
| 398 |
+
def __del__(self) -> None:
|
| 399 |
+
"""Write pending changes if required and release locks."""
|
| 400 |
+
# NOTE: Only consistent in Python 2.
|
| 401 |
+
self.release()
|
| 402 |
+
|
| 403 |
+
def __enter__(self) -> "GitConfigParser":
|
| 404 |
+
self._acquire_lock()
|
| 405 |
+
return self
|
| 406 |
+
|
| 407 |
+
def __exit__(self, *args: Any) -> None:
|
| 408 |
+
self.release()
|
| 409 |
+
|
| 410 |
+
def release(self) -> None:
|
| 411 |
+
"""Flush changes and release the configuration write lock. This instance must
|
| 412 |
+
not be used anymore afterwards.
|
| 413 |
+
|
| 414 |
+
In Python 3, it's required to explicitly release locks and flush changes, as
|
| 415 |
+
``__del__`` is not called deterministically anymore.
|
| 416 |
+
"""
|
| 417 |
+
# Checking for the lock here makes sure we do not raise during write()
|
| 418 |
+
# in case an invalid parser was created who could not get a lock.
|
| 419 |
+
if self.read_only or (self._lock and not self._lock._has_lock()):
|
| 420 |
+
return
|
| 421 |
+
|
| 422 |
+
try:
|
| 423 |
+
self.write()
|
| 424 |
+
except IOError:
|
| 425 |
+
_logger.error("Exception during destruction of GitConfigParser", exc_info=True)
|
| 426 |
+
except ReferenceError:
|
| 427 |
+
# This happens in Python 3... and usually means that some state cannot be
|
| 428 |
+
# written as the sections dict cannot be iterated. This usually happens when
|
| 429 |
+
# the interpreter is shutting down. Can it be fixed?
|
| 430 |
+
pass
|
| 431 |
+
finally:
|
| 432 |
+
if self._lock is not None:
|
| 433 |
+
self._lock._release_lock()
|
| 434 |
+
|
| 435 |
+
def optionxform(self, optionstr: str) -> str:
|
| 436 |
+
"""Do not transform options in any way when writing."""
|
| 437 |
+
return optionstr
|
| 438 |
+
|
| 439 |
+
def _read(self, fp: Union[BufferedReader, IO[bytes]], fpname: str) -> None:
    """Originally a direct copy of the Python 2.4 version of
    :meth:`RawConfigParser._read <configparser.RawConfigParser._read>`, to ensure it
    uses ordered dicts.

    The ordering bug was fixed in Python 2.4, and dict itself keeps ordering since
    Python 3.7. This has some other changes, especially that it ignores initial
    whitespace, since git uses tabs. (Big comments are removed to be more compact.)
    """
    cursect = None  # None, or a dictionary.
    optname = None
    lineno = 0
    is_multi_line = False  # True while inside a quoted value continued on following lines.
    e = None  # None, or an exception.

    def string_decode(v: str) -> str:
        # A trailing backslash marks line continuation; drop it before decoding escapes.
        if v[-1] == "\\":
            v = v[:-1]
        # END cut trailing escapes to prevent decode error

        return v.encode(defenc).decode("unicode_escape")

    # END string_decode

    while True:
        # We assume to read binary!
        line = fp.readline().decode(defenc)
        if not line:
            break
        lineno = lineno + 1
        # Comment or blank line?
        if line.strip() == "" or self.re_comment.match(line):
            continue
        # Lines starting with "rem"/"Rem" (first char 'r' or 'R') are treated as comments.
        if line.split(None, 1)[0].lower() == "rem" and line[0] in "rR":
            # No leading whitespace.
            continue

        # Is it a section header?
        mo = self.SECTCRE.match(line.strip())
        if not is_multi_line and mo:
            sectname: str = mo.group("header").strip()
            if sectname in self._sections:
                cursect = self._sections[sectname]
            elif sectname == cp.DEFAULTSECT:
                cursect = self._defaults
            else:
                cursect = self._dict((("__name__", sectname),))
                self._sections[sectname] = cursect
                self._proxies[sectname] = None
            # So sections can't start with a continuation line.
            optname = None
        # No section header in the file?
        elif cursect is None:
            raise cp.MissingSectionHeaderError(fpname, lineno, line)
        # An option line?
        elif not is_multi_line:
            mo = self.OPTCRE.match(line)
            if mo:
                # We might just have handled the last line, which could contain a quotation we want to remove.
                optname, vi, optval = mo.group("option", "vi", "value")
                # Strip an inline ";" comment, but only when the value is not quoted
                # and the ";" is preceded by whitespace.
                if vi in ("=", ":") and ";" in optval and not optval.strip().startswith('"'):
                    pos = optval.find(";")
                    if pos != -1 and optval[pos - 1].isspace():
                        optval = optval[:pos]
                optval = optval.strip()
                if optval == '""':
                    optval = ""
                # END handle empty string
                optname = self.optionxform(optname.rstrip())
                # An opening quote without a closing one starts a multi-line value.
                if len(optval) > 1 and optval[0] == '"' and optval[-1] != '"':
                    is_multi_line = True
                    optval = string_decode(optval[1:])
                # END handle multi-line
                # Preserves multiple values for duplicate optnames.
                cursect.add(optname, optval)
            else:
                # Check if it's an option with no value - it's just ignored by git.
                if not self.OPTVALUEONLY.match(line):
                    if not e:
                        e = cp.ParsingError(fpname)
                    e.append(lineno, repr(line))
                continue
        else:
            # Continuation of a multi-line value: append to the last stored value.
            line = line.rstrip()
            if line.endswith('"'):
                is_multi_line = False
                line = line[:-1]
            # END handle quotations
            optval = cursect.getlast(optname)
            cursect.setlast(optname, optval + string_decode(line))
        # END parse section or option
    # END while reading

    # If any parsing errors occurred, raise an exception.
    if e:
        raise e
|
| 535 |
+
|
| 536 |
+
def _has_includes(self) -> Union[bool, int]:
|
| 537 |
+
return self._merge_includes and len(self._included_paths())
|
| 538 |
+
|
| 539 |
+
def _included_paths(self) -> List[Tuple[str, str]]:
    """List all paths that must be included to configuration.

    :return:
        The list of paths, where each path is a tuple of (option, value).
    """
    paths = []

    for section in self.sections():
        # Unconditional [include] sections always contribute their items.
        if section == "include":
            paths += self.items(section)

        # Conditional sections ("includeIf"-style) require a repository to evaluate.
        match = CONDITIONAL_INCLUDE_REGEXP.search(section)
        if match is None or self._repo is None:
            continue

        keyword = match.group(1)
        value = match.group(2).strip()

        if keyword in ["gitdir", "gitdir/i"]:
            value = osp.expanduser(value)

            # Normalize the pattern: relative patterns match anywhere, and a
            # trailing slash matches everything beneath that directory.
            if not any(value.startswith(s) for s in ["./", "/"]):
                value = "**/" + value
            if value.endswith("/"):
                value += "**"

            # Ensure that glob is always case insensitive if required.
            if keyword.endswith("/i"):
                # Rewrite each letter as a [xX] character class so fnmatchcase
                # matches case-insensitively.
                value = re.sub(
                    r"[a-zA-Z]",
                    lambda m: "[{}{}]".format(m.group().lower(), m.group().upper()),
                    value,
                )
            if self._repo.git_dir:
                if fnmatch.fnmatchcase(str(self._repo.git_dir), value):
                    paths += self.items(section)

        elif keyword == "onbranch":
            try:
                branch_name = self._repo.active_branch.name
            except TypeError:
                # Ignore section if active branch cannot be retrieved.
                continue

            if fnmatch.fnmatchcase(branch_name, value):
                paths += self.items(section)

    return paths
|
| 588 |
+
|
| 589 |
+
def read(self) -> None:  # type: ignore[override]
    """Read the data stored in the files we have been initialized with.

    This will ignore files that cannot be read, possibly leaving an empty
    configuration.

    Reading happens at most once per instance; subsequent calls are no-ops.

    :raise IOError:
        If a file cannot be handled.
    """
    if self._is_initialized:
        return
    self._is_initialized = True

    files_to_read: List[Union[PathLike, IO]] = [""]
    if isinstance(self._file_or_files, (str, os.PathLike)):
        # For str or Path, as str is a type of Sequence.
        files_to_read = [self._file_or_files]
    elif not isinstance(self._file_or_files, (tuple, list, Sequence)):
        # Could merge with above isinstance once runtime type known.
        files_to_read = [self._file_or_files]
    else:  # For lists or tuples.
        files_to_read = list(self._file_or_files)
    # END ensure we have a copy of the paths to handle

    # `seen` guards against include cycles.
    seen = set(files_to_read)
    num_read_include_files = 0
    while files_to_read:
        file_path = files_to_read.pop(0)
        file_ok = False

        if hasattr(file_path, "seek"):
            # Must be a file-object.
            # TODO: Replace cast with assert to narrow type, once sure.
            file_path = cast(IO[bytes], file_path)
            self._read(file_path, file_path.name)
        else:
            # Assume a path if it is not a file-object.
            file_path = cast(PathLike, file_path)
            try:
                with open(file_path, "rb") as fp:
                    file_ok = True
                    self._read(fp, fp.name)
            except IOError:
                # Unreadable files are silently skipped, per the docstring.
                continue

        # Read includes and append those that we didn't handle yet. We expect all
        # paths to be normalized and absolute (and will ensure that is the case).
        if self._has_includes():
            for _, include_path in self._included_paths():
                if include_path.startswith("~"):
                    include_path = osp.expanduser(include_path)
                if not osp.isabs(include_path):
                    if not file_ok:
                        continue
                    # END ignore relative paths if we don't know the configuration file path
                    file_path = cast(PathLike, file_path)
                    assert osp.isabs(file_path), "Need absolute paths to be sure our cycle checks will work"
                    include_path = osp.join(osp.dirname(file_path), include_path)
                # END make include path absolute
                include_path = osp.normpath(include_path)
                if include_path in seen or not os.access(include_path, os.R_OK):
                    continue
                seen.add(include_path)
                # Insert included file to the top to be considered first.
                files_to_read.insert(0, include_path)
                num_read_include_files += 1
            # END each include path in configuration file
        # END handle includes
    # END for each file object to read

    # If there was no file included, we can safely write back (potentially) the
    # configuration file without altering its meaning.
    if num_read_include_files == 0:
        self._merge_includes = False
|
| 663 |
+
|
| 664 |
+
def _write(self, fp: IO) -> None:
    """Serialize the current configuration state to *fp* as git-compatible INI text."""

    def emit(section_name: str, omd: "_OMD") -> None:
        # Section header, then one tab-indented "key = value" line per stored value.
        fp.write(("[%s]\n" % section_name).encode(defenc))
        for option, stored in omd.items_all():
            if option == "__name__":
                continue
            for single in stored:
                rendered = self._value_to_string(single).replace("\n", "\n\t")
                fp.write(("\t%s = %s\n" % (option, rendered)).encode(defenc))
    # END emit

    if self._defaults:
        emit(cp.DEFAULTSECT, self._defaults)
    for section_name, omd in self._sections.items():
        emit(section_name, omd)
|
| 689 |
+
|
| 690 |
+
def items(self, section_name: str) -> List[Tuple[str, str]]:  # type: ignore[override]
    """:return: list((option, value), ...) pairs of all items in the given section"""
    all_pairs = super().items(section_name)
    return [pair for pair in all_pairs if pair[0] != "__name__"]
|
| 693 |
+
|
| 694 |
+
def items_all(self, section_name: str) -> List[Tuple[str, List[str]]]:
    """:return: list((option, [values...]), ...) pairs of all items in the given section"""
    merged = _OMD(self._defaults)

    for option, stored_values in self._sections[section_name].items_all():
        if option == "__name__":
            continue

        # Defaults already carrying the identical value list need no duplication.
        if option in merged and merged.getall(option) == stored_values:
            continue

        for single in stored_values:
            merged.add(option, single)

    return merged.items_all()
|
| 709 |
+
|
| 710 |
+
@needs_values
def write(self) -> None:
    """Write changes to our file, if there are changes at all.

    Writing is skipped when nothing is dirty, and refused (with a log message)
    when include files were merged in, as writing back would alter the meaning
    of the configuration.

    :raise IOError:
        If this is a read-only writer instance or if we could not obtain a file
        lock.
    """
    self._assure_writable("write")
    if not self._dirty:
        return

    if isinstance(self._file_or_files, (list, tuple)):
        raise AssertionError(
            "Cannot write back if there is not exactly a single file to write to, have %i files"
            % len(self._file_or_files)
        )
    # END assert multiple files

    if self._has_includes():
        # Fix: the two concatenated fragments previously ran together without a
        # separating space ("...merged in.Set merge_includes=False...").
        _logger.debug(
            "Skipping write-back of configuration file as include files were merged in. "
            + "Set merge_includes=False to prevent this."
        )
        return
    # END stop if we have include files

    fp = self._file_or_files

    # We have a physical file on disk, so get a lock.
    is_file_lock = isinstance(fp, (str, os.PathLike, IOBase))  # TODO: Use PathLike (having dropped 3.5).
    if is_file_lock and self._lock is not None:  # Else raise error?
        self._lock._obtain_lock()

    if not hasattr(fp, "seek"):
        # A path: open, write, close via context manager.
        fp = cast(PathLike, fp)
        with open(fp, "wb") as fp_open:
            self._write(fp_open)
    else:
        # A stream: rewind and truncate so stale trailing bytes do not survive.
        fp = cast("BytesIO", fp)
        fp.seek(0)
        # Make sure we do not overwrite into an existing file.
        if hasattr(fp, "truncate"):
            fp.truncate()
        self._write(fp)
|
| 755 |
+
|
| 756 |
+
def _assure_writable(self, method_name: str) -> None:
|
| 757 |
+
if self.read_only:
|
| 758 |
+
raise IOError("Cannot execute non-constant method %s.%s" % (self, method_name))
|
| 759 |
+
|
| 760 |
+
def add_section(self, section: str) -> None:
    """Add a new section; options added to it will keep their insertion order."""
    return super().add_section(section)
|
| 763 |
+
|
| 764 |
+
@property
def read_only(self) -> bool:
    """:return: ``True`` if this instance may *not* change the configuration file
    (it was opened read-only)"""
    # Fix: the previous docstring said the opposite ("may change") of what a
    # True return value actually means.
    return self._read_only
|
| 768 |
+
|
| 769 |
+
# FIXME: Figure out if default or return type can really include bool.
|
| 770 |
+
def get_value(
    self,
    section: str,
    option: str,
    default: Union[int, float, str, bool, None] = None,
) -> Union[int, float, str, bool]:
    """Get an option's value; with multiple values present, the last one wins.

    :param default:
        If not ``None``, the given default value will be returned in case the option
        did not exist.

    :return:
        A properly typed value, either int, float or string

    :raise TypeError:
        In case the value could not be understood.
        Otherwise the exceptions known to the ConfigParser will be raised.
    """
    try:
        raw = self.get(section, option)
    except Exception:
        if default is not None:
            return default
        raise

    return self._string_to_value(raw)
|
| 800 |
+
|
| 801 |
+
def get_values(
    self,
    section: str,
    option: str,
    default: Union[int, float, str, bool, None] = None,
) -> List[Union[int, float, str, bool]]:
    """Get all of an option's values, in order.

    :param default:
        If not ``None``, a list containing the given default value will be returned
        in case the option did not exist.

    :return:
        A list of properly typed values, either int, float or string

    :raise TypeError:
        In case the value could not be understood.
        Otherwise the exceptions known to the ConfigParser will be raised.
    """
    try:
        # Ensure lazy reading has happened before touching the section store.
        self.sections()
        raw_values = self._sections[section].getall(option)
    except Exception:
        if default is not None:
            return [default]
        raise

    return [self._string_to_value(raw) for raw in raw_values]
|
| 832 |
+
|
| 833 |
+
def _string_to_value(self, valuestr: str) -> Union[int, float, str, bool]:
|
| 834 |
+
types = (int, float)
|
| 835 |
+
for numtype in types:
|
| 836 |
+
try:
|
| 837 |
+
val = numtype(valuestr)
|
| 838 |
+
# truncated value ?
|
| 839 |
+
if val != float(valuestr):
|
| 840 |
+
continue
|
| 841 |
+
return val
|
| 842 |
+
except (ValueError, TypeError):
|
| 843 |
+
continue
|
| 844 |
+
# END for each numeric type
|
| 845 |
+
|
| 846 |
+
# Try boolean values as git uses them.
|
| 847 |
+
vl = valuestr.lower()
|
| 848 |
+
if vl == "false":
|
| 849 |
+
return False
|
| 850 |
+
if vl == "true":
|
| 851 |
+
return True
|
| 852 |
+
|
| 853 |
+
if not isinstance(valuestr, str):
|
| 854 |
+
raise TypeError(
|
| 855 |
+
"Invalid value type: only int, long, float and str are allowed",
|
| 856 |
+
valuestr,
|
| 857 |
+
)
|
| 858 |
+
|
| 859 |
+
return valuestr
|
| 860 |
+
|
| 861 |
+
def _value_to_string(self, value: Union[str, bytes, int, float, bool]) -> str:
|
| 862 |
+
if isinstance(value, (int, float, bool)):
|
| 863 |
+
return str(value)
|
| 864 |
+
return force_text(value)
|
| 865 |
+
|
| 866 |
+
@needs_values
@set_dirty_and_flush_changes
def set_value(self, section: str, option: str, value: Union[str, bytes, int, float, bool]) -> "GitConfigParser":
    """Set the given option in section to the given value.

    The section is created when absent; unlike the default ConfigParser ``set``
    method, this does not throw for a missing section.

    :param section:
        Name of the section in which the option resides or should reside.

    :param option:
        Name of the options whose value to set.

    :param value:
        Value to set the option to. It must be a string or convertible to a string.

    :return:
        This instance
    """
    if not self.has_section(section):
        self.add_section(section)
    self.set(section, option, self._value_to_string(value))
    return self
|
| 890 |
+
|
| 891 |
+
@needs_values
@set_dirty_and_flush_changes
def add_value(self, section: str, option: str, value: Union[str, bytes, int, float, bool]) -> "GitConfigParser":
    """Append a value for the given option in section.

    The section is created when absent; unlike the default ConfigParser ``set``
    method, this does not throw. The value becomes the new value of the option
    as returned by :meth:`get_value`, and appends to the list of values returned
    by :meth:`get_values`.

    :param section:
        Name of the section in which the option resides or should reside.

    :param option:
        Name of the option.

    :param value:
        Value to add to option. It must be a string or convertible to a string.

    :return:
        This instance
    """
    if not self.has_section(section):
        self.add_section(section)
    self._sections[section].add(option, self._value_to_string(value))
    return self
|
| 917 |
+
|
| 918 |
+
def rename_section(self, section: str, new_name: str) -> "GitConfigParser":
    """Rename the given section to `new_name`.

    :raise ValueError:
        If `section` doesn't exist, or a section named `new_name` already does.

    :return:
        This instance
    """
    if not self.has_section(section):
        raise ValueError("Source section '%s' doesn't exist" % section)
    if self.has_section(new_name):
        raise ValueError("Destination section '%s' already exists" % new_name)

    super().add_section(new_name)
    target = self._sections[new_name]
    for option, stored_values in self.items_all(section):
        target.setall(option, stored_values)
    # END for each value to copy

    # remove_section writes back the changes itself, which is why this method
    # carries no flush decorator.
    self.remove_section(section)
    return self
|
parrot/lib/python3.10/site-packages/git/index/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (424 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/git/index/__pycache__/base.cpython-310.pyc
ADDED
|
Binary file (46.9 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/index/__pycache__/fun.cpython-310.pyc
ADDED
|
Binary file (10.6 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/index/fun.py
ADDED
|
@@ -0,0 +1,465 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is part of GitPython and is released under the
|
| 2 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 3 |
+
|
| 4 |
+
"""Standalone functions to accompany the index implementation and make it more
|
| 5 |
+
versatile."""
|
| 6 |
+
|
| 7 |
+
__all__ = [
|
| 8 |
+
"write_cache",
|
| 9 |
+
"read_cache",
|
| 10 |
+
"write_tree_from_cache",
|
| 11 |
+
"entry_key",
|
| 12 |
+
"stat_mode_to_index_mode",
|
| 13 |
+
"S_IFGITLINK",
|
| 14 |
+
"run_commit_hook",
|
| 15 |
+
"hook_path",
|
| 16 |
+
]
|
| 17 |
+
|
| 18 |
+
from io import BytesIO
|
| 19 |
+
import os
|
| 20 |
+
import os.path as osp
|
| 21 |
+
from pathlib import Path
|
| 22 |
+
from stat import S_IFDIR, S_IFLNK, S_IFMT, S_IFREG, S_ISDIR, S_ISLNK, S_IXUSR
|
| 23 |
+
import subprocess
|
| 24 |
+
import sys
|
| 25 |
+
|
| 26 |
+
from gitdb.base import IStream
|
| 27 |
+
from gitdb.typ import str_tree_type
|
| 28 |
+
|
| 29 |
+
from git.cmd import handle_process_output, safer_popen
|
| 30 |
+
from git.compat import defenc, force_bytes, force_text, safe_decode
|
| 31 |
+
from git.exc import HookExecutionError, UnmergedEntriesError
|
| 32 |
+
from git.objects.fun import (
|
| 33 |
+
traverse_tree_recursive,
|
| 34 |
+
traverse_trees_recursive,
|
| 35 |
+
tree_to_stream,
|
| 36 |
+
)
|
| 37 |
+
from git.util import IndexFileSHA1Writer, finalize_process
|
| 38 |
+
|
| 39 |
+
from .typ import BaseIndexEntry, IndexEntry, CE_NAMEMASK, CE_STAGESHIFT
|
| 40 |
+
from .util import pack, unpack
|
| 41 |
+
|
| 42 |
+
# typing -----------------------------------------------------------------------------
|
| 43 |
+
|
| 44 |
+
from typing import Dict, IO, List, Sequence, TYPE_CHECKING, Tuple, Type, Union, cast
|
| 45 |
+
|
| 46 |
+
from git.types import PathLike
|
| 47 |
+
|
| 48 |
+
if TYPE_CHECKING:
|
| 49 |
+
from git.db import GitCmdObjectDB
|
| 50 |
+
from git.objects.tree import TreeCacheTup
|
| 51 |
+
|
| 52 |
+
from .base import IndexFile
|
| 53 |
+
|
| 54 |
+
# ------------------------------------------------------------------------------------
|
| 55 |
+
|
| 56 |
+
# Mode bits combining symlink and directory flags, as git marks submodules.
S_IFGITLINK = S_IFLNK | S_IFDIR
"""Flags for a submodule."""

# Inverse of CE_NAMEMASK, used to clear the name-length bits from entry flags.
CE_NAMEMASK_INV = ~CE_NAMEMASK
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def hook_path(name: str, git_dir: PathLike) -> str:
    """:return: path to the given named hook in the given git repository directory"""
    hooks_dir = osp.join(git_dir, "hooks")
    return osp.join(hooks_dir, name)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _has_file_extension(path: str) -> str:
|
| 68 |
+
return osp.splitext(path)[1]
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def run_commit_hook(name: str, index: "IndexFile", *args: str) -> None:
    """Run the commit hook of the given name. Silently ignore hooks that do not exist.

    :param name:
        Name of hook, like ``pre-commit``.

    :param index:
        :class:`~git.index.base.IndexFile` instance.

    :param args:
        Arguments passed to hook file.

    :raise git.exc.HookExecutionError:
    """
    hp = hook_path(name, index.repo.git_dir)
    # Hooks that are absent or not executable are silently skipped.
    if not os.access(hp, os.X_OK):
        return

    env = os.environ.copy()
    env["GIT_INDEX_FILE"] = safe_decode(str(index.path))
    # NOTE(review): ":" appears intended to suppress any interactive editor the
    # hook might spawn — confirm against git's hook documentation.
    env["GIT_EDITOR"] = ":"
    cmd = [hp]
    try:
        if sys.platform == "win32" and not _has_file_extension(hp):
            # Windows only uses extensions to determine how to open files
            # (doesn't understand shebangs). Try using bash to run the hook.
            relative_hp = Path(hp).relative_to(index.repo.working_dir).as_posix()
            cmd = ["bash.exe", relative_hp]

        process = safer_popen(
            cmd + list(args),
            env=env,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            cwd=index.repo.working_dir,
        )
    except Exception as ex:
        # Failure to even start the hook is reported as a hook execution error.
        raise HookExecutionError(hp, ex) from ex
    else:
        stdout_list: List[str] = []
        stderr_list: List[str] = []
        handle_process_output(process, stdout_list.append, stderr_list.append, finalize_process)
        stdout = "".join(stdout_list)
        stderr = "".join(stderr_list)
        if process.returncode != 0:
            stdout = force_text(stdout, defenc)
            stderr = force_text(stderr, defenc)
            raise HookExecutionError(hp, process.returncode, stderr, stdout)
        # END handle return code
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def stat_mode_to_index_mode(mode: int) -> int:
    """Convert the given mode from a stat call to the corresponding index mode and
    return it.

    :param mode:
        ``st_mode`` value as returned by :func:`os.stat`.

    :return:
        ``S_IFLNK`` for symlinks, ``S_IFGITLINK`` for directories (submodules),
        otherwise a regular-blob mode of 0o755 or 0o644 depending on the
        owner-executable bit.
    """
    if S_ISLNK(mode):  # symlinks
        return S_IFLNK
    if S_ISDIR(mode) or S_IFMT(mode) == S_IFGITLINK:  # submodules
        return S_IFGITLINK
    # Blobs with or without executable bit. (Replaces the fragile
    # `cond and a or b` idiom with an explicit conditional expression.)
    return S_IFREG | (0o755 if mode & S_IXUSR else 0o644)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def write_cache(
    entries: Sequence[Union[BaseIndexEntry, "IndexEntry"]],
    stream: IO[bytes],
    extension_data: Union[None, bytes] = None,
    ShaStreamCls: Type[IndexFileSHA1Writer] = IndexFileSHA1Writer,
) -> None:
    """Write the cache represented by entries to a stream.

    :param entries:
        **Sorted** list of entries.

    :param stream:
        Stream to wrap into the AdapterStreamCls - it is used for final output.

    :param ShaStreamCls:
        Type to use when writing to the stream. It produces a sha while writing to it,
        before the data is passed on to the wrapped stream.

    :param extension_data:
        Any kind of data to write as a trailer, it must begin a 4 byte identifier,
        followed by its size (4 bytes).
    """
    # Wrap the stream into a compatible writer.
    stream_sha = ShaStreamCls(stream)

    tell = stream_sha.tell
    write = stream_sha.write

    # Header: 4-byte magic "DIRC", then version and entry count as big-endian longs.
    version = 2
    write(b"DIRC")
    write(pack(">LL", version, len(entries)))

    # Body
    for entry in entries:
        beginoffset = tell()
        write(entry.ctime_bytes)  # ctime
        write(entry.mtime_bytes)  # mtime
        path_str = str(entry.path)
        path: bytes = force_bytes(path_str, encoding=defenc)
        plen = len(path) & CE_NAMEMASK  # Path length
        assert plen == len(path), "Path %s too long to fit into index" % entry.path
        flags = plen | (entry.flags & CE_NAMEMASK_INV)  # Clear possible previous values.
        write(
            pack(
                ">LLLLLL20sH",
                entry.dev,
                entry.inode,
                entry.mode,
                entry.uid,
                entry.gid,
                entry.size,
                entry.binsha,
                flags,
            )
        )
        write(path)
        # Pad the serialized entry with NUL bytes up to the next multiple of 8
        # (always at least one byte, due to the "+ 8" before rounding down).
        real_size = (tell() - beginoffset + 8) & ~7
        write(b"\0" * ((beginoffset + real_size) - tell()))
    # END for each entry

    # Write previously cached extensions data.
    if extension_data is not None:
        stream_sha.write(extension_data)

    # Write the sha over the content.
    stream_sha.write_sha()
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
def read_header(stream: IO[bytes]) -> Tuple[int, int]:
    """Return tuple(version_long, num_entries) from the given stream."""
    magic = stream.read(4)
    if magic != b"DIRC":
        raise AssertionError("Invalid index file header: %r" % magic)
    version, num_entries = cast(Tuple[int, int], unpack(">LL", stream.read(4 * 2)))

    # TODO: Handle version 3: extended data, see read-cache.c.
    assert version in (1, 2)
    return version, num_entries
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
def entry_key(*entry: Union[BaseIndexEntry, PathLike, int]) -> Tuple[PathLike, int]:
    """
    :return:
        Key suitable to be used for the
        :attr:`index.entries <git.index.base.IndexFile.entries>` dictionary.

    :param entry:
        One instance of type BaseIndexEntry or the path and the stage.
    """
    if len(entry) != 1:
        # Already a (path, stage) pair.
        return cast(Tuple[PathLike, int], entry)

    sole = entry[0]
    assert isinstance(sole, BaseIndexEntry)
    return (sole.path, sole.stage)
    # END handle entry
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
def read_cache(
    stream: IO[bytes],
) -> Tuple[int, Dict[Tuple[PathLike, int], "IndexEntry"], bytes, bytes]:
    """Read a cache file from the given stream.

    :return:
        tuple(version, entries_dict, extension_data, content_sha)

        * *version* is the integer version number.
        * *entries_dict* is a dictionary which maps IndexEntry instances to a path at a
          stage.
        * *extension_data* is ``""`` or 4 bytes of type + 4 bytes of size + size bytes.
        * *content_sha* is a 20 byte sha on all cache file contents.
    """
    version, num_entries = read_header(stream)
    count = 0
    entries: Dict[Tuple[PathLike, int], "IndexEntry"] = {}

    # Bind bound methods to locals to speed up the per-entry loop below.
    read = stream.read
    tell = stream.tell
    while count < num_entries:
        beginoffset = tell()
        # ctime/mtime are kept as raw 8-byte blobs; IndexEntry unpacks them lazily
        # into (seconds, nanoseconds) on attribute access.
        ctime = unpack(">8s", read(8))[0]
        mtime = unpack(">8s", read(8))[0]
        (dev, ino, mode, uid, gid, size, sha, flags) = unpack(">LLLLLL20sH", read(20 + 4 * 6 + 2))
        # The low 12 bits of the flags field hold the path length (CE_NAMEMASK).
        path_size = flags & CE_NAMEMASK
        path = read(path_size).decode(defenc)

        # Entries are NUL-padded to the next 8-byte boundary on disk (between 1 and
        # 8 padding bytes); skip the padding to land on the next entry.
        real_size = (tell() - beginoffset + 8) & ~7
        read((beginoffset + real_size) - tell())
        entry = IndexEntry((mode, sha, flags, path, ctime, mtime, dev, ino, uid, gid, size))
        # entry_key would be the method to use, but we save the effort.
        entries[(path, entry.stage)] = entry
        count += 1
    # END for each entry

    # The footer contains extension data and a sha on the content so far.
    # Keep the extension footer, and verify we have a sha in the end.
    # Extension data format is:
    #   4 bytes ID
    #   4 bytes length of chunk
    #   Repeated 0 - N times
    extension_data = stream.read(~0)  # ~0 == -1: read everything up to EOF.
    assert len(extension_data) > 19, (
        "Index Footer was not at least a sha on content as it was only %i bytes in size" % len(extension_data)
    )

    # The trailing 20 bytes are always the SHA-1 over the preceding file content.
    content_sha = extension_data[-20:]

    # Truncate the sha in the end as we will dynamically create it anyway.
    extension_data = extension_data[:-20]

    return (version, entries, extension_data, content_sha)
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
def write_tree_from_cache(
    entries: List[IndexEntry], odb: "GitCmdObjectDB", sl: slice, si: int = 0
) -> Tuple[bytes, List["TreeCacheTup"]]:
    R"""Create a tree from the given sorted list of entries and put the respective
    trees into the given object database.

    :param entries:
        **Sorted** list of :class:`~git.index.typ.IndexEntry`\s.

    :param odb:
        Object database to store the trees in.

    :param si:
        Start index at which we should start creating subtrees.

    :param sl:
        Slice indicating the range we should process on the entries list.

    :return:
        tuple(binsha, list(tree_entry, ...))

        A tuple of a sha and a list of tree entries being a tuple of hexsha, mode, name.

    :raise UnmergedEntriesError:
        If an entry with a non-zero merge stage is encountered.
    """
    tree_items: List["TreeCacheTup"] = []

    ci = sl.start
    end = sl.stop
    while ci < end:
        entry = entries[ci]
        if entry.stage != 0:
            # A tree can only be written from a fully merged (stage 0) index.
            raise UnmergedEntriesError(entry)
        # END abort on unmerged
        ci += 1
        # Look for a directory separator beyond the current subtree prefix `si`.
        rbound = entry.path.find("/", si)
        if rbound == -1:
            # It's not a tree: record the blob directly under its basename.
            tree_items.append((entry.binsha, entry.mode, entry.path[si:]))
        else:
            # Find common base range: all consecutive entries sharing the same
            # first path component belong to the same subtree.
            base = entry.path[si:rbound]
            xi = ci
            while xi < end:
                oentry = entries[xi]
                orbound = oentry.path.find("/", si)
                if orbound == -1 or oentry.path[si:orbound] != base:
                    break
                # END abort on base mismatch
                xi += 1
            # END find common base

            # Enter recursion.
            # ci - 1 as we want to count our current item as well.
            sha, _tree_entry_list = write_tree_from_cache(entries, odb, slice(ci - 1, xi), rbound + 1)
            tree_items.append((sha, S_IFDIR, base))

            # Skip ahead past everything the recursive call consumed.
            ci = xi
        # END handle bounds
    # END for each entry

    # Finally create the tree.
    sio = BytesIO()
    tree_to_stream(tree_items, sio.write)  # Writes to stream as bytes, but doesn't change tree_items.
    sio.seek(0)

    istream = odb.store(IStream(str_tree_type, len(sio.getvalue()), sio))
    return (istream.binsha, tree_items)
|
| 360 |
+
|
| 361 |
+
|
| 362 |
+
def _tree_entry_to_baseindexentry(tree_entry: "TreeCacheTup", stage: int) -> BaseIndexEntry:
    """Convert a ``(binsha, mode, name)`` tree-cache tuple into a
    :class:`BaseIndexEntry` placed at the given merge `stage`."""
    binsha, mode, name = tree_entry
    return BaseIndexEntry((mode, binsha, stage << CE_STAGESHIFT, name))
|
| 364 |
+
|
| 365 |
+
|
| 366 |
+
def aggressive_tree_merge(odb: "GitCmdObjectDB", tree_shas: Sequence[bytes]) -> List[BaseIndexEntry]:
    R"""
    :return:
        List of :class:`~git.index.typ.BaseIndexEntry`\s representing the aggressive
        merge of the given trees. All valid entries are on stage 0, whereas the
        conflicting ones are left on stage 1, 2 or 3, whereas stage 1 corresponds to the
        common ancestor tree, 2 to our tree and 3 to 'their' tree.

    :param tree_shas:
        1, 2 or 3 trees as identified by their binary 20 byte shas. If 1 or two, the
        entries will effectively correspond to the last given tree. If 3 are given, a 3
        way merge is performed.

    :raise ValueError:
        If more than three tree shas are given.
    """
    out: List[BaseIndexEntry] = []

    # One and two way is the same for us, as we don't have to handle an existing
    # index; instead everything is taken from the last given tree at stage 0.
    if len(tree_shas) in (1, 2):
        for entry in traverse_tree_recursive(odb, tree_shas[-1], ""):
            out.append(_tree_entry_to_baseindexentry(entry, 0))
        # END for each entry
        return out
    # END handle single tree

    if len(tree_shas) > 3:
        raise ValueError("Cannot handle %i trees at once" % len(tree_shas))

    # Three trees. Each tuple holds (binsha, mode, path)-style entries for the
    # common ancestor, our side and their side; any of them may be None.
    for base, ours, theirs in traverse_trees_recursive(odb, tree_shas, ""):
        if base is not None:
            # Base version exists.
            if ours is not None:
                # Ours exists.
                if theirs is not None:
                    # It exists in all branches. If it was changed in both ours
                    # and theirs, it's a conflict. Otherwise, we take the changed
                    # version. This should be the most common branch, so it comes
                    # first. Index [0] is the sha, index [1] the mode.
                    if (base[0] != ours[0] and base[0] != theirs[0] and ours[0] != theirs[0]) or (
                        base[1] != ours[1] and base[1] != theirs[1] and ours[1] != theirs[1]
                    ):
                        # Changed by both.
                        out.append(_tree_entry_to_baseindexentry(base, 1))
                        out.append(_tree_entry_to_baseindexentry(ours, 2))
                        out.append(_tree_entry_to_baseindexentry(theirs, 3))
                    elif base[0] != ours[0] or base[1] != ours[1]:
                        # Only we changed it.
                        out.append(_tree_entry_to_baseindexentry(ours, 0))
                    else:
                        # Either nobody changed it, or they did. In either
                        # case, use theirs.
                        out.append(_tree_entry_to_baseindexentry(theirs, 0))
                    # END handle modification
                else:
                    if ours[0] != base[0] or ours[1] != base[1]:
                        # They deleted it, we changed it, conflict.
                        out.append(_tree_entry_to_baseindexentry(base, 1))
                        out.append(_tree_entry_to_baseindexentry(ours, 2))
                    # else:
                    #   # We didn't change it, ignore.
                    #   pass
                    # END handle our change
                # END handle theirs
            else:
                if theirs is None:
                    # Deleted in both, it's fine - it's out.
                    pass
                else:
                    if theirs[0] != base[0] or theirs[1] != base[1]:
                        # Deleted in ours, changed theirs, conflict.
                        out.append(_tree_entry_to_baseindexentry(base, 1))
                        out.append(_tree_entry_to_baseindexentry(theirs, 3))
                    # END theirs changed
                    # else:
                    #   # Theirs didn't change.
                    #   pass
                # END handle theirs
            # END handle ours
        else:
            # All three can't be None.
            if ours is None:
                # Added in their branch.
                assert theirs is not None
                out.append(_tree_entry_to_baseindexentry(theirs, 0))
            elif theirs is None:
                # Added in our branch.
                out.append(_tree_entry_to_baseindexentry(ours, 0))
            else:
                # Both have it, except for the base, see whether it changed.
                if ours[0] != theirs[0] or ours[1] != theirs[1]:
                    out.append(_tree_entry_to_baseindexentry(ours, 2))
                    out.append(_tree_entry_to_baseindexentry(theirs, 3))
                else:
                    # It was added the same in both.
                    out.append(_tree_entry_to_baseindexentry(ours, 0))
            # END handle two items
        # END handle heads
    # END handle base exists
    # END for each entries tuple

    return out
|
parrot/lib/python3.10/site-packages/git/index/typ.py
ADDED
|
@@ -0,0 +1,202 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is part of GitPython and is released under the
|
| 2 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 3 |
+
|
| 4 |
+
"""Additional types used by the index."""
|
| 5 |
+
|
| 6 |
+
__all__ = ["BlobFilter", "BaseIndexEntry", "IndexEntry", "StageType"]
|
| 7 |
+
|
| 8 |
+
from binascii import b2a_hex
|
| 9 |
+
from pathlib import Path
|
| 10 |
+
|
| 11 |
+
from git.objects import Blob
|
| 12 |
+
|
| 13 |
+
from .util import pack, unpack
|
| 14 |
+
|
| 15 |
+
# typing ----------------------------------------------------------------------
|
| 16 |
+
|
| 17 |
+
from typing import NamedTuple, Sequence, TYPE_CHECKING, Tuple, Union, cast
|
| 18 |
+
|
| 19 |
+
from git.types import PathLike
|
| 20 |
+
|
| 21 |
+
if TYPE_CHECKING:
|
| 22 |
+
from git.repo import Repo
|
| 23 |
+
|
| 24 |
+
StageType = int
|
| 25 |
+
|
| 26 |
+
# ---------------------------------------------------------------------------------
|
| 27 |
+
|
| 28 |
+
# { Invariants
|
| 29 |
+
CE_NAMEMASK = 0x0FFF
|
| 30 |
+
CE_STAGEMASK = 0x3000
|
| 31 |
+
CE_EXTENDED = 0x4000
|
| 32 |
+
CE_VALID = 0x8000
|
| 33 |
+
CE_STAGESHIFT = 12
|
| 34 |
+
|
| 35 |
+
# } END invariants
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class BlobFilter:
    """Predicate used by
    :meth:`IndexFile.iter_blobs <git.index.base.IndexFile.iter_blobs>` to select only
    blobs located under the given directories or matching the given file paths.

    All configured paths are interpreted relative to the repository root.
    """

    __slots__ = ("paths",)

    def __init__(self, paths: Sequence[PathLike]) -> None:
        """
        :param paths:
            Tuple or list of paths, each pointing to a directory or a file relative
            to the current repository.
        """
        self.paths = paths

    def __call__(self, stage_blob: Tuple[StageType, Blob]) -> bool:
        blob_pathlike: PathLike = stage_blob[1].path
        blob_parts = (blob_pathlike if isinstance(blob_pathlike, Path) else Path(blob_pathlike)).parts
        for candidate in self.paths:
            cand_path: Path = candidate if isinstance(candidate, Path) else Path(candidate)
            # TODO: Change to use `PosixPath.is_relative_to` once Python 3.8 is no
            # longer supported.
            prefix = cand_path.parts
            if len(prefix) <= len(blob_parts) and all(a == b for a, b in zip(prefix, blob_parts)):
                # The candidate is a component-wise prefix of the blob's path.
                return True
        return False
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
class BaseIndexEntryHelper(NamedTuple):
    """Typed named tuple to provide named attribute access for :class:`BaseIndexEntry`.

    This is needed to allow overriding ``__new__`` in child class to preserve backwards
    compatibility.
    """

    mode: int  # Stat-compatible file mode.
    binsha: bytes  # 20 byte binary sha1 of the object.
    flags: int  # Flag bitfield; low 12 bits hold the path length, bits 12-13 the stage.
    path: PathLike  # Path relative to the repository root.
    # The remaining fields default to null-like values; minimal constructors such as
    # IndexEntry.from_base/from_blob leave most of them at their defaults.
    ctime_bytes: bytes = pack(">LL", 0, 0)  # Raw (seconds, nanoseconds) creation time.
    mtime_bytes: bytes = pack(">LL", 0, 0)  # Raw (seconds, nanoseconds) modification time.
    dev: int = 0
    inode: int = 0
    uid: int = 0
    gid: int = 0
    size: int = 0
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
class BaseIndexEntry(BaseIndexEntryHelper):
    R"""Lightweight cousin of an index entry, used to describe changes applied to the
    index when the full set of entry metadata is not required.

    Because its first four fields are laid out exactly like those of
    :class:`IndexEntry`, code expecting a :class:`BaseIndexEntry` also works with full
    :class:`IndexEntry`\s, even when it indexes the tuple numerically for speed.
    """

    def __new__(
        cls,
        inp_tuple: Union[
            Tuple[int, bytes, int, PathLike],
            Tuple[int, bytes, int, PathLike, bytes, bytes, int, int, int, int, int],
        ],
    ) -> "BaseIndexEntry":
        """Allow construction from a plain tuple, kept for backwards compatibility."""
        return super().__new__(cls, *inp_tuple)

    def __str__(self) -> str:
        return f"{self.mode:o} {self.hexsha} {self.stage}\t{self.path}"

    def __repr__(self) -> str:
        return f"({self.mode:o}, {self.hexsha}, {self.stage}, {self.path})"

    @property
    def hexsha(self) -> str:
        """hex version of our sha"""
        return b2a_hex(self.binsha).decode("ascii")

    @property
    def stage(self) -> int:
        """Merge stage encoded in the flags field, one of:

        * 0 = default stage
        * 1 = stage before a merge or common ancestor entry in case of a 3 way merge
        * 2 = stage of entries from the 'left' side of the merge
        * 3 = stage of entries from the 'right' side of the merge

        :note:
            For more information, see :manpage:`git-read-tree(1)`.
        """
        return (self.flags & CE_STAGEMASK) >> CE_STAGESHIFT

    @classmethod
    def from_blob(cls, blob: Blob, stage: int = 0) -> "BaseIndexEntry":
        """:return: Fully equipped BaseIndexEntry at the given stage"""
        return cls((blob.mode, blob.binsha, stage << CE_STAGESHIFT, blob.path))

    def to_blob(self, repo: "Repo") -> Blob:
        """:return: Blob using the information of this index entry"""
        return Blob(repo, self.binsha, self.mode, self.path)
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
class IndexEntry(BaseIndexEntry):
    """Convenient access to the full index entry data defined in
    :class:`BaseIndexEntry`, without eagerly unpacking all of it.

    Frequently used attributes live directly in the tuple, while the rest is decoded
    on demand.

    See the properties for a mapping between names and tuple indices.
    """

    @property
    def ctime(self) -> Tuple[int, int]:
        """
        :return:
            Tuple(int_time_seconds_since_epoch, int_nano_seconds) of the
            file's creation time
        """
        return cast(Tuple[int, int], unpack(">LL", self.ctime_bytes))

    @property
    def mtime(self) -> Tuple[int, int]:
        """See :attr:`ctime` property, but returns modification time."""
        return cast(Tuple[int, int], unpack(">LL", self.mtime_bytes))

    @classmethod
    def from_base(cls, base: "BaseIndexEntry") -> "IndexEntry":
        """
        :return:
            Minimal entry as created from the given :class:`BaseIndexEntry` instance.
            Missing values will be set to null-like values.

        :param base:
            Instance of type :class:`BaseIndexEntry`.
        """
        null_time = pack(">LL", 0, 0)
        return IndexEntry((base.mode, base.binsha, base.flags, base.path, null_time, null_time, 0, 0, 0, 0, 0))

    @classmethod
    def from_blob(cls, blob: Blob, stage: int = 0) -> "IndexEntry":
        """:return: Minimal entry resembling the given blob object"""
        null_time = pack(">LL", 0, 0)
        return IndexEntry(
            (blob.mode, blob.binsha, stage << CE_STAGESHIFT, blob.path, null_time, null_time, 0, 0, 0, 0, blob.size)
        )
|
parrot/lib/python3.10/site-packages/git/objects/__init__.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is part of GitPython and is released under the
|
| 2 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 3 |
+
|
| 4 |
+
"""Import all submodules' main classes into the package space."""
|
| 5 |
+
|
| 6 |
+
__all__ = [
|
| 7 |
+
"IndexObject",
|
| 8 |
+
"Object",
|
| 9 |
+
"Blob",
|
| 10 |
+
"Commit",
|
| 11 |
+
"Submodule",
|
| 12 |
+
"UpdateProgress",
|
| 13 |
+
"RootModule",
|
| 14 |
+
"RootUpdateProgress",
|
| 15 |
+
"TagObject",
|
| 16 |
+
"Tree",
|
| 17 |
+
"TreeModifier",
|
| 18 |
+
]
|
| 19 |
+
|
| 20 |
+
from .base import IndexObject, Object
|
| 21 |
+
from .blob import Blob
|
| 22 |
+
from .commit import Commit
|
| 23 |
+
from .submodule import RootModule, RootUpdateProgress, Submodule, UpdateProgress
|
| 24 |
+
from .tag import TagObject
|
| 25 |
+
from .tree import Tree, TreeModifier
|
parrot/lib/python3.10/site-packages/git/objects/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (653 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/git/objects/__pycache__/blob.cpython-310.pyc
ADDED
|
Binary file (1.34 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/objects/__pycache__/commit.cpython-310.pyc
ADDED
|
Binary file (22 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/objects/__pycache__/tag.cpython-310.pyc
ADDED
|
Binary file (3.62 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/objects/__pycache__/tree.cpython-310.pyc
ADDED
|
Binary file (12.7 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/objects/base.py
ADDED
|
@@ -0,0 +1,301 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitPython and is released under the
|
| 4 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 5 |
+
|
| 6 |
+
__all__ = ["Object", "IndexObject"]
|
| 7 |
+
|
| 8 |
+
import os.path as osp
|
| 9 |
+
|
| 10 |
+
import gitdb.typ as dbtyp
|
| 11 |
+
|
| 12 |
+
from git.exc import WorkTreeRepositoryUnsupported
|
| 13 |
+
from git.util import LazyMixin, bin_to_hex, join_path_native, stream_copy
|
| 14 |
+
|
| 15 |
+
from .util import get_object_type_by_name
|
| 16 |
+
|
| 17 |
+
# typing ------------------------------------------------------------------
|
| 18 |
+
|
| 19 |
+
from typing import Any, TYPE_CHECKING, Union
|
| 20 |
+
|
| 21 |
+
from git.types import AnyGitObject, GitObjectTypeString, PathLike
|
| 22 |
+
|
| 23 |
+
if TYPE_CHECKING:
|
| 24 |
+
from gitdb.base import OStream
|
| 25 |
+
|
| 26 |
+
from git.refs.reference import Reference
|
| 27 |
+
from git.repo import Repo
|
| 28 |
+
|
| 29 |
+
from .blob import Blob
|
| 30 |
+
from .submodule.base import Submodule
|
| 31 |
+
from .tree import Tree
|
| 32 |
+
|
| 33 |
+
IndexObjUnion = Union["Tree", "Blob", "Submodule"]
|
| 34 |
+
|
| 35 |
+
# --------------------------------------------------------------------------
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class Object(LazyMixin):
    """Common base of the classes modelling git object types.

    The concrete kinds of git objects are modelled by these four leaf classes:

    * :class:`Blob <git.objects.blob.Blob>`
    * :class:`Tree <git.objects.tree.Tree>`
    * :class:`Commit <git.objects.commit.Commit>`
    * :class:`TagObject <git.objects.tag.TagObject>`

    See :manpage:`gitglossary(7)` on:

    * "object": https://git-scm.com/docs/gitglossary#def_object
    * "object type": https://git-scm.com/docs/gitglossary#def_object_type
    * "blob": https://git-scm.com/docs/gitglossary#def_blob_object
    * "tree object": https://git-scm.com/docs/gitglossary#def_tree_object
    * "commit object": https://git-scm.com/docs/gitglossary#def_commit_object
    * "tag object": https://git-scm.com/docs/gitglossary#def_tag_object

    :note:
        The :class:`~git.types.AnyGitObject` union covers exactly the four leaf
        subclasses that represent actual git object types.

    :note:
        Although submodules are not really a git object type,
        :class:`~git.objects.submodule.base.Submodule` (and its
        :class:`~git.objects.submodule.root.RootModule` subclass) are also defined in
        the hierarchy rooted at this class.

    :note:
        Not to be confused with Python's builtin :class:`object`, the root of the
        Python class hierarchy.
    """

    NULL_HEX_SHA = "0" * 40
    NULL_BIN_SHA = b"\0" * 20

    TYPES = (
        dbtyp.str_blob_type,
        dbtyp.str_tree_type,
        dbtyp.str_commit_type,
        dbtyp.str_tag_type,
    )

    __slots__ = ("repo", "binsha", "size")

    type: Union[GitObjectTypeString, None] = None
    """String identifying (a concrete :class:`Object` subtype for) a git object type.

    The subtypes that this may name correspond to the kinds of git objects that exist,
    i.e., the objects that may be present in a git repository.

    :note:
        Concrete leaf subclasses override this attribute with the type string they
        represent. It remains ``None`` on this base class and on the
        :class:`IndexObject` intermediate subclass.

    :note:
        See also :class:`~git.types.GitObjectTypeString`.
    """

    def __init__(self, repo: "Repo", binsha: bytes) -> None:
        """Identify this object by its binary sha.

        :param repo:
            Repository this object is located in.

        :param binsha:
            20 byte SHA1
        """
        super().__init__()
        self.repo = repo
        self.binsha = binsha
        assert len(binsha) == 20, f"Require 20 byte binary sha, got {binsha!r}, len = {len(binsha)}"

    @classmethod
    def new(cls, repo: "Repo", id: Union[str, "Reference"]) -> AnyGitObject:
        """
        :return:
            New :class:`Object` instance of a type appropriate to the object type
            behind `id`. The id of the newly created object will be a binsha even
            though the input id may have been a `~git.refs.reference.Reference` or
            rev-spec.

        :param id:
            :class:`~git.refs.reference.Reference`, rev-spec, or hexsha.

        :note:
            This cannot be a ``__new__`` method, as that would always invoke
            :meth:`__init__` with the input id, which is not necessarily a binsha.
        """
        return repo.rev_parse(str(id))

    @classmethod
    def new_from_sha(cls, repo: "Repo", sha1: bytes) -> AnyGitObject:
        """
        :return:
            New object instance of a type appropriate to represent the given binary
            sha1

        :param sha1:
            20 byte binary sha1.
        """
        if sha1 == cls.NULL_BIN_SHA:
            # The NULL binsha is always the root commit.
            return get_object_type_by_name(b"commit")(repo, sha1)
        # END handle special case
        info = repo.odb.info(sha1)
        obj = get_object_type_by_name(info.type)(repo, info.binsha)
        obj.size = info.size
        return obj

    def _set_cache_(self, attr: str) -> None:
        """Retrieve object information."""
        if attr != "size":
            super()._set_cache_(attr)
            return
        # The size is the only attribute we can fill in lazily from the object db.
        self.size = self.repo.odb.info(self.binsha).size  # type: int

    def __eq__(self, other: Any) -> bool:
        """:return: ``True`` if the objects have the same SHA1"""
        return hasattr(other, "binsha") and self.binsha == other.binsha

    def __ne__(self, other: Any) -> bool:
        """:return: ``True`` if the objects do not have the same SHA1"""
        return not hasattr(other, "binsha") or self.binsha != other.binsha

    def __hash__(self) -> int:
        """:return: Hash of our id allowing objects to be used in dicts and sets"""
        return hash(self.binsha)

    def __str__(self) -> str:
        """:return: String of our SHA1 as understood by all git commands"""
        return self.hexsha

    def __repr__(self) -> str:
        """:return: String with pythonic representation of our object"""
        return f'<git.{self.__class__.__name__} "{self.hexsha}">'

    @property
    def hexsha(self) -> str:
        """:return: 40 byte hex version of our 20 byte binary sha"""
        # bin_to_hex produces bytes; decode to str for callers.
        return bin_to_hex(self.binsha).decode("ascii")

    @property
    def data_stream(self) -> "OStream":
        """
        :return:
            File-object compatible stream to the uncompressed raw data of the object

        :note:
            Returned streams must be read in order.
        """
        return self.repo.odb.stream(self.binsha)

    def stream_data(self, ostream: "OStream") -> "Object":
        """Write our data directly to the given output stream.

        :param ostream:
            File-object compatible stream object.

        :return:
            self
        """
        stream_copy(self.repo.odb.stream(self.binsha), ostream)
        return self
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
class IndexObject(Object):
|
| 218 |
+
"""Base for all objects that can be part of the index file.
|
| 219 |
+
|
| 220 |
+
The classes representing git object types that can be part of the index file are
|
| 221 |
+
:class:`~git.objects.tree.Tree and :class:`~git.objects.blob.Blob`. In addition,
|
| 222 |
+
:class:`~git.objects.submodule.base.Submodule`, which is not really a git object
|
| 223 |
+
type but can be part of an index file, is also a subclass.
|
| 224 |
+
"""
|
| 225 |
+
|
| 226 |
+
__slots__ = ("path", "mode")
|
| 227 |
+
|
| 228 |
+
# For compatibility with iterable lists.
|
| 229 |
+
_id_attribute_ = "path"
|
| 230 |
+
|
| 231 |
+
def __init__(
|
| 232 |
+
self,
|
| 233 |
+
repo: "Repo",
|
| 234 |
+
binsha: bytes,
|
| 235 |
+
mode: Union[None, int] = None,
|
| 236 |
+
path: Union[None, PathLike] = None,
|
| 237 |
+
) -> None:
|
| 238 |
+
"""Initialize a newly instanced :class:`IndexObject`.
|
| 239 |
+
|
| 240 |
+
:param repo:
|
| 241 |
+
The :class:`~git.repo.base.Repo` we are located in.
|
| 242 |
+
|
| 243 |
+
:param binsha:
|
| 244 |
+
20 byte sha1.
|
| 245 |
+
|
| 246 |
+
:param mode:
|
| 247 |
+
The stat-compatible file mode as :class:`int`.
|
| 248 |
+
Use the :mod:`stat` module to evaluate the information.
|
| 249 |
+
|
| 250 |
+
:param path:
|
| 251 |
+
The path to the file in the file system, relative to the git repository
|
| 252 |
+
root, like ``file.ext`` or ``folder/other.ext``.
|
| 253 |
+
|
| 254 |
+
:note:
|
| 255 |
+
Path may not be set if the index object has been created directly, as it
|
| 256 |
+
cannot be retrieved without knowing the parent tree.
|
| 257 |
+
"""
|
| 258 |
+
super().__init__(repo, binsha)
|
| 259 |
+
if mode is not None:
|
| 260 |
+
self.mode = mode
|
| 261 |
+
if path is not None:
|
| 262 |
+
self.path = path
|
| 263 |
+
|
| 264 |
+
def __hash__(self) -> int:
|
| 265 |
+
"""
|
| 266 |
+
:return:
|
| 267 |
+
Hash of our path as index items are uniquely identifiable by path, not by
|
| 268 |
+
their data!
|
| 269 |
+
"""
|
| 270 |
+
return hash(self.path)
|
| 271 |
+
|
| 272 |
+
def _set_cache_(self, attr: str) -> None:
|
| 273 |
+
if attr in IndexObject.__slots__:
|
| 274 |
+
# They cannot be retrieved later on (not without searching for them).
|
| 275 |
+
raise AttributeError(
|
| 276 |
+
"Attribute '%s' unset: path and mode attributes must have been set during %s object creation"
|
| 277 |
+
% (attr, type(self).__name__)
|
| 278 |
+
)
|
| 279 |
+
else:
|
| 280 |
+
super()._set_cache_(attr)
|
| 281 |
+
# END handle slot attribute
|
| 282 |
+
|
| 283 |
+
@property
|
| 284 |
+
def name(self) -> str:
|
| 285 |
+
""":return: Name portion of the path, effectively being the basename"""
|
| 286 |
+
return osp.basename(self.path)
|
| 287 |
+
|
| 288 |
+
@property
|
| 289 |
+
def abspath(self) -> PathLike:
|
| 290 |
+
R"""
|
| 291 |
+
:return:
|
| 292 |
+
Absolute path to this index object in the file system (as opposed to the
|
| 293 |
+
:attr:`path` field which is a path relative to the git repository).
|
| 294 |
+
|
| 295 |
+
The returned path will be native to the system and contains ``\`` on
|
| 296 |
+
Windows.
|
| 297 |
+
"""
|
| 298 |
+
if self.repo.working_tree_dir is not None:
|
| 299 |
+
return join_path_native(self.repo.working_tree_dir, self.path)
|
| 300 |
+
else:
|
| 301 |
+
raise WorkTreeRepositoryUnsupported("working_tree_dir was None or empty")
|
parrot/lib/python3.10/site-packages/git/objects/blob.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitPython and is released under the
|
| 4 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 5 |
+
|
| 6 |
+
__all__ = ["Blob"]
|
| 7 |
+
|
| 8 |
+
from mimetypes import guess_type
|
| 9 |
+
import sys
|
| 10 |
+
|
| 11 |
+
if sys.version_info >= (3, 8):
|
| 12 |
+
from typing import Literal
|
| 13 |
+
else:
|
| 14 |
+
from typing_extensions import Literal
|
| 15 |
+
|
| 16 |
+
from . import base
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class Blob(base.IndexObject):
|
| 20 |
+
"""A Blob encapsulates a git blob object.
|
| 21 |
+
|
| 22 |
+
See :manpage:`gitglossary(7)` on "blob":
|
| 23 |
+
https://git-scm.com/docs/gitglossary#def_blob_object
|
| 24 |
+
"""
|
| 25 |
+
|
| 26 |
+
DEFAULT_MIME_TYPE = "text/plain"
|
| 27 |
+
type: Literal["blob"] = "blob"
|
| 28 |
+
|
| 29 |
+
# Valid blob modes
|
| 30 |
+
executable_mode = 0o100755
|
| 31 |
+
file_mode = 0o100644
|
| 32 |
+
link_mode = 0o120000
|
| 33 |
+
|
| 34 |
+
__slots__ = ()
|
| 35 |
+
|
| 36 |
+
@property
|
| 37 |
+
def mime_type(self) -> str:
|
| 38 |
+
"""
|
| 39 |
+
:return:
|
| 40 |
+
String describing the mime type of this file (based on the filename)
|
| 41 |
+
|
| 42 |
+
:note:
|
| 43 |
+
Defaults to ``text/plain`` in case the actual file type is unknown.
|
| 44 |
+
"""
|
| 45 |
+
guesses = None
|
| 46 |
+
if self.path:
|
| 47 |
+
guesses = guess_type(str(self.path))
|
| 48 |
+
return guesses and guesses[0] or self.DEFAULT_MIME_TYPE
|
parrot/lib/python3.10/site-packages/git/objects/commit.py
ADDED
|
@@ -0,0 +1,899 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitPython and is released under the
|
| 4 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 5 |
+
|
| 6 |
+
__all__ = ["Commit"]
|
| 7 |
+
|
| 8 |
+
from collections import defaultdict
|
| 9 |
+
import datetime
|
| 10 |
+
from io import BytesIO
|
| 11 |
+
import logging
|
| 12 |
+
import os
|
| 13 |
+
import re
|
| 14 |
+
from subprocess import Popen, PIPE
|
| 15 |
+
import sys
|
| 16 |
+
from time import altzone, daylight, localtime, time, timezone
|
| 17 |
+
import warnings
|
| 18 |
+
|
| 19 |
+
from gitdb import IStream
|
| 20 |
+
|
| 21 |
+
from git.cmd import Git
|
| 22 |
+
from git.diff import Diffable
|
| 23 |
+
from git.util import Actor, Stats, finalize_process, hex_to_bin
|
| 24 |
+
|
| 25 |
+
from . import base
|
| 26 |
+
from .tree import Tree
|
| 27 |
+
from .util import (
|
| 28 |
+
Serializable,
|
| 29 |
+
TraversableIterableObj,
|
| 30 |
+
altz_to_utctz_str,
|
| 31 |
+
from_timestamp,
|
| 32 |
+
parse_actor_and_date,
|
| 33 |
+
parse_date,
|
| 34 |
+
)
|
| 35 |
+
|
| 36 |
+
# typing ------------------------------------------------------------------
|
| 37 |
+
|
| 38 |
+
from typing import (
|
| 39 |
+
Any,
|
| 40 |
+
Dict,
|
| 41 |
+
IO,
|
| 42 |
+
Iterator,
|
| 43 |
+
List,
|
| 44 |
+
Sequence,
|
| 45 |
+
Tuple,
|
| 46 |
+
TYPE_CHECKING,
|
| 47 |
+
Union,
|
| 48 |
+
cast,
|
| 49 |
+
)
|
| 50 |
+
|
| 51 |
+
if sys.version_info >= (3, 8):
|
| 52 |
+
from typing import Literal
|
| 53 |
+
else:
|
| 54 |
+
from typing_extensions import Literal
|
| 55 |
+
|
| 56 |
+
from git.types import PathLike
|
| 57 |
+
|
| 58 |
+
if TYPE_CHECKING:
|
| 59 |
+
from git.refs import SymbolicReference
|
| 60 |
+
from git.repo import Repo
|
| 61 |
+
|
| 62 |
+
# ------------------------------------------------------------------------
|
| 63 |
+
|
| 64 |
+
_logger = logging.getLogger(__name__)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
|
| 68 |
+
"""Wraps a git commit object.
|
| 69 |
+
|
| 70 |
+
See :manpage:`gitglossary(7)` on "commit object":
|
| 71 |
+
https://git-scm.com/docs/gitglossary#def_commit_object
|
| 72 |
+
|
| 73 |
+
:note:
|
| 74 |
+
This class will act lazily on some of its attributes and will query the value on
|
| 75 |
+
demand only if it involves calling the git binary.
|
| 76 |
+
"""
|
| 77 |
+
|
| 78 |
+
# ENVIRONMENT VARIABLES
|
| 79 |
+
# Read when creating new commits.
|
| 80 |
+
env_author_date = "GIT_AUTHOR_DATE"
|
| 81 |
+
env_committer_date = "GIT_COMMITTER_DATE"
|
| 82 |
+
|
| 83 |
+
# CONFIGURATION KEYS
|
| 84 |
+
conf_encoding = "i18n.commitencoding"
|
| 85 |
+
|
| 86 |
+
# INVARIANTS
|
| 87 |
+
default_encoding = "UTF-8"
|
| 88 |
+
|
| 89 |
+
type: Literal["commit"] = "commit"
|
| 90 |
+
|
| 91 |
+
__slots__ = (
|
| 92 |
+
"tree",
|
| 93 |
+
"author",
|
| 94 |
+
"authored_date",
|
| 95 |
+
"author_tz_offset",
|
| 96 |
+
"committer",
|
| 97 |
+
"committed_date",
|
| 98 |
+
"committer_tz_offset",
|
| 99 |
+
"message",
|
| 100 |
+
"parents",
|
| 101 |
+
"encoding",
|
| 102 |
+
"gpgsig",
|
| 103 |
+
)
|
| 104 |
+
|
| 105 |
+
_id_attribute_ = "hexsha"
|
| 106 |
+
|
| 107 |
+
parents: Sequence["Commit"]
|
| 108 |
+
|
| 109 |
+
def __init__(
|
| 110 |
+
self,
|
| 111 |
+
repo: "Repo",
|
| 112 |
+
binsha: bytes,
|
| 113 |
+
tree: Union[Tree, None] = None,
|
| 114 |
+
author: Union[Actor, None] = None,
|
| 115 |
+
authored_date: Union[int, None] = None,
|
| 116 |
+
author_tz_offset: Union[None, float] = None,
|
| 117 |
+
committer: Union[Actor, None] = None,
|
| 118 |
+
committed_date: Union[int, None] = None,
|
| 119 |
+
committer_tz_offset: Union[None, float] = None,
|
| 120 |
+
message: Union[str, bytes, None] = None,
|
| 121 |
+
parents: Union[Sequence["Commit"], None] = None,
|
| 122 |
+
encoding: Union[str, None] = None,
|
| 123 |
+
gpgsig: Union[str, None] = None,
|
| 124 |
+
) -> None:
|
| 125 |
+
"""Instantiate a new :class:`Commit`. All keyword arguments taking ``None`` as
|
| 126 |
+
default will be implicitly set on first query.
|
| 127 |
+
|
| 128 |
+
:param binsha:
|
| 129 |
+
20 byte sha1.
|
| 130 |
+
|
| 131 |
+
:param tree:
|
| 132 |
+
A :class:`~git.objects.tree.Tree` object.
|
| 133 |
+
|
| 134 |
+
:param author:
|
| 135 |
+
The author :class:`~git.util.Actor` object.
|
| 136 |
+
|
| 137 |
+
:param authored_date: int_seconds_since_epoch
|
| 138 |
+
The authored DateTime - use :func:`time.gmtime` to convert it into a
|
| 139 |
+
different format.
|
| 140 |
+
|
| 141 |
+
:param author_tz_offset: int_seconds_west_of_utc
|
| 142 |
+
The timezone that the `authored_date` is in.
|
| 143 |
+
|
| 144 |
+
:param committer:
|
| 145 |
+
The committer string, as an :class:`~git.util.Actor` object.
|
| 146 |
+
|
| 147 |
+
:param committed_date: int_seconds_since_epoch
|
| 148 |
+
The committed DateTime - use :func:`time.gmtime` to convert it into a
|
| 149 |
+
different format.
|
| 150 |
+
|
| 151 |
+
:param committer_tz_offset: int_seconds_west_of_utc
|
| 152 |
+
The timezone that the `committed_date` is in.
|
| 153 |
+
|
| 154 |
+
:param message: string
|
| 155 |
+
The commit message.
|
| 156 |
+
|
| 157 |
+
:param encoding: string
|
| 158 |
+
Encoding of the message, defaults to UTF-8.
|
| 159 |
+
|
| 160 |
+
:param parents:
|
| 161 |
+
List or tuple of :class:`Commit` objects which are our parent(s) in the
|
| 162 |
+
commit dependency graph.
|
| 163 |
+
|
| 164 |
+
:return:
|
| 165 |
+
:class:`Commit`
|
| 166 |
+
|
| 167 |
+
:note:
|
| 168 |
+
Timezone information is in the same format and in the same sign as what
|
| 169 |
+
:func:`time.altzone` returns. The sign is inverted compared to git's UTC
|
| 170 |
+
timezone.
|
| 171 |
+
"""
|
| 172 |
+
super().__init__(repo, binsha)
|
| 173 |
+
self.binsha = binsha
|
| 174 |
+
if tree is not None:
|
| 175 |
+
assert isinstance(tree, Tree), "Tree needs to be a Tree instance, was %s" % type(tree)
|
| 176 |
+
if tree is not None:
|
| 177 |
+
self.tree = tree
|
| 178 |
+
if author is not None:
|
| 179 |
+
self.author = author
|
| 180 |
+
if authored_date is not None:
|
| 181 |
+
self.authored_date = authored_date
|
| 182 |
+
if author_tz_offset is not None:
|
| 183 |
+
self.author_tz_offset = author_tz_offset
|
| 184 |
+
if committer is not None:
|
| 185 |
+
self.committer = committer
|
| 186 |
+
if committed_date is not None:
|
| 187 |
+
self.committed_date = committed_date
|
| 188 |
+
if committer_tz_offset is not None:
|
| 189 |
+
self.committer_tz_offset = committer_tz_offset
|
| 190 |
+
if message is not None:
|
| 191 |
+
self.message = message
|
| 192 |
+
if parents is not None:
|
| 193 |
+
self.parents = parents
|
| 194 |
+
if encoding is not None:
|
| 195 |
+
self.encoding = encoding
|
| 196 |
+
if gpgsig is not None:
|
| 197 |
+
self.gpgsig = gpgsig
|
| 198 |
+
|
| 199 |
+
@classmethod
|
| 200 |
+
def _get_intermediate_items(cls, commit: "Commit") -> Tuple["Commit", ...]:
|
| 201 |
+
return tuple(commit.parents)
|
| 202 |
+
|
| 203 |
+
@classmethod
|
| 204 |
+
def _calculate_sha_(cls, repo: "Repo", commit: "Commit") -> bytes:
|
| 205 |
+
"""Calculate the sha of a commit.
|
| 206 |
+
|
| 207 |
+
:param repo:
|
| 208 |
+
:class:`~git.repo.base.Repo` object the commit should be part of.
|
| 209 |
+
|
| 210 |
+
:param commit:
|
| 211 |
+
:class:`Commit` object for which to generate the sha.
|
| 212 |
+
"""
|
| 213 |
+
|
| 214 |
+
stream = BytesIO()
|
| 215 |
+
commit._serialize(stream)
|
| 216 |
+
streamlen = stream.tell()
|
| 217 |
+
stream.seek(0)
|
| 218 |
+
|
| 219 |
+
istream = repo.odb.store(IStream(cls.type, streamlen, stream))
|
| 220 |
+
return istream.binsha
|
| 221 |
+
|
| 222 |
+
def replace(self, **kwargs: Any) -> "Commit":
|
| 223 |
+
"""Create new commit object from an existing commit object.
|
| 224 |
+
|
| 225 |
+
Any values provided as keyword arguments will replace the corresponding
|
| 226 |
+
attribute in the new object.
|
| 227 |
+
"""
|
| 228 |
+
|
| 229 |
+
attrs = {k: getattr(self, k) for k in self.__slots__}
|
| 230 |
+
|
| 231 |
+
for attrname in kwargs:
|
| 232 |
+
if attrname not in self.__slots__:
|
| 233 |
+
raise ValueError("invalid attribute name")
|
| 234 |
+
|
| 235 |
+
attrs.update(kwargs)
|
| 236 |
+
new_commit = self.__class__(self.repo, self.NULL_BIN_SHA, **attrs)
|
| 237 |
+
new_commit.binsha = self._calculate_sha_(self.repo, new_commit)
|
| 238 |
+
|
| 239 |
+
return new_commit
|
| 240 |
+
|
| 241 |
+
def _set_cache_(self, attr: str) -> None:
|
| 242 |
+
if attr in Commit.__slots__:
|
| 243 |
+
# Read the data in a chunk, its faster - then provide a file wrapper.
|
| 244 |
+
_binsha, _typename, self.size, stream = self.repo.odb.stream(self.binsha)
|
| 245 |
+
self._deserialize(BytesIO(stream.read()))
|
| 246 |
+
else:
|
| 247 |
+
super()._set_cache_(attr)
|
| 248 |
+
# END handle attrs
|
| 249 |
+
|
| 250 |
+
@property
|
| 251 |
+
def authored_datetime(self) -> datetime.datetime:
|
| 252 |
+
return from_timestamp(self.authored_date, self.author_tz_offset)
|
| 253 |
+
|
| 254 |
+
@property
|
| 255 |
+
def committed_datetime(self) -> datetime.datetime:
|
| 256 |
+
return from_timestamp(self.committed_date, self.committer_tz_offset)
|
| 257 |
+
|
| 258 |
+
@property
|
| 259 |
+
def summary(self) -> Union[str, bytes]:
|
| 260 |
+
""":return: First line of the commit message"""
|
| 261 |
+
if isinstance(self.message, str):
|
| 262 |
+
return self.message.split("\n", 1)[0]
|
| 263 |
+
else:
|
| 264 |
+
return self.message.split(b"\n", 1)[0]
|
| 265 |
+
|
| 266 |
+
def count(self, paths: Union[PathLike, Sequence[PathLike]] = "", **kwargs: Any) -> int:
|
| 267 |
+
"""Count the number of commits reachable from this commit.
|
| 268 |
+
|
| 269 |
+
:param paths:
|
| 270 |
+
An optional path or a list of paths restricting the return value to commits
|
| 271 |
+
actually containing the paths.
|
| 272 |
+
|
| 273 |
+
:param kwargs:
|
| 274 |
+
Additional options to be passed to :manpage:`git-rev-list(1)`. They must not
|
| 275 |
+
alter the output style of the command, or parsing will yield incorrect
|
| 276 |
+
results.
|
| 277 |
+
|
| 278 |
+
:return:
|
| 279 |
+
An int defining the number of reachable commits
|
| 280 |
+
"""
|
| 281 |
+
# Yes, it makes a difference whether empty paths are given or not in our case as
|
| 282 |
+
# the empty paths version will ignore merge commits for some reason.
|
| 283 |
+
if paths:
|
| 284 |
+
return len(self.repo.git.rev_list(self.hexsha, "--", paths, **kwargs).splitlines())
|
| 285 |
+
return len(self.repo.git.rev_list(self.hexsha, **kwargs).splitlines())
|
| 286 |
+
|
| 287 |
+
@property
|
| 288 |
+
def name_rev(self) -> str:
|
| 289 |
+
"""
|
| 290 |
+
:return:
|
| 291 |
+
String describing the commits hex sha based on the closest
|
| 292 |
+
`~git.refs.reference.Reference`.
|
| 293 |
+
|
| 294 |
+
:note:
|
| 295 |
+
Mostly useful for UI purposes.
|
| 296 |
+
"""
|
| 297 |
+
return self.repo.git.name_rev(self)
|
| 298 |
+
|
| 299 |
+
@classmethod
|
| 300 |
+
def iter_items(
|
| 301 |
+
cls,
|
| 302 |
+
repo: "Repo",
|
| 303 |
+
rev: Union[str, "Commit", "SymbolicReference"],
|
| 304 |
+
paths: Union[PathLike, Sequence[PathLike]] = "",
|
| 305 |
+
**kwargs: Any,
|
| 306 |
+
) -> Iterator["Commit"]:
|
| 307 |
+
R"""Find all commits matching the given criteria.
|
| 308 |
+
|
| 309 |
+
:param repo:
|
| 310 |
+
The :class:`~git.repo.base.Repo`.
|
| 311 |
+
|
| 312 |
+
:param rev:
|
| 313 |
+
Revision specifier. See :manpage:`git-rev-parse(1)` for viable options.
|
| 314 |
+
|
| 315 |
+
:param paths:
|
| 316 |
+
An optional path or list of paths. If set only :class:`Commit`\s that
|
| 317 |
+
include the path or paths will be considered.
|
| 318 |
+
|
| 319 |
+
:param kwargs:
|
| 320 |
+
Optional keyword arguments to :manpage:`git-rev-list(1)` where:
|
| 321 |
+
|
| 322 |
+
* ``max_count`` is the maximum number of commits to fetch.
|
| 323 |
+
* ``skip`` is the number of commits to skip.
|
| 324 |
+
* ``since`` selects all commits since some date, e.g. ``"1970-01-01"``.
|
| 325 |
+
|
| 326 |
+
:return:
|
| 327 |
+
Iterator yielding :class:`Commit` items.
|
| 328 |
+
"""
|
| 329 |
+
if "pretty" in kwargs:
|
| 330 |
+
raise ValueError("--pretty cannot be used as parsing expects single sha's only")
|
| 331 |
+
# END handle pretty
|
| 332 |
+
|
| 333 |
+
# Use -- in all cases, to prevent possibility of ambiguous arguments.
|
| 334 |
+
# See https://github.com/gitpython-developers/GitPython/issues/264.
|
| 335 |
+
|
| 336 |
+
args_list: List[PathLike] = ["--"]
|
| 337 |
+
|
| 338 |
+
if paths:
|
| 339 |
+
paths_tup: Tuple[PathLike, ...]
|
| 340 |
+
if isinstance(paths, (str, os.PathLike)):
|
| 341 |
+
paths_tup = (paths,)
|
| 342 |
+
else:
|
| 343 |
+
paths_tup = tuple(paths)
|
| 344 |
+
|
| 345 |
+
args_list.extend(paths_tup)
|
| 346 |
+
# END if paths
|
| 347 |
+
|
| 348 |
+
proc = repo.git.rev_list(rev, args_list, as_process=True, **kwargs)
|
| 349 |
+
return cls._iter_from_process_or_stream(repo, proc)
|
| 350 |
+
|
| 351 |
+
def iter_parents(self, paths: Union[PathLike, Sequence[PathLike]] = "", **kwargs: Any) -> Iterator["Commit"]:
|
| 352 |
+
R"""Iterate _all_ parents of this commit.
|
| 353 |
+
|
| 354 |
+
:param paths:
|
| 355 |
+
Optional path or list of paths limiting the :class:`Commit`\s to those that
|
| 356 |
+
contain at least one of the paths.
|
| 357 |
+
|
| 358 |
+
:param kwargs:
|
| 359 |
+
All arguments allowed by :manpage:`git-rev-list(1)`.
|
| 360 |
+
|
| 361 |
+
:return:
|
| 362 |
+
Iterator yielding :class:`Commit` objects which are parents of ``self``
|
| 363 |
+
"""
|
| 364 |
+
# skip ourselves
|
| 365 |
+
skip = kwargs.get("skip", 1)
|
| 366 |
+
if skip == 0: # skip ourselves
|
| 367 |
+
skip = 1
|
| 368 |
+
kwargs["skip"] = skip
|
| 369 |
+
|
| 370 |
+
return self.iter_items(self.repo, self, paths, **kwargs)
|
| 371 |
+
|
| 372 |
+
@property
|
| 373 |
+
def stats(self) -> Stats:
|
| 374 |
+
"""Create a git stat from changes between this commit and its first parent
|
| 375 |
+
or from all changes done if this is the very first commit.
|
| 376 |
+
|
| 377 |
+
:return:
|
| 378 |
+
:class:`Stats`
|
| 379 |
+
"""
|
| 380 |
+
if not self.parents:
|
| 381 |
+
text = self.repo.git.diff_tree(self.hexsha, "--", numstat=True, no_renames=True, root=True)
|
| 382 |
+
text2 = ""
|
| 383 |
+
for line in text.splitlines()[1:]:
|
| 384 |
+
(insertions, deletions, filename) = line.split("\t")
|
| 385 |
+
text2 += "%s\t%s\t%s\n" % (insertions, deletions, filename)
|
| 386 |
+
text = text2
|
| 387 |
+
else:
|
| 388 |
+
text = self.repo.git.diff(self.parents[0].hexsha, self.hexsha, "--", numstat=True, no_renames=True)
|
| 389 |
+
return Stats._list_from_string(self.repo, text)
|
| 390 |
+
|
| 391 |
+
@property
|
| 392 |
+
def trailers(self) -> Dict[str, str]:
|
| 393 |
+
"""Deprecated. Get the trailers of the message as a dictionary.
|
| 394 |
+
|
| 395 |
+
:note:
|
| 396 |
+
This property is deprecated, please use either :attr:`trailers_list` or
|
| 397 |
+
:attr:`trailers_dict`.
|
| 398 |
+
|
| 399 |
+
:return:
|
| 400 |
+
Dictionary containing whitespace stripped trailer information.
|
| 401 |
+
Only contains the latest instance of each trailer key.
|
| 402 |
+
"""
|
| 403 |
+
warnings.warn(
|
| 404 |
+
"Commit.trailers is deprecated, use Commit.trailers_list or Commit.trailers_dict instead",
|
| 405 |
+
DeprecationWarning,
|
| 406 |
+
stacklevel=2,
|
| 407 |
+
)
|
| 408 |
+
return {k: v[0] for k, v in self.trailers_dict.items()}
|
| 409 |
+
|
| 410 |
+
@property
|
| 411 |
+
def trailers_list(self) -> List[Tuple[str, str]]:
|
| 412 |
+
"""Get the trailers of the message as a list.
|
| 413 |
+
|
| 414 |
+
Git messages can contain trailer information that are similar to :rfc:`822`
|
| 415 |
+
e-mail headers. See :manpage:`git-interpret-trailers(1)`.
|
| 416 |
+
|
| 417 |
+
This function calls ``git interpret-trailers --parse`` onto the message to
|
| 418 |
+
extract the trailer information, returns the raw trailer data as a list.
|
| 419 |
+
|
| 420 |
+
Valid message with trailer::
|
| 421 |
+
|
| 422 |
+
Subject line
|
| 423 |
+
|
| 424 |
+
some body information
|
| 425 |
+
|
| 426 |
+
another information
|
| 427 |
+
|
| 428 |
+
key1: value1.1
|
| 429 |
+
key1: value1.2
|
| 430 |
+
key2 : value 2 with inner spaces
|
| 431 |
+
|
| 432 |
+
Returned list will look like this::
|
| 433 |
+
|
| 434 |
+
[
|
| 435 |
+
("key1", "value1.1"),
|
| 436 |
+
("key1", "value1.2"),
|
| 437 |
+
("key2", "value 2 with inner spaces"),
|
| 438 |
+
]
|
| 439 |
+
|
| 440 |
+
:return:
|
| 441 |
+
List containing key-value tuples of whitespace stripped trailer information.
|
| 442 |
+
"""
|
| 443 |
+
cmd = ["git", "interpret-trailers", "--parse"]
|
| 444 |
+
proc: Git.AutoInterrupt = self.repo.git.execute( # type: ignore[call-overload]
|
| 445 |
+
cmd,
|
| 446 |
+
as_process=True,
|
| 447 |
+
istream=PIPE,
|
| 448 |
+
)
|
| 449 |
+
trailer: str = proc.communicate(str(self.message).encode())[0].decode("utf8")
|
| 450 |
+
trailer = trailer.strip()
|
| 451 |
+
|
| 452 |
+
if not trailer:
|
| 453 |
+
return []
|
| 454 |
+
|
| 455 |
+
trailer_list = []
|
| 456 |
+
for t in trailer.split("\n"):
|
| 457 |
+
key, val = t.split(":", 1)
|
| 458 |
+
trailer_list.append((key.strip(), val.strip()))
|
| 459 |
+
|
| 460 |
+
return trailer_list
|
| 461 |
+
|
| 462 |
+
@property
|
| 463 |
+
def trailers_dict(self) -> Dict[str, List[str]]:
|
| 464 |
+
"""Get the trailers of the message as a dictionary.
|
| 465 |
+
|
| 466 |
+
Git messages can contain trailer information that are similar to :rfc:`822`
|
| 467 |
+
e-mail headers. See :manpage:`git-interpret-trailers(1)`.
|
| 468 |
+
|
| 469 |
+
This function calls ``git interpret-trailers --parse`` onto the message to
|
| 470 |
+
extract the trailer information. The key value pairs are stripped of leading and
|
| 471 |
+
trailing whitespaces before they get saved into a dictionary.
|
| 472 |
+
|
| 473 |
+
Valid message with trailer::
|
| 474 |
+
|
| 475 |
+
Subject line
|
| 476 |
+
|
| 477 |
+
some body information
|
| 478 |
+
|
| 479 |
+
another information
|
| 480 |
+
|
| 481 |
+
key1: value1.1
|
| 482 |
+
key1: value1.2
|
| 483 |
+
key2 : value 2 with inner spaces
|
| 484 |
+
|
| 485 |
+
Returned dictionary will look like this::
|
| 486 |
+
|
| 487 |
+
{
|
| 488 |
+
"key1": ["value1.1", "value1.2"],
|
| 489 |
+
"key2": ["value 2 with inner spaces"],
|
| 490 |
+
}
|
| 491 |
+
|
| 492 |
+
|
| 493 |
+
:return:
|
| 494 |
+
Dictionary containing whitespace stripped trailer information, mapping
|
| 495 |
+
trailer keys to a list of their corresponding values.
|
| 496 |
+
"""
|
| 497 |
+
d = defaultdict(list)
|
| 498 |
+
for key, val in self.trailers_list:
|
| 499 |
+
d[key].append(val)
|
| 500 |
+
return dict(d)
|
| 501 |
+
|
| 502 |
+
@classmethod
|
| 503 |
+
def _iter_from_process_or_stream(cls, repo: "Repo", proc_or_stream: Union[Popen, IO]) -> Iterator["Commit"]:
|
| 504 |
+
"""Parse out commit information into a list of :class:`Commit` objects.
|
| 505 |
+
|
| 506 |
+
We expect one line per commit, and parse the actual commit information directly
|
| 507 |
+
from our lighting fast object database.
|
| 508 |
+
|
| 509 |
+
:param proc:
|
| 510 |
+
:manpage:`git-rev-list(1)` process instance - one sha per line.
|
| 511 |
+
|
| 512 |
+
:return:
|
| 513 |
+
Iterator supplying :class:`Commit` objects
|
| 514 |
+
"""
|
| 515 |
+
|
| 516 |
+
# def is_proc(inp) -> TypeGuard[Popen]:
|
| 517 |
+
# return hasattr(proc_or_stream, 'wait') and not hasattr(proc_or_stream, 'readline')
|
| 518 |
+
|
| 519 |
+
# def is_stream(inp) -> TypeGuard[IO]:
|
| 520 |
+
# return hasattr(proc_or_stream, 'readline')
|
| 521 |
+
|
| 522 |
+
if hasattr(proc_or_stream, "wait"):
|
| 523 |
+
proc_or_stream = cast(Popen, proc_or_stream)
|
| 524 |
+
if proc_or_stream.stdout is not None:
|
| 525 |
+
stream = proc_or_stream.stdout
|
| 526 |
+
elif hasattr(proc_or_stream, "readline"):
|
| 527 |
+
proc_or_stream = cast(IO, proc_or_stream) # type: ignore[redundant-cast]
|
| 528 |
+
stream = proc_or_stream
|
| 529 |
+
|
| 530 |
+
readline = stream.readline
|
| 531 |
+
while True:
|
| 532 |
+
line = readline()
|
| 533 |
+
if not line:
|
| 534 |
+
break
|
| 535 |
+
hexsha = line.strip()
|
| 536 |
+
if len(hexsha) > 40:
|
| 537 |
+
# Split additional information, as returned by bisect for instance.
|
| 538 |
+
hexsha, _ = line.split(None, 1)
|
| 539 |
+
# END handle extra info
|
| 540 |
+
|
| 541 |
+
assert len(hexsha) == 40, "Invalid line: %s" % hexsha
|
| 542 |
+
yield cls(repo, hex_to_bin(hexsha))
|
| 543 |
+
# END for each line in stream
|
| 544 |
+
|
| 545 |
+
# TODO: Review this - it seems process handling got a bit out of control due to
|
| 546 |
+
# many developers trying to fix the open file handles issue.
|
| 547 |
+
if hasattr(proc_or_stream, "wait"):
|
| 548 |
+
proc_or_stream = cast(Popen, proc_or_stream)
|
| 549 |
+
finalize_process(proc_or_stream)
|
| 550 |
+
|
| 551 |
+
@classmethod
def create_from_tree(
    cls,
    repo: "Repo",
    tree: Union[Tree, str],
    message: str,
    parent_commits: Union[None, List["Commit"]] = None,
    head: bool = False,
    author: Union[None, Actor] = None,
    committer: Union[None, Actor] = None,
    author_date: Union[None, str, datetime.datetime] = None,
    commit_date: Union[None, str, datetime.datetime] = None,
) -> "Commit":
    """Commit the given tree, creating a :class:`Commit` object.

    :param repo:
        :class:`~git.repo.base.Repo` object the commit should be part of.

    :param tree:
        :class:`~git.objects.tree.Tree` object or hex or bin sha.
        The tree of the new commit.

    :param message:
        Commit message. It may be an empty string if no message is provided. It will
        be converted to a string, in any case.

    :param parent_commits:
        Optional :class:`Commit` objects to use as parents for the new commit. If
        empty list, the commit will have no parents at all and become a root commit.
        If ``None``, the current head commit will be the parent of the new commit
        object.

    :param head:
        If ``True``, the HEAD will be advanced to the new commit automatically.
        Otherwise the HEAD will remain pointing on the previous commit. This could
        lead to undesired results when diffing files.

    :param author:
        The name of the author, optional.
        If unset, the repository configuration is used to obtain this value.

    :param committer:
        The name of the committer, optional.
        If unset, the repository configuration is used to obtain this value.

    :param author_date:
        The timestamp for the author field.

    :param commit_date:
        The timestamp for the committer field.

    :return:
        :class:`Commit` object representing the new commit.

    :note:
        Additional information about the committer and author are taken from the
        environment or from the git configuration. See :manpage:`git-commit-tree(1)`
        for more information.
    """
    if parent_commits is None:
        try:
            parent_commits = [repo.head.commit]
        except ValueError:
            # Empty repositories have no head commit.
            parent_commits = []
        # END handle parent commits
    else:
        # Explicit parents were supplied - validate their type early so we fail
        # before touching config or the object database.
        for p in parent_commits:
            if not isinstance(p, cls):
                raise ValueError(f"Parent commit '{p!r}' must be of type {cls}")
        # END check parent commit types
    # END if parent commits are unset

    # Retrieve all additional information, create a commit object, and serialize it.
    # Generally:
    # * Environment variables override configuration values.
    # * Sensible defaults are set according to the git documentation.

    # COMMITTER AND AUTHOR INFO
    cr = repo.config_reader()
    env = os.environ

    committer = committer or Actor.committer(cr)
    author = author or Actor.author(cr)

    # PARSE THE DATES
    # Default timestamp is "now" in the local timezone, honoring DST.
    unix_time = int(time())
    is_dst = daylight and localtime().tm_isdst > 0
    offset = altzone if is_dst else timezone

    # Precedence: explicit argument > environment variable > current time.
    author_date_str = env.get(cls.env_author_date, "")
    if author_date:
        author_time, author_offset = parse_date(author_date)
    elif author_date_str:
        author_time, author_offset = parse_date(author_date_str)
    else:
        author_time, author_offset = unix_time, offset
    # END set author time

    committer_date_str = env.get(cls.env_committer_date, "")
    if commit_date:
        committer_time, committer_offset = parse_date(commit_date)
    elif committer_date_str:
        committer_time, committer_offset = parse_date(committer_date_str)
    else:
        committer_time, committer_offset = unix_time, offset
    # END set committer time

    # Assume UTF-8 encoding.
    enc_section, enc_option = cls.conf_encoding.split(".")
    conf_encoding = cr.get_value(enc_section, enc_option, cls.default_encoding)
    if not isinstance(conf_encoding, str):
        raise TypeError("conf_encoding could not be coerced to str")

    # If the tree is no object, make sure we create one - otherwise the created
    # commit object is invalid.
    if isinstance(tree, str):
        tree = repo.tree(tree)
    # END tree conversion

    # CREATE NEW COMMIT
    # The commit starts out with a null sha; the real sha is computed below once
    # the object content is known.
    new_commit = cls(
        repo,
        cls.NULL_BIN_SHA,
        tree,
        author,
        author_time,
        author_offset,
        committer,
        committer_time,
        committer_offset,
        message,
        parent_commits,
        conf_encoding,
    )

    new_commit.binsha = cls._calculate_sha_(repo, new_commit)

    if head:
        # Need late import here, importing git at the very beginning throws as
        # well...
        import git.refs

        try:
            repo.head.set_commit(new_commit, logmsg=message)
        except ValueError:
            # head is not yet set to the ref our HEAD points to.
            # Happens on first commit.
            master = git.refs.Head.create(
                repo,
                repo.head.ref,
                new_commit,
                logmsg="commit (initial): %s" % message,
            )
            repo.head.set_reference(master, logmsg="commit: Switching to %s" % master)
        # END handle empty repositories
        # END advance head handling

    return new_commit
|
| 710 |
+
|
| 711 |
+
# { Serializable Implementation
|
| 712 |
+
|
| 713 |
+
def _serialize(self, stream: BytesIO) -> "Commit":
    """Serialize this commit into `stream` in git commit-object format.

    :param stream:
        Stream to receive the raw bytes (``tree``/``parent`` headers, author,
        committer, optional ``encoding`` and ``gpgsig`` headers, blank line,
        then the message).

    :return:
        self
    """
    write = stream.write
    # Header: the tree sha, then one "parent" line per parent commit.
    write(("tree %s\n" % self.tree).encode("ascii"))
    for p in self.parents:
        write(("parent %s\n" % p).encode("ascii"))

    a = self.author
    aname = a.name
    c = self.committer
    fmt = "%s %s <%s> %s %s\n"
    # Author/committer lines use this commit's own encoding, as names and emails
    # may contain non-ASCII characters.
    write(
        (
            fmt
            % (
                "author",
                aname,
                a.email,
                self.authored_date,
                altz_to_utctz_str(self.author_tz_offset),
            )
        ).encode(self.encoding)
    )

    # Encode committer.
    aname = c.name
    write(
        (
            fmt
            % (
                "committer",
                aname,
                c.email,
                self.committed_date,
                altz_to_utctz_str(self.committer_tz_offset),
            )
        ).encode(self.encoding)
    )

    # The encoding header is only written when it differs from the default.
    if self.encoding != self.default_encoding:
        write(("encoding %s\n" % self.encoding).encode("ascii"))

    try:
        # __getattribute__ raises AttributeError if no signature was ever set;
        # an empty signature is skipped via the truthiness check.
        if self.__getattribute__("gpgsig"):
            write(b"gpgsig")
            # Continuation lines of the signature are prefixed with one space.
            for sigline in self.gpgsig.rstrip("\n").split("\n"):
                write((" " + sigline + "\n").encode("ascii"))
    except AttributeError:
        pass

    # Blank line separates the headers from the commit message.
    write(b"\n")

    # Write plain bytes, be sure its encoded according to our encoding.
    if isinstance(self.message, str):
        write(self.message.encode(self.encoding))
    else:
        write(self.message)
    # END handle encoding
    return self
|
| 771 |
+
|
| 772 |
+
def _deserialize(self, stream: BytesIO) -> "Commit":
    """Deserialize this commit from raw commit-object bytes in `stream`.

    Parses the ``tree`` and ``parent`` headers, author/committer lines, skips
    ``mergetag`` blocks, reads optional ``encoding`` and ``gpgsig`` headers, and
    finally reads the remainder of the stream as the commit message.

    :param stream:
        Stream positioned at the start of the commit object content.

    :return:
        self
    """
    readline = stream.readline
    # First line: "tree <hexsha>".
    self.tree = Tree(self.repo, hex_to_bin(readline().split()[1]), Tree.tree_id << 12, "")

    # Zero or more "parent <hexsha>" lines follow.
    self.parents = []
    next_line = None
    while True:
        parent_line = readline()
        if not parent_line.startswith(b"parent"):
            next_line = parent_line
            break
        # END abort reading parents
        self.parents.append(type(self)(self.repo, hex_to_bin(parent_line.split()[-1].decode("ascii"))))
    # END for each parent line
    self.parents = tuple(self.parents)

    # We don't know actual author encoding before we have parsed it, so keep the
    # lines around.
    author_line = next_line
    committer_line = readline()

    # We might run into one or more mergetag blocks, skip those for now.
    next_line = readline()
    while next_line.startswith(b"mergetag "):
        next_line = readline()
        while next_line.startswith(b" "):
            next_line = readline()
    # END skip mergetags

    # Now we can have the encoding line, or an empty line followed by the optional
    # message.
    self.encoding = self.default_encoding
    self.gpgsig = ""

    # Read headers.
    enc = next_line
    buf = enc.strip()
    while buf:
        if buf.startswith(b"encoding "):
            # BUGFIX: this previously compared the 10-byte slice buf[0:10] against
            # the 9-byte literal b"encoding ", which can never be equal (buf is
            # stripped, so it cannot end in a space), silently discarding the
            # encoding header. Use startswith() so the header is recognized.
            self.encoding = buf[buf.find(b" ") + 1 :].decode(self.encoding, "ignore")
        elif buf.startswith(b"gpgsig "):
            sig = buf[buf.find(b" ") + 1 :] + b"\n"
            is_next_header = False
            # Signature continuation lines start with a single space.
            while True:
                sigbuf = readline()
                if not sigbuf:
                    break
                if sigbuf[0:1] != b" ":
                    # Not a continuation - this line is the next header.
                    buf = sigbuf.strip()
                    is_next_header = True
                    break
                sig += sigbuf[1:]
            # END read all signature
            self.gpgsig = sig.rstrip(b"\n").decode(self.encoding, "ignore")
            if is_next_header:
                continue
        buf = readline().strip()

    # Decode the author's name.
    try:
        (
            self.author,
            self.authored_date,
            self.author_tz_offset,
        ) = parse_actor_and_date(author_line.decode(self.encoding, "replace"))
    except UnicodeDecodeError:
        _logger.error(
            "Failed to decode author line '%s' using encoding %s",
            author_line,
            self.encoding,
            exc_info=True,
        )

    try:
        (
            self.committer,
            self.committed_date,
            self.committer_tz_offset,
        ) = parse_actor_and_date(committer_line.decode(self.encoding, "replace"))
    except UnicodeDecodeError:
        _logger.error(
            "Failed to decode committer line '%s' using encoding %s",
            committer_line,
            self.encoding,
            exc_info=True,
        )
    # END handle author's encoding

    # A stream from our data simply gives us the plain message.
    # The end of our message stream is marked with a newline that we strip.
    self.message = stream.read()
    try:
        self.message = self.message.decode(self.encoding, "replace")
    except UnicodeDecodeError:
        _logger.error(
            "Failed to decode message '%s' using encoding %s",
            self.message,
            self.encoding,
            exc_info=True,
        )
    # END exception handling

    return self
|
| 875 |
+
|
| 876 |
+
# } END serializable implementation
|
| 877 |
+
|
| 878 |
+
@property
def co_authors(self) -> "List[Actor]":
    """Search the commit message for any co-authors of this commit.

    Details on co-authors:
    https://github.blog/2018-01-29-commit-together-with-co-authors/

    :return:
        List of co-authors for this commit (as :class:`~git.util.Actor` objects).
    """
    if not self.message:
        return []
    # Each "Co-authored-by" trailer yields a (name, email) tuple.
    matches = re.findall(
        r"^Co-authored-by: (.*) <(.*?)>$",
        self.message,
        re.MULTILINE,
    )
    return [Actor(name, email) for name, email in matches]
|
parrot/lib/python3.10/site-packages/git/objects/fun.py
ADDED
|
@@ -0,0 +1,281 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is part of GitPython and is released under the
|
| 2 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 3 |
+
|
| 4 |
+
"""Functions that are supposed to be as fast as possible."""
|
| 5 |
+
|
| 6 |
+
__all__ = [
|
| 7 |
+
"tree_to_stream",
|
| 8 |
+
"tree_entries_from_data",
|
| 9 |
+
"traverse_trees_recursive",
|
| 10 |
+
"traverse_tree_recursive",
|
| 11 |
+
]
|
| 12 |
+
|
| 13 |
+
from stat import S_ISDIR
|
| 14 |
+
|
| 15 |
+
from git.compat import safe_decode, defenc
|
| 16 |
+
|
| 17 |
+
# typing ----------------------------------------------
|
| 18 |
+
|
| 19 |
+
from typing import (
|
| 20 |
+
Callable,
|
| 21 |
+
List,
|
| 22 |
+
MutableSequence,
|
| 23 |
+
Sequence,
|
| 24 |
+
Tuple,
|
| 25 |
+
TYPE_CHECKING,
|
| 26 |
+
Union,
|
| 27 |
+
overload,
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
if TYPE_CHECKING:
|
| 31 |
+
from _typeshed import ReadableBuffer
|
| 32 |
+
|
| 33 |
+
from git import GitCmdObjectDB
|
| 34 |
+
|
| 35 |
+
EntryTup = Tuple[bytes, int, str] # Same as TreeCacheTup in tree.py.
|
| 36 |
+
EntryTupOrNone = Union[EntryTup, None]
|
| 37 |
+
|
| 38 |
+
# ---------------------------------------------------
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def tree_to_stream(entries: "Sequence[EntryTup]", write: "Callable[[ReadableBuffer], Union[int, None]]") -> None:
    """Write the given list of entries into a stream using its ``write`` method.

    :param entries:
        **Sorted** list of tuples with (binsha, mode, name).

    :param write:
        A ``write`` method which takes a data string.
    """
    zero = ord("0")

    for binsha, mode, name in entries:
        # Render the mode as six octal digit bytes, 3 bits per digit, most
        # significant digit first.
        digits = bytearray(6)
        for pos in range(6):
            digits[5 - pos] = ((mode >> (pos * 3)) & 7) + zero
        # END for each octal digit

        # git slices away the first octal digit if it's zero (e.g. b"040000"
        # becomes b"40000").
        mode_str = bytes(digits[1:]) if digits[0] == zero else bytes(digits)

        # Entry names are stored as UTF-8 bytes; encode unicode names up front so
        # the raw binsha bytes can be appended untouched. This matches what git
        # itself does with the (typically UTF-8) input it receives.
        name_bytes = name.encode(defenc) if isinstance(name, str) else name
        write(b"".join((mode_str, b" ", name_bytes, b"\0", binsha)))
    # END for each entry
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def tree_entries_from_data(data: bytes) -> "List[EntryTup]":
    """Read the binary representation of a tree and return tuples of
    :class:`~git.objects.tree.Tree` items.

    :param data:
        Data block with tree data (as bytes).

    :return:
        list(tuple(binsha, mode, tree_relative_path), ...)
    """
    zero = ord("0")
    space = ord(" ")
    end = len(data)
    pos = 0
    entries = []
    while pos < end:
        # The mode is a run of octal digits terminated by a single space. Some git
        # versions write a leading zero, some don't; accumulating digit by digit
        # handles both.
        mode = 0
        while data[pos] != space:
            mode = (mode << 3) + (data[pos] - zero)
            pos += 1
        # END while reading mode
        pos += 1  # Skip the separating space.

        # The name runs up to a NUL byte; decode it (default git encoding is
        # UTF-8) only when the bytes actually decode.
        name_start = pos
        while data[pos] != 0:
            pos += 1
        # END while not reached NULL
        name = safe_decode(data[name_start:pos])

        # The 20-byte binary sha immediately follows the NUL.
        pos += 1
        entries.append((data[pos : pos + 20], mode, name))
        pos += 20
    # END while more entries remain
    return entries
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def _find_by_name(tree_data: MutableSequence[EntryTupOrNone], name: str, is_dir: bool, start_at: int) -> EntryTupOrNone:
|
| 130 |
+
"""Return data entry matching the given name and tree mode or ``None``.
|
| 131 |
+
|
| 132 |
+
Before the item is returned, the respective data item is set None in the `tree_data`
|
| 133 |
+
list to mark it done.
|
| 134 |
+
"""
|
| 135 |
+
|
| 136 |
+
try:
|
| 137 |
+
item = tree_data[start_at]
|
| 138 |
+
if item and item[2] == name and S_ISDIR(item[1]) == is_dir:
|
| 139 |
+
tree_data[start_at] = None
|
| 140 |
+
return item
|
| 141 |
+
except IndexError:
|
| 142 |
+
pass
|
| 143 |
+
# END exception handling
|
| 144 |
+
for index, item in enumerate(tree_data):
|
| 145 |
+
if item and item[2] == name and S_ISDIR(item[1]) == is_dir:
|
| 146 |
+
tree_data[index] = None
|
| 147 |
+
return item
|
| 148 |
+
# END if item matches
|
| 149 |
+
# END for each item
|
| 150 |
+
return None
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
@overload
|
| 154 |
+
def _to_full_path(item: None, path_prefix: str) -> None: ...
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
@overload
|
| 158 |
+
def _to_full_path(item: EntryTup, path_prefix: str) -> EntryTup: ...
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
def _to_full_path(item: EntryTupOrNone, path_prefix: str) -> EntryTupOrNone:
|
| 162 |
+
"""Rebuild entry with given path prefix."""
|
| 163 |
+
if not item:
|
| 164 |
+
return item
|
| 165 |
+
return (item[0], item[1], path_prefix + item[2])
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
def traverse_trees_recursive(
    odb: "GitCmdObjectDB", tree_shas: Sequence[Union[bytes, None]], path_prefix: str
) -> List[Tuple[EntryTupOrNone, ...]]:
    """
    :return:
        List of list with entries according to the given binary tree-shas.

        The result is encoded in a list
        of n tuple|None per blob/commit, (n == len(tree_shas)), where:

        * [0] == 20 byte sha
        * [1] == mode as int
        * [2] == path relative to working tree root

        The entry tuple is ``None`` if the respective blob/commit did not exist in the
        given tree.

    :param tree_shas:
        Iterable of shas pointing to trees. All trees must be on the same level.
        A tree-sha may be ``None``, in which case ``None`` entries are produced
        in its position.

    :param path_prefix:
        A prefix to be added to the returned paths on this level.
        Set it ``""`` for the first iteration.

    :note:
        The ordering of the returned items will be partially lost.
    """
    # Load the entry list of every tree up front; a None sha yields an empty list.
    trees_data: List[List[EntryTupOrNone]] = []

    nt = len(tree_shas)
    for tree_sha in tree_shas:
        if tree_sha is None:
            data: List[EntryTupOrNone] = []
        else:
            # Make new list for typing as list invariant.
            data = list(tree_entries_from_data(odb.stream(tree_sha).read()))
        # END handle muted trees
        trees_data.append(data)
    # END for each sha to get data for

    out: List[Tuple[EntryTupOrNone, ...]] = []

    # Find all matching entries and recursively process them together if the match is a
    # tree. If the match is a non-tree item, put it into the result.
    # Processed items will be set None.
    for ti, tree_data in enumerate(trees_data):
        for ii, item in enumerate(tree_data):
            if not item:
                continue
            # END skip already done items
            # Build one row with this item in its own tree's slot; the other
            # slots are filled (or left None) by the name search below.
            entries: List[EntryTupOrNone]
            entries = [None for _ in range(nt)]
            entries[ti] = item
            _sha, mode, name = item
            is_dir = S_ISDIR(mode)  # Type mode bits

            # Find this item in all other tree data items.
            # Wrap around, but stop one before our current index, hence ti+nt, not
            # ti+1+nt.
            for tio in range(ti + 1, ti + nt):
                tio = tio % nt
                entries[tio] = _find_by_name(trees_data[tio], name, is_dir, ii)

            # END for each other item data
            # If we are a directory, enter recursion.
            if is_dir:
                out.extend(
                    traverse_trees_recursive(
                        odb,
                        [((ei and ei[0]) or None) for ei in entries],
                        path_prefix + name + "/",
                    )
                )
            else:
                out.append(tuple(_to_full_path(e, path_prefix) for e in entries))

            # END handle recursion
            # Finally mark it done.
            tree_data[ii] = None
        # END for each item

        # We are done with one tree, set all its data empty.
        del tree_data[:]
    # END for each tree_data chunk
    return out
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
def traverse_tree_recursive(odb: "GitCmdObjectDB", tree_sha: bytes, path_prefix: str) -> "List[EntryTup]":
    """
    :return:
        List of entries of the tree pointed to by the binary `tree_sha`.

        An entry has the following format:

        * [0] 20 byte sha
        * [1] mode as int
        * [2] path relative to the repository

    :param path_prefix:
        Prefix to prepend to the front of all returned paths.
    """
    collected: "List[EntryTup]" = []

    # Unpacking/packing is faster than accessing individual items.
    for sha, mode, name in tree_entries_from_data(odb.stream(tree_sha).read()):
        if S_ISDIR(mode):
            # Subtree: descend, extending the path prefix with this directory name.
            collected.extend(traverse_tree_recursive(odb, sha, path_prefix + name + "/"))
        else:
            collected.append((sha, mode, path_prefix + name))
    # END for each entry
    return collected
|
parrot/lib/python3.10/site-packages/git/objects/submodule/__pycache__/base.cpython-310.pyc
ADDED
|
Binary file (40.4 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/objects/submodule/root.py
ADDED
|
@@ -0,0 +1,467 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is part of GitPython and is released under the
|
| 2 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 3 |
+
|
| 4 |
+
__all__ = ["RootModule", "RootUpdateProgress"]
|
| 5 |
+
|
| 6 |
+
import logging
|
| 7 |
+
|
| 8 |
+
import git
|
| 9 |
+
from git.exc import InvalidGitRepositoryError
|
| 10 |
+
|
| 11 |
+
from .base import Submodule, UpdateProgress
|
| 12 |
+
from .util import find_first_remote_branch
|
| 13 |
+
|
| 14 |
+
# typing -------------------------------------------------------------------
|
| 15 |
+
|
| 16 |
+
from typing import TYPE_CHECKING, Union
|
| 17 |
+
|
| 18 |
+
from git.types import Commit_ish
|
| 19 |
+
|
| 20 |
+
if TYPE_CHECKING:
|
| 21 |
+
from git.repo import Repo
|
| 22 |
+
from git.util import IterableList
|
| 23 |
+
|
| 24 |
+
# ----------------------------------------------------------------------------
|
| 25 |
+
|
| 26 |
+
_logger = logging.getLogger(__name__)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class RootUpdateProgress(UpdateProgress):
    """Utility class which adds more opcodes to
    :class:`~git.objects.submodule.base.UpdateProgress`."""

    # Allocate four fresh opcode bits directly above the ones the base class uses.
    REMOVE, PATHCHANGE, BRANCHCHANGE, URLCHANGE = (
        1 << x for x in range(UpdateProgress._num_op_codes, UpdateProgress._num_op_codes + 4)
    )
    _num_op_codes = UpdateProgress._num_op_codes + 4

    __slots__ = ()
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
# Module-level aliases for the progress opcodes, for convenient access.
BEGIN = RootUpdateProgress.BEGIN
END = RootUpdateProgress.END
REMOVE = RootUpdateProgress.REMOVE
BRANCHCHANGE = RootUpdateProgress.BRANCHCHANGE
URLCHANGE = RootUpdateProgress.URLCHANGE
PATHCHANGE = RootUpdateProgress.PATHCHANGE
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class RootModule(Submodule):
|
| 50 |
+
"""A (virtual) root of all submodules in the given repository.
|
| 51 |
+
|
| 52 |
+
This can be used to more easily traverse all submodules of the
|
| 53 |
+
superproject (master repository).
|
| 54 |
+
"""
|
| 55 |
+
|
| 56 |
+
__slots__ = ()
|
| 57 |
+
|
| 58 |
+
k_root_name = "__ROOT__"
|
| 59 |
+
|
| 60 |
+
def __init__(self, repo: "Repo") -> None:
    """Initialize the virtual root submodule for `repo`.

    The root is a placeholder: it is created with a null binary sha, an empty
    path and URL, the fixed name ``__ROOT__``, and the repository's current
    HEAD commit as its parent commit.
    """
    # Base-class signature for reference:
    # repo, binsha, mode=None, path=None, name = None, parent_commit=None, url=None, ref=None)
    super().__init__(
        repo,
        binsha=self.NULL_BIN_SHA,
        mode=self.k_default_mode,
        path="",
        name=self.k_root_name,
        parent_commit=repo.head.commit,
        url="",
        branch_path=git.Head.to_full_path(self.k_head_default),
    )
|
| 72 |
+
|
| 73 |
+
def _clear_cache(self) -> None:
|
| 74 |
+
"""May not do anything."""
|
| 75 |
+
pass
|
| 76 |
+
|
| 77 |
+
# { Interface
|
| 78 |
+
|
| 79 |
+
def update( # type: ignore[override]
|
| 80 |
+
self,
|
| 81 |
+
previous_commit: Union[Commit_ish, str, None] = None,
|
| 82 |
+
recursive: bool = True,
|
| 83 |
+
force_remove: bool = False,
|
| 84 |
+
init: bool = True,
|
| 85 |
+
to_latest_revision: bool = False,
|
| 86 |
+
progress: Union[None, "RootUpdateProgress"] = None,
|
| 87 |
+
dry_run: bool = False,
|
| 88 |
+
force_reset: bool = False,
|
| 89 |
+
keep_going: bool = False,
|
| 90 |
+
) -> "RootModule":
|
| 91 |
+
"""Update the submodules of this repository to the current HEAD commit.
|
| 92 |
+
|
| 93 |
+
This method behaves smartly by determining changes of the path of a submodule's
|
| 94 |
+
repository, next to changes to the to-be-checked-out commit or the branch to be
|
| 95 |
+
checked out. This works if the submodule's ID does not change.
|
| 96 |
+
|
| 97 |
+
Additionally it will detect addition and removal of submodules, which will be
|
| 98 |
+
handled gracefully.
|
| 99 |
+
|
| 100 |
+
:param previous_commit:
|
| 101 |
+
If set to a commit-ish, the commit we should use as the previous commit the
|
| 102 |
+
HEAD pointed to before it was set to the commit it points to now.
|
| 103 |
+
If ``None``, it defaults to ``HEAD@{1}`` otherwise.
|
| 104 |
+
|
| 105 |
+
:param recursive:
|
| 106 |
+
If ``True``, the children of submodules will be updated as well using the
|
| 107 |
+
same technique.
|
| 108 |
+
|
| 109 |
+
:param force_remove:
|
| 110 |
+
If submodules have been deleted, they will be forcibly removed. Otherwise
|
| 111 |
+
the update may fail if a submodule's repository cannot be deleted as changes
|
| 112 |
+
have been made to it.
|
| 113 |
+
(See :meth:`Submodule.update <git.objects.submodule.base.Submodule.update>`
|
| 114 |
+
for more information.)
|
| 115 |
+
|
| 116 |
+
:param init:
|
| 117 |
+
If we encounter a new module which would need to be initialized, then do it.
|
| 118 |
+
|
| 119 |
+
:param to_latest_revision:
|
| 120 |
+
If ``True``, instead of checking out the revision pointed to by this
|
| 121 |
+
submodule's sha, the checked out tracking branch will be merged with the
|
| 122 |
+
latest remote branch fetched from the repository's origin.
|
| 123 |
+
|
| 124 |
+
Unless `force_reset` is specified, a local tracking branch will never be
|
| 125 |
+
reset into its past, therefore the remote branch must be in the future for
|
| 126 |
+
this to have an effect.
|
| 127 |
+
|
| 128 |
+
:param force_reset:
|
| 129 |
+
If ``True``, submodules may checkout or reset their branch even if the
|
| 130 |
+
repository has pending changes that would be overwritten, or if the local
|
| 131 |
+
tracking branch is in the future of the remote tracking branch and would be
|
| 132 |
+
reset into its past.
|
| 133 |
+
|
| 134 |
+
:param progress:
|
| 135 |
+
:class:`RootUpdateProgress` instance, or ``None`` if no progress should be
|
| 136 |
+
sent.
|
| 137 |
+
|
| 138 |
+
:param dry_run:
|
| 139 |
+
If ``True``, operations will not actually be performed. Progress messages
|
| 140 |
+
will change accordingly to indicate the WOULD DO state of the operation.
|
| 141 |
+
|
| 142 |
+
:param keep_going:
|
| 143 |
+
If ``True``, we will ignore but log all errors, and keep going recursively.
|
| 144 |
+
Unless `dry_run` is set as well, `keep_going` could cause
|
| 145 |
+
subsequent/inherited errors you wouldn't see otherwise.
|
| 146 |
+
In conjunction with `dry_run`, this can be useful to anticipate all errors
|
| 147 |
+
when updating submodules.
|
| 148 |
+
|
| 149 |
+
:return:
|
| 150 |
+
self
|
| 151 |
+
"""
|
| 152 |
+
if self.repo.bare:
|
| 153 |
+
raise InvalidGitRepositoryError("Cannot update submodules in bare repositories")
|
| 154 |
+
# END handle bare
|
| 155 |
+
|
| 156 |
+
if progress is None:
|
| 157 |
+
progress = RootUpdateProgress()
|
| 158 |
+
# END ensure progress is set
|
| 159 |
+
|
| 160 |
+
prefix = ""
|
| 161 |
+
if dry_run:
|
| 162 |
+
prefix = "DRY-RUN: "
|
| 163 |
+
|
| 164 |
+
repo = self.repo
|
| 165 |
+
|
| 166 |
+
try:
|
| 167 |
+
# SETUP BASE COMMIT
|
| 168 |
+
###################
|
| 169 |
+
cur_commit = repo.head.commit
|
| 170 |
+
if previous_commit is None:
|
| 171 |
+
try:
|
| 172 |
+
previous_commit = repo.commit(repo.head.log_entry(-1).oldhexsha)
|
| 173 |
+
if previous_commit.binsha == previous_commit.NULL_BIN_SHA:
|
| 174 |
+
raise IndexError
|
| 175 |
+
# END handle initial commit
|
| 176 |
+
except IndexError:
|
| 177 |
+
# In new repositories, there is no previous commit.
|
| 178 |
+
previous_commit = cur_commit
|
| 179 |
+
# END exception handling
|
| 180 |
+
else:
|
| 181 |
+
previous_commit = repo.commit(previous_commit) # Obtain commit object.
|
| 182 |
+
# END handle previous commit
|
| 183 |
+
|
| 184 |
+
psms: "IterableList[Submodule]" = self.list_items(repo, parent_commit=previous_commit)
|
| 185 |
+
sms: "IterableList[Submodule]" = self.list_items(repo)
|
| 186 |
+
spsms = set(psms)
|
| 187 |
+
ssms = set(sms)
|
| 188 |
+
|
| 189 |
+
# HANDLE REMOVALS
|
| 190 |
+
###################
|
| 191 |
+
rrsm = spsms - ssms
|
| 192 |
+
len_rrsm = len(rrsm)
|
| 193 |
+
|
| 194 |
+
for i, rsm in enumerate(rrsm):
|
| 195 |
+
op = REMOVE
|
| 196 |
+
if i == 0:
|
| 197 |
+
op |= BEGIN
|
| 198 |
+
# END handle begin
|
| 199 |
+
|
| 200 |
+
# Fake it into thinking its at the current commit to allow deletion
|
| 201 |
+
# of previous module. Trigger the cache to be updated before that.
|
| 202 |
+
progress.update(
|
| 203 |
+
op,
|
| 204 |
+
i,
|
| 205 |
+
len_rrsm,
|
| 206 |
+
prefix + "Removing submodule %r at %s" % (rsm.name, rsm.abspath),
|
| 207 |
+
)
|
| 208 |
+
rsm._parent_commit = repo.head.commit
|
| 209 |
+
rsm.remove(
|
| 210 |
+
configuration=False,
|
| 211 |
+
module=True,
|
| 212 |
+
force=force_remove,
|
| 213 |
+
dry_run=dry_run,
|
| 214 |
+
)
|
| 215 |
+
|
| 216 |
+
if i == len_rrsm - 1:
|
| 217 |
+
op |= END
|
| 218 |
+
# END handle end
|
| 219 |
+
progress.update(op, i, len_rrsm, prefix + "Done removing submodule %r" % rsm.name)
|
| 220 |
+
# END for each removed submodule
|
| 221 |
+
|
| 222 |
+
# HANDLE PATH RENAMES
|
| 223 |
+
#####################
|
| 224 |
+
# URL changes + branch changes.
|
| 225 |
+
csms = spsms & ssms
|
| 226 |
+
len_csms = len(csms)
|
| 227 |
+
for i, csm in enumerate(csms):
|
| 228 |
+
psm: "Submodule" = psms[csm.name]
|
| 229 |
+
sm: "Submodule" = sms[csm.name]
|
| 230 |
+
|
| 231 |
+
# PATH CHANGES
|
| 232 |
+
##############
|
| 233 |
+
if sm.path != psm.path and psm.module_exists():
|
| 234 |
+
progress.update(
|
| 235 |
+
BEGIN | PATHCHANGE,
|
| 236 |
+
i,
|
| 237 |
+
len_csms,
|
| 238 |
+
prefix + "Moving repository of submodule %r from %s to %s" % (sm.name, psm.abspath, sm.abspath),
|
| 239 |
+
)
|
| 240 |
+
# Move the module to the new path.
|
| 241 |
+
if not dry_run:
|
| 242 |
+
psm.move(sm.path, module=True, configuration=False)
|
| 243 |
+
# END handle dry_run
|
| 244 |
+
progress.update(
|
| 245 |
+
END | PATHCHANGE,
|
| 246 |
+
i,
|
| 247 |
+
len_csms,
|
| 248 |
+
prefix + "Done moving repository of submodule %r" % sm.name,
|
| 249 |
+
)
|
| 250 |
+
# END handle path changes
|
| 251 |
+
|
| 252 |
+
if sm.module_exists():
|
| 253 |
+
# HANDLE URL CHANGE
|
| 254 |
+
###################
|
| 255 |
+
if sm.url != psm.url:
|
| 256 |
+
# Add the new remote, remove the old one.
|
| 257 |
+
# This way, if the url just changes, the commits will not have
|
| 258 |
+
# to be re-retrieved.
|
| 259 |
+
nn = "__new_origin__"
|
| 260 |
+
smm = sm.module()
|
| 261 |
+
rmts = smm.remotes
|
| 262 |
+
|
| 263 |
+
# Don't do anything if we already have the url we search in
|
| 264 |
+
# place.
|
| 265 |
+
if len([r for r in rmts if r.url == sm.url]) == 0:
|
| 266 |
+
progress.update(
|
| 267 |
+
BEGIN | URLCHANGE,
|
| 268 |
+
i,
|
| 269 |
+
len_csms,
|
| 270 |
+
prefix + "Changing url of submodule %r from %s to %s" % (sm.name, psm.url, sm.url),
|
| 271 |
+
)
|
| 272 |
+
|
| 273 |
+
if not dry_run:
|
| 274 |
+
assert nn not in [r.name for r in rmts]
|
| 275 |
+
smr = smm.create_remote(nn, sm.url)
|
| 276 |
+
smr.fetch(progress=progress)
|
| 277 |
+
|
| 278 |
+
# If we have a tracking branch, it should be available
|
| 279 |
+
# in the new remote as well.
|
| 280 |
+
if len([r for r in smr.refs if r.remote_head == sm.branch_name]) == 0:
|
| 281 |
+
raise ValueError(
|
| 282 |
+
"Submodule branch named %r was not available in new submodule remote at %r"
|
| 283 |
+
% (sm.branch_name, sm.url)
|
| 284 |
+
)
|
| 285 |
+
# END head is not detached
|
| 286 |
+
|
| 287 |
+
# Now delete the changed one.
|
| 288 |
+
rmt_for_deletion = None
|
| 289 |
+
for remote in rmts:
|
| 290 |
+
if remote.url == psm.url:
|
| 291 |
+
rmt_for_deletion = remote
|
| 292 |
+
break
|
| 293 |
+
# END if urls match
|
| 294 |
+
# END for each remote
|
| 295 |
+
|
| 296 |
+
# If we didn't find a matching remote, but have exactly
|
| 297 |
+
# one, we can safely use this one.
|
| 298 |
+
if rmt_for_deletion is None:
|
| 299 |
+
if len(rmts) == 1:
|
| 300 |
+
rmt_for_deletion = rmts[0]
|
| 301 |
+
else:
|
| 302 |
+
# If we have not found any remote with the
|
| 303 |
+
# original URL we may not have a name. This is a
|
| 304 |
+
# special case, and its okay to fail here.
|
| 305 |
+
# Alternatively we could just generate a unique
|
| 306 |
+
# name and leave all existing ones in place.
|
| 307 |
+
raise InvalidGitRepositoryError(
|
| 308 |
+
"Couldn't find original remote-repo at url %r" % psm.url
|
| 309 |
+
)
|
| 310 |
+
# END handle one single remote
|
| 311 |
+
# END handle check we found a remote
|
| 312 |
+
|
| 313 |
+
orig_name = rmt_for_deletion.name
|
| 314 |
+
smm.delete_remote(rmt_for_deletion)
|
| 315 |
+
# NOTE: Currently we leave tags from the deleted remotes
|
| 316 |
+
# as well as separate tracking branches in the possibly
|
| 317 |
+
# totally changed repository (someone could have changed
|
| 318 |
+
# the url to another project). At some point, one might
|
| 319 |
+
# want to clean it up, but the danger is high to remove
|
| 320 |
+
# stuff the user has added explicitly.
|
| 321 |
+
|
| 322 |
+
# Rename the new remote back to what it was.
|
| 323 |
+
smr.rename(orig_name)
|
| 324 |
+
|
| 325 |
+
# Early on, we verified that the our current tracking
|
| 326 |
+
# branch exists in the remote. Now we have to ensure
|
| 327 |
+
# that the sha we point to is still contained in the new
|
| 328 |
+
# remote tracking branch.
|
| 329 |
+
smsha = sm.binsha
|
| 330 |
+
found = False
|
| 331 |
+
rref = smr.refs[self.branch_name]
|
| 332 |
+
for c in rref.commit.traverse():
|
| 333 |
+
if c.binsha == smsha:
|
| 334 |
+
found = True
|
| 335 |
+
break
|
| 336 |
+
# END traverse all commits in search for sha
|
| 337 |
+
# END for each commit
|
| 338 |
+
|
| 339 |
+
if not found:
|
| 340 |
+
# Adjust our internal binsha to use the one of the
|
| 341 |
+
# remote this way, it will be checked out in the
|
| 342 |
+
# next step. This will change the submodule relative
|
| 343 |
+
# to us, so the user will be able to commit the
|
| 344 |
+
# change easily.
|
| 345 |
+
_logger.warning(
|
| 346 |
+
"Current sha %s was not contained in the tracking\
|
| 347 |
+
branch at the new remote, setting it the the remote's tracking branch",
|
| 348 |
+
sm.hexsha,
|
| 349 |
+
)
|
| 350 |
+
sm.binsha = rref.commit.binsha
|
| 351 |
+
# END reset binsha
|
| 352 |
+
|
| 353 |
+
# NOTE: All checkout is performed by the base
|
| 354 |
+
# implementation of update.
|
| 355 |
+
# END handle dry_run
|
| 356 |
+
progress.update(
|
| 357 |
+
END | URLCHANGE,
|
| 358 |
+
i,
|
| 359 |
+
len_csms,
|
| 360 |
+
prefix + "Done adjusting url of submodule %r" % (sm.name),
|
| 361 |
+
)
|
| 362 |
+
# END skip remote handling if new url already exists in module
|
| 363 |
+
# END handle url
|
| 364 |
+
|
| 365 |
+
# HANDLE PATH CHANGES
|
| 366 |
+
#####################
|
| 367 |
+
if sm.branch_path != psm.branch_path:
|
| 368 |
+
# Finally, create a new tracking branch which tracks the new
|
| 369 |
+
# remote branch.
|
| 370 |
+
progress.update(
|
| 371 |
+
BEGIN | BRANCHCHANGE,
|
| 372 |
+
i,
|
| 373 |
+
len_csms,
|
| 374 |
+
prefix
|
| 375 |
+
+ "Changing branch of submodule %r from %s to %s"
|
| 376 |
+
% (sm.name, psm.branch_path, sm.branch_path),
|
| 377 |
+
)
|
| 378 |
+
if not dry_run:
|
| 379 |
+
smm = sm.module()
|
| 380 |
+
smmr = smm.remotes
|
| 381 |
+
# As the branch might not exist yet, we will have to fetch
|
| 382 |
+
# all remotes to be sure...
|
| 383 |
+
for remote in smmr:
|
| 384 |
+
remote.fetch(progress=progress)
|
| 385 |
+
# END for each remote
|
| 386 |
+
|
| 387 |
+
try:
|
| 388 |
+
tbr = git.Head.create(
|
| 389 |
+
smm,
|
| 390 |
+
sm.branch_name,
|
| 391 |
+
logmsg="branch: Created from HEAD",
|
| 392 |
+
)
|
| 393 |
+
except OSError:
|
| 394 |
+
# ...or reuse the existing one.
|
| 395 |
+
tbr = git.Head(smm, sm.branch_path)
|
| 396 |
+
# END ensure tracking branch exists
|
| 397 |
+
|
| 398 |
+
tbr.set_tracking_branch(find_first_remote_branch(smmr, sm.branch_name))
|
| 399 |
+
# NOTE: All head-resetting is done in the base
|
| 400 |
+
# implementation of update but we will have to checkout the
|
| 401 |
+
# new branch here. As it still points to the currently
|
| 402 |
+
# checked out commit, we don't do any harm.
|
| 403 |
+
# As we don't want to update working-tree or index, changing
|
| 404 |
+
# the ref is all there is to do.
|
| 405 |
+
smm.head.reference = tbr
|
| 406 |
+
# END handle dry_run
|
| 407 |
+
|
| 408 |
+
progress.update(
|
| 409 |
+
END | BRANCHCHANGE,
|
| 410 |
+
i,
|
| 411 |
+
len_csms,
|
| 412 |
+
prefix + "Done changing branch of submodule %r" % sm.name,
|
| 413 |
+
)
|
| 414 |
+
# END handle branch
|
| 415 |
+
# END handle
|
| 416 |
+
# END for each common submodule
|
| 417 |
+
except Exception as err:
|
| 418 |
+
if not keep_going:
|
| 419 |
+
raise
|
| 420 |
+
_logger.error(str(err))
|
| 421 |
+
# END handle keep_going
|
| 422 |
+
|
| 423 |
+
# FINALLY UPDATE ALL ACTUAL SUBMODULES
|
| 424 |
+
######################################
|
| 425 |
+
for sm in sms:
|
| 426 |
+
# Update the submodule using the default method.
|
| 427 |
+
sm.update(
|
| 428 |
+
recursive=False,
|
| 429 |
+
init=init,
|
| 430 |
+
to_latest_revision=to_latest_revision,
|
| 431 |
+
progress=progress,
|
| 432 |
+
dry_run=dry_run,
|
| 433 |
+
force=force_reset,
|
| 434 |
+
keep_going=keep_going,
|
| 435 |
+
)
|
| 436 |
+
|
| 437 |
+
# Update recursively depth first - question is which inconsistent state will
|
| 438 |
+
# be better in case it fails somewhere. Defective branch or defective depth.
|
| 439 |
+
# The RootSubmodule type will never process itself, which was done in the
|
| 440 |
+
# previous expression.
|
| 441 |
+
if recursive:
|
| 442 |
+
# The module would exist by now if we are not in dry_run mode.
|
| 443 |
+
if sm.module_exists():
|
| 444 |
+
type(self)(sm.module()).update(
|
| 445 |
+
recursive=True,
|
| 446 |
+
force_remove=force_remove,
|
| 447 |
+
init=init,
|
| 448 |
+
to_latest_revision=to_latest_revision,
|
| 449 |
+
progress=progress,
|
| 450 |
+
dry_run=dry_run,
|
| 451 |
+
force_reset=force_reset,
|
| 452 |
+
keep_going=keep_going,
|
| 453 |
+
)
|
| 454 |
+
# END handle dry_run
|
| 455 |
+
# END handle recursive
|
| 456 |
+
# END for each submodule to update
|
| 457 |
+
|
| 458 |
+
return self
|
| 459 |
+
|
| 460 |
+
def module(self) -> "Repo":
|
| 461 |
+
""":return: The actual repository containing the submodules"""
|
| 462 |
+
return self.repo
|
| 463 |
+
|
| 464 |
+
# } END interface
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
# } END classes
|
parrot/lib/python3.10/site-packages/git/objects/tag.py
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitPython and is released under the
|
| 4 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 5 |
+
|
| 6 |
+
"""Provides an :class:`~git.objects.base.Object`-based type for annotated tags.
|
| 7 |
+
|
| 8 |
+
This defines the :class:`TagObject` class, which represents annotated tags.
|
| 9 |
+
For lightweight tags, see the :mod:`git.refs.tag` module.
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
__all__ = ["TagObject"]
|
| 13 |
+
|
| 14 |
+
import sys
|
| 15 |
+
|
| 16 |
+
from git.compat import defenc
|
| 17 |
+
from git.util import hex_to_bin
|
| 18 |
+
|
| 19 |
+
from . import base
|
| 20 |
+
from .util import get_object_type_by_name, parse_actor_and_date
|
| 21 |
+
|
| 22 |
+
# typing ----------------------------------------------
|
| 23 |
+
|
| 24 |
+
from typing import List, TYPE_CHECKING, Union
|
| 25 |
+
|
| 26 |
+
if sys.version_info >= (3, 8):
|
| 27 |
+
from typing import Literal
|
| 28 |
+
else:
|
| 29 |
+
from typing_extensions import Literal
|
| 30 |
+
|
| 31 |
+
if TYPE_CHECKING:
|
| 32 |
+
from git.repo import Repo
|
| 33 |
+
from git.util import Actor
|
| 34 |
+
|
| 35 |
+
from .blob import Blob
|
| 36 |
+
from .commit import Commit
|
| 37 |
+
from .tree import Tree
|
| 38 |
+
|
| 39 |
+
# ---------------------------------------------------
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class TagObject(base.Object):
|
| 43 |
+
"""Annotated (i.e. non-lightweight) tag carrying additional information about an
|
| 44 |
+
object we are pointing to.
|
| 45 |
+
|
| 46 |
+
See :manpage:`gitglossary(7)` on "tag object":
|
| 47 |
+
https://git-scm.com/docs/gitglossary#def_tag_object
|
| 48 |
+
"""
|
| 49 |
+
|
| 50 |
+
type: Literal["tag"] = "tag"
|
| 51 |
+
|
| 52 |
+
__slots__ = (
|
| 53 |
+
"object",
|
| 54 |
+
"tag",
|
| 55 |
+
"tagger",
|
| 56 |
+
"tagged_date",
|
| 57 |
+
"tagger_tz_offset",
|
| 58 |
+
"message",
|
| 59 |
+
)
|
| 60 |
+
|
| 61 |
+
def __init__(
|
| 62 |
+
self,
|
| 63 |
+
repo: "Repo",
|
| 64 |
+
binsha: bytes,
|
| 65 |
+
object: Union[None, base.Object] = None,
|
| 66 |
+
tag: Union[None, str] = None,
|
| 67 |
+
tagger: Union[None, "Actor"] = None,
|
| 68 |
+
tagged_date: Union[int, None] = None,
|
| 69 |
+
tagger_tz_offset: Union[int, None] = None,
|
| 70 |
+
message: Union[str, None] = None,
|
| 71 |
+
) -> None: # @ReservedAssignment
|
| 72 |
+
"""Initialize a tag object with additional data.
|
| 73 |
+
|
| 74 |
+
:param repo:
|
| 75 |
+
Repository this object is located in.
|
| 76 |
+
|
| 77 |
+
:param binsha:
|
| 78 |
+
20 byte SHA1.
|
| 79 |
+
|
| 80 |
+
:param object:
|
| 81 |
+
:class:`~git.objects.base.Object` instance of object we are pointing to.
|
| 82 |
+
|
| 83 |
+
:param tag:
|
| 84 |
+
Name of this tag.
|
| 85 |
+
|
| 86 |
+
:param tagger:
|
| 87 |
+
:class:`~git.util.Actor` identifying the tagger.
|
| 88 |
+
|
| 89 |
+
:param tagged_date: int_seconds_since_epoch
|
| 90 |
+
The DateTime of the tag creation.
|
| 91 |
+
Use :func:`time.gmtime` to convert it into a different format.
|
| 92 |
+
|
| 93 |
+
:param tagger_tz_offset: int_seconds_west_of_utc
|
| 94 |
+
The timezone that the `tagged_date` is in, in a format similar to
|
| 95 |
+
:attr:`time.altzone`.
|
| 96 |
+
"""
|
| 97 |
+
super().__init__(repo, binsha)
|
| 98 |
+
if object is not None:
|
| 99 |
+
self.object: Union["Commit", "Blob", "Tree", "TagObject"] = object
|
| 100 |
+
if tag is not None:
|
| 101 |
+
self.tag = tag
|
| 102 |
+
if tagger is not None:
|
| 103 |
+
self.tagger = tagger
|
| 104 |
+
if tagged_date is not None:
|
| 105 |
+
self.tagged_date = tagged_date
|
| 106 |
+
if tagger_tz_offset is not None:
|
| 107 |
+
self.tagger_tz_offset = tagger_tz_offset
|
| 108 |
+
if message is not None:
|
| 109 |
+
self.message = message
|
| 110 |
+
|
| 111 |
+
def _set_cache_(self, attr: str) -> None:
|
| 112 |
+
"""Cache all our attributes at once."""
|
| 113 |
+
if attr in TagObject.__slots__:
|
| 114 |
+
ostream = self.repo.odb.stream(self.binsha)
|
| 115 |
+
lines: List[str] = ostream.read().decode(defenc, "replace").splitlines()
|
| 116 |
+
|
| 117 |
+
_obj, hexsha = lines[0].split(" ")
|
| 118 |
+
_type_token, type_name = lines[1].split(" ")
|
| 119 |
+
object_type = get_object_type_by_name(type_name.encode("ascii"))
|
| 120 |
+
self.object = object_type(self.repo, hex_to_bin(hexsha))
|
| 121 |
+
|
| 122 |
+
self.tag = lines[2][4:] # tag <tag name>
|
| 123 |
+
|
| 124 |
+
if len(lines) > 3:
|
| 125 |
+
tagger_info = lines[3] # tagger <actor> <date>
|
| 126 |
+
(
|
| 127 |
+
self.tagger,
|
| 128 |
+
self.tagged_date,
|
| 129 |
+
self.tagger_tz_offset,
|
| 130 |
+
) = parse_actor_and_date(tagger_info)
|
| 131 |
+
|
| 132 |
+
# Line 4 empty - it could mark the beginning of the next header.
|
| 133 |
+
# In case there really is no message, it would not exist.
|
| 134 |
+
# Otherwise a newline separates header from message.
|
| 135 |
+
if len(lines) > 5:
|
| 136 |
+
self.message = "\n".join(lines[5:])
|
| 137 |
+
else:
|
| 138 |
+
self.message = ""
|
| 139 |
+
# END check our attributes
|
| 140 |
+
else:
|
| 141 |
+
super()._set_cache_(attr)
|
parrot/lib/python3.10/site-packages/git/objects/tree.py
ADDED
|
@@ -0,0 +1,414 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitPython and is released under the
|
| 4 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 5 |
+
|
| 6 |
+
__all__ = ["TreeModifier", "Tree"]
|
| 7 |
+
|
| 8 |
+
import sys
|
| 9 |
+
|
| 10 |
+
import git.diff as git_diff
|
| 11 |
+
from git.util import IterableList, join_path, to_bin_sha
|
| 12 |
+
|
| 13 |
+
from . import util
|
| 14 |
+
from .base import IndexObjUnion, IndexObject
|
| 15 |
+
from .blob import Blob
|
| 16 |
+
from .fun import tree_entries_from_data, tree_to_stream
|
| 17 |
+
from .submodule.base import Submodule
|
| 18 |
+
|
| 19 |
+
# typing -------------------------------------------------
|
| 20 |
+
|
| 21 |
+
from typing import (
|
| 22 |
+
Any,
|
| 23 |
+
Callable,
|
| 24 |
+
Dict,
|
| 25 |
+
Iterable,
|
| 26 |
+
Iterator,
|
| 27 |
+
List,
|
| 28 |
+
Tuple,
|
| 29 |
+
TYPE_CHECKING,
|
| 30 |
+
Type,
|
| 31 |
+
Union,
|
| 32 |
+
cast,
|
| 33 |
+
)
|
| 34 |
+
|
| 35 |
+
if sys.version_info >= (3, 8):
|
| 36 |
+
from typing import Literal
|
| 37 |
+
else:
|
| 38 |
+
from typing_extensions import Literal
|
| 39 |
+
|
| 40 |
+
from git.types import PathLike
|
| 41 |
+
|
| 42 |
+
if TYPE_CHECKING:
|
| 43 |
+
from io import BytesIO
|
| 44 |
+
|
| 45 |
+
from git.repo import Repo
|
| 46 |
+
|
| 47 |
+
TreeCacheTup = Tuple[bytes, int, str]
|
| 48 |
+
|
| 49 |
+
TraversedTreeTup = Union[Tuple[Union["Tree", None], IndexObjUnion, Tuple["Submodule", "Submodule"]]]
|
| 50 |
+
|
| 51 |
+
# --------------------------------------------------------
|
| 52 |
+
|
| 53 |
+
cmp: Callable[[str, str], int] = lambda a, b: (a > b) - (a < b)
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class TreeModifier:
|
| 57 |
+
"""A utility class providing methods to alter the underlying cache in a list-like
|
| 58 |
+
fashion.
|
| 59 |
+
|
| 60 |
+
Once all adjustments are complete, the :attr:`_cache`, which really is a reference
|
| 61 |
+
to the cache of a tree, will be sorted. This ensures it will be in a serializable
|
| 62 |
+
state.
|
| 63 |
+
"""
|
| 64 |
+
|
| 65 |
+
__slots__ = ("_cache",)
|
| 66 |
+
|
| 67 |
+
def __init__(self, cache: List[TreeCacheTup]) -> None:
|
| 68 |
+
self._cache = cache
|
| 69 |
+
|
| 70 |
+
def _index_by_name(self, name: str) -> int:
|
| 71 |
+
""":return: index of an item with name, or -1 if not found"""
|
| 72 |
+
for i, t in enumerate(self._cache):
|
| 73 |
+
if t[2] == name:
|
| 74 |
+
return i
|
| 75 |
+
# END found item
|
| 76 |
+
# END for each item in cache
|
| 77 |
+
return -1
|
| 78 |
+
|
| 79 |
+
# { Interface
|
| 80 |
+
def set_done(self) -> "TreeModifier":
|
| 81 |
+
"""Call this method once you are done modifying the tree information.
|
| 82 |
+
|
| 83 |
+
This may be called several times, but be aware that each call will cause a sort
|
| 84 |
+
operation.
|
| 85 |
+
|
| 86 |
+
:return:
|
| 87 |
+
self
|
| 88 |
+
"""
|
| 89 |
+
self._cache.sort(key=lambda x: (x[2] + "/") if x[1] == Tree.tree_id << 12 else x[2])
|
| 90 |
+
return self
|
| 91 |
+
|
| 92 |
+
# } END interface
|
| 93 |
+
|
| 94 |
+
# { Mutators
|
| 95 |
+
def add(self, sha: bytes, mode: int, name: str, force: bool = False) -> "TreeModifier":
|
| 96 |
+
"""Add the given item to the tree.
|
| 97 |
+
|
| 98 |
+
If an item with the given name already exists, nothing will be done, but a
|
| 99 |
+
:exc:`ValueError` will be raised if the sha and mode of the existing item do not
|
| 100 |
+
match the one you add, unless `force` is ``True``.
|
| 101 |
+
|
| 102 |
+
:param sha:
|
| 103 |
+
The 20 or 40 byte sha of the item to add.
|
| 104 |
+
|
| 105 |
+
:param mode:
|
| 106 |
+
:class:`int` representing the stat-compatible mode of the item.
|
| 107 |
+
|
| 108 |
+
:param force:
|
| 109 |
+
If ``True``, an item with your name and information will overwrite any
|
| 110 |
+
existing item with the same name, no matter which information it has.
|
| 111 |
+
|
| 112 |
+
:return:
|
| 113 |
+
self
|
| 114 |
+
"""
|
| 115 |
+
if "/" in name:
|
| 116 |
+
raise ValueError("Name must not contain '/' characters")
|
| 117 |
+
if (mode >> 12) not in Tree._map_id_to_type:
|
| 118 |
+
raise ValueError("Invalid object type according to mode %o" % mode)
|
| 119 |
+
|
| 120 |
+
sha = to_bin_sha(sha)
|
| 121 |
+
index = self._index_by_name(name)
|
| 122 |
+
|
| 123 |
+
item = (sha, mode, name)
|
| 124 |
+
|
| 125 |
+
if index == -1:
|
| 126 |
+
self._cache.append(item)
|
| 127 |
+
else:
|
| 128 |
+
if force:
|
| 129 |
+
self._cache[index] = item
|
| 130 |
+
else:
|
| 131 |
+
ex_item = self._cache[index]
|
| 132 |
+
if ex_item[0] != sha or ex_item[1] != mode:
|
| 133 |
+
raise ValueError("Item %r existed with different properties" % name)
|
| 134 |
+
# END handle mismatch
|
| 135 |
+
# END handle force
|
| 136 |
+
# END handle name exists
|
| 137 |
+
return self
|
| 138 |
+
|
| 139 |
+
def add_unchecked(self, binsha: bytes, mode: int, name: str) -> None:
|
| 140 |
+
"""Add the given item to the tree. Its correctness is assumed, so it is the
|
| 141 |
+
caller's responsibility to ensure that the input is correct.
|
| 142 |
+
|
| 143 |
+
For more information on the parameters, see :meth:`add`.
|
| 144 |
+
|
| 145 |
+
:param binsha:
|
| 146 |
+
20 byte binary sha.
|
| 147 |
+
"""
|
| 148 |
+
assert isinstance(binsha, bytes) and isinstance(mode, int) and isinstance(name, str)
|
| 149 |
+
tree_cache = (binsha, mode, name)
|
| 150 |
+
|
| 151 |
+
self._cache.append(tree_cache)
|
| 152 |
+
|
| 153 |
+
def __delitem__(self, name: str) -> None:
|
| 154 |
+
"""Delete an item with the given name if it exists."""
|
| 155 |
+
index = self._index_by_name(name)
|
| 156 |
+
if index > -1:
|
| 157 |
+
del self._cache[index]
|
| 158 |
+
|
| 159 |
+
# } END mutators
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
    R"""Tree objects represent an ordered list of :class:`~git.objects.blob.Blob`\s and
    other :class:`Tree`\s.

    See :manpage:`gitglossary(7)` on "tree object":
    https://git-scm.com/docs/gitglossary#def_tree_object

    Subscripting is supported, as with a list or dict:

    * Access a specific blob using the ``tree["filename"]`` notation.
    * You may likewise access by index, like ``blob = tree[0]``.
    """

    type: Literal["tree"] = "tree"

    # Entries are kept as raw (binsha, mode, name) tuples in _cache; wrapper
    # objects are only built on access (see _iter_convert_to_object).
    __slots__ = ("_cache",)

    # Actual integer IDs for comparison.
    # These correspond to the top 4 bits of a git entry mode (mode >> 12).
    commit_id = 0o16  # Equals stat.S_IFDIR | stat.S_IFLNK - a directory link.
    blob_id = 0o10
    symlink_id = 0o12
    tree_id = 0o04

    # Maps the mode-derived ID of an entry to the class used to represent it.
    _map_id_to_type: Dict[int, Type[IndexObjUnion]] = {
        commit_id: Submodule,
        blob_id: Blob,
        symlink_id: Blob,
        # Tree ID added once Tree is defined.
    }

    def __init__(
        self,
        repo: "Repo",
        binsha: bytes,
        mode: int = tree_id << 12,
        path: Union[PathLike, None] = None,
    ):
        """Initialize the tree. The default mode marks this entry as a directory."""
        super().__init__(repo, binsha, mode, path)

    @classmethod
    def _get_intermediate_items(
        cls,
        index_object: IndexObjUnion,
    ) -> Union[Tuple["Tree", ...], Tuple[()]]:
        # Traversal hook: only trees have children; every other entry is a leaf.
        if index_object.type == "tree":
            return tuple(index_object._iter_convert_to_object(index_object._cache))
        return ()

    def _set_cache_(self, attr: str) -> None:
        """Lazily load the raw entry tuples from the object database on first use."""
        if attr == "_cache":
            # Set the data when we need it.
            ostream = self.repo.odb.stream(self.binsha)
            self._cache: List[TreeCacheTup] = tree_entries_from_data(ostream.read())
        else:
            super()._set_cache_(attr)
        # END handle attribute

    def _iter_convert_to_object(self, iterable: Iterable[TreeCacheTup]) -> Iterator[IndexObjUnion]:
        """Iterable yields tuples of (binsha, mode, name), which will be converted to
        the respective object representation.
        """
        for binsha, mode, name in iterable:
            path = join_path(self.path, name)
            try:
                # The top 4 mode bits select the Python class for this entry.
                yield self._map_id_to_type[mode >> 12](self.repo, binsha, mode, path)
            except KeyError as e:
                raise TypeError("Unknown mode %o found in tree data for path '%s'" % (mode, path)) from e
        # END for each item

    def join(self, file: str) -> IndexObjUnion:
        """Find the named object in this tree's contents.

        :return:
            :class:`~git.objects.blob.Blob`, :class:`Tree`, or
            :class:`~git.objects.submodule.base.Submodule`

        :raise KeyError:
            If the given file or tree does not exist in this tree.
        """
        msg = "Blob or Tree named %r not found"
        if "/" in file:
            # Multi-component path: descend one token at a time.
            tree = self
            item = self
            tokens = file.split("/")
            for i, token in enumerate(tokens):
                item = tree[token]
                if item.type == "tree":
                    tree = item
                else:
                    # Safety assertion - blobs are at the end of the path.
                    if i != len(tokens) - 1:
                        raise KeyError(msg % file)
                    return item
                # END handle item type
            # END for each token of split path
            if item == self:
                raise KeyError(msg % file)
            return item
        else:
            # Single component: scan the raw cache directly, avoiding object
            # construction for non-matching entries.
            for info in self._cache:
                if info[2] == file:  # [2] == name
                    return self._map_id_to_type[info[1] >> 12](
                        self.repo, info[0], info[1], join_path(self.path, info[2])
                    )
            # END for each obj
            raise KeyError(msg % file)
        # END handle long paths

    def __truediv__(self, file: str) -> IndexObjUnion:
        """The ``/`` operator is another syntax for joining.

        See :meth:`join` for details.
        """
        return self.join(file)

    @property
    def trees(self) -> List["Tree"]:
        """:return: list(Tree, ...) List of trees directly below this tree"""
        return [i for i in self if i.type == "tree"]

    @property
    def blobs(self) -> List[Blob]:
        """:return: list(Blob, ...) List of blobs directly below this tree"""
        return [i for i in self if i.type == "blob"]

    @property
    def cache(self) -> TreeModifier:
        """
        :return:
            An object allowing modification of the internal cache. This can be used to
            change the tree's contents. When done, make sure you call
            :meth:`~TreeModifier.set_done` on the tree modifier, or serialization
            behaviour will be incorrect.

        :note:
            See :class:`TreeModifier` for more information on how to alter the cache.
        """
        return TreeModifier(self._cache)

    def traverse(
        self,
        predicate: Callable[[Union[IndexObjUnion, TraversedTreeTup], int], bool] = lambda i, d: True,
        prune: Callable[[Union[IndexObjUnion, TraversedTreeTup], int], bool] = lambda i, d: False,
        depth: int = -1,
        branch_first: bool = True,
        visit_once: bool = False,
        ignore_self: int = 1,
        as_edge: bool = False,
    ) -> Union[Iterator[IndexObjUnion], Iterator[TraversedTreeTup]]:
        """For documentation, see
        `Traversable._traverse() <git.objects.util.Traversable._traverse>`.

        Trees are set to ``visit_once = False`` to gain more performance in the
        traversal.
        """

        # # To typecheck instead of using cast.
        # import itertools
        # def is_tree_traversed(inp: Tuple) -> TypeGuard[Tuple[Iterator[Union['Tree', 'Blob', 'Submodule']]]]:
        #     return all(isinstance(x, (Blob, Tree, Submodule)) for x in inp[1])

        # ret = super().traverse(predicate, prune, depth, branch_first, visit_once, ignore_self)
        # ret_tup = itertools.tee(ret, 2)
        # assert is_tree_traversed(ret_tup), f"Type is {[type(x) for x in list(ret_tup[0])]}"
        # return ret_tup[0]

        return cast(
            Union[Iterator[IndexObjUnion], Iterator[TraversedTreeTup]],
            super()._traverse(
                predicate,  # type: ignore[arg-type]
                prune,  # type: ignore[arg-type]
                depth,
                branch_first,
                visit_once,
                ignore_self,
            ),
        )

    def list_traverse(self, *args: Any, **kwargs: Any) -> IterableList[IndexObjUnion]:
        """
        :return:
            :class:`~git.util.IterableList` with the results of the traversal as
            produced by :meth:`traverse`

            Tree -> IterableList[Union[Submodule, Tree, Blob]]
        """
        return super()._list_traverse(*args, **kwargs)

    # List protocol

    def __getslice__(self, i: int, j: int) -> List[IndexObjUnion]:
        # NOTE(review): Python 2 legacy hook; on Python 3, slice syntax calls
        # __getitem__ with a slice object, which raises TypeError below.
        return list(self._iter_convert_to_object(self._cache[i:j]))

    def __iter__(self) -> Iterator[IndexObjUnion]:
        # Yield child objects lazily, in cache order.
        return self._iter_convert_to_object(self._cache)

    def __len__(self) -> int:
        """:return: Number of entries directly below this tree."""
        return len(self._cache)

    def __getitem__(self, item: Union[str, int, slice]) -> IndexObjUnion:
        """Access an entry by integer index or by name (delegates to :meth:`join`)."""
        if isinstance(item, int):
            info = self._cache[item]
            return self._map_id_to_type[info[1] >> 12](self.repo, info[0], info[1], join_path(self.path, info[2]))

        if isinstance(item, str):
            # compatibility
            return self.join(item)
        # END index is basestring

        raise TypeError("Invalid index type: %r" % item)

    def __contains__(self, item: Union[IndexObjUnion, PathLike]) -> bool:
        """Membership test by object identity (binsha) or by repo-relative path."""
        if isinstance(item, IndexObject):
            for info in self._cache:
                if item.binsha == info[0]:
                    return True
                # END compare sha
            # END for each entry
        # END handle item is index object
        # compatibility

        # Treat item as repo-relative path.
        else:
            path = self.path
            for info in self._cache:
                if item == join_path(path, info[2]):
                    return True
            # END for each item
        return False

    def __reversed__(self) -> Iterator[IndexObjUnion]:
        # reversed() over the lazily-converted child objects.
        return reversed(self._iter_convert_to_object(self._cache))  # type: ignore[call-overload]

    def _serialize(self, stream: "BytesIO") -> "Tree":
        """Serialize this tree into the stream. Assumes sorted tree data.

        :note:
            We will assume our tree data to be in a sorted state. If this is not the
            case, serialization will not generate a correct tree representation as these
            are assumed to be sorted by algorithms.
        """
        tree_to_stream(self._cache, stream.write)
        return self

    def _deserialize(self, stream: "BytesIO") -> "Tree":
        """Replace this tree's cache with entries parsed from the raw stream data."""
        self._cache = tree_entries_from_data(stream.read())
        return self


# END tree

# Finalize map definition.
Tree._map_id_to_type[Tree.tree_id] = Tree
|
parrot/lib/python3.10/site-packages/git/objects/util.py
ADDED
|
@@ -0,0 +1,700 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitPython and is released under the
|
| 4 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 5 |
+
|
| 6 |
+
"""Utility functions for working with git objects."""
|
| 7 |
+
|
| 8 |
+
__all__ = [
|
| 9 |
+
"get_object_type_by_name",
|
| 10 |
+
"parse_date",
|
| 11 |
+
"parse_actor_and_date",
|
| 12 |
+
"ProcessStreamAdapter",
|
| 13 |
+
"Traversable",
|
| 14 |
+
"altz_to_utctz_str",
|
| 15 |
+
"utctz_to_altz",
|
| 16 |
+
"verify_utctz",
|
| 17 |
+
"Actor",
|
| 18 |
+
"tzoffset",
|
| 19 |
+
"utc",
|
| 20 |
+
]
|
| 21 |
+
|
| 22 |
+
from abc import ABC, abstractmethod
|
| 23 |
+
import calendar
|
| 24 |
+
from collections import deque
|
| 25 |
+
from datetime import datetime, timedelta, tzinfo
|
| 26 |
+
import re
|
| 27 |
+
from string import digits
|
| 28 |
+
import time
|
| 29 |
+
import warnings
|
| 30 |
+
|
| 31 |
+
from git.util import Actor, IterableList, IterableObj
|
| 32 |
+
|
| 33 |
+
# typing ------------------------------------------------------------
|
| 34 |
+
|
| 35 |
+
from typing import (
|
| 36 |
+
Any,
|
| 37 |
+
Callable,
|
| 38 |
+
Deque,
|
| 39 |
+
Iterator,
|
| 40 |
+
NamedTuple,
|
| 41 |
+
Sequence,
|
| 42 |
+
TYPE_CHECKING,
|
| 43 |
+
Tuple,
|
| 44 |
+
Type,
|
| 45 |
+
TypeVar,
|
| 46 |
+
Union,
|
| 47 |
+
cast,
|
| 48 |
+
overload,
|
| 49 |
+
)
|
| 50 |
+
|
| 51 |
+
from git.types import Has_id_attribute, Literal
|
| 52 |
+
|
| 53 |
+
if TYPE_CHECKING:
    from io import BytesIO, StringIO
    from subprocess import Popen

    from git.types import Protocol, runtime_checkable

    from .blob import Blob
    from .commit import Commit
    from .submodule.base import Submodule
    from .tag import TagObject
    from .tree import TraversedTreeTup, Tree
else:
    # At runtime, substitute lightweight stand-ins so the names used above stay
    # valid without importing typing-only / potentially cyclic modules.
    Protocol = ABC

    def runtime_checkable(f):
        # No-op replacement for typing.runtime_checkable: return the class unchanged.
        return f
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class TraverseNT(NamedTuple):
    """Record for one traversed node: its depth, the node itself, and the node it
    was reached from (``None`` for the root)."""

    depth: int  # Distance from the traversal root.
    item: Union["Traversable", "Blob"]  # The object encountered at this step.
    src: Union["Traversable", None]  # Predecessor the item was reached from.
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
# Type variable bound to TraversableIterableObj subclasses.
T_TIobj = TypeVar("T_TIobj", bound="TraversableIterableObj")  # For TraversableIterableObj.traverse()

# Shape of (source, destination) pairs yielded when traversing with as_edge=True.
TraversedTup = Union[
    Tuple[Union["Traversable", None], "Traversable"],  # For Commit, Submodule.
    "TraversedTreeTup",  # For Tree.traverse().
]

# --------------------------------------------------------------------

# Zero-length offset; returned by tzoffset.dst() since fixed offsets have no DST.
ZERO = timedelta(0)
|
| 87 |
+
|
| 88 |
+
# { Functions
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def mode_str_to_int(modestr: Union[bytes, str]) -> int:
    """Convert mode bits from an octal mode string to an integer mode for git.

    :param modestr:
        String like ``755`` or ``644`` or ``100644`` - only the last 6 chars will be
        used.

    :return:
        Integer mode compatible to the mode method ids of the :mod:`stat` module
        regarding the rwx permissions for user, group and other, special flags and
        file system flags, such as whether it is a symlink.
    """
    mode = 0
    for iteration, char in enumerate(reversed(modestr[-6:])):
        # Iterating a bytes object yields integer code points (e.g. 52 for "4").
        # Map those back to the digit they spell so that b"644" and "644" produce
        # the same mode; previously int(52) == 52 silently corrupted bytes input.
        digit = int(chr(char)) if isinstance(char, int) else int(char)
        # Each octal digit occupies 3 bits, least-significant digit first.
        mode += digit << iteration * 3
    # END for each char
    return mode
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def get_object_type_by_name(
    object_type_name: bytes,
) -> Union[Type["Commit"], Type["TagObject"], Type["Tree"], Type["Blob"]]:
    """Retrieve the Python class GitPython uses to represent a kind of Git object.

    :return:
        A type suitable to handle the given `object_type_name`.
        This type can be called to create new instances.

    :param object_type_name:
        Member of :attr:`Object.TYPES <git.objects.base.Object.TYPES>`.

    :raise ValueError:
        If `object_type_name` is unknown.
    """
    # Each branch imports lazily so only the module actually needed gets loaded,
    # which also sidesteps import cycles at module load time.
    if object_type_name == b"commit":
        from . import commit

        return commit.Commit
    if object_type_name == b"tag":
        from . import tag

        return tag.TagObject
    if object_type_name == b"blob":
        from . import blob

        return blob.Blob
    if object_type_name == b"tree":
        from . import tree

        return tree.Tree
    raise ValueError("Cannot handle unknown object type: %s" % object_type_name.decode())
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def utctz_to_altz(utctz: str) -> int:
    """Convert a git timezone offset into a timezone offset west of UTC in seconds
    (compatible with :attr:`time.altzone`).

    :param utctz:
        Git UTC timezone string, e.g. ``+0200``.
    """
    parsed = int(utctz)
    magnitude = abs(parsed)
    # "+HHMM"/"-HHMM": hours sit in the hundreds, minutes in the remainder.
    west_seconds = (magnitude // 100) * 3600 + (magnitude % 100) * 60
    # Git offsets are east-positive, altzone is west-positive: flip the sign.
    return -west_seconds if parsed > 0 else west_seconds
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
def altz_to_utctz_str(altz: float) -> str:
    """Convert a timezone offset west of UTC in seconds into a Git timezone offset
    string.

    :param altz:
        Timezone offset in seconds west of UTC.
    """
    magnitude = abs(altz)
    hh = magnitude // 3600
    mm = (magnitude % 3600) // 60
    # Offsets of at least one whole minute west of UTC render as negative.
    prefix = "-" if altz >= 60 else "+"
    return f"{prefix}{hh:02}{mm:02}"
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
def verify_utctz(offset: str) -> str:
    """Validate a git timezone offset string such as ``+0200``.

    :raise ValueError:
        If `offset` is incorrect.

    :return:
        `offset`, unchanged, when well-formed.
    """
    # Exactly five characters: a sign followed by four decimal digits.
    well_formed = (
        len(offset) == 5
        and offset[0] in "+-"
        and all(ch in digits for ch in offset[1:])
    )
    if not well_formed:
        raise ValueError("Invalid timezone offset format: %s" % offset)
    return offset
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
class tzoffset(tzinfo):
    """A fixed-offset :class:`~datetime.tzinfo`, constructed from seconds west of
    UTC (the :attr:`time.altzone` convention)."""

    def __init__(self, secs_west_of_utc: float, name: Union[None, str] = None) -> None:
        # datetime expects east-positive offsets, so negate the west-positive input.
        self._delta = timedelta(seconds=-secs_west_of_utc)
        self._label = name or "fixed"

    def __reduce__(self) -> Tuple[Type["tzoffset"], Tuple[float, str]]:
        # Pickle by re-invoking the constructor with the original arguments.
        return tzoffset, (-self._delta.total_seconds(), self._label)

    def utcoffset(self, dt: Union[datetime, None]) -> timedelta:
        return self._delta

    def tzname(self, dt: Union[datetime, None]) -> str:
        return self._label

    def dst(self, dt: Union[datetime, None]) -> timedelta:
        # Fixed offsets never observe daylight saving time.
        return ZERO
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
# Shared aware-UTC timezone instance used throughout this module.
utc = tzoffset(0, "UTC")
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
def from_timestamp(timestamp: float, tz_offset: float) -> datetime:
    """Convert a `timestamp` + `tz_offset` into an aware :class:`~datetime.datetime`
    instance."""
    aware_utc = datetime.fromtimestamp(timestamp, utc)
    try:
        return aware_utc.astimezone(tzoffset(tz_offset))
    except ValueError:
        # Offsets astimezone() rejects fall back to plain UTC.
        return aware_utc
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
def parse_date(string_date: Union[str, datetime]) -> Tuple[int, int]:
    """Parse the given date as one of the following:

    * Aware datetime instance
    * Git internal format: timestamp offset
    * :rfc:`2822`: ``Thu, 07 Apr 2005 22:13:13 +0200``
    * ISO 8601: ``2005-04-07T22:13:13`` - The ``T`` can be a space as well.

    :return:
        Tuple(int(timestamp_UTC), int(offset)), both in seconds since epoch

    :raise ValueError:
        If the format could not be understood.

    :note:
        Date can also be ``YYYY.MM.DD``, ``MM/DD/YYYY`` and ``DD.MM.YYYY``.
    """
    if isinstance(string_date, datetime):
        if string_date.tzinfo:
            utcoffset = cast(timedelta, string_date.utcoffset())  # Type guard: not None when tzinfo is set.
            offset = -int(utcoffset.total_seconds())
            return int(string_date.astimezone(utc).timestamp()), offset
        else:
            raise ValueError(f"string_date datetime object without tzinfo, {string_date}")

    # Git time
    try:
        # "<seconds> <offset>" with no clock time, e.g. "1191999972 -0700".
        if string_date.count(" ") == 1 and string_date.rfind(":") == -1:
            timestamp, offset_str = string_date.split()
            if timestamp.startswith("@"):
                timestamp = timestamp[1:]
            timestamp_int = int(timestamp)
            return timestamp_int, utctz_to_altz(verify_utctz(offset_str))
        else:
            offset_str = "+0000"  # Local time by default.
            # A trailing " +HHMM"/" -HHMM" is split off before parsing the rest.
            if string_date[-5] in "-+":
                offset_str = verify_utctz(string_date[-5:])
                string_date = string_date[:-6]  # skip space as well
            # END split timezone info
            offset = utctz_to_altz(offset_str)

            # Now figure out the date and time portion - split time.
            date_formats = []
            splitter = -1
            if "," in string_date:
                # RFC 2822 style, e.g. "Thu, 07 Apr 2005 22:13:13".
                date_formats.append("%a, %d %b %Y")
                splitter = string_date.rfind(" ")
            else:
                # ISO plus additional
                date_formats.append("%Y-%m-%d")
                date_formats.append("%Y.%m.%d")
                date_formats.append("%m/%d/%Y")
                date_formats.append("%d.%m.%Y")

                splitter = string_date.rfind("T")
                if splitter == -1:
                    splitter = string_date.rfind(" ")
                # END handle 'T' and ' '
            # END handle RFC or ISO

            assert splitter > -1

            # Split date and time.
            time_part = string_date[splitter + 1 :]  # Skip space.
            date_part = string_date[:splitter]

            # Parse time.
            tstruct = time.strptime(time_part, "%H:%M:%S")

            # Try each candidate date format until one parses.
            for fmt in date_formats:
                try:
                    dtstruct = time.strptime(date_part, fmt)
                    utctime = calendar.timegm(
                        (
                            dtstruct.tm_year,
                            dtstruct.tm_mon,
                            dtstruct.tm_mday,
                            tstruct.tm_hour,
                            tstruct.tm_min,
                            tstruct.tm_sec,
                            dtstruct.tm_wday,
                            dtstruct.tm_yday,
                            tstruct.tm_isdst,
                        )
                    )
                    return int(utctime), offset
                except ValueError:
                    continue
                # END exception handling
            # END for each fmt

            # Still here ? fail.
            raise ValueError("no format matched")
        # END handle format
    except Exception as e:
        # Any parsing failure (including the IndexError on too-short input above)
        # is surfaced uniformly as ValueError.
        raise ValueError(f"Unsupported date format or type: {string_date}, type={type(string_date)}") from e
    # END handle exceptions
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
# Precompiled regexes
# Matches "<kind> <actor> <epoch> <tz>" lines, e.g. "author Name <mail> 1191999972 -0700".
_re_actor_epoch = re.compile(r"^.+? (.*) (\d+) ([+-]\d+).*$")
# Fallback: everything after the first word when no timestamp is present.
_re_only_actor = re.compile(r"^.+? (.*)$")
|
| 324 |
+
|
| 325 |
+
|
| 326 |
+
def parse_actor_and_date(line: str) -> Tuple[Actor, int, int]:
    """Parse out the actor (author or committer) info from a line like::

        author Tom Preston-Werner <tom@mojombo.com> 1191999972 -0700

    :return:
        [Actor, int_seconds_since_epoch, int_timezone_offset]
    """
    match = _re_actor_epoch.search(line)
    if match:
        actor, epoch, offset = match.groups()
    else:
        # No timestamp present - salvage at least the actor portion.
        epoch, offset = "0", "0"
        fallback = _re_only_actor.search(line)
        actor = fallback.group(1) if fallback else (line or "")
    return (Actor._from_string(actor), int(epoch), utctz_to_altz(offset))
|
| 342 |
+
|
| 343 |
+
|
| 344 |
+
# } END functions
|
| 345 |
+
|
| 346 |
+
|
| 347 |
+
# { Classes
|
| 348 |
+
|
| 349 |
+
|
| 350 |
+
class ProcessStreamAdapter:
    """Class wiring all calls to the contained Process instance.

    Use this type to hide the underlying process to provide access only to a specified
    stream. The process is usually wrapped into an :class:`~git.cmd.Git.AutoInterrupt`
    class to kill it if the instance goes out of scope.
    """

    __slots__ = ("_proc", "_stream")

    def __init__(self, process: "Popen", stream_name: str) -> None:
        self._proc = process
        # Resolve the stream once; all subsequent attribute access is delegated.
        self._stream: StringIO = getattr(process, stream_name)  # guessed type

    def __getattr__(self, attr: str) -> Any:
        # Invoked only for names not found on the adapter itself, so stream
        # methods such as read()/readline() pass straight through.
        return getattr(self._stream, attr)
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
@runtime_checkable
|
| 369 |
+
class Traversable(Protocol):
|
| 370 |
+
"""Simple interface to perform depth-first or breadth-first traversals in one
|
| 371 |
+
direction.
|
| 372 |
+
|
| 373 |
+
Subclasses only need to implement one function.
|
| 374 |
+
|
| 375 |
+
Instances of the subclass must be hashable.
|
| 376 |
+
|
| 377 |
+
Defined subclasses:
|
| 378 |
+
|
| 379 |
+
* :class:`Commit <git.objects.Commit>`
|
| 380 |
+
* :class:`Tree <git.objects.tree.Tree>`
|
| 381 |
+
* :class:`Submodule <git.objects.submodule.base.Submodule>`
|
| 382 |
+
"""
|
| 383 |
+
|
| 384 |
+
__slots__ = ()
|
| 385 |
+
|
| 386 |
+
    @classmethod
    @abstractmethod
    def _get_intermediate_items(cls, item: Any) -> Sequence["Traversable"]:
        """
        :return:
            Tuple of items connected to the given item.
            Must be implemented in subclass.

        class Commit:: (cls, Commit) -> Tuple[Commit, ...]
        class Submodule:: (cls, Submodule) -> IterableList[Submodule]
        class Tree:: (cls, Tree) -> Tuple[Tree, ...]
        """
        raise NotImplementedError("To be implemented in subclass")
|
| 399 |
+
|
| 400 |
+
@abstractmethod
|
| 401 |
+
def list_traverse(self, *args: Any, **kwargs: Any) -> Any:
|
| 402 |
+
"""Traverse self and collect all items found.
|
| 403 |
+
|
| 404 |
+
Calling this directly on the abstract base class, including via a ``super()``
|
| 405 |
+
proxy, is deprecated. Only overridden implementations should be called.
|
| 406 |
+
"""
|
| 407 |
+
warnings.warn(
|
| 408 |
+
"list_traverse() method should only be called from subclasses."
|
| 409 |
+
" Calling from Traversable abstract class will raise NotImplementedError in 4.0.0."
|
| 410 |
+
" The concrete subclasses in GitPython itself are 'Commit', 'RootModule', 'Submodule', and 'Tree'.",
|
| 411 |
+
DeprecationWarning,
|
| 412 |
+
stacklevel=2,
|
| 413 |
+
)
|
| 414 |
+
return self._list_traverse(*args, **kwargs)
|
| 415 |
+
|
| 416 |
+
    def _list_traverse(
        self, as_edge: bool = False, *args: Any, **kwargs: Any
    ) -> IterableList[Union["Commit", "Submodule", "Tree", "Blob"]]:
        """Traverse self and collect all items found.

        :param as_edge:
            If ``True``, the returned list wraps (source, destination) pairs as
            produced by :meth:`traverse` with ``as_edge=True``.

        :return:
            :class:`~git.util.IterableList` with the results of the traversal as
            produced by :meth:`traverse`::

                Commit -> IterableList[Commit]
                Submodule -> IterableList[Submodule]
                Tree -> IterableList[Union[Submodule, Tree, Blob]]
        """
        # Commit and Submodule have id.__attribute__ as IterableObj.
        # Tree has id.__attribute__ inherited from IndexObject.
        if isinstance(self, Has_id_attribute):
            id = self._id_attribute_
        else:
            # Shouldn't reach here, unless Traversable subclass created with no
            # _id_attribute_.
            id = ""
        # Could add _id_attribute_ to Traversable, or make all Traversable also
        # Iterable?

        if not as_edge:
            out: IterableList[Union["Commit", "Submodule", "Tree", "Blob"]] = IterableList(id)
            out.extend(self.traverse(as_edge=as_edge, *args, **kwargs))  # noqa: B026
            return out
            # Overloads in subclasses (mypy doesn't allow typing self: subclass).
            # Union[IterableList['Commit'], IterableList['Submodule'], IterableList[Union['Submodule', 'Tree', 'Blob']]]
        else:
            # Raise DeprecationWarning, it doesn't make sense to use this.
            out_list: IterableList = IterableList(self.traverse(*args, **kwargs))
            return out_list
|
| 450 |
+
|
| 451 |
+
@abstractmethod
|
| 452 |
+
def traverse(self, *args: Any, **kwargs: Any) -> Any:
|
| 453 |
+
"""Iterator yielding items found when traversing self.
|
| 454 |
+
|
| 455 |
+
Calling this directly on the abstract base class, including via a ``super()``
|
| 456 |
+
proxy, is deprecated. Only overridden implementations should be called.
|
| 457 |
+
"""
|
| 458 |
+
warnings.warn(
|
| 459 |
+
"traverse() method should only be called from subclasses."
|
| 460 |
+
" Calling from Traversable abstract class will raise NotImplementedError in 4.0.0."
|
| 461 |
+
" The concrete subclasses in GitPython itself are 'Commit', 'RootModule', 'Submodule', and 'Tree'.",
|
| 462 |
+
DeprecationWarning,
|
| 463 |
+
stacklevel=2,
|
| 464 |
+
)
|
| 465 |
+
return self._traverse(*args, **kwargs)
|
| 466 |
+
|
| 467 |
+
def _traverse(
|
| 468 |
+
self,
|
| 469 |
+
predicate: Callable[[Union["Traversable", "Blob", TraversedTup], int], bool] = lambda i, d: True,
|
| 470 |
+
prune: Callable[[Union["Traversable", "Blob", TraversedTup], int], bool] = lambda i, d: False,
|
| 471 |
+
depth: int = -1,
|
| 472 |
+
branch_first: bool = True,
|
| 473 |
+
visit_once: bool = True,
|
| 474 |
+
ignore_self: int = 1,
|
| 475 |
+
as_edge: bool = False,
|
| 476 |
+
) -> Union[Iterator[Union["Traversable", "Blob"]], Iterator[TraversedTup]]:
|
| 477 |
+
"""Iterator yielding items found when traversing `self`.
|
| 478 |
+
|
| 479 |
+
:param predicate:
|
| 480 |
+
A function ``f(i,d)`` that returns ``False`` if item i at depth ``d`` should
|
| 481 |
+
not be included in the result.
|
| 482 |
+
|
| 483 |
+
:param prune:
|
| 484 |
+
A function ``f(i,d)`` that returns ``True`` if the search should stop at
|
| 485 |
+
item ``i`` at depth ``d``. Item ``i`` will not be returned.
|
| 486 |
+
|
| 487 |
+
:param depth:
|
| 488 |
+
Defines at which level the iteration should not go deeper if -1. There is no
|
| 489 |
+
limit if 0, you would effectively only get `self`, the root of the
|
| 490 |
+
iteration. If 1, you would only get the first level of
|
| 491 |
+
predecessors/successors.
|
| 492 |
+
|
| 493 |
+
:param branch_first:
|
| 494 |
+
If ``True``, items will be returned branch first, otherwise depth first.
|
| 495 |
+
|
| 496 |
+
:param visit_once:
|
| 497 |
+
If ``True``, items will only be returned once, although they might be
|
| 498 |
+
encountered several times. Loops are prevented that way.
|
| 499 |
+
|
| 500 |
+
:param ignore_self:
|
| 501 |
+
If ``True``, `self` will be ignored and automatically pruned from the
|
| 502 |
+
result. Otherwise it will be the first item to be returned. If `as_edge` is
|
| 503 |
+
``True``, the source of the first edge is ``None``.
|
| 504 |
+
|
| 505 |
+
:param as_edge:
|
| 506 |
+
If ``True``, return a pair of items, first being the source, second the
|
| 507 |
+
destination, i.e. tuple(src, dest) with the edge spanning from source to
|
| 508 |
+
destination.
|
| 509 |
+
|
| 510 |
+
:return:
|
| 511 |
+
Iterator yielding items found when traversing `self`::
|
| 512 |
+
|
| 513 |
+
Commit -> Iterator[Union[Commit, Tuple[Commit, Commit]] Submodule ->
|
| 514 |
+
Iterator[Submodule, Tuple[Submodule, Submodule]] Tree ->
|
| 515 |
+
Iterator[Union[Blob, Tree, Submodule,
|
| 516 |
+
Tuple[Union[Submodule, Tree], Union[Blob, Tree,
|
| 517 |
+
Submodule]]]
|
| 518 |
+
|
| 519 |
+
ignore_self=True is_edge=True -> Iterator[item] ignore_self=True
|
| 520 |
+
is_edge=False --> Iterator[item] ignore_self=False is_edge=True ->
|
| 521 |
+
Iterator[item] | Iterator[Tuple[src, item]] ignore_self=False
|
| 522 |
+
is_edge=False -> Iterator[Tuple[src, item]]
|
| 523 |
+
"""
|
| 524 |
+
|
| 525 |
+
visited = set()
|
| 526 |
+
stack: Deque[TraverseNT] = deque()
|
| 527 |
+
stack.append(TraverseNT(0, self, None)) # self is always depth level 0.
|
| 528 |
+
|
| 529 |
+
def addToStack(
|
| 530 |
+
stack: Deque[TraverseNT],
|
| 531 |
+
src_item: "Traversable",
|
| 532 |
+
branch_first: bool,
|
| 533 |
+
depth: int,
|
| 534 |
+
) -> None:
|
| 535 |
+
lst = self._get_intermediate_items(item)
|
| 536 |
+
if not lst: # Empty list
|
| 537 |
+
return
|
| 538 |
+
if branch_first:
|
| 539 |
+
stack.extendleft(TraverseNT(depth, i, src_item) for i in lst)
|
| 540 |
+
else:
|
| 541 |
+
reviter = (TraverseNT(depth, lst[i], src_item) for i in range(len(lst) - 1, -1, -1))
|
| 542 |
+
stack.extend(reviter)
|
| 543 |
+
|
| 544 |
+
# END addToStack local method
|
| 545 |
+
|
| 546 |
+
while stack:
|
| 547 |
+
d, item, src = stack.pop() # Depth of item, item, item_source
|
| 548 |
+
|
| 549 |
+
if visit_once and item in visited:
|
| 550 |
+
continue
|
| 551 |
+
|
| 552 |
+
if visit_once:
|
| 553 |
+
visited.add(item)
|
| 554 |
+
|
| 555 |
+
rval: Union[TraversedTup, "Traversable", "Blob"]
|
| 556 |
+
if as_edge:
|
| 557 |
+
# If as_edge return (src, item) unless rrc is None
|
| 558 |
+
# (e.g. for first item).
|
| 559 |
+
rval = (src, item)
|
| 560 |
+
else:
|
| 561 |
+
rval = item
|
| 562 |
+
|
| 563 |
+
if prune(rval, d):
|
| 564 |
+
continue
|
| 565 |
+
|
| 566 |
+
skipStartItem = ignore_self and (item is self)
|
| 567 |
+
if not skipStartItem and predicate(rval, d):
|
| 568 |
+
yield rval
|
| 569 |
+
|
| 570 |
+
# Only continue to next level if this is appropriate!
|
| 571 |
+
nd = d + 1
|
| 572 |
+
if depth > -1 and nd > depth:
|
| 573 |
+
continue
|
| 574 |
+
|
| 575 |
+
addToStack(stack, item, branch_first, nd)
|
| 576 |
+
# END for each item on work stack
|
| 577 |
+
|
| 578 |
+
|
| 579 |
+
@runtime_checkable
|
| 580 |
+
class Serializable(Protocol):
|
| 581 |
+
"""Defines methods to serialize and deserialize objects from and into a data
|
| 582 |
+
stream."""
|
| 583 |
+
|
| 584 |
+
__slots__ = ()
|
| 585 |
+
|
| 586 |
+
# @abstractmethod
|
| 587 |
+
def _serialize(self, stream: "BytesIO") -> "Serializable":
|
| 588 |
+
"""Serialize the data of this object into the given data stream.
|
| 589 |
+
|
| 590 |
+
:note:
|
| 591 |
+
A serialized object would :meth:`_deserialize` into the same object.
|
| 592 |
+
|
| 593 |
+
:param stream:
|
| 594 |
+
A file-like object.
|
| 595 |
+
|
| 596 |
+
:return:
|
| 597 |
+
self
|
| 598 |
+
"""
|
| 599 |
+
raise NotImplementedError("To be implemented in subclass")
|
| 600 |
+
|
| 601 |
+
# @abstractmethod
|
| 602 |
+
def _deserialize(self, stream: "BytesIO") -> "Serializable":
|
| 603 |
+
"""Deserialize all information regarding this object from the stream.
|
| 604 |
+
|
| 605 |
+
:param stream:
|
| 606 |
+
A file-like object.
|
| 607 |
+
|
| 608 |
+
:return:
|
| 609 |
+
self
|
| 610 |
+
"""
|
| 611 |
+
raise NotImplementedError("To be implemented in subclass")
|
| 612 |
+
|
| 613 |
+
|
| 614 |
+
class TraversableIterableObj(IterableObj, Traversable):
|
| 615 |
+
__slots__ = ()
|
| 616 |
+
|
| 617 |
+
TIobj_tuple = Tuple[Union[T_TIobj, None], T_TIobj]
|
| 618 |
+
|
| 619 |
+
def list_traverse(self: T_TIobj, *args: Any, **kwargs: Any) -> IterableList[T_TIobj]:
|
| 620 |
+
return super()._list_traverse(*args, **kwargs)
|
| 621 |
+
|
| 622 |
+
@overload
|
| 623 |
+
def traverse(self: T_TIobj) -> Iterator[T_TIobj]: ...
|
| 624 |
+
|
| 625 |
+
@overload
|
| 626 |
+
def traverse(
|
| 627 |
+
self: T_TIobj,
|
| 628 |
+
predicate: Callable[[Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool],
|
| 629 |
+
prune: Callable[[Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool],
|
| 630 |
+
depth: int,
|
| 631 |
+
branch_first: bool,
|
| 632 |
+
visit_once: bool,
|
| 633 |
+
ignore_self: Literal[True],
|
| 634 |
+
as_edge: Literal[False],
|
| 635 |
+
) -> Iterator[T_TIobj]: ...
|
| 636 |
+
|
| 637 |
+
@overload
|
| 638 |
+
def traverse(
|
| 639 |
+
self: T_TIobj,
|
| 640 |
+
predicate: Callable[[Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool],
|
| 641 |
+
prune: Callable[[Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool],
|
| 642 |
+
depth: int,
|
| 643 |
+
branch_first: bool,
|
| 644 |
+
visit_once: bool,
|
| 645 |
+
ignore_self: Literal[False],
|
| 646 |
+
as_edge: Literal[True],
|
| 647 |
+
) -> Iterator[Tuple[Union[T_TIobj, None], T_TIobj]]: ...
|
| 648 |
+
|
| 649 |
+
@overload
|
| 650 |
+
def traverse(
|
| 651 |
+
self: T_TIobj,
|
| 652 |
+
predicate: Callable[[Union[T_TIobj, TIobj_tuple], int], bool],
|
| 653 |
+
prune: Callable[[Union[T_TIobj, TIobj_tuple], int], bool],
|
| 654 |
+
depth: int,
|
| 655 |
+
branch_first: bool,
|
| 656 |
+
visit_once: bool,
|
| 657 |
+
ignore_self: Literal[True],
|
| 658 |
+
as_edge: Literal[True],
|
| 659 |
+
) -> Iterator[Tuple[T_TIobj, T_TIobj]]: ...
|
| 660 |
+
|
| 661 |
+
def traverse(
|
| 662 |
+
self: T_TIobj,
|
| 663 |
+
predicate: Callable[[Union[T_TIobj, TIobj_tuple], int], bool] = lambda i, d: True,
|
| 664 |
+
prune: Callable[[Union[T_TIobj, TIobj_tuple], int], bool] = lambda i, d: False,
|
| 665 |
+
depth: int = -1,
|
| 666 |
+
branch_first: bool = True,
|
| 667 |
+
visit_once: bool = True,
|
| 668 |
+
ignore_self: int = 1,
|
| 669 |
+
as_edge: bool = False,
|
| 670 |
+
) -> Union[Iterator[T_TIobj], Iterator[Tuple[T_TIobj, T_TIobj]], Iterator[TIobj_tuple]]:
|
| 671 |
+
"""For documentation, see :meth:`Traversable._traverse`."""
|
| 672 |
+
|
| 673 |
+
## To typecheck instead of using cast:
|
| 674 |
+
#
|
| 675 |
+
# import itertools
|
| 676 |
+
# from git.types import TypeGuard
|
| 677 |
+
# def is_commit_traversed(inp: Tuple) -> TypeGuard[Tuple[Iterator[Tuple['Commit', 'Commit']]]]:
|
| 678 |
+
# for x in inp[1]:
|
| 679 |
+
# if not isinstance(x, tuple) and len(x) != 2:
|
| 680 |
+
# if all(isinstance(inner, Commit) for inner in x):
|
| 681 |
+
# continue
|
| 682 |
+
# return True
|
| 683 |
+
#
|
| 684 |
+
# ret = super(Commit, self).traverse(predicate, prune, depth, branch_first, visit_once, ignore_self, as_edge)
|
| 685 |
+
# ret_tup = itertools.tee(ret, 2)
|
| 686 |
+
# assert is_commit_traversed(ret_tup), f"{[type(x) for x in list(ret_tup[0])]}"
|
| 687 |
+
# return ret_tup[0]
|
| 688 |
+
|
| 689 |
+
return cast(
|
| 690 |
+
Union[Iterator[T_TIobj], Iterator[Tuple[Union[None, T_TIobj], T_TIobj]]],
|
| 691 |
+
super()._traverse(
|
| 692 |
+
predicate, # type: ignore[arg-type]
|
| 693 |
+
prune, # type: ignore[arg-type]
|
| 694 |
+
depth,
|
| 695 |
+
branch_first,
|
| 696 |
+
visit_once,
|
| 697 |
+
ignore_self,
|
| 698 |
+
as_edge,
|
| 699 |
+
),
|
| 700 |
+
)
|
parrot/lib/python3.10/site-packages/git/refs/__init__.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is part of GitPython and is released under the
|
| 2 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 3 |
+
|
| 4 |
+
__all__ = [
|
| 5 |
+
"HEAD",
|
| 6 |
+
"Head",
|
| 7 |
+
"RefLog",
|
| 8 |
+
"RefLogEntry",
|
| 9 |
+
"Reference",
|
| 10 |
+
"RemoteReference",
|
| 11 |
+
"SymbolicReference",
|
| 12 |
+
"Tag",
|
| 13 |
+
"TagReference",
|
| 14 |
+
]
|
| 15 |
+
|
| 16 |
+
from .head import HEAD, Head
|
| 17 |
+
from .log import RefLog, RefLogEntry
|
| 18 |
+
from .reference import Reference
|
| 19 |
+
from .remote import RemoteReference
|
| 20 |
+
from .symbolic import SymbolicReference
|
| 21 |
+
from .tag import Tag, TagReference
|
parrot/lib/python3.10/site-packages/git/refs/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (523 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/git/refs/__pycache__/head.cpython-310.pyc
ADDED
|
Binary file (9.43 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/refs/__pycache__/log.cpython-310.pyc
ADDED
|
Binary file (11.4 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/refs/__pycache__/reference.cpython-310.pyc
ADDED
|
Binary file (4.58 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/refs/__pycache__/remote.cpython-310.pyc
ADDED
|
Binary file (2.26 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/refs/__pycache__/symbolic.cpython-310.pyc
ADDED
|
Binary file (26 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/refs/__pycache__/tag.cpython-310.pyc
ADDED
|
Binary file (4.5 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/refs/head.py
ADDED
|
@@ -0,0 +1,304 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is part of GitPython and is released under the
|
| 2 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 3 |
+
|
| 4 |
+
"""Some ref-based objects.
|
| 5 |
+
|
| 6 |
+
Note the distinction between the :class:`HEAD` and :class:`Head` classes.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
__all__ = ["HEAD", "Head"]
|
| 10 |
+
|
| 11 |
+
from git.config import GitConfigParser, SectionConstraint
|
| 12 |
+
from git.exc import GitCommandError
|
| 13 |
+
from git.util import join_path
|
| 14 |
+
|
| 15 |
+
from .reference import Reference
|
| 16 |
+
from .symbolic import SymbolicReference
|
| 17 |
+
|
| 18 |
+
# typing ---------------------------------------------------
|
| 19 |
+
|
| 20 |
+
from typing import Any, Sequence, TYPE_CHECKING, Union
|
| 21 |
+
|
| 22 |
+
from git.types import Commit_ish, PathLike
|
| 23 |
+
|
| 24 |
+
if TYPE_CHECKING:
|
| 25 |
+
from git.objects import Commit
|
| 26 |
+
from git.refs import RemoteReference
|
| 27 |
+
from git.repo import Repo
|
| 28 |
+
|
| 29 |
+
# -------------------------------------------------------------------
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def strip_quotes(string: str) -> str:
|
| 33 |
+
if string.startswith('"') and string.endswith('"'):
|
| 34 |
+
return string[1:-1]
|
| 35 |
+
return string
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class HEAD(SymbolicReference):
|
| 39 |
+
"""Special case of a :class:`~git.refs.symbolic.SymbolicReference` representing the
|
| 40 |
+
repository's HEAD reference."""
|
| 41 |
+
|
| 42 |
+
_HEAD_NAME = "HEAD"
|
| 43 |
+
_ORIG_HEAD_NAME = "ORIG_HEAD"
|
| 44 |
+
|
| 45 |
+
__slots__ = ()
|
| 46 |
+
|
| 47 |
+
# TODO: This can be removed once SymbolicReference.commit has static type hints.
|
| 48 |
+
commit: "Commit"
|
| 49 |
+
|
| 50 |
+
def __init__(self, repo: "Repo", path: PathLike = _HEAD_NAME) -> None:
|
| 51 |
+
if path != self._HEAD_NAME:
|
| 52 |
+
raise ValueError("HEAD instance must point to %r, got %r" % (self._HEAD_NAME, path))
|
| 53 |
+
super().__init__(repo, path)
|
| 54 |
+
|
| 55 |
+
def orig_head(self) -> SymbolicReference:
|
| 56 |
+
"""
|
| 57 |
+
:return:
|
| 58 |
+
:class:`~git.refs.symbolic.SymbolicReference` pointing at the ORIG_HEAD,
|
| 59 |
+
which is maintained to contain the previous value of HEAD.
|
| 60 |
+
"""
|
| 61 |
+
return SymbolicReference(self.repo, self._ORIG_HEAD_NAME)
|
| 62 |
+
|
| 63 |
+
def reset(
|
| 64 |
+
self,
|
| 65 |
+
commit: Union[Commit_ish, SymbolicReference, str] = "HEAD",
|
| 66 |
+
index: bool = True,
|
| 67 |
+
working_tree: bool = False,
|
| 68 |
+
paths: Union[PathLike, Sequence[PathLike], None] = None,
|
| 69 |
+
**kwargs: Any,
|
| 70 |
+
) -> "HEAD":
|
| 71 |
+
"""Reset our HEAD to the given commit optionally synchronizing the index and
|
| 72 |
+
working tree. The reference we refer to will be set to commit as well.
|
| 73 |
+
|
| 74 |
+
:param commit:
|
| 75 |
+
:class:`~git.objects.commit.Commit`, :class:`~git.refs.reference.Reference`,
|
| 76 |
+
or string identifying a revision we should reset HEAD to.
|
| 77 |
+
|
| 78 |
+
:param index:
|
| 79 |
+
If ``True``, the index will be set to match the given commit.
|
| 80 |
+
Otherwise it will not be touched.
|
| 81 |
+
|
| 82 |
+
:param working_tree:
|
| 83 |
+
If ``True``, the working tree will be forcefully adjusted to match the given
|
| 84 |
+
commit, possibly overwriting uncommitted changes without warning.
|
| 85 |
+
If `working_tree` is ``True``, `index` must be ``True`` as well.
|
| 86 |
+
|
| 87 |
+
:param paths:
|
| 88 |
+
Single path or list of paths relative to the git root directory
|
| 89 |
+
that are to be reset. This allows to partially reset individual files.
|
| 90 |
+
|
| 91 |
+
:param kwargs:
|
| 92 |
+
Additional arguments passed to :manpage:`git-reset(1)`.
|
| 93 |
+
|
| 94 |
+
:return:
|
| 95 |
+
self
|
| 96 |
+
"""
|
| 97 |
+
mode: Union[str, None]
|
| 98 |
+
mode = "--soft"
|
| 99 |
+
if index:
|
| 100 |
+
mode = "--mixed"
|
| 101 |
+
|
| 102 |
+
# It appears some git versions declare mixed and paths deprecated.
|
| 103 |
+
# See http://github.com/Byron/GitPython/issues#issue/2.
|
| 104 |
+
if paths:
|
| 105 |
+
mode = None
|
| 106 |
+
# END special case
|
| 107 |
+
# END handle index
|
| 108 |
+
|
| 109 |
+
if working_tree:
|
| 110 |
+
mode = "--hard"
|
| 111 |
+
if not index:
|
| 112 |
+
raise ValueError("Cannot reset the working tree if the index is not reset as well")
|
| 113 |
+
|
| 114 |
+
# END working tree handling
|
| 115 |
+
|
| 116 |
+
try:
|
| 117 |
+
self.repo.git.reset(mode, commit, "--", paths, **kwargs)
|
| 118 |
+
except GitCommandError as e:
|
| 119 |
+
# git nowadays may use 1 as status to indicate there are still unstaged
|
| 120 |
+
# modifications after the reset.
|
| 121 |
+
if e.status != 1:
|
| 122 |
+
raise
|
| 123 |
+
# END handle exception
|
| 124 |
+
|
| 125 |
+
return self
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
class Head(Reference):
|
| 129 |
+
"""A Head is a named reference to a :class:`~git.objects.commit.Commit`. Every Head
|
| 130 |
+
instance contains a name and a :class:`~git.objects.commit.Commit` object.
|
| 131 |
+
|
| 132 |
+
Examples::
|
| 133 |
+
|
| 134 |
+
>>> repo = Repo("/path/to/repo")
|
| 135 |
+
>>> head = repo.heads[0]
|
| 136 |
+
|
| 137 |
+
>>> head.name
|
| 138 |
+
'master'
|
| 139 |
+
|
| 140 |
+
>>> head.commit
|
| 141 |
+
<git.Commit "1c09f116cbc2cb4100fb6935bb162daa4723f455">
|
| 142 |
+
|
| 143 |
+
>>> head.commit.hexsha
|
| 144 |
+
'1c09f116cbc2cb4100fb6935bb162daa4723f455'
|
| 145 |
+
"""
|
| 146 |
+
|
| 147 |
+
_common_path_default = "refs/heads"
|
| 148 |
+
k_config_remote = "remote"
|
| 149 |
+
k_config_remote_ref = "merge" # Branch to merge from remote.
|
| 150 |
+
|
| 151 |
+
@classmethod
|
| 152 |
+
def delete(cls, repo: "Repo", *heads: "Union[Head, str]", force: bool = False, **kwargs: Any) -> None:
|
| 153 |
+
"""Delete the given heads.
|
| 154 |
+
|
| 155 |
+
:param force:
|
| 156 |
+
If ``True``, the heads will be deleted even if they are not yet merged into
|
| 157 |
+
the main development stream. Default ``False``.
|
| 158 |
+
"""
|
| 159 |
+
flag = "-d"
|
| 160 |
+
if force:
|
| 161 |
+
flag = "-D"
|
| 162 |
+
repo.git.branch(flag, *heads)
|
| 163 |
+
|
| 164 |
+
def set_tracking_branch(self, remote_reference: Union["RemoteReference", None]) -> "Head":
|
| 165 |
+
"""Configure this branch to track the given remote reference. This will
|
| 166 |
+
alter this branch's configuration accordingly.
|
| 167 |
+
|
| 168 |
+
:param remote_reference:
|
| 169 |
+
The remote reference to track or None to untrack any references.
|
| 170 |
+
|
| 171 |
+
:return:
|
| 172 |
+
self
|
| 173 |
+
"""
|
| 174 |
+
from .remote import RemoteReference
|
| 175 |
+
|
| 176 |
+
if remote_reference is not None and not isinstance(remote_reference, RemoteReference):
|
| 177 |
+
raise ValueError("Incorrect parameter type: %r" % remote_reference)
|
| 178 |
+
# END handle type
|
| 179 |
+
|
| 180 |
+
with self.config_writer() as writer:
|
| 181 |
+
if remote_reference is None:
|
| 182 |
+
writer.remove_option(self.k_config_remote)
|
| 183 |
+
writer.remove_option(self.k_config_remote_ref)
|
| 184 |
+
if len(writer.options()) == 0:
|
| 185 |
+
writer.remove_section()
|
| 186 |
+
else:
|
| 187 |
+
writer.set_value(self.k_config_remote, remote_reference.remote_name)
|
| 188 |
+
writer.set_value(
|
| 189 |
+
self.k_config_remote_ref,
|
| 190 |
+
Head.to_full_path(remote_reference.remote_head),
|
| 191 |
+
)
|
| 192 |
+
|
| 193 |
+
return self
|
| 194 |
+
|
| 195 |
+
def tracking_branch(self) -> Union["RemoteReference", None]:
|
| 196 |
+
"""
|
| 197 |
+
:return:
|
| 198 |
+
The remote reference we are tracking, or ``None`` if we are not a tracking
|
| 199 |
+
branch.
|
| 200 |
+
"""
|
| 201 |
+
from .remote import RemoteReference
|
| 202 |
+
|
| 203 |
+
reader = self.config_reader()
|
| 204 |
+
if reader.has_option(self.k_config_remote) and reader.has_option(self.k_config_remote_ref):
|
| 205 |
+
ref = Head(
|
| 206 |
+
self.repo,
|
| 207 |
+
Head.to_full_path(strip_quotes(reader.get_value(self.k_config_remote_ref))),
|
| 208 |
+
)
|
| 209 |
+
remote_refpath = RemoteReference.to_full_path(join_path(reader.get_value(self.k_config_remote), ref.name))
|
| 210 |
+
return RemoteReference(self.repo, remote_refpath)
|
| 211 |
+
# END handle have tracking branch
|
| 212 |
+
|
| 213 |
+
# We are not a tracking branch.
|
| 214 |
+
return None
|
| 215 |
+
|
| 216 |
+
def rename(self, new_path: PathLike, force: bool = False) -> "Head":
|
| 217 |
+
"""Rename self to a new path.
|
| 218 |
+
|
| 219 |
+
:param new_path:
|
| 220 |
+
Either a simple name or a path, e.g. ``new_name`` or ``features/new_name``.
|
| 221 |
+
The prefix ``refs/heads`` is implied.
|
| 222 |
+
|
| 223 |
+
:param force:
|
| 224 |
+
If ``True``, the rename will succeed even if a head with the target name
|
| 225 |
+
already exists.
|
| 226 |
+
|
| 227 |
+
:return:
|
| 228 |
+
self
|
| 229 |
+
|
| 230 |
+
:note:
|
| 231 |
+
Respects the ref log, as git commands are used.
|
| 232 |
+
"""
|
| 233 |
+
flag = "-m"
|
| 234 |
+
if force:
|
| 235 |
+
flag = "-M"
|
| 236 |
+
|
| 237 |
+
self.repo.git.branch(flag, self, new_path)
|
| 238 |
+
self.path = "%s/%s" % (self._common_path_default, new_path)
|
| 239 |
+
return self
|
| 240 |
+
|
| 241 |
+
def checkout(self, force: bool = False, **kwargs: Any) -> Union["HEAD", "Head"]:
|
| 242 |
+
"""Check out this head by setting the HEAD to this reference, by updating the
|
| 243 |
+
index to reflect the tree we point to and by updating the working tree to
|
| 244 |
+
reflect the latest index.
|
| 245 |
+
|
| 246 |
+
The command will fail if changed working tree files would be overwritten.
|
| 247 |
+
|
| 248 |
+
:param force:
|
| 249 |
+
If ``True``, changes to the index and the working tree will be discarded.
|
| 250 |
+
If ``False``, :exc:`~git.exc.GitCommandError` will be raised in that
|
| 251 |
+
situation.
|
| 252 |
+
|
| 253 |
+
:param kwargs:
|
| 254 |
+
Additional keyword arguments to be passed to git checkout, e.g.
|
| 255 |
+
``b="new_branch"`` to create a new branch at the given spot.
|
| 256 |
+
|
| 257 |
+
:return:
|
| 258 |
+
The active branch after the checkout operation, usually self unless a new
|
| 259 |
+
branch has been created.
|
| 260 |
+
If there is no active branch, as the HEAD is now detached, the HEAD
|
| 261 |
+
reference will be returned instead.
|
| 262 |
+
|
| 263 |
+
:note:
|
| 264 |
+
By default it is only allowed to checkout heads - everything else will leave
|
| 265 |
+
the HEAD detached which is allowed and possible, but remains a special state
|
| 266 |
+
that some tools might not be able to handle.
|
| 267 |
+
"""
|
| 268 |
+
kwargs["f"] = force
|
| 269 |
+
if kwargs["f"] is False:
|
| 270 |
+
kwargs.pop("f")
|
| 271 |
+
|
| 272 |
+
self.repo.git.checkout(self, **kwargs)
|
| 273 |
+
if self.repo.head.is_detached:
|
| 274 |
+
return self.repo.head
|
| 275 |
+
else:
|
| 276 |
+
return self.repo.active_branch
|
| 277 |
+
|
| 278 |
+
# { Configuration
|
| 279 |
+
def _config_parser(self, read_only: bool) -> SectionConstraint[GitConfigParser]:
|
| 280 |
+
if read_only:
|
| 281 |
+
parser = self.repo.config_reader()
|
| 282 |
+
else:
|
| 283 |
+
parser = self.repo.config_writer()
|
| 284 |
+
# END handle parser instance
|
| 285 |
+
|
| 286 |
+
return SectionConstraint(parser, 'branch "%s"' % self.name)
|
| 287 |
+
|
| 288 |
+
def config_reader(self) -> SectionConstraint[GitConfigParser]:
|
| 289 |
+
"""
|
| 290 |
+
:return:
|
| 291 |
+
A configuration parser instance constrained to only read this instance's
|
| 292 |
+
values.
|
| 293 |
+
"""
|
| 294 |
+
return self._config_parser(read_only=True)
|
| 295 |
+
|
| 296 |
+
def config_writer(self) -> SectionConstraint[GitConfigParser]:
|
| 297 |
+
"""
|
| 298 |
+
:return:
|
| 299 |
+
A configuration writer instance with read-and write access to options of
|
| 300 |
+
this head.
|
| 301 |
+
"""
|
| 302 |
+
return self._config_parser(read_only=False)
|
| 303 |
+
|
| 304 |
+
# } END configuration
|
parrot/lib/python3.10/site-packages/git/refs/log.py
ADDED
|
@@ -0,0 +1,399 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is part of GitPython and is released under the
|
| 2 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 3 |
+
|
| 4 |
+
__all__ = ["RefLog", "RefLogEntry"]
|
| 5 |
+
|
| 6 |
+
from mmap import mmap
|
| 7 |
+
import os.path as osp
|
| 8 |
+
import re
|
| 9 |
+
import time as _time
|
| 10 |
+
|
| 11 |
+
from git.compat import defenc
|
| 12 |
+
from git.objects.util import (
|
| 13 |
+
Serializable,
|
| 14 |
+
altz_to_utctz_str,
|
| 15 |
+
parse_date,
|
| 16 |
+
)
|
| 17 |
+
from git.util import (
|
| 18 |
+
Actor,
|
| 19 |
+
LockedFD,
|
| 20 |
+
LockFile,
|
| 21 |
+
assure_directory_exists,
|
| 22 |
+
bin_to_hex,
|
| 23 |
+
file_contents_ro_filepath,
|
| 24 |
+
to_native_path,
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
# typing ------------------------------------------------------------------
|
| 28 |
+
|
| 29 |
+
from typing import Iterator, List, Tuple, TYPE_CHECKING, Union
|
| 30 |
+
|
| 31 |
+
from git.types import PathLike
|
| 32 |
+
|
| 33 |
+
if TYPE_CHECKING:
|
| 34 |
+
from io import BytesIO
|
| 35 |
+
|
| 36 |
+
from git.config import GitConfigParser, SectionConstraint
|
| 37 |
+
from git.refs import SymbolicReference
|
| 38 |
+
|
| 39 |
+
# ------------------------------------------------------------------------------
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class RefLogEntry(Tuple[str, str, Actor, Tuple[int, int], str]):
|
| 43 |
+
"""Named tuple allowing easy access to the revlog data fields."""
|
| 44 |
+
|
| 45 |
+
_re_hexsha_only = re.compile(r"^[0-9A-Fa-f]{40}$")
|
| 46 |
+
|
| 47 |
+
__slots__ = ()
|
| 48 |
+
|
| 49 |
+
def __repr__(self) -> str:
|
| 50 |
+
"""Representation of ourselves in git reflog format."""
|
| 51 |
+
return self.format()
|
| 52 |
+
|
| 53 |
+
def format(self) -> str:
|
| 54 |
+
""":return: A string suitable to be placed in a reflog file."""
|
| 55 |
+
act = self.actor
|
| 56 |
+
time = self.time
|
| 57 |
+
return "{} {} {} <{}> {!s} {}\t{}\n".format(
|
| 58 |
+
self.oldhexsha,
|
| 59 |
+
self.newhexsha,
|
| 60 |
+
act.name,
|
| 61 |
+
act.email,
|
| 62 |
+
time[0],
|
| 63 |
+
altz_to_utctz_str(time[1]),
|
| 64 |
+
self.message,
|
| 65 |
+
)
|
| 66 |
+
|
| 67 |
+
@property
|
| 68 |
+
def oldhexsha(self) -> str:
|
| 69 |
+
"""The hexsha to the commit the ref pointed to before the change."""
|
| 70 |
+
return self[0]
|
| 71 |
+
|
| 72 |
+
@property
|
| 73 |
+
def newhexsha(self) -> str:
|
| 74 |
+
"""The hexsha to the commit the ref now points to, after the change."""
|
| 75 |
+
return self[1]
|
| 76 |
+
|
| 77 |
+
@property
|
| 78 |
+
def actor(self) -> Actor:
|
| 79 |
+
"""Actor instance, providing access."""
|
| 80 |
+
return self[2]
|
| 81 |
+
|
| 82 |
+
@property
|
| 83 |
+
def time(self) -> Tuple[int, int]:
|
| 84 |
+
"""Time as tuple:
|
| 85 |
+
|
| 86 |
+
* [0] = ``int(time)``
|
| 87 |
+
* [1] = ``int(timezone_offset)`` in :attr:`time.altzone` format
|
| 88 |
+
"""
|
| 89 |
+
return self[3]
|
| 90 |
+
|
| 91 |
+
@property
|
| 92 |
+
def message(self) -> str:
|
| 93 |
+
"""Message describing the operation that acted on the reference."""
|
| 94 |
+
return self[4]
|
| 95 |
+
|
| 96 |
+
@classmethod
|
| 97 |
+
def new(
|
| 98 |
+
cls,
|
| 99 |
+
oldhexsha: str,
|
| 100 |
+
newhexsha: str,
|
| 101 |
+
actor: Actor,
|
| 102 |
+
time: int,
|
| 103 |
+
tz_offset: int,
|
| 104 |
+
message: str,
|
| 105 |
+
) -> "RefLogEntry": # skipcq: PYL-W0621
|
| 106 |
+
""":return: New instance of a :class:`RefLogEntry`"""
|
| 107 |
+
if not isinstance(actor, Actor):
|
| 108 |
+
raise ValueError("Need actor instance, got %s" % actor)
|
| 109 |
+
# END check types
|
| 110 |
+
return RefLogEntry((oldhexsha, newhexsha, actor, (time, tz_offset), message))
|
| 111 |
+
|
| 112 |
+
@classmethod
def from_line(cls, line: bytes) -> "RefLogEntry":
    """:return: New :class:`RefLogEntry` instance from the given revlog line.

    :param line:
        Line bytes without trailing newline

    :raise ValueError:
        If `line` could not be parsed.
    """
    text = line.decode(defenc)
    pieces = text.split("\t", 1)
    if len(pieces) == 1:
        info = pieces[0]
        msg = None
    elif len(pieces) == 2:
        info, msg = pieces
    else:
        raise ValueError("Line must have up to two TAB-separated fields. Got %s" % repr(text))
    # END handle first split

    # Fixed layout: 40-char old sha, space, 40-char new sha, space, actor...
    old_sha = info[:40]
    new_sha = info[41:81]
    for candidate in (old_sha, new_sha):
        if not cls._re_hexsha_only.match(candidate):
            raise ValueError("Invalid hexsha: %r" % (candidate,))
        # END if hexsha re doesn't match
    # END for each hexsha

    email_end = info.find(">", 82)
    if email_end == -1:
        raise ValueError("Missing token: >")
    # END handle missing end brace

    actor = Actor._from_string(info[82 : email_end + 1])
    time, tz_offset = parse_date(info[email_end + 2 :])  # skipcq: PYL-W0621

    return RefLogEntry((old_sha, new_sha, actor, (time, tz_offset), msg))
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
class RefLog(List[RefLogEntry], Serializable):
    R"""A reflog contains :class:`RefLogEntry`\s, each of which defines a certain state
    of the head in question. Custom query methods allow to retrieve log entries by date
    or by other criteria.

    Reflog entries are ordered. The first added entry is first in the list. The last
    entry, i.e. the last change of the head or reference, is last in the list.
    """

    # Path to the backing reflog file, or None when built purely in memory.
    __slots__ = ("_path",)

    def __new__(cls, filepath: Union[PathLike, None] = None) -> "RefLog":
        # Accept (and ignore) `filepath` here so the signature matches __init__.
        inst = super().__new__(cls)
        return inst

    def __init__(self, filepath: Union[PathLike, None] = None) -> None:
        """Initialize this instance with an optional filepath, from which we will
        initialize our data. The path is also used to write changes back using the
        :meth:`write` method."""
        self._path = filepath
        if filepath is not None:
            self._read_from_file()
        # END handle filepath

    def _read_from_file(self) -> None:
        """Populate this list from the file at ``self._path``, silently doing nothing
        if the file does not exist (a missing reflog is a valid state)."""
        try:
            fmap = file_contents_ro_filepath(self._path, stream=True, allow_mmap=True)
        except OSError:
            # It is possible and allowed that the file doesn't exist!
            return
        # END handle invalid log

        try:
            self._deserialize(fmap)
        finally:
            fmap.close()
        # END handle closing of handle

    # { Interface

    @classmethod
    def from_file(cls, filepath: PathLike) -> "RefLog":
        """
        :return:
            A new :class:`RefLog` instance containing all entries from the reflog at the
            given `filepath`.

        :param filepath:
            Path to reflog.

        :raise ValueError:
            If the file could not be read or was corrupted in some way.
        """
        return cls(filepath)

    @classmethod
    def path(cls, ref: "SymbolicReference") -> str:
        """
        :return:
            String to absolute path at which the reflog of the given ref instance would
            be found. The path is not guaranteed to point to a valid file though.

        :param ref:
            :class:`~git.refs.symbolic.SymbolicReference` instance
        """
        # Reflogs live under <git_dir>/logs/<ref path>, mirroring the ref layout.
        return osp.join(ref.repo.git_dir, "logs", to_native_path(ref.path))

    @classmethod
    def iter_entries(cls, stream: Union[str, "BytesIO", mmap]) -> Iterator[RefLogEntry]:
        """
        :return:
            Iterator yielding :class:`RefLogEntry` instances, one for each line read
            from the given stream.

        :param stream:
            File-like object containing the revlog in its native format or string
            instance pointing to a file to read.
        """
        new_entry = RefLogEntry.from_line
        if isinstance(stream, str):
            # Default args return mmap since Python 3.
            _stream = file_contents_ro_filepath(stream)
            assert isinstance(_stream, mmap)
        else:
            _stream = stream
        # END handle stream type
        while True:
            line = _stream.readline()
            if not line:
                return
            yield new_entry(line.strip())
        # END endless loop

    @classmethod
    def entry_at(cls, filepath: PathLike, index: int) -> "RefLogEntry":
        """
        :return:
            :class:`RefLogEntry` at the given index.

        :param filepath:
            Full path to the index file from which to read the entry.

        :param index:
            Python list compatible index, i.e. it may be negative to specify an entry
            counted from the end of the list.

        :raise IndexError:
            If the entry didn't exist.

        :note:
            This method is faster as it only parses the entry at index, skipping all
            other lines. Nonetheless, the whole file has to be read if the index is
            negative.
        """
        with open(filepath, "rb") as fp:
            if index < 0:
                # Negative index: no way around reading all lines.
                return RefLogEntry.from_line(fp.readlines()[index].strip())
            # Read until index is reached.

            for i in range(index + 1):
                line = fp.readline()
                if not line:
                    raise IndexError(f"Index file ended at line {i + 1}, before given index was reached")
                # END abort on eof
            # END handle runup

            return RefLogEntry.from_line(line.strip())
        # END handle index

    def to_file(self, filepath: PathLike) -> None:
        """Write the contents of the reflog instance to a file at the given filepath.

        :param filepath:
            Path to file. Parent directories are assumed to exist.
        """
        lfd = LockedFD(filepath)
        assure_directory_exists(filepath, is_file=True)

        fp = lfd.open(write=True, stream=True)
        try:
            self._serialize(fp)
            lfd.commit()
        except BaseException:
            # On any failure, including KeyboardInterrupt, discard the partial write.
            lfd.rollback()
            raise
        # END handle change

    @classmethod
    def append_entry(
        cls,
        config_reader: Union[Actor, "GitConfigParser", "SectionConstraint", None],
        filepath: PathLike,
        oldbinsha: bytes,
        newbinsha: bytes,
        message: str,
        write: bool = True,
    ) -> "RefLogEntry":
        """Append a new log entry to the revlog at filepath.

        :param config_reader:
            Configuration reader of the repository - used to obtain user information.
            May also be an :class:`~git.util.Actor` instance identifying the committer
            directly or ``None``.

        :param filepath:
            Full path to the log file.

        :param oldbinsha:
            Binary sha of the previous commit.

        :param newbinsha:
            Binary sha of the current commit.

        :param message:
            Message describing the change to the reference.

        :param write:
            If ``True``, the changes will be written right away.
            Otherwise the change will not be written.

        :return:
            :class:`RefLogEntry` objects which was appended to the log.

        :note:
            As we are append-only, concurrent access is not a problem as we do not
            interfere with readers.
        """

        if len(oldbinsha) != 20 or len(newbinsha) != 20:
            raise ValueError("Shas need to be given in binary format")
        # END handle sha type
        assure_directory_exists(filepath, is_file=True)
        # Reflog messages are single-line; keep only the first line.
        first_line = message.split("\n")[0]
        if isinstance(config_reader, Actor):
            committer = config_reader  # mypy thinks this is Actor | Gitconfigparser, but why?
        else:
            committer = Actor.committer(config_reader)
        entry = RefLogEntry(
            (
                bin_to_hex(oldbinsha).decode("ascii"),
                bin_to_hex(newbinsha).decode("ascii"),
                committer,
                (int(_time.time()), _time.altzone),
                first_line,
            )
        )

        if write:
            # NOTE(review): uses LockFile's private lock methods; the lock guards
            # against concurrent appenders, while readers need no coordination.
            lf = LockFile(filepath)
            lf._obtain_lock_or_raise()
            fd = open(filepath, "ab")
            try:
                fd.write(entry.format().encode(defenc))
            finally:
                fd.close()
                lf._release_lock()
            # END handle write operation
        return entry

    def write(self) -> "RefLog":
        """Write this instance's data to the file we are originating from.

        :return:
            self
        """
        if self._path is None:
            raise ValueError("Instance was not initialized with a path, use to_file(...) instead")
        # END assert path
        self.to_file(self._path)
        return self

    # } END interface

    # { Serializable Interface

    def _serialize(self, stream: "BytesIO") -> "RefLog":
        write = stream.write

        # Write all entries.
        for e in self:
            write(e.format().encode(defenc))
        # END for each entry
        return self

    def _deserialize(self, stream: "BytesIO") -> "RefLog":
        self.extend(self.iter_entries(stream))
        return self

    # } END serializable interface
|
parrot/lib/python3.10/site-packages/git/refs/reference.py
ADDED
|
@@ -0,0 +1,176 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is part of GitPython and is released under the
|
| 2 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 3 |
+
|
| 4 |
+
__all__ = ["Reference"]
|
| 5 |
+
|
| 6 |
+
from git.util import IterableObj, LazyMixin
|
| 7 |
+
|
| 8 |
+
from .symbolic import SymbolicReference, T_References
|
| 9 |
+
|
| 10 |
+
# typing ------------------------------------------------------------------
|
| 11 |
+
|
| 12 |
+
from typing import Any, Callable, Iterator, TYPE_CHECKING, Type, Union
|
| 13 |
+
|
| 14 |
+
from git.types import AnyGitObject, PathLike, _T
|
| 15 |
+
|
| 16 |
+
if TYPE_CHECKING:
|
| 17 |
+
from git.repo import Repo
|
| 18 |
+
|
| 19 |
+
# ------------------------------------------------------------------------------
|
| 20 |
+
|
| 21 |
+
# { Utilities
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def require_remote_ref_path(func: Callable[..., _T]) -> Callable[..., _T]:
    """A decorator raising :exc:`ValueError` if we are not a valid remote, based on the
    path."""

    def wrapper(self: T_References, *args: Any) -> _T:
        # Guard clause: only refs whose path marks them as remote may proceed.
        if self.is_remote():
            return func(self, *args)
        raise ValueError("ref path does not point to a remote reference: %s" % self.path)

    # END wrapper
    wrapper.__name__ = func.__name__
    return wrapper
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
# } END utilities
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class Reference(SymbolicReference, LazyMixin, IterableObj):
    """A named reference to any object.

    Subclasses may apply restrictions though, e.g., a :class:`~git.refs.head.Head` can
    only point to commits.
    """

    __slots__ = ()

    _points_to_commits_only = False
    _resolve_ref_on_create = True
    _common_path_default = "refs"

    def __init__(self, repo: "Repo", path: PathLike, check_path: bool = True) -> None:
        """Initialize this instance.

        :param repo:
            Our parent repository.

        :param path:
            Path relative to the ``.git/`` directory pointing to the ref in question,
            e.g. ``refs/heads/master``.

        :param check_path:
            If ``False``, you can provide any path.
            Otherwise the path must start with the default path prefix of this type.
        """
        if check_path and not str(path).startswith(self._common_path_default + "/"):
            raise ValueError(f"Cannot instantiate {self.__class__.__name__!r} from path {path}")
        self.path: str  # SymbolicReference converts to string at the moment.
        super().__init__(repo, path)

    def __str__(self) -> str:
        # Use the short name (e.g. "master"), not the full path.
        return self.name

    # { Interface

    # @ReservedAssignment
    def set_object(
        self,
        object: Union[AnyGitObject, "SymbolicReference", str],
        logmsg: Union[str, None] = None,
    ) -> "Reference":
        """Special version which checks if the head-log needs an update as well.

        :return:
            self
        """
        # Capture the old sha first: if HEAD points at us, its reflog must also
        # record this change (see the git source comment below).
        oldbinsha = None
        if logmsg is not None:
            head = self.repo.head
            if not head.is_detached and head.ref == self:
                oldbinsha = self.commit.binsha
            # END handle commit retrieval
        # END handle message is set

        super().set_object(object, logmsg)

        if oldbinsha is not None:
            # From refs/files-backend.c in git-source:
            # /*
            #  * Special hack: If a branch is updated directly and HEAD
            #  * points to it (may happen on the remote side of a push
            #  * for example) then logically the HEAD reflog should be
            #  * updated too.
            #  * A generic solution implies reverse symref information,
            #  * but finding all symrefs pointing to the given branch
            #  * would be rather costly for this rare event (the direct
            #  * update of a branch) to be worth it. So let's cheat and
            #  * check with HEAD only which should cover 99% of all usage
            #  * scenarios (even 100% of the default ones).
            #  */
            self.repo.head.log_append(oldbinsha, logmsg)
        # END check if the head

        return self

    # NOTE: No need to overwrite properties, as the will only work without a the log.

    @property
    def name(self) -> str:
        """
        :return:
            (shortest) Name of this reference - it may contain path components
        """
        # The first two path tokens can be removed as they are
        # refs/heads or refs/tags or refs/remotes.
        tokens = self.path.split("/")
        if len(tokens) < 3:
            return self.path  # could be refs/HEAD
        return "/".join(tokens[2:])

    @classmethod
    def iter_items(
        cls: Type[T_References],
        repo: "Repo",
        common_path: Union[PathLike, None] = None,
        *args: Any,
        **kwargs: Any,
    ) -> Iterator[T_References]:
        """Equivalent to
        :meth:`SymbolicReference.iter_items <git.refs.symbolic.SymbolicReference.iter_items>`,
        but will return non-detached references as well."""
        return cls._iter_items(repo, common_path)

    # } END interface

    # { Remote Interface

    @property
    @require_remote_ref_path
    def remote_name(self) -> str:
        """
        :return:
            Name of the remote we are a reference of, such as ``origin`` for a reference
            named ``origin/master``.
        """
        tokens = self.path.split("/")
        # /refs/remotes/<remote name>/<branch_name>
        return tokens[2]

    @property
    @require_remote_ref_path
    def remote_head(self) -> str:
        """
        :return:
            Name of the remote head itself, e.g. ``master``.

        :note:
            The returned name is usually not qualified enough to uniquely identify a
            branch.
        """
        tokens = self.path.split("/")
        # Everything after refs/remotes/<remote name>/ is the head name.
        return "/".join(tokens[3:])

    # } END remote interface
|
parrot/lib/python3.10/site-packages/git/refs/remote.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is part of GitPython and is released under the
|
| 2 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 3 |
+
|
| 4 |
+
"""Module implementing a remote object allowing easy access to git remotes."""
|
| 5 |
+
|
| 6 |
+
__all__ = ["RemoteReference"]
|
| 7 |
+
|
| 8 |
+
import os
|
| 9 |
+
|
| 10 |
+
from git.util import join_path
|
| 11 |
+
|
| 12 |
+
from .head import Head
|
| 13 |
+
|
| 14 |
+
# typing ------------------------------------------------------------------
|
| 15 |
+
|
| 16 |
+
from typing import Any, Iterator, NoReturn, TYPE_CHECKING, Union
|
| 17 |
+
|
| 18 |
+
from git.types import PathLike
|
| 19 |
+
|
| 20 |
+
if TYPE_CHECKING:
|
| 21 |
+
from git.remote import Remote
|
| 22 |
+
from git.repo import Repo
|
| 23 |
+
|
| 24 |
+
# ------------------------------------------------------------------------------
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class RemoteReference(Head):
    """A reference pointing to a remote head."""

    # Remote refs live under refs/remotes/ rather than refs/heads/.
    _common_path_default = Head._remote_common_path_default

    @classmethod
    def iter_items(
        cls,
        repo: "Repo",
        common_path: Union[PathLike, None] = None,
        remote: Union["Remote", None] = None,
        *args: Any,
        **kwargs: Any,
    ) -> Iterator["RemoteReference"]:
        """Iterate remote references, and if given, constrain them to the given remote."""
        common_path = common_path or cls._common_path_default
        if remote is not None:
            # Narrow the search to refs/remotes/<remote>/.
            common_path = join_path(common_path, str(remote))
        # END handle remote constraint
        # super is Reference
        return super().iter_items(repo, common_path)

    # The Head implementation of delete also accepts strs, but this implementation does
    # not. mypy doesn't have a way of representing tightening the types of arguments in
    # subclasses and recommends Any or "type: ignore".
    # (See: https://github.com/python/typing/issues/241)
    @classmethod
    def delete(cls, repo: "Repo", *refs: "RemoteReference", **kwargs: Any) -> None:  # type: ignore[override]
        """Delete the given remote references.

        :note:
            `kwargs` are given for comparability with the base class method as we
            should not narrow the signature.
        """
        repo.git.branch("-d", "-r", *refs)
        # The official deletion method will ignore remote symbolic refs - these are
        # generally ignored in the refs/ folder. We don't though and delete remainders
        # manually.
        for ref in refs:
            # Best-effort cleanup in both common_dir and git_dir; the loose ref file
            # may legitimately not exist in either location.
            try:
                os.remove(os.path.join(repo.common_dir, ref.path))
            except OSError:
                pass
            try:
                os.remove(os.path.join(repo.git_dir, ref.path))
            except OSError:
                pass
        # END for each ref

    @classmethod
    def create(cls, *args: Any, **kwargs: Any) -> NoReturn:
        """Raise :exc:`TypeError`. Defined so the ``create`` method is disabled."""
        raise TypeError("Cannot explicitly create remote references")
|
parrot/lib/python3.10/site-packages/git/refs/symbolic.py
ADDED
|
@@ -0,0 +1,926 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is part of GitPython and is released under the
|
| 2 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 3 |
+
|
| 4 |
+
__all__ = ["SymbolicReference"]
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
|
| 8 |
+
from gitdb.exc import BadName, BadObject
|
| 9 |
+
|
| 10 |
+
from git.compat import defenc
|
| 11 |
+
from git.objects.base import Object
|
| 12 |
+
from git.objects.commit import Commit
|
| 13 |
+
from git.refs.log import RefLog
|
| 14 |
+
from git.util import (
|
| 15 |
+
LockedFD,
|
| 16 |
+
assure_directory_exists,
|
| 17 |
+
hex_to_bin,
|
| 18 |
+
join_path,
|
| 19 |
+
join_path_native,
|
| 20 |
+
to_native_path_linux,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
# typing ------------------------------------------------------------------
|
| 24 |
+
|
| 25 |
+
from typing import (
|
| 26 |
+
Any,
|
| 27 |
+
Iterator,
|
| 28 |
+
List,
|
| 29 |
+
TYPE_CHECKING,
|
| 30 |
+
Tuple,
|
| 31 |
+
Type,
|
| 32 |
+
TypeVar,
|
| 33 |
+
Union,
|
| 34 |
+
cast,
|
| 35 |
+
)
|
| 36 |
+
|
| 37 |
+
from git.types import AnyGitObject, PathLike
|
| 38 |
+
|
| 39 |
+
if TYPE_CHECKING:
|
| 40 |
+
from git.config import GitConfigParser
|
| 41 |
+
from git.objects.commit import Actor
|
| 42 |
+
from git.refs import Head, TagReference, RemoteReference, Reference
|
| 43 |
+
from git.refs.log import RefLogEntry
|
| 44 |
+
from git.repo import Repo
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
T_References = TypeVar("T_References", bound="SymbolicReference")
|
| 48 |
+
|
| 49 |
+
# ------------------------------------------------------------------------------
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def _git_dir(repo: "Repo", path: Union[PathLike, None]) -> PathLike:
|
| 53 |
+
"""Find the git dir that is appropriate for the path."""
|
| 54 |
+
name = f"{path}"
|
| 55 |
+
if name in ["HEAD", "ORIG_HEAD", "FETCH_HEAD", "index", "logs"]:
|
| 56 |
+
return repo.git_dir
|
| 57 |
+
return repo.common_dir
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class SymbolicReference:
    """Special case of a reference that is symbolic.

    This does not point to a specific commit, but to another
    :class:`~git.refs.head.Head`, which itself specifies a commit.

    A typical example for a symbolic reference is :class:`~git.refs.head.HEAD`.
    """

    __slots__ = ("repo", "path")

    _resolve_ref_on_create = False
    _points_to_commits_only = True
    _common_path_default = ""
    _remote_common_path_default = "refs/remotes"
    _id_attribute_ = "name"

    def __init__(self, repo: "Repo", path: PathLike, check_path: bool = False) -> None:
        self.repo = repo
        self.path = path

    def __str__(self) -> str:
        return str(self.path)

    def __repr__(self) -> str:
        return '<git.%s "%s">' % (self.__class__.__name__, self.path)

    def __eq__(self, other: object) -> bool:
        if hasattr(other, "path"):
            other = cast(SymbolicReference, other)
            return self.path == other.path
        return False

    def __ne__(self, other: object) -> bool:
        return not (self == other)

    def __hash__(self) -> int:
        return hash(self.path)

    @property
    def name(self) -> str:
        """
        :return:
            In case of symbolic references, the shortest assumable name is the path
            itself.
        """
        return str(self.path)

    @property
    def abspath(self) -> PathLike:
        return join_path_native(_git_dir(self.repo, self.path), self.path)

    @classmethod
    def _get_packed_refs_path(cls, repo: "Repo") -> str:
        return os.path.join(repo.common_dir, "packed-refs")

    @classmethod
    def _iter_packed_refs(cls, repo: "Repo") -> Iterator[Tuple[str, str]]:
        """Return an iterator yielding pairs of sha1/path pairs (as strings) for the
        corresponding refs.

        :note:
            The packed refs file will be kept open as long as we iterate.
        """
        try:
            with open(cls._get_packed_refs_path(repo), "rt", encoding="UTF-8") as fp:
                for line in fp:
                    line = line.strip()
                    if not line:
                        continue
                    if line.startswith("#"):
                        # "# pack-refs with: peeled fully-peeled sorted"
                        # the git source code shows "peeled",
                        # "fully-peeled" and "sorted" as the keywords
                        # that can go on this line, as per comments in git file
                        # refs/packed-backend.c
                        # I looked at master on 2017-10-11,
                        # commit 111ef79afe, after tag v2.15.0-rc1
                        # from repo https://github.com/git/git.git
                        if line.startswith("# pack-refs with:") and "peeled" not in line:
                            raise TypeError("PackingType of packed-Refs not understood: %r" % line)
                        # END abort if we do not understand the packing scheme
                        continue
                    # END parse comment

                    # Skip dereferenced tag object entries - previous line was actual
                    # tag reference for it.
                    if line[0] == "^":
                        continue

                    yield cast(Tuple[str, str], tuple(line.split(" ", 1)))
                # END for each line
        except OSError:
            return None
        # END no packed-refs file handling

    @classmethod
    def dereference_recursive(cls, repo: "Repo", ref_path: Union[PathLike, None]) -> str:
        """
        :return:
            hexsha stored in the reference at the given `ref_path`, recursively
            dereferencing all intermediate references as required

        :param repo:
            The repository containing the reference at `ref_path`.
        """

        while True:
            hexsha, ref_path = cls._get_ref_info(repo, ref_path)
            if hexsha is not None:
                return hexsha
        # END recursive dereferencing

    @staticmethod
    def _check_ref_name_valid(ref_path: PathLike) -> None:
        """Check a ref name for validity.

        This is based on the rules described in :manpage:`git-check-ref-format(1)`.
        """
        previous: Union[str, None] = None
        one_before_previous: Union[str, None] = None
        for c in str(ref_path):
            if c in " ~^:?*[\\":
                raise ValueError(
                    f"Invalid reference '{ref_path}': references cannot contain spaces, tildes (~), carets (^),"
                    f" colons (:), question marks (?), asterisks (*), open brackets ([) or backslashes (\\)"
                )
            elif c == ".":
                if previous is None or previous == "/":
                    raise ValueError(
                        f"Invalid reference '{ref_path}': references cannot start with a period (.) or contain '/.'"
                    )
                elif previous == ".":
                    raise ValueError(f"Invalid reference '{ref_path}': references cannot contain '..'")
            elif c == "/":
                if previous == "/":
                    raise ValueError(f"Invalid reference '{ref_path}': references cannot contain '//'")
                elif previous is None:
                    raise ValueError(
                        f"Invalid reference '{ref_path}': references cannot start with forward slashes '/'"
                    )
            elif c == "{" and previous == "@":
                raise ValueError(f"Invalid reference '{ref_path}': references cannot contain '@{{'")
            elif ord(c) < 32 or ord(c) == 127:
                raise ValueError(f"Invalid reference '{ref_path}': references cannot contain ASCII control characters")

            one_before_previous = previous
            previous = c

        if previous == ".":
            raise ValueError(f"Invalid reference '{ref_path}': references cannot end with a period (.)")
        elif previous == "/":
            raise ValueError(f"Invalid reference '{ref_path}': references cannot end with a forward slash (/)")
        elif previous == "@" and one_before_previous is None:
            raise ValueError(f"Invalid reference '{ref_path}': references cannot be '@'")
        elif any(component.endswith(".lock") for component in str(ref_path).split("/")):
            raise ValueError(
                f"Invalid reference '{ref_path}': references cannot have slash-separated components that end with"
                " '.lock'"
            )

    @classmethod
    def _get_ref_info_helper(
        cls, repo: "Repo", ref_path: Union[PathLike, None]
    ) -> Union[Tuple[str, None], Tuple[None, str]]:
        """
        :return:
            *(str(sha), str(target_ref_path))*, where:

            * *sha* is of the file at rela_path points to if available, or ``None``.
            * *target_ref_path* is the reference we point to, or ``None``.
        """
        if ref_path:
            cls._check_ref_name_valid(ref_path)

        tokens: Union[None, List[str], Tuple[str, str]] = None
        repodir = _git_dir(repo, ref_path)
        try:
            with open(os.path.join(repodir, str(ref_path)), "rt", encoding="UTF-8") as fp:
                value = fp.read().rstrip()
            # Don't only split on spaces, but on whitespace, which allows to parse lines like:
            # 60b64ef992065e2600bfef6187a97f92398a9144 branch 'master' of git-server:/path/to/repo
            tokens = value.split()
            assert len(tokens) != 0
        except OSError:
            # Probably we are just packed. Find our entry in the packed refs file.
            # NOTE: We are not a symbolic ref if we are in a packed file, as these
            # are excluded explicitly.
            for sha, path in cls._iter_packed_refs(repo):
                if path != ref_path:
                    continue
                # sha will be used.
                tokens = sha, path
                break
            # END for each packed ref
        # END handle packed refs
        if tokens is None:
            raise ValueError("Reference at %r does not exist" % ref_path)

        # Is it a reference?
        if tokens[0] == "ref:":
            return (None, tokens[1])

        # It's a commit.
        if repo.re_hexsha_only.match(tokens[0]):
            return (tokens[0], None)

        raise ValueError("Failed to parse reference information from %r" % ref_path)

    @classmethod
    def _get_ref_info(cls, repo: "Repo", ref_path: Union[PathLike, None]) -> Union[Tuple[str, None], Tuple[None, str]]:
        """
        :return:
            *(str(sha), str(target_ref_path))*, where:

            * *sha* is of the file at rela_path points to if available, or ``None``.
            * *target_ref_path* is the reference we point to, or ``None``.
        """
        return cls._get_ref_info_helper(repo, ref_path)

    def _get_object(self) -> AnyGitObject:
        """
        :return:
            The object our ref currently refers to. Refs can be cached, they will always
            point to the actual object as it gets re-created on each query.
        """
        # We have to be dynamic here as we may be a tag which can point to anything.
        # Our path will be resolved to the hexsha which will be used accordingly.
        return Object.new_from_sha(self.repo, hex_to_bin(self.dereference_recursive(self.repo, self.path)))

    def _get_commit(self) -> "Commit":
        """
        :return:
            :class:`~git.objects.commit.Commit` object we point to. This works for
            detached and non-detached :class:`SymbolicReference` instances. The symbolic
            reference will be dereferenced recursively.
        """
        obj = self._get_object()
        if obj.type == "tag":
            obj = obj.object
        # END dereference tag

        if obj.type != Commit.type:
            raise TypeError("Symbolic Reference pointed to object %r, commit was required" % obj)
        # END handle type
        return obj

    def set_commit(
        self,
        commit: Union[Commit, "SymbolicReference", str],
        logmsg: Union[str, None] = None,
    ) -> "SymbolicReference":
        """Like :meth:`set_object`, but restricts the type of object to be a
        :class:`~git.objects.commit.Commit`.

        :raise ValueError:
            If `commit` is not a :class:`~git.objects.commit.Commit` object, nor does it
            point to a commit.

        :return:
            self
        """
        # Check the type - assume the best if it is a base-string.
        invalid_type = False
        if isinstance(commit, Object):
            invalid_type = commit.type != Commit.type
        elif isinstance(commit, SymbolicReference):
            invalid_type = commit.object.type != Commit.type
        else:
            try:
                invalid_type = self.repo.rev_parse(commit).type != Commit.type
            except (BadObject, BadName) as e:
                raise ValueError("Invalid object: %s" % commit) from e
            # END handle exception
        # END verify type

        if invalid_type:
            raise ValueError("Need commit, got %r" % commit)
        # END handle raise

        # We leave strings to the rev-parse method below.
        self.set_object(commit, logmsg)

        return self

    def set_object(
        self,
        object: Union[AnyGitObject, "SymbolicReference", str],
        logmsg: Union[str, None] = None,
    ) -> "SymbolicReference":
        """Set the object we point to, possibly dereference our symbolic reference
        first. If the reference does not exist, it will be created.

        :param object:
            A refspec, a :class:`SymbolicReference` or an
            :class:`~git.objects.base.Object` instance.

            * :class:`SymbolicReference` instances will be dereferenced beforehand to
              obtain the git object they point to.
            * :class:`~git.objects.base.Object` instances must represent git objects
              (:class:`~git.types.AnyGitObject`).

        :param logmsg:
            If not ``None``, the message will be used in the reflog entry to be written.
            Otherwise the reflog is not altered.

        :note:
            Plain :class:`SymbolicReference` instances may not actually point to objects
            by convention.

        :return:
            self
        """
        if isinstance(object, SymbolicReference):
            object = object.object  # @ReservedAssignment
        # END resolve references

        is_detached = True
        try:
            is_detached = self.is_detached
        except ValueError:
            pass
        # END handle non-existing ones

        if is_detached:
            return self.set_reference(object, logmsg)

        # set the commit on our reference
        return self._get_reference().set_object(object, logmsg)

    commit = property(
        _get_commit,
        set_commit,  # type: ignore[arg-type]
        doc="Query or set commits directly",
    )

    object = property(
        _get_object,
        set_object,  # type: ignore[arg-type]
        doc="Return the object our ref currently refers to",
    )

    def _get_reference(self) -> "SymbolicReference":
        """
        :return:
            :class:`~git.refs.reference.Reference` object we point to

        :raise TypeError:
            If this symbolic reference is detached, hence it doesn't point to a
            reference, but to a commit.
        """
        sha, target_ref_path = self._get_ref_info(self.repo, self.path)
        if target_ref_path is None:
            raise TypeError("%s is a detached symbolic reference as it points to %r" % (self, sha))
        return self.from_path(self.repo, target_ref_path)

    def set_reference(
        self,
        ref: Union[AnyGitObject, "SymbolicReference", str],
        logmsg: Union[str, None] = None,
    ) -> "SymbolicReference":
        """Set ourselves to the given `ref`.

        It will stay a symbol if the `ref` is a :class:`~git.refs.reference.Reference`.

        Otherwise a git object, specified as a :class:`~git.objects.base.Object`
        instance or refspec, is assumed. If it is valid, this reference will be set to
        it, which effectively detaches the reference if it was a purely symbolic one.

        :param ref:
            A :class:`SymbolicReference` instance, an :class:`~git.objects.base.Object`
            instance (specifically an :class:`~git.types.AnyGitObject`), or a refspec
            string. Only if the ref is a :class:`SymbolicReference` instance, we will
            point to it. Everything else is dereferenced to obtain the actual object.

        :param logmsg:
            If set to a string, the message will be used in the reflog.
            Otherwise, a reflog entry is not written for the changed reference.
            The previous commit of the entry will be the commit we point to now.

            See also: :meth:`log_append`

        :return:
            self

        :note:
            This symbolic reference will not be dereferenced. For that, see
            :meth:`set_object`.
        """
        write_value = None
        obj = None
        if isinstance(ref, SymbolicReference):
            write_value = "ref: %s" % ref.path
        elif isinstance(ref, Object):
            obj = ref
            write_value = ref.hexsha
        elif isinstance(ref, str):
            try:
                obj = self.repo.rev_parse(ref + "^{}")  # Optionally dereference tags.
                write_value = obj.hexsha
            except (BadObject, BadName) as e:
                raise ValueError("Could not extract object from %s" % ref) from e
            # END end try string
        else:
            raise ValueError("Unrecognized Value: %r" % ref)
        # END try commit attribute

        # typecheck
        if obj is not None and self._points_to_commits_only and obj.type != Commit.type:
            raise TypeError("Require commit, got %r" % obj)
        # END verify type

        oldbinsha: bytes = b""
        if logmsg is not None:
            try:
                oldbinsha = self.commit.binsha
            except ValueError:
                oldbinsha = Commit.NULL_BIN_SHA
            # END handle non-existing
        # END retrieve old hexsha

        fpath = self.abspath
        assure_directory_exists(fpath, is_file=True)

        lfd = LockedFD(fpath)
        fd = lfd.open(write=True, stream=True)
        try:
            fd.write(write_value.encode("utf-8") + b"\n")
            lfd.commit()
        except BaseException:
            lfd.rollback()
            raise
        # Adjust the reflog
        if logmsg is not None:
            self.log_append(oldbinsha, logmsg)

        return self

    # Aliased reference
    reference: Union["Head", "TagReference", "RemoteReference", "Reference"]
    reference = property(  # type: ignore[assignment]
        _get_reference,
        set_reference,  # type: ignore[arg-type]
        doc="Returns the Reference we point to",
    )
    ref = reference

    def is_valid(self) -> bool:
        """
        :return:
            ``True`` if the reference is valid, hence it can be read and points to a
            valid object or reference.
        """
        try:
            self.object  # noqa: B018
        except (OSError, ValueError):
            return False
        else:
            return True

    @property
    def is_detached(self) -> bool:
        """
        :return:
            ``True`` if we are a detached reference, hence we point to a specific commit
            instead to another reference.
        """
        try:
            self.ref  # noqa: B018
            return False
        except TypeError:
            return True

    def log(self) -> "RefLog":
        """
        :return:
            :class:`~git.refs.log.RefLog` for this reference.
            Its last entry reflects the latest change applied to this reference.

        :note:
            As the log is parsed every time, its recommended to cache it for use instead
            of calling this method repeatedly. It should be considered read-only.
        """
        return RefLog.from_file(RefLog.path(self))

    def log_append(
        self,
        oldbinsha: bytes,
        message: Union[str, None],
        newbinsha: Union[bytes, None] = None,
    ) -> "RefLogEntry":
        """Append a logentry to the logfile of this ref.

        :param oldbinsha:
            Binary sha this ref used to point to.

        :param message:
            A message describing the change.

        :param newbinsha:
            The sha the ref points to now. If None, our current commit sha will be used.

        :return:
            The added :class:`~git.refs.log.RefLogEntry` instance.
        """
        # NOTE: We use the committer of the currently active commit - this should be
        # correct to allow overriding the committer on a per-commit level.
        # See https://github.com/gitpython-developers/GitPython/pull/146.
        try:
            committer_or_reader: Union["Actor", "GitConfigParser"] = self.commit.committer
        except ValueError:
            committer_or_reader = self.repo.config_reader()
        # END handle newly cloned repositories
        if newbinsha is None:
            newbinsha = self.commit.binsha

        if message is None:
            message = ""

        return RefLog.append_entry(committer_or_reader, RefLog.path(self), oldbinsha, newbinsha, message)

    def log_entry(self, index: int) -> "RefLogEntry":
        """
        :return:
            :class:`~git.refs.log.RefLogEntry` at the given index

        :param index:
            Python list compatible positive or negative index.

        :note:
            This method must read part of the reflog during execution, hence it should
            be used sparingly, or only if you need just one index. In that case, it will
            be faster than the :meth:`log` method.
        """
        return RefLog.entry_at(RefLog.path(self), index)

    @classmethod
    def to_full_path(cls, path: Union[PathLike, "SymbolicReference"]) -> PathLike:
        """
        :return:
            String with a full repository-relative path which can be used to initialize
            a :class:`~git.refs.reference.Reference` instance, for instance by using
            :meth:`Reference.from_path <git.refs.reference.Reference.from_path>`.
        """
        if isinstance(path, SymbolicReference):
            path = path.path
        full_ref_path = path
        if not cls._common_path_default:
            return full_ref_path
        if not str(path).startswith(cls._common_path_default + "/"):
            full_ref_path = "%s/%s" % (cls._common_path_default, path)
        return full_ref_path

    @classmethod
    def delete(cls, repo: "Repo", path: PathLike) -> None:
        """Delete the reference at the given path.

        :param repo:
            Repository to delete the reference from.

        :param path:
            Short or full path pointing to the reference, e.g. ``refs/myreference`` or
            just ``myreference``, hence ``refs/`` is implied.
            Alternatively the symbolic reference to be deleted.
        """
        full_ref_path = cls.to_full_path(path)
        abs_path = os.path.join(repo.common_dir, full_ref_path)
        if os.path.exists(abs_path):
            os.remove(abs_path)
        else:
            # Check packed refs.
            pack_file_path = cls._get_packed_refs_path(repo)
            try:
                with open(pack_file_path, "rb") as reader:
                    new_lines = []
                    made_change = False
                    dropped_last_line = False
                    for line_bytes in reader:
                        line = line_bytes.decode(defenc)
                        _, _, line_ref = line.partition(" ")
                        line_ref = line_ref.strip()
                        # Keep line if it is a comment or if the ref to delete is not in
                        # the line.
                        # If we deleted the last line and this one is a tag-reference
                        # object, we drop it as well.
                        if (line.startswith("#") or full_ref_path != line_ref) and (
                            not dropped_last_line or dropped_last_line and not line.startswith("^")
                        ):
                            new_lines.append(line)
                            dropped_last_line = False
                            continue
                        # END skip comments and lines without our path

                        # Drop this line.
                        made_change = True
                        dropped_last_line = True

                # Write the new lines.
                if made_change:
                    # Binary writing is required, otherwise Windows will open the file
                    # in text mode and change LF to CRLF!
                    with open(pack_file_path, "wb") as fd:
                        fd.writelines(line.encode(defenc) for line in new_lines)

            except OSError:
                pass  # It didn't exist at all.

        # Delete the reflog.
        reflog_path = RefLog.path(cls(repo, full_ref_path))
        if os.path.isfile(reflog_path):
            os.remove(reflog_path)
        # END remove reflog

    @classmethod
    def _create(
        cls: Type[T_References],
        repo: "Repo",
        path: PathLike,
        resolve: bool,
        reference: Union["SymbolicReference", str],
        force: bool,
        logmsg: Union[str, None] = None,
    ) -> T_References:
        """Internal method used to create a new symbolic reference.

        If `resolve` is ``False``, the reference will be taken as is, creating a proper
        symbolic reference. Otherwise it will be resolved to the corresponding object
        and a detached symbolic reference will be created instead.
        """
        git_dir = _git_dir(repo, path)
        full_ref_path = cls.to_full_path(path)
        abs_ref_path = os.path.join(git_dir, full_ref_path)

        # Figure out target data.
        target = reference
        if resolve:
            target = repo.rev_parse(str(reference))

        if not force and os.path.isfile(abs_ref_path):
            target_data = str(target)
            if isinstance(target, SymbolicReference):
                target_data = str(target.path)
            if not resolve:
                target_data = "ref: " + target_data
            with open(abs_ref_path, "rb") as fd:
                existing_data = fd.read().decode(defenc).strip()
            if existing_data != target_data:
                raise OSError(
                    "Reference at %r does already exist, pointing to %r, requested was %r"
                    % (full_ref_path, existing_data, target_data)
                )
        # END no force handling

        ref = cls(repo, full_ref_path)
        ref.set_reference(target, logmsg)
        return ref

    @classmethod
    def create(
        cls: Type[T_References],
        repo: "Repo",
        path: PathLike,
        reference: Union["SymbolicReference", str] = "HEAD",
        logmsg: Union[str, None] = None,
        force: bool = False,
        **kwargs: Any,
    ) -> T_References:
        """Create a new symbolic reference: a reference pointing to another reference.

        :param repo:
            Repository to create the reference in.

        :param path:
            Full path at which the new symbolic reference is supposed to be created at,
            e.g. ``NEW_HEAD`` or ``symrefs/my_new_symref``.

        :param reference:
            The reference which the new symbolic reference should point to.
            If it is a commit-ish, the symbolic ref will be detached.

        :param force:
            If ``True``, force creation even if a symbolic reference with that name
            already exists. Raise :exc:`OSError` otherwise.

        :param logmsg:
            If not ``None``, the message to append to the reflog.
            If ``None``, no reflog entry is written.

        :return:
            Newly created symbolic reference

        :raise OSError:
            If a (Symbolic)Reference with the same name but different contents already
            exists.

        :note:
            This does not alter the current HEAD, index or working tree.
        """
        return cls._create(repo, path, cls._resolve_ref_on_create, reference, force, logmsg)

    def rename(self, new_path: PathLike, force: bool = False) -> "SymbolicReference":
        """Rename self to a new path.

        :param new_path:
            Either a simple name or a full path, e.g. ``new_name`` or
            ``features/new_name``.
            The prefix ``refs/`` is implied for references and will be set as needed.
            In case this is a symbolic ref, there is no implied prefix.

        :param force:
            If ``True``, the rename will succeed even if a head with the target name
            already exists. It will be overwritten in that case.

        :return:
            self

        :raise OSError:
            If a file at path but with different contents already exists.
        """
        new_path = self.to_full_path(new_path)
        if self.path == new_path:
            return self

        new_abs_path = os.path.join(_git_dir(self.repo, new_path), new_path)
        cur_abs_path = os.path.join(_git_dir(self.repo, self.path), self.path)
        if os.path.isfile(new_abs_path):
            if not force:
                # If they point to the same file, it's not an error.
                with open(new_abs_path, "rb") as fd1:
                    f1 = fd1.read().strip()
                with open(cur_abs_path, "rb") as fd2:
                    f2 = fd2.read().strip()
                if f1 != f2:
                    raise OSError("File at path %r already exists" % new_abs_path)
                # else: We could remove ourselves and use the other one, but...
                # ...for clarity, we just continue as usual.
            # END not force handling
            os.remove(new_abs_path)
        # END handle existing target file

        dname = os.path.dirname(new_abs_path)
        if not os.path.isdir(dname):
            os.makedirs(dname)
        # END create directory

        os.rename(cur_abs_path, new_abs_path)
        self.path = new_path

        return self

    @classmethod
    def _iter_items(
        cls: Type[T_References], repo: "Repo", common_path: Union[PathLike, None] = None
    ) -> Iterator[T_References]:
        if common_path is None:
            common_path = cls._common_path_default
        rela_paths = set()

        # Walk loose refs.
        # Currently we do not follow links.
        for root, dirs, files in os.walk(join_path_native(repo.common_dir, common_path)):
            if "refs" not in root.split(os.sep):  # Skip non-refs subfolders.
                refs_id = [d for d in dirs if d == "refs"]
                if refs_id:
                    dirs[0:] = ["refs"]
            # END prune non-refs folders

            for f in files:
                if f == "packed-refs":
                    continue
                abs_path = to_native_path_linux(join_path(root, f))
                rela_paths.add(abs_path.replace(to_native_path_linux(repo.common_dir) + "/", ""))
            # END for each file in root directory
        # END for each directory to walk

        # Read packed refs.
        for _sha, rela_path in cls._iter_packed_refs(repo):
            if rela_path.startswith(str(common_path)):
                rela_paths.add(rela_path)
            # END relative path matches common path
        # END packed refs reading

        # Yield paths in sorted order.
        for path in sorted(rela_paths):
            try:
                yield cls.from_path(repo, path)
            except ValueError:
                continue
        # END for each sorted relative refpath

    @classmethod
    def iter_items(
        cls: Type[T_References],
        repo: "Repo",
        common_path: Union[PathLike, None] = None,
        *args: Any,
        **kwargs: Any,
    ) -> Iterator[T_References]:
        """Find all refs in the repository.

        :param repo:
            The :class:`~git.repo.base.Repo`.

        :param common_path:
            Optional keyword argument to the path which is to be shared by all returned
            Ref objects.
            Defaults to class specific portion if ``None``, ensuring that only refs
            suitable for the actual class are returned.

        :return:
            A list of :class:`SymbolicReference`, each guaranteed to be a symbolic ref
            which is not detached and pointing to a valid ref.

            The list is lexicographically sorted. The returned objects are instances of
            concrete subclasses, such as :class:`~git.refs.head.Head` or
            :class:`~git.refs.tag.TagReference`.
        """
        return (r for r in cls._iter_items(repo, common_path) if r.__class__ is SymbolicReference or not r.is_detached)

    @classmethod
    def from_path(cls: Type[T_References], repo: "Repo", path: PathLike) -> T_References:
        """Make a symbolic reference from a path.

        :param path:
            Full ``.git``-directory-relative path name to the Reference to instantiate.

        :note:
            Use :meth:`to_full_path` if you only have a partial path of a known
            Reference type.

        :return:
            Instance of type :class:`~git.refs.reference.Reference`,
            :class:`~git.refs.head.Head`, or :class:`~git.refs.tag.Tag`, depending on
            the given path.
        """
        if not path:
            raise ValueError("Cannot create Reference from %r" % path)

        # Names like HEAD are inserted after the refs module is imported - we have an
        # import dependency cycle and don't want to import these names in-function.
        from . import HEAD, Head, RemoteReference, TagReference, Reference

        for ref_type in (
            HEAD,
            Head,
            RemoteReference,
            TagReference,
            Reference,
            SymbolicReference,
        ):
            try:
                instance: T_References
                instance = ref_type(repo, path)
                if instance.__class__ is SymbolicReference and instance.is_detached:
                    raise ValueError("SymbolicRef was detached, we drop it")
                else:
                    return instance

            except ValueError:
                pass
            # END exception handling
        # END for each type to try
        raise ValueError("Could not find reference type suitable to handle path %r" % path)

    def is_remote(self) -> bool:
        """:return: True if this symbolic reference points to a remote branch"""
        return str(self.path).startswith(self._remote_common_path_default + "/")
|
parrot/lib/python3.10/site-packages/git/refs/tag.py
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is part of GitPython and is released under the
|
| 2 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 3 |
+
|
| 4 |
+
"""Provides a :class:`~git.refs.reference.Reference`-based type for lightweight tags.
|
| 5 |
+
|
| 6 |
+
This defines the :class:`TagReference` class (and its alias :class:`Tag`), which
|
| 7 |
+
represents lightweight tags. For annotated tags (which are git objects), see the
|
| 8 |
+
:mod:`git.objects.tag` module.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
__all__ = ["TagReference", "Tag"]
|
| 12 |
+
|
| 13 |
+
from .reference import Reference
|
| 14 |
+
|
| 15 |
+
# typing ------------------------------------------------------------------
|
| 16 |
+
|
| 17 |
+
from typing import Any, TYPE_CHECKING, Type, Union
|
| 18 |
+
|
| 19 |
+
from git.types import AnyGitObject, PathLike
|
| 20 |
+
|
| 21 |
+
if TYPE_CHECKING:
|
| 22 |
+
from git.objects import Commit, TagObject
|
| 23 |
+
from git.refs import SymbolicReference
|
| 24 |
+
from git.repo import Repo
|
| 25 |
+
|
| 26 |
+
# ------------------------------------------------------------------------------
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class TagReference(Reference):
    """A lightweight tag reference which either points to a commit, a tag object or any
    other object. In the latter case additional information, like the signature or the
    tag-creator, is available.

    This tag object will always point to a commit object, but may carry additional
    information in a tag object::

        tagref = TagReference.list_items(repo)[0]
        print(tagref.commit.message)
        if tagref.tag is not None:
            print(tagref.tag.message)
    """

    __slots__ = ()

    _common_default = "tags"
    _common_path_default = Reference._common_path_default + "/" + _common_default

    @property
    def commit(self) -> "Commit":  # type: ignore[override] # LazyMixin has unrelated commit method
        """:return: Commit object the tag ref points to

        :raise ValueError:
            If the tag points to a tree or blob.
        """
        obj = self.object
        # Peel annotated tags (possibly nested) until a commit is reached.
        while obj.type != "commit":
            if obj.type == "tag":
                # It is a tag object which carries the commit as an object - we can point to anything.
                obj = obj.object
            else:
                raise ValueError(
                    (
                        "Cannot resolve commit as tag %s points to a %s object - "
                        + "use the `.object` property instead to access it"
                    )
                    % (self, obj.type)
                )
        return obj

    @property
    def tag(self) -> Union["TagObject", None]:
        """
        :return:
            Tag object this tag ref points to, or ``None`` in case we are a lightweight
            tag
        """
        obj = self.object
        if obj.type == "tag":
            return obj
        return None

    # Make object read-only. It should be reasonably hard to adjust an existing tag.
    @property
    def object(self) -> AnyGitObject:  # type: ignore[override]
        return Reference._get_object(self)

    @classmethod
    def create(
        cls: Type["TagReference"],
        repo: "Repo",
        path: PathLike,
        reference: Union[str, "SymbolicReference"] = "HEAD",
        logmsg: Union[str, None] = None,
        force: bool = False,
        **kwargs: Any,
    ) -> "TagReference":
        """Create a new tag reference.

        :param repo:
            The :class:`~git.repo.base.Repo` to create the tag in.

        :param path:
            The name of the tag, e.g. ``1.0`` or ``releases/1.0``.
            The prefix ``refs/tags`` is implied.

        :param reference:
            A reference to the :class:`~git.objects.base.Object` you want to tag.
            The referenced object can be a commit, tree, or blob.

        :param logmsg:
            If not ``None``, the message will be used in your tag object. This will also
            create an additional tag object that allows to obtain that information,
            e.g.::

                tagref.tag.message

        :param message:
            Synonym for the `logmsg` parameter. Included for backwards compatibility.
            `logmsg` takes precedence if both are passed.

        :param ref:
            Synonym for the `reference` parameter. Included for backwards
            compatibility. If given and non-empty, it overrides `reference`.

        :param force:
            If ``True``, force creation of a tag even though that tag already exists.

        :param kwargs:
            Additional keyword arguments to be passed to :manpage:`git-tag(1)`.

        :return:
            A new :class:`TagReference`.
        """
        if "ref" in kwargs and kwargs["ref"]:
            reference = kwargs["ref"]
            # Remove the synonym so it is not forwarded to git-tag as an unknown
            # ``--ref`` option, which would make the command fail.
            del kwargs["ref"]

        if "message" in kwargs and kwargs["message"]:
            kwargs["m"] = kwargs["message"]
            del kwargs["message"]

        if logmsg:
            # logmsg wins over the deprecated `message` synonym.
            kwargs["m"] = logmsg

        if force:
            kwargs["f"] = True

        args = (path, reference)

        repo.git.tag(*args, **kwargs)
        return TagReference(repo, "%s/%s" % (cls._common_path_default, path))

    @classmethod
    def delete(cls, repo: "Repo", *tags: "TagReference") -> None:  # type: ignore[override]
        """Delete the given existing tag or tags."""
        repo.git.tag("-d", *tags)
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
# Provide an alias.
|
| 155 |
+
Tag = TagReference
|
parrot/lib/python3.10/site-packages/git/repo/__init__.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is part of GitPython and is released under the
|
| 2 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 3 |
+
|
| 4 |
+
"""Initialize the repo package."""
|
| 5 |
+
|
| 6 |
+
__all__ = ["Repo"]
|
| 7 |
+
|
| 8 |
+
from .base import Repo
|
parrot/lib/python3.10/site-packages/git/repo/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (260 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/git/repo/__pycache__/base.cpython-310.pyc
ADDED
|
Binary file (45.3 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/repo/__pycache__/fun.cpython-310.pyc
ADDED
|
Binary file (8.97 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/git/repo/fun.py
ADDED
|
@@ -0,0 +1,419 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module is part of GitPython and is released under the
|
| 2 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 3 |
+
|
| 4 |
+
"""General repository-related functions."""
|
| 5 |
+
|
| 6 |
+
from __future__ import annotations
|
| 7 |
+
|
| 8 |
+
__all__ = [
|
| 9 |
+
"rev_parse",
|
| 10 |
+
"is_git_dir",
|
| 11 |
+
"touch",
|
| 12 |
+
"find_submodule_git_dir",
|
| 13 |
+
"name_to_object",
|
| 14 |
+
"short_to_long",
|
| 15 |
+
"deref_tag",
|
| 16 |
+
"to_commit",
|
| 17 |
+
"find_worktree_git_dir",
|
| 18 |
+
]
|
| 19 |
+
|
| 20 |
+
import os
|
| 21 |
+
import os.path as osp
|
| 22 |
+
from pathlib import Path
|
| 23 |
+
import stat
|
| 24 |
+
from string import digits
|
| 25 |
+
|
| 26 |
+
from gitdb.exc import BadName, BadObject
|
| 27 |
+
|
| 28 |
+
from git.cmd import Git
|
| 29 |
+
from git.exc import WorkTreeRepositoryUnsupported
|
| 30 |
+
from git.objects import Object
|
| 31 |
+
from git.refs import SymbolicReference
|
| 32 |
+
from git.util import cygpath, bin_to_hex, hex_to_bin
|
| 33 |
+
|
| 34 |
+
# Typing ----------------------------------------------------------------------
|
| 35 |
+
|
| 36 |
+
from typing import Optional, TYPE_CHECKING, Union, cast, overload
|
| 37 |
+
|
| 38 |
+
from git.types import AnyGitObject, Literal, PathLike
|
| 39 |
+
|
| 40 |
+
if TYPE_CHECKING:
|
| 41 |
+
from git.db import GitCmdObjectDB
|
| 42 |
+
from git.objects import Commit, TagObject
|
| 43 |
+
from git.refs.reference import Reference
|
| 44 |
+
from git.refs.tag import Tag
|
| 45 |
+
|
| 46 |
+
from .base import Repo
|
| 47 |
+
|
| 48 |
+
# ----------------------------------------------------------------------------
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def touch(filename: str) -> str:
    """Ensure the file at `filename` exists, creating it empty if necessary.

    Opening in binary-append mode never truncates an existing file.

    :return: The given `filename`.
    """
    open(filename, "ab").close()
    return filename
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def is_git_dir(d: "PathLike") -> bool:
    """This is taken from the git setup.c:is_git_directory function.

    :raise git.exc.WorkTreeRepositoryUnsupported:
        If it sees a worktree directory. It's quite hacky to do that here, but at least
        clearly indicates that we don't support it. There is the unlikely danger to
        throw if we see directories which just look like a worktree dir, but are none.
    """
    if not osp.isdir(d):
        return False

    # A git dir has an object store (or an override via the environment) and a refs
    # directory; HEAD must be a regular file or a symlink into refs.
    has_objects = osp.isdir(osp.join(d, "objects")) or "GIT_OBJECT_DIRECTORY" in os.environ
    if has_objects and osp.isdir(osp.join(d, "refs")):
        headref = osp.join(d, "HEAD")
        return osp.isfile(headref) or (osp.islink(headref) and os.readlink(headref).startswith("refs"))

    # A linked-worktree private dir carries these three marker files instead.
    if all(osp.isfile(osp.join(d, name)) for name in ("gitdir", "commondir", "gitfile")):
        raise WorkTreeRepositoryUnsupported(d)
    return False
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def find_worktree_git_dir(dotgit: "PathLike") -> Optional[str]:
    """Search for a gitdir for this worktree."""
    # A worktree's .git entry is a regular file, not a directory.
    try:
        entry_stat = os.stat(dotgit)
    except OSError:
        return None
    if not stat.S_ISREG(entry_stat.st_mode):
        return None

    try:
        # Parse all "key: value" lines up front; any malformed line aborts the lookup.
        pairs = [line.strip().split(": ") for line in Path(dotgit).read_text().splitlines()]
        for key, value in pairs:
            if key == "gitdir":
                return value
    except ValueError:
        pass
    return None
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def find_submodule_git_dir(d: "PathLike") -> Optional["PathLike"]:
    """Search for a submodule repo."""
    if is_git_dir(d):
        return d

    try:
        with open(d) as fp:
            content = fp.read().rstrip()
    except IOError:
        # It's probably not a file.
        return None

    if not content.startswith("gitdir: "):
        return None

    # The file points at the real git dir; resolve and recurse into it.
    path = content[8:]
    if Git.is_cygwin():
        # Cygwin creates submodules prefixed with `/cygdrive/...` suffixes.
        # Cygwin git understands Cygwin paths much better than Windows ones.
        # Also the Cygwin tests are assuming Cygwin paths.
        path = cygpath(path)
    if not osp.isabs(path):
        # Relative gitdir entries are resolved against the containing directory.
        path = osp.normpath(osp.join(osp.dirname(d), path))
    return find_submodule_git_dir(path)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def short_to_long(odb: "GitCmdObjectDB", hexsha: str) -> Optional[bytes]:
    """
    :return:
        Long hexadecimal sha1 from the given less than 40 byte hexsha, or ``None`` if no
        candidate could be found.

    :param hexsha:
        hexsha with less than 40 bytes.
    """
    try:
        complete_sha = odb.partial_to_complete_sha_hex(hexsha)
    except BadObject:
        # No (unique) candidate for this prefix.
        return None
    return bin_to_hex(complete_sha)
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
@overload
def name_to_object(repo: "Repo", name: str, return_ref: Literal[False] = ...) -> AnyGitObject: ...


@overload
def name_to_object(repo: "Repo", name: str, return_ref: Literal[True]) -> Union[AnyGitObject, SymbolicReference]: ...


def name_to_object(repo: "Repo", name: str, return_ref: bool = False) -> Union[AnyGitObject, SymbolicReference]:
    """
    :return:
        Object specified by the given name - hexshas (short and long) as well as
        references are supported.

    :param repo:
        The repository to resolve the name in.

    :param name:
        A full or abbreviated hexsha, or a reference name (with or without a
        ``refs/...`` prefix).

    :param return_ref:
        If ``True``, and name specifies a reference, we will return the reference
        instead of the object. Otherwise it will raise :exc:`~gitdb.exc.BadObject` or
        :exc:`~gitdb.exc.BadName`.
    """
    hexsha: Union[None, str, bytes] = None

    # Is it a hexsha? Try the most common ones, which is 7 to 40.
    if repo.re_hexsha_shortened.match(name):
        if len(name) != 40:
            # Find long sha for short sha.
            hexsha = short_to_long(repo.odb, name)
        else:
            hexsha = name
        # END handle short shas
    # END find sha if it matches

    # If we couldn't find an object for what seemed to be a short hexsha, try to find it
    # as reference anyway, it could be named 'aaa' for instance.
    if hexsha is None:
        # Candidate prefixes are tried most-generic first; this roughly mirrors git's
        # own ref lookup rules (see gitrevisions(7)).
        for base in (
            "%s",
            "refs/%s",
            "refs/tags/%s",
            "refs/heads/%s",
            "refs/remotes/%s",
            "refs/remotes/%s/HEAD",
        ):
            try:
                # dereference_recursive raises ValueError if the candidate does not
                # name an existing reference, which moves us on to the next prefix.
                hexsha = SymbolicReference.dereference_recursive(repo, base % name)
                if return_ref:
                    return SymbolicReference(repo, base % name)
                # END handle symbolic ref
                break
            except ValueError:
                pass
        # END for each base
    # END handle hexsha

    # Didn't find any ref, this is an error.
    # NOTE: if return_ref was requested and we get here, no prefix matched, so this
    # is unconditionally a failure for the reference case.
    if return_ref:
        raise BadObject("Couldn't find reference named %r" % name)
    # END handle return ref

    # Tried everything ? fail.
    if hexsha is None:
        raise BadName(name)
    # END assert hexsha was found

    return Object.new_from_sha(repo, hex_to_bin(hexsha))
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
def deref_tag(tag: "Tag") -> "AnyGitObject":
    """Recursively dereference a tag and return the resulting object."""
    # Follow the chain of ``.object`` attributes until we reach something that
    # has none, i.e. is no longer a tag.
    while hasattr(tag, "object"):
        tag = tag.object
    return tag
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
def to_commit(obj: "Object") -> "Commit":
    """Convert the given object to a commit if possible and return it.

    :raise ValueError:
        If the (possibly tag-dereferenced) object is not a commit.
    """
    # Tags are peeled first; anything else must already be a commit.
    if obj.type == "tag":
        obj = deref_tag(obj)
    if obj.type == "commit":
        return obj
    raise ValueError("Cannot convert object %r to type commit" % obj)
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
def rev_parse(repo: "Repo", rev: str) -> AnyGitObject:
    """Parse a revision string. Like :manpage:`git-rev-parse(1)`.

    :return:
        `~git.objects.base.Object` at the given revision.

        This may be any type of git object:

        * :class:`Commit <git.objects.commit.Commit>`
        * :class:`TagObject <git.objects.tag.TagObject>`
        * :class:`Tree <git.objects.tree.Tree>`
        * :class:`Blob <git.objects.blob.Blob>`

    :param rev:
        :manpage:`git-rev-parse(1)`-compatible revision specification as string.
        Please see :manpage:`git-rev-parse(1)` for details.

    :raise gitdb.exc.BadObject:
        If the given revision could not be found.

    :raise ValueError:
        If `rev` couldn't be parsed.

    :raise IndexError:
        If an invalid reflog index is specified.
    """
    # Are we in colon search mode?
    if rev.startswith(":/"):
        # Colon search mode
        raise NotImplementedError("commit by message search (regex)")
    # END handle search

    obj: Optional[AnyGitObject] = None
    ref = None
    output_type = "commit"
    start = 0
    parsed_to = 0
    lr = len(rev)
    # Scan left to right for operator tokens; everything before the first token is
    # the base rev name.
    while start < lr:
        if rev[start] not in "^~:@":
            start += 1
            continue
        # END handle start

        # token is one of: "^" (parent), "~" (ancestor), ":" (path in tree),
        # "@" (reflog access).
        token = rev[start]

        if obj is None:
            # token is a rev name.
            if start == 0:
                # Token at position 0 means the implicit base is HEAD's ref.
                ref = repo.head.ref
            else:
                if token == "@":
                    # Reflog access needs the reference itself, not its target object.
                    ref = cast("Reference", name_to_object(repo, rev[:start], return_ref=True))
                else:
                    obj = name_to_object(repo, rev[:start])
                # END handle token
            # END handle refname
        else:
            if ref is not None:
                obj = cast("Commit", ref.commit)
            # END handle ref
        # END initialize obj on first token

        start += 1

        # Try to parse {type}.
        if start < lr and rev[start] == "{":
            end = rev.find("}", start)
            if end == -1:
                raise ValueError("Missing closing brace to define type in %s" % rev)
            output_type = rev[start + 1 : end]  # Exclude brace.

            # Handle type.
            if output_type == "commit":
                pass  # Default.
            elif output_type == "tree":
                try:
                    obj = cast(AnyGitObject, obj)
                    obj = to_commit(obj).tree
                except (AttributeError, ValueError):
                    pass  # Error raised later.
                # END exception handling
            elif output_type in ("", "blob"):
                obj = cast("TagObject", obj)
                if obj and obj.type == "tag":
                    obj = deref_tag(obj)
                else:
                    # Cannot do anything for non-tags.
                    pass
                # END handle tag
            elif token == "@":
                # Reflog access such as ``ref@{1}``: the brace content is a
                # reversed integer index into the reference's log.
                # try single int
                assert ref is not None, "Require Reference to access reflog"
                revlog_index = None
                try:
                    # Transform reversed index into the format of our revlog.
                    revlog_index = -(int(output_type) + 1)
                except ValueError as e:
                    # TODO: Try to parse the other date options, using parse_date maybe.
                    raise NotImplementedError("Support for additional @{...} modes not implemented") from e
                # END handle revlog index

                try:
                    entry = ref.log_entry(revlog_index)
                except IndexError as e:
                    raise IndexError("Invalid revlog index: %i" % revlog_index) from e
                # END handle index out of bound

                obj = Object.new_from_sha(repo, hex_to_bin(entry.newhexsha))

                # Make it pass the following checks.
                output_type = ""
            else:
                raise ValueError("Invalid output type: %s ( in %s )" % (output_type, rev))
            # END handle output type

            # Empty output types don't require any specific type, its just about
            # dereferencing tags.
            if output_type and obj and obj.type != output_type:
                raise ValueError("Could not accommodate requested object type %r, got %s" % (output_type, obj.type))
            # END verify output type

            start = end + 1  # Skip brace.
            parsed_to = start
            continue
        # END parse type

        # Try to parse a number.
        num = 0
        if token != ":":
            found_digit = False
            while start < lr:
                if rev[start] in digits:
                    num = num * 10 + int(rev[start])
                    start += 1
                    found_digit = True
                else:
                    break
                # END handle number
            # END number parse loop

            # No explicit number given, 1 is the default. It could be 0 though.
            if not found_digit:
                num = 1
            # END set default num
        # END number parsing only if non-blob mode

        parsed_to = start
        # Handle hierarchy walk.
        try:
            obj = cast(AnyGitObject, obj)
            if token == "~":
                # rev~N: walk N first-parents back.
                obj = to_commit(obj)
                for _ in range(num):
                    obj = obj.parents[0]
                # END for each history item to walk
            elif token == "^":
                obj = to_commit(obj)
                # Must be n'th parent.
                if num:
                    obj = obj.parents[num - 1]
            elif token == ":":
                # rev:path - look the remainder of the spec up inside the tree.
                if obj.type != "tree":
                    obj = obj.tree
                # END get tree type
                obj = obj[rev[start:]]
                parsed_to = lr
            else:
                raise ValueError("Invalid token: %r" % token)
            # END end handle tag
        except (IndexError, AttributeError) as e:
            raise BadName(
                f"Invalid revision spec '{rev}' - not enough " f"parent commits to reach '{token}{int(num)}'"
            ) from e
        # END exception handling
    # END parse loop

    # Still no obj? It's probably a simple name.
    if obj is None:
        obj = name_to_object(repo, rev)
        parsed_to = lr
    # END handle simple name

    if obj is None:
        raise ValueError("Revision specifier could not be parsed: %s" % rev)

    if parsed_to != lr:
        raise ValueError("Didn't consume complete rev spec %s, consumed part: %s" % (rev, rev[:parsed_to]))

    return obj
|
parrot/lib/python3.10/site-packages/git/util.py
ADDED
|
@@ -0,0 +1,1344 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitPython and is released under the
|
| 4 |
+
# 3-Clause BSD License: https://opensource.org/license/bsd-3-clause/
|
| 5 |
+
|
| 6 |
+
import sys
|
| 7 |
+
|
| 8 |
+
__all__ = [
|
| 9 |
+
"stream_copy",
|
| 10 |
+
"join_path",
|
| 11 |
+
"to_native_path_linux",
|
| 12 |
+
"join_path_native",
|
| 13 |
+
"Stats",
|
| 14 |
+
"IndexFileSHA1Writer",
|
| 15 |
+
"IterableObj",
|
| 16 |
+
"IterableList",
|
| 17 |
+
"BlockingLockFile",
|
| 18 |
+
"LockFile",
|
| 19 |
+
"Actor",
|
| 20 |
+
"get_user_id",
|
| 21 |
+
"assure_directory_exists",
|
| 22 |
+
"RemoteProgress",
|
| 23 |
+
"CallableRemoteProgress",
|
| 24 |
+
"rmtree",
|
| 25 |
+
"unbare_repo",
|
| 26 |
+
"HIDE_WINDOWS_KNOWN_ERRORS",
|
| 27 |
+
]
|
| 28 |
+
|
| 29 |
+
if sys.platform == "win32":
|
| 30 |
+
__all__.append("to_native_path_windows")
|
| 31 |
+
|
| 32 |
+
from abc import abstractmethod
|
| 33 |
+
import contextlib
|
| 34 |
+
from functools import wraps
|
| 35 |
+
import getpass
|
| 36 |
+
import logging
|
| 37 |
+
import os
|
| 38 |
+
import os.path as osp
|
| 39 |
+
import pathlib
|
| 40 |
+
import platform
|
| 41 |
+
import re
|
| 42 |
+
import shutil
|
| 43 |
+
import stat
|
| 44 |
+
import subprocess
|
| 45 |
+
import time
|
| 46 |
+
from urllib.parse import urlsplit, urlunsplit
|
| 47 |
+
import warnings
|
| 48 |
+
|
| 49 |
+
# NOTE: Unused imports can be improved now that CI testing has fully resumed. Some of
|
| 50 |
+
# these may be used indirectly through other GitPython modules, which avoids having to write
|
| 51 |
+
# gitdb all the time in their imports. They are not in __all__, at least currently,
|
| 52 |
+
# because they could be removed or changed at any time, and so should not be considered
|
| 53 |
+
# conceptually public to code outside GitPython. Linters of course do not like it.
|
| 54 |
+
from gitdb.util import (
|
| 55 |
+
LazyMixin, # noqa: F401
|
| 56 |
+
LockedFD, # noqa: F401
|
| 57 |
+
bin_to_hex, # noqa: F401
|
| 58 |
+
file_contents_ro, # noqa: F401
|
| 59 |
+
file_contents_ro_filepath, # noqa: F401
|
| 60 |
+
hex_to_bin, # noqa: F401
|
| 61 |
+
make_sha,
|
| 62 |
+
to_bin_sha, # noqa: F401
|
| 63 |
+
to_hex_sha, # noqa: F401
|
| 64 |
+
)
|
| 65 |
+
|
| 66 |
+
# typing ---------------------------------------------------------
|
| 67 |
+
|
| 68 |
+
from typing import (
|
| 69 |
+
Any,
|
| 70 |
+
AnyStr,
|
| 71 |
+
BinaryIO,
|
| 72 |
+
Callable,
|
| 73 |
+
Dict,
|
| 74 |
+
Generator,
|
| 75 |
+
IO,
|
| 76 |
+
Iterator,
|
| 77 |
+
List,
|
| 78 |
+
Optional,
|
| 79 |
+
Pattern,
|
| 80 |
+
Sequence,
|
| 81 |
+
Tuple,
|
| 82 |
+
TYPE_CHECKING,
|
| 83 |
+
TypeVar,
|
| 84 |
+
Union,
|
| 85 |
+
cast,
|
| 86 |
+
overload,
|
| 87 |
+
)
|
| 88 |
+
|
| 89 |
+
if TYPE_CHECKING:
|
| 90 |
+
from git.cmd import Git
|
| 91 |
+
from git.config import GitConfigParser, SectionConstraint
|
| 92 |
+
from git.remote import Remote
|
| 93 |
+
from git.repo.base import Repo
|
| 94 |
+
|
| 95 |
+
from git.types import (
|
| 96 |
+
Files_TD,
|
| 97 |
+
Has_id_attribute,
|
| 98 |
+
HSH_TD,
|
| 99 |
+
Literal,
|
| 100 |
+
PathLike,
|
| 101 |
+
Protocol,
|
| 102 |
+
SupportsIndex,
|
| 103 |
+
Total_TD,
|
| 104 |
+
runtime_checkable,
|
| 105 |
+
)
|
| 106 |
+
|
| 107 |
+
# ---------------------------------------------------------------------
|
| 108 |
+
|
| 109 |
+
T_IterableObj = TypeVar("T_IterableObj", bound=Union["IterableObj", "Has_id_attribute"], covariant=True)
# Covariant, so IterableList[Head] is a subtype of IterableList[IterableObj].

# Module-level logger; handlers/levels are left to the application (library convention).
_logger = logging.getLogger(__name__)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def _read_env_flag(name: str, default: bool) -> bool:
|
| 116 |
+
"""Read a boolean flag from an environment variable.
|
| 117 |
+
|
| 118 |
+
:return:
|
| 119 |
+
The flag, or the `default` value if absent or ambiguous.
|
| 120 |
+
"""
|
| 121 |
+
try:
|
| 122 |
+
value = os.environ[name]
|
| 123 |
+
except KeyError:
|
| 124 |
+
return default
|
| 125 |
+
|
| 126 |
+
_logger.warning(
|
| 127 |
+
"The %s environment variable is deprecated. Its effect has never been documented and changes without warning.",
|
| 128 |
+
name,
|
| 129 |
+
)
|
| 130 |
+
|
| 131 |
+
adjusted_value = value.strip().lower()
|
| 132 |
+
|
| 133 |
+
if adjusted_value in {"", "0", "false", "no"}:
|
| 134 |
+
return False
|
| 135 |
+
if adjusted_value in {"1", "true", "yes"}:
|
| 136 |
+
return True
|
| 137 |
+
_logger.warning("%s has unrecognized value %r, treating as %r.", name, value, default)
|
| 138 |
+
return default
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def _read_win_env_flag(name: str, default: bool) -> bool:
    """Read a boolean flag from an environment variable on Windows.

    :return:
        On Windows, the flag, or the `default` value if absent or ambiguous.
        On all other operating systems, ``False``.

    :note:
        This only accesses the environment on Windows.
    """
    if sys.platform != "win32":
        return False
    return _read_env_flag(name, default)
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
#: We need an easy way to see if Appveyor TCs start failing,
#: so the errors marked with this var are considered "acknowledged" ones, awaiting remedy,
#: till then, we wish to hide them.
HIDE_WINDOWS_KNOWN_ERRORS = _read_win_env_flag("HIDE_WINDOWS_KNOWN_ERRORS", True)
#: Companion flag, read the same way; like every _read_win_env_flag value it is
#: always ``False`` on non-Windows platforms.
HIDE_WINDOWS_FREEZE_ERRORS = _read_win_env_flag("HIDE_WINDOWS_FREEZE_ERRORS", True)
|
| 159 |
+
|
| 160 |
+
# { Utility Methods
|
| 161 |
+
|
| 162 |
+
T = TypeVar("T")  # Generic return type, used to keep decorated signatures intact.
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
def unbare_repo(func: Callable[..., T]) -> Callable[..., T]:
    """Methods with this decorator raise :exc:`~git.exc.InvalidGitRepositoryError` if
    they encounter a bare repository."""

    # Imported lazily to avoid an import cycle at module load time.
    from .exc import InvalidGitRepositoryError

    @wraps(func)
    def checked(self: "Remote", *args: Any, **kwargs: Any) -> T:
        # The wrapped operation requires a working tree, which bare repos lack.
        if self.repo.bare:
            raise InvalidGitRepositoryError("Method '%s' cannot operate on bare repositories" % func.__name__)
        return func(self, *args, **kwargs)

    return checked
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
@contextlib.contextmanager
def cwd(new_dir: PathLike) -> Generator[PathLike, None, None]:
    """Context manager to temporarily change directory.

    This is similar to :func:`contextlib.chdir` introduced in Python 3.11, but the
    context manager object returned by a single call to this function is not reentrant.
    """
    previous = os.getcwd()
    os.chdir(new_dir)
    try:
        yield new_dir
    finally:
        # Always restore the original working directory, even on error.
        os.chdir(previous)
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
@contextlib.contextmanager
def patch_env(name: str, value: str) -> Generator[None, None, None]:
    """Context manager to temporarily patch an environment variable."""
    saved = os.getenv(name)
    os.environ[name] = value
    try:
        yield
    finally:
        # Restore: remove the variable if it did not exist before, else put the
        # previous value back.
        if saved is None:
            del os.environ[name]
        else:
            os.environ[name] = saved
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
def rmtree(path: PathLike) -> None:
    """Remove the given directory tree recursively.

    :note:
        We use :func:`shutil.rmtree` but adjust its behaviour to see whether files that
        couldn't be deleted are read-only. Windows will not remove them in that case.
    """

    def onerror(function: Callable, path: PathLike, _excinfo: Any) -> None:
        """Callback for :func:`shutil.rmtree`.

        This works as either a ``onexc`` or ``onerror`` style callback.
        """
        # Treat the failure as an access error: clear read-only and retry once.
        os.chmod(path, stat.S_IWUSR)

        try:
            function(path)
        except PermissionError as ex:
            if HIDE_WINDOWS_KNOWN_ERRORS:
                from unittest import SkipTest

                raise SkipTest(f"FIXME: fails with: PermissionError\n {ex}") from ex
            raise

    if sys.platform != "win32":
        shutil.rmtree(path)
    elif sys.version_info >= (3, 12):
        # Python 3.12 renamed/retyped the error callback parameter to `onexc`.
        shutil.rmtree(path, onexc=onerror)
    else:
        shutil.rmtree(path, onerror=onerror)
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
def rmfile(path: PathLike) -> None:
    """Ensure file deleted also on *Windows* where read-only files need special
    treatment."""
    if not osp.isfile(path):
        return
    if sys.platform == "win32":
        # Windows refuses to delete read-only files; make the file writable first.
        os.chmod(path, 0o777)
    os.remove(path)
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
def stream_copy(source: BinaryIO, destination: BinaryIO, chunk_size: int = 512 * 1024) -> int:
    """Copy all data from the `source` stream into the `destination` stream in chunks
    of size `chunk_size`.

    :return:
        Number of bytes written
    """
    total = 0
    while True:
        chunk = source.read(chunk_size)
        destination.write(chunk)
        total += len(chunk)
        # A short read signals the end of the source stream.
        if len(chunk) < chunk_size:
            return total
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
def join_path(a: PathLike, *p: PathLike) -> PathLike:
    R"""Join path tokens together similar to osp.join, but always use ``/`` instead of
    possibly ``\`` on Windows."""
    result = str(a)
    for raw in p:
        token = str(raw)
        if not token:
            continue  # Skip empty tokens entirely.
        if token.startswith("/"):
            # A leading slash is dropped and the rest appended directly.
            result += token[1:]
        elif not result or result.endswith("/"):
            result += token
        else:
            result = f"{result}/{token}"
    return result
|
| 288 |
+
|
| 289 |
+
|
| 290 |
+
if sys.platform == "win32":

    def to_native_path_windows(path: PathLike) -> PathLike:
        """Convert forward slashes to backslashes."""
        return str(path).replace("/", "\\")

    def to_native_path_linux(path: PathLike) -> str:
        """Convert backslashes to forward slashes."""
        return str(path).replace("\\", "/")

    to_native_path = to_native_path_windows
else:
    # No need for any work on Linux.
    def to_native_path_linux(path: PathLike) -> str:
        return str(path)

    to_native_path = to_native_path_linux
|
| 307 |
+
|
| 308 |
+
|
| 309 |
+
def join_path_native(a: PathLike, *p: PathLike) -> PathLike:
    R"""Like :func:`join_path`, but makes sure an OS native path is returned.

    This is only needed to play it safe on Windows and to ensure nice paths that only
    use ``\``.
    """
    joined = join_path(a, *p)
    return to_native_path(joined)
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
def assure_directory_exists(path: PathLike, is_file: bool = False) -> bool:
    """Make sure that the directory pointed to by path exists.

    :param is_file:
        If ``True``, `path` is assumed to be a file and handled correctly.
        Otherwise it must be a directory.

    :return:
        ``True`` if the directory was created, ``False`` if it already existed.
    """
    target = osp.dirname(path) if is_file else path
    if osp.isdir(target):
        return False
    os.makedirs(target, exist_ok=True)
    return True
|
| 335 |
+
|
| 336 |
+
|
| 337 |
+
def _get_exe_extensions() -> Sequence[str]:
|
| 338 |
+
PATHEXT = os.environ.get("PATHEXT", None)
|
| 339 |
+
if PATHEXT:
|
| 340 |
+
return tuple(p.upper() for p in PATHEXT.split(os.pathsep))
|
| 341 |
+
elif sys.platform == "win32":
|
| 342 |
+
return (".BAT", "COM", ".EXE")
|
| 343 |
+
else:
|
| 344 |
+
return ()
|
| 345 |
+
|
| 346 |
+
|
| 347 |
+
def py_where(program: str, path: Optional[PathLike] = None) -> List[str]:
    """Perform a path search to assist :func:`is_cygwin_git`.

    This is not robust for general use. It is an implementation detail of
    :func:`is_cygwin_git`. When a search following all shell rules is needed,
    :func:`shutil.which` can be used instead.

    :note:
        Neither this function nor :func:`shutil.which` will predict the effect of an
        executable search on a native Windows system due to a :class:`subprocess.Popen`
        call without ``shell=True``, because shell and non-shell executable search on
        Windows differ considerably.
    """
    # From: http://stackoverflow.com/a/377028/548792
    winprog_exts = _get_exe_extensions()

    def _is_executable(candidate: str) -> bool:
        if not (osp.isfile(candidate) and os.access(candidate, os.X_OK)):
            return False
        # Off Windows, or without known extensions, executability alone suffices.
        if sys.platform != "win32" or not winprog_exts:
            return True
        return any(candidate.upper().endswith(ext) for ext in winprog_exts)

    search_path = path if path else os.environ["PATH"]
    found: List[str] = []
    for folder in str(search_path).split(os.pathsep):
        folder = folder.strip('"')
        if not folder:
            continue
        base = osp.join(folder, program)
        for candidate in [base] + ["%s%s" % (base, ext) for ext in winprog_exts]:
            if _is_executable(candidate):
                found.append(candidate)
    return found
|
| 383 |
+
|
| 384 |
+
|
| 385 |
+
def _cygexpath(drive: Optional[str], path: str) -> str:
    # Helper for `cygpath`: convert a (drive, tail) pair to a Cygwin-style path.
    if osp.isabs(path) and not drive:
        # Invoked from `cygpath()` directly with `D:Apps\123`?
        # It's an error, leave it alone just slashes)
        p = path  # convert to str if AnyPath given
    else:
        # NOTE: empty `path` short-circuits to "" here (no expansion applied).
        p = path and osp.normpath(osp.expandvars(osp.expanduser(path)))
        if osp.isabs(p):
            if drive:
                # Confusing, maybe a remote system should expand vars.
                p = path
            else:
                p = cygpath(p)
        elif drive:
            p = "/proc/cygdrive/%s/%s" % (drive.lower(), p)
    return str(p).replace("\\", "/")


_cygpath_parsers: Tuple[Tuple[Pattern[str], Callable, bool], ...] = (
    # Each entry: (pattern, transformer applied to the match groups, recurse flag).
    # See: https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx
    # and: https://www.cygwin.com/cygwin-ug-net/using.html#unc-paths
    (
        re.compile(r"\\\\\?\\UNC\\([^\\]+)\\([^\\]+)(?:\\(.*))?"),
        (lambda server, share, rest_path: "//%s/%s/%s" % (server, share, rest_path.replace("\\", "/"))),
        False,
    ),
    (re.compile(r"\\\\\?\\(\w):[/\\](.*)"), (_cygexpath), False),
    (re.compile(r"(\w):[/\\](.*)"), (_cygexpath), False),
    (re.compile(r"file:(.*)", re.I), (lambda rest_path: rest_path), True),
    (re.compile(r"(\w{2,}:.*)"), (lambda url: url), False),  # remote URL, do nothing
)


def cygpath(path: str) -> str:
    """Use :meth:`git.cmd.Git.polish_url` instead, that works on any environment."""
    path = str(path)  # Ensure is str and not AnyPath.
    # Fix to use Paths when 3.5 dropped. Or to be just str if only for URLs?
    if path.startswith(("/cygdrive", "//", "/proc/cygdrive")):
        # Already a Cygwin/UNC style path; nothing to do.
        return path

    for pattern, transformer, recurse in _cygpath_parsers:
        found = pattern.match(path)
        if found is None:
            continue
        converted = transformer(*found.groups())
        return cygpath(converted) if recurse else converted

    return _cygexpath(None, path)
|
| 435 |
+
|
| 436 |
+
|
| 437 |
+
_decygpath_regex = re.compile(r"(?:/proc)?/cygdrive/(\w)(/.*)?")


def decygpath(path: PathLike) -> str:
    """Convert a Cygwin-style ``/cygdrive/<d>/...`` path to Windows form, flipping
    every ``/`` to ``\\``."""
    text = str(path)
    matched = _decygpath_regex.match(text)
    if matched:
        drive, tail = matched.groups()
        # Tail may be absent for a bare drive ("/cygdrive/d" -> "D:").
        text = "%s:%s" % (drive.upper(), tail or "")

    return text.replace("/", "\\")
|
| 448 |
+
|
| 449 |
+
|
| 450 |
+
#: Cache of per-executable results: maps a Git executable path to whether it
#: belongs to a Cygwin installation.
_is_cygwin_cache: Dict[str, Optional[bool]] = {}


def _is_cygwin_git(git_executable: str) -> bool:
    """Check (and cache) whether `git_executable` comes from a Cygwin installation,
    by running the ``uname`` binary found next to it."""
    cached = _is_cygwin_cache.get(git_executable)
    if cached is not None:
        return cached

    is_cygwin = False
    try:
        git_dir = osp.dirname(git_executable)
        if not git_dir:
            # Just a name given, not a real path — search for it.
            res = py_where(git_executable)
            git_dir = osp.dirname(res[0]) if res else ""

        # Cygwin's uname reports a string containing "CYGWIN".
        uname_cmd = osp.join(git_dir, "uname")
        process = subprocess.Popen([uname_cmd], stdout=subprocess.PIPE, universal_newlines=True)
        uname_out, _ = process.communicate()
        # retcode = process.poll()
        is_cygwin = "CYGWIN" in uname_out
    except Exception as ex:
        _logger.debug("Failed checking if running in CYGWIN due to: %r", ex)
    _is_cygwin_cache[git_executable] = is_cygwin

    return is_cygwin
|
| 476 |
+
|
| 477 |
+
|
| 478 |
+
@overload
def is_cygwin_git(git_executable: None) -> Literal[False]: ...


@overload
def is_cygwin_git(git_executable: PathLike) -> bool: ...


def is_cygwin_git(git_executable: Union[None, PathLike]) -> bool:
    """Report whether the given Git executable is a Cygwin git.

    Always ``False`` when running on native Windows or when no executable is given.
    """
    if sys.platform == "win32":  # TODO: See if we can use `sys.platform != "cygwin"`.
        return False
    if git_executable is None:
        return False
    return _is_cygwin_git(str(git_executable))
|
| 493 |
+
|
| 494 |
+
|
| 495 |
+
def get_user_id() -> str:
    """:return: String identifying the currently active system user as ``name@node``"""
    return f"{getpass.getuser()}@{platform.node()}"
|
| 498 |
+
|
| 499 |
+
|
| 500 |
+
def finalize_process(proc: Union[subprocess.Popen, "Git.AutoInterrupt"], **kwargs: Any) -> None:
    """Wait for the process (clone, fetch, pull or push) and handle its errors
    accordingly.

    :param proc:
        The process, or :class:`~git.cmd.Git.AutoInterrupt` wrapper, to wait on.
    :param kwargs:
        Passed through unchanged to ``proc.wait`` (e.g. a ``timeout``).
    """
    # TODO: No close proc-streams??
    proc.wait(**kwargs)
|
| 505 |
+
|
| 506 |
+
|
| 507 |
+
@overload
def expand_path(p: None, expand_vars: bool = ...) -> None: ...


@overload
def expand_path(p: PathLike, expand_vars: bool = ...) -> str:
    # TODO: Support for Python 3.5 has been dropped, so these overloads can be improved.
    ...


def expand_path(p: Union[None, PathLike], expand_vars: bool = True) -> Optional[PathLike]:
    """Expand ``~`` and (optionally) environment variables in `p` and return an
    absolute, normalized path — or ``None`` when expansion fails (e.g. for ``None``
    input)."""
    if isinstance(p, pathlib.Path):
        return p.resolve()
    try:
        expanded = osp.expanduser(p)  # type: ignore[arg-type]
        if expand_vars:
            expanded = osp.expandvars(expanded)
        return osp.normpath(osp.abspath(expanded))
    except Exception:
        return None
|
| 527 |
+
|
| 528 |
+
|
| 529 |
+
def remove_password_if_present(cmdline: Sequence[str]) -> List[str]:
    """Parse any command line argument and if one of the elements is an URL with a
    username and/or password, replace them by stars.

    If nothing is found, this just returns the command line as-is.

    This should be used for every log line that prints a command line, as well as
    exception messages.
    """
    sanitized: List[str] = []
    for token in cmdline:
        redacted = token
        try:
            parts = urlsplit(token)
            if parts.password is not None or parts.username is not None:
                netloc = parts.netloc
                # Mask credentials inside the netloc; everything else is kept.
                if parts.password is not None:
                    netloc = netloc.replace(parts.password, "*****")
                if parts.username is not None:
                    netloc = netloc.replace(parts.username, "*****")
                redacted = urlunsplit(parts._replace(netloc=netloc))
        except ValueError:
            # Not a valid URL — keep the token untouched.
            pass
        sanitized.append(redacted)
    return sanitized
|
| 556 |
+
|
| 557 |
+
|
| 558 |
+
# } END utilities
|
| 559 |
+
|
| 560 |
+
# { Classes
|
| 561 |
+
|
| 562 |
+
|
| 563 |
+
class RemoteProgress:
    """Handler providing an interface to parse progress information emitted by
    :manpage:`git-push(1)` and :manpage:`git-fetch(1)` and to dispatch callbacks
    allowing subclasses to react to the progress."""

    # Number of operation codes; each operation gets its own bit below.
    _num_op_codes: int = 9
    (
        BEGIN,
        END,
        COUNTING,
        COMPRESSING,
        WRITING,
        RECEIVING,
        RESOLVING,
        FINDING_SOURCES,
        CHECKING_OUT,
    ) = [1 << x for x in range(_num_op_codes)]
    STAGE_MASK = BEGIN | END
    OP_MASK = ~STAGE_MASK

    DONE_TOKEN = "done."
    TOKEN_SEPARATOR = ", "

    __slots__ = (
        "_cur_line",
        "_seen_ops",
        "error_lines",  # Lines that started with 'error:' or 'fatal:'.
        "other_lines",  # Lines not denoting progress (e.g. push-infos).
    )
    # The two empty groups in re_op_absolute keep group numbering aligned with
    # re_op_relative, so both matches unpack into the same 6-tuple.
    re_op_absolute = re.compile(r"(remote: )?([\w\s]+):\s+()(\d+)()(.*)")
    re_op_relative = re.compile(r"(remote: )?([\w\s]+):\s+(\d+)% \((\d+)/(\d+)\)(.*)")

    def __init__(self) -> None:
        self._seen_ops: List[int] = []
        self._cur_line: Optional[str] = None
        self.error_lines: List[str] = []
        self.other_lines: List[str] = []

    def _parse_progress_line(self, line: AnyStr) -> None:
        """Parse progress information from the given line as retrieved by
        :manpage:`git-push(1)` or :manpage:`git-fetch(1)`.

        - Lines that do not contain progress info are stored in :attr:`other_lines`.
        - Lines that seem to contain an error (i.e. start with ``error:`` or ``fatal:``)
          are stored in :attr:`error_lines`.
        """
        # handle
        # Counting objects: 4, done.
        # Compressing objects: 50% (1/2)
        # Compressing objects: 100% (2/2)
        # Compressing objects: 100% (2/2), done.
        if isinstance(line, bytes):  # mypy argues about ternary assignment.
            line_str = line.decode("utf-8")
        else:
            line_str = line
        self._cur_line = line_str

        if self._cur_line.startswith(("error:", "fatal:")):
            self.error_lines.append(self._cur_line)
            return

        # Try the percentage form first, then the absolute-count form.
        cur_count, max_count = None, None
        match = self.re_op_relative.match(line_str)
        if match is None:
            match = self.re_op_absolute.match(line_str)

        if not match:
            self.line_dropped(line_str)
            self.other_lines.append(line_str)
            return
        # END could not get match

        op_code = 0
        _remote, op_name, _percent, cur_count, max_count, message = match.groups()

        # Get operation ID.
        if op_name == "Counting objects":
            op_code |= self.COUNTING
        elif op_name == "Compressing objects":
            op_code |= self.COMPRESSING
        elif op_name == "Writing objects":
            op_code |= self.WRITING
        elif op_name == "Receiving objects":
            op_code |= self.RECEIVING
        elif op_name == "Resolving deltas":
            op_code |= self.RESOLVING
        elif op_name == "Finding sources":
            op_code |= self.FINDING_SOURCES
        elif op_name == "Checking out files":
            op_code |= self.CHECKING_OUT
        else:
            # Note: On Windows it can happen that partial lines are sent.
            # Hence we get something like "CompreReceiving objects", which is
            # a blend of "Compressing objects" and "Receiving objects".
            # This can't really be prevented, so we drop the line verbosely
            # to make sure we get informed in case the process spits out new
            # commands at some point.
            self.line_dropped(line_str)
            # Note: Don't add this line to the other lines, as we have to silently
            # drop it.
            return
        # END handle op code

        # Figure out stage: the first line for a given operation carries BEGIN.
        if op_code not in self._seen_ops:
            self._seen_ops.append(op_code)
            op_code |= self.BEGIN
        # END begin opcode

        if message is None:
            message = ""
        # END message handling

        message = message.strip()
        if message.endswith(self.DONE_TOKEN):
            op_code |= self.END
            message = message[: -len(self.DONE_TOKEN)]
        # END end message handling
        message = message.strip(self.TOKEN_SEPARATOR)

        # Empty-string counts short-circuit to a falsy value instead of float("").
        self.update(
            op_code,
            cur_count and float(cur_count),
            max_count and float(max_count),
            message,
        )

    def new_message_handler(self) -> Callable[[str], None]:
        """
        :return:
            A progress handler suitable for :func:`~git.cmd.handle_process_output`,
            passing lines on to this progress handler in a suitable format.
        """

        def handler(line: AnyStr) -> None:
            return self._parse_progress_line(line.rstrip())

        # END handler

        return handler

    def line_dropped(self, line: str) -> None:
        """Called whenever a line could not be understood and was therefore dropped."""
        pass

    def update(
        self,
        op_code: int,
        cur_count: Union[str, float],
        max_count: Union[str, float, None] = None,
        message: str = "",
    ) -> None:
        """Called whenever the progress changes.

        :param op_code:
            Integer allowing to be compared against Operation IDs and stage IDs.

            Stage IDs are :const:`BEGIN` and :const:`END`. :const:`BEGIN` will only be
            set once for each Operation ID as well as :const:`END`. It may be that
            :const:`BEGIN` and :const:`END` are set at once in case only one progress
            message was emitted due to the speed of the operation. Between
            :const:`BEGIN` and :const:`END`, none of these flags will be set.

            Operation IDs are all held within the :const:`OP_MASK`. Only one Operation
            ID will be active per call.

        :param cur_count:
            Current absolute count of items.

        :param max_count:
            The maximum count of items we expect. It may be ``None`` in case there is no
            maximum number of items or if it is (yet) unknown.

        :param message:
            In case of the :const:`WRITING` operation, it contains the amount of bytes
            transferred. It may possibly be used for other purposes as well.

        :note:
            You may read the contents of the current line in
            :attr:`self._cur_line <_cur_line>`.
        """
        pass
|
| 745 |
+
|
| 746 |
+
|
| 747 |
+
class CallableRemoteProgress(RemoteProgress):
    """A :class:`RemoteProgress` implementation forwarding updates to any callable.

    :note:
        Like direct instances of :class:`RemoteProgress`, instances of this
        :class:`CallableRemoteProgress` class are not themselves directly callable.
        Rather, instances of this class wrap a callable and forward to it. This should
        therefore not be confused with :class:`git.types.CallableProgress`.
    """

    __slots__ = ("_callable",)

    def __init__(self, fn: Callable) -> None:
        super().__init__()
        # Keep a reference to the callable every update is forwarded to.
        self._callable = fn

    def update(self, *args: Any, **kwargs: Any) -> None:
        # Forward all update arguments verbatim.
        self._callable(*args, **kwargs)
|
| 765 |
+
|
| 766 |
+
|
| 767 |
+
class Actor:
    """Actors hold information about a person acting on the repository. They can be
    committers and authors or anything with a name and an email as mentioned in the git
    log entries."""

    # PRECOMPILED REGEX
    name_only_regex = re.compile(r"<(.*)>")
    name_email_regex = re.compile(r"(.*) <(.*?)>")

    # ENVIRONMENT VARIABLES
    # These are read when creating new commits.
    env_author_name = "GIT_AUTHOR_NAME"
    env_author_email = "GIT_AUTHOR_EMAIL"
    env_committer_name = "GIT_COMMITTER_NAME"
    env_committer_email = "GIT_COMMITTER_EMAIL"

    # CONFIGURATION KEYS
    conf_name = "name"
    conf_email = "email"

    __slots__ = ("name", "email")

    def __init__(self, name: Optional[str], email: Optional[str]) -> None:
        self.name = name
        self.email = email

    def __eq__(self, other: Any) -> bool:
        # Guard against comparison with None or unrelated types, which previously
        # raised AttributeError instead of reporting inequality. Returning
        # NotImplemented lets Python fall back to its default comparison protocol.
        if not isinstance(other, Actor):
            return NotImplemented
        return self.name == other.name and self.email == other.email

    def __ne__(self, other: Any) -> bool:
        return not (self == other)

    def __hash__(self) -> int:
        return hash((self.name, self.email))

    def __str__(self) -> str:
        return self.name if self.name else ""

    def __repr__(self) -> str:
        return '<git.Actor "%s <%s>">' % (self.name, self.email)

    @classmethod
    def _from_string(cls, string: str) -> "Actor":
        """Create an :class:`Actor` from a string.

        :param string:
            The string, which is expected to be in regular git format::

                John Doe <jdoe@example.com>

        :return:
            :class:`Actor`
        """
        m = cls.name_email_regex.search(string)
        if m:
            name, email = m.groups()
            return Actor(name, email)
        else:
            m = cls.name_only_regex.search(string)
            if m:
                return Actor(m.group(1), None)
            # Assume the best and use the whole string as name.
            return Actor(string, None)
            # END special case name
        # END handle name/email matching

    @classmethod
    def _main_actor(
        cls,
        env_name: str,
        env_email: str,
        config_reader: Union[None, "GitConfigParser", "SectionConstraint"] = None,
    ) -> "Actor":
        """Build an :class:`Actor` by checking, in order: the environment variables
        `env_name`/`env_email`, the ``user`` section of `config_reader`, and finally
        defaults derived from the system user id.

        :param env_name:
            Name of the environment variable holding the actor's name.

        :param env_email:
            Name of the environment variable holding the actor's email.

        :param config_reader:
            Optional configuration source queried when the environment is unset.
        """
        actor = Actor("", "")
        user_id = None  # We use this to avoid multiple calls to getpass.getuser().

        def default_email() -> str:
            # Lazily compute and cache the system user id.
            nonlocal user_id
            if not user_id:
                user_id = get_user_id()
            return user_id

        def default_name() -> str:
            # Local part of the default email serves as the default name.
            return default_email().split("@")[0]

        for attr, evar, cvar, default in (
            ("name", env_name, cls.conf_name, default_name),
            ("email", env_email, cls.conf_email, default_email),
        ):
            try:
                val = os.environ[evar]
                setattr(actor, attr, val)
            except KeyError:
                if config_reader is not None:
                    try:
                        val = config_reader.get("user", cvar)
                    except Exception:
                        val = default()
                    setattr(actor, attr, val)
                # END config-reader handling
                if not getattr(actor, attr):
                    setattr(actor, attr, default())
            # END handle name
        # END for each item to retrieve
        return actor

    @classmethod
    def committer(cls, config_reader: Union[None, "GitConfigParser", "SectionConstraint"] = None) -> "Actor":
        """
        :return:
            :class:`Actor` instance corresponding to the configured committer. It
            behaves similar to the git implementation, such that the environment will
            override configuration values of `config_reader`. If no value is set at all,
            it will be generated.

        :param config_reader:
            ConfigReader to use to retrieve the values from in case they are not set in
            the environment.
        """
        return cls._main_actor(cls.env_committer_name, cls.env_committer_email, config_reader)

    @classmethod
    def author(cls, config_reader: Union[None, "GitConfigParser", "SectionConstraint"] = None) -> "Actor":
        """Same as :meth:`committer`, but defines the main author. It may be specified
        in the environment, but defaults to the committer."""
        return cls._main_actor(cls.env_author_name, cls.env_author_email, config_reader)
|
| 893 |
+
|
| 894 |
+
|
| 895 |
+
class Stats:
    """Represents stat information as presented by git at the end of a merge. It is
    created from the output of a diff operation.

    Example::

        c = Commit( sha1 )
        s = c.stats
        s.total  # full-stat-dict
        s.files  # dict( filepath : stat-dict )

    ``stat-dict``

    A dictionary with the following keys and values::

        deletions = number of deleted lines as int
        insertions = number of inserted lines as int
        lines = total number of lines changed as int, or deletions + insertions

    ``full-stat-dict``

    In addition to the items in the stat-dict, it features additional information::

        files = number of changed files as int
    """

    __slots__ = ("total", "files")

    def __init__(self, total: "Total_TD", files: "Dict[PathLike, Files_TD]") -> None:
        self.total = total
        self.files = files

    @classmethod
    def _list_from_string(cls, repo: "Repo", text: str) -> "Stats":
        """Create a :class:`Stats` object from output retrieved by
        :manpage:`git-diff(1)`.

        :param repo:
            Repository the diff output belongs to. Not consulted here; kept for
            interface compatibility.

        :param text:
            Numstat-style diff output: one tab-separated
            ``insertions, deletions, filename`` record per line, with ``-`` in the
            numeric columns for binary files.

        :return:
            :class:`git.Stats`
        """

        hsh: "HSH_TD" = {
            "total": {"insertions": 0, "deletions": 0, "lines": 0, "files": 0},
            "files": {},
        }
        for line in text.splitlines():
            (raw_insertions, raw_deletions, filename) = line.split("\t")
            # "-" marks a binary file, for which no line counts are available.
            insertions = int(raw_insertions) if raw_insertions != "-" else 0
            deletions = int(raw_deletions) if raw_deletions != "-" else 0
            hsh["total"]["insertions"] += insertions
            hsh["total"]["deletions"] += deletions
            hsh["total"]["lines"] += insertions + deletions
            hsh["total"]["files"] += 1
            files_dict: "Files_TD" = {
                "insertions": insertions,
                "deletions": deletions,
                "lines": insertions + deletions,
            }
            hsh["files"][filename.strip()] = files_dict
        # Use cls, not a hard-coded Stats, so subclasses get instances of themselves.
        return cls(hsh["total"], hsh["files"])
|
| 955 |
+
|
| 956 |
+
|
| 957 |
+
class IndexFileSHA1Writer:
    """File-like wrapper that tracks the SHA1 hash of all data written through it.

    The hash is appended to the stream when it is closed, or earlier on explicit
    request via :meth:`write_sha`.

    Only useful to the index file.

    :note:
        Based on the dulwich project.
    """

    __slots__ = ("f", "sha1")

    def __init__(self, f: IO) -> None:
        self.f = f
        self.sha1 = make_sha(b"")

    def write(self, data: AnyStr) -> int:
        """Fold `data` into the running hash, then write it to the wrapped stream.

        :return: Number of bytes written to the underlying stream.
        """
        self.sha1.update(data)
        return self.f.write(data)

    def write_sha(self) -> bytes:
        """Append the current digest to the stream and return it."""
        digest = self.sha1.digest()
        self.f.write(digest)
        return digest

    def close(self) -> bytes:
        """Write the final digest, close the wrapped stream, and return the digest."""
        digest = self.write_sha()
        self.f.close()
        return digest

    def tell(self) -> int:
        """Return the current position of the wrapped stream."""
        return self.f.tell()
|
| 990 |
+
|
| 991 |
+
|
| 992 |
+
class LockFile:
    """Provides methods to obtain, check for, and release a file based lock which
    should be used to handle concurrent access to the same file.

    As we are a utility class to be derived from, we only use protected methods.

    Locks will automatically be released on destruction.
    """

    __slots__ = ("_file_path", "_owns_lock")

    def __init__(self, file_path: "PathLike") -> None:
        self._file_path = file_path
        self._owns_lock = False

    def __del__(self) -> None:
        self._release_lock()

    def _lock_file_path(self) -> str:
        """:return: Path to lockfile"""
        return "%s.lock" % (self._file_path)

    def _has_lock(self) -> bool:
        """
        :return:
            True if this instance obtained the lock and has not released it yet.
        """
        return self._owns_lock

    def _obtain_lock_or_raise(self) -> None:
        """Create a lock file as flag for other instances, mark our instance as
        lock-holder.

        :raise IOError:
            If a lock was already present or a lock file could not be written.
        """
        if self._has_lock():
            return
        lock_file = self._lock_file_path()
        # Early check for a friendlier message; the authoritative check is the
        # atomic O_EXCL creation below.
        if os.path.isfile(lock_file):
            raise IOError(
                "Lock for file %r did already exist, delete %r in case the lock is illegal"
                % (self._file_path, lock_file)
            )

        try:
            # O_CREAT | O_EXCL makes lock creation atomic: if another process creates
            # the lock file between the check above and this call, os.open fails
            # instead of silently clobbering the other process's lock.
            fd = os.open(lock_file, os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644)
            os.close(fd)
        except OSError as e:
            raise IOError(str(e)) from e

        self._owns_lock = True

    def _obtain_lock(self) -> None:
        """The default implementation will raise if a lock cannot be obtained.

        Subclasses may override this method to provide a different implementation.
        """
        return self._obtain_lock_or_raise()

    def _release_lock(self) -> None:
        """Release our lock if we have one."""
        if not self._has_lock():
            return

        # If someone removed our file beforehand, lets just flag this issue instead of
        # failing, to make it more usable.
        lfp = self._lock_file_path()
        try:
            rmfile(lfp)
        except OSError:
            pass
        self._owns_lock = False
|
| 1068 |
+
|
| 1069 |
+
|
| 1070 |
+
class BlockingLockFile(LockFile):
    """The lock file will block until a lock could be obtained, or fail after a
    specified timeout.

    :note:
        If the directory containing the lock was removed, an exception will be raised
        during the blocking period, preventing hangs as the lock can never be obtained.
    """

    __slots__ = ("_check_interval", "_max_block_time")

    def __init__(
        self,
        file_path: PathLike,
        check_interval_s: float = 0.3,
        max_block_time_s: int = sys.maxsize,
    ) -> None:
        """Configure the instance.

        :param check_interval_s:
            Period of time to sleep until the lock is checked the next time.

        :param max_block_time_s:
            Maximum amount of seconds we may lock. By default, it waits a nearly
            unlimited time.
        """
        super().__init__(file_path)
        self._check_interval = check_interval_s
        self._max_block_time = max_block_time_s

    def _obtain_lock(self) -> None:
        """This method blocks until it obtained the lock, or raises :exc:`IOError` if it
        ran out of time or if the parent directory was not available anymore.

        If this method returns, you are guaranteed to own the lock.
        """
        starttime = time.time()
        maxtime = starttime + float(self._max_block_time)
        while True:
            try:
                super()._obtain_lock()
            except IOError as e:
                # Sanity check: if the directory leading to the lockfile is not
                # readable anymore, raise an exception.
                curtime = time.time()
                if not osp.isdir(osp.dirname(self._lock_file_path())):
                    msg = "Directory containing the lockfile %r was not readable anymore after waiting %g seconds" % (
                        self._lock_file_path(),
                        curtime - starttime,
                    )
                    raise IOError(msg) from e
                # END handle missing directory

                if curtime >= maxtime:
                    msg = "Waited %g seconds for lock at %r" % (
                        maxtime - starttime,
                        self._lock_file_path(),
                    )
                    raise IOError(msg) from e
                # END abort if we wait too long
                time.sleep(self._check_interval)
            else:
                break
        # END endless loop
|
| 1134 |
+
|
| 1135 |
+
|
| 1136 |
+
class IterableList(List["T_IterableObj"]):
    """List of iterable objects allowing to query an object by id or by named index::

        heads = repo.heads
        heads.master
        heads['master']
        heads[0]

    Iterable parent objects:

    * :class:`Commit <git.objects.Commit>`
    * :class:`Submodule <git.objects.submodule.base.Submodule>`
    * :class:`Reference <git.refs.reference.Reference>`
    * :class:`FetchInfo <git.remote.FetchInfo>`
    * :class:`PushInfo <git.remote.PushInfo>`

    Iterable via inheritance:

    * :class:`Head <git.refs.head.Head>`
    * :class:`TagReference <git.refs.tag.TagReference>`
    * :class:`RemoteReference <git.refs.remote.RemoteReference>`

    This requires an ``id_attribute`` name to be set which will be queried from its
    contained items to have a means for comparison.

    A prefix can be specified which is to be used in case the id returned by the items
    always contains a prefix that does not matter to the user, so it can be left out.
    """

    __slots__ = ("_id_attr", "_prefix")

    def __new__(cls, id_attr: str, prefix: str = "") -> "IterableList[T_IterableObj]":
        return super().__new__(cls)

    def __init__(self, id_attr: str, prefix: str = "") -> None:
        self._id_attr = id_attr
        self._prefix = prefix

    def __contains__(self, attr: object) -> bool:
        # First try identity match for performance.
        try:
            rval = list.__contains__(self, attr)
            if rval:
                return rval
        except (AttributeError, TypeError):
            pass
        # END handle match

        # Otherwise make a full name search.
        try:
            getattr(self, cast(str, attr))  # Use cast to silence mypy.
            return True
        except (AttributeError, TypeError):
            return False
        # END handle membership

    def __getattr__(self, attr: str) -> "T_IterableObj":
        attr = self._prefix + attr
        for item in self:
            if getattr(item, self._id_attr) == attr:
                return item
        # END for each item
        return list.__getattribute__(self, attr)

    def __getitem__(self, index: Union[SupportsIndex, int, slice, str]) -> "T_IterableObj":  # type: ignore[override]
        assert isinstance(index, (int, str, slice)), "Index of IterableList should be an int or str"

        if isinstance(index, int):
            return list.__getitem__(self, index)
        elif isinstance(index, slice):
            raise ValueError("Index should be an int or str")
        else:
            try:
                return getattr(self, index)
            except AttributeError as e:
                raise IndexError("No item found with id %r" % (self._prefix + index)) from e
            # END handle getattr

    def __delitem__(self, index: Union[SupportsIndex, int, slice, str]) -> None:
        """Delete an item positionally (int index) or by its (prefixed) id name (str).

        :raise IndexError:
            If a string index does not match any contained item's id.
        """
        assert isinstance(index, (int, str)), "Index of IterableList should be an int or str"

        if isinstance(index, int):
            # Integer indices -- including negative ones -- delete positionally.
            # (Previously ``del lst[-1]`` fell through to the "name not found"
            # branch and crashed with UnboundLocalError.)
            delindex = index
        else:
            # Resolve the name to a positional index.
            delindex = -1
            name = self._prefix + index
            for i, item in enumerate(self):
                if getattr(item, self._id_attr) == name:
                    delindex = i
                    break
                # END search index
            # END for each item
            if delindex == -1:
                raise IndexError("Item with name %s not found" % name)
            # END handle error
        # END get index to delete
        list.__delitem__(self, delindex)
|
| 1232 |
+
|
| 1233 |
+
|
| 1234 |
+
@runtime_checkable
class IterableObj(Protocol):
    """Protocol giving items of the git repository a uniform retrieval and iteration
    interface.

    Subclasses:

    * :class:`Submodule <git.objects.submodule.base.Submodule>`
    * :class:`Commit <git.objects.Commit>`
    * :class:`Reference <git.refs.reference.Reference>`
    * :class:`PushInfo <git.remote.PushInfo>`
    * :class:`FetchInfo <git.remote.FetchInfo>`
    * :class:`Remote <git.remote.Remote>`
    """

    __slots__ = ()

    _id_attribute_: str

    @classmethod
    @abstractmethod
    def iter_items(cls, repo: "Repo", *args: Any, **kwargs: Any) -> Iterator[T_IterableObj]:
        # Return-typed to be compatible with subtypes e.g. Remote.
        """Find (all) items of this type.

        Subclasses may interpret `args` and `kwargs` as filters of their choosing,
        but are obliged to yield every item when called without any extra arguments.

        :return:
            Iterator yielding Items
        """
        raise NotImplementedError("To be implemented by Subclass")

    @classmethod
    def list_items(cls, repo: "Repo", *args: Any, **kwargs: Any) -> IterableList[T_IterableObj]:
        """Eagerly collect all items of this type into a list.

        Arguments are forwarded unchanged to :meth:`iter_items`.

        :note:
            Prefer :meth:`iter_items` when many items are expected; materializing
            everything up front costs time and memory.

        :return:
            list(Item,...) list of item instances
        """
        items: IterableList = IterableList(cls._id_attribute_)
        items.extend(cls.iter_items(repo, *args, **kwargs))
        return items
|
| 1285 |
+
|
| 1286 |
+
|
| 1287 |
+
class IterableClassWatcher(type):
    """Metaclass that emits a :exc:`DeprecationWarning` whenever the deprecated
    :class:`git.util.Iterable` class is subclassed."""

    def __init__(cls, name: str, bases: Tuple, clsdict: Dict) -> None:
        for base in bases:
            # Only bases created by this metaclass (i.e. Iterable and its
            # descendants) trigger the deprecation notice.
            if type(base) is not IterableClassWatcher:
                continue
            warnings.warn(
                f"GitPython Iterable subclassed by {name}."
                " Iterable is deprecated due to naming clash since v3.1.18"
                " and will be removed in 4.0.0."
                " Use IterableObj instead.",
                DeprecationWarning,
                stacklevel=2,
            )
|
| 1302 |
+
|
| 1303 |
+
|
| 1304 |
+
class Iterable(metaclass=IterableClassWatcher):
    """Deprecated, use :class:`IterableObj` instead.

    Provides a uniform interface for retrieving and iterating items within the git
    repository.
    """

    __slots__ = ()

    _id_attribute_ = "attribute that most suitably identifies your instance"

    @classmethod
    def iter_items(cls, repo: "Repo", *args: Any, **kwargs: Any) -> Any:
        """Deprecated, use :class:`IterableObj` instead.

        Find (all) items of this type; see :meth:`IterableObj.iter_items` for usage
        details.

        :return:
            Iterator yielding Items
        """
        raise NotImplementedError("To be implemented by Subclass")

    @classmethod
    def list_items(cls, repo: "Repo", *args: Any, **kwargs: Any) -> Any:
        """Deprecated, use :class:`IterableObj` instead.

        Collect all items of this type into a list; see :meth:`IterableObj.list_items`
        for usage details.

        :return:
            list(Item,...) list of item instances
        """
        items: Any = IterableList(cls._id_attribute_)
        items.extend(cls.iter_items(repo, *args, **kwargs))
        return items
|
| 1342 |
+
|
| 1343 |
+
|
| 1344 |
+
# } END classes
|