Datasets:
Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +59 -59
- .idea/.gitignore +3 -0
- .idea/inspectionProfiles/Project_Default.xml +30 -0
- .idea/inspectionProfiles/profiles_settings.xml +6 -0
- .idea/misc.xml +7 -0
- .idea/modules.xml +8 -0
- .idea/pac.iml +10 -0
- .idea/vcs.xml +6 -0
- .idea/workspace.xml +84 -0
- .venv/.gitignore +2 -0
- .venv/Lib/site-packages/pip/__pip-runner__.py +50 -0
- .venv/Lib/site-packages/pip/_internal/__init__.py +19 -0
- .venv/Lib/site-packages/pip/_internal/build_env.py +311 -0
- .venv/Lib/site-packages/pip/_internal/cache.py +292 -0
- .venv/Lib/site-packages/pip/_internal/cli/__init__.py +4 -0
- .venv/Lib/site-packages/pip/_internal/cli/autocompletion.py +171 -0
- .venv/Lib/site-packages/pip/_internal/cli/base_command.py +236 -0
- .venv/Lib/site-packages/pip/_internal/cli/cmdoptions.py +1074 -0
- .venv/Lib/site-packages/pip/_internal/cli/command_context.py +27 -0
- .venv/Lib/site-packages/pip/_internal/cli/main.py +79 -0
- .venv/Lib/site-packages/pip/_internal/cli/main_parser.py +134 -0
- .venv/Lib/site-packages/pip/_internal/cli/parser.py +294 -0
- .venv/Lib/site-packages/pip/_internal/cli/progress_bars.py +68 -0
- .venv/Lib/site-packages/pip/_internal/cli/req_command.py +508 -0
- .venv/Lib/site-packages/pip/_internal/cli/spinners.py +159 -0
- .venv/Lib/site-packages/pip/_internal/cli/status_codes.py +6 -0
- .venv/Lib/site-packages/pip/_internal/commands/__init__.py +132 -0
- .venv/Lib/site-packages/pip/_internal/commands/cache.py +222 -0
- .venv/Lib/site-packages/pip/_internal/commands/check.py +54 -0
- .venv/Lib/site-packages/pip/_internal/commands/completion.py +121 -0
- .venv/Lib/site-packages/pip/_internal/commands/configuration.py +282 -0
- .venv/Lib/site-packages/pip/_internal/commands/debug.py +199 -0
- .venv/Lib/site-packages/pip/_internal/commands/download.py +147 -0
- .venv/Lib/site-packages/pip/_internal/commands/freeze.py +108 -0
- .venv/Lib/site-packages/pip/_internal/commands/hash.py +59 -0
- .venv/Lib/site-packages/pip/_internal/commands/help.py +41 -0
- .venv/Lib/site-packages/pip/_internal/commands/index.py +139 -0
- .venv/Lib/site-packages/pip/_internal/commands/inspect.py +92 -0
- .venv/Lib/site-packages/pip/_internal/commands/install.py +778 -0
- .venv/Lib/site-packages/pip/_internal/commands/list.py +368 -0
- .venv/Lib/site-packages/pip/_internal/commands/search.py +174 -0
- .venv/Lib/site-packages/pip/_internal/commands/show.py +189 -0
- .venv/Lib/site-packages/pip/_internal/commands/uninstall.py +113 -0
- .venv/Lib/site-packages/pip/_internal/commands/wheel.py +183 -0
- .venv/Lib/site-packages/pip/_internal/configuration.py +381 -0
- .venv/Lib/site-packages/pip/_internal/distributions/__init__.py +21 -0
- .venv/Lib/site-packages/pip/_internal/distributions/base.py +39 -0
- .venv/Lib/site-packages/pip/_internal/distributions/installed.py +23 -0
- .venv/Lib/site-packages/pip/_internal/distributions/sdist.py +150 -0
- .venv/Lib/site-packages/pip/_internal/distributions/wheel.py +34 -0
.gitattributes
CHANGED
|
@@ -1,59 +1,59 @@
|
|
| 1 |
-
*.7z filter=lfs diff=lfs merge=lfs -text
|
| 2 |
-
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 3 |
-
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 4 |
-
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
| 5 |
-
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
| 6 |
-
*.ftz filter=lfs diff=lfs merge=lfs -text
|
| 7 |
-
*.gz filter=lfs diff=lfs merge=lfs -text
|
| 8 |
-
*.h5 filter=lfs diff=lfs merge=lfs -text
|
| 9 |
-
*.joblib filter=lfs diff=lfs merge=lfs -text
|
| 10 |
-
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
| 11 |
-
*.lz4 filter=lfs diff=lfs merge=lfs -text
|
| 12 |
-
*.mds filter=lfs diff=lfs merge=lfs -text
|
| 13 |
-
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
| 14 |
-
*.model filter=lfs diff=lfs merge=lfs -text
|
| 15 |
-
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
| 16 |
-
*.npy filter=lfs diff=lfs merge=lfs -text
|
| 17 |
-
*.npz filter=lfs diff=lfs merge=lfs -text
|
| 18 |
-
*.onnx filter=lfs diff=lfs merge=lfs -text
|
| 19 |
-
*.ot filter=lfs diff=lfs merge=lfs -text
|
| 20 |
-
*.parquet filter=lfs diff=lfs merge=lfs -text
|
| 21 |
-
*.pb filter=lfs diff=lfs merge=lfs -text
|
| 22 |
-
*.pickle filter=lfs diff=lfs merge=lfs -text
|
| 23 |
-
*.pkl filter=lfs diff=lfs merge=lfs -text
|
| 24 |
-
*.pt filter=lfs diff=lfs merge=lfs -text
|
| 25 |
-
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 26 |
-
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 27 |
-
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 28 |
-
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
| 29 |
-
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 30 |
-
*.tar filter=lfs diff=lfs merge=lfs -text
|
| 31 |
-
*.tflite filter=lfs diff=lfs merge=lfs -text
|
| 32 |
-
*.tgz filter=lfs diff=lfs merge=lfs -text
|
| 33 |
-
*.wasm filter=lfs diff=lfs merge=lfs -text
|
| 34 |
-
*.xz filter=lfs diff=lfs merge=lfs -text
|
| 35 |
-
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 36 |
-
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 37 |
-
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 38 |
-
# Audio files - uncompressed
|
| 39 |
-
*.pcm filter=lfs diff=lfs merge=lfs -text
|
| 40 |
-
*.sam filter=lfs diff=lfs merge=lfs -text
|
| 41 |
-
*.raw filter=lfs diff=lfs merge=lfs -text
|
| 42 |
-
# Audio files - compressed
|
| 43 |
-
*.aac filter=lfs diff=lfs merge=lfs -text
|
| 44 |
-
*.flac filter=lfs diff=lfs merge=lfs -text
|
| 45 |
-
*.mp3 filter=lfs diff=lfs merge=lfs -text
|
| 46 |
-
*.ogg filter=lfs diff=lfs merge=lfs -text
|
| 47 |
-
*.wav filter=lfs diff=lfs merge=lfs -text
|
| 48 |
-
# Image files - uncompressed
|
| 49 |
-
*.bmp filter=lfs diff=lfs merge=lfs -text
|
| 50 |
-
*.gif filter=lfs diff=lfs merge=lfs -text
|
| 51 |
-
*.png filter=lfs diff=lfs merge=lfs -text
|
| 52 |
-
*.tiff filter=lfs diff=lfs merge=lfs -text
|
| 53 |
-
# Image files - compressed
|
| 54 |
-
*.jpg filter=lfs diff=lfs merge=lfs -text
|
| 55 |
-
*.jpeg filter=lfs diff=lfs merge=lfs -text
|
| 56 |
-
*.webp filter=lfs diff=lfs merge=lfs -text
|
| 57 |
-
# Video files - compressed
|
| 58 |
-
*.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 59 |
-
*.webm filter=lfs diff=lfs merge=lfs -text
|
|
|
|
| 1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
| 5 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
| 6 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
| 7 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
| 8 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
| 9 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
| 10 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
| 11 |
+
*.lz4 filter=lfs diff=lfs merge=lfs -text
|
| 12 |
+
*.mds filter=lfs diff=lfs merge=lfs -text
|
| 13 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
| 14 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
| 15 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
| 16 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
| 17 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
| 18 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
| 19 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
| 20 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
| 21 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
| 22 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
| 23 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
| 24 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
| 25 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 26 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 27 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 28 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
| 29 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 30 |
+
*.tar filter=lfs diff=lfs merge=lfs -text
|
| 31 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
| 32 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
| 33 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
| 34 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
| 35 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 37 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 38 |
+
# Audio files - uncompressed
|
| 39 |
+
*.pcm filter=lfs diff=lfs merge=lfs -text
|
| 40 |
+
*.sam filter=lfs diff=lfs merge=lfs -text
|
| 41 |
+
*.raw filter=lfs diff=lfs merge=lfs -text
|
| 42 |
+
# Audio files - compressed
|
| 43 |
+
*.aac filter=lfs diff=lfs merge=lfs -text
|
| 44 |
+
*.flac filter=lfs diff=lfs merge=lfs -text
|
| 45 |
+
*.mp3 filter=lfs diff=lfs merge=lfs -text
|
| 46 |
+
*.ogg filter=lfs diff=lfs merge=lfs -text
|
| 47 |
+
*.wav filter=lfs diff=lfs merge=lfs -text
|
| 48 |
+
# Image files - uncompressed
|
| 49 |
+
*.bmp filter=lfs diff=lfs merge=lfs -text
|
| 50 |
+
*.gif filter=lfs diff=lfs merge=lfs -text
|
| 51 |
+
*.png filter=lfs diff=lfs merge=lfs -text
|
| 52 |
+
*.tiff filter=lfs diff=lfs merge=lfs -text
|
| 53 |
+
# Image files - compressed
|
| 54 |
+
*.jpg filter=lfs diff=lfs merge=lfs -text
|
| 55 |
+
*.jpeg filter=lfs diff=lfs merge=lfs -text
|
| 56 |
+
*.webp filter=lfs diff=lfs merge=lfs -text
|
| 57 |
+
# Video files - compressed
|
| 58 |
+
*.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 59 |
+
*.webm filter=lfs diff=lfs merge=lfs -text
|
.idea/.gitignore
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Default ignored files
|
| 2 |
+
/shelf/
|
| 3 |
+
/workspace.xml
|
.idea/inspectionProfiles/Project_Default.xml
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<component name="InspectionProjectProfileManager">
|
| 2 |
+
<profile version="1.0">
|
| 3 |
+
<option name="myName" value="Project Default" />
|
| 4 |
+
<inspection_tool class="DuplicatedCode" enabled="true" level="WEAK WARNING" enabled_by_default="true">
|
| 5 |
+
<Languages>
|
| 6 |
+
<language minSize="118" name="Python" />
|
| 7 |
+
</Languages>
|
| 8 |
+
</inspection_tool>
|
| 9 |
+
<inspection_tool class="Eslint" enabled="true" level="WARNING" enabled_by_default="true" />
|
| 10 |
+
<inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
|
| 11 |
+
<option name="ignoredPackages">
|
| 12 |
+
<value>
|
| 13 |
+
<list size="4">
|
| 14 |
+
<item index="0" class="java.lang.String" itemvalue="tensorflowjs" />
|
| 15 |
+
<item index="1" class="java.lang.String" itemvalue="tensorflow-io-gcs-filesystem" />
|
| 16 |
+
<item index="2" class="java.lang.String" itemvalue="typeguard" />
|
| 17 |
+
<item index="3" class="java.lang.String" itemvalue="tensorflow_addons" />
|
| 18 |
+
</list>
|
| 19 |
+
</value>
|
| 20 |
+
</option>
|
| 21 |
+
</inspection_tool>
|
| 22 |
+
<inspection_tool class="PyPep8Inspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
|
| 23 |
+
<option name="ignoredErrors">
|
| 24 |
+
<list>
|
| 25 |
+
<option value="E731" />
|
| 26 |
+
</list>
|
| 27 |
+
</option>
|
| 28 |
+
</inspection_tool>
|
| 29 |
+
</profile>
|
| 30 |
+
</component>
|
.idea/inspectionProfiles/profiles_settings.xml
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<component name="InspectionProjectProfileManager">
|
| 2 |
+
<settings>
|
| 3 |
+
<option name="USE_PROJECT_PROFILE" value="false" />
|
| 4 |
+
<version value="1.0" />
|
| 5 |
+
</settings>
|
| 6 |
+
</component>
|
.idea/misc.xml
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 2 |
+
<project version="4">
|
| 3 |
+
<component name="Black">
|
| 4 |
+
<option name="sdkName" value="Python 3.10 (pac)" />
|
| 5 |
+
</component>
|
| 6 |
+
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.10 (pac)" project-jdk-type="Python SDK" />
|
| 7 |
+
</project>
|
.idea/modules.xml
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 2 |
+
<project version="4">
|
| 3 |
+
<component name="ProjectModuleManager">
|
| 4 |
+
<modules>
|
| 5 |
+
<module fileurl="file://$PROJECT_DIR$/.idea/pac.iml" filepath="$PROJECT_DIR$/.idea/pac.iml" />
|
| 6 |
+
</modules>
|
| 7 |
+
</component>
|
| 8 |
+
</project>
|
.idea/pac.iml
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 2 |
+
<module type="PYTHON_MODULE" version="4">
|
| 3 |
+
<component name="NewModuleRootManager">
|
| 4 |
+
<content url="file://$MODULE_DIR$">
|
| 5 |
+
<excludeFolder url="file://$MODULE_DIR$/.venv" />
|
| 6 |
+
</content>
|
| 7 |
+
<orderEntry type="jdk" jdkName="Python 3.10 (pac)" jdkType="Python SDK" />
|
| 8 |
+
<orderEntry type="sourceFolder" forTests="false" />
|
| 9 |
+
</component>
|
| 10 |
+
</module>
|
.idea/vcs.xml
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 2 |
+
<project version="4">
|
| 3 |
+
<component name="VcsDirectoryMappings">
|
| 4 |
+
<mapping directory="" vcs="Git" />
|
| 5 |
+
</component>
|
| 6 |
+
</project>
|
.idea/workspace.xml
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 2 |
+
<project version="4">
|
| 3 |
+
<component name="AutoImportSettings">
|
| 4 |
+
<option name="autoReloadType" value="SELECTIVE" />
|
| 5 |
+
</component>
|
| 6 |
+
<component name="ChangeListManager">
|
| 7 |
+
<list default="true" id="444ec9ca-b004-44c2-93e4-451ebc131871" name="Changes" comment="Adds metadata">
|
| 8 |
+
<change beforePath="$PROJECT_DIR$/README.md" beforeDir="false" afterPath="$PROJECT_DIR$/README.md" afterDir="false" />
|
| 9 |
+
</list>
|
| 10 |
+
<option name="SHOW_DIALOG" value="false" />
|
| 11 |
+
<option name="HIGHLIGHT_CONFLICTS" value="true" />
|
| 12 |
+
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
|
| 13 |
+
<option name="LAST_RESOLUTION" value="IGNORE" />
|
| 14 |
+
</component>
|
| 15 |
+
<component name="Git.Settings">
|
| 16 |
+
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
|
| 17 |
+
</component>
|
| 18 |
+
<component name="GitHubPullRequestSearchHistory"><![CDATA[{
|
| 19 |
+
"lastFilter": {
|
| 20 |
+
"state": "OPEN",
|
| 21 |
+
"assignee": "fegemo"
|
| 22 |
+
}
|
| 23 |
+
}]]></component>
|
| 24 |
+
<component name="GithubPullRequestsUISettings"><![CDATA[{
|
| 25 |
+
"selectedUrlAndAccountId": {
|
| 26 |
+
"url": "git@github.com:plucksquire/pac.git",
|
| 27 |
+
"accountId": "f7c969a6-2610-4425-915b-91e26a746a39"
|
| 28 |
+
}
|
| 29 |
+
}]]></component>
|
| 30 |
+
<component name="ProjectColorInfo"><![CDATA[{
|
| 31 |
+
"associatedIndex": 7
|
| 32 |
+
}]]></component>
|
| 33 |
+
<component name="ProjectId" id="2qu9qC2IEXiviSGDMehtlXLkgWT" />
|
| 34 |
+
<component name="ProjectViewState">
|
| 35 |
+
<option name="hideEmptyMiddlePackages" value="true" />
|
| 36 |
+
<option name="showLibraryContents" value="true" />
|
| 37 |
+
</component>
|
| 38 |
+
<component name="PropertiesComponent"><![CDATA[{
|
| 39 |
+
"keyToString": {
|
| 40 |
+
"RunOnceActivity.ShowReadmeOnStart": "true",
|
| 41 |
+
"RunOnceActivity.git.unshallow": "true",
|
| 42 |
+
"git-widget-placeholder": "main",
|
| 43 |
+
"ignore.virus.scanning.warn.message": "true",
|
| 44 |
+
"node.js.detected.package.eslint": "true",
|
| 45 |
+
"node.js.selected.package.eslint": "(autodetect)",
|
| 46 |
+
"nodejs_package_manager_path": "npm"
|
| 47 |
+
}
|
| 48 |
+
}]]></component>
|
| 49 |
+
<component name="SharedIndexes">
|
| 50 |
+
<attachedChunks>
|
| 51 |
+
<set>
|
| 52 |
+
<option value="bundled-python-sdk-0fc6c617c4bd-9a18a617cbe4-com.jetbrains.pycharm.pro.sharedIndexes.bundled-PY-243.22562.220" />
|
| 53 |
+
</set>
|
| 54 |
+
</attachedChunks>
|
| 55 |
+
</component>
|
| 56 |
+
<component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
|
| 57 |
+
<component name="TaskManager">
|
| 58 |
+
<task active="true" id="Default" summary="Default task">
|
| 59 |
+
<changelist id="444ec9ca-b004-44c2-93e4-451ebc131871" name="Changes" comment="" />
|
| 60 |
+
<created>1735501782328</created>
|
| 61 |
+
<option name="number" value="Default" />
|
| 62 |
+
<option name="presentableId" value="Default" />
|
| 63 |
+
<updated>1735501782328</updated>
|
| 64 |
+
<workItem from="1735501783439" duration="2016000" />
|
| 65 |
+
</task>
|
| 66 |
+
<task id="LOCAL-00001" summary="Adds metadata">
|
| 67 |
+
<option name="closed" value="true" />
|
| 68 |
+
<created>1735502948132</created>
|
| 69 |
+
<option name="number" value="00001" />
|
| 70 |
+
<option name="presentableId" value="LOCAL-00001" />
|
| 71 |
+
<option name="project" value="LOCAL" />
|
| 72 |
+
<updated>1735502948132</updated>
|
| 73 |
+
</task>
|
| 74 |
+
<option name="localTasksCounter" value="2" />
|
| 75 |
+
<servers />
|
| 76 |
+
</component>
|
| 77 |
+
<component name="TypeScriptGeneratedFilesManager">
|
| 78 |
+
<option name="version" value="3" />
|
| 79 |
+
</component>
|
| 80 |
+
<component name="VcsManagerConfiguration">
|
| 81 |
+
<MESSAGE value="Adds metadata" />
|
| 82 |
+
<option name="LAST_COMMIT_MESSAGE" value="Adds metadata" />
|
| 83 |
+
</component>
|
| 84 |
+
</project>
|
.venv/.gitignore
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# created by virtualenv automatically
|
| 2 |
+
*
|
.venv/Lib/site-packages/pip/__pip-runner__.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Execute exactly this copy of pip, within a different environment.
|
| 2 |
+
|
| 3 |
+
This file is named as it is, to ensure that this module can't be imported via
|
| 4 |
+
an import statement.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
# /!\ This version compatibility check section must be Python 2 compatible. /!\
|
| 8 |
+
|
| 9 |
+
import sys
|
| 10 |
+
|
| 11 |
+
# Copied from setup.py
|
| 12 |
+
PYTHON_REQUIRES = (3, 7)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def version_str(version): # type: ignore
|
| 16 |
+
return ".".join(str(v) for v in version)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
if sys.version_info[:2] < PYTHON_REQUIRES:
|
| 20 |
+
raise SystemExit(
|
| 21 |
+
"This version of pip does not support python {} (requires >={}).".format(
|
| 22 |
+
version_str(sys.version_info[:2]), version_str(PYTHON_REQUIRES)
|
| 23 |
+
)
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
# From here on, we can use Python 3 features, but the syntax must remain
|
| 27 |
+
# Python 2 compatible.
|
| 28 |
+
|
| 29 |
+
import runpy # noqa: E402
|
| 30 |
+
from importlib.machinery import PathFinder # noqa: E402
|
| 31 |
+
from os.path import dirname # noqa: E402
|
| 32 |
+
|
| 33 |
+
PIP_SOURCES_ROOT = dirname(dirname(__file__))
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class PipImportRedirectingFinder:
|
| 37 |
+
@classmethod
|
| 38 |
+
def find_spec(self, fullname, path=None, target=None): # type: ignore
|
| 39 |
+
if fullname != "pip":
|
| 40 |
+
return None
|
| 41 |
+
|
| 42 |
+
spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target)
|
| 43 |
+
assert spec, (PIP_SOURCES_ROOT, fullname)
|
| 44 |
+
return spec
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
sys.meta_path.insert(0, PipImportRedirectingFinder())
|
| 48 |
+
|
| 49 |
+
assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module"
|
| 50 |
+
runpy.run_module("pip", run_name="__main__", alter_sys=True)
|
.venv/Lib/site-packages/pip/_internal/__init__.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List, Optional
|
| 2 |
+
|
| 3 |
+
import pip._internal.utils.inject_securetransport # noqa
|
| 4 |
+
from pip._internal.utils import _log
|
| 5 |
+
|
| 6 |
+
# init_logging() must be called before any call to logging.getLogger()
|
| 7 |
+
# which happens at import of most modules.
|
| 8 |
+
_log.init_logging()
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def main(args: (Optional[List[str]]) = None) -> int:
|
| 12 |
+
"""This is preserved for old console scripts that may still be referencing
|
| 13 |
+
it.
|
| 14 |
+
|
| 15 |
+
For additional details, see https://github.com/pypa/pip/issues/7498.
|
| 16 |
+
"""
|
| 17 |
+
from pip._internal.utils.entrypoints import _wrapper
|
| 18 |
+
|
| 19 |
+
return _wrapper(args)
|
.venv/Lib/site-packages/pip/_internal/build_env.py
ADDED
|
@@ -0,0 +1,311 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Build Environment used for isolation during sdist building
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
import os
|
| 6 |
+
import pathlib
|
| 7 |
+
import site
|
| 8 |
+
import sys
|
| 9 |
+
import textwrap
|
| 10 |
+
from collections import OrderedDict
|
| 11 |
+
from types import TracebackType
|
| 12 |
+
from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union
|
| 13 |
+
|
| 14 |
+
from pip._vendor.certifi import where
|
| 15 |
+
from pip._vendor.packaging.requirements import Requirement
|
| 16 |
+
from pip._vendor.packaging.version import Version
|
| 17 |
+
|
| 18 |
+
from pip import __file__ as pip_location
|
| 19 |
+
from pip._internal.cli.spinners import open_spinner
|
| 20 |
+
from pip._internal.locations import get_platlib, get_purelib, get_scheme
|
| 21 |
+
from pip._internal.metadata import get_default_environment, get_environment
|
| 22 |
+
from pip._internal.utils.subprocess import call_subprocess
|
| 23 |
+
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
|
| 24 |
+
|
| 25 |
+
if TYPE_CHECKING:
|
| 26 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 27 |
+
|
| 28 |
+
logger = logging.getLogger(__name__)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
|
| 32 |
+
return (a, b) if a != b else (a,)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class _Prefix:
|
| 36 |
+
def __init__(self, path: str) -> None:
|
| 37 |
+
self.path = path
|
| 38 |
+
self.setup = False
|
| 39 |
+
scheme = get_scheme("", prefix=path)
|
| 40 |
+
self.bin_dir = scheme.scripts
|
| 41 |
+
self.lib_dirs = _dedup(scheme.purelib, scheme.platlib)
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def get_runnable_pip() -> str:
|
| 45 |
+
"""Get a file to pass to a Python executable, to run the currently-running pip.
|
| 46 |
+
|
| 47 |
+
This is used to run a pip subprocess, for installing requirements into the build
|
| 48 |
+
environment.
|
| 49 |
+
"""
|
| 50 |
+
source = pathlib.Path(pip_location).resolve().parent
|
| 51 |
+
|
| 52 |
+
if not source.is_dir():
|
| 53 |
+
# This would happen if someone is using pip from inside a zip file. In that
|
| 54 |
+
# case, we can use that directly.
|
| 55 |
+
return str(source)
|
| 56 |
+
|
| 57 |
+
return os.fsdecode(source / "__pip-runner__.py")
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def _get_system_sitepackages() -> Set[str]:
|
| 61 |
+
"""Get system site packages
|
| 62 |
+
|
| 63 |
+
Usually from site.getsitepackages,
|
| 64 |
+
but fallback on `get_purelib()/get_platlib()` if unavailable
|
| 65 |
+
(e.g. in a virtualenv created by virtualenv<20)
|
| 66 |
+
|
| 67 |
+
Returns normalized set of strings.
|
| 68 |
+
"""
|
| 69 |
+
if hasattr(site, "getsitepackages"):
|
| 70 |
+
system_sites = site.getsitepackages()
|
| 71 |
+
else:
|
| 72 |
+
# virtualenv < 20 overwrites site.py without getsitepackages
|
| 73 |
+
# fallback on get_purelib/get_platlib.
|
| 74 |
+
# this is known to miss things, but shouldn't in the cases
|
| 75 |
+
# where getsitepackages() has been removed (inside a virtualenv)
|
| 76 |
+
system_sites = [get_purelib(), get_platlib()]
|
| 77 |
+
return {os.path.normcase(path) for path in system_sites}
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class BuildEnvironment:
|
| 81 |
+
"""Creates and manages an isolated environment to install build deps"""
|
| 82 |
+
|
| 83 |
+
def __init__(self) -> None:
|
| 84 |
+
temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
|
| 85 |
+
|
| 86 |
+
self._prefixes = OrderedDict(
|
| 87 |
+
(name, _Prefix(os.path.join(temp_dir.path, name)))
|
| 88 |
+
for name in ("normal", "overlay")
|
| 89 |
+
)
|
| 90 |
+
|
| 91 |
+
self._bin_dirs: List[str] = []
|
| 92 |
+
self._lib_dirs: List[str] = []
|
| 93 |
+
for prefix in reversed(list(self._prefixes.values())):
|
| 94 |
+
self._bin_dirs.append(prefix.bin_dir)
|
| 95 |
+
self._lib_dirs.extend(prefix.lib_dirs)
|
| 96 |
+
|
| 97 |
+
# Customize site to:
|
| 98 |
+
# - ensure .pth files are honored
|
| 99 |
+
# - prevent access to system site packages
|
| 100 |
+
system_sites = _get_system_sitepackages()
|
| 101 |
+
|
| 102 |
+
self._site_dir = os.path.join(temp_dir.path, "site")
|
| 103 |
+
if not os.path.exists(self._site_dir):
|
| 104 |
+
os.mkdir(self._site_dir)
|
| 105 |
+
with open(
|
| 106 |
+
os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
|
| 107 |
+
) as fp:
|
| 108 |
+
fp.write(
|
| 109 |
+
textwrap.dedent(
|
| 110 |
+
"""
|
| 111 |
+
import os, site, sys
|
| 112 |
+
|
| 113 |
+
# First, drop system-sites related paths.
|
| 114 |
+
original_sys_path = sys.path[:]
|
| 115 |
+
known_paths = set()
|
| 116 |
+
for path in {system_sites!r}:
|
| 117 |
+
site.addsitedir(path, known_paths=known_paths)
|
| 118 |
+
system_paths = set(
|
| 119 |
+
os.path.normcase(path)
|
| 120 |
+
for path in sys.path[len(original_sys_path):]
|
| 121 |
+
)
|
| 122 |
+
original_sys_path = [
|
| 123 |
+
path for path in original_sys_path
|
| 124 |
+
if os.path.normcase(path) not in system_paths
|
| 125 |
+
]
|
| 126 |
+
sys.path = original_sys_path
|
| 127 |
+
|
| 128 |
+
# Second, add lib directories.
|
| 129 |
+
# ensuring .pth file are processed.
|
| 130 |
+
for path in {lib_dirs!r}:
|
| 131 |
+
assert not path in sys.path
|
| 132 |
+
site.addsitedir(path)
|
| 133 |
+
"""
|
| 134 |
+
).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
|
| 135 |
+
)
|
| 136 |
+
|
| 137 |
+
def __enter__(self) -> None:
|
| 138 |
+
self._save_env = {
|
| 139 |
+
name: os.environ.get(name, None)
|
| 140 |
+
for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
|
| 141 |
+
}
|
| 142 |
+
|
| 143 |
+
path = self._bin_dirs[:]
|
| 144 |
+
old_path = self._save_env["PATH"]
|
| 145 |
+
if old_path:
|
| 146 |
+
path.extend(old_path.split(os.pathsep))
|
| 147 |
+
|
| 148 |
+
pythonpath = [self._site_dir]
|
| 149 |
+
|
| 150 |
+
os.environ.update(
|
| 151 |
+
{
|
| 152 |
+
"PATH": os.pathsep.join(path),
|
| 153 |
+
"PYTHONNOUSERSITE": "1",
|
| 154 |
+
"PYTHONPATH": os.pathsep.join(pythonpath),
|
| 155 |
+
}
|
| 156 |
+
)
|
| 157 |
+
|
| 158 |
+
def __exit__(
|
| 159 |
+
self,
|
| 160 |
+
exc_type: Optional[Type[BaseException]],
|
| 161 |
+
exc_val: Optional[BaseException],
|
| 162 |
+
exc_tb: Optional[TracebackType],
|
| 163 |
+
) -> None:
|
| 164 |
+
for varname, old_value in self._save_env.items():
|
| 165 |
+
if old_value is None:
|
| 166 |
+
os.environ.pop(varname, None)
|
| 167 |
+
else:
|
| 168 |
+
os.environ[varname] = old_value
|
| 169 |
+
|
| 170 |
+
def check_requirements(
|
| 171 |
+
self, reqs: Iterable[str]
|
| 172 |
+
) -> Tuple[Set[Tuple[str, str]], Set[str]]:
|
| 173 |
+
"""Return 2 sets:
|
| 174 |
+
- conflicting requirements: set of (installed, wanted) reqs tuples
|
| 175 |
+
- missing requirements: set of reqs
|
| 176 |
+
"""
|
| 177 |
+
missing = set()
|
| 178 |
+
conflicting = set()
|
| 179 |
+
if reqs:
|
| 180 |
+
env = (
|
| 181 |
+
get_environment(self._lib_dirs)
|
| 182 |
+
if hasattr(self, "_lib_dirs")
|
| 183 |
+
else get_default_environment()
|
| 184 |
+
)
|
| 185 |
+
for req_str in reqs:
|
| 186 |
+
req = Requirement(req_str)
|
| 187 |
+
# We're explicitly evaluating with an empty extra value, since build
|
| 188 |
+
# environments are not provided any mechanism to select specific extras.
|
| 189 |
+
if req.marker is not None and not req.marker.evaluate({"extra": ""}):
|
| 190 |
+
continue
|
| 191 |
+
dist = env.get_distribution(req.name)
|
| 192 |
+
if not dist:
|
| 193 |
+
missing.add(req_str)
|
| 194 |
+
continue
|
| 195 |
+
if isinstance(dist.version, Version):
|
| 196 |
+
installed_req_str = f"{req.name}=={dist.version}"
|
| 197 |
+
else:
|
| 198 |
+
installed_req_str = f"{req.name}==={dist.version}"
|
| 199 |
+
if not req.specifier.contains(dist.version, prereleases=True):
|
| 200 |
+
conflicting.add((installed_req_str, req_str))
|
| 201 |
+
# FIXME: Consider direct URL?
|
| 202 |
+
return conflicting, missing
|
| 203 |
+
|
| 204 |
+
def install_requirements(
|
| 205 |
+
self,
|
| 206 |
+
finder: "PackageFinder",
|
| 207 |
+
requirements: Iterable[str],
|
| 208 |
+
prefix_as_string: str,
|
| 209 |
+
*,
|
| 210 |
+
kind: str,
|
| 211 |
+
) -> None:
|
| 212 |
+
prefix = self._prefixes[prefix_as_string]
|
| 213 |
+
assert not prefix.setup
|
| 214 |
+
prefix.setup = True
|
| 215 |
+
if not requirements:
|
| 216 |
+
return
|
| 217 |
+
self._install_requirements(
|
| 218 |
+
get_runnable_pip(),
|
| 219 |
+
finder,
|
| 220 |
+
requirements,
|
| 221 |
+
prefix,
|
| 222 |
+
kind=kind,
|
| 223 |
+
)
|
| 224 |
+
|
| 225 |
+
@staticmethod
|
| 226 |
+
def _install_requirements(
|
| 227 |
+
pip_runnable: str,
|
| 228 |
+
finder: "PackageFinder",
|
| 229 |
+
requirements: Iterable[str],
|
| 230 |
+
prefix: _Prefix,
|
| 231 |
+
*,
|
| 232 |
+
kind: str,
|
| 233 |
+
) -> None:
|
| 234 |
+
args: List[str] = [
|
| 235 |
+
sys.executable,
|
| 236 |
+
pip_runnable,
|
| 237 |
+
"install",
|
| 238 |
+
"--ignore-installed",
|
| 239 |
+
"--no-user",
|
| 240 |
+
"--prefix",
|
| 241 |
+
prefix.path,
|
| 242 |
+
"--no-warn-script-location",
|
| 243 |
+
]
|
| 244 |
+
if logger.getEffectiveLevel() <= logging.DEBUG:
|
| 245 |
+
args.append("-v")
|
| 246 |
+
for format_control in ("no_binary", "only_binary"):
|
| 247 |
+
formats = getattr(finder.format_control, format_control)
|
| 248 |
+
args.extend(
|
| 249 |
+
(
|
| 250 |
+
"--" + format_control.replace("_", "-"),
|
| 251 |
+
",".join(sorted(formats or {":none:"})),
|
| 252 |
+
)
|
| 253 |
+
)
|
| 254 |
+
|
| 255 |
+
index_urls = finder.index_urls
|
| 256 |
+
if index_urls:
|
| 257 |
+
args.extend(["-i", index_urls[0]])
|
| 258 |
+
for extra_index in index_urls[1:]:
|
| 259 |
+
args.extend(["--extra-index-url", extra_index])
|
| 260 |
+
else:
|
| 261 |
+
args.append("--no-index")
|
| 262 |
+
for link in finder.find_links:
|
| 263 |
+
args.extend(["--find-links", link])
|
| 264 |
+
|
| 265 |
+
for host in finder.trusted_hosts:
|
| 266 |
+
args.extend(["--trusted-host", host])
|
| 267 |
+
if finder.allow_all_prereleases:
|
| 268 |
+
args.append("--pre")
|
| 269 |
+
if finder.prefer_binary:
|
| 270 |
+
args.append("--prefer-binary")
|
| 271 |
+
args.append("--")
|
| 272 |
+
args.extend(requirements)
|
| 273 |
+
extra_environ = {"_PIP_STANDALONE_CERT": where()}
|
| 274 |
+
with open_spinner(f"Installing {kind}") as spinner:
|
| 275 |
+
call_subprocess(
|
| 276 |
+
args,
|
| 277 |
+
command_desc=f"pip subprocess to install {kind}",
|
| 278 |
+
spinner=spinner,
|
| 279 |
+
extra_environ=extra_environ,
|
| 280 |
+
)
|
| 281 |
+
|
| 282 |
+
|
| 283 |
+
class NoOpBuildEnvironment(BuildEnvironment):
    """A no-op drop-in replacement for BuildEnvironment.

    Used when build isolation is disabled: it satisfies the BuildEnvironment
    interface (context manager + cleanup) without creating any prefixes or
    modifying the running interpreter's environment.
    """

    def __init__(self) -> None:
        # Intentionally does not call super().__init__(): no temp dirs or
        # prefixes are created.
        pass

    def __enter__(self) -> None:
        # No environment variables to save/patch.
        pass

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Nothing to restore.
        pass

    def cleanup(self) -> None:
        # Nothing was created, so nothing to remove.
        pass

    def install_requirements(
        self,
        finder: "PackageFinder",
        requirements: Iterable[str],
        prefix_as_string: str,
        *,
        kind: str,
    ) -> None:
        # Installing into a no-op environment is a programming error, not a
        # silently ignored request.
        raise NotImplementedError()
|
.venv/Lib/site-packages/pip/_internal/cache.py
ADDED
|
@@ -0,0 +1,292 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Cache Management
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import hashlib
|
| 5 |
+
import json
|
| 6 |
+
import logging
|
| 7 |
+
import os
|
| 8 |
+
from pathlib import Path
|
| 9 |
+
from typing import Any, Dict, List, Optional
|
| 10 |
+
|
| 11 |
+
from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
|
| 12 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 13 |
+
|
| 14 |
+
from pip._internal.exceptions import InvalidWheelFilename
|
| 15 |
+
from pip._internal.models.direct_url import DirectUrl
|
| 16 |
+
from pip._internal.models.link import Link
|
| 17 |
+
from pip._internal.models.wheel import Wheel
|
| 18 |
+
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
|
| 19 |
+
from pip._internal.utils.urls import path_to_url
|
| 20 |
+
|
| 21 |
+
logger = logging.getLogger(__name__)
|
| 22 |
+
|
| 23 |
+
ORIGIN_JSON_NAME = "origin.json"
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def _hash_dict(d: Dict[str, str]) -> str:
|
| 27 |
+
"""Return a stable sha224 of a dictionary."""
|
| 28 |
+
s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
|
| 29 |
+
return hashlib.sha224(s.encode("ascii")).hexdigest()
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class Cache:
    """An abstract class - provides cache directories for data from links.

    Subclasses implement :meth:`get_path_for_link` and :meth:`get`.

    :param cache_dir: The root of the cache.  An empty value disables caching.
    """

    def __init__(self, cache_dir: str) -> None:
        super().__init__()
        # A non-empty cache_dir must be absolute; an empty one is normalized
        # to None, which disables caching (see _get_candidates).
        assert not cache_dir or os.path.isabs(cache_dir)
        self.cache_dir = cache_dir or None

    def _get_cache_path_parts(self, link: Link) -> List[str]:
        """Get parts of path that must be os.path.joined with cache_dir."""

        # We want to generate an url to use as our cache key, we don't want to
        # just re-use the URL because it might have other items in the fragment
        # and we don't care about those.
        key_parts = {"url": link.url_without_fragment}
        if link.hash_name is not None and link.hash is not None:
            key_parts[link.hash_name] = link.hash
        if link.subdirectory_fragment:
            key_parts["subdirectory"] = link.subdirectory_fragment

        # Include interpreter name, major and minor version in cache key
        # to cope with ill-behaved sdists that build a different wheel
        # depending on the python version their setup.py is being run on,
        # and don't encode the difference in compatibility tags.
        # https://github.com/pypa/pip/issues/7296
        key_parts["interpreter_name"] = interpreter_name()
        key_parts["interpreter_version"] = interpreter_version()

        # Encode our key url with sha224, we'll use this because it has similar
        # security properties to sha256, but with a shorter total output (and
        # thus less secure). However the differences don't make a lot of
        # difference for our use case here.
        hashed = _hash_dict(key_parts)

        # We want to nest the directories some to prevent having a ton of top
        # level directories where we might run out of sub directories on some
        # FS.
        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

        return parts

    def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
        """Return (filename, directory) pairs for every entry cached for link.

        Returns an empty list when caching is disabled or the inputs are
        missing; entries are not validated here — callers filter them.
        """
        can_not_cache = not self.cache_dir or not canonical_package_name or not link
        if can_not_cache:
            return []

        candidates = []
        path = self.get_path_for_link(link)
        if os.path.isdir(path):
            for candidate in os.listdir(path):
                candidates.append((candidate, path))
        return candidates

    def get_path_for_link(self, link: Link) -> str:
        """Return a directory to store cached items in for link."""
        raise NotImplementedError()

    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        """Returns a link to a cached item if it exists, otherwise returns the
        passed link.
        """
        raise NotImplementedError()
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
class SimpleWheelCache(Cache):
    """A cache of wheels for future installs."""

    def __init__(self, cache_dir: str) -> None:
        super().__init__(cache_dir)

    def get_path_for_link(self, link: Link) -> str:
        """Return a directory to store cached wheels for link

        Because there are M wheels for any one sdist, we provide a directory
        to cache them in, and then consult that directory when looking up
        cache hits.

        We only insert things into the cache if they have plausible version
        numbers, so that we don't contaminate the cache with things that were
        not unique. E.g. ./package might have dozens of installs done for it
        and build a version of 0.0...and if we built and cached a wheel, we'd
        end up using the same wheel even if the source has been edited.

        :param link: The link of the sdist for which this will cache wheels.
        """
        parts = self._get_cache_path_parts(link)
        assert self.cache_dir
        # Store wheels within the root cache_dir
        return os.path.join(self.cache_dir, "wheels", *parts)

    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        """Return a link to the best cached wheel for *link*, or *link* itself.

        A cached wheel is usable only if its filename parses, its
        distribution name matches *package_name*, and it is supported by
        the current interpreter's tags; the best match is the one with the
        highest-priority (lowest index) supported tag.
        """
        candidates = []

        if not package_name:
            return link

        canonical_package_name = canonicalize_name(package_name)
        for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
            try:
                wheel = Wheel(wheel_name)
            except InvalidWheelFilename:
                # Not a wheel file at all; ignore it.
                continue
            if canonicalize_name(wheel.name) != canonical_package_name:
                logger.debug(
                    "Ignoring cached wheel %s for %s as it "
                    "does not match the expected distribution name %s.",
                    wheel_name,
                    link,
                    package_name,
                )
                continue
            if not wheel.supported(supported_tags):
                # Built for a different python/arch/etc
                continue
            candidates.append(
                (
                    wheel.support_index_min(supported_tags),
                    wheel_name,
                    wheel_dir,
                )
            )

        if not candidates:
            return link

        # min() picks the candidate with the most-preferred tag.
        _, wheel_name, wheel_dir = min(candidates)
        return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache backed by its own temporary cache directory.

    The directory is globally managed, so pip's global tempdir manager
    removes it automatically at process exit.
    """

    def __init__(self) -> None:
        temp_dir = TempDirectory(
            kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
            globally_managed=True,
        )
        self._temp_dir = temp_dir
        super().__init__(temp_dir.path)
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
class CacheEntry:
    """A cached wheel plus provenance metadata.

    Reads the sibling ``origin.json`` (if any) next to the cached wheel to
    recover the original download URL; an unreadable origin file is logged
    and ignored rather than failing the cache lookup.
    """

    def __init__(
        self,
        link: Link,
        persistent: bool,
    ):
        # link: location of the cached wheel file.
        self.link = link
        # persistent: True if found in the on-disk cache, False if ephemeral.
        self.persistent = persistent
        # origin: provenance of the cached wheel, when recorded and parseable.
        self.origin: Optional[DirectUrl] = None
        origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
        if origin_direct_url_path.exists():
            try:
                self.origin = DirectUrl.from_json(
                    origin_direct_url_path.read_text(encoding="utf-8")
                )
            except Exception as e:
                logger.warning(
                    "Ignoring invalid cache entry origin file %s for %s (%s)",
                    origin_direct_url_path,
                    link.filename,
                    e,
                )
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
class WheelCache(Cache):
    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache

    This Cache allows for graceful degradation, using the ephem wheel cache
    when a certain link is not found in the simple wheel cache first.
    """

    def __init__(self, cache_dir: str) -> None:
        super().__init__(cache_dir)
        # Persistent on-disk cache, consulted first.
        self._wheel_cache = SimpleWheelCache(cache_dir)
        # Process-lifetime fallback cache.
        self._ephem_cache = EphemWheelCache()

    def get_path_for_link(self, link: Link) -> str:
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link: Link) -> str:
        return self._ephem_cache.get_path_for_link(link)

    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        cache_entry = self.get_cache_entry(link, package_name, supported_tags)
        if cache_entry is None:
            return link
        return cache_entry.link

    def get_cache_entry(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Optional[CacheEntry]:
        """Returns a CacheEntry with a link to a cached item if it exists or
        None. The cache entry indicates if the item was found in the persistent
        or ephemeral cache.
        """
        retval = self._wheel_cache.get(
            link=link,
            package_name=package_name,
            supported_tags=supported_tags,
        )
        # SimpleWheelCache.get returns the input link unchanged on a miss,
        # so identity comparison detects a hit.
        if retval is not link:
            return CacheEntry(retval, persistent=True)

        retval = self._ephem_cache.get(
            link=link,
            package_name=package_name,
            supported_tags=supported_tags,
        )
        if retval is not link:
            return CacheEntry(retval, persistent=False)

        return None

    @staticmethod
    def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
        # Persist the download provenance next to the cached wheel so later
        # installs can report where the wheel originally came from.
        origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
        if origin_path.exists():
            try:
                origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8"))
            except Exception as e:
                logger.warning(
                    "Could not read origin file %s in cache entry (%s). "
                    "Will attempt to overwrite it.",
                    origin_path,
                    e,
                )
            else:
                # TODO: use DirectUrl.equivalent when
                # https://github.com/pypa/pip/pull/10564 is merged.
                if origin.url != download_info.url:
                    logger.warning(
                        "Origin URL %s in cache entry %s does not match download URL "
                        "%s. This is likely a pip bug or a cache corruption issue. "
                        "Will overwrite it with the new value.",
                        origin.url,
                        cache_dir,
                        download_info.url,
                    )
        origin_path.write_text(download_info.to_json(), encoding="utf-8")
|
.venv/Lib/site-packages/pip/_internal/cli/__init__.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Subpackage containing all of pip's command line interface related code
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
# This file intentionally does not import submodules
|
.venv/Lib/site-packages/pip/_internal/cli/autocompletion.py
ADDED
|
@@ -0,0 +1,171 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Logic that powers autocompletion installed by ``pip completion``.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import optparse
|
| 5 |
+
import os
|
| 6 |
+
import sys
|
| 7 |
+
from itertools import chain
|
| 8 |
+
from typing import Any, Iterable, List, Optional
|
| 9 |
+
|
| 10 |
+
from pip._internal.cli.main_parser import create_main_parser
|
| 11 |
+
from pip._internal.commands import commands_dict, create_command
|
| 12 |
+
from pip._internal.metadata import get_default_environment
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def autocomplete() -> None:
    """Entry Point for completion of main and subcommand options.

    Reads the shell-completion protocol from the environment
    (``COMP_WORDS``/``COMP_CWORD``), prints candidate completions to
    stdout, and always terminates the process via ``sys.exit(1)`` so the
    normal pip command never runs during completion.
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if "PIP_AUTO_COMPLETE" not in os.environ:
        return
    # COMP_WORDS holds the whole command line; drop the leading "pip".
    cwords = os.environ["COMP_WORDS"].split()[1:]
    cword = int(os.environ["COMP_CWORD"])
    try:
        # The word currently being completed (may be empty at end of line).
        current = cwords[cword - 1]
    except IndexError:
        current = ""

    parser = create_main_parser()
    subcommands = list(commands_dict)
    options = []

    # subcommand
    subcommand_name: Optional[str] = None
    for word in cwords:
        if word in subcommands:
            subcommand_name = word
            break
    # subcommand options
    if subcommand_name is not None:
        # special case: 'help' subcommand has no options
        if subcommand_name == "help":
            sys.exit(1)
        # special case: list locally installed dists for show and uninstall
        should_list_installed = not current.startswith("-") and subcommand_name in [
            "show",
            "uninstall",
        ]
        if should_list_installed:
            env = get_default_environment()
            lc = current.lower()
            installed = [
                dist.canonical_name
                for dist in env.iter_installed_distributions(local_only=True)
                if dist.canonical_name.startswith(lc)
                and dist.canonical_name not in cwords[1:]
            ]
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        should_list_installables = (
            not current.startswith("-") and subcommand_name == "install"
        )
        if should_list_installables:
            for path in auto_complete_paths(current, "path"):
                print(path)
            sys.exit(1)

        subcommand = create_command(subcommand_name)

        # Collect every visible option string together with its arg count.
        for opt in subcommand.parser.option_list_all:
            if opt.help != optparse.SUPPRESS_HELP:
                for opt_str in opt._long_opts + opt._short_opts:
                    options.append((opt_str, opt.nargs))

        # filter out previously specified options from available options
        prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        # get completion type given cwords and available subcommand options
        completion_type = get_path_completion_type(
            cwords,
            cword,
            subcommand.parser.option_list_all,
        )
        # get completion files and directories if ``completion_type`` is
        # ``<file>``, ``<dir>`` or ``<path>``
        if completion_type:
            paths = auto_complete_paths(current, completion_type)
            options = [(path, 0) for path in paths]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += "="
            print(opt_label)
    else:
        # show main parser options only when necessary

        opts = [i.option_list for i in parser.option_groups]
        opts.append(parser.option_list)
        flattened_opts = chain.from_iterable(opts)
        if current.startswith("-"):
            for opt in flattened_opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts
        else:
            # get completion type given cwords and all available options
            completion_type = get_path_completion_type(cwords, cword, flattened_opts)
            if completion_type:
                subcommands = list(auto_complete_paths(current, completion_type))

        print(" ".join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def get_path_completion_type(
    cwords: List[str], cword: int, opts: Iterable[Any]
) -> Optional[str]:
    """Get the type of path completion (``file``, ``dir``, ``path`` or None)

    :param cwords: same as the environmental variable ``COMP_WORDS``
    :param cword: same as the environmental variable ``COMP_CWORD``
    :param opts: The available options to check
    :return: path completion type (``file``, ``dir``, ``path`` or None)
    """
    # Path completion only makes sense right after an option word.
    if cword < 2:
        return None
    previous = cwords[cword - 2]
    if not previous.startswith("-"):
        return None
    # Strip any inline "=value" part before matching option strings.
    flag = previous.split("=")[0]
    path_kinds = ("path", "file", "dir")
    for opt in opts:
        if opt.help == optparse.SUPPRESS_HELP:
            continue
        # str(opt) renders as e.g. "-f/--file"; match any spelling.
        if flag not in str(opt).split("/"):
            continue
        metavar = opt.metavar
        if not metavar or any(part in path_kinds for part in metavar.split("/")):
            return metavar
    return None
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
    """If ``completion_type`` is ``file`` or ``path``, list all regular files
    and directories starting with ``current``; otherwise only list directories
    starting with ``current``.

    :param current: The word to be completed
    :param completion_type: path completion type(``file``, ``path`` or ``dir``)
    :return: A generator of regular files and/or directories
    """
    directory, prefix = os.path.split(current)
    abs_dir = os.path.abspath(directory)
    # Don't complete paths if they can't be accessed
    if not os.access(abs_dir, os.R_OK):
        return
    prefix = os.path.normcase(prefix)
    for entry in os.listdir(abs_dir):
        # Only consider entries that extend what the user already typed.
        if not os.path.normcase(entry).startswith(prefix):
            continue
        full = os.path.join(abs_dir, entry)
        completed = os.path.normcase(os.path.join(directory, entry))
        # Regular files complete only for <file>/<path>; directories always
        # complete, with a trailing separator to allow drilling down.
        if completion_type != "dir" and os.path.isfile(full):
            yield completed
        elif os.path.isdir(full):
            yield os.path.join(completed, "")
|
.venv/Lib/site-packages/pip/_internal/cli/base_command.py
ADDED
|
@@ -0,0 +1,236 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Base Command class, and related routines"""
|
| 2 |
+
|
| 3 |
+
import functools
|
| 4 |
+
import logging
|
| 5 |
+
import logging.config
|
| 6 |
+
import optparse
|
| 7 |
+
import os
|
| 8 |
+
import sys
|
| 9 |
+
import traceback
|
| 10 |
+
from optparse import Values
|
| 11 |
+
from typing import Any, Callable, List, Optional, Tuple
|
| 12 |
+
|
| 13 |
+
from pip._vendor.rich import traceback as rich_traceback
|
| 14 |
+
|
| 15 |
+
from pip._internal.cli import cmdoptions
|
| 16 |
+
from pip._internal.cli.command_context import CommandContextMixIn
|
| 17 |
+
from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
|
| 18 |
+
from pip._internal.cli.status_codes import (
|
| 19 |
+
ERROR,
|
| 20 |
+
PREVIOUS_BUILD_DIR_ERROR,
|
| 21 |
+
UNKNOWN_ERROR,
|
| 22 |
+
VIRTUALENV_NOT_FOUND,
|
| 23 |
+
)
|
| 24 |
+
from pip._internal.exceptions import (
|
| 25 |
+
BadCommand,
|
| 26 |
+
CommandError,
|
| 27 |
+
DiagnosticPipError,
|
| 28 |
+
InstallationError,
|
| 29 |
+
NetworkConnectionError,
|
| 30 |
+
PreviousBuildDirError,
|
| 31 |
+
UninstallationError,
|
| 32 |
+
)
|
| 33 |
+
from pip._internal.utils.filesystem import check_path_owner
|
| 34 |
+
from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
|
| 35 |
+
from pip._internal.utils.misc import get_prog, normalize_path
|
| 36 |
+
from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
|
| 37 |
+
from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
|
| 38 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
| 39 |
+
|
| 40 |
+
__all__ = ["Command"]
|
| 41 |
+
|
| 42 |
+
logger = logging.getLogger(__name__)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class Command(CommandContextMixIn):
|
| 46 |
+
usage: str = ""
|
| 47 |
+
ignore_require_venv: bool = False
|
| 48 |
+
|
| 49 |
+
def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
|
| 50 |
+
super().__init__()
|
| 51 |
+
|
| 52 |
+
self.name = name
|
| 53 |
+
self.summary = summary
|
| 54 |
+
self.parser = ConfigOptionParser(
|
| 55 |
+
usage=self.usage,
|
| 56 |
+
prog=f"{get_prog()} {name}",
|
| 57 |
+
formatter=UpdatingDefaultsHelpFormatter(),
|
| 58 |
+
add_help_option=False,
|
| 59 |
+
name=name,
|
| 60 |
+
description=self.__doc__,
|
| 61 |
+
isolated=isolated,
|
| 62 |
+
)
|
| 63 |
+
|
| 64 |
+
self.tempdir_registry: Optional[TempDirRegistry] = None
|
| 65 |
+
|
| 66 |
+
# Commands should add options to this option group
|
| 67 |
+
optgroup_name = f"{self.name.capitalize()} Options"
|
| 68 |
+
self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
|
| 69 |
+
|
| 70 |
+
# Add the general options
|
| 71 |
+
gen_opts = cmdoptions.make_option_group(
|
| 72 |
+
cmdoptions.general_group,
|
| 73 |
+
self.parser,
|
| 74 |
+
)
|
| 75 |
+
self.parser.add_option_group(gen_opts)
|
| 76 |
+
|
| 77 |
+
self.add_options()
|
| 78 |
+
|
| 79 |
+
def add_options(self) -> None:
|
| 80 |
+
pass
|
| 81 |
+
|
| 82 |
+
def handle_pip_version_check(self, options: Values) -> None:
|
| 83 |
+
"""
|
| 84 |
+
This is a no-op so that commands by default do not do the pip version
|
| 85 |
+
check.
|
| 86 |
+
"""
|
| 87 |
+
# Make sure we do the pip version check if the index_group options
|
| 88 |
+
# are present.
|
| 89 |
+
assert not hasattr(options, "no_index")
|
| 90 |
+
|
| 91 |
+
def run(self, options: Values, args: List[str]) -> int:
|
| 92 |
+
raise NotImplementedError
|
| 93 |
+
|
| 94 |
+
def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
|
| 95 |
+
# factored out for testability
|
| 96 |
+
return self.parser.parse_args(args)
|
| 97 |
+
|
| 98 |
+
def main(self, args: List[str]) -> int:
|
| 99 |
+
try:
|
| 100 |
+
with self.main_context():
|
| 101 |
+
return self._main(args)
|
| 102 |
+
finally:
|
| 103 |
+
logging.shutdown()
|
| 104 |
+
|
| 105 |
+
def _main(self, args: List[str]) -> int:
|
| 106 |
+
# We must initialize this before the tempdir manager, otherwise the
|
| 107 |
+
# configuration would not be accessible by the time we clean up the
|
| 108 |
+
# tempdir manager.
|
| 109 |
+
self.tempdir_registry = self.enter_context(tempdir_registry())
|
| 110 |
+
# Intentionally set as early as possible so globally-managed temporary
|
| 111 |
+
# directories are available to the rest of the code.
|
| 112 |
+
self.enter_context(global_tempdir_manager())
|
| 113 |
+
|
| 114 |
+
options, args = self.parse_args(args)
|
| 115 |
+
|
| 116 |
+
# Set verbosity so that it can be used elsewhere.
|
| 117 |
+
self.verbosity = options.verbose - options.quiet
|
| 118 |
+
|
| 119 |
+
level_number = setup_logging(
|
| 120 |
+
verbosity=self.verbosity,
|
| 121 |
+
no_color=options.no_color,
|
| 122 |
+
user_log_file=options.log,
|
| 123 |
+
)
|
| 124 |
+
|
| 125 |
+
always_enabled_features = set(options.features_enabled) & set(
|
| 126 |
+
cmdoptions.ALWAYS_ENABLED_FEATURES
|
| 127 |
+
)
|
| 128 |
+
if always_enabled_features:
|
| 129 |
+
logger.warning(
|
| 130 |
+
"The following features are always enabled: %s. ",
|
| 131 |
+
", ".join(sorted(always_enabled_features)),
|
| 132 |
+
)
|
| 133 |
+
|
| 134 |
+
# Make sure that the --python argument isn't specified after the
|
| 135 |
+
# subcommand. We can tell, because if --python was specified,
|
| 136 |
+
# we should only reach this point if we're running in the created
|
| 137 |
+
# subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment
|
| 138 |
+
# variable set.
|
| 139 |
+
if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
|
| 140 |
+
logger.critical(
|
| 141 |
+
"The --python option must be placed before the pip subcommand name"
|
| 142 |
+
)
|
| 143 |
+
sys.exit(ERROR)
|
| 144 |
+
|
| 145 |
+
# TODO: Try to get these passing down from the command?
|
| 146 |
+
# without resorting to os.environ to hold these.
|
| 147 |
+
# This also affects isolated builds and it should.
|
| 148 |
+
|
| 149 |
+
if options.no_input:
|
| 150 |
+
os.environ["PIP_NO_INPUT"] = "1"
|
| 151 |
+
|
| 152 |
+
if options.exists_action:
|
| 153 |
+
os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)
|
| 154 |
+
|
| 155 |
+
if options.require_venv and not self.ignore_require_venv:
|
| 156 |
+
# If a venv is required check if it can really be found
|
| 157 |
+
if not running_under_virtualenv():
|
| 158 |
+
logger.critical("Could not find an activated virtualenv (required).")
|
| 159 |
+
sys.exit(VIRTUALENV_NOT_FOUND)
|
| 160 |
+
|
| 161 |
+
if options.cache_dir:
|
| 162 |
+
options.cache_dir = normalize_path(options.cache_dir)
|
| 163 |
+
if not check_path_owner(options.cache_dir):
|
| 164 |
+
logger.warning(
|
| 165 |
+
"The directory '%s' or its parent directory is not owned "
|
| 166 |
+
"or is not writable by the current user. The cache "
|
| 167 |
+
"has been disabled. Check the permissions and owner of "
|
| 168 |
+
"that directory. If executing pip with sudo, you should "
|
| 169 |
+
"use sudo's -H flag.",
|
| 170 |
+
options.cache_dir,
|
| 171 |
+
)
|
| 172 |
+
options.cache_dir = None
|
| 173 |
+
|
| 174 |
+
def intercepts_unhandled_exc(
|
| 175 |
+
run_func: Callable[..., int]
|
| 176 |
+
) -> Callable[..., int]:
|
| 177 |
+
@functools.wraps(run_func)
|
| 178 |
+
def exc_logging_wrapper(*args: Any) -> int:
|
| 179 |
+
try:
|
| 180 |
+
status = run_func(*args)
|
| 181 |
+
assert isinstance(status, int)
|
| 182 |
+
return status
|
| 183 |
+
except DiagnosticPipError as exc:
|
| 184 |
+
logger.error("[present-rich] %s", exc)
|
| 185 |
+
logger.debug("Exception information:", exc_info=True)
|
| 186 |
+
|
| 187 |
+
return ERROR
|
| 188 |
+
except PreviousBuildDirError as exc:
|
| 189 |
+
logger.critical(str(exc))
|
| 190 |
+
logger.debug("Exception information:", exc_info=True)
|
| 191 |
+
|
| 192 |
+
return PREVIOUS_BUILD_DIR_ERROR
|
| 193 |
+
except (
|
| 194 |
+
InstallationError,
|
| 195 |
+
UninstallationError,
|
| 196 |
+
BadCommand,
|
| 197 |
+
NetworkConnectionError,
|
| 198 |
+
) as exc:
|
| 199 |
+
logger.critical(str(exc))
|
| 200 |
+
logger.debug("Exception information:", exc_info=True)
|
| 201 |
+
|
| 202 |
+
return ERROR
|
| 203 |
+
except CommandError as exc:
|
| 204 |
+
logger.critical("%s", exc)
|
| 205 |
+
logger.debug("Exception information:", exc_info=True)
|
| 206 |
+
|
| 207 |
+
return ERROR
|
| 208 |
+
except BrokenStdoutLoggingError:
|
| 209 |
+
# Bypass our logger and write any remaining messages to
|
| 210 |
+
# stderr because stdout no longer works.
|
| 211 |
+
print("ERROR: Pipe to stdout was broken", file=sys.stderr)
|
| 212 |
+
if level_number <= logging.DEBUG:
|
| 213 |
+
traceback.print_exc(file=sys.stderr)
|
| 214 |
+
|
| 215 |
+
return ERROR
|
| 216 |
+
except KeyboardInterrupt:
|
| 217 |
+
logger.critical("Operation cancelled by user")
|
| 218 |
+
logger.debug("Exception information:", exc_info=True)
|
| 219 |
+
|
| 220 |
+
return ERROR
|
| 221 |
+
except BaseException:
|
| 222 |
+
logger.critical("Exception:", exc_info=True)
|
| 223 |
+
|
| 224 |
+
return UNKNOWN_ERROR
|
| 225 |
+
|
| 226 |
+
return exc_logging_wrapper
|
| 227 |
+
|
| 228 |
+
try:
|
| 229 |
+
if not options.debug_mode:
|
| 230 |
+
run = intercepts_unhandled_exc(self.run)
|
| 231 |
+
else:
|
| 232 |
+
run = self.run
|
| 233 |
+
rich_traceback.install(show_locals=True)
|
| 234 |
+
return run(options, args)
|
| 235 |
+
finally:
|
| 236 |
+
self.handle_pip_version_check(options)
|
.venv/Lib/site-packages/pip/_internal/cli/cmdoptions.py
ADDED
|
@@ -0,0 +1,1074 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
shared options and groups
|
| 3 |
+
|
| 4 |
+
The principle here is to define options once, but *not* instantiate them
|
| 5 |
+
globally. One reason being that options with action='append' can carry state
|
| 6 |
+
between parses. pip parses general options twice internally, and shouldn't
|
| 7 |
+
pass on state. To be consistent, all options will follow this design.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
# The following comment should be removed at some point in the future.
|
| 11 |
+
# mypy: strict-optional=False
|
| 12 |
+
|
| 13 |
+
import importlib.util
|
| 14 |
+
import logging
|
| 15 |
+
import os
|
| 16 |
+
import textwrap
|
| 17 |
+
from functools import partial
|
| 18 |
+
from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
|
| 19 |
+
from textwrap import dedent
|
| 20 |
+
from typing import Any, Callable, Dict, Optional, Tuple
|
| 21 |
+
|
| 22 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 23 |
+
|
| 24 |
+
from pip._internal.cli.parser import ConfigOptionParser
|
| 25 |
+
from pip._internal.exceptions import CommandError
|
| 26 |
+
from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
|
| 27 |
+
from pip._internal.models.format_control import FormatControl
|
| 28 |
+
from pip._internal.models.index import PyPI
|
| 29 |
+
from pip._internal.models.target_python import TargetPython
|
| 30 |
+
from pip._internal.utils.hashes import STRONG_HASHES
|
| 31 |
+
from pip._internal.utils.misc import strtobool
|
| 32 |
+
|
| 33 |
+
logger = logging.getLogger(__name__)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
|
| 37 |
+
"""
|
| 38 |
+
Raise an option parsing error using parser.error().
|
| 39 |
+
|
| 40 |
+
Args:
|
| 41 |
+
parser: an OptionParser instance.
|
| 42 |
+
option: an Option instance.
|
| 43 |
+
msg: the error text.
|
| 44 |
+
"""
|
| 45 |
+
msg = f"{option} error: {msg}"
|
| 46 |
+
msg = textwrap.fill(" ".join(msg.split()))
|
| 47 |
+
parser.error(msg)
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
|
| 51 |
+
"""
|
| 52 |
+
Return an OptionGroup object
|
| 53 |
+
group -- assumed to be dict with 'name' and 'options' keys
|
| 54 |
+
parser -- an optparse Parser
|
| 55 |
+
"""
|
| 56 |
+
option_group = OptionGroup(parser, group["name"])
|
| 57 |
+
for option in group["options"]:
|
| 58 |
+
option_group.add_option(option())
|
| 59 |
+
return option_group
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def check_dist_restriction(options: Values, check_target: bool = False) -> None:
|
| 63 |
+
"""Function for determining if custom platform options are allowed.
|
| 64 |
+
|
| 65 |
+
:param options: The OptionParser options.
|
| 66 |
+
:param check_target: Whether or not to check if --target is being used.
|
| 67 |
+
"""
|
| 68 |
+
dist_restriction_set = any(
|
| 69 |
+
[
|
| 70 |
+
options.python_version,
|
| 71 |
+
options.platforms,
|
| 72 |
+
options.abis,
|
| 73 |
+
options.implementation,
|
| 74 |
+
]
|
| 75 |
+
)
|
| 76 |
+
|
| 77 |
+
binary_only = FormatControl(set(), {":all:"})
|
| 78 |
+
sdist_dependencies_allowed = (
|
| 79 |
+
options.format_control != binary_only and not options.ignore_dependencies
|
| 80 |
+
)
|
| 81 |
+
|
| 82 |
+
# Installations or downloads using dist restrictions must not combine
|
| 83 |
+
# source distributions and dist-specific wheels, as they are not
|
| 84 |
+
# guaranteed to be locally compatible.
|
| 85 |
+
if dist_restriction_set and sdist_dependencies_allowed:
|
| 86 |
+
raise CommandError(
|
| 87 |
+
"When restricting platform and interpreter constraints using "
|
| 88 |
+
"--python-version, --platform, --abi, or --implementation, "
|
| 89 |
+
"either --no-deps must be set, or --only-binary=:all: must be "
|
| 90 |
+
"set and --no-binary must not be set (or must be set to "
|
| 91 |
+
":none:)."
|
| 92 |
+
)
|
| 93 |
+
|
| 94 |
+
if check_target:
|
| 95 |
+
if dist_restriction_set and not options.target_dir:
|
| 96 |
+
raise CommandError(
|
| 97 |
+
"Can not use any platform or abi specific options unless "
|
| 98 |
+
"installing via '--target'"
|
| 99 |
+
)
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def _path_option_check(option: Option, opt: str, value: str) -> str:
|
| 103 |
+
return os.path.expanduser(value)
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
def _package_name_option_check(option: Option, opt: str, value: str) -> str:
|
| 107 |
+
return canonicalize_name(value)
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
class PipOption(Option):
|
| 111 |
+
TYPES = Option.TYPES + ("path", "package_name")
|
| 112 |
+
TYPE_CHECKER = Option.TYPE_CHECKER.copy()
|
| 113 |
+
TYPE_CHECKER["package_name"] = _package_name_option_check
|
| 114 |
+
TYPE_CHECKER["path"] = _path_option_check
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
###########
|
| 118 |
+
# options #
|
| 119 |
+
###########
|
| 120 |
+
|
| 121 |
+
help_: Callable[..., Option] = partial(
|
| 122 |
+
Option,
|
| 123 |
+
"-h",
|
| 124 |
+
"--help",
|
| 125 |
+
dest="help",
|
| 126 |
+
action="help",
|
| 127 |
+
help="Show help.",
|
| 128 |
+
)
|
| 129 |
+
|
| 130 |
+
debug_mode: Callable[..., Option] = partial(
|
| 131 |
+
Option,
|
| 132 |
+
"--debug",
|
| 133 |
+
dest="debug_mode",
|
| 134 |
+
action="store_true",
|
| 135 |
+
default=False,
|
| 136 |
+
help=(
|
| 137 |
+
"Let unhandled exceptions propagate outside the main subroutine, "
|
| 138 |
+
"instead of logging them to stderr."
|
| 139 |
+
),
|
| 140 |
+
)
|
| 141 |
+
|
| 142 |
+
isolated_mode: Callable[..., Option] = partial(
|
| 143 |
+
Option,
|
| 144 |
+
"--isolated",
|
| 145 |
+
dest="isolated_mode",
|
| 146 |
+
action="store_true",
|
| 147 |
+
default=False,
|
| 148 |
+
help=(
|
| 149 |
+
"Run pip in an isolated mode, ignoring environment variables and user "
|
| 150 |
+
"configuration."
|
| 151 |
+
),
|
| 152 |
+
)
|
| 153 |
+
|
| 154 |
+
require_virtualenv: Callable[..., Option] = partial(
|
| 155 |
+
Option,
|
| 156 |
+
"--require-virtualenv",
|
| 157 |
+
"--require-venv",
|
| 158 |
+
dest="require_venv",
|
| 159 |
+
action="store_true",
|
| 160 |
+
default=False,
|
| 161 |
+
help=(
|
| 162 |
+
"Allow pip to only run in a virtual environment; "
|
| 163 |
+
"exit with an error otherwise."
|
| 164 |
+
),
|
| 165 |
+
)
|
| 166 |
+
|
| 167 |
+
override_externally_managed: Callable[..., Option] = partial(
|
| 168 |
+
Option,
|
| 169 |
+
"--break-system-packages",
|
| 170 |
+
dest="override_externally_managed",
|
| 171 |
+
action="store_true",
|
| 172 |
+
help="Allow pip to modify an EXTERNALLY-MANAGED Python installation",
|
| 173 |
+
)
|
| 174 |
+
|
| 175 |
+
python: Callable[..., Option] = partial(
|
| 176 |
+
Option,
|
| 177 |
+
"--python",
|
| 178 |
+
dest="python",
|
| 179 |
+
help="Run pip with the specified Python interpreter.",
|
| 180 |
+
)
|
| 181 |
+
|
| 182 |
+
verbose: Callable[..., Option] = partial(
|
| 183 |
+
Option,
|
| 184 |
+
"-v",
|
| 185 |
+
"--verbose",
|
| 186 |
+
dest="verbose",
|
| 187 |
+
action="count",
|
| 188 |
+
default=0,
|
| 189 |
+
help="Give more output. Option is additive, and can be used up to 3 times.",
|
| 190 |
+
)
|
| 191 |
+
|
| 192 |
+
no_color: Callable[..., Option] = partial(
|
| 193 |
+
Option,
|
| 194 |
+
"--no-color",
|
| 195 |
+
dest="no_color",
|
| 196 |
+
action="store_true",
|
| 197 |
+
default=False,
|
| 198 |
+
help="Suppress colored output.",
|
| 199 |
+
)
|
| 200 |
+
|
| 201 |
+
version: Callable[..., Option] = partial(
|
| 202 |
+
Option,
|
| 203 |
+
"-V",
|
| 204 |
+
"--version",
|
| 205 |
+
dest="version",
|
| 206 |
+
action="store_true",
|
| 207 |
+
help="Show version and exit.",
|
| 208 |
+
)
|
| 209 |
+
|
| 210 |
+
quiet: Callable[..., Option] = partial(
|
| 211 |
+
Option,
|
| 212 |
+
"-q",
|
| 213 |
+
"--quiet",
|
| 214 |
+
dest="quiet",
|
| 215 |
+
action="count",
|
| 216 |
+
default=0,
|
| 217 |
+
help=(
|
| 218 |
+
"Give less output. Option is additive, and can be used up to 3"
|
| 219 |
+
" times (corresponding to WARNING, ERROR, and CRITICAL logging"
|
| 220 |
+
" levels)."
|
| 221 |
+
),
|
| 222 |
+
)
|
| 223 |
+
|
| 224 |
+
progress_bar: Callable[..., Option] = partial(
|
| 225 |
+
Option,
|
| 226 |
+
"--progress-bar",
|
| 227 |
+
dest="progress_bar",
|
| 228 |
+
type="choice",
|
| 229 |
+
choices=["on", "off"],
|
| 230 |
+
default="on",
|
| 231 |
+
help="Specify whether the progress bar should be used [on, off] (default: on)",
|
| 232 |
+
)
|
| 233 |
+
|
| 234 |
+
log: Callable[..., Option] = partial(
|
| 235 |
+
PipOption,
|
| 236 |
+
"--log",
|
| 237 |
+
"--log-file",
|
| 238 |
+
"--local-log",
|
| 239 |
+
dest="log",
|
| 240 |
+
metavar="path",
|
| 241 |
+
type="path",
|
| 242 |
+
help="Path to a verbose appending log.",
|
| 243 |
+
)
|
| 244 |
+
|
| 245 |
+
no_input: Callable[..., Option] = partial(
|
| 246 |
+
Option,
|
| 247 |
+
# Don't ask for input
|
| 248 |
+
"--no-input",
|
| 249 |
+
dest="no_input",
|
| 250 |
+
action="store_true",
|
| 251 |
+
default=False,
|
| 252 |
+
help="Disable prompting for input.",
|
| 253 |
+
)
|
| 254 |
+
|
| 255 |
+
keyring_provider: Callable[..., Option] = partial(
|
| 256 |
+
Option,
|
| 257 |
+
"--keyring-provider",
|
| 258 |
+
dest="keyring_provider",
|
| 259 |
+
choices=["auto", "disabled", "import", "subprocess"],
|
| 260 |
+
default="auto",
|
| 261 |
+
help=(
|
| 262 |
+
"Enable the credential lookup via the keyring library if user input is allowed."
|
| 263 |
+
" Specify which mechanism to use [disabled, import, subprocess]."
|
| 264 |
+
" (default: disabled)"
|
| 265 |
+
),
|
| 266 |
+
)
|
| 267 |
+
|
| 268 |
+
proxy: Callable[..., Option] = partial(
|
| 269 |
+
Option,
|
| 270 |
+
"--proxy",
|
| 271 |
+
dest="proxy",
|
| 272 |
+
type="str",
|
| 273 |
+
default="",
|
| 274 |
+
help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.",
|
| 275 |
+
)
|
| 276 |
+
|
| 277 |
+
retries: Callable[..., Option] = partial(
|
| 278 |
+
Option,
|
| 279 |
+
"--retries",
|
| 280 |
+
dest="retries",
|
| 281 |
+
type="int",
|
| 282 |
+
default=5,
|
| 283 |
+
help="Maximum number of retries each connection should attempt "
|
| 284 |
+
"(default %default times).",
|
| 285 |
+
)
|
| 286 |
+
|
| 287 |
+
timeout: Callable[..., Option] = partial(
|
| 288 |
+
Option,
|
| 289 |
+
"--timeout",
|
| 290 |
+
"--default-timeout",
|
| 291 |
+
metavar="sec",
|
| 292 |
+
dest="timeout",
|
| 293 |
+
type="float",
|
| 294 |
+
default=15,
|
| 295 |
+
help="Set the socket timeout (default %default seconds).",
|
| 296 |
+
)
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
def exists_action() -> Option:
|
| 300 |
+
return Option(
|
| 301 |
+
# Option when path already exist
|
| 302 |
+
"--exists-action",
|
| 303 |
+
dest="exists_action",
|
| 304 |
+
type="choice",
|
| 305 |
+
choices=["s", "i", "w", "b", "a"],
|
| 306 |
+
default=[],
|
| 307 |
+
action="append",
|
| 308 |
+
metavar="action",
|
| 309 |
+
help="Default action when a path already exists: "
|
| 310 |
+
"(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
|
| 311 |
+
)
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
cert: Callable[..., Option] = partial(
|
| 315 |
+
PipOption,
|
| 316 |
+
"--cert",
|
| 317 |
+
dest="cert",
|
| 318 |
+
type="path",
|
| 319 |
+
metavar="path",
|
| 320 |
+
help=(
|
| 321 |
+
"Path to PEM-encoded CA certificate bundle. "
|
| 322 |
+
"If provided, overrides the default. "
|
| 323 |
+
"See 'SSL Certificate Verification' in pip documentation "
|
| 324 |
+
"for more information."
|
| 325 |
+
),
|
| 326 |
+
)
|
| 327 |
+
|
| 328 |
+
client_cert: Callable[..., Option] = partial(
|
| 329 |
+
PipOption,
|
| 330 |
+
"--client-cert",
|
| 331 |
+
dest="client_cert",
|
| 332 |
+
type="path",
|
| 333 |
+
default=None,
|
| 334 |
+
metavar="path",
|
| 335 |
+
help="Path to SSL client certificate, a single file containing the "
|
| 336 |
+
"private key and the certificate in PEM format.",
|
| 337 |
+
)
|
| 338 |
+
|
| 339 |
+
index_url: Callable[..., Option] = partial(
|
| 340 |
+
Option,
|
| 341 |
+
"-i",
|
| 342 |
+
"--index-url",
|
| 343 |
+
"--pypi-url",
|
| 344 |
+
dest="index_url",
|
| 345 |
+
metavar="URL",
|
| 346 |
+
default=PyPI.simple_url,
|
| 347 |
+
help="Base URL of the Python Package Index (default %default). "
|
| 348 |
+
"This should point to a repository compliant with PEP 503 "
|
| 349 |
+
"(the simple repository API) or a local directory laid out "
|
| 350 |
+
"in the same format.",
|
| 351 |
+
)
|
| 352 |
+
|
| 353 |
+
|
| 354 |
+
def extra_index_url() -> Option:
|
| 355 |
+
return Option(
|
| 356 |
+
"--extra-index-url",
|
| 357 |
+
dest="extra_index_urls",
|
| 358 |
+
metavar="URL",
|
| 359 |
+
action="append",
|
| 360 |
+
default=[],
|
| 361 |
+
help="Extra URLs of package indexes to use in addition to "
|
| 362 |
+
"--index-url. Should follow the same rules as "
|
| 363 |
+
"--index-url.",
|
| 364 |
+
)
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
no_index: Callable[..., Option] = partial(
|
| 368 |
+
Option,
|
| 369 |
+
"--no-index",
|
| 370 |
+
dest="no_index",
|
| 371 |
+
action="store_true",
|
| 372 |
+
default=False,
|
| 373 |
+
help="Ignore package index (only looking at --find-links URLs instead).",
|
| 374 |
+
)
|
| 375 |
+
|
| 376 |
+
|
| 377 |
+
def find_links() -> Option:
|
| 378 |
+
return Option(
|
| 379 |
+
"-f",
|
| 380 |
+
"--find-links",
|
| 381 |
+
dest="find_links",
|
| 382 |
+
action="append",
|
| 383 |
+
default=[],
|
| 384 |
+
metavar="url",
|
| 385 |
+
help="If a URL or path to an html file, then parse for links to "
|
| 386 |
+
"archives such as sdist (.tar.gz) or wheel (.whl) files. "
|
| 387 |
+
"If a local path or file:// URL that's a directory, "
|
| 388 |
+
"then look for archives in the directory listing. "
|
| 389 |
+
"Links to VCS project URLs are not supported.",
|
| 390 |
+
)
|
| 391 |
+
|
| 392 |
+
|
| 393 |
+
def trusted_host() -> Option:
|
| 394 |
+
return Option(
|
| 395 |
+
"--trusted-host",
|
| 396 |
+
dest="trusted_hosts",
|
| 397 |
+
action="append",
|
| 398 |
+
metavar="HOSTNAME",
|
| 399 |
+
default=[],
|
| 400 |
+
help="Mark this host or host:port pair as trusted, even though it "
|
| 401 |
+
"does not have valid or any HTTPS.",
|
| 402 |
+
)
|
| 403 |
+
|
| 404 |
+
|
| 405 |
+
def constraints() -> Option:
|
| 406 |
+
return Option(
|
| 407 |
+
"-c",
|
| 408 |
+
"--constraint",
|
| 409 |
+
dest="constraints",
|
| 410 |
+
action="append",
|
| 411 |
+
default=[],
|
| 412 |
+
metavar="file",
|
| 413 |
+
help="Constrain versions using the given constraints file. "
|
| 414 |
+
"This option can be used multiple times.",
|
| 415 |
+
)
|
| 416 |
+
|
| 417 |
+
|
| 418 |
+
def requirements() -> Option:
|
| 419 |
+
return Option(
|
| 420 |
+
"-r",
|
| 421 |
+
"--requirement",
|
| 422 |
+
dest="requirements",
|
| 423 |
+
action="append",
|
| 424 |
+
default=[],
|
| 425 |
+
metavar="file",
|
| 426 |
+
help="Install from the given requirements file. "
|
| 427 |
+
"This option can be used multiple times.",
|
| 428 |
+
)
|
| 429 |
+
|
| 430 |
+
|
| 431 |
+
def editable() -> Option:
|
| 432 |
+
return Option(
|
| 433 |
+
"-e",
|
| 434 |
+
"--editable",
|
| 435 |
+
dest="editables",
|
| 436 |
+
action="append",
|
| 437 |
+
default=[],
|
| 438 |
+
metavar="path/url",
|
| 439 |
+
help=(
|
| 440 |
+
"Install a project in editable mode (i.e. setuptools "
|
| 441 |
+
'"develop mode") from a local project path or a VCS url.'
|
| 442 |
+
),
|
| 443 |
+
)
|
| 444 |
+
|
| 445 |
+
|
| 446 |
+
def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None:
|
| 447 |
+
value = os.path.abspath(value)
|
| 448 |
+
setattr(parser.values, option.dest, value)
|
| 449 |
+
|
| 450 |
+
|
| 451 |
+
src: Callable[..., Option] = partial(
|
| 452 |
+
PipOption,
|
| 453 |
+
"--src",
|
| 454 |
+
"--source",
|
| 455 |
+
"--source-dir",
|
| 456 |
+
"--source-directory",
|
| 457 |
+
dest="src_dir",
|
| 458 |
+
type="path",
|
| 459 |
+
metavar="dir",
|
| 460 |
+
default=get_src_prefix(),
|
| 461 |
+
action="callback",
|
| 462 |
+
callback=_handle_src,
|
| 463 |
+
help="Directory to check out editable projects into. "
|
| 464 |
+
'The default in a virtualenv is "<venv path>/src". '
|
| 465 |
+
'The default for global installs is "<current dir>/src".',
|
| 466 |
+
)
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
def _get_format_control(values: Values, option: Option) -> Any:
|
| 470 |
+
"""Get a format_control object."""
|
| 471 |
+
return getattr(values, option.dest)
|
| 472 |
+
|
| 473 |
+
|
| 474 |
+
def _handle_no_binary(
|
| 475 |
+
option: Option, opt_str: str, value: str, parser: OptionParser
|
| 476 |
+
) -> None:
|
| 477 |
+
existing = _get_format_control(parser.values, option)
|
| 478 |
+
FormatControl.handle_mutual_excludes(
|
| 479 |
+
value,
|
| 480 |
+
existing.no_binary,
|
| 481 |
+
existing.only_binary,
|
| 482 |
+
)
|
| 483 |
+
|
| 484 |
+
|
| 485 |
+
def _handle_only_binary(
|
| 486 |
+
option: Option, opt_str: str, value: str, parser: OptionParser
|
| 487 |
+
) -> None:
|
| 488 |
+
existing = _get_format_control(parser.values, option)
|
| 489 |
+
FormatControl.handle_mutual_excludes(
|
| 490 |
+
value,
|
| 491 |
+
existing.only_binary,
|
| 492 |
+
existing.no_binary,
|
| 493 |
+
)
|
| 494 |
+
|
| 495 |
+
|
| 496 |
+
def no_binary() -> Option:
|
| 497 |
+
format_control = FormatControl(set(), set())
|
| 498 |
+
return Option(
|
| 499 |
+
"--no-binary",
|
| 500 |
+
dest="format_control",
|
| 501 |
+
action="callback",
|
| 502 |
+
callback=_handle_no_binary,
|
| 503 |
+
type="str",
|
| 504 |
+
default=format_control,
|
| 505 |
+
help="Do not use binary packages. Can be supplied multiple times, and "
|
| 506 |
+
'each time adds to the existing value. Accepts either ":all:" to '
|
| 507 |
+
'disable all binary packages, ":none:" to empty the set (notice '
|
| 508 |
+
"the colons), or one or more package names with commas between "
|
| 509 |
+
"them (no colons). Note that some packages are tricky to compile "
|
| 510 |
+
"and may fail to install when this option is used on them.",
|
| 511 |
+
)
|
| 512 |
+
|
| 513 |
+
|
| 514 |
+
def only_binary() -> Option:
|
| 515 |
+
format_control = FormatControl(set(), set())
|
| 516 |
+
return Option(
|
| 517 |
+
"--only-binary",
|
| 518 |
+
dest="format_control",
|
| 519 |
+
action="callback",
|
| 520 |
+
callback=_handle_only_binary,
|
| 521 |
+
type="str",
|
| 522 |
+
default=format_control,
|
| 523 |
+
help="Do not use source packages. Can be supplied multiple times, and "
|
| 524 |
+
'each time adds to the existing value. Accepts either ":all:" to '
|
| 525 |
+
'disable all source packages, ":none:" to empty the set, or one '
|
| 526 |
+
"or more package names with commas between them. Packages "
|
| 527 |
+
"without binary distributions will fail to install when this "
|
| 528 |
+
"option is used on them.",
|
| 529 |
+
)
|
| 530 |
+
|
| 531 |
+
|
| 532 |
+
platforms: Callable[..., Option] = partial(
|
| 533 |
+
Option,
|
| 534 |
+
"--platform",
|
| 535 |
+
dest="platforms",
|
| 536 |
+
metavar="platform",
|
| 537 |
+
action="append",
|
| 538 |
+
default=None,
|
| 539 |
+
help=(
|
| 540 |
+
"Only use wheels compatible with <platform>. Defaults to the "
|
| 541 |
+
"platform of the running system. Use this option multiple times to "
|
| 542 |
+
"specify multiple platforms supported by the target interpreter."
|
| 543 |
+
),
|
| 544 |
+
)
|
| 545 |
+
|
| 546 |
+
|
| 547 |
+
# This was made a separate function for unit-testing purposes.
|
| 548 |
+
def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
|
| 549 |
+
"""
|
| 550 |
+
Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
|
| 551 |
+
|
| 552 |
+
:return: A 2-tuple (version_info, error_msg), where `error_msg` is
|
| 553 |
+
non-None if and only if there was a parsing error.
|
| 554 |
+
"""
|
| 555 |
+
if not value:
|
| 556 |
+
# The empty string is the same as not providing a value.
|
| 557 |
+
return (None, None)
|
| 558 |
+
|
| 559 |
+
parts = value.split(".")
|
| 560 |
+
if len(parts) > 3:
|
| 561 |
+
return ((), "at most three version parts are allowed")
|
| 562 |
+
|
| 563 |
+
if len(parts) == 1:
|
| 564 |
+
# Then we are in the case of "3" or "37".
|
| 565 |
+
value = parts[0]
|
| 566 |
+
if len(value) > 1:
|
| 567 |
+
parts = [value[0], value[1:]]
|
| 568 |
+
|
| 569 |
+
try:
|
| 570 |
+
version_info = tuple(int(part) for part in parts)
|
| 571 |
+
except ValueError:
|
| 572 |
+
return ((), "each version part must be an integer")
|
| 573 |
+
|
| 574 |
+
return (version_info, None)
|
| 575 |
+
|
| 576 |
+
|
| 577 |
+
def _handle_python_version(
|
| 578 |
+
option: Option, opt_str: str, value: str, parser: OptionParser
|
| 579 |
+
) -> None:
|
| 580 |
+
"""
|
| 581 |
+
Handle a provided --python-version value.
|
| 582 |
+
"""
|
| 583 |
+
version_info, error_msg = _convert_python_version(value)
|
| 584 |
+
if error_msg is not None:
|
| 585 |
+
msg = "invalid --python-version value: {!r}: {}".format(
|
| 586 |
+
value,
|
| 587 |
+
error_msg,
|
| 588 |
+
)
|
| 589 |
+
raise_option_error(parser, option=option, msg=msg)
|
| 590 |
+
|
| 591 |
+
parser.values.python_version = version_info
|
| 592 |
+
|
| 593 |
+
|
| 594 |
+
python_version: Callable[..., Option] = partial(
|
| 595 |
+
Option,
|
| 596 |
+
"--python-version",
|
| 597 |
+
dest="python_version",
|
| 598 |
+
metavar="python_version",
|
| 599 |
+
action="callback",
|
| 600 |
+
callback=_handle_python_version,
|
| 601 |
+
type="str",
|
| 602 |
+
default=None,
|
| 603 |
+
help=dedent(
|
| 604 |
+
"""\
|
| 605 |
+
The Python interpreter version to use for wheel and "Requires-Python"
|
| 606 |
+
compatibility checks. Defaults to a version derived from the running
|
| 607 |
+
interpreter. The version can be specified using up to three dot-separated
|
| 608 |
+
integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
|
| 609 |
+
version can also be given as a string without dots (e.g. "37" for 3.7.0).
|
| 610 |
+
"""
|
| 611 |
+
),
|
| 612 |
+
)
|
| 613 |
+
|
| 614 |
+
|
| 615 |
+
implementation: Callable[..., Option] = partial(
|
| 616 |
+
Option,
|
| 617 |
+
"--implementation",
|
| 618 |
+
dest="implementation",
|
| 619 |
+
metavar="implementation",
|
| 620 |
+
default=None,
|
| 621 |
+
help=(
|
| 622 |
+
"Only use wheels compatible with Python "
|
| 623 |
+
"implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
|
| 624 |
+
" or 'ip'. If not specified, then the current "
|
| 625 |
+
"interpreter implementation is used. Use 'py' to force "
|
| 626 |
+
"implementation-agnostic wheels."
|
| 627 |
+
),
|
| 628 |
+
)
|
| 629 |
+
|
| 630 |
+
|
| 631 |
+
abis: Callable[..., Option] = partial(
|
| 632 |
+
Option,
|
| 633 |
+
"--abi",
|
| 634 |
+
dest="abis",
|
| 635 |
+
metavar="abi",
|
| 636 |
+
action="append",
|
| 637 |
+
default=None,
|
| 638 |
+
help=(
|
| 639 |
+
"Only use wheels compatible with Python abi <abi>, e.g. 'pypy_41'. "
|
| 640 |
+
"If not specified, then the current interpreter abi tag is used. "
|
| 641 |
+
"Use this option multiple times to specify multiple abis supported "
|
| 642 |
+
"by the target interpreter. Generally you will need to specify "
|
| 643 |
+
"--implementation, --platform, and --python-version when using this "
|
| 644 |
+
"option."
|
| 645 |
+
),
|
| 646 |
+
)
|
| 647 |
+
|
| 648 |
+
|
| 649 |
+
def add_target_python_options(cmd_opts: OptionGroup) -> None:
|
| 650 |
+
cmd_opts.add_option(platforms())
|
| 651 |
+
cmd_opts.add_option(python_version())
|
| 652 |
+
cmd_opts.add_option(implementation())
|
| 653 |
+
cmd_opts.add_option(abis())
|
| 654 |
+
|
| 655 |
+
|
| 656 |
+
def make_target_python(options: Values) -> TargetPython:
|
| 657 |
+
target_python = TargetPython(
|
| 658 |
+
platforms=options.platforms,
|
| 659 |
+
py_version_info=options.python_version,
|
| 660 |
+
abis=options.abis,
|
| 661 |
+
implementation=options.implementation,
|
| 662 |
+
)
|
| 663 |
+
|
| 664 |
+
return target_python
|
| 665 |
+
|
| 666 |
+
|
| 667 |
+
def prefer_binary() -> Option:
|
| 668 |
+
return Option(
|
| 669 |
+
"--prefer-binary",
|
| 670 |
+
dest="prefer_binary",
|
| 671 |
+
action="store_true",
|
| 672 |
+
default=False,
|
| 673 |
+
help="Prefer older binary packages over newer source packages.",
|
| 674 |
+
)
|
| 675 |
+
|
| 676 |
+
|
| 677 |
+
cache_dir: Callable[..., Option] = partial(
|
| 678 |
+
PipOption,
|
| 679 |
+
"--cache-dir",
|
| 680 |
+
dest="cache_dir",
|
| 681 |
+
default=USER_CACHE_DIR,
|
| 682 |
+
metavar="dir",
|
| 683 |
+
type="path",
|
| 684 |
+
help="Store the cache data in <dir>.",
|
| 685 |
+
)
|
| 686 |
+
|
| 687 |
+
|
| 688 |
+
def _handle_no_cache_dir(
|
| 689 |
+
option: Option, opt: str, value: str, parser: OptionParser
|
| 690 |
+
) -> None:
|
| 691 |
+
"""
|
| 692 |
+
Process a value provided for the --no-cache-dir option.
|
| 693 |
+
|
| 694 |
+
This is an optparse.Option callback for the --no-cache-dir option.
|
| 695 |
+
"""
|
| 696 |
+
# The value argument will be None if --no-cache-dir is passed via the
|
| 697 |
+
# command-line, since the option doesn't accept arguments. However,
|
| 698 |
+
# the value can be non-None if the option is triggered e.g. by an
|
| 699 |
+
# environment variable, like PIP_NO_CACHE_DIR=true.
|
| 700 |
+
if value is not None:
|
| 701 |
+
# Then parse the string value to get argument error-checking.
|
| 702 |
+
try:
|
| 703 |
+
strtobool(value)
|
| 704 |
+
except ValueError as exc:
|
| 705 |
+
raise_option_error(parser, option=option, msg=str(exc))
|
| 706 |
+
|
| 707 |
+
# Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
|
| 708 |
+
# converted to 0 (like "false" or "no") caused cache_dir to be disabled
|
| 709 |
+
# rather than enabled (logic would say the latter). Thus, we disable
|
| 710 |
+
# the cache directory not just on values that parse to True, but (for
|
| 711 |
+
# backwards compatibility reasons) also on values that parse to False.
|
| 712 |
+
# In other words, always set it to False if the option is provided in
|
| 713 |
+
# some (valid) form.
|
| 714 |
+
parser.values.cache_dir = False
|
| 715 |
+
|
| 716 |
+
|
| 717 |
+
no_cache: Callable[..., Option] = partial(
|
| 718 |
+
Option,
|
| 719 |
+
"--no-cache-dir",
|
| 720 |
+
dest="cache_dir",
|
| 721 |
+
action="callback",
|
| 722 |
+
callback=_handle_no_cache_dir,
|
| 723 |
+
help="Disable the cache.",
|
| 724 |
+
)
|
| 725 |
+
|
| 726 |
+
no_deps: Callable[..., Option] = partial(
|
| 727 |
+
Option,
|
| 728 |
+
"--no-deps",
|
| 729 |
+
"--no-dependencies",
|
| 730 |
+
dest="ignore_dependencies",
|
| 731 |
+
action="store_true",
|
| 732 |
+
default=False,
|
| 733 |
+
help="Don't install package dependencies.",
|
| 734 |
+
)
|
| 735 |
+
|
| 736 |
+
ignore_requires_python: Callable[..., Option] = partial(
|
| 737 |
+
Option,
|
| 738 |
+
"--ignore-requires-python",
|
| 739 |
+
dest="ignore_requires_python",
|
| 740 |
+
action="store_true",
|
| 741 |
+
help="Ignore the Requires-Python information.",
|
| 742 |
+
)
|
| 743 |
+
|
| 744 |
+
no_build_isolation: Callable[..., Option] = partial(
|
| 745 |
+
Option,
|
| 746 |
+
"--no-build-isolation",
|
| 747 |
+
dest="build_isolation",
|
| 748 |
+
action="store_false",
|
| 749 |
+
default=True,
|
| 750 |
+
help="Disable isolation when building a modern source distribution. "
|
| 751 |
+
"Build dependencies specified by PEP 518 must be already installed "
|
| 752 |
+
"if this option is used.",
|
| 753 |
+
)
|
| 754 |
+
|
| 755 |
+
check_build_deps: Callable[..., Option] = partial(
|
| 756 |
+
Option,
|
| 757 |
+
"--check-build-dependencies",
|
| 758 |
+
dest="check_build_deps",
|
| 759 |
+
action="store_true",
|
| 760 |
+
default=False,
|
| 761 |
+
help="Check the build dependencies when PEP517 is used.",
|
| 762 |
+
)
|
| 763 |
+
|
| 764 |
+
|
| 765 |
+
def _handle_no_use_pep517(
|
| 766 |
+
option: Option, opt: str, value: str, parser: OptionParser
|
| 767 |
+
) -> None:
|
| 768 |
+
"""
|
| 769 |
+
Process a value provided for the --no-use-pep517 option.
|
| 770 |
+
|
| 771 |
+
This is an optparse.Option callback for the no_use_pep517 option.
|
| 772 |
+
"""
|
| 773 |
+
# Since --no-use-pep517 doesn't accept arguments, the value argument
|
| 774 |
+
# will be None if --no-use-pep517 is passed via the command-line.
|
| 775 |
+
# However, the value can be non-None if the option is triggered e.g.
|
| 776 |
+
# by an environment variable, for example "PIP_NO_USE_PEP517=true".
|
| 777 |
+
if value is not None:
|
| 778 |
+
msg = """A value was passed for --no-use-pep517,
|
| 779 |
+
probably using either the PIP_NO_USE_PEP517 environment variable
|
| 780 |
+
or the "no-use-pep517" config file option. Use an appropriate value
|
| 781 |
+
of the PIP_USE_PEP517 environment variable or the "use-pep517"
|
| 782 |
+
config file option instead.
|
| 783 |
+
"""
|
| 784 |
+
raise_option_error(parser, option=option, msg=msg)
|
| 785 |
+
|
| 786 |
+
# If user doesn't wish to use pep517, we check if setuptools and wheel are installed
|
| 787 |
+
# and raise error if it is not.
|
| 788 |
+
packages = ("setuptools", "wheel")
|
| 789 |
+
if not all(importlib.util.find_spec(package) for package in packages):
|
| 790 |
+
msg = (
|
| 791 |
+
f"It is not possible to use --no-use-pep517 "
|
| 792 |
+
f"without {' and '.join(packages)} installed."
|
| 793 |
+
)
|
| 794 |
+
raise_option_error(parser, option=option, msg=msg)
|
| 795 |
+
|
| 796 |
+
# Otherwise, --no-use-pep517 was passed via the command-line.
|
| 797 |
+
parser.values.use_pep517 = False
|
| 798 |
+
|
| 799 |
+
|
| 800 |
+
use_pep517: Any = partial(
|
| 801 |
+
Option,
|
| 802 |
+
"--use-pep517",
|
| 803 |
+
dest="use_pep517",
|
| 804 |
+
action="store_true",
|
| 805 |
+
default=None,
|
| 806 |
+
help="Use PEP 517 for building source distributions "
|
| 807 |
+
"(use --no-use-pep517 to force legacy behaviour).",
|
| 808 |
+
)
|
| 809 |
+
|
| 810 |
+
no_use_pep517: Any = partial(
|
| 811 |
+
Option,
|
| 812 |
+
"--no-use-pep517",
|
| 813 |
+
dest="use_pep517",
|
| 814 |
+
action="callback",
|
| 815 |
+
callback=_handle_no_use_pep517,
|
| 816 |
+
default=None,
|
| 817 |
+
help=SUPPRESS_HELP,
|
| 818 |
+
)
|
| 819 |
+
|
| 820 |
+
|
| 821 |
+
def _handle_config_settings(
|
| 822 |
+
option: Option, opt_str: str, value: str, parser: OptionParser
|
| 823 |
+
) -> None:
|
| 824 |
+
key, sep, val = value.partition("=")
|
| 825 |
+
if sep != "=":
|
| 826 |
+
parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") # noqa
|
| 827 |
+
dest = getattr(parser.values, option.dest)
|
| 828 |
+
if dest is None:
|
| 829 |
+
dest = {}
|
| 830 |
+
setattr(parser.values, option.dest, dest)
|
| 831 |
+
if key in dest:
|
| 832 |
+
if isinstance(dest[key], list):
|
| 833 |
+
dest[key].append(val)
|
| 834 |
+
else:
|
| 835 |
+
dest[key] = [dest[key], val]
|
| 836 |
+
else:
|
| 837 |
+
dest[key] = val
|
| 838 |
+
|
| 839 |
+
|
| 840 |
+
config_settings: Callable[..., Option] = partial(
|
| 841 |
+
Option,
|
| 842 |
+
"-C",
|
| 843 |
+
"--config-settings",
|
| 844 |
+
dest="config_settings",
|
| 845 |
+
type=str,
|
| 846 |
+
action="callback",
|
| 847 |
+
callback=_handle_config_settings,
|
| 848 |
+
metavar="settings",
|
| 849 |
+
help="Configuration settings to be passed to the PEP 517 build backend. "
|
| 850 |
+
"Settings take the form KEY=VALUE. Use multiple --config-settings options "
|
| 851 |
+
"to pass multiple keys to the backend.",
|
| 852 |
+
)
|
| 853 |
+
|
| 854 |
+
build_options: Callable[..., Option] = partial(
|
| 855 |
+
Option,
|
| 856 |
+
"--build-option",
|
| 857 |
+
dest="build_options",
|
| 858 |
+
metavar="options",
|
| 859 |
+
action="append",
|
| 860 |
+
help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
|
| 861 |
+
)
|
| 862 |
+
|
| 863 |
+
global_options: Callable[..., Option] = partial(
|
| 864 |
+
Option,
|
| 865 |
+
"--global-option",
|
| 866 |
+
dest="global_options",
|
| 867 |
+
action="append",
|
| 868 |
+
metavar="options",
|
| 869 |
+
help="Extra global options to be supplied to the setup.py "
|
| 870 |
+
"call before the install or bdist_wheel command.",
|
| 871 |
+
)
|
| 872 |
+
|
| 873 |
+
no_clean: Callable[..., Option] = partial(
|
| 874 |
+
Option,
|
| 875 |
+
"--no-clean",
|
| 876 |
+
action="store_true",
|
| 877 |
+
default=False,
|
| 878 |
+
help="Don't clean up build directories.",
|
| 879 |
+
)
|
| 880 |
+
|
| 881 |
+
pre: Callable[..., Option] = partial(
|
| 882 |
+
Option,
|
| 883 |
+
"--pre",
|
| 884 |
+
action="store_true",
|
| 885 |
+
default=False,
|
| 886 |
+
help="Include pre-release and development versions. By default, "
|
| 887 |
+
"pip only finds stable versions.",
|
| 888 |
+
)
|
| 889 |
+
|
| 890 |
+
disable_pip_version_check: Callable[..., Option] = partial(
|
| 891 |
+
Option,
|
| 892 |
+
"--disable-pip-version-check",
|
| 893 |
+
dest="disable_pip_version_check",
|
| 894 |
+
action="store_true",
|
| 895 |
+
default=False,
|
| 896 |
+
help="Don't periodically check PyPI to determine whether a new version "
|
| 897 |
+
"of pip is available for download. Implied with --no-index.",
|
| 898 |
+
)
|
| 899 |
+
|
| 900 |
+
root_user_action: Callable[..., Option] = partial(
|
| 901 |
+
Option,
|
| 902 |
+
"--root-user-action",
|
| 903 |
+
dest="root_user_action",
|
| 904 |
+
default="warn",
|
| 905 |
+
choices=["warn", "ignore"],
|
| 906 |
+
help="Action if pip is run as a root user. By default, a warning message is shown.",
|
| 907 |
+
)
|
| 908 |
+
|
| 909 |
+
|
| 910 |
+
def _handle_merge_hash(
|
| 911 |
+
option: Option, opt_str: str, value: str, parser: OptionParser
|
| 912 |
+
) -> None:
|
| 913 |
+
"""Given a value spelled "algo:digest", append the digest to a list
|
| 914 |
+
pointed to in a dict by the algo name."""
|
| 915 |
+
if not parser.values.hashes:
|
| 916 |
+
parser.values.hashes = {}
|
| 917 |
+
try:
|
| 918 |
+
algo, digest = value.split(":", 1)
|
| 919 |
+
except ValueError:
|
| 920 |
+
parser.error(
|
| 921 |
+
"Arguments to {} must be a hash name " # noqa
|
| 922 |
+
"followed by a value, like --hash=sha256:"
|
| 923 |
+
"abcde...".format(opt_str)
|
| 924 |
+
)
|
| 925 |
+
if algo not in STRONG_HASHES:
|
| 926 |
+
parser.error(
|
| 927 |
+
"Allowed hash algorithms for {} are {}.".format( # noqa
|
| 928 |
+
opt_str, ", ".join(STRONG_HASHES)
|
| 929 |
+
)
|
| 930 |
+
)
|
| 931 |
+
parser.values.hashes.setdefault(algo, []).append(digest)
|
| 932 |
+
|
| 933 |
+
|
| 934 |
+
hash: Callable[..., Option] = partial(
|
| 935 |
+
Option,
|
| 936 |
+
"--hash",
|
| 937 |
+
# Hash values eventually end up in InstallRequirement.hashes due to
|
| 938 |
+
# __dict__ copying in process_line().
|
| 939 |
+
dest="hashes",
|
| 940 |
+
action="callback",
|
| 941 |
+
callback=_handle_merge_hash,
|
| 942 |
+
type="string",
|
| 943 |
+
help="Verify that the package's archive matches this "
|
| 944 |
+
"hash before installing. Example: --hash=sha256:abcdef...",
|
| 945 |
+
)
|
| 946 |
+
|
| 947 |
+
|
| 948 |
+
require_hashes: Callable[..., Option] = partial(
|
| 949 |
+
Option,
|
| 950 |
+
"--require-hashes",
|
| 951 |
+
dest="require_hashes",
|
| 952 |
+
action="store_true",
|
| 953 |
+
default=False,
|
| 954 |
+
help="Require a hash to check each requirement against, for "
|
| 955 |
+
"repeatable installs. This option is implied when any package in a "
|
| 956 |
+
"requirements file has a --hash option.",
|
| 957 |
+
)
|
| 958 |
+
|
| 959 |
+
|
| 960 |
+
list_path: Callable[..., Option] = partial(
|
| 961 |
+
PipOption,
|
| 962 |
+
"--path",
|
| 963 |
+
dest="path",
|
| 964 |
+
type="path",
|
| 965 |
+
action="append",
|
| 966 |
+
help="Restrict to the specified installation path for listing "
|
| 967 |
+
"packages (can be used multiple times).",
|
| 968 |
+
)
|
| 969 |
+
|
| 970 |
+
|
| 971 |
+
def check_list_path_option(options: Values) -> None:
|
| 972 |
+
if options.path and (options.user or options.local):
|
| 973 |
+
raise CommandError("Cannot combine '--path' with '--user' or '--local'")
|
| 974 |
+
|
| 975 |
+
|
| 976 |
+
list_exclude: Callable[..., Option] = partial(
|
| 977 |
+
PipOption,
|
| 978 |
+
"--exclude",
|
| 979 |
+
dest="excludes",
|
| 980 |
+
action="append",
|
| 981 |
+
metavar="package",
|
| 982 |
+
type="package_name",
|
| 983 |
+
help="Exclude specified package from the output",
|
| 984 |
+
)
|
| 985 |
+
|
| 986 |
+
|
| 987 |
+
no_python_version_warning: Callable[..., Option] = partial(
|
| 988 |
+
Option,
|
| 989 |
+
"--no-python-version-warning",
|
| 990 |
+
dest="no_python_version_warning",
|
| 991 |
+
action="store_true",
|
| 992 |
+
default=False,
|
| 993 |
+
help="Silence deprecation warnings for upcoming unsupported Pythons.",
|
| 994 |
+
)
|
| 995 |
+
|
| 996 |
+
|
| 997 |
+
# Features that are now always on. A warning is printed if they are used.
|
| 998 |
+
ALWAYS_ENABLED_FEATURES = [
|
| 999 |
+
"no-binary-enable-wheel-cache", # always on since 23.1
|
| 1000 |
+
]
|
| 1001 |
+
|
| 1002 |
+
use_new_feature: Callable[..., Option] = partial(
|
| 1003 |
+
Option,
|
| 1004 |
+
"--use-feature",
|
| 1005 |
+
dest="features_enabled",
|
| 1006 |
+
metavar="feature",
|
| 1007 |
+
action="append",
|
| 1008 |
+
default=[],
|
| 1009 |
+
choices=[
|
| 1010 |
+
"fast-deps",
|
| 1011 |
+
"truststore",
|
| 1012 |
+
]
|
| 1013 |
+
+ ALWAYS_ENABLED_FEATURES,
|
| 1014 |
+
help="Enable new functionality, that may be backward incompatible.",
|
| 1015 |
+
)
|
| 1016 |
+
|
| 1017 |
+
use_deprecated_feature: Callable[..., Option] = partial(
|
| 1018 |
+
Option,
|
| 1019 |
+
"--use-deprecated",
|
| 1020 |
+
dest="deprecated_features_enabled",
|
| 1021 |
+
metavar="feature",
|
| 1022 |
+
action="append",
|
| 1023 |
+
default=[],
|
| 1024 |
+
choices=[
|
| 1025 |
+
"legacy-resolver",
|
| 1026 |
+
],
|
| 1027 |
+
help=("Enable deprecated functionality, that will be removed in the future."),
|
| 1028 |
+
)
|
| 1029 |
+
|
| 1030 |
+
|
| 1031 |
+
##########
|
| 1032 |
+
# groups #
|
| 1033 |
+
##########
|
| 1034 |
+
|
| 1035 |
+
general_group: Dict[str, Any] = {
|
| 1036 |
+
"name": "General Options",
|
| 1037 |
+
"options": [
|
| 1038 |
+
help_,
|
| 1039 |
+
debug_mode,
|
| 1040 |
+
isolated_mode,
|
| 1041 |
+
require_virtualenv,
|
| 1042 |
+
python,
|
| 1043 |
+
verbose,
|
| 1044 |
+
version,
|
| 1045 |
+
quiet,
|
| 1046 |
+
log,
|
| 1047 |
+
no_input,
|
| 1048 |
+
keyring_provider,
|
| 1049 |
+
proxy,
|
| 1050 |
+
retries,
|
| 1051 |
+
timeout,
|
| 1052 |
+
exists_action,
|
| 1053 |
+
trusted_host,
|
| 1054 |
+
cert,
|
| 1055 |
+
client_cert,
|
| 1056 |
+
cache_dir,
|
| 1057 |
+
no_cache,
|
| 1058 |
+
disable_pip_version_check,
|
| 1059 |
+
no_color,
|
| 1060 |
+
no_python_version_warning,
|
| 1061 |
+
use_new_feature,
|
| 1062 |
+
use_deprecated_feature,
|
| 1063 |
+
],
|
| 1064 |
+
}
|
| 1065 |
+
|
| 1066 |
+
index_group: Dict[str, Any] = {
|
| 1067 |
+
"name": "Package Index Options",
|
| 1068 |
+
"options": [
|
| 1069 |
+
index_url,
|
| 1070 |
+
extra_index_url,
|
| 1071 |
+
no_index,
|
| 1072 |
+
find_links,
|
| 1073 |
+
],
|
| 1074 |
+
}
|
.venv/Lib/site-packages/pip/_internal/cli/command_context.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from contextlib import ExitStack, contextmanager
|
| 2 |
+
from typing import ContextManager, Generator, TypeVar
|
| 3 |
+
|
| 4 |
+
_T = TypeVar("_T", covariant=True)
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class CommandContextMixIn:
|
| 8 |
+
def __init__(self) -> None:
|
| 9 |
+
super().__init__()
|
| 10 |
+
self._in_main_context = False
|
| 11 |
+
self._main_context = ExitStack()
|
| 12 |
+
|
| 13 |
+
@contextmanager
|
| 14 |
+
def main_context(self) -> Generator[None, None, None]:
|
| 15 |
+
assert not self._in_main_context
|
| 16 |
+
|
| 17 |
+
self._in_main_context = True
|
| 18 |
+
try:
|
| 19 |
+
with self._main_context:
|
| 20 |
+
yield
|
| 21 |
+
finally:
|
| 22 |
+
self._in_main_context = False
|
| 23 |
+
|
| 24 |
+
def enter_context(self, context_provider: ContextManager[_T]) -> _T:
|
| 25 |
+
assert self._in_main_context
|
| 26 |
+
|
| 27 |
+
return self._main_context.enter_context(context_provider)
|
.venv/Lib/site-packages/pip/_internal/cli/main.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Primary application entrypoint.
|
| 2 |
+
"""
|
| 3 |
+
import locale
|
| 4 |
+
import logging
|
| 5 |
+
import os
|
| 6 |
+
import sys
|
| 7 |
+
import warnings
|
| 8 |
+
from typing import List, Optional
|
| 9 |
+
|
| 10 |
+
from pip._internal.cli.autocompletion import autocomplete
|
| 11 |
+
from pip._internal.cli.main_parser import parse_command
|
| 12 |
+
from pip._internal.commands import create_command
|
| 13 |
+
from pip._internal.exceptions import PipError
|
| 14 |
+
from pip._internal.utils import deprecation
|
| 15 |
+
|
| 16 |
+
logger = logging.getLogger(__name__)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
# Do not import and use main() directly! Using it directly is actively
|
| 20 |
+
# discouraged by pip's maintainers. The name, location and behavior of
|
| 21 |
+
# this function is subject to change, so calling it directly is not
|
| 22 |
+
# portable across different pip versions.
|
| 23 |
+
|
| 24 |
+
# In addition, running pip in-process is unsupported and unsafe. This is
|
| 25 |
+
# elaborated in detail at
|
| 26 |
+
# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
|
| 27 |
+
# That document also provides suggestions that should work for nearly
|
| 28 |
+
# all users that are considering importing and using main() directly.
|
| 29 |
+
|
| 30 |
+
# However, we know that certain users will still want to invoke pip
|
| 31 |
+
# in-process. If you understand and accept the implications of using pip
|
| 32 |
+
# in an unsupported manner, the best approach is to use runpy to avoid
|
| 33 |
+
# depending on the exact location of this entry point.
|
| 34 |
+
|
| 35 |
+
# The following example shows how to use runpy to invoke pip in that
|
| 36 |
+
# case:
|
| 37 |
+
#
|
| 38 |
+
# sys.argv = ["pip", your, args, here]
|
| 39 |
+
# runpy.run_module("pip", run_name="__main__")
|
| 40 |
+
#
|
| 41 |
+
# Note that this will exit the process after running, unlike a direct
|
| 42 |
+
# call to main. As it is not safe to do any processing after calling
|
| 43 |
+
# main, this should not be an issue in practice.
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def main(args: Optional[List[str]] = None) -> int:
|
| 47 |
+
if args is None:
|
| 48 |
+
args = sys.argv[1:]
|
| 49 |
+
|
| 50 |
+
# Suppress the pkg_resources deprecation warning
|
| 51 |
+
# Note - we use a module of .*pkg_resources to cover
|
| 52 |
+
# the normal case (pip._vendor.pkg_resources) and the
|
| 53 |
+
# devendored case (a bare pkg_resources)
|
| 54 |
+
warnings.filterwarnings(
|
| 55 |
+
action="ignore", category=DeprecationWarning, module=".*pkg_resources"
|
| 56 |
+
)
|
| 57 |
+
|
| 58 |
+
# Configure our deprecation warnings to be sent through loggers
|
| 59 |
+
deprecation.install_warning_logger()
|
| 60 |
+
|
| 61 |
+
autocomplete()
|
| 62 |
+
|
| 63 |
+
try:
|
| 64 |
+
cmd_name, cmd_args = parse_command(args)
|
| 65 |
+
except PipError as exc:
|
| 66 |
+
sys.stderr.write(f"ERROR: {exc}")
|
| 67 |
+
sys.stderr.write(os.linesep)
|
| 68 |
+
sys.exit(1)
|
| 69 |
+
|
| 70 |
+
# Needed for locale.getpreferredencoding(False) to work
|
| 71 |
+
# in pip._internal.utils.encoding.auto_decode
|
| 72 |
+
try:
|
| 73 |
+
locale.setlocale(locale.LC_ALL, "")
|
| 74 |
+
except locale.Error as e:
|
| 75 |
+
# setlocale can apparently crash if locale are uninitialized
|
| 76 |
+
logger.debug("Ignoring error %s when setting locale", e)
|
| 77 |
+
command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
|
| 78 |
+
|
| 79 |
+
return command.main(cmd_args)
|
.venv/Lib/site-packages/pip/_internal/cli/main_parser.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A single place for constructing and exposing the main parser
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import os
|
| 5 |
+
import subprocess
|
| 6 |
+
import sys
|
| 7 |
+
from typing import List, Optional, Tuple
|
| 8 |
+
|
| 9 |
+
from pip._internal.build_env import get_runnable_pip
|
| 10 |
+
from pip._internal.cli import cmdoptions
|
| 11 |
+
from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
|
| 12 |
+
from pip._internal.commands import commands_dict, get_similar_commands
|
| 13 |
+
from pip._internal.exceptions import CommandError
|
| 14 |
+
from pip._internal.utils.misc import get_pip_version, get_prog
|
| 15 |
+
|
| 16 |
+
__all__ = ["create_main_parser", "parse_command"]
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def create_main_parser() -> ConfigOptionParser:
|
| 20 |
+
"""Creates and returns the main parser for pip's CLI"""
|
| 21 |
+
|
| 22 |
+
parser = ConfigOptionParser(
|
| 23 |
+
usage="\n%prog <command> [options]",
|
| 24 |
+
add_help_option=False,
|
| 25 |
+
formatter=UpdatingDefaultsHelpFormatter(),
|
| 26 |
+
name="global",
|
| 27 |
+
prog=get_prog(),
|
| 28 |
+
)
|
| 29 |
+
parser.disable_interspersed_args()
|
| 30 |
+
|
| 31 |
+
parser.version = get_pip_version()
|
| 32 |
+
|
| 33 |
+
# add the general options
|
| 34 |
+
gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
|
| 35 |
+
parser.add_option_group(gen_opts)
|
| 36 |
+
|
| 37 |
+
# so the help formatter knows
|
| 38 |
+
parser.main = True # type: ignore
|
| 39 |
+
|
| 40 |
+
# create command listing for description
|
| 41 |
+
description = [""] + [
|
| 42 |
+
f"{name:27} {command_info.summary}"
|
| 43 |
+
for name, command_info in commands_dict.items()
|
| 44 |
+
]
|
| 45 |
+
parser.description = "\n".join(description)
|
| 46 |
+
|
| 47 |
+
return parser
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def identify_python_interpreter(python: str) -> Optional[str]:
|
| 51 |
+
# If the named file exists, use it.
|
| 52 |
+
# If it's a directory, assume it's a virtual environment and
|
| 53 |
+
# look for the environment's Python executable.
|
| 54 |
+
if os.path.exists(python):
|
| 55 |
+
if os.path.isdir(python):
|
| 56 |
+
# bin/python for Unix, Scripts/python.exe for Windows
|
| 57 |
+
# Try both in case of odd cases like cygwin.
|
| 58 |
+
for exe in ("bin/python", "Scripts/python.exe"):
|
| 59 |
+
py = os.path.join(python, exe)
|
| 60 |
+
if os.path.exists(py):
|
| 61 |
+
return py
|
| 62 |
+
else:
|
| 63 |
+
return python
|
| 64 |
+
|
| 65 |
+
# Could not find the interpreter specified
|
| 66 |
+
return None
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def parse_command(args: List[str]) -> Tuple[str, List[str]]:
|
| 70 |
+
parser = create_main_parser()
|
| 71 |
+
|
| 72 |
+
# Note: parser calls disable_interspersed_args(), so the result of this
|
| 73 |
+
# call is to split the initial args into the general options before the
|
| 74 |
+
# subcommand and everything else.
|
| 75 |
+
# For example:
|
| 76 |
+
# args: ['--timeout=5', 'install', '--user', 'INITools']
|
| 77 |
+
# general_options: ['--timeout==5']
|
| 78 |
+
# args_else: ['install', '--user', 'INITools']
|
| 79 |
+
general_options, args_else = parser.parse_args(args)
|
| 80 |
+
|
| 81 |
+
# --python
|
| 82 |
+
if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
|
| 83 |
+
# Re-invoke pip using the specified Python interpreter
|
| 84 |
+
interpreter = identify_python_interpreter(general_options.python)
|
| 85 |
+
if interpreter is None:
|
| 86 |
+
raise CommandError(
|
| 87 |
+
f"Could not locate Python interpreter {general_options.python}"
|
| 88 |
+
)
|
| 89 |
+
|
| 90 |
+
pip_cmd = [
|
| 91 |
+
interpreter,
|
| 92 |
+
get_runnable_pip(),
|
| 93 |
+
]
|
| 94 |
+
pip_cmd.extend(args)
|
| 95 |
+
|
| 96 |
+
# Set a flag so the child doesn't re-invoke itself, causing
|
| 97 |
+
# an infinite loop.
|
| 98 |
+
os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"
|
| 99 |
+
returncode = 0
|
| 100 |
+
try:
|
| 101 |
+
proc = subprocess.run(pip_cmd)
|
| 102 |
+
returncode = proc.returncode
|
| 103 |
+
except (subprocess.SubprocessError, OSError) as exc:
|
| 104 |
+
raise CommandError(f"Failed to run pip under {interpreter}: {exc}")
|
| 105 |
+
sys.exit(returncode)
|
| 106 |
+
|
| 107 |
+
# --version
|
| 108 |
+
if general_options.version:
|
| 109 |
+
sys.stdout.write(parser.version)
|
| 110 |
+
sys.stdout.write(os.linesep)
|
| 111 |
+
sys.exit()
|
| 112 |
+
|
| 113 |
+
# pip || pip help -> print_help()
|
| 114 |
+
if not args_else or (args_else[0] == "help" and len(args_else) == 1):
|
| 115 |
+
parser.print_help()
|
| 116 |
+
sys.exit()
|
| 117 |
+
|
| 118 |
+
# the subcommand name
|
| 119 |
+
cmd_name = args_else[0]
|
| 120 |
+
|
| 121 |
+
if cmd_name not in commands_dict:
|
| 122 |
+
guess = get_similar_commands(cmd_name)
|
| 123 |
+
|
| 124 |
+
msg = [f'unknown command "{cmd_name}"']
|
| 125 |
+
if guess:
|
| 126 |
+
msg.append(f'maybe you meant "{guess}"')
|
| 127 |
+
|
| 128 |
+
raise CommandError(" - ".join(msg))
|
| 129 |
+
|
| 130 |
+
# all the args without the subcommand
|
| 131 |
+
cmd_args = args[:]
|
| 132 |
+
cmd_args.remove(cmd_name)
|
| 133 |
+
|
| 134 |
+
return cmd_name, cmd_args
|
.venv/Lib/site-packages/pip/_internal/cli/parser.py
ADDED
|
@@ -0,0 +1,294 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Base option parser setup"""
|
| 2 |
+
|
| 3 |
+
import logging
|
| 4 |
+
import optparse
|
| 5 |
+
import shutil
|
| 6 |
+
import sys
|
| 7 |
+
import textwrap
|
| 8 |
+
from contextlib import suppress
|
| 9 |
+
from typing import Any, Dict, Generator, List, Tuple
|
| 10 |
+
|
| 11 |
+
from pip._internal.cli.status_codes import UNKNOWN_ERROR
|
| 12 |
+
from pip._internal.configuration import Configuration, ConfigurationError
|
| 13 |
+
from pip._internal.utils.misc import redact_auth_from_url, strtobool
|
| 14 |
+
|
| 15 |
+
logger = logging.getLogger(__name__)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # help position must be aligned with __init__.parseopts.description
        kwargs.update(
            max_help_position=30,
            indent_increment=1,
            width=shutil.get_terminal_size()[0] - 2,
        )
        super().__init__(*args, **kwargs)

    def format_option_strings(self, option: optparse.Option) -> str:
        return self._format_option_strings(option)

    def _format_option_strings(
        self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
    ) -> str:
        """
        Return a comma-separated list of option strings and metavars.

        :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string
        :param optsep: separator
        """
        pieces = []

        if option._short_opts:
            pieces.append(option._short_opts[0])
        if option._long_opts:
            pieces.append(option._long_opts[0])
        if len(pieces) > 1:
            # both spellings present: separate them with the separator
            pieces.insert(1, optsep)

        if option.takes_value():
            assert option.dest is not None
            metavar = option.metavar or option.dest.lower()
            pieces.append(mvarfmt.format(metavar.lower()))

        return "".join(pieces)

    def format_heading(self, heading: str) -> str:
        # The implicit "Options" heading is suppressed entirely.
        return "" if heading == "Options" else heading + ":\n"

    def format_usage(self, usage: str) -> str:
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        indented = self.indent_lines(textwrap.dedent(usage), "  ")
        return f"\nUsage: {indented}\n"

    def format_description(self, description: str) -> str:
        # leave full control over description to us
        if not description:
            return ""
        # The top-level parser (which has a "main" attribute) lists commands;
        # sub-parsers describe themselves.
        label = "Commands" if hasattr(self.parser, "main") else "Description"
        # some doc strings have initial newlines, some don't;
        # some have final newlines and spaces, some don't.
        cleaned = description.lstrip("\n").rstrip()
        # dedent, then reindent
        body = self.indent_lines(textwrap.dedent(cleaned), "  ")
        return f"{label}:\n{body}\n"

    def format_epilog(self, epilog: str) -> str:
        # leave full control over epilog to us
        return epilog if epilog else ""

    def indent_lines(self, text: str, indent: str) -> str:
        return "\n".join(indent + line for line in text.split("\n"))
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.

    This updates the defaults before expanding them, allowing them to show
    up correctly in the help listing.

    Also redacts auth from URL-type options.
    """

    def expand_default(self, option: optparse.Option) -> str:
        # Refresh the parser defaults (config files + environment) before
        # optparse substitutes %default into the help text.
        defaults = None
        if self.parser is not None:
            assert isinstance(self.parser, ConfigOptionParser)
            self.parser._update_defaults(self.parser.defaults)
            assert option.dest is not None
            defaults = self.parser.defaults.get(option.dest)
        help_text = super().expand_default(option)

        if defaults and option.metavar == "URL":
            if isinstance(defaults, str):
                defaults = [defaults]

            # If it's not a list, abort and just return the help text.
            if not isinstance(defaults, list):
                defaults = []

            for url in defaults:
                # Hide credentials embedded in configured index URLs.
                help_text = help_text.replace(url, redact_auth_from_url(url))

        return help_text
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
class CustomOptionParser(optparse.OptionParser):
    def insert_option_group(
        self, idx: int, *args: Any, **kwargs: Any
    ) -> optparse.OptionGroup:
        """Insert an OptionGroup at a given position."""
        # add_option_group appends; move the new group to the requested slot.
        group = self.add_option_group(*args, **kwargs)
        self.option_groups.insert(idx, self.option_groups.pop())
        return group

    @property
    def option_list_all(self) -> List[optparse.Option]:
        """Get a list of all options, including those in option groups."""
        every_option = list(self.option_list)
        for group in self.option_groups:
            every_option.extend(group.option_list)
        return every_option
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(
        self,
        *args: Any,
        name: str,
        isolated: bool = False,
        **kwargs: Any,
    ) -> None:
        # `name` selects the per-command config section; must be non-empty.
        self.name = name
        self.config = Configuration(isolated)

        assert self.name
        super().__init__(*args, **kwargs)

    def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
        """Validate a configured default via optparse, exiting on failure."""
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print(f"An error occurred during configuration: {exc}")
            sys.exit(3)

    def _get_ordered_configuration_items(
        self,
    ) -> Generator[Tuple[str, Any], None, None]:
        """Yield (key, value) config items in override order:
        [global] first, then the command section, then environment variables.
        """
        # Configuration gives keys in an unordered manner. Order them.
        override_order = ["global", self.name, ":env:"]

        # Pool the options into different groups
        section_items: Dict[str, List[Tuple[str, Any]]] = {
            name: [] for name in override_order
        }
        for section_key, val in self.config.items():
            # ignore empty values
            if not val:
                logger.debug(
                    # FIX: "it's" -> "its" (grammar in the emitted log message)
                    "Ignoring configuration key '%s' as its value is empty.",
                    section_key,
                )
                continue

            section, key = section_key.split(".", 1)
            if section in override_order:
                section_items[section].append((key, val))

        # Yield each group in their override order
        for section in override_order:
            yield from section_items[section]

    def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""

        # Accumulate complex default state (needed so callback options can
        # read/write via `self.values`).
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in self._get_ordered_configuration_items():
            # '--' because configuration supports only long names
            option = self.get_option("--" + key)

            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            assert option.dest is not None

            if option.action in ("store_true", "store_false"):
                try:
                    val = strtobool(val)
                except ValueError:
                    self.error(
                        "{} is not a valid value for {} option, "  # noqa
                        "please specify a boolean value like yes/no, "
                        "true/false or 1/0 instead.".format(val, key)
                    )
            elif option.action == "count":
                # Accept booleans (yes/no -> 1/0) as well as plain integers.
                with suppress(ValueError):
                    val = strtobool(val)
                with suppress(ValueError):
                    val = int(val)
                if not isinstance(val, int) or val < 0:
                    self.error(
                        "{} is not a valid value for {} option, "  # noqa
                        "please instead specify either a non-negative integer "
                        "or a boolean value like yes/no or false/true "
                        "which is equivalent to 1/0.".format(val, key)
                    )
            elif option.action == "append":
                # Whitespace-separated list; each element is validated.
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == "callback":
                # Run the callback now; its result is picked up after the loop.
                assert option.callback is not None
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def get_default_values(self) -> optparse.Values:
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        # Load the configuration, or error out in case of an error
        try:
            self.config.load()
        except ConfigurationError as err:
            self.exit(UNKNOWN_ERROR, str(err))

        defaults = self._update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            assert option.dest is not None
            default = defaults.get(option.dest)
            if isinstance(default, str):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg: str) -> None:
        # Print usage to stderr, then exit with pip's UNKNOWN_ERROR status.
        self.print_usage(sys.stderr)
        self.exit(UNKNOWN_ERROR, f"{msg}\n")
|
.venv/Lib/site-packages/pip/_internal/cli/progress_bars.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple
|
| 3 |
+
|
| 4 |
+
from pip._vendor.rich.progress import (
|
| 5 |
+
BarColumn,
|
| 6 |
+
DownloadColumn,
|
| 7 |
+
FileSizeColumn,
|
| 8 |
+
Progress,
|
| 9 |
+
ProgressColumn,
|
| 10 |
+
SpinnerColumn,
|
| 11 |
+
TextColumn,
|
| 12 |
+
TimeElapsedColumn,
|
| 13 |
+
TimeRemainingColumn,
|
| 14 |
+
TransferSpeedColumn,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
from pip._internal.utils.logging import get_indentation
|
| 18 |
+
|
| 19 |
+
DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def _rich_progress_bar(
    iterable: Iterable[bytes],
    *,
    bar_type: str,
    size: int,
) -> Generator[bytes, None, None]:
    """Yield chunks from *iterable* while rendering a rich progress display."""
    assert bar_type == "on", "This should only be used in the default mode."

    if size:
        # Known download size: classic bar with byte counts and ETA.
        total: float = size
        columns: Tuple[ProgressColumn, ...] = (
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            DownloadColumn(),
            TransferSpeedColumn(),
            TextColumn("eta"),
            TimeRemainingColumn(),
        )
    else:
        # Unknown size: fall back to a spinner plus running totals.
        total = float("inf")
        columns = (
            TextColumn("[progress.description]{task.description}"),
            SpinnerColumn("line", speed=1.5),
            FileSizeColumn(),
            TransferSpeedColumn(),
            TimeElapsedColumn(),
        )

    progress = Progress(*columns, refresh_per_second=30)
    # The blank description indents the bar to match pip's log indentation.
    task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
    with progress:
        for chunk in iterable:
            yield chunk
            progress.update(task_id, advance=len(chunk))
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def get_download_progress_renderer(
    *, bar_type: str, size: Optional[int] = None
) -> DownloadProgressRenderer:
    """Get an object that can be used to render the download progress.

    Returns a callable, that takes an iterable to "wrap".
    """
    if bar_type != "on":
        return iter  # no-op, when passed an iterator
    return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
|
.venv/Lib/site-packages/pip/_internal/cli/req_command.py
ADDED
|
@@ -0,0 +1,508 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Contains the Command base classes that depend on PipSession.
|
| 2 |
+
|
| 3 |
+
The classes in this module are in a separate module so the commands not
|
| 4 |
+
needing download / PackageFinder capability don't unnecessarily import the
|
| 5 |
+
PackageFinder machinery and all its vendored dependencies, etc.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import logging
|
| 9 |
+
import os
|
| 10 |
+
import sys
|
| 11 |
+
from functools import partial
|
| 12 |
+
from optparse import Values
|
| 13 |
+
from typing import TYPE_CHECKING, Any, List, Optional, Tuple
|
| 14 |
+
|
| 15 |
+
from pip._internal.cache import WheelCache
|
| 16 |
+
from pip._internal.cli import cmdoptions
|
| 17 |
+
from pip._internal.cli.base_command import Command
|
| 18 |
+
from pip._internal.cli.command_context import CommandContextMixIn
|
| 19 |
+
from pip._internal.exceptions import CommandError, PreviousBuildDirError
|
| 20 |
+
from pip._internal.index.collector import LinkCollector
|
| 21 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 22 |
+
from pip._internal.models.selection_prefs import SelectionPreferences
|
| 23 |
+
from pip._internal.models.target_python import TargetPython
|
| 24 |
+
from pip._internal.network.session import PipSession
|
| 25 |
+
from pip._internal.operations.build.build_tracker import BuildTracker
|
| 26 |
+
from pip._internal.operations.prepare import RequirementPreparer
|
| 27 |
+
from pip._internal.req.constructors import (
|
| 28 |
+
install_req_from_editable,
|
| 29 |
+
install_req_from_line,
|
| 30 |
+
install_req_from_parsed_requirement,
|
| 31 |
+
install_req_from_req_string,
|
| 32 |
+
)
|
| 33 |
+
from pip._internal.req.req_file import parse_requirements
|
| 34 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 35 |
+
from pip._internal.resolution.base import BaseResolver
|
| 36 |
+
from pip._internal.self_outdated_check import pip_self_version_check
|
| 37 |
+
from pip._internal.utils.temp_dir import (
|
| 38 |
+
TempDirectory,
|
| 39 |
+
TempDirectoryTypeRegistry,
|
| 40 |
+
tempdir_kinds,
|
| 41 |
+
)
|
| 42 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
| 43 |
+
|
| 44 |
+
if TYPE_CHECKING:
|
| 45 |
+
from ssl import SSLContext
|
| 46 |
+
|
| 47 |
+
logger = logging.getLogger(__name__)
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def _create_truststore_ssl_context() -> Optional["SSLContext"]:
    """Build an SSLContext backed by the OS trust store via 'truststore'.

    Returns None when the interpreter has no ssl support.  Raises
    CommandError on Python < 3.10 or when 'truststore' is not installed.
    """
    if sys.version_info < (3, 10):
        raise CommandError("The truststore feature is only available for Python 3.10+")

    try:
        import ssl
    except ImportError:
        logger.warning("Disabling truststore since ssl support is missing")
        return None

    try:
        import truststore
    except ImportError:
        raise CommandError(
            "To use the truststore feature, 'truststore' must be installed into "
            "pip's current environment."
        )

    return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class SessionCommandMixin(CommandContextMixIn):

    """
    A class mixin for command classes needing _build_session().
    """

    def __init__(self) -> None:
        super().__init__()
        self._session: Optional[PipSession] = None

    @classmethod
    def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
        """Return a list of index urls from user-provided options."""
        collected: List[str] = []
        if not getattr(options, "no_index", False):
            primary = getattr(options, "index_url", None)
            if primary:
                collected.append(primary)
            extras = getattr(options, "extra_index_urls", None)
            if extras:
                collected.extend(extras)
        # Return None rather than an empty list
        return collected or None

    def get_default_session(self, options: Values) -> PipSession:
        """Get a default-managed session."""
        if self._session is None:
            self._session = self.enter_context(self._build_session(options))
            # there's no type annotation on requests.Session, so it's
            # automatically ContextManager[Any] and self._session becomes Any,
            # then https://github.com/python/mypy/issues/7696 kicks in
            assert self._session is not None
        return self._session

    def _build_session(
        self,
        options: Values,
        retries: Optional[int] = None,
        timeout: Optional[int] = None,
        fallback_to_certifi: bool = False,
    ) -> PipSession:
        """Construct a PipSession from CLI options.

        Explicit `retries`/`timeout` arguments win over the option values.
        With `fallback_to_certifi`, truststore setup failures are swallowed
        and the default certifi bundle is used instead.
        """
        cache_dir = options.cache_dir
        assert not cache_dir or os.path.isabs(cache_dir)

        ssl_context = None
        if "truststore" in options.features_enabled:
            try:
                ssl_context = _create_truststore_ssl_context()
            except Exception:
                if not fallback_to_certifi:
                    raise
                ssl_context = None

        session = PipSession(
            cache=os.path.join(cache_dir, "http") if cache_dir else None,
            retries=retries if retries is not None else options.retries,
            trusted_hosts=options.trusted_hosts,
            index_urls=self._get_index_urls(options),
            ssl_context=ssl_context,
        )

        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert

        # Handle SSL client certificate
        if options.client_cert:
            session.cert = options.client_cert

        # Handle timeouts
        if options.timeout or timeout:
            session.timeout = timeout if timeout is not None else options.timeout

        # Handle configured proxies
        if options.proxy:
            session.proxies = {
                "http": options.proxy,
                "https": options.proxy,
            }

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input
        session.auth.keyring_provider = options.keyring_provider

        return session
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
class IndexGroupCommand(Command, SessionCommandMixin):

    """
    Abstract base class for commands with the index_group options.

    This also corresponds to the commands that permit the pip version check.
    """

    def handle_pip_version_check(self, options: Values) -> None:
        """
        Do the pip version check if not disabled.

        This overrides the default behavior of not doing the check.
        """
        # Make sure the index_group options are present.
        assert hasattr(options, "no_index")

        if options.disable_pip_version_check or options.no_index:
            return

        # Otherwise, check if we're using the latest version of pip available.
        # Use a short timeout and no retries so the check never stalls a command.
        session = self._build_session(
            options,
            retries=0,
            timeout=min(5, options.timeout),
            # This is set to ensure the function does not fail when truststore is
            # specified in use-feature but cannot be loaded. This usually raises a
            # CommandError and shows a nice user-facing error, but this function is not
            # called in that try-except block.
            fallback_to_certifi=True,
        )
        with session:
            pip_self_version_check(session, options)
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
# Temp-dir kinds that are marked as not-to-be-deleted (set_delete(..., False))
# by with_cleanup() when --no-clean is given or a PreviousBuildDirError occurs.
KEEPABLE_TEMPDIR_TYPES = [
    tempdir_kinds.BUILD_ENV,
    tempdir_kinds.EPHEM_WHEEL_CACHE,
    tempdir_kinds.REQ_BUILD,
]
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
def warn_if_run_as_root() -> None:
    """Output a warning for sudo users on Unix.

    In a virtual environment, sudo pip still writes to virtualenv.
    On Windows, users may run pip as Administrator without issues.
    This warning only applies to Unix root users outside of virtualenv.
    """
    # Inside a virtualenv, or on platforms without uids, there is nothing to warn about.
    if running_under_virtualenv() or not hasattr(os, "getuid"):
        return
    # On Windows, there are no "system managed" Python packages. Installing as
    # Administrator via pip is the correct way of updating system environments.
    #
    # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
    # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
    if sys.platform in ("win32", "cygwin"):
        return

    if os.getuid() != 0:
        return

    logger.warning(
        "Running pip as the 'root' user can result in broken permissions and "
        "conflicting behaviour with the system package manager. "
        "It is recommended to use a virtual environment instead: "
        "https://pip.pypa.io/warnings/venv"
    )
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
def with_cleanup(func: Any) -> Any:
    """Decorator for common logic related to managing temporary
    directories.
    """

    def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
        # Mark every keepable temp-dir kind as not-to-be-deleted.
        for kind in KEEPABLE_TEMPDIR_TYPES:
            registry.set_delete(kind, False)

    def wrapper(
        self: RequirementCommand, options: Values, args: List[Any]
    ) -> Optional[int]:
        assert self.tempdir_registry is not None
        if options.no_clean:
            configure_tempdir_registry(self.tempdir_registry)

        try:
            return func(self, options, args)
        except PreviousBuildDirError:
            # This kind of conflict can occur when the user passes an explicit
            # build directory with a pre-existing folder. In that case we do
            # not want to accidentally remove it.
            configure_tempdir_registry(self.tempdir_registry)
            raise

    return wrapper
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
class RequirementCommand(IndexGroupCommand):
|
| 260 |
+
def __init__(self, *args: Any, **kw: Any) -> None:
|
| 261 |
+
super().__init__(*args, **kw)
|
| 262 |
+
|
| 263 |
+
self.cmd_opts.add_option(cmdoptions.no_clean())
|
| 264 |
+
|
| 265 |
+
@staticmethod
|
| 266 |
+
def determine_resolver_variant(options: Values) -> str:
|
| 267 |
+
"""Determines which resolver should be used, based on the given options."""
|
| 268 |
+
if "legacy-resolver" in options.deprecated_features_enabled:
|
| 269 |
+
return "legacy"
|
| 270 |
+
|
| 271 |
+
return "2020-resolver"
|
| 272 |
+
|
| 273 |
+
@classmethod
|
| 274 |
+
def make_requirement_preparer(
|
| 275 |
+
cls,
|
| 276 |
+
temp_build_dir: TempDirectory,
|
| 277 |
+
options: Values,
|
| 278 |
+
build_tracker: BuildTracker,
|
| 279 |
+
session: PipSession,
|
| 280 |
+
finder: PackageFinder,
|
| 281 |
+
use_user_site: bool,
|
| 282 |
+
download_dir: Optional[str] = None,
|
| 283 |
+
verbosity: int = 0,
|
| 284 |
+
) -> RequirementPreparer:
|
| 285 |
+
"""
|
| 286 |
+
Create a RequirementPreparer instance for the given parameters.
|
| 287 |
+
"""
|
| 288 |
+
temp_build_dir_path = temp_build_dir.path
|
| 289 |
+
assert temp_build_dir_path is not None
|
| 290 |
+
legacy_resolver = False
|
| 291 |
+
|
| 292 |
+
resolver_variant = cls.determine_resolver_variant(options)
|
| 293 |
+
if resolver_variant == "2020-resolver":
|
| 294 |
+
lazy_wheel = "fast-deps" in options.features_enabled
|
| 295 |
+
if lazy_wheel:
|
| 296 |
+
logger.warning(
|
| 297 |
+
"pip is using lazily downloaded wheels using HTTP "
|
| 298 |
+
"range requests to obtain dependency information. "
|
| 299 |
+
"This experimental feature is enabled through "
|
| 300 |
+
"--use-feature=fast-deps and it is not ready for "
|
| 301 |
+
"production."
|
| 302 |
+
)
|
| 303 |
+
else:
|
| 304 |
+
legacy_resolver = True
|
| 305 |
+
lazy_wheel = False
|
| 306 |
+
if "fast-deps" in options.features_enabled:
|
| 307 |
+
logger.warning(
|
| 308 |
+
"fast-deps has no effect when used with the legacy resolver."
|
| 309 |
+
)
|
| 310 |
+
|
| 311 |
+
return RequirementPreparer(
|
| 312 |
+
build_dir=temp_build_dir_path,
|
| 313 |
+
src_dir=options.src_dir,
|
| 314 |
+
download_dir=download_dir,
|
| 315 |
+
build_isolation=options.build_isolation,
|
| 316 |
+
check_build_deps=options.check_build_deps,
|
| 317 |
+
build_tracker=build_tracker,
|
| 318 |
+
session=session,
|
| 319 |
+
progress_bar=options.progress_bar,
|
| 320 |
+
finder=finder,
|
| 321 |
+
require_hashes=options.require_hashes,
|
| 322 |
+
use_user_site=use_user_site,
|
| 323 |
+
lazy_wheel=lazy_wheel,
|
| 324 |
+
verbosity=verbosity,
|
| 325 |
+
legacy_resolver=legacy_resolver,
|
| 326 |
+
)
|
| 327 |
+
|
| 328 |
+
    @classmethod
    def make_resolver(
        cls,
        preparer: RequirementPreparer,
        finder: PackageFinder,
        options: Values,
        wheel_cache: Optional[WheelCache] = None,
        use_user_site: bool = False,
        ignore_installed: bool = True,
        ignore_requires_python: bool = False,
        force_reinstall: bool = False,
        upgrade_strategy: str = "to-satisfy-only",
        use_pep517: Optional[bool] = None,
        py_version_info: Optional[Tuple[int, ...]] = None,
    ) -> BaseResolver:
        """
        Create a Resolver instance for the given parameters.

        Returns either the resolvelib-based ("2020") resolver or the legacy
        resolver, depending on what ``determine_resolver_variant`` reports
        for the given options.
        """
        # Factory the resolver uses to turn requirement strings into
        # InstallRequirement objects; isolation and PEP 517 settings are
        # bound here once for all requirements.
        make_install_req = partial(
            install_req_from_req_string,
            isolated=options.isolated_mode,
            use_pep517=use_pep517,
        )
        resolver_variant = cls.determine_resolver_variant(options)
        # The long import name and duplicated invocation is needed to convince
        # Mypy into correctly typechecking. Otherwise it would complain the
        # "Resolver" class being redefined.
        if resolver_variant == "2020-resolver":
            import pip._internal.resolution.resolvelib.resolver

            return pip._internal.resolution.resolvelib.resolver.Resolver(
                preparer=preparer,
                finder=finder,
                wheel_cache=wheel_cache,
                make_install_req=make_install_req,
                use_user_site=use_user_site,
                ignore_dependencies=options.ignore_dependencies,
                ignore_installed=ignore_installed,
                ignore_requires_python=ignore_requires_python,
                force_reinstall=force_reinstall,
                upgrade_strategy=upgrade_strategy,
                py_version_info=py_version_info,
            )
        import pip._internal.resolution.legacy.resolver

        return pip._internal.resolution.legacy.resolver.Resolver(
            preparer=preparer,
            finder=finder,
            wheel_cache=wheel_cache,
            make_install_req=make_install_req,
            use_user_site=use_user_site,
            ignore_dependencies=options.ignore_dependencies,
            ignore_installed=ignore_installed,
            ignore_requires_python=ignore_requires_python,
            force_reinstall=force_reinstall,
            upgrade_strategy=upgrade_strategy,
            py_version_info=py_version_info,
        )
|
| 386 |
+
|
| 387 |
+
    def get_requirements(
        self,
        args: List[str],
        options: Values,
        finder: PackageFinder,
        session: PipSession,
    ) -> List[InstallRequirement]:
        """
        Parse command-line arguments into the corresponding requirements.

        Collects, in order: constraint-file entries, positional requirement
        strings, editable (-e) requirements, and requirement-file entries.
        May set ``options.require_hashes`` as a side effect, and raises
        CommandError when no requirement source was given at all.
        """
        requirements: List[InstallRequirement] = []
        # Constraint files: parsed with constraint=True and recorded as not
        # user-supplied.
        for filename in options.constraints:
            for parsed_req in parse_requirements(
                filename,
                constraint=True,
                finder=finder,
                options=options,
                session=session,
            ):
                req_to_add = install_req_from_parsed_requirement(
                    parsed_req,
                    isolated=options.isolated_mode,
                    user_supplied=False,
                )
                requirements.append(req_to_add)

        # Requirements given directly as positional command-line arguments.
        for req in args:
            req_to_add = install_req_from_line(
                req,
                comes_from=None,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                user_supplied=True,
                config_settings=getattr(options, "config_settings", None),
            )
            requirements.append(req_to_add)

        # Editable (-e) requirements.
        for req in options.editables:
            req_to_add = install_req_from_editable(
                req,
                user_supplied=True,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                config_settings=getattr(options, "config_settings", None),
            )
            requirements.append(req_to_add)

        # NOTE: options.require_hashes may be set if --require-hashes is True
        for filename in options.requirements:
            for parsed_req in parse_requirements(
                filename, finder=finder, options=options, session=session
            ):
                req_to_add = install_req_from_parsed_requirement(
                    parsed_req,
                    isolated=options.isolated_mode,
                    use_pep517=options.use_pep517,
                    user_supplied=True,
                    # Per-requirement config settings from the requirements
                    # file, when present.
                    config_settings=parsed_req.options.get("config_settings")
                    if parsed_req.options
                    else None,
                )
                requirements.append(req_to_add)

        # If any requirement has hash options, enable hash checking.
        if any(req.has_hash_options for req in requirements):
            options.require_hashes = True

        # Nothing was given from any source: raise a helpful error. The
        # find-links variant hints at the likely intended command line.
        if not (args or options.editables or options.requirements):
            opts = {"name": self.name}
            if options.find_links:
                raise CommandError(
                    "You must give at least one requirement to {name} "
                    '(maybe you meant "pip {name} {links}"?)'.format(
                        **dict(opts, links=" ".join(options.find_links))
                    )
                )
            else:
                raise CommandError(
                    "You must give at least one requirement to {name} "
                    '(see "pip help {name}")'.format(**opts)
                )

        return requirements
|
| 470 |
+
|
| 471 |
+
@staticmethod
|
| 472 |
+
def trace_basic_info(finder: PackageFinder) -> None:
|
| 473 |
+
"""
|
| 474 |
+
Trace basic information about the provided objects.
|
| 475 |
+
"""
|
| 476 |
+
# Display where finder is looking for packages
|
| 477 |
+
search_scope = finder.search_scope
|
| 478 |
+
locations = search_scope.get_formatted_locations()
|
| 479 |
+
if locations:
|
| 480 |
+
logger.info(locations)
|
| 481 |
+
|
| 482 |
+
def _build_package_finder(
|
| 483 |
+
self,
|
| 484 |
+
options: Values,
|
| 485 |
+
session: PipSession,
|
| 486 |
+
target_python: Optional[TargetPython] = None,
|
| 487 |
+
ignore_requires_python: Optional[bool] = None,
|
| 488 |
+
) -> PackageFinder:
|
| 489 |
+
"""
|
| 490 |
+
Create a package finder appropriate to this requirement command.
|
| 491 |
+
|
| 492 |
+
:param ignore_requires_python: Whether to ignore incompatible
|
| 493 |
+
"Requires-Python" values in links. Defaults to False.
|
| 494 |
+
"""
|
| 495 |
+
link_collector = LinkCollector.create(session, options=options)
|
| 496 |
+
selection_prefs = SelectionPreferences(
|
| 497 |
+
allow_yanked=True,
|
| 498 |
+
format_control=options.format_control,
|
| 499 |
+
allow_all_prereleases=options.pre,
|
| 500 |
+
prefer_binary=options.prefer_binary,
|
| 501 |
+
ignore_requires_python=ignore_requires_python,
|
| 502 |
+
)
|
| 503 |
+
|
| 504 |
+
return PackageFinder.create(
|
| 505 |
+
link_collector=link_collector,
|
| 506 |
+
selection_prefs=selection_prefs,
|
| 507 |
+
target_python=target_python,
|
| 508 |
+
)
|
.venv/Lib/site-packages/pip/_internal/cli/spinners.py
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import itertools
|
| 3 |
+
import logging
|
| 4 |
+
import sys
|
| 5 |
+
import time
|
| 6 |
+
from typing import IO, Generator, Optional
|
| 7 |
+
|
| 8 |
+
from pip._internal.utils.compat import WINDOWS
|
| 9 |
+
from pip._internal.utils.logging import get_indentation
|
| 10 |
+
|
| 11 |
+
logger = logging.getLogger(__name__)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class SpinnerInterface:
    """Abstract interface shared by all spinner implementations."""

    def spin(self) -> None:
        """Advance the spinner by one tick; must be overridden."""
        raise NotImplementedError()

    def finish(self, final_status: str) -> None:
        """Stop the spinner with ``final_status``; must be overridden."""
        raise NotImplementedError()
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class InteractiveSpinner(SpinnerInterface):
    """Spinner that animates in place on an interactive stream.

    Writes the message once, then repeatedly overwrites a one-character
    status using backspaces, throttled by a RateLimiter.
    """

    def __init__(
        self,
        message: str,
        file: Optional[IO[str]] = None,
        spin_chars: str = "-\\|/",
        # Empirically, 8 updates/second looks nice
        min_update_interval_seconds: float = 0.125,
    ):
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        # Endless iterator over the animation frames.
        self._spin_cycle = itertools.cycle(spin_chars)

        self._file.write(" " * get_indentation() + self._message + " ... ")
        # Width (in characters) of the last status written; used by _write
        # to erase it before writing the next status.
        self._width = 0

    def _write(self, status: str) -> None:
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self) -> None:
        """Show the next animation frame, subject to rate limiting."""
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status: str) -> None:
        """Replace the spinner with ``final_status`` and end the line."""
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(SpinnerInterface):
    """Spinner that emits occasional log lines instead of animating."""

    def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status: str) -> None:
        # Log the status and restart the rate-limit window.
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self) -> None:
        """Emit a keep-alive line when the rate limiter allows it."""
        if not self._finished and self._rate_limiter.ready():
            self._update("still running...")

    def finish(self, final_status: str) -> None:
        """Log the final status once and mark the spinner finished."""
        if self._finished:
            return
        self._update(f"finished with status '{final_status}'")
        self._finished = True
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class RateLimiter:
    """Wall-clock throttle used to limit how often spinners update."""

    def __init__(self, min_update_interval_seconds: float) -> None:
        self._min_update_interval_seconds = min_update_interval_seconds
        # Epoch timestamp of the last update; 0 means "never updated yet".
        self._last_update: float = 0

    def ready(self) -> bool:
        """Return True when the minimum interval has elapsed since reset()."""
        elapsed = time.time() - self._last_update
        return elapsed >= self._min_update_interval_seconds

    def reset(self) -> None:
        """Record now as the moment of the most recent update."""
        self._last_update = time.time()
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
@contextlib.contextmanager
def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
    """Yield a spinner appropriate for the current terminal and log level.

    On exit, the spinner is finished with "done" on success, "canceled" on
    KeyboardInterrupt, or "error" on any other exception (the exception is
    re-raised in the latter two cases).
    """
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner: SpinnerInterface = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
# ANSI escape sequences for hiding/showing the terminal cursor.
HIDE_CURSOR = "\x1b[?25l"
SHOW_CURSOR = "\x1b[?25h"
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
@contextlib.contextmanager
def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
    """Hide the terminal cursor for the duration of the block, when safe."""
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:
        yield
    # We don't want to clutter the output with control characters if we're
    # writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            # Always restore the cursor, even when the block raised.
            file.write(SHOW_CURSOR)
|
.venv/Lib/site-packages/pip/_internal/cli/status_codes.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Process exit codes returned by pip commands.
SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23
|
.venv/Lib/site-packages/pip/_internal/commands/__init__.py
ADDED
|
@@ -0,0 +1,132 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Package containing all pip commands
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import importlib
|
| 6 |
+
from collections import namedtuple
|
| 7 |
+
from typing import Any, Dict, Optional
|
| 8 |
+
|
| 9 |
+
from pip._internal.cli.base_command import Command
|
| 10 |
+
|
| 11 |
+
# Lightweight record describing where a command lives and what it does;
# used by create_command() for lazy importing.
CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")

# This dictionary does a bunch of heavy lifting for help output:
# - Enables avoiding additional (costly) imports for presenting `--help`.
# - The ordering matters for help display.
#
# Even though the module path starts with the same "pip._internal.commands"
# prefix, the full path makes testing easier (specifically when modifying
# `commands_dict` in test setup / teardown).
commands_dict: Dict[str, CommandInfo] = {
    "install": CommandInfo(
        "pip._internal.commands.install",
        "InstallCommand",
        "Install packages.",
    ),
    "download": CommandInfo(
        "pip._internal.commands.download",
        "DownloadCommand",
        "Download packages.",
    ),
    "uninstall": CommandInfo(
        "pip._internal.commands.uninstall",
        "UninstallCommand",
        "Uninstall packages.",
    ),
    "freeze": CommandInfo(
        "pip._internal.commands.freeze",
        "FreezeCommand",
        "Output installed packages in requirements format.",
    ),
    "inspect": CommandInfo(
        "pip._internal.commands.inspect",
        "InspectCommand",
        "Inspect the python environment.",
    ),
    "list": CommandInfo(
        "pip._internal.commands.list",
        "ListCommand",
        "List installed packages.",
    ),
    "show": CommandInfo(
        "pip._internal.commands.show",
        "ShowCommand",
        "Show information about installed packages.",
    ),
    "check": CommandInfo(
        "pip._internal.commands.check",
        "CheckCommand",
        "Verify installed packages have compatible dependencies.",
    ),
    "config": CommandInfo(
        "pip._internal.commands.configuration",
        "ConfigurationCommand",
        "Manage local and global configuration.",
    ),
    "search": CommandInfo(
        "pip._internal.commands.search",
        "SearchCommand",
        "Search PyPI for packages.",
    ),
    "cache": CommandInfo(
        "pip._internal.commands.cache",
        "CacheCommand",
        "Inspect and manage pip's wheel cache.",
    ),
    "index": CommandInfo(
        "pip._internal.commands.index",
        "IndexCommand",
        "Inspect information available from package indexes.",
    ),
    "wheel": CommandInfo(
        "pip._internal.commands.wheel",
        "WheelCommand",
        "Build wheels from your requirements.",
    ),
    "hash": CommandInfo(
        "pip._internal.commands.hash",
        "HashCommand",
        "Compute hashes of package archives.",
    ),
    "completion": CommandInfo(
        "pip._internal.commands.completion",
        "CompletionCommand",
        "A helper command used for command completion.",
    ),
    "debug": CommandInfo(
        "pip._internal.commands.debug",
        "DebugCommand",
        "Show information useful for debugging.",
    ),
    "help": CommandInfo(
        "pip._internal.commands.help",
        "HelpCommand",
        "Show help for commands.",
    ),
}
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def create_command(name: str, **kwargs: Any) -> Command:
    """
    Create an instance of the Command class with the given name.
    """
    # The command module is only imported here, when the command is
    # actually instantiated, keeping `--help` fast.
    info = commands_dict[name]
    module = importlib.import_module(info.module_path)
    command_class = getattr(module, info.class_name)
    return command_class(name=name, summary=info.summary, **kwargs)
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def get_similar_commands(name: str) -> Optional[str]:
    """Command name auto-correct."""
    from difflib import get_close_matches

    # Case-insensitive fuzzy match against the known command names.
    matches = get_close_matches(name.lower(), commands_dict.keys())
    return matches[0] if matches else None
|
.venv/Lib/site-packages/pip/_internal/commands/cache.py
ADDED
|
@@ -0,0 +1,222 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import textwrap
|
| 3 |
+
from optparse import Values
|
| 4 |
+
from typing import Any, List
|
| 5 |
+
|
| 6 |
+
import pip._internal.utils.filesystem as filesystem
|
| 7 |
+
from pip._internal.cli.base_command import Command
|
| 8 |
+
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
| 9 |
+
from pip._internal.exceptions import CommandError, PipError
|
| 10 |
+
from pip._internal.utils.logging import getLogger
|
| 11 |
+
|
| 12 |
+
logger = getLogger(__name__)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class CacheCommand(Command):
    """
    Inspect and manage pip's wheel cache.

    Subcommands:

    - dir: Show the cache directory.
    - info: Show information about the cache.
    - list: List filenames of packages stored in the cache.
    - remove: Remove one or more package from the cache.
    - purge: Remove all items from the cache.

    ``<pattern>`` can be a glob expression or a package name.
    """

    ignore_require_venv = True
    usage = """
        %prog dir
        %prog info
        %prog list [<pattern>] [--format=[human, abspath]]
        %prog remove <pattern>
        %prog purge
    """

    def add_options(self) -> None:
        """Register the cache-specific command-line options."""
        self.cmd_opts.add_option(
            "--format",
            action="store",
            dest="list_format",
            default="human",
            choices=("human", "abspath"),
            help="Select the output format among: human (default) or abspath",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Dispatch to the subcommand handler named by ``args[0]``.

        Returns ERROR when the cache is disabled, the action is unknown,
        or a handler raises PipError; SUCCESS otherwise.
        """
        handlers = {
            "dir": self.get_cache_dir,
            "info": self.get_cache_info,
            "list": self.list_cache_items,
            "remove": self.remove_cache_items,
            "purge": self.purge_cache,
        }

        if not options.cache_dir:
            logger.error("pip cache commands can not function since cache is disabled.")
            return ERROR

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def get_cache_dir(self, options: Values, args: List[Any]) -> None:
        """Print the cache directory path (``pip cache dir``)."""
        if args:
            raise CommandError("Too many arguments")

        logger.info(options.cache_dir)

    def get_cache_info(self, options: Values, args: List[Any]) -> None:
        """Print a summary of cache locations, sizes and item counts."""
        if args:
            raise CommandError("Too many arguments")

        num_http_files = len(self._find_http_files(options))
        num_packages = len(self._find_wheels(options, "*"))

        http_cache_location = self._cache_dir(options, "http")
        wheels_cache_location = self._cache_dir(options, "wheels")
        http_cache_size = filesystem.format_directory_size(http_cache_location)
        wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)

        message = (
            textwrap.dedent(
                """
                    Package index page cache location: {http_cache_location}
                    Package index page cache size: {http_cache_size}
                    Number of HTTP files: {num_http_files}
                    Locally built wheels location: {wheels_cache_location}
                    Locally built wheels size: {wheels_cache_size}
                    Number of locally built wheels: {package_count}
                """
            )
            .format(
                http_cache_location=http_cache_location,
                http_cache_size=http_cache_size,
                num_http_files=num_http_files,
                wheels_cache_location=wheels_cache_location,
                package_count=num_packages,
                wheels_cache_size=wheels_cache_size,
            )
            .strip()
        )

        logger.info(message)

    def list_cache_items(self, options: Values, args: List[Any]) -> None:
        """List cached wheels matching the optional pattern in args."""
        if len(args) > 1:
            raise CommandError("Too many arguments")

        if args:
            pattern = args[0]
        else:
            pattern = "*"

        files = self._find_wheels(options, pattern)
        if options.list_format == "human":
            self.format_for_human(files)
        else:
            self.format_for_abspath(files)

    def format_for_human(self, files: List[str]) -> None:
        """Log a sorted, human-readable listing of cached wheel files."""
        if not files:
            logger.info("No locally built wheels cached.")
            return

        results = []
        for filename in files:
            wheel = os.path.basename(filename)
            size = filesystem.format_file_size(filename)
            results.append(f" - {wheel} ({size})")
        logger.info("Cache contents:\n")
        logger.info("\n".join(sorted(results)))

    def format_for_abspath(self, files: List[str]) -> None:
        """Log one absolute path per line, sorted; nothing when empty."""
        if not files:
            return

        results = []
        for filename in files:
            results.append(filename)

        logger.info("\n".join(sorted(results)))

    def remove_cache_items(self, options: Values, args: List[Any]) -> None:
        """Delete cached files matching the required pattern in args."""
        if len(args) > 1:
            raise CommandError("Too many arguments")

        if not args:
            raise CommandError("Please provide a pattern")

        files = self._find_wheels(options, args[0])

        no_matching_msg = "No matching packages"
        if args[0] == "*":
            # Only fetch http files if no specific pattern given
            files += self._find_http_files(options)
        else:
            # Add the pattern to the log message
            no_matching_msg += ' for pattern "{}"'.format(args[0])

        if not files:
            logger.warning(no_matching_msg)

        for filename in files:
            os.unlink(filename)
            logger.verbose("Removed %s", filename)
        logger.info("Files removed: %s", len(files))

    def purge_cache(self, options: Values, args: List[Any]) -> None:
        """Remove everything from the cache (``pip cache purge``)."""
        if args:
            raise CommandError("Too many arguments")

        # Purging is just removing with the match-everything pattern.
        return self.remove_cache_items(options, ["*"])

    def _cache_dir(self, options: Values, subdir: str) -> str:
        """Return the path of ``subdir`` inside the configured cache dir."""
        return os.path.join(options.cache_dir, subdir)

    def _find_http_files(self, options: Values) -> List[str]:
        """Return all files in the HTTP (index page) cache subdirectory."""
        http_dir = self._cache_dir(options, "http")
        return filesystem.find_files(http_dir, "*")

    def _find_wheels(self, options: Values, pattern: str) -> List[str]:
        """Return cached wheel files whose names match ``pattern``."""
        wheel_dir = self._cache_dir(options, "wheels")

        # The wheel filename format, as specified in PEP 427, is:
        #     {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
        #
        # Additionally, non-alphanumeric values in the distribution are
        # normalized to underscores (_), meaning hyphens can never occur
        # before `-{version}`.
        #
        # Given that information:
        # - If the pattern we're given contains a hyphen (-), the user is
        #   providing at least the version. Thus, we can just append `*.whl`
        #   to match the rest of it.
        # - If the pattern we're given doesn't contain a hyphen (-), the
        #   user is only providing the name. Thus, we append `-*.whl` to
        #   match the hyphen before the version, followed by anything else.
        #
        # PEP 427: https://www.python.org/dev/peps/pep-0427/
        pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")

        return filesystem.find_files(wheel_dir, pattern)
|
.venv/Lib/site-packages/pip/_internal/commands/check.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from optparse import Values
|
| 3 |
+
from typing import List
|
| 4 |
+
|
| 5 |
+
from pip._internal.cli.base_command import Command
|
| 6 |
+
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
| 7 |
+
from pip._internal.operations.check import (
|
| 8 |
+
check_package_set,
|
| 9 |
+
create_package_set_from_installed,
|
| 10 |
+
warn_legacy_versions_and_specifiers,
|
| 11 |
+
)
|
| 12 |
+
from pip._internal.utils.misc import write_output
|
| 13 |
+
|
| 14 |
+
logger = logging.getLogger(__name__)
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""

    usage = """
      %prog [options]"""

    def run(self, options: Values, args: List[str]) -> int:
        """Check the installed package set and report every problem found."""
        package_set, parsing_probs = create_package_set_from_installed()
        warn_legacy_versions_and_specifiers(package_set)
        missing, conflicting = check_package_set(package_set)

        # Dependencies that are required but not installed at all.
        for project_name, deps in missing.items():
            version = package_set[project_name].version
            for dependency in deps:
                write_output(
                    "%s %s requires %s, which is not installed.",
                    project_name,
                    version,
                    dependency[0],
                )

        # Installed dependencies whose version violates a requirement.
        for project_name, clashes in conflicting.items():
            version = package_set[project_name].version
            for dep_name, dep_version, req in clashes:
                write_output(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name,
                    version,
                    req,
                    dep_name,
                    dep_version,
                )

        if missing or conflicting or parsing_probs:
            return ERROR
        write_output("No broken requirements found.")
        return SUCCESS
|
.venv/Lib/site-packages/pip/_internal/commands/completion.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
import textwrap
|
| 3 |
+
from optparse import Values
|
| 4 |
+
from typing import List
|
| 5 |
+
|
| 6 |
+
from pip._internal.cli.base_command import Command
|
| 7 |
+
from pip._internal.cli.status_codes import SUCCESS
|
| 8 |
+
from pip._internal.utils.misc import get_prog
|
| 9 |
+
|
| 10 |
+
# Outer template: wraps a shell-specific script in start/end marker comments
# so an existing block can be located and replaced in a user's rc file.
BASE_COMPLETION = """
# pip {shell} completion start{script}# pip {shell} completion end
"""

# Per-shell completion scripts. Doubled braces ({{ }}) are literal braces;
# single-brace fields ({prog}) are filled in via str.format at emit time,
# and leading indentation is removed with textwrap.dedent by the caller.
COMPLETION_SCRIPTS = {
    "bash": """
        _pip_completion()
        {{
            COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
                           COMP_CWORD=$COMP_CWORD \\
                           PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
        }}
        complete -o default -F _pip_completion {prog}
    """,
    "zsh": """
        #compdef -P pip[0-9.]#
        compadd $( COMP_WORDS="$words[*]" \\
                   COMP_CWORD=$((CURRENT-1)) \\
                   PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
    """,
    "fish": """
        function __fish_complete_pip
            set -lx COMP_WORDS (commandline -o) ""
            set -lx COMP_CWORD ( \\
                math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
            )
            set -lx PIP_AUTO_COMPLETE 1
            string split \\  -- (eval $COMP_WORDS[1])
        end
        complete -fa "(__fish_complete_pip)" -c {prog}
    """,
    "powershell": """
        if ((Test-Path Function:\\TabExpansion) -and -not `
            (Test-Path Function:\\_pip_completeBackup)) {{
            Rename-Item Function:\\TabExpansion _pip_completeBackup
        }}
        function TabExpansion($line, $lastWord) {{
            $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()
            if ($lastBlock.StartsWith("{prog} ")) {{
                $Env:COMP_WORDS=$lastBlock
                $Env:COMP_CWORD=$lastBlock.Split().Length - 1
                $Env:PIP_AUTO_COMPLETE=1
                (& {prog}).Split()
                Remove-Item Env:COMP_WORDS
                Remove-Item Env:COMP_CWORD
                Remove-Item Env:PIP_AUTO_COMPLETE
            }}
            elseif (Test-Path Function:\\_pip_completeBackup) {{
                # Fall back on existing tab expansion
                _pip_completeBackup $line $lastWord
            }}
        }}
    """,
}
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class CompletionCommand(Command):
    """A helper command to be used for command completion."""

    ignore_require_venv = True

    def add_options(self) -> None:
        # One store_const flag per supported shell; they share the "shell"
        # destination, so the last flag on the command line wins.
        for shell_name, short_flag in (
            ("bash", "-b"),
            ("zsh", "-z"),
            ("fish", "-f"),
            ("powershell", "-p"),
        ):
            self.cmd_opts.add_option(
                "--" + shell_name,
                short_flag,
                action="store_const",
                const=shell_name,
                dest="shell",
                help="Emit completion code for " + shell_name,
            )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ["--" + shell for shell in sorted(shells)]

        if options.shell not in shells:
            # No (or unknown) shell selected: explain what must be passed.
            sys.stderr.write(
                "ERROR: You must pass {}\n".format(" or ".join(shell_options))
            )
            return SUCCESS

        script = textwrap.dedent(
            COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())
        )
        print(BASE_COMPLETION.format(script=script, shell=options.shell))
        return SUCCESS
|
.venv/Lib/site-packages/pip/_internal/commands/configuration.py
ADDED
|
@@ -0,0 +1,282 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os
|
| 3 |
+
import subprocess
|
| 4 |
+
from optparse import Values
|
| 5 |
+
from typing import Any, List, Optional
|
| 6 |
+
|
| 7 |
+
from pip._internal.cli.base_command import Command
|
| 8 |
+
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
| 9 |
+
from pip._internal.configuration import (
|
| 10 |
+
Configuration,
|
| 11 |
+
Kind,
|
| 12 |
+
get_configuration_files,
|
| 13 |
+
kinds,
|
| 14 |
+
)
|
| 15 |
+
from pip._internal.exceptions import PipError
|
| 16 |
+
from pip._internal.utils.logging import indent_log
|
| 17 |
+
from pip._internal.utils.misc import get_prog, write_output
|
| 18 |
+
|
| 19 |
+
logger = logging.getLogger(__name__)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class ConfigurationCommand(Command):
    """
    Manage local and global configuration.

    Subcommands:

    - list: List the active configuration (or from the file specified)
    - edit: Edit the configuration file in an editor
    - get: Get the value associated with command.option
    - set: Set the command.option=value
    - unset: Unset the value associated with command.option
    - debug: List the configuration files and values defined under them

    Configuration keys should be dot separated command and option name,
    with the special prefix "global" affecting any command. For example,
    "pip config set global.index-url https://example.org/" would configure
    the index url for all commands, but "pip config set download.timeout 10"
    would configure a 10 second timeout only for "pip download" commands.

    If none of --user, --global and --site are passed, a virtual
    environment configuration file is used if one is active and the file
    exists. Otherwise, all modifications happen to the user file by
    default.
    """

    ignore_require_venv = True
    usage = """
        %prog [<file-option>] list
        %prog [<file-option>] [--editor <editor-path>] edit

        %prog [<file-option>] get command.option
        %prog [<file-option>] set command.option value
        %prog [<file-option>] unset command.option
        %prog [<file-option>] debug
    """

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "--editor",
            dest="editor",
            action="store",
            default=None,
            help=(
                "Editor to use to edit the file. Uses VISUAL or EDITOR "
                "environment variables if not provided."
            ),
        )

        self.cmd_opts.add_option(
            "--global",
            dest="global_file",
            action="store_true",
            default=False,
            help="Use the system-wide configuration file only",
        )

        self.cmd_opts.add_option(
            "--user",
            dest="user_file",
            action="store_true",
            default=False,
            help="Use the user configuration file only",
        )

        self.cmd_opts.add_option(
            "--site",
            dest="site_file",
            action="store_true",
            default=False,
            help="Use the current environment configuration file only",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        # Dispatch table: first positional argument selects the handler.
        handlers = {
            "list": self.list_values,
            "edit": self.open_in_editor,
            "get": self.get_name,
            "set": self.set_name_value,
            "unset": self.unset_name,
            "debug": self.list_config_values,
        }

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Determine which configuration files are to be loaded
        #    Depends on whether the command is modifying.
        try:
            load_only = self._determine_file(
                options, need_value=(action in ["get", "set", "unset", "edit"])
            )
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        # Load a new configuration
        self.configuration = Configuration(
            isolated=options.isolated_mode, load_only=load_only
        )
        self.configuration.load()

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
        """Pick the single config file kind the action should operate on.

        Returns None when no file flag was given and the action does not
        need a specific file; raises PipError if more than one file flag
        was passed.
        """
        file_options = [
            key
            for key, value in (
                (kinds.USER, options.user_file),
                (kinds.GLOBAL, options.global_file),
                (kinds.SITE, options.site_file),
            )
            if value
        ]

        if not file_options:
            if not need_value:
                return None
            # Default to user, unless there's a site file.
            elif any(
                os.path.exists(site_config_file)
                for site_config_file in get_configuration_files()[kinds.SITE]
            ):
                return kinds.SITE
            else:
                return kinds.USER
        elif len(file_options) == 1:
            return file_options[0]

        raise PipError(
            "Need exactly one file to operate upon "
            "(--user, --site, --global) to perform."
        )

    def list_values(self, options: Values, args: List[str]) -> None:
        """Print every key=value pair in the active configuration."""
        self._get_n_args(args, "list", n=0)

        for key, value in sorted(self.configuration.items()):
            write_output("%s=%r", key, value)

    def get_name(self, options: Values, args: List[str]) -> None:
        """Print the value stored under a single dotted key."""
        key = self._get_n_args(args, "get [name]", n=1)
        value = self.configuration.get_value(key)

        write_output("%s", value)

    def set_name_value(self, options: Values, args: List[str]) -> None:
        """Store value under key, then persist the configuration."""
        key, value = self._get_n_args(args, "set [name] [value]", n=2)
        self.configuration.set_value(key, value)

        self._save_configuration()

    def unset_name(self, options: Values, args: List[str]) -> None:
        """Remove key from the configuration, then persist it."""
        key = self._get_n_args(args, "unset [name]", n=1)
        self.configuration.unset_value(key)

        self._save_configuration()

    def list_config_values(self, options: Values, args: List[str]) -> None:
        """List config key-value pairs across different config files"""
        self._get_n_args(args, "debug", n=0)

        self.print_env_var_values()
        # Iterate over config files and print if they exist, and the
        # key-value pairs present in them if they do
        for variant, files in sorted(self.configuration.iter_config_files()):
            write_output("%s:", variant)
            for fname in files:
                with indent_log():
                    file_exists = os.path.exists(fname)
                    write_output("%s, exists: %r", fname, file_exists)
                    if file_exists:
                        self.print_config_file_values(variant)

    def print_config_file_values(self, variant: Kind) -> None:
        """Get key-value pairs from the file of a variant"""
        for name, value in self.configuration.get_values_in_config(variant).items():
            with indent_log():
                write_output("%s: %s", name, value)

    def print_env_var_values(self) -> None:
        """Get key-values pairs present as environment variables"""
        write_output("%s:", "env_var")
        with indent_log():
            for key, value in sorted(self.configuration.get_environ_vars()):
                env_var = f"PIP_{key.upper()}"
                write_output("%s=%r", env_var, value)

    def open_in_editor(self, options: Values, args: List[str]) -> None:
        """Launch the configured editor on the selected config file."""
        editor = self._determine_editor(options)

        fname = self.configuration.get_file_to_edit()
        if fname is None:
            raise PipError("Could not determine appropriate file.")
        elif '"' in fname:
            # This shouldn't happen, unless we see a username like that.
            # If that happens, we'd appreciate a pull request fixing this.
            raise PipError(
                f'Can not open an editor for a file name containing "\n{fname}'
            )

        try:
            # The file name is quoted above, so shell=True is limited to
            # names without embedded double quotes (checked just before).
            subprocess.check_call(f'{editor} "{fname}"', shell=True)
        except FileNotFoundError as e:
            if not e.filename:
                e.filename = editor
            raise
        except subprocess.CalledProcessError as e:
            raise PipError(
                "Editor Subprocess exited with exit code {}".format(e.returncode)
            )

    def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
        """Helper to make sure the command got the right number of arguments"""
        if len(args) != n:
            msg = (
                "Got unexpected number of arguments, expected {}. "
                '(example: "{} config {}")'
            ).format(n, get_prog(), example)
            raise PipError(msg)

        if n == 1:
            return args[0]
        else:
            return args

    def _save_configuration(self) -> None:
        # We successfully ran a modifying command. Need to save the
        # configuration.
        try:
            self.configuration.save()
        except Exception:
            logger.exception(
                "Unable to save configuration. Please report this as a bug."
            )
            raise PipError("Internal Error.")

    def _determine_editor(self, options: Values) -> str:
        # Precedence: --editor flag, then VISUAL, then EDITOR.
        if options.editor is not None:
            return options.editor
        elif "VISUAL" in os.environ:
            return os.environ["VISUAL"]
        elif "EDITOR" in os.environ:
            return os.environ["EDITOR"]
        else:
            raise PipError("Could not determine editor to use.")
|
.venv/Lib/site-packages/pip/_internal/commands/debug.py
ADDED
|
@@ -0,0 +1,199 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib.resources
|
| 2 |
+
import locale
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
import sys
|
| 6 |
+
from optparse import Values
|
| 7 |
+
from types import ModuleType
|
| 8 |
+
from typing import Any, Dict, List, Optional
|
| 9 |
+
|
| 10 |
+
import pip._vendor
|
| 11 |
+
from pip._vendor.certifi import where
|
| 12 |
+
from pip._vendor.packaging.version import parse as parse_version
|
| 13 |
+
|
| 14 |
+
from pip._internal.cli import cmdoptions
|
| 15 |
+
from pip._internal.cli.base_command import Command
|
| 16 |
+
from pip._internal.cli.cmdoptions import make_target_python
|
| 17 |
+
from pip._internal.cli.status_codes import SUCCESS
|
| 18 |
+
from pip._internal.configuration import Configuration
|
| 19 |
+
from pip._internal.metadata import get_environment
|
| 20 |
+
from pip._internal.utils.logging import indent_log
|
| 21 |
+
from pip._internal.utils.misc import get_pip_version
|
| 22 |
+
|
| 23 |
+
logger = logging.getLogger(__name__)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def show_value(name: str, value: Any) -> None:
    """Log a single "name: value" line at INFO level."""
    logger.info("%s: %s", name, value)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def show_sys_implementation() -> None:
    """Log the interpreter implementation name (sys.implementation.name)."""
    logger.info("sys.implementation:")
    with indent_log():
        show_value("name", sys.implementation.name)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def create_vendor_txt_map() -> Dict[str, str]:
    """Parse pip's vendor.txt into a {module name: pinned version} dict."""
    with importlib.resources.open_text("pip._vendor", "vendor.txt") as f:
        # Keep only version-pinning lines ("name==version"); dropping the
        # text after the first space removes any trailing comment.
        pinned_entries = [
            line.strip().split(" ", 1)[0] for line in f if "==" in line
        ]

    # Transform into "module" -> version dict.
    return dict(entry.split("==", 1) for entry in pinned_entries)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def get_module_from_module_name(module_name: str) -> ModuleType:
    """Import and return the vendored module named in vendor.txt."""
    # Module name can be uppercase in vendor.txt for some reason...
    normalized = module_name.lower().replace("-", "_")
    # PATCH: setuptools is actually only pkg_resources.
    if normalized == "setuptools":
        normalized = "pkg_resources"

    __import__(f"pip._vendor.{normalized}", globals(), locals(), level=0)
    return getattr(pip._vendor, normalized)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def get_vendor_version_from_module(module_name: str) -> Optional[str]:
    """Return the version of a vendored module, or None if undeterminable."""
    vendored_module = get_module_from_module_name(module_name)
    version = getattr(vendored_module, "__version__", None)

    if not version:
        # No __version__ attribute: fall back to distribution metadata
        # located next to the module file (debundled installs).
        assert vendored_module.__file__ is not None
        env = get_environment([os.path.dirname(vendored_module.__file__)])
        dist = env.get_distribution(module_name)
        if dist:
            version = str(dist.version)

    return version
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
    """Log the actual version and print extra info if there is
    a conflict or if the actual version could not be imported.
    """
    for module_name, expected_version in vendor_txt_versions.items():
        extra_message = ""
        actual_version = get_vendor_version_from_module(module_name)
        if not actual_version:
            # Could not determine the installed version; trust vendor.txt.
            extra_message = (
                " (Unable to locate actual module version, using"
                " vendor.txt specified version)"
            )
            actual_version = expected_version
        elif parse_version(actual_version) != parse_version(expected_version):
            # Installed version disagrees with the pin in vendor.txt.
            extra_message = (
                " (CONFLICT: vendor.txt suggests version should"
                " be {})".format(expected_version)
            )
        logger.info("%s==%s%s", module_name, actual_version, extra_message)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def show_vendor_versions() -> None:
    """Log vendor.txt pins alongside the versions actually importable."""
    logger.info("vendored library versions:")

    vendor_txt_versions = create_vendor_txt_map()
    with indent_log():
        show_actual_vendor_versions(vendor_txt_versions)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def show_tags(options: Values) -> None:
    """Log the compatible wheel tags for the (possibly overridden) target."""
    tag_limit = 10

    target_python = make_target_python(options)
    tags = target_python.get_tags()

    # Display the target options that were explicitly provided.
    formatted_target = target_python.format_given()
    suffix = f" (target: {formatted_target})" if formatted_target else ""
    logger.info("Compatible tags: {}{}".format(len(tags), suffix))

    # Without --verbose, only the first tag_limit tags are printed.
    tags_limited = options.verbose < 1 and len(tags) > tag_limit
    if tags_limited:
        tags = tags[:tag_limit]

    with indent_log():
        for tag in tags:
            logger.info(str(tag))

        if tags_limited:
            logger.info(
                (
                    "...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
                ).format(tag_limit=tag_limit)
            )
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def ca_bundle_info(config: Configuration) -> str:
    """Describe which configuration level(s) define a 'cert' setting."""
    # The level/section is the first dot-separated component of each key.
    levels = {key.split(".")[0] for key, _ in config.items()}

    if not levels:
        return "Not specified"

    # These per-command sections take precedence over "global".
    overriding = [
        level for level in levels if level in ("install", "wheel", "download")
    ]
    if not overriding:
        return "global"

    levels.discard("global")
    return ", ".join(levels)
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
class DebugCommand(Command):
    """
    Display debug information.
    """

    usage = """
      %prog <options>"""
    ignore_require_venv = True

    def add_options(self) -> None:
        cmdoptions.add_target_python_options(self.cmd_opts)
        self.parser.insert_option_group(0, self.cmd_opts)
        # Load the configuration now so run() can report the 'cert' value.
        self.parser.config.load()

    def run(self, options: Values, args: List[str]) -> int:
        logger.warning(
            "This command is only meant for debugging. "
            "Do not use this with automation for parsing and getting these "
            "details, since the output and options of this command may "
            "change without notice."
        )
        # Interpreter, encoding and platform details.
        show_value("pip version", get_pip_version())
        show_value("sys.version", sys.version)
        show_value("sys.executable", sys.executable)
        show_value("sys.getdefaultencoding", sys.getdefaultencoding())
        show_value("sys.getfilesystemencoding", sys.getfilesystemencoding())
        show_value(
            "locale.getpreferredencoding",
            locale.getpreferredencoding(),
        )
        show_value("sys.platform", sys.platform)
        show_sys_implementation()

        # TLS/CA-bundle related configuration and environment.
        show_value("'cert' config value", ca_bundle_info(self.parser.config))
        show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE"))
        show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE"))
        show_value("pip._vendor.certifi.where()", where())
        show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)

        show_vendor_versions()

        show_tags(options)

        return SUCCESS
|
.venv/Lib/site-packages/pip/_internal/commands/download.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os
|
| 3 |
+
from optparse import Values
|
| 4 |
+
from typing import List
|
| 5 |
+
|
| 6 |
+
from pip._internal.cli import cmdoptions
|
| 7 |
+
from pip._internal.cli.cmdoptions import make_target_python
|
| 8 |
+
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
|
| 9 |
+
from pip._internal.cli.status_codes import SUCCESS
|
| 10 |
+
from pip._internal.operations.build.build_tracker import get_build_tracker
|
| 11 |
+
from pip._internal.req.req_install import check_legacy_setup_py_options
|
| 12 |
+
from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
|
| 13 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
| 14 |
+
|
| 15 |
+
logger = logging.getLogger(__name__)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def add_options(self) -> None:
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.global_options())
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
        self.cmd_opts.add_option(cmdoptions.prefer_binary())
        self.cmd_opts.add_option(cmdoptions.src())
        self.cmd_opts.add_option(cmdoptions.pre())
        self.cmd_opts.add_option(cmdoptions.require_hashes())
        self.cmd_opts.add_option(cmdoptions.progress_bar())
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.check_build_deps())
        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())

        self.cmd_opts.add_option(
            "-d",
            "--dest",
            "--destination-dir",
            "--destination-directory",
            dest="download_dir",
            metavar="dir",
            default=os.curdir,
            help="Download packages into <dir>.",
        )

        cmdoptions.add_target_python_options(self.cmd_opts)

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:
        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
        options.editables = []

        cmdoptions.check_dist_restriction(options)

        options.download_dir = normalize_path(options.download_dir)
        ensure_dir(options.download_dir)

        session = self.get_default_session(options)

        target_python = make_target_python(options)
        finder = self._build_package_finder(
            options=options,
            session=session,
            target_python=target_python,
            ignore_requires_python=options.ignore_requires_python,
        )

        build_tracker = self.enter_context(get_build_tracker())

        # Temp dir for builds; cleaned up globally unless --no-clean.
        directory = TempDirectory(
            delete=not options.no_clean,
            kind="download",
            globally_managed=True,
        )

        reqs = self.get_requirements(args, options, finder, session)
        check_legacy_setup_py_options(options, reqs)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            build_tracker=build_tracker,
            session=session,
            finder=finder,
            download_dir=options.download_dir,
            use_user_site=False,
            verbosity=self.verbosity,
        )

        resolver = self.make_resolver(
            preparer=preparer,
            finder=finder,
            options=options,
            ignore_requires_python=options.ignore_requires_python,
            use_pep517=options.use_pep517,
            py_version_info=options.python_version,
        )

        self.trace_basic_info(finder)

        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

        # Collect the names of requirements not already satisfied locally;
        # those are the ones that were actually downloaded.
        downloaded: List[str] = []
        for req in requirement_set.requirements.values():
            if req.satisfied_by is None:
                assert req.name is not None
                preparer.save_linked_requirement(req)
                downloaded.append(req.name)

        preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
        requirement_set.warn_legacy_versions_and_specifiers()

        if downloaded:
            write_output("Successfully downloaded %s", " ".join(downloaded))

        return SUCCESS
|
.venv/Lib/site-packages/pip/_internal/commands/freeze.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
from optparse import Values
|
| 3 |
+
from typing import AbstractSet, List
|
| 4 |
+
|
| 5 |
+
from pip._internal.cli import cmdoptions
|
| 6 |
+
from pip._internal.cli.base_command import Command
|
| 7 |
+
from pip._internal.cli.status_codes import SUCCESS
|
| 8 |
+
from pip._internal.operations.freeze import freeze
|
| 9 |
+
from pip._internal.utils.compat import stdlib_pkgs
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def _should_suppress_build_backends() -> bool:
|
| 13 |
+
return sys.version_info < (3, 12)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def _dev_pkgs() -> AbstractSet[str]:
    """Names of bootstrap packages that ``pip freeze`` hides by default."""
    suppressed = {"pip"}

    # Build backends are only hidden on interpreters where they were
    # pre-installed (see _should_suppress_build_backends).
    if _should_suppress_build_backends():
        suppressed.update(("setuptools", "distribute", "wheel"))

    return suppressed
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    """

    usage = """
      %prog [options]"""
    # Route both log streams to stderr so stdout carries only the
    # requirements-format output and stays safe to redirect to a file.
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def add_options(self) -> None:
        """Register the command-line options accepted by ``pip freeze``."""
        self.cmd_opts.add_option(
            "-r",
            "--requirement",
            dest="requirements",
            action="append",
            default=[],
            metavar="file",
            help=(
                "Use the order in the given requirements file and its "
                "comments when generating output. This option can be "
                "used multiple times."
            ),
        )
        self.cmd_opts.add_option(
            "-l",
            "--local",
            dest="local",
            action="store_true",
            default=False,
            help=(
                "If in a virtualenv that has global access, do not output "
                "globally-installed packages."
            ),
        )
        self.cmd_opts.add_option(
            "--user",
            dest="user",
            action="store_true",
            default=False,
            help="Only output packages installed in user-site.",
        )
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.cmd_opts.add_option(
            "--all",
            dest="freeze_all",
            action="store_true",
            help=(
                "Do not skip these packages in the output:"
                " {}".format(", ".join(_dev_pkgs()))
            ),
        )
        self.cmd_opts.add_option(
            "--exclude-editable",
            dest="exclude_editable",
            action="store_true",
            help="Exclude editable package from output.",
        )
        self.cmd_opts.add_option(cmdoptions.list_exclude())

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Write one requirement line per installed distribution to stdout.

        Always returns SUCCESS; validation errors are raised by
        ``check_list_path_option``.
        """
        # Always hide stdlib packages; also hide pip/build backends
        # unless the user passed --all.
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(_dev_pkgs())

        if options.excludes:
            skip.update(options.excludes)

        cmdoptions.check_list_path_option(options)

        for line in freeze(
            requirement=options.requirements,
            local_only=options.local,
            user_only=options.user,
            paths=options.path,
            isolated=options.isolated_mode,
            skip=skip,
            exclude_editable=options.exclude_editable,
        ):
            # Write directly to stdout (bypassing logging) so the output is
            # exactly one requirement per line.
            sys.stdout.write(line + "\n")
        return SUCCESS
|
.venv/Lib/site-packages/pip/_internal/commands/hash.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import hashlib
|
| 2 |
+
import logging
|
| 3 |
+
import sys
|
| 4 |
+
from optparse import Values
|
| 5 |
+
from typing import List
|
| 6 |
+
|
| 7 |
+
from pip._internal.cli.base_command import Command
|
| 8 |
+
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
| 9 |
+
from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
|
| 10 |
+
from pip._internal.utils.misc import read_chunks, write_output
|
| 11 |
+
|
| 12 |
+
logger = logging.getLogger(__name__)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.
    """

    usage = "%prog [options] <file> ..."
    # Hashing local files never needs a virtualenv.
    ignore_require_venv = True

    def add_options(self) -> None:
        """Register the ``-a``/``--algorithm`` option."""
        self.cmd_opts.add_option(
            "-a",
            "--algorithm",
            dest="algorithm",
            choices=STRONG_HASHES,
            action="store",
            default=FAVORITE_HASH,
            help="The hash algorithm to use: one of {}".format(
                ", ".join(STRONG_HASHES)
            ),
        )
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Hash each file given on the command line.

        Returns ERROR when no file arguments were supplied, otherwise
        SUCCESS.
        """
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        algorithm = options.algorithm
        for path in args:
            # Emit the path plus a line ready to paste into a requirements
            # file after a ``--hash=`` marker.
            write_output(
                "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm)
            )
        return SUCCESS
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def _hash_of_file(path: str, algorithm: str) -> str:
    """Return the hex digest of the file at *path*.

    :param path: filesystem path of the file to hash.
    :param algorithm: algorithm name accepted by ``hashlib.new`` (one of
        STRONG_HASHES).
    """
    with open(path, "rb") as archive:
        # "digest" rather than "hash": avoid shadowing the builtin ``hash``.
        digest = hashlib.new(algorithm)
        # Stream in chunks so large archives are never loaded whole.
        for chunk in read_chunks(archive):
            digest.update(chunk)
    return digest.hexdigest()
|
.venv/Lib/site-packages/pip/_internal/commands/help.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from optparse import Values
|
| 2 |
+
from typing import List
|
| 3 |
+
|
| 4 |
+
from pip._internal.cli.base_command import Command
|
| 5 |
+
from pip._internal.cli.status_codes import SUCCESS
|
| 6 |
+
from pip._internal.exceptions import CommandError
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class HelpCommand(Command):
    """Show help for commands"""

    usage = """
      %prog <command>"""
    ignore_require_venv = True

    def run(self, options: Values, args: List[str]) -> int:
        """Print the help screen for the requested subcommand."""
        # Imported lazily to avoid a circular import at module load time.
        from pip._internal.commands import (
            commands_dict,
            create_command,
            get_similar_commands,
        )

        # 'pip help' with no args is handled by pip.__init__.parseopt()
        if not args:
            return SUCCESS
        cmd_name = args[0]  # the command we need help for

        if cmd_name not in commands_dict:
            parts = [f'unknown command "{cmd_name}"']
            suggestion = get_similar_commands(cmd_name)
            if suggestion:
                parts.append(f'maybe you meant "{suggestion}"')
            raise CommandError(" - ".join(parts))

        create_command(cmd_name).parser.print_help()

        return SUCCESS
|
.venv/Lib/site-packages/pip/_internal/commands/index.py
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from optparse import Values
|
| 3 |
+
from typing import Any, Iterable, List, Optional, Union
|
| 4 |
+
|
| 5 |
+
from pip._vendor.packaging.version import LegacyVersion, Version
|
| 6 |
+
|
| 7 |
+
from pip._internal.cli import cmdoptions
|
| 8 |
+
from pip._internal.cli.req_command import IndexGroupCommand
|
| 9 |
+
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
| 10 |
+
from pip._internal.commands.search import print_dist_installation_info
|
| 11 |
+
from pip._internal.exceptions import CommandError, DistributionNotFound, PipError
|
| 12 |
+
from pip._internal.index.collector import LinkCollector
|
| 13 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 14 |
+
from pip._internal.models.selection_prefs import SelectionPreferences
|
| 15 |
+
from pip._internal.models.target_python import TargetPython
|
| 16 |
+
from pip._internal.network.session import PipSession
|
| 17 |
+
from pip._internal.utils.misc import write_output
|
| 18 |
+
|
| 19 |
+
logger = logging.getLogger(__name__)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class IndexCommand(IndexGroupCommand):
    """
    Inspect information available from package indexes.
    """

    ignore_require_venv = True
    usage = """
        %prog versions <package>
    """

    def add_options(self) -> None:
        """Register target-python, selection, and index options."""
        cmdoptions.add_target_python_options(self.cmd_opts)

        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
        self.cmd_opts.add_option(cmdoptions.pre())
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Dispatch to the requested sub-action (currently only "versions").

        Returns ERROR for an unknown/missing action or a PipError raised by
        the handler; SUCCESS otherwise.
        """
        # Map sub-action name -> handler method.
        handlers = {
            "versions": self.get_available_package_versions,
        }

        logger.warning(
            "pip index is currently an experimental command. "
            "It may be removed/changed in a future release "
            "without prior warning."
        )

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _build_package_finder(
        self,
        options: Values,
        session: PipSession,
        target_python: Optional[TargetPython] = None,
        ignore_requires_python: Optional[bool] = None,
    ) -> PackageFinder:
        """
        Create a package finder appropriate to the index command.
        """
        link_collector = LinkCollector.create(session, options=options)

        # Pass allow_yanked=False to ignore yanked versions.
        selection_prefs = SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=options.pre,
            ignore_requires_python=ignore_requires_python,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
            target_python=target_python,
        )

    def get_available_package_versions(self, options: Values, args: List[Any]) -> None:
        """Print all index versions of the single package named in *args*.

        Raises CommandError for a wrong argument count and
        DistributionNotFound when no candidate matches.
        """
        if len(args) != 1:
            raise CommandError("You need to specify exactly one argument")

        target_python = cmdoptions.make_target_python(options)
        query = args[0]

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                target_python=target_python,
                ignore_requires_python=options.ignore_requires_python,
            )

            versions: Iterable[Union[LegacyVersion, Version]] = (
                candidate.version for candidate in finder.find_all_candidates(query)
            )

            if not options.pre:
                # Remove prereleases
                versions = (
                    version for version in versions if not version.is_prerelease
                )
            # Materialize (and de-duplicate) while the session is still open,
            # since the generators above lazily hit the network.
            versions = set(versions)

            if not versions:
                raise DistributionNotFound(
                    "No matching distribution found for {}".format(query)
                )

            formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
            latest = formatted_versions[0]

        write_output("{} ({})".format(query, latest))
        write_output("Available versions: {}".format(", ".join(formatted_versions)))
        print_dist_installation_info(query, latest)
|
.venv/Lib/site-packages/pip/_internal/commands/inspect.py
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from optparse import Values
|
| 3 |
+
from typing import Any, Dict, List
|
| 4 |
+
|
| 5 |
+
from pip._vendor.packaging.markers import default_environment
|
| 6 |
+
from pip._vendor.rich import print_json
|
| 7 |
+
|
| 8 |
+
from pip import __version__
|
| 9 |
+
from pip._internal.cli import cmdoptions
|
| 10 |
+
from pip._internal.cli.req_command import Command
|
| 11 |
+
from pip._internal.cli.status_codes import SUCCESS
|
| 12 |
+
from pip._internal.metadata import BaseDistribution, get_environment
|
| 13 |
+
from pip._internal.utils.compat import stdlib_pkgs
|
| 14 |
+
from pip._internal.utils.urls import path_to_url
|
| 15 |
+
|
| 16 |
+
logger = logging.getLogger(__name__)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class InspectCommand(Command):
    """
    Inspect the content of a Python environment and produce a report in JSON format.
    """

    ignore_require_venv = True
    usage = """
      %prog [options]"""

    def add_options(self) -> None:
        """Register the --local, --user and --path options."""
        self.cmd_opts.add_option(
            "--local",
            action="store_true",
            default=False,
            help=(
                "If in a virtualenv that has global access, do not list "
                "globally-installed packages."
            ),
        )
        self.cmd_opts.add_option(
            "--user",
            dest="user",
            action="store_true",
            default=False,
            help="Only output packages installed in user-site.",
        )
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Collect the installed distributions and print the JSON report."""
        cmdoptions.check_list_path_option(options)
        dists = get_environment(options.path).iter_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            skip=set(stdlib_pkgs),
        )
        output = {
            "version": "1",
            "pip_version": __version__,
            "installed": [self._dist_to_dict(dist) for dist in dists],
            "environment": default_environment(),
            # TODO tags? scheme?
        }
        print_json(data=output)
        return SUCCESS

    def _dist_to_dict(self, dist: BaseDistribution) -> Dict[str, Any]:
        """Serialize one installed distribution for the JSON report."""
        res: Dict[str, Any] = {
            "metadata": dist.metadata_dict,
            "metadata_location": dist.info_location,
        }
        # direct_url. Note that we don't have download_info (as in the installation
        # report) since it is not recorded in installed metadata.
        direct_url = dist.direct_url
        if direct_url is not None:
            res["direct_url"] = direct_url.to_dict()
        else:
            # Emulate direct_url for legacy editable installs.
            editable_project_location = dist.editable_project_location
            if editable_project_location is not None:
                res["direct_url"] = {
                    "url": path_to_url(editable_project_location),
                    "dir_info": {
                        "editable": True,
                    },
                }
        # installer: reuse the value read above instead of re-reading the
        # property (the original tested ``dist.installer`` a second time).
        installer = dist.installer
        if installer:
            res["installer"] = installer
        # requested
        if dist.installed_with_dist_info:
            res["requested"] = dist.requested
        return res
|
.venv/Lib/site-packages/pip/_internal/commands/install.py
ADDED
|
@@ -0,0 +1,778 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import errno
|
| 2 |
+
import json
|
| 3 |
+
import operator
|
| 4 |
+
import os
|
| 5 |
+
import shutil
|
| 6 |
+
import site
|
| 7 |
+
from optparse import SUPPRESS_HELP, Values
|
| 8 |
+
from typing import List, Optional
|
| 9 |
+
|
| 10 |
+
from pip._vendor.rich import print_json
|
| 11 |
+
|
| 12 |
+
from pip._internal.cache import WheelCache
|
| 13 |
+
from pip._internal.cli import cmdoptions
|
| 14 |
+
from pip._internal.cli.cmdoptions import make_target_python
|
| 15 |
+
from pip._internal.cli.req_command import (
|
| 16 |
+
RequirementCommand,
|
| 17 |
+
warn_if_run_as_root,
|
| 18 |
+
with_cleanup,
|
| 19 |
+
)
|
| 20 |
+
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
| 21 |
+
from pip._internal.exceptions import CommandError, InstallationError
|
| 22 |
+
from pip._internal.locations import get_scheme
|
| 23 |
+
from pip._internal.metadata import get_environment
|
| 24 |
+
from pip._internal.models.installation_report import InstallationReport
|
| 25 |
+
from pip._internal.operations.build.build_tracker import get_build_tracker
|
| 26 |
+
from pip._internal.operations.check import ConflictDetails, check_install_conflicts
|
| 27 |
+
from pip._internal.req import install_given_reqs
|
| 28 |
+
from pip._internal.req.req_install import (
|
| 29 |
+
InstallRequirement,
|
| 30 |
+
check_legacy_setup_py_options,
|
| 31 |
+
)
|
| 32 |
+
from pip._internal.utils.compat import WINDOWS
|
| 33 |
+
from pip._internal.utils.filesystem import test_writable_dir
|
| 34 |
+
from pip._internal.utils.logging import getLogger
|
| 35 |
+
from pip._internal.utils.misc import (
|
| 36 |
+
check_externally_managed,
|
| 37 |
+
ensure_dir,
|
| 38 |
+
get_pip_version,
|
| 39 |
+
protect_pip_from_modification_on_windows,
|
| 40 |
+
write_output,
|
| 41 |
+
)
|
| 42 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
| 43 |
+
from pip._internal.utils.virtualenv import (
|
| 44 |
+
running_under_virtualenv,
|
| 45 |
+
virtualenv_no_global,
|
| 46 |
+
)
|
| 47 |
+
from pip._internal.wheel_builder import build, should_build_for_install_command
|
| 48 |
+
|
| 49 |
+
logger = getLogger(__name__)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class InstallCommand(RequirementCommand):
|
| 53 |
+
"""
|
| 54 |
+
Install packages from:
|
| 55 |
+
|
| 56 |
+
- PyPI (and other indexes) using requirement specifiers.
|
| 57 |
+
- VCS project urls.
|
| 58 |
+
- Local project directories.
|
| 59 |
+
- Local or remote source archives.
|
| 60 |
+
|
| 61 |
+
pip also supports installing from "requirements files", which provide
|
| 62 |
+
an easy way to specify a whole environment to be installed.
|
| 63 |
+
"""
|
| 64 |
+
|
| 65 |
+
usage = """
|
| 66 |
+
%prog [options] <requirement specifier> [package-index-options] ...
|
| 67 |
+
%prog [options] -r <requirements file> [package-index-options] ...
|
| 68 |
+
%prog [options] [-e] <vcs project url> ...
|
| 69 |
+
%prog [options] [-e] <local project path> ...
|
| 70 |
+
%prog [options] <archive url/path> ..."""
|
| 71 |
+
|
| 72 |
+
def add_options(self) -> None:
|
| 73 |
+
self.cmd_opts.add_option(cmdoptions.requirements())
|
| 74 |
+
self.cmd_opts.add_option(cmdoptions.constraints())
|
| 75 |
+
self.cmd_opts.add_option(cmdoptions.no_deps())
|
| 76 |
+
self.cmd_opts.add_option(cmdoptions.pre())
|
| 77 |
+
|
| 78 |
+
self.cmd_opts.add_option(cmdoptions.editable())
|
| 79 |
+
self.cmd_opts.add_option(
|
| 80 |
+
"--dry-run",
|
| 81 |
+
action="store_true",
|
| 82 |
+
dest="dry_run",
|
| 83 |
+
default=False,
|
| 84 |
+
help=(
|
| 85 |
+
"Don't actually install anything, just print what would be. "
|
| 86 |
+
"Can be used in combination with --ignore-installed "
|
| 87 |
+
"to 'resolve' the requirements."
|
| 88 |
+
),
|
| 89 |
+
)
|
| 90 |
+
self.cmd_opts.add_option(
|
| 91 |
+
"-t",
|
| 92 |
+
"--target",
|
| 93 |
+
dest="target_dir",
|
| 94 |
+
metavar="dir",
|
| 95 |
+
default=None,
|
| 96 |
+
help=(
|
| 97 |
+
"Install packages into <dir>. "
|
| 98 |
+
"By default this will not replace existing files/folders in "
|
| 99 |
+
"<dir>. Use --upgrade to replace existing packages in <dir> "
|
| 100 |
+
"with new versions."
|
| 101 |
+
),
|
| 102 |
+
)
|
| 103 |
+
cmdoptions.add_target_python_options(self.cmd_opts)
|
| 104 |
+
|
| 105 |
+
self.cmd_opts.add_option(
|
| 106 |
+
"--user",
|
| 107 |
+
dest="use_user_site",
|
| 108 |
+
action="store_true",
|
| 109 |
+
help=(
|
| 110 |
+
"Install to the Python user install directory for your "
|
| 111 |
+
"platform. Typically ~/.local/, or %APPDATA%\\Python on "
|
| 112 |
+
"Windows. (See the Python documentation for site.USER_BASE "
|
| 113 |
+
"for full details.)"
|
| 114 |
+
),
|
| 115 |
+
)
|
| 116 |
+
self.cmd_opts.add_option(
|
| 117 |
+
"--no-user",
|
| 118 |
+
dest="use_user_site",
|
| 119 |
+
action="store_false",
|
| 120 |
+
help=SUPPRESS_HELP,
|
| 121 |
+
)
|
| 122 |
+
self.cmd_opts.add_option(
|
| 123 |
+
"--root",
|
| 124 |
+
dest="root_path",
|
| 125 |
+
metavar="dir",
|
| 126 |
+
default=None,
|
| 127 |
+
help="Install everything relative to this alternate root directory.",
|
| 128 |
+
)
|
| 129 |
+
self.cmd_opts.add_option(
|
| 130 |
+
"--prefix",
|
| 131 |
+
dest="prefix_path",
|
| 132 |
+
metavar="dir",
|
| 133 |
+
default=None,
|
| 134 |
+
help=(
|
| 135 |
+
"Installation prefix where lib, bin and other top-level "
|
| 136 |
+
"folders are placed. Note that the resulting installation may "
|
| 137 |
+
"contain scripts and other resources which reference the "
|
| 138 |
+
"Python interpreter of pip, and not that of ``--prefix``. "
|
| 139 |
+
"See also the ``--python`` option if the intention is to "
|
| 140 |
+
"install packages into another (possibly pip-free) "
|
| 141 |
+
"environment."
|
| 142 |
+
),
|
| 143 |
+
)
|
| 144 |
+
|
| 145 |
+
self.cmd_opts.add_option(cmdoptions.src())
|
| 146 |
+
|
| 147 |
+
self.cmd_opts.add_option(
|
| 148 |
+
"-U",
|
| 149 |
+
"--upgrade",
|
| 150 |
+
dest="upgrade",
|
| 151 |
+
action="store_true",
|
| 152 |
+
help=(
|
| 153 |
+
"Upgrade all specified packages to the newest available "
|
| 154 |
+
"version. The handling of dependencies depends on the "
|
| 155 |
+
"upgrade-strategy used."
|
| 156 |
+
),
|
| 157 |
+
)
|
| 158 |
+
|
| 159 |
+
self.cmd_opts.add_option(
|
| 160 |
+
"--upgrade-strategy",
|
| 161 |
+
dest="upgrade_strategy",
|
| 162 |
+
default="only-if-needed",
|
| 163 |
+
choices=["only-if-needed", "eager"],
|
| 164 |
+
help=(
|
| 165 |
+
"Determines how dependency upgrading should be handled "
|
| 166 |
+
"[default: %default]. "
|
| 167 |
+
'"eager" - dependencies are upgraded regardless of '
|
| 168 |
+
"whether the currently installed version satisfies the "
|
| 169 |
+
"requirements of the upgraded package(s). "
|
| 170 |
+
'"only-if-needed" - are upgraded only when they do not '
|
| 171 |
+
"satisfy the requirements of the upgraded package(s)."
|
| 172 |
+
),
|
| 173 |
+
)
|
| 174 |
+
|
| 175 |
+
self.cmd_opts.add_option(
|
| 176 |
+
"--force-reinstall",
|
| 177 |
+
dest="force_reinstall",
|
| 178 |
+
action="store_true",
|
| 179 |
+
help="Reinstall all packages even if they are already up-to-date.",
|
| 180 |
+
)
|
| 181 |
+
|
| 182 |
+
self.cmd_opts.add_option(
|
| 183 |
+
"-I",
|
| 184 |
+
"--ignore-installed",
|
| 185 |
+
dest="ignore_installed",
|
| 186 |
+
action="store_true",
|
| 187 |
+
help=(
|
| 188 |
+
"Ignore the installed packages, overwriting them. "
|
| 189 |
+
"This can break your system if the existing package "
|
| 190 |
+
"is of a different version or was installed "
|
| 191 |
+
"with a different package manager!"
|
| 192 |
+
),
|
| 193 |
+
)
|
| 194 |
+
|
| 195 |
+
self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
|
| 196 |
+
self.cmd_opts.add_option(cmdoptions.no_build_isolation())
|
| 197 |
+
self.cmd_opts.add_option(cmdoptions.use_pep517())
|
| 198 |
+
self.cmd_opts.add_option(cmdoptions.no_use_pep517())
|
| 199 |
+
self.cmd_opts.add_option(cmdoptions.check_build_deps())
|
| 200 |
+
self.cmd_opts.add_option(cmdoptions.override_externally_managed())
|
| 201 |
+
|
| 202 |
+
self.cmd_opts.add_option(cmdoptions.config_settings())
|
| 203 |
+
self.cmd_opts.add_option(cmdoptions.global_options())
|
| 204 |
+
|
| 205 |
+
self.cmd_opts.add_option(
|
| 206 |
+
"--compile",
|
| 207 |
+
action="store_true",
|
| 208 |
+
dest="compile",
|
| 209 |
+
default=True,
|
| 210 |
+
help="Compile Python source files to bytecode",
|
| 211 |
+
)
|
| 212 |
+
|
| 213 |
+
self.cmd_opts.add_option(
|
| 214 |
+
"--no-compile",
|
| 215 |
+
action="store_false",
|
| 216 |
+
dest="compile",
|
| 217 |
+
help="Do not compile Python source files to bytecode",
|
| 218 |
+
)
|
| 219 |
+
|
| 220 |
+
self.cmd_opts.add_option(
|
| 221 |
+
"--no-warn-script-location",
|
| 222 |
+
action="store_false",
|
| 223 |
+
dest="warn_script_location",
|
| 224 |
+
default=True,
|
| 225 |
+
help="Do not warn when installing scripts outside PATH",
|
| 226 |
+
)
|
| 227 |
+
self.cmd_opts.add_option(
|
| 228 |
+
"--no-warn-conflicts",
|
| 229 |
+
action="store_false",
|
| 230 |
+
dest="warn_about_conflicts",
|
| 231 |
+
default=True,
|
| 232 |
+
help="Do not warn about broken dependencies",
|
| 233 |
+
)
|
| 234 |
+
self.cmd_opts.add_option(cmdoptions.no_binary())
|
| 235 |
+
self.cmd_opts.add_option(cmdoptions.only_binary())
|
| 236 |
+
self.cmd_opts.add_option(cmdoptions.prefer_binary())
|
| 237 |
+
self.cmd_opts.add_option(cmdoptions.require_hashes())
|
| 238 |
+
self.cmd_opts.add_option(cmdoptions.progress_bar())
|
| 239 |
+
self.cmd_opts.add_option(cmdoptions.root_user_action())
|
| 240 |
+
|
| 241 |
+
index_opts = cmdoptions.make_option_group(
|
| 242 |
+
cmdoptions.index_group,
|
| 243 |
+
self.parser,
|
| 244 |
+
)
|
| 245 |
+
|
| 246 |
+
self.parser.insert_option_group(0, index_opts)
|
| 247 |
+
self.parser.insert_option_group(0, self.cmd_opts)
|
| 248 |
+
|
| 249 |
+
self.cmd_opts.add_option(
|
| 250 |
+
"--report",
|
| 251 |
+
dest="json_report_file",
|
| 252 |
+
metavar="file",
|
| 253 |
+
default=None,
|
| 254 |
+
help=(
|
| 255 |
+
"Generate a JSON file describing what pip did to install "
|
| 256 |
+
"the provided requirements. "
|
| 257 |
+
"Can be used in combination with --dry-run and --ignore-installed "
|
| 258 |
+
"to 'resolve' the requirements. "
|
| 259 |
+
"When - is used as file name it writes to stdout. "
|
| 260 |
+
"When writing to stdout, please combine with the --quiet option "
|
| 261 |
+
"to avoid mixing pip logging output with JSON output."
|
| 262 |
+
),
|
| 263 |
+
)
|
| 264 |
+
|
| 265 |
+
@with_cleanup
|
| 266 |
+
def run(self, options: Values, args: List[str]) -> int:
|
| 267 |
+
if options.use_user_site and options.target_dir is not None:
|
| 268 |
+
raise CommandError("Can not combine '--user' and '--target'")
|
| 269 |
+
|
| 270 |
+
# Check whether the environment we're installing into is externally
|
| 271 |
+
# managed, as specified in PEP 668. Specifying --root, --target, or
|
| 272 |
+
# --prefix disables the check, since there's no reliable way to locate
|
| 273 |
+
# the EXTERNALLY-MANAGED file for those cases. An exception is also
|
| 274 |
+
# made specifically for "--dry-run --report" for convenience.
|
| 275 |
+
installing_into_current_environment = (
|
| 276 |
+
not (options.dry_run and options.json_report_file)
|
| 277 |
+
and options.root_path is None
|
| 278 |
+
and options.target_dir is None
|
| 279 |
+
and options.prefix_path is None
|
| 280 |
+
)
|
| 281 |
+
if (
|
| 282 |
+
installing_into_current_environment
|
| 283 |
+
and not options.override_externally_managed
|
| 284 |
+
):
|
| 285 |
+
check_externally_managed()
|
| 286 |
+
|
| 287 |
+
upgrade_strategy = "to-satisfy-only"
|
| 288 |
+
if options.upgrade:
|
| 289 |
+
upgrade_strategy = options.upgrade_strategy
|
| 290 |
+
|
| 291 |
+
cmdoptions.check_dist_restriction(options, check_target=True)
|
| 292 |
+
|
| 293 |
+
logger.verbose("Using %s", get_pip_version())
|
| 294 |
+
options.use_user_site = decide_user_install(
|
| 295 |
+
options.use_user_site,
|
| 296 |
+
prefix_path=options.prefix_path,
|
| 297 |
+
target_dir=options.target_dir,
|
| 298 |
+
root_path=options.root_path,
|
| 299 |
+
isolated_mode=options.isolated_mode,
|
| 300 |
+
)
|
| 301 |
+
|
| 302 |
+
target_temp_dir: Optional[TempDirectory] = None
|
| 303 |
+
target_temp_dir_path: Optional[str] = None
|
| 304 |
+
if options.target_dir:
|
| 305 |
+
options.ignore_installed = True
|
| 306 |
+
options.target_dir = os.path.abspath(options.target_dir)
|
| 307 |
+
if (
|
| 308 |
+
# fmt: off
|
| 309 |
+
os.path.exists(options.target_dir) and
|
| 310 |
+
not os.path.isdir(options.target_dir)
|
| 311 |
+
# fmt: on
|
| 312 |
+
):
|
| 313 |
+
raise CommandError(
|
| 314 |
+
"Target path exists but is not a directory, will not continue."
|
| 315 |
+
)
|
| 316 |
+
|
| 317 |
+
# Create a target directory for using with the target option
|
| 318 |
+
target_temp_dir = TempDirectory(kind="target")
|
| 319 |
+
target_temp_dir_path = target_temp_dir.path
|
| 320 |
+
self.enter_context(target_temp_dir)
|
| 321 |
+
|
| 322 |
+
global_options = options.global_options or []
|
| 323 |
+
|
| 324 |
+
session = self.get_default_session(options)
|
| 325 |
+
|
| 326 |
+
target_python = make_target_python(options)
|
| 327 |
+
finder = self._build_package_finder(
|
| 328 |
+
options=options,
|
| 329 |
+
session=session,
|
| 330 |
+
target_python=target_python,
|
| 331 |
+
ignore_requires_python=options.ignore_requires_python,
|
| 332 |
+
)
|
| 333 |
+
build_tracker = self.enter_context(get_build_tracker())
|
| 334 |
+
|
| 335 |
+
directory = TempDirectory(
|
| 336 |
+
delete=not options.no_clean,
|
| 337 |
+
kind="install",
|
| 338 |
+
globally_managed=True,
|
| 339 |
+
)
|
| 340 |
+
|
| 341 |
+
try:
|
| 342 |
+
reqs = self.get_requirements(args, options, finder, session)
|
| 343 |
+
check_legacy_setup_py_options(options, reqs)
|
| 344 |
+
|
| 345 |
+
wheel_cache = WheelCache(options.cache_dir)
|
| 346 |
+
|
| 347 |
+
# Only when installing is it permitted to use PEP 660.
|
| 348 |
+
# In other circumstances (pip wheel, pip download) we generate
|
| 349 |
+
# regular (i.e. non editable) metadata and wheels.
|
| 350 |
+
for req in reqs:
|
| 351 |
+
req.permit_editable_wheels = True
|
| 352 |
+
|
| 353 |
+
preparer = self.make_requirement_preparer(
|
| 354 |
+
temp_build_dir=directory,
|
| 355 |
+
options=options,
|
| 356 |
+
build_tracker=build_tracker,
|
| 357 |
+
session=session,
|
| 358 |
+
finder=finder,
|
| 359 |
+
use_user_site=options.use_user_site,
|
| 360 |
+
verbosity=self.verbosity,
|
| 361 |
+
)
|
| 362 |
+
resolver = self.make_resolver(
|
| 363 |
+
preparer=preparer,
|
| 364 |
+
finder=finder,
|
| 365 |
+
options=options,
|
| 366 |
+
wheel_cache=wheel_cache,
|
| 367 |
+
use_user_site=options.use_user_site,
|
| 368 |
+
ignore_installed=options.ignore_installed,
|
| 369 |
+
ignore_requires_python=options.ignore_requires_python,
|
| 370 |
+
force_reinstall=options.force_reinstall,
|
| 371 |
+
upgrade_strategy=upgrade_strategy,
|
| 372 |
+
use_pep517=options.use_pep517,
|
| 373 |
+
)
|
| 374 |
+
|
| 375 |
+
self.trace_basic_info(finder)
|
| 376 |
+
|
| 377 |
+
requirement_set = resolver.resolve(
|
| 378 |
+
reqs, check_supported_wheels=not options.target_dir
|
| 379 |
+
)
|
| 380 |
+
|
| 381 |
+
if options.json_report_file:
|
| 382 |
+
report = InstallationReport(requirement_set.requirements_to_install)
|
| 383 |
+
if options.json_report_file == "-":
|
| 384 |
+
print_json(data=report.to_dict())
|
| 385 |
+
else:
|
| 386 |
+
with open(options.json_report_file, "w", encoding="utf-8") as f:
|
| 387 |
+
json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)
|
| 388 |
+
|
| 389 |
+
if options.dry_run:
|
| 390 |
+
# In non dry-run mode, the legacy versions and specifiers check
|
| 391 |
+
# will be done as part of conflict detection.
|
| 392 |
+
requirement_set.warn_legacy_versions_and_specifiers()
|
| 393 |
+
would_install_items = sorted(
|
| 394 |
+
(r.metadata["name"], r.metadata["version"])
|
| 395 |
+
for r in requirement_set.requirements_to_install
|
| 396 |
+
)
|
| 397 |
+
if would_install_items:
|
| 398 |
+
write_output(
|
| 399 |
+
"Would install %s",
|
| 400 |
+
" ".join("-".join(item) for item in would_install_items),
|
| 401 |
+
)
|
| 402 |
+
return SUCCESS
|
| 403 |
+
|
| 404 |
+
try:
|
| 405 |
+
pip_req = requirement_set.get_requirement("pip")
|
| 406 |
+
except KeyError:
|
| 407 |
+
modifying_pip = False
|
| 408 |
+
else:
|
| 409 |
+
# If we're not replacing an already installed pip,
|
| 410 |
+
# we're not modifying it.
|
| 411 |
+
modifying_pip = pip_req.satisfied_by is None
|
| 412 |
+
protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)
|
| 413 |
+
|
| 414 |
+
reqs_to_build = [
|
| 415 |
+
r
|
| 416 |
+
for r in requirement_set.requirements.values()
|
| 417 |
+
if should_build_for_install_command(r)
|
| 418 |
+
]
|
| 419 |
+
|
| 420 |
+
_, build_failures = build(
|
| 421 |
+
reqs_to_build,
|
| 422 |
+
wheel_cache=wheel_cache,
|
| 423 |
+
verify=True,
|
| 424 |
+
build_options=[],
|
| 425 |
+
global_options=global_options,
|
| 426 |
+
)
|
| 427 |
+
|
| 428 |
+
if build_failures:
|
| 429 |
+
raise InstallationError(
|
| 430 |
+
"Could not build wheels for {}, which is required to "
|
| 431 |
+
"install pyproject.toml-based projects".format(
|
| 432 |
+
", ".join(r.name for r in build_failures) # type: ignore
|
| 433 |
+
)
|
| 434 |
+
)
|
| 435 |
+
|
| 436 |
+
to_install = resolver.get_installation_order(requirement_set)
|
| 437 |
+
|
| 438 |
+
# Check for conflicts in the package set we're installing.
|
| 439 |
+
conflicts: Optional[ConflictDetails] = None
|
| 440 |
+
should_warn_about_conflicts = (
|
| 441 |
+
not options.ignore_dependencies and options.warn_about_conflicts
|
| 442 |
+
)
|
| 443 |
+
if should_warn_about_conflicts:
|
| 444 |
+
conflicts = self._determine_conflicts(to_install)
|
| 445 |
+
|
| 446 |
+
# Don't warn about script install locations if
|
| 447 |
+
# --target or --prefix has been specified
|
| 448 |
+
warn_script_location = options.warn_script_location
|
| 449 |
+
if options.target_dir or options.prefix_path:
|
| 450 |
+
warn_script_location = False
|
| 451 |
+
|
| 452 |
+
installed = install_given_reqs(
|
| 453 |
+
to_install,
|
| 454 |
+
global_options,
|
| 455 |
+
root=options.root_path,
|
| 456 |
+
home=target_temp_dir_path,
|
| 457 |
+
prefix=options.prefix_path,
|
| 458 |
+
warn_script_location=warn_script_location,
|
| 459 |
+
use_user_site=options.use_user_site,
|
| 460 |
+
pycompile=options.compile,
|
| 461 |
+
)
|
| 462 |
+
|
| 463 |
+
lib_locations = get_lib_location_guesses(
|
| 464 |
+
user=options.use_user_site,
|
| 465 |
+
home=target_temp_dir_path,
|
| 466 |
+
root=options.root_path,
|
| 467 |
+
prefix=options.prefix_path,
|
| 468 |
+
isolated=options.isolated_mode,
|
| 469 |
+
)
|
| 470 |
+
env = get_environment(lib_locations)
|
| 471 |
+
|
| 472 |
+
installed.sort(key=operator.attrgetter("name"))
|
| 473 |
+
items = []
|
| 474 |
+
for result in installed:
|
| 475 |
+
item = result.name
|
| 476 |
+
try:
|
| 477 |
+
installed_dist = env.get_distribution(item)
|
| 478 |
+
if installed_dist is not None:
|
| 479 |
+
item = f"{item}-{installed_dist.version}"
|
| 480 |
+
except Exception:
|
| 481 |
+
pass
|
| 482 |
+
items.append(item)
|
| 483 |
+
|
| 484 |
+
if conflicts is not None:
|
| 485 |
+
self._warn_about_conflicts(
|
| 486 |
+
conflicts,
|
| 487 |
+
resolver_variant=self.determine_resolver_variant(options),
|
| 488 |
+
)
|
| 489 |
+
|
| 490 |
+
installed_desc = " ".join(items)
|
| 491 |
+
if installed_desc:
|
| 492 |
+
write_output(
|
| 493 |
+
"Successfully installed %s",
|
| 494 |
+
installed_desc,
|
| 495 |
+
)
|
| 496 |
+
except OSError as error:
|
| 497 |
+
show_traceback = self.verbosity >= 1
|
| 498 |
+
|
| 499 |
+
message = create_os_error_message(
|
| 500 |
+
error,
|
| 501 |
+
show_traceback,
|
| 502 |
+
options.use_user_site,
|
| 503 |
+
)
|
| 504 |
+
logger.error(message, exc_info=show_traceback) # noqa
|
| 505 |
+
|
| 506 |
+
return ERROR
|
| 507 |
+
|
| 508 |
+
if options.target_dir:
|
| 509 |
+
assert target_temp_dir
|
| 510 |
+
self._handle_target_dir(
|
| 511 |
+
options.target_dir, target_temp_dir, options.upgrade
|
| 512 |
+
)
|
| 513 |
+
if options.root_user_action == "warn":
|
| 514 |
+
warn_if_run_as_root()
|
| 515 |
+
return SUCCESS
|
| 516 |
+
|
| 517 |
+
def _handle_target_dir(
|
| 518 |
+
self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool
|
| 519 |
+
) -> None:
|
| 520 |
+
ensure_dir(target_dir)
|
| 521 |
+
|
| 522 |
+
# Checking both purelib and platlib directories for installed
|
| 523 |
+
# packages to be moved to target directory
|
| 524 |
+
lib_dir_list = []
|
| 525 |
+
|
| 526 |
+
# Checking both purelib and platlib directories for installed
|
| 527 |
+
# packages to be moved to target directory
|
| 528 |
+
scheme = get_scheme("", home=target_temp_dir.path)
|
| 529 |
+
purelib_dir = scheme.purelib
|
| 530 |
+
platlib_dir = scheme.platlib
|
| 531 |
+
data_dir = scheme.data
|
| 532 |
+
|
| 533 |
+
if os.path.exists(purelib_dir):
|
| 534 |
+
lib_dir_list.append(purelib_dir)
|
| 535 |
+
if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
|
| 536 |
+
lib_dir_list.append(platlib_dir)
|
| 537 |
+
if os.path.exists(data_dir):
|
| 538 |
+
lib_dir_list.append(data_dir)
|
| 539 |
+
|
| 540 |
+
for lib_dir in lib_dir_list:
|
| 541 |
+
for item in os.listdir(lib_dir):
|
| 542 |
+
if lib_dir == data_dir:
|
| 543 |
+
ddir = os.path.join(data_dir, item)
|
| 544 |
+
if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
|
| 545 |
+
continue
|
| 546 |
+
target_item_dir = os.path.join(target_dir, item)
|
| 547 |
+
if os.path.exists(target_item_dir):
|
| 548 |
+
if not upgrade:
|
| 549 |
+
logger.warning(
|
| 550 |
+
"Target directory %s already exists. Specify "
|
| 551 |
+
"--upgrade to force replacement.",
|
| 552 |
+
target_item_dir,
|
| 553 |
+
)
|
| 554 |
+
continue
|
| 555 |
+
if os.path.islink(target_item_dir):
|
| 556 |
+
logger.warning(
|
| 557 |
+
"Target directory %s already exists and is "
|
| 558 |
+
"a link. pip will not automatically replace "
|
| 559 |
+
"links, please remove if replacement is "
|
| 560 |
+
"desired.",
|
| 561 |
+
target_item_dir,
|
| 562 |
+
)
|
| 563 |
+
continue
|
| 564 |
+
if os.path.isdir(target_item_dir):
|
| 565 |
+
shutil.rmtree(target_item_dir)
|
| 566 |
+
else:
|
| 567 |
+
os.remove(target_item_dir)
|
| 568 |
+
|
| 569 |
+
shutil.move(os.path.join(lib_dir, item), target_item_dir)
|
| 570 |
+
|
| 571 |
+
def _determine_conflicts(
|
| 572 |
+
self, to_install: List[InstallRequirement]
|
| 573 |
+
) -> Optional[ConflictDetails]:
|
| 574 |
+
try:
|
| 575 |
+
return check_install_conflicts(to_install)
|
| 576 |
+
except Exception:
|
| 577 |
+
logger.exception(
|
| 578 |
+
"Error while checking for conflicts. Please file an issue on "
|
| 579 |
+
"pip's issue tracker: https://github.com/pypa/pip/issues/new"
|
| 580 |
+
)
|
| 581 |
+
return None
|
| 582 |
+
|
| 583 |
+
def _warn_about_conflicts(
|
| 584 |
+
self, conflict_details: ConflictDetails, resolver_variant: str
|
| 585 |
+
) -> None:
|
| 586 |
+
package_set, (missing, conflicting) = conflict_details
|
| 587 |
+
if not missing and not conflicting:
|
| 588 |
+
return
|
| 589 |
+
|
| 590 |
+
parts: List[str] = []
|
| 591 |
+
if resolver_variant == "legacy":
|
| 592 |
+
parts.append(
|
| 593 |
+
"pip's legacy dependency resolver does not consider dependency "
|
| 594 |
+
"conflicts when selecting packages. This behaviour is the "
|
| 595 |
+
"source of the following dependency conflicts."
|
| 596 |
+
)
|
| 597 |
+
else:
|
| 598 |
+
assert resolver_variant == "2020-resolver"
|
| 599 |
+
parts.append(
|
| 600 |
+
"pip's dependency resolver does not currently take into account "
|
| 601 |
+
"all the packages that are installed. This behaviour is the "
|
| 602 |
+
"source of the following dependency conflicts."
|
| 603 |
+
)
|
| 604 |
+
|
| 605 |
+
# NOTE: There is some duplication here, with commands/check.py
|
| 606 |
+
for project_name in missing:
|
| 607 |
+
version = package_set[project_name][0]
|
| 608 |
+
for dependency in missing[project_name]:
|
| 609 |
+
message = (
|
| 610 |
+
"{name} {version} requires {requirement}, "
|
| 611 |
+
"which is not installed."
|
| 612 |
+
).format(
|
| 613 |
+
name=project_name,
|
| 614 |
+
version=version,
|
| 615 |
+
requirement=dependency[1],
|
| 616 |
+
)
|
| 617 |
+
parts.append(message)
|
| 618 |
+
|
| 619 |
+
for project_name in conflicting:
|
| 620 |
+
version = package_set[project_name][0]
|
| 621 |
+
for dep_name, dep_version, req in conflicting[project_name]:
|
| 622 |
+
message = (
|
| 623 |
+
"{name} {version} requires {requirement}, but {you} have "
|
| 624 |
+
"{dep_name} {dep_version} which is incompatible."
|
| 625 |
+
).format(
|
| 626 |
+
name=project_name,
|
| 627 |
+
version=version,
|
| 628 |
+
requirement=req,
|
| 629 |
+
dep_name=dep_name,
|
| 630 |
+
dep_version=dep_version,
|
| 631 |
+
you=("you" if resolver_variant == "2020-resolver" else "you'll"),
|
| 632 |
+
)
|
| 633 |
+
parts.append(message)
|
| 634 |
+
|
| 635 |
+
logger.critical("\n".join(parts))
|
| 636 |
+
|
| 637 |
+
|
| 638 |
+
def get_lib_location_guesses(
|
| 639 |
+
user: bool = False,
|
| 640 |
+
home: Optional[str] = None,
|
| 641 |
+
root: Optional[str] = None,
|
| 642 |
+
isolated: bool = False,
|
| 643 |
+
prefix: Optional[str] = None,
|
| 644 |
+
) -> List[str]:
|
| 645 |
+
scheme = get_scheme(
|
| 646 |
+
"",
|
| 647 |
+
user=user,
|
| 648 |
+
home=home,
|
| 649 |
+
root=root,
|
| 650 |
+
isolated=isolated,
|
| 651 |
+
prefix=prefix,
|
| 652 |
+
)
|
| 653 |
+
return [scheme.purelib, scheme.platlib]
|
| 654 |
+
|
| 655 |
+
|
| 656 |
+
def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
|
| 657 |
+
return all(
|
| 658 |
+
test_writable_dir(d)
|
| 659 |
+
for d in set(get_lib_location_guesses(root=root, isolated=isolated))
|
| 660 |
+
)
|
| 661 |
+
|
| 662 |
+
|
| 663 |
+
def decide_user_install(
|
| 664 |
+
use_user_site: Optional[bool],
|
| 665 |
+
prefix_path: Optional[str] = None,
|
| 666 |
+
target_dir: Optional[str] = None,
|
| 667 |
+
root_path: Optional[str] = None,
|
| 668 |
+
isolated_mode: bool = False,
|
| 669 |
+
) -> bool:
|
| 670 |
+
"""Determine whether to do a user install based on the input options.
|
| 671 |
+
|
| 672 |
+
If use_user_site is False, no additional checks are done.
|
| 673 |
+
If use_user_site is True, it is checked for compatibility with other
|
| 674 |
+
options.
|
| 675 |
+
If use_user_site is None, the default behaviour depends on the environment,
|
| 676 |
+
which is provided by the other arguments.
|
| 677 |
+
"""
|
| 678 |
+
# In some cases (config from tox), use_user_site can be set to an integer
|
| 679 |
+
# rather than a bool, which 'use_user_site is False' wouldn't catch.
|
| 680 |
+
if (use_user_site is not None) and (not use_user_site):
|
| 681 |
+
logger.debug("Non-user install by explicit request")
|
| 682 |
+
return False
|
| 683 |
+
|
| 684 |
+
if use_user_site:
|
| 685 |
+
if prefix_path:
|
| 686 |
+
raise CommandError(
|
| 687 |
+
"Can not combine '--user' and '--prefix' as they imply "
|
| 688 |
+
"different installation locations"
|
| 689 |
+
)
|
| 690 |
+
if virtualenv_no_global():
|
| 691 |
+
raise InstallationError(
|
| 692 |
+
"Can not perform a '--user' install. User site-packages "
|
| 693 |
+
"are not visible in this virtualenv."
|
| 694 |
+
)
|
| 695 |
+
logger.debug("User install by explicit request")
|
| 696 |
+
return True
|
| 697 |
+
|
| 698 |
+
# If we are here, user installs have not been explicitly requested/avoided
|
| 699 |
+
assert use_user_site is None
|
| 700 |
+
|
| 701 |
+
# user install incompatible with --prefix/--target
|
| 702 |
+
if prefix_path or target_dir:
|
| 703 |
+
logger.debug("Non-user install due to --prefix or --target option")
|
| 704 |
+
return False
|
| 705 |
+
|
| 706 |
+
# If user installs are not enabled, choose a non-user install
|
| 707 |
+
if not site.ENABLE_USER_SITE:
|
| 708 |
+
logger.debug("Non-user install because user site-packages disabled")
|
| 709 |
+
return False
|
| 710 |
+
|
| 711 |
+
# If we have permission for a non-user install, do that,
|
| 712 |
+
# otherwise do a user install.
|
| 713 |
+
if site_packages_writable(root=root_path, isolated=isolated_mode):
|
| 714 |
+
logger.debug("Non-user install because site-packages writeable")
|
| 715 |
+
return False
|
| 716 |
+
|
| 717 |
+
logger.info(
|
| 718 |
+
"Defaulting to user installation because normal site-packages "
|
| 719 |
+
"is not writeable"
|
| 720 |
+
)
|
| 721 |
+
return True
|
| 722 |
+
|
| 723 |
+
|
| 724 |
+
def create_os_error_message(
|
| 725 |
+
error: OSError, show_traceback: bool, using_user_site: bool
|
| 726 |
+
) -> str:
|
| 727 |
+
"""Format an error message for an OSError
|
| 728 |
+
|
| 729 |
+
It may occur anytime during the execution of the install command.
|
| 730 |
+
"""
|
| 731 |
+
parts = []
|
| 732 |
+
|
| 733 |
+
# Mention the error if we are not going to show a traceback
|
| 734 |
+
parts.append("Could not install packages due to an OSError")
|
| 735 |
+
if not show_traceback:
|
| 736 |
+
parts.append(": ")
|
| 737 |
+
parts.append(str(error))
|
| 738 |
+
else:
|
| 739 |
+
parts.append(".")
|
| 740 |
+
|
| 741 |
+
# Spilt the error indication from a helper message (if any)
|
| 742 |
+
parts[-1] += "\n"
|
| 743 |
+
|
| 744 |
+
# Suggest useful actions to the user:
|
| 745 |
+
# (1) using user site-packages or (2) verifying the permissions
|
| 746 |
+
if error.errno == errno.EACCES:
|
| 747 |
+
user_option_part = "Consider using the `--user` option"
|
| 748 |
+
permissions_part = "Check the permissions"
|
| 749 |
+
|
| 750 |
+
if not running_under_virtualenv() and not using_user_site:
|
| 751 |
+
parts.extend(
|
| 752 |
+
[
|
| 753 |
+
user_option_part,
|
| 754 |
+
" or ",
|
| 755 |
+
permissions_part.lower(),
|
| 756 |
+
]
|
| 757 |
+
)
|
| 758 |
+
else:
|
| 759 |
+
parts.append(permissions_part)
|
| 760 |
+
parts.append(".\n")
|
| 761 |
+
|
| 762 |
+
# Suggest the user to enable Long Paths if path length is
|
| 763 |
+
# more than 260
|
| 764 |
+
if (
|
| 765 |
+
WINDOWS
|
| 766 |
+
and error.errno == errno.ENOENT
|
| 767 |
+
and error.filename
|
| 768 |
+
and len(error.filename) > 260
|
| 769 |
+
):
|
| 770 |
+
parts.append(
|
| 771 |
+
"HINT: This error might have occurred since "
|
| 772 |
+
"this system does not have Windows Long Path "
|
| 773 |
+
"support enabled. You can find information on "
|
| 774 |
+
"how to enable this at "
|
| 775 |
+
"https://pip.pypa.io/warnings/enable-long-paths\n"
|
| 776 |
+
)
|
| 777 |
+
|
| 778 |
+
return "".join(parts).strip() + "\n"
|
.venv/Lib/site-packages/pip/_internal/commands/list.py
ADDED
|
@@ -0,0 +1,368 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
import logging
|
| 3 |
+
from optparse import Values
|
| 4 |
+
from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast
|
| 5 |
+
|
| 6 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 7 |
+
|
| 8 |
+
from pip._internal.cli import cmdoptions
|
| 9 |
+
from pip._internal.cli.req_command import IndexGroupCommand
|
| 10 |
+
from pip._internal.cli.status_codes import SUCCESS
|
| 11 |
+
from pip._internal.exceptions import CommandError
|
| 12 |
+
from pip._internal.index.collector import LinkCollector
|
| 13 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 14 |
+
from pip._internal.metadata import BaseDistribution, get_environment
|
| 15 |
+
from pip._internal.models.selection_prefs import SelectionPreferences
|
| 16 |
+
from pip._internal.network.session import PipSession
|
| 17 |
+
from pip._internal.utils.compat import stdlib_pkgs
|
| 18 |
+
from pip._internal.utils.misc import tabulate, write_output
|
| 19 |
+
|
| 20 |
+
if TYPE_CHECKING:
|
| 21 |
+
from pip._internal.metadata.base import DistributionVersion
|
| 22 |
+
|
| 23 |
+
class _DistWithLatestInfo(BaseDistribution):
|
| 24 |
+
"""Give the distribution object a couple of extra fields.
|
| 25 |
+
|
| 26 |
+
These will be populated during ``get_outdated()``. This is dirty but
|
| 27 |
+
makes the rest of the code much cleaner.
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
latest_version: DistributionVersion
|
| 31 |
+
latest_filetype: str
|
| 32 |
+
|
| 33 |
+
_ProcessedDists = Sequence[_DistWithLatestInfo]
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
logger = logging.getLogger(__name__)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class ListCommand(IndexGroupCommand):
|
| 40 |
+
"""
|
| 41 |
+
List installed packages, including editables.
|
| 42 |
+
|
| 43 |
+
Packages are listed in a case-insensitive sorted order.
|
| 44 |
+
"""
|
| 45 |
+
|
| 46 |
+
ignore_require_venv = True
|
| 47 |
+
usage = """
|
| 48 |
+
%prog [options]"""
|
| 49 |
+
|
| 50 |
+
def add_options(self) -> None:
|
| 51 |
+
self.cmd_opts.add_option(
|
| 52 |
+
"-o",
|
| 53 |
+
"--outdated",
|
| 54 |
+
action="store_true",
|
| 55 |
+
default=False,
|
| 56 |
+
help="List outdated packages",
|
| 57 |
+
)
|
| 58 |
+
self.cmd_opts.add_option(
|
| 59 |
+
"-u",
|
| 60 |
+
"--uptodate",
|
| 61 |
+
action="store_true",
|
| 62 |
+
default=False,
|
| 63 |
+
help="List uptodate packages",
|
| 64 |
+
)
|
| 65 |
+
self.cmd_opts.add_option(
|
| 66 |
+
"-e",
|
| 67 |
+
"--editable",
|
| 68 |
+
action="store_true",
|
| 69 |
+
default=False,
|
| 70 |
+
help="List editable projects.",
|
| 71 |
+
)
|
| 72 |
+
self.cmd_opts.add_option(
|
| 73 |
+
"-l",
|
| 74 |
+
"--local",
|
| 75 |
+
action="store_true",
|
| 76 |
+
default=False,
|
| 77 |
+
help=(
|
| 78 |
+
"If in a virtualenv that has global access, do not list "
|
| 79 |
+
"globally-installed packages."
|
| 80 |
+
),
|
| 81 |
+
)
|
| 82 |
+
self.cmd_opts.add_option(
|
| 83 |
+
"--user",
|
| 84 |
+
dest="user",
|
| 85 |
+
action="store_true",
|
| 86 |
+
default=False,
|
| 87 |
+
help="Only output packages installed in user-site.",
|
| 88 |
+
)
|
| 89 |
+
self.cmd_opts.add_option(cmdoptions.list_path())
|
| 90 |
+
self.cmd_opts.add_option(
|
| 91 |
+
"--pre",
|
| 92 |
+
action="store_true",
|
| 93 |
+
default=False,
|
| 94 |
+
help=(
|
| 95 |
+
"Include pre-release and development versions. By default, "
|
| 96 |
+
"pip only finds stable versions."
|
| 97 |
+
),
|
| 98 |
+
)
|
| 99 |
+
|
| 100 |
+
self.cmd_opts.add_option(
|
| 101 |
+
"--format",
|
| 102 |
+
action="store",
|
| 103 |
+
dest="list_format",
|
| 104 |
+
default="columns",
|
| 105 |
+
choices=("columns", "freeze", "json"),
|
| 106 |
+
help=(
|
| 107 |
+
"Select the output format among: columns (default), freeze, or json. "
|
| 108 |
+
"The 'freeze' format cannot be used with the --outdated option."
|
| 109 |
+
),
|
| 110 |
+
)
|
| 111 |
+
|
| 112 |
+
self.cmd_opts.add_option(
|
| 113 |
+
"--not-required",
|
| 114 |
+
action="store_true",
|
| 115 |
+
dest="not_required",
|
| 116 |
+
help="List packages that are not dependencies of installed packages.",
|
| 117 |
+
)
|
| 118 |
+
|
| 119 |
+
self.cmd_opts.add_option(
|
| 120 |
+
"--exclude-editable",
|
| 121 |
+
action="store_false",
|
| 122 |
+
dest="include_editable",
|
| 123 |
+
help="Exclude editable package from output.",
|
| 124 |
+
)
|
| 125 |
+
self.cmd_opts.add_option(
|
| 126 |
+
"--include-editable",
|
| 127 |
+
action="store_true",
|
| 128 |
+
dest="include_editable",
|
| 129 |
+
help="Include editable package from output.",
|
| 130 |
+
default=True,
|
| 131 |
+
)
|
| 132 |
+
self.cmd_opts.add_option(cmdoptions.list_exclude())
|
| 133 |
+
index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)
|
| 134 |
+
|
| 135 |
+
self.parser.insert_option_group(0, index_opts)
|
| 136 |
+
self.parser.insert_option_group(0, self.cmd_opts)
|
| 137 |
+
|
| 138 |
+
def _build_package_finder(
|
| 139 |
+
self, options: Values, session: PipSession
|
| 140 |
+
) -> PackageFinder:
|
| 141 |
+
"""
|
| 142 |
+
Create a package finder appropriate to this list command.
|
| 143 |
+
"""
|
| 144 |
+
link_collector = LinkCollector.create(session, options=options)
|
| 145 |
+
|
| 146 |
+
# Pass allow_yanked=False to ignore yanked versions.
|
| 147 |
+
selection_prefs = SelectionPreferences(
|
| 148 |
+
allow_yanked=False,
|
| 149 |
+
allow_all_prereleases=options.pre,
|
| 150 |
+
)
|
| 151 |
+
|
| 152 |
+
return PackageFinder.create(
|
| 153 |
+
link_collector=link_collector,
|
| 154 |
+
selection_prefs=selection_prefs,
|
| 155 |
+
)
|
| 156 |
+
|
| 157 |
+
def run(self, options: Values, args: List[str]) -> int:
|
| 158 |
+
if options.outdated and options.uptodate:
|
| 159 |
+
raise CommandError("Options --outdated and --uptodate cannot be combined.")
|
| 160 |
+
|
| 161 |
+
if options.outdated and options.list_format == "freeze":
|
| 162 |
+
raise CommandError(
|
| 163 |
+
"List format 'freeze' cannot be used with the --outdated option."
|
| 164 |
+
)
|
| 165 |
+
|
| 166 |
+
cmdoptions.check_list_path_option(options)
|
| 167 |
+
|
| 168 |
+
skip = set(stdlib_pkgs)
|
| 169 |
+
if options.excludes:
|
| 170 |
+
skip.update(canonicalize_name(n) for n in options.excludes)
|
| 171 |
+
|
| 172 |
+
packages: "_ProcessedDists" = [
|
| 173 |
+
cast("_DistWithLatestInfo", d)
|
| 174 |
+
for d in get_environment(options.path).iter_installed_distributions(
|
| 175 |
+
local_only=options.local,
|
| 176 |
+
user_only=options.user,
|
| 177 |
+
editables_only=options.editable,
|
| 178 |
+
include_editables=options.include_editable,
|
| 179 |
+
skip=skip,
|
| 180 |
+
)
|
| 181 |
+
]
|
| 182 |
+
|
| 183 |
+
# get_not_required must be called firstly in order to find and
|
| 184 |
+
# filter out all dependencies correctly. Otherwise a package
|
| 185 |
+
# can't be identified as requirement because some parent packages
|
| 186 |
+
# could be filtered out before.
|
| 187 |
+
if options.not_required:
|
| 188 |
+
packages = self.get_not_required(packages, options)
|
| 189 |
+
|
| 190 |
+
if options.outdated:
|
| 191 |
+
packages = self.get_outdated(packages, options)
|
| 192 |
+
elif options.uptodate:
|
| 193 |
+
packages = self.get_uptodate(packages, options)
|
| 194 |
+
|
| 195 |
+
self.output_package_listing(packages, options)
|
| 196 |
+
return SUCCESS
|
| 197 |
+
|
| 198 |
+
def get_outdated(
|
| 199 |
+
self, packages: "_ProcessedDists", options: Values
|
| 200 |
+
) -> "_ProcessedDists":
|
| 201 |
+
return [
|
| 202 |
+
dist
|
| 203 |
+
for dist in self.iter_packages_latest_infos(packages, options)
|
| 204 |
+
if dist.latest_version > dist.version
|
| 205 |
+
]
|
| 206 |
+
|
| 207 |
+
def get_uptodate(
|
| 208 |
+
self, packages: "_ProcessedDists", options: Values
|
| 209 |
+
) -> "_ProcessedDists":
|
| 210 |
+
return [
|
| 211 |
+
dist
|
| 212 |
+
for dist in self.iter_packages_latest_infos(packages, options)
|
| 213 |
+
if dist.latest_version == dist.version
|
| 214 |
+
]
|
| 215 |
+
|
| 216 |
+
def get_not_required(
|
| 217 |
+
self, packages: "_ProcessedDists", options: Values
|
| 218 |
+
) -> "_ProcessedDists":
|
| 219 |
+
dep_keys = {
|
| 220 |
+
canonicalize_name(dep.name)
|
| 221 |
+
for dist in packages
|
| 222 |
+
for dep in (dist.iter_dependencies() or ())
|
| 223 |
+
}
|
| 224 |
+
|
| 225 |
+
# Create a set to remove duplicate packages, and cast it to a list
|
| 226 |
+
# to keep the return type consistent with get_outdated and
|
| 227 |
+
# get_uptodate
|
| 228 |
+
return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys})
|
| 229 |
+
|
| 230 |
+
def iter_packages_latest_infos(
|
| 231 |
+
self, packages: "_ProcessedDists", options: Values
|
| 232 |
+
) -> Generator["_DistWithLatestInfo", None, None]:
|
| 233 |
+
with self._build_session(options) as session:
|
| 234 |
+
finder = self._build_package_finder(options, session)
|
| 235 |
+
|
| 236 |
+
def latest_info(
|
| 237 |
+
dist: "_DistWithLatestInfo",
|
| 238 |
+
) -> Optional["_DistWithLatestInfo"]:
|
| 239 |
+
all_candidates = finder.find_all_candidates(dist.canonical_name)
|
| 240 |
+
if not options.pre:
|
| 241 |
+
# Remove prereleases
|
| 242 |
+
all_candidates = [
|
| 243 |
+
candidate
|
| 244 |
+
for candidate in all_candidates
|
| 245 |
+
if not candidate.version.is_prerelease
|
| 246 |
+
]
|
| 247 |
+
|
| 248 |
+
evaluator = finder.make_candidate_evaluator(
|
| 249 |
+
project_name=dist.canonical_name,
|
| 250 |
+
)
|
| 251 |
+
best_candidate = evaluator.sort_best_candidate(all_candidates)
|
| 252 |
+
if best_candidate is None:
|
| 253 |
+
return None
|
| 254 |
+
|
| 255 |
+
remote_version = best_candidate.version
|
| 256 |
+
if best_candidate.link.is_wheel:
|
| 257 |
+
typ = "wheel"
|
| 258 |
+
else:
|
| 259 |
+
typ = "sdist"
|
| 260 |
+
dist.latest_version = remote_version
|
| 261 |
+
dist.latest_filetype = typ
|
| 262 |
+
return dist
|
| 263 |
+
|
| 264 |
+
for dist in map(latest_info, packages):
|
| 265 |
+
if dist is not None:
|
| 266 |
+
yield dist
|
| 267 |
+
|
| 268 |
+
def output_package_listing(
    self, packages: "_ProcessedDists", options: Values
) -> None:
    """Print the package list in the format selected by --format."""
    packages = sorted(packages, key=lambda dist: dist.canonical_name)
    list_format = options.list_format

    if list_format == "columns" and packages:
        data, header = format_for_columns(packages, options)
        self.output_package_listing_columns(data, header)
    elif list_format == "freeze":
        for dist in packages:
            if options.verbose >= 1:
                write_output(
                    "%s==%s (%s)", dist.raw_name, dist.version, dist.location
                )
            else:
                write_output("%s==%s", dist.raw_name, dist.version)
    elif list_format == "json":
        write_output(format_for_json(packages, options))
|
| 288 |
+
|
| 289 |
+
def output_package_listing_columns(
    self, data: List[List[str]], header: List[str]
) -> None:
    """Render rows as aligned columns, with a dashed separator under the header."""
    # The header must take part in tabulation so column widths account
    # for the header text itself.
    if data:
        data.insert(0, header)

    pkg_strings, sizes = tabulate(data)

    # One run of dashes per column, inserted right below the header row.
    if data:
        pkg_strings.insert(1, " ".join("-" * width for width in sizes))

    for rendered in pkg_strings:
        write_output(rendered)
|
| 304 |
+
|
| 305 |
+
|
| 306 |
+
def format_for_columns(
    pkgs: "_ProcessedDists", options: Values
) -> Tuple[List[List[str]], List[str]]:
    """
    Convert the package data into something usable
    by output_package_listing_columns.
    """
    show_outdated = options.outdated
    show_editable = any(pkg.editable for pkg in pkgs)
    verbose = options.verbose >= 1

    header = ["Package", "Version"]
    if show_outdated:
        header += ["Latest", "Type"]
    if show_editable:
        header.append("Editable project location")
    if verbose:
        header += ["Location", "Installer"]

    data = []
    for proj in pkgs:
        row = [proj.raw_name, str(proj.version)]

        if show_outdated:
            # latest_version / latest_filetype were attached earlier by
            # iter_packages_latest_infos.
            row += [str(proj.latest_version), proj.latest_filetype]

        if show_editable:
            row.append(proj.editable_project_location or "")

        if verbose:
            row += [proj.location or "", proj.installer]

        data.append(row)

    return data, header
|
| 349 |
+
|
| 350 |
+
|
| 351 |
+
def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
    """Serialize the package list to a JSON array of objects."""
    entries = []
    for dist in packages:
        entry = {
            "name": dist.raw_name,
            "version": str(dist.version),
        }
        if options.verbose >= 1:
            entry["location"] = dist.location or ""
            entry["installer"] = dist.installer
        if options.outdated:
            entry["latest_version"] = str(dist.latest_version)
            entry["latest_filetype"] = dist.latest_filetype
        editable_location = dist.editable_project_location
        if editable_location:
            entry["editable_project_location"] = editable_location
        entries.append(entry)
    return json.dumps(entries)
|
.venv/Lib/site-packages/pip/_internal/commands/search.py
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import shutil
|
| 3 |
+
import sys
|
| 4 |
+
import textwrap
|
| 5 |
+
import xmlrpc.client
|
| 6 |
+
from collections import OrderedDict
|
| 7 |
+
from optparse import Values
|
| 8 |
+
from typing import TYPE_CHECKING, Dict, List, Optional
|
| 9 |
+
|
| 10 |
+
from pip._vendor.packaging.version import parse as parse_version
|
| 11 |
+
|
| 12 |
+
from pip._internal.cli.base_command import Command
|
| 13 |
+
from pip._internal.cli.req_command import SessionCommandMixin
|
| 14 |
+
from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
|
| 15 |
+
from pip._internal.exceptions import CommandError
|
| 16 |
+
from pip._internal.metadata import get_default_environment
|
| 17 |
+
from pip._internal.models.index import PyPI
|
| 18 |
+
from pip._internal.network.xmlrpc import PipXmlrpcTransport
|
| 19 |
+
from pip._internal.utils.logging import indent_log
|
| 20 |
+
from pip._internal.utils.misc import write_output
|
| 21 |
+
|
| 22 |
+
if TYPE_CHECKING:
|
| 23 |
+
from typing import TypedDict
|
| 24 |
+
|
| 25 |
+
class TransformedHit(TypedDict):
|
| 26 |
+
name: str
|
| 27 |
+
summary: str
|
| 28 |
+
versions: List[str]
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
logger = logging.getLogger(__name__)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class SearchCommand(Command, SessionCommandMixin):
    """Search for PyPI packages whose name or summary contains <query>."""

    usage = """
      %prog [options] <query>"""
    ignore_require_venv = True

    def add_options(self) -> None:
        # Single option: which index's XML-RPC endpoint to query.
        self.cmd_opts.add_option(
            "-i",
            "--index",
            dest="index",
            metavar="URL",
            default=PyPI.pypi_url,
            help="Base URL of Python Package Index (default %default)",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        if not args:
            raise CommandError("Missing required argument (search query).")
        query = args
        pypi_hits = self.search(query, options)
        hits = transform_hits(pypi_hits)

        # Only constrain output width when attached to a real terminal.
        terminal_width = shutil.get_terminal_size()[0] if sys.stdout.isatty() else None

        print_results(hits, terminal_width=terminal_width)
        return SUCCESS if pypi_hits else NO_MATCHES_FOUND

    def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:
        """Query the index's XML-RPC search endpoint and return raw hits."""
        index_url = options.index

        session = self.get_default_session(options)

        transport = PipXmlrpcTransport(index_url, session)
        pypi = xmlrpc.client.ServerProxy(index_url, transport)
        try:
            hits = pypi.search({"name": query, "summary": query}, "or")
        except xmlrpc.client.Fault as fault:
            message = "XMLRPC request failed [code: {code}]\n{string}".format(
                code=fault.faultCode,
                string=fault.faultString,
            )
            raise CommandError(message)
        assert isinstance(hits, list)
        return hits
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.
    """
    packages: Dict[str, "TransformedHit"] = OrderedDict()
    for hit in hits:
        name = hit["name"]
        summary = hit["summary"]
        version = hit["version"]

        entry = packages.get(name)
        if entry is None:
            packages[name] = {
                "name": name,
                "summary": summary,
                "versions": [version],
            }
        else:
            entry["versions"].append(version)

            # Keep the summary of whichever release is the highest version.
            if version == highest_version(entry["versions"]):
                entry["summary"] = summary

    return list(packages.values())
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def print_dist_installation_info(name: str, latest: str) -> None:
    """If *name* is installed locally, report its version versus *latest*."""
    env = get_default_environment()
    dist = env.get_distribution(name)
    if dist is None:
        return

    with indent_log():
        if dist.version == latest:
            write_output("INSTALLED: %s (latest)", dist.version)
            return

        write_output("INSTALLED: %s", dist.version)
        if parse_version(latest).pre:
            # The newest release is a pre-release, which a default
            # `pip install` would not pick up.
            write_output(
                "LATEST: %s (pre-release; install"
                " with `pip install --pre`)",
                latest,
            )
        else:
            write_output("LATEST: %s", latest)
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def print_results(
    hits: List["TransformedHit"],
    name_column_width: Optional[int] = None,
    terminal_width: Optional[int] = None,
) -> None:
    """Write the search hits to the console, one "name (version) - summary" line each."""
    if not hits:
        return

    if name_column_width is None:
        # Wide enough for the longest "name" + "latest version", plus padding.
        widest = max(
            len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
            for hit in hits
        )
        name_column_width = widest + 4

    for hit in hits:
        name = hit["name"]
        summary = hit["summary"] or ""
        latest = highest_version(hit.get("versions", ["-"]))

        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # Wrap the summary and indent continuation lines so they
                # align under the summary column.
                summary_lines = textwrap.wrap(summary, target_width)
                summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines)

        name_latest = f"{name} ({latest})"
        line = f"{name_latest:{name_column_width}} - {summary}"
        try:
            write_output(line)
            print_dist_installation_info(name, latest)
        except UnicodeEncodeError:
            # The console encoding cannot represent this hit; skip it.
            pass
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
def highest_version(versions: List[str]) -> str:
    """Return the element of *versions* that is largest under PEP 440 ordering."""
    return max(versions, key=lambda version: parse_version(version))
|
.venv/Lib/site-packages/pip/_internal/commands/show.py
ADDED
|
@@ -0,0 +1,189 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from optparse import Values
|
| 3 |
+
from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional
|
| 4 |
+
|
| 5 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 6 |
+
|
| 7 |
+
from pip._internal.cli.base_command import Command
|
| 8 |
+
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
| 9 |
+
from pip._internal.metadata import BaseDistribution, get_default_environment
|
| 10 |
+
from pip._internal.utils.misc import write_output
|
| 11 |
+
|
| 12 |
+
logger = logging.getLogger(__name__)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class ShowCommand(Command):
    """
    Show information about one or more installed packages.

    The output is in RFC-compliant mail header format.
    """

    usage = """
      %prog [options] <package> ..."""
    ignore_require_venv = True

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-f",
            "--files",
            dest="files",
            action="store_true",
            default=False,
            help="Show the full list of installed files for each package.",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        if not args:
            logger.warning("ERROR: Please provide a package name or names.")
            return ERROR

        results = search_packages_info(args)
        # print_results reports whether anything was actually printed;
        # an empty result set is treated as an error.
        printed = print_results(
            results, list_files=options.files, verbose=options.verbose
        )
        return SUCCESS if printed else ERROR
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class _PackageInfo(NamedTuple):
    # One record of everything `pip show` prints for a single distribution.
    # Project identity and installed version.
    name: str
    version: str
    # Install location ("" when unknown).
    location: str
    # Source checkout path for editable installs; None otherwise.
    editable_project_location: Optional[str]
    # Declared dependencies and reverse dependencies (sorted by caller).
    requires: List[str]
    required_by: List[str]
    # Tool that performed the installation (e.g. "pip").
    installer: str
    # Metadata fields pulled straight from the distribution's METADATA.
    metadata_version: str
    classifiers: List[str]
    summary: str
    homepage: str
    project_urls: List[str]
    author: str
    author_email: str
    license: str
    # Raw lines of entry_points.txt, if present.
    entry_points: List[str]
    # Installed file paths; None when no RECORD/installed-files.txt exists.
    files: Optional[List[str]]
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.
    """
    env = get_default_environment()

    installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}
    query_names = [canonicalize_name(name) for name in query]
    missing = sorted(
        name
        for name, canonical in zip(query, query_names)
        if canonical not in installed
    )
    if missing:
        logger.warning("Package(s) not found: %s", ", ".join(missing))

    def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
        # A distribution "requires" current_dist when current_dist's
        # canonical name appears among its declared dependencies.
        target = current_dist.canonical_name
        for dist in installed.values():
            dep_names = {canonicalize_name(d.name) for d in dist.iter_dependencies()}
            if target in dep_names:
                yield dist.metadata["Name"] or "UNKNOWN"

    for query_name in query_names:
        dist = installed.get(query_name)
        if dist is None:
            continue

        requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower)
        required_by = sorted(_get_requiring_packages(dist), key=str.lower)

        try:
            entry_points_text = dist.read_text("entry_points.txt")
        except FileNotFoundError:
            entry_points = []
        else:
            entry_points = entry_points_text.splitlines(keepends=False)

        # iter_declared_entries() is None when neither RECORD nor
        # installed-files.txt is available for this distribution.
        declared = dist.iter_declared_entries()
        files: Optional[List[str]] = None if declared is None else sorted(declared)

        metadata = dist.metadata

        yield _PackageInfo(
            name=dist.raw_name,
            version=str(dist.version),
            location=dist.location or "",
            editable_project_location=dist.editable_project_location,
            requires=requires,
            required_by=required_by,
            installer=dist.installer,
            metadata_version=dist.metadata_version or "",
            classifiers=metadata.get_all("Classifier", []),
            summary=metadata.get("Summary", ""),
            homepage=metadata.get("Home-page", ""),
            project_urls=metadata.get_all("Project-URL", []),
            author=metadata.get("Author", ""),
            author_email=metadata.get("Author-email", ""),
            license=metadata.get("License", ""),
            entry_points=entry_points,
            files=files,
        )
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def print_results(
    distributions: Iterable[_PackageInfo],
    list_files: bool,
    verbose: bool,
) -> bool:
    """
    Print the information from installed distributions found.

    Returns True when at least one record was printed.
    """
    printed_any = False
    for index, dist in enumerate(distributions):
        printed_any = True
        if index:
            # Separator between consecutive package records.
            write_output("---")

        write_output("Name: %s", dist.name)
        write_output("Version: %s", dist.version)
        write_output("Summary: %s", dist.summary)
        write_output("Home-page: %s", dist.homepage)
        write_output("Author: %s", dist.author)
        write_output("Author-email: %s", dist.author_email)
        write_output("License: %s", dist.license)
        write_output("Location: %s", dist.location)
        if dist.editable_project_location is not None:
            write_output(
                "Editable project location: %s", dist.editable_project_location
            )
        write_output("Requires: %s", ", ".join(dist.requires))
        write_output("Required-by: %s", ", ".join(dist.required_by))

        if verbose:
            write_output("Metadata-Version: %s", dist.metadata_version)
            write_output("Installer: %s", dist.installer)
            write_output("Classifiers:")
            for classifier in dist.classifiers:
                write_output("  %s", classifier)
            write_output("Entry-points:")
            for entry in dist.entry_points:
                write_output("  %s", entry.strip())
            write_output("Project-URLs:")
            for project_url in dist.project_urls:
                write_output("  %s", project_url)
        if list_files:
            write_output("Files:")
            if dist.files is None:
                write_output("Cannot locate RECORD or installed-files.txt")
            else:
                for line in dist.files:
                    write_output("  %s", line.strip())
    return printed_any
|
.venv/Lib/site-packages/pip/_internal/commands/uninstall.py
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from optparse import Values
|
| 3 |
+
from typing import List
|
| 4 |
+
|
| 5 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 6 |
+
|
| 7 |
+
from pip._internal.cli import cmdoptions
|
| 8 |
+
from pip._internal.cli.base_command import Command
|
| 9 |
+
from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
|
| 10 |
+
from pip._internal.cli.status_codes import SUCCESS
|
| 11 |
+
from pip._internal.exceptions import InstallationError
|
| 12 |
+
from pip._internal.req import parse_requirements
|
| 13 |
+
from pip._internal.req.constructors import (
|
| 14 |
+
install_req_from_line,
|
| 15 |
+
install_req_from_parsed_requirement,
|
| 16 |
+
)
|
| 17 |
+
from pip._internal.utils.misc import (
|
| 18 |
+
check_externally_managed,
|
| 19 |
+
protect_pip_from_modification_on_windows,
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
logger = logging.getLogger(__name__)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class UninstallCommand(Command, SessionCommandMixin):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """

    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-r",
            "--requirement",
            dest="requirements",
            action="append",
            default=[],
            metavar="file",
            help=(
                "Uninstall all the packages listed in the given requirements "
                "file. This option can be used multiple times."
            ),
        )
        self.cmd_opts.add_option(
            "-y",
            "--yes",
            dest="yes",
            action="store_true",
            help="Don't ask for confirmation of uninstall deletions.",
        )
        self.cmd_opts.add_option(cmdoptions.root_user_action())
        self.cmd_opts.add_option(cmdoptions.override_externally_managed())
        self.parser.insert_option_group(0, self.cmd_opts)

    def _collect_requirements(self, options: Values, args: List[str]) -> dict:
        """Build the canonical-name -> requirement map to uninstall.

        The dict both de-duplicates names and preserves insertion order.
        """
        collected = {}
        for name in args:
            req = install_req_from_line(
                name,
                isolated=options.isolated_mode,
            )
            if req.name:
                collected[canonicalize_name(req.name)] = req
            else:
                logger.warning(
                    "Invalid requirement: %r ignored -"
                    " the uninstall command expects named"
                    " requirements.",
                    name,
                )
        session = self.get_default_session(options)
        for filename in options.requirements:
            for parsed_req in parse_requirements(
                filename, options=options, session=session
            ):
                req = install_req_from_parsed_requirement(
                    parsed_req, isolated=options.isolated_mode
                )
                if req.name:
                    collected[canonicalize_name(req.name)] = req
        return collected

    def run(self, options: Values, args: List[str]) -> int:
        # Establish the session first so requirements files can be fetched.
        self.get_default_session(options)

        reqs_to_uninstall = self._collect_requirements(options, args)
        if not reqs_to_uninstall:
            raise InstallationError(
                f"You must give at least one requirement to {self.name} (see "
                f'"pip help {self.name}")'
            )

        if not options.override_externally_managed:
            check_externally_managed()

        protect_pip_from_modification_on_windows(
            modifying_pip="pip" in reqs_to_uninstall
        )

        for req in reqs_to_uninstall.values():
            uninstall_pathset = req.uninstall(
                auto_confirm=options.yes,
                verbose=self.verbosity > 0,
            )
            if uninstall_pathset:
                uninstall_pathset.commit()
        if options.root_user_action == "warn":
            warn_if_run_as_root()
        return SUCCESS
|
.venv/Lib/site-packages/pip/_internal/commands/wheel.py
ADDED
|
@@ -0,0 +1,183 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os
|
| 3 |
+
import shutil
|
| 4 |
+
from optparse import Values
|
| 5 |
+
from typing import List
|
| 6 |
+
|
| 7 |
+
from pip._internal.cache import WheelCache
|
| 8 |
+
from pip._internal.cli import cmdoptions
|
| 9 |
+
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
|
| 10 |
+
from pip._internal.cli.status_codes import SUCCESS
|
| 11 |
+
from pip._internal.exceptions import CommandError
|
| 12 |
+
from pip._internal.operations.build.build_tracker import get_build_tracker
|
| 13 |
+
from pip._internal.req.req_install import (
|
| 14 |
+
InstallRequirement,
|
| 15 |
+
check_legacy_setup_py_options,
|
| 16 |
+
)
|
| 17 |
+
from pip._internal.utils.misc import ensure_dir, normalize_path
|
| 18 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
| 19 |
+
from pip._internal.wheel_builder import build, should_build_for_wheel_command
|
| 20 |
+
|
| 21 |
+
logger = logging.getLogger(__name__)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    'pip wheel' uses the build system interface as described here:
    https://pip.pypa.io/en/stable/reference/build-system/

    """

    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-w",
            "--wheel-dir",
            dest="wheel_dir",
            metavar="dir",
            default=os.curdir,
            help=(
                "Build wheels into <dir>, where the default is the "
                "current working directory."
            ),
        )
        # Shared options, registered in the same order pip help lists them.
        for make_option in (
            cmdoptions.no_binary,
            cmdoptions.only_binary,
            cmdoptions.prefer_binary,
            cmdoptions.no_build_isolation,
            cmdoptions.use_pep517,
            cmdoptions.no_use_pep517,
            cmdoptions.check_build_deps,
            cmdoptions.constraints,
            cmdoptions.editable,
            cmdoptions.requirements,
            cmdoptions.src,
            cmdoptions.ignore_requires_python,
            cmdoptions.no_deps,
            cmdoptions.progress_bar,
        ):
            self.cmd_opts.add_option(make_option())

        self.cmd_opts.add_option(
            "--no-verify",
            dest="no_verify",
            action="store_true",
            default=False,
            help="Don't verify if built wheel is valid.",
        )

        self.cmd_opts.add_option(cmdoptions.config_settings())
        self.cmd_opts.add_option(cmdoptions.build_options())
        self.cmd_opts.add_option(cmdoptions.global_options())

        self.cmd_opts.add_option(
            "--pre",
            action="store_true",
            default=False,
            help=(
                "Include pre-release and development versions. By default, "
                "pip only finds stable versions."
            ),
        )

        self.cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:
        session = self.get_default_session(options)

        finder = self._build_package_finder(options, session)

        # Make the destination absolute and ensure it exists up front.
        options.wheel_dir = normalize_path(options.wheel_dir)
        ensure_dir(options.wheel_dir)

        build_tracker = self.enter_context(get_build_tracker())

        directory = TempDirectory(
            delete=not options.no_clean,
            kind="wheel",
            globally_managed=True,
        )

        reqs = self.get_requirements(args, options, finder, session)
        check_legacy_setup_py_options(options, reqs)

        wheel_cache = WheelCache(options.cache_dir)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            build_tracker=build_tracker,
            session=session,
            finder=finder,
            download_dir=options.wheel_dir,
            use_user_site=False,
            verbosity=self.verbosity,
        )

        resolver = self.make_resolver(
            preparer=preparer,
            finder=finder,
            options=options,
            wheel_cache=wheel_cache,
            ignore_requires_python=options.ignore_requires_python,
            use_pep517=options.use_pep517,
        )

        self.trace_basic_info(finder)

        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

        # Requirements that already resolved to wheels are just saved;
        # everything else eligible is queued for building.
        reqs_to_build: List[InstallRequirement] = []
        for req in requirement_set.requirements.values():
            if req.is_wheel:
                preparer.save_linked_requirement(req)
            elif should_build_for_wheel_command(req):
                reqs_to_build.append(req)

        preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
        requirement_set.warn_legacy_versions_and_specifiers()

        # build wheels
        build_successes, build_failures = build(
            reqs_to_build,
            wheel_cache=wheel_cache,
            verify=(not options.no_verify),
            build_options=options.build_options or [],
            global_options=options.global_options or [],
        )
        for req in build_successes:
            assert req.link and req.link.is_wheel
            assert req.local_file_path
            # Copy the built wheel from the cache to the target directory;
            # a copy failure counts as a build failure.
            try:
                shutil.copy(req.local_file_path, options.wheel_dir)
            except OSError as e:
                logger.warning(
                    "Building wheel for %s failed: %s",
                    req.name,
                    e,
                )
                build_failures.append(req)
        if build_failures:
            raise CommandError("Failed to build one or more wheels")

        return SUCCESS
|
.venv/Lib/site-packages/pip/_internal/configuration.py
ADDED
|
@@ -0,0 +1,381 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Configuration management setup
|
| 2 |
+
|
| 3 |
+
Some terminology:
|
| 4 |
+
- name
|
| 5 |
+
As written in config files.
|
| 6 |
+
- value
|
| 7 |
+
Value associated with a name
|
| 8 |
+
- key
|
| 9 |
+
Name combined with it's section (section.name)
|
| 10 |
+
- variant
|
| 11 |
+
A single word describing where the configuration key-value pair came from
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
import configparser
|
| 15 |
+
import locale
|
| 16 |
+
import os
|
| 17 |
+
import sys
|
| 18 |
+
from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
|
| 19 |
+
|
| 20 |
+
from pip._internal.exceptions import (
|
| 21 |
+
ConfigurationError,
|
| 22 |
+
ConfigurationFileCouldNotBeLoaded,
|
| 23 |
+
)
|
| 24 |
+
from pip._internal.utils import appdirs
|
| 25 |
+
from pip._internal.utils.compat import WINDOWS
|
| 26 |
+
from pip._internal.utils.logging import getLogger
|
| 27 |
+
from pip._internal.utils.misc import ensure_dir, enum
|
| 28 |
+
|
| 29 |
+
RawConfigParser = configparser.RawConfigParser # Shorthand
|
| 30 |
+
Kind = NewType("Kind", str)
|
| 31 |
+
|
| 32 |
+
CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
|
| 33 |
+
ENV_NAMES_IGNORED = "version", "help"
|
| 34 |
+
|
| 35 |
+
# The kinds of configurations there are.
|
| 36 |
+
kinds = enum(
|
| 37 |
+
USER="user", # User Specific
|
| 38 |
+
GLOBAL="global", # System Wide
|
| 39 |
+
SITE="site", # [Virtual] Environment Specific
|
| 40 |
+
ENV="env", # from PIP_CONFIG_FILE
|
| 41 |
+
ENV_VAR="env-var", # from Environment Variables
|
| 42 |
+
)
|
| 43 |
+
OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
|
| 44 |
+
VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE
|
| 45 |
+
|
| 46 |
+
logger = getLogger(__name__)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
# NOTE: Maybe use the optionx attribute to normalize keynames.
|
| 50 |
+
def _normalize_name(name: str) -> str:
|
| 51 |
+
"""Make a name consistent regardless of source (environment or file)"""
|
| 52 |
+
name = name.lower().replace("_", "-")
|
| 53 |
+
if name.startswith("--"):
|
| 54 |
+
name = name[2:] # only prefer long opts
|
| 55 |
+
return name
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def _disassemble_key(name: str) -> List[str]:
|
| 59 |
+
if "." not in name:
|
| 60 |
+
error_message = (
|
| 61 |
+
"Key does not contain dot separated section and key. "
|
| 62 |
+
"Perhaps you wanted to use 'global.{}' instead?"
|
| 63 |
+
).format(name)
|
| 64 |
+
raise ConfigurationError(error_message)
|
| 65 |
+
return name.split(".", 1)
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def get_configuration_files() -> Dict[Kind, List[str]]:
    """Return the candidate configuration file paths for each file-backed variant.

    The mapping covers the GLOBAL, SITE, and USER kinds; ENV / ENV_VAR
    variants are resolved elsewhere from the process environment.
    """
    site_file = os.path.join(sys.prefix, CONFIG_BASENAME)

    # Pre-appdirs location, kept so existing configs keep working.
    legacy_file = os.path.join(
        os.path.expanduser("~"),
        "pip" if WINDOWS else ".pip",
        CONFIG_BASENAME,
    )
    new_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)

    global_files = [
        os.path.join(base, CONFIG_BASENAME)
        for base in appdirs.site_config_dirs("pip")
    ]

    return {
        kinds.GLOBAL: global_files,
        kinds.SITE: [site_file],
        kinds.USER: [legacy_file, new_file],
    }
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
class Configuration:
    """Handles management of configuration.

    Provides an interface to accessing and managing configuration files.

    This class provides an API that takes "section.key-name" style
    keys and stores the value associated with it as "key-name" under the
    section "section".

    This allows for a clean interface wherein both the section and the
    key-name are preserved in an easy to manage form in the configuration files
    and the data stored is also nice.
    """

    def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
        super().__init__()

        # Only file-backed variants can be edited; ENV/ENV_VAR are read-only.
        if load_only is not None and load_only not in VALID_LOAD_ONLY:
            raise ConfigurationError(
                "Got invalid value for load_only - should be one of {}".format(
                    ", ".join(map(repr, VALID_LOAD_ONLY))
                )
            )
        self.isolated = isolated
        self.load_only = load_only

        # Because we keep track of where we got the data from
        self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
            variant: [] for variant in OVERRIDE_ORDER
        }
        self._config: Dict[Kind, Dict[str, Any]] = {
            variant: {} for variant in OVERRIDE_ORDER
        }
        # Parsers whose in-memory state differs from disk; flushed by save().
        self._modified_parsers: List[Tuple[str, RawConfigParser]] = []

    def load(self) -> None:
        """Loads configuration from configuration files and environment"""
        self._load_config_files()
        if not self.isolated:
            self._load_environment_vars()

    def get_file_to_edit(self) -> Optional[str]:
        """Returns the file with highest priority in configuration"""
        assert self.load_only is not None, "Need to be specified a file to be editing"

        try:
            return self._get_parser_to_modify()[0]
        except IndexError:
            return None

    def items(self) -> Iterable[Tuple[str, Any]]:
        """Returns key-value pairs like dict.items() representing the loaded
        configuration
        """
        return self._dictionary.items()

    def get_value(self, key: str) -> Any:
        """Get a value from the configuration."""
        orig_key = key
        key = _normalize_name(key)
        try:
            return self._dictionary[key]
        except KeyError:
            # disassembling triggers a more useful error message than simply
            # "No such key" in the case that the key isn't in the form command.option
            _disassemble_key(key)
            raise ConfigurationError(f"No such key - {orig_key}")

    def set_value(self, key: str, value: Any) -> None:
        """Modify a value in the configuration.

        Updates both the backing parser (for persistence via save()) and
        the in-memory configuration for the load_only variant.
        """
        key = _normalize_name(key)
        self._ensure_have_load_only()

        assert self.load_only
        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Modify the parser and the configuration
            if not parser.has_section(section):
                parser.add_section(section)
            parser.set(section, name, value)

        self._config[self.load_only][key] = value
        self._mark_as_modified(fname, parser)

    def unset_value(self, key: str) -> None:
        """Unset a value in the configuration."""
        orig_key = key
        key = _normalize_name(key)
        self._ensure_have_load_only()

        assert self.load_only
        if key not in self._config[self.load_only]:
            raise ConfigurationError(f"No such key - {orig_key}")

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)
            if not (
                parser.has_section(section) and parser.remove_option(section, name)
            ):
                # The option was not removed.
                raise ConfigurationError(
                    "Fatal Internal error [id=1]. Please report as a bug."
                )

            # The section may be empty after the option was removed.
            if not parser.items(section):
                parser.remove_section(section)
            self._mark_as_modified(fname, parser)

        del self._config[self.load_only][key]

    def save(self) -> None:
        """Save the current in-memory state."""
        self._ensure_have_load_only()

        for fname, parser in self._modified_parsers:
            logger.info("Writing to %s", fname)

            # Ensure directory exists.
            ensure_dir(os.path.dirname(fname))

            # Ensure directory's permission(need to be writeable)
            try:
                with open(fname, "w") as f:
                    parser.write(f)
            except OSError as error:
                raise ConfigurationError(
                    f"An error occurred while writing to the configuration file "
                    f"{fname}: {error}"
                )

    #
    # Private routines
    #

    def _ensure_have_load_only(self) -> None:
        # Mutation requires a specific file variant to target.
        if self.load_only is None:
            raise ConfigurationError("Needed a specific file to be modifying.")
        logger.debug("Will be working with %s variant only", self.load_only)

    @property
    def _dictionary(self) -> Dict[str, Any]:
        """A dictionary representing the loaded configuration.

        Later variants in OVERRIDE_ORDER take precedence over earlier ones.
        """
        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
        # are not needed here.
        retval = {}

        for variant in OVERRIDE_ORDER:
            retval.update(self._config[variant])

        return retval

    def _load_config_files(self) -> None:
        """Loads configuration from configuration files"""
        config_files = dict(self.iter_config_files())
        # PIP_CONFIG_FILE=os.devnull is the documented way to disable files.
        if config_files[kinds.ENV][0:1] == [os.devnull]:
            logger.debug(
                "Skipping loading configuration files due to "
                "environment's PIP_CONFIG_FILE being os.devnull"
            )
            return

        for variant, files in config_files.items():
            for fname in files:
                # If there's specific variant set in `load_only`, load only
                # that variant, not the others.
                if self.load_only is not None and variant != self.load_only:
                    logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
                    continue

                parser = self._load_file(variant, fname)

                # Keeping track of the parsers used
                self._parsers[variant].append((fname, parser))

    def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
        """Parse one file and merge its sections into the variant's config."""
        logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
        parser = self._construct_parser(fname)

        for section in parser.sections():
            items = parser.items(section)
            self._config[variant].update(self._normalized_keys(section, items))

        return parser

    def _construct_parser(self, fname: str) -> RawConfigParser:
        """Build a parser for fname, reading it only when it exists."""
        parser = configparser.RawConfigParser()
        # If there is no such file, don't bother reading it but create the
        # parser anyway, to hold the data.
        # Doing this is useful when modifying and saving files, where we don't
        # need to construct a parser.
        if os.path.exists(fname):
            locale_encoding = locale.getpreferredencoding(False)
            try:
                parser.read(fname, encoding=locale_encoding)
            except UnicodeDecodeError:
                # See https://github.com/pypa/pip/issues/4963
                raise ConfigurationFileCouldNotBeLoaded(
                    reason=f"contains invalid {locale_encoding} characters",
                    fname=fname,
                )
            except configparser.Error as error:
                # See https://github.com/pypa/pip/issues/4893
                raise ConfigurationFileCouldNotBeLoaded(error=error)
        return parser

    def _load_environment_vars(self) -> None:
        """Loads configuration from environment variables"""
        self._config[kinds.ENV_VAR].update(
            self._normalized_keys(":env:", self.get_environ_vars())
        )

    def _normalized_keys(
        self, section: str, items: Iterable[Tuple[str, Any]]
    ) -> Dict[str, Any]:
        """Normalizes items to construct a dictionary with normalized keys.

        This routine is where the names become keys and are made the same
        regardless of source - configuration files or environment.
        """
        normalized = {}
        for name, val in items:
            key = section + "." + _normalize_name(name)
            normalized[key] = val
        return normalized

    def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            if key.startswith("PIP_"):
                name = key[4:].lower()
                if name not in ENV_NAMES_IGNORED:
                    yield name, val

    # XXX: This is patched in the tests.
    def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
        """Yields variant and configuration files associated with it.

        This should be treated like items of a dictionary.
        """
        # SMELL: Move the conditions out of this function

        # environment variables have the lowest priority
        config_file = os.environ.get("PIP_CONFIG_FILE", None)
        if config_file is not None:
            yield kinds.ENV, [config_file]
        else:
            yield kinds.ENV, []

        config_files = get_configuration_files()

        # at the base we have any global configuration
        yield kinds.GLOBAL, config_files[kinds.GLOBAL]

        # per-user configuration next
        should_load_user_config = not self.isolated and not (
            config_file and os.path.exists(config_file)
        )
        if should_load_user_config:
            # The legacy config file is overridden by the new config file
            yield kinds.USER, config_files[kinds.USER]

        # finally virtualenv configuration first trumping others
        yield kinds.SITE, config_files[kinds.SITE]

    def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
        """Get values present in a config file"""
        return self._config[variant]

    def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
        """Return the (filename, parser) pair edits should target."""
        # Determine which parser to modify
        assert self.load_only
        parsers = self._parsers[self.load_only]
        if not parsers:
            # This should not happen if everything works correctly.
            raise ConfigurationError(
                "Fatal Internal error [id=2]. Please report as a bug."
            )

        # Use the highest priority parser.
        return parsers[-1]

    # XXX: This is patched in the tests.
    def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
        """Record a parser as dirty so save() writes it out (deduplicated)."""
        file_parser_tuple = (fname, parser)
        if file_parser_tuple not in self._modified_parsers:
            self._modified_parsers.append(file_parser_tuple)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self._dictionary!r})"
|
.venv/Lib/site-packages/pip/_internal/distributions/__init__.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pip._internal.distributions.base import AbstractDistribution
|
| 2 |
+
from pip._internal.distributions.sdist import SourceDistribution
|
| 3 |
+
from pip._internal.distributions.wheel import WheelDistribution
|
| 4 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def make_distribution_for_install_requirement(
    install_req: InstallRequirement,
) -> AbstractDistribution:
    """Returns a Distribution for the given InstallRequirement"""
    # Editable requirements always take the source-distribution path; they
    # use the legacy logic until a modern standard exists for them.
    if install_req.editable:
        return SourceDistribution(install_req)

    # Wheels map to WheelDistribution; everything else is a source dist.
    if install_req.is_wheel:
        return WheelDistribution(install_req)
    return SourceDistribution(install_req)
|
.venv/Lib/site-packages/pip/_internal/distributions/base.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
|
| 3 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 4 |
+
from pip._internal.metadata.base import BaseDistribution
|
| 5 |
+
from pip._internal.req import InstallRequirement
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class AbstractDistribution(metaclass=abc.ABCMeta):
    """A base class for handling installable artifacts.

    The requirements for anything installable are as follows:

    - we must be able to determine the requirement name
      (or we can't correctly handle the non-upgrade case).

    - for packages with setup requirements, we must also be able
      to determine their requirements without installing additional
      packages (for the same reason as run-time dependencies)

    - we must be able to create a Distribution object exposing the
      above metadata.
    """

    def __init__(self, req: InstallRequirement) -> None:
        super().__init__()
        # The install requirement this distribution wraps.
        self.req = req

    @abc.abstractmethod
    def get_metadata_distribution(self) -> BaseDistribution:
        """Return a metadata distribution for the wrapped requirement."""
        raise NotImplementedError()

    @abc.abstractmethod
    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        """Do whatever is needed so metadata can be retrieved afterwards.

        :param finder: package finder, used by subclasses that must locate
            and install build dependencies.
        :param build_isolation: whether PEP 517 builds should run in an
            isolated environment.
        :param check_build_deps: whether build dependencies available in the
            current environment should be verified.
        """
        raise NotImplementedError()
|
.venv/Lib/site-packages/pip/_internal/distributions/installed.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pip._internal.distributions.base import AbstractDistribution
|
| 2 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 3 |
+
from pip._internal.metadata import BaseDistribution
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class InstalledDistribution(AbstractDistribution):
    """Represents an installed package.

    This does not need any preparation as the required information has already
    been computed.
    """

    def get_metadata_distribution(self) -> BaseDistribution:
        # satisfied_by is set during resolution for already-installed packages.
        dist = self.req.satisfied_by
        assert dist is not None, "not actually installed"
        return dist

    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        # Nothing to prepare: the metadata already exists on disk.
        pass
|
.venv/Lib/site-packages/pip/_internal/distributions/sdist.py
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from typing import Iterable, Set, Tuple
|
| 3 |
+
|
| 4 |
+
from pip._internal.build_env import BuildEnvironment
|
| 5 |
+
from pip._internal.distributions.base import AbstractDistribution
|
| 6 |
+
from pip._internal.exceptions import InstallationError
|
| 7 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 8 |
+
from pip._internal.metadata import BaseDistribution
|
| 9 |
+
from pip._internal.utils.subprocess import runner_with_spinner_message
|
| 10 |
+
|
| 11 |
+
logger = logging.getLogger(__name__)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class SourceDistribution(AbstractDistribution):
    """Represents a source distribution.

    The preparation step for these needs metadata for the packages to be
    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
    """

    def get_metadata_distribution(self) -> BaseDistribution:
        # Only valid after prepare_distribution_metadata() has run.
        return self.req.get_dist()

    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        # Load pyproject.toml, to determine whether PEP 517 is to be used
        self.req.load_pyproject_toml()

        # Set up the build isolation, if this requirement should be isolated
        should_isolate = self.req.use_pep517 and build_isolation
        if should_isolate:
            # Setup an isolated environment and install the build backend static
            # requirements in it.
            self._prepare_build_backend(finder)
            # Check that if the requirement is editable, it either supports PEP 660 or
            # has a setup.py or a setup.cfg. This cannot be done earlier because we need
            # to setup the build backend to verify it supports build_editable, nor can
            # it be done later, because we want to avoid installing build requirements
            # needlessly. Doing it here also works around setuptools generating
            # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
            # without setup.py nor setup.cfg.
            self.req.isolated_editable_sanity_check()
            # Install the dynamic build requirements.
            self._install_build_reqs(finder)
        # Check if the current environment provides build dependencies
        should_check_deps = self.req.use_pep517 and check_build_deps
        if should_check_deps:
            pyproject_requires = self.req.pyproject_requires
            assert pyproject_requires is not None
            conflicting, missing = self.req.build_env.check_requirements(
                pyproject_requires
            )
            if conflicting:
                self._raise_conflicts("the backend dependencies", conflicting)
            if missing:
                self._raise_missing_reqs(missing)
        self.req.prepare_metadata()

    def _prepare_build_backend(self, finder: PackageFinder) -> None:
        """Create an isolated build env and install the static build deps."""
        # Isolate in a BuildEnvironment and install the build-time
        # requirements.
        pyproject_requires = self.req.pyproject_requires
        assert pyproject_requires is not None

        self.req.build_env = BuildEnvironment()
        self.req.build_env.install_requirements(
            finder, pyproject_requires, "overlay", kind="build dependencies"
        )
        conflicting, missing = self.req.build_env.check_requirements(
            self.req.requirements_to_check
        )
        if conflicting:
            self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
        if missing:
            logger.warning(
                "Missing build requirements in pyproject.toml for %s.",
                self.req,
            )
            logger.warning(
                "The project does not specify a build backend, and "
                "pip cannot fall back to setuptools without %s.",
                " and ".join(map(repr, sorted(missing))),
            )

    def _get_build_requires_wheel(self) -> Iterable[str]:
        """Ask the PEP 517 backend for its dynamic wheel build requirements."""
        with self.req.build_env:
            runner = runner_with_spinner_message("Getting requirements to build wheel")
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(runner):
                return backend.get_requires_for_build_wheel()

    def _get_build_requires_editable(self) -> Iterable[str]:
        """Ask the PEP 660 backend for its dynamic editable build requirements."""
        with self.req.build_env:
            runner = runner_with_spinner_message(
                "Getting requirements to build editable"
            )
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(runner):
                return backend.get_requires_for_build_editable()

    def _install_build_reqs(self, finder: PackageFinder) -> None:
        """Install the backend's dynamically declared build requirements."""
        # Install any extra build dependencies that the backend requests.
        # This must be done in a second pass, as the pyproject.toml
        # dependencies must be installed before we can call the backend.
        if (
            self.req.editable
            and self.req.permit_editable_wheels
            and self.req.supports_pyproject_editable()
        ):
            build_reqs = self._get_build_requires_editable()
        else:
            build_reqs = self._get_build_requires_wheel()
        conflicting, missing = self.req.build_env.check_requirements(build_reqs)
        if conflicting:
            self._raise_conflicts("the backend dependencies", conflicting)
        self.req.build_env.install_requirements(
            finder, missing, "normal", kind="backend dependencies"
        )

    def _raise_conflicts(
        self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
    ) -> None:
        """Raise InstallationError describing conflicting build requirements."""
        format_string = (
            "Some build dependencies for {requirement} "
            "conflict with {conflicting_with}: {description}."
        )
        error_message = format_string.format(
            requirement=self.req,
            conflicting_with=conflicting_with,
            description=", ".join(
                f"{installed} is incompatible with {wanted}"
                for installed, wanted in sorted(conflicting_reqs)
            ),
        )
        raise InstallationError(error_message)

    def _raise_missing_reqs(self, missing: Set[str]) -> None:
        """Raise InstallationError listing missing build requirements."""
        format_string = (
            "Some build dependencies for {requirement} are missing: {missing}."
        )
        error_message = format_string.format(
            requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
        )
        raise InstallationError(error_message)
|
.venv/Lib/site-packages/pip/_internal/distributions/wheel.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 2 |
+
|
| 3 |
+
from pip._internal.distributions.base import AbstractDistribution
|
| 4 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 5 |
+
from pip._internal.metadata import (
|
| 6 |
+
BaseDistribution,
|
| 7 |
+
FilesystemWheel,
|
| 8 |
+
get_wheel_distribution,
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class WheelDistribution(AbstractDistribution):
    """Represents a wheel distribution.

    This does not need any preparation as wheels can be directly unpacked.
    """

    def get_metadata_distribution(self) -> BaseDistribution:
        """Loads the metadata from the wheel file into memory and returns a
        Distribution that uses it, not relying on the wheel file or
        requirement.
        """
        assert self.req.local_file_path, "Set as part of preparation during download"
        assert self.req.name, "Wheels are never unnamed"
        # Read metadata straight from the wheel archive on disk.
        return get_wheel_distribution(
            FilesystemWheel(self.req.local_file_path),
            canonicalize_name(self.req.name),
        )

    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        # Wheels already contain their metadata; nothing to build.
        pass
|