Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes. See the raw diff.
- falcon/lib/python3.10/site-packages/RUST/__init__.py +10 -0
- falcon/lib/python3.10/site-packages/RUST/__main__.py +302 -0
- falcon/lib/python3.10/site-packages/RUST/__pycache__/__init__.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/RUST/__pycache__/amino.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/RUST/__pycache__/codon.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/RUST/__pycache__/methods.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/RUST/__pycache__/predict_profiles.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/RUST/codon.py +550 -0
- falcon/lib/python3.10/site-packages/RUST/dipeptide.py +454 -0
- falcon/lib/python3.10/site-packages/RUST/plot_transcript.py +409 -0
- falcon/lib/python3.10/site-packages/RUST/synergy.py +194 -0
- falcon/lib/python3.10/site-packages/RUST/tripeptide.py +379 -0
- falcon/lib/python3.10/site-packages/setuptools/__pycache__/__init__.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/__pycache__/_itertools.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/__pycache__/errors.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/__pycache__/namespaces.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__init__.py +21 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/__init__.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/_requirestxt.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/build.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/build_ext.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/build_py.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/develop.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/dist_info.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/easy_install.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/editable_wheel.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/saveopts.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/sdist.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/setopt.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/test.cpython-310.pyc +0 -0
- falcon/lib/python3.10/site-packages/setuptools/command/_requirestxt.py +131 -0
- falcon/lib/python3.10/site-packages/setuptools/command/alias.py +77 -0
- falcon/lib/python3.10/site-packages/setuptools/command/bdist_egg.py +479 -0
- falcon/lib/python3.10/site-packages/setuptools/command/bdist_rpm.py +42 -0
- falcon/lib/python3.10/site-packages/setuptools/command/bdist_wheel.py +610 -0
- falcon/lib/python3.10/site-packages/setuptools/command/build.py +135 -0
- falcon/lib/python3.10/site-packages/setuptools/command/build_ext.py +469 -0
- falcon/lib/python3.10/site-packages/setuptools/command/build_py.py +400 -0
- falcon/lib/python3.10/site-packages/setuptools/command/develop.py +195 -0
- falcon/lib/python3.10/site-packages/setuptools/command/dist_info.py +103 -0
- falcon/lib/python3.10/site-packages/setuptools/command/easy_install.py +2365 -0
- falcon/lib/python3.10/site-packages/setuptools/command/editable_wheel.py +925 -0
- falcon/lib/python3.10/site-packages/setuptools/command/egg_info.py +720 -0
- falcon/lib/python3.10/site-packages/setuptools/command/install_egg_info.py +58 -0
- falcon/lib/python3.10/site-packages/setuptools/command/install_lib.py +137 -0
- falcon/lib/python3.10/site-packages/setuptools/command/install_scripts.py +73 -0
- falcon/lib/python3.10/site-packages/setuptools/command/launcher manifest.xml +15 -0
falcon/lib/python3.10/site-packages/RUST/__init__.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from RUST import __main__
|
| 2 |
+
from RUST import methods
|
| 3 |
+
from RUST import amino
|
| 4 |
+
from RUST import codon
|
| 5 |
+
from RUST import dipeptide
|
| 6 |
+
from RUST import nucleotide
|
| 7 |
+
from RUST import tripeptide
|
| 8 |
+
from RUST import synergy
|
| 9 |
+
from RUST import plot_transcript
|
| 10 |
+
from RUST import predict_profiles
|
falcon/lib/python3.10/site-packages/RUST/__main__.py
ADDED
|
@@ -0,0 +1,302 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
script to run RUST as a cli tool
|
| 3 |
+
|
| 4 |
+
Usage:
|
| 5 |
+
RUST <command> [<args>...]
|
| 6 |
+
|
| 7 |
+
Options:
|
| 8 |
+
-h --help Show this screen.
|
| 9 |
+
--version Show version.
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
import RUST
|
| 15 |
+
|
| 16 |
+
# import sys
|
| 17 |
+
# import time
|
| 18 |
+
# import traceback
|
| 19 |
+
import argparse
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def parser():
|
| 23 |
+
parser = argparse.ArgumentParser(description="RUST - Ribo-seq Unit Step Transform")
|
| 24 |
+
parser.usage = "RUST <command> [<args>]"
|
| 25 |
+
|
| 26 |
+
subparsers = parser.add_subparsers(help="mode of analysis")
|
| 27 |
+
|
| 28 |
+
amino = subparsers.add_parser("amino", help="Run RUST on each amino acid")
|
| 29 |
+
amino.add_argument(
|
| 30 |
+
"-t",
|
| 31 |
+
"--transcriptome",
|
| 32 |
+
help="fasta file of transcripts, CDS start and end may be provided on description line using tab separation e.g. >NM_0001 10 5000, otherwise it searches for longest ORF"
|
| 33 |
+
", required=True",
|
| 34 |
+
)
|
| 35 |
+
amino.add_argument(
|
| 36 |
+
"-a",
|
| 37 |
+
"--alignment",
|
| 38 |
+
help="sorted bam file of transcriptome alignments",
|
| 39 |
+
required=True,
|
| 40 |
+
)
|
| 41 |
+
amino.add_argument("-o", "--offset", help="nucleotide offset to A-site", type=int)
|
| 42 |
+
amino.add_argument(
|
| 43 |
+
"-l",
|
| 44 |
+
"--lengths",
|
| 45 |
+
help="lengths of footprints included, for example 28:32 is 28,29,30,31,32",
|
| 46 |
+
)
|
| 47 |
+
amino.add_argument(
|
| 48 |
+
"-P", "--Path", help='path to outputfile, default is "amino"', default="amino"
|
| 49 |
+
)
|
| 50 |
+
amino.set_defaults(mode="amino")
|
| 51 |
+
|
| 52 |
+
codon = subparsers.add_parser("codon", help="Run RUST on each codon")
|
| 53 |
+
codon.add_argument(
|
| 54 |
+
"-t",
|
| 55 |
+
"--transcriptome",
|
| 56 |
+
help="fasta file of transcripts, CDS start and end may be provided on description line using tab separation e.g. >NM_0001 10 5000, otherwise it searches for longest ORF"
|
| 57 |
+
", required=True",
|
| 58 |
+
)
|
| 59 |
+
codon.add_argument(
|
| 60 |
+
"-a",
|
| 61 |
+
"--alignment",
|
| 62 |
+
help="sorted bam file of transcriptome alignments",
|
| 63 |
+
required=True,
|
| 64 |
+
)
|
| 65 |
+
codon.add_argument("-o", "--offset", help="nucleotide offset to A-site", type=int)
|
| 66 |
+
codon.add_argument(
|
| 67 |
+
"-l",
|
| 68 |
+
"--lengths",
|
| 69 |
+
help="lengths of footprints included, for example 28:32 is 28,29,30,31,32",
|
| 70 |
+
)
|
| 71 |
+
codon.add_argument(
|
| 72 |
+
"-P", "--Path", help='path to outputfile, default is "codon"', default="codon"
|
| 73 |
+
)
|
| 74 |
+
codon.set_defaults(mode="codon")
|
| 75 |
+
|
| 76 |
+
nucleotide = subparsers.add_parser("nucleotide", help="Run RUST on each nucleotide")
|
| 77 |
+
nucleotide.add_argument(
|
| 78 |
+
"-t",
|
| 79 |
+
"--transcriptome",
|
| 80 |
+
help="fasta file of transcripts, CDS start and end may be provided on description line using tab separation e.g. >NM_0001 10 5000, otherwise it searches for longest ORF"
|
| 81 |
+
", required=True",
|
| 82 |
+
)
|
| 83 |
+
nucleotide.add_argument(
|
| 84 |
+
"-a",
|
| 85 |
+
"--alignment",
|
| 86 |
+
help="sorted bam file of transcriptome alignments",
|
| 87 |
+
required=True,
|
| 88 |
+
)
|
| 89 |
+
nucleotide.add_argument(
|
| 90 |
+
"-o", "--offset", help="nucleotide offset to A-site", type=int
|
| 91 |
+
)
|
| 92 |
+
nucleotide.add_argument(
|
| 93 |
+
"-l",
|
| 94 |
+
"--lengths",
|
| 95 |
+
help="lengths of footprints included, for example 28:32 is 28,29,30,31,32",
|
| 96 |
+
)
|
| 97 |
+
nucleotide.add_argument(
|
| 98 |
+
"-P",
|
| 99 |
+
"--Path",
|
| 100 |
+
help='path to outputfile, default is "nucleotide"',
|
| 101 |
+
default="nucleotide",
|
| 102 |
+
)
|
| 103 |
+
nucleotide.set_defaults(mode="nucleotide")
|
| 104 |
+
|
| 105 |
+
dipeptide = subparsers.add_parser("dipeptide", help="Run RUST on each dipeptide")
|
| 106 |
+
dipeptide.add_argument(
|
| 107 |
+
"-t",
|
| 108 |
+
"--transcriptome",
|
| 109 |
+
help="fasta file of transcripts, CDS start and end may be provided on description line using tab separation e.g. >NM_0001 10 5000, otherwise it searches for longest ORF"
|
| 110 |
+
", required=True",
|
| 111 |
+
)
|
| 112 |
+
dipeptide.add_argument(
|
| 113 |
+
"-a",
|
| 114 |
+
"--alignment",
|
| 115 |
+
help="sorted bam file of transcriptome alignments",
|
| 116 |
+
required=True,
|
| 117 |
+
)
|
| 118 |
+
dipeptide.add_argument(
|
| 119 |
+
"-o", "--offset", help="nucleotide offset to A-site", type=int
|
| 120 |
+
)
|
| 121 |
+
dipeptide.add_argument(
|
| 122 |
+
"-l",
|
| 123 |
+
"--lengths",
|
| 124 |
+
help="lengths of footprints included, for example 28:32 is 28,29,30,31,32",
|
| 125 |
+
)
|
| 126 |
+
dipeptide.add_argument(
|
| 127 |
+
"-P",
|
| 128 |
+
"--Path",
|
| 129 |
+
help='path to outputfile, default is "dipeptide"',
|
| 130 |
+
default="dipeptide",
|
| 131 |
+
)
|
| 132 |
+
dipeptide.set_defaults(mode="dipeptide")
|
| 133 |
+
|
| 134 |
+
tripeptide = subparsers.add_parser("tripeptide", help="Run RUST on each tripeptide")
|
| 135 |
+
tripeptide.add_argument(
|
| 136 |
+
"-t",
|
| 137 |
+
"--transcriptome",
|
| 138 |
+
help="fasta file of transcripts, CDS start and end may be provided on description line using tab separation e.g. >NM_0001 10 5000, otherwise it searches for longest ORF"
|
| 139 |
+
", required=True",
|
| 140 |
+
)
|
| 141 |
+
tripeptide.add_argument(
|
| 142 |
+
"-a",
|
| 143 |
+
"--alignment",
|
| 144 |
+
help="sorted bam file of transcriptome alignments",
|
| 145 |
+
required=True,
|
| 146 |
+
)
|
| 147 |
+
tripeptide.add_argument(
|
| 148 |
+
"-o", "--offset", help="nucleotide offset to A-site", type=int
|
| 149 |
+
)
|
| 150 |
+
tripeptide.add_argument(
|
| 151 |
+
"-l",
|
| 152 |
+
"--lengths",
|
| 153 |
+
help="lengths of footprints included, for example 28:32 is 28,29,30,31,32",
|
| 154 |
+
)
|
| 155 |
+
tripeptide.add_argument(
|
| 156 |
+
"-P",
|
| 157 |
+
"--Path",
|
| 158 |
+
help='path to outputfile, default is "tripeptide"',
|
| 159 |
+
default="tripeptide",
|
| 160 |
+
)
|
| 161 |
+
tripeptide.set_defaults(mode="tripeptide")
|
| 162 |
+
|
| 163 |
+
predict = subparsers.add_parser(
|
| 164 |
+
"predict",
|
| 165 |
+
help="Correlation between observed and predicted profiles from CDS start + 120 to CDS stop - 60",
|
| 166 |
+
)
|
| 167 |
+
predict.add_argument(
|
| 168 |
+
"-t",
|
| 169 |
+
"--transcriptome",
|
| 170 |
+
help="fasta file of transcripts, CDS start and end may be provided on description line using tab separation e.g. >NM_0001 10 5000, otherwise it searches for longest ORF"
|
| 171 |
+
", required=True",
|
| 172 |
+
)
|
| 173 |
+
predict.add_argument(
|
| 174 |
+
"-a",
|
| 175 |
+
"--alignment",
|
| 176 |
+
help="sorted bam file of transcriptome alignments",
|
| 177 |
+
required=True,
|
| 178 |
+
)
|
| 179 |
+
predict.add_argument("-o", "--offset", help="nucleotide offset to A-site", type=int)
|
| 180 |
+
predict.add_argument(
|
| 181 |
+
"-l",
|
| 182 |
+
"--lengths",
|
| 183 |
+
help="lengths of footprints included, for example 28:32 is 28,29,30,31,32",
|
| 184 |
+
)
|
| 185 |
+
predict.add_argument(
|
| 186 |
+
"-P",
|
| 187 |
+
"--Path",
|
| 188 |
+
help='path to outputfile, default is "amino"',
|
| 189 |
+
default="predict_profiles",
|
| 190 |
+
)
|
| 191 |
+
predict.add_argument("-r", "--rustfile", help="path to rust file produced by codon")
|
| 192 |
+
predict.add_argument(
|
| 193 |
+
"-p",
|
| 194 |
+
"--profiles",
|
| 195 |
+
action="store_true",
|
| 196 |
+
help="writes all profiles in csv files, may produce >10,000 files",
|
| 197 |
+
default=False,
|
| 198 |
+
)
|
| 199 |
+
|
| 200 |
+
predict.set_defaults(mode="predict")
|
| 201 |
+
|
| 202 |
+
synergy = subparsers.add_parser(
|
| 203 |
+
"synergy",
|
| 204 |
+
help="Identifies tripeptides that are candidates for synergistic interactions",
|
| 205 |
+
)
|
| 206 |
+
synergy.add_argument(
|
| 207 |
+
"-t",
|
| 208 |
+
"--transcriptome",
|
| 209 |
+
help="fasta file of transcripts, CDS start and end may be provided on description line using tab separation e.g. >NM_0001 10 5000, otherwise it searches for longest ORF"
|
| 210 |
+
", required=True",
|
| 211 |
+
)
|
| 212 |
+
synergy.add_argument(
|
| 213 |
+
"--aa", help='path to file produced from "rust_amino"', required=True
|
| 214 |
+
)
|
| 215 |
+
synergy.add_argument(
|
| 216 |
+
"--tri", help='path to file produced from "rust_tripeptide"', required=True
|
| 217 |
+
)
|
| 218 |
+
synergy.add_argument(
|
| 219 |
+
"-P",
|
| 220 |
+
"--Path",
|
| 221 |
+
help='path to outputfile, default is "synergy"',
|
| 222 |
+
default="synergy",
|
| 223 |
+
)
|
| 224 |
+
synergy.set_defaults(mode="synergy")
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
plot = subparsers.add_parser(
|
| 229 |
+
"plot", help="Plot observed and predicted ribosome profiles"
|
| 230 |
+
)
|
| 231 |
+
plot.add_argument(
|
| 232 |
+
"-t",
|
| 233 |
+
"--transcriptome",
|
| 234 |
+
help="fasta file of transcripts, CDS start and end may be provided on description line using tab separation e.g. >NM_0001 10 5000, otherwise it searches for longest ORF"
|
| 235 |
+
", required=True",
|
| 236 |
+
)
|
| 237 |
+
plot.add_argument(
|
| 238 |
+
"-a",
|
| 239 |
+
"--alignment",
|
| 240 |
+
help="sorted bam file of transcriptome alignments",
|
| 241 |
+
required=True,
|
| 242 |
+
)
|
| 243 |
+
plot.add_argument("-o", "--offset", help="nucleotide offset to A-site", type=int)
|
| 244 |
+
plot.add_argument(
|
| 245 |
+
"-l",
|
| 246 |
+
"--lengths",
|
| 247 |
+
help="lengths of footprints included, for example 28:32 is 28,29,30,31,32",
|
| 248 |
+
)
|
| 249 |
+
plot.add_argument(
|
| 250 |
+
"-P", "--Path", help='path to outputfile, default is "amino"', default="plot"
|
| 251 |
+
)
|
| 252 |
+
plot.add_argument(
|
| 253 |
+
"-i",
|
| 254 |
+
"--identifier",
|
| 255 |
+
help='Specific transcript to plot (Use of unique identifier is sufficient for example "NM_031946"',
|
| 256 |
+
required=True,
|
| 257 |
+
)
|
| 258 |
+
plot.add_argument(
|
| 259 |
+
"-r",
|
| 260 |
+
"--rustfile",
|
| 261 |
+
help='path to file produced from "rust_codon"',
|
| 262 |
+
required=True,
|
| 263 |
+
)
|
| 264 |
+
plot.set_defaults(mode="plot")
|
| 265 |
+
|
| 266 |
+
parser.add_argument("-v", "--version", action="version", version="%(prog)s 1.3.0")
|
| 267 |
+
|
| 268 |
+
args = parser.parse_args()
|
| 269 |
+
return args
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
def main():
|
| 273 |
+
args = parser()
|
| 274 |
+
|
| 275 |
+
if args.mode == "amino":
|
| 276 |
+
RUST.amino.main(args)
|
| 277 |
+
print(f"RUST successfully ran and outputted to {args.Path}")
|
| 278 |
+
elif args.mode == "codon":
|
| 279 |
+
RUST.codon.main(args)
|
| 280 |
+
print(f"RUST successfully ran and outputted to {args.Path}")
|
| 281 |
+
elif args.mode == "nucleotide":
|
| 282 |
+
RUST.nucleotide.main(args)
|
| 283 |
+
print(f"RUST successfully ran and outputted to {args.Path}")
|
| 284 |
+
elif args.mode == "dipeptide":
|
| 285 |
+
RUST.dipeptide.main(args)
|
| 286 |
+
print(f"RUST successfully ran and outputted to {args.Path}")
|
| 287 |
+
elif args.mode == "tripeptide":
|
| 288 |
+
RUST.tripeptide.main(args)
|
| 289 |
+
print(f"RUST successfully ran and outputted to {args.Path}")
|
| 290 |
+
elif args.mode == "predict":
|
| 291 |
+
RUST.predict_profiles.main(args)
|
| 292 |
+
print(f"RUST successfully ran and outputted to {args.Path}")
|
| 293 |
+
elif args.mode == "synergy":
|
| 294 |
+
RUST.synergy.main(args)
|
| 295 |
+
print(f"RUST successfully ran and outputted to {args.Path}")
|
| 296 |
+
elif args.mode == "plot":
|
| 297 |
+
RUST.plot_transcript.main(args)
|
| 298 |
+
print(f"RUST successfully ran and outputted to {args.Path}")
|
| 299 |
+
else:
|
| 300 |
+
raise Exception(
|
| 301 |
+
"Weird. RUST ran to end of program without triggering a pipeline or raising an error"
|
| 302 |
+
)
|
falcon/lib/python3.10/site-packages/RUST/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (484 Bytes). View file
|
|
|
falcon/lib/python3.10/site-packages/RUST/__pycache__/amino.cpython-310.pyc
ADDED
|
Binary file (9.11 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/RUST/__pycache__/codon.cpython-310.pyc
ADDED
|
Binary file (12.4 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/RUST/__pycache__/methods.cpython-310.pyc
ADDED
|
Binary file (1.86 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/RUST/__pycache__/predict_profiles.cpython-310.pyc
ADDED
|
Binary file (7.03 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/RUST/codon.py
ADDED
|
@@ -0,0 +1,550 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python
|
| 2 |
+
#####################################################################################
|
| 3 |
+
# rust_codon, Produces RUST metagene profile of codons
|
| 4 |
+
# Copyright (C) 2015 Patrick O'Connor
|
| 5 |
+
|
| 6 |
+
# This program is free software: you can redistribute it and/or modify
|
| 7 |
+
# it under the terms of the GNU General Public License as published by
|
| 8 |
+
# the Free Software Foundation, either version 3 of the License, or
|
| 9 |
+
# (at your option) any later version.
|
| 10 |
+
|
| 11 |
+
# This program is distributed in the hope that it will be useful,
|
| 12 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
| 13 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
| 14 |
+
# GNU General Public License for more details.
|
| 15 |
+
|
| 16 |
+
# You should have received a copy of the GNU General Public License
|
| 17 |
+
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
| 18 |
+
#####################################################################################
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
import os, re, pysam, sys, math, argparse
|
| 22 |
+
from RUST.methods import *
|
| 23 |
+
|
| 24 |
+
try:
|
| 25 |
+
import matplotlib as mpl
|
| 26 |
+
|
| 27 |
+
mpl.use("Agg")
|
| 28 |
+
import matplotlib.pyplot as plt
|
| 29 |
+
from pylab import MaxNLocator
|
| 30 |
+
except:
|
| 31 |
+
pass
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def RUST_metagene_plot(infileopen36, ax36):
|
| 35 |
+
infileopen36.seek(0)
|
| 36 |
+
infileopen36.readline()
|
| 37 |
+
while 1:
|
| 38 |
+
line = infileopen36.readline()
|
| 39 |
+
linesplit = line.split(",")
|
| 40 |
+
if len(linesplit) == 1:
|
| 41 |
+
break
|
| 42 |
+
codon = linesplit[0]
|
| 43 |
+
coverage = list(map(float, linesplit[1:]))
|
| 44 |
+
coverage_a = coverage[0]
|
| 45 |
+
if coverage_a == 0:
|
| 46 |
+
continue
|
| 47 |
+
coverage_n = [n / coverage_a for n in coverage[1:]]
|
| 48 |
+
log2_values = [math.log(n, 2) if n != 0 else float("-inf") for n in coverage_n]
|
| 49 |
+
ax36.plot(log2_values, color="gray")
|
| 50 |
+
|
| 51 |
+
line = infileopen36.readline()
|
| 52 |
+
linesplit = line.split(",")
|
| 53 |
+
if "NA" not in line:
|
| 54 |
+
coverage = map(float, linesplit[2:])
|
| 55 |
+
ax2 = ax36.twinx()
|
| 56 |
+
ax2.plot(coverage, color="blue")
|
| 57 |
+
for tl in ax2.get_yticklabels():
|
| 58 |
+
tl.set_color("blue")
|
| 59 |
+
tl.set_rotation(0)
|
| 60 |
+
|
| 61 |
+
ax2.yaxis.set_major_locator(MaxNLocator(3))
|
| 62 |
+
ax2.set_ylim(0, 1.0)
|
| 63 |
+
ax2.set_ylim(-2, 1.0)
|
| 64 |
+
ax2.set_yticks([0, 1], minor=False)
|
| 65 |
+
ax2.set_yticklabels(["0", "1"])
|
| 66 |
+
ax2.set_ylabel("Kullback-Leibler divergence", color="blue")
|
| 67 |
+
|
| 68 |
+
ax36.set_xticks([5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55])
|
| 69 |
+
ax36.set_xticklabels([-35, -30, -25, -20, -15, -10, -5, 0, 5, 10, 15])
|
| 70 |
+
ax36.set_xlabel("distance from A-site [codon]")
|
| 71 |
+
ax36.set_ylabel("Codon RUST ratio (observed/expected), log2")
|
| 72 |
+
ax36.axvline(40, color="red")
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def A_site_plot(infileopen35, dict_codon75, axis_Asite53, loc2):
|
| 76 |
+
codon_to_amino_dict = {}
|
| 77 |
+
amino_to_codons_dict = {}
|
| 78 |
+
for amino_acid, codons in dict_codon75.items():
|
| 79 |
+
for codon in codons:
|
| 80 |
+
codon_to_amino_dict[codon] = amino_acid
|
| 81 |
+
amino_to_codons_dict.setdefault(amino_acid, []).append(codon)
|
| 82 |
+
|
| 83 |
+
list1 = []
|
| 84 |
+
list2 = []
|
| 85 |
+
infileopen35.seek(0)
|
| 86 |
+
infileopen35.readline()
|
| 87 |
+
dict_amino_value = {}
|
| 88 |
+
for line in infileopen35:
|
| 89 |
+
linesplit = line.split(",")
|
| 90 |
+
if len(linesplit) == 1:
|
| 91 |
+
break
|
| 92 |
+
codon = linesplit[0]
|
| 93 |
+
if codon in ["TAA", "TAG", "TGA"]:
|
| 94 |
+
continue
|
| 95 |
+
list1.append(linesplit[0])
|
| 96 |
+
coverage = list(map(float, linesplit[1:]))
|
| 97 |
+
coverage_a = coverage[0]
|
| 98 |
+
coverage_n = [n / coverage_a for n in coverage[1:]]
|
| 99 |
+
list2.append(float(coverage_n[loc2]))
|
| 100 |
+
|
| 101 |
+
amino = codon_to_amino_dict[linesplit[0]]
|
| 102 |
+
if amino in dict_amino_value:
|
| 103 |
+
dict_amino_value[codon_to_amino_dict[linesplit[0]]].append(
|
| 104 |
+
float(coverage_n[loc2])
|
| 105 |
+
)
|
| 106 |
+
else:
|
| 107 |
+
dict_amino_value[codon_to_amino_dict[linesplit[0]]] = [
|
| 108 |
+
float(coverage_n[loc2])
|
| 109 |
+
]
|
| 110 |
+
|
| 111 |
+
list_amino_sorted = []
|
| 112 |
+
for key, value in dict_amino_value.items():
|
| 113 |
+
list_amino_sorted.append((mean_value(value), key))
|
| 114 |
+
list_amino_sorted.sort()
|
| 115 |
+
|
| 116 |
+
A_site_value_norm = [n / min(list2) for n in list2]
|
| 117 |
+
list3 = list(zip(A_site_value_norm, list1))
|
| 118 |
+
list3.sort()
|
| 119 |
+
A_site_value_norm_dict = {}
|
| 120 |
+
for tupel in list3:
|
| 121 |
+
A_site_value_norm_dict[tupel[1]] = tupel[0]
|
| 122 |
+
|
| 123 |
+
used_codons = []
|
| 124 |
+
xloc = []
|
| 125 |
+
xtick_label = []
|
| 126 |
+
n1 = 0
|
| 127 |
+
|
| 128 |
+
for _, amino_acid in list_amino_sorted:
|
| 129 |
+
if amino_acid in used_codons:
|
| 130 |
+
continue
|
| 131 |
+
used_codons.append(amino_acid)
|
| 132 |
+
n1 += 1 # len(dict_list_codon[amino_acid])
|
| 133 |
+
|
| 134 |
+
xloc.append(n1)
|
| 135 |
+
for amino_acid_codon in amino_to_codons_dict[amino_acid]:
|
| 136 |
+
axis_Asite53.scatter(
|
| 137 |
+
n1,
|
| 138 |
+
A_site_value_norm_dict[amino_acid_codon],
|
| 139 |
+
color="gray",
|
| 140 |
+
s=50,
|
| 141 |
+
edgecolor="gray",
|
| 142 |
+
)
|
| 143 |
+
xtick_label.append(amino_acid)
|
| 144 |
+
|
| 145 |
+
axis_Asite53.set_xticks(xloc)
|
| 146 |
+
axis_Asite53.set_xticklabels(xtick_label, rotation=90)
|
| 147 |
+
for tick in axis_Asite53.get_xticklabels():
|
| 148 |
+
if tick.get_text() in ["Phe", "Tyr", "Trp"]:
|
| 149 |
+
a2 = tick.set_backgroundcolor("lightgreen") # (dict(facecolor = "red"))
|
| 150 |
+
# tick.set_color("white")
|
| 151 |
+
if tick.get_text() in ["Val", "Ala", "Leu", "Met", "Ile"]:
|
| 152 |
+
tick.set_backgroundcolor("lightgrey")
|
| 153 |
+
# tick.set_color("white")
|
| 154 |
+
if tick.get_text() in ["Ser", "Asn", "Thr", "Gln"]:
|
| 155 |
+
tick.set_backgroundcolor("ForestGreen")
|
| 156 |
+
tick.set_color("white")
|
| 157 |
+
|
| 158 |
+
if tick.get_text() in ["His", "Lys", "Arg"]:
|
| 159 |
+
tick.set_backgroundcolor("blue")
|
| 160 |
+
tick.set_color("white")
|
| 161 |
+
if tick.get_text() in ["Glu", "Asp"]:
|
| 162 |
+
tick.set_backgroundcolor("red")
|
| 163 |
+
tick.set_color("white")
|
| 164 |
+
axis_Asite53.set_xlim(0, n1 + 1)
|
| 165 |
+
axis_Asite53.set_ylabel("A-site codon RUST ratio")
|
| 166 |
+
|
| 167 |
+
red = mpl.patches.Rectangle((0, 0), 1, 1, fc="r")
|
| 168 |
+
blue = mpl.patches.Rectangle((0, 0), 1, 1, fc="b")
|
| 169 |
+
fgreen = mpl.patches.Rectangle((0, 0), 1, 1, fc="ForestGreen")
|
| 170 |
+
lgreen = mpl.patches.Rectangle((0, 0), 1, 1, fc="lightGreen")
|
| 171 |
+
grey = mpl.patches.Rectangle((0, 0), 1, 1, fc="lightgrey")
|
| 172 |
+
|
| 173 |
+
axis_Asite53.legend(
|
| 174 |
+
[red, grey, lgreen, blue, fgreen],
|
| 175 |
+
["acidic", "aliphatic", "aromatic", "basic", "polar\nuncharged"],
|
| 176 |
+
bbox_to_anchor=(0, 0, 0.8, 1.12),
|
| 177 |
+
ncol=3,
|
| 178 |
+
)
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
def main(args):
    """Compute RUST (Ribosome profiling Unit Step Transformation) metagene profiles per codon.

    Reads a transcript fasta (CDS start/end optionally supplied tab-separated on the
    header line, otherwise the longest ORF is searched), counts A-site footprint
    densities from a sorted BAM, writes a CSV-like file of per-codon RUST ratios plus
    a Kullback-Leibler divergence row, and (best effort) renders two PNG plots.
    """

    # Codon table used only for grouping/labelling in the A-site plot.
    universal_code = {
        "Ala": ["GCT", "GCC", "GCG", "GCA"],
        "Gly": ["GGT", "GGC", "GGG", "GGA"],
        "Pro": ["CCT", "CCC", "CCG", "CCA"],
        "Thr": ["ACT", "ACC", "ACG", "ACA"],
        "Val": ["GTT", "GTC", "GTG", "GTA"],
        "Ser": ["TCT", "TCC", "TCG", "TCA", "AGT", "AGC"],
        "Arg": ["CGT", "CGC", "CGG", "CGA", "AGG", "AGA"],
        "Leu": ["CTT", "CTC", "CTG", "CTA", "TTG", "TTA"],
        "Phe": ["TTT", "TTC"],
        "Asn": ["AAT", "AAC"],
        "Lys": ["AAG", "AAA"],
        "Asp": ["GAT", "GAC"],
        "Glu": ["GAG", "GAA"],
        "His": ["CAT", "CAC"],
        "Gln": ["CAG", "CAA"],
        "Ile": ["ATT", "ATC", "ATA"],
        "Met": ["ATG"],
        "Tyr": ["TAT", "TAC"],
        "Cys": ["TGT", "TGC"],
        "Trp": ["TGG"],
        "Stop": ["TGA", "TAG", "TAA"],
    }

    mRNA_sequences = args.transcriptome  # path to fasta file of transcripts
    in_seq_handle = open(mRNA_sequences)
    cds_start_dict = {}
    cds_end_dict = {}
    seq_dict = {}
    # Parse fasta: header lines may carry "name<TAB>cds_start<TAB>cds_end".
    # NOTE(review): if the first line is not a ">" header, `transcript` is
    # unbound here and a NameError would be raised — assumes well-formed fasta.
    for line in in_seq_handle:
        if line[0] != ">":
            seq_dict.setdefault(transcript, "")
            seq_dict[transcript] += line[:-1]
            continue
        try:
            transcript_split = line[:-1].split("\t")
            transcript = transcript_split[0][1:]
            cds_start_dict[transcript] = int(transcript_split[1])
            cds_end_dict[transcript] = int(transcript_split[2])
        except:
            # Header without CDS annotation: keep the name, fall back to ORF search later.
            pass
    in_seq_handle.close()

    offset = args.offset
    readlen_range = args.lengths

    # Accepted footprint lengths: either a single integer ("31") or a
    # colon-separated inclusive range ("28:32").
    readlen_rangesplit = readlen_range.split(":")
    if len(readlen_rangesplit) == 1:
        accepted_read_lengths = [int(readlen_rangesplit[0])]
        length_values = "%s" % int(readlen_rangesplit[0])
    elif len(readlen_rangesplit) == 2:
        accepted_read_lengths = [
            readlen
            for readlen in range(
                int(readlen_rangesplit[0]), int(readlen_rangesplit[1]) + 1
            )
        ]
        length_values = "%s_%s" % (
            int(readlen_rangesplit[0]),
            int(readlen_rangesplit[1]),
        )
    else:
        stop_err(
            "Lengths of footprints parameter not in correct format, it should be either colon seperated with the second value greater or equal to the first, (28:32) or a single interger (31)"
        )
    if len(accepted_read_lengths) == 0:
        stop_err(
            "Lengths of footprints parameter not in correct format, it should be either colon seperated with the second value greater or equal to the first, (28:32) or a single interger (31)"
        )

    nts = ["A", "G", "C", "T"]
    aligments_A1 = pysam.Samfile(
        args.alignment, "rb"
    )  # path to aligments in bam format

    # codon_enrichment_dict[codon][window_pos] = [windows seen, windows above average]
    codon_enrichment_dict = {}
    codon_enrichment_expected_dict = {}
    for nt in nts:
        for nt2 in nts:
            for nt3 in nts:
                codon = "%s%s%s" % (nt, nt2, nt3)
                codon_enrichment_dict[codon] = {}
                codon_enrichment_expected_dict[codon] = []
                for number in range(0, 60, 1):
                    codon_enrichment_dict[codon][number] = [0.0, 0.0]

    list_transcripts = seq_dict.keys()
    number_transcripts = 0
    list_10_percentile = []
    # NOTE(review): true division makes these floats; the progress message below
    # only fires when len(list_transcripts)*value is exactly divisible by 10.
    for value in range(1, 10):
        list_10_percentile.append((len(list_transcripts) * value) / 10)
    for transcript in list_transcripts:
        number_transcripts += 1
        if number_transcripts in list_10_percentile:
            sys.stdout.write(
                "%s percent\n"
                % ((list_10_percentile.index(number_transcripts) + 1) * 10)
            )

        try:  # use supplied CDS annotation
            cds_start = cds_start_dict[transcript]
            cds_end = cds_end_dict[transcript]
            if cds_end < cds_start:
                raise Exception
        except Exception:  # find longest ORF
            transcript_seq = seq_dict[transcript]
            cds_start = -1
            start_post = []
            end_post = []
            # Lookahead regexes find overlapping occurrences of starts/stops.
            for match in re.finditer(r"(?=(%s))" % re.escape("ATG"), transcript_seq):
                start_post.append(match.start())
            for match in re.finditer(r"(?=(%s))" % re.escape("TAG"), transcript_seq):
                end_post.append(match.start())
            for match in re.finditer(r"(?=(%s))" % re.escape("TAA"), transcript_seq):
                end_post.append(match.start())
            for match in re.finditer(r"(?=(%s))" % re.escape("TGA"), transcript_seq):
                end_post.append(match.start())

            end_post.sort()
            len_max_orf = 0
            # For each ATG, take the first downstream in-frame stop; keep the longest ORF.
            for value in start_post:
                for value2 in end_post:
                    if value < value2:
                        if value % 3 == value2 % 3:
                            len_orf = value2 - value
                            if len_orf > len_max_orf:
                                cds_start = value
                                cds_end = value2 + 3
                                len_max_orf = len_orf
                            break
            if cds_start == -1:
                # sys.stdout.write( '%s, AUG codon not found\n'%transcript )
                continue

        elongation_region_all = seq_dict[transcript][cds_start:cds_end]
        elongation_region_part = elongation_region_all[
            120:-60
        ]  # first 120 and last 60 nt are not used
        # peptide_sequence = elongation_region_all.translate()

        if len(elongation_region_part) % 3 != 0:
            # sys.stdout.write( '%s, CDS not divisible by 3\n'%transcript )
            continue

        profile_list = [
            0.0 for n in range(cds_start + 120, cds_end - 60)
        ]  # records ribo-seq profile
        if len(profile_list) < 50:
            # sys.stdout.write( '%s, ORF too short\n'%transcript )
            continue
        all_reads = aligments_A1.fetch(transcript)

        len_elongation_region = len(profile_list)
        for read in all_reads:
            readlen = read.qlen
            if readlen not in accepted_read_lengths:
                continue  # selection of read of acceptable length
            A_site = read.pos + offset - cds_start - 120  # addition of offset
            if len_elongation_region > A_site > -1:
                profile_list[A_site] += 1

        average_gene_density = float(sum(profile_list)) / len(
            profile_list
        )  # average gene density calculated
        if average_gene_density != 0:
            num_codon = len(
                [
                    1
                    for number88 in range(0, len(profile_list), 3)
                    if (
                        (
                            profile_list[number88]
                            + profile_list[number88 + 1]
                            + profile_list[number88 + 2]
                        )
                        / 3
                    )
                    > average_gene_density
                ]
            )
            # number of codons that exceed average gene density
            expected_codon_density = float(num_codon) / (
                len(profile_list) / 3
            )  # expected enrichment value

            codon_start = 0
            for sliding_w_n in range(
                0, len(elongation_region_part), 3
            ):  # sliding window using increments of 3 nts
                codon_window = str(
                    elongation_region_all[codon_start : codon_start + 180]
                )  # 60 codon window,
                # Skip windows containing ambiguous nucleotides (anything but ATGC).
                if len(set(codon_window) - set(["A", "T", "G", "C"])) != 0:
                    codon_start += 3
                    continue

                # RUST step: record for each of the 60 codon positions whether the
                # A-site codon's local density exceeds the gene average.
                if (
                    profile_list[sliding_w_n]
                    + profile_list[sliding_w_n + 1]
                    + profile_list[sliding_w_n + 2]
                ) / 3 > average_gene_density:
                    for number in range(0, 60):
                        codon = codon_window[number * 3 : (number + 1) * 3]
                        codon_enrichment_dict[codon][number][0] += 1
                        codon_enrichment_dict[codon][number][1] += 1
                else:
                    for number in range(0, 60):
                        codon = codon_window[number * 3 : (number + 1) * 3]
                        codon_enrichment_dict[codon][number][0] += 1

                codon = codon_window[120:123]  # corresponds to A-site codon
                codon_enrichment_expected_dict[codon].append(expected_codon_density)
                codon_start += 3

    # Write per-codon RUST ratio table: one row per codon, columns -40..19.
    if not os.path.exists(args.Path):
        os.mkdir(args.Path)
    alignment_filename = args.alignment.split("/")[-1]
    outfile = open(
        "%s/RUST_codon_file_%s_%s_%s"
        % (args.Path, alignment_filename, args.offset, length_values),
        "w",
    )
    outfile.write("codon, expected value")
    for number106 in range(-40, 20):
        outfile.write(", %s" % number106)
    outfile.write("\n")

    list_codons = []
    codons = list(codon_enrichment_dict)
    codons.sort()
    rust_expected = []
    rust_observed_metafootprint = []
    for codon in codons:
        if codon in list_codons:
            continue
        if codon in ["TAA", "TGA", "TAG"]:
            # Stop codons are excluded from the metafootprint.
            continue
        list_codons.append(codon)
        outfile.write("%s" % codon)
        if codon_enrichment_expected_dict[codon] != []:
            outfile.write(", %s" % mean_value(codon_enrichment_expected_dict[codon]))
        list_data = []
        for number in range(0, 60):
            if codon_enrichment_dict[codon][number][0] != 0:
                # Observed RUST ratio = windows above average / windows seen.
                outfile.write(
                    ", %s"
                    % (
                        codon_enrichment_dict[codon][number][1]
                        / codon_enrichment_dict[codon][number][0]
                    )
                )
                list_data.append(
                    codon_enrichment_dict[codon][number][1]
                    / codon_enrichment_dict[codon][number][0]
                )
            else:
                outfile.write(", 0")
                list_data.append(0)
        outfile.write("\n")
        rust_expected.append(mean_value(codon_enrichment_expected_dict[codon]))
        rust_observed_metafootprint.append(list_data)

    # Kullback-Leibler divergence between observed and expected distributions
    # at each of the 60 window positions ("NA" where any observed value is 0).
    rust_expected_sum = sum(rust_expected)
    q_values = [n / rust_expected_sum for n in rust_expected]

    shannon_values = []
    for loc_i in range(60):
        rust_observed = [n[loc_i] for n in rust_observed_metafootprint]
        rust_observed_sum = sum(rust_observed)
        rust_observed_min = min(rust_observed)
        if rust_observed_min == 0:
            shannon_values.append("NA")
        else:
            p_values = [n / rust_observed_sum for n in rust_observed]
            shannon = []
            list_normalised = []  ####
            for p_value, q_value in zip(p_values, q_values):
                shannon.append(abs(p_value * math.log((p_value / q_value), 2)))
                list_normalised.append(p_value / q_value)  ####
            shannon_values.append(sum(shannon))

    outfile.write("\nKullback Leibler divergence,")
    for value in shannon_values:
        outfile.write(", %s" % value)
    outfile.close()

    # Best-effort plotting; any failure (e.g. matplotlib missing) only prints a message.
    try:
        mpl.rcParams["xtick.direction"] = "out"
        mpl.rcParams["ytick.direction"] = "out"
        mpl.rcParams["legend.fontsize"] = 10
        mpl.rcParams["ytick.labelsize"] = 10
        mpl.rcParams["xtick.labelsize"] = 10
        mpl.rcParams["font.size"] = 10
        mpl.rcParams["axes.titlesize"] = 10
        mpl.rcParams["legend.frameon"] = 0
        mpl.rcParams["axes.axisbelow"] = False
        mpl.rcParams["xtick.major.pad"] = 2.0
        mpl.rcParams["ytick.major.pad"] = 2
        mpl.rcParams["xtick.major.size"] = 2.0
        mpl.rcParams["ytick.major.size"] = 2
        mpl.rcParams["axes.linewidth"] = 0.5
        mpl.rcParams["ytick.major.width"] = 0.25
        mpl.rcParams["xtick.major.width"] = 0.25
        mpl.rcParams["lines.linewidth"] = 1
        mpl.rcParams["legend.borderpad"] = 0.01
        mpl.rcParams["legend.labelspacing"] = 0.05
        mpl.rcParams["legend.columnspacing"] = 0.5
        mpl.rcParams["legend.borderaxespad"] = 0.15
        mpl.rcParams["legend.handlelength"] = 1

        fig = plt.figure(figsize=(6.69, 6.0))
        infileopen = open(
            "%s/RUST_codon_file_%s_%s_%s"
            % (args.Path, alignment_filename, args.offset, length_values)
        )
        ax1_metafootprint = fig.add_subplot(111)
        RUST_metagene_plot(infileopen, ax1_metafootprint)
        plt.savefig(
            "%s/RUST_codon_metafootprint_%s_%s_%s.png"
            % (args.Path, alignment_filename, args.offset, length_values)
        )
        plt.clf()

        infileopen = open(
            "%s/RUST_codon_file_%s_%s_%s"
            % (args.Path, alignment_filename, args.offset, length_values)
        )
        ax1codon_Asite = fig.add_subplot(111)
        A_site_plot(infileopen, universal_code, ax1codon_Asite, 40)
        plt.savefig(
            "%s/A_site_%s_%s_%s.png"
            % (args.Path, alignment_filename, args.offset, length_values)
        )

    except:
        sys.stdout.write("Error producing images\n")
|
| 519 |
+
|
| 520 |
+
|
| 521 |
+
if __name__ == "__main__":
    # Command-line entry point: parse arguments and run the codon RUST analysis.
    parser = argparse.ArgumentParser(
        description="Produces RUST metagene profile of codons"
    )
    parser.add_argument("--version", action="version", version="%(prog)s 1.2")

    parser.add_argument(
        "-t",
        "--transcriptome",
        help="fasta file of transcripts, CDS start and end may be provided on description line using tab separation e.g. >NM_0001 10 5000, otherwise it searches for longest ORF",
        # BUG FIX: `required=True` was previously embedded inside the help
        # string (", required=True"), so argparse never enforced it.
        required=True,
    )
    parser.add_argument(
        "-a",
        "--alignment",
        help="sorted bam file of transcriptome alignments",
        required=True,
    )
    parser.add_argument("-o", "--offset", help="nucleotide offset to A-site", type=int)
    parser.add_argument(
        "-l",
        "--lengths",
        help="lengths of footprints included, for example 28:32 is 28,29,30,31,32",
    )
    parser.add_argument(
        "-P", "--Path", help='path to outputfile, default is "codon"', default="codon"
    )
    # parse_args() reads sys.argv by default; passing None was equivalent.
    args = parser.parse_args()

    main(args)
|
falcon/lib/python3.10/site-packages/RUST/dipeptide.py
ADDED
|
@@ -0,0 +1,454 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/python
|
| 2 |
+
#####################################################################################
|
| 3 |
+
# rust_dipeptide, Produces RUST metagene profile of dipeptides
|
| 4 |
+
# Copyright (C) 2015, Patrick O'Connor
|
| 5 |
+
|
| 6 |
+
# This program is free software: you can redistribute it and/or modify
|
| 7 |
+
# it under the terms of the GNU General Public License as published by
|
| 8 |
+
# the Free Software Foundation, either version 3 of the License, or
|
| 9 |
+
# (at your option) any later version.
|
| 10 |
+
|
| 11 |
+
# This program is distributed in the hope that it will be useful,
|
| 12 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
| 13 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
| 14 |
+
# GNU General Public License for more details.
|
| 15 |
+
|
| 16 |
+
# You should have received a copy of the GNU General Public License
|
| 17 |
+
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
| 18 |
+
#####################################################################################
|
| 19 |
+
|
| 20 |
+
import os, re, pysam, sys, math, argparse
|
| 21 |
+
from RUST.methods import *
|
| 22 |
+
|
| 23 |
+
try:
|
| 24 |
+
import matplotlib as mpl
|
| 25 |
+
|
| 26 |
+
mpl.use("Agg")
|
| 27 |
+
import matplotlib.pyplot as plt
|
| 28 |
+
from pylab import MaxNLocator
|
| 29 |
+
except:
|
| 30 |
+
pass
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def RUST_metagene_plot(infileopen36, ax36):
    """Plot dipeptide RUST metagene profiles from a RUST_dipeptide output file.

    First pass over the file decides whether a log2 y-scale is usable (it is
    disabled if any normalised value is zero); second pass draws one gray line
    per dipeptide. The trailing Kullback-Leibler row, when fully numeric, is
    drawn in blue on a twin y-axis.
    """
    # Pass 1: scan all data rows to decide log vs linear plotting.
    infileopen36.seek(0)
    infileopen36.readline()  # skip header row
    log_style = 1
    while 1:
        line = infileopen36.readline()
        linesplit = line.split(",")
        if len(linesplit) == 1:
            # Blank/short line marks the end of the per-dipeptide rows.
            break
        nucleotide_type = linesplit[0]
        coverage = list(map(float, linesplit[1:]))
        coverage_a = coverage[0]  # expected value; normalisation denominator
        if coverage_a == 0:
            continue
        coverage_n = [n / coverage_a for n in coverage[1:]]
        if min(coverage_n[:-1]) == 0:
            # A zero ratio cannot be log-transformed; fall back to linear scale.
            log_style = 0
            continue

    # Pass 2: re-read and actually plot every dipeptide profile.
    infileopen36.seek(0)
    infileopen36.readline()  # skip header row
    while 1:
        line = infileopen36.readline()
        linesplit = line.split(",")
        if len(linesplit) == 1:
            break
        nucleotide_type = linesplit[0]
        coverage = list(map(float, linesplit[1:]))
        coverage_a = coverage[0]
        if coverage_a == 0:
            continue
        coverage_n = [n / coverage_a for n in coverage[1:]]
        if log_style:
            log2_values = [math.log(n, 2) for n in coverage_n[:-1]]
            ax36.plot(log2_values, color="gray")
        else:
            ax36.plot(coverage_n[:-1], color="gray")

    # Next line is the Kullback-Leibler divergence row; plot it only when it
    # contains no "NA" entries.
    line = infileopen36.readline()
    linesplit = line.split(",")
    if "NA" not in line[:-3]:
        coverage = list(map(float, linesplit[2:-1]))
        ax2 = ax36.twinx()  # secondary y-axis for the KL curve
        ax2.plot(coverage, color="blue")
        for tl in ax2.get_yticklabels():
            tl.set_color("blue")
            tl.set_rotation(0)

        ax2.yaxis.set_major_locator(MaxNLocator(3))
        # NOTE(review): this first set_ylim is immediately overridden by the
        # next call and has no effect.
        ax2.set_ylim(0, 1.0)
        ax2.set_ylim(-2, 1.0)
        ax2.set_yticks([0, 1], minor=False)
        ax2.set_yticklabels(["0", "1"])
        ax2.set_ylabel("Kullback-Leibler divergence", color="blue")

    # Window index 40 corresponds to the A-site (offset -40..+19 codons).
    ax36.set_xticks([5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55])
    ax36.set_xticklabels([-35, -30, -25, -20, -15, -10, -5, 0, 5, 10, 15])
    ax36.set_xlabel("distance from A-site [codon]")
    if log_style:
        ax36.set_ylabel("Dipeptide RUST ratio (observed/expected), log2")
    else:
        ax36.set_ylabel("Dipeptide RUST ratio (observed/expected)")
    ax36.axvline(40, color="red")
    # ax36.legend()
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def main(args):
    """Compute RUST metagene profiles per dipeptide.

    Reads a transcript fasta (CDS start/end optionally supplied tab-separated
    on the header line, otherwise the longest ORF is searched), counts A-site
    footprint densities from a sorted BAM, and writes a CSV-like file of
    per-dipeptide RUST ratios plus a Kullback-Leibler divergence row, then
    (best effort) renders a PNG metafootprint plot.
    """

    mRNA_sequences = args.transcriptome  # path to fasta file of transcripts
    in_seq_handle = open(mRNA_sequences)
    cds_start_dict = {}
    cds_end_dict = {}
    seq_dict = {}
    # Parse fasta: header lines may carry "name<TAB>cds_start<TAB>cds_end".
    # NOTE(review): if the first line is not a ">" header, `transcript` is
    # unbound here and a NameError would be raised — assumes well-formed fasta.
    for line in in_seq_handle:
        if line[0] != ">":
            seq_dict.setdefault(transcript, "")
            seq_dict[transcript] += line[:-1]
            continue
        try:
            transcript_split = line[:-1].split("\t")
            transcript = transcript_split[0][1:]
            cds_start_dict[transcript] = int(transcript_split[1])
            cds_end_dict[transcript] = int(transcript_split[2])
        except:
            # Header without CDS annotation: keep the name, fall back to ORF search later.
            pass
    in_seq_handle.close()

    offset = args.offset  # nucleotide offset from read 5' end to A-site
    readlen_range = args.lengths
    # Accepted footprint lengths: single integer ("31") or inclusive range ("28:32").
    readlen_rangesplit = readlen_range.split(":")
    if len(readlen_rangesplit) == 1:
        accepted_read_lengths = [int(readlen_rangesplit[0])]
        length_values = "%s" % int(readlen_rangesplit[0])
    elif len(readlen_rangesplit) == 2:
        accepted_read_lengths = [
            readlen
            for readlen in range(
                int(readlen_rangesplit[0]), int(readlen_rangesplit[1]) + 1
            )
        ]
        length_values = "%s_%s" % (
            int(readlen_rangesplit[0]),
            int(readlen_rangesplit[1]),
        )
    else:
        stop_err(
            "Lengths of footprints parameter not in correct format, it should be either colon seperated with the second value greater or equal to the first, (28:32) or a single interger (31)"
        )
    if len(accepted_read_lengths) == 0:
        stop_err(
            "Lengths of footprints parameter not in correct format, it should be either colon seperated with the second value greater or equal to the first, (28:32) or a single interger (31)"
        )

    # Single-letter codes of the 20 standard amino acids.
    amino_acids = [
        "A",
        "C",
        "E",
        "D",
        "G",
        "F",
        "I",
        "H",
        "K",
        "M",
        "L",
        "N",
        "Q",
        "P",
        "S",
        "R",
        "T",
        "W",
        "V",
        "Y",
    ]
    aligments_A1 = pysam.Samfile(
        args.alignment, "rb"
    )  # path to aligments in bam format

    # dipeptide_enrichment_dict[dipeptide][window_pos] = [windows seen, windows above average]
    dipeptide_enrichment_dict = {}
    dipeptide_enrichment_expected_dict = {}
    for amino_acid in amino_acids:
        for amino_acid2 in amino_acids:
            # for amino_acid3 in amino_acids :
            dipeptide = "%s%s" % (amino_acid, amino_acid2)
            dipeptide_enrichment_dict[dipeptide] = {}
            dipeptide_enrichment_expected_dict[dipeptide] = []
            for number in range(0, 60, 1):
                dipeptide_enrichment_dict[dipeptide][number] = [0.0, 0.0]

    list_transcripts = seq_dict.keys()
    number_transcripts = 0
    list_10_percentile = []
    # NOTE(review): true division makes these floats; the progress message below
    # only fires when len(list_transcripts)*value is exactly divisible by 10.
    for value in range(1, 10):
        list_10_percentile.append((len(list_transcripts) * value) / 10)
    for transcript in list_transcripts:
        number_transcripts += 1
        if number_transcripts in list_10_percentile:
            sys.stdout.write(
                "%s percent\n"
                % ((list_10_percentile.index(number_transcripts) + 1) * 10)
            )

        try:  # use supplied CDS annotation
            cds_start = cds_start_dict[transcript]
            cds_end = cds_end_dict[transcript]
            if cds_end < cds_start:
                raise Exception
        except Exception:  # find longest ORF
            transcript_seq = seq_dict[transcript]
            cds_start = -1
            start_post = []
            end_post = []
            # Lookahead regexes find overlapping occurrences of starts/stops.
            for match in re.finditer(r"(?=(%s))" % re.escape("ATG"), transcript_seq):
                start_post.append(match.start())
            for match in re.finditer(r"(?=(%s))" % re.escape("TAG"), transcript_seq):
                end_post.append(match.start())
            for match in re.finditer(r"(?=(%s))" % re.escape("TAA"), transcript_seq):
                end_post.append(match.start())
            for match in re.finditer(r"(?=(%s))" % re.escape("TGA"), transcript_seq):
                end_post.append(match.start())

            end_post.sort()
            len_max_orf = 0
            # For each ATG, take the first downstream in-frame stop; keep the longest ORF.
            for value in start_post:
                for value2 in end_post:
                    if value < value2:
                        if value % 3 == value2 % 3:
                            len_orf = value2 - value
                            if len_orf > len_max_orf:
                                cds_start = value
                                cds_end = value2 + 3
                                len_max_orf = len_orf
                            break
            if cds_start == -1:
                # sys.stdout.write( '%s, AUG codon not found\n'%transcript )
                continue

        elongation_region_all = seq_dict[transcript][cds_start:cds_end]
        elongation_region_part = elongation_region_all[
            120:-60
        ]  # first 120 and last 60 nt are not used
        if len(elongation_region_part) % 3 != 0:
            # sys.stdout.write( '%s, CDS not divisible by 3\n'%transcript )
            continue
        peptide_sequence = translate_dna(elongation_region_all)

        profile_list = [
            0.0 for n in range(cds_start + 120, cds_end - 60)
        ]  # records ribo-seq profile
        if len(profile_list) < 50:
            # sys.stdout.write( '%s, ORF too short\n'%transcript )
            continue
        all_reads = aligments_A1.fetch(transcript)

        len_elongation_region = len(profile_list)
        for read in all_reads:
            readlen = read.qlen
            if readlen not in accepted_read_lengths:
                continue  # selection of read of acceptable length
            A_site = read.pos + offset - cds_start - 120  # addition of offset
            if len_elongation_region > A_site > -1:
                profile_list[A_site] += 1
        average_gene_density = float(sum(profile_list)) / len(
            profile_list
        )  # average gene density calculated

        if average_gene_density != 0:
            num_codon = len(
                [
                    1
                    for number88 in range(0, len(profile_list), 3)
                    if (
                        (
                            profile_list[number88]
                            + profile_list[number88 + 1]
                            + profile_list[number88 + 2]
                        )
                        / 3
                    )
                    > average_gene_density
                ]
            )
            # number of codons that exceed average gene density
            expected_codon_density = float(num_codon) / (
                len(profile_list) / 3
            )  # expected enrichment value

            peptide_start = 0
            for sliding_w_n in range(
                0, len(elongation_region_part), 3
            ):  # sliding window using increments of 3 nts
                # 60 amino-acid window around the A-site.
                amino_window = str(peptide_sequence[peptide_start : peptide_start + 60])
                # Skip windows with non-standard residues (e.g. stop '*' or 'X').
                if len(set(amino_window) - set(amino_acids)) != 0:
                    peptide_start += 1
                    continue

                # RUST step: record for each dipeptide position whether the
                # A-site codon's local density exceeds the gene average.
                if (
                    profile_list[sliding_w_n]
                    + profile_list[sliding_w_n + 1]
                    + profile_list[sliding_w_n + 2]
                ) / 3 > average_gene_density:
                    for number in range(0, 59):
                        amino_acid_2 = amino_window[number : number + 2]
                        dipeptide_enrichment_dict[amino_acid_2][number][0] += 1
                        dipeptide_enrichment_dict[amino_acid_2][number][1] += 1
                else:
                    for number in range(0, 59):
                        amino_acid_2 = amino_window[number : number + 2]
                        dipeptide_enrichment_dict[amino_acid_2][number][0] += 1

                # Positions 40-41 correspond to the A-site dipeptide.
                amino_acid_2 = amino_window[40:42]
                dipeptide_enrichment_expected_dict[amino_acid_2].append(
                    expected_codon_density
                )
                peptide_start += 1

    # Write per-dipeptide RUST ratio table: one row per dipeptide, columns -40..19.
    alignment_filename = args.alignment.split("/")[-1]
    if not os.path.exists(args.Path):
        os.mkdir(args.Path)

    outfile = open(
        "%s/RUST_dipeptide_file_%s_%s_%s"
        % (args.Path, alignment_filename, args.offset, length_values),
        "w",
    )
    outfile.write("dipeptide, expected value")
    for number106 in range(-40, 20):
        outfile.write(", %s" % number106)
    outfile.write("\n")

    list_codons = []
    list_amino_acids = list(dipeptide_enrichment_dict.keys())
    list_amino_acids.sort()
    rust_expected = []
    rust_observed_metafootprint = []
    for amino2 in list_amino_acids:
        if amino2 in list_codons:
            continue
        list_codons.append(amino2)
        outfile.write("%s" % amino2)
        if dipeptide_enrichment_expected_dict[amino2] != []:
            outfile.write(
                ", %s" % mean_value(dipeptide_enrichment_expected_dict[amino2])
            )
        list_data = []
        for number in range(0, 60):
            if dipeptide_enrichment_dict[amino2][number][0] != 0:
                # Observed RUST ratio = windows above average / windows seen.
                outfile.write(
                    ", %s"
                    % (
                        dipeptide_enrichment_dict[amino2][number][1]
                        / dipeptide_enrichment_dict[amino2][number][0]
                    )
                )
                list_data.append(
                    dipeptide_enrichment_dict[amino2][number][1]
                    / dipeptide_enrichment_dict[amino2][number][0]
                )
            else:
                outfile.write(", 0")
                list_data.append(0)
        outfile.write("\n")
        rust_expected.append(mean_value(dipeptide_enrichment_expected_dict[amino2]))
        rust_observed_metafootprint.append(list_data)

    # Kullback-Leibler divergence between observed and expected distributions
    # at each of the 60 window positions ("NA" where any observed value is 0).
    rust_expected_sum = sum(rust_expected)
    q_values = [n / rust_expected_sum for n in rust_expected]

    shannon_values = []
    for loc_i in range(60):
        rust_observed = [n[loc_i] for n in rust_observed_metafootprint]
        rust_observed_sum = sum(rust_observed)
        rust_observed_min = min(rust_observed)
        if rust_observed_min == 0:
            shannon_values.append("NA")
        else:
            p_values = [n / rust_observed_sum for n in rust_observed]
            shannon = []
            list_normalised = []  ####
            for p_value, q_value in zip(p_values, q_values):
                shannon.append(abs(p_value * math.log((p_value / q_value), 2)))
                list_normalised.append(p_value / q_value)  ####
            shannon_values.append(sum(shannon))

    outfile.write("\nKullback Leibler divergence,")
    for value in shannon_values:
        outfile.write(", %s" % value)
    outfile.close()

    # Best-effort plotting; any failure (e.g. matplotlib missing) only prints a message.
    try:
        mpl.rcParams["xtick.direction"] = "out"
        mpl.rcParams["ytick.direction"] = "out"
        mpl.rcParams["legend.fontsize"] = 10
        mpl.rcParams["ytick.labelsize"] = 10
        mpl.rcParams["xtick.labelsize"] = 10
        mpl.rcParams["font.size"] = 10
        mpl.rcParams["axes.titlesize"] = 10
        mpl.rcParams["legend.frameon"] = 0
        mpl.rcParams["axes.axisbelow"] = False
        mpl.rcParams["xtick.major.pad"] = 2.0
        mpl.rcParams["ytick.major.pad"] = 2
        mpl.rcParams["xtick.major.size"] = 2.0
        mpl.rcParams["ytick.major.size"] = 2
        mpl.rcParams["axes.linewidth"] = 0.5
        mpl.rcParams["ytick.major.width"] = 0.25
        mpl.rcParams["xtick.major.width"] = 0.25
        mpl.rcParams["lines.linewidth"] = 1
        mpl.rcParams["legend.borderpad"] = 0.01
        mpl.rcParams["legend.labelspacing"] = 0.05
        mpl.rcParams["legend.columnspacing"] = 0.5
        mpl.rcParams["legend.borderaxespad"] = 0.15
        mpl.rcParams["legend.handlelength"] = 1

        fig = plt.figure(figsize=(6.69, 6.0))
        infileopen = open(
            "%s/RUST_dipeptide_file_%s_%s_%s"
            % (args.Path, alignment_filename, args.offset, length_values)
        )
        ax1_metafootprint = fig.add_subplot(111)
        RUST_metagene_plot(infileopen, ax1_metafootprint)
        plt.savefig(
            "%s/RUST_dipeptide_metafootprint_%s_%s_%s.png"
            % (args.Path, alignment_filename, args.offset, length_values)
        )

    except:
        sys.stdout.write("Error producing images\n")
|
| 421 |
+
|
| 422 |
+
|
| 423 |
+
if __name__ == "__main__":
    # Command-line entry point: parse arguments and run the dipeptide RUST analysis.
    parser = argparse.ArgumentParser(
        description="Produces RUST metagene profile of dipeptides"
    )
    parser.add_argument("--version", action="version", version="%(prog)s 1.2")

    parser.add_argument(
        "-t",
        "--transcriptome",
        help="fasta file of transcripts, CDS start and end may be provided on description line using tab separation e.g. >NM_0001 10 5000, otherwise it searches for longest ORF",
        # BUG FIX: `required=True` was previously embedded inside the help
        # string (", required=True"), so argparse never enforced it.
        required=True,
    )
    parser.add_argument(
        "-a",
        "--alignment",
        help="sorted bam file of transcriptome alignments",
        required=True,
    )
    parser.add_argument("-o", "--offset", help="nucleotide offset to A-site", type=int)
    parser.add_argument(
        "-l",
        "--lengths",
        help="lengths of footprints included, for example 28:32 is 28,29,30,31,32",
    )
    parser.add_argument(
        "-P",
        "--Path",
        help='path to outputfile, default is "dipeptide"',
        default="dipeptide",
    )
    # parse_args() reads sys.argv by default; passing None was equivalent.
    args = parser.parse_args()
    main(args)
|
falcon/lib/python3.10/site-packages/RUST/plot_transcript.py
ADDED
|
@@ -0,0 +1,409 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/python
|
| 2 |
+
#####################################################################################
|
| 3 |
+
# rust_plot_transcript, Plot observed and predicted ribosome profiles
|
| 4 |
+
# Copyright (C) 2015 Patrick O'Connor
|
| 5 |
+
|
| 6 |
+
# This program is free software: you can redistribute it and/or modify
|
| 7 |
+
# it under the terms of the GNU General Public License as published by
|
| 8 |
+
# the Free Software Foundation, either version 3 of the License, or
|
| 9 |
+
# (at your option) any later version.
|
| 10 |
+
|
| 11 |
+
# This program is distributed in the hope that it will be useful,
|
| 12 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
| 13 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
| 14 |
+
# GNU General Public License for more details.
|
| 15 |
+
|
| 16 |
+
# You should have received a copy of the GNU General Public License
|
| 17 |
+
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
| 18 |
+
#####################################################################################
|
| 19 |
+
|
| 20 |
+
import os, re, pysam, sys, math, argparse
|
| 21 |
+
from RUST.methods import *
|
| 22 |
+
|
| 23 |
+
try:
|
| 24 |
+
import numpy
|
| 25 |
+
except:
|
| 26 |
+
pass
|
| 27 |
+
|
| 28 |
+
try:
|
| 29 |
+
import matplotlib as mpl
|
| 30 |
+
|
| 31 |
+
mpl.use("Agg")
|
| 32 |
+
import matplotlib.pyplot as plt
|
| 33 |
+
from pylab import MaxNLocator
|
| 34 |
+
except:
|
| 35 |
+
pass
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def main(args):
    """Plot the observed ribosome profile of one transcript next to the
    profile predicted from a RUST codon metafootprint file.

    Reads RUST ratios for the 12 codon positions around the A-site from
    ``args.rustfile``, locates ``args.identifier`` in the transcriptome fasta
    (``args.transcriptome``), builds the predicted per-codon probability
    profile and the observed A-site counts from the sorted BAM
    (``args.alignment``), then writes a CSV and (if matplotlib is available)
    a PNG under ``args.Path``.
    """
    RUST_file = open(args.rustfile)  # file output of RUST_script.py
    RUST_file.readline()  # skip header
    codon_rust_dict = {}
    for line in RUST_file:
        linesplit = line.split(",")
        if len(linesplit) == 1:  # footer/blank line ends the data section
            break
        codon = linesplit[0]
        if len(codon) != 3 or len(set(codon) - set(["A", "T", "G", "C"])) != 0:
            stop_err("Codon metafootprint file not correct, check input file")
        codon_rust_dict[codon] = {}
        rust_values = list(map(float, linesplit[1:]))
        expected = rust_values[0]
        rust_metafootprint = [ro_value / expected for ro_value in rust_values[1:]]
        for n in range(34, 46):
            # for 12 codon positions near the A-site the RUST ratios are
            # recorded, keyed by offset relative to the A-site (-6..+5)
            codon_rust_dict[codon][n - 40] = rust_metafootprint[n]
    RUST_file.close()

    mRNA_sequences = args.transcriptome  # path to fasta file of transcripts
    in_seq_handle = open(mRNA_sequences)
    cds_start_dict = {}
    cds_end_dict = {}
    seq_dict = {}
    for line in in_seq_handle:
        if line[0] != ">":
            seq_dict.setdefault(transcript, "")
            seq_dict[transcript] += line[:-1]
            continue
        try:
            # description line may carry tab-separated CDS start/end
            transcript_split = line[:-1].split("\t")
            transcript = transcript_split[0][1:]
            cds_start_dict[transcript] = int(transcript_split[1])
            cds_end_dict[transcript] = int(transcript_split[2])
        except (IndexError, ValueError):
            pass  # no CDS annotation on this header; longest ORF used later
    in_seq_handle.close()

    offset = args.offset
    readlen_range = args.lengths
    readlen_rangesplit = readlen_range.split(":")
    if len(readlen_rangesplit) == 1:
        accepted_read_lengths = [int(readlen_rangesplit[0])]
        length_values = "%s" % int(readlen_rangesplit[0])
    elif len(readlen_rangesplit) == 2:
        accepted_read_lengths = [
            readlen
            for readlen in range(
                int(readlen_rangesplit[0]), int(readlen_rangesplit[1]) + 1
            )
        ]
        length_values = "%s_%s" % (
            int(readlen_rangesplit[0]),
            int(readlen_rangesplit[1]),
        )
    else:
        stop_err(
            "l, lengths of footprints parameter not in correct format, it should be either colon seperated with the second value greater or equal to the first, (28:32) or a single interger (31)"
        )
    if len(accepted_read_lengths) == 0:
        stop_err(
            "l, lengths of footprints parameter not in correct format, it should be either colon seperated with the second value greater or equal to the first, (28:32) or a single interger (31)"
        )

    aligments_A1 = pysam.Samfile(args.alignment, "rb")

    if "/" in args.rustfile:
        rustfile_split = args.rustfile.split("/")[-1]
    else:
        rustfile_split = args.rustfile

    # strip the "RUST_codon_file_" prefix so output names carry only the
    # original alignment filename
    if "RUST_codon_file_" in rustfile_split:
        alignment_filename = rustfile_split[16:]
    else:
        alignment_filename = rustfile_split

    list_transcripts = seq_dict.keys()
    transcript_of_inter = args.identifier
    transcript_of_inter2 = transcript_of_inter
    if transcript_of_inter not in list_transcripts:
        # fall back to substring matching, but only if it is unambiguous
        count_occurences = 0
        for known_transcript in list_transcripts:
            if transcript_of_inter in known_transcript:
                transcript_of_inter2 = known_transcript
                count_occurences += 1
        if transcript_of_inter2 == transcript_of_inter:
            stop_err("Transcript not in Transcriptome file")
        if count_occurences > 1:
            stop_err("%s not unique identifier" % transcript_of_inter)
        sys.stdout.write(
            "%s not in Transcriptome file, data provided for %s\n"
            % (transcript_of_inter, transcript_of_inter2)
        )

    for transcript in [transcript_of_inter2]:
        try:
            cds_start = cds_start_dict[transcript]
            cds_end = cds_end_dict[transcript]
            if cds_end < cds_start:
                raise Exception
        except Exception:
            # no (or inconsistent) CDS annotation: search for the longest ORF
            transcript_seq = seq_dict[transcript]
            cds_start = -1
            start_post = []
            end_post = []
            for match in re.finditer(r"(?=(%s))" % re.escape("ATG"), transcript_seq):
                start_post.append(match.start())
            for match in re.finditer(r"(?=(%s))" % re.escape("TAG"), transcript_seq):
                end_post.append(match.start())
            for match in re.finditer(r"(?=(%s))" % re.escape("TAA"), transcript_seq):
                end_post.append(match.start())
            for match in re.finditer(r"(?=(%s))" % re.escape("TGA"), transcript_seq):
                end_post.append(match.start())

            end_post.sort()
            len_max_orf = 0
            for value in start_post:
                for value2 in end_post:
                    if value < value2:
                        if value % 3 == value2 % 3:
                            len_orf = value2 - value
                            if len_orf > len_max_orf:
                                cds_start = value
                                cds_end = value2 + 3
                                len_max_orf = len_orf
                            break  # first in-frame stop closes this ORF
            if cds_start == -1:
                continue

        elongation_region_all = seq_dict[transcript][cds_start:cds_end]
        if (
            len(elongation_region_all) % 3 != 0
        ):  # genes with codon region not divisible by 3 skipped
            stop_err("%s, CDS not divisible by 3\n" % transcript)

        profile_expect = []
        for n in range(
            0, len(elongation_region_all[120:-60]), 3
        ):  # predicts profile from 120 nts after start to 60 before stop
            minus6_plus5_footprint = elongation_region_all[
                120 + n - 18 : 120 + n + 16
            ]  # contains sequence of region used to predict profile
            value = 1.0
            amino_loc = -6
            for number in range(0, len(minus6_plus5_footprint) - 2, 3):
                codon = minus6_plus5_footprint[number : number + 3]
                # ambiguous bases and stop codons contribute no factor
                if len(set(codon) - set(["A", "T", "G", "C"])) != 0 or codon in [
                    "TAG",
                    "TGA",
                    "TAA",
                ]:
                    amino_loc += 1
                    continue
                value = value * codon_rust_dict[codon][amino_loc]
                amino_loc += 1
            profile_expect.append(value)
        profile_expect_sum = sum(profile_expect)
        profile_expect_probablility = [
            float(value) / profile_expect_sum for value in profile_expect
        ]

        profile_list = [
            0.0 for n in range(cds_start + 120, cds_end - 60)
        ]  # records ribo-seq profile
        all_reads = aligments_A1.fetch(transcript)

        len_elongation_region = len(profile_list)
        for read in all_reads:
            readlen = read.qlen
            if readlen not in accepted_read_lengths:
                continue  # selection of read of acceptable length
            A_site = read.pos + offset - cds_start - 120  # addition of offset
            if len_elongation_region > A_site > -1:
                profile_list[A_site] += 1

        sys.stdout.write(
            "Average read density = %s\n"
            % round(sum(profile_list) / len(profile_list), 3)
        )
        if not os.path.exists(args.Path):
            os.mkdir(args.Path)
        open_file = open(
            "%s/observed_predicted_%s_%s_%s_%s.csv"
            % (
                args.Path,
                args.identifier,
                alignment_filename,
                args.offset,
                length_values,
            ),
            "w",
        )
        # collapse nucleotide-resolution counts to per-codon counts
        profiles_control_codon = [
            profile_list[codon_ind]
            + profile_list[codon_ind + 1]
            + profile_list[codon_ind + 2]
            for codon_ind in range(0, len(profile_list), 3)
        ]
        profile_expect_probablility_index = 0
        open_file.write("%s\n" % transcript)
        open_file.write("codon, predicted probability, alignments\n")
        for coordinate_index in range(0, len(elongation_region_all[120:-60]), 3):
            codon = elongation_region_all[
                120 + coordinate_index : 120 + coordinate_index + 3
            ]
            open_file.write("%s, " % (codon))
            open_file.write(
                "%s, "
                % (profile_expect_probablility[profile_expect_probablility_index])
            )
            open_file.write(
                "%s\n" % (profiles_control_codon[profile_expect_probablility_index])
            )
            profile_expect_probablility_index += 1
        open_file.close()

        try:
            mpl.rcParams["xtick.direction"] = "out"
            mpl.rcParams["ytick.direction"] = "out"
            mpl.rcParams["legend.fontsize"] = 10
            mpl.rcParams["ytick.labelsize"] = 10
            mpl.rcParams["xtick.labelsize"] = 10
            mpl.rcParams["font.size"] = 10
            mpl.rcParams["axes.titlesize"] = 10
            mpl.rcParams["legend.frameon"] = 0
            mpl.rcParams["axes.axisbelow"] = False
            mpl.rcParams["xtick.major.pad"] = 2.0
            mpl.rcParams["ytick.major.pad"] = 2
            mpl.rcParams["xtick.major.size"] = 2.0
            mpl.rcParams["ytick.major.size"] = 2
            mpl.rcParams["axes.linewidth"] = 0.5
            mpl.rcParams["ytick.major.width"] = 0.25
            mpl.rcParams["xtick.major.width"] = 0.25
            mpl.rcParams["lines.linewidth"] = 1
            mpl.rcParams["legend.borderpad"] = 0.01
            mpl.rcParams["legend.labelspacing"] = 0.05
            mpl.rcParams["legend.columnspacing"] = 0.5
            mpl.rcParams["legend.borderaxespad"] = 0.15
            mpl.rcParams["legend.handlelength"] = 1

            fig = plt.figure(figsize=(6.69, 6.0))
            plt.subplots_adjust(left=0.09, right=0.87)
            ax = fig.add_subplot(111)
            ax.plot(profiles_control_codon, color="gray", label="observed")
            ax2 = ax.twinx()
            ax2.plot(
                profile_expect_probablility, "--", color="DarkMagenta", label="predicted"
            )

            try:
                ax.text(
                    0.1,
                    1.05,
                    "r =%s"
                    % round(
                        numpy.corrcoef(profiles_control_codon, profile_expect_probablility)[
                            0, 1
                        ],
                        2,
                    ),
                    transform=ax.transAxes,
                )
            # bug fix: was a bare "except:", which also swallowed
            # KeyboardInterrupt/SystemExit
            except Exception:
                pass

            ax.legend(
                bbox_to_anchor=(0, 0, 0.890, 1.05), bbox_transform=ax.transAxes, ncol=1
            )
            ax2.legend(
                bbox_to_anchor=(0, 0, 0.890, 1.10), bbox_transform=ax2.transAxes, ncol=1
            )

            ax.set_xlabel("transcript coordinates [codon]")
            ax.set_ylabel("# alignments")
            ax.yaxis.set_major_locator(MaxNLocator(5))
            tciks = ax.get_xticks()
            # bug fix: Python 3 "/" gave float codon indices (float tick
            # labels); the original Python 2 code intended floor division
            cds_start_codon = cds_start // 3
            tciks2 = [int(n) + 40 + cds_start_codon for n in tciks]
            ax.set_xticklabels(tciks2)

            ax.set_title(transcript_of_inter)
            for tl in ax2.get_yticklabels():
                tl.set_color("DarkMagenta")
            ax2.yaxis.set_major_locator(MaxNLocator(5))
            ax2.set_ylabel("probability", color="darkmagenta")
            ax2.set_xlim(0, len(profile_expect_probablility))
            ax.set_xlim(0, len(profile_expect_probablility))

            plt.savefig(
                "%s/observed_predicted_%s_%s_%s_%s.png"
                % (
                    args.Path,
                    args.identifier,
                    alignment_filename,
                    args.offset,
                    length_values,
                )
            )
        except Exception:
            sys.stdout.write("Error producing images\n")

    # bug fix: the BAM handle was never closed
    aligments_A1.close()
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
if __name__ == "__main__":

    parser = argparse.ArgumentParser(
        description="Plot observed and predicted ribosome profiles"
    )
    parser.add_argument(
        "-t",
        "--transcriptome",
        help="fasta file of transcripts, CDS start and end may be provided on description line using tab separation e.g. >NM_0001 10 5000, otherwise it searches for longest ORF",
        # Bug fix: ", required=True" was accidentally concatenated onto the
        # help string, leaving the option silently optional; make it a real
        # keyword argument as clearly intended.
        required=True,
    )
    parser.add_argument(
        "-a",
        "--alignment",
        help="sorted bam file of transcriptome alignments",
        required=True,
    )
    parser.add_argument("-o", "--offset", help="nucleotide offset to A-site", type=int)
    parser.add_argument(
        "-l",
        "--lengths",
        help="lengths of footprints included, for example 28:32 is 28,29,30,31,32",
    )
    parser.add_argument(
        # Bug fix: help text claimed the default was "amino" (copy-paste from
        # another tool); the actual default is "plot".
        "-P", "--Path", help='path to outputfile, default is "plot"', default="plot"
    )
    parser.add_argument(
        "-i",
        "--identifier",
        help='Specific transcript to plot (Use of unique identifier is sufficient for example "NM_031946"',
        required=True,
    )
    parser.add_argument(
        "-r",
        "--rustfile",
        help='path to file produced from "rust_codon"',
        required=True,
    )
    parser.add_argument("--version", action="version", version="%(prog)s 1.2")
    args = parser.parse_args(None)

    main(args)
|
falcon/lib/python3.10/site-packages/RUST/synergy.py
ADDED
|
@@ -0,0 +1,194 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/python
|
| 2 |
+
#####################################################################################
|
| 3 |
+
# rust_synergy, Identifies tripeptides that are candidates for synergistic interactions
|
| 4 |
+
# Copyright (C) 2015 Patrick O'Connor
|
| 5 |
+
|
| 6 |
+
# This program is free software: you can redistribute it and/or modify
|
| 7 |
+
# it under the terms of the GNU General Public License as published by
|
| 8 |
+
# the Free Software Foundation, either version 3 of the License, or
|
| 9 |
+
# (at your option) any later version.
|
| 10 |
+
|
| 11 |
+
# This program is distributed in the hope that it will be useful,
|
| 12 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
| 13 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
| 14 |
+
# GNU General Public License for more details.
|
| 15 |
+
|
| 16 |
+
# You should have received a copy of the GNU General Public License
|
| 17 |
+
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
| 18 |
+
#####################################################################################
|
| 19 |
+
|
| 20 |
+
import numpy as np
|
| 21 |
+
import argparse, os, sys
|
| 22 |
+
from RUST.methods import *
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def main(args):
    """Identify tripeptides whose RUST metagene profile deviates from the
    profile predicted by its three constituent amino acids.

    For every tripeptide row in ``args.tri``, the predicted ratio at each
    position is the product of the three single-residue ratios (from
    ``args.aa``) at consecutive positions.  A standard score is computed for
    every (tripeptide, position) pair, and pairs with |z| > 5 are written,
    sorted by descending z-score, to
    ``<args.Path>/synergy_<amino file>_<tripeptide file>``.
    """
    amino_acids = [
        "A", "C", "E", "D", "G", "F", "I", "H", "K", "M",
        "L", "N", "Q", "P", "S", "R", "T", "W", "V", "Y",
    ]

    infileopen = open(args.tri)
    infileopen.readline()  # skip header
    list_amino = []
    list_zscores = []
    list_fold_change = []
    list_loc = []

    for line in infileopen:
        linesplit = line[:-1].split(",")
        if len(linesplit) == 1:  # footer/blank row ends the data section
            break
        amino = linesplit[0]
        coverage = list(map(float, linesplit[1:]))
        coverage_a = coverage[0]  # expected (background) value for this motif
        if coverage_a == 0:
            continue
        coverage_n = [n / coverage_a for n in coverage[1:]]  # observed/expected

        if len(amino) != 3 or len(set(amino) - set(amino_acids)) != 0:
            sys.stderr.write(
                "Tripeptide metafootprint file not in correct, check input file\n"
            )
            exit()
        aminoA = amino[0]
        aminoB = amino[1]
        aminoC = amino[2]

        # Collect the normalised profiles of the three constituent residues.
        infileopen2 = open(args.aa)
        infileopen2.seek(0)
        infileopen2.readline()  # skip header
        for line2 in infileopen2:
            linesplit = line2[:-1].split(",")
            if len(linesplit) == 1:
                break
            amino2 = linesplit[0]
            if len(amino2) != 1 or len(set(amino2) - set(amino_acids)) != 0:
                sys.stderr.write(
                    "Amino acid metafootprint file not correct, check input file\n"
                )
                exit()
            if amino2 in amino:
                coverage = list(map(float, linesplit[1:]))
                coverage_a = coverage[0]
                if coverage_a == 0:
                    continue
                if amino2 == aminoA:
                    coverage_n1 = [n / coverage_a for n in coverage[1:]]
                if amino2 == aminoB:
                    coverage_n2 = [n / coverage_a for n in coverage[1:]]
                if amino2 == aminoC:
                    coverage_n3 = [n / coverage_a for n in coverage[1:]]
        infileopen2.close()

        # Spread of (|observed| - |predicted|) over positions 0..57 is the
        # normalising factor for the standard scores below.
        differences = []
        for number_i in range(58):
            coverage_n_e = (
                coverage_n1[number_i]
                * coverage_n2[number_i + 1]
                * coverage_n3[number_i + 2]
            )
            differences.append(abs(coverage_n[number_i]) - abs(coverage_n_e))

        std_diff = np.std(differences)

        for number_i in range(0, len(coverage_n) - 2):
            coverage_n_e = (
                coverage_n1[number_i]
                * coverage_n2[number_i + 1]
                * coverage_n3[number_i + 2]
            )

            list_amino.append(amino)
            list_zscores.append((coverage_n[number_i] - coverage_n_e) / std_diff)
            list_loc.append(number_i)
            if coverage_n_e == 0:
                list_fold_change.append("not defined")
            else:
                list_fold_change.append(coverage_n[number_i] / coverage_n_e)
    infileopen.close()  # bug fix: handle was previously left open

    if not os.path.exists(args.Path):
        os.mkdir(args.Path)
    if "/" in args.aa:
        amino_file_split = args.aa.split("/")[-1]
    else:
        # bug fix: was "args.aao" (nonexistent attribute), which raised
        # AttributeError whenever --aa was given as a bare filename
        amino_file_split = args.aa
    if "RUST_amino_file_" in amino_file_split:
        amino_file = amino_file_split[16:]
    else:
        amino_file = amino_file_split

    if "/" in args.tri:
        tripeptide_file_split = args.tri.split("/")[-1]
    else:
        tripeptide_file_split = args.tri
    if "RUST_tripeptide_file_" in tripeptide_file_split:
        tripeptide_file = tripeptide_file_split[21:]
    else:
        tripeptide_file = tripeptide_file_split

    outfile = open("%s/synergy_%s_%s" % (args.Path, amino_file, tripeptide_file), "w")
    outfile.write(
        "Tripeptide, Standard score, distance of 1st residue from A-site, fold change\n"
    )
    zipped_list = list(zip(list_zscores, list_amino, list_loc, list_fold_change))
    zipped_list.sort()
    zipped_list.reverse()
    for zscore, amino, loc, fold_change in zipped_list:
        if abs(zscore) > 5:
            outfile.write("%s, %s, %s, %s\n" % (amino, zscore, loc - 40, fold_change))
    outfile.close()
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Identifies tripeptides that are candidates for synergistic interactions"
    )
    parser.add_argument(
        "-t",
        "--transcriptome",
        help="fasta file of transcripts, CDS start and end may be provided on description line using tab separation e.g. >NM_0001 10 5000, otherwise it searches for longest ORF",
        # Bug fix: ", required=True" was accidentally concatenated onto the
        # help string, leaving the option silently optional; make it a real
        # keyword argument as clearly intended.
        required=True,
    )
    parser.add_argument(
        "--aa", help='path to file produced from "rust_amino"', required=True
    )
    parser.add_argument(
        "--tri", help='path to file produced from "rust_tripeptide"', required=True
    )
    parser.add_argument(
        "-P",
        "--Path",
        help='path to outputfile, default is "synergy"',
        default="synergy",
    )
    parser.add_argument("--version", action="version", version="%(prog)s 1.2")
    args = parser.parse_args(None)

    main(args)
|
falcon/lib/python3.10/site-packages/RUST/tripeptide.py
ADDED
|
@@ -0,0 +1,379 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/python
|
| 2 |
+
#####################################################################################
|
| 3 |
+
# rust_tripeptide, Produces RUST metagene profile of tripeptide motifs
|
| 4 |
+
# Copyright (C) 2015 Patrick O'Connor
|
| 5 |
+
|
| 6 |
+
# This program is free software: you can redistribute it and/or modify
|
| 7 |
+
# it under the terms of the GNU General Public License as published by
|
| 8 |
+
# the Free Software Foundation, either version 3 of the License, or
|
| 9 |
+
# (at your option) any later version.
|
| 10 |
+
|
| 11 |
+
# This program is distributed in the hope that it will be useful,
|
| 12 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
| 13 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
| 14 |
+
# GNU General Public License for more details.
|
| 15 |
+
|
| 16 |
+
# You should have received a copy of the GNU General Public License
|
| 17 |
+
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
| 18 |
+
#####################################################################################
|
| 19 |
+
|
| 20 |
+
import os, re, pysam, sys, math, argparse
|
| 21 |
+
import RUST
|
| 22 |
+
from RUST.methods import *
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
try:
|
| 26 |
+
import matplotlib as mpl
|
| 27 |
+
|
| 28 |
+
mpl.use("Agg")
|
| 29 |
+
import matplotlib.pyplot as plt
|
| 30 |
+
from pylab import MaxNLocator
|
| 31 |
+
except:
|
| 32 |
+
pass
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def RUST_metagene_plot(infileopen36, ax36):
    """Draw all tripeptide RUST metagene curves from an open RUST file onto an axis.

    Rewinds the handle, skips the header row, then plots each data row
    ("motif,expected,v1,v2,...") as values normalised by the expected value,
    in gray.  Reading stops at the first row containing no comma.  Axis
    labels and the A-site marker line are applied afterwards.
    """
    infileopen36.seek(0)
    infileopen36.readline()  # discard header row
    for row in infileopen36:
        fields = row.split(",")
        if len(fields) == 1:  # footer/blank row terminates the data section
            break
        nucleotide_type = fields[0]
        values = [float(v) for v in fields[1:]]
        expected = values[0]
        if expected == 0:  # ratio undefined when expected coverage is zero
            continue
        ratios = [v / expected for v in values[1:]]
        ax36.plot(ratios[:-2], color="gray")

    ax36.set_xticks([5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55])
    ax36.set_xticklabels([-35, -30, -25, -20, -15, -10, -5, 0, 5, 10, 15])
    ax36.set_xlabel("distance from A-site [codon]")
    ax36.set_ylabel("Tripeptide RUST ratio (observed/expected)")
    ax36.axvline(40, color="red")  # mark the A-site position
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def main(args):
    """Build a RUST metafootprint profile for every tripeptide motif.

    Reads transcript sequences (and optional CDS coordinates) from a fasta
    file, assigns A-sites to footprint alignments from a sorted BAM file,
    accumulates RUST observed/expected counts for each tripeptide at each
    position of a 60-codon window, writes the resulting table under
    ``args.Path`` and, when matplotlib is available, renders the
    metafootprint plot as a PNG.

    Fixes over the previous revision:
    - progress-percentile thresholds now use floor division; with ``/`` they
      were floats in Python 3 and the integer transcript counter almost
      never matched them, silencing the progress messages;
    - the bare ``except:`` around header parsing is narrowed so it no longer
      swallows e.g. KeyboardInterrupt.
    """
    mRNA_sequences = args.transcriptome  # path to fasta file of transcripts
    in_seq_handle = open(mRNA_sequences)
    cds_start_dict = {}
    cds_end_dict = {}
    seq_dict = {}
    # NOTE(review): assumes the fasta starts with a ">" header line; a
    # sequence line before any header would raise NameError on `transcript`.
    for line in in_seq_handle:
        if line[0] != ">":
            seq_dict.setdefault(transcript, "")
            seq_dict[transcript] += line[:-1]
            continue
        try:
            # header may carry "name<TAB>cds_start<TAB>cds_end"
            transcript_split = line[:-1].split("\t")
            transcript = transcript_split[0][1:]
            cds_start_dict[transcript] = int(transcript_split[1])
            cds_end_dict[transcript] = int(transcript_split[2])
        except (IndexError, ValueError):
            # header without coordinates: the longest ORF is searched later
            pass
    in_seq_handle.close()

    offset = args.offset
    readlen_range = args.lengths
    readlen_rangesplit = readlen_range.split(":")
    if len(readlen_rangesplit) == 1:
        accepted_read_lengths = [int(readlen_rangesplit[0])]
        length_values = "%s" % int(readlen_rangesplit[0])
    elif len(readlen_rangesplit) == 2:
        accepted_read_lengths = [
            readlen
            for readlen in range(
                int(readlen_rangesplit[0]), int(readlen_rangesplit[1]) + 1
            )
        ]
        length_values = "%s_%s" % (
            int(readlen_rangesplit[0]),
            int(readlen_rangesplit[1]),
        )
    else:
        # stop_err is expected to terminate the program
        stop_err(
            "Lengths of footprints parameter not in correct format, it should be either colon seperated with the second value greater or equal to the first, (28:32) or a single interger (31)"
        )
    if len(accepted_read_lengths) == 0:
        stop_err(
            "Lengths of footprints parameter not in correct format, it should be either colon seperated with the second value greater or equal to the first, (28:32) or a single interger (31)"
        )

    amino_acids = [
        "A",
        "C",
        "E",
        "D",
        "G",
        "F",
        "I",
        "H",
        "K",
        "M",
        "L",
        "N",
        "Q",
        "P",
        "S",
        "R",
        "T",
        "W",
        "V",
        "Y",
    ]
    aligments_A1 = pysam.Samfile(
        args.alignment, "rb"
    )  # path to aligments in bam format

    # tripeptide -> {window position -> [all windows, above-average windows]}
    tripeptide_enrichment_dict = {}
    # tripeptide -> list of per-gene expected enrichment values
    codon_enrichment_expected_dict = {}
    for amino_acid in amino_acids:
        for amino_acid2 in amino_acids:
            for amino_acid3 in amino_acids:
                tripeptide = "%s%s%s" % (amino_acid, amino_acid2, amino_acid3)
                tripeptide_enrichment_dict[tripeptide] = {}
                codon_enrichment_expected_dict[tripeptide] = []
                for number in range(0, 60, 1):
                    tripeptide_enrichment_dict[tripeptide][number] = [0.0, 0.0]

    list_transcripts = seq_dict.keys()
    number_transcripts = 0
    list_10_percentile = []
    for value in range(1, 10):
        # BUG FIX: floor division; "/" yields floats in Python 3 so the
        # integer counter below would rarely match and progress was silent
        list_10_percentile.append((len(list_transcripts) * value) // 10)
    for transcript in list_transcripts:
        number_transcripts += 1
        if number_transcripts in list_10_percentile:
            sys.stdout.write(
                "%s percent\n"
                % ((list_10_percentile.index(number_transcripts) + 1) * 10)
            )

        try:  # use supplied CDS annotation
            cds_start = cds_start_dict[transcript]
            cds_end = cds_end_dict[transcript]
            if cds_end < cds_start:
                raise Exception
        except Exception:  # find longest ORF
            transcript_seq = seq_dict[transcript]
            cds_start = -1
            start_post = []
            end_post = []
            # lookahead patterns find overlapping start/stop codon positions
            for match in re.finditer(r"(?=(%s))" % re.escape("ATG"), transcript_seq):
                start_post.append(match.start())
            for match in re.finditer(r"(?=(%s))" % re.escape("TAG"), transcript_seq):
                end_post.append(match.start())
            for match in re.finditer(r"(?=(%s))" % re.escape("TAA"), transcript_seq):
                end_post.append(match.start())
            for match in re.finditer(r"(?=(%s))" % re.escape("TGA"), transcript_seq):
                end_post.append(match.start())

            end_post.sort()
            len_max_orf = 0
            for value in start_post:
                for value2 in end_post:
                    if value < value2:
                        if value % 3 == value2 % 3:
                            # first in-frame stop downstream of this start
                            len_orf = value2 - value
                            if len_orf > len_max_orf:
                                cds_start = value
                                cds_end = value2 + 3
                                len_max_orf = len_orf
                            break
            if cds_start == -1:
                # sys.stdout.write( '%s, AUG codon not found\n'%transcript )
                continue

        elongation_region_all = seq_dict[transcript][cds_start:cds_end]
        elongation_region_part = elongation_region_all[
            120:-60
        ]  # first 120 and last 60 nt are not used
        if len(elongation_region_part) % 3 != 0:
            # sys.stdout.write( '%s, CDS not divisible by 3\n'%transcript )
            continue
        peptide_sequence = translate_dna(elongation_region_all)

        profile_list = [
            0.0 for n in range(cds_start + 120, cds_end - 60)
        ]  # records ribo-seq profile
        if len(profile_list) < 50:
            # sys.stdout.write( '%s, ORF too short\n'%transcript )
            continue
        all_reads = aligments_A1.fetch(transcript)

        len_elongation_region = len(profile_list)
        for read in all_reads:
            readlen = read.qlen
            if readlen not in accepted_read_lengths:
                continue  # selection of read of acceptable length
            A_site = read.pos + offset - cds_start - 120  # addition of offset
            if len_elongation_region > A_site > -1:
                profile_list[A_site] += 1
        average_gene_density = float(sum(profile_list)) / len(
            profile_list
        )  # average gene density calculated

        if average_gene_density != 0:
            # number of codons that exceed average gene density
            num_codon = sum(
                1
                for number88 in range(0, len(profile_list), 3)
                if (
                    profile_list[number88]
                    + profile_list[number88 + 1]
                    + profile_list[number88 + 2]
                )
                / 3
                > average_gene_density
            )
            expected_codon_density = float(num_codon) / (
                len(profile_list) / 3
            )  # expected enrichment value

            peptide_start = 0
            for sliding_w_n in range(
                0, len(elongation_region_part), 3
            ):  # sliding window using increments of 3 nts
                amino_window = str(peptide_sequence[peptide_start : peptide_start + 60])
                if len(set(amino_window) - set(amino_acids)) != 0:
                    # window contains a stop/ambiguous symbol; skip it
                    peptide_start += 1
                    continue

                if (
                    profile_list[sliding_w_n]
                    + profile_list[sliding_w_n + 1]
                    + profile_list[sliding_w_n + 2]
                ) / 3 > average_gene_density:
                    # above-average codon: count the window both as observed
                    # ([1]) and as part of the background ([0])
                    for number in range(0, 58):
                        amino_acid_3 = amino_window[number : number + 3]
                        tripeptide_enrichment_dict[amino_acid_3][number][0] += 1
                        tripeptide_enrichment_dict[amino_acid_3][number][1] += 1
                else:
                    for number in range(0, 58):
                        amino_acid_3 = amino_window[number : number + 3]
                        tripeptide_enrichment_dict[amino_acid_3][number][0] += 1

                # tripeptide decoded at the A-site (offset 40 in the window)
                amino_acid_3 = amino_window[40:43]
                codon_enrichment_expected_dict[amino_acid_3].append(
                    expected_codon_density
                )
                peptide_start += 1

    if not os.path.exists(args.Path):
        os.mkdir(args.Path)

    alignment_filename = args.alignment.split("/")[-1]
    outfile = open(
        "%s/RUST_tripeptide_file_%s_%s_%s"
        % (args.Path, alignment_filename, args.offset, length_values),
        "w",
    )
    outfile.write("tripeptide, expected value")
    for number106 in range(-40, 20):
        outfile.write(", %s" % number106)
    outfile.write("\n")

    list_codons = []
    list_amino_acids = list(tripeptide_enrichment_dict.keys())
    list_amino_acids.sort()
    for amino2 in list_amino_acids:
        if amino2 in list_codons:
            continue
        list_codons.append(amino2)
        outfile.write("%s" % amino2)
        # NOTE(review): a tripeptide never seen at the A-site gets no
        # expected-value column, shifting that row left by one field
        if codon_enrichment_expected_dict[amino2] != []:
            outfile.write(", %s" % mean_value(codon_enrichment_expected_dict[amino2]))

        for number in range(0, 60):
            if tripeptide_enrichment_dict[amino2][number][0] != 0:
                outfile.write(
                    ", %s"
                    % (
                        tripeptide_enrichment_dict[amino2][number][1]
                        / tripeptide_enrichment_dict[amino2][number][0]
                    )
                )
            else:
                outfile.write(", 0")
        outfile.write("\n")
    outfile.close()

    try:
        mpl.rcParams["xtick.direction"] = "out"
        mpl.rcParams["ytick.direction"] = "out"
        mpl.rcParams["legend.fontsize"] = 10
        mpl.rcParams["ytick.labelsize"] = 10
        mpl.rcParams["xtick.labelsize"] = 10
        mpl.rcParams["font.size"] = 10
        mpl.rcParams["axes.titlesize"] = 10
        mpl.rcParams["legend.frameon"] = 0
        mpl.rcParams["axes.axisbelow"] = False
        mpl.rcParams["xtick.major.pad"] = 2.0
        mpl.rcParams["ytick.major.pad"] = 2
        mpl.rcParams["xtick.major.size"] = 2.0
        mpl.rcParams["ytick.major.size"] = 2
        mpl.rcParams["axes.linewidth"] = 0.5
        mpl.rcParams["ytick.major.width"] = 0.25
        mpl.rcParams["xtick.major.width"] = 0.25
        mpl.rcParams["lines.linewidth"] = 1
        mpl.rcParams["legend.borderpad"] = 0.01
        mpl.rcParams["legend.labelspacing"] = 0.05
        mpl.rcParams["legend.columnspacing"] = 0.5
        mpl.rcParams["legend.borderaxespad"] = 0.15
        mpl.rcParams["legend.handlelength"] = 1

        fig = plt.figure(figsize=(6.69, 6.0))
        infileopen = open(
            "%s/RUST_tripeptide_file_%s_%s_%s"
            % (args.Path, alignment_filename, args.offset, length_values)
        )
        ax1_metafootprint = fig.add_subplot(111)
        RUST_metagene_plot(infileopen, ax1_metafootprint)
        plt.savefig(
            "%s/RUST_tripeptide_metafootprint_%s_%s_%s.png"
            % (args.Path, alignment_filename, args.offset, length_values)
        )

    except Exception:
        # matplotlib may be absent: the module-level import is wrapped in
        # try/except, leaving mpl/plt undefined
        sys.stdout.write("Error producing images\n")
+
if __name__ == "__main__":
    # Command-line entry point: build the parser and hand off to main().
    parser = argparse.ArgumentParser(
        description="Produces RUST metagene profile of tripeptide motifs"
    )
    parser.add_argument(
        "-t",
        "--transcriptome",
        help="fasta file of transcripts, CDS start and end may be provided on description line using tab separation e.g. >NM_0001 10 5000, otherwise it searches for longest ORF",
        # BUG FIX: ", required=True" was previously concatenated into the
        # help string, so this mandatory option was silently optional and
        # a missing -t crashed later with a confusing TypeError.
        required=True,
    )
    parser.add_argument(
        "-a",
        "--alignment",
        help="sorted bam file of transcriptome alignments",
        required=True,
    )
    parser.add_argument("-o", "--offset", help="nucleotide offset to A-site", type=int)
    parser.add_argument(
        "-l",
        "--lengths",
        help="lengths of footprints included, for example 28:32 is 28,29,30,31,32",
    )
    parser.add_argument(
        "-P",
        "--Path",
        help='path to outputfile, default is "tripeptide"',
        default="tripeptide",
    )
    parser.add_argument("--version", action="version", version="%(prog)s 1.2")
    args = parser.parse_args(None)
    main(args)
|
falcon/lib/python3.10/site-packages/setuptools/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (11.1 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/__pycache__/_itertools.cpython-310.pyc
ADDED
|
Binary file (862 Bytes). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/__pycache__/errors.cpython-310.pyc
ADDED
|
Binary file (2.84 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/__pycache__/namespaces.cpython-310.pyc
ADDED
|
Binary file (3.72 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__init__.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# mypy: disable_error_code=call-overload
# pyright: reportCallIssue=false, reportArgumentType=false
# Can't disable on the exact line because distutils doesn't exists on Python 3.12
# and type-checkers aren't aware of distutils_hack,
# causing distutils.command.bdist.bdist.format_commands to be Any.

import sys

from distutils.command.bdist import bdist

# Register the "egg" format with distutils' bdist command so that
# "setup.py bdist --formats=egg" dispatches to the bdist_egg command.
if 'egg' not in bdist.format_commands:
    try:
        # format_commands is a dict in vendored distutils
        # It used to be a list in older (stdlib) distutils
        # We support both for backwards compatibility
        bdist.format_commands['egg'] = ('bdist_egg', "Python .egg file")
    except TypeError:
        # Older layout: a format_command dict plus a parallel
        # format_commands list that both need the new entry.
        bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
        bdist.format_commands.append('egg')

# keep the module namespace clean after the registration side effect
del bdist, sys
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (410 Bytes). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/_requirestxt.cpython-310.pyc
ADDED
|
Binary file (4.66 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-310.pyc
ADDED
|
Binary file (13.7 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-310.pyc
ADDED
|
Binary file (1.78 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/build.cpython-310.pyc
ADDED
|
Binary file (5.28 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/build_ext.cpython-310.pyc
ADDED
|
Binary file (14 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/build_py.cpython-310.pyc
ADDED
|
Binary file (15 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/develop.cpython-310.pyc
ADDED
|
Binary file (6.1 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/dist_info.cpython-310.pyc
ADDED
|
Binary file (3.26 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/easy_install.cpython-310.pyc
ADDED
|
Binary file (65.2 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/editable_wheel.cpython-310.pyc
ADDED
|
Binary file (35.8 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-310.pyc
ADDED
|
Binary file (2.37 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/saveopts.cpython-310.pyc
ADDED
|
Binary file (911 Bytes). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/sdist.cpython-310.pyc
ADDED
|
Binary file (7.98 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/setopt.cpython-310.pyc
ADDED
|
Binary file (4.75 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/__pycache__/test.cpython-310.pyc
ADDED
|
Binary file (1.71 kB). View file
|
|
|
falcon/lib/python3.10/site-packages/setuptools/command/_requirestxt.py
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Helper code used to generate ``requires.txt`` files in the egg-info directory.
|
| 2 |
+
|
| 3 |
+
The ``requires.txt`` file has an specific format:
|
| 4 |
+
- Environment markers need to be part of the section headers and
|
| 5 |
+
should not be part of the requirement spec itself.
|
| 6 |
+
|
| 7 |
+
See https://setuptools.pypa.io/en/latest/deprecated/python_eggs.html#requires-txt
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
from __future__ import annotations
|
| 11 |
+
|
| 12 |
+
import io
|
| 13 |
+
from collections import defaultdict
|
| 14 |
+
from collections.abc import Mapping
|
| 15 |
+
from itertools import filterfalse
|
| 16 |
+
from typing import TypeVar
|
| 17 |
+
|
| 18 |
+
from jaraco.text import yield_lines
|
| 19 |
+
from packaging.requirements import Requirement
|
| 20 |
+
|
| 21 |
+
from .. import _reqs
|
| 22 |
+
from .._reqs import _StrOrIter
|
| 23 |
+
|
| 24 |
+
# dict can work as an ordered set
|
| 25 |
+
_T = TypeVar("_T")
|
| 26 |
+
_Ordered = dict[_T, None]
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def _prepare(
    install_requires: _StrOrIter, extras_require: Mapping[str, _StrOrIter]
) -> tuple[list[str], dict[str, list[str]]]:
    """Rework ``install_requires``/``extras_require`` for ``requires.txt``.

    Returns the pair of (marker-free install requirements, extras sections
    keyed by ``name:marker``) ready to be serialised.
    """
    return _move_install_requirements_markers(
        install_requires, _convert_extras_requirements(extras_require)
    )
+
|
| 38 |
+
|
| 39 |
+
def _convert_extras_requirements(
    extras_require: Mapping[str, _StrOrIter],
) -> defaultdict[str, _Ordered[Requirement]]:
    """
    Convert requirements in `extras_require` of the form
    `"extra": ["barbazquux; {marker}"]` to
    `"extra:{marker}": ["barbazquux"]`.
    """
    converted = defaultdict[str, _Ordered[Requirement]](dict)
    for section, requirements in extras_require.items():
        # touch the key so empty sections are not stripped from the output
        converted[section]
        for requirement in _reqs.parse(requirements):
            target = section + _suffix_for(requirement)
            converted[target].setdefault(requirement)

    return converted
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def _move_install_requirements_markers(
    install_requires: _StrOrIter, extras_require: Mapping[str, _Ordered[Requirement]]
) -> tuple[list[str], dict[str, list[str]]]:
    """
    The ``requires.txt`` file has an specific format:
        - Environment markers need to be part of the section headers and
          should not be part of the requirement spec itself.

    Move requirements in ``install_requires`` that are using environment
    markers ``extras_require``.
    """
    # Partition install_requires: marker-free specs stay where they are,
    # marker-carrying ones are folded into ":marker" extras sections.
    parsed = list(_reqs.parse(install_requires))

    simple_install_requires: list[str] = []
    for requirement in parsed:
        if _no_marker(requirement):
            simple_install_requires.append(str(requirement))
        else:
            extras_require[':' + str(requirement.marker)].setdefault(requirement)

    # list(dict.fromkeys(...)) keeps order while dropping duplicate strings
    expanded_extras = {
        section: list(dict.fromkeys(str(_clean_req(r)) for r in requirements))
        for section, requirements in extras_require.items()
    }

    return simple_install_requires, expanded_extras
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
def _suffix_for(req):
|
| 90 |
+
"""Return the 'extras_require' suffix for a given requirement."""
|
| 91 |
+
return ':' + str(req.marker) if req.marker else ''
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def _clean_req(req):
    """Given a Requirement, remove environment markers and return it"""
    # round-trip through the string form so the caller's object is untouched
    stripped = Requirement(str(req))
    stripped.marker = None
    return stripped
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def _no_marker(req):
|
| 102 |
+
return not req.marker
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def _write_requirements(stream, reqs):
    """Write each requirement in *reqs* to *stream*, one per line."""
    stream.writelines(line + '\n' for line in yield_lines(reqs or ()))
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def write_requirements(cmd, basename, filename):
    """egg_info writer producing the ``requires.txt`` metadata file."""
    dist = cmd.distribution
    buffer = io.StringIO()
    install_requires, extras_require = _prepare(
        dist.install_requires or (), dist.extras_require or {}
    )
    # plain requirements first, then one "[extra]" section per key
    _write_requirements(buffer, install_requires)
    for extra in sorted(extras_require):
        buffer.write(f'\n[{extra}]\n')
        _write_requirements(buffer, extras_require[extra])
    cmd.write_or_delete_file("requirements", filename, buffer.getvalue())
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def write_setup_requirements(cmd, basename, filename):
    """egg_info writer producing the ``setup_requires`` metadata file."""
    buffer = io.StringIO()
    _write_requirements(buffer, cmd.distribution.setup_requires)
    cmd.write_or_delete_file("setup-requirements", filename, buffer.getvalue())
|
falcon/lib/python3.10/site-packages/setuptools/command/alias.py
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from setuptools.command.setopt import config_file, edit_config, option_base
|
| 2 |
+
|
| 3 |
+
from distutils.errors import DistutilsOptionError
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def shquote(arg):
    """Quote an argument for later parsing by shlex.split()"""
    # repr() quoting is needed when the word contains shell-special
    # characters or any whitespace (split() would not round-trip it)
    has_special = any(ch in arg for ch in ('"', "'", "\\", "#"))
    if has_special or arg.split() != [arg]:
        return repr(arg)
    return arg
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class alias(option_base):
    """Define a shortcut that invokes one or more commands"""

    description = "define a shortcut to invoke one or more commands"
    # distutils delivers the remaining command-line words via self.args
    command_consumes_arguments = True

    user_options = [
        ('remove', 'r', 'remove (unset) the alias'),
    ] + option_base.user_options

    boolean_options = option_base.boolean_options + ['remove']

    def initialize_options(self):
        option_base.initialize_options(self)
        self.args = None    # words after "alias": [] / [name] / [name, cmd...]
        self.remove = None  # set by --remove / -r

    def finalize_options(self) -> None:
        option_base.finalize_options(self)
        if self.remove and len(self.args) != 1:
            raise DistutilsOptionError(
                "Must specify exactly one argument (the alias name) when using --remove"
            )

    def run(self) -> None:
        # Dispatch on the argument count:
        #   no args      -> list every alias
        #   one arg      -> show it (or delete it with --remove)
        #   two or more  -> define args[0] as the remaining words
        aliases = self.distribution.get_option_dict('aliases')

        if not self.args:
            print("Command Aliases")
            print("---------------")
            for alias in aliases:
                print("setup.py alias", format_alias(alias, aliases))
            return

        elif len(self.args) == 1:
            (alias,) = self.args
            if self.remove:
                command = None  # edit_config drops the setting when None
            elif alias in aliases:
                print("setup.py alias", format_alias(alias, aliases))
                return
            else:
                print(f"No alias definition found for {alias!r}")
                return
        else:
            alias = self.args[0]
            # re-quote the words so shlex.split() reproduces them later
            command = ' '.join(map(shquote, self.args[1:]))

        edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def format_alias(name, aliases):
    """Render one alias definition as a ``setup.py alias`` command line."""
    source, command = aliases[name]
    # translate the config-file origin back into the flag that selects it
    if source == config_file('global'):
        prefix = '--global-config '
    elif source == config_file('user'):
        prefix = '--user-config '
    elif source == config_file('local'):
        prefix = ''
    else:
        prefix = f'--filename={source!r}'
    return prefix + name + ' ' + command
|
falcon/lib/python3.10/site-packages/setuptools/command/bdist_egg.py
ADDED
|
@@ -0,0 +1,479 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""setuptools.command.bdist_egg
|
| 2 |
+
|
| 3 |
+
Build .egg distributions"""
|
| 4 |
+
|
| 5 |
+
from __future__ import annotations
|
| 6 |
+
|
| 7 |
+
import marshal
|
| 8 |
+
import os
|
| 9 |
+
import re
|
| 10 |
+
import sys
|
| 11 |
+
import textwrap
|
| 12 |
+
from sysconfig import get_path, get_python_version
|
| 13 |
+
from types import CodeType
|
| 14 |
+
from typing import TYPE_CHECKING, Literal
|
| 15 |
+
|
| 16 |
+
from setuptools import Command
|
| 17 |
+
from setuptools.extension import Library
|
| 18 |
+
|
| 19 |
+
from .._path import StrPathT, ensure_directory
|
| 20 |
+
|
| 21 |
+
from distutils import log
|
| 22 |
+
from distutils.dir_util import mkpath, remove_tree
|
| 23 |
+
|
| 24 |
+
if TYPE_CHECKING:
|
| 25 |
+
from typing_extensions import TypeAlias
|
| 26 |
+
|
| 27 |
+
# Same as zipfile._ZipFileMode from typeshed
|
| 28 |
+
_ZipFileMode: TypeAlias = Literal["r", "w", "x", "a"]
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def _get_purelib():
|
| 32 |
+
return get_path("purelib")
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def strip_module(filename):
    """Drop any file extension and a trailing ``module`` suffix."""
    base = os.path.splitext(filename)[0] if '.' in filename else filename
    if base.endswith('module'):
        base = base[: -len('module')]
    return base
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def sorted_walk(dir):
    """Do os.walk in a reproducible way,
    independent of indeterministic filesystem readdir order
    """
    for base, dirs, files in os.walk(dir):
        # sorting `dirs` *in place* matters: os.walk uses this very list to
        # decide which subdirectories to descend into, and in what order
        dirs.sort()
        files.sort()
        yield base, dirs, files
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def write_stub(resource, pyfile) -> None:
    # Write a pure-Python loader stub to *pyfile*: importing the stub
    # resolves the real extension binary (*resource*) inside the egg via
    # pkg_resources and executes it under the stub's own module name.
    _stub_template = textwrap.dedent(
        """
        def __bootstrap__():
            global __bootstrap__, __loader__, __file__
            import sys, pkg_resources, importlib.util
            __file__ = pkg_resources.resource_filename(__name__, %r)
            __loader__ = None; del __bootstrap__, __loader__
            spec = importlib.util.spec_from_file_location(__name__,__file__)
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
        __bootstrap__()
        """
    ).lstrip()
    with open(pyfile, 'w', encoding="utf-8") as f:
        f.write(_stub_template % resource)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class bdist_egg(Command):
    """Build a ``.egg`` distribution: a zip archive of the project's
    installed layout plus an ``EGG-INFO`` metadata directory.

    The build strategy is a "pseudo-installation": the project is installed
    into a temporary tree (``bdist_dir``) by reinitialized ``install_*``
    commands, metadata is copied in, and the tree is zipped up.
    """

    description = 'create an "egg" distribution'

    # distutils-style (long option, short option, help text) triples.
    user_options = [
        ('bdist-dir=', 'b', "temporary directory for creating the distribution"),
        (
            'plat-name=',
            'p',
            "platform name to embed in generated filenames "
            "(by default uses `pkg_resources.get_build_platform()`)",
        ),
        ('exclude-source-files', None, "remove all .py files from the generated egg"),
        (
            'keep-temp',
            'k',
            "keep the pseudo-installation tree around after "
            "creating the distribution archive",
        ),
        ('dist-dir=', 'd', "directory to put final built distributions in"),
        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
    ]

    boolean_options = ['keep-temp', 'skip-build', 'exclude-source-files']

    def initialize_options(self):
        """Reset every option to its pre-finalization default."""
        self.bdist_dir = None
        self.plat_name = None
        self.keep_temp = False
        self.dist_dir = None
        self.skip_build = False
        self.egg_output = None
        self.exclude_source_files = None

    def finalize_options(self) -> None:
        """Resolve unset options and compute the output egg filename."""
        # egg_info must be finalized first: it supplies name/version metadata.
        ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
        self.egg_info = ei_cmd.egg_info

        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'egg')

        if self.plat_name is None:
            # Imported lazily; pkg_resources is expensive to import.
            from pkg_resources import get_build_platform

            self.plat_name = get_build_platform()

        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))

        if self.egg_output is None:
            # Compute filename of the output egg; the platform is embedded
            # only when the project has compiled extension modules.
            basename = ei_cmd._get_egg_basename(
                py_version=get_python_version(),
                platform=self.distribution.has_ext_modules() and self.plat_name,
            )

            self.egg_output = os.path.join(self.dist_dir, basename + '.egg')

    def do_install_data(self) -> None:
        # Hack for packages that install data to install's --install-lib
        self.get_finalized_command('install').install_lib = self.bdist_dir

        site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
        # Temporarily swap in a rewritten data_files list; restored in the
        # finally clause below.
        old, self.distribution.data_files = self.distribution.data_files, []

        for item in old:
            if isinstance(item, tuple) and len(item) == 2:
                if os.path.isabs(item[0]):
                    realpath = os.path.realpath(item[0])
                    normalized = os.path.normcase(realpath)
                    if normalized == site_packages or normalized.startswith(
                        site_packages + os.sep
                    ):
                        # Rebase absolute site-packages targets to paths
                        # relative to the egg root.
                        item = realpath[len(site_packages) + 1 :], item[1]
                        # XXX else: raise ???
            self.distribution.data_files.append(item)

        try:
            log.info("installing package data to %s", self.bdist_dir)
            self.call_command('install_data', force=False, root=None)
        finally:
            self.distribution.data_files = old

    def get_outputs(self):
        """Return the files produced by this command (the single egg)."""
        return [self.egg_output]

    def call_command(self, cmdname, **kw):
        """Invoke reinitialized command `cmdname` with keyword args"""
        # Point every install-directory option at the egg build tree so the
        # sub-command writes into our pseudo-installation.
        for dirname in INSTALL_DIRECTORY_ATTRS:
            kw.setdefault(dirname, self.bdist_dir)
        kw.setdefault('skip_build', self.skip_build)
        kw.setdefault('dry_run', self.dry_run)
        cmd = self.reinitialize_command(cmdname, **kw)
        self.run_command(cmdname)
        return cmd

    def run(self):  # noqa: C901 # is too complex (14) # FIXME
        """Build the egg: install into bdist_dir, write EGG-INFO, zip it."""
        # Generate metadata first
        self.run_command("egg_info")
        # We run install_lib before install_data, because some data hacks
        # pull their data path from the install_lib command.
        log.info("installing library code to %s", self.bdist_dir)
        instcmd = self.get_finalized_command('install')
        old_root = instcmd.root
        # Clear `root` so install paths are not prefixed; restored below.
        instcmd.root = None
        if self.distribution.has_c_libraries() and not self.skip_build:
            self.run_command('build_clib')
        cmd = self.call_command('install_lib', warn_dir=False)
        instcmd.root = old_root

        all_outputs, ext_outputs = self.get_ext_outputs()
        self.stubs = []
        to_compile = []
        # For every compiled extension, drop a .py stub loader beside it so
        # the module is importable from within the zip.
        for p, ext_name in enumerate(ext_outputs):
            filename, _ext = os.path.splitext(ext_name)
            pyfile = os.path.join(self.bdist_dir, strip_module(filename) + '.py')
            self.stubs.append(pyfile)
            log.info("creating stub loader for %s", ext_name)
            if not self.dry_run:
                write_stub(os.path.basename(ext_name), pyfile)
            to_compile.append(pyfile)
            # Normalize to forward slashes for native_libs.txt entries.
            ext_outputs[p] = ext_name.replace(os.sep, '/')

        if to_compile:
            cmd.byte_compile(to_compile)
        if self.distribution.data_files:
            self.do_install_data()

        # Make the EGG-INFO directory
        archive_root = self.bdist_dir
        egg_info = os.path.join(archive_root, 'EGG-INFO')
        self.mkpath(egg_info)
        if self.distribution.scripts:
            script_dir = os.path.join(egg_info, 'scripts')
            log.info("installing scripts to %s", script_dir)
            self.call_command('install_scripts', install_dir=script_dir, no_ep=True)

        self.copy_metadata_to(egg_info)
        # Record (or clear) the list of native libraries shipped in the egg.
        native_libs = os.path.join(egg_info, "native_libs.txt")
        if all_outputs:
            log.info("writing %s", native_libs)
            if not self.dry_run:
                ensure_directory(native_libs)
                with open(native_libs, 'wt', encoding="utf-8") as libs_file:
                    libs_file.write('\n'.join(all_outputs))
                    libs_file.write('\n')
        elif os.path.isfile(native_libs):
            log.info("removing %s", native_libs)
            if not self.dry_run:
                os.unlink(native_libs)

        write_safety_flag(os.path.join(archive_root, 'EGG-INFO'), self.zip_safe())

        if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
            log.warn(
                "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
                "Use the install_requires/extras_require setup() args instead."
            )

        if self.exclude_source_files:
            self.zap_pyfiles()

        # Make the archive
        make_zipfile(
            self.egg_output,
            archive_root,
            verbose=self.verbose,
            dry_run=self.dry_run,
            mode=self.gen_header(),
        )
        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)

        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, 'dist_files', []).append((
            'bdist_egg',
            get_python_version(),
            self.egg_output,
        ))

    def zap_pyfiles(self):
        """Delete .py sources from the build tree, keeping bytecode.

        __pycache__ .pyc files are renamed up one level to the legacy
        module-adjacent layout so they remain importable without source.
        """
        log.info("Removing .py files from temporary directory")
        for base, dirs, files in walk_egg(self.bdist_dir):
            for name in files:
                path = os.path.join(base, name)

                if name.endswith('.py'):
                    log.debug("Deleting %s", path)
                    os.unlink(path)

                if base.endswith('__pycache__'):
                    path_old = path

                    # NOTE(review): assumes every file under __pycache__
                    # matches "<name>.<magic>.pyc"; a non-conforming name
                    # would make `m` None and raise AttributeError — confirm.
                    pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
                    m = re.match(pattern, name)
                    path_new = os.path.join(base, os.pardir, m.group('name') + '.pyc')
                    log.info(f"Renaming file from [{path_old}] to [{path_new}]")
                    try:
                        # Remove any stale target first; os.rename fails on
                        # Windows when the destination exists.
                        os.remove(path_new)
                    except OSError:
                        pass
                    os.rename(path_old, path_new)

    def zip_safe(self):
        """Return the project's zip_safe flag, analyzing contents if unset."""
        safe = getattr(self.distribution, 'zip_safe', None)
        if safe is not None:
            return safe
        log.warn("zip_safe flag not set; analyzing archive contents...")
        return analyze_egg(self.bdist_dir, self.stubs)

    def gen_header(self) -> Literal["w"]:
        # Open mode for the output archive (legacy hook; always plain write).
        return 'w'

    def copy_metadata_to(self, target_dir) -> None:
        "Copy metadata (egg info) to the target_dir"
        # normalize the path (so that a forward-slash in egg_info will
        # match using startswith below)
        norm_egg_info = os.path.normpath(self.egg_info)
        prefix = os.path.join(norm_egg_info, '')
        for path in self.ei_cmd.filelist.files:
            if path.startswith(prefix):
                target = os.path.join(target_dir, path[len(prefix) :])
                ensure_directory(target)
                self.copy_file(path, target)

    def get_ext_outputs(self):
        """Get a list of relative paths to C extensions in the output distro"""

        all_outputs = []
        ext_outputs = []

        # Map each visited directory to its '/'-separated relative prefix.
        paths = {self.bdist_dir: ''}
        for base, dirs, files in sorted_walk(self.bdist_dir):
            all_outputs.extend(
                paths[base] + filename
                for filename in files
                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS
            )
            for filename in dirs:
                paths[os.path.join(base, filename)] = paths[base] + filename + '/'

        if self.distribution.has_ext_modules():
            build_cmd = self.get_finalized_command('build_ext')
            for ext in build_cmd.extensions:
                # Shared libraries (Library) are not importable extensions.
                if isinstance(ext, Library):
                    continue
                fullname = build_cmd.get_ext_fullname(ext.name)
                filename = build_cmd.get_ext_filename(fullname)
                if not os.path.basename(filename).startswith('dl-'):
                    if os.path.exists(os.path.join(self.bdist_dir, filename)):
                        ext_outputs.append(filename)

        return all_outputs, ext_outputs
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
NATIVE_EXTENSIONS: dict[str, None] = dict.fromkeys('.dll .so .dylib .pyd'.split())
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
def walk_egg(egg_dir):
    """Walk an unpacked egg's contents, skipping the EGG-INFO metadata
    directory."""
    walker = sorted_walk(egg_dir)
    top, subdirs, filenames = next(walker)
    # Pruning the in-place dirs list stops the walk from descending into it.
    if 'EGG-INFO' in subdirs:
        subdirs.remove('EGG-INFO')
    yield top, subdirs, filenames
    yield from walker
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
def analyze_egg(egg_dir, stubs):
    """Decide zip-safety of an unpacked egg by scanning its bytecode.

    An explicit flag file in EGG-INFO short-circuits the analysis entirely.
    """
    # check for existing flag in EGG-INFO
    for flag, marker in safety_flags.items():
        if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', marker)):
            return flag
    if not can_scan():
        return False
    safe = True
    for base, dirs, files in walk_egg(egg_dir):
        for name in files:
            if name.endswith(('.py', '.pyw')):
                continue
            if name.endswith(('.pyc', '.pyo')):
                # always scan, even if we already know we're not safe
                safe = scan_module(egg_dir, base, name, stubs) and safe
    return safe
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
def write_safety_flag(egg_dir, safe) -> None:
    """Create/remove the zip-safety marker files in *egg_dir*.

    Exactly the marker matching bool(safe) is left in place; the other one
    (and both, when safe is None) is removed if present.
    """
    for flag, basename in safety_flags.items():
        marker = os.path.join(egg_dir, basename)
        matches = safe is not None and bool(safe) == flag
        if os.path.exists(marker):
            if not matches:
                os.unlink(marker)
        elif matches:
            with open(marker, 'wt', encoding="utf-8") as f:
                f.write('\n')


# Marker filenames keyed by the zip-safety verdict they represent.
safety_flags = {
    True: 'zip-safe',
    False: 'not-zip-safe',
}
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
def scan_module(egg_dir, base, name, stubs):
    """Check whether module possibly uses unsafe-for-zipfile stuff

    Loads the compiled bytecode file ``base/name`` and inspects the names
    and string constants it references.  Returns False when the module
    appears to rely on ``__file__``/``__path__`` or on source-introspecting
    ``inspect`` helpers (which do not work from inside a zip archive),
    True otherwise.  Stub loaders generated for extension modules are
    trusted and not scanned.
    """
    filename = os.path.join(base, name)
    # A "<stub>.pyc" whose "<stub>.py" we wrote ourselves: trust it.
    if filename[:-1] in stubs:
        return True  # Extension module
    pkg = base[len(egg_dir) + 1 :].replace(os.sep, '.')
    module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
    # NOTE(review): 16-byte header assumes the PEP 552 pyc layout (3.7+).
    skip = 16  # skip magic & reserved? & date & file size
    # Use a context manager so the handle is released even when
    # marshal.load() raises (the original leaked the file in that case).
    with open(filename, 'rb') as f:
        f.read(skip)
        code = marshal.load(f)
    safe = True
    # dict used as an ordered set of every symbol the bytecode mentions.
    symbols = dict.fromkeys(iter_symbols(code))
    for bad in ['__file__', '__path__']:
        if bad in symbols:
            log.warn("%s: module references %s", module, bad)
            safe = False
    if 'inspect' in symbols:
        for bad in [
            'getsource',
            'getabsfile',
            'getfile',
            'getsourcefile',
            'getsourcelines',
            'findsource',
            'getcomments',
            'getframeinfo',
            'getinnerframes',
            'getouterframes',
            'stack',
            'trace',
        ]:
            if bad in symbols:
                log.warn("%s: module MAY be using inspect.%s", module, bad)
                safe = False
    return safe
|
| 412 |
+
|
| 413 |
+
|
| 414 |
+
def iter_symbols(code):
    """Yield every name and string constant referenced by *code*,
    recursing into nested code objects (functions, classes, lambdas)."""
    for sym in code.co_names:
        yield sym
    for const in code.co_consts:
        if isinstance(const, CodeType):
            yield from iter_symbols(const)
        elif isinstance(const, str):
            yield const
|
| 422 |
+
|
| 423 |
+
|
| 424 |
+
def can_scan() -> bool:
    """Report whether bytecode scanning is possible on this interpreter."""
    unscannable = sys.platform.startswith('java') or sys.platform == 'cli'
    if not unscannable:
        # CPython, PyPy, etc.
        return True
    log.warn("Unable to analyze compiled code on this platform.")
    log.warn(
        "Please ask the author to include a 'zip_safe'"
        " setting (either True or False) in the package's setup.py"
    )
    return False
|
| 434 |
+
|
| 435 |
+
|
| 436 |
+
# Attribute names of options for commands that might need to be convinced to
# install to the egg build directory
# (consumed by bdist_egg.call_command, which defaults each one to bdist_dir).

INSTALL_DIRECTORY_ATTRS = ['install_lib', 'install_dir', 'install_data', 'install_base']
|
| 440 |
+
|
| 441 |
+
|
| 442 |
+
def make_zipfile(
    zip_filename: StrPathT,
    base_dir,
    verbose: bool = False,
    dry_run: bool = False,
    compress=True,
    mode: _ZipFileMode = 'w',
) -> StrPathT:
    """Create a zip file at 'zip_filename' from all the files under
    'base_dir', using Python's "zipfile" module.  Archive member names are
    relative to 'base_dir'.  When 'dry_run' is true the walk is performed
    and logged but nothing is written.  Returns 'zip_filename'.

    ('verbose' is accepted for distutils signature compatibility and is
    otherwise unused here.)
    """
    import zipfile

    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)  # type: ignore[arg-type] # python/mypy#18075
    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

    def visit(z, dirname, names):
        # Add each regular file under base_dir at its archive-relative path.
        for name in names:
            path = os.path.normpath(os.path.join(dirname, name))
            if os.path.isfile(path):
                p = path[len(base_dir) + 1 :]
                if not dry_run:
                    z.write(path, p)
                log.debug("adding '%s'", p)

    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    if not dry_run:
        # Context manager guarantees the archive is closed (and its central
        # directory flushed) even if visit() raises; the original code left
        # the handle open on error.
        with zipfile.ZipFile(zip_filename, mode, compression=compression) as z:
            for dirname, dirs, files in sorted_walk(base_dir):
                visit(z, dirname, files)
    else:
        for dirname, dirs, files in sorted_walk(base_dir):
            visit(None, dirname, files)
    return zip_filename
|
falcon/lib/python3.10/site-packages/setuptools/command/bdist_rpm.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from ..dist import Distribution
|
| 2 |
+
from ..warnings import SetuptoolsDeprecationWarning
|
| 3 |
+
|
| 4 |
+
import distutils.command.bdist_rpm as orig
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class bdist_rpm(orig.bdist_rpm):
    """
    Override the default bdist_rpm behavior to do the following:

    1. Run egg_info to ensure the name and version are properly calculated.
    2. Always run 'install' using --single-version-externally-managed to
       disable eggs in RPM distributions.
    """

    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution

    def run(self) -> None:
        """Emit the deprecation warning, refresh metadata, then build."""
        SetuptoolsDeprecationWarning.emit(
            "Deprecated command",
            """
            bdist_rpm is deprecated and will be removed in a future version.
            Use bdist_wheel (wheel packages) instead.
            """,
            see_url="https://github.com/pypa/setuptools/issues/1988",
            due_date=(2023, 10, 30),  # Deprecation introduced in 22 Oct 2021.
        )

        # ensure distro name is up-to-date
        self.run_command('egg_info')

        orig.bdist_rpm.run(self)

    def _make_spec_file(self):
        """Return the distutils spec file with the two install tweaks:
        --single-version-externally-managed on install, and a %setup name
        matching the unmangled version."""
        spec = orig.bdist_rpm._make_spec_file(self)
        return [
            line.replace(
                "setup.py install ",
                "setup.py install --single-version-externally-managed ",
            ).replace("%setup", "%setup -n %{name}-%{unmangled_version}")
            for line in spec
        ]
|
falcon/lib/python3.10/site-packages/setuptools/command/bdist_wheel.py
ADDED
|
@@ -0,0 +1,610 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Create a wheel (.whl) distribution.
|
| 3 |
+
|
| 4 |
+
A wheel is a built archive format.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from __future__ import annotations
|
| 8 |
+
|
| 9 |
+
import os
|
| 10 |
+
import re
|
| 11 |
+
import shutil
|
| 12 |
+
import struct
|
| 13 |
+
import sys
|
| 14 |
+
import sysconfig
|
| 15 |
+
import warnings
|
| 16 |
+
from collections.abc import Iterable, Sequence
|
| 17 |
+
from email.generator import BytesGenerator
|
| 18 |
+
from glob import iglob
|
| 19 |
+
from typing import Literal, cast
|
| 20 |
+
from zipfile import ZIP_DEFLATED, ZIP_STORED
|
| 21 |
+
|
| 22 |
+
from packaging import tags, version as _packaging_version
|
| 23 |
+
from wheel.wheelfile import WheelFile
|
| 24 |
+
|
| 25 |
+
from .. import Command, __version__, _shutil
|
| 26 |
+
from ..warnings import SetuptoolsDeprecationWarning
|
| 27 |
+
from .egg_info import egg_info as egg_info_cls
|
| 28 |
+
|
| 29 |
+
from distutils import log
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def safe_name(name: str) -> str:
    """Convert an arbitrary string to a standard distribution name.

    Each run of characters outside [A-Za-z0-9.] is collapsed into a single
    '-'.
    """
    disallowed = re.compile("[^A-Za-z0-9.]+")
    return disallowed.sub("-", name)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def safe_version(version: str) -> str:
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # normalize the version
        parsed = _packaging_version.Version(version)
    except _packaging_version.InvalidVersion:
        # Fall back to a lossy sanitization for non-PEP-440 strings.
        fallback = version.replace(" ", ".")
        return re.sub("[^A-Za-z0-9.]+", "-", fallback)
    return str(parsed)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
# Major component of the running setuptools version (e.g. 70 for "70.1.0").
setuptools_major_version = int(__version__.split(".")[0])

# Accepted shape of a --py-limited-api value (e.g. "cp32"); validated in
# bdist_wheel._validate_py_limited_api.
PY_LIMITED_API_PATTERN = r"cp3\d"
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def _is_32bit_interpreter() -> bool:
|
| 57 |
+
return struct.calcsize("P") == 4
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def python_tag() -> str:
    """Generic interpreter tag for the current major Python version."""
    major = sys.version_info.major
    return f"py{major}"
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def get_platform(archive_root: str | None) -> str:
    """Return our platform name 'win32', 'linux_x86_64'"""
    platform_name = sysconfig.get_platform()
    if platform_name.startswith("macosx") and archive_root is not None:  # pragma: no cover
        from wheel.macosx_libfile import calculate_macosx_platform_tag

        platform_name = calculate_macosx_platform_tag(archive_root, platform_name)
    elif _is_32bit_interpreter():
        # A 32-bit interpreter on a 64-bit kernel reports the kernel's
        # platform; downgrade the tag to the interpreter's actual ABI.
        downgrades = {
            "linux-x86_64": "linux-i686",  # pip pull request #3497
            # packaging pull request #234
            # TODO armv8l, packaging pull request #690 => this did not land
            # in pip/packaging yet
            "linux-aarch64": "linux-armv7l",
        }
        platform_name = downgrades.get(platform_name, platform_name)

    return platform_name.replace("-", "_")
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def get_flag(
    var: str, fallback: bool, expected: bool = True, warn: bool = True
) -> bool:
    """Use a fallback value for determining SOABI flags if the needed config
    var is unset or unavailable."""
    value = sysconfig.get_config_var(var)
    if value is not None:
        return value == expected
    if warn:
        warnings.warn(
            f"Config variable '{var}' is unset, Python ABI tag may be incorrect",
            RuntimeWarning,
            stacklevel=2,
        )
    return fallback
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def get_abi_tag() -> str | None:
    """Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy2)."""
    soabi: str = sysconfig.get_config_var("SOABI")
    impl = tags.interpreter_name()

    if not soabi:
        # No SOABI at all: synthesize one for CPython/PyPy, else give up.
        if impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
            debug_suffix = ""
            if get_flag(
                "Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")
            ):
                debug_suffix = "d"
            return f"{impl}{tags.interpreter_version()}{debug_suffix}"
        return None

    if impl == "cp" and soabi.startswith("cpython"):
        # non-Windows
        return "cp" + soabi.split("-")[1]

    if impl == "cp" and soabi.startswith("cp"):
        # Windows
        tag = soabi.split("-")[0]
        if hasattr(sys, "gettotalrefcount"):
            # using debug build; append "d" flag
            tag += "d"
        return tag

    if impl == "pp":
        # we want something like pypy36-pp73
        return "-".join(soabi.split("-")[:2]).replace(".", "_").replace("-", "_")

    if impl == "graalpy":
        return "-".join(soabi.split("-")[:3]).replace(".", "_").replace("-", "_")

    return soabi.replace(".", "_").replace("-", "_")
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def safer_name(name: str) -> str:
    """Like safe_name, but additionally maps '-' to '_' for wheel filenames."""
    sanitized = safe_name(name)
    return sanitized.replace("-", "_")
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def safer_version(version: str) -> str:
    """Like safe_version, but additionally maps '-' to '_' for wheel filenames."""
    normalized = safe_version(version)
    return normalized.replace("-", "_")
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
class bdist_wheel(Command):
|
| 145 |
+
description = "create a wheel distribution"
|
| 146 |
+
|
| 147 |
+
supported_compressions = {
|
| 148 |
+
"stored": ZIP_STORED,
|
| 149 |
+
"deflated": ZIP_DEFLATED,
|
| 150 |
+
}
|
| 151 |
+
|
| 152 |
+
user_options = [
|
| 153 |
+
("bdist-dir=", "b", "temporary directory for creating the distribution"),
|
| 154 |
+
(
|
| 155 |
+
"plat-name=",
|
| 156 |
+
"p",
|
| 157 |
+
"platform name to embed in generated filenames "
|
| 158 |
+
f"[default: {get_platform(None)}]",
|
| 159 |
+
),
|
| 160 |
+
(
|
| 161 |
+
"keep-temp",
|
| 162 |
+
"k",
|
| 163 |
+
"keep the pseudo-installation tree around after "
|
| 164 |
+
"creating the distribution archive",
|
| 165 |
+
),
|
| 166 |
+
("dist-dir=", "d", "directory to put final built distributions in"),
|
| 167 |
+
("skip-build", None, "skip rebuilding everything (for testing/debugging)"),
|
| 168 |
+
(
|
| 169 |
+
"relative",
|
| 170 |
+
None,
|
| 171 |
+
"build the archive using relative paths [default: false]",
|
| 172 |
+
),
|
| 173 |
+
(
|
| 174 |
+
"owner=",
|
| 175 |
+
"u",
|
| 176 |
+
"Owner name used when creating a tar file [default: current user]",
|
| 177 |
+
),
|
| 178 |
+
(
|
| 179 |
+
"group=",
|
| 180 |
+
"g",
|
| 181 |
+
"Group name used when creating a tar file [default: current group]",
|
| 182 |
+
),
|
| 183 |
+
("universal", None, "*DEPRECATED* make a universal wheel [default: false]"),
|
| 184 |
+
(
|
| 185 |
+
"compression=",
|
| 186 |
+
None,
|
| 187 |
+
f"zipfile compression (one of: {', '.join(supported_compressions)}) [default: 'deflated']",
|
| 188 |
+
),
|
| 189 |
+
(
|
| 190 |
+
"python-tag=",
|
| 191 |
+
None,
|
| 192 |
+
f"Python implementation compatibility tag [default: '{python_tag()}']",
|
| 193 |
+
),
|
| 194 |
+
(
|
| 195 |
+
"build-number=",
|
| 196 |
+
None,
|
| 197 |
+
"Build number for this particular version. "
|
| 198 |
+
"As specified in PEP-0427, this must start with a digit. "
|
| 199 |
+
"[default: None]",
|
| 200 |
+
),
|
| 201 |
+
(
|
| 202 |
+
"py-limited-api=",
|
| 203 |
+
None,
|
| 204 |
+
"Python tag (cp32|cp33|cpNN) for abi3 wheel tag [default: false]",
|
| 205 |
+
),
|
| 206 |
+
(
|
| 207 |
+
"dist-info-dir=",
|
| 208 |
+
None,
|
| 209 |
+
"directory where a pre-generated dist-info can be found (e.g. as a "
|
| 210 |
+
"result of calling the PEP517 'prepare_metadata_for_build_wheel' "
|
| 211 |
+
"method)",
|
| 212 |
+
),
|
| 213 |
+
]
|
| 214 |
+
|
| 215 |
+
boolean_options = ["keep-temp", "skip-build", "relative", "universal"]
|
| 216 |
+
|
| 217 |
+
def initialize_options(self) -> None:
|
| 218 |
+
self.bdist_dir: str | None = None
|
| 219 |
+
self.data_dir = ""
|
| 220 |
+
self.plat_name: str | None = None
|
| 221 |
+
self.plat_tag: str | None = None
|
| 222 |
+
self.format = "zip"
|
| 223 |
+
self.keep_temp = False
|
| 224 |
+
self.dist_dir: str | None = None
|
| 225 |
+
self.dist_info_dir = None
|
| 226 |
+
self.egginfo_dir: str | None = None
|
| 227 |
+
self.root_is_pure: bool | None = None
|
| 228 |
+
self.skip_build = False
|
| 229 |
+
self.relative = False
|
| 230 |
+
self.owner = None
|
| 231 |
+
self.group = None
|
| 232 |
+
self.universal = False
|
| 233 |
+
self.compression: str | int = "deflated"
|
| 234 |
+
self.python_tag = python_tag()
|
| 235 |
+
self.build_number: str | None = None
|
| 236 |
+
self.py_limited_api: str | Literal[False] = False
|
| 237 |
+
self.plat_name_supplied = False
|
| 238 |
+
|
| 239 |
+
def finalize_options(self) -> None:
|
| 240 |
+
if not self.bdist_dir:
|
| 241 |
+
bdist_base = self.get_finalized_command("bdist").bdist_base
|
| 242 |
+
self.bdist_dir = os.path.join(bdist_base, "wheel")
|
| 243 |
+
|
| 244 |
+
if self.dist_info_dir is None:
|
| 245 |
+
egg_info = cast(egg_info_cls, self.distribution.get_command_obj("egg_info"))
|
| 246 |
+
egg_info.ensure_finalized() # needed for correct `wheel_dist_name`
|
| 247 |
+
|
| 248 |
+
self.data_dir = self.wheel_dist_name + ".data"
|
| 249 |
+
self.plat_name_supplied = bool(self.plat_name)
|
| 250 |
+
|
| 251 |
+
need_options = ("dist_dir", "plat_name", "skip_build")
|
| 252 |
+
|
| 253 |
+
self.set_undefined_options("bdist", *zip(need_options, need_options))
|
| 254 |
+
|
| 255 |
+
self.root_is_pure = not (
|
| 256 |
+
self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
|
| 257 |
+
)
|
| 258 |
+
|
| 259 |
+
self._validate_py_limited_api()
|
| 260 |
+
|
| 261 |
+
# Support legacy [wheel] section for setting universal
|
| 262 |
+
wheel = self.distribution.get_option_dict("wheel")
|
| 263 |
+
if "universal" in wheel: # pragma: no cover
|
| 264 |
+
# please don't define this in your global configs
|
| 265 |
+
log.warn("The [wheel] section is deprecated. Use [bdist_wheel] instead.")
|
| 266 |
+
val = wheel["universal"][1].strip()
|
| 267 |
+
if val.lower() in ("1", "true", "yes"):
|
| 268 |
+
self.universal = True
|
| 269 |
+
|
| 270 |
+
if self.universal:
|
| 271 |
+
SetuptoolsDeprecationWarning.emit(
|
| 272 |
+
"bdist_wheel.universal is deprecated",
|
| 273 |
+
"""
|
| 274 |
+
With Python 2.7 end-of-life, support for building universal wheels
|
| 275 |
+
(i.e., wheels that support both Python 2 and Python 3)
|
| 276 |
+
is being obviated.
|
| 277 |
+
Please discontinue using this option, or if you still need it,
|
| 278 |
+
file an issue with pypa/setuptools describing your use case.
|
| 279 |
+
""",
|
| 280 |
+
due_date=(2025, 8, 30), # Introduced in 2024-08-30
|
| 281 |
+
)
|
| 282 |
+
|
| 283 |
+
if self.build_number is not None and not self.build_number[:1].isdigit():
|
| 284 |
+
raise ValueError("Build tag (build-number) must start with a digit.")
|
| 285 |
+
|
| 286 |
+
def _validate_py_limited_api(self) -> None:
|
| 287 |
+
if not self.py_limited_api:
|
| 288 |
+
return
|
| 289 |
+
|
| 290 |
+
if not re.match(PY_LIMITED_API_PATTERN, self.py_limited_api):
|
| 291 |
+
raise ValueError(f"py-limited-api must match '{PY_LIMITED_API_PATTERN}'")
|
| 292 |
+
|
| 293 |
+
if sysconfig.get_config_var("Py_GIL_DISABLED"):
|
| 294 |
+
raise ValueError(
|
| 295 |
+
f"`py_limited_api={self.py_limited_api!r}` not supported. "
|
| 296 |
+
"`Py_LIMITED_API` is currently incompatible with "
|
| 297 |
+
f"`Py_GIL_DISABLED` ({sys.abiflags=!r}). "
|
| 298 |
+
"See https://github.com/python/cpython/issues/111506."
|
| 299 |
+
)
|
| 300 |
+
|
| 301 |
+
@property
|
| 302 |
+
def wheel_dist_name(self) -> str:
|
| 303 |
+
"""Return distribution full name with - replaced with _"""
|
| 304 |
+
components = [
|
| 305 |
+
safer_name(self.distribution.get_name()),
|
| 306 |
+
safer_version(self.distribution.get_version()),
|
| 307 |
+
]
|
| 308 |
+
if self.build_number:
|
| 309 |
+
components.append(self.build_number)
|
| 310 |
+
return "-".join(components)
|
| 311 |
+
|
| 312 |
+
def get_tag(self) -> tuple[str, str, str]:
|
| 313 |
+
# bdist sets self.plat_name if unset, we should only use it for purepy
|
| 314 |
+
# wheels if the user supplied it.
|
| 315 |
+
if self.plat_name_supplied and self.plat_name:
|
| 316 |
+
plat_name = self.plat_name
|
| 317 |
+
elif self.root_is_pure:
|
| 318 |
+
plat_name = "any"
|
| 319 |
+
else:
|
| 320 |
+
# macosx contains system version in platform name so need special handle
|
| 321 |
+
if self.plat_name and not self.plat_name.startswith("macosx"):
|
| 322 |
+
plat_name = self.plat_name
|
| 323 |
+
else:
|
| 324 |
+
# on macosx always limit the platform name to comply with any
|
| 325 |
+
# c-extension modules in bdist_dir, since the user can specify
|
| 326 |
+
# a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake
|
| 327 |
+
|
| 328 |
+
# on other platforms, and on macosx if there are no c-extension
|
| 329 |
+
# modules, use the default platform name.
|
| 330 |
+
plat_name = get_platform(self.bdist_dir)
|
| 331 |
+
|
| 332 |
+
if _is_32bit_interpreter():
|
| 333 |
+
if plat_name in ("linux-x86_64", "linux_x86_64"):
|
| 334 |
+
plat_name = "linux_i686"
|
| 335 |
+
if plat_name in ("linux-aarch64", "linux_aarch64"):
|
| 336 |
+
# TODO armv8l, packaging pull request #690 => this did not land
|
| 337 |
+
# in pip/packaging yet
|
| 338 |
+
plat_name = "linux_armv7l"
|
| 339 |
+
|
| 340 |
+
plat_name = (
|
| 341 |
+
plat_name.lower().replace("-", "_").replace(".", "_").replace(" ", "_")
|
| 342 |
+
)
|
| 343 |
+
|
| 344 |
+
if self.root_is_pure:
|
| 345 |
+
if self.universal:
|
| 346 |
+
impl = "py2.py3"
|
| 347 |
+
else:
|
| 348 |
+
impl = self.python_tag
|
| 349 |
+
tag = (impl, "none", plat_name)
|
| 350 |
+
else:
|
| 351 |
+
impl_name = tags.interpreter_name()
|
| 352 |
+
impl_ver = tags.interpreter_version()
|
| 353 |
+
impl = impl_name + impl_ver
|
| 354 |
+
# We don't work on CPython 3.1, 3.0.
|
| 355 |
+
if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"):
|
| 356 |
+
impl = self.py_limited_api
|
| 357 |
+
abi_tag = "abi3"
|
| 358 |
+
else:
|
| 359 |
+
abi_tag = str(get_abi_tag()).lower()
|
| 360 |
+
tag = (impl, abi_tag, plat_name)
|
| 361 |
+
# issue gh-374: allow overriding plat_name
|
| 362 |
+
supported_tags = [
|
| 363 |
+
(t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
|
| 364 |
+
]
|
| 365 |
+
assert tag in supported_tags, (
|
| 366 |
+
f"would build wheel with unsupported tag {tag}"
|
| 367 |
+
)
|
| 368 |
+
return tag
|
| 369 |
+
|
| 370 |
+
def run(self):
|
| 371 |
+
build_scripts = self.reinitialize_command("build_scripts")
|
| 372 |
+
build_scripts.executable = "python"
|
| 373 |
+
build_scripts.force = True
|
| 374 |
+
|
| 375 |
+
build_ext = self.reinitialize_command("build_ext")
|
| 376 |
+
build_ext.inplace = False
|
| 377 |
+
|
| 378 |
+
if not self.skip_build:
|
| 379 |
+
self.run_command("build")
|
| 380 |
+
|
| 381 |
+
install = self.reinitialize_command("install", reinit_subcommands=True)
|
| 382 |
+
install.root = self.bdist_dir
|
| 383 |
+
install.compile = False
|
| 384 |
+
install.skip_build = self.skip_build
|
| 385 |
+
install.warn_dir = False
|
| 386 |
+
|
| 387 |
+
# A wheel without setuptools scripts is more cross-platform.
|
| 388 |
+
# Use the (undocumented) `no_ep` option to setuptools'
|
| 389 |
+
# install_scripts command to avoid creating entry point scripts.
|
| 390 |
+
install_scripts = self.reinitialize_command("install_scripts")
|
| 391 |
+
install_scripts.no_ep = True
|
| 392 |
+
|
| 393 |
+
# Use a custom scheme for the archive, because we have to decide
|
| 394 |
+
# at installation time which scheme to use.
|
| 395 |
+
for key in ("headers", "scripts", "data", "purelib", "platlib"):
|
| 396 |
+
setattr(install, "install_" + key, os.path.join(self.data_dir, key))
|
| 397 |
+
|
| 398 |
+
basedir_observed = ""
|
| 399 |
+
|
| 400 |
+
if os.name == "nt":
|
| 401 |
+
# win32 barfs if any of these are ''; could be '.'?
|
| 402 |
+
# (distutils.command.install:change_roots bug)
|
| 403 |
+
basedir_observed = os.path.normpath(os.path.join(self.data_dir, ".."))
|
| 404 |
+
self.install_libbase = self.install_lib = basedir_observed
|
| 405 |
+
|
| 406 |
+
setattr(
|
| 407 |
+
install,
|
| 408 |
+
"install_purelib" if self.root_is_pure else "install_platlib",
|
| 409 |
+
basedir_observed,
|
| 410 |
+
)
|
| 411 |
+
|
| 412 |
+
log.info(f"installing to {self.bdist_dir}")
|
| 413 |
+
|
| 414 |
+
self.run_command("install")
|
| 415 |
+
|
| 416 |
+
impl_tag, abi_tag, plat_tag = self.get_tag()
|
| 417 |
+
archive_basename = f"{self.wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}"
|
| 418 |
+
if not self.relative:
|
| 419 |
+
archive_root = self.bdist_dir
|
| 420 |
+
else:
|
| 421 |
+
archive_root = os.path.join(
|
| 422 |
+
self.bdist_dir, self._ensure_relative(install.install_base)
|
| 423 |
+
)
|
| 424 |
+
|
| 425 |
+
self.set_undefined_options("install_egg_info", ("target", "egginfo_dir"))
|
| 426 |
+
distinfo_dirname = (
|
| 427 |
+
f"{safer_name(self.distribution.get_name())}-"
|
| 428 |
+
f"{safer_version(self.distribution.get_version())}.dist-info"
|
| 429 |
+
)
|
| 430 |
+
distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
|
| 431 |
+
if self.dist_info_dir:
|
| 432 |
+
# Use the given dist-info directly.
|
| 433 |
+
log.debug(f"reusing {self.dist_info_dir}")
|
| 434 |
+
shutil.copytree(self.dist_info_dir, distinfo_dir)
|
| 435 |
+
# Egg info is still generated, so remove it now to avoid it getting
|
| 436 |
+
# copied into the wheel.
|
| 437 |
+
_shutil.rmtree(self.egginfo_dir)
|
| 438 |
+
else:
|
| 439 |
+
# Convert the generated egg-info into dist-info.
|
| 440 |
+
self.egg2dist(self.egginfo_dir, distinfo_dir)
|
| 441 |
+
|
| 442 |
+
self.write_wheelfile(distinfo_dir)
|
| 443 |
+
|
| 444 |
+
# Make the archive
|
| 445 |
+
if not os.path.exists(self.dist_dir):
|
| 446 |
+
os.makedirs(self.dist_dir)
|
| 447 |
+
|
| 448 |
+
wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl")
|
| 449 |
+
with WheelFile(wheel_path, "w", self._zip_compression()) as wf:
|
| 450 |
+
wf.write_files(archive_root)
|
| 451 |
+
|
| 452 |
+
# Add to 'Distribution.dist_files' so that the "upload" command works
|
| 453 |
+
getattr(self.distribution, "dist_files", []).append((
|
| 454 |
+
"bdist_wheel",
|
| 455 |
+
f"{sys.version_info.major}.{sys.version_info.minor}",
|
| 456 |
+
wheel_path,
|
| 457 |
+
))
|
| 458 |
+
|
| 459 |
+
if not self.keep_temp:
|
| 460 |
+
log.info(f"removing {self.bdist_dir}")
|
| 461 |
+
if not self.dry_run:
|
| 462 |
+
_shutil.rmtree(self.bdist_dir)
|
| 463 |
+
|
| 464 |
+
def write_wheelfile(
|
| 465 |
+
self, wheelfile_base: str, generator: str = f"setuptools ({__version__})"
|
| 466 |
+
) -> None:
|
| 467 |
+
from email.message import Message
|
| 468 |
+
|
| 469 |
+
msg = Message()
|
| 470 |
+
msg["Wheel-Version"] = "1.0" # of the spec
|
| 471 |
+
msg["Generator"] = generator
|
| 472 |
+
msg["Root-Is-Purelib"] = str(self.root_is_pure).lower()
|
| 473 |
+
if self.build_number is not None:
|
| 474 |
+
msg["Build"] = self.build_number
|
| 475 |
+
|
| 476 |
+
# Doesn't work for bdist_wininst
|
| 477 |
+
impl_tag, abi_tag, plat_tag = self.get_tag()
|
| 478 |
+
for impl in impl_tag.split("."):
|
| 479 |
+
for abi in abi_tag.split("."):
|
| 480 |
+
for plat in plat_tag.split("."):
|
| 481 |
+
msg["Tag"] = "-".join((impl, abi, plat))
|
| 482 |
+
|
| 483 |
+
wheelfile_path = os.path.join(wheelfile_base, "WHEEL")
|
| 484 |
+
log.info(f"creating {wheelfile_path}")
|
| 485 |
+
with open(wheelfile_path, "wb") as f:
|
| 486 |
+
BytesGenerator(f, maxheaderlen=0).flatten(msg)
|
| 487 |
+
|
| 488 |
+
def _ensure_relative(self, path: str) -> str:
|
| 489 |
+
# copied from dir_util, deleted
|
| 490 |
+
drive, path = os.path.splitdrive(path)
|
| 491 |
+
if path[0:1] == os.sep:
|
| 492 |
+
path = drive + path[1:]
|
| 493 |
+
return path
|
| 494 |
+
|
| 495 |
+
@property
|
| 496 |
+
def license_paths(self) -> Iterable[str]:
|
| 497 |
+
if setuptools_major_version >= 57:
|
| 498 |
+
# Setuptools has resolved any patterns to actual file names
|
| 499 |
+
return self.distribution.metadata.license_files or ()
|
| 500 |
+
|
| 501 |
+
files = set[str]()
|
| 502 |
+
metadata = self.distribution.get_option_dict("metadata")
|
| 503 |
+
if setuptools_major_version >= 42:
|
| 504 |
+
# Setuptools recognizes the license_files option but does not do globbing
|
| 505 |
+
patterns = cast(Sequence[str], self.distribution.metadata.license_files)
|
| 506 |
+
else:
|
| 507 |
+
# Prior to those, wheel is entirely responsible for handling license files
|
| 508 |
+
if "license_files" in metadata:
|
| 509 |
+
patterns = metadata["license_files"][1].split()
|
| 510 |
+
else:
|
| 511 |
+
patterns = ()
|
| 512 |
+
|
| 513 |
+
if "license_file" in metadata:
|
| 514 |
+
warnings.warn(
|
| 515 |
+
'The "license_file" option is deprecated. Use "license_files" instead.',
|
| 516 |
+
DeprecationWarning,
|
| 517 |
+
stacklevel=2,
|
| 518 |
+
)
|
| 519 |
+
files.add(metadata["license_file"][1])
|
| 520 |
+
|
| 521 |
+
if not files and not patterns and not isinstance(patterns, list):
|
| 522 |
+
patterns = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*")
|
| 523 |
+
|
| 524 |
+
for pattern in patterns:
|
| 525 |
+
for path in iglob(pattern):
|
| 526 |
+
if path.endswith("~"):
|
| 527 |
+
log.debug(
|
| 528 |
+
f'ignoring license file "{path}" as it looks like a backup'
|
| 529 |
+
)
|
| 530 |
+
continue
|
| 531 |
+
|
| 532 |
+
if path not in files and os.path.isfile(path):
|
| 533 |
+
log.info(
|
| 534 |
+
f'adding license file "{path}" (matched pattern "{pattern}")'
|
| 535 |
+
)
|
| 536 |
+
files.add(path)
|
| 537 |
+
|
| 538 |
+
return files
|
| 539 |
+
|
| 540 |
+
def egg2dist(self, egginfo_path: str, distinfo_path: str) -> None:
|
| 541 |
+
"""Convert an .egg-info directory into a .dist-info directory"""
|
| 542 |
+
|
| 543 |
+
def adios(p: str) -> None:
|
| 544 |
+
"""Appropriately delete directory, file or link."""
|
| 545 |
+
if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
|
| 546 |
+
_shutil.rmtree(p)
|
| 547 |
+
elif os.path.exists(p):
|
| 548 |
+
os.unlink(p)
|
| 549 |
+
|
| 550 |
+
adios(distinfo_path)
|
| 551 |
+
|
| 552 |
+
if not os.path.exists(egginfo_path):
|
| 553 |
+
# There is no egg-info. This is probably because the egg-info
|
| 554 |
+
# file/directory is not named matching the distribution name used
|
| 555 |
+
# to name the archive file. Check for this case and report
|
| 556 |
+
# accordingly.
|
| 557 |
+
import glob
|
| 558 |
+
|
| 559 |
+
pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info")
|
| 560 |
+
possible = glob.glob(pat)
|
| 561 |
+
err = f"Egg metadata expected at {egginfo_path} but not found"
|
| 562 |
+
if possible:
|
| 563 |
+
alt = os.path.basename(possible[0])
|
| 564 |
+
err += f" ({alt} found - possible misnamed archive file?)"
|
| 565 |
+
|
| 566 |
+
raise ValueError(err)
|
| 567 |
+
|
| 568 |
+
# .egg-info is a directory
|
| 569 |
+
pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
|
| 570 |
+
|
| 571 |
+
# ignore common egg metadata that is useless to wheel
|
| 572 |
+
shutil.copytree(
|
| 573 |
+
egginfo_path,
|
| 574 |
+
distinfo_path,
|
| 575 |
+
ignore=lambda x, y: {
|
| 576 |
+
"PKG-INFO",
|
| 577 |
+
"requires.txt",
|
| 578 |
+
"SOURCES.txt",
|
| 579 |
+
"not-zip-safe",
|
| 580 |
+
},
|
| 581 |
+
)
|
| 582 |
+
|
| 583 |
+
# delete dependency_links if it is only whitespace
|
| 584 |
+
dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
|
| 585 |
+
with open(dependency_links_path, encoding="utf-8") as dependency_links_file:
|
| 586 |
+
dependency_links = dependency_links_file.read().strip()
|
| 587 |
+
if not dependency_links:
|
| 588 |
+
adios(dependency_links_path)
|
| 589 |
+
|
| 590 |
+
metadata_path = os.path.join(distinfo_path, "METADATA")
|
| 591 |
+
shutil.copy(pkginfo_path, metadata_path)
|
| 592 |
+
|
| 593 |
+
for license_path in self.license_paths:
|
| 594 |
+
filename = os.path.basename(license_path)
|
| 595 |
+
shutil.copy(license_path, os.path.join(distinfo_path, filename))
|
| 596 |
+
|
| 597 |
+
adios(egginfo_path)
|
| 598 |
+
|
| 599 |
+
def _zip_compression(self) -> int:
|
| 600 |
+
if (
|
| 601 |
+
isinstance(self.compression, int)
|
| 602 |
+
and self.compression in self.supported_compressions.values()
|
| 603 |
+
):
|
| 604 |
+
return self.compression
|
| 605 |
+
|
| 606 |
+
compression = self.supported_compressions.get(str(self.compression))
|
| 607 |
+
if compression is not None:
|
| 608 |
+
return compression
|
| 609 |
+
|
| 610 |
+
raise ValueError(f"Unsupported compression: {self.compression!r}")
|
falcon/lib/python3.10/site-packages/setuptools/command/build.py
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import Protocol
|
| 4 |
+
|
| 5 |
+
from ..dist import Distribution
|
| 6 |
+
|
| 7 |
+
from distutils.command.build import build as _build
|
| 8 |
+
|
| 9 |
+
_ORIGINAL_SUBCOMMANDS = {"build_py", "build_clib", "build_ext", "build_scripts"}
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class build(_build):
|
| 13 |
+
distribution: Distribution # override distutils.dist.Distribution with setuptools.dist.Distribution
|
| 14 |
+
|
| 15 |
+
# copy to avoid sharing the object with parent class
|
| 16 |
+
sub_commands = _build.sub_commands[:]
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class SubCommand(Protocol):
|
| 20 |
+
"""In order to support editable installations (see :pep:`660`) all
|
| 21 |
+
build subcommands **SHOULD** implement this protocol. They also **MUST** inherit
|
| 22 |
+
from ``setuptools.Command``.
|
| 23 |
+
|
| 24 |
+
When creating an :pep:`editable wheel <660>`, ``setuptools`` will try to evaluate
|
| 25 |
+
custom ``build`` subcommands using the following procedure:
|
| 26 |
+
|
| 27 |
+
1. ``setuptools`` will set the ``editable_mode`` attribute to ``True``
|
| 28 |
+
2. ``setuptools`` will execute the ``run()`` command.
|
| 29 |
+
|
| 30 |
+
.. important::
|
| 31 |
+
Subcommands **SHOULD** take advantage of ``editable_mode=True`` to adequate
|
| 32 |
+
its behaviour or perform optimisations.
|
| 33 |
+
|
| 34 |
+
For example, if a subcommand doesn't need to generate an extra file and
|
| 35 |
+
all it does is to copy a source file into the build directory,
|
| 36 |
+
``run()`` **SHOULD** simply "early return".
|
| 37 |
+
|
| 38 |
+
Similarly, if the subcommand creates files that would be placed alongside
|
| 39 |
+
Python files in the final distribution, during an editable install
|
| 40 |
+
the command **SHOULD** generate these files "in place" (i.e. write them to
|
| 41 |
+
the original source directory, instead of using the build directory).
|
| 42 |
+
Note that ``get_output_mapping()`` should reflect that and include mappings
|
| 43 |
+
for "in place" builds accordingly.
|
| 44 |
+
|
| 45 |
+
3. ``setuptools`` use any knowledge it can derive from the return values of
|
| 46 |
+
``get_outputs()`` and ``get_output_mapping()`` to create an editable wheel.
|
| 47 |
+
When relevant ``setuptools`` **MAY** attempt to use file links based on the value
|
| 48 |
+
of ``get_output_mapping()``. Alternatively, ``setuptools`` **MAY** attempt to use
|
| 49 |
+
:doc:`import hooks <python:reference/import>` to redirect any attempt to import
|
| 50 |
+
to the directory with the original source code and other files built in place.
|
| 51 |
+
|
| 52 |
+
Please note that custom sub-commands **SHOULD NOT** rely on ``run()`` being
|
| 53 |
+
executed (or not) to provide correct return values for ``get_outputs()``,
|
| 54 |
+
``get_output_mapping()`` or ``get_source_files()``. The ``get_*`` methods should
|
| 55 |
+
work independently of ``run()``.
|
| 56 |
+
"""
|
| 57 |
+
|
| 58 |
+
editable_mode: bool = False
|
| 59 |
+
"""Boolean flag that will be set to ``True`` when setuptools is used for an
|
| 60 |
+
editable installation (see :pep:`660`).
|
| 61 |
+
Implementations **SHOULD** explicitly set the default value of this attribute to
|
| 62 |
+
``False``.
|
| 63 |
+
When subcommands run, they can use this flag to perform optimizations or change
|
| 64 |
+
their behaviour accordingly.
|
| 65 |
+
"""
|
| 66 |
+
|
| 67 |
+
build_lib: str
|
| 68 |
+
"""String representing the directory where the build artifacts should be stored,
|
| 69 |
+
e.g. ``build/lib``.
|
| 70 |
+
For example, if a distribution wants to provide a Python module named ``pkg.mod``,
|
| 71 |
+
then a corresponding file should be written to ``{build_lib}/package/module.py``.
|
| 72 |
+
A way of thinking about this is that the files saved under ``build_lib``
|
| 73 |
+
would be eventually copied to one of the directories in :obj:`site.PREFIXES`
|
| 74 |
+
upon installation.
|
| 75 |
+
|
| 76 |
+
A command that produces platform-independent files (e.g. compiling text templates
|
| 77 |
+
into Python functions), **CAN** initialize ``build_lib`` by copying its value from
|
| 78 |
+
the ``build_py`` command. On the other hand, a command that produces
|
| 79 |
+
platform-specific files **CAN** initialize ``build_lib`` by copying its value from
|
| 80 |
+
the ``build_ext`` command. In general this is done inside the ``finalize_options``
|
| 81 |
+
method with the help of the ``set_undefined_options`` command::
|
| 82 |
+
|
| 83 |
+
def finalize_options(self):
|
| 84 |
+
self.set_undefined_options("build_py", ("build_lib", "build_lib"))
|
| 85 |
+
...
|
| 86 |
+
"""
|
| 87 |
+
|
| 88 |
+
def initialize_options(self) -> None:
|
| 89 |
+
"""(Required by the original :class:`setuptools.Command` interface)"""
|
| 90 |
+
...
|
| 91 |
+
|
| 92 |
+
def finalize_options(self) -> None:
|
| 93 |
+
"""(Required by the original :class:`setuptools.Command` interface)"""
|
| 94 |
+
...
|
| 95 |
+
|
| 96 |
+
def run(self) -> None:
|
| 97 |
+
"""(Required by the original :class:`setuptools.Command` interface)"""
|
| 98 |
+
...
|
| 99 |
+
|
| 100 |
+
def get_source_files(self) -> list[str]:
|
| 101 |
+
"""
|
| 102 |
+
Return a list of all files that are used by the command to create the expected
|
| 103 |
+
outputs.
|
| 104 |
+
For example, if your build command transpiles Java files into Python, you should
|
| 105 |
+
list here all the Java files.
|
| 106 |
+
The primary purpose of this function is to help populating the ``sdist``
|
| 107 |
+
with all the files necessary to build the distribution.
|
| 108 |
+
All files should be strings relative to the project root directory.
|
| 109 |
+
"""
|
| 110 |
+
...
|
| 111 |
+
|
| 112 |
+
def get_outputs(self) -> list[str]:
|
| 113 |
+
"""
|
| 114 |
+
Return a list of files intended for distribution as they would have been
|
| 115 |
+
produced by the build.
|
| 116 |
+
These files should be strings in the form of
|
| 117 |
+
``"{build_lib}/destination/file/path"``.
|
| 118 |
+
|
| 119 |
+
.. note::
|
| 120 |
+
The return value of ``get_output()`` should include all files used as keys
|
| 121 |
+
in ``get_output_mapping()`` plus files that are generated during the build
|
| 122 |
+
and don't correspond to any source file already present in the project.
|
| 123 |
+
"""
|
| 124 |
+
...
|
| 125 |
+
|
| 126 |
+
def get_output_mapping(self) -> dict[str, str]:
|
| 127 |
+
"""
|
| 128 |
+
Return a mapping between destination files as they would be produced by the
|
| 129 |
+
build (dict keys) into the respective existing (source) files (dict values).
|
| 130 |
+
Existing (source) files should be represented as strings relative to the project
|
| 131 |
+
root directory.
|
| 132 |
+
Destination files should be strings in the form of
|
| 133 |
+
``"{build_lib}/destination/file/path"``.
|
| 134 |
+
"""
|
| 135 |
+
...
|
falcon/lib/python3.10/site-packages/setuptools/command/build_ext.py
ADDED
|
@@ -0,0 +1,469 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import itertools
|
| 4 |
+
import os
|
| 5 |
+
import sys
|
| 6 |
+
from collections.abc import Iterator
|
| 7 |
+
from importlib.machinery import EXTENSION_SUFFIXES
|
| 8 |
+
from importlib.util import cache_from_source as _compiled_file_name
|
| 9 |
+
from pathlib import Path
|
| 10 |
+
from typing import TYPE_CHECKING
|
| 11 |
+
|
| 12 |
+
from setuptools.dist import Distribution
|
| 13 |
+
from setuptools.errors import BaseError
|
| 14 |
+
from setuptools.extension import Extension, Library
|
| 15 |
+
|
| 16 |
+
from distutils import log
|
| 17 |
+
from distutils.ccompiler import new_compiler
|
| 18 |
+
from distutils.sysconfig import customize_compiler, get_config_var
|
| 19 |
+
|
| 20 |
+
if TYPE_CHECKING:
|
| 21 |
+
# Cython not installed on CI tests, causing _build_ext to be `Any`
|
| 22 |
+
from distutils.command.build_ext import build_ext as _build_ext
|
| 23 |
+
else:
|
| 24 |
+
try:
|
| 25 |
+
# Attempt to use Cython for building extensions, if available
|
| 26 |
+
from Cython.Distutils.build_ext import build_ext as _build_ext
|
| 27 |
+
|
| 28 |
+
# Additionally, assert that the compiler module will load
|
| 29 |
+
# also. Ref #1229.
|
| 30 |
+
__import__('Cython.Compiler.Main')
|
| 31 |
+
except ImportError:
|
| 32 |
+
from distutils.command.build_ext import build_ext as _build_ext
|
| 33 |
+
|
| 34 |
+
# make sure _config_vars is initialized
|
| 35 |
+
get_config_var("LDSHARED")
|
| 36 |
+
# Not publicly exposed in typeshed distutils stubs, but this is done on purpose
|
| 37 |
+
# See https://github.com/pypa/setuptools/pull/4228#issuecomment-1959856400
|
| 38 |
+
from distutils.sysconfig import _config_vars as _CONFIG_VARS # noqa: E402
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def _customize_compiler_for_shlib(compiler):
|
| 42 |
+
if sys.platform == "darwin":
|
| 43 |
+
# building .dylib requires additional compiler flags on OSX; here we
|
| 44 |
+
# temporarily substitute the pyconfig.h variables so that distutils'
|
| 45 |
+
# 'customize_compiler' uses them before we build the shared libraries.
|
| 46 |
+
tmp = _CONFIG_VARS.copy()
|
| 47 |
+
try:
|
| 48 |
+
# XXX Help! I don't have any idea whether these are right...
|
| 49 |
+
_CONFIG_VARS['LDSHARED'] = (
|
| 50 |
+
"gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
|
| 51 |
+
)
|
| 52 |
+
_CONFIG_VARS['CCSHARED'] = " -dynamiclib"
|
| 53 |
+
_CONFIG_VARS['SO'] = ".dylib"
|
| 54 |
+
customize_compiler(compiler)
|
| 55 |
+
finally:
|
| 56 |
+
_CONFIG_VARS.clear()
|
| 57 |
+
_CONFIG_VARS.update(tmp)
|
| 58 |
+
else:
|
| 59 |
+
customize_compiler(compiler)
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
have_rtld = False
|
| 63 |
+
use_stubs = False
|
| 64 |
+
libtype = 'shared'
|
| 65 |
+
|
| 66 |
+
if sys.platform == "darwin":
|
| 67 |
+
use_stubs = True
|
| 68 |
+
elif os.name != 'nt':
|
| 69 |
+
try:
|
| 70 |
+
import dl # type: ignore[import-not-found] # https://github.com/python/mypy/issues/13002
|
| 71 |
+
|
| 72 |
+
use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
|
| 73 |
+
except ImportError:
|
| 74 |
+
pass
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def if_dl(s):
|
| 78 |
+
return s if have_rtld else ''
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def get_abi3_suffix():
|
| 82 |
+
"""Return the file extension for an abi3-compliant Extension()"""
|
| 83 |
+
for suffix in EXTENSION_SUFFIXES:
|
| 84 |
+
if '.abi3' in suffix: # Unix
|
| 85 |
+
return suffix
|
| 86 |
+
elif suffix == '.pyd': # Windows
|
| 87 |
+
return suffix
|
| 88 |
+
return None
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class build_ext(_build_ext):
|
| 92 |
+
distribution: Distribution # override distutils.dist.Distribution with setuptools.dist.Distribution
|
| 93 |
+
editable_mode = False
|
| 94 |
+
inplace = False
|
| 95 |
+
|
| 96 |
+
def run(self):
|
| 97 |
+
"""Build extensions in build directory, then copy if --inplace"""
|
| 98 |
+
old_inplace, self.inplace = self.inplace, False
|
| 99 |
+
_build_ext.run(self)
|
| 100 |
+
self.inplace = old_inplace
|
| 101 |
+
if old_inplace:
|
| 102 |
+
self.copy_extensions_to_source()
|
| 103 |
+
|
| 104 |
+
def _get_inplace_equivalent(self, build_py, ext: Extension) -> tuple[str, str]:
|
| 105 |
+
fullname = self.get_ext_fullname(ext.name)
|
| 106 |
+
filename = self.get_ext_filename(fullname)
|
| 107 |
+
modpath = fullname.split('.')
|
| 108 |
+
package = '.'.join(modpath[:-1])
|
| 109 |
+
package_dir = build_py.get_package_dir(package)
|
| 110 |
+
inplace_file = os.path.join(package_dir, os.path.basename(filename))
|
| 111 |
+
regular_file = os.path.join(self.build_lib, filename)
|
| 112 |
+
return (inplace_file, regular_file)
|
| 113 |
+
|
| 114 |
+
def copy_extensions_to_source(self) -> None:
|
| 115 |
+
build_py = self.get_finalized_command('build_py')
|
| 116 |
+
for ext in self.extensions:
|
| 117 |
+
inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext)
|
| 118 |
+
|
| 119 |
+
# Always copy, even if source is older than destination, to ensure
|
| 120 |
+
# that the right extensions for the current Python/platform are
|
| 121 |
+
# used.
|
| 122 |
+
if os.path.exists(regular_file) or not ext.optional:
|
| 123 |
+
self.copy_file(regular_file, inplace_file, level=self.verbose)
|
| 124 |
+
|
| 125 |
+
if ext._needs_stub:
|
| 126 |
+
inplace_stub = self._get_equivalent_stub(ext, inplace_file)
|
| 127 |
+
self._write_stub_file(inplace_stub, ext, compile=True)
|
| 128 |
+
# Always compile stub and remove the original (leave the cache behind)
|
| 129 |
+
# (this behaviour was observed in previous iterations of the code)
|
| 130 |
+
|
| 131 |
+
def _get_equivalent_stub(self, ext: Extension, output_file: str) -> str:
|
| 132 |
+
dir_ = os.path.dirname(output_file)
|
| 133 |
+
_, _, name = ext.name.rpartition(".")
|
| 134 |
+
return f"{os.path.join(dir_, name)}.py"
|
| 135 |
+
|
| 136 |
+
def _get_output_mapping(self) -> Iterator[tuple[str, str]]:
|
| 137 |
+
if not self.inplace:
|
| 138 |
+
return
|
| 139 |
+
|
| 140 |
+
build_py = self.get_finalized_command('build_py')
|
| 141 |
+
opt = self.get_finalized_command('install_lib').optimize or ""
|
| 142 |
+
|
| 143 |
+
for ext in self.extensions:
|
| 144 |
+
inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext)
|
| 145 |
+
yield (regular_file, inplace_file)
|
| 146 |
+
|
| 147 |
+
if ext._needs_stub:
|
| 148 |
+
# This version of `build_ext` always builds artifacts in another dir,
|
| 149 |
+
# when "inplace=True" is given it just copies them back.
|
| 150 |
+
# This is done in the `copy_extensions_to_source` function, which
|
| 151 |
+
# always compile stub files via `_compile_and_remove_stub`.
|
| 152 |
+
# At the end of the process, a `.pyc` stub file is created without the
|
| 153 |
+
# corresponding `.py`.
|
| 154 |
+
|
| 155 |
+
inplace_stub = self._get_equivalent_stub(ext, inplace_file)
|
| 156 |
+
regular_stub = self._get_equivalent_stub(ext, regular_file)
|
| 157 |
+
inplace_cache = _compiled_file_name(inplace_stub, optimization=opt)
|
| 158 |
+
output_cache = _compiled_file_name(regular_stub, optimization=opt)
|
| 159 |
+
yield (output_cache, inplace_cache)
|
| 160 |
+
|
| 161 |
+
def get_ext_filename(self, fullname: str) -> str:
    """Return the filename of the built artifact for extension *fullname*.

    ``SETUPTOOLS_EXT_SUFFIX`` overrides the interpreter's ``EXT_SUFFIX``;
    abi3 extensions, ``Library`` objects, and dynamically-linked stub
    extensions each adjust the resulting name.
    """
    so_ext = os.getenv('SETUPTOOLS_EXT_SUFFIX')
    if so_ext:
        filename = os.path.join(*fullname.split('.')) + so_ext
    else:
        filename = _build_ext.get_ext_filename(self, fullname)
        ext_suffix = get_config_var('EXT_SUFFIX')
        if not isinstance(ext_suffix, str):
            raise OSError(
                "Configuration variable EXT_SUFFIX not found for this platform "
                "and environment variable SETUPTOOLS_EXT_SUFFIX is missing"
            )
        so_ext = ext_suffix

    if fullname in self.ext_map:
        ext = self.ext_map[fullname]
        abi3_suffix = get_abi3_suffix()
        if ext.py_limited_api and abi3_suffix:  # Use abi3
            filename = filename[: -len(so_ext)] + abi3_suffix
        if isinstance(ext, Library):
            # shared libraries follow the platform's library naming convention
            fn, ext = os.path.splitext(filename)
            return self.shlib_compiler.library_filename(fn, libtype)
        elif use_stubs and ext._links_to_dynamic:
            # real artifact gets a 'dl-' prefix; a stub loads it at runtime
            d, fn = os.path.split(filename)
            return os.path.join(d, 'dl-' + fn)
    return filename
|
| 187 |
+
|
| 188 |
+
def initialize_options(self):
    """Establish default values for the setuptools-specific state."""
    _build_ext.initialize_options(self)
    # populated later by finalize_options() / setup_shlib_compiler()
    self.ext_map = {}
    self.shlibs = []
    self.shlib_compiler = None
    self.editable_mode = False
|
| 194 |
+
|
| 195 |
+
def finalize_options(self) -> None:
    """Finalize distutils options and precompute per-extension metadata.

    Registers every extension in ``self.ext_map`` (by full dotted name
    and bare module name), decides which extensions link to in-package
    dynamic libs (``_links_to_dynamic``) and which need a stub loader
    (``_needs_stub``), and adjusts library search paths accordingly.
    """
    _build_ext.finalize_options(self)
    self.extensions = self.extensions or []
    self.check_extensions_list(self.extensions)
    # Library instances are built as shared/static libs, not importable exts
    self.shlibs = [ext for ext in self.extensions if isinstance(ext, Library)]
    if self.shlibs:
        self.setup_shlib_compiler()
    for ext in self.extensions:
        ext._full_name = self.get_ext_fullname(ext.name)
    for ext in self.extensions:
        fullname = ext._full_name
        self.ext_map[fullname] = ext

        # distutils 3.1 will also ask for module names
        # XXX what to do with conflicts?
        self.ext_map[fullname.split('.')[-1]] = ext

        # ltd: links to an in-package dynamic lib; ns: needs a stub loader
        ltd = self.shlibs and self.links_to_dynamic(ext) or False
        ns = ltd and use_stubs and not isinstance(ext, Library)
        ext._links_to_dynamic = ltd
        ext._needs_stub = ns
        filename = ext._file_name = self.get_ext_filename(fullname)
        libdir = os.path.dirname(os.path.join(self.build_lib, filename))
        if ltd and libdir not in ext.library_dirs:
            ext.library_dirs.append(libdir)
        if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
            ext.runtime_library_dirs.append(os.curdir)

    if self.editable_mode:
        self.inplace = True
|
| 225 |
+
|
| 226 |
+
def setup_shlib_compiler(self):
    """Create and configure a second compiler used to build Library objects."""
    compiler = self.shlib_compiler = new_compiler(
        compiler=self.compiler, dry_run=self.dry_run, force=self.force
    )
    _customize_compiler_for_shlib(compiler)

    # mirror the options distutils would apply to the regular compiler
    if self.include_dirs is not None:
        compiler.set_include_dirs(self.include_dirs)
    if self.define is not None:
        # 'define' option is a list of (name,value) tuples
        for name, value in self.define:
            compiler.define_macro(name, value)
    if self.undef is not None:
        for macro in self.undef:
            compiler.undefine_macro(macro)
    if self.libraries is not None:
        compiler.set_libraries(self.libraries)
    if self.library_dirs is not None:
        compiler.set_library_dirs(self.library_dirs)
    if self.rpath is not None:
        compiler.set_runtime_library_dirs(self.rpath)
    if self.link_objects is not None:
        compiler.set_link_objects(self.link_objects)

    # hack so distutils' build_extension() builds a library instead
    compiler.link_shared_object = link_shared_object.__get__(compiler)  # type: ignore[method-assign]
|
| 252 |
+
|
| 253 |
+
def get_export_symbols(self, ext):
    """Libraries export their own declared symbols; defer to distutils otherwise."""
    return (
        ext.export_symbols
        if isinstance(ext, Library)
        else _build_ext.get_export_symbols(self, ext)
    )
|
| 257 |
+
|
| 258 |
+
def build_extension(self, ext) -> None:
    """Build *ext*, temporarily swapping in the shlib compiler for Libraries.

    After a successful build, writes a stub loader when the extension
    requires one (``ext._needs_stub``).
    """
    ext._convert_pyx_sources_to_lang()
    _compiler = self.compiler
    try:
        if isinstance(ext, Library):
            self.compiler = self.shlib_compiler
        _build_ext.build_extension(self, ext)
        if ext._needs_stub:
            build_lib = self.get_finalized_command('build_py').build_lib
            self.write_stub(build_lib, ext)
    finally:
        # always restore the original compiler, even if the build failed
        self.compiler = _compiler
|
| 270 |
+
|
| 271 |
+
def links_to_dynamic(self, ext):
    """Return true if 'ext' links to a dynamic lib in the same package"""
    # XXX should verify the lib is actually being built as dynamic,
    # XXX not merely a locally-found or static-compiled version
    built_shlibs = {lib._full_name for lib in self.shlibs}
    parent_pkg = ext._full_name.rpartition('.')[0]
    prefix = parent_pkg + '.' if parent_pkg else ''
    return any(prefix + libname in built_shlibs for libname in ext.libraries)
|
| 279 |
+
|
| 280 |
+
def get_source_files(self) -> list[str]:
    """Sources tracked by distutils plus in-project ``depends`` files."""
    sources = _build_ext.get_source_files(self)
    sources.extend(self._get_internal_depends())
    return sources
|
| 282 |
+
|
| 283 |
+
def _get_internal_depends(self) -> Iterator[str]:
    """Yield ``ext.depends`` that are contained by the project directory"""
    project_root = Path(self.distribution.src_root or os.curdir).resolve()
    depends = (dep for ext in self.extensions for dep in ext.depends)

    def skip(orig_path: str, reason: str) -> None:
        # log (and drop) a dependency that cannot be safely included
        log.info(
            "dependency %s won't be automatically "
            "included in the manifest: the path %s",
            orig_path,
            reason,
        )

    for dep in depends:
        path = Path(dep)

        if path.is_absolute():
            skip(dep, "must be relative")
            continue

        if ".." in path.parts:
            skip(dep, "can't have `..` segments")
            continue

        try:
            # strict=True raises if the file does not exist
            resolved = (project_root / path).resolve(strict=True)
        except OSError:
            skip(dep, "doesn't exist")
            continue

        try:
            # ValueError means the resolved file lives outside project_root
            # (e.g. via a symlink escaping the tree)
            resolved.relative_to(project_root)
        except ValueError:
            skip(dep, "must be inside the project root")
            continue

        yield path.as_posix()
|
| 320 |
+
|
| 321 |
+
def get_outputs(self) -> list[str]:
    """Return every file this command produces.

    In-place builds report exactly the destinations of the output
    mapping; regular builds add the stub-related files.
    """
    if self.inplace:
        return list(self.get_output_mapping())
    return sorted([*_build_ext.get_outputs(self), *self.__get_stubs_outputs()])
|
| 325 |
+
|
| 326 |
+
def get_output_mapping(self) -> dict[str, str]:
    """See :class:`setuptools.commands.build.SubCommand`"""
    pairs = list(self._get_output_mapping())
    pairs.sort(key=lambda pair: pair[0])
    return dict(pairs)
|
| 330 |
+
|
| 331 |
+
def __get_stubs_outputs(self):
    """List every stub-related file (``.py``/``.pyc``/``.pyo``) under build_lib."""
    suffixes = list(self.__get_output_extensions())
    outputs = []
    for ext in self.extensions:
        if not ext._needs_stub:
            continue
        # base name of the stub for this extension
        base = os.path.join(self.build_lib, *ext._full_name.split('.'))
        outputs.extend(base + suffix for suffix in suffixes)
    return outputs
|
| 341 |
+
|
| 342 |
+
def __get_output_extensions(self):
    """Yield the filename suffixes a stub build may produce."""
    yield from ('.py', '.pyc')
    if self.get_finalized_command('build_py').optimize:
        # optimized bytecode is produced only when build_py asks for it
        yield '.pyo'
|
| 347 |
+
|
| 348 |
+
def write_stub(self, output_dir, ext, compile=False) -> None:
    """Create the stub loader for *ext* under *output_dir*."""
    parts = ext._full_name.split('.')
    stub_file = os.path.join(output_dir, *parts) + '.py'
    self._write_stub_file(stub_file, ext, compile)
|
| 351 |
+
|
| 352 |
+
def _write_stub_file(self, stub_file: str, ext: Extension, compile=False):
    """Write a stub loader: a ``.py`` module that imports the binary artifact.

    When *compile* is true the stub is byte-compiled and the source is
    removed afterwards (see ``_compile_and_remove_stub``); pre-existing
    stub files are treated as an error in that case.
    """
    log.info("writing stub loader for %s to %s", ext._full_name, stub_file)
    if compile and os.path.exists(stub_file):
        raise BaseError(stub_file + " already exists! Please delete.")
    if not self.dry_run:
        with open(stub_file, 'w', encoding="utf-8") as f:
            # The generated module replaces itself with the compiled
            # extension the first time it is imported.
            content = '\n'.join([
                "def __bootstrap__():",
                "   global __bootstrap__, __file__, __loader__",
                "   import sys, os, pkg_resources, importlib.util" + if_dl(", dl"),
                "   __file__ = pkg_resources.resource_filename"
                f"(__name__,{os.path.basename(ext._file_name)!r})",
                "   del __bootstrap__",
                "   if '__loader__' in globals():",
                "       del __loader__",
                if_dl("   old_flags = sys.getdlopenflags()"),
                "   old_dir = os.getcwd()",
                "   try:",
                "     os.chdir(os.path.dirname(__file__))",
                if_dl("     sys.setdlopenflags(dl.RTLD_NOW)"),
                "     spec = importlib.util.spec_from_file_location(",
                "                __name__, __file__)",
                "     mod = importlib.util.module_from_spec(spec)",
                "     spec.loader.exec_module(mod)",
                "   finally:",
                if_dl("     sys.setdlopenflags(old_flags)"),
                "     os.chdir(old_dir)",
                "__bootstrap__()",
                "",  # terminal \n
            ])
            f.write(content)
    if compile:
        self._compile_and_remove_stub(stub_file)
|
| 385 |
+
|
| 386 |
+
def _compile_and_remove_stub(self, stub_file: str):
    """Byte-compile *stub_file* (at every requested level), then delete it."""
    from distutils.util import byte_compile

    byte_compile([stub_file], optimize=0, force=True, dry_run=self.dry_run)
    optimize = self.get_finalized_command('install_lib').optimize
    if optimize > 0:
        # also produce the optimized bytecode requested by install_lib
        byte_compile(
            [stub_file],
            optimize=optimize,
            force=True,
            dry_run=self.dry_run,
        )
    if os.path.exists(stub_file) and not self.dry_run:
        # leave only the cached bytecode behind
        os.unlink(stub_file)
|
| 400 |
+
|
| 401 |
+
|
| 402 |
+
if use_stubs or os.name == 'nt':
    # Build shared libraries
    #
    # NOTE: this function is bound onto compiler instances in
    # `setup_shlib_compiler` (via `__get__`), hence the `self` parameter.
    def link_shared_object(
        self,
        objects,
        output_libname,
        output_dir=None,
        libraries=None,
        library_dirs=None,
        runtime_library_dirs=None,
        export_symbols=None,
        debug: bool = False,
        extra_preargs=None,
        extra_postargs=None,
        build_temp=None,
        target_lang=None,
    ) -> None:
        self.link(
            self.SHARED_LIBRARY,
            objects,
            output_libname,
            output_dir,
            libraries,
            library_dirs,
            runtime_library_dirs,
            export_symbols,
            debug,
            extra_preargs,
            extra_postargs,
            build_temp,
            target_lang,
        )

else:
    # Build static libraries everywhere else
    libtype = 'static'

    def link_shared_object(
        self,
        objects,
        output_libname,
        output_dir=None,
        libraries=None,
        library_dirs=None,
        runtime_library_dirs=None,
        export_symbols=None,
        debug: bool = False,
        extra_preargs=None,
        extra_postargs=None,
        build_temp=None,
        target_lang=None,
    ) -> None:
        # XXX we need to either disallow these attrs on Library instances,
        # or warn/abort here if set, or something...
        # libraries=None, library_dirs=None, runtime_library_dirs=None,
        # export_symbols=None, extra_preargs=None, extra_postargs=None,
        # build_temp=None

        assert output_dir is None  # distutils build_ext doesn't pass this
        output_dir, filename = os.path.split(output_libname)
        basename, _ext = os.path.splitext(filename)
        if self.library_filename("x").startswith('lib'):
            # strip 'lib' prefix; this is kludgy if some platform uses
            # a different prefix
            basename = basename[3:]

        self.create_static_lib(objects, basename, output_dir, debug, target_lang)
|
falcon/lib/python3.10/site-packages/setuptools/command/build_py.py
ADDED
|
@@ -0,0 +1,400 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import fnmatch
|
| 4 |
+
import itertools
|
| 5 |
+
import os
|
| 6 |
+
import stat
|
| 7 |
+
import textwrap
|
| 8 |
+
from collections.abc import Iterable, Iterator
|
| 9 |
+
from functools import partial
|
| 10 |
+
from glob import glob
|
| 11 |
+
from pathlib import Path
|
| 12 |
+
|
| 13 |
+
from more_itertools import unique_everseen
|
| 14 |
+
|
| 15 |
+
from .._path import StrPath, StrPathT
|
| 16 |
+
from ..dist import Distribution
|
| 17 |
+
from ..warnings import SetuptoolsDeprecationWarning
|
| 18 |
+
|
| 19 |
+
import distutils.command.build_py as orig
|
| 20 |
+
import distutils.errors
|
| 21 |
+
from distutils.util import convert_path
|
| 22 |
+
|
| 23 |
+
_IMPLICIT_DATA_FILES = ('*.pyi', 'py.typed')
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def make_writable(target) -> None:
    """Ensure *target* is writable by adding the owner-write permission bit."""
    current_mode = os.stat(target).st_mode
    os.chmod(target, current_mode | stat.S_IWRITE)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class build_py(orig.build_py):
    """Enhanced 'build_py' command that includes data files with packages

    The data files are specified via a 'package_data' argument to 'setup()'.
    See 'setuptools.dist.Distribution' for more details.

    Also, this version of the 'build_py' command allows you to specify both
    'py_modules' and 'packages' in the same setup operation.
    """

    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
    editable_mode: bool = False
    existing_egg_info_dir: StrPath | None = None  #: Private API, internal use only.

    def finalize_options(self):
        """Pull package-data configuration from the distribution object."""
        orig.build_py.finalize_options(self)
        self.package_data = self.distribution.package_data
        self.exclude_package_data = self.distribution.exclude_package_data or {}
        # drop any previously-cached value so __getattr__ recomputes it
        if 'data_files' in self.__dict__:
            del self.__dict__['data_files']

    def copy_file(  # type: ignore[override] # No overload, no bytes support
        self,
        infile: StrPath,
        outfile: StrPathT,
        preserve_mode: bool = True,
        preserve_times: bool = True,
        link: str | None = None,
        level: object = 1,
    ) -> tuple[StrPathT | str, bool]:
        # Overwrite base class to allow using links
        if link:
            # hard/sym links need fully-resolved paths to work reliably
            infile = str(Path(infile).resolve())
            outfile = str(Path(outfile).resolve())  # type: ignore[assignment] # Re-assigning a str when outfile is StrPath is ok
        return super().copy_file(  # pyright: ignore[reportReturnType] # pypa/distutils#309
            infile, outfile, preserve_mode, preserve_times, link, level
        )

    def run(self) -> None:
        """Build modules, packages, and copy data files to build directory"""
        if not (self.py_modules or self.packages) or self.editable_mode:
            return

        if self.py_modules:
            self.build_modules()

        if self.packages:
            self.build_packages()
            self.build_package_data()

        # Only compile actual .py files, using our base class' idea of what our
        # output files are.
        self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=False))

    def __getattr__(self, attr: str):
        """Lazily compute ``data_files`` on first attribute access."""
        if attr == 'data_files':
            self.data_files = self._get_data_files()
            return self.data_files
        return orig.build_py.__getattr__(self, attr)

    def _get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        self.analyze_manifest()
        return list(map(self._get_pkg_data_files, self.packages or ()))

    def get_data_files_without_manifest(self):
        """
        Generate list of ``(package,src_dir,build_dir,filenames)`` tuples,
        but without triggering any attempt to analyze or build the manifest.
        """
        # Prevent eventual errors from unset `manifest_files`
        # (that would otherwise be set by `analyze_manifest`)
        self.__dict__.setdefault('manifest_files', {})
        return list(map(self._get_pkg_data_files, self.packages or ()))

    def _get_pkg_data_files(self, package):
        # Locate package source directory
        src_dir = self.get_package_dir(package)

        # Compute package build directory
        build_dir = os.path.join(*([self.build_lib] + package.split('.')))

        # Strip directory from globbed filenames
        filenames = [
            os.path.relpath(file, src_dir)
            for file in self.find_data_files(package, src_dir)
        ]
        return package, src_dir, build_dir, filenames

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        patterns = self._get_platform_patterns(
            self.package_data,
            package,
            src_dir,
            extra_patterns=_IMPLICIT_DATA_FILES,
        )
        globs_expanded = map(partial(glob, recursive=True), patterns)
        # flatten the expanded globs into an iterable of matches
        globs_matches = itertools.chain.from_iterable(globs_expanded)
        glob_files = filter(os.path.isfile, globs_matches)
        files = itertools.chain(
            self.manifest_files.get(package, []),
            glob_files,
        )
        return self.exclude_data_files(package, src_dir, files)

    def get_outputs(self, include_bytecode: bool = True) -> list[str]:  # type: ignore[override] # Using a real boolean instead of 0|1
        """See :class:`setuptools.commands.build.SubCommand`"""
        if self.editable_mode:
            return list(self.get_output_mapping().keys())
        return super().get_outputs(include_bytecode)

    def get_output_mapping(self) -> dict[str, str]:
        """See :class:`setuptools.commands.build.SubCommand`"""
        mapping = itertools.chain(
            self._get_package_data_output_mapping(),
            self._get_module_mapping(),
        )
        return dict(sorted(mapping, key=lambda x: x[0]))

    def _get_module_mapping(self) -> Iterator[tuple[str, str]]:
        """Iterate over all modules producing (dest, src) pairs."""
        for package, module, module_file in self.find_all_modules():
            package = package.split('.')
            filename = self.get_module_outfile(self.build_lib, package, module)
            yield (filename, module_file)

    def _get_package_data_output_mapping(self) -> Iterator[tuple[str, str]]:
        """Iterate over package data producing (dest, src) pairs."""
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                srcfile = os.path.join(src_dir, filename)
                yield (target, srcfile)

    def build_package_data(self) -> None:
        """Copy data files into build directory"""
        for target, srcfile in self._get_package_data_output_mapping():
            self.mkpath(os.path.dirname(target))
            _outf, _copied = self.copy_file(srcfile, target)
            # copied files may inherit a read-only mode from the source
            make_writable(target)

    def analyze_manifest(self) -> None:
        """Map manifest entries to their owning packages (``manifest_files``)."""
        self.manifest_files: dict[str, list[str]] = {}
        if not self.distribution.include_package_data:
            return
        src_dirs: dict[str, str] = {}
        for package in self.packages or ():
            # Locate package source directory
            src_dirs[assert_relative(self.get_package_dir(package))] = package

        if (
            self.existing_egg_info_dir
            and Path(self.existing_egg_info_dir, "SOURCES.txt").exists()
        ):
            # reuse a previously generated manifest instead of rebuilding it
            egg_info_dir = self.existing_egg_info_dir
            manifest = Path(egg_info_dir, "SOURCES.txt")
            files = manifest.read_text(encoding="utf-8").splitlines()
        else:
            self.run_command('egg_info')
            ei_cmd = self.get_finalized_command('egg_info')
            egg_info_dir = ei_cmd.egg_info
            files = ei_cmd.filelist.files

        check = _IncludePackageDataAbuse()
        for path in self._filter_build_files(files, egg_info_dir):
            # walk up the directory tree until we hit a known package dir
            d, f = os.path.split(assert_relative(path))
            prev = None
            oldf = f
            while d and d != prev and d not in src_dirs:
                prev = d
                d, df = os.path.split(d)
                f = os.path.join(df, f)
            if d in src_dirs:
                if f == oldf:
                    if check.is_module(f):
                        continue  # it's a module, not data
                else:
                    importable = check.importable_subpackage(src_dirs[d], f)
                    if importable:
                        check.warn(importable)
                self.manifest_files.setdefault(src_dirs[d], []).append(path)

    def _filter_build_files(
        self, files: Iterable[str], egg_info: StrPath
    ) -> Iterator[str]:
        """
        ``build_meta`` may try to create egg_info outside of the project directory,
        and this can be problematic for certain plugins (reported in issue #3500).

        Extensions might also include among their sources files created in the
        ``build_lib`` and ``build_temp`` directories.

        This function should filter this case of invalid files out.
        """
        build = self.get_finalized_command("build")
        build_dirs = (egg_info, self.build_lib, build.build_temp, build.build_base)
        norm_dirs = [os.path.normpath(p) for p in build_dirs if p]

        for file in files:
            norm_path = os.path.normpath(file)
            # keep relative files and absolute files outside every build dir
            if not os.path.isabs(file) or all(d not in norm_path for d in norm_dirs):
                yield file

    def get_data_files(self) -> None:
        pass  # Lazily compute data files in _get_data_files() function.

    def check_package(self, package, package_dir):
        """Check namespace packages' __init__ for declare_namespace"""
        try:
            # memoized result from a previous call
            return self.packages_checked[package]
        except KeyError:
            pass

        init_py = orig.build_py.check_package(self, package, package_dir)
        self.packages_checked[package] = init_py

        if not init_py or not self.distribution.namespace_packages:
            return init_py

        for pkg in self.distribution.namespace_packages:
            if pkg == package or pkg.startswith(package + '.'):
                break
        else:
            return init_py

        with open(init_py, 'rb') as f:
            contents = f.read()
        if b'declare_namespace' not in contents:
            raise distutils.errors.DistutilsError(
                f"Namespace package problem: {package} is a namespace package, but "
                "its\n__init__.py does not call declare_namespace()! Please "
                'fix it.\n(See the setuptools manual under '
                '"Namespace Packages" for details.)\n"'
            )
        return init_py

    def initialize_options(self):
        """Set defaults for setuptools-specific options."""
        self.packages_checked = {}
        orig.build_py.initialize_options(self)
        self.editable_mode = False
        self.existing_egg_info_dir = None

    def get_package_dir(self, package):
        """Return the package directory, honoring ``src_root`` when set."""
        res = orig.build_py.get_package_dir(self, package)
        if self.distribution.src_root is not None:
            return os.path.join(self.distribution.src_root, res)
        return res

    def exclude_data_files(self, package, src_dir, files):
        """Filter filenames for package's data files in 'src_dir'"""
        files = list(files)
        patterns = self._get_platform_patterns(
            self.exclude_package_data,
            package,
            src_dir,
        )
        match_groups = (fnmatch.filter(files, pattern) for pattern in patterns)
        # flatten the groups of matches into an iterable of matches
        matches = itertools.chain.from_iterable(match_groups)
        bad = set(matches)
        keepers = (fn for fn in files if fn not in bad)
        # ditch dupes
        return list(unique_everseen(keepers))

    @staticmethod
    def _get_platform_patterns(spec, package, src_dir, extra_patterns=()):
        """
        yield platform-specific path patterns (suitable for glob
        or fn_match) from a glob-based spec (such as
        self.package_data or self.exclude_package_data)
        matching package in src_dir.
        """
        raw_patterns = itertools.chain(
            extra_patterns,
            spec.get('', []),
            spec.get(package, []),
        )
        return (
            # Each pattern has to be converted to a platform-specific path
            os.path.join(src_dir, convert_path(pattern))
            for pattern in raw_patterns
        )
|
| 315 |
+
|
| 316 |
+
|
| 317 |
+
def assert_relative(path):
    """Pass *path* through when relative; abort setup when it is absolute."""
    if os.path.isabs(path):
        from distutils.errors import DistutilsSetupError

        template = textwrap.dedent(
            """
        Error: setup script specifies an absolute path:

            %s

        setup() arguments must *always* be /-separated paths relative to the
        setup.py directory, *never* absolute paths.
        """
        ).lstrip()
        raise DistutilsSetupError(template % path)
    return path
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
class _IncludePackageDataAbuse:
    """Inform users that package or module is included as 'data file'"""

    class _Warning(SetuptoolsDeprecationWarning):
        _SUMMARY = """
        Package {importable!r} is absent from the `packages` configuration.
        """

        _DETAILS = """
        ############################
        # Package would be ignored #
        ############################
        Python recognizes {importable!r} as an importable package[^1],
        but it is absent from setuptools' `packages` configuration.

        This leads to an ambiguous overall configuration. If you want to distribute this
        package, please make sure that {importable!r} is explicitly added
        to the `packages` configuration field.

        Alternatively, you can also rely on setuptools' discovery methods
        (for example by using `find_namespace_packages(...)`/`find_namespace:`
        instead of `find_packages(...)`/`find:`).

        You can read more about "package discovery" on setuptools documentation page:

        - https://setuptools.pypa.io/en/latest/userguide/package_discovery.html

        If you don't want {importable!r} to be distributed and are
        already explicitly excluding {importable!r} via
        `find_namespace_packages(...)/find_namespace` or `find_packages(...)/find`,
        you can try to use `exclude_package_data`, or `include-package-data=False` in
        combination with a more fine grained `package-data` configuration.

        You can read more about "package data files" on setuptools documentation page:

        - https://setuptools.pypa.io/en/latest/userguide/datafiles.html


        [^1]: For Python, any directory (with suitable naming) can be imported,
              even if it does not contain any `.py` files.
              On the other hand, currently there is no concept of package data
              directory, all directories are treated like packages.
        """
        # _DUE_DATE: still not defined as this is particularly controversial.
        # Warning initially introduced in May 2022. See issue #3340 for discussion.

    def __init__(self):
        # importables already reported; used to avoid duplicate warnings
        self._already_warned = set()

    def is_module(self, file):
        # a bare `name.py` with an identifier stem is a module, not data
        return file.endswith(".py") and file[: -len(".py")].isidentifier()

    def importable_subpackage(self, parent, file):
        """Dotted name of the sub-package containing *file*, or None."""
        pkg = Path(file).parent
        # only the leading run of identifier-like path parts is importable
        parts = list(itertools.takewhile(str.isidentifier, pkg.parts))
        if parts:
            return ".".join([parent, *parts])
        return None

    def warn(self, importable):
        """Emit the deprecation warning once per importable name."""
        if importable not in self._already_warned:
            self._Warning.emit(importable=importable)
            self._already_warned.add(importable)
|
falcon/lib/python3.10/site-packages/setuptools/command/develop.py
ADDED
|
@@ -0,0 +1,195 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import glob
|
| 2 |
+
import os
|
| 3 |
+
|
| 4 |
+
import setuptools
|
| 5 |
+
from setuptools import _normalization, _path, namespaces
|
| 6 |
+
from setuptools.command.easy_install import easy_install
|
| 7 |
+
|
| 8 |
+
from ..unicode_utils import _read_utf8_with_fallback
|
| 9 |
+
|
| 10 |
+
from distutils import log
|
| 11 |
+
from distutils.errors import DistutilsOptionError
|
| 12 |
+
from distutils.util import convert_path
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class develop(namespaces.DevelopInstaller, easy_install):
|
| 16 |
+
"""Set up package for development"""
|
| 17 |
+
|
| 18 |
+
description = "install package in 'development mode'"
|
| 19 |
+
|
| 20 |
+
user_options = easy_install.user_options + [
|
| 21 |
+
("uninstall", "u", "Uninstall this source package"),
|
| 22 |
+
("egg-path=", None, "Set the path to be used in the .egg-link file"),
|
| 23 |
+
]
|
| 24 |
+
|
| 25 |
+
boolean_options = easy_install.boolean_options + ['uninstall']
|
| 26 |
+
|
| 27 |
+
command_consumes_arguments = False # override base
|
| 28 |
+
|
| 29 |
+
def run(self):
|
| 30 |
+
if self.uninstall:
|
| 31 |
+
self.multi_version = True
|
| 32 |
+
self.uninstall_link()
|
| 33 |
+
self.uninstall_namespaces()
|
| 34 |
+
else:
|
| 35 |
+
self.install_for_development()
|
| 36 |
+
self.warn_deprecated_options()
|
| 37 |
+
|
| 38 |
+
def initialize_options(self):
|
| 39 |
+
self.uninstall = None
|
| 40 |
+
self.egg_path = None
|
| 41 |
+
easy_install.initialize_options(self)
|
| 42 |
+
self.setup_path = None
|
| 43 |
+
self.always_copy_from = '.' # always copy eggs installed in curdir
|
| 44 |
+
|
| 45 |
+
def finalize_options(self) -> None:
|
| 46 |
+
import pkg_resources
|
| 47 |
+
|
| 48 |
+
ei = self.get_finalized_command("egg_info")
|
| 49 |
+
self.args = [ei.egg_name]
|
| 50 |
+
|
| 51 |
+
easy_install.finalize_options(self)
|
| 52 |
+
self.expand_basedirs()
|
| 53 |
+
self.expand_dirs()
|
| 54 |
+
# pick up setup-dir .egg files only: no .egg-info
|
| 55 |
+
self.package_index.scan(glob.glob('*.egg'))
|
| 56 |
+
|
| 57 |
+
egg_link_fn = (
|
| 58 |
+
_normalization.filename_component_broken(ei.egg_name) + '.egg-link'
|
| 59 |
+
)
|
| 60 |
+
self.egg_link = os.path.join(self.install_dir, egg_link_fn)
|
| 61 |
+
self.egg_base = ei.egg_base
|
| 62 |
+
if self.egg_path is None:
|
| 63 |
+
self.egg_path = os.path.abspath(ei.egg_base)
|
| 64 |
+
|
| 65 |
+
target = _path.normpath(self.egg_base)
|
| 66 |
+
egg_path = _path.normpath(os.path.join(self.install_dir, self.egg_path))
|
| 67 |
+
if egg_path != target:
|
| 68 |
+
raise DistutilsOptionError(
|
| 69 |
+
"--egg-path must be a relative path from the install"
|
| 70 |
+
" directory to " + target
|
| 71 |
+
)
|
| 72 |
+
|
| 73 |
+
# Make a distribution for the package's source
|
| 74 |
+
self.dist = pkg_resources.Distribution(
|
| 75 |
+
target,
|
| 76 |
+
pkg_resources.PathMetadata(target, os.path.abspath(ei.egg_info)),
|
| 77 |
+
project_name=ei.egg_name,
|
| 78 |
+
)
|
| 79 |
+
|
| 80 |
+
self.setup_path = self._resolve_setup_path(
|
| 81 |
+
self.egg_base,
|
| 82 |
+
self.install_dir,
|
| 83 |
+
self.egg_path,
|
| 84 |
+
)
|
| 85 |
+
|
| 86 |
+
@staticmethod
|
| 87 |
+
def _resolve_setup_path(egg_base, install_dir, egg_path):
|
| 88 |
+
"""
|
| 89 |
+
Generate a path from egg_base back to '.' where the
|
| 90 |
+
setup script resides and ensure that path points to the
|
| 91 |
+
setup path from $install_dir/$egg_path.
|
| 92 |
+
"""
|
| 93 |
+
path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
|
| 94 |
+
if path_to_setup != os.curdir:
|
| 95 |
+
path_to_setup = '../' * (path_to_setup.count('/') + 1)
|
| 96 |
+
resolved = _path.normpath(os.path.join(install_dir, egg_path, path_to_setup))
|
| 97 |
+
curdir = _path.normpath(os.curdir)
|
| 98 |
+
if resolved != curdir:
|
| 99 |
+
raise DistutilsOptionError(
|
| 100 |
+
"Can't get a consistent path to setup script from"
|
| 101 |
+
" installation directory",
|
| 102 |
+
resolved,
|
| 103 |
+
curdir,
|
| 104 |
+
)
|
| 105 |
+
return path_to_setup
|
| 106 |
+
|
| 107 |
+
def install_for_development(self) -> None:
|
| 108 |
+
self.run_command('egg_info')
|
| 109 |
+
|
| 110 |
+
# Build extensions in-place
|
| 111 |
+
self.reinitialize_command('build_ext', inplace=True)
|
| 112 |
+
self.run_command('build_ext')
|
| 113 |
+
|
| 114 |
+
if setuptools.bootstrap_install_from:
|
| 115 |
+
self.easy_install(setuptools.bootstrap_install_from)
|
| 116 |
+
setuptools.bootstrap_install_from = None
|
| 117 |
+
|
| 118 |
+
self.install_namespaces()
|
| 119 |
+
|
| 120 |
+
# create an .egg-link in the installation dir, pointing to our egg
|
| 121 |
+
log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
|
| 122 |
+
if not self.dry_run:
|
| 123 |
+
with open(self.egg_link, "w", encoding="utf-8") as f:
|
| 124 |
+
f.write(self.egg_path + "\n" + self.setup_path)
|
| 125 |
+
# postprocess the installed distro, fixing up .pth, installing scripts,
|
| 126 |
+
# and handling requirements
|
| 127 |
+
self.process_distribution(None, self.dist, not self.no_deps)
|
| 128 |
+
|
| 129 |
+
def uninstall_link(self) -> None:
|
| 130 |
+
if os.path.exists(self.egg_link):
|
| 131 |
+
log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
|
| 132 |
+
|
| 133 |
+
contents = [
|
| 134 |
+
line.rstrip()
|
| 135 |
+
for line in _read_utf8_with_fallback(self.egg_link).splitlines()
|
| 136 |
+
]
|
| 137 |
+
|
| 138 |
+
if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
|
| 139 |
+
log.warn("Link points to %s: uninstall aborted", contents)
|
| 140 |
+
return
|
| 141 |
+
if not self.dry_run:
|
| 142 |
+
os.unlink(self.egg_link)
|
| 143 |
+
if not self.dry_run:
|
| 144 |
+
self.update_pth(self.dist) # remove any .pth link to us
|
| 145 |
+
if self.distribution.scripts:
|
| 146 |
+
# XXX should also check for entry point scripts!
|
| 147 |
+
log.warn("Note: you must uninstall or replace scripts manually!")
|
| 148 |
+
|
| 149 |
+
def install_egg_scripts(self, dist):
|
| 150 |
+
if dist is not self.dist:
|
| 151 |
+
# Installing a dependency, so fall back to normal behavior
|
| 152 |
+
return easy_install.install_egg_scripts(self, dist)
|
| 153 |
+
|
| 154 |
+
# create wrapper scripts in the script dir, pointing to dist.scripts
|
| 155 |
+
|
| 156 |
+
# new-style...
|
| 157 |
+
self.install_wrapper_scripts(dist)
|
| 158 |
+
|
| 159 |
+
# ...and old-style
|
| 160 |
+
for script_name in self.distribution.scripts or []:
|
| 161 |
+
script_path = os.path.abspath(convert_path(script_name))
|
| 162 |
+
script_name = os.path.basename(script_path)
|
| 163 |
+
script_text = _read_utf8_with_fallback(script_path)
|
| 164 |
+
self.install_script(dist, script_name, script_text, script_path)
|
| 165 |
+
|
| 166 |
+
return None
|
| 167 |
+
|
| 168 |
+
def install_wrapper_scripts(self, dist):
|
| 169 |
+
dist = VersionlessRequirement(dist)
|
| 170 |
+
return easy_install.install_wrapper_scripts(self, dist)
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
class VersionlessRequirement:
|
| 174 |
+
"""
|
| 175 |
+
Adapt a pkg_resources.Distribution to simply return the project
|
| 176 |
+
name as the 'requirement' so that scripts will work across
|
| 177 |
+
multiple versions.
|
| 178 |
+
|
| 179 |
+
>>> from pkg_resources import Distribution
|
| 180 |
+
>>> dist = Distribution(project_name='foo', version='1.0')
|
| 181 |
+
>>> str(dist.as_requirement())
|
| 182 |
+
'foo==1.0'
|
| 183 |
+
>>> adapted_dist = VersionlessRequirement(dist)
|
| 184 |
+
>>> str(adapted_dist.as_requirement())
|
| 185 |
+
'foo'
|
| 186 |
+
"""
|
| 187 |
+
|
| 188 |
+
def __init__(self, dist) -> None:
|
| 189 |
+
self.__dist = dist
|
| 190 |
+
|
| 191 |
+
def __getattr__(self, name: str):
|
| 192 |
+
return getattr(self.__dist, name)
|
| 193 |
+
|
| 194 |
+
def as_requirement(self):
|
| 195 |
+
return self.project_name
|
falcon/lib/python3.10/site-packages/setuptools/command/dist_info.py
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Create a dist_info directory
|
| 3 |
+
As defined in the wheel specification
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import shutil
|
| 8 |
+
from contextlib import contextmanager
|
| 9 |
+
from pathlib import Path
|
| 10 |
+
from typing import cast
|
| 11 |
+
|
| 12 |
+
from .. import _normalization
|
| 13 |
+
from .._shutil import rmdir as _rm
|
| 14 |
+
from .egg_info import egg_info as egg_info_cls
|
| 15 |
+
|
| 16 |
+
from distutils import log
|
| 17 |
+
from distutils.core import Command
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class dist_info(Command):
|
| 21 |
+
"""
|
| 22 |
+
This command is private and reserved for internal use of setuptools,
|
| 23 |
+
users should rely on ``setuptools.build_meta`` APIs.
|
| 24 |
+
"""
|
| 25 |
+
|
| 26 |
+
description = "DO NOT CALL DIRECTLY, INTERNAL ONLY: create .dist-info directory"
|
| 27 |
+
|
| 28 |
+
user_options = [
|
| 29 |
+
(
|
| 30 |
+
'output-dir=',
|
| 31 |
+
'o',
|
| 32 |
+
"directory inside of which the .dist-info will be"
|
| 33 |
+
"created [default: top of the source tree]",
|
| 34 |
+
),
|
| 35 |
+
('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
|
| 36 |
+
('tag-build=', 'b', "Specify explicit tag to add to version number"),
|
| 37 |
+
('no-date', 'D', "Don't include date stamp [default]"),
|
| 38 |
+
('keep-egg-info', None, "*TRANSITIONAL* will be removed in the future"),
|
| 39 |
+
]
|
| 40 |
+
|
| 41 |
+
boolean_options = ['tag-date', 'keep-egg-info']
|
| 42 |
+
negative_opt = {'no-date': 'tag-date'}
|
| 43 |
+
|
| 44 |
+
def initialize_options(self):
|
| 45 |
+
self.output_dir = None
|
| 46 |
+
self.name = None
|
| 47 |
+
self.dist_info_dir = None
|
| 48 |
+
self.tag_date = None
|
| 49 |
+
self.tag_build = None
|
| 50 |
+
self.keep_egg_info = False
|
| 51 |
+
|
| 52 |
+
def finalize_options(self) -> None:
|
| 53 |
+
dist = self.distribution
|
| 54 |
+
project_dir = dist.src_root or os.curdir
|
| 55 |
+
self.output_dir = Path(self.output_dir or project_dir)
|
| 56 |
+
|
| 57 |
+
egg_info = cast(egg_info_cls, self.reinitialize_command("egg_info"))
|
| 58 |
+
egg_info.egg_base = str(self.output_dir)
|
| 59 |
+
|
| 60 |
+
if self.tag_date:
|
| 61 |
+
egg_info.tag_date = self.tag_date
|
| 62 |
+
else:
|
| 63 |
+
self.tag_date = egg_info.tag_date
|
| 64 |
+
|
| 65 |
+
if self.tag_build:
|
| 66 |
+
egg_info.tag_build = self.tag_build
|
| 67 |
+
else:
|
| 68 |
+
self.tag_build = egg_info.tag_build
|
| 69 |
+
|
| 70 |
+
egg_info.finalize_options()
|
| 71 |
+
self.egg_info = egg_info
|
| 72 |
+
|
| 73 |
+
name = _normalization.safer_name(dist.get_name())
|
| 74 |
+
version = _normalization.safer_best_effort_version(dist.get_version())
|
| 75 |
+
self.name = f"{name}-{version}"
|
| 76 |
+
self.dist_info_dir = os.path.join(self.output_dir, f"{self.name}.dist-info")
|
| 77 |
+
|
| 78 |
+
@contextmanager
|
| 79 |
+
def _maybe_bkp_dir(self, dir_path: str, requires_bkp: bool):
|
| 80 |
+
if requires_bkp:
|
| 81 |
+
bkp_name = f"{dir_path}.__bkp__"
|
| 82 |
+
_rm(bkp_name, ignore_errors=True)
|
| 83 |
+
shutil.copytree(dir_path, bkp_name, dirs_exist_ok=True, symlinks=True)
|
| 84 |
+
try:
|
| 85 |
+
yield
|
| 86 |
+
finally:
|
| 87 |
+
_rm(dir_path, ignore_errors=True)
|
| 88 |
+
shutil.move(bkp_name, dir_path)
|
| 89 |
+
else:
|
| 90 |
+
yield
|
| 91 |
+
|
| 92 |
+
def run(self) -> None:
|
| 93 |
+
self.output_dir.mkdir(parents=True, exist_ok=True)
|
| 94 |
+
self.egg_info.run()
|
| 95 |
+
egg_info_dir = self.egg_info.egg_info
|
| 96 |
+
assert os.path.isdir(egg_info_dir), ".egg-info dir should have been created"
|
| 97 |
+
|
| 98 |
+
log.info(f"creating '{os.path.abspath(self.dist_info_dir)}'")
|
| 99 |
+
bdist_wheel = self.get_finalized_command('bdist_wheel')
|
| 100 |
+
|
| 101 |
+
# TODO: if bdist_wheel if merged into setuptools, just add "keep_egg_info" there
|
| 102 |
+
with self._maybe_bkp_dir(egg_info_dir, self.keep_egg_info):
|
| 103 |
+
bdist_wheel.egg2dist(egg_info_dir, self.dist_info_dir)
|
falcon/lib/python3.10/site-packages/setuptools/command/easy_install.py
ADDED
|
@@ -0,0 +1,2365 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Easy Install
|
| 3 |
+
------------
|
| 4 |
+
|
| 5 |
+
A tool for doing automatic download/extract/build of distutils-based Python
|
| 6 |
+
packages. For detailed documentation, see the accompanying EasyInstall.txt
|
| 7 |
+
file, or visit the `EasyInstall home page`__.
|
| 8 |
+
|
| 9 |
+
__ https://setuptools.pypa.io/en/latest/deprecated/easy_install.html
|
| 10 |
+
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
from __future__ import annotations
|
| 14 |
+
|
| 15 |
+
import configparser
|
| 16 |
+
import contextlib
|
| 17 |
+
import io
|
| 18 |
+
import os
|
| 19 |
+
import random
|
| 20 |
+
import re
|
| 21 |
+
import shlex
|
| 22 |
+
import shutil
|
| 23 |
+
import site
|
| 24 |
+
import stat
|
| 25 |
+
import struct
|
| 26 |
+
import subprocess
|
| 27 |
+
import sys
|
| 28 |
+
import sysconfig
|
| 29 |
+
import tempfile
|
| 30 |
+
import textwrap
|
| 31 |
+
import warnings
|
| 32 |
+
import zipfile
|
| 33 |
+
import zipimport
|
| 34 |
+
from collections.abc import Iterable
|
| 35 |
+
from glob import glob
|
| 36 |
+
from sysconfig import get_path
|
| 37 |
+
from typing import TYPE_CHECKING, NoReturn, TypedDict
|
| 38 |
+
|
| 39 |
+
from jaraco.text import yield_lines
|
| 40 |
+
|
| 41 |
+
import pkg_resources
|
| 42 |
+
from pkg_resources import (
|
| 43 |
+
DEVELOP_DIST,
|
| 44 |
+
Distribution,
|
| 45 |
+
DistributionNotFound,
|
| 46 |
+
EggMetadata,
|
| 47 |
+
Environment,
|
| 48 |
+
PathMetadata,
|
| 49 |
+
Requirement,
|
| 50 |
+
VersionConflict,
|
| 51 |
+
WorkingSet,
|
| 52 |
+
find_distributions,
|
| 53 |
+
get_distribution,
|
| 54 |
+
normalize_path,
|
| 55 |
+
resource_string,
|
| 56 |
+
)
|
| 57 |
+
from setuptools import Command
|
| 58 |
+
from setuptools.archive_util import unpack_archive
|
| 59 |
+
from setuptools.command import bdist_egg, egg_info, setopt
|
| 60 |
+
from setuptools.package_index import URL_SCHEME, PackageIndex, parse_requirement_arg
|
| 61 |
+
from setuptools.sandbox import run_setup
|
| 62 |
+
from setuptools.warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
|
| 63 |
+
from setuptools.wheel import Wheel
|
| 64 |
+
|
| 65 |
+
from .._path import ensure_directory
|
| 66 |
+
from .._shutil import attempt_chmod_verbose as chmod, rmtree as _rmtree
|
| 67 |
+
from ..compat import py39, py312
|
| 68 |
+
|
| 69 |
+
from distutils import dir_util, log
|
| 70 |
+
from distutils.command import install
|
| 71 |
+
from distutils.command.build_scripts import first_line_re
|
| 72 |
+
from distutils.errors import (
|
| 73 |
+
DistutilsArgError,
|
| 74 |
+
DistutilsError,
|
| 75 |
+
DistutilsOptionError,
|
| 76 |
+
DistutilsPlatformError,
|
| 77 |
+
)
|
| 78 |
+
from distutils.util import convert_path, get_platform, subst_vars
|
| 79 |
+
|
| 80 |
+
if TYPE_CHECKING:
|
| 81 |
+
from typing_extensions import Self
|
| 82 |
+
|
| 83 |
+
# Turn on PEP440Warnings
|
| 84 |
+
warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
|
| 85 |
+
|
| 86 |
+
__all__ = [
|
| 87 |
+
'easy_install',
|
| 88 |
+
'PthDistributions',
|
| 89 |
+
'extract_wininst_cfg',
|
| 90 |
+
'get_exe_prefixes',
|
| 91 |
+
]
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def is_64bit():
|
| 95 |
+
return struct.calcsize("P") == 8
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
def _to_bytes(s):
|
| 99 |
+
return s.encode('utf8')
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def isascii(s):
    """Return True if *s* contains only ASCII characters.

    Uses ``str.isascii()`` (available since Python 3.7) instead of the
    older encode-and-catch idiom; results are identical for ``str``
    inputs (including lone surrogates, which both report as non-ASCII),
    and this version additionally works for ``bytes``/``bytearray``.
    """
    return s.isascii()
def _one_liner(text):
|
| 111 |
+
return textwrap.dedent(text).strip().replace('\n', '; ')
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
class easy_install(Command):
    """Manage a download/build/install process"""

    # One-line summary shown by ``setup.py --help-commands``.
    description = "Find/get/install Python packages"
    # Positional arguments (requirements/URLs/paths) are collected in self.args.
    command_consumes_arguments = True

    # (long-name, short-name, help-text) triples, per distutils convention;
    # a trailing '=' on the long name means the option takes a value.
    user_options = [
        ('prefix=', None, "installation prefix"),
        ("zip-ok", "z", "install package as a zipfile"),
        ("multi-version", "m", "make apps have to require() a version"),
        ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
        ("install-dir=", "d", "install package to DIR"),
        ("script-dir=", "s", "install scripts to DIR"),
        ("exclude-scripts", "x", "Don't install scripts"),
        ("always-copy", "a", "Copy all needed packages to install dir"),
        ("index-url=", "i", "base URL of Python Package Index"),
        ("find-links=", "f", "additional URL(s) to search for packages"),
        ("build-directory=", "b", "download/extract/build in DIR; keep the results"),
        (
            'optimize=',
            'O',
            'also compile with optimization: -O1 for "python -O", '
            '-O2 for "python -OO", and -O0 to disable [default: -O0]',
        ),
        ('record=', None, "filename in which to record list of installed files"),
        ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
        ('site-dirs=', 'S', "list of directories where .pth files work"),
        ('editable', 'e', "Install specified packages in editable form"),
        ('no-deps', 'N', "don't install dependencies"),
        ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
        ('local-snapshots-ok', 'l', "allow building eggs from local checkouts"),
        ('version', None, "print version information and exit"),
        (
            'no-find-links',
            None,
            "Don't load find-links defined in packages being installed",
        ),
        ('user', None, f"install in user site-package '{site.USER_SITE}'"),
    ]
    # Options that take no argument and are parsed to True/False.
    boolean_options = [
        'zip-ok',
        'multi-version',
        'exclude-scripts',
        'upgrade',
        'always-copy',
        'editable',
        'no-deps',
        'local-snapshots-ok',
        'version',
        'user',
    ]

    # --always-unzip is the negation of --zip-ok.
    negative_opt = {'always-unzip': 'zip-ok'}
    # Factory used to build the package index; overridable by subclasses/tests.
    create_index = PackageIndex
    def initialize_options(self):
        """Set every option to its pre-parse default (distutils protocol)."""
        # easy_install itself is deprecated; warn on every instantiation.
        EasyInstallDeprecationWarning.emit()

        # the --user option seems to be an opt-in one,
        # so the default should be False.
        self.user = False
        self.zip_ok = self.local_snapshots_ok = None
        self.install_dir = self.script_dir = self.exclude_scripts = None
        self.index_url = None
        self.find_links = None
        self.build_directory = None
        self.args = None
        self.optimize = self.record = None
        self.upgrade = self.always_copy = self.multi_version = None
        self.editable = self.no_deps = self.allow_hosts = None
        self.root = self.prefix = self.no_report = None
        self.version = None
        self.install_purelib = None  # for pure module distributions
        self.install_platlib = None  # non-pure (dists w/ extensions)
        self.install_headers = None  # for C/C++ headers
        self.install_lib = None  # set to either purelib or platlib
        self.install_scripts = None
        self.install_data = None
        self.install_base = None
        self.install_platbase = None
        self.install_userbase = site.USER_BASE
        self.install_usersite = site.USER_SITE
        self.no_find_links = None

        # Options not specifiable via command line
        self.package_index = None
        self.pth_file = self.always_copy_from = None
        self.site_dirs = None
        self.installed_projects = {}
        # Always read easy_install options, even if we are subclassed, or have
        # an independent instance created. This ensures that defaults will
        # always come from the standard configuration file(s)' "easy_install"
        # section, even if this is a "develop" or "install" command, or some
        # other embedding.
        self._dry_run = None
        self.verbose = self.distribution.verbose
        self.distribution._set_command_options(
            self, self.distribution.get_option_dict('easy_install')
        )
def delete_blockers(self, blockers) -> None:
|
| 215 |
+
extant_blockers = (
|
| 216 |
+
filename
|
| 217 |
+
for filename in blockers
|
| 218 |
+
if os.path.exists(filename) or os.path.islink(filename)
|
| 219 |
+
)
|
| 220 |
+
list(map(self._delete_path, extant_blockers))
|
| 221 |
+
|
| 222 |
+
def _delete_path(self, path):
|
| 223 |
+
log.info("Deleting %s", path)
|
| 224 |
+
if self.dry_run:
|
| 225 |
+
return
|
| 226 |
+
|
| 227 |
+
is_tree = os.path.isdir(path) and not os.path.islink(path)
|
| 228 |
+
remover = _rmtree if is_tree else os.unlink
|
| 229 |
+
remover(path)
|
| 230 |
+
|
| 231 |
+
    @staticmethod
    def _render_version():
        """
        Render the Setuptools version and installation details, then exit.
        """
        ver = f'{sys.version_info.major}.{sys.version_info.minor}'
        dist = get_distribution('setuptools')
        print(f'setuptools {dist.version} from {dist.location} (Python {ver})')
        # Bare SystemExit -> exit status 0: --version is informational.
        raise SystemExit
    def finalize_options(self) -> None:  # noqa: C901 # is too complex (25) # FIXME
        """Resolve all options: config vars, directories, and package index."""
        # --version short-circuits the whole command (raises SystemExit).
        self.version and self._render_version()

        py_version = sys.version.split()[0]

        # Start from the interpreter's sysconfig vars, then overlay values
        # used by subst_vars() when expanding directory options below.
        self.config_vars = dict(sysconfig.get_config_vars())

        self.config_vars.update({
            'dist_name': self.distribution.get_name(),
            'dist_version': self.distribution.get_version(),
            'dist_fullname': self.distribution.get_fullname(),
            'py_version': py_version,
            'py_version_short': f'{sys.version_info.major}.{sys.version_info.minor}',
            'py_version_nodot': f'{sys.version_info.major}{sys.version_info.minor}',
            'sys_prefix': self.config_vars['prefix'],
            'sys_exec_prefix': self.config_vars['exec_prefix'],
            # Only POSIX systems have abiflags
            'abiflags': getattr(sys, 'abiflags', ''),
            # Only python 3.9+ has platlibdir
            'platlibdir': getattr(sys, 'platlibdir', 'lib'),
        })
        with contextlib.suppress(AttributeError):
            # only for distutils outside stdlib
            self.config_vars.update({
                'implementation_lower': install._get_implementation().lower(),
                'implementation': install._get_implementation(),
            })

        # pypa/distutils#113 Python 3.9 compat
        self.config_vars.setdefault(
            'py_version_nodot_plat',
            getattr(sys, 'windir', '').replace('.', ''),
        )

        self.config_vars['userbase'] = self.install_userbase
        self.config_vars['usersite'] = self.install_usersite
        if self.user and not site.ENABLE_USER_SITE:
            log.warn("WARNING: The user site-packages directory is disabled.")

        self._fix_install_dir_for_user_site()

        self.expand_basedirs()
        self.expand_dirs()

        # Expand ~ and $vars in the user-facing directory options.
        self._expand(
            'install_dir',
            'script_dir',
            'build_directory',
            'site_dirs',
        )
        # If a non-default installation directory was specified, default the
        # script directory to match it.
        if self.script_dir is None:
            self.script_dir = self.install_dir

        if self.no_find_links is None:
            self.no_find_links = False

        # Let install_dir get set by install_lib command, which in turn
        # gets its info from the install command, and takes into account
        # --prefix and --home and all that other crud.
        self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
        # Likewise, set default script_dir from 'install_scripts.install_dir'
        self.set_undefined_options('install_scripts', ('install_dir', 'script_dir'))

        if self.user and self.install_purelib:
            self.install_dir = self.install_purelib
            self.script_dir = self.install_scripts
        # default --record from the install command
        self.set_undefined_options('install', ('record', 'record'))
        self.all_site_dirs = get_site_dirs()
        self.all_site_dirs.extend(self._process_site_dirs(self.site_dirs))

        if not self.editable:
            self.check_site_dir()
        default_index = os.getenv("__EASYINSTALL_INDEX", "https://pypi.org/simple/")
        # ^ Private API for testing purposes only
        self.index_url = self.index_url or default_index
        # shadow_path: search path that prefers our own install/script dirs.
        self.shadow_path = self.all_site_dirs[:]
        for path_item in self.install_dir, normalize_path(self.script_dir):
            if path_item not in self.shadow_path:
                self.shadow_path.insert(0, path_item)

        if self.allow_hosts is not None:
            hosts = [s.strip() for s in self.allow_hosts.split(',')]
        else:
            hosts = ['*']
        if self.package_index is None:
            self.package_index = self.create_index(
                self.index_url,
                search_path=self.shadow_path,
                hosts=hosts,
            )
        self.local_index = Environment(self.shadow_path + sys.path)

        if self.find_links is not None:
            # A whitespace-separated string becomes a list of links.
            if isinstance(self.find_links, str):
                self.find_links = self.find_links.split()
        else:
            self.find_links = []
        if self.local_snapshots_ok:
            self.package_index.scan_egg_links(self.shadow_path + sys.path)
        if not self.no_find_links:
            self.package_index.add_find_links(self.find_links)
        self.set_undefined_options('install_lib', ('optimize', 'optimize'))
        self.optimize = self._validate_optimize(self.optimize)

        if self.editable and not self.build_directory:
            raise DistutilsArgError(
                "Must specify a build directory (-b) when using --editable"
            )
        if not self.args:
            raise DistutilsArgError(
                "No urls, filenames, or requirements specified (see --help)"
            )

        # Paths of everything this command installs (for --record).
        self.outputs: list[str] = []
@staticmethod
|
| 360 |
+
def _process_site_dirs(site_dirs):
|
| 361 |
+
if site_dirs is None:
|
| 362 |
+
return
|
| 363 |
+
|
| 364 |
+
normpath = map(normalize_path, sys.path)
|
| 365 |
+
site_dirs = [os.path.expanduser(s.strip()) for s in site_dirs.split(',')]
|
| 366 |
+
for d in site_dirs:
|
| 367 |
+
if not os.path.isdir(d):
|
| 368 |
+
log.warn("%s (in --site-dirs) does not exist", d)
|
| 369 |
+
elif normalize_path(d) not in normpath:
|
| 370 |
+
raise DistutilsOptionError(d + " (in --site-dirs) is not on sys.path")
|
| 371 |
+
else:
|
| 372 |
+
yield normalize_path(d)
|
| 373 |
+
|
| 374 |
+
@staticmethod
|
| 375 |
+
def _validate_optimize(value):
|
| 376 |
+
try:
|
| 377 |
+
value = int(value)
|
| 378 |
+
if value not in range(3):
|
| 379 |
+
raise ValueError
|
| 380 |
+
except ValueError as e:
|
| 381 |
+
raise DistutilsOptionError("--optimize must be 0, 1, or 2") from e
|
| 382 |
+
|
| 383 |
+
return value
|
| 384 |
+
|
| 385 |
+
    def _fix_install_dir_for_user_site(self):
        """
        Fix the install_dir if "--user" was used.
        """
        if not self.user:
            return

        self.create_home_path()
        if self.install_userbase is None:
            msg = "User base directory is not specified"
            raise DistutilsPlatformError(msg)
        # User installs share one base directory for purelib and platlib.
        self.install_base = self.install_platbase = self.install_userbase
        # e.g. 'posix_user' or 'nt_user', matching distutils scheme names.
        scheme_name = f'{os.name}_user'
        self.select_scheme(scheme_name)
def _expand_attrs(self, attrs):
|
| 401 |
+
for attr in attrs:
|
| 402 |
+
val = getattr(self, attr)
|
| 403 |
+
if val is not None:
|
| 404 |
+
if os.name == 'posix' or os.name == 'nt':
|
| 405 |
+
val = os.path.expanduser(val)
|
| 406 |
+
val = subst_vars(val, self.config_vars)
|
| 407 |
+
setattr(self, attr, val)
|
| 408 |
+
|
| 409 |
+
def expand_basedirs(self) -> None:
|
| 410 |
+
"""Calls `os.path.expanduser` on install_base, install_platbase and
|
| 411 |
+
root."""
|
| 412 |
+
self._expand_attrs(['install_base', 'install_platbase', 'root'])
|
| 413 |
+
|
| 414 |
+
def expand_dirs(self) -> None:
|
| 415 |
+
"""Calls `os.path.expanduser` on install dirs."""
|
| 416 |
+
dirs = [
|
| 417 |
+
'install_purelib',
|
| 418 |
+
'install_platlib',
|
| 419 |
+
'install_lib',
|
| 420 |
+
'install_headers',
|
| 421 |
+
'install_scripts',
|
| 422 |
+
'install_data',
|
| 423 |
+
]
|
| 424 |
+
self._expand_attrs(dirs)
|
| 425 |
+
|
| 426 |
+
    def run(self, show_deprecation: bool = True) -> None:
        """Install every spec in self.args; optionally write the --record file."""
        if show_deprecation:
            self.announce(
                "WARNING: The easy_install command is deprecated "
                "and will be removed in a future version.",
                log.WARN,
            )
        if self.verbose != self.distribution.verbose:
            log.set_verbosity(self.verbose)
        try:
            for spec in self.args:
                # Second arg: whether to also resolve dependencies.
                self.easy_install(spec, not self.no_deps)
            if self.record:
                outputs = self.outputs
                if self.root:  # strip any package prefix
                    root_len = len(self.root)
                    for counter in range(len(outputs)):
                        outputs[counter] = outputs[counter][root_len:]
                from distutils import file_util

                self.execute(
                    file_util.write_file,
                    (self.record, outputs),
                    f"writing list of installed files to '{self.record}'",
                )
            self.warn_deprecated_options()
        finally:
            # Restore the distribution-wide verbosity even on failure.
            log.set_verbosity(self.distribution.verbose)
def pseudo_tempname(self):
|
| 456 |
+
"""Return a pseudo-tempname base in the install directory.
|
| 457 |
+
This code is intentionally naive; if a malicious party can write to
|
| 458 |
+
the target directory you're already in deep doodoo.
|
| 459 |
+
"""
|
| 460 |
+
try:
|
| 461 |
+
pid = os.getpid()
|
| 462 |
+
except Exception:
|
| 463 |
+
pid = random.randint(0, sys.maxsize)
|
| 464 |
+
return os.path.join(self.install_dir, f"test-easy-install-{pid}")
|
| 465 |
+
|
| 466 |
+
    def warn_deprecated_options(self) -> None:
        # Intentionally a no-op; kept as an extension hook for subclasses.
        pass
    def check_site_dir(self) -> None:  # is too complex (12) # FIXME
        """Verify that self.install_dir is .pth-capable dir, if needed"""

        instdir = normalize_path(self.install_dir)
        pth_file = os.path.join(instdir, 'easy-install.pth')

        if not os.path.exists(instdir):
            try:
                os.makedirs(instdir)
            except OSError:
                # Raises DistutilsError with a helpful message; never returns.
                self.cant_write_to_target()

        # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
        is_site_dir = instdir in self.all_site_dirs

        if not is_site_dir and not self.multi_version:
            # No? Then directly test whether it does .pth file processing
            is_site_dir = self.check_pth_processing()
        else:
            # make sure we can write to target dir
            testfile = self.pseudo_tempname() + '.write-test'
            test_exists = os.path.exists(testfile)
            try:
                if test_exists:
                    os.unlink(testfile)
                open(testfile, 'wb').close()
                os.unlink(testfile)
            except OSError:
                self.cant_write_to_target()

        if not is_site_dir and not self.multi_version:
            # Can't install non-multi to non-site dir with easy_install
            pythonpath = os.environ.get('PYTHONPATH', '')
            # NOTE(review): __no_default_msg is a class attribute defined
            # elsewhere in this class (name-mangled access).
            log.warn(self.__no_default_msg, self.install_dir, pythonpath)

        if is_site_dir:
            if self.pth_file is None:
                self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
        else:
            self.pth_file = None

        if self.multi_version and not os.path.exists(pth_file):
            self.pth_file = None  # don't create a .pth file
        self.install_dir = instdir
    # Message templates for cant_write_to_target().
    # NOTE(review): the relative indentation of the "%s" placeholders inside
    # these dedented templates affects the rendered message; the original
    # formatting was lost in extraction and is reconstructed here -- confirm
    # against upstream setuptools if regenerating this file.
    __cant_write_msg = textwrap.dedent(
        """
        can't create or remove files in install directory

        The following error occurred while trying to add or remove files in the
        installation directory:

            %s

        The installation directory you specified (via --install-dir, --prefix, or
        the distutils default setting) was:

            %s
        """
    ).lstrip()

    __not_exists_id = textwrap.dedent(
        """
        This directory does not currently exist.  Please create it and try again, or
        choose a different installation directory (using the -d or --install-dir
        option).
        """
    ).lstrip()

    __access_msg = textwrap.dedent(
        """
        Perhaps your account does not have write access to this directory?  If the
        installation directory is a system-owned directory, you may need to sign in
        as the administrator or "root" account.  If you do not have administrative
        access to this machine, you may wish to choose a different installation
        directory, preferably one that is listed in your PYTHONPATH environment
        variable.

        For information on other options, you may wish to consult the
        documentation at:

          https://setuptools.pypa.io/en/latest/deprecated/easy_install.html

        Please make the appropriate changes for your system and try again.
        """
    ).lstrip()
    def cant_write_to_target(self) -> NoReturn:
        """Abort with a DistutilsError explaining why the install dir failed.

        Must be called from inside an ``except`` block: it embeds the
        in-flight exception (sys.exc_info()) in the message.
        """
        msg = self.__cant_write_msg % (
            sys.exc_info()[1],
            self.install_dir,
        )

        # Append the most relevant hint for the failure mode.
        if not os.path.exists(self.install_dir):
            msg += '\n' + self.__not_exists_id
        else:
            msg += '\n' + self.__access_msg
        raise DistutilsError(msg)
    def check_pth_processing(self):  # noqa: C901
        """Empirically verify whether .pth files are supported in inst. dir"""
        instdir = self.install_dir
        log.info("Checking .pth file support in %s", instdir)
        pth_file = self.pseudo_tempname() + ".pth"
        ok_file = pth_file + '.ok'
        ok_exists = os.path.exists(ok_file)
        # A tiny script written into a .pth file; if the interpreter processes
        # the .pth at startup, it will create ok_file as a side effect.
        # NOTE: rendered with tmpl.format(**locals()) below, so the
        # {ok_file!r} placeholder depends on the local variable name above.
        tmpl = (
            _one_liner(
                """
                import os
                f = open({ok_file!r}, 'w', encoding="utf-8")
                f.write('OK')
                f.close()
                """
            )
            + '\n'
        )
        try:
            if ok_exists:
                os.unlink(ok_file)
            dirname = os.path.dirname(ok_file)
            os.makedirs(dirname, exist_ok=True)
            f = open(pth_file, 'w', encoding=py312.PTH_ENCODING)
            # ^-- Python<3.13 require encoding="locale" instead of "utf-8",
            #     see python/cpython#77102.
        except OSError:
            self.cant_write_to_target()
        else:
            try:
                f.write(tmpl.format(**locals()))
                f.close()
                f = None
                executable = sys.executable
                if os.name == 'nt':
                    dirname, basename = os.path.split(executable)
                    alt = os.path.join(dirname, 'pythonw.exe')
                    use_alt = basename.lower() == 'python.exe' and os.path.exists(alt)
                    if use_alt:
                        # use pythonw.exe to avoid opening a console window
                        executable = alt

                from distutils.spawn import spawn

                # Launch a fresh interpreter; it should execute the .pth file.
                spawn([executable, '-E', '-c', 'pass'], 0)

                if os.path.exists(ok_file):
                    log.info("TEST PASSED: %s appears to support .pth files", instdir)
                    return True
            finally:
                # Always remove the probe files, pass or fail.
                if f:
                    f.close()
                if os.path.exists(ok_file):
                    os.unlink(ok_file)
                if os.path.exists(pth_file):
                    os.unlink(pth_file)
        if not self.multi_version:
            log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
        return False
def install_egg_scripts(self, dist) -> None:
|
| 629 |
+
"""Write all the scripts for `dist`, unless scripts are excluded"""
|
| 630 |
+
if not self.exclude_scripts and dist.metadata_isdir('scripts'):
|
| 631 |
+
for script_name in dist.metadata_listdir('scripts'):
|
| 632 |
+
if dist.metadata_isdir('scripts/' + script_name):
|
| 633 |
+
# The "script" is a directory, likely a Python 3
|
| 634 |
+
# __pycache__ directory, so skip it.
|
| 635 |
+
continue
|
| 636 |
+
self.install_script(
|
| 637 |
+
dist, script_name, dist.get_metadata('scripts/' + script_name)
|
| 638 |
+
)
|
| 639 |
+
self.install_wrapper_scripts(dist)
|
| 640 |
+
|
| 641 |
+
def add_output(self, path) -> None:
|
| 642 |
+
if os.path.isdir(path):
|
| 643 |
+
for base, dirs, files in os.walk(path):
|
| 644 |
+
for filename in files:
|
| 645 |
+
self.outputs.append(os.path.join(base, filename))
|
| 646 |
+
else:
|
| 647 |
+
self.outputs.append(path)
|
| 648 |
+
|
| 649 |
+
def not_editable(self, spec) -> None:
|
| 650 |
+
if self.editable:
|
| 651 |
+
raise DistutilsArgError(
|
| 652 |
+
f"Invalid argument {spec!r}: you can't use filenames or URLs "
|
| 653 |
+
"with --editable (except via the --find-links option)."
|
| 654 |
+
)
|
| 655 |
+
|
| 656 |
+
def check_editable(self, spec) -> None:
|
| 657 |
+
if not self.editable:
|
| 658 |
+
return
|
| 659 |
+
|
| 660 |
+
if os.path.exists(os.path.join(self.build_directory, spec.key)):
|
| 661 |
+
raise DistutilsArgError(
|
| 662 |
+
f"{spec.key!r} already exists in {self.build_directory}; can't do a checkout there"
|
| 663 |
+
)
|
| 664 |
+
|
| 665 |
+
@contextlib.contextmanager
|
| 666 |
+
def _tmpdir(self):
|
| 667 |
+
tmpdir = tempfile.mkdtemp(prefix="easy_install-")
|
| 668 |
+
try:
|
| 669 |
+
# cast to str as workaround for #709 and #710 and #712
|
| 670 |
+
yield str(tmpdir)
|
| 671 |
+
finally:
|
| 672 |
+
os.path.exists(tmpdir) and _rmtree(tmpdir)
|
| 673 |
+
|
| 674 |
+
    def easy_install(self, spec, deps: bool = False) -> Distribution | None:
        """Install *spec* (Requirement, URL, or local path).

        Returns the installed Distribution, or None when none matched.
        """
        with self._tmpdir() as tmpdir:
            if not isinstance(spec, Requirement):
                if URL_SCHEME(spec):
                    # It's a url, download it to tmpdir and process
                    self.not_editable(spec)
                    dl = self.package_index.download(spec, tmpdir)
                    return self.install_item(None, dl, tmpdir, deps, True)

                elif os.path.exists(spec):
                    # Existing file or directory, just process it directly
                    self.not_editable(spec)
                    return self.install_item(None, spec, tmpdir, deps, True)
                else:
                    # Treat the string as a requirement specifier.
                    spec = parse_requirement_arg(spec)

            self.check_editable(spec)
            dist = self.package_index.fetch_distribution(
                spec,
                tmpdir,
                self.upgrade,
                self.editable,
                not self.always_copy,
                self.local_index,
            )
            if dist is None:
                msg = f"Could not find suitable distribution for {spec!r}"
                if self.always_copy:
                    msg += " (--always-copy skips system and development eggs)"
                raise DistutilsError(msg)
            elif dist.precedence == DEVELOP_DIST:
                # .egg-info dists don't need installing, just process deps
                self.process_distribution(spec, dist, deps, "Using")
                return dist
            else:
                return self.install_item(spec, dist.location, tmpdir, deps)
    def install_item(
        self, spec, download, tmpdir, deps, install_needed: bool = False
    ) -> Distribution | None:
        """Install the artifact at *download*; return the dist matching *spec*.

        *install_needed* forces a real install instead of reusing an egg
        that is already usable in place.
        """
        # Installation is also needed if file in tmpdir or is not an egg
        install_needed = install_needed or bool(self.always_copy)
        install_needed = install_needed or os.path.dirname(download) == tmpdir
        install_needed = install_needed or not download.endswith('.egg')
        install_needed = install_needed or (
            self.always_copy_from is not None
            and os.path.dirname(normalize_path(download))
            == normalize_path(self.always_copy_from)
        )

        if spec and not install_needed:
            # at this point, we know it's a local .egg, we just don't know if
            # it's already installed.
            for dist in self.local_index[spec.project_name]:
                if dist.location == download:
                    break
            else:
                install_needed = True  # it's not in the local index

        log.info("Processing %s", os.path.basename(download))

        if install_needed:
            dists = self.install_eggs(spec, download, tmpdir)
            for dist in dists:
                self.process_distribution(spec, dist, deps)
        else:
            dists = [self.egg_distribution(download)]
            self.process_distribution(spec, dists[0], deps, "Using")

        # Return the installed dist that satisfies the requested spec, if any.
        if spec is not None:
            for dist in dists:
                if dist in spec:
                    return dist
        return None
    def select_scheme(self, name):
        """Apply the named install scheme, bridging both distutils variants."""
        try:
            # pypa/distutils exposes a module-level helper
            install._select_scheme(self, name)
        except AttributeError:
            # stdlib distutils
            install.install.select_scheme(self, name.replace('posix', 'unix'))
    # FIXME: 'easy_install.process_distribution' is too complex (12)
    def process_distribution(  # noqa: C901
        self,
        requirement,
        dist,
        deps: bool = True,
        *info,
    ) -> None:
        """Register *dist* as installed; optionally resolve its dependencies.

        *info* is passed through to installation_report() (e.g. "Using").
        """
        self.update_pth(dist)
        self.package_index.add(dist)
        # Replace any stale entry for the same dist in the local index.
        if dist in self.local_index[dist.key]:
            self.local_index.remove(dist)
        self.local_index.add(dist)
        self.install_egg_scripts(dist)
        self.installed_projects[dist.key] = dist
        log.info(self.installation_report(requirement, dist, *info))
        if dist.has_metadata('dependency_links.txt') and not self.no_find_links:
            self.package_index.add_find_links(
                dist.get_metadata_lines('dependency_links.txt')
            )
        if not deps and not self.always_copy:
            return
        elif requirement is not None and dist.key != requirement.key:
            log.warn("Skipping dependencies for %s", dist)
            return  # XXX this is not the distribution we were looking for
        elif requirement is None or dist not in requirement:
            # if we wound up with a different version, resolve what we've got
            distreq = dist.as_requirement()
            requirement = Requirement(str(distreq))
        log.info("Processing dependencies for %s", requirement)
        try:
            # easy_install (the bound method) serves as the installer callback.
            distros = WorkingSet([]).resolve(
                [requirement], self.local_index, self.easy_install
            )
        except DistributionNotFound as e:
            raise DistutilsError(str(e)) from e
        except VersionConflict as e:
            raise DistutilsError(e.report()) from e
        if self.always_copy or self.always_copy_from:
            # Force all the relevant distros to be copied or activated
            for dist in distros:
                if dist.key not in self.installed_projects:
                    self.easy_install(dist.as_requirement())
        log.info("Finished processing dependencies for %s", requirement)
def should_unzip(self, dist) -> bool:
|
| 802 |
+
if self.zip_ok is not None:
|
| 803 |
+
return not self.zip_ok
|
| 804 |
+
if dist.has_metadata('not-zip-safe'):
|
| 805 |
+
return True
|
| 806 |
+
if not dist.has_metadata('zip-safe'):
|
| 807 |
+
return True
|
| 808 |
+
return False
|
| 809 |
+
|
| 810 |
+
    def maybe_move(self, spec, dist_filename, setup_base):
        """Move a build tree into --build-directory; return the dir to build in."""
        dst = os.path.join(self.build_directory, spec.key)
        if os.path.exists(dst):
            msg = "%r already exists in %s; build directory %s will not be kept"
            log.warn(msg, spec.key, self.build_directory, setup_base)
            return setup_base
        if os.path.isdir(dist_filename):
            setup_base = dist_filename
        else:
            if os.path.dirname(dist_filename) == setup_base:
                os.unlink(dist_filename)  # get it out of the tmp dir
            contents = os.listdir(setup_base)
            if len(contents) == 1:
                dist_filename = os.path.join(setup_base, contents[0])
                if os.path.isdir(dist_filename):
                    # if the only thing there is a directory, move it instead
                    setup_base = dist_filename
        ensure_directory(dst)
        shutil.move(setup_base, dst)
        return dst
def install_wrapper_scripts(self, dist) -> None:
|
| 832 |
+
if self.exclude_scripts:
|
| 833 |
+
return
|
| 834 |
+
for args in ScriptWriter.best().get_args(dist):
|
| 835 |
+
self.write_script(*args)
|
| 836 |
+
|
| 837 |
+
    def install_script(self, dist, script_name, script_text, dev_path=None) -> None:
        """Generate a legacy script wrapper and install it"""
        spec = str(dist.as_requirement())
        is_script = is_python_script(script_text, script_name)

        if is_script:
            # NOTE: the template is rendered with `% locals()`, so it depends
            # on the exact local variable names above (e.g. `spec`); renaming
            # them would silently break the generated script.
            body = self._load_template(dev_path) % locals()
            script_text = ScriptWriter.get_header(script_text) + body
        # 'b' mode: script_text is written as bytes, verbatim.
        self.write_script(script_name, _to_bytes(script_text), 'b')
@staticmethod
|
| 848 |
+
def _load_template(dev_path):
|
| 849 |
+
"""
|
| 850 |
+
There are a couple of template scripts in the package. This
|
| 851 |
+
function loads one of them and prepares it for use.
|
| 852 |
+
"""
|
| 853 |
+
# See https://github.com/pypa/setuptools/issues/134 for info
|
| 854 |
+
# on script file naming and downstream issues with SVR4
|
| 855 |
+
name = 'script.tmpl'
|
| 856 |
+
if dev_path:
|
| 857 |
+
name = name.replace('.tmpl', ' (dev).tmpl')
|
| 858 |
+
|
| 859 |
+
raw_bytes = resource_string('setuptools', name)
|
| 860 |
+
return raw_bytes.decode('utf-8')
|
| 861 |
+
|
| 862 |
+
def write_script(self, script_name, contents, mode: str = "t", blockers=()) -> None:
    """Write an executable file to the scripts directory.

    ``mode`` is "t" (text) or "b" (binary); *blockers* are stale files
    (old .py/.pyw leftovers) that must be removed first.
    """
    self.delete_blockers(  # clean up old .py/.pyw w/o a script
        [os.path.join(self.script_dir, x) for x in blockers]
    )
    log.info("Installing %s script to %s", script_name, self.script_dir)
    target = os.path.join(self.script_dir, script_name)
    self.add_output(target)

    if self.dry_run:
        return

    mask = current_umask()
    ensure_directory(target)
    if os.path.exists(target):
        os.unlink(target)

    # Text scripts are written UTF-8; binary mode gets no encoding.
    encoding = None if "b" in mode else "utf-8"
    with open(target, "w" + mode, encoding=encoding) as f:
        f.write(contents)
    # Make executable, honoring the user's umask.
    chmod(target, 0o777 - mask)
|
| 884 |
+
def install_eggs(self, spec, dist_filename, tmpdir) -> list[Distribution]:
    """Install *dist_filename* and return the resulting distributions.

    Dispatches on extension for already-built artifacts (.egg/.exe/.whl);
    anything else is unpacked (if an archive) and built from its
    setup.py.  Returns [] in editable mode (only reports what to do).
    """
    # .egg dirs or files are already built, so just return them
    installer_map = {
        '.egg': self.install_egg,
        '.exe': self.install_exe,
        '.whl': self.install_wheel,
    }
    try:
        install_dist = installer_map[dist_filename.lower()[-4:]]
    except KeyError:
        pass
    else:
        return [install_dist(dist_filename, tmpdir)]

    # Anything else, try to extract and build
    setup_base = tmpdir
    if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
        unpack_archive(dist_filename, tmpdir, self.unpack_progress)
    elif os.path.isdir(dist_filename):
        setup_base = os.path.abspath(dist_filename)

    if (
        setup_base.startswith(tmpdir)  # something we downloaded
        and self.build_directory
        and spec is not None
    ):
        setup_base = self.maybe_move(spec, dist_filename, setup_base)

    # Find the setup.py file
    setup_script = os.path.join(setup_base, 'setup.py')

    if not os.path.exists(setup_script):
        # Allow a single nested directory containing setup.py.
        setups = glob(os.path.join(setup_base, '*', 'setup.py'))
        if not setups:
            raise DistutilsError(
                f"Couldn't find a setup script in {os.path.abspath(dist_filename)}"
            )
        if len(setups) > 1:
            raise DistutilsError(
                f"Multiple setup scripts in {os.path.abspath(dist_filename)}"
            )
        setup_script = setups[0]

    # Now run it, and return the result
    if self.editable:
        log.info(self.report_editable(spec, setup_script))
        return []
    else:
        return self.build_and_install(setup_script, setup_base)
|
| 934 |
+
def egg_distribution(self, egg_path):
    """Build a Distribution for the egg at *egg_path*.

    An unpacked egg directory reads metadata from its EGG-INFO folder;
    a zipped egg reads it through a zipimporter.
    """
    if os.path.isdir(egg_path):
        provider = PathMetadata(egg_path, os.path.join(egg_path, 'EGG-INFO'))
    else:
        provider = EggMetadata(zipimport.zipimporter(egg_path))
    return Distribution.from_filename(egg_path, metadata=provider)
| 941 |
+
# FIXME: 'easy_install.install_egg' is too complex (11)
def install_egg(self, egg_path, tmpdir):
    """Move/copy/extract an egg into the install dir and return its dist.

    The transfer strategy depends on whether the egg is a directory,
    whether it lives inside *tmpdir* (move vs. copy), and whether
    should_unzip() says it must be extracted.
    """
    destination = os.path.join(
        self.install_dir,
        os.path.basename(egg_path),
    )
    destination = os.path.abspath(destination)
    if not self.dry_run:
        ensure_directory(destination)

    dist = self.egg_distribution(egg_path)
    # Skip the transfer entirely if source and destination are the same file.
    if not (
        os.path.exists(destination) and os.path.samefile(egg_path, destination)
    ):
        # Clear whatever currently occupies the destination.
        if os.path.isdir(destination) and not os.path.islink(destination):
            dir_util.remove_tree(destination, dry_run=self.dry_run)
        elif os.path.exists(destination):
            self.execute(
                os.unlink,
                (destination,),
                "Removing " + destination,
            )
        try:
            new_dist_is_zipped = False
            if os.path.isdir(egg_path):
                # Unpacked egg: move if it's our temp copy, else copy.
                if egg_path.startswith(tmpdir):
                    f, m = shutil.move, "Moving"
                else:
                    f, m = shutil.copytree, "Copying"
            elif self.should_unzip(dist):
                # Zipped egg that must be installed unpacked.
                self.mkpath(destination)
                f, m = self.unpack_and_compile, "Extracting"
            else:
                # Keep it zipped; zipimporter caches need fixing later.
                new_dist_is_zipped = True
                if egg_path.startswith(tmpdir):
                    f, m = shutil.move, "Moving"
                else:
                    f, m = shutil.copy2, "Copying"
            self.execute(
                f,
                (egg_path, destination),
                (m + " %s to %s")
                % (os.path.basename(egg_path), os.path.dirname(destination)),
            )
            update_dist_caches(
                destination,
                fix_zipimporter_caches=new_dist_is_zipped,
            )
        except Exception:
            # Best-effort cache invalidation before propagating the error.
            update_dist_caches(destination, fix_zipimporter_caches=False)
            raise

    self.add_output(destination)
    return self.egg_distribution(destination)
| 996 |
+
def install_exe(self, dist_filename, tmpdir):
    """Convert a bdist_wininst .exe into an egg and install it.

    Raises DistutilsError if the file carries no wininst config block.
    """
    # See if it's valid, get data
    cfg = extract_wininst_cfg(dist_filename)
    if cfg is None:
        raise DistutilsError(
            f"{dist_filename} is not a valid distutils Windows .exe"
        )
    # Create a dummy distribution object until we build the real distro
    dist = Distribution(
        None,
        project_name=cfg.get('metadata', 'name'),
        version=cfg.get('metadata', 'version'),
        platform=get_platform(),
    )

    # Convert the .exe to an unpacked egg
    egg_path = os.path.join(tmpdir, dist.egg_name() + '.egg')
    dist.location = egg_path
    egg_tmp = egg_path + '.tmp'
    _egg_info = os.path.join(egg_tmp, 'EGG-INFO')
    pkg_inf = os.path.join(_egg_info, 'PKG-INFO')
    ensure_directory(pkg_inf)  # make sure EGG-INFO dir exists
    dist._provider = PathMetadata(egg_tmp, _egg_info)  # XXX
    self.exe_to_egg(dist_filename, egg_tmp)

    # Write EGG-INFO/PKG-INFO (synthesized from the wininst metadata)
    if not os.path.exists(pkg_inf):
        with open(pkg_inf, 'w', encoding="utf-8") as f:
            f.write('Metadata-Version: 1.0\n')
            for k, v in cfg.items('metadata'):
                if k != 'target_version':
                    k = k.replace('_', '-').title()
                    f.write(f'{k}: {v}\n')
    script_dir = os.path.join(_egg_info, 'scripts')
    # delete entry-point scripts to avoid duping
    self.delete_blockers([
        os.path.join(script_dir, args[0]) for args in ScriptWriter.get_args(dist)
    ])
    # Build .egg file from tmpdir
    bdist_egg.make_zipfile(
        egg_path,
        egg_tmp,
        verbose=self.verbose,
        dry_run=self.dry_run,
    )
    # install the .egg
    return self.install_egg(egg_path, tmpdir)
|
| 1044 |
+
# FIXME: 'easy_install.exe_to_egg' is too complex (12)
def exe_to_egg(self, dist_filename, egg_tmp) -> None:  # noqa: C901
    """Extract a bdist_wininst to the directories an egg would use"""
    # Check for .pth file and set up prefix translations
    prefixes = get_exe_prefixes(dist_filename)
    to_compile = []
    native_libs = []
    top_level = set()

    def process(src, dst):
        # Archive-member filter: map wininst paths onto egg layout and
        # record which files need stubs/compiling.  Returns the target
        # path, or None to skip the member.
        s = src.lower()
        for old, new in prefixes:
            if s.startswith(old):
                src = new + src[len(old) :]
                parts = src.split('/')
                dst = os.path.join(egg_tmp, *parts)
                dl = dst.lower()
                if dl.endswith('.pyd') or dl.endswith('.dll'):
                    parts[-1] = bdist_egg.strip_module(parts[-1])
                    # BUGFIX: add the module *name* (a string); the
                    # previous code passed a list, which is unhashable
                    # and made set.add raise TypeError.
                    top_level.add(os.path.splitext(parts[0])[0])
                    native_libs.append(src)
                elif dl.endswith('.py') and old != 'SCRIPTS/':
                    top_level.add(os.path.splitext(parts[0])[0])
                    to_compile.append(dst)
                return dst
        if not src.endswith('.pth'):
            log.warn("WARNING: can't process %s", src)
        return None

    # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
    unpack_archive(dist_filename, egg_tmp, process)
    stubs = []
    for res in native_libs:
        if res.lower().endswith('.pyd'):  # create stubs for .pyd's
            parts = res.split('/')
            resource = parts[-1]
            parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'
            pyfile = os.path.join(egg_tmp, *parts)
            to_compile.append(pyfile)
            stubs.append(pyfile)
            bdist_egg.write_stub(resource, pyfile)
    self.byte_compile(to_compile)  # compile .py's
    bdist_egg.write_safety_flag(
        os.path.join(egg_tmp, 'EGG-INFO'), bdist_egg.analyze_egg(egg_tmp, stubs)
    )  # write zip-safety flag

    # Emit top_level.txt / native_libs.txt metadata (names are looked up
    # via locals(), so these local variable names are load-bearing).
    for name in 'top_level', 'native_libs':
        if locals()[name]:
            txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
            if not os.path.exists(txt):
                with open(txt, 'w', encoding="utf-8") as f:
                    f.write('\n'.join(locals()[name]) + '\n')
|
| 1097 |
+
def install_wheel(self, wheel_path, tmpdir):
    """Install a .whl by converting it to an egg in the install dir."""
    wheel = Wheel(wheel_path)
    assert wheel.is_compatible()
    destination = os.path.join(self.install_dir, wheel.egg_name())
    destination = os.path.abspath(destination)
    if not self.dry_run:
        ensure_directory(destination)
    # Clear whatever currently occupies the destination.
    if os.path.isdir(destination) and not os.path.islink(destination):
        dir_util.remove_tree(destination, dry_run=self.dry_run)
    elif os.path.exists(destination):
        self.execute(
            os.unlink,
            (destination,),
            "Removing " + destination,
        )
    try:
        self.execute(
            wheel.install_as_egg,
            (destination,),
            (
                f"Installing {os.path.basename(wheel_path)} to {os.path.dirname(destination)}"
            ),
        )
    finally:
        # The result is always unpacked, so no zipimporter fixups needed.
        update_dist_caches(destination, fix_zipimporter_caches=False)
    self.add_output(destination)
    return self.egg_distribution(destination)
|
| 1125 |
+
# Message templates used by installation_report(); filled via %-formatting
# with the report's locals (name, version).
__mv_warning = textwrap.dedent(
    """
    Because this distribution was installed --multi-version, before you can
    import modules from this package in an application, you will need to
    'import pkg_resources' and then use a 'require()' call similar to one of
    these examples, in order to select the desired version:

        pkg_resources.require("%(name)s")  # latest installed version
        pkg_resources.require("%(name)s==%(version)s")  # this exact version
        pkg_resources.require("%(name)s>=%(version)s")  # this version or higher
    """
).lstrip()

__id_warning = textwrap.dedent(
    """
    Note also that the installation directory must be on sys.path at runtime for
    this to work. (e.g. by being the application's script directory, by being on
    PYTHONPATH, or by being added to sys.path by your code.)
    """
)
|
| 1146 |
+
def installation_report(self, req, dist, what: str = "Installed") -> str:
    """Helpful installation message for display to package users.

    NOTE: the message template is filled from ``locals()``, so the local
    names what/eggloc/extras/name/version are part of its contract.
    """
    msg = "\n%(what)s %(eggloc)s%(extras)s"
    if self.multi_version and not self.no_report:
        msg += '\n' + self.__mv_warning
        if self.install_dir not in map(normalize_path, sys.path):
            msg += '\n' + self.__id_warning

    eggloc = dist.location
    name = dist.project_name
    version = dist.version
    extras = ''  # TODO: self.report_extras(req, dist)
    return msg % locals()
|
| 1160 |
+
# Template for report_editable(); filled via %-formatting with its locals
# (spec, dirname, python).
__editable_msg = textwrap.dedent(
    """
    Extracted editable version of %(spec)s to %(dirname)s

    If it uses setuptools in its setup script, you can activate it in
    "development" mode by going to that directory and running::

        %(python)s setup.py develop

    See the setuptools documentation for the "develop" command for more info.
    """
).lstrip()

def report_editable(self, spec, setup_script):
    """Return the message shown after an editable extraction."""
    dirname = os.path.dirname(setup_script)
    python = sys.executable
    return '\n' + self.__editable_msg % locals()
|
| 1178 |
+
def run_setup(self, setup_script, setup_base, args) -> None:
    """Run a downloaded setup.py with our patched distutils commands.

    Verbosity flags are derived from self.verbose; a non-zero exit is
    converted into a DistutilsError.
    """
    # Make sure the in-process setup.py uses setuptools' command classes.
    sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
    sys.modules.setdefault('distutils.command.egg_info', egg_info)

    args = list(args)
    if self.verbose > 2:
        v = 'v' * (self.verbose - 1)
        args.insert(0, '-' + v)
    elif self.verbose < 2:
        args.insert(0, '-q')
    if self.dry_run:
        args.insert(0, '-n')
    log.info("Running %s %s", setup_script[len(setup_base) + 1 :], ' '.join(args))
    try:
        run_setup(setup_script, args)
    except SystemExit as v:
        raise DistutilsError(f"Setup script exited with {v.args[0]}") from v
|
| 1196 |
+
def build_and_install(self, setup_script, setup_base):
    """Run bdist_egg for the source tree and install the produced eggs.

    Returns the list of installed Distributions; warns when the setup
    script produced no eggs.
    """
    args = ['bdist_egg', '--dist-dir']

    # Build into a fresh temp dir next to the setup script.
    dist_dir = tempfile.mkdtemp(
        prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
    )
    try:
        # Propagate our fetcher options to the child setup.cfg first.
        self._set_fetcher_options(os.path.dirname(setup_script))
        args.append(dist_dir)

        self.run_setup(setup_script, setup_base, args)
        all_eggs = Environment([dist_dir])
        eggs = [
            self.install_egg(dist.location, setup_base)
            for key in all_eggs
            for dist in all_eggs[key]
        ]
        if not eggs and not self.dry_run:
            log.warn("No eggs found in %s (setup script problem?)", dist_dir)
        return eggs
    finally:
        _rmtree(dist_dir)
        log.set_verbosity(self.verbose)  # restore our log verbosity
|
| 1220 |
+
def _set_fetcher_options(self, base):
    """
    When easy_install is about to run bdist_egg on a source dist, that
    source dist might have 'setup_requires' directives, requiring
    additional fetching. Ensure the fetcher options given to easy_install
    are available to that command as well.
    """
    # find the fetch options from easy_install and write them out
    # to the setup.cfg file.
    ei_opts = self.distribution.get_option_dict('easy_install').copy()
    fetch_directives = (
        'find_links',
        'site_dirs',
        'index_url',
        'optimize',
        'allow_hosts',
    )
    # Keep only the fetch-related options; each value is a (src, value)
    # pair, of which we want the value.
    fetch_options = {
        key: val[1] for key, val in ei_opts.items() if key in fetch_directives
    }
    # create a settings dictionary suitable for `edit_config`
    settings = dict(easy_install=fetch_options)
    cfg_filename = os.path.join(base, 'setup.cfg')
    setopt.edit_config(cfg_filename, settings)
|
| 1247 |
+
def update_pth(self, dist) -> None:  # noqa: C901 # is too complex (11) # FIXME
    """Record *dist* in easy-install.pth (and setuptools.pth for itself).

    Drops superseded entries, adds the new location (unless installing
    multi-version), and keeps self.shadow_path in sync.
    """
    if self.pth_file is None:
        return

    for d in self.pth_file[dist.key]:  # drop old entries
        # In single-version mode the entry for this exact location is
        # kept (it will be re-confirmed below).
        if not self.multi_version and d.location == dist.location:
            continue

        log.info("Removing %s from easy-install.pth file", d)
        self.pth_file.remove(d)
        if d.location in self.shadow_path:
            self.shadow_path.remove(d.location)

    if not self.multi_version:
        if dist.location in self.pth_file.paths:
            log.info(
                "%s is already the active version in easy-install.pth",
                dist,
            )
        else:
            log.info("Adding %s to easy-install.pth file", dist)
            self.pth_file.add(dist)  # add new entry
            if dist.location not in self.shadow_path:
                self.shadow_path.append(dist.location)

    if self.dry_run:
        return

    self.pth_file.save()

    if dist.key != 'setuptools':
        return

    # Ensure that setuptools itself never becomes unavailable!
    # XXX should this check for latest version?
    filename = os.path.join(self.install_dir, 'setuptools.pth')
    if os.path.islink(filename):
        os.unlink(filename)

    with open(filename, 'wt', encoding=py312.PTH_ENCODING) as f:
        # ^-- Python<3.13 require encoding="locale" instead of "utf-8",
        #     see python/cpython#77102.
        f.write(self.pth_file.make_relative(dist.location) + '\n')
|
| 1291 |
+
def unpack_progress(self, src, dst):
    """Progress filter for unpacking: log each member and keep it."""
    log.debug("Unpacking %s to %s", src, dst)
    return dst  # only unpack-and-compile skips files for dry run
|
| 1296 |
+
def unpack_and_compile(self, egg_path, destination) -> None:
    """Extract a zipped egg, byte-compile its sources, fix .dll/.so modes."""
    to_compile = []
    to_chmod = []

    def pf(src, dst):
        # Filter: collect .py files (outside EGG-INFO) for compilation
        # and native libs for chmod; skip writes entirely on dry runs.
        if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
            to_compile.append(dst)
        elif dst.endswith('.dll') or dst.endswith('.so'):
            to_chmod.append(dst)
        self.unpack_progress(src, dst)
        return not self.dry_run and dst or None

    unpack_archive(egg_path, destination, pf)
    self.byte_compile(to_compile)
    if not self.dry_run:
        for f in to_chmod:
            # Ensure read/execute bits while dropping setuid/setgid.
            mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
            chmod(f, mode)
|
| 1315 |
+
def byte_compile(self, to_compile) -> None:
    """Byte-compile the given .py files (plus optimized variants).

    Honors PYTHONDONTWRITEBYTECODE; always restores log verbosity.
    """
    if sys.dont_write_bytecode:
        return

    from distutils.util import byte_compile

    try:
        # try to make the byte compile messages quieter
        log.set_verbosity(self.verbose - 1)

        byte_compile(to_compile, optimize=0, force=True, dry_run=self.dry_run)
        if self.optimize:
            byte_compile(
                to_compile,
                optimize=self.optimize,
                force=True,
                dry_run=self.dry_run,
            )
    finally:
        log.set_verbosity(self.verbose)  # restore original verbosity
|
| 1336 |
+
# Error text shown when the target directory is neither on PYTHONPATH nor
# a .pth-processing site dir; formatted with (install_dir, PYTHONPATH).
__no_default_msg = textwrap.dedent(
    """
    bad install directory or PYTHONPATH

    You are attempting to install a package to a directory that is not
    on PYTHONPATH and which Python does not read ".pth" files from.  The
    installation directory you specified (via --install-dir, --prefix, or
    the distutils default setting) was:

        %s

    and your PYTHONPATH environment variable currently contains:

        %r

    Here are some of your options for correcting the problem:

    * You can choose a different installation directory, i.e., one that is
      on PYTHONPATH or supports .pth files

    * You can add the installation directory to the PYTHONPATH environment
      variable.  (It must then also be on PYTHONPATH whenever you run
      Python and want to use the package(s) you are installing.)

    * You can set up the installation directory to support ".pth" files by
      using one of the approaches described here:

      https://setuptools.pypa.io/en/latest/deprecated/easy_install.html#custom-installation-locations


    Please make the appropriate changes for your system and try again.
    """
).strip()
|
| 1370 |
+
def create_home_path(self) -> None:
    """Create directories under ~."""
    # Only relevant for --user installs.
    if not self.user:
        return
    home = convert_path(os.path.expanduser("~"))
    # Create any config-var path that lives under the home directory.
    for path in only_strs(self.config_vars.values()):
        if path.startswith(home) and not os.path.isdir(path):
            self.debug_print(f"os.makedirs('{path}', 0o700)")
            os.makedirs(path, 0o700)
|
| 1380 |
+
# Per-OS defaults used by _expand() when --prefix is given; keyed by
# os.name, falling back to DEFAULT_SCHEME (Windows-style layout).
INSTALL_SCHEMES = dict(
    posix=dict(
        install_dir='$base/lib/python$py_version_short/site-packages',
        script_dir='$base/bin',
    ),
)

DEFAULT_SCHEME = dict(
    install_dir='$base/Lib/site-packages',
    script_dir='$base/Scripts',
)
|
| 1392 |
+
def _expand(self, *attrs):
    """Expand $-variables in the named attributes in place.

    When --prefix is set, unset attributes first get scheme defaults
    with ``base`` pointing at the prefix.
    """
    config_vars = self.get_finalized_command('install').config_vars

    if self.prefix:
        # Set default install_dir/scripts from --prefix
        config_vars = dict(config_vars)
        config_vars['base'] = self.prefix
        scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME)
        for attr, val in scheme.items():
            if getattr(self, attr, None) is None:
                setattr(self, attr, val)

    from distutils.util import subst_vars

    for attr in attrs:
        val = getattr(self, attr)
        if val is not None:
            val = subst_vars(val, config_vars)
            if os.name == 'posix':
                val = os.path.expanduser(val)
            setattr(self, attr, val)
|
| 1415 |
+
def _pythonpath():
|
| 1416 |
+
items = os.environ.get('PYTHONPATH', '').split(os.pathsep)
|
| 1417 |
+
return filter(None, items)
|
| 1418 |
+
|
| 1419 |
+
|
| 1420 |
+
def get_site_dirs():
    """
    Return a list of 'site' dirs

    Combines PYTHONPATH, prefix-derived site-packages locations,
    macOS framework per-user dirs, sysconfig lib paths, the user site,
    and site.getsitepackages() — all normalized.
    """

    sitedirs = []

    # start with PYTHONPATH
    sitedirs.extend(_pythonpath())

    prefixes = [sys.prefix]
    if sys.exec_prefix != sys.prefix:
        prefixes.append(sys.exec_prefix)
    for prefix in prefixes:
        if not prefix:
            continue

        if sys.platform in ('os2emx', 'riscos'):
            sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
        elif os.sep == '/':
            # POSIX layout: versioned lib dir plus legacy site-python.
            sitedirs.extend([
                os.path.join(
                    prefix,
                    "lib",
                    f"python{sys.version_info.major}.{sys.version_info.minor}",
                    "site-packages",
                ),
                os.path.join(prefix, "lib", "site-python"),
            ])
        else:
            # Windows layout: the prefix itself is importable.
            sitedirs.extend([
                prefix,
                os.path.join(prefix, "lib", "site-packages"),
            ])
        if sys.platform != 'darwin':
            continue

        # for framework builds *only* we add the standard Apple
        # locations. Currently only per-user, but /Library and
        # /Network/Library could be added too
        if 'Python.framework' not in prefix:
            continue

        home = os.environ.get('HOME')
        if not home:
            continue

        home_sp = os.path.join(
            home,
            'Library',
            'Python',
            f'{sys.version_info.major}.{sys.version_info.minor}',
            'site-packages',
        )
        sitedirs.append(home_sp)
    lib_paths = get_path('purelib'), get_path('platlib')

    sitedirs.extend(s for s in lib_paths if s not in sitedirs)

    if site.ENABLE_USER_SITE:
        sitedirs.append(site.USER_SITE)

    # getsitepackages() may be missing in some embedded interpreters.
    with contextlib.suppress(AttributeError):
        sitedirs.extend(site.getsitepackages())

    return list(map(normalize_path, sitedirs))
|
| 1488 |
+
def expand_paths(inputs):  # noqa: C901 # is too complex (11) # FIXME
    """Yield sys.path directories that might contain "old-style" packages"""

    visited = set()

    for dirname in inputs:
        dirname = normalize_path(dirname)
        if dirname in visited:
            continue
        visited.add(dirname)
        if not os.path.isdir(dirname):
            continue

        files = os.listdir(dirname)
        yield dirname, files

        # Follow any third-party .pth files found in this directory.
        for name in files:
            if not name.endswith('.pth'):
                # We only care about the .pth files
                continue
            if name in ('easy-install.pth', 'setuptools.pth'):
                # Ignore .pth files that we control
                continue

            # Read the .pth file and yield its non-dupe, non-import
            # directory lines.
            content = _read_pth(os.path.join(dirname, name))
            for line in yield_lines(content):
                if line.startswith("import"):
                    continue

                line = normalize_path(line.rstrip())
                if line in visited:
                    continue
                visited.add(line)
                if os.path.isdir(line):
                    yield line, os.listdir(line)
|
| 1533 |
+
def extract_wininst_cfg(dist_filename):
    """Extract configuration data from a bdist_wininst .exe

    Returns a configparser.RawConfigParser, or None
    """
    with open(dist_filename, 'rb') as f:
        endrec = zipfile._EndRecData(f)
        if endrec is None:
            return None

        # Bytes prepended before the zip payload hold the wininst header.
        prepended = (endrec[9] - endrec[5]) - endrec[6]
        if prepended < 12:  # no wininst data here
            return None
        f.seek(prepended - 12)

        tag, cfglen, _bmlen = struct.unpack("<iii", f.read(12))
        if tag not in (0x1234567A, 0x1234567B):
            return None  # not a valid tag

        f.seek(prepended - (12 + cfglen))
        init = {'version': '', 'target_version': ''}
        cfg = configparser.RawConfigParser(init)
        try:
            part = f.read(cfglen)
            # Read up to the first null byte.
            config = part.split(b'\0', 1)[0]
            # Now the config is in bytes, but for RawConfigParser, it should
            #  be text, so decode it.
            config = config.decode(sys.getfilesystemencoding())
            cfg.read_file(io.StringIO(config))
        except configparser.Error:
            return None
        if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
            return None
        return cfg
|
| 1574 |
+
def get_exe_prefixes(exe_filename):
    """Get exe->egg path translations for a given .exe file"""

    prefixes = [
        ('PURELIB/', ''),
        ('PLATLIB/pywin32_system32', ''),
        ('PLATLIB/', ''),
        ('SCRIPTS/', 'EGG-INFO/scripts/'),
        ('DATA/lib/site-packages', ''),
    ]
    with zipfile.ZipFile(exe_filename) as archive:
        for info in archive.infolist():
            member = info.filename
            parts = member.split('/')
            # An .egg-info/PKG-INFO member maps the whole .egg-info dir.
            if len(parts) == 3 and parts[2] == 'PKG-INFO':
                if parts[1].endswith('.egg-info'):
                    prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
                    break
            if len(parts) != 2 or not member.endswith('.pth'):
                continue
            if member.endswith('-nspkg.pth'):
                continue
            # Each non-import .pth line adds another translated prefix.
            if parts[0].upper() in ('PURELIB', 'PLATLIB'):
                contents = archive.read(member).decode()
                for pth in yield_lines(contents):
                    pth = pth.strip().replace('\\', '/')
                    if not pth.startswith('import'):
                        prefixes.append(((f'{parts[0]}/{pth}/'), ''))
    # Longest (most specific) prefixes first, lowercased for matching.
    prefixes = [(x.lower(), y) for x, y in prefixes]
    prefixes.sort()
    prefixes.reverse()
    return prefixes
|
| 1611 |
+
class PthDistributions(Environment):
|
| 1612 |
+
"""A .pth file with Distribution paths in it"""
|
| 1613 |
+
|
| 1614 |
+
def __init__(self, filename, sitedirs=()) -> None:
    """Load *filename* (a .pth file) and index the distributions it lists.

    *sitedirs* are paths considered already-on-sys.path and therefore
    pruned from the file on load.
    """
    self.filename = filename
    self.sitedirs = list(map(normalize_path, sitedirs))
    self.basedir = normalize_path(os.path.dirname(self.filename))
    self.paths, self.dirty = self._load()
    # keep a copy if someone manually updates the paths attribute on the instance
    self._init_paths = self.paths[:]
    super().__init__([], None, None)
    for path in yield_lines(self.paths):
        list(map(self.add, find_distributions(path, True)))
| 1625 |
+
def _load_raw(self):
    """Parse self.filename, returning ``(paths, dirty)``.

    ``dirty`` is True when the file needs rewriting: stale/duplicate
    entries were pruned, trailing blanks removed, or real paths coexist
    with import lines.
    """
    paths = []
    dirty = saw_import = False
    seen = set(self.sitedirs)
    content = _read_pth(self.filename)
    for line in content.splitlines():
        path = line.rstrip()
        # still keep imports and empty/commented lines for formatting
        paths.append(path)
        if line.startswith(('import ', 'from ')):
            saw_import = True
            continue
        stripped_path = path.strip()
        if not stripped_path or stripped_path.startswith('#'):
            continue
        # skip non-existent paths, in case somebody deleted a package
        # manually, and duplicate paths as well
        normalized_path = normalize_path(os.path.join(self.basedir, path))
        if normalized_path in seen or not os.path.exists(normalized_path):
            log.debug("cleaned up dirty or duplicated %r", path)
            dirty = True
            paths.pop()  # drop the line we just appended
            continue
        seen.add(normalized_path)
    # remove any trailing empty/blank line
    while paths and not paths[-1].strip():
        paths.pop()
        dirty = True
    return paths, dirty or (paths and saw_import)
|
| 1655 |
+
def _load(self):
|
| 1656 |
+
if os.path.isfile(self.filename):
|
| 1657 |
+
return self._load_raw()
|
| 1658 |
+
return [], False
|
| 1659 |
+
|
| 1660 |
+
def save(self) -> None:
|
| 1661 |
+
"""Write changed .pth file back to disk"""
|
| 1662 |
+
# first reload the file
|
| 1663 |
+
last_paths, last_dirty = self._load()
|
| 1664 |
+
# and check that there are no difference with what we have.
|
| 1665 |
+
# there can be difference if someone else has written to the file
|
| 1666 |
+
# since we first loaded it.
|
| 1667 |
+
# we don't want to lose the eventual new paths added since then.
|
| 1668 |
+
for path in last_paths[:]:
|
| 1669 |
+
if path not in self.paths:
|
| 1670 |
+
self.paths.append(path)
|
| 1671 |
+
log.info("detected new path %r", path)
|
| 1672 |
+
last_dirty = True
|
| 1673 |
+
else:
|
| 1674 |
+
last_paths.remove(path)
|
| 1675 |
+
# also, re-check that all paths are still valid before saving them
|
| 1676 |
+
for path in self.paths[:]:
|
| 1677 |
+
if path not in last_paths and not path.startswith((
|
| 1678 |
+
'import ',
|
| 1679 |
+
'from ',
|
| 1680 |
+
'#',
|
| 1681 |
+
)):
|
| 1682 |
+
absolute_path = os.path.join(self.basedir, path)
|
| 1683 |
+
if not os.path.exists(absolute_path):
|
| 1684 |
+
self.paths.remove(path)
|
| 1685 |
+
log.info("removing now non-existent path %r", path)
|
| 1686 |
+
last_dirty = True
|
| 1687 |
+
|
| 1688 |
+
self.dirty |= last_dirty or self.paths != self._init_paths
|
| 1689 |
+
if not self.dirty:
|
| 1690 |
+
return
|
| 1691 |
+
|
| 1692 |
+
rel_paths = list(map(self.make_relative, self.paths))
|
| 1693 |
+
if rel_paths:
|
| 1694 |
+
log.debug("Saving %s", self.filename)
|
| 1695 |
+
lines = self._wrap_lines(rel_paths)
|
| 1696 |
+
data = '\n'.join(lines) + '\n'
|
| 1697 |
+
if os.path.islink(self.filename):
|
| 1698 |
+
os.unlink(self.filename)
|
| 1699 |
+
with open(self.filename, 'wt', encoding=py312.PTH_ENCODING) as f:
|
| 1700 |
+
# ^-- Python<3.13 require encoding="locale" instead of "utf-8",
|
| 1701 |
+
# see python/cpython#77102.
|
| 1702 |
+
f.write(data)
|
| 1703 |
+
elif os.path.exists(self.filename):
|
| 1704 |
+
log.debug("Deleting empty %s", self.filename)
|
| 1705 |
+
os.unlink(self.filename)
|
| 1706 |
+
|
| 1707 |
+
self.dirty = False
|
| 1708 |
+
self._init_paths[:] = self.paths[:]
|
| 1709 |
+
|
| 1710 |
+
@staticmethod
|
| 1711 |
+
def _wrap_lines(lines):
|
| 1712 |
+
return lines
|
| 1713 |
+
|
| 1714 |
+
def add(self, dist) -> None:
|
| 1715 |
+
"""Add `dist` to the distribution map"""
|
| 1716 |
+
new_path = dist.location not in self.paths and (
|
| 1717 |
+
dist.location not in self.sitedirs
|
| 1718 |
+
or
|
| 1719 |
+
# account for '.' being in PYTHONPATH
|
| 1720 |
+
dist.location == os.getcwd()
|
| 1721 |
+
)
|
| 1722 |
+
if new_path:
|
| 1723 |
+
self.paths.append(dist.location)
|
| 1724 |
+
self.dirty = True
|
| 1725 |
+
super().add(dist)
|
| 1726 |
+
|
| 1727 |
+
def remove(self, dist) -> None:
|
| 1728 |
+
"""Remove `dist` from the distribution map"""
|
| 1729 |
+
while dist.location in self.paths:
|
| 1730 |
+
self.paths.remove(dist.location)
|
| 1731 |
+
self.dirty = True
|
| 1732 |
+
super().remove(dist)
|
| 1733 |
+
|
| 1734 |
+
def make_relative(self, path):
|
| 1735 |
+
npath, last = os.path.split(normalize_path(path))
|
| 1736 |
+
baselen = len(self.basedir)
|
| 1737 |
+
parts = [last]
|
| 1738 |
+
sep = os.altsep == '/' and '/' or os.sep
|
| 1739 |
+
while len(npath) >= baselen:
|
| 1740 |
+
if npath == self.basedir:
|
| 1741 |
+
parts.append(os.curdir)
|
| 1742 |
+
parts.reverse()
|
| 1743 |
+
return sep.join(parts)
|
| 1744 |
+
npath, last = os.path.split(npath)
|
| 1745 |
+
parts.append(last)
|
| 1746 |
+
else:
|
| 1747 |
+
return path
|
| 1748 |
+
|
| 1749 |
+
|
| 1750 |
+
class RewritePthDistributions(PthDistributions):
|
| 1751 |
+
@classmethod
|
| 1752 |
+
def _wrap_lines(cls, lines):
|
| 1753 |
+
yield cls.prelude
|
| 1754 |
+
yield from lines
|
| 1755 |
+
yield cls.postlude
|
| 1756 |
+
|
| 1757 |
+
prelude = _one_liner(
|
| 1758 |
+
"""
|
| 1759 |
+
import sys
|
| 1760 |
+
sys.__plen = len(sys.path)
|
| 1761 |
+
"""
|
| 1762 |
+
)
|
| 1763 |
+
postlude = _one_liner(
|
| 1764 |
+
"""
|
| 1765 |
+
import sys
|
| 1766 |
+
new = sys.path[sys.__plen:]
|
| 1767 |
+
del sys.path[sys.__plen:]
|
| 1768 |
+
p = getattr(sys, '__egginsert', 0)
|
| 1769 |
+
sys.path[p:p] = new
|
| 1770 |
+
sys.__egginsert = p + len(new)
|
| 1771 |
+
"""
|
| 1772 |
+
)
|
| 1773 |
+
|
| 1774 |
+
|
| 1775 |
+
if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite':
|
| 1776 |
+
PthDistributions = RewritePthDistributions # type: ignore[misc] # Overwriting type
|
| 1777 |
+
|
| 1778 |
+
|
| 1779 |
+
def _first_line_re():
|
| 1780 |
+
"""
|
| 1781 |
+
Return a regular expression based on first_line_re suitable for matching
|
| 1782 |
+
strings.
|
| 1783 |
+
"""
|
| 1784 |
+
if isinstance(first_line_re.pattern, str):
|
| 1785 |
+
return first_line_re
|
| 1786 |
+
|
| 1787 |
+
# first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
|
| 1788 |
+
return re.compile(first_line_re.pattern.decode())
|
| 1789 |
+
|
| 1790 |
+
|
| 1791 |
+
def update_dist_caches(dist_path, fix_zipimporter_caches):
|
| 1792 |
+
"""
|
| 1793 |
+
Fix any globally cached `dist_path` related data
|
| 1794 |
+
|
| 1795 |
+
`dist_path` should be a path of a newly installed egg distribution (zipped
|
| 1796 |
+
or unzipped).
|
| 1797 |
+
|
| 1798 |
+
sys.path_importer_cache contains finder objects that have been cached when
|
| 1799 |
+
importing data from the original distribution. Any such finders need to be
|
| 1800 |
+
cleared since the replacement distribution might be packaged differently,
|
| 1801 |
+
e.g. a zipped egg distribution might get replaced with an unzipped egg
|
| 1802 |
+
folder or vice versa. Having the old finders cached may then cause Python
|
| 1803 |
+
to attempt loading modules from the replacement distribution using an
|
| 1804 |
+
incorrect loader.
|
| 1805 |
+
|
| 1806 |
+
zipimport.zipimporter objects are Python loaders charged with importing
|
| 1807 |
+
data packaged inside zip archives. If stale loaders referencing the
|
| 1808 |
+
original distribution, are left behind, they can fail to load modules from
|
| 1809 |
+
the replacement distribution. E.g. if an old zipimport.zipimporter instance
|
| 1810 |
+
is used to load data from a new zipped egg archive, it may cause the
|
| 1811 |
+
operation to attempt to locate the requested data in the wrong location -
|
| 1812 |
+
one indicated by the original distribution's zip archive directory
|
| 1813 |
+
information. Such an operation may then fail outright, e.g. report having
|
| 1814 |
+
read a 'bad local file header', or even worse, it may fail silently &
|
| 1815 |
+
return invalid data.
|
| 1816 |
+
|
| 1817 |
+
zipimport._zip_directory_cache contains cached zip archive directory
|
| 1818 |
+
information for all existing zipimport.zipimporter instances and all such
|
| 1819 |
+
instances connected to the same archive share the same cached directory
|
| 1820 |
+
information.
|
| 1821 |
+
|
| 1822 |
+
If asked, and the underlying Python implementation allows it, we can fix
|
| 1823 |
+
all existing zipimport.zipimporter instances instead of having to track
|
| 1824 |
+
them down and remove them one by one, by updating their shared cached zip
|
| 1825 |
+
archive directory information. This, of course, assumes that the
|
| 1826 |
+
replacement distribution is packaged as a zipped egg.
|
| 1827 |
+
|
| 1828 |
+
If not asked to fix existing zipimport.zipimporter instances, we still do
|
| 1829 |
+
our best to clear any remaining zipimport.zipimporter related cached data
|
| 1830 |
+
that might somehow later get used when attempting to load data from the new
|
| 1831 |
+
distribution and thus cause such load operations to fail. Note that when
|
| 1832 |
+
tracking down such remaining stale data, we can not catch every conceivable
|
| 1833 |
+
usage from here, and we clear only those that we know of and have found to
|
| 1834 |
+
cause problems if left alive. Any remaining caches should be updated by
|
| 1835 |
+
whomever is in charge of maintaining them, i.e. they should be ready to
|
| 1836 |
+
handle us replacing their zip archives with new distributions at runtime.
|
| 1837 |
+
|
| 1838 |
+
"""
|
| 1839 |
+
# There are several other known sources of stale zipimport.zipimporter
|
| 1840 |
+
# instances that we do not clear here, but might if ever given a reason to
|
| 1841 |
+
# do so:
|
| 1842 |
+
# * Global setuptools pkg_resources.working_set (a.k.a. 'master working
|
| 1843 |
+
# set') may contain distributions which may in turn contain their
|
| 1844 |
+
# zipimport.zipimporter loaders.
|
| 1845 |
+
# * Several zipimport.zipimporter loaders held by local variables further
|
| 1846 |
+
# up the function call stack when running the setuptools installation.
|
| 1847 |
+
# * Already loaded modules may have their __loader__ attribute set to the
|
| 1848 |
+
# exact loader instance used when importing them. Python 3.4 docs state
|
| 1849 |
+
# that this information is intended mostly for introspection and so is
|
| 1850 |
+
# not expected to cause us problems.
|
| 1851 |
+
normalized_path = normalize_path(dist_path)
|
| 1852 |
+
_uncache(normalized_path, sys.path_importer_cache)
|
| 1853 |
+
if fix_zipimporter_caches:
|
| 1854 |
+
_replace_zip_directory_cache_data(normalized_path)
|
| 1855 |
+
else:
|
| 1856 |
+
# Here, even though we do not want to fix existing and now stale
|
| 1857 |
+
# zipimporter cache information, we still want to remove it. Related to
|
| 1858 |
+
# Python's zip archive directory information cache, we clear each of
|
| 1859 |
+
# its stale entries in two phases:
|
| 1860 |
+
# 1. Clear the entry so attempting to access zip archive information
|
| 1861 |
+
# via any existing stale zipimport.zipimporter instances fails.
|
| 1862 |
+
# 2. Remove the entry from the cache so any newly constructed
|
| 1863 |
+
# zipimport.zipimporter instances do not end up using old stale
|
| 1864 |
+
# zip archive directory information.
|
| 1865 |
+
# This whole stale data removal step does not seem strictly necessary,
|
| 1866 |
+
# but has been left in because it was done before we started replacing
|
| 1867 |
+
# the zip archive directory information cache content if possible, and
|
| 1868 |
+
# there are no relevant unit tests that we can depend on to tell us if
|
| 1869 |
+
# this is really needed.
|
| 1870 |
+
_remove_and_clear_zip_directory_cache_data(normalized_path)
|
| 1871 |
+
|
| 1872 |
+
|
| 1873 |
+
def _collect_zipimporter_cache_entries(normalized_path, cache):
|
| 1874 |
+
"""
|
| 1875 |
+
Return zipimporter cache entry keys related to a given normalized path.
|
| 1876 |
+
|
| 1877 |
+
Alternative path spellings (e.g. those using different character case or
|
| 1878 |
+
those using alternative path separators) related to the same path are
|
| 1879 |
+
included. Any sub-path entries are included as well, i.e. those
|
| 1880 |
+
corresponding to zip archives embedded in other zip archives.
|
| 1881 |
+
|
| 1882 |
+
"""
|
| 1883 |
+
result = []
|
| 1884 |
+
prefix_len = len(normalized_path)
|
| 1885 |
+
for p in cache:
|
| 1886 |
+
np = normalize_path(p)
|
| 1887 |
+
if np.startswith(normalized_path) and np[prefix_len : prefix_len + 1] in (
|
| 1888 |
+
os.sep,
|
| 1889 |
+
'',
|
| 1890 |
+
):
|
| 1891 |
+
result.append(p)
|
| 1892 |
+
return result
|
| 1893 |
+
|
| 1894 |
+
|
| 1895 |
+
def _update_zipimporter_cache(normalized_path, cache, updater=None):
|
| 1896 |
+
"""
|
| 1897 |
+
Update zipimporter cache data for a given normalized path.
|
| 1898 |
+
|
| 1899 |
+
Any sub-path entries are processed as well, i.e. those corresponding to zip
|
| 1900 |
+
archives embedded in other zip archives.
|
| 1901 |
+
|
| 1902 |
+
Given updater is a callable taking a cache entry key and the original entry
|
| 1903 |
+
(after already removing the entry from the cache), and expected to update
|
| 1904 |
+
the entry and possibly return a new one to be inserted in its place.
|
| 1905 |
+
Returning None indicates that the entry should not be replaced with a new
|
| 1906 |
+
one. If no updater is given, the cache entries are simply removed without
|
| 1907 |
+
any additional processing, the same as if the updater simply returned None.
|
| 1908 |
+
|
| 1909 |
+
"""
|
| 1910 |
+
for p in _collect_zipimporter_cache_entries(normalized_path, cache):
|
| 1911 |
+
# N.B. pypy's custom zipimport._zip_directory_cache implementation does
|
| 1912 |
+
# not support the complete dict interface:
|
| 1913 |
+
# * Does not support item assignment, thus not allowing this function
|
| 1914 |
+
# to be used only for removing existing cache entries.
|
| 1915 |
+
# * Does not support the dict.pop() method, forcing us to use the
|
| 1916 |
+
# get/del patterns instead. For more detailed information see the
|
| 1917 |
+
# following links:
|
| 1918 |
+
# https://github.com/pypa/setuptools/issues/202#issuecomment-202913420
|
| 1919 |
+
# https://foss.heptapod.net/pypy/pypy/-/blob/144c4e65cb6accb8e592f3a7584ea38265d1873c/pypy/module/zipimport/interp_zipimport.py
|
| 1920 |
+
old_entry = cache[p]
|
| 1921 |
+
del cache[p]
|
| 1922 |
+
new_entry = updater and updater(p, old_entry)
|
| 1923 |
+
if new_entry is not None:
|
| 1924 |
+
cache[p] = new_entry
|
| 1925 |
+
|
| 1926 |
+
|
| 1927 |
+
def _uncache(normalized_path, cache):
|
| 1928 |
+
_update_zipimporter_cache(normalized_path, cache)
|
| 1929 |
+
|
| 1930 |
+
|
| 1931 |
+
def _remove_and_clear_zip_directory_cache_data(normalized_path):
|
| 1932 |
+
def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
|
| 1933 |
+
old_entry.clear()
|
| 1934 |
+
|
| 1935 |
+
_update_zipimporter_cache(
|
| 1936 |
+
normalized_path,
|
| 1937 |
+
zipimport._zip_directory_cache,
|
| 1938 |
+
updater=clear_and_remove_cached_zip_archive_directory_data,
|
| 1939 |
+
)
|
| 1940 |
+
|
| 1941 |
+
|
| 1942 |
+
# PyPy Python implementation does not allow directly writing to the
|
| 1943 |
+
# zipimport._zip_directory_cache and so prevents us from attempting to correct
|
| 1944 |
+
# its content. The best we can do there is clear the problematic cache content
|
| 1945 |
+
# and have PyPy repopulate it as needed. The downside is that if there are any
|
| 1946 |
+
# stale zipimport.zipimporter instances laying around, attempting to use them
|
| 1947 |
+
# will fail due to not having its zip archive directory information available
|
| 1948 |
+
# instead of being automatically corrected to use the new correct zip archive
|
| 1949 |
+
# directory information.
|
| 1950 |
+
if '__pypy__' in sys.builtin_module_names:
|
| 1951 |
+
_replace_zip_directory_cache_data = _remove_and_clear_zip_directory_cache_data
|
| 1952 |
+
else:
|
| 1953 |
+
|
| 1954 |
+
def _replace_zip_directory_cache_data(normalized_path):
|
| 1955 |
+
def replace_cached_zip_archive_directory_data(path, old_entry):
|
| 1956 |
+
# N.B. In theory, we could load the zip directory information just
|
| 1957 |
+
# once for all updated path spellings, and then copy it locally and
|
| 1958 |
+
# update its contained path strings to contain the correct
|
| 1959 |
+
# spelling, but that seems like a way too invasive move (this cache
|
| 1960 |
+
# structure is not officially documented anywhere and could in
|
| 1961 |
+
# theory change with new Python releases) for no significant
|
| 1962 |
+
# benefit.
|
| 1963 |
+
old_entry.clear()
|
| 1964 |
+
zipimport.zipimporter(path)
|
| 1965 |
+
old_entry.update(zipimport._zip_directory_cache[path])
|
| 1966 |
+
return old_entry
|
| 1967 |
+
|
| 1968 |
+
_update_zipimporter_cache(
|
| 1969 |
+
normalized_path,
|
| 1970 |
+
zipimport._zip_directory_cache,
|
| 1971 |
+
updater=replace_cached_zip_archive_directory_data,
|
| 1972 |
+
)
|
| 1973 |
+
|
| 1974 |
+
|
| 1975 |
+
def is_python(text, filename='<string>'):
|
| 1976 |
+
"Is this string a valid Python script?"
|
| 1977 |
+
try:
|
| 1978 |
+
compile(text, filename, 'exec')
|
| 1979 |
+
except (SyntaxError, TypeError):
|
| 1980 |
+
return False
|
| 1981 |
+
else:
|
| 1982 |
+
return True
|
| 1983 |
+
|
| 1984 |
+
|
| 1985 |
+
def is_sh(executable):
|
| 1986 |
+
"""Determine if the specified executable is a .sh (contains a #! line)"""
|
| 1987 |
+
try:
|
| 1988 |
+
with open(executable, encoding='latin-1') as fp:
|
| 1989 |
+
magic = fp.read(2)
|
| 1990 |
+
except OSError:
|
| 1991 |
+
return executable
|
| 1992 |
+
return magic == '#!'
|
| 1993 |
+
|
| 1994 |
+
|
| 1995 |
+
def nt_quote_arg(arg):
|
| 1996 |
+
"""Quote a command line argument according to Windows parsing rules"""
|
| 1997 |
+
return subprocess.list2cmdline([arg])
|
| 1998 |
+
|
| 1999 |
+
|
| 2000 |
+
def is_python_script(script_text, filename):
|
| 2001 |
+
"""Is this text, as a whole, a Python script? (as opposed to shell/bat/etc."""
|
| 2002 |
+
if filename.endswith('.py') or filename.endswith('.pyw'):
|
| 2003 |
+
return True # extension says it's Python
|
| 2004 |
+
if is_python(script_text, filename):
|
| 2005 |
+
return True # it's syntactically valid Python
|
| 2006 |
+
if script_text.startswith('#!'):
|
| 2007 |
+
# It begins with a '#!' line, so check if 'python' is in it somewhere
|
| 2008 |
+
return 'python' in script_text.splitlines()[0].lower()
|
| 2009 |
+
|
| 2010 |
+
return False # Not any Python I can recognize
|
| 2011 |
+
|
| 2012 |
+
|
| 2013 |
+
class _SplitArgs(TypedDict, total=False):
|
| 2014 |
+
comments: bool
|
| 2015 |
+
posix: bool
|
| 2016 |
+
|
| 2017 |
+
|
| 2018 |
+
class CommandSpec(list):
|
| 2019 |
+
"""
|
| 2020 |
+
A command spec for a #! header, specified as a list of arguments akin to
|
| 2021 |
+
those passed to Popen.
|
| 2022 |
+
"""
|
| 2023 |
+
|
| 2024 |
+
options: list[str] = []
|
| 2025 |
+
split_args = _SplitArgs()
|
| 2026 |
+
|
| 2027 |
+
@classmethod
|
| 2028 |
+
def best(cls):
|
| 2029 |
+
"""
|
| 2030 |
+
Choose the best CommandSpec class based on environmental conditions.
|
| 2031 |
+
"""
|
| 2032 |
+
return cls
|
| 2033 |
+
|
| 2034 |
+
@classmethod
|
| 2035 |
+
def _sys_executable(cls):
|
| 2036 |
+
_default = os.path.normpath(sys.executable)
|
| 2037 |
+
return os.environ.get('__PYVENV_LAUNCHER__', _default)
|
| 2038 |
+
|
| 2039 |
+
@classmethod
|
| 2040 |
+
def from_param(cls, param: Self | str | Iterable[str] | None) -> Self:
|
| 2041 |
+
"""
|
| 2042 |
+
Construct a CommandSpec from a parameter to build_scripts, which may
|
| 2043 |
+
be None.
|
| 2044 |
+
"""
|
| 2045 |
+
if isinstance(param, cls):
|
| 2046 |
+
return param
|
| 2047 |
+
if isinstance(param, str):
|
| 2048 |
+
return cls.from_string(param)
|
| 2049 |
+
if isinstance(param, Iterable):
|
| 2050 |
+
return cls(param)
|
| 2051 |
+
if param is None:
|
| 2052 |
+
return cls.from_environment()
|
| 2053 |
+
raise TypeError(f"Argument has an unsupported type {type(param)}")
|
| 2054 |
+
|
| 2055 |
+
@classmethod
|
| 2056 |
+
def from_environment(cls):
|
| 2057 |
+
return cls([cls._sys_executable()])
|
| 2058 |
+
|
| 2059 |
+
@classmethod
|
| 2060 |
+
def from_string(cls, string: str) -> Self:
|
| 2061 |
+
"""
|
| 2062 |
+
Construct a command spec from a simple string representing a command
|
| 2063 |
+
line parseable by shlex.split.
|
| 2064 |
+
"""
|
| 2065 |
+
items = shlex.split(string, **cls.split_args)
|
| 2066 |
+
return cls(items)
|
| 2067 |
+
|
| 2068 |
+
def install_options(self, script_text: str):
|
| 2069 |
+
self.options = shlex.split(self._extract_options(script_text))
|
| 2070 |
+
cmdline = subprocess.list2cmdline(self)
|
| 2071 |
+
if not isascii(cmdline):
|
| 2072 |
+
self.options[:0] = ['-x']
|
| 2073 |
+
|
| 2074 |
+
@staticmethod
|
| 2075 |
+
def _extract_options(orig_script):
|
| 2076 |
+
"""
|
| 2077 |
+
Extract any options from the first line of the script.
|
| 2078 |
+
"""
|
| 2079 |
+
first = (orig_script + '\n').splitlines()[0]
|
| 2080 |
+
match = _first_line_re().match(first)
|
| 2081 |
+
options = match.group(1) or '' if match else ''
|
| 2082 |
+
return options.strip()
|
| 2083 |
+
|
| 2084 |
+
def as_header(self):
|
| 2085 |
+
return self._render(self + list(self.options))
|
| 2086 |
+
|
| 2087 |
+
@staticmethod
|
| 2088 |
+
def _strip_quotes(item):
|
| 2089 |
+
_QUOTES = '"\''
|
| 2090 |
+
for q in _QUOTES:
|
| 2091 |
+
if item.startswith(q) and item.endswith(q):
|
| 2092 |
+
return item[1:-1]
|
| 2093 |
+
return item
|
| 2094 |
+
|
| 2095 |
+
@staticmethod
|
| 2096 |
+
def _render(items):
|
| 2097 |
+
cmdline = subprocess.list2cmdline(
|
| 2098 |
+
CommandSpec._strip_quotes(item.strip()) for item in items
|
| 2099 |
+
)
|
| 2100 |
+
return '#!' + cmdline + '\n'
|
| 2101 |
+
|
| 2102 |
+
|
| 2103 |
+
# For pbr compat; will be removed in a future version.
|
| 2104 |
+
sys_executable = CommandSpec._sys_executable()
|
| 2105 |
+
|
| 2106 |
+
|
| 2107 |
+
class WindowsCommandSpec(CommandSpec):
|
| 2108 |
+
split_args = _SplitArgs(posix=False)
|
| 2109 |
+
|
| 2110 |
+
|
| 2111 |
+
class ScriptWriter:
|
| 2112 |
+
"""
|
| 2113 |
+
Encapsulates behavior around writing entry point scripts for console and
|
| 2114 |
+
gui apps.
|
| 2115 |
+
"""
|
| 2116 |
+
|
| 2117 |
+
template = textwrap.dedent(
|
| 2118 |
+
r"""
|
| 2119 |
+
# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
|
| 2120 |
+
import re
|
| 2121 |
+
import sys
|
| 2122 |
+
|
| 2123 |
+
# for compatibility with easy_install; see #2198
|
| 2124 |
+
__requires__ = %(spec)r
|
| 2125 |
+
|
| 2126 |
+
try:
|
| 2127 |
+
from importlib.metadata import distribution
|
| 2128 |
+
except ImportError:
|
| 2129 |
+
try:
|
| 2130 |
+
from importlib_metadata import distribution
|
| 2131 |
+
except ImportError:
|
| 2132 |
+
from pkg_resources import load_entry_point
|
| 2133 |
+
|
| 2134 |
+
|
| 2135 |
+
def importlib_load_entry_point(spec, group, name):
|
| 2136 |
+
dist_name, _, _ = spec.partition('==')
|
| 2137 |
+
matches = (
|
| 2138 |
+
entry_point
|
| 2139 |
+
for entry_point in distribution(dist_name).entry_points
|
| 2140 |
+
if entry_point.group == group and entry_point.name == name
|
| 2141 |
+
)
|
| 2142 |
+
return next(matches).load()
|
| 2143 |
+
|
| 2144 |
+
|
| 2145 |
+
globals().setdefault('load_entry_point', importlib_load_entry_point)
|
| 2146 |
+
|
| 2147 |
+
|
| 2148 |
+
if __name__ == '__main__':
|
| 2149 |
+
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
| 2150 |
+
sys.exit(load_entry_point(%(spec)r, %(group)r, %(name)r)())
|
| 2151 |
+
"""
|
| 2152 |
+
).lstrip()
|
| 2153 |
+
|
| 2154 |
+
command_spec_class = CommandSpec
|
| 2155 |
+
|
| 2156 |
+
@classmethod
|
| 2157 |
+
def get_args(cls, dist, header=None):
|
| 2158 |
+
"""
|
| 2159 |
+
Yield write_script() argument tuples for a distribution's
|
| 2160 |
+
console_scripts and gui_scripts entry points.
|
| 2161 |
+
"""
|
| 2162 |
+
if header is None:
|
| 2163 |
+
header = cls.get_header()
|
| 2164 |
+
spec = str(dist.as_requirement())
|
| 2165 |
+
for type_ in 'console', 'gui':
|
| 2166 |
+
group = type_ + '_scripts'
|
| 2167 |
+
for name in dist.get_entry_map(group).keys():
|
| 2168 |
+
cls._ensure_safe_name(name)
|
| 2169 |
+
script_text = cls.template % locals()
|
| 2170 |
+
args = cls._get_script_args(type_, name, header, script_text)
|
| 2171 |
+
yield from args
|
| 2172 |
+
|
| 2173 |
+
@staticmethod
|
| 2174 |
+
def _ensure_safe_name(name):
|
| 2175 |
+
"""
|
| 2176 |
+
Prevent paths in *_scripts entry point names.
|
| 2177 |
+
"""
|
| 2178 |
+
has_path_sep = re.search(r'[\\/]', name)
|
| 2179 |
+
if has_path_sep:
|
| 2180 |
+
raise ValueError("Path separators not allowed in script names")
|
| 2181 |
+
|
| 2182 |
+
@classmethod
|
| 2183 |
+
def best(cls):
|
| 2184 |
+
"""
|
| 2185 |
+
Select the best ScriptWriter for this environment.
|
| 2186 |
+
"""
|
| 2187 |
+
if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'):
|
| 2188 |
+
return WindowsScriptWriter.best()
|
| 2189 |
+
else:
|
| 2190 |
+
return cls
|
| 2191 |
+
|
| 2192 |
+
@classmethod
|
| 2193 |
+
def _get_script_args(cls, type_, name, header, script_text):
|
| 2194 |
+
# Simply write the stub with no extension.
|
| 2195 |
+
yield (name, header + script_text)
|
| 2196 |
+
|
| 2197 |
+
@classmethod
|
| 2198 |
+
def get_header(
|
| 2199 |
+
cls,
|
| 2200 |
+
script_text: str = "",
|
| 2201 |
+
executable: str | CommandSpec | Iterable[str] | None = None,
|
| 2202 |
+
) -> str:
|
| 2203 |
+
"""Create a #! line, getting options (if any) from script_text"""
|
| 2204 |
+
cmd = cls.command_spec_class.best().from_param(executable)
|
| 2205 |
+
cmd.install_options(script_text)
|
| 2206 |
+
return cmd.as_header()
|
| 2207 |
+
|
| 2208 |
+
|
| 2209 |
+
class WindowsScriptWriter(ScriptWriter):
|
| 2210 |
+
command_spec_class = WindowsCommandSpec
|
| 2211 |
+
|
| 2212 |
+
@classmethod
|
| 2213 |
+
def best(cls):
|
| 2214 |
+
"""
|
| 2215 |
+
Select the best ScriptWriter suitable for Windows
|
| 2216 |
+
"""
|
| 2217 |
+
writer_lookup = dict(
|
| 2218 |
+
executable=WindowsExecutableLauncherWriter,
|
| 2219 |
+
natural=cls,
|
| 2220 |
+
)
|
| 2221 |
+
# for compatibility, use the executable launcher by default
|
| 2222 |
+
launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
|
| 2223 |
+
return writer_lookup[launcher]
|
| 2224 |
+
|
| 2225 |
+
@classmethod
|
| 2226 |
+
def _get_script_args(cls, type_, name, header, script_text):
|
| 2227 |
+
"For Windows, add a .py extension"
|
| 2228 |
+
ext = dict(console='.pya', gui='.pyw')[type_]
|
| 2229 |
+
if ext not in os.environ['PATHEXT'].lower().split(';'):
|
| 2230 |
+
msg = (
|
| 2231 |
+
"{ext} not listed in PATHEXT; scripts will not be "
|
| 2232 |
+
"recognized as executables."
|
| 2233 |
+
).format(**locals())
|
| 2234 |
+
SetuptoolsWarning.emit(msg)
|
| 2235 |
+
old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
|
| 2236 |
+
old.remove(ext)
|
| 2237 |
+
header = cls._adjust_header(type_, header)
|
| 2238 |
+
blockers = [name + x for x in old]
|
| 2239 |
+
yield name + ext, header + script_text, 't', blockers
|
| 2240 |
+
|
| 2241 |
+
@classmethod
|
| 2242 |
+
def _adjust_header(cls, type_, orig_header):
|
| 2243 |
+
"""
|
| 2244 |
+
Make sure 'pythonw' is used for gui and 'python' is used for
|
| 2245 |
+
console (regardless of what sys.executable is).
|
| 2246 |
+
"""
|
| 2247 |
+
pattern = 'pythonw.exe'
|
| 2248 |
+
repl = 'python.exe'
|
| 2249 |
+
if type_ == 'gui':
|
| 2250 |
+
pattern, repl = repl, pattern
|
| 2251 |
+
pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
|
| 2252 |
+
new_header = pattern_ob.sub(string=orig_header, repl=repl)
|
| 2253 |
+
return new_header if cls._use_header(new_header) else orig_header
|
| 2254 |
+
|
| 2255 |
+
@staticmethod
|
| 2256 |
+
def _use_header(new_header):
|
| 2257 |
+
"""
|
| 2258 |
+
Should _adjust_header use the replaced header?
|
| 2259 |
+
|
| 2260 |
+
On non-windows systems, always use. On
|
| 2261 |
+
Windows systems, only use the replaced header if it resolves
|
| 2262 |
+
to an executable on the system.
|
| 2263 |
+
"""
|
| 2264 |
+
clean_header = new_header[2:-1].strip('"')
|
| 2265 |
+
return sys.platform != 'win32' or shutil.which(clean_header)
|
| 2266 |
+
|
| 2267 |
+
|
| 2268 |
+
class WindowsExecutableLauncherWriter(WindowsScriptWriter):
|
| 2269 |
+
@classmethod
|
| 2270 |
+
def _get_script_args(cls, type_, name, header, script_text):
|
| 2271 |
+
"""
|
| 2272 |
+
For Windows, add a .py extension and an .exe launcher
|
| 2273 |
+
"""
|
| 2274 |
+
if type_ == 'gui':
|
| 2275 |
+
launcher_type = 'gui'
|
| 2276 |
+
ext = '-script.pyw'
|
| 2277 |
+
old = ['.pyw']
|
| 2278 |
+
else:
|
| 2279 |
+
launcher_type = 'cli'
|
| 2280 |
+
ext = '-script.py'
|
| 2281 |
+
old = ['.py', '.pyc', '.pyo']
|
| 2282 |
+
hdr = cls._adjust_header(type_, header)
|
| 2283 |
+
blockers = [name + x for x in old]
|
| 2284 |
+
yield (name + ext, hdr + script_text, 't', blockers)
|
| 2285 |
+
yield (
|
| 2286 |
+
name + '.exe',
|
| 2287 |
+
get_win_launcher(launcher_type),
|
| 2288 |
+
'b', # write in binary mode
|
| 2289 |
+
)
|
| 2290 |
+
if not is_64bit():
|
| 2291 |
+
# install a manifest for the launcher to prevent Windows
|
| 2292 |
+
# from detecting it as an installer (which it will for
|
| 2293 |
+
# launchers like easy_install.exe). Consider only
|
| 2294 |
+
# adding a manifest for launchers detected as installers.
|
| 2295 |
+
# See Distribute #143 for details.
|
| 2296 |
+
m_name = name + '.exe.manifest'
|
| 2297 |
+
yield (m_name, load_launcher_manifest(name), 't')
|
| 2298 |
+
|
| 2299 |
+
|
| 2300 |
+
def get_win_launcher(type):
|
| 2301 |
+
"""
|
| 2302 |
+
Load the Windows launcher (executable) suitable for launching a script.
|
| 2303 |
+
|
| 2304 |
+
`type` should be either 'cli' or 'gui'
|
| 2305 |
+
|
| 2306 |
+
Returns the executable as a byte string.
|
| 2307 |
+
"""
|
| 2308 |
+
launcher_fn = f'{type}.exe'
|
| 2309 |
+
if is_64bit():
|
| 2310 |
+
if get_platform() == "win-arm64":
|
| 2311 |
+
launcher_fn = launcher_fn.replace(".", "-arm64.")
|
| 2312 |
+
else:
|
| 2313 |
+
launcher_fn = launcher_fn.replace(".", "-64.")
|
| 2314 |
+
else:
|
| 2315 |
+
launcher_fn = launcher_fn.replace(".", "-32.")
|
| 2316 |
+
return resource_string('setuptools', launcher_fn)
|
| 2317 |
+
|
| 2318 |
+
|
| 2319 |
+
def load_launcher_manifest(name):
|
| 2320 |
+
manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
|
| 2321 |
+
return manifest.decode('utf-8') % vars()
|
| 2322 |
+
|
| 2323 |
+
|
| 2324 |
+
def current_umask():
|
| 2325 |
+
tmp = os.umask(0o022)
|
| 2326 |
+
os.umask(tmp)
|
| 2327 |
+
return tmp
|
| 2328 |
+
|
| 2329 |
+
|
| 2330 |
+
def only_strs(values):
|
| 2331 |
+
"""
|
| 2332 |
+
Exclude non-str values. Ref #3063.
|
| 2333 |
+
"""
|
| 2334 |
+
return filter(lambda val: isinstance(val, str), values)
|
| 2335 |
+
|
| 2336 |
+
|
| 2337 |
+
def _read_pth(fullname: str) -> str:
|
| 2338 |
+
# Python<3.13 require encoding="locale" instead of "utf-8", see python/cpython#77102
|
| 2339 |
+
# In the case old versions of setuptools are producing `pth` files with
|
| 2340 |
+
# different encodings that might be problematic... So we fallback to "locale".
|
| 2341 |
+
|
| 2342 |
+
try:
|
| 2343 |
+
with open(fullname, encoding=py312.PTH_ENCODING) as f:
|
| 2344 |
+
return f.read()
|
| 2345 |
+
except UnicodeDecodeError: # pragma: no cover
|
| 2346 |
+
# This error may only happen for Python >= 3.13
|
| 2347 |
+
# TODO: Possible deprecation warnings to be added in the future:
|
| 2348 |
+
# ``.pth file {fullname!r} is not UTF-8.``
|
| 2349 |
+
# Your environment contain {fullname!r} that cannot be read as UTF-8.
|
| 2350 |
+
# This is likely to have been produced with an old version of setuptools.
|
| 2351 |
+
# Please be mindful that this is deprecated and in the future, non-utf8
|
| 2352 |
+
# .pth files may cause setuptools to fail.
|
| 2353 |
+
with open(fullname, encoding=py39.LOCALE_ENCODING) as f:
|
| 2354 |
+
return f.read()
|
| 2355 |
+
|
| 2356 |
+
|
| 2357 |
+
class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning):
|
| 2358 |
+
_SUMMARY = "easy_install command is deprecated."
|
| 2359 |
+
_DETAILS = """
|
| 2360 |
+
Please avoid running ``setup.py`` and ``easy_install``.
|
| 2361 |
+
Instead, use pypa/build, pypa/installer or other
|
| 2362 |
+
standards-based tools.
|
| 2363 |
+
"""
|
| 2364 |
+
_SEE_URL = "https://github.com/pypa/setuptools/issues/917"
|
| 2365 |
+
# _DUE_DATE not defined yet
|
falcon/lib/python3.10/site-packages/setuptools/command/editable_wheel.py
ADDED
|
@@ -0,0 +1,925 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Create a wheel that, when installed, will make the source package 'editable'
|
| 3 |
+
(add it to the interpreter's path, including metadata) per PEP 660. Replaces
|
| 4 |
+
'setup.py develop'.
|
| 5 |
+
|
| 6 |
+
.. note::
|
| 7 |
+
One of the mechanisms briefly mentioned in PEP 660 to implement editable installs is
|
| 8 |
+
to create a separated directory inside ``build`` and use a .pth file to point to that
|
| 9 |
+
directory. In the context of this file such directory is referred as
|
| 10 |
+
*auxiliary build directory* or ``auxiliary_dir``.
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
from __future__ import annotations
|
| 14 |
+
|
| 15 |
+
import io
|
| 16 |
+
import logging
|
| 17 |
+
import os
|
| 18 |
+
import shutil
|
| 19 |
+
import traceback
|
| 20 |
+
from collections.abc import Iterable, Iterator, Mapping
|
| 21 |
+
from contextlib import suppress
|
| 22 |
+
from enum import Enum
|
| 23 |
+
from inspect import cleandoc
|
| 24 |
+
from itertools import chain, starmap
|
| 25 |
+
from pathlib import Path
|
| 26 |
+
from tempfile import TemporaryDirectory
|
| 27 |
+
from types import TracebackType
|
| 28 |
+
from typing import TYPE_CHECKING, Protocol, TypeVar, cast
|
| 29 |
+
|
| 30 |
+
from .. import Command, _normalization, _path, _shutil, errors, namespaces
|
| 31 |
+
from .._path import StrPath
|
| 32 |
+
from ..compat import py312
|
| 33 |
+
from ..discovery import find_package_path
|
| 34 |
+
from ..dist import Distribution
|
| 35 |
+
from ..warnings import InformationOnly, SetuptoolsDeprecationWarning, SetuptoolsWarning
|
| 36 |
+
from .build import build as build_cls
|
| 37 |
+
from .build_py import build_py as build_py_cls
|
| 38 |
+
from .dist_info import dist_info as dist_info_cls
|
| 39 |
+
from .egg_info import egg_info as egg_info_cls
|
| 40 |
+
from .install import install as install_cls
|
| 41 |
+
from .install_scripts import install_scripts as install_scripts_cls
|
| 42 |
+
|
| 43 |
+
if TYPE_CHECKING:
|
| 44 |
+
from typing_extensions import Self
|
| 45 |
+
|
| 46 |
+
from .._vendor.wheel.wheelfile import WheelFile
|
| 47 |
+
|
| 48 |
+
_P = TypeVar("_P", bound=StrPath)
|
| 49 |
+
_logger = logging.getLogger(__name__)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class _EditableMode(Enum):
    """
    Possible editable installation modes:
    `lenient` (new files automatically added to the package - DEFAULT);
    `strict` (requires a new installation when files are added/removed); or
    `compat` (attempts to emulate `python setup.py develop` - DEPRECATED).
    """

    STRICT = "strict"
    LENIENT = "lenient"
    COMPAT = "compat"  # TODO: Remove `compat` after Dec/2022.

    @classmethod
    def convert(cls, mode: str | None) -> _EditableMode:
        """Normalize a user-provided mode string into an enum member.

        Falsy values (``None``, ``""``) select the LENIENT default; an
        unrecognized string raises ``errors.OptionError``; "compat" emits a
        deprecation warning before being accepted.
        """
        if not mode:
            return _EditableMode.LENIENT  # default

        _mode = mode.upper()
        if _mode not in _EditableMode.__members__:
            raise errors.OptionError(f"Invalid editable mode: {mode!r}. Try: 'strict'.")

        if _mode == "COMPAT":
            SetuptoolsDeprecationWarning.emit(
                "Compat editable installs",
                """
                The 'compat' editable mode is transitional and will be removed
                in future versions of `setuptools`.
                Please adapt your code accordingly to use either the 'strict' or the
                'lenient' modes.
                """,
                see_docs="userguide/development_mode.html",
                # TODO: define due_date
                # There is a series of shortcomings with the available editable install
                # methods, and they are very controversial. This is something that still
                # needs work.
            )

        return _EditableMode[_mode]
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
# Caveat texts appended to the log message each strategy emits on __enter__.
_STRICT_WARNING = """
New or renamed files may not be automatically picked up without a new installation.
"""

_LENIENT_WARNING = """
Options like `package-data`, `include/exclude-package-data` or
`packages.find.exclude/include` may have no effect.
"""
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class editable_wheel(Command):
    """Build 'editable' wheel for development.
    This command is private and reserved for internal use of setuptools,
    users should rely on ``setuptools.build_meta`` APIs.
    """

    description = "DO NOT CALL DIRECTLY, INTERNAL ONLY: create PEP 660 editable wheel"

    user_options = [
        ("dist-dir=", "d", "directory to put final built distributions in"),
        ("dist-info-dir=", "I", "path to a pre-build .dist-info directory"),
        ("mode=", None, cleandoc(_EditableMode.__doc__ or "")),
    ]

    def initialize_options(self):
        # All options start unset; finalize_options/_ensure_dist_info fill them in.
        self.dist_dir = None
        self.dist_info_dir = None
        self.project_dir = None
        self.mode = None

    def finalize_options(self) -> None:
        dist = self.distribution
        self.project_dir = dist.src_root or os.curdir
        self.package_dir = dist.package_dir or {}
        # Default output location: <project>/dist
        self.dist_dir = Path(self.dist_dir or os.path.join(self.project_dir, "dist"))

    def run(self) -> None:
        """Build the editable wheel; on any failure, print debugging tips and re-raise."""
        try:
            self.dist_dir.mkdir(exist_ok=True)
            self._ensure_dist_info()

            # Add missing dist_info files
            self.reinitialize_command("bdist_wheel")
            bdist_wheel = self.get_finalized_command("bdist_wheel")
            bdist_wheel.write_wheelfile(self.dist_info_dir)

            self._create_wheel_file(bdist_wheel)
        except Exception:
            traceback.print_exc()
            project = self.distribution.name or self.distribution.get_name()
            _DebuggingTips.emit(project=project)
            raise

    def _ensure_dist_info(self):
        # Generate a .dist-info directory if none was supplied; otherwise
        # sanity-check the pre-built one.
        if self.dist_info_dir is None:
            dist_info = cast(dist_info_cls, self.reinitialize_command("dist_info"))
            dist_info.output_dir = self.dist_dir
            dist_info.ensure_finalized()
            dist_info.run()
            self.dist_info_dir = dist_info.dist_info_dir
        else:
            assert str(self.dist_info_dir).endswith(".dist-info")
            assert Path(self.dist_info_dir, "METADATA").exists()

    def _install_namespaces(self, installation_dir, pth_prefix):
        # XXX: Only required to support the deprecated namespace practice
        dist = self.distribution
        if not dist.namespace_packages:
            return

        src_root = Path(self.project_dir, self.package_dir.get("", ".")).resolve()
        installer = _NamespaceInstaller(dist, installation_dir, pth_prefix, src_root)
        installer.install_namespaces()

    def _find_egg_info_dir(self) -> str | None:
        # The egg-info (when present) is expected as a sibling of dist-info.
        parent_dir = Path(self.dist_info_dir).parent if self.dist_info_dir else Path()
        candidates = map(str, parent_dir.glob("*.egg-info"))
        return next(candidates, None)

    def _configure_build(
        self, name: str, unpacked_wheel: StrPath, build_lib: StrPath, tmp_dir: StrPath
    ):
        """Configure commands to behave in the following ways:

        - Build commands can write to ``build_lib`` if they really want to...
          (but this folder is expected to be ignored and modules are expected to live
          in the project directory...)
        - Binary extensions should be built in-place (editable_mode = True)
        - Data/header/script files are not part of the "editable" specification
          so they are written directly to the unpacked_wheel directory.
        """
        # Non-editable files (data, headers, scripts) are written directly to the
        # unpacked_wheel

        dist = self.distribution
        wheel = str(unpacked_wheel)
        build_lib = str(build_lib)
        data = str(Path(unpacked_wheel, f"{name}.data", "data"))
        headers = str(Path(unpacked_wheel, f"{name}.data", "headers"))
        scripts = str(Path(unpacked_wheel, f"{name}.data", "scripts"))

        # egg-info may be generated again to create a manifest (used for package data)
        egg_info = cast(
            egg_info_cls, dist.reinitialize_command("egg_info", reinit_subcommands=True)
        )
        egg_info.egg_base = str(tmp_dir)
        egg_info.ignore_egg_info_in_manifest = True

        build = cast(
            build_cls, dist.reinitialize_command("build", reinit_subcommands=True)
        )
        install = cast(
            install_cls, dist.reinitialize_command("install", reinit_subcommands=True)
        )

        # Redirect all build/install targets into the temporary wheel layout.
        build.build_platlib = build.build_purelib = build.build_lib = build_lib
        install.install_purelib = install.install_platlib = install.install_lib = wheel
        install.install_scripts = build.build_scripts = scripts
        install.install_headers = headers
        install.install_data = data

        install_scripts = cast(
            install_scripts_cls, dist.get_command_obj("install_scripts")
        )
        install_scripts.no_ep = True

        build.build_temp = str(tmp_dir)

        build_py = cast(build_py_cls, dist.get_command_obj("build_py"))
        build_py.compile = False
        build_py.existing_egg_info_dir = self._find_egg_info_dir()

        self._set_editable_mode()

        build.ensure_finalized()
        install.ensure_finalized()

    def _set_editable_mode(self):
        """Set the ``editable_mode`` flag in the build sub-commands"""
        dist = self.distribution
        build = dist.get_command_obj("build")
        for cmd_name in build.get_sub_commands():
            cmd = dist.get_command_obj(cmd_name)
            if hasattr(cmd, "editable_mode"):
                cmd.editable_mode = True
            elif hasattr(cmd, "inplace"):
                cmd.inplace = True  # backward compatibility with distutils

    def _collect_build_outputs(self) -> tuple[list[str], dict[str, str]]:
        # Gather produced files and their source mapping from every build
        # sub-command that exposes them.
        files: list[str] = []
        mapping: dict[str, str] = {}
        build = self.get_finalized_command("build")

        for cmd_name in build.get_sub_commands():
            cmd = self.get_finalized_command(cmd_name)
            if hasattr(cmd, "get_outputs"):
                files.extend(cmd.get_outputs() or [])
            if hasattr(cmd, "get_output_mapping"):
                mapping.update(cmd.get_output_mapping() or {})

        return files, mapping

    def _run_build_commands(
        self,
        dist_name: str,
        unpacked_wheel: StrPath,
        build_lib: StrPath,
        tmp_dir: StrPath,
    ) -> tuple[list[str], dict[str, str]]:
        """Configure, build, collect outputs, then install non-editable categories."""
        self._configure_build(dist_name, unpacked_wheel, build_lib, tmp_dir)
        self._run_build_subcommands()
        files, mapping = self._collect_build_outputs()
        self._run_install("headers")
        self._run_install("scripts")
        self._run_install("data")
        return files, mapping

    def _run_build_subcommands(self) -> None:
        """
        Issue #3501 indicates that some plugins/customizations might rely on:

        1. ``build_py`` not running
        2. ``build_py`` always copying files to ``build_lib``

        However both these assumptions may be false in editable_wheel.
        This method implements a temporary workaround to support the ecosystem
        while the implementations catch up.
        """
        # TODO: Once plugins/customisations had the chance to catch up, replace
        #       `self._run_build_subcommands()` with `self.run_command("build")`.
        #       Also remove _safely_run, TestCustomBuildPy. Suggested date: Aug/2023.
        build = self.get_finalized_command("build")
        for name in build.get_sub_commands():
            cmd = self.get_finalized_command(name)
            # Third-party build_py overrides get the lenient (warn-and-continue)
            # treatment; everything else runs normally.
            if name == "build_py" and type(cmd) is not build_py_cls:
                self._safely_run(name)
            else:
                self.run_command(name)

    def _safely_run(self, cmd_name: str):
        # Run a sub-command, downgrading any failure to a deprecation warning.
        try:
            return self.run_command(cmd_name)
        except Exception:
            SetuptoolsDeprecationWarning.emit(
                "Customization incompatible with editable install",
                f"""
                {traceback.format_exc()}

                If you are seeing this warning it is very likely that a setuptools
                plugin or customization overrides the `{cmd_name}` command, without
                taking into consideration how editable installs run build steps
                starting from setuptools v64.0.0.

                Plugin authors and developers relying on custom build steps are
                encouraged to update their `{cmd_name}` implementation considering the
                information about editable installs in
                https://setuptools.pypa.io/en/latest/userguide/extension.html.

                For the time being `setuptools` will silence this error and ignore
                the faulty command, but this behaviour will change in future versions.
                """,
                # TODO: define due_date
                # There is a series of shortcomings with the available editable install
                # methods, and they are very controversial. This is something that still
                # needs work.
            )

    def _create_wheel_file(self, bdist_wheel):
        """Assemble the final ``.whl`` from dist-info, build outputs and strategy files."""
        from wheel.wheelfile import WheelFile

        dist_info = self.get_finalized_command("dist_info")
        dist_name = dist_info.name
        tag = "-".join(bdist_wheel.get_tag())
        build_tag = "0.editable"  # According to PEP 427 needs to start with digit
        archive_name = f"{dist_name}-{build_tag}-{tag}.whl"
        wheel_path = Path(self.dist_dir, archive_name)
        if wheel_path.exists():
            wheel_path.unlink()

        unpacked_wheel = TemporaryDirectory(suffix=archive_name)
        build_lib = TemporaryDirectory(suffix=".build-lib")
        build_tmp = TemporaryDirectory(suffix=".build-temp")

        with unpacked_wheel as unpacked, build_lib as lib, build_tmp as tmp:
            unpacked_dist_info = Path(unpacked, Path(self.dist_info_dir).name)
            shutil.copytree(self.dist_info_dir, unpacked_dist_info)
            self._install_namespaces(unpacked, dist_name)
            files, mapping = self._run_build_commands(dist_name, unpacked, lib, tmp)
            strategy = self._select_strategy(dist_name, tag, lib)
            with strategy, WheelFile(wheel_path, "w") as wheel_obj:
                strategy(wheel_obj, files, mapping)
                wheel_obj.write_files(unpacked)

        return wheel_path

    def _run_install(self, category: str):
        # Run install_<category> only when the distribution declares such files.
        has_category = getattr(self.distribution, f"has_{category}", None)
        if has_category and has_category():
            _logger.info(f"Installing {category} as non editable")
            self.run_command(f"install_{category}")

    def _select_strategy(
        self,
        name: str,
        tag: str,
        build_lib: StrPath,
    ) -> EditableStrategy:
        """Decides which strategy to use to implement an editable installation."""
        build_name = f"__editable__.{name}-{tag}"
        project_dir = Path(self.project_dir)
        mode = _EditableMode.convert(self.mode)

        if mode is _EditableMode.STRICT:
            auxiliary_dir = _empty_dir(Path(self.project_dir, "build", build_name))
            return _LinkTree(self.distribution, name, auxiliary_dir, build_lib)

        packages = _find_packages(self.distribution)
        has_simple_layout = _simple_layout(packages, self.package_dir, project_dir)
        is_compat_mode = mode is _EditableMode.COMPAT
        if set(self.package_dir) == {""} and has_simple_layout or is_compat_mode:
            # src-layout(ish) is relatively safe for a simple pth file
            src_dir = self.package_dir.get("", ".")
            return _StaticPth(self.distribution, name, [Path(project_dir, src_dir)])

        # Use a MetaPathFinder to avoid adding accidental top-level packages/modules
        return _TopLevelFinder(self.distribution, name)
|
| 379 |
+
|
| 380 |
+
|
| 381 |
+
class EditableStrategy(Protocol):
    """Structural interface every editable-install strategy must satisfy.

    A strategy is a context manager that, when called, writes its files
    (.pth / finder modules) into the wheel being assembled.
    """

    def __call__(
        self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]
    ) -> object: ...
    def __enter__(self) -> Self: ...
    def __exit__(
        self,
        _exc_type: type[BaseException] | None,
        _exc_value: BaseException | None,
        _traceback: TracebackType | None,
    ) -> object: ...
|
| 392 |
+
|
| 393 |
+
|
| 394 |
+
class _StaticPth:
    """Editable strategy that writes a plain ``.pth`` file extending ``sys.path``."""

    def __init__(self, dist: Distribution, name: str, path_entries: list[Path]) -> None:
        self.dist = dist
        self.name = name
        self.path_entries = path_entries

    def __call__(self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]):
        # One absolute path per line, with a trailing newline.
        resolved = [str(entry.resolve()) for entry in self.path_entries]
        payload = _encode_pth("\n".join(resolved) + "\n")
        wheel.writestr(f"__editable__.{self.name}.pth", payload)

    def __enter__(self) -> Self:
        entries_repr = list(map(os.fspath, self.path_entries))
        msg = f"""
        Editable install will be performed using .pth file to extend `sys.path` with:
        {entries_repr!r}
        """
        _logger.warning(msg + _LENIENT_WARNING)
        return self

    def __exit__(
        self,
        _exc_type: object,
        _exc_value: object,
        _traceback: object,
    ) -> None:
        # Nothing to clean up for the static .pth strategy.
        pass
|
| 420 |
+
|
| 421 |
+
|
| 422 |
+
class _LinkTree(_StaticPth):
    """
    Creates a ``.pth`` file that points to a link tree in the ``auxiliary_dir``.

    This strategy will only link files (not dirs), so it can be implemented in
    any OS, even if that means using hardlinks instead of symlinks.

    By collocating ``auxiliary_dir`` and the original source code, limitations
    with hardlinks should be avoided.
    """

    def __init__(
        self,
        dist: Distribution,
        name: str,
        auxiliary_dir: StrPath,
        build_lib: StrPath,
    ) -> None:
        self.auxiliary_dir = Path(auxiliary_dir)
        self.build_lib = Path(build_lib).resolve()
        # Reuse build_py's copy_file so link creation honors distutils semantics.
        self._file = dist.get_command_obj("build_py").copy_file
        super().__init__(dist, name, [self.auxiliary_dir])

    def __call__(self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]):
        # Create the link tree first, then write the .pth file pointing at it.
        self._create_links(files, mapping)
        super().__call__(wheel, files, mapping)

    def _normalize_output(self, file: str) -> str | None:
        # Files relative to build_lib will be normalized to None
        with suppress(ValueError):
            path = Path(file).resolve().relative_to(self.build_lib)
            return str(path).replace(os.sep, '/')
        return None

    def _create_file(self, relative_output: str, src_file: str, link=None):
        # Materialize one entry of the link tree, creating parent dirs on demand.
        dest = self.auxiliary_dir / relative_output
        if not dest.parent.is_dir():
            dest.parent.mkdir(parents=True)
        self._file(src_file, dest, link=link)

    def _create_links(self, outputs, output_mapping: Mapping[str, str]):
        self.auxiliary_dir.mkdir(parents=True, exist_ok=True)
        # Prefer symlinks; fall back to hardlinks when the FS cannot symlink.
        link_type = "sym" if _can_symlink_files(self.auxiliary_dir) else "hard"
        normalised = ((self._normalize_output(k), v) for k, v in output_mapping.items())
        # remove files that are not relative to build_lib
        mappings = {k: v for k, v in normalised if k is not None}

        # Outputs without a source mapping are plain copies (no link).
        for output in outputs:
            relative = self._normalize_output(output)
            if relative and relative not in mappings:
                self._create_file(relative, output)

        for relative, src in mappings.items():
            self._create_file(relative, src, link=link_type)

    def __enter__(self) -> Self:
        msg = "Strict editable install will be performed using a link tree.\n"
        _logger.warning(msg + _STRICT_WARNING)
        return self

    def __exit__(
        self,
        _exc_type: object,
        _exc_value: object,
        _traceback: object,
    ) -> None:
        msg = f"""\n
        Strict editable installation performed using the auxiliary directory:
            {self.auxiliary_dir}

        Please be careful to not remove this directory, otherwise you might not be able
        to import/use your package.
        """
        InformationOnly.emit("Editable installation.", msg)
|
| 496 |
+
|
| 497 |
+
|
| 498 |
+
class _TopLevelFinder:
    """Editable strategy installing a custom ``MetaPathFinder`` via a ``.pth`` file."""

    def __init__(self, dist: Distribution, name: str) -> None:
        self.dist = dist
        self.name = name

    def template_vars(self) -> tuple[str, str, dict[str, str], dict[str, list[str]]]:
        """Compute (finder identifier, module name, package mapping, namespaces)."""
        src_root = self.dist.src_root or os.curdir
        top_level = chain(_find_packages(self.dist), _find_top_level_modules(self.dist))
        package_dir = self.dist.package_dir or {}
        roots = _find_package_roots(top_level, package_dir, src_root)

        namespaces_ = dict(
            chain(
                _find_namespaces(self.dist.packages or [], roots),
                ((ns, []) for ns in _find_virtual_namespaces(roots)),
            )
        )

        legacy_namespaces = {
            pkg: find_package_path(pkg, roots, self.dist.src_root or "")
            for pkg in self.dist.namespace_packages or []
        }

        mapping = {**roots, **legacy_namespaces}
        # ^-- We need to explicitly add the legacy_namespaces to the mapping to be
        #     able to import their modules even if another package sharing the same
        #     namespace is installed in a conventional (non-editable) way.

        name = f"__editable__.{self.name}.finder"
        finder = _normalization.safe_identifier(name)
        return finder, name, mapping, namespaces_

    def get_implementation(self) -> Iterator[tuple[str, bytes]]:
        """Yield (archive filename, content) pairs for the finder module and .pth file."""
        finder, name, mapping, namespaces_ = self.template_vars()

        content = bytes(_finder_template(name, mapping, namespaces_), "utf-8")
        yield (f"{finder}.py", content)

        content = _encode_pth(f"import {finder}; {finder}.install()")
        yield (f"__editable__.{self.name}.pth", content)

    def __call__(self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]):
        for file, content in self.get_implementation():
            wheel.writestr(file, content)

    def __enter__(self) -> Self:
        msg = "Editable install will be performed using a meta path finder.\n"
        _logger.warning(msg + _LENIENT_WARNING)
        return self

    def __exit__(
        self,
        _exc_type: object,
        _exc_value: object,
        _traceback: object,
    ) -> None:
        msg = """\n
        Please be careful with folders in your working directory with the same
        name as your package as they may take precedence during imports.
        """
        InformationOnly.emit("Editable installation.", msg)
|
| 559 |
+
|
| 560 |
+
|
| 561 |
+
def _encode_pth(content: str) -> bytes:
    """
    Prior to Python 3.13 (see https://github.com/python/cpython/issues/77102),
    .pth files are always read with 'locale' encoding, the recommendation
    from the cpython core developers is to write them as ``open(path, "w")``
    and ignore warnings (see python/cpython#77102, pypa/setuptools#3937).
    This function tries to simulate this behaviour without having to create an
    actual file, in a way that supports a range of active Python versions.
    (There seems to be some variety in the way different version of Python handle
    ``encoding=None``, not all of them use ``locale.getpreferredencoding(False)``
    or ``locale.getencoding()``).

    :param content: text to encode as it would be written to a .pth file.
    :return: the encoded bytes.
    """
    with io.BytesIO() as buffer:
        # The wrapper is deliberately not closed: closing it would also close
        # ``buffer`` before we can read the encoded bytes back.
        wrapper = io.TextIOWrapper(buffer, encoding=py312.PTH_ENCODING)
        # TODO: Python 3.13 replace the whole function with `bytes(content, "utf-8")`
        wrapper.write(content)
        wrapper.flush()
        buffer.seek(0)
        return buffer.read()
|
| 580 |
+
|
| 581 |
+
|
| 582 |
+
def _can_symlink_files(base_dir: Path) -> bool:
    """Probe whether symlinks can be created inside ``base_dir``.

    Returns ``True`` when a working symlink was created, ``False`` when only
    hard links are available, and raises :class:`LinksNotSupported` when the
    file system supports neither.
    """
    with TemporaryDirectory(dir=str(base_dir.resolve())) as tmp:
        src = Path(tmp, "file1.txt")
        dst = Path(tmp, "file2.txt")
        src.write_text("file1", encoding="utf-8")
        with suppress(AttributeError, NotImplementedError, OSError):
            os.symlink(src, dst)
            if dst.is_symlink() and dst.read_text(encoding="utf-8") == "file1":
                return True

        try:
            # Ensure hard links can be created as a fallback
            os.link(src, dst)
        except Exception as ex:
            raise LinksNotSupported(
                "File system does not seem to support either symlinks or hard links. "
                "Strict editable installs require one of them to be supported."
            ) from ex
        return False
|
| 602 |
+
def _simple_layout(
    packages: Iterable[str], package_dir: dict[str, str], project_dir: StrPath
) -> bool:
    """Return ``True`` if:
    - all packages are contained by the same parent directory, **and**
    - all packages become importable if the parent directory is added to ``sys.path``.

    >>> _simple_layout(['a'], {"": "src"}, "/tmp/myproj")
    True
    >>> _simple_layout(['a', 'a.b'], {"": "src"}, "/tmp/myproj")
    True
    >>> _simple_layout(['a', 'a.b'], {}, "/tmp/myproj")
    True
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"": "src"}, "/tmp/myproj")
    True
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "a", "b": "b"}, ".")
    True
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "_a", "b": "_b"}, ".")
    False
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "_a"}, "/tmp/myproj")
    False
    >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a.a1.a2": "_a2"}, ".")
    False
    >>> _simple_layout(['a', 'a.b'], {"": "src", "a.b": "_ab"}, "/tmp/myproj")
    False
    >>> # Special cases, no packages yet:
    >>> _simple_layout([], {"": "src"}, "/tmp/myproj")
    True
    >>> _simple_layout([], {"a": "_a", "": "src"}, "/tmp/myproj")
    False
    """
    paths = {pkg: find_package_path(pkg, package_dir, project_dir) for pkg in packages}
    if not paths:
        # With no packages discovered yet, only a plain/src layout is "simple".
        return set(package_dir) in ({}, {""})
    common_root = os.path.commonpath(starmap(_parent_path, paths.items()))
    return all(
        _path.same_path(Path(common_root, *name.split(".")), pkg_path)
        for name, pkg_path in paths.items()
    )
|
| 642 |
+
|
| 643 |
+
def _parent_path(pkg, pkg_path):
|
| 644 |
+
"""Infer the parent path containing a package, that if added to ``sys.path`` would
|
| 645 |
+
allow importing that package.
|
| 646 |
+
When ``pkg`` is directly mapped into a directory with a different name, return its
|
| 647 |
+
own path.
|
| 648 |
+
>>> _parent_path("a", "src/a")
|
| 649 |
+
'src'
|
| 650 |
+
>>> _parent_path("b", "src/c")
|
| 651 |
+
'src/c'
|
| 652 |
+
"""
|
| 653 |
+
parent = pkg_path[: -len(pkg)] if pkg_path.endswith(pkg) else pkg_path
|
| 654 |
+
return parent.rstrip("/" + os.sep)
|
| 655 |
+
|
| 656 |
+
|
| 657 |
+
def _find_packages(dist: Distribution) -> Iterator[str]:
|
| 658 |
+
yield from iter(dist.packages or [])
|
| 659 |
+
|
| 660 |
+
py_modules = dist.py_modules or []
|
| 661 |
+
nested_modules = [mod for mod in py_modules if "." in mod]
|
| 662 |
+
if dist.ext_package:
|
| 663 |
+
yield dist.ext_package
|
| 664 |
+
else:
|
| 665 |
+
ext_modules = dist.ext_modules or []
|
| 666 |
+
nested_modules += [x.name for x in ext_modules if "." in x.name]
|
| 667 |
+
|
| 668 |
+
for module in nested_modules:
|
| 669 |
+
package, _, _ = module.rpartition(".")
|
| 670 |
+
yield package
|
| 671 |
+
|
| 672 |
+
|
| 673 |
+
def _find_top_level_modules(dist: Distribution) -> Iterator[str]:
|
| 674 |
+
py_modules = dist.py_modules or []
|
| 675 |
+
yield from (mod for mod in py_modules if "." not in mod)
|
| 676 |
+
|
| 677 |
+
if not dist.ext_package:
|
| 678 |
+
ext_modules = dist.ext_modules or []
|
| 679 |
+
yield from (x.name for x in ext_modules if "." not in x.name)
|
| 680 |
+
|
| 681 |
+
|
| 682 |
+
def _find_package_roots(
    packages: Iterable[str],
    package_dir: Mapping[str, str],
    src_root: StrPath,
) -> dict[str, str]:
    """Map each package name to the absolute directory it is loaded from,
    then drop entries already covered by a parent package's entry."""
    roots = {
        pkg: _absolute_root(find_package_path(pkg, package_dir, src_root))
        for pkg in sorted(packages)
    }
    return _remove_nested(roots)
|
| 694 |
+
|
| 695 |
+
def _absolute_root(path: StrPath) -> str:
|
| 696 |
+
"""Works for packages and top-level modules"""
|
| 697 |
+
path_ = Path(path)
|
| 698 |
+
parent = path_.parent
|
| 699 |
+
|
| 700 |
+
if path_.exists():
|
| 701 |
+
return str(path_.resolve())
|
| 702 |
+
else:
|
| 703 |
+
return str(parent.resolve() / path_.name)
|
| 704 |
+
|
| 705 |
+
|
| 706 |
+
def _find_virtual_namespaces(pkg_roots: dict[str, str]) -> Iterator[str]:
    """By carefully designing ``package_dir``, it is possible to implement the logical
    structure of PEP 420 in a package without the corresponding directories.

    Moreover a parent package can be purposefully/accidentally skipped in the discovery
    phase (e.g. ``find_packages(include=["mypkg.*"])``, when ``mypkg.foo`` is included
    by ``mypkg`` itself is not).
    We consider this case to also be a virtual namespace (ignoring the original
    directory) to emulate a non-editable installation.

    This function will try to find these kinds of namespaces.
    """
    for pkg in pkg_roots:
        if "." not in pkg:
            continue
        segments = pkg.split(".")
        # Walk the ancestors from the deepest up to (but excluding) the root.
        for end in range(len(segments) - 1, 0, -1):
            ancestor = ".".join(segments[:end])
            ancestor_dir = Path(find_package_path(ancestor, pkg_roots, ""))
            # ancestor not in pkg_roots ==> purposefully/accidentally skipped
            if ancestor not in pkg_roots or not ancestor_dir.exists():
                yield ancestor
|
| 729 |
+
|
| 730 |
+
def _find_namespaces(
    packages: list[str], pkg_roots: dict[str, str]
) -> Iterator[tuple[str, list[str]]]:
    """Yield ``(package, [path])`` for packages that exist on disk without an
    ``__init__.py`` (i.e. PEP 420 namespace packages)."""
    for pkg in packages:
        pkg_dir = find_package_path(pkg, pkg_roots, "")
        directory = Path(pkg_dir)
        if directory.exists() and not (directory / "__init__.py").exists():
            yield (pkg, [pkg_dir])
|
| 738 |
+
|
| 739 |
+
def _remove_nested(pkg_roots: dict[str, str]) -> dict[str, str]:
    """Return a copy of ``pkg_roots`` with entries removed when they are
    nested (logically and physically) inside another entry."""
    result = dict(pkg_roots.copy())

    for candidate, candidate_path in reversed(list(pkg_roots.items())):
        is_nested = any(
            candidate != other and _is_nested(candidate, candidate_path, other, other_path)
            for other, other_path in pkg_roots.items()
        )
        if is_nested:
            result.pop(candidate)

    return result
|
| 751 |
+
|
| 752 |
+
def _is_nested(pkg: str, pkg_path: str, parent: str, parent_path: str) -> bool:
    """
    Return ``True`` if ``pkg`` is nested inside ``parent`` both logically and in the
    file system.
    >>> _is_nested("a.b", "path/a/b", "a", "path/a")
    True
    >>> _is_nested("a.b", "path/a/b", "a", "otherpath/a")
    False
    >>> _is_nested("a.b", "path/a/b", "c", "path/c")
    False
    >>> _is_nested("a.a", "path/a/a", "a", "path/a")
    True
    >>> _is_nested("b.a", "path/b/a", "a", "path/a")
    False
    """
    if not pkg.startswith(parent):
        return False
    # The dotted-name remainder must mirror the directory layout under parent_path.
    remainder = pkg.replace(parent, "", 1).strip(".").split(".")
    return _path.normpath(pkg_path) == _path.normpath(Path(parent_path, *remainder))
|
| 773 |
+
|
| 774 |
+
def _empty_dir(dir_: _P) -> _P:
    """Create a directory ensured to be empty. Existing files may be removed."""
    # ignore_errors also covers the case where dir_ does not exist yet.
    _shutil.rmtree(dir_, ignore_errors=True)
    os.makedirs(dir_)
    return dir_
|
| 780 |
+
|
| 781 |
+
class _NamespaceInstaller(namespaces.Installer):
    """Adapter that reuses the legacy ``namespaces.Installer`` machinery to
    produce namespace-package support files for editable installs."""

    def __init__(self, distribution, installation_dir, editable_name, src_root) -> None:
        self.distribution = distribution
        self.installation_dir = installation_dir
        self.editable_name = editable_name
        self.src_root = src_root
        # State expected by the base class:
        self.outputs: list[str] = []
        self.dry_run = False

    def _get_nspkg_file(self):
        """Installation target."""
        return os.path.join(self.installation_dir, self.editable_name + self.nspkg_ext)

    def _get_root(self):
        """Where the modules/packages should be loaded from."""
        return repr(str(self.src_root))
+
|
| 798 |
+
|
| 799 |
+
# Source template for the finder module written into editable wheels
# (formatted by ``_finder_template`` with the project's MAPPING/NAMESPACES).
_FINDER_TEMPLATE = """\
from __future__ import annotations
import sys
from importlib.machinery import ModuleSpec, PathFinder
from importlib.machinery import all_suffixes as module_suffixes
from importlib.util import spec_from_file_location
from itertools import chain
from pathlib import Path

MAPPING: dict[str, str] = {mapping!r}
NAMESPACES: dict[str, list[str]] = {namespaces!r}
PATH_PLACEHOLDER = {name!r} + ".__path_hook__"


class _EditableFinder:  # MetaPathFinder
    @classmethod
    def find_spec(cls, fullname: str, path=None, target=None) -> ModuleSpec | None:  # type: ignore
        # Top-level packages and modules (we know these exist in the FS)
        if fullname in MAPPING:
            pkg_path = MAPPING[fullname]
            return cls._find_spec(fullname, Path(pkg_path))

        # Handle immediate children modules (required for namespaces to work)
        # To avoid problems with case sensitivity in the file system we delegate
        # to the importlib.machinery implementation.
        parent, _, child = fullname.rpartition(".")
        if parent and parent in MAPPING:
            return PathFinder.find_spec(fullname, path=[MAPPING[parent]])

        # Other levels of nesting should be handled automatically by importlib
        # using the parent path.
        return None

    @classmethod
    def _find_spec(cls, fullname: str, candidate_path: Path) -> ModuleSpec | None:
        init = candidate_path / "__init__.py"
        candidates = (candidate_path.with_suffix(x) for x in module_suffixes())
        for candidate in chain([init], candidates):
            if candidate.exists():
                return spec_from_file_location(fullname, candidate)
        return None


class _EditableNamespaceFinder:  # PathEntryFinder
    @classmethod
    def _path_hook(cls, path) -> type[_EditableNamespaceFinder]:
        if path == PATH_PLACEHOLDER:
            return cls
        raise ImportError

    @classmethod
    def _paths(cls, fullname: str) -> list[str]:
        paths = NAMESPACES[fullname]
        if not paths and fullname in MAPPING:
            paths = [MAPPING[fullname]]
        # Always add placeholder, for 2 reasons:
        # 1. __path__ cannot be empty for the spec to be considered namespace.
        # 2. In the case of nested namespaces, we need to force
        #    import machinery to query _EditableNamespaceFinder again.
        return [*paths, PATH_PLACEHOLDER]

    @classmethod
    def find_spec(cls, fullname: str, target=None) -> ModuleSpec | None:  # type: ignore
        if fullname in NAMESPACES:
            spec = ModuleSpec(fullname, None, is_package=True)
            spec.submodule_search_locations = cls._paths(fullname)
            return spec
        return None

    @classmethod
    def find_module(cls, _fullname) -> None:
        return None


def install():
    if not any(finder == _EditableFinder for finder in sys.meta_path):
        sys.meta_path.append(_EditableFinder)

    if not NAMESPACES:
        return

    if not any(hook == _EditableNamespaceFinder._path_hook for hook in sys.path_hooks):
        # PathEntryFinder is needed to create NamespaceSpec without private APIS
        sys.path_hooks.append(_EditableNamespaceFinder._path_hook)
    if PATH_PLACEHOLDER not in sys.path:
        sys.path.append(PATH_PLACEHOLDER)  # Used just to trigger the path hook
"""
|
| 887 |
+
|
| 888 |
+
def _finder_template(
    name: str, mapping: Mapping[str, str], namespaces: dict[str, list[str]]
) -> str:
    """Create a string containing the code for the ``MetaPathFinder`` and
    ``PathEntryFinder``.
    """
    # Sort by key for a deterministic, reproducible output file.
    ordered = dict(sorted(mapping.items()))
    return _FINDER_TEMPLATE.format(name=name, mapping=ordered, namespaces=namespaces)
|
| 897 |
+
|
| 898 |
+
class LinksNotSupported(errors.FileError):
    """File system does not seem to support either symlinks or hard links.

    Raised when strict editable installs cannot mirror files via links
    (see ``_can_symlink_files``).
    """
|
| 901 |
+
|
| 902 |
+
class _DebuggingTips(SetuptoolsWarning):
    # Warning shown when an error interrupts an editable installation,
    # pointing users at common debugging steps.
    _SUMMARY = "Problem in editable installation."
    _DETAILS = """
    An error happened while installing `{project}` in editable mode.

    The following steps are recommended to help debug this problem:

    - Try to install the project normally, without using the editable mode.
      Does the error still persist?
      (If it does, try fixing the problem before attempting the editable mode).
    - If you are using binary extensions, make sure you have all OS-level
      dependencies installed (e.g. compilers, toolchains, binary libraries, ...).
    - Try the latest version of setuptools (maybe the error was already fixed).
    - If you (or your project dependencies) are using any setuptools extension
      or customization, make sure they support the editable mode.

    After following the steps above, if the problem still persists and
    you think this is related to how setuptools handles editable installations,
    please submit a reproducible example
    (see https://stackoverflow.com/help/minimal-reproducible-example) to:

        https://github.com/pypa/setuptools/issues
    """
    _SEE_DOCS = "userguide/development_mode.html"
|
falcon/lib/python3.10/site-packages/setuptools/command/egg_info.py
ADDED
|
@@ -0,0 +1,720 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""setuptools.command.egg_info
|
| 2 |
+
|
| 3 |
+
Create a distribution's .egg-info directory and contents"""
|
| 4 |
+
|
| 5 |
+
import functools
|
| 6 |
+
import os
|
| 7 |
+
import re
|
| 8 |
+
import sys
|
| 9 |
+
import time
|
| 10 |
+
from collections.abc import Callable
|
| 11 |
+
|
| 12 |
+
import packaging
|
| 13 |
+
import packaging.requirements
|
| 14 |
+
import packaging.version
|
| 15 |
+
|
| 16 |
+
import setuptools.unicode_utils as unicode_utils
|
| 17 |
+
from setuptools import Command
|
| 18 |
+
from setuptools.command import bdist_egg
|
| 19 |
+
from setuptools.command.sdist import sdist, walk_revctrl
|
| 20 |
+
from setuptools.command.setopt import edit_config
|
| 21 |
+
from setuptools.glob import glob
|
| 22 |
+
|
| 23 |
+
from .. import _entry_points, _normalization
|
| 24 |
+
from .._importlib import metadata
|
| 25 |
+
from ..warnings import SetuptoolsDeprecationWarning
|
| 26 |
+
from . import _requirestxt
|
| 27 |
+
|
| 28 |
+
import distutils.errors
|
| 29 |
+
import distutils.filelist
|
| 30 |
+
from distutils import log
|
| 31 |
+
from distutils.errors import DistutilsInternalError
|
| 32 |
+
from distutils.filelist import FileList as _FileList
|
| 33 |
+
from distutils.util import convert_path
|
| 34 |
+
|
| 35 |
+
# NOTE: despite the name, this is "major.minor" of the running interpreter
# (the historical constant name is kept for backward compatibility).
PY_MAJOR = f'{sys.version_info.major}.{sys.version_info.minor}'
|
| 37 |
+
|
| 38 |
+
def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
    """
    Translate a file path glob like '*.txt' in to a regular expression.
    This differs from fnmatch.translate which allows wildcards to match
    directory separators. It also knows about '**/' which matches any number of
    directories.

    Returns a compiled pattern anchored at the end with ``\\Z``.
    """
    pat = ''

    # This will split on '/' within [character classes]. This is deliberate.
    chunks = glob.split(os.path.sep)

    sep = re.escape(os.sep)
    # A "name character" is anything except the directory separator.
    valid_char = f'[^{sep}]'

    for c, chunk in enumerate(chunks):
        last_chunk = c == len(chunks) - 1

        # Chunks that are a literal ** are globstars. They match anything.
        if chunk == '**':
            if last_chunk:
                # Match anything if this is the last component
                pat += '.*'
            else:
                # Match '(name/)*'
                pat += f'(?:{valid_char}+{sep})*'
            continue  # Break here as the whole path component has been handled

        # Find any special characters in the remainder
        i = 0
        chunk_len = len(chunk)
        while i < chunk_len:
            char = chunk[i]
            if char == '*':
                # Match any number of name characters
                pat += valid_char + '*'
            elif char == '?':
                # Match a name character
                pat += valid_char
            elif char == '[':
                # Character class
                inner_i = i + 1
                # Skip initial !/] chars
                if inner_i < chunk_len and chunk[inner_i] == '!':
                    inner_i = inner_i + 1
                if inner_i < chunk_len and chunk[inner_i] == ']':
                    inner_i = inner_i + 1

                # Loop till the closing ] is found
                while inner_i < chunk_len and chunk[inner_i] != ']':
                    inner_i = inner_i + 1

                if inner_i >= chunk_len:
                    # Got to the end of the string without finding a closing ]
                    # Do not treat this as a matching group, but as a literal [
                    pat += re.escape(char)
                else:
                    # Grab the insides of the [brackets]
                    inner = chunk[i + 1 : inner_i]
                    char_class = ''

                    # Class negation
                    if inner[0] == '!':
                        char_class = '^'
                        inner = inner[1:]

                    # re.escape keeps the class contents literal.
                    char_class += re.escape(inner)
                    pat += f'[{char_class}]'

                    # Skip to the end ]
                    i = inner_i
            else:
                pat += re.escape(char)
            i += 1

        # Join each chunk with the dir separator
        if not last_chunk:
            pat += sep

    pat += r'\Z'
    return re.compile(pat, flags=re.MULTILINE | re.DOTALL)
|
| 120 |
+
|
| 121 |
+
class InfoCommon:
    """Mixin shared by the ``egg_info`` family of commands: computes the
    distribution's (possibly build-tagged) name and version."""

    tag_build = None
    tag_date = None

    @property
    def name(self):
        """Distribution name, normalized for safe use."""
        return _normalization.safe_name(self.distribution.get_name())

    def tagged_version(self):
        """Distribution version with build tags applied, normalized."""
        return _normalization.safe_version(
            self._maybe_tag(self.distribution.get_version())
        )

    def _maybe_tag(self, version):
        """
        egg_info may be called more than once for a distribution,
        in which case the version string already contains all tags.
        """
        if self.vtags and self._already_tagged(version):
            return version
        return version + self.vtags

    def _already_tagged(self, version: str) -> bool:
        # Depending on their format, tags may change with version normalization.
        # So in addition the regular tags, we have to search for the normalized ones.
        return version.endswith(self.vtags) or version.endswith(self._safe_tags())

    def _safe_tags(self) -> str:
        # To implement this we can rely on `safe_version` pretending to be version 0
        # followed by tags. Then we simply discard the starting 0 (fake version number)
        try:
            return _normalization.safe_version(f"0{self.vtags}")[1:]
        except packaging.version.InvalidVersion:
            return _normalization.safe_name(self.vtags.replace(' ', '.'))

    def tags(self) -> str:
        """Concatenate the configured build tag and (optional) date stamp."""
        parts = [self.tag_build or '']
        if self.tag_date:
            parts.append(time.strftime("%Y%m%d"))
        return ''.join(parts)

    vtags = property(tags)
|
| 167 |
+
|
| 168 |
+
class egg_info(InfoCommon, Command):
|
| 169 |
+
description = "create a distribution's .egg-info directory"
|
| 170 |
+
|
| 171 |
+
user_options = [
|
| 172 |
+
(
|
| 173 |
+
'egg-base=',
|
| 174 |
+
'e',
|
| 175 |
+
"directory containing .egg-info directories"
|
| 176 |
+
" [default: top of the source tree]",
|
| 177 |
+
),
|
| 178 |
+
('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
|
| 179 |
+
('tag-build=', 'b', "Specify explicit tag to add to version number"),
|
| 180 |
+
('no-date', 'D', "Don't include date stamp [default]"),
|
| 181 |
+
]
|
| 182 |
+
|
| 183 |
+
boolean_options = ['tag-date']
|
| 184 |
+
negative_opt = {
|
| 185 |
+
'no-date': 'tag-date',
|
| 186 |
+
}
|
| 187 |
+
|
| 188 |
+
def initialize_options(self):
|
| 189 |
+
self.egg_base = None
|
| 190 |
+
self.egg_name = None
|
| 191 |
+
self.egg_info = None
|
| 192 |
+
self.egg_version = None
|
| 193 |
+
self.ignore_egg_info_in_manifest = False
|
| 194 |
+
|
| 195 |
+
####################################
|
| 196 |
+
# allow the 'tag_svn_revision' to be detected and
|
| 197 |
+
# set, supporting sdists built on older Setuptools.
|
| 198 |
+
@property
|
| 199 |
+
def tag_svn_revision(self) -> None:
|
| 200 |
+
pass
|
| 201 |
+
|
| 202 |
+
@tag_svn_revision.setter
|
| 203 |
+
def tag_svn_revision(self, value):
|
| 204 |
+
pass
|
| 205 |
+
|
| 206 |
+
####################################
|
| 207 |
+
|
| 208 |
+
def save_version_info(self, filename) -> None:
|
| 209 |
+
"""
|
| 210 |
+
Materialize the value of date into the
|
| 211 |
+
build tag. Install build keys in a deterministic order
|
| 212 |
+
to avoid arbitrary reordering on subsequent builds.
|
| 213 |
+
"""
|
| 214 |
+
# follow the order these keys would have been added
|
| 215 |
+
# when PYTHONHASHSEED=0
|
| 216 |
+
egg_info = dict(tag_build=self.tags(), tag_date=0)
|
| 217 |
+
edit_config(filename, dict(egg_info=egg_info))
|
| 218 |
+
|
| 219 |
+
def finalize_options(self) -> None:
|
| 220 |
+
# Note: we need to capture the current value returned
|
| 221 |
+
# by `self.tagged_version()`, so we can later update
|
| 222 |
+
# `self.distribution.metadata.version` without
|
| 223 |
+
# repercussions.
|
| 224 |
+
self.egg_name = self.name
|
| 225 |
+
self.egg_version = self.tagged_version()
|
| 226 |
+
parsed_version = packaging.version.Version(self.egg_version)
|
| 227 |
+
|
| 228 |
+
try:
|
| 229 |
+
is_version = isinstance(parsed_version, packaging.version.Version)
|
| 230 |
+
spec = "%s==%s" if is_version else "%s===%s"
|
| 231 |
+
packaging.requirements.Requirement(spec % (self.egg_name, self.egg_version))
|
| 232 |
+
except ValueError as e:
|
| 233 |
+
raise distutils.errors.DistutilsOptionError(
|
| 234 |
+
f"Invalid distribution name or version syntax: {self.egg_name}-{self.egg_version}"
|
| 235 |
+
) from e
|
| 236 |
+
|
| 237 |
+
if self.egg_base is None:
|
| 238 |
+
dirs = self.distribution.package_dir
|
| 239 |
+
self.egg_base = (dirs or {}).get('', os.curdir)
|
| 240 |
+
|
| 241 |
+
self.ensure_dirname('egg_base')
|
| 242 |
+
self.egg_info = _normalization.filename_component(self.egg_name) + '.egg-info'
|
| 243 |
+
if self.egg_base != os.curdir:
|
| 244 |
+
self.egg_info = os.path.join(self.egg_base, self.egg_info)
|
| 245 |
+
|
| 246 |
+
# Set package version for the benefit of dumber commands
|
| 247 |
+
# (e.g. sdist, bdist_wininst, etc.)
|
| 248 |
+
#
|
| 249 |
+
self.distribution.metadata.version = self.egg_version
|
| 250 |
+
|
| 251 |
+
def _get_egg_basename(self, py_version=PY_MAJOR, platform=None):
|
| 252 |
+
"""Compute filename of the output egg. Private API."""
|
| 253 |
+
return _egg_basename(self.egg_name, self.egg_version, py_version, platform)
|
| 254 |
+
|
| 255 |
+
def write_or_delete_file(self, what, filename, data, force: bool = False) -> None:
|
| 256 |
+
"""Write `data` to `filename` or delete if empty
|
| 257 |
+
|
| 258 |
+
If `data` is non-empty, this routine is the same as ``write_file()``.
|
| 259 |
+
If `data` is empty but not ``None``, this is the same as calling
|
| 260 |
+
``delete_file(filename)`. If `data` is ``None``, then this is a no-op
|
| 261 |
+
unless `filename` exists, in which case a warning is issued about the
|
| 262 |
+
orphaned file (if `force` is false), or deleted (if `force` is true).
|
| 263 |
+
"""
|
| 264 |
+
if data:
|
| 265 |
+
self.write_file(what, filename, data)
|
| 266 |
+
elif os.path.exists(filename):
|
| 267 |
+
if data is None and not force:
|
| 268 |
+
log.warn("%s not set in setup(), but %s exists", what, filename)
|
| 269 |
+
return
|
| 270 |
+
else:
|
| 271 |
+
self.delete_file(filename)
|
| 272 |
+
|
| 273 |
+
def write_file(self, what, filename, data) -> None:
|
| 274 |
+
"""Write `data` to `filename` (if not a dry run) after announcing it
|
| 275 |
+
|
| 276 |
+
`what` is used in a log message to identify what is being written
|
| 277 |
+
to the file.
|
| 278 |
+
"""
|
| 279 |
+
log.info("writing %s to %s", what, filename)
|
| 280 |
+
data = data.encode("utf-8")
|
| 281 |
+
if not self.dry_run:
|
| 282 |
+
f = open(filename, 'wb')
|
| 283 |
+
f.write(data)
|
| 284 |
+
f.close()
|
| 285 |
+
|
| 286 |
+
def delete_file(self, filename) -> None:
|
| 287 |
+
"""Delete `filename` (if not a dry run) after announcing it"""
|
| 288 |
+
log.info("deleting %s", filename)
|
| 289 |
+
if not self.dry_run:
|
| 290 |
+
os.unlink(filename)
|
| 291 |
+
|
| 292 |
+
def run(self) -> None:
|
| 293 |
+
# Pre-load to avoid iterating over entry-points while an empty .egg-info
|
| 294 |
+
# exists in sys.path. See pypa/pyproject-hooks#206
|
| 295 |
+
writers = list(metadata.entry_points(group='egg_info.writers'))
|
| 296 |
+
|
| 297 |
+
self.mkpath(self.egg_info)
|
| 298 |
+
try:
|
| 299 |
+
os.utime(self.egg_info, None)
|
| 300 |
+
except OSError as e:
|
| 301 |
+
msg = f"Cannot update time stamp of directory '{self.egg_info}'"
|
| 302 |
+
raise distutils.errors.DistutilsFileError(msg) from e
|
| 303 |
+
for ep in writers:
|
| 304 |
+
writer = ep.load()
|
| 305 |
+
writer(self, ep.name, os.path.join(self.egg_info, ep.name))
|
| 306 |
+
|
| 307 |
+
# Get rid of native_libs.txt if it was put there by older bdist_egg
|
| 308 |
+
nl = os.path.join(self.egg_info, "native_libs.txt")
|
| 309 |
+
if os.path.exists(nl):
|
| 310 |
+
self.delete_file(nl)
|
| 311 |
+
|
| 312 |
+
self.find_sources()
|
| 313 |
+
|
| 314 |
+
def find_sources(self) -> None:
    """Generate the SOURCES.txt manifest inside the egg-info directory."""
    manifest_path = os.path.join(self.egg_info, "SOURCES.txt")
    maker = manifest_maker(self.distribution)
    maker.ignore_egg_info_dir = self.ignore_egg_info_in_manifest
    maker.manifest = manifest_path
    maker.run()
    self.filelist = maker.filelist
|
| 324 |
+
class FileList(_FileList):
    """File list that implements the MANIFEST.in commands and additionally
    filters out unsafe paths (undecodable names and, optionally, .egg-info
    directories).
    """

    # Implementations of the various MANIFEST.in commands

    def __init__(
        self, warn=None, debug_print=None, ignore_egg_info_dir: bool = False
    ) -> None:
        super().__init__(warn, debug_print)
        # When True, _safe_path() rejects anything under a .egg-info dir.
        self.ignore_egg_info_dir = ignore_egg_info_dir

    def process_template_line(self, line) -> None:
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words. 'action' is always
        # defined: it's the first word of the line. Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        # Dispatch table: MANIFEST.in action name -> bound handler.
        action_map: dict[str, Callable] = {
            'include': self.include,
            'exclude': self.exclude,
            'global-include': self.global_include,
            'global-exclude': self.global_exclude,
            'recursive-include': functools.partial(
                self.recursive_include,
                dir,
            ),
            'recursive-exclude': functools.partial(
                self.recursive_exclude,
                dir,
            ),
            'graft': self.graft,
            'prune': self.prune,
        }
        # Warning template emitted when a handler matches nothing.
        log_map = {
            'include': "warning: no files found matching '%s'",
            'exclude': ("warning: no previously-included files found matching '%s'"),
            'global-include': (
                "warning: no files found matching '%s' anywhere in distribution"
            ),
            'global-exclude': (
                "warning: no previously-included files matching "
                "'%s' found anywhere in distribution"
            ),
            'recursive-include': (
                "warning: no files found matching '%s' under directory '%s'"
            ),
            'recursive-exclude': (
                "warning: no previously-included files matching "
                "'%s' found under directory '%s'"
            ),
            'graft': "warning: no directories found matching '%s'",
            'prune': "no previously-included directories found matching '%s'",
        }

        try:
            process_action = action_map[action]
        except KeyError:
            msg = f"Invalid MANIFEST.in: unknown action {action!r} in {line!r}"
            raise DistutilsInternalError(msg) from None

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.

        action_is_recursive = action.startswith('recursive-')
        if action in {'graft', 'prune'}:
            # graft/prune take a single directory pattern, not a pattern list.
            patterns = [dir_pattern]
        extra_log_args = (dir,) if action_is_recursive else ()
        log_tmpl = log_map[action]

        self.debug_print(
            ' '.join(
                [action] + ([dir] if action_is_recursive else []) + patterns,
            )
        )
        for pattern in patterns:
            if not process_action(pattern):
                log.warn(log_tmpl, pattern, *extra_log_args)

    def _remove_files(self, predicate):
        """
        Remove all files from the file list that match the predicate.
        Return True if any matching files were removed
        """
        found = False
        # Iterate backwards so deletions don't shift not-yet-visited indexes.
        for i in range(len(self.files) - 1, -1, -1):
            if predicate(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                found = True
        return found

    def include(self, pattern):
        """Include files that match 'pattern'."""
        found = [f for f in glob(pattern) if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def exclude(self, pattern):
        """Exclude files that match 'pattern'."""
        match = translate_pattern(pattern)
        return self._remove_files(match.match)

    def recursive_include(self, dir, pattern):
        """
        Include all files anywhere in 'dir/' that match the pattern.
        """
        full_pattern = os.path.join(dir, '**', pattern)
        found = [f for f in glob(full_pattern, recursive=True) if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def recursive_exclude(self, dir, pattern):
        """
        Exclude any file anywhere in 'dir/' that match the pattern.
        """
        match = translate_pattern(os.path.join(dir, '**', pattern))
        return self._remove_files(match.match)

    def graft(self, dir):
        """Include all files from 'dir/'."""
        found = [
            item
            for match_dir in glob(dir)
            for item in distutils.filelist.findall(match_dir)
        ]
        self.extend(found)
        return bool(found)

    def prune(self, dir):
        """Filter out files from 'dir/'."""
        match = translate_pattern(os.path.join(dir, '**'))
        return self._remove_files(match.match)

    def global_include(self, pattern):
        """
        Include all files anywhere in the current directory that match the
        pattern. This is very inefficient on large file trees.
        """
        if self.allfiles is None:
            self.findall()
        match = translate_pattern(os.path.join('**', pattern))
        found = [f for f in self.allfiles if match.match(f)]
        self.extend(found)
        return bool(found)

    def global_exclude(self, pattern):
        """
        Exclude all files anywhere that match the pattern.
        """
        match = translate_pattern(os.path.join('**', pattern))
        return self._remove_files(match.match)

    def append(self, item) -> None:
        # Add a single path, normalizing it and dropping unsafe entries.
        if item.endswith('\r'):  # Fix older sdists built on Windows
            item = item[:-1]
        path = convert_path(item)

        if self._safe_path(path):
            self.files.append(path)

    def extend(self, paths) -> None:
        # Bulk append, silently dropping paths that fail the safety check.
        self.files.extend(filter(self._safe_path, paths))

    def _repair(self):
        """
        Replace self.files with only safe paths

        Because some owners of FileList manipulate the underlying
        ``files`` attribute directly, this method must be called to
        repair those paths.
        """
        self.files = list(filter(self._safe_path, self.files))

    def _safe_path(self, path):
        # Returns True only for paths that decode/encode cleanly and (if
        # configured) are not inside a .egg-info directory. Note: paths that
        # do not exist on disk fall through and return None (falsy).
        enc_warn = "'%s' not %s encodable -- skipping"

        # To avoid accidental trans-codings errors, first to unicode
        u_path = unicode_utils.filesys_decode(path)
        if u_path is None:
            log.warn(f"'{path}' in unexpected encoding -- skipping")
            return False

        # Must ensure utf-8 encodability
        utf8_path = unicode_utils.try_encode(u_path, "utf-8")
        if utf8_path is None:
            log.warn(enc_warn, path, 'utf-8')
            return False

        try:
            # ignore egg-info paths
            is_egg_info = ".egg-info" in u_path or b".egg-info" in utf8_path
            if self.ignore_egg_info_dir and is_egg_info:
                return False
            # accept is either way checks out
            if os.path.exists(u_path) or os.path.exists(utf8_path):
                return True
        # this will catch any encode errors decoding u_path
        except UnicodeEncodeError:
            log.warn(enc_warn, path, sys.getfilesystemencoding())
| 526 |
+
class manifest_maker(sdist):
    """sdist subclass whose only job is to compute the file list and write
    the SOURCES.txt manifest (``manifest_only`` mode); it never builds a
    distribution archive.
    """

    # MANIFEST.in is read (if present) to refine the default file list.
    template = "MANIFEST.in"

    def initialize_options(self) -> None:
        self.use_defaults = True
        self.prune = True
        # Only (re)generate the manifest; skip the rest of sdist.
        self.manifest_only = True
        self.force_manifest = True
        self.ignore_egg_info_dir = False

    def finalize_options(self) -> None:
        pass

    def run(self) -> None:
        # Collect the file list, then write it out. The manifest is written
        # early once (if missing) so that it can include itself in the list.
        self.filelist = FileList(ignore_egg_info_dir=self.ignore_egg_info_dir)
        if not os.path.exists(self.manifest):
            self.write_manifest()  # it must exist so it'll get in the list
        self.add_defaults()
        if os.path.exists(self.template):
            self.read_template()
        self.add_license_files()
        self._add_referenced_files()
        self.prune_file_list()
        self.filelist.sort()
        self.filelist.remove_duplicates()
        self.write_manifest()

    def _manifest_normalize(self, path):
        # Manifest entries are stored with forward slashes on all platforms.
        path = unicode_utils.filesys_decode(path)
        return path.replace(os.sep, '/')

    def write_manifest(self) -> None:
        """
        Write the file list in 'self.filelist' to the manifest file
        named by 'self.manifest'.
        """
        self.filelist._repair()

        # Now _repairs should encodability, but not unicode
        files = [self._manifest_normalize(f) for f in self.filelist.files]
        msg = f"writing manifest file '{self.manifest}'"
        self.execute(write_file, (self.manifest, files), msg)

    def warn(self, msg) -> None:
        # Filter out sdist's "standard file not found" noise.
        if not self._should_suppress_warning(msg):
            sdist.warn(self, msg)

    @staticmethod
    def _should_suppress_warning(msg):
        """
        suppress missing-file warnings from sdist
        """
        return re.match(r"standard file .*not found", msg)

    def add_defaults(self) -> None:
        sdist.add_defaults(self)
        self.filelist.append(self.template)
        self.filelist.append(self.manifest)
        # Prefer files reported by a revision-control plugin; otherwise fall
        # back to re-reading an existing manifest.
        rcfiles = list(walk_revctrl())
        if rcfiles:
            self.filelist.extend(rcfiles)
        elif os.path.exists(self.manifest):
            self.read_manifest()

        if os.path.exists("setup.py"):
            # setup.py should be included by default, even if it's not
            # the script called to create the sdist
            self.filelist.append("setup.py")

        ei_cmd = self.get_finalized_command('egg_info')
        self.filelist.graft(ei_cmd.egg_info)

    def add_license_files(self) -> None:
        # Include every license file declared in the metadata.
        license_files = self.distribution.metadata.license_files or []
        for lf in license_files:
            log.info("adding license file '%s'", lf)
        self.filelist.extend(license_files)

    def _add_referenced_files(self):
        """Add files referenced by the config (e.g. `file:` directive) to filelist"""
        referenced = getattr(self.distribution, '_referenced_files', [])
        # ^-- fallback if dist comes from distutils or is a custom class
        for rf in referenced:
            log.debug("adding file referenced by config '%s'", rf)
        self.filelist.extend(referenced)

    def _safe_data_files(self, build_py):
        """
        The parent class implementation of this method
        (``sdist``) will try to include data files, which
        might cause recursion problems when
        ``include_package_data=True``.

        Therefore, avoid triggering any attempt of
        analyzing/building the manifest again.
        """
        if hasattr(build_py, 'get_data_files_without_manifest'):
            return build_py.get_data_files_without_manifest()

        SetuptoolsDeprecationWarning.emit(
            "`build_py` command does not inherit from setuptools' `build_py`.",
            """
            Custom 'build_py' does not implement 'get_data_files_without_manifest'.
            Please extend command classes from setuptools instead of distutils.
            """,
            see_url="https://peps.python.org/pep-0632/",
            # due_date not defined yet, old projects might still do it?
        )
        return build_py.get_data_files()
|
| 637 |
+
def write_file(filename, contents) -> None:
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.
    """
    text = "\n".join(contents)

    # assuming the contents has been vetted for utf-8 encoding
    payload = text.encode("utf-8")

    # always write POSIX-style manifest
    with open(filename, "wb") as fp:
        fp.write(payload)
|
| 650 |
+
def write_pkg_info(cmd, basename, filename) -> None:
    """Write PKG-INFO using the egg name/version, temporarily swapping them
    into the distribution metadata, then record the zip-safety flag.
    """
    log.info("writing %s", filename)
    if cmd.dry_run:
        return

    metadata = cmd.distribution.metadata
    saved = (metadata.name, metadata.version)
    metadata.name, metadata.version = cmd.egg_name, cmd.egg_version

    try:
        # write unescaped data to PKG-INFO, so older pkg_resources
        # can still parse it
        metadata.write_pkg_info(cmd.egg_info)
    finally:
        # Restore the original metadata no matter what happened above.
        metadata.name, metadata.version = saved

    safe = getattr(cmd.distribution, 'zip_safe', None)

    bdist_egg.write_safety_flag(cmd.egg_info, safe)
|
| 669 |
+
def warn_depends_obsolete(cmd, basename, filename) -> None:
    """
    Unused: left to avoid errors when updating (from source) from <= 67.8.
    Old installations have a .dist-info directory with the entry-point
    ``depends.txt = setuptools.command.egg_info:warn_depends_obsolete``.
    This may trigger errors when running the first egg_info in build_meta.
    TODO: Remove this function in a version sufficiently > 68.
    """
    # Intentionally a no-op: the entry point must resolve, nothing more.
|
| 679 |
+
# Export API used in entry_points
# (re-exported here so 'egg_info.writers' entry points can resolve these
# names from this module)
write_requirements = _requirestxt.write_requirements
write_setup_requirements = _requirestxt.write_setup_requirements
|
| 684 |
+
def write_toplevel_names(cmd, basename, filename) -> None:
    """Write the sorted set of top-level package/module names, one per line."""
    top_level = {
        name.split('.', 1)[0]
        for name in cmd.distribution.iter_distribution_names()
    }
    cmd.write_file("top-level names", filename, '\n'.join(sorted(top_level)) + '\n')
|
| 691 |
+
def overwrite_arg(cmd, basename, filename) -> None:
    """Like ``write_arg`` but always forces the file to be (re)written."""
    write_arg(cmd, basename, filename, force=True)
|
| 695 |
+
def write_arg(cmd, basename, filename, force: bool = False) -> None:
    """Serialize the distribution attribute named after *basename* (one item
    per line) and write or delete *filename* accordingly.
    """
    argname, _ext = os.path.splitext(basename)
    raw = getattr(cmd.distribution, argname, None)
    serialized = None if raw is None else '\n'.join(raw) + '\n'
    cmd.write_or_delete_file(argname, filename, serialized, force)
|
| 703 |
+
def write_entries(cmd, basename, filename) -> None:
    """Render the distribution's entry points and write entry_points.txt."""
    entry_points = _entry_points.load(cmd.distribution.entry_points)
    rendered = _entry_points.render(entry_points)
    cmd.write_or_delete_file('entry points', filename, rendered, True)
|
| 709 |
+
def _egg_basename(egg_name, egg_version, py_version=None, platform=None):
    """Compute filename of the output egg. Private API."""
    parts = [
        _normalization.filename_component(egg_name),
        _normalization.filename_component(egg_version),
        f"py{py_version or PY_MAJOR}",
    ]
    if platform:
        parts.append(platform)
    return "-".join(parts)
|
| 719 |
+
class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
    """Deprecated behavior warning for EggInfo, bypassing suppression."""
    # Marker subclass only: carries no extra behavior beyond its base.
falcon/lib/python3.10/site-packages/setuptools/command/install_egg_info.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
|
| 3 |
+
from setuptools import Command, namespaces
|
| 4 |
+
from setuptools.archive_util import unpack_archive
|
| 5 |
+
|
| 6 |
+
from .._path import ensure_directory
|
| 7 |
+
|
| 8 |
+
from distutils import dir_util, log
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class install_egg_info(namespaces.Installer, Command):
    """Install an .egg-info directory for the package"""

    description = "Install an .egg-info directory for the package"

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        self.install_dir = None

    def finalize_options(self) -> None:
        # Default install_dir to install_lib's, then derive source (the
        # built .egg-info) and target (its destination under install_dir).
        self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
        ei_cmd = self.get_finalized_command("egg_info")
        basename = f"{ei_cmd._get_egg_basename()}.egg-info"
        self.source = ei_cmd.egg_info
        self.target = os.path.join(self.install_dir, basename)
        # Paths written during run(); reported via get_outputs().
        self.outputs: list[str] = []

    def run(self) -> None:
        self.run_command('egg_info')
        # Remove any pre-existing target (directory or file/symlink) first.
        if os.path.isdir(self.target) and not os.path.islink(self.target):
            dir_util.remove_tree(self.target, dry_run=self.dry_run)
        elif os.path.exists(self.target):
            self.execute(os.unlink, (self.target,), "Removing " + self.target)
        if not self.dry_run:
            ensure_directory(self.target)
        self.execute(self.copytree, (), f"Copying {self.source} to {self.target}")
        self.install_namespaces()

    def get_outputs(self):
        return self.outputs

    def copytree(self) -> None:
        # Copy the .egg-info tree to site-packages
        def skimmer(src, dst):
            # filter out source-control directories; note that 'src' is always
            # a '/'-separated path, regardless of platform. 'dst' is a
            # platform-specific path.
            for skip in '.svn/', 'CVS/':
                if src.startswith(skip) or '/' + skip in src:
                    return None
            self.outputs.append(dst)
            log.debug("Copying %s to %s", src, dst)
            return dst

        unpack_archive(self.source, self.target, skimmer)
|
falcon/lib/python3.10/site-packages/setuptools/command/install_lib.py
ADDED
|
@@ -0,0 +1,137 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
from itertools import product, starmap
|
| 6 |
+
|
| 7 |
+
from .._path import StrPath
|
| 8 |
+
from ..dist import Distribution
|
| 9 |
+
|
| 10 |
+
import distutils.command.install_lib as orig
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class install_lib(orig.install_lib):
    """Don't add compiled flags to filenames of non-Python files"""

    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution

    def run(self) -> None:
        self.build()
        outfiles = self.install()
        if outfiles is not None:
            # always compile, in case we have any extension stubs to deal with
            self.byte_compile(outfiles)

    def get_exclusions(self):
        """
        Return a collections.Sized collections.Container of paths to be
        excluded for single_version_externally_managed installations.
        """
        # Every package implied by each namespace package (e.g. 'a.b.c'
        # implies 'a.b' and 'a').
        all_packages = (
            pkg
            for ns_pkg in self._get_SVEM_NSPs()
            for pkg in self._all_packages(ns_pkg)
        )

        # Cross each package with every bytecode/__init__ exclusion path.
        excl_specs = product(all_packages, self._gen_exclusion_paths())
        return set(starmap(self._exclude_pkg_path, excl_specs))

    def _exclude_pkg_path(self, pkg, exclusion_path):
        """
        Given a package name and exclusion path within that package,
        compute the full exclusion path.
        """
        parts = pkg.split('.') + [exclusion_path]
        return os.path.join(self.install_dir, *parts)

    @staticmethod
    def _all_packages(pkg_name):
        """
        >>> list(install_lib._all_packages('foo.bar.baz'))
        ['foo.bar.baz', 'foo.bar', 'foo']
        """
        while pkg_name:
            yield pkg_name
            pkg_name, _sep, _child = pkg_name.rpartition('.')

    def _get_SVEM_NSPs(self):
        """
        Get namespace packages (list) but only for
        single_version_externally_managed installations and empty otherwise.
        """
        # TODO: is it necessary to short-circuit here? i.e. what's the cost
        # if get_finalized_command is called even when namespace_packages is
        # False?
        if not self.distribution.namespace_packages:
            return []

        install_cmd = self.get_finalized_command('install')
        svem = install_cmd.single_version_externally_managed

        return self.distribution.namespace_packages if svem else []

    @staticmethod
    def _gen_exclusion_paths():
        """
        Generate file paths to be excluded for namespace packages (bytecode
        cache files).
        """
        # always exclude the package module itself
        yield '__init__.py'

        yield '__init__.pyc'
        yield '__init__.pyo'

        if not hasattr(sys, 'implementation'):
            return

        base = os.path.join('__pycache__', '__init__.' + sys.implementation.cache_tag)
        yield base + '.pyc'
        yield base + '.pyo'
        yield base + '.opt-1.pyc'
        yield base + '.opt-2.pyc'

    def copy_tree(
        self,
        infile: StrPath,
        outfile: str,
        # override: Using actual booleans
        preserve_mode: bool = True,  # type: ignore[override]
        preserve_times: bool = True,  # type: ignore[override]
        preserve_symlinks: bool = False,  # type: ignore[override]
        level: object = 1,
    ) -> list[str]:
        # Only the default preserve_* combination is supported here.
        assert preserve_mode
        assert preserve_times
        assert not preserve_symlinks
        exclude = self.get_exclusions()

        if not exclude:
            return orig.install_lib.copy_tree(self, infile, outfile)

        # Exclude namespace package __init__.py* files from the output

        from setuptools.archive_util import unpack_directory

        from distutils import log

        outfiles: list[str] = []

        def pf(src: str, dst: str):
            # Per-file filter for unpack_directory: False skips, a path copies.
            if dst in exclude:
                log.warn("Skipping installation of %s (namespace package)", dst)
                return False

            log.info("copying %s -> %s", src, os.path.dirname(dst))
            outfiles.append(dst)
            return dst

        unpack_directory(infile, outfile, pf)
        return outfiles

    def get_outputs(self):
        # Report installed files, minus any namespace-package exclusions.
        outputs = orig.install_lib.get_outputs(self)
        exclude = self.get_exclusions()
        if exclude:
            return [f for f in outputs if f not in exclude]
        return outputs
|
falcon/lib/python3.10/site-packages/setuptools/command/install_scripts.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
|
| 6 |
+
from .._path import ensure_directory
|
| 7 |
+
from ..dist import Distribution
|
| 8 |
+
|
| 9 |
+
import distutils.command.install_scripts as orig
|
| 10 |
+
from distutils import log
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class install_scripts(orig.install_scripts):
    """Do normal script install, plus any egg_info wrapper scripts"""

    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution

    def initialize_options(self) -> None:
        orig.install_scripts.initialize_options(self)
        # When True, entry-point wrapper scripts are not generated.
        self.no_ep = False

    def run(self) -> None:
        self.run_command("egg_info")
        if self.distribution.scripts:
            orig.install_scripts.run(self)  # run first to set up self.outfiles
        else:
            self.outfiles: list[str] = []
        if self.no_ep:
            # don't install entry point scripts into .egg file!
            return
        self._install_ep_scripts()

    def _install_ep_scripts(self):
        # Delay import side-effects
        from pkg_resources import Distribution, PathMetadata

        from . import easy_install as ei

        # Build a pkg_resources Distribution describing this project so the
        # script writer can read its entry points.
        ei_cmd = self.get_finalized_command("egg_info")
        dist = Distribution(
            ei_cmd.egg_base,
            PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
            ei_cmd.egg_name,
            ei_cmd.egg_version,
        )
        bs_cmd = self.get_finalized_command('build_scripts')
        exec_param = getattr(bs_cmd, 'executable', None)
        writer = ei.ScriptWriter
        if exec_param == sys.executable:
            # In case the path to the Python executable contains a space, wrap
            # it so it's not split up.
            exec_param = [exec_param]
        # resolve the writer to the environment
        writer = writer.best()
        cmd = writer.command_spec_class.best().from_param(exec_param)
        for args in writer.get_args(dist, cmd.as_header()):
            self.write_script(*args)

    def write_script(self, script_name, contents, mode: str = "t", *ignored) -> None:
        """Write an executable file to the scripts directory"""
        from setuptools.command.easy_install import chmod, current_umask

        log.info("Installing %s script to %s", script_name, self.install_dir)
        target = os.path.join(self.install_dir, script_name)
        self.outfiles.append(target)

        # Text mode writes UTF-8; binary mode ("b" in mode) writes raw bytes.
        encoding = None if "b" in mode else "utf-8"
        mask = current_umask()
        if not self.dry_run:
            ensure_directory(target)
            with open(target, "w" + mode, encoding=encoding) as f:
                f.write(contents)
            # Make the script executable, honoring the process umask.
            chmod(target, 0o777 - mask)
|
falcon/lib/python3.10/site-packages/setuptools/command/launcher manifest.xml
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
| 2 |
+
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
|
| 3 |
+
<assemblyIdentity version="1.0.0.0"
|
| 4 |
+
processorArchitecture="X86"
|
| 5 |
+
name="%(name)s"
|
| 6 |
+
type="win32"/>
|
| 7 |
+
<!-- Identify the application security requirements. -->
|
| 8 |
+
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
|
| 9 |
+
<security>
|
| 10 |
+
<requestedPrivileges>
|
| 11 |
+
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
|
| 12 |
+
</requestedPrivileges>
|
| 13 |
+
</security>
|
| 14 |
+
</trustInfo>
|
| 15 |
+
</assembly>
|